diff --git a/.github/workflows/devnet-deploy.yml b/.github/workflows/devnet-deploy.yml
new file mode 100644
index 00000000000..f49feb403a3
--- /dev/null
+++ b/.github/workflows/devnet-deploy.yml
@@ -0,0 +1,136 @@
+name: Deploy devnet
+
+on:
+  workflow_dispatch:
+    inputs:
+      namespace:
+        description: The namespace to deploy to, e.g. smoke
+        required: true
+      aztec_docker_image:
+        description: The Aztec Docker image to use
+        required: true
+      deployment_mnemonic_secret_name:
+        description: The name of the secret which holds the boot node's contract deployment mnemonic
+        required: true
+        default: testnet-deployment-mnemonic
+      respect_tf_lock:
+        description: Whether to respect the Terraform lock
+        required: false
+        default: "true"
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}
+  cancel-in-progress: false
+
+env:
+  AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+  AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+  CONTRACT_S3_BUCKET: s3://static.aztec.network
+  CLUSTER_NAME: aztec-gke
+  REGION: us-west1-a
+  NAMESPACE: ${{ inputs.namespace }}
+  AZTEC_DOCKER_IMAGE: ${{ inputs.aztec_docker_image }}
+
+jobs:
+  deploy-network:
+    uses: ./.github/workflows/network-deploy.yml
+    with:
+      namespace: ${{ github.event.inputs.namespace }}
+      values_file: release-devnet
+      aztec_docker_image: ${{ github.event.inputs.aztec_docker_image }}
+      deployment_mnemonic_secret_name: ${{ github.event.inputs.deployment_mnemonic_secret_name }}
+      respect_tf_lock: ${{ github.event.inputs.respect_tf_lock }}
+    secrets:
+      GCP_SA_KEY: ${{ secrets.GCP_SA_KEY }}
+
+  bootstrap-network:
+    runs-on: ubuntu-latest
+    needs: deploy-network
+    steps:
+      # The repo must be checked out before the local composite action can be used
+      - uses: actions/checkout@v4
+      - uses: ./.github/ci-setup-action
+
+      - name: Configure AWS credentials
+        uses: aws-actions/configure-aws-credentials@v1
+        with:
+          aws-access-key-id: ${{ env.AWS_ACCESS_KEY_ID }}
+          aws-secret-access-key: ${{ env.AWS_SECRET_ACCESS_KEY }}
+          aws-region: eu-west-2
+
+      - name: Authenticate to Google Cloud
+        uses: google-github-actions/auth@v2
+        with:
+          credentials_json: ${{ secrets.GCP_SA_KEY }}
+
+      - name: Set up Cloud SDK
+        uses: google-github-actions/setup-gcloud@v2
+
+      - name: Setup kubectl access
+        run: |
+          gcloud components install kubectl gke-gcloud-auth-plugin --quiet
+          gcloud container clusters get-credentials ${{ env.CLUSTER_NAME }} --region ${{ env.REGION }}
+
+      - name: Setup helm
+        run: |
+          curl -fsSL -o get_helm.sh https://raw.githubusercontent.com/helm/helm/main/scripts/get-helm-3
+          chmod +x get_helm.sh
+          sudo ./get_helm.sh
+          rm get_helm.sh
+
+      - name: Bootstrap network
+        run: |
+          set -eu -o pipefail
+
+          pxe_port_forward_pid=""
+          ethereum_port_forward_pid=""
+
+          cleanup() {
+            echo "Cleaning up port-forward processes..."
+            if [ -n "$pxe_port_forward_pid" ]; then
+              kill $pxe_port_forward_pid 2>/dev/null || true
+            fi
+            if [ -n "$ethereum_port_forward_pid" ]; then
+              kill $ethereum_port_forward_pid 2>/dev/null || true
+            fi
+          }
+
+          trap cleanup EXIT
+
+          echo "Waiting for PXE pods to be ready..."
+          if ! kubectl wait --for=condition=ready pod -l app=$NAMESPACE-aztec-network-pxe --timeout=10m; then
+            echo "Error: PXE pods did not become ready within timeout"
+            exit 1
+          fi
+
+          helm get values $NAMESPACE -n $NAMESPACE -o json --all > helm_values.json
+
+          PXE_PORT="$(jq -r .pxe.service.nodePort helm_values.json)"
+          ETHEREUM_PORT="$(jq -r .ethereum.service.port helm_values.json)"
+          L1_CHAIN_ID="$(jq -r .ethereum.chainId helm_values.json)"
+
+          MNEMONIC="$(jq -r .aztec.l1DeploymentMnemonic helm_values.json)"
+          echo "::add-mask::$MNEMONIC"
+
+          rm helm_values.json
+
+          kubectl port-forward -n $NAMESPACE svc/$NAMESPACE-aztec-network-pxe $PXE_PORT &
+          pxe_port_forward_pid=$!
+          kubectl port-forward -n $NAMESPACE svc/$NAMESPACE-aztec-network-ethereum $ETHEREUM_PORT &
+          ethereum_port_forward_pid=$!
+
+          # wait for port-forwards to establish
+          sleep 5
+
+          # --network host is a docker flag, so it must precede the image name;
+          # it lets the container reach the port-forwards bound to 127.0.0.1
+          docker run --rm --network host \
+            $AZTEC_DOCKER_IMAGE bootstrap-network \
+            --rpc-url http://127.0.0.1:$PXE_PORT \
+            --l1-rpc-url http://127.0.0.1:$ETHEREUM_PORT \
+            --l1-chain-id $L1_CHAIN_ID \
+            --mnemonic "$MNEMONIC" \
+            --json | tee ./basic_contracts.json
+
+          aws s3 cp ./basic_contracts.json ${{ env.CONTRACT_S3_BUCKET }}/devnet/basic_contracts.json
+
diff --git a/.github/workflows/devnet-deploys.yml b/.github/workflows/devnet-deploys.yml
deleted file mode 100644
index 1561702cb01..00000000000
--- a/.github/workflows/devnet-deploys.yml
+++ /dev/null
@@ -1,787 +0,0 @@
-name: Deploy to network
-on:
-  # push:
-  #   branches: [devnet, provernet, alphanet]
-  workflow_dispatch:
-    inputs:
-      no_rebuild_images:
-        description: "Don't rebuild images"
-        required: false
-        type: boolean
-      no_deploy:
-        description: "Skip deployment (only release images)"
-        required: false
-        type: boolean
-
-concurrency:
-  group: ${{ github.workflow }}-${{ github.ref }}
-  cancel-in-progress: true
-
-# We only reference secret variables here where we put them into the environment so as to not create confusion
-
-# Anvil Accounts. Anvil provides 10 pre-funded accounts for the mnemonic we have specified in FORK_MNEMONIC. We are using:
-# 1. The first account (index 0) is used in SEQ_1_PUBLISHER_PRIVATE_KEY
-# 2. The 3rd account (index 2) is used in PROVER_1_PUBLISHER_PRIVATE_KEY
-# 3. The 9th account (index 8) is used in this workflow for deploying contracts etc
-# 4. 
The 10th account (index 9) is used by the deployed faucet -# TODO: Convert all this so we take the provided mnemonic and derive the keys from the above indices -env: - DOCKERHUB_PASSWORD: ${{ secrets.DOCKERHUB_PASSWORD }} - GIT_COMMIT: ${{ github.sha }} - DEPLOY_TAG: none - L1_CHAIN_ID: 677692 - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - API_KEY: ${{ secrets.DEVNET_API_KEY }} - FORK_ADMIN_API_KEY: ${{ secrets.DEVNET_API_KEY }} - FORK_MNEMONIC: ${{ secrets.FORK_MNEMONIC }} - CONTRACT_PUBLISHER_PRIVATE_KEY: ${{ secrets.CONTRACT_PUBLISHER_PRIVATE_KEY }} - CONTRACT_S3_BUCKET: s3://static.aztec.network - - # TF Vars - TF_VAR_DOCKERHUB_ACCOUNT: aztecprotocol - TF_VAR_L1_CHAIN_ID: 677692 - TF_VAR_DEPLOY_TAG: none - TF_VAR_IMAGE_TAG: ${{ github.sha }} - TF_VAR_API_KEY: ${{ secrets.DEVNET_API_KEY }} - - # Node / Sequencer - TF_VAR_BOOTSTRAP_NODES: "" - TF_VAR_P2P_ENABLED: "false" - TF_VAR_SEQUENCER_PRIVATE_KEYS: '["${{ secrets.SEQ_1_PUBLISHER_PRIVATE_KEY }}"]' - TF_VAR_NODE_P2P_PRIVATE_KEYS: '[""]' - TF_VAR_SEQ_MAX_SECONDS_BETWEEN_BLOCKS: 0 # disable auto block building - TF_VAR_SEQ_MIN_SECONDS_BETWEEN_BLOCKS: 0 # disable auto block building - TF_VAR_SEQ_MIN_TX_PER_BLOCK: 1 - TF_VAR_SEQ_MAX_TX_PER_BLOCK: 64 - TF_VAR_NODE_P2P_TCP_PORT: 40000 - TF_VAR_NODE_P2P_UDP_PORT: 45000 - TF_VAR_NODE_LB_RULE_PRIORITY: 500 - - # Prover Node - TF_VAR_PROVER_NODE_LB_RULE_PRIORITY: 6000 - TF_VAR_PROVER_PRIVATE_KEYS: '["${{ secrets.PROVER_1_PUBLISHER_PRIVATE_KEY }}"]' - - # Anvil - TF_VAR_FORK_MNEMONIC: ${{ secrets.FORK_MNEMONIC }} - TF_VAR_INFURA_API_KEY: ${{ secrets.INFURA_API_KEY }} - TF_VAR_FORK_ADMIN_API_KEY: ${{ secrets.DEVNET_API_KEY }} - TF_VAR_MAINNET_FORK_CPU_UNITS: 2048 - TF_VAR_MAINNET_FORK_MEMORY_UNITS: 4096 - - # Faucet - TF_VAR_FAUCET_ACCOUNT_INDEX: 9 - TF_VAR_FAUCET_LB_RULE_PRIORITY: 600 - - # Prover - TF_VAR_AGENTS_PER_PROVER: 1 - TF_VAR_PROVING_ENABLED: false - - # Transaction Bot - TF_VAR_BOT_API_KEY: ${{ secrets.BOT_API_KEY }} - TF_VAR_BOT_PRIVATE_KEY: "" - TF_VAR_BOT_NO_START: true - TF_VAR_BOT_PRIVATE_TRANSFERS_PER_TX: 0 # no private transfers - TF_VAR_BOT_PUBLIC_TRANSFERS_PER_TX: 1 - TF_VAR_BOT_TX_MINED_WAIT_SECONDS: 4800 - TF_VAR_BOT_FOLLOW_CHAIN: "PROVEN" - TF_VAR_BOT_TX_INTERVAL_SECONDS: 180 - TF_VAR_BOT_COUNT: 1 - TF_VAR_BOT_FLUSH_SETUP_TRANSACTIONS: false - TF_VAR_BOT_MAX_PENDING_TXS: 1 - - # PXE - TF_VAR_PXE_LB_RULE_PRIORITY: 4000 - -jobs: - setup: - uses: ./.github/workflows/setup-runner.yml - with: - username: ${{ github.actor }} - runner_type: builder-x86 - secrets: inherit - - # Set network specific variables as outputs from this job to be referenced in later jobs - set-network: - needs: setup - runs-on: ${{ github.actor }}-x86 - outputs: - deploy_tag: ${{ steps.set_network_vars.outputs.deploy_tag }} - branch_name: ${{ steps.set_network_vars.outputs.branch_name }} - network_api_key: ${{ steps.set_network_vars.outputs.network_api_key }} - network_fork_admin_api_key: ${{ steps.set_network_vars.outputs.network_fork_admin_api_key }} - agents_per_prover: ${{ steps.set_network_vars.outputs.agents_per_prover }} - bot_interval: ${{ steps.set_network_vars.outputs.bot_interval }} - node_tcp_range_start: ${{ steps.set_network_vars.outputs.node_tcp_range_start }} - node_udp_range_start: ${{ steps.set_network_vars.outputs.node_udp_range_start }} - prover_node_tcp_range_start: ${{ steps.set_network_vars.outputs.prover_node_tcp_range_start }} - prover_node_udp_range_start: ${{ 
steps.set_network_vars.outputs.prover_node_udp_range_start }} - node_lb_priority_range_start: ${{ steps.set_network_vars.outputs.node_lb_priority_range_start }} - pxe_lb_priority_range_start: ${{ steps.set_network_vars.outputs.pxe_lb_priority_range_start }} - prover_node_lb_priority_range_start: ${{ steps.set_network_vars.outputs.prover_node_lb_priority_range_start }} - faucet_lb_priority: ${{ steps.set_network_vars.outputs.faucet_lb_priority }} - max_txs_per_block: ${{ steps.set_network_vars.outputs.max_txs_per_block }} - bot_follow_chain: ${{ steps.set_network_vars.outputs.bot_follow_chain }} - min_txs_per_block: ${{ steps.set_network_vars.outputs.min_txs_per_block }} - bot_flush_setup_txs: ${{ steps.set_network_vars.outputs.bot_flush_setup_txs }} - bot_max_pending_txs: ${{ steps.set_network_vars.outputs.bot_max_pending_txs }} - mainnet_fork_cpu_units: ${{ steps.set_network_vars.outputs.mainnet_fork_cpu_units }} - mainnet_fork_memory_units: ${{ steps.set_network_vars.outputs.mainnet_fork_memory_units }} - bot_skip_simulation: ${{ steps.set_network_vars.outputs.bot_skip_simulation }} - bot_l2_gas_limit: ${{ steps.set_network_vars.outputs.bot_l2_gas_limit }} - bot_da_gas_limit: ${{ steps.set_network_vars.outputs.bot_da_gas_limit }} - bot_count: ${{ steps.set_network_vars.outputs.bot_count }} - steps: - - name: Set network vars - shell: bash - run: | - env - export BRANCH_NAME=${GITHUB_HEAD_REF:-${GITHUB_REF#refs/heads/}} - if [ "$BRANCH_NAME" = "devnet" ] - then - echo "deploy_tag=devnet" >> $GITHUB_OUTPUT - echo "branch_name=devnet" >> $GITHUB_OUTPUT - echo "network_api_key=DEVNET_API_KEY" >> $GITHUB_OUTPUT - echo "network_fork_admin_api_key=DEVNET_API_KEY" >> $GITHUB_OUTPUT - echo "agents_per_prover=2" >> $GITHUB_OUTPUT - echo "bot_interval=180" >> $GITHUB_OUTPUT - echo "node_tcp_range_start=40100" >> $GITHUB_OUTPUT - echo "node_udp_range_start=45100" >> $GITHUB_OUTPUT - echo "prover_node_tcp_range_start=41100" >> $GITHUB_OUTPUT - echo "prover_node_udp_range_start=46100" >> $GITHUB_OUTPUT - echo "node_lb_priority_range_start=4100" >> $GITHUB_OUTPUT - echo "pxe_lb_priority_range_start=5100" >> $GITHUB_OUTPUT - echo "prover_node_lb_priority_range_start=6100" >> $GITHUB_OUTPUT - echo "faucet_lb_priority=601" >> $GITHUB_OUTPUT - echo "min_txs_per_block=1" >> $GITHUB_OUTPUT - echo "max_txs_per_block=64" >> $GITHUB_OUTPUT - echo "bot_follow_chain=PROVEN" >> $GITHUB_OUTPUT - echo "bot_flush_setup_txs=false" >> $GITHUB_OUTPUT - echo "bot_max_pending_txs=1" >> $GITHUB_OUTPUT - echo "mainnet_fork_cpu_units=2048" >> $GITHUB_OUTPUT - echo "mainnet_fork_memory_units=4096" >> $GITHUB_OUTPUT - echo "bot_skip_simulation=false" >> $GITHUB_OUTPUT - echo "bot_l2_gas_limit=" >> $GITHUB_OUTPUT - echo "bot_da_gas_limit=" >> $GITHUB_OUTPUT - echo "bot_count=1" >> $GITHUB_OUTPUT - elif [ "$BRANCH_NAME" = "provernet" ] - then - echo "deploy_tag=provernet" >> $GITHUB_OUTPUT - echo "branch_name=provernet" >> $GITHUB_OUTPUT - echo "network_api_key=PROVERNET_API_KEY" >> $GITHUB_OUTPUT - echo "network_fork_admin_api_key=PROVERNET_FORK_ADMIN_API_KEY" >> $GITHUB_OUTPUT - echo "agents_per_prover=8" >> $GITHUB_OUTPUT - echo "bot_interval=10" >> $GITHUB_OUTPUT - echo "node_tcp_range_start=40200" >> $GITHUB_OUTPUT - echo "node_udp_range_start=45200" >> $GITHUB_OUTPUT - echo "prover_node_tcp_range_start=41200" >> $GITHUB_OUTPUT - echo "prover_node_udp_range_start=46200" >> $GITHUB_OUTPUT - echo "node_lb_priority_range_start=4200" >> $GITHUB_OUTPUT - echo "pxe_lb_priority_range_start=5200" >> $GITHUB_OUTPUT - echo 
"prover_node_lb_priority_range_start=6200" >> $GITHUB_OUTPUT - echo "faucet_lb_priority=602" >> $GITHUB_OUTPUT - echo "min_txs_per_block=4" >> $GITHUB_OUTPUT - echo "max_txs_per_block=4" >> $GITHUB_OUTPUT - echo "bot_follow_chain=NONE" >> $GITHUB_OUTPUT - echo "bot_flush_setup_txs=true" >> $GITHUB_OUTPUT - echo "bot_max_pending_txs=32" >> $GITHUB_OUTPUT - echo "mainnet_fork_cpu_units=8192" >> $GITHUB_OUTPUT - echo "mainnet_fork_memory_units=32768" >> $GITHUB_OUTPUT - echo "bot_skip_simulation=true" >> $GITHUB_OUTPUT - echo "bot_l2_gas_limit=1000000000" >> $GITHUB_OUTPUT - echo "bot_da_gas_limit=1000000000" >> $GITHUB_OUTPUT - echo "bot_count=1" >> $GITHUB_OUTPUT - elif [ "$BRANCH_NAME" = "alphanet" ] - then - echo "deploy_tag=alphanet" >> $GITHUB_OUTPUT - echo "branch_name=alphanet" >> $GITHUB_OUTPUT - echo "network_api_key=ALPHANET_API_KEY" >> $GITHUB_OUTPUT - echo "network_fork_admin_api_key=ALPHANET_API_KEY" >> $GITHUB_OUTPUT - echo "agents_per_prover=1" >> $GITHUB_OUTPUT - echo "bot_interval=10" >> $GITHUB_OUTPUT - echo "node_tcp_range_start=40000" >> $GITHUB_OUTPUT - echo "node_udp_range_start=45000" >> $GITHUB_OUTPUT - echo "prover_node_tcp_range_start=41000" >> $GITHUB_OUTPUT - echo "prover_node_udp_range_start=46000" >> $GITHUB_OUTPUT - echo "node_lb_priority_range_start=4000" >> $GITHUB_OUTPUT - echo "pxe_lb_priority_range_start=5000" >> $GITHUB_OUTPUT - echo "prover_node_lb_priority_range_start=6000" >> $GITHUB_OUTPUT - echo "faucet_lb_priority=600" >> $GITHUB_OUTPUT - echo "min_txs_per_block=1" >> $GITHUB_OUTPUT - echo "max_txs_per_block=64" >> $GITHUB_OUTPUT - echo "bot_follow_chain=PROVEN" >> $GITHUB_OUTPUT - echo "bot_flush_setup_txs=false" >> $GITHUB_OUTPUT - echo "bot_max_pending_txs=1" >> $GITHUB_OUTPUT - echo "mainnet_fork_cpu_units=2048" >> $GITHUB_OUTPUT - echo "mainnet_fork_memory_units=4096" >> $GITHUB_OUTPUT - echo "bot_skip_simulation=false" >> $GITHUB_OUTPUT - echo "bot_l2_gas_limit=" >> $GITHUB_OUTPUT - echo "bot_da_gas_limit=" >> $GITHUB_OUTPUT - echo "bot_count=1" >> $GITHUB_OUTPUT - else - echo "Unrecognized Branch!!" 
- exit 1 - fi - id: set_network_vars - - build-mainnet-fork: - needs: set-network - env: - BRANCH_NAME: ${{ needs.set-network.outputs.branch_name }} - DEPLOY_TAG: ${{ needs.set-network.outputs.deploy_tag }} - TF_VAR_DEPLOY_TAG: ${{ needs.set-network.outputs.deploy_tag }} - API_KEY: ${{ secrets[needs.set-network.outputs.network_api_key] }} - TF_VAR_API_KEY: ${{ secrets[needs.set-network.outputs.network_api_key] }} - FORK_ADMIN_API_KEY: ${{ secrets[needs.set-network.outputs.network_fork_admin_api_key] }} - TF_VAR_FORK_ADMIN_API_KEY: ${{ secrets[needs.set-network.outputs.network_fork_admin_api_key] }} - API_KEY_NAME: ${{ needs.set-network.outputs.network_api_key }} - runs-on: ${{ github.actor }}-x86 - steps: - - uses: actions/checkout@v4 - with: - ref: "${{ env.GIT_COMMIT }}" - fetch-depth: 0 - - uses: ./.github/ci-setup-action - with: - concurrency_key: build-mainnet-fork-${{ github.actor }} - dockerhub_password: "${{ env.DOCKERHUB_PASSWORD }}" - - - name: Build & push mainnet fork image - if: ${{ github.event.inputs.no_rebuild_images == 'false' }} - run: | - earthly-ci --no-output --push ./iac/mainnet-fork+export-mainnet-fork --DIST_TAG=${{ env.DEPLOY_TAG }} - earthly-ci --no-output --push ./iac/mainnet-fork+export-mainnet-fork --DIST_TAG=${{ github.sha }} - - build-aztec: - needs: set-network - env: - BRANCH_NAME: ${{ needs.set-network.outputs.branch_name }} - DEPLOY_TAG: ${{ needs.set-network.outputs.deploy_tag }} - TF_VAR_DEPLOY_TAG: ${{ needs.set-network.outputs.deploy_tag }} - API_KEY: ${{ secrets[needs.set-network.outputs.network_api_key] }} - TF_VAR_API_KEY: ${{ secrets[needs.set-network.outputs.network_api_key] }} - API_KEY_NAME: ${{ needs.set-network.outputs.network_api_key }} - runs-on: ${{ github.actor }}-x86 - steps: - - uses: actions/checkout@v4 - with: - ref: "${{ env.GIT_COMMIT }}" - fetch-depth: 0 - - uses: ./.github/ci-setup-action - with: - concurrency_key: build-aztec-${{ github.actor }} - dockerhub_password: "${{ env.DOCKERHUB_PASSWORD }}" - - name: "Build & Push aztec images" - timeout-minutes: 40 - if: ${{ github.event.inputs.no_rebuild_images == 'false' }} - run: | - env - earthly-ci --no-output --push ./yarn-project+export-aztec-arch --DIST_TAG=${{ github.sha }} --ARCH=x86_64 - earthly-ci --no-output --push ./yarn-project+export-aztec-arch --DIST_TAG=${{ env.DEPLOY_TAG }} --ARCH=x86_64 - - - name: "Re-tag Aztec image" - if: ${{ github.event.inputs.no_rebuild_images == 'true' }} - run: | - env - docker pull aztecprotocol/aztec:${{ env.DEPLOY_TAG }}-x86_64 - docker tag aztecprotocol/aztec:${{ env.DEPLOY_TAG }}-x86_64 aztecprotocol/aztec:${{ github.sha }}-x86_64 - docker push aztecprotocol/aztec:${{ github.sha }}-x86_64 - - build-aztec-nargo: - needs: [set-network, build-aztec] - env: - BRANCH_NAME: ${{ needs.set-network.outputs.branch_name }} - DEPLOY_TAG: ${{ needs.set-network.outputs.deploy_tag }} - TF_VAR_DEPLOY_TAG: ${{ needs.set-network.outputs.deploy_tag }} - API_KEY: ${{ secrets[needs.set-network.outputs.network_api_key] }} - TF_VAR_API_KEY: ${{ secrets[needs.set-network.outputs.network_api_key] }} - API_KEY_NAME: ${{ needs.set-network.outputs.network_api_key }} - runs-on: ${{ github.actor }}-x86 - steps: - - uses: actions/checkout@v4 - with: - ref: "${{ env.GIT_COMMIT }}" - fetch-depth: 0 - - uses: ./.github/ci-setup-action - with: - concurrency_key: build-aztec-nargo-${{ github.actor }} - dockerhub_password: "${{ env.DOCKERHUB_PASSWORD }}" - - - name: Build & push aztec nargo image - if: ${{ github.event.inputs.no_rebuild_images == 'false' }} - run: | - 
earthly-ci --no-output --push ./aztec-nargo+export-aztec-nargo --DIST_TAG=${{ github.sha }} --ARCH=x86_64 - earthly-ci --no-output --push ./aztec-nargo+export-aztec-nargo --DIST_TAG=${{ env.DEPLOY_TAG }} --ARCH=x86_64 - - publish-aztec-manifests: - needs: [set-network, build-aztec, build-aztec-nargo] - env: - DEPLOY_TAG: ${{ needs.set-network.outputs.deploy_tag }} - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - with: - ref: "${{ env.GIT_COMMIT }}" - - uses: ./.github/ci-setup-action - with: - dockerhub_password: "${{ env.DOCKERHUB_PASSWORD }}" - - - name: "Publish aztec manifests" - run: | - env - docker pull aztecprotocol/aztec:${{ github.sha }}-x86_64 - - docker manifest create aztecprotocol/aztec:${{ env.DEPLOY_TAG }} \ - aztecprotocol/aztec:${{ github.sha }}-x86_64 - docker manifest create aztecprotocol/aztec:${{ github.sha }} \ - aztecprotocol/aztec:${{ github.sha }}-x86_64 - - docker manifest push aztecprotocol/aztec:${{ env.DEPLOY_TAG }} - docker manifest push aztecprotocol/aztec:${{ github.sha }} - - - name: "Publish aztec-nargo manifests" - if: ${{ github.event.inputs.no_rebuild_images == 'false' }} - run: | - docker pull aztecprotocol/aztec-nargo:${{ github.sha }}-x86_64 - - docker manifest create aztecprotocol/aztec-nargo:${{ env.DEPLOY_TAG }} \ - aztecprotocol/aztec-nargo:${{ github.sha }}-x86_64 - docker manifest create aztecprotocol/aztec-nargo:${{ github.sha }} \ - aztecprotocol/aztec-nargo:${{ github.sha }}-x86_64 - - docker manifest push aztecprotocol/aztec-nargo:${{ env.DEPLOY_TAG }} - docker manifest push aztecprotocol/aztec-nargo:${{ github.sha }} - - build-faucet: - needs: [set-network, build-aztec] - env: - BRANCH_NAME: ${{ needs.set-network.outputs.branch_name }} - DEPLOY_TAG: ${{ needs.set-network.outputs.deploy_tag }} - TF_VAR_DEPLOY_TAG: ${{ needs.set-network.outputs.deploy_tag }} - API_KEY: ${{ secrets[needs.set-network.outputs.network_api_key] }} - TF_VAR_API_KEY: ${{ secrets[needs.set-network.outputs.network_api_key] }} - FORK_ADMIN_API_KEY: ${{ secrets[needs.set-network.outputs.network_fork_admin_api_key] }} - TF_VAR_FORK_ADMIN_API_KEY: ${{ secrets[needs.set-network.outputs.network_fork_admin_api_key] }} - API_KEY_NAME: ${{ needs.set-network.outputs.network_api_key }} - runs-on: ${{ github.actor }}-x86 - steps: - - uses: actions/checkout@v4 - with: - ref: "${{ env.GIT_COMMIT }}" - fetch-depth: 0 - - uses: ./.github/ci-setup-action - with: - concurrency_key: build-faucet-${{ github.actor }} - dockerhub_password: "${{ env.DOCKERHUB_PASSWORD }}" - - name: "Build & Push aztec images" - timeout-minutes: 40 - if: ${{ github.event.inputs.no_rebuild_images == 'false' }} - run: | - env - earthly-ci --no-output --push ./yarn-project+export-aztec-faucet --DIST_TAG=${{ env.DEPLOY_TAG }} - earthly-ci --no-output --push ./yarn-project+export-aztec-faucet --DIST_TAG=${{ github.sha }} - - - name: "Re-tag Aztec image" - if: ${{ github.event.inputs.no_rebuild_images == 'true' }} - run: | - env - docker pull aztecprotocol/aztec-faucet:${{ env.DEPLOY_TAG }} - docker tag aztecprotocol/aztec-faucet:${{ env.DEPLOY_TAG }} aztecprotocol/aztec-faucet:${{ github.sha }} - docker push aztecprotocol/aztec-faucet:${{ github.sha }} - - build-cli-wallet: - needs: [set-network, build-aztec] - env: - BRANCH_NAME: ${{ needs.set-network.outputs.branch_name }} - DEPLOY_TAG: ${{ needs.set-network.outputs.deploy_tag }} - TF_VAR_DEPLOY_TAG: ${{ needs.set-network.outputs.deploy_tag }} - API_KEY: ${{ secrets[needs.set-network.outputs.network_api_key] }} - TF_VAR_API_KEY: ${{ 
secrets[needs.set-network.outputs.network_api_key] }} - API_KEY_NAME: ${{ needs.set-network.outputs.network_api_key }} - runs-on: ${{ github.actor }}-x86 - steps: - - uses: actions/checkout@v4 - with: - ref: "${{ env.GIT_COMMIT }}" - fetch-depth: 0 - - uses: ./.github/ci-setup-action - with: - concurrency_key: build-cli-wallet-${{ github.actor }} - dockerhub_password: "${{ env.DOCKERHUB_PASSWORD }}" - - name: "Build & Push aztec images" - timeout-minutes: 40 - if: ${{ github.event.inputs.no_rebuild_images == 'false' }} - run: | - env - earthly-ci --no-output --push ./yarn-project+export-cli-wallet --DIST_TAG=${{ env.DEPLOY_TAG }} --ARCH=x86_64 - earthly-ci --no-output --push ./yarn-project+export-cli-wallet --DIST_TAG=${{ github.sha }} --ARCH=x86_64 - - publish-cli-wallet-manifest: - needs: [set-network, build-cli-wallet] - env: - DEPLOY_TAG: ${{ needs.set-network.outputs.deploy_tag }} - runs-on: ${{ github.actor }}-x86 - steps: - - uses: actions/checkout@v4 - with: - ref: "${{ env.GIT_COMMIT }}" - fetch-depth: 0 - - uses: ./.github/ci-setup-action - with: - dockerhub_password: "${{ env.DOCKERHUB_PASSWORD }}" - - - name: "Publish aztec CLI wallet manifests" - if: ${{ github.event.inputs.no_rebuild_images == 'false' }} - run: | - env - docker pull aztecprotocol/cli-wallet:${{ github.sha }}-x86_64 - - docker manifest create aztecprotocol/cli-wallet:${{ env.DEPLOY_TAG }} \ - aztecprotocol/cli-wallet:${{ github.sha }}-x86_64 - docker manifest create aztecprotocol/cli-wallet:${{ github.sha }} \ - aztecprotocol/cli-wallet:${{ github.sha }}-x86_64 - - docker manifest push aztecprotocol/cli-wallet:${{ env.DEPLOY_TAG }} - docker manifest push aztecprotocol/cli-wallet:${{ github.sha }} - - build-end: - runs-on: ubuntu-latest - needs: - [ - set-network, - build-faucet, - build-mainnet-fork, - publish-aztec-manifests, - publish-cli-wallet-manifest, - ] - env: - BRANCH_NAME: ${{ needs.set-network.outputs.branch_name }} - DEPLOY_TAG: ${{ needs.set-network.outputs.deploy_tag }} - TF_VAR_DEPLOY_TAG: ${{ needs.set-network.outputs.deploy_tag }} - API_KEY: ${{ secrets[needs.set-network.outputs.network_api_key] }} - TF_VAR_API_KEY: ${{ secrets[needs.set-network.outputs.network_api_key] }} - steps: - - uses: actions/checkout@v4 - with: - ref: "${{ env.GIT_COMMIT }}" - - uses: ./.github/ci-setup-action - - uses: hashicorp/setup-terraform@v3 - with: - terraform_version: 1.7.5 - - terraform-deploy: - if: ${{ github.event.inputs.no_deploy == 'false' }} - runs-on: ubuntu-latest - needs: [build-end, set-network] - env: - BRANCH_NAME: ${{ needs.set-network.outputs.branch_name }} - DEPLOY_TAG: ${{ needs.set-network.outputs.deploy_tag }} - TF_VAR_DEPLOY_TAG: ${{ needs.set-network.outputs.deploy_tag }} - API_KEY: ${{ secrets[needs.set-network.outputs.network_api_key] }} - TF_VAR_API_KEY: ${{ secrets[needs.set-network.outputs.network_api_key] }} - FORK_ADMIN_API_KEY: ${{ secrets[needs.set-network.outputs.network_fork_admin_api_key] }} - TF_VAR_FORK_ADMIN_API_KEY: ${{ secrets[needs.set-network.outputs.network_fork_admin_api_key] }} - TF_VAR_AGENTS_PER_PROVER: ${{ needs.set-network.outputs.agents_per_prover }} - TF_VAR_BOT_TX_INTERVAL_SECONDS: ${{ needs.set-network.outputs.bot_interval }} - TF_VAR_NODE_LB_RULE_PRIORITY: ${{ needs.set-network.outputs.node_lb_priority_range_start }} - TF_VAR_PXE_LB_RULE_PRIORITY: ${{ needs.set-network.outputs.pxe_lb_priority_range_start }} - TF_VAR_PROVER_NODE_LB_RULE_PRIORITY: ${{ needs.set-network.outputs.prover_node_lb_priority_range_start }} - TF_VAR_SEQ_MIN_TX_PER_BLOCK: 1 - 
TF_VAR_SEQ_MAX_TX_PER_BLOCK: ${{ needs.set-network.outputs.max_txs_per_block }} - TF_VAR_MAINNET_FORK_CPU_UNITS: ${{ needs.set-network.outputs.mainnet_fork_cpu_units }} - TF_VAR_MAINNET_FORK_MEMORY_UNITS: ${{ needs.set-network.outputs.mainnet_fork_memory_units }} - TF_VAR_BOT_SKIP_PUBLIC_SIMULATION: ${{ needs.set-network.outputs.bot_skip_simulation }} - TF_VAR_BOT_L2_GAS_LIMIT: ${{ needs.set-network.outputs.bot_l2_gas_limit }} - TF_VAR_BOT_DA_GAS_LIMIT: ${{ needs.set-network.outputs.bot_da_gas_limit }} - TF_VAR_BOT_COUNT: ${{ needs.set-network.outputs.bot_count }} - steps: - - uses: actions/checkout@v4 - with: - ref: "${{ env.GIT_COMMIT }}" - - uses: ./.github/ci-setup-action - - uses: hashicorp/setup-terraform@v3 - with: - terraform_version: 1.7.5 - - - name: Configure AWS credentials - uses: aws-actions/configure-aws-credentials@v1 - with: - aws-access-key-id: ${{ env.AWS_ACCESS_KEY_ID }} - aws-secret-access-key: ${{ env.AWS_SECRET_ACCESS_KEY }} - aws-region: eu-west-2 - - - name: Deploy mainnet fork - working-directory: ./iac/mainnet-fork/terraform - run: | - env - terraform init -input=false -backend-config="key=${{ env.DEPLOY_TAG }}/mainnet-fork" - terraform apply -input=false -auto-approve - - - name: Wait for mainnet fork deployment - run: | - ./.github/scripts/wait_for_infra.sh mainnet-fork ${{ env.DEPLOY_TAG }} ${{ env.API_KEY }} - - - name: Deploy L1 Contracts - run: | - set -e - set -o pipefail - - docker pull aztecprotocol/aztec:${{ env.DEPLOY_TAG }} - docker run aztecprotocol/aztec:${{ env.DEPLOY_TAG }} deploy-l1-contracts \ - --private-key ${{ env.CONTRACT_PUBLISHER_PRIVATE_KEY }} \ - --rpc-url https://${{ env.DEPLOY_TAG }}-mainnet-fork.aztec.network:8545/admin-${{ env.FORK_ADMIN_API_KEY }} \ - --l1-chain-id ${{ env.L1_CHAIN_ID }} \ - --salt ${{ github.run_id }} \ - --json | tee ./l1_contracts.json - - # upload contract addresses to S3 - aws s3 cp ./l1_contracts.json ${{ env.CONTRACT_S3_BUCKET }}/${{ env.DEPLOY_TAG }}/l1_contracts.json - - # export contract addresses so they can be used by subsequent terraform deployments - function extract() { - jq -r ".$1" ./l1_contracts.json - } - - echo "TF_VAR_ROLLUP_CONTRACT_ADDRESS=$(extract rollupAddress)" >>$GITHUB_ENV - echo "TF_VAR_REGISTRY_CONTRACT_ADDRESS=$(extract registryAddress)" >>$GITHUB_ENV - echo "TF_VAR_INBOX_CONTRACT_ADDRESS=$(extract inboxAddress)" >>$GITHUB_ENV - echo "TF_VAR_OUTBOX_CONTRACT_ADDRESS=$(extract outboxAddress)" >>$GITHUB_ENV - echo "TF_VAR_FEE_JUICE_CONTRACT_ADDRESS=$(extract feeJuiceAddress)" >>$GITHUB_ENV - echo "TF_VAR_FEE_JUICE_PORTAL_CONTRACT_ADDRESS=$(extract feeJuicePortalAddress)" >>$GITHUB_ENV - - - name: Apply l1-contracts Terraform - working-directory: ./l1-contracts/terraform - run: | - env - terraform init -input=false -backend-config="key=${{ env.DEPLOY_TAG }}/l1-contracts" - terraform apply -input=false -auto-approve - - - name: Disable transactions bot - working-directory: ./yarn-project/aztec/terraform/bot - run: | - terraform init -input=false -backend-config="key=${{ env.DEPLOY_TAG }}/bot" - terraform apply -input=false -auto-approve - - - name: Deploy Aztec Nodes - working-directory: ./yarn-project/aztec/terraform/node - run: | - terraform init -input=false -backend-config="key=${{ env.DEPLOY_TAG }}/aztec-node" - terraform apply -input=false -auto-approve -var="NODE_P2P_TCP_PORT=${{ needs.set-network.outputs.node_tcp_range_start }}" -var="NODE_P2P_UDP_PORT=${{ needs.set-network.outputs.node_udp_range_start }}" - - - name: Deploy Aztec Prover Nodes - working-directory: 
./yarn-project/aztec/terraform/prover-node - run: | - terraform init -input=false -backend-config="key=${{ env.DEPLOY_TAG }}/aztec-prover-node" - terraform apply -input=false -auto-approve -var="NODE_P2P_TCP_PORT=${{ needs.set-network.outputs.prover_node_tcp_range_start }}" -var="NODE_P2P_UDP_PORT=${{ needs.set-network.outputs.prover_node_udp_range_start }}" - - - name: Deploy Provers - working-directory: ./yarn-project/aztec/terraform/prover - run: | - terraform init -input=false -backend-config="key=${{ env.DEPLOY_TAG }}/prover" - terraform apply -input=false -auto-approve - - - name: Deploy PXE - working-directory: ./yarn-project/aztec/terraform/pxe - run: | - terraform init -input=false -backend-config="key=${{ env.DEPLOY_TAG }}/pxe" - terraform apply -input=false -auto-approve -replace="aws_efs_file_system.pxe_data_store" - - bootstrap: - if: ${{ github.event.inputs.no_deploy == 'false' }} - runs-on: ubuntu-latest - needs: [terraform-deploy, set-network] - env: - BRANCH_NAME: ${{ needs.set-network.outputs.branch_name }} - DEPLOY_TAG: ${{ needs.set-network.outputs.deploy_tag }} - TF_VAR_DEPLOY_TAG: ${{ needs.set-network.outputs.deploy_tag }} - API_KEY: ${{ secrets[needs.set-network.outputs.network_api_key] }} - TF_VAR_API_KEY: ${{ secrets[needs.set-network.outputs.network_api_key] }} - FORK_ADMIN_API_KEY: ${{ secrets[needs.set-network.outputs.network_fork_admin_api_key] }} - TF_VAR_FORK_ADMIN_API_KEY: ${{ secrets[needs.set-network.outputs.network_fork_admin_api_key] }} - steps: - - uses: actions/checkout@v4 - with: - ref: "${{ env.GIT_COMMIT }}" - - - uses: ./.github/ci-setup-action - - - name: Configure AWS credentials - uses: aws-actions/configure-aws-credentials@v1 - with: - aws-access-key-id: ${{ env.AWS_ACCESS_KEY_ID }} - aws-secret-access-key: ${{ env.AWS_SECRET_ACCESS_KEY }} - aws-region: eu-west-2 - - - name: Wait for PXE to be available - run: | - env - ./.github/scripts/wait_for_infra.sh pxe ${{ env.DEPLOY_TAG }} ${{ env.API_KEY }} - - - name: Setup protocol contracts - run: | - set -e - set -o pipefail - docker pull aztecprotocol/aztec:${{ env.DEPLOY_TAG }} - docker run aztecprotocol/aztec:${{ env.DEPLOY_TAG }} setup-protocol-contracts \ - --rpc-url https://api.aztec.network/${{ env.DEPLOY_TAG }}/aztec-pxe/${{ env.API_KEY }} \ - --l1-chain-id ${{ env.L1_CHAIN_ID }} \ - --json | tee ./protocol_contracts.json - - aws s3 cp ./protocol_contracts.json ${{ env.CONTRACT_S3_BUCKET }}/${{ env.DEPLOY_TAG }}/protocol_contracts.json - - - name: Bootstrap network - run: | - set -e - set -o pipefail - docker run aztecprotocol/aztec:${{ env.DEPLOY_TAG }} bootstrap-network \ - --rpc-url https://api.aztec.network/${{ env.DEPLOY_TAG }}/aztec-pxe/${{ env.API_KEY }} \ - --l1-rpc-url https://${{ env.DEPLOY_TAG }}-mainnet-fork.aztec.network:8545/admin-${{ env.FORK_ADMIN_API_KEY }} \ - --l1-chain-id ${{ env.L1_CHAIN_ID }} \ - --l1-private-key ${{ env.CONTRACT_PUBLISHER_PRIVATE_KEY }} \ - --json | tee ./basic_contracts.json - - aws s3 cp ./basic_contracts.json ${{ env.CONTRACT_S3_BUCKET }}/${{ env.DEPLOY_TAG }}/basic_contracts.json - - deploy-faucet: - if: ${{ github.event.inputs.no_deploy == 'false' }} - runs-on: ubuntu-latest - needs: [terraform-deploy, bootstrap, set-network] - env: - BRANCH_NAME: ${{ needs.set-network.outputs.branch_name }} - DEPLOY_TAG: ${{ needs.set-network.outputs.deploy_tag }} - TF_VAR_DEPLOY_TAG: ${{ needs.set-network.outputs.deploy_tag }} - API_KEY: ${{ secrets[needs.set-network.outputs.network_api_key] }} - TF_VAR_API_KEY: ${{ 
secrets[needs.set-network.outputs.network_api_key] }} - FORK_ADMIN_API_KEY: ${{ secrets[needs.set-network.outputs.network_fork_admin_api_key] }} - TF_VAR_FORK_ADMIN_API_KEY: ${{ secrets[needs.set-network.outputs.network_fork_admin_api_key] }} - TF_VAR_FAUCET_LB_RULE_PRIORITY: ${{ needs.set-network.outputs.faucet_lb_priority }} - steps: - - uses: actions/checkout@v4 - with: - ref: "${{ env.GIT_COMMIT }}" - - uses: ./.github/ci-setup-action - - uses: hashicorp/setup-terraform@v3 - with: - terraform_version: 1.7.5 - - - name: Configure AWS credentials - uses: aws-actions/configure-aws-credentials@v1 - with: - aws-access-key-id: ${{ env.AWS_ACCESS_KEY_ID }} - aws-secret-access-key: ${{ env.AWS_SECRET_ACCESS_KEY }} - aws-region: eu-west-2 - - - name: Retrieve contract addresses - run: | - set -e - aws s3 cp ${{ env.CONTRACT_S3_BUCKET }}/${{ env.DEPLOY_TAG }}/l1_contracts.json ./l1_contracts.json - aws s3 cp ${{ env.CONTRACT_S3_BUCKET }}/${{ env.DEPLOY_TAG }}/basic_contracts.json ./basic_contracts.json - - echo "TF_VAR_FEE_JUICE_CONTRACT_ADDRESS=$(jq -r '.feeJuiceAddress' ./l1_contracts.json)" >>$GITHUB_ENV - echo "TF_VAR_DEV_COIN_CONTRACT_ADDRESS=$(jq -r '.devCoinL1' ./basic_contracts.json)" >>$GITHUB_ENV - - - name: Deploy Faucet - working-directory: ./yarn-project/aztec-faucet/terraform - run: | - terraform init -input=false -backend-config="key=${{ env.DEPLOY_TAG }}/aztec-faucet" - terraform apply -input=false -auto-approve - - enable-proving: - if: ${{ github.event.inputs.no_deploy == 'false' }} - runs-on: ubuntu-latest - needs: [deploy-faucet, set-network] - env: - BRANCH_NAME: ${{ needs.set-network.outputs.branch_name }} - DEPLOY_TAG: ${{ needs.set-network.outputs.deploy_tag }} - TF_VAR_DEPLOY_TAG: ${{ needs.set-network.outputs.deploy_tag }} - API_KEY: ${{ secrets[needs.set-network.outputs.network_api_key] }} - TF_VAR_API_KEY: ${{ secrets[needs.set-network.outputs.network_api_key] }} - FORK_ADMIN_API_KEY: ${{ secrets[needs.set-network.outputs.network_fork_admin_api_key] }} - TF_VAR_FORK_ADMIN_API_KEY: ${{ secrets[needs.set-network.outputs.network_fork_admin_api_key] }} - TF_VAR_AGENTS_PER_PROVER: ${{ needs.set-network.outputs.agents_per_prover }} - TF_VAR_BOT_TX_INTERVAL_SECONDS: ${{ needs.set-network.outputs.bot_interval }} - TF_VAR_NODE_LB_RULE_PRIORITY: ${{ needs.set-network.outputs.node_lb_priority_range_start }} - TF_VAR_PXE_LB_RULE_PRIORITY: ${{ needs.set-network.outputs.pxe_lb_priority_range_start }} - TF_VAR_PROVER_NODE_LB_RULE_PRIORITY: ${{ needs.set-network.outputs.prover_node_lb_priority_range_start }} - TF_VAR_BOT_FLUSH_SETUP_TRANSACTIONS: ${{ needs.set-network.outputs.bot_flush_setup_txs }} - TF_VAR_BOT_MAX_PENDING_TXS: ${{ needs.set-network.outputs.bot_max_pending_txs }} - TF_VAR_SEQ_MIN_TX_PER_BLOCK: ${{ needs.set-network.outputs.min_txs_per_block }} - TF_VAR_SEQ_MAX_TX_PER_BLOCK: ${{ needs.set-network.outputs.max_txs_per_block }} - TF_VAR_BOT_FOLLOW_CHAIN: ${{ needs.set-network.outputs.bot_follow_chain }} - TF_VAR_PROVING_ENABLED: true - TF_VAR_BOT_NO_START: false - TF_VAR_BOT_SKIP_PUBLIC_SIMULATION: ${{ needs.set-network.outputs.bot_skip_simulation }} - TF_VAR_BOT_L2_GAS_LIMIT: ${{ needs.set-network.outputs.bot_l2_gas_limit }} - TF_VAR_BOT_DA_GAS_LIMIT: ${{ needs.set-network.outputs.bot_da_gas_limit }} - TF_VAR_BOT_COUNT: ${{ needs.set-network.outputs.bot_count }} - steps: - - uses: actions/checkout@v4 - with: - ref: "${{ env.GIT_COMMIT }}" - - uses: ./.github/ci-setup-action - - uses: hashicorp/setup-terraform@v3 - with: - terraform_version: 1.7.5 - - - name: Configure 
AWS credentials - uses: aws-actions/configure-aws-credentials@v1 - with: - aws-access-key-id: ${{ env.AWS_ACCESS_KEY_ID }} - aws-secret-access-key: ${{ env.AWS_SECRET_ACCESS_KEY }} - aws-region: eu-west-2 - - # Unneded for now, since the prover-node runs with simulated proofs and submits them to L1, which does not verify them yet. - # - name: Set latest block as proven - # working-directory: ./yarn-project/aztec/terraform/pxe - # run: | - # set -eo pipefail - # docker run aztecprotocol/aztec:${{ env.DEPLOY_TAG }} set-proven-through \ - # --rpc-url https://api.aztec.network/${{ env.DEPLOY_TAG }}/aztec-pxe/${{ env.API_KEY }} \ - # --l1-rpc-url https://${{ env.DEPLOY_TAG }}-mainnet-fork.aztec.network:8545/admin-${{ env.FORK_ADMIN_API_KEY }} \ - # --l1-chain-id ${{ env.L1_CHAIN_ID }} \ - # --l1-private-key ${{ env.CONTRACT_PUBLISHER_PRIVATE_KEY }} - - - name: Deploy PXE - working-directory: ./yarn-project/aztec/terraform/pxe - run: | - terraform init -input=false -backend-config="key=${{ env.DEPLOY_TAG }}/pxe" - terraform apply -input=false -auto-approve - - - name: Deploy Aztec Nodes - working-directory: ./yarn-project/aztec/terraform/node - run: | - env - terraform init -input=false -backend-config="key=${{ env.DEPLOY_TAG }}/aztec-node" - terraform apply -input=false -auto-approve -var="NODE_P2P_TCP_PORT=${{ needs.set-network.outputs.node_tcp_range_start }}" -var="NODE_P2P_UDP_PORT=${{ needs.set-network.outputs.node_udp_range_start }}" - - - name: Deploy Aztec Prover Nodes - working-directory: ./yarn-project/aztec/terraform/prover-node - run: | - terraform init -input=false -backend-config="key=${{ env.DEPLOY_TAG }}/aztec-prover-node" - terraform apply -input=false -auto-approve -var="NODE_P2P_TCP_PORT=${{ needs.set-network.outputs.prover_node_tcp_range_start }}" -var="NODE_P2P_UDP_PORT=${{ needs.set-network.outputs.prover_node_udp_range_start }}" - - - name: Deploy Provers - working-directory: ./yarn-project/aztec/terraform/prover - run: | - terraform init -input=false -backend-config="key=${{ env.DEPLOY_TAG }}/prover" - terraform apply -input=false -auto-approve - - - name: Wait for PXE to be available - run: | - ./.github/scripts/wait_for_infra.sh pxe ${{ env.DEPLOY_TAG }} ${{ env.API_KEY }} - - - name: Deploy verifier - working-directory: ./yarn-project/aztec/terraform/pxe - run: | - set -eo pipefail - docker run aztecprotocol/aztec:${{ env.DEPLOY_TAG }} deploy-l1-verifier \ - --rpc-url https://api.aztec.network/${{ env.DEPLOY_TAG }}/aztec-pxe/${{ env.API_KEY }} \ - --l1-rpc-url https://${{ env.DEPLOY_TAG }}-mainnet-fork.aztec.network:8545/admin-${{ env.FORK_ADMIN_API_KEY }} \ - --l1-chain-id ${{ env.L1_CHAIN_ID }} \ - --l1-private-key ${{ env.CONTRACT_PUBLISHER_PRIVATE_KEY }} - - - name: Enable transactions bot - working-directory: ./yarn-project/aztec/terraform/bot - run: | - terraform init -input=false -backend-config="key=${{ env.DEPLOY_TAG }}/bot" - terraform apply -input=false -auto-approve diff --git a/.github/workflows/network-deploy.yml b/.github/workflows/network-deploy.yml index 8ceba615141..b497a5a6142 100644 --- a/.github/workflows/network-deploy.yml +++ b/.github/workflows/network-deploy.yml @@ -1,6 +1,33 @@ name: Aztec Network Deployment on: + workflow_call: + inputs: + namespace: + description: The namespace to deploy to, e.g. smoke + required: true + type: string + values_file: + description: The values file to use, e.g. 1-validators.yaml + required: true + type: string + aztec_docker_image: + description: The Aztec Docker image to use, e.g. 
aztecprotocol/aztec:da809c58290f9590836f45ec59376cbf04d3c4ce-x86_64 + required: true + type: string + deployment_mnemonic_secret_name: + description: The name of the secret which holds the boot node's contract deployment mnemonic + required: true + type: string + default: testnet-deployment-mnemonic + respect_tf_lock: + description: Whether to respect the Terraform lock + required: false + type: string + default: "true" + secrets: + GCP_SA_KEY: + required: true workflow_dispatch: inputs: namespace: @@ -12,6 +39,14 @@ on: aztec_docker_image: description: The Aztec Docker image to use, e.g. aztecprotocol/aztec:da809c58290f9590836f45ec59376cbf04d3c4ce-x86_64 required: true + deployment_mnemonic_secret_name: + description: The name of the secret which holds the boot node's contract deployment mnemonic + required: true + default: testnet-deployment-mnemonic + respect_tf_lock: + description: Whether to respect the Terraform lock + required: false + default: "true" jobs: network_deployment: @@ -26,6 +61,7 @@ jobs: AZTEC_DOCKER_IMAGE: ${{ inputs.aztec_docker_image }} NAMESPACE: ${{ inputs.namespace }} VALUES_FILE: ${{ inputs.values_file }} + DEPLOYMENT_MNEMONIC_SECRET_NAME: ${{ inputs.deployment_mnemonic_secret_name }} CHART_PATH: ./spartan/aztec-network CLUSTER_NAME: aztec-gke REGION: us-west1-a @@ -62,6 +98,12 @@ jobs: echo "Terraform state bucket already exists" fi + - name: Grab the boot node deployment mnemonic + id: get-mnemonic + run: | + echo "::add-mask::$(gcloud secrets versions access latest --secret=${{ env.DEPLOYMENT_MNEMONIC_SECRET_NAME }})" + echo "mnemonic=$(gcloud secrets versions access latest --secret=${{ env.DEPLOYMENT_MNEMONIC_SECRET_NAME }})" >> "$GITHUB_OUTPUT" + - name: Setup Terraform uses: hashicorp/setup-terraform@v2 with: @@ -82,8 +124,10 @@ jobs: -var="values_file=${{ env.VALUES_FILE }}" \ -var="gke_cluster_context=${{ env.GKE_CLUSTER_CONTEXT }}" \ -var="aztec_docker_image=${{ env.AZTEC_DOCKER_IMAGE }}" \ - -out=tfplan + -var="l1_deployment_mnemonic=${{ steps.get-mnemonic.outputs.mnemonic }}" \ + -out=tfplan \ + -lock=${{ inputs.respect_tf_lock }} - name: Terraform Apply working-directory: ./spartan/terraform/deploy-release - run: terraform apply -auto-approve tfplan + run: terraform apply -lock=${{ inputs.respect_tf_lock }} -auto-approve tfplan diff --git a/.github/workflows/sepolia-deploy.yml b/.github/workflows/sepolia-deploy.yml index d908f89827d..4f736c56dd8 100644 --- a/.github/workflows/sepolia-deploy.yml +++ b/.github/workflows/sepolia-deploy.yml @@ -85,6 +85,7 @@ jobs: echo "TF_VAR_OUTBOX_CONTRACT_ADDRESS=$(extract outboxAddress)" >>$GITHUB_ENV echo "TF_VAR_AVAILABILITY_ORACLE_CONTRACT_ADDRESS=$(extract availabilityOracleAddress)" >>$GITHUB_ENV echo "TF_VAR_FEE_JUICE_CONTRACT_ADDRESS=$(extract feeJuiceAddress)" >>$GITHUB_ENV + echo "TF_VAR_STAKING_ASSET_CONTRACT_ADDRESS=$(extract stakingAssetAddress)" >>$GITHUB_ENV echo "TF_VAR_FEE_JUICE_PORTAL_CONTRACT_ADDRESS=$(extract feeJuicePortalAddress)" >>$GITHUB_ENV - name: Apply l1-contracts Terraform diff --git a/.noir-sync-commit b/.noir-sync-commit index 9bbde85e56b..b6e1166fe48 100644 --- a/.noir-sync-commit +++ b/.noir-sync-commit @@ -1 +1 @@ -68c32b4ffd9b069fe4b119327dbf4018c17ab9d4 +31640e91ba75b9c5200ea66d1f54cc5185e0d196 diff --git a/README.md b/README.md index 83a2d819572..7b2c6d3c5ea 100644 --- a/README.md +++ b/README.md @@ -47,7 +47,7 @@ This provide an interactive environment for debugging the CI test. 
 ## Debugging
 
-Logging goes through the [DebugLogger](yarn-project/foundation/src/log/debug.ts) module in Typescript. To see the log output, set a `DEBUG` environment variable to the name of the module you want to debug, to `aztec:*`, or to `*` to see all logs.
+Logging goes through the [DebugLogger](yarn-project/foundation/src/log/debug.ts) module in TypeScript. `LOG_LEVEL` controls the default log level, and one can set alternate levels for specific modules, e.g. `LOG_LEVEL="debug; warn: module1, module2; error: module3"`.
 
 ## Releases
 
diff --git a/aztec-up/bin/.aztec-run b/aztec-up/bin/.aztec-run
index a7ab48a5313..142e71fb75c 100755
--- a/aztec-up/bin/.aztec-run
+++ b/aztec-up/bin/.aztec-run
@@ -104,8 +104,8 @@ while [[ "$#" -gt 0 ]]; do
 done
 
 DOCKER_ENV="-e HOME=$HOME"
-if ! [ -z "${DEBUG:-}" ] ; then
-  DOCKER_ENV="-e DEBUG=$DEBUG"
+if ! [ -z "${LOG_LEVEL:-}" ] ; then
+  DOCKER_ENV="$DOCKER_ENV -e LOG_LEVEL=$LOG_LEVEL"
 fi
 for env in ${ENV_VARS_TO_INJECT:-}; do
   # SSH_AUTH_SOCK must be handled separately
diff --git a/aztec-up/bin/docker-compose.sandbox.yml b/aztec-up/bin/docker-compose.sandbox.yml
index 39d6ff20319..186ba3dec69 100644
--- a/aztec-up/bin/docker-compose.sandbox.yml
+++ b/aztec-up/bin/docker-compose.sandbox.yml
@@ -20,7 +20,7 @@ services:
     ports:
       - "${PXE_PORT:-8080}:${PXE_PORT:-8080}"
     environment:
-      DEBUG: # Loaded from the user shell if explicitly set
+      LOG_LEVEL: # Loaded from the user shell if explicitly set
       HOST_WORKDIR: "${PWD}" # Loaded from the user shell to show log files absolute path in host
       ETHEREUM_HOST: ${ETHEREUM_HOST:-http://ethereum}:${ANVIL_PORT:-8545}
       L1_CHAIN_ID: 31337
diff --git a/aztec-up/bin/docker-compose.test.yml b/aztec-up/bin/docker-compose.test.yml
index 796e4c69b5a..d3ad459b9de 100644
--- a/aztec-up/bin/docker-compose.test.yml
+++ b/aztec-up/bin/docker-compose.test.yml
@@ -2,7 +2,6 @@ services:
   txe:
     image: "aztecprotocol/aztec"
     environment:
-      DEBUG: # Loaded from the user shell if explicitly set
       LOG_LEVEL: # Loaded from the user shell if explicitly set
       HOST_WORKDIR: "${PWD}" # Loaded from the user shell to show log files absolute path in host
     volumes:
diff --git a/aztec-up/terraform/main.tf b/aztec-up/terraform/main.tf
index 63f44df06df..3adcae03b5d 100644
--- a/aztec-up/terraform/main.tf
+++ b/aztec-up/terraform/main.tf
@@ -108,18 +108,6 @@ resource "null_resource" "upload_public_directory" {
   }
 }
 
-# resource "aws_route53_record" "subdomain_record" {
-#   zone_id = data.terraform_remote_state.aztec2_iac.outputs.aws_route53_zone_id
-#   name    = "install.aztec.network"
-#   type    = "A"
-
-#   alias {
-#     name                   = aws_s3_bucket_website_configuration.website_bucket.website_domain
-#     zone_id                = aws_s3_bucket.install_bucket.hosted_zone_id
-#     evaluate_target_health = true
-#   }
-# }
-
 resource "aws_cloudfront_distribution" "install" {
   origin {
     domain_name = aws_s3_bucket.install_bucket.website_endpoint
diff --git a/barretenberg/.gitrepo b/barretenberg/.gitrepo
index 38a4c4fc6ef..cf104d5e010 100644
--- a/barretenberg/.gitrepo
+++ b/barretenberg/.gitrepo
@@ -6,7 +6,7 @@
 [subrepo]
   remote = https://github.com/AztecProtocol/barretenberg
   branch = master
-  commit = 3195a1b30b3bcfd635f9b4a899c49cb517283685
-  parent = 94e6e1a954911b81e6af85edff55c64f13595b20
+  commit = 2eb1f892e7890770c42ca618e841fd173fa8b0c1
+  parent = 4fcbc592c963389a132b5b72f0f68d1f6526943b
   method = merge
   cmdver = 0.4.6
diff --git a/barretenberg/bbup/.gitignore b/barretenberg/bbup/.gitignore
deleted file mode 100644
index 96f4d2d1d9f..00000000000
--- a/barretenberg/bbup/.gitignore
+++ /dev/null
@@ -1,4 +0,0 @@
-node_modules -yarn.lock -*.js -.yarn diff --git a/barretenberg/bbup/.npmignore b/barretenberg/bbup/.npmignore deleted file mode 100644 index 7e4089baa7c..00000000000 --- a/barretenberg/bbup/.npmignore +++ /dev/null @@ -1,4 +0,0 @@ -node_modules -yarn.lock -*.ts -.yarn diff --git a/barretenberg/bbup/README.md b/barretenberg/bbup/README.md index a2c009fe5d0..6e8ce8343d5 100644 --- a/barretenberg/bbup/README.md +++ b/barretenberg/bbup/README.md @@ -6,12 +6,6 @@ It assumes you are using [Noir](https://noir-lang.org) as the frontend language. ## Installation -### Dependencies - -TODO - -### Installation script - BBup is an installer for whatever version of BB you may want. Install BBup with: ```bash @@ -19,7 +13,7 @@ curl -L bbup.dev | bash ``` > [!IMPORTANT] -> *Always* check what scripts do. The above one redirects to [the install script](https://github.com/AztecProtocol/aztec-packages/blob/master/barretenberg/bbup/install) which checks if you have `npm`, installing it with `nvm` otherwise. It then installs [bbup](https://github.com/AztecProtocol/aztec-packages/blob/master/barretenberg/bbup/bbup.ts) globally. +> *Always* check what scripts do. The above one redirects to [the install script](https://github.com/AztecProtocol/aztec-packages/blob/master/barretenberg/bbup/install) which installs [bbup](https://github.com/AztecProtocol/aztec-packages/blob/master/barretenberg/bbup/bbup) in your system's PATH ## Usage diff --git a/barretenberg/bbup/bbup b/barretenberg/bbup/bbup new file mode 100755 index 00000000000..c22a182c032 --- /dev/null +++ b/barretenberg/bbup/bbup @@ -0,0 +1,188 @@ +#!/usr/bin/env bash + +set -e + +# Colors and symbols +RED='\033[0;31m' +GREEN='\033[0;32m' +BLUE='\033[0;34m' +NC='\033[0m' +SUCCESS="✓" +ERROR="✗" + +# Utility functions +print_spinner() { + local pid=$1 + local delay=0.1 + local spinstr='|/-\' + while [ "$(ps a | awk '{print $1}' | grep $pid)" ]; do + local temp=${spinstr#?} + printf " [%c] " "$spinstr" + local spinstr=$temp${spinstr%"$temp"} + sleep $delay + printf "\b\b\b\b\b\b" + done + printf " \b\b\b\b" +} + +get_bb_version_for_noir() { + local noir_version=$1 + local url="" + local resolved_version="" + + if [ "$noir_version" = "stable" ] || [ "$noir_version" = "nightly" ]; then + # Get releases from GitHub API + local releases=$(curl -s "https://api.github.com/repos/noir-lang/noir/releases") + + if [ "$noir_version" = "stable" ]; then + resolved_version=$(echo "$releases" | grep -o '"tag_name": "[^"]*"' | grep -v "aztec\|nightly" | head -1 | cut -d'"' -f4) + else + resolved_version=$(echo "$releases" | grep -o '"tag_name": "nightly[^"]*"' | head -1 | cut -d'"' -f4) + fi + + url="https://raw.githubusercontent.com/noir-lang/noir/${resolved_version}/scripts/install_bb.sh" + else + url="https://raw.githubusercontent.com/noir-lang/noir/v${noir_version}/scripts/install_bb.sh" + fi + + # Extract BB version from install script + local install_script=$(curl -s "$url") + local bb_version=$(echo "$install_script" | grep 'VERSION=' | cut -d'"' -f2) + echo "$bb_version" +} + +install_bb() { + local version=$1 + local architecture=$(uname -m) + local platform="" + + # Convert architecture names + if [ "$architecture" = "arm64" ]; then + architecture="aarch64" + elif [ "$architecture" = "x86_64" ]; then + architecture="x86_64" + else + printf "${RED}${ERROR} Unsupported architecture: ${architecture}${NC}\n" + exit 1 + fi + + # Determine platform + if [ "$(uname)" = "Darwin" ]; then + platform="apple-darwin" + elif [ "$(uname)" = "Linux" 
]; then + platform="linux-gnu" + else + printf "${RED}${ERROR} Unsupported platform: $(uname)${NC}\n" + exit 1 + fi + + local home_dir=$HOME + local bb_path="${home_dir}/.bb" + + printf "${BLUE}Installing to ${bb_path}${NC}\n" + + # Create temporary directory + local temp_dir=$(mktemp -d) + local temp_tar="${temp_dir}/temp.tar.gz" + + # Download and extract + local release_url="https://github.com/AztecProtocol/aztec-packages/releases/download/aztec-packages-v${version}" + local binary_url="${release_url}/barretenberg-${architecture}-${platform}.tar.gz" + + curl -L "$binary_url" -o "$temp_tar" + mkdir -p "$bb_path" + tar xzf "$temp_tar" -C "$bb_path" + rm -rf "$temp_dir" + + # Update shell configuration + update_shell_config "$bb_path" + + printf "${GREEN}${SUCCESS} Installed barretenberg to ${bb_path}${NC}\n" +} + +update_shell_config() { + local bb_bin_path=$1 + local path_entry="export PATH=\"${bb_bin_path}:\$PATH\"" + + # Update various shell configs if they exist + if [ -f "${HOME}/.bashrc" ]; then + echo "$path_entry" >> "${HOME}/.bashrc" + fi + + if [ -f "${HOME}/.zshrc" ]; then + echo "$path_entry" >> "${HOME}/.zshrc" + fi + + if [ -f "${HOME}/.config/fish/config.fish" ]; then + echo "set -gx PATH ${bb_bin_path} \$PATH" >> "${HOME}/.config/fish/config.fish" + fi + + # Update current session's PATH + export PATH="${bb_bin_path}:$PATH" +} + +# Main script +main() { + local version="" + local noir_version="" + + # Parse arguments + while [[ $# -gt 0 ]]; do + case $1 in + -v|--version) + version="$2" + shift 2 + ;; + -nv|--noir-version) + noir_version="$2" + shift 2 + ;; + *) + printf "${RED}${ERROR} Unknown option: $1${NC}\n" + exit 1 + ;; + esac + done + + # If no version specified, try to get current noir version + if [ -z "$version" ] && [ -z "$noir_version" ]; then + noir_version="current" + fi + + if [ "$noir_version" = "current" ]; then + printf "${BLUE}Querying noir version from nargo${NC}\n" + if ! command -v nargo &> /dev/null; then + printf "${RED}${ERROR} Could not get noir version from nargo --version. Please specify a version.${NC}\n" + exit 1 + fi + noir_version=$(nargo --version | grep -o 'nargo version = [0-9]\+\.[0-9]\+\.[0-9]\+\(-[a-zA-Z]\+\.[0-9]\+\)\?' 
| cut -d' ' -f4) + printf "${GREEN}${SUCCESS} Resolved noir version ${noir_version} from nargo${NC}\n" + fi + + if [ -n "$noir_version" ]; then + printf "${BLUE}Getting compatible barretenberg version for noir version ${noir_version}${NC}\n" + if [ "$noir_version" = "stable" ] || [ "$noir_version" = "nightly" ]; then + printf "${BLUE}Resolving noir version ${noir_version}...${NC}\n" + # Get releases from GitHub API to show the resolved version + local releases=$(curl -s "https://api.github.com/repos/noir-lang/noir/releases") + local resolved_version="" + if [ "$noir_version" = "stable" ]; then + resolved_version=$(echo "$releases" | grep -o '"tag_name": "[^"]*"' | grep -v "aztec\|nightly" | head -1 | cut -d'"' -f4) + else + resolved_version=$(echo "$releases" | grep -o '"tag_name": "nightly[^"]*"' | head -1 | cut -d'"' -f4) + fi + printf "${GREEN}${SUCCESS} Resolved noir version ${noir_version} to ${resolved_version}${NC}\n" + fi + version=$(get_bb_version_for_noir "$noir_version") + printf "${GREEN}${SUCCESS} Resolved to barretenberg version ${version}${NC}\n" + fi + + if [ -z "$version" ]; then + printf "${RED}${ERROR} No version specified and couldn't determine version from noir${NC}\n" + exit 1 + fi + + install_bb "$version" +} + +main "$@" diff --git a/barretenberg/bbup/bbup.js b/barretenberg/bbup/bbup.js deleted file mode 100755 index 13eb873dc20..00000000000 --- a/barretenberg/bbup/bbup.js +++ /dev/null @@ -1,48 +0,0 @@ -#!/usr/bin/env node -import { Command } from "commander"; -const program = new Command(); -import { installBB } from "./shell.js"; -import ora from "ora"; -import logSymbols from "log-symbols"; -import { getBbVersionForNoir } from "./versions.js"; -import { execSync } from "child_process"; -const spinner = ora({ color: "blue", discardStdin: false }); -const bbup = program - .command("install", { isDefault: true }) - .description("Installs Barretenberg.") - .option("-f, --frontend", "Match the version of a specific frontend language", "noir"); -const options = bbup.opts(); -if (options.frontend === "noir") { - bbup - .requiredOption("-v, --version ", "The Noir version to match", "current") - .action(async ({ version }) => { - let resolvedVersion = version; - if (version === "current") { - spinner.start(`Querying noir version from nargo`); - try { - const output = execSync("nargo --version", { encoding: "utf-8" }); - resolvedVersion = output.match(/nargo version = (\d+\.\d+\.\d+)/)[1]; - spinner.stopAndPersist({ - text: `Resolved noir version ${resolvedVersion} from nargo`, - symbol: logSymbols.success, - }); - } - catch (e) { - spinner.stopAndPersist({ - text: `Could not get noir version from nargo --version. 
Please specify a version.`, - symbol: logSymbols.error, - }); - process.exit(1); - } - } - spinner.start(`Getting compatible barretenberg version for noir version ${resolvedVersion}`); - const compatibleVersion = await getBbVersionForNoir(resolvedVersion, spinner); - spinner.stopAndPersist({ - text: `Resolved to barretenberg version ${compatibleVersion}`, - symbol: logSymbols.success, - }); - spinner.start(`Installing barretenberg`); - await installBB(compatibleVersion, spinner); - }); -} -bbup.parse(); diff --git a/barretenberg/bbup/bbup.ts b/barretenberg/bbup/bbup.ts deleted file mode 100755 index d885715a0dc..00000000000 --- a/barretenberg/bbup/bbup.ts +++ /dev/null @@ -1,71 +0,0 @@ -#!/usr/bin/env node -import { Command, Option } from "commander"; -const program = new Command(); -import { installBB } from "./shell.js"; -import ora from "ora"; -import logSymbols from "log-symbols"; -import { getBbVersionForNoir } from "./versions.js"; -import { execSync } from "child_process"; - -const spinner = ora({ color: "blue", discardStdin: false }); - -const bbup = program - .command("install", { isDefault: true }) - .description("Installs Barretenberg.") - .addOption( - new Option( - "-v, --version ", - "The Barretenberg version to install" - ).implies({ noirVersion: null }) - ) - .addOption( - new Option( - "-nv, --noir-version ", - "The Noir version to match" - ).default("current") - ) - .action(async ({ version, noirVersion }) => { - let resolvedBBVersion = ""; - if (noirVersion) { - let resolvedNoirVersion = noirVersion; - if (noirVersion === "current") { - spinner.start(`Querying noir version from nargo`); - try { - const output = execSync("nargo --version", { encoding: "utf-8" }); - resolvedNoirVersion = output.match( - /nargo version = (\d+\.\d+\.\d+)/ - )![1]; - spinner.stopAndPersist({ - text: `Resolved noir version ${resolvedNoirVersion} from nargo`, - symbol: logSymbols.success, - }); - } catch (e) { - spinner.stopAndPersist({ - text: `Could not get noir version from nargo --version. Please specify a version.`, - symbol: logSymbols.error, - }); - process.exit(1); - } - } - - spinner.start( - `Getting compatible barretenberg version for noir version ${resolvedNoirVersion}` - ); - resolvedBBVersion = await getBbVersionForNoir( - resolvedNoirVersion, - spinner - ); - spinner.stopAndPersist({ - text: `Resolved to barretenberg version ${resolvedBBVersion}`, - symbol: logSymbols.success, - }); - } else if (version) { - resolvedBBVersion = version; - } - - spinner.start(`Installing barretenberg`); - - await installBB(resolvedBBVersion, spinner); - }); - -bbup.parse(); diff --git a/barretenberg/bbup/install b/barretenberg/bbup/install index 848086087c4..030c5f6c5c5 100755 --- a/barretenberg/bbup/install +++ b/barretenberg/bbup/install @@ -1,42 +1,72 @@ -#!/bin/bash +#!/usr/bin/env bash set -e -# Function to check if a command exists -command_exists() { - command -v "$1" >/dev/null 2>&1 -} +# Colors and symbols +RED='\033[0;31m' +GREEN='\033[0;32m' +BLUE='\033[0;34m' +NC='\033[0m' +SUCCESS="✓" +ERROR="✗" + +BB_DIR="${HOME}/.bb" +INSTALL_PATH="${BB_DIR}/bbup" +BBUP_URL="https://raw.githubusercontent.com/AztecProtocol/aztec-packages/master/barretenberg/bbup/bbup" -# Function to install NVM and Node.js -install_nvm_and_node() { - echo "Installing NVM..." - wget -qO- https://raw.githubusercontent.com/nvm-sh/nvm/v0.40.1/install.sh | bash +# Create .bb directory if it doesn't exist +mkdir -p "$BB_DIR" - # Load NVM - export NVM_DIR="$HOME/.nvm" - [ -s "$NVM_DIR/nvm.sh" ] && \. 
"$NVM_DIR/nvm.sh" +# Download bbup +printf "${BLUE}Downloading bbup...${NC}\n" +if command -v curl &> /dev/null; then + curl -fsSL "$BBUP_URL" -o "$INSTALL_PATH" +elif command -v wget &> /dev/null; then + wget -q "$BBUP_URL" -O "$INSTALL_PATH" +else + printf "${RED}${ERROR} Neither curl nor wget found. Please install either curl or wget.${NC}\n" + exit 1 +fi + +if [ ! -f "$INSTALL_PATH" ]; then + printf "${RED}${ERROR} Failed to download bbup${NC}\n" + exit 1 +fi - # Install the latest LTS version of Node.js - echo "Installing the latest LTS version of Node.js..." - nvm install --lts +chmod 755 "$INSTALL_PATH" - # Use the installed version - nvm use --lts +# Add to shell config files if not already present +PATH_ENTRY="export PATH=\"\${HOME}/.bb:\${PATH}\"" +FISH_PATH_ENTRY="set -gx PATH \${HOME}/.bb \$PATH" - # Verify installation - node --version - npm --version +add_to_config() { + local config_file="$1" + local entry="$2" + if [ -f "$config_file" ] && ! grep -q "/.bb:" "$config_file"; then + echo "$entry" >> "$config_file" + return 0 + fi + return 1 } -# Check if NPM is installed -if ! command_exists npm; then - install_nvm_and_node +SHELL_UPDATED=false + +if add_to_config "${HOME}/.bashrc" "$PATH_ENTRY"; then + SHELL_UPDATED=true fi +if add_to_config "${HOME}/.zshrc" "$PATH_ENTRY"; then + SHELL_UPDATED=true +fi -# Install bbup globally -echo "Installing bbup..." -npm install -g bbup +if [ -f "${HOME}/.config/fish/config.fish" ] && ! grep -q "/.bb " "${HOME}/.config/fish/config.fish"; then + echo "$FISH_PATH_ENTRY" >> "${HOME}/.config/fish/config.fish" + SHELL_UPDATED=true +fi -echo "Installation complete. You can now use the 'bbup' command." -echo "Please restart your terminal or run 'source ~/.bashrc' (or your shell's equivalent) to start using bbup." 
+printf "${GREEN}${SUCCESS} Successfully installed bbup${NC}\n" +if [ "$SHELL_UPDATED" = true ]; then + printf "${BLUE}Please run 'source ~/.bashrc' or restart your terminal to use bbup${NC}\n" +else + printf "${BLUE}Your PATH already includes ~/.bb - you can run 'bbup' from anywhere${NC}\n" +fi diff --git a/barretenberg/bbup/package.json b/barretenberg/bbup/package.json deleted file mode 100644 index 935b3b06e1c..00000000000 --- a/barretenberg/bbup/package.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "name": "bbup", - "type": "module", - "description": "Barretenberg installation script", - "bin": "bbup.js", - "version": "0.0.7", - "license": "ISC", - "scripts": { - "start": "npx tsx bbup.ts", - "compile": "tsc bbup.ts --esModuleInterop true --module nodenext && chmod +x bbup.js", - "publish": "yarn compile && yarn npm publish --access public" - }, - "dependencies": { - "@inquirer/input": "^1.2.16", - "@inquirer/select": "^1.3.3", - "axios": "^1.7.7", - "commander": "^11.1.0", - "log-symbols": "^7.0.0", - "ora": "^8.1.0", - "tar-fs": "^3.0.6", - "tiged": "^2.12.6" - }, - "packageManager": "yarn@4.5.0" -} diff --git a/barretenberg/bbup/shell.js b/barretenberg/bbup/shell.js deleted file mode 100644 index 512c2ddb9cf..00000000000 --- a/barretenberg/bbup/shell.js +++ /dev/null @@ -1,68 +0,0 @@ -import { execSync } from "child_process"; -import logSymbols from "log-symbols"; -import os from "os"; -import axios from "axios"; -import fs from "fs"; -import { createGunzip } from "zlib"; -import tar from "tar-fs"; -import { promisify } from "util"; -import { pipeline } from "stream"; -import path from "path"; -export function sourceShellConfig() { - const shell = execSync("echo $SHELL", { encoding: "utf-8" }).trim(); - if (shell.includes("bash")) { - process.env.PATH = execSync("echo $PATH", { encoding: "utf-8" }).trim(); - } - else if (shell.includes("zsh")) { - process.env.PATH = execSync('zsh -c "echo $PATH"', { - encoding: "utf-8", - }).trim(); - } - else if (shell.includes("fish")) { - process.env.PATH = execSync('fish -c "echo $PATH"', { - encoding: "utf-8", - }).trim(); - } -} -export function exec(cmd, options = {}) { - return execSync(cmd, { - encoding: "utf-8", - stdio: "pipe", - ...options, - }); -} -export async function installBB(version, spinner) { - let architecture = os.arch(); - if (architecture === "arm64") { - architecture = "aarch64"; - } - else if (architecture === "x64") { - architecture = "x86_64"; - } - let platform = os.platform(); - if (platform === "darwin") { - platform = "apple-darwin"; - } - else if (platform === "linux") { - platform = "linux-gnu"; - } - const home = os.homedir(); - const bbPath = path.join(home, ".bb"); - spinner.start(`Installing to ${bbPath}`); - const tempTarPath = path.join(fs.mkdtempSync("bb-"), "temp.tar.gz"); - if (!["x86_64", "aarch64"].includes(architecture) || - !["linux-gnu", "apple-darwin"].includes(platform)) { - throw new Error(`Unsupported architecture ${architecture} and platform ${platform}`); - } - const releaseUrl = `https://github.com/AztecProtocol/aztec-packages/releases/download/aztec-packages-v${version}`; - const binaryUrl = `${releaseUrl}/barretenberg-${architecture}-${platform}.tar.gz`; - const response = await axios.get(binaryUrl, { responseType: "stream" }); - const pipelineAsync = promisify(pipeline); - await pipelineAsync(response.data, fs.createWriteStream(tempTarPath)); - await pipelineAsync(fs.createReadStream(tempTarPath), createGunzip(), tar.extract(bbPath)); - fs.rmSync(path.dirname(tempTarPath), { recursive: 
true }); - spinner.stopAndPersist({ - text: `Installed barretenberg to ${bbPath}`, - symbol: logSymbols.success, - }); -} diff --git a/barretenberg/bbup/shell.ts b/barretenberg/bbup/shell.ts deleted file mode 100644 index 5ab408778c7..00000000000 --- a/barretenberg/bbup/shell.ts +++ /dev/null @@ -1,85 +0,0 @@ -import { execSync } from "child_process"; -import logSymbols from "log-symbols"; -import { Ora } from "ora"; -import os from "os"; -import axios from "axios"; -import fs from "fs"; -import { createGunzip } from "zlib"; -import tar from "tar-fs"; -import { promisify } from "util"; - -import { pipeline } from "stream"; -import path from "path"; - -export function sourceShellConfig() { - const shell = execSync("echo $SHELL", { encoding: "utf-8" }).trim(); - - if (shell.includes("bash")) { - process.env.PATH = execSync("echo $PATH", { encoding: "utf-8" }).trim(); - } else if (shell.includes("zsh")) { - process.env.PATH = execSync('zsh -c "echo $PATH"', { - encoding: "utf-8", - }).trim(); - } else if (shell.includes("fish")) { - process.env.PATH = execSync('fish -c "echo $PATH"', { - encoding: "utf-8", - }).trim(); - } -} - -export function exec(cmd: string, options = {}) { - return execSync(cmd, { - encoding: "utf-8", - stdio: "pipe", - ...options, - }); -} -export async function installBB(version: string, spinner: Ora) { - let architecture = os.arch(); - if (architecture === "arm64") { - architecture = "aarch64"; - } else if (architecture === "x64") { - architecture = "x86_64"; - } - - let platform: string = os.platform(); - if (platform === "darwin") { - platform = "apple-darwin"; - } else if (platform === "linux") { - platform = "linux-gnu"; - } - - const home = os.homedir(); - const bbPath = path.join(home, ".bb"); - - spinner.start(`Installing to ${bbPath}`); - const tempTarPath = path.join(fs.mkdtempSync("bb-"), "temp.tar.gz"); - - if ( - !["x86_64", "aarch64"].includes(architecture) || - !["linux-gnu", "apple-darwin"].includes(platform) - ) { - throw new Error( - `Unsupported architecture ${architecture} and platform ${platform}` - ); - } - - const releaseUrl = `https://github.com/AztecProtocol/aztec-packages/releases/download/aztec-packages-v${version}`; - const binaryUrl = `${releaseUrl}/barretenberg-${architecture}-${platform}.tar.gz`; - - const response = await axios.get(binaryUrl, { responseType: "stream" }); - - const pipelineAsync = promisify(pipeline); - await pipelineAsync(response.data, fs.createWriteStream(tempTarPath)); - await pipelineAsync( - fs.createReadStream(tempTarPath), - createGunzip(), - tar.extract(bbPath) - ); - - fs.rmSync(path.dirname(tempTarPath), { recursive: true }); - spinner.stopAndPersist({ - text: `Installed barretenberg to ${bbPath}`, - symbol: logSymbols.success, - }); -} diff --git a/barretenberg/bbup/versions.js b/barretenberg/bbup/versions.js deleted file mode 100644 index 795107f3f13..00000000000 --- a/barretenberg/bbup/versions.js +++ /dev/null @@ -1,42 +0,0 @@ -import axios from 'axios'; -import logSymbols from 'log-symbols'; -async function getNamedVersions(githubToken) { - const fetchOpts = { - // eslint-disable-next-line camelcase - params: { per_page: 100 }, - headers: {}, - }; - if (githubToken) - fetchOpts.headers = { Authorization: `token ${githubToken}` }; - const { data } = await axios.get(`https://api.github.com/repos/noir-lang/noir/releases`, fetchOpts); - const stable = data.filter((release) => !release.tag_name.includes('aztec') && !release.tag_name.includes('nightly') && !release.prerelease)[0].tag_name; - const 
nightly = data.filter((release) => release.tag_name.startsWith('nightly'))[0].tag_name; - return { - stable, - nightly, - }; -} -export async function getBbVersionForNoir(noirVersion, spinner, githubToken) { - let url = ''; - if (noirVersion === 'stable' || noirVersion === 'nightly') { - spinner.start(`Resolving noir version ${noirVersion}...`); - const resolvedVersions = await getNamedVersions(githubToken); - spinner.stopAndPersist({ - text: `Resolved noir version ${noirVersion} to ${resolvedVersions[noirVersion]}`, - symbol: logSymbols.success, - }); - url = `https://raw.githubusercontent.com/noir-lang/noir/${resolvedVersions[noirVersion]}/scripts/install_bb.sh`; - } - else { - url = `https://raw.githubusercontent.com/noir-lang/noir/v${noirVersion}/scripts/install_bb.sh`; - } - try { - const { data } = await axios.get(url); - const versionMatch = data.match(/VERSION="([\d.]+)"/); - const version = versionMatch ? versionMatch[1] : null; - return version; - } - catch (e) { - throw new Error(e.message || e); - } -} diff --git a/barretenberg/bbup/versions.ts b/barretenberg/bbup/versions.ts deleted file mode 100644 index 36b979b54f9..00000000000 --- a/barretenberg/bbup/versions.ts +++ /dev/null @@ -1,64 +0,0 @@ -import axios from "axios"; -import logSymbols from "log-symbols"; -import { Ora } from "ora"; - -async function getNamedVersions(githubToken?: string) { - const fetchOpts = { - // eslint-disable-next-line camelcase - params: { per_page: 100 }, - headers: {}, - }; - - if (githubToken) - fetchOpts.headers = { Authorization: `token ${githubToken}` }; - - const { data } = await axios.get( - `https://api.github.com/repos/noir-lang/noir/releases`, - fetchOpts - ); - - const stable = data.filter( - (release: any) => - !release.tag_name.includes("aztec") && - !release.tag_name.includes("nightly") && - !release.prerelease - )[0].tag_name; - const nightly = data.filter((release: any) => - release.tag_name.startsWith("nightly") - )[0].tag_name; - - return { - stable, - nightly, - }; -} - -export async function getBbVersionForNoir( - noirVersion: string, - spinner: Ora, - githubToken?: string -) { - let url = ""; - - if (noirVersion === "stable" || noirVersion === "nightly") { - spinner.start(`Resolving noir version ${noirVersion}...`); - const resolvedVersions = await getNamedVersions(githubToken); - spinner.stopAndPersist({ - text: `Resolved noir version ${noirVersion} to ${resolvedVersions[noirVersion]}`, - symbol: logSymbols.success, - }); - url = `https://raw.githubusercontent.com/noir-lang/noir/${resolvedVersions[noirVersion]}/scripts/install_bb.sh`; - } else { - url = `https://raw.githubusercontent.com/noir-lang/noir/v${noirVersion}/scripts/install_bb.sh`; - } - - try { - const { data } = await axios.get(url); - const versionMatch = data.match(/VERSION="([\d.]+)"/); - const version = versionMatch ? versionMatch[1] : null; - - return version; - } catch (e: any) { - throw new Error(e.message || e); - } -} diff --git a/barretenberg/bbup/yarn.lock b/barretenberg/bbup/yarn.lock deleted file mode 100644 index caccde246a9..00000000000 --- a/barretenberg/bbup/yarn.lock +++ /dev/null @@ -1,999 +0,0 @@ -# This file is generated by running "yarn install" inside your project. -# Manual changes might be lost - proceed with caution! 
- -__metadata: - version: 8 - cacheKey: 10c0 - -"@inquirer/core@npm:^6.0.0": - version: 6.0.0 - resolution: "@inquirer/core@npm:6.0.0" - dependencies: - "@inquirer/type": "npm:^1.1.6" - "@types/mute-stream": "npm:^0.0.4" - "@types/node": "npm:^20.10.7" - "@types/wrap-ansi": "npm:^3.0.0" - ansi-escapes: "npm:^4.3.2" - chalk: "npm:^4.1.2" - cli-spinners: "npm:^2.9.2" - cli-width: "npm:^4.1.0" - figures: "npm:^3.2.0" - mute-stream: "npm:^1.0.0" - run-async: "npm:^3.0.0" - signal-exit: "npm:^4.1.0" - strip-ansi: "npm:^6.0.1" - wrap-ansi: "npm:^6.2.0" - checksum: 10c0/0663330936c9baea58d8a10e93de6c3446ab84ed909c41d7b3f6762842473b8f88e10d776326d89a278abfb3c4083240d0f5876293908eb1005d0026aa2cfb7d - languageName: node - linkType: hard - -"@inquirer/input@npm:^1.2.16": - version: 1.2.16 - resolution: "@inquirer/input@npm:1.2.16" - dependencies: - "@inquirer/core": "npm:^6.0.0" - "@inquirer/type": "npm:^1.1.6" - chalk: "npm:^4.1.2" - checksum: 10c0/89f612119ba208b34d693e013432898e5de4ddb61dde4b1cd326fb421a0bd16353872da915ec58f34ca5503b77081faf402bbea15033f84b7be8ac5e0672e4a8 - languageName: node - linkType: hard - -"@inquirer/select@npm:^1.3.3": - version: 1.3.3 - resolution: "@inquirer/select@npm:1.3.3" - dependencies: - "@inquirer/core": "npm:^6.0.0" - "@inquirer/type": "npm:^1.1.6" - ansi-escapes: "npm:^4.3.2" - chalk: "npm:^4.1.2" - figures: "npm:^3.2.0" - checksum: 10c0/695de7dc85bf1b4ae4d13bbacb39e73cf4ff12f04da5cff4f0cc046db6bb32ff6051d30753a94299370908051133535e0db7e011e3b61e9806908eb1a7ef6b39 - languageName: node - linkType: hard - -"@inquirer/type@npm:^1.1.6": - version: 1.5.5 - resolution: "@inquirer/type@npm:1.5.5" - dependencies: - mute-stream: "npm:^1.0.0" - checksum: 10c0/4c41736c09ba9426b5a9e44993bdd54e8f532e791518802e33866f233a2a6126a25c1c82c19d1abbf1df627e57b1b957dd3f8318ea96073d8bfc32193943bcb3 - languageName: node - linkType: hard - -"@types/mute-stream@npm:^0.0.4": - version: 0.0.4 - resolution: "@types/mute-stream@npm:0.0.4" - dependencies: - "@types/node": "npm:*" - checksum: 10c0/944730fd7b398c5078de3c3d4d0afeec8584283bc694da1803fdfca14149ea385e18b1b774326f1601baf53898ce6d121a952c51eb62d188ef6fcc41f725c0dc - languageName: node - linkType: hard - -"@types/node@npm:*": - version: 22.7.4 - resolution: "@types/node@npm:22.7.4" - dependencies: - undici-types: "npm:~6.19.2" - checksum: 10c0/c22bf54515c78ff3170142c1e718b90e2a0003419dc2d55f79c9c9362edd590a6ab1450deb09ff6e1b32d1b4698da407930b16285e8be3a009ea6cd2695cac01 - languageName: node - linkType: hard - -"@types/node@npm:^20.10.7": - version: 20.16.10 - resolution: "@types/node@npm:20.16.10" - dependencies: - undici-types: "npm:~6.19.2" - checksum: 10c0/c0c0c7ecb083ec638c2118e54b5242bb4c39a75608cbac9475cf15aaceb64b8bc997a87a0798e700a81d61651c8a7750ae0455be0f0996ada6e8b2bb818d90c5 - languageName: node - linkType: hard - -"@types/wrap-ansi@npm:^3.0.0": - version: 3.0.0 - resolution: "@types/wrap-ansi@npm:3.0.0" - checksum: 10c0/8d8f53363f360f38135301a06b596c295433ad01debd082078c33c6ed98b05a5c8fe8853a88265432126096084f4a135ec1564e3daad631b83296905509f90b3 - languageName: node - linkType: hard - -"agent-base@npm:6": - version: 6.0.2 - resolution: "agent-base@npm:6.0.2" - dependencies: - debug: "npm:4" - checksum: 10c0/dc4f757e40b5f3e3d674bc9beb4f1048f4ee83af189bae39be99f57bf1f48dde166a8b0a5342a84b5944ee8e6ed1e5a9d801858f4ad44764e84957122fe46261 - languageName: node - linkType: hard - -"ansi-colors@npm:^4.1.1": - version: 4.1.3 - resolution: "ansi-colors@npm:4.1.3" - checksum: 
10c0/ec87a2f59902f74e61eada7f6e6fe20094a628dab765cfdbd03c3477599368768cffccdb5d3bb19a1b6c99126783a143b1fee31aab729b31ffe5836c7e5e28b9 - languageName: node - linkType: hard - -"ansi-escapes@npm:^4.3.2": - version: 4.3.2 - resolution: "ansi-escapes@npm:4.3.2" - dependencies: - type-fest: "npm:^0.21.3" - checksum: 10c0/da917be01871525a3dfcf925ae2977bc59e8c513d4423368645634bf5d4ceba5401574eb705c1e92b79f7292af5a656f78c5725a4b0e1cec97c4b413705c1d50 - languageName: node - linkType: hard - -"ansi-regex@npm:^5.0.1": - version: 5.0.1 - resolution: "ansi-regex@npm:5.0.1" - checksum: 10c0/9a64bb8627b434ba9327b60c027742e5d17ac69277960d041898596271d992d4d52ba7267a63ca10232e29f6107fc8a835f6ce8d719b88c5f8493f8254813737 - languageName: node - linkType: hard - -"ansi-regex@npm:^6.0.1": - version: 6.1.0 - resolution: "ansi-regex@npm:6.1.0" - checksum: 10c0/a91daeddd54746338478eef88af3439a7edf30f8e23196e2d6ed182da9add559c601266dbef01c2efa46a958ad6f1f8b176799657616c702b5b02e799e7fd8dc - languageName: node - linkType: hard - -"ansi-styles@npm:^4.0.0, ansi-styles@npm:^4.1.0": - version: 4.3.0 - resolution: "ansi-styles@npm:4.3.0" - dependencies: - color-convert: "npm:^2.0.1" - checksum: 10c0/895a23929da416f2bd3de7e9cb4eabd340949328ab85ddd6e484a637d8f6820d485f53933446f5291c3b760cbc488beb8e88573dd0f9c7daf83dccc8fe81b041 - languageName: node - linkType: hard - -"asynckit@npm:^0.4.0": - version: 0.4.0 - resolution: "asynckit@npm:0.4.0" - checksum: 10c0/d73e2ddf20c4eb9337e1b3df1a0f6159481050a5de457c55b14ea2e5cb6d90bb69e004c9af54737a5ee0917fcf2c9e25de67777bbe58261847846066ba75bc9d - languageName: node - linkType: hard - -"axios@npm:^1.7.7": - version: 1.7.7 - resolution: "axios@npm:1.7.7" - dependencies: - follow-redirects: "npm:^1.15.6" - form-data: "npm:^4.0.0" - proxy-from-env: "npm:^1.1.0" - checksum: 10c0/4499efc89e86b0b49ffddc018798de05fab26e3bf57913818266be73279a6418c3ce8f9e934c7d2d707ab8c095e837fc6c90608fb7715b94d357720b5f568af7 - languageName: node - linkType: hard - -"b4a@npm:^1.6.4, b4a@npm:^1.6.6": - version: 1.6.7 - resolution: "b4a@npm:1.6.7" - checksum: 10c0/ec2f004d1daae04be8c5a1f8aeb7fea213c34025e279db4958eb0b82c1729ee25f7c6e89f92a5f65c8a9cf2d017ce27e3dda912403341d1781bd74528a4849d4 - languageName: node - linkType: hard - -"balanced-match@npm:^1.0.0": - version: 1.0.2 - resolution: "balanced-match@npm:1.0.2" - checksum: 10c0/9308baf0a7e4838a82bbfd11e01b1cb0f0cf2893bc1676c27c2a8c0e70cbae1c59120c3268517a8ae7fb6376b4639ef81ca22582611dbee4ed28df945134aaee - languageName: node - linkType: hard - -"bare-events@npm:^2.0.0, bare-events@npm:^2.2.0": - version: 2.5.0 - resolution: "bare-events@npm:2.5.0" - checksum: 10c0/afbeec4e8be4d93fb4a3be65c3b4a891a2205aae30b5a38fafd42976cc76cf30dad348963fe330a0d70186e15dc507c11af42c89af5dddab2a54e5aff02e2896 - languageName: node - linkType: hard - -"bare-fs@npm:^2.1.1": - version: 2.3.5 - resolution: "bare-fs@npm:2.3.5" - dependencies: - bare-events: "npm:^2.0.0" - bare-path: "npm:^2.0.0" - bare-stream: "npm:^2.0.0" - checksum: 10c0/ff18cc9be7c557c38e0342681ba3672ae4b01e5696b567d4035e5995255dc6bc7d4df88ed210fa4d3eb940eb29512e924ebb42814c87fc59a2bee8cf83b7c2f9 - languageName: node - linkType: hard - -"bare-os@npm:^2.1.0": - version: 2.4.4 - resolution: "bare-os@npm:2.4.4" - checksum: 10c0/e7d1a7b2100c05da8d25b60d0d48cf850c6f57064577a3f2f51cf18d417fbcfd6967ed2d8314320914ed69e0f2ebcf54eb1b36092dd172d8e8f969cf8cccf041 - languageName: node - linkType: hard - -"bare-path@npm:^2.0.0, bare-path@npm:^2.1.0": - version: 2.1.3 - resolution: "bare-path@npm:2.1.3" - dependencies: - 
bare-os: "npm:^2.1.0" - checksum: 10c0/35587e177fc8fa5b13fb90bac8779b5ce49c99016d221ddaefe2232d02bd4295d79b941e14ae19fda75ec42a6fe5fb66c07d83ae7ec11462178e66b7be65ca74 - languageName: node - linkType: hard - -"bare-stream@npm:^2.0.0": - version: 2.3.0 - resolution: "bare-stream@npm:2.3.0" - dependencies: - b4a: "npm:^1.6.6" - streamx: "npm:^2.20.0" - checksum: 10c0/374a517542e6a0c3c07f3a1d567db612685e66708f79781112aa0e81c1f117ec561cc1ff3926144f15a2200316a77030c95dcc13a1b96d5303f0748798b764cf - languageName: node - linkType: hard - -"bbup@workspace:.": - version: 0.0.0-use.local - resolution: "bbup@workspace:." - dependencies: - "@inquirer/input": "npm:^1.2.16" - "@inquirer/select": "npm:^1.3.3" - axios: "npm:^1.7.7" - commander: "npm:^11.1.0" - log-symbols: "npm:^7.0.0" - ora: "npm:^8.1.0" - tar-fs: "npm:^3.0.6" - tiged: "npm:^2.12.6" - bin: - bbup: ./bbup.js - languageName: unknown - linkType: soft - -"brace-expansion@npm:^1.1.7": - version: 1.1.11 - resolution: "brace-expansion@npm:1.1.11" - dependencies: - balanced-match: "npm:^1.0.0" - concat-map: "npm:0.0.1" - checksum: 10c0/695a56cd058096a7cb71fb09d9d6a7070113c7be516699ed361317aca2ec169f618e28b8af352e02ab4233fb54eb0168460a40dc320bab0034b36ab59aaad668 - languageName: node - linkType: hard - -"chalk@npm:^4.1.2": - version: 4.1.2 - resolution: "chalk@npm:4.1.2" - dependencies: - ansi-styles: "npm:^4.1.0" - supports-color: "npm:^7.1.0" - checksum: 10c0/4a3fef5cc34975c898ffe77141450f679721df9dde00f6c304353fa9c8b571929123b26a0e4617bde5018977eb655b31970c297b91b63ee83bb82aeb04666880 - languageName: node - linkType: hard - -"chalk@npm:^5.3.0": - version: 5.3.0 - resolution: "chalk@npm:5.3.0" - checksum: 10c0/8297d436b2c0f95801103ff2ef67268d362021b8210daf8ddbe349695333eb3610a71122172ff3b0272f1ef2cf7cc2c41fdaa4715f52e49ffe04c56340feed09 - languageName: node - linkType: hard - -"chownr@npm:^2.0.0": - version: 2.0.0 - resolution: "chownr@npm:2.0.0" - checksum: 10c0/594754e1303672171cc04e50f6c398ae16128eb134a88f801bf5354fd96f205320f23536a045d9abd8b51024a149696e51231565891d4efdab8846021ecf88e6 - languageName: node - linkType: hard - -"cli-cursor@npm:^5.0.0": - version: 5.0.0 - resolution: "cli-cursor@npm:5.0.0" - dependencies: - restore-cursor: "npm:^5.0.0" - checksum: 10c0/7ec62f69b79f6734ab209a3e4dbdc8af7422d44d360a7cb1efa8a0887bbe466a6e625650c466fe4359aee44dbe2dc0b6994b583d40a05d0808a5cb193641d220 - languageName: node - linkType: hard - -"cli-spinners@npm:^2.9.2": - version: 2.9.2 - resolution: "cli-spinners@npm:2.9.2" - checksum: 10c0/907a1c227ddf0d7a101e7ab8b300affc742ead4b4ebe920a5bf1bc6d45dce2958fcd195eb28fa25275062fe6fa9b109b93b63bc8033396ed3bcb50297008b3a3 - languageName: node - linkType: hard - -"cli-width@npm:^4.1.0": - version: 4.1.0 - resolution: "cli-width@npm:4.1.0" - checksum: 10c0/1fbd56413578f6117abcaf858903ba1f4ad78370a4032f916745fa2c7e390183a9d9029cf837df320b0fdce8137668e522f60a30a5f3d6529ff3872d265a955f - languageName: node - linkType: hard - -"color-convert@npm:^2.0.1": - version: 2.0.1 - resolution: "color-convert@npm:2.0.1" - dependencies: - color-name: "npm:~1.1.4" - checksum: 10c0/37e1150172f2e311fe1b2df62c6293a342ee7380da7b9cfdba67ea539909afbd74da27033208d01d6d5cfc65ee7868a22e18d7e7648e004425441c0f8a15a7d7 - languageName: node - linkType: hard - -"color-name@npm:~1.1.4": - version: 1.1.4 - resolution: "color-name@npm:1.1.4" - checksum: 10c0/a1a3f914156960902f46f7f56bc62effc6c94e84b2cae157a526b1c1f74b677a47ec602bf68a61abfa2b42d15b7c5651c6dbe72a43af720bc588dff885b10f95 - languageName: node - linkType: hard - 
-"colorette@npm:1.2.1": - version: 1.2.1 - resolution: "colorette@npm:1.2.1" - checksum: 10c0/993422e8ef02c3e267ac49ea7f3457839ec261a27a9bf00c4eb1fab5eec40bc1e992972e6e4c392a488838c905c80410736dfc94109be6ae53f19434461022a6 - languageName: node - linkType: hard - -"combined-stream@npm:^1.0.8": - version: 1.0.8 - resolution: "combined-stream@npm:1.0.8" - dependencies: - delayed-stream: "npm:~1.0.0" - checksum: 10c0/0dbb829577e1b1e839fa82b40c07ffaf7de8a09b935cadd355a73652ae70a88b4320db322f6634a4ad93424292fa80973ac6480986247f1734a1137debf271d5 - languageName: node - linkType: hard - -"commander@npm:^11.1.0": - version: 11.1.0 - resolution: "commander@npm:11.1.0" - checksum: 10c0/13cc6ac875e48780250f723fb81c1c1178d35c5decb1abb1b628b3177af08a8554e76b2c0f29de72d69eef7c864d12613272a71fabef8047922bc622ab75a179 - languageName: node - linkType: hard - -"concat-map@npm:0.0.1": - version: 0.0.1 - resolution: "concat-map@npm:0.0.1" - checksum: 10c0/c996b1cfdf95b6c90fee4dae37e332c8b6eb7d106430c17d538034c0ad9a1630cb194d2ab37293b1bdd4d779494beee7786d586a50bd9376fd6f7bcc2bd4c98f - languageName: node - linkType: hard - -"debug@npm:4": - version: 4.3.7 - resolution: "debug@npm:4.3.7" - dependencies: - ms: "npm:^2.1.3" - peerDependenciesMeta: - supports-color: - optional: true - checksum: 10c0/1471db19c3b06d485a622d62f65947a19a23fbd0dd73f7fd3eafb697eec5360cde447fb075919987899b1a2096e85d35d4eb5a4de09a57600ac9cf7e6c8e768b - languageName: node - linkType: hard - -"delayed-stream@npm:~1.0.0": - version: 1.0.0 - resolution: "delayed-stream@npm:1.0.0" - checksum: 10c0/d758899da03392e6712f042bec80aa293bbe9e9ff1b2634baae6a360113e708b91326594c8a486d475c69d6259afb7efacdc3537bfcda1c6c648e390ce601b19 - languageName: node - linkType: hard - -"emoji-regex@npm:^10.3.0": - version: 10.4.0 - resolution: "emoji-regex@npm:10.4.0" - checksum: 10c0/a3fcedfc58bfcce21a05a5f36a529d81e88d602100145fcca3dc6f795e3c8acc4fc18fe773fbf9b6d6e9371205edb3afa2668ec3473fa2aa7fd47d2a9d46482d - languageName: node - linkType: hard - -"emoji-regex@npm:^8.0.0": - version: 8.0.0 - resolution: "emoji-regex@npm:8.0.0" - checksum: 10c0/b6053ad39951c4cf338f9092d7bfba448cdfd46fe6a2a034700b149ac9ffbc137e361cbd3c442297f86bed2e5f7576c1b54cc0a6bf8ef5106cc62f496af35010 - languageName: node - linkType: hard - -"end-of-stream@npm:^1.1.0": - version: 1.4.4 - resolution: "end-of-stream@npm:1.4.4" - dependencies: - once: "npm:^1.4.0" - checksum: 10c0/870b423afb2d54bb8d243c63e07c170409d41e20b47eeef0727547aea5740bd6717aca45597a9f2745525667a6b804c1e7bede41f856818faee5806dd9ff3975 - languageName: node - linkType: hard - -"enquirer@npm:2.3.6": - version: 2.3.6 - resolution: "enquirer@npm:2.3.6" - dependencies: - ansi-colors: "npm:^4.1.1" - checksum: 10c0/8e070e052c2c64326a2803db9084d21c8aaa8c688327f133bf65c4a712586beb126fd98c8a01cfb0433e82a4bd3b6262705c55a63e0f7fb91d06b9cedbde9a11 - languageName: node - linkType: hard - -"escape-string-regexp@npm:^1.0.5": - version: 1.0.5 - resolution: "escape-string-regexp@npm:1.0.5" - checksum: 10c0/a968ad453dd0c2724e14a4f20e177aaf32bb384ab41b674a8454afe9a41c5e6fe8903323e0a1052f56289d04bd600f81278edf140b0fcc02f5cac98d0f5b5371 - languageName: node - linkType: hard - -"fast-fifo@npm:^1.2.0, fast-fifo@npm:^1.3.2": - version: 1.3.2 - resolution: "fast-fifo@npm:1.3.2" - checksum: 10c0/d53f6f786875e8b0529f784b59b4b05d4b5c31c651710496440006a398389a579c8dbcd2081311478b5bf77f4b0b21de69109c5a4eabea9d8e8783d1eb864e4c - languageName: node - linkType: hard - -"figures@npm:^3.2.0": - version: 3.2.0 - resolution: "figures@npm:3.2.0" - dependencies: - 
escape-string-regexp: "npm:^1.0.5" - checksum: 10c0/9c421646ede432829a50bc4e55c7a4eb4bcb7cc07b5bab2f471ef1ab9a344595bbebb6c5c21470093fbb730cd81bbca119624c40473a125293f656f49cb47629 - languageName: node - linkType: hard - -"follow-redirects@npm:^1.15.6": - version: 1.15.9 - resolution: "follow-redirects@npm:1.15.9" - peerDependenciesMeta: - debug: - optional: true - checksum: 10c0/5829165bd112c3c0e82be6c15b1a58fa9dcfaede3b3c54697a82fe4a62dd5ae5e8222956b448d2f98e331525f05d00404aba7d696de9e761ef6e42fdc780244f - languageName: node - linkType: hard - -"form-data@npm:^4.0.0": - version: 4.0.0 - resolution: "form-data@npm:4.0.0" - dependencies: - asynckit: "npm:^0.4.0" - combined-stream: "npm:^1.0.8" - mime-types: "npm:^2.1.12" - checksum: 10c0/cb6f3ac49180be03ff07ba3ff125f9eba2ff0b277fb33c7fc47569fc5e616882c5b1c69b9904c4c4187e97dd0419dd03b134174756f296dec62041e6527e2c6e - languageName: node - linkType: hard - -"fs-extra@npm:10.1.0": - version: 10.1.0 - resolution: "fs-extra@npm:10.1.0" - dependencies: - graceful-fs: "npm:^4.2.0" - jsonfile: "npm:^6.0.1" - universalify: "npm:^2.0.0" - checksum: 10c0/5f579466e7109719d162a9249abbeffe7f426eb133ea486e020b89bc6d67a741134076bf439983f2eb79276ceaf6bd7b7c1e43c3fd67fe889863e69072fb0a5e - languageName: node - linkType: hard - -"fs-minipass@npm:^2.0.0": - version: 2.1.0 - resolution: "fs-minipass@npm:2.1.0" - dependencies: - minipass: "npm:^3.0.0" - checksum: 10c0/703d16522b8282d7299337539c3ed6edddd1afe82435e4f5b76e34a79cd74e488a8a0e26a636afc2440e1a23b03878e2122e3a2cfe375a5cf63c37d92b86a004 - languageName: node - linkType: hard - -"fs.realpath@npm:^1.0.0": - version: 1.0.0 - resolution: "fs.realpath@npm:1.0.0" - checksum: 10c0/444cf1291d997165dfd4c0d58b69f0e4782bfd9149fd72faa4fe299e68e0e93d6db941660b37dd29153bf7186672ececa3b50b7e7249477b03fdf850f287c948 - languageName: node - linkType: hard - -"fuzzysearch@npm:1.0.3": - version: 1.0.3 - resolution: "fuzzysearch@npm:1.0.3" - checksum: 10c0/de6ab4a84cb0d570d1b55c9b9c2bb435b2a781452d23e63911e95d333e3dd1badea743a1d1ab0cac6f28d7e262347dfce10632f0aa9e5df0baaae0270f49578f - languageName: node - linkType: hard - -"get-east-asian-width@npm:^1.0.0": - version: 1.2.0 - resolution: "get-east-asian-width@npm:1.2.0" - checksum: 10c0/914b1e217cf38436c24b4c60b4c45289e39a45bf9e65ef9fd343c2815a1a02b8a0215aeec8bf9c07c516089004b6e3826332481f40a09529fcadbf6e579f286b - languageName: node - linkType: hard - -"glob@npm:^7.1.3": - version: 7.2.3 - resolution: "glob@npm:7.2.3" - dependencies: - fs.realpath: "npm:^1.0.0" - inflight: "npm:^1.0.4" - inherits: "npm:2" - minimatch: "npm:^3.1.1" - once: "npm:^1.3.0" - path-is-absolute: "npm:^1.0.0" - checksum: 10c0/65676153e2b0c9095100fe7f25a778bf45608eeb32c6048cf307f579649bcc30353277b3b898a3792602c65764e5baa4f643714dfbdfd64ea271d210c7a425fe - languageName: node - linkType: hard - -"globalyzer@npm:0.1.0": - version: 0.1.0 - resolution: "globalyzer@npm:0.1.0" - checksum: 10c0/e16e47a5835cbe8a021423d4c7fcd9f5f85815b4190a7f50c1fdb95fc559d72e4fb30be96f106c66a99413f36d72da0f8323d19d27f60a8feec9d936139ec5a8 - languageName: node - linkType: hard - -"globrex@npm:^0.1.2": - version: 0.1.2 - resolution: "globrex@npm:0.1.2" - checksum: 10c0/a54c029520cf58bda1d8884f72bd49b4cd74e977883268d931fd83bcbd1a9eb96d57c7dbd4ad80148fb9247467ebfb9b215630b2ed7563b2a8de02e1ff7f89d1 - languageName: node - linkType: hard - -"graceful-fs@npm:^4.1.6, graceful-fs@npm:^4.2.0": - version: 4.2.11 - resolution: "graceful-fs@npm:4.2.11" - checksum: 
10c0/386d011a553e02bc594ac2ca0bd6d9e4c22d7fa8cfbfc448a6d148c59ea881b092db9dbe3547ae4b88e55f1b01f7c4a2ecc53b310c042793e63aa44cf6c257f2 - languageName: node - linkType: hard - -"has-flag@npm:^4.0.0": - version: 4.0.0 - resolution: "has-flag@npm:4.0.0" - checksum: 10c0/2e789c61b7888d66993e14e8331449e525ef42aac53c627cc53d1c3334e768bcb6abdc4f5f0de1478a25beec6f0bd62c7549058b7ac53e924040d4f301f02fd1 - languageName: node - linkType: hard - -"https-proxy-agent@npm:5.0.0": - version: 5.0.0 - resolution: "https-proxy-agent@npm:5.0.0" - dependencies: - agent-base: "npm:6" - debug: "npm:4" - checksum: 10c0/670c04f7f0effb5a449c094ea037cbcfb28a5ab93ed22e8c343095202cc7288027869a5a21caf4ee3b8ea06f9624ef1e1fc9044669c0fd92617654ff39f30806 - languageName: node - linkType: hard - -"inflight@npm:^1.0.4": - version: 1.0.6 - resolution: "inflight@npm:1.0.6" - dependencies: - once: "npm:^1.3.0" - wrappy: "npm:1" - checksum: 10c0/7faca22584600a9dc5b9fca2cd5feb7135ac8c935449837b315676b4c90aa4f391ec4f42240178244b5a34e8bede1948627fda392ca3191522fc46b34e985ab2 - languageName: node - linkType: hard - -"inherits@npm:2": - version: 2.0.4 - resolution: "inherits@npm:2.0.4" - checksum: 10c0/4e531f648b29039fb7426fb94075e6545faa1eb9fe83c29f0b6d9e7263aceb4289d2d4557db0d428188eeb449cc7c5e77b0a0b2c4e248ff2a65933a0dee49ef2 - languageName: node - linkType: hard - -"is-fullwidth-code-point@npm:^3.0.0": - version: 3.0.0 - resolution: "is-fullwidth-code-point@npm:3.0.0" - checksum: 10c0/bb11d825e049f38e04c06373a8d72782eee0205bda9d908cc550ccb3c59b99d750ff9537982e01733c1c94a58e35400661f57042158ff5e8f3e90cf936daf0fc - languageName: node - linkType: hard - -"is-interactive@npm:^2.0.0": - version: 2.0.0 - resolution: "is-interactive@npm:2.0.0" - checksum: 10c0/801c8f6064f85199dc6bf99b5dd98db3282e930c3bc197b32f2c5b89313bb578a07d1b8a01365c4348c2927229234f3681eb861b9c2c92bee72ff397390fa600 - languageName: node - linkType: hard - -"is-unicode-supported@npm:^1.3.0": - version: 1.3.0 - resolution: "is-unicode-supported@npm:1.3.0" - checksum: 10c0/b8674ea95d869f6faabddc6a484767207058b91aea0250803cbf1221345cb0c56f466d4ecea375dc77f6633d248d33c47bd296fb8f4cdba0b4edba8917e83d8a - languageName: node - linkType: hard - -"is-unicode-supported@npm:^2.0.0": - version: 2.1.0 - resolution: "is-unicode-supported@npm:2.1.0" - checksum: 10c0/a0f53e9a7c1fdbcf2d2ef6e40d4736fdffff1c9f8944c75e15425118ff3610172c87bf7bc6c34d3903b04be59790bb2212ddbe21ee65b5a97030fc50370545a5 - languageName: node - linkType: hard - -"jsonfile@npm:^6.0.1": - version: 6.1.0 - resolution: "jsonfile@npm:6.1.0" - dependencies: - graceful-fs: "npm:^4.1.6" - universalify: "npm:^2.0.0" - dependenciesMeta: - graceful-fs: - optional: true - checksum: 10c0/4f95b5e8a5622b1e9e8f33c96b7ef3158122f595998114d1e7f03985649ea99cb3cd99ce1ed1831ae94c8c8543ab45ebd044207612f31a56fd08462140e46865 - languageName: node - linkType: hard - -"log-symbols@npm:^6.0.0": - version: 6.0.0 - resolution: "log-symbols@npm:6.0.0" - dependencies: - chalk: "npm:^5.3.0" - is-unicode-supported: "npm:^1.3.0" - checksum: 10c0/36636cacedba8f067d2deb4aad44e91a89d9efb3ead27e1846e7b82c9a10ea2e3a7bd6ce28a7ca616bebc60954ff25c67b0f92d20a6a746bb3cc52c3701891f6 - languageName: node - linkType: hard - -"log-symbols@npm:^7.0.0": - version: 7.0.0 - resolution: "log-symbols@npm:7.0.0" - dependencies: - is-unicode-supported: "npm:^2.0.0" - yoctocolors: "npm:^2.1.1" - checksum: 10c0/209eeb0009da6c3f9ebb736d3d65ff1ad3cb757b0c3ba66a5089d7463f77155ade88084c4db31b53341c89aeae3dc89dbc56888d2ae6ffd082bf96c4d2ac429d - languageName: node - linkType: 
hard - -"mime-db@npm:1.52.0": - version: 1.52.0 - resolution: "mime-db@npm:1.52.0" - checksum: 10c0/0557a01deebf45ac5f5777fe7740b2a5c309c6d62d40ceab4e23da9f821899ce7a900b7ac8157d4548ddbb7beffe9abc621250e6d182b0397ec7f10c7b91a5aa - languageName: node - linkType: hard - -"mime-types@npm:^2.1.12": - version: 2.1.35 - resolution: "mime-types@npm:2.1.35" - dependencies: - mime-db: "npm:1.52.0" - checksum: 10c0/82fb07ec56d8ff1fc999a84f2f217aa46cb6ed1033fefaabd5785b9a974ed225c90dc72fff460259e66b95b73648596dbcc50d51ed69cdf464af2d237d3149b2 - languageName: node - linkType: hard - -"mimic-function@npm:^5.0.0": - version: 5.0.1 - resolution: "mimic-function@npm:5.0.1" - checksum: 10c0/f3d9464dd1816ecf6bdf2aec6ba32c0728022039d992f178237d8e289b48764fee4131319e72eedd4f7f094e22ded0af836c3187a7edc4595d28dd74368fd81d - languageName: node - linkType: hard - -"minimatch@npm:^3.1.1": - version: 3.1.2 - resolution: "minimatch@npm:3.1.2" - dependencies: - brace-expansion: "npm:^1.1.7" - checksum: 10c0/0262810a8fc2e72cca45d6fd86bd349eee435eb95ac6aa45c9ea2180e7ee875ef44c32b55b5973ceabe95ea12682f6e3725cbb63d7a2d1da3ae1163c8b210311 - languageName: node - linkType: hard - -"minipass@npm:^3.0.0": - version: 3.3.6 - resolution: "minipass@npm:3.3.6" - dependencies: - yallist: "npm:^4.0.0" - checksum: 10c0/a114746943afa1dbbca8249e706d1d38b85ed1298b530f5808ce51f8e9e941962e2a5ad2e00eae7dd21d8a4aae6586a66d4216d1a259385e9d0358f0c1eba16c - languageName: node - linkType: hard - -"minipass@npm:^5.0.0": - version: 5.0.0 - resolution: "minipass@npm:5.0.0" - checksum: 10c0/a91d8043f691796a8ac88df039da19933ef0f633e3d7f0d35dcd5373af49131cf2399bfc355f41515dc495e3990369c3858cd319e5c2722b4753c90bf3152462 - languageName: node - linkType: hard - -"minizlib@npm:^2.1.1": - version: 2.1.2 - resolution: "minizlib@npm:2.1.2" - dependencies: - minipass: "npm:^3.0.0" - yallist: "npm:^4.0.0" - checksum: 10c0/64fae024e1a7d0346a1102bb670085b17b7f95bf6cfdf5b128772ec8faf9ea211464ea4add406a3a6384a7d87a0cd1a96263692134323477b4fb43659a6cab78 - languageName: node - linkType: hard - -"mkdirp@npm:^1.0.3": - version: 1.0.4 - resolution: "mkdirp@npm:1.0.4" - bin: - mkdirp: bin/cmd.js - checksum: 10c0/46ea0f3ffa8bc6a5bc0c7081ffc3907777f0ed6516888d40a518c5111f8366d97d2678911ad1a6882bf592fa9de6c784fea32e1687bb94e1f4944170af48a5cf - languageName: node - linkType: hard - -"mri@npm:1.1.6": - version: 1.1.6 - resolution: "mri@npm:1.1.6" - checksum: 10c0/dd29640dd5d4d3abc959156806adc2e7c6233b010609727499616f2047e9481dcbd3ba9b0bc7135428f1c42cca6b0475cee6f898b41ff8ccec730e4fa80de40d - languageName: node - linkType: hard - -"ms@npm:^2.1.3": - version: 2.1.3 - resolution: "ms@npm:2.1.3" - checksum: 10c0/d924b57e7312b3b63ad21fc5b3dc0af5e78d61a1fc7cfb5457edaf26326bf62be5307cc87ffb6862ef1c2b33b0233cdb5d4f01c4c958cc0d660948b65a287a48 - languageName: node - linkType: hard - -"mute-stream@npm:^1.0.0": - version: 1.0.0 - resolution: "mute-stream@npm:1.0.0" - checksum: 10c0/dce2a9ccda171ec979a3b4f869a102b1343dee35e920146776780de182f16eae459644d187e38d59a3d37adf85685e1c17c38cf7bfda7e39a9880f7a1d10a74c - languageName: node - linkType: hard - -"once@npm:^1.3.0, once@npm:^1.3.1, once@npm:^1.4.0": - version: 1.4.0 - resolution: "once@npm:1.4.0" - dependencies: - wrappy: "npm:1" - checksum: 10c0/5d48aca287dfefabd756621c5dfce5c91a549a93e9fdb7b8246bc4c4790aa2ec17b34a260530474635147aeb631a2dcc8b32c613df0675f96041cbb8244517d0 - languageName: node - linkType: hard - -"onetime@npm:^7.0.0": - version: 7.0.0 - resolution: "onetime@npm:7.0.0" - dependencies: - mimic-function: "npm:^5.0.0" - 
checksum: 10c0/5cb9179d74b63f52a196a2e7037ba2b9a893245a5532d3f44360012005c9cadb60851d56716ebff18a6f47129dab7168022445df47c2aff3b276d92585ed1221 - languageName: node - linkType: hard - -"ora@npm:^8.1.0": - version: 8.1.0 - resolution: "ora@npm:8.1.0" - dependencies: - chalk: "npm:^5.3.0" - cli-cursor: "npm:^5.0.0" - cli-spinners: "npm:^2.9.2" - is-interactive: "npm:^2.0.0" - is-unicode-supported: "npm:^2.0.0" - log-symbols: "npm:^6.0.0" - stdin-discarder: "npm:^0.2.2" - string-width: "npm:^7.2.0" - strip-ansi: "npm:^7.1.0" - checksum: 10c0/4ac9a6dd7fe915a354680f33ced21ee96d13d3c5ab0dc00b3c3ba9e3695ed141b1d045222990f5a71a9a91f801042a0b0d32e58dfc5509ff9b81efdd3fcf6339 - languageName: node - linkType: hard - -"path-is-absolute@npm:^1.0.0": - version: 1.0.1 - resolution: "path-is-absolute@npm:1.0.1" - checksum: 10c0/127da03c82172a2a50099cddbf02510c1791fc2cc5f7713ddb613a56838db1e8168b121a920079d052e0936c23005562059756d653b7c544c53185efe53be078 - languageName: node - linkType: hard - -"proxy-from-env@npm:^1.1.0": - version: 1.1.0 - resolution: "proxy-from-env@npm:1.1.0" - checksum: 10c0/fe7dd8b1bdbbbea18d1459107729c3e4a2243ca870d26d34c2c1bcd3e4425b7bcc5112362df2d93cc7fb9746f6142b5e272fd1cc5c86ddf8580175186f6ad42b - languageName: node - linkType: hard - -"pump@npm:^3.0.0": - version: 3.0.2 - resolution: "pump@npm:3.0.2" - dependencies: - end-of-stream: "npm:^1.1.0" - once: "npm:^1.3.1" - checksum: 10c0/5ad655cb2a7738b4bcf6406b24ad0970d680649d996b55ad20d1be8e0c02394034e4c45ff7cd105d87f1e9b96a0e3d06fd28e11fae8875da26e7f7a8e2c9726f - languageName: node - linkType: hard - -"queue-tick@npm:^1.0.1": - version: 1.0.1 - resolution: "queue-tick@npm:1.0.1" - checksum: 10c0/0db998e2c9b15215317dbcf801e9b23e6bcde4044e115155dae34f8e7454b9a783f737c9a725528d677b7a66c775eb7a955cf144fe0b87f62b575ce5bfd515a9 - languageName: node - linkType: hard - -"restore-cursor@npm:^5.0.0": - version: 5.1.0 - resolution: "restore-cursor@npm:5.1.0" - dependencies: - onetime: "npm:^7.0.0" - signal-exit: "npm:^4.1.0" - checksum: 10c0/c2ba89131eea791d1b25205bdfdc86699767e2b88dee2a590b1a6caa51737deac8bad0260a5ded2f7c074b7db2f3a626bcf1fcf3cdf35974cbeea5e2e6764f60 - languageName: node - linkType: hard - -"rimraf@npm:3.0.2": - version: 3.0.2 - resolution: "rimraf@npm:3.0.2" - dependencies: - glob: "npm:^7.1.3" - bin: - rimraf: bin.js - checksum: 10c0/9cb7757acb489bd83757ba1a274ab545eafd75598a9d817e0c3f8b164238dd90eba50d6b848bd4dcc5f3040912e882dc7ba71653e35af660d77b25c381d402e8 - languageName: node - linkType: hard - -"run-async@npm:^3.0.0": - version: 3.0.0 - resolution: "run-async@npm:3.0.0" - checksum: 10c0/b18b562ae37c3020083dcaae29642e4cc360c824fbfb6b7d50d809a9d5227bb986152d09310255842c8dce40526e82ca768f02f00806c91ba92a8dfa6159cb85 - languageName: node - linkType: hard - -"signal-exit@npm:^4.1.0": - version: 4.1.0 - resolution: "signal-exit@npm:4.1.0" - checksum: 10c0/41602dce540e46d599edba9d9860193398d135f7ff72cab629db5171516cfae628d21e7bfccde1bbfdf11c48726bc2a6d1a8fb8701125852fbfda7cf19c6aa83 - languageName: node - linkType: hard - -"stdin-discarder@npm:^0.2.2": - version: 0.2.2 - resolution: "stdin-discarder@npm:0.2.2" - checksum: 10c0/c78375e82e956d7a64be6e63c809c7f058f5303efcaf62ea48350af072bacdb99c06cba39209b45a071c1acbd49116af30df1df9abb448df78a6005b72f10537 - languageName: node - linkType: hard - -"streamx@npm:^2.15.0, streamx@npm:^2.20.0": - version: 2.20.1 - resolution: "streamx@npm:2.20.1" - dependencies: - bare-events: "npm:^2.2.0" - fast-fifo: "npm:^1.3.2" - queue-tick: "npm:^1.0.1" - text-decoder: "npm:^1.1.0" - 
dependenciesMeta: - bare-events: - optional: true - checksum: 10c0/34ffa2ee9465d70e18c7e2ba70189720c166d150ab83eb7700304620fa23ff42a69cb37d712ea4b5fc6234d8e74346a88bb4baceb873c6b05e52ac420f8abb4d - languageName: node - linkType: hard - -"string-width@npm:^4.1.0": - version: 4.2.3 - resolution: "string-width@npm:4.2.3" - dependencies: - emoji-regex: "npm:^8.0.0" - is-fullwidth-code-point: "npm:^3.0.0" - strip-ansi: "npm:^6.0.1" - checksum: 10c0/1e525e92e5eae0afd7454086eed9c818ee84374bb80328fc41217ae72ff5f065ef1c9d7f72da41de40c75fa8bb3dee63d92373fd492c84260a552c636392a47b - languageName: node - linkType: hard - -"string-width@npm:^7.2.0": - version: 7.2.0 - resolution: "string-width@npm:7.2.0" - dependencies: - emoji-regex: "npm:^10.3.0" - get-east-asian-width: "npm:^1.0.0" - strip-ansi: "npm:^7.1.0" - checksum: 10c0/eb0430dd43f3199c7a46dcbf7a0b34539c76fe3aa62763d0b0655acdcbdf360b3f66f3d58ca25ba0205f42ea3491fa00f09426d3b7d3040e506878fc7664c9b9 - languageName: node - linkType: hard - -"strip-ansi@npm:^6.0.0, strip-ansi@npm:^6.0.1": - version: 6.0.1 - resolution: "strip-ansi@npm:6.0.1" - dependencies: - ansi-regex: "npm:^5.0.1" - checksum: 10c0/1ae5f212a126fe5b167707f716942490e3933085a5ff6c008ab97ab2f272c8025d3aa218b7bd6ab25729ca20cc81cddb252102f8751e13482a5199e873680952 - languageName: node - linkType: hard - -"strip-ansi@npm:^7.1.0": - version: 7.1.0 - resolution: "strip-ansi@npm:7.1.0" - dependencies: - ansi-regex: "npm:^6.0.1" - checksum: 10c0/a198c3762e8832505328cbf9e8c8381de14a4fa50a4f9b2160138158ea88c0f5549fb50cb13c651c3088f47e63a108b34622ec18c0499b6c8c3a5ddf6b305ac4 - languageName: node - linkType: hard - -"supports-color@npm:^7.1.0": - version: 7.2.0 - resolution: "supports-color@npm:7.2.0" - dependencies: - has-flag: "npm:^4.0.0" - checksum: 10c0/afb4c88521b8b136b5f5f95160c98dee7243dc79d5432db7efc27efb219385bbc7d9427398e43dd6cc730a0f87d5085ce1652af7efbe391327bc0a7d0f7fc124 - languageName: node - linkType: hard - -"tar-fs@npm:^3.0.6": - version: 3.0.6 - resolution: "tar-fs@npm:3.0.6" - dependencies: - bare-fs: "npm:^2.1.1" - bare-path: "npm:^2.1.0" - pump: "npm:^3.0.0" - tar-stream: "npm:^3.1.5" - dependenciesMeta: - bare-fs: - optional: true - bare-path: - optional: true - checksum: 10c0/207b7c0f193495668bd9dbad09a0108ce4ffcfec5bce2133f90988cdda5c81fad83c99f963d01e47b565196594f7a17dbd063ae55b97b36268fcc843975278ee - languageName: node - linkType: hard - -"tar-stream@npm:^3.1.5": - version: 3.1.7 - resolution: "tar-stream@npm:3.1.7" - dependencies: - b4a: "npm:^1.6.4" - fast-fifo: "npm:^1.2.0" - streamx: "npm:^2.15.0" - checksum: 10c0/a09199d21f8714bd729993ac49b6c8efcb808b544b89f23378ad6ffff6d1cb540878614ba9d4cfec11a64ef39e1a6f009a5398371491eb1fda606ffc7f70f718 - languageName: node - linkType: hard - -"tar@npm:^6.1.11": - version: 6.2.1 - resolution: "tar@npm:6.2.1" - dependencies: - chownr: "npm:^2.0.0" - fs-minipass: "npm:^2.0.0" - minipass: "npm:^5.0.0" - minizlib: "npm:^2.1.1" - mkdirp: "npm:^1.0.3" - yallist: "npm:^4.0.0" - checksum: 10c0/a5eca3eb50bc11552d453488344e6507156b9193efd7635e98e867fab275d527af53d8866e2370cd09dfe74378a18111622ace35af6a608e5223a7d27fe99537 - languageName: node - linkType: hard - -"text-decoder@npm:^1.1.0": - version: 1.2.0 - resolution: "text-decoder@npm:1.2.0" - dependencies: - b4a: "npm:^1.6.4" - checksum: 10c0/398171bef376e06864cd6ba24e0787cc626bebc84a1bbda758d06a6e9b729cc8613f7923dd0d294abd88e8bb5cd7261aad5fda7911fb87253fe71b2b5ac6e507 - languageName: node - linkType: hard - -"tiged@npm:^2.12.6": - version: 2.12.7 - resolution: "tiged@npm:2.12.7" - 
dependencies: - colorette: "npm:1.2.1" - enquirer: "npm:2.3.6" - fs-extra: "npm:10.1.0" - fuzzysearch: "npm:1.0.3" - https-proxy-agent: "npm:5.0.0" - mri: "npm:1.1.6" - rimraf: "npm:3.0.2" - tar: "npm:^6.1.11" - tiny-glob: "npm:0.2.8" - bin: - degit: bin.js - tiged: bin.js - checksum: 10c0/925ef35312b956a88d87a707193288deb99ef4dcb9097e0b53f3f6e6bd6cfc20d5aff7d32848b67892d8092923936d53b9473643c9d715c3b757a105a4f0191a - languageName: node - linkType: hard - -"tiny-glob@npm:0.2.8": - version: 0.2.8 - resolution: "tiny-glob@npm:0.2.8" - dependencies: - globalyzer: "npm:0.1.0" - globrex: "npm:^0.1.2" - checksum: 10c0/3055a11a94e35a26630262a80404bc81e987d567824ea51dc6d937a8ea5b0e87088ac0c4acc3edcb34b94f074d103b78523293ccc737abc8cc5024eb227a9790 - languageName: node - linkType: hard - -"type-fest@npm:^0.21.3": - version: 0.21.3 - resolution: "type-fest@npm:0.21.3" - checksum: 10c0/902bd57bfa30d51d4779b641c2bc403cdf1371fb9c91d3c058b0133694fcfdb817aef07a47f40faf79039eecbaa39ee9d3c532deff244f3a19ce68cea71a61e8 - languageName: node - linkType: hard - -"undici-types@npm:~6.19.2": - version: 6.19.8 - resolution: "undici-types@npm:6.19.8" - checksum: 10c0/078afa5990fba110f6824823ace86073b4638f1d5112ee26e790155f481f2a868cc3e0615505b6f4282bdf74a3d8caad715fd809e870c2bb0704e3ea6082f344 - languageName: node - linkType: hard - -"universalify@npm:^2.0.0": - version: 2.0.1 - resolution: "universalify@npm:2.0.1" - checksum: 10c0/73e8ee3809041ca8b818efb141801a1004e3fc0002727f1531f4de613ea281b494a40909596dae4a042a4fb6cd385af5d4db2e137b1362e0e91384b828effd3a - languageName: node - linkType: hard - -"wrap-ansi@npm:^6.2.0": - version: 6.2.0 - resolution: "wrap-ansi@npm:6.2.0" - dependencies: - ansi-styles: "npm:^4.0.0" - string-width: "npm:^4.1.0" - strip-ansi: "npm:^6.0.0" - checksum: 10c0/baad244e6e33335ea24e86e51868fe6823626e3a3c88d9a6674642afff1d34d9a154c917e74af8d845fd25d170c4ea9cf69a47133c3f3656e1252b3d462d9f6c - languageName: node - linkType: hard - -"wrappy@npm:1": - version: 1.0.2 - resolution: "wrappy@npm:1.0.2" - checksum: 10c0/56fece1a4018c6a6c8e28fbc88c87e0fbf4ea8fd64fc6c63b18f4acc4bd13e0ad2515189786dd2c30d3eec9663d70f4ecf699330002f8ccb547e4a18231fc9f0 - languageName: node - linkType: hard - -"yallist@npm:^4.0.0": - version: 4.0.0 - resolution: "yallist@npm:4.0.0" - checksum: 10c0/2286b5e8dbfe22204ab66e2ef5cc9bbb1e55dfc873bbe0d568aa943eb255d131890dfd5bf243637273d31119b870f49c18fcde2c6ffbb7a7a092b870dc90625a - languageName: node - linkType: hard - -"yoctocolors@npm:^2.1.1": - version: 2.1.1 - resolution: "yoctocolors@npm:2.1.1" - checksum: 10c0/85903f7fa96f1c70badee94789fade709f9d83dab2ec92753d612d84fcea6d34c772337a9f8914c6bed2f5fc03a428ac5d893e76fab636da5f1236ab725486d0 - languageName: node - linkType: hard diff --git a/barretenberg/cpp/src/barretenberg/bb/api.hpp b/barretenberg/cpp/src/barretenberg/bb/api.hpp index f33568f1869..ab1a3628a4f 100644 --- a/barretenberg/cpp/src/barretenberg/bb/api.hpp +++ b/barretenberg/cpp/src/barretenberg/bb/api.hpp @@ -8,6 +8,7 @@ class API { struct Flags { std::optional output_type; // bytes, fields, bytes_and_fields, fields_msgpack std::optional input_type; // compiletime_stack, runtime_stack + bool no_auto_verify; // TODO(https://github.com/AztecProtocol/barretenberg/issues/1101): remove }; virtual void prove(const Flags& flags, diff --git a/barretenberg/cpp/src/barretenberg/bb/api_client_ivc.hpp b/barretenberg/cpp/src/barretenberg/bb/api_client_ivc.hpp index 37b251bd8cf..8576f918d40 100644 --- a/barretenberg/cpp/src/barretenberg/bb/api_client_ivc.hpp +++ 
b/barretenberg/cpp/src/barretenberg/bb/api_client_ivc.hpp @@ -141,8 +141,7 @@ class ClientIVCAPI : public API { ClientIVC ivc{ { E2E_FULL_TEST_STRUCTURE }, /*auto_verify_mode=*/true }; // Accumulate the entire program stack into the IVC - // TODO(https://github.com/AztecProtocol/barretenberg/issues/1116): remove manual setting of is_kernel once - // databus has been integrated into noir kernel programs + // TODO(https://github.com/AztecProtocol/barretenberg/issues/1116): remove manual setting of is_kernel bool is_kernel = false; for (Program& program : folding_stack) { // Construct a bberg circuit from the acir representation then accumulate it into the IVC @@ -163,6 +162,47 @@ class ClientIVCAPI : public API { return ivc; }; + static ClientIVC _accumulate_without_auto_verify(std::vector& folding_stack) + { + using Builder = MegaCircuitBuilder; + using Program = acir_format::AcirProgram; + + using namespace acir_format; + + // TODO(https://github.com/AztecProtocol/barretenberg/issues/1163) set these dynamically + init_bn254_crs(1 << 20); + init_grumpkin_crs(1 << 15); + + // TODO(#7371) dedupe this with the rest of the similar code + // TODO(https://github.com/AztecProtocol/barretenberg/issues/1101): remove use of auto_verify_mode + ClientIVC ivc{ { E2E_FULL_TEST_STRUCTURE }, /*auto_verify_mode=*/false }; + + // Accumulate the entire program stack into the IVC + // TODO(https://github.com/AztecProtocol/barretenberg/issues/1116): remove manual setting of is_kernel once + // databus has been integrated into noir kernel programs + bool is_kernel = false; + for (Program& program : folding_stack) { + + Builder circuit; + + is_kernel = !program.constraints.ivc_recursion_constraints.empty(); + if (is_kernel) { + vinfo("Accumulating KERNEL."); + circuit = create_kernel_circuit(program.constraints, ivc, program.witness); + } else { + vinfo("Accumulating APP."); + circuit = create_circuit( + program.constraints, /*recursive=*/false, 0, program.witness, false, ivc.goblin.op_queue); + } + + // Do one step of IVC accumulation or, if there is only one circuit in the stack, prove that circuit. In this + case, no work is added to the Goblin op queue, but VM proofs for trivial inputs are produced.
+ ivc.accumulate(circuit, /*one_circuit=*/folding_stack.size() == 1); + } + + return ivc; + }; + public: void prove(const API::Flags& flags, const std::filesystem::path& bytecode_path, @@ -179,7 +219,19 @@ class ClientIVCAPI : public API { std::vector folding_stack = _build_folding_stack(*flags.input_type, bytecode_path, witness_path); - ClientIVC ivc = _accumulate(folding_stack); + + // TODO(https://github.com/AztecProtocol/barretenberg/issues/1163) set these dynamically + init_bn254_crs(1 << 20); + init_grumpkin_crs(1 << 15); + + ClientIVC ivc; + if (flags.no_auto_verify) { + vinfo("performing accumulation WITHOUT auto-verify"); + ivc = _accumulate_without_auto_verify(folding_stack); + } else { + vinfo("performing accumulation with auto-verify"); + ivc = _accumulate(folding_stack); + } ClientIVC::Proof proof = ivc.prove(); // Write the proof and verification keys into the working directory in 'binary' format (in practice it seems diff --git a/barretenberg/cpp/src/barretenberg/bb/main.cpp b/barretenberg/cpp/src/barretenberg/bb/main.cpp index 00b0b8a68e0..5b8bc5041fd 100644 --- a/barretenberg/cpp/src/barretenberg/bb/main.cpp +++ b/barretenberg/cpp/src/barretenberg/bb/main.cpp @@ -9,6 +9,7 @@ #include "barretenberg/constants.hpp" #include "barretenberg/dsl/acir_format/acir_format.hpp" #include "barretenberg/dsl/acir_format/acir_to_constraint_buf.hpp" +#include "barretenberg/dsl/acir_format/ivc_recursion_constraint.hpp" #include "barretenberg/dsl/acir_format/proof_surgeon.hpp" #include "barretenberg/dsl/acir_proofs/acir_composer.hpp" #include "barretenberg/dsl/acir_proofs/honk_contract.hpp" @@ -833,6 +834,62 @@ void write_vk_honk(const std::string& bytecodePath, const std::string& outputPat } } +/** + * @brief Compute and write to file a MegaHonk VK for a circuit to be accumulated in the IVC + * @note This method differs from write_vk_honk in that it handles kernel circuits which require special + treatment (i.e. construction of mock IVC state to correctly complete the kernel logic).
+ * + * @param bytecodePath + * @param outputPath + */ +void write_vk_for_ivc(const std::string& bytecodePath, const std::string& outputPath) +{ + using Builder = ClientIVC::ClientCircuit; + using Prover = ClientIVC::MegaProver; + using DeciderProvingKey = ClientIVC::DeciderProvingKey; + using VerificationKey = ClientIVC::MegaVerificationKey; + + // TODO(https://github.com/AztecProtocol/barretenberg/issues/1163) set these dynamically + init_bn254_crs(1 << 20); + init_grumpkin_crs(1 << 15); + + auto constraints = get_constraint_system(bytecodePath, /*honk_recursion=*/false); + acir_format::WitnessVector witness = {}; + + TraceSettings trace_settings{ E2E_FULL_TEST_STRUCTURE }; + + // The presence of ivc recursion constraints determines whether or not the program is a kernel + bool is_kernel = !constraints.ivc_recursion_constraints.empty(); + + Builder builder; + if (is_kernel) { + // Create a mock IVC instance based on the IVC recursion constraints in the kernel program + ClientIVC mock_ivc = create_mock_ivc_from_constraints(constraints.ivc_recursion_constraints, trace_settings); + builder = acir_format::create_kernel_circuit(constraints, mock_ivc, witness); + } else { + builder = acir_format::create_circuit( + constraints, /*recursive=*/false, 0, witness, /*honk_recursion=*/false); + } + // Add public inputs corresponding to pairing point accumulator + builder.add_pairing_point_accumulator(stdlib::recursion::init_default_agg_obj_indices(builder)); + + // Construct the verification key via the prover-constructed proving key with the proper trace settings + auto proving_key = std::make_shared(builder, trace_settings); + Prover prover{ proving_key }; + init_bn254_crs(prover.proving_key->proving_key.circuit_size); + VerificationKey vk(prover.proving_key->proving_key); + + // Write the VK to file as a buffer + auto serialized_vk = to_buffer(vk); + if (outputPath == "-") { + writeRawBytesToStdout(serialized_vk); + vinfo("vk written to stdout"); + } else { + write_file(outputPath, serialized_vk); + vinfo("vk written to: ", outputPath); + } +} + /** * @brief Write a toml file containing recursive verifier inputs for a given program + witness * @@ -1073,7 +1130,8 @@ int main(int argc, char* argv[]) const API::Flags flags = [&args]() { return API::Flags{ .output_type = get_option(args, "--output_type", "fields_msgpack"), - .input_type = get_option(args, "--input_type", "compiletime_stack") }; + .input_type = get_option(args, "--input_type", "compiletime_stack"), + .no_auto_verify = flag_present(args, "--no_auto_verify") }; }(); const std::string command = args[0]; @@ -1227,6 +1285,9 @@ int main(int argc, char* argv[]) } else if (command == "write_vk_mega_honk") { std::string output_path = get_option(args, "-o", "./target/vk"); write_vk_honk(bytecode_path, output_path, recursive); + } else if (command == "write_vk_for_ivc") { + std::string output_path = get_option(args, "-o", "./target/vk"); + write_vk_for_ivc(bytecode_path, output_path); } else if (command == "proof_as_fields_honk") { std::string output_path = get_option(args, "-o", proof_path + "_fields.json"); proof_as_fields_honk(proof_path, output_path); diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_to_constraint_buf.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_to_constraint_buf.cpp index a0d24e70e0b..cae59733e3b 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_to_constraint_buf.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_to_constraint_buf.cpp @@ -656,6
+656,11 @@ void handle_blackbox_func_call(Program::Opcode::BlackBoxFuncCall const& arg, af.honk_recursion_constraints.push_back(c); af.original_opcode_indices.honk_recursion_constraints.push_back(opcode_index); break; + case OINK: + case PG: + af.ivc_recursion_constraints.push_back(c); + af.original_opcode_indices.ivc_recursion_constraints.push_back(opcode_index); + break; case AVM: af.avm_recursion_constraints.push_back(c); af.original_opcode_indices.avm_recursion_constraints.push_back(opcode_index); diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/block_constraint.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/block_constraint.cpp index 0549a1bbeed..d6bf0d93323 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/block_constraint.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/block_constraint.cpp @@ -86,6 +86,8 @@ void create_block_constraints(MegaCircuitBuilder& builder, process_call_data_operations(builder, constraint, has_valid_witness_assignments, init); // The presence of calldata is used to indicate that the present circuit is a kernel. This is needed in the // databus consistency checks to indicate that the corresponding return data belongs to a kernel (else an app). + // TODO(https://github.com/AztecProtocol/barretenberg/issues/1165): is_kernel must be known prior to this stage + // since we must determine whether to use create_circuit or create_kernel_circuit. Resolve. builder.databus_propagation_data.is_kernel = true; } break; case BlockType::ReturnData: { diff --git a/barretenberg/cpp/src/barretenberg/stdlib/plonk_recursion/verifier/verifier.test.cpp b/barretenberg/cpp/src/barretenberg/stdlib/plonk_recursion/verifier/verifier.test.cpp index 02efc92c2cd..c1772d50f7c 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/plonk_recursion/verifier/verifier.test.cpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/plonk_recursion/verifier/verifier.test.cpp @@ -345,14 +345,8 @@ template class stdlib_verifier : public testing::Test { static void check_recursive_verification_circuit(OuterBuilder& outer_circuit, bool expected_result) { info("number of gates in recursive verification circuit = ", outer_circuit.get_estimated_num_finalized_gates()); - OuterComposer outer_composer; - auto prover = outer_composer.create_prover(outer_circuit); - auto verifier = outer_composer.create_verifier(outer_circuit); - auto proof = prover.construct_proof(); - auto result = verifier.verify_proof(proof); - // bool result = CircuitChecker::check(outer_circuit); + const bool result = CircuitChecker::check(outer_circuit); EXPECT_EQ(result, expected_result); - static_cast(expected_result); auto g2_lines = srs::get_bn254_crs_factory()->get_verifier_crs()->get_precomputed_g2_lines(); EXPECT_EQ(check_pairing_point_accum_public_inputs(outer_circuit, g2_lines), true); } diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/tests/recursive_verifier.test.cpp b/barretenberg/cpp/src/barretenberg/vm/avm/tests/recursive_verifier.test.cpp index 7397d26196b..0d9920ad09b 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/tests/recursive_verifier.test.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/tests/recursive_verifier.test.cpp @@ -109,8 +109,6 @@ TEST_F(AvmRecursiveTests, recursion) verification_key->pcs_verification_key->pairing_check(agg_output.P0.get_value(), agg_output.P1.get_value()); ASSERT_TRUE(agg_output_valid) << "Pairing points (aggregation state) are not valid."; - - vinfo("Recursive verifier: num gates = ", outer_circuit.num_gates); 
ASSERT_FALSE(outer_circuit.failed()) << "Outer circuit has failed."; bool outer_circuit_checked = CircuitChecker::check(outer_circuit); @@ -139,6 +137,8 @@ TEST_F(AvmRecursiveTests, recursion) auto ultra_verification_key = std::make_shared<OuterFlavor::VerificationKey>(ultra_instance->proving_key); OuterVerifier ultra_verifier(ultra_verification_key); + vinfo("Recursive verifier: finalized num gates = ", outer_circuit.num_gates); + auto recursion_proof = ultra_prover.construct_proof(); bool recursion_verified = ultra_verifier.verify_proof(recursion_proof); EXPECT_TRUE(recursion_verified) << "recursion proof verification failed"; diff --git a/build-system/scripts/build b/build-system/scripts/build index 090878dd135..a67c9fdb6b2 100755 --- a/build-system/scripts/build +++ b/build-system/scripts/build @@ -68,7 +68,7 @@ if [ -d $ROOT_PATH/$PROJECT_DIR/terraform ]; then popd fi -# For each dependency, substitute references to the dependency in dockerfile, with the relevent built image uri. +# For each dependency, substitute references to the dependency in dockerfile, with the relevant built image uri. # This is necessary vs pulling and retagging the image, as that doesn't work with buildx. # We have to perform a bit of probing to determine which actual image we want to use. # When we used buildx to create a multiarch image, there will be no images with "-$ARCH" suffixes (normalise this?). @@ -91,7 +91,7 @@ for PARENT_REPO in $(query_manifest dependencies $REPOSITORY); do fi fi - # Substitute references to parent repo, with the relevent built image uri. + # Substitute references to parent repo, with the relevant built image uri. DEPLOY_URI=aztecprotocol/$PARENT_REPO PARENT_IMAGE_URI=$ECR_URL/$PARENT_REPO:$PARENT_IMAGE_TAG sed -i "s#^FROM \\(.*\\)${DEPLOY_URI}\\( \|$\\)#FROM \\1${PARENT_IMAGE_URI}\\2#" $DOCKERFILE diff --git a/cspell.json b/cspell.json index 6a62badf5be..8b9e5d15e7e 100644 --- a/cspell.json +++ b/cspell.json @@ -11,6 +11,7 @@ "asyncify", "auditability", "authwit", + "Automine", "autonat", "autorun", "awslogs", diff --git a/docker-compose.yml b/docker-compose.yml index 12d365c8e03..c0928538e49 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -6,8 +6,6 @@ services: platform: linux/amd64 environment: LOG_LEVEL: ${LOG_LEVEL:-info} - DEBUG: ${DEBUG:-aztec:*,-json-rpc:*,-aztec:circuits:artifact_hash,-aztec:randomness_singleton} - DEBUG_COLORS: 1 L1_CHAIN_ID: 31337 VERSION: 1 PXE_PROVER_ENABLED: ${PXE_PROVER_ENABLED:-1} @@ -38,8 +36,6 @@ services: platform: linux/amd64 environment: LOG_LEVEL: ${LOG_LEVEL:-info} - DEBUG: ${DEBUG:-aztec:*,-json-rpc:*,-aztec:circuits:artifact_hash,-aztec:randomness_singleton,-aztec:avm_simulator:*} - DEBUG_COLORS: 1 L1_CHAIN_ID: 31337 VERSION: 1 NODE_NO_WARNINGS: 1 diff --git a/docs/docs/aztec/concepts/accounts/authwit.md b/docs/docs/aztec/concepts/accounts/authwit.md index 79cb828ccb3..307c07a0edb 100644 --- a/docs/docs/aztec/concepts/accounts/authwit.md +++ b/docs/docs/aztec/concepts/accounts/authwit.md @@ -5,6 +5,8 @@ importance: 1 keywords: [authwit, authentication witness, accounts] --- +import Image from "@theme/IdealImage"; + Authentication Witness is a scheme for authenticating actions on Aztec, so users can allow third-parties (eg protocols or other users) to execute an action on their behalf.
## Background @@ -13,15 +15,7 @@ When building DeFi or other smart contracts, it is often desired to interact wit In the EVM world, this is often accomplished by having the user `approve` the protocol to transfer funds from their account, and then calling a `deposit` function on it afterwards. -```mermaid -sequenceDiagram - actor Alice - Alice->>Token: approve(Defi, 1000); - Alice->>Defi: deposit(Token, 1000); - activate Defi - Defi->>Token: transferFrom(Alice, Defi, 1000); - deactivate Defi -``` + This flow makes it rather simple for the application developer to implement the deposit function, but does not come without its downsides. @@ -36,16 +30,7 @@ This can lead to a series of issues though, eg: To avoid this, many protocols implement the `permit` flow, which uses a meta-transaction to let the user sign the approval off-chain, and pass it as an input to the `deposit` function, that way the user only has to send one transaction to make the deposit. -```mermaid -sequenceDiagram - actor Alice - Alice->>Alice: sign permit(Defi, 1000); - Alice->>Defi: deposit(Token, 1000, signature); - activate Defi - Defi->>Token: permit(Alice, Defi, 1000, signature); - Defi->>Token: transferFrom(Alice, Defi, 1000); - deactivate Defi -``` + This is a great improvement to infinite approvals, but still has its own sets of issues. For example, if the user is using a smart-contract wallet (such as Argent or Gnosis Safe), they will not be able to sign the permit message since the usual signature validation does not work well with contracts. [EIP-1271](https://eips.ethereum.org/EIPS/eip-1271) was proposed to give contracts a way to emulate this, but it is not widely adopted. @@ -57,7 +42,7 @@ All of these issues have been discussed in the community for a while, and there Adopting ERC20 for Aztec is not as simple as it might seem because of private state. -If you recall from the [Hybrid State model](../state_model/index.md), private state is generally only known by its owner and those they have shared it with. Because it relies on secrets, private state might be "owned" by a contract, but it needs someone with knowledge of these secrets to actually spend it. You might see where this is going. +If you recall from the [Hybrid State model](../storage/state_model/index.md), private state is generally only known by its owner and those they have shared it with. Because it relies on secrets, private state might be "owned" by a contract, but it needs someone with knowledge of these secrets to actually spend it. You might see where this is going. If we were to implement the `approve` with an allowance in private, you might know the allowance, but unless you also know about the individual notes that make up the user's balances, it would be of no use to you! It is private after all. To spend the user's funds you would need to know the decryption key, see [keys for more](./keys.md). @@ -105,32 +90,7 @@ This can be read as "defi is allowed to call token transfer function with the ar With this out of the way, let's look at how this would work in the graph below. The exact contents of the witness will differ between implementations as mentioned before, but for the sake of simplicity you can think of it as a signature, which the account contract can then use to validate if it really should allow the action. 
-```mermaid -sequenceDiagram - actor Alice - participant AC as Alice Account - participant Token - Alice->>AC: Defi.deposit(Token, 1000); - activate AC - AC->>Defi: deposit(Token, 1000); - activate Defi - Defi->>Token: transfer(Alice, Defi, 1000); - activate Token - Token->>AC: Check if Defi may call transfer(Alice, Defi, 1000); - AC-->>Alice: Please give me AuthWit for DeFi
calling transfer(Alice, Defi, 1000); - activate Alice - Alice-->>Alice: Produces Authentication witness - Alice-->>AC: AuthWit for transfer(Alice, Defi, 1000); - AC->>Token: AuthWit validity - deactivate Alice - Token->>Token: throw if invalid AuthWit - Token->>Token: transfer(Alice, Defi, 1000); - Token->>Defi: success - deactivate Token - Defi->>Defi: deposit(Token, 1000); - deactivate Defi - deactivate AC -``` + :::info Static call for AuthWit checks The call to the account contract for checking authentication should be a static call, meaning that it cannot change state or make calls that change state. If this call is not static, it could be used to re-enter the flow and change the state of the contract. @@ -144,36 +104,7 @@ The above flow could be re-entered at token transfer. It is mainly for show to i As noted earlier, we could use the ERC20 standard for public. But this seems like a waste when we have the ability to try righting some wrongs. Instead, we can expand our AuthWit scheme to also work in public. This is actually quite simple, instead of asking an oracle (which we can't do as easily because not private execution) we can just store the AuthWit in a shared registry, and look it up when we need it. While this needs the storage to be updated ahead of time (can be same tx), we can quite easily do so by batching the AuthWit updates with the interaction - a benefit of Account Contracts. A shared registry is used such that execution from the sequencers point of view will be more straight forward and predictable. Furthermore, since we have the authorization data directly in public state, if they are both set and unset (authorized and then used) in the same transaction, there will be no state effect after the transaction for the authorization which saves gas ⛽. -```mermaid -sequenceDiagram - actor Alice - participant AC as Alice Account - participant AR as Auth Registry - participant Token - participant Defi - rect rgb(191, 223, 255) - note right of Alice: Alice sends a batch - Alice->>AC: Authorize Defi to call transfer(Alice, Defi, 1000); - activate AC - Alice->>AC: Defi.deposit(Token, 1000); - end - AC->>AR: Authorize Defi to call transfer(Alice, Defi, 1000); - AR->>AR: add authorize to true - AC->>Defi: deposit(Token, 1000); - activate Defi - Defi->>Token: transfer(Alice, Defi, 1000); - activate Token - Token->>AR: Check if Defi may call transfer(Alice, Defi, 1000); - AR->>AR: set authorize to false - AR->>Token: AuthWit validity - Token->>Token: throw if invalid AuthWit - Token->>Token: transfer(Alice, Defi, 1000); - Token->>Defi: success - deactivate Token - Defi->>Defi: deposit(Token, 1000); - deactivate Defi - deactivate AC -``` + ### Replays diff --git a/docs/docs/aztec/concepts/pxe/index.md b/docs/docs/aztec/concepts/pxe/index.md index 6e54b1ed354..5f7dedf1b28 100644 --- a/docs/docs/aztec/concepts/pxe/index.md +++ b/docs/docs/aztec/concepts/pxe/index.md @@ -6,32 +6,13 @@ keywords: [pxe, private execution environment] importance: 1 --- +import Image from "@theme/IdealImage"; + The Private Execution Environment (or PXE, pronounced 'pixie') is a client-side library for the execution of private operations. It is a TypeScript library and can be run within Node, such as when you run the sandbox. In the future it could be run inside wallet software or a browser. The PXE generates proofs of private function execution, and sends these proofs along with public function requests to the sequencer. Private inputs never leave the client-side PXE. 
-```mermaid -graph TD; - - subgraph client[Client] - subgraph pxe [PXE] - acirSim[ACIR Simulator] - db[Database] - keyStore[KeyStore] - end - end - - subgraph server[Application Server] - subgraph pxeService [PXE Service] - acctMgmt[Account Management] - contractTxInteract[Contract & Transaction Interactions] - noteMgmt[Note Management] - end - end - - pxe -->|interfaces| server - -``` + ## PXE Service diff --git a/docs/docs/aztec/concepts/state_model/public_vm.md b/docs/docs/aztec/concepts/state_model/public_vm.md deleted file mode 100644 index d5c26f1d33d..00000000000 --- a/docs/docs/aztec/concepts/state_model/public_vm.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -title: Public VM ---- - -Refer to the [protocol specs section](../../../protocol-specs/public-vm/index.md) for the latest information about the Aztec Public VM. diff --git a/docs/docs/aztec/concepts/storage/index.md b/docs/docs/aztec/concepts/storage/index.md index 9b3987f901f..3e115ed8b44 100644 --- a/docs/docs/aztec/concepts/storage/index.md +++ b/docs/docs/aztec/concepts/storage/index.md @@ -10,56 +10,3 @@ In Aztec, private data and public data are stored in two trees; a public data tr These trees have in common that they store state for _all_ accounts on the Aztec network directly as leaves. This is different from Ethereum, where a state trie contains smaller tries that hold the individual accounts' storage. It also means that we need to be careful about how we allocate storage to ensure that they don't collide! We say that storage should be _siloed_ to its contract. The exact way of siloing differs a little for public and private storage. Which we will see in the following sections. - -## Public State Slots - -As mentioned in [State Model](../state_model/index.md), Aztec public state behaves similarly to public state on Ethereum from the point of view of the developer. Behind the scenes however, the storage is managed differently. As mentioned, public state has just one large sparse tree in Aztec - so we silo slots of public data by hashing it together with its contract address. - -The mental model is that we have a key-value store, where the siloed slot is the key, and the value is the data stored in that slot. You can think of the `real_storage_slot` identifying its position in the tree, and the `logical_storage_slot` identifying the position in the contract storage. - -```rust -real_storage_slot = H(contract_address, logical_storage_slot) -``` - -The siloing is performed by the [Kernel circuits](../circuits/index.md). - -For structs and arrays, we are logically using a similar storage slot computation to ethereum, e.g., as a struct with 3 fields would be stored in 3 consecutive slots. However, because the "actual" storage slot is computed as a hash of the contract address and the logical storage slot, the actual storage slot is not consecutive. - -## Private State Slots - Slots aren't real - -Private storage is a different beast. As you might remember from [Hybrid State Model](../state_model/index.md), private state is stored in encrypted logs and the corresponding private state commitments in append-only tree where each leaf is a commitment. Being append-only, means that leaves are never updated or deleted; instead a nullifier is emitted to signify that some note is no longer valid. A major reason we used this tree, is that lookups at a specific storage slot would leak information in the context of private state. 
If you could look up a specific address balance just by looking at the storage slot, even if encrypted you would be able to see it changing! That is not good privacy. - -Following this, the storage slot as we know it doesn't really exist. The leaves of the note hashes tree are just commitments to content (think of it as a hash of its content). - -Nevertheless, the concept of a storage slot is very useful when writing applications, since it allows us to reason about distinct and disjoint pieces of data. For example we can say that the balance of an account is stored in a specific slot and that the balance of another account is stored in another slot with the total supply stored in some third slot. By making sure that these slots are disjoint, we can be sure that the balances are not mixed up and that someone cannot use the total supply as their balance. - -### But how? - -If we include the storage slot, as part of the note whose commitment is stored in the note hashes tree, we can _logically link_ all the notes that make up the storage slot. For the case of a balance, we can say that the balance is the sum of all the notes that have the same storage slot - in the same way that your physical \$ balance might be the sum of all the notes in your wallet. - -Similarly to how we siloed the public storage slots, we can silo our private storage by hashing the logical storage slot together with the note content. - -```rust -note_hash = H(logical_storage_slot, note_content_hash); -``` - -This siloing (there will be more) is done in the application circuit, since it is not necessary for security of the network (but only the application). -:::info -The private variable wrappers `PrivateSet` and `PrivateMutable` in Aztec.nr include the `logical_storage_slot` in the commitments they compute, to make it easier for developers to write contracts without having to think about how to correctly handle storage slots. -::: - -When reading the values for these notes, the application circuit can then constrain the values to only read notes with a specific logical storage slot. - -To ensure that one contract cannot insert storage that other contracts would believe is theirs, we do a second siloing by hashing the `commitment` with the contract address. - -```rust -siloed_note_hash = H(contract_address, note_hash); -``` - -By doing this address-siloing at the kernel circuit we _force_ the inserted commitments to include and not lie about the `contract_address`. - -:::info -To ensure that nullifiers don't collide across contracts we also force this contract siloing at the kernel level. -::: - -For an example of this see [developer documentation on storage](../../../reference/developer_references/smart_contract_reference/storage/index.md). diff --git a/docs/docs/aztec/concepts/state_model/index.md b/docs/docs/aztec/concepts/storage/state_model/index.md similarity index 98% rename from docs/docs/aztec/concepts/state_model/index.md rename to docs/docs/aztec/concepts/storage/state_model/index.md index 37a0455456f..9aa78d92d3f 100644 --- a/docs/docs/aztec/concepts/state_model/index.md +++ b/docs/docs/aztec/concepts/storage/state_model/index.md @@ -35,4 +35,4 @@ This is achieved with two main features: ## Further reading -Read more about how to leverage the Aztec state model in Aztec contracts [here](../storage/index.md). +Read more about how to leverage the Aztec state model in Aztec contracts [here](../../storage/index.md). 
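Putting the relocated storage-slots material in one place: the private-state siloing composes two hash applications, one per circuit layer. A pseudocode sketch in the page's own notation (`H` stands for the protocol's hash; these lines are illustrative, not a specific aztec.nr API):

```rust
// Application circuit: bind the note content to its logical storage slot, so
// reads can be constrained to notes belonging to that slot.
note_hash = H(logical_storage_slot, note_content_hash);

// Kernel circuit: bind the result to the contract address, so one contract
// cannot insert notes into another contract's logical storage.
siloed_note_hash = H(contract_address, note_hash);
```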
diff --git a/docs/docs/aztec/concepts/storage/state_model/public_vm.md b/docs/docs/aztec/concepts/storage/state_model/public_vm.md new file mode 100644 index 00000000000..528ee5f8d46 --- /dev/null +++ b/docs/docs/aztec/concepts/storage/state_model/public_vm.md @@ -0,0 +1,5 @@ +--- +title: Public VM +--- + +Refer to the [protocol specs section](../../../../protocol-specs/public-vm/index.md) for the latest information about the Aztec Public VM. diff --git a/docs/docs/aztec/concepts/storage/storage_slots.md b/docs/docs/aztec/concepts/storage/storage_slots.md new file mode 100644 index 00000000000..d3121c645d9 --- /dev/null +++ b/docs/docs/aztec/concepts/storage/storage_slots.md @@ -0,0 +1,55 @@ + +# Storage Slots + +## Public State Slots + +As mentioned in [State Model](../storage/state_model/index.md), Aztec public state behaves similarly to public state on Ethereum from the point of view of the developer. Behind the scenes, however, the storage is managed differently. As mentioned, public state has just one large sparse tree in Aztec - so we silo slots of public data by hashing it together with its contract address. + +The mental model is that we have a key-value store, where the siloed slot is the key, and the value is the data stored in that slot. You can think of the `real_storage_slot` identifying its position in the tree, and the `logical_storage_slot` identifying the position in the contract storage. + +```rust +real_storage_slot = H(contract_address, logical_storage_slot) +``` + +The siloing is performed by the [Kernel circuits](../circuits/index.md). + +For structs and arrays, we are logically using a similar storage slot computation to Ethereum, e.g., a struct with 3 fields would be stored in 3 consecutive slots. However, because the "actual" storage slot is computed as a hash of the contract address and the logical storage slot, the actual storage slot is not consecutive. + +## Private State Slots + +Private storage is a different beast. As you might remember from [Hybrid State Model](../storage/state_model/index.md), private state is stored in encrypted logs and the corresponding private state commitments in an append-only tree called the note hash tree, where each leaf is a commitment. Append-only means that leaves are never updated or deleted; instead a nullifier is emitted to signify that some note is no longer valid. A major reason we use this tree is that updates at a specific storage slot would leak information in the context of private state, even if the value is encrypted. That is not good privacy. + +Following this, the storage slot as we know it doesn't really exist. The leaves of the note hashes tree are just commitments to content (think of it as a hash of its content). + +Nevertheless, the concept of a storage slot is very useful when writing applications, since it allows us to reason about distinct and disjoint pieces of data. For example, we can say that the balance of an account is stored in a specific slot and that the balance of another account is stored in another slot, with the total supply stored in some third slot. By making sure that these slots are disjoint, we can be sure that the balances are not mixed up and that someone cannot use the total supply as their balance. + +### Implementation + +If we include the storage slot as part of the note whose commitment is stored in the note hashes tree, we can _logically link_ all the notes that make up the storage slot.
For the case of a balance, we can say that the balance is the sum of all the notes that have the same storage slot - in the same way that your physical wallet balance is the sum of all the physical notes in your wallet. + +Similarly to how we siloed the public storage slots, we can silo our private storage by hashing the logical storage slot together with the note content. + +```rust +note_hash = H(logical_storage_slot, note_content_hash); +``` + +Note hash siloing is done in the application circuit, since it is needed only for the security of the application, not of the network. +:::info +The private variable wrappers `PrivateSet` and `PrivateMutable` in Aztec.nr include the `logical_storage_slot` in the commitments they compute, to make it easier for developers to write contracts without having to think about how to correctly handle storage slots. +::: + +When reading the values for these notes, the application circuit can then constrain the values to only read notes with a specific logical storage slot. + +To ensure that contracts can only modify their own logical storage, we do a second siloing by hashing the `commitment` with the contract address. + +```rust +siloed_note_hash = H(contract_address, note_hash); +``` + +By doing this address-siloing at the kernel circuit, we _force_ the inserted commitments to include and not lie about the `contract_address`. + +:::info +To ensure that nullifiers don't collide across contracts we also force this contract siloing at the kernel level. +::: + +For an example of this, see [developer documentation on storage](../../../reference/developer_references/smart_contract_reference/storage/index.md). diff --git a/docs/docs/aztec/concepts_overview.md b/docs/docs/aztec/concepts_overview.md new file mode 100644 index 00000000000..878cd2c3796 --- /dev/null +++ b/docs/docs/aztec/concepts_overview.md @@ -0,0 +1,152 @@ +--- +title: Concepts Overview +sidebar_position: 0 +tags: [protocol] +--- + +import Image from "@theme/IdealImage"; + +This page outlines Aztec's fundamental technical concepts. It is recommended to read this before diving into building on Aztec. + +## What is Aztec? + +Aztec is a privacy-first Layer 2 on Ethereum. It supports smart contracts with both private & public state and private & public execution. + +## High level view + + + +1. A user interacts with Aztec through Aztec.js (like web3js or ethersjs) +2. Private functions are executed in the PXE, which is client-side +3. Proofs and tree updates are sent to the Public VM (running on an Aztec node) +4. Public functions are executed in the Public VM +5. The Public VM rolls up the transactions that include private and public state updates into blocks +6. The block data and proof of a correct state transition are submitted to Ethereum for verification + +## Private and public execution + +Private functions are executed client-side, on user devices, to maintain maximum privacy. Public functions are executed by a remote network of nodes, similar to other blockchains. These distinct execution environments create a directional execution flow for a single transaction--a transaction begins in the private context on the user's device, then moves to the public network. This means that private functions executed by a transaction can enqueue public functions to be executed later in the transaction life cycle, but public functions cannot call private functions.
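A minimal Aztec.nr-style sketch of this directional flow (the contract, function, and attribute names here are illustrative assumptions, not the exact API):

```rust
// Hypothetical private entrypoint: the private logic is proved client-side in
// the PXE first, then a public call is enqueued for the AVM to execute later
// in the same transaction. The reverse direction (public calling private) is
// not possible.
#[aztec(private)]
fn deposit(token: AztecAddress, amount: Field) {
    // ...private logic here (create notes, emit nullifiers)...
    Token::at(token).transfer_public(context.msg_sender(), amount).enqueue(&mut context);
}
```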
+ +### Private Execution Environment (PXE) + +Private functions are executed first on the user's device in the Private Execution Environment (PXE, pronounced 'pixie'). It is a client-side library for the execution of private operations. It holds keys and notes, and generates proofs. It is included in aztec.js, a TypeScript library, and can be run within Node or the browser. + +### Aztec VM + +Public functions are executed by the Aztec Virtual Machine (AVM), which is conceptually similar to the Ethereum Virtual Machine (EVM). To learn more about how it works and its instruction set, go to the [protocol specs](../protocol-specs/public-vm/intro.md). + +The PXE is unaware of the Public VM. And the Public VM is unaware of the PXE. They are completely separate execution environments. This means: + +- The PXE and the Public VM cannot directly communicate with each other +- Private transactions in the PXE are executed first, followed by public transactions + +## Private and public state + +Private state works with UTXOs, which are chunks of data that we call notes. To keep things private, notes are stored in an [append-only UTXO tree](./concepts/storage/trees/index.md), and a nullifier is created when notes are invalidated (aka deleted). Nullifiers are stored in their own [nullifier tree](./concepts/storage/trees/index.md). + +Public state works similarly to other chains like Ethereum, behaving like a public ledger. Public data is stored in a [public data tree](./concepts/storage/trees/index.md#public-state-tree). + +![Public vs private state](../../static/img/public-and-private-state-diagram.png) + +Aztec [smart contract](./smart_contracts_overview.md) developers should keep in mind that different data types are used when manipulating private or public state. Working with private state means creating commitments and nullifiers to state, whereas working with public state means directly updating state. + +## Accounts and keys + +### Account abstraction + +Every account in Aztec is a smart contract (account abstraction). This allows implementing different schemes for authorizing transactions, nonce management, and fee payments. + +Developers can write their own account contract to define the rules by which user transactions are authorized and paid for, as well as how user keys are managed. + +Learn more about account contracts [here](./concepts/accounts/index.md). + +### Key pairs + +Each account in Aztec is backed by 3 key pairs: + +- A **nullifier key pair** used for note nullifier computation +- An **incoming viewing key pair** used to encrypt a note for the recipient +- An **outgoing viewing key pair** used to encrypt a note for the sender + +As Aztec has native account abstraction, accounts do not automatically have a signing key pair to authenticate transactions. This is up to the account contract developer to implement. + +## Noir + +Noir is a zero-knowledge domain-specific language used for writing smart contracts for the Aztec network. It is also possible to write circuits with Noir that can be verified on or offchain. For more in-depth docs on the features of Noir, go to the [Noir documentation](https://noir-lang.org/). + +## What's next? + +### Start coding + +
+ + +

Developer Getting Started Guide

+
+ + Follow the getting started guide to start developing with the Aztec Sandbox + +
+
+ +### Dive deeper into how Aztec works + +Explore the Concepts for a deeper understanding into the components that make up Aztec: + +
+ + + +

Accounts

+
+ + Learn about Aztec's native account abstraction - every account in Aztec is a smart contract which defines the rules for whether a transaction is or is not valid + +
+ + + +

Protocol Circuits

+
+ + Central to Aztec's operations are circuits in the core protocol and the developer-written Aztec.nr contracts + +
+ + + +

PXE (pronounced 'pixie')

+
+ + The Private Execution Environment (or PXE) is a client-side library for the execution of private operations + +
+ + + +

State model

+
+ + Aztec has a hybrid public/private state model + +
+ + + +

Storage

+
+ + In Aztec, private data and public data are stored in two trees: a public data tree and a note hashes tree + +
+ + + +

Wallets

+
+ + Wallets expose to dapps an interface that allows them to act on behalf of the user, such as querying private state or sending transactions + +
+ +
diff --git a/docs/docs/aztec/overview.md b/docs/docs/aztec/overview.md deleted file mode 100644 index 32a353fa80e..00000000000 --- a/docs/docs/aztec/overview.md +++ /dev/null @@ -1,141 +0,0 @@ ---- -title: What is Aztec? -sidebar_position: 0 -id: overview -tags: [protocol] ---- - -import Image from "@theme/IdealImage"; - -This page outlines Aztec's fundamental technical concepts. - -## Aztec Overview - - - -1. A user interacts with Aztec through Aztec.js (like web3js or ethersjs) -2. Private functions are executed in the PXE, which is client-side -3. They are rolled up and sent to the Public VM (running on an Aztec node) -4. Public functions are executed in the Public VM -5. The Public VM rolls up the private & public transaction rollups -6. These rollups are submitted to Ethereum - -The PXE is unaware of the Public VM. And the Public VM is unaware of the PXE. They are completely separate execution environments. This means: - -- The PXE and the Public VM cannot directly communicate with each other -- Private transactions in the PXE are executed first, followed by public transactions - -### Private and public state - -Private state works with UTXOs, or what we call notes. To keep things private, everything is stored in an [append-only UTXO tree](./concepts/storage/trees/index.md), and a nullifier is created when notes are invalidated. Nullifiers are then stored in their own [nullifier tree](./concepts/storage/trees/index.md). - -Public state works similarly to other chains like Ethereum, behaving like a public ledger. Public data is stored in a [public data tree](./concepts/storage/trees/index.md#public-state-tree). - -Aztec [smart contract](./smart_contracts_overview.md) developers should keep in mind that different types are used when manipulating private or public state. Working with private state is creating commitments and nullifiers to state, whereas working with public state is directly updating state. - -## Accounts - -Every account in Aztec is a smart contract (account abstraction). This allows implementing different schemes for transaction signing, nonce management, and fee payments. - -Developers can write their own account contract to define the rules by which user transactions are authorized and paid for, as well as how user keys are managed. - -Learn more about account contracts [here](./concepts/accounts/index.md). - -## Smart contracts - -Developers can write [smart contracts](./smart_contracts_overview.md) that manipulate both public and private state. They are written in a framework on top of Noir, the zero-knowledge domain-specific language developed specifically for Aztec. Outside of Aztec, Noir is used for writing circuits that can be verified on EVM chains. - -Noir has its own doc site that you can find [here](https://noir-lang.org). - -## Communication with Ethereum - -Aztec allows private communications with Ethereum - ie no-one knows where the transaction is coming from, just that it is coming from somewhere on Aztec. - -This is achieved through portals - these are smart contracts deployed on an EVM that are related to the Ethereum smart contract you want to interact with. - -Learn more about portals [here](../protocol-specs/l1-smart-contracts/index.md). 
- -## Circuits - -Aztec operates on three types of circuits: - -- [Private kernel circuits](../aztec/concepts/circuits/kernels/private_kernel.md), which are executed by the user on their own device and prove correct execution of a function -- [Public kernel circuits](../aztec/concepts/circuits/kernels/public_kernel.md), which are executed by the [sequencer](./network/sequencer/index.md) and ensure the stack trace of transactions adheres to function execution rules -- [Rollup circuits](../aztec/concepts/circuits/index.md), which bundle all of the Aztec transactions into a proof that can be efficiently verified on Ethereum - -## What's next? - -### Dive deeper into how Aztec works - -Explore the Concepts for a deeper understanding into the components that make up Aztec: - -
- - - -

Accounts

-
- - Learn about Aztec's native account abstraction - every account in Aztec is a smart contract which defines the rules for whether a transaction is or is not valid - -
- - - -

Circuits

-
- - Central to Aztec's operations are circuits in the core protocol and the developer-written Aztec.nr contracts - -
- - - -

PXE (pronounced 'pixie')

-
- - The Private Execution Environment (or PXE) is a client-side library for the execution of private operations - -
- - - -

State model

-
- - Aztec has a hybrid public/private state model - -
- - - -

Storage

-
- - In Aztec, private data and public data are stored in two trees: a public data tree and a note hashes tree - -
- - - -

Wallets

-
- - Wallets expose to dapps an interface that allows them to act on behalf of the user, such as querying private state or sending transactions - -
- -
- -### Start coding - -
- - -

Developer Getting Started Guide

-
- - Follow the getting started guide to start developing with the Aztec Sandbox - -
-
\ No newline at end of file diff --git a/docs/docs/aztec/smart_contracts/contract_creation.md b/docs/docs/aztec/smart_contracts/contract_creation.md index b648178e14e..02858dca070 100644 --- a/docs/docs/aztec/smart_contracts/contract_creation.md +++ b/docs/docs/aztec/smart_contracts/contract_creation.md @@ -7,4 +7,4 @@ The latest information about contract deployment has moved to the protocol speci ## Further reading -To see how to deploy a contract in practice, check out the [dapp development tutorial](../../tutorials/codealong/simple_dapp/index.md). +To see how to deploy a contract in practice, check out the [dapp development tutorial](../../tutorials/codealong/js_tutorials/simple_dapp/index.md). diff --git a/docs/docs/guides/developer_guides/js_apps/test.md b/docs/docs/guides/developer_guides/js_apps/test.md index a290623a51c..383358fa8c2 100644 --- a/docs/docs/guides/developer_guides/js_apps/test.md +++ b/docs/docs/guides/developer_guides/js_apps/test.md @@ -8,7 +8,7 @@ In this guide we will cover how to interact with your Aztec.nr smart contracts i ## Prerequisites - A compiled contract with TS interface (read [how to compile](../smart_contracts/how_to_compile_contract.md)) -- Your sandbox running (read [getting started](../getting_started.md)) +- Your sandbox running (read [getting started](../../getting_started.md)) ## Create TS file and install libraries @@ -107,7 +107,7 @@ To query storage directly, you'll need to know the slot you want to access. This #### Querying private state -Private state in the Aztec is represented via sets of [private notes](../../../aztec/concepts/state_model/index.md#private-state). We can query the Private Execution Environment (PXE) for all notes encrypted for a given user in a contract slot. For example, this gets all notes encrypted for the `owner` user that are stored on the token contract address and on the slot that was calculated earlier. To calculate the actual balance, it extracts the `value` of each note, which is the first element, and sums them up. +Private state in the Aztec is represented via sets of [private notes](../../../aztec/concepts/storage/state_model/index.md#private-state). We can query the Private Execution Environment (PXE) for all notes encrypted for a given user in a contract slot. For example, this gets all notes encrypted for the `owner` user that are stored on the token contract address and on the slot that was calculated earlier. To calculate the actual balance, it extracts the `value` of each note, which is the first element, and sums them up. #include_code private-storage /yarn-project/end-to-end/src/guides/dapp_testing.test.ts typescript diff --git a/docs/docs/guides/developer_guides/local_env/creating_schnorr_accounts.md b/docs/docs/guides/developer_guides/local_env/creating_schnorr_accounts.md index e8668294c95..b45b653b7f9 100644 --- a/docs/docs/guides/developer_guides/local_env/creating_schnorr_accounts.md +++ b/docs/docs/guides/developer_guides/local_env/creating_schnorr_accounts.md @@ -18,7 +18,7 @@ An in-depth explainer about accounts on aztec can be found [here](../../../aztec ## Pre-requisites -Have a running Sandbox and a repository that interacts with it as explained [in the quickstart](../getting_started.md). +Have a running Sandbox and a repository that interacts with it as explained [in the quickstart](../../getting_started.md). Let's assume you have a file `src/index.ts` from the example used in the Sandbox page. 
diff --git a/docs/docs/guides/developer_guides/local_env/run_more_than_one_pxe_sandbox.md b/docs/docs/guides/developer_guides/local_env/run_more_than_one_pxe_sandbox.md index ad575e6baf6..d937f6f5f13 100644 --- a/docs/docs/guides/developer_guides/local_env/run_more_than_one_pxe_sandbox.md +++ b/docs/docs/guides/developer_guides/local_env/run_more_than_one_pxe_sandbox.md @@ -23,7 +23,7 @@ This removes any other arguments, allowing you to ensure an isolated environment In another terminal, run: ```bash -aztec start --port 8081 --pxe nodeUrl=http://host.docker.internal:8080/ +aztec start --port 8081 --pxe --pxe.nodeUrl=http://host.docker.internal:8080/ ``` This command uses the default ports, so they might need to be changed depending on your configuration. It will run the PXE on port `8081`. diff --git a/docs/docs/guides/developer_guides/smart_contracts/how_to_compile_contract.md b/docs/docs/guides/developer_guides/smart_contracts/how_to_compile_contract.md index 1c68a23fda7..056290ad93b 100644 --- a/docs/docs/guides/developer_guides/smart_contracts/how_to_compile_contract.md +++ b/docs/docs/guides/developer_guides/smart_contracts/how_to_compile_contract.md @@ -444,7 +444,7 @@ export class TokenContract extends ContractBase { } ``` -Read more about interacting with contracts using `aztec.js` [by following this tutorial](../../../tutorials/codealong/aztecjs-getting-started). +Read more about interacting with contracts using `aztec.js` [by following this tutorial](../../../tutorials/codealong/js_tutorials/aztecjs-getting-started.md). ### Aztec.nr interfaces diff --git a/docs/docs/guides/developer_guides/smart_contracts/how_to_deploy_contract.md b/docs/docs/guides/developer_guides/smart_contracts/how_to_deploy_contract.md index 7df9f76b1a8..634be0fa19d 100644 --- a/docs/docs/guides/developer_guides/smart_contracts/how_to_deploy_contract.md +++ b/docs/docs/guides/developer_guides/smart_contracts/how_to_deploy_contract.md @@ -12,7 +12,7 @@ Once you have [compiled](./how_to_compile_contract.md) your contracts you can pr - `aztec-nargo` installed (go to [Sandbox section](../../../reference/developer_references/sandbox_reference/sandbox-reference.md) for installation instructions) - contract artifacts ready (go to [How to Compile Contract](./how_to_compile_contract.md) for instructions on how to compile contracts) -- Aztec Sandbox running (go to [Getting Started](../getting_started.md) for instructions on how to install and run the sandbox) +- Aztec Sandbox running (go to [Getting Started](../../getting_started.md) for instructions on how to install and run the sandbox) ## Deploy diff --git a/docs/docs/guides/developer_guides/smart_contracts/testing_contracts/testing.md b/docs/docs/guides/developer_guides/smart_contracts/testing_contracts/testing.md index 86d470de664..42950468940 100644 --- a/docs/docs/guides/developer_guides/smart_contracts/testing_contracts/testing.md +++ b/docs/docs/guides/developer_guides/smart_contracts/testing_contracts/testing.md @@ -38,7 +38,7 @@ So to summarize: ### Running TXE -If you have [the sandbox](../../getting_started.md) installed, you can run TXE tests using: +If you have [the sandbox](../../../getting_started.md) installed, you can run TXE tests using: `aztec test` @@ -218,10 +218,11 @@ You can use `aztec.nr`'s oracles as usual for debug logging, as explained [here] :::warning Remember to set the following environment variables to activate debug logging: + ```bash -export DEBUG="aztec:*" export LOG_LEVEL="debug" ``` + ::: ### All Cheatcodes diff --git
a/docs/docs/guides/developer_guides/smart_contracts/writing_contracts/call_functions.md b/docs/docs/guides/developer_guides/smart_contracts/writing_contracts/call_functions.md index fcca446ddad..2df88f48025 100644 --- a/docs/docs/guides/developer_guides/smart_contracts/writing_contracts/call_functions.md +++ b/docs/docs/guides/developer_guides/smart_contracts/writing_contracts/call_functions.md @@ -5,15 +5,13 @@ tags: [functions, contracts] --- - - A contract is a collection of persistent state variables and functions which may manipulate these variables. Functions and state variables within a contract's scope are said to belong to that contract. A contract can only access and modify its own state. If a contract wishes to access or modify another contract's state, it must make a call to an external function of the other contract. For anything to happen on the Aztec network, an external function of a contract needs to be called. -### Contract +### Defining a contract A contract may be declared and given a name using the `contract` keyword (see snippet below). By convention, contracts are named in `PascalCase`. @@ -32,4 +30,53 @@ contract MyContract { There is no [`main()` (GitHub link)](https://noir-lang.org/docs/getting_started/project_breakdown/#mainnr) function within a Noir `contract` scope. More than one function can be an entrypoint. ::: -To understand how to call a function from another contract, follow the [crowdfunding tutorial](../../../../tutorials/codealong/contract_tutorials/crowdfunding_contract.md). +### Add as a dependency in Nargo.toml + +Import the contract that you want to call into your `Nargo.toml` under `dependencies` like this: + +``` +token = { git="https://github.com/AztecProtocol/aztec-packages/", tag="#include_aztec_version", directory="noir-projects/noir-contracts/contracts/token_contract" } +``` + +### Import into your contract + +At the top of your contract, import the contract you want to call like this: + +``` +use token::Token; +``` + +### Call the function + +To call the function, you need to + +- Specify the address of the contract with `Contract::at(contract_address)` +- Call the function name with `.function_name()` +- Pass the parameters into the function call, like `.function_name(param1,param2)` +- Specify the type of call you want to make and pass a mut reference to the context, like `.call(&mut context)` + +#### Private calls + +To call a private function, you can just use `call()` like this: + +#include_code call_function noir-projects/noir-contracts/contracts/escrow_contract/src/main.nr rust + +#### Public -> Public calls + +To call a public function from a public function, it is the same as above. You can just use `call()` like this: + +#include_code public_to_public_call noir-projects/noir-contracts/contracts/lending_contract/src/main.nr rust + +#### Private -> Public calls + +To call a public function from private, you will need to enqueue it like this: + +#include_code enqueue_public /noir-projects/noir-contracts/contracts/lending_contract/src/main.nr rust + +Public functions are always executed after private execution. To learn why, read the [concepts overview](../../../../aztec/concepts_overview.md). + +#### Other call types + +There are other call types, for example to ensure no state changes are made. You can learn more about them in the [call types glossary](../../../../aztec/glossary/call_types.md). 
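Putting the four steps above together, a hedged sketch of a private-to-private call (the `Token::transfer` signature shown is an assumption for illustration; the `at`/`call(&mut context)` pattern is the one described above):

```rust
use token::Token;

#[aztec(private)]
fn pay(token_address: AztecAddress, recipient: AztecAddress, amount: Field) {
    // address + function name + parameters + call type, with a mutable
    // reference to the execution context
    Token::at(token_address).transfer(recipient, amount).call(&mut context);
}
```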
+ + diff --git a/docs/docs/guides/developer_guides/smart_contracts/writing_contracts/common_patterns/index.md b/docs/docs/guides/developer_guides/smart_contracts/writing_contracts/common_patterns/index.md index b002d3dd13f..aacd44df9d8 100644 --- a/docs/docs/guides/developer_guides/smart_contracts/writing_contracts/common_patterns/index.md +++ b/docs/docs/guides/developer_guides/smart_contracts/writing_contracts/common_patterns/index.md @@ -108,7 +108,7 @@ Hence, it's necessary to add a "randomness" field to your note to prevent such a ### L1 -- L2 interactions -Refer to [Token Portal codealong tutorial on bridging tokens between L1 and L2](../../../../../tutorials/codealong/contract_tutorials/advanced/token_bridge/index.md) and/or [Uniswap smart contract example that shows how to swap on L1 using funds on L2](../../../../../tutorials/examples/uniswap/index.md). Both examples show how to: +Refer to [Token Portal codealong tutorial on bridging tokens between L1 and L2](../../../../../tutorials/codealong/contract_tutorials/token_bridge/index.md) and/or [Uniswap smart contract example that shows how to swap on L1 using funds on L2](../../../../../tutorials/examples/uniswap/index.md). Both examples show how to: 1. L1 -> L2 message flow 2. L2 -> L1 message flow diff --git a/docs/docs/guides/developer_guides/smart_contracts/writing_contracts/notes/index.md b/docs/docs/guides/developer_guides/smart_contracts/writing_contracts/notes/index.md index b40f4be495e..ec9f829577b 100644 --- a/docs/docs/guides/developer_guides/smart_contracts/writing_contracts/notes/index.md +++ b/docs/docs/guides/developer_guides/smart_contracts/writing_contracts/notes/index.md @@ -4,4 +4,4 @@ sidebar_position: 6 tags: [contracts, notes] --- -Notes are the fundamental data structure in Aztec when working with private state. In this section there are guides about how to work with `AddressNote`, `ValueNote`, and custom notes in Aztec.nr. You can learn more about notes in the [concepts section](../../../../../aztec/concepts/state_model/index.md#private-state). \ No newline at end of file +Notes are the fundamental data structure in Aztec when working with private state. In this section there are guides about how to work with `AddressNote`, `ValueNote`, and custom notes in Aztec.nr. You can learn more about notes in the [concepts section](../../../../../aztec/concepts/storage/state_model/index.md#private-state). \ No newline at end of file diff --git a/docs/docs/guides/developer_guides/smart_contracts/writing_contracts/portals/communicate_with_portal.md b/docs/docs/guides/developer_guides/smart_contracts/writing_contracts/portals/communicate_with_portal.md index d08078b0aec..439bf5f9377 100644 --- a/docs/docs/guides/developer_guides/smart_contracts/writing_contracts/portals/communicate_with_portal.md +++ b/docs/docs/guides/developer_guides/smart_contracts/writing_contracts/portals/communicate_with_portal.md @@ -5,7 +5,7 @@ tags: [contracts, portals] Is this your first time hearing the word `Portal`? You might want to check out the [protocol specs](../../../../../protocol-specs/l1-smart-contracts/index.md). -Follow the [token bridge tutorial](../../../../../tutorials/codealong/contract_tutorials/advanced/token_bridge/index.md) for hands-on experience writing and deploying a Portal contract. +Follow the [token bridge tutorial](../../../../../tutorials/codealong/contract_tutorials/token_bridge/index.md) for hands-on experience writing and deploying a Portal contract. 
## Passing data to the rollup @@ -43,7 +43,7 @@ Note that while the `secret` and the `content` are both hashed, they are actuall ### Token bridge example -Computing the `content` must currently be done manually, as we are still adding a number of bytes utilities. A good example exists within the [Token bridge example (codealong tutorial)](../../../../../tutorials/codealong/contract_tutorials/advanced/token_bridge/index.md). +Computing the `content` must currently be done manually, as we are still adding a number of bytes utilities. A good example exists within the [Token bridge example (codealong tutorial)](../../../../../tutorials/codealong/contract_tutorials/token_bridge/index.md). #include_code claim_public /noir-projects/noir-contracts/contracts/token_bridge_contract/src/main.nr rust diff --git a/docs/docs/guides/developer_guides/smart_contracts/writing_contracts/storage/notes.md b/docs/docs/guides/developer_guides/smart_contracts/writing_contracts/storage/notes.md index bea6aa7461b..b4f737c7857 100644 --- a/docs/docs/guides/developer_guides/smart_contracts/writing_contracts/storage/notes.md +++ b/docs/docs/guides/developer_guides/smart_contracts/writing_contracts/storage/notes.md @@ -83,9 +83,9 @@ To update a value, its previous note hash(es) are nullified. The new note value Some optional background resources on notes can be found here: -- [High level network architecture](../../../../../aztec/overview.md), specifically the Private Execution Environment +- [High level network architecture](../../../../../aztec/concepts_overview.md), specifically the Private Execution Environment - [Transaction lifecycle (simple diagram)](../../../../../aztec/concepts/transactions.md#simple-example-of-the-private-transaction-lifecycle) -- [Public and Private state](../../../../../aztec/concepts/state_model/index.md) +- [Public and Private state](../../../../../aztec/concepts/storage/state_model/index.md) Notes touch several core components of the protocol, but we will focus on a the essentials first. diff --git a/docs/docs/guides/developer_guides/getting_started.md b/docs/docs/guides/getting_started.md similarity index 98% rename from docs/docs/guides/developer_guides/getting_started.md rename to docs/docs/guides/getting_started.md index cceabca3bfd..1e1eafa8750 100644 --- a/docs/docs/guides/developer_guides/getting_started.md +++ b/docs/docs/guides/getting_started.md @@ -225,8 +225,6 @@ Simulation result: 25n Now you have a development network running, so you're ready to start coding your first app with Aztec.nr and Aztec.js! -If you want to start coding, head over to the Tutorials & Examples section and write & deploy your first smart contract. -
@@ -237,3 +235,5 @@ If you want to start coding, head over to the Tutorials & Examples section and w
+ +If you'd rather clone a repo, check out the [Aztec Starter](https://github.com/AztecProtocol/aztec-starter). diff --git a/docs/docs/guides/index.md b/docs/docs/guides/index.md index 174895e4a37..cb047909a87 100644 --- a/docs/docs/guides/index.md +++ b/docs/docs/guides/index.md @@ -1,17 +1,20 @@ --- id: index sidebar_position: 0 -title: Guides +title: Guides and Tutorials --- -# Popular Guides +# Guides and Tutorials -Guides are step-by-step how-tos to achieve a specific goal. On this page you can find the most popular ones. You can also explore them all by checking out the sidebar. +In this section you will find: + +- A list of tutorials in order of increasing complexity, allowing you to write contracts and build applications on Aztec +- How-to guides for accomplishing quick, specific goals ## Getting Started
- +

Getting Started

@@ -21,94 +24,24 @@ Guides are step-by-step how-tos to achieve a specific goal. On this page you can
-## Building smart contracts +## Building applications
- - -

Compile a contract

-
- - Learn how to compile a smart contract and generate TypeScript bindings - -
- - - -

Deploy a contract

-
- - Deploy a contract to a local Aztec sandbox - -
- - + -

Testing Contracts

+

Contract Tutorials

- Write tests for your contracts and run them in the TXE + Go from zero to hero by following these tutorials in order, starting with a counter contract
- + -

Communicate with L1

+

Full stack app on Aztec

- How to use portals to communicate with L1 from your contract - -
-
- - - -## JavaScript - -
- - -

Send a transaction

-
- - Use Aztec.JS to send a transaction by calling a function on a smart contract - -
- - - -

Testing

-
- - Write end-to-end tests in Javascript using Aztec.JS - -
-
- - - -## Local environment - -
- - -

Update your environment

-
- - Update all aspects of your Aztec environment, including the sandbox, aztec-nargo, Aztec.nr packages, and Aztec.js packages - -
- - - -

Run more than one PXE

-
- - Test that your contracts can work with multiple interactions by running a second PXE + Learn how everything works together by building an app in JavaScript that connects to a contract
@@ -132,8 +65,4 @@ Guides are step-by-step how-tos to achieve a specific goal. On this page you can Participate in the Aztec protocol as a prover node, proving the rollup integrity that is pivotal to the protocol. Runs on hardware fit for data centers. - - - \ No newline at end of file diff --git a/docs/docs/index.mdx b/docs/docs/index.mdx index b38e104d38e..938bf165d38 100644 --- a/docs/docs/index.mdx +++ b/docs/docs/index.mdx @@ -7,29 +7,43 @@ sidebar_position: 0 # Aztec Documentation -## Aztec is a Privacy-First L2 on Ethereum +## What is Aztec? + +### Aztec is a Privacy-First L2 on Ethereum On Ethereum today, everything is publicly visible, by everyone. In the real world, people enjoy privacy. Aztec brings privacy to Ethereum. -## Get started +- private functions, executed and proved on a user's device +- public functions, executed in the Aztec Virtual Machine +- private state, stored as UTXOs that only the owner can decrypt +- public state, stored in a public merkle tree +- composability between private/public execution and private/public state +- public and private messaging with Ethereum + +To learn more about how Aztec achieves these things, check out the [Aztec concepts overview](/aztec/concepts_overview). -
- +## Start coding + +
+ -

Tutorials

+

Developer Getting Started Guide

- Start writing Aztec contracts with our tutorials. + Follow the getting started guide to start developing with the Aztec Sandbox
+
- +## Learn how Aztec works + +
+ -

References

+

Aztec Overview

- Review reference materials for building on Aztec. + Learn the core concepts that make up the Aztec Protocol -
-
+
\ No newline at end of file diff --git a/docs/docs/migration_notes.md b/docs/docs/migration_notes.md index bb841a9952e..ec28ac22ecc 100644 --- a/docs/docs/migration_notes.md +++ b/docs/docs/migration_notes.md @@ -6,6 +6,35 @@ keywords: [sandbox, aztec, notes, migration, updating, upgrading] Aztec is in full-speed development. Literally every version breaks compatibility with the previous ones. This page attempts to target errors and difficulties you might encounter when upgrading, and how to resolve them. +## 0.66 + +### DEBUG env var is removed + +The `DEBUG` variable is no longer used. Use `LOG_LEVEL` with one of `silent`, `fatal`, `error`, `warn`, `info`, `verbose`, `debug`, or `trace`. To tweak log levels per module, add a list of module prefixes with their overridden level. For example, LOG_LEVEL="info; verbose: aztec:sequencer, aztec:archiver; debug: aztec:kv-store" sets `info` as the default log level, `verbose` for the sequencer and archiver, and `debug` for the kv-store. Module name match is done by prefix. + +### `tty` resolve fallback required for browser bundling + +When bundling `aztec.js` for web, the `tty` package now needs to be specified as an empty fallback: + +```diff +resolve: { + plugins: [new ResolveTypeScriptPlugin()], + alias: { './node/index.js': false }, + fallback: { + crypto: false, + os: false, + fs: false, + path: false, + url: false, ++ tty: false, + worker_threads: false, + buffer: require.resolve('buffer/'), + util: require.resolve('util/'), + stream: require.resolve('stream-browserify'), + }, +}, +``` + ## 0.65 ### [aztec.nr] Removed SharedImmutable diff --git a/docs/docs/protocol-specs/public-vm/gen/_instruction-set.mdx b/docs/docs/protocol-specs/public-vm/gen/_instruction-set.mdx index 4d081801c4d..126d9e239ad 100644 --- a/docs/docs/protocol-specs/public-vm/gen/_instruction-set.mdx +++ b/docs/docs/protocol-specs/public-vm/gen/_instruction-set.mdx @@ -1054,6 +1054,7 @@ context.machineState.pc = loc`} - **Details**: Target location is an immediate value (a constant in the bytecode). - **Bit-size**: 48 +[![](/img/protocol-specs/public-vm/bit-formats/INTERNALCALL.png)](/img/protocol-specs/public-vm/bit-formats/INTERNALCALL.png) ### `INTERNALRETURN` Return from an internal call. Pop from the internal call stack and jump to the popped location. diff --git a/docs/docs/reference/developer_references/debugging.md b/docs/docs/reference/developer_references/debugging.md index 01b8d6d06aa..edb5c05ae10 100644 --- a/docs/docs/reference/developer_references/debugging.md +++ b/docs/docs/reference/developer_references/debugging.md @@ -53,7 +53,7 @@ debug_log_array(my_array); ### Start Sandbox in debug mode -Update the `DEBUG` environment variable in docker-compose.sandbox.yml to the following: +Set `LOG_LEVEL` to `verbose` or `debug`: ```yml # ~/.aztec/docker-compose.sandbox.yml @@ -65,8 +65,7 @@ aztec: ports: - "${PXE_PORT:-8080}:${PXE_PORT:-8080}" environment: - DEBUG: aztec:simulator:client_execution_context, aztec:sandbox, aztec:avm_simulator:debug_log - LOG_LEVEL: verbose # optionally add this for more logs + LOG_LEVEL: verbose # ... 
``` diff --git a/docs/docs/reference/developer_references/sandbox_reference/sandbox-reference.md b/docs/docs/reference/developer_references/sandbox_reference/sandbox-reference.md index 82435874756..ce0f32341b5 100644 --- a/docs/docs/reference/developer_references/sandbox_reference/sandbox-reference.md +++ b/docs/docs/reference/developer_references/sandbox_reference/sandbox-reference.md @@ -6,7 +6,7 @@ sidebar_position: 0 :::tip -For a quick start, follow the [guide](../../../guides/developer_guides/getting_started) to install the sandbox. +For a quick start, follow the [guide](../../../guides/getting_started) to install the sandbox. ::: @@ -19,8 +19,7 @@ To change them, you can open `~/.aztec/docker-compose.sandbox.yml` and edit them **Sandbox** ```sh -DEBUG=aztec:* # The level of debugging logs to be displayed. using "aztec:*" will log everything. -LOG_LEVEL=debug # Setting to 'debug' will print the debug logs +LOG_LEVEL=debug # Options are 'fatal', 'error', 'warn', 'info', 'verbose', 'debug', 'trace' HOST_WORKDIR='${PWD}' # The location to store log outputs. Will use ~/.aztec where the docker-compose.yml file is stored by default. ETHEREUM_HOST=http://ethereum:8545 # The Ethereum JSON RPC URL. We use an anvil instance that runs in parallel to the sandbox on docker by default. L1_CHAIN_ID=31337 # The Chain ID that the Ethereum host is using. diff --git a/docs/docs/reference/developer_references/smart_contract_reference/storage/index.md b/docs/docs/reference/developer_references/smart_contract_reference/storage/index.md index f85fd6f3a82..d5c3bccd90e 100644 --- a/docs/docs/reference/developer_references/smart_contract_reference/storage/index.md +++ b/docs/docs/reference/developer_references/smart_contract_reference/storage/index.md @@ -101,6 +101,6 @@ require(minters[msg.sender], "caller is not minter"); ## Concepts mentioned -- [State Model](../../../../aztec/concepts/state_model/index.md) +- [State Model](../../../../aztec/concepts/storage/state_model/index.md) - [Public-private execution](../../../../aztec/smart_contracts/functions/public_private_calls.md) - [Function Contexts](../../../../aztec/smart_contracts/functions/context.md) diff --git a/docs/docs/reference/developer_references/smart_contract_reference/storage/private_state.md b/docs/docs/reference/developer_references/smart_contract_reference/storage/private_state.md index a2f84a70456..ec24ad687b8 100644 --- a/docs/docs/reference/developer_references/smart_contract_reference/storage/private_state.md +++ b/docs/docs/reference/developer_references/smart_contract_reference/storage/private_state.md @@ -4,7 +4,7 @@ title: Private State On this page we will look at how to manage private state in Aztec contracts. We will look at how to declare private state, how to read and write to it, and how to use it in your contracts. -For a higher level overview of the state model in Aztec, see the [hybrid state model](../../../../aztec/concepts/state_model/index.md) page. +For a higher level overview of the state model in Aztec, see the [hybrid state model](../../../../aztec/concepts/storage/state_model/index.md) page. 
## Overview diff --git a/docs/docs/reference/developer_references/smart_contract_reference/storage/public_state.md b/docs/docs/reference/developer_references/smart_contract_reference/storage/public_state.md index 1c2c46cded4..d49aebd81e2 100644 --- a/docs/docs/reference/developer_references/smart_contract_reference/storage/public_state.md +++ b/docs/docs/reference/developer_references/smart_contract_reference/storage/public_state.md @@ -4,7 +4,7 @@ title: Public State On this page we will look at how to manage public state in Aztec contracts. We will look at how to declare public state, how to read and write to it, and how to use it in your contracts. -For a higher level overview of the state model in Aztec, see the [state model](../../../../aztec/concepts/state_model/index.md) concepts page. +For a higher level overview of the state model in Aztec, see the [state model](../../../../aztec/concepts/storage/state_model/index.md) concepts page. ## `PublicMutable` diff --git a/docs/docs/tutorials/codealong/contract_tutorials/advanced/_category_.json b/docs/docs/tutorials/codealong/contract_tutorials/advanced/_category_.json deleted file mode 100644 index 5fe169c13f3..00000000000 --- a/docs/docs/tutorials/codealong/contract_tutorials/advanced/_category_.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "label": "Advanced", - "position": 6, - "collapsible": true, - "collapsed": true -} diff --git a/docs/docs/tutorials/codealong/contract_tutorials/counter_contract.md b/docs/docs/tutorials/codealong/contract_tutorials/counter_contract.md index 22dc4e4e5b3..0ab8bc479b5 100644 --- a/docs/docs/tutorials/codealong/contract_tutorials/counter_contract.md +++ b/docs/docs/tutorials/codealong/contract_tutorials/counter_contract.md @@ -7,7 +7,7 @@ In this guide, we will create our first Aztec.nr smart contract. We will build a ## Prerequisites -- You have followed the [quickstart](../../../guides/developer_guides/getting_started.md) +- You have followed the [quickstart](../../../guides/getting_started.md) - Running Aztec Sandbox - Installed [Noir LSP](../../../guides/developer_guides/local_env/installing_noir_lsp.md) (optional) diff --git a/docs/docs/tutorials/codealong/contract_tutorials/crowdfunding_contract.md b/docs/docs/tutorials/codealong/contract_tutorials/crowdfunding_contract.md index 397ac584839..1f11c0735e0 100644 --- a/docs/docs/tutorials/codealong/contract_tutorials/crowdfunding_contract.md +++ b/docs/docs/tutorials/codealong/contract_tutorials/crowdfunding_contract.md @@ -26,7 +26,7 @@ Along the way you will: ### Install tools -Please ensure that you already have [Installed the Sandbox](../../../guides/developer_guides/getting_started) +Please ensure that you already have [Installed the Sandbox](../../../guides/getting_started) ### Create an Aztec project diff --git a/docs/docs/tutorials/codealong/contract_tutorials/private_voting_contract.md b/docs/docs/tutorials/codealong/contract_tutorials/private_voting_contract.md index 7f3aba4aa76..2c7f0d81f86 100644 --- a/docs/docs/tutorials/codealong/contract_tutorials/private_voting_contract.md +++ b/docs/docs/tutorials/codealong/contract_tutorials/private_voting_contract.md @@ -21,7 +21,7 @@ To keep things simple, we won't create ballots or allow for delegate voting. ## Prerequisites -- You have followed the [quickstart](../../../guides/developer_guides/getting_started) to install `aztec-nargo` and `aztec`. +- You have followed the [quickstart](../../../guides/getting_started) to install `aztec-nargo` and `aztec`. 
- Running Aztec Sandbox ## Set up a project diff --git a/docs/docs/tutorials/codealong/contract_tutorials/advanced/token_bridge/0_setup.md b/docs/docs/tutorials/codealong/contract_tutorials/token_bridge/0_setup.md similarity index 98% rename from docs/docs/tutorials/codealong/contract_tutorials/advanced/token_bridge/0_setup.md rename to docs/docs/tutorials/codealong/contract_tutorials/token_bridge/0_setup.md index 200c4347b48..4ec44d065e7 100644 --- a/docs/docs/tutorials/codealong/contract_tutorials/advanced/token_bridge/0_setup.md +++ b/docs/docs/tutorials/codealong/contract_tutorials/token_bridge/0_setup.md @@ -17,7 +17,7 @@ We recommend going through this setup to fully understand where things live. - [node v18+ (GitHub link)](https://github.com/tj/n) - [docker](https://docs.docker.com/) -- [Aztec sandbox](../../../../../guides/developer_guides/getting_started) - you should have this running before starting the tutorial +- [Aztec sandbox](../../../../../guides/getting_started) - you should have this running before starting the tutorial Start the sandbox diff --git a/docs/docs/tutorials/codealong/contract_tutorials/advanced/token_bridge/1_depositing_to_aztec.md b/docs/docs/tutorials/codealong/contract_tutorials/token_bridge/1_depositing_to_aztec.md similarity index 100% rename from docs/docs/tutorials/codealong/contract_tutorials/advanced/token_bridge/1_depositing_to_aztec.md rename to docs/docs/tutorials/codealong/contract_tutorials/token_bridge/1_depositing_to_aztec.md diff --git a/docs/docs/tutorials/codealong/contract_tutorials/advanced/token_bridge/2_minting_on_aztec.md b/docs/docs/tutorials/codealong/contract_tutorials/token_bridge/2_minting_on_aztec.md similarity index 100% rename from docs/docs/tutorials/codealong/contract_tutorials/advanced/token_bridge/2_minting_on_aztec.md rename to docs/docs/tutorials/codealong/contract_tutorials/token_bridge/2_minting_on_aztec.md diff --git a/docs/docs/tutorials/codealong/contract_tutorials/advanced/token_bridge/3_withdrawing_to_l1.md b/docs/docs/tutorials/codealong/contract_tutorials/token_bridge/3_withdrawing_to_l1.md similarity index 98% rename from docs/docs/tutorials/codealong/contract_tutorials/advanced/token_bridge/3_withdrawing_to_l1.md rename to docs/docs/tutorials/codealong/contract_tutorials/token_bridge/3_withdrawing_to_l1.md index a0c1b508212..ae28246bde3 100644 --- a/docs/docs/tutorials/codealong/contract_tutorials/advanced/token_bridge/3_withdrawing_to_l1.md +++ b/docs/docs/tutorials/codealong/contract_tutorials/token_bridge/3_withdrawing_to_l1.md @@ -19,7 +19,7 @@ The `exit_to_l1_public` function enables anyone to withdraw their L2 tokens back 1. Like with our deposit function, we need to create the L2 to L1 message. The content is the _amount_ to burn, the recipient address, and who can execute the withdraw on the L1 portal on behalf of the user. It can be `0x0` for anyone, or a specified address. 2. `context.message_portal()` passes this content to the kernel circuit which creates the proof for the transaction. The kernel circuit then adds the sender (the L2 address of the bridge + version of aztec) and the recipient (the portal to the L2 address + the chain ID of L1) under the hood, to create the message which gets added as part of the transaction data published by the sequencer and is stored in the outbox for consumption. 3. The `context.message_portal()` takes the recipient and content as input, and will insert a message into the outbox. 
We set the recipient to be the portal address read from storage of the contract. -4. Finally, you also burn the tokens on L2! Note that it burning is done at the end to follow the check effects interaction pattern. Note that the caller has to first approve the bridge contract to burn tokens on its behalf. Refer to [burn_public function on the token contract](../../token_contract.md#authorizing-token-spends). +4. Finally, you also burn the tokens on L2! Note that the burning is done at the end to follow the checks-effects-interactions pattern. Note that the caller has to first approve the bridge contract to burn tokens on its behalf. Refer to [burn_public function on the token contract](../token_contract.md#burn_public). - We burn the tokens from the `msg_sender()`. Otherwise, a malicious user could burn someone else’s tokens and mint tokens on L1 to themselves. One could add another approval flow on the bridge but that might make it complex for other applications to call the bridge. ## Withdrawing Privately diff --git a/docs/docs/tutorials/codealong/contract_tutorials/advanced/token_bridge/4_typescript_glue_code.md b/docs/docs/tutorials/codealong/contract_tutorials/token_bridge/4_typescript_glue_code.md similarity index 95% rename from docs/docs/tutorials/codealong/contract_tutorials/advanced/token_bridge/4_typescript_glue_code.md rename to docs/docs/tutorials/codealong/contract_tutorials/token_bridge/4_typescript_glue_code.md index 0600dbf1059..5754ce1f0f8 100644 --- a/docs/docs/tutorials/codealong/contract_tutorials/advanced/token_bridge/4_typescript_glue_code.md +++ b/docs/docs/tutorials/codealong/contract_tutorials/token_bridge/4_typescript_glue_code.md @@ -167,9 +167,9 @@ Note - you might have a jest error at the end of each test saying "expected 1-2 ### Follow a more detailed Aztec.js tutorial -Follow the tutorial [here](../../../aztecjs-getting-started). +Follow the tutorial [here](../../js_tutorials/aztecjs-getting-started.md). ### Optional: Learn more about concepts mentioned here -- [Portals (protocol specs)](../../../../../protocol-specs/l1-smart-contracts/index.md) -- [Functions under the hood (concepts)](../../../../../aztec/smart_contracts/functions/function_transforms.md) +- [Portals (protocol specs)](../../../../protocol-specs/l1-smart-contracts/index.md#portals) +- [Functions under the hood (concepts)](../../../../aztec/smart_contracts/functions/function_transforms.md)
If this is your first time hearing the word portal, you’ll want to read [this page in the protocol specs](../../../../protocol-specs/l1-smart-contracts/index.md). ## A refresher on Portals @@ -42,7 +42,7 @@ The goal for this tutorial is to create functionality such that a token can be b This is just a reference implementation for educational purposes only. It has not been through an in-depth security audit. -Let’s assume a token exists on Ethereum and Aztec (see a [the token tutorial](../../token_contract.md)). +Let’s assume a token exists on Ethereum and Aztec (see [the token tutorial](../token_contract.md)). We will build: diff --git a/docs/docs/tutorials/codealong/contract_tutorials/token_contract.md b/docs/docs/tutorials/codealong/contract_tutorials/token_contract.md index e650379aee4..9b93a6e0e72 100644 --- a/docs/docs/tutorials/codealong/contract_tutorials/token_contract.md +++ b/docs/docs/tutorials/codealong/contract_tutorials/token_contract.md @@ -422,7 +422,7 @@ aztec codegen target -o src/artifacts ### Token Bridge Contract -The [token bridge tutorial](./advanced/token_bridge/index.md) is a great follow up to this one. +The [token bridge tutorial](./token_bridge/index.md) is a great follow-up to this one. It builds on the Token contract described here and goes into more detail about Aztec contract composability and Ethereum (L1) and Aztec (L2) cross-chain messaging. diff --git a/docs/docs/tutorials/codealong/js_tutorials/_category_.json b/docs/docs/tutorials/codealong/js_tutorials/_category_.json new file mode 100644 index 00000000000..79b35ed8528 --- /dev/null +++ b/docs/docs/tutorials/codealong/js_tutorials/_category_.json @@ -0,0 +1,6 @@ +{ + "label": "Dapp Tutorials", + "position": 1, + "collapsible": true, + "collapsed": true +} \ No newline at end of file diff --git a/docs/docs/tutorials/codealong/aztecjs-getting-started.md b/docs/docs/tutorials/codealong/js_tutorials/aztecjs-getting-started.md similarity index 93% rename from docs/docs/tutorials/codealong/aztecjs-getting-started.md rename to docs/docs/tutorials/codealong/js_tutorials/aztecjs-getting-started.md index d4eeae0b976..6dd6b117e5c 100644 --- a/docs/docs/tutorials/codealong/aztecjs-getting-started.md +++ b/docs/docs/tutorials/codealong/js_tutorials/aztecjs-getting-started.md @@ -1,13 +1,13 @@ --- title: Transferring Tokens with Aztec.js -sidebar_position: 1 +sidebar_position: 0 --- import Image from "@theme/IdealImage"; In this guide, we will retrieve the Sandbox and deploy a pre-written contract to it using Aztec.js. -This guide assumes you have followed the [quickstart](../../guides/developer_guides/getting_started). +This guide assumes you have followed the [quickstart](../../../guides/getting_started.md). ## Prerequisites @@ -75,7 +75,7 @@ yarn add @aztec/aztec.js @aztec/accounts @aztec/noir-contracts.js typescript @ty "build": "yarn clean && tsc -b", "build:dev": "tsc -b --watch", "clean": "rm -rf ./dest tsconfig.tsbuildinfo", - "start": "yarn build && DEBUG='token' node ./dest/index.js" + "start": "yarn build && LOG_LEVEL='info: token' node ./dest/index.js" }, ``` @@ -134,7 +134,7 @@ The sandbox is preloaded with multiple accounts so you don't have to sit and cre #include_code load_accounts /yarn-project/end-to-end/src/composed/e2e_sandbox_example.test.ts typescript -An explanation on accounts on Aztec can be found [here](../../aztec/concepts/accounts/index.md). +An explanation on accounts on Aztec can be found [here](../../../aztec/concepts/accounts/index.md).
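For orientation, the `load_accounts` snippet included above boils down to something like the following sketch (assuming `createPXEClient` from `@aztec/aztec.js` and the `getDeployedTestAccountsWallets` helper from `@aztec/accounts/testing`; exact names can vary by version):

```typescript
// Hedged sketch: connect to the Sandbox's PXE and load its pre-funded accounts.
import { createPXEClient } from '@aztec/aztec.js';
import { getDeployedTestAccountsWallets } from '@aztec/accounts/testing';

const pxe = createPXEClient('http://localhost:8080');
const wallets = await getDeployedTestAccountsWallets(pxe);
console.log(`Loaded ${wallets.length} pre-funded sandbox accounts`);
```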
## Deploy a contract @@ -295,7 +295,7 @@ This function takes: 2. A recipient 3. An amount of tokens to mint -This function starts as private to set up the creation of a [partial note](../../aztec/concepts/storage/partial_notes.md). The private function calls a public function that checks that the minter is authorized to mint new tokens an increments the public total supply. The recipient of the tokens remains private, but the minter and the amount of tokens minted are public. +This function starts as private to set up the creation of a [partial note](../../../aztec/concepts/storage/partial_notes.md). The private function calls a public function that checks that the minter is authorized to mint new tokens and increments the public total supply. The recipient of the tokens remains private, but the minter and the amount of tokens minted are public. Let's now use these functions to mint some tokens to Bob's account using TypeScript; add this to `index.ts`: @@ -339,7 +339,7 @@ Our complete output should now be something like: token Bob's balance 10543 +43ms ``` -That's it! We have successfully deployed a token contract to an instance of the Aztec network and mined private state-transitioning transactions. We have also queried the resulting state all via the interfaces provided by the contract. To see exactly what has happened here, you can learn about the transaction flow [on the Concepts page here](../../aztec/concepts/transactions.md). +That's it! We have successfully deployed a token contract to an instance of the Aztec network and mined private state-transitioning transactions. We have also queried the resulting state all via the interfaces provided by the contract. To see exactly what has happened here, you can learn about the transaction flow [on the Concepts page here](../../../aztec/concepts/transactions.md). ## Next Steps @@ -349,5 +349,5 @@ Follow the [dapp tutorial](./simple_dapp/index.md). ### Optional: Learn more about concepts mentioned here -- [Authentication witness](../../aztec/concepts/accounts/authwit.md) -- [Functions under the hood](../../aztec/smart_contracts/functions/function_transforms.md) +- [Authentication witness](../../../aztec/concepts/accounts/authwit.md) +- [Functions under the hood](../../../aztec/smart_contracts/functions/function_transforms.md)
-The [Aztec Sandbox](../../../reference/developer_references/sandbox_reference/index.md) runs a local PXE and an Aztec Node, both connected to a local Ethereum development node like Anvil. +The [Aztec Sandbox](../../../../reference/developer_references/sandbox_reference/sandbox-reference.md) runs a local PXE and an Aztec Node, both connected to a local Ethereum development node like Anvil. + The Sandbox also includes a set of pre-initialized accounts that you can use from your app. In this section, we'll connect to the Sandbox from our project. @@ -20,7 +21,7 @@ Let's create our first file `src/index.mjs` with the following contents: #include_code all yarn-project/end-to-end/src/sample-dapp/connect.mjs javascript -Make sure the [Sandbox is running](../../../guides/developer_guides/getting_started.md) and run the example +Make sure the [Sandbox is running](../../../../guides/getting_started.md) and run the example ```bash node src/index.mjs diff --git a/docs/docs/tutorials/codealong/simple_dapp/2_contract_deployment.md b/docs/docs/tutorials/codealong/js_tutorials/simple_dapp/2_contract_deployment.md similarity index 93% rename from docs/docs/tutorials/codealong/simple_dapp/2_contract_deployment.md rename to docs/docs/tutorials/codealong/js_tutorials/simple_dapp/2_contract_deployment.md index 392aacce391..324e3d59d0c 100644 --- a/docs/docs/tutorials/codealong/simple_dapp/2_contract_deployment.md +++ b/docs/docs/tutorials/codealong/js_tutorials/simple_dapp/2_contract_deployment.md @@ -3,7 +3,7 @@ To add contracts to your application, we'll start by creating a new `aztec-nargo` project. We'll then compile the contracts, and write a simple script to deploy them to our Sandbox. :::info -Follow the instructions [here](../../../reference/developer_references/sandbox_reference/index.md) to install `aztec-nargo` if you haven't done so already. +Follow the instructions [here](../../../../guides/getting_started.md) to install `aztec-nargo` if you haven't done so already. ::: ## Initialize Aztec project @@ -73,7 +73,7 @@ Here, we are using the `Contract` class with the compiled artifact to send a new Note that the token's `constructor()` method expects an `owner` address to set as the contract `admin`. We are using the first account from the Sandbox for this. :::info -If you are using the generated typescript classes, you can drop the generic `ContractDeployer` in favor of using the `deploy` method of the generated class, which will automatically load the artifact for you and type-check the constructor arguments. See the [How to deploy a contract](../../../guides/developer_guides/smart_contracts/how_to_deploy_contract.md) page for more info. +If you are using the generated TypeScript classes, you can drop the generic `ContractDeployer` in favor of using the `deploy` method of the generated class, which will automatically load the artifact for you and type-check the constructor arguments. See the [How to deploy a contract](../../../../guides/developer_guides/smart_contracts/how_to_deploy_contract.md) page for more info.
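As a sketch of what that looks like in practice (the generated `TokenContract` class from `@aztec/noir-contracts.js` and its constructor arguments are assumed here for illustration), the typed `deploy` method replaces the generic deployer:

```typescript
// Hedged sketch: deploying via a generated contract class instead of ContractDeployer.
// The class name, import path, and constructor arguments are assumptions.
import { TokenContract } from '@aztec/noir-contracts.js/Token';

const token = await TokenContract.deploy(
  wallet, // an account wallet obtained from the Sandbox
  adminAddress, // the `owner` passed to the token's constructor()
  'TestToken', // name
  'TST', // symbol
  18, // decimals
)
  .send()
  .deployed();
console.log(`Token deployed at ${token.address.toString()}`);
```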
::: Run the snippet above as `node src/deploy.mjs`, and you should see the following output, along with a new `addresses.json` file in your project root: diff --git a/docs/docs/tutorials/codealong/simple_dapp/3_contract_interaction.md b/docs/docs/tutorials/codealong/js_tutorials/simple_dapp/3_contract_interaction.md similarity index 91% rename from docs/docs/tutorials/codealong/simple_dapp/3_contract_interaction.md rename to docs/docs/tutorials/codealong/js_tutorials/simple_dapp/3_contract_interaction.md index 769e53d6e00..72583b361af 100644 --- a/docs/docs/tutorials/codealong/simple_dapp/3_contract_interaction.md +++ b/docs/docs/tutorials/codealong/js_tutorials/simple_dapp/3_contract_interaction.md @@ -11,7 +11,7 @@ Let's start by showing our user's private balance for the token across their acc #include_code balance_of_private noir-projects/noir-contracts/contracts/token_contract/src/main.nr rust :::info -Note that this function will only return a valid response for accounts registered in the Private eXecution Environment (PXE), since it requires access to the [user's private state](../../../aztec/concepts/wallets/index.md#private-state). In other words, you cannot query the private balance of another user for the token contract. +Note that this function will only return a valid response for accounts registered in the Private eXecution Environment (PXE), since it requires access to the [user's private state](../../../../aztec/concepts/wallets/index.md#private-state). In other words, you cannot query the private balance of another user for the token contract. ::: To do this, let's first initialize a new `Contract` instance using `aztec.js` that represents our deployed token contracts. Create a new `src/contracts.mjs` file with the imports for our artifacts and other dependencies: @@ -99,12 +99,12 @@ At the time of this writing, there are no events emitted when new private notes ## Working with public state -While [private and public state](../../../aztec/concepts/state_model/index.md) are fundamentally different, the API for working with private and public functions and state from `aztec.js` is equivalent. To query the balance in public tokens for our user accounts, we can just call the `balance_of_public` view function in the contract: +While [private and public state](../../../../aztec/concepts/storage/state_model/index.md) are fundamentally different, the API for working with private and public functions and state from `aztec.js` is equivalent. To query the balance in public tokens for our user accounts, we can just call the `balance_of_public` view function in the contract: #include_code showPublicBalances yarn-project/end-to-end/src/sample-dapp/index.mjs javascript :::info -Since we are working with public balances, we can now query the balance for any address, not just those registered in our local PXE. We can also send funds to addresses for which we don't know their [public encryption key](../../../../aztec/concepts/accounts/keys.md#encryption-keys). ::: Here, since the token contract does not mint any initial funds upon deployment, the balances for all of our user's accounts will be zero.
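As a rough usage sketch (assuming `token` is a `Contract` instance initialized as above, and `recipientAddress` is any Aztec address), calling that view function from `aztec.js` looks like this:

```typescript
// Hedged sketch: reading public state through a view function.
// Public balances can be queried for any address, registered in the PXE or not.
const balance = await token.methods
  .balance_of_public(recipientAddress)
  .simulate();
console.log(`Public balance of ${recipientAddress.toString()}: ${balance}`);
```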
diff --git a/docs/docs/tutorials/codealong/simple_dapp/4_testing.md b/docs/docs/tutorials/codealong/js_tutorials/simple_dapp/4_testing.md similarity index 87% rename from docs/docs/tutorials/codealong/simple_dapp/4_testing.md rename to docs/docs/tutorials/codealong/js_tutorials/simple_dapp/4_testing.md index f7b03eda1c9..d162345d448 100644 --- a/docs/docs/tutorials/codealong/simple_dapp/4_testing.md +++ b/docs/docs/tutorials/codealong/js_tutorials/simple_dapp/4_testing.md @@ -14,7 +14,7 @@ Start by installing our test runner, in this case jest: yarn add -D jest ``` -We'll need to [install and run the Sandbox](../../../reference/developer_references/sandbox_reference/sandbox-reference.md). +We'll need to [install and run the Sandbox](../../../../guides/getting_started.md). ## Test setup @@ -69,4 +69,6 @@ yarn node --experimental-vm-modules $(yarn bin jest) --testRegex '.*\.test\.mjs$ ## Next steps -Now that you have finished the tutorial, you can learn more about [writing contracts with Noir](../../../aztec/smart_contracts_overview.md) or read about the [fundamental concepts behind Aztec Network](../../../aztec/overview.md). +Have you written a contract from scratch? If not, follow a tutorial for [writing contracts with Noir](../../contract_tutorials/counter_contract.md). + +Or read about the [fundamental concepts behind the Aztec Network](../../../../aztec/concepts_overview.md) and dive deeper into how things work. diff --git a/docs/docs/tutorials/codealong/simple_dapp/index.md b/docs/docs/tutorials/codealong/js_tutorials/simple_dapp/index.md similarity index 88% rename from docs/docs/tutorials/codealong/simple_dapp/index.md rename to docs/docs/tutorials/codealong/js_tutorials/simple_dapp/index.md index 26b04daebdc..5457d22e4c5 100644 --- a/docs/docs/tutorials/codealong/simple_dapp/index.md +++ b/docs/docs/tutorials/codealong/js_tutorials/simple_dapp/index.md @@ -1,10 +1,10 @@ --- -title: Dapp Tutorial +title: Node.js app that interacts with contracts --- In this tutorial we'll go through the steps for building a simple application that interacts with the Aztec Sandbox. We'll be building a console application using JavaScript and Node.js, but you may reuse the same concepts here for a web-based app. All Aztec libraries are written in TypeScript and fully typed, so you can use TypeScript instead of JavaScript to make the most out of its type checker. -This tutorial will focus on environment setup, including creating accounts and deployments, as well as interacting with your contracts. It will not cover [how to write contracts in Noir](../../../aztec/smart_contracts_overview.md). +This tutorial will focus on environment setup, including creating accounts and deployments, as well as interacting with your contracts. It will not cover [how to write contracts in Noir](../../../../aztec/smart_contracts_overview.md). The full code for this tutorial is [available on the `aztec-packages` repository](https://github.com/AztecProtocol/aztec-packages/blob/master/yarn-project/end-to-end/src/sample-dapp).
@@ -12,7 +12,7 @@ The full code for this tutorial is [available on the `aztec-packages` repository - Linux or OSX environment - [NodeJS](https://nodejs.org/) 18 or higher -- [Aztec Sandbox](../../../guides/developer_guides/getting_started) +- [Aztec Sandbox](../../../guides/getting_started) ## Prerequisites diff --git a/docs/docs/tutorials/examples/uniswap/index.md b/docs/docs/tutorials/examples/uniswap/index.md index c4fc55b539f..d31c03b0418 100644 --- a/docs/docs/tutorials/examples/uniswap/index.md +++ b/docs/docs/tutorials/examples/uniswap/index.md @@ -15,7 +15,7 @@ The flow will be: 2. We create an L2 → L1 message to swap on L1 3. On L1, the user gets their input tokens, consumes the swap message, and executes the swap 4. The user deposits the “output” tokens to the output token portal so it can be deposited into L2 -5. We will assume that token portals and token bridges for the input and output tokens must exist. These are what we built in the [token bridge tutorial](../../codealong/contract_tutorials/advanced/token_bridge/index.md). +5. We assume that token portals and token bridges for the input and output tokens already exist. These are what we built in the [token bridge tutorial](../../codealong/contract_tutorials/token_bridge/index.md). The execution of swap on L1 should be designed such that any 3rd party can execute the swap on behalf of the user. This helps maintain user privacy by not requiring links between L1 and L2 activity. @@ -27,5 +27,5 @@ This reference will cover: This diagram describes the private flow. -This code works alongside a token portal that you can learn to build [in this codealong tutorial](../../codealong/contract_tutorials/advanced/token_bridge/index.md). +This code works alongside a token portal that you can learn to build [in this codealong tutorial](../../codealong/contract_tutorials/token_bridge/index.md). diff --git a/docs/docs/tutorials/examples/uniswap/l1_contract.md b/docs/docs/tutorials/examples/uniswap/l1_contract.md index 6eb246a7c18..c9b16928ed1 100644 --- a/docs/docs/tutorials/examples/uniswap/l1_contract.md +++ b/docs/docs/tutorials/examples/uniswap/l1_contract.md @@ -3,7 +3,7 @@ title: L1 contracts (EVM) sidebar_position: 2 --- -This page goes over the code in the L1 contract for Uniswap, which works alongside a [token portal (codealong tutorial)](../../codealong/contract_tutorials/advanced/token_bridge/index.md). +This page goes over the code in the L1 contract for Uniswap, which works alongside a [token portal (codealong tutorial)](../../codealong/contract_tutorials/token_bridge/index.md). ## Setup diff --git a/docs/docs/tutorials/examples/uniswap/l2_contract.md b/docs/docs/tutorials/examples/uniswap/l2_contract.md index 9d428e6332b..a3ce0e6de53 100644 --- a/docs/docs/tutorials/examples/uniswap/l2_contract.md +++ b/docs/docs/tutorials/examples/uniswap/l2_contract.md @@ -3,7 +3,7 @@ title: L2 Contracts (Aztec) sidebar_position: 1 --- -This page goes over the code in the L2 contract for Uniswap, which works alongside a [token bridge (codealong tutorial)](../../codealong/contract_tutorials/advanced/token_bridge/index.md). +This page goes over the code in the L2 contract for Uniswap, which works alongside a [token bridge (codealong tutorial)](../../codealong/contract_tutorials/token_bridge/index.md). ## Main.nr @@ -20,7 +20,7 @@ We just need to store the portal address for the token that we want to swap. 2. We fetch the underlying aztec token that needs to be swapped. 3. We transfer the user’s funds to the Uniswap contract.
Like with Ethereum, the user must have provided approval to the Uniswap contract to do so. The user must provide the nonce they used in the approval for transfer, so that Uniswap can send it to the token contract, to prove it has appropriate approval. 4. Funds are added to the Uniswap contract. -5. Uniswap must exit the input tokens to L1. For this it has to approve the bridge to burn its tokens on its behalf and then actually exit the funds. We call the [`exit_to_l1_public()` method on the token bridge](../../codealong/contract_tutorials/advanced/token_bridge/index.md). We use the public flow for exiting since we are operating on public state. +5. Uniswap must exit the input tokens to L1. For this it has to approve the bridge to burn its tokens on its behalf and then actually exit the funds. We call the [`exit_to_l1_public()` method on the token bridge](../../codealong/contract_tutorials/token_bridge/index.md). We use the public flow for exiting since we are operating on public state. 6. It is not enough for us to simply emit a message to withdraw the funds. We also need to emit a message to display our swap intention. If we do not do this, there is nothing stopping a third party from calling the Uniswap portal with their own parameters and consuming our message. So the Uniswap portal (on L1) needs to know: diff --git a/docs/docs/tutorials/index.md b/docs/docs/tutorials/index.md deleted file mode 100644 index c6e9362c344..00000000000 --- a/docs/docs/tutorials/index.md +++ /dev/null @@ -1,112 +0,0 @@ ---- -id: index -sidebar_position: 0 -title: Tutorials and Examples ---- - -# Code-Along Tutorials and Examples - -In this section, you will find two things: code-along tutorials and code examples of Aztec applications. - -Tutorials will teach you how to build a full application or smart contract locally. Examples are not intended for you to replicate locally as they have more complex setups, but can be useful for exploring what you can do with Aztec. - -This page includes the most popular tutorials in order of increasing complexity. Explore the sidebar for more! - -## Code-Along Tutorials - -### Beginner: Write your first smart contract - -
- - -

Simple counter contract

-
- - Follow this tutorial to build, compile and deploy your first Aztec smart contract - a simple private counter - -
-
- - -### Intermediate: Write increasingly more complex contracts - -It is recommended to follow these in order. - -
- - -

Simple private voting contract

-
- - Build a contract with hybrid state and calling public functions from private - -
- - - -

Crowdfunding contract

-
- - A more complex contract that interacts with other contracts - -
- - - -

Token contract with hybrid state

-
- - A very complex contract for a token that can move across public & private state and be transferred to others - -
- - - -

Accounts contract

-
- - A simple accounts contract that will teach you about account abstraction in Aztec - -
-
- -
- -## Examples - -
- - -

Interacting with L1 Uniswap from L2 Aztec

-
- - An example app inspired by Aztec Connect that allows users to swap publicly & privately on L1 Uniswap from Aztec - -
- - - - - - -

Card game contract ↗️

-
- - A set of contracts that allow players to take turns playing cards - -
-
diff --git a/docs/docusaurus.config.js b/docs/docusaurus.config.js index 678e9cce230..3c480ffaba6 100644 --- a/docs/docusaurus.config.js +++ b/docs/docusaurus.config.js @@ -223,9 +223,9 @@ const config = { items: [ { type: "doc", - docId: "aztec/overview", + docId: "index", position: "left", - label: "Concepts", + label: "Learn", }, { type: "docSidebar", @@ -233,17 +233,11 @@ position: "left", label: "Guides", }, - { - type: "docSidebar", - sidebarId: "tutorialsSidebar", - position: "left", - label: "Examples", - }, { type: "docSidebar", sidebarId: "referenceSidebar", position: "left", - label: "References", + label: "Reference", }, { type: "dropdown", @@ -255,6 +249,13 @@ value: 'GitHub', className: "dropdown-subtitle", }, + { + to: "https://github.com/AztecProtocol/aztec-starter", + label: "Aztec Starter repo", + target: "_blank", + rel: "noopener noreferrer", + className: "github-item", + }, { to: "https://github.com/AztecProtocol/aztec-packages", label: "Aztec Monorepo", @@ -343,7 +344,7 @@ }, { label: "Developer Getting Started Guide", - to: "/guides/developer_guides/getting_started", + to: "/guides/getting_started", }, { label: "Aztec.nr", diff --git a/docs/netlify.toml b/docs/netlify.toml index bf5475fff6b..3a0097de71b 100644 --- a/docs/netlify.toml +++ b/docs/netlify.toml @@ -8,7 +8,7 @@ [[redirects]] from = "/getting_started" - to = "/guides/developer_guides/getting_started" + to = "/guides/getting_started" [[redirects]] from = "/tutorials/simple_dapp/*" to = "/tutorials/codealong/simple_dapp" [[redirects]] - from = "/tutorials/contract_tutorials/advanced/token_bridge/typescript_glue_code" - to = "/tutorials/codealong/contract_tutorials/advanced/token_bridge/typescript_glue_code" + from = "/tutorials/contract_tutorials/advanced/token_bridge/typescript_glue_code" + to = "/tutorials/codealong/contract_tutorials/token_bridge/typescript_glue_code" [[redirects]] - from = "/tutorials/contract_tutorials/advanced/token_bridge/withdrawing_to_l1" - to = "/tutorials/codealong/contract_tutorials/advanced/token_bridge/withdrawing_to_l1" + from = "/tutorials/contract_tutorials/advanced/token_bridge/withdrawing_to_l1" + to = "/tutorials/codealong/contract_tutorials/token_bridge/withdrawing_to_l1" [[redirects]] - from = "/tutorials/contract_tutorials/advanced/token_bridge/minting_on_aztec" - to = "/tutorials/codealong/contract_tutorials/advanced/token_bridge/minting_on_aztec" + from = "/tutorials/contract_tutorials/advanced/token_bridge/minting_on_aztec" + to = "/tutorials/codealong/contract_tutorials/token_bridge/minting_on_aztec" [[redirects]] - from = "/tutorials/contract_tutorials/advanced/token_bridge/depositing_to_aztec" - to = "/tutorials/codealong/contract_tutorials/advanced/token_bridge/depositing_to_aztec" + from = "/tutorials/contract_tutorials/advanced/token_bridge/depositing_to_aztec" + to = "/tutorials/codealong/contract_tutorials/token_bridge/depositing_to_aztec" [[redirects]] - from = "/tutorials/contract_tutorials/advanced/token_bridge" - to = "/tutorials/codealong/contract_tutorials/advanced/token_bridge" + from = "/tutorials/contract_tutorials/advanced/token_bridge" + to = "/tutorials/codealong/contract_tutorials/token_bridge" [[redirects]] - from = "/tutorials/contract_tutorials/advanced/token_bridge/setup" - to = "/tutorials/codealong/contract_tutorials/advanced/token_bridge/setup" + from = "/tutorials/contract_tutorials/advanced/token_bridge/setup" + to = "/tutorials/codealong/contract_tutorials/token_bridge/setup" [[redirects]] from =
"/tutorials/contract_tutorials/crowdfunding_contract" @@ -64,7 +64,7 @@ [[redirects]] from = "/developers/sandbox/*" - to = "/guides/developer_guides/getting_started" + to = "/guides/getting_started" [[redirects]] from = "/developers/contracts/*" @@ -72,7 +72,7 @@ [[redirects]] from = "/dev_docs/*" - to = "/guides/developer_guides/getting_started" + to = "/guides/getting_started" [[redirects]] from = "/aztec/cryptography/cryptography-roadmap" @@ -136,12 +136,16 @@ [[redirects]] from = "/reference/sandbox_reference/sandbox-reference" - to = "/guides/developer_guides/getting_started" + to = "/guides/getting_started" [[redirects]] from = "/reference/sandbox_reference" - to = "/guides/developer_guides/getting_started" + to = "/guides/getting_started" [[redirects]] - from = "/guides/developer_guides/getting_started/quickstart" - to = "/guides/developer_guides/getting_started" \ No newline at end of file + from = "/guides/getting_started/quickstart" + to = "/guides/getting_started" + +[[redirects]] +from = "/guides/developer_guides/getting_started/quickstart" +to = "/guides/developer_guides/getting_started" \ No newline at end of file diff --git a/docs/sidebars.js b/docs/sidebars.js index f8a9c10e42b..673ca9602fa 100644 --- a/docs/sidebars.js +++ b/docs/sidebars.js @@ -8,13 +8,13 @@ export default { sidebar: [ { type: "html", - value: 'Aztec Protocol', + value: 'Concepts', className: "sidebar-title", }, { type: "doc", - id: "aztec/overview", - label: "What is Aztec?", + id: "aztec/concepts_overview", + label: "Concepts Overview", }, { type: "category", @@ -68,54 +68,60 @@ export default { guidesSidebar: [ { - type: "doc", - label: "Popular Guides", - id: "guides/index", + type: "doc", + id: "guides/index", + label: "Guides and Tutorials", }, { type: "html", - value: 'Developer Guides', + value: 'Getting Started', className: "sidebar-title", }, { - type: "autogenerated", - dirName: "guides/developer_guides", + type: "doc", + label: "Quickstart", + id: "guides/getting_started" }, { type: "html", value: '', }, { - type: "doc", - label: "Privacy Considerations", - id: "guides/privacy_considerations", + type: "html", + value: 'Code-along Tutorials', + className: "sidebar-title", }, - ], - tutorialsSidebar: [ { - type: "doc", - label: "Tutorials and Examples", - id: "tutorials/index", + type: "autogenerated", + dirName: "tutorials/codealong", }, { type: "html", - value: 'Code-along Tutorials', + value: 'Advanced Examples', className: "sidebar-title", }, { type: "autogenerated", - dirName: "tutorials/codealong", + dirName: "tutorials/examples", + }, + { + type: "html", + value: '', }, { type: "html", - value: 'Examples', + value: 'How-to Guides', className: "sidebar-title", }, { type: "autogenerated", - dirName: "tutorials/examples", + dirName: "guides/developer_guides" }, - ], + { + type: "html", + value: '', + }, + ], referenceSidebar: [ { @@ -132,10 +138,20 @@ export default { type: "autogenerated", dirName: "reference/developer_references", }, + { + type: "html", + value: 'Considerations', + className: "sidebar-title", + }, { type: "doc", id: "migration_notes", }, + { + type: "doc", + label: "Privacy Considerations", + id: "guides/privacy_considerations" + }, { type: "html", value: '', @@ -144,7 +160,7 @@ export default { type: "doc", id: "aztec_connect_sunset", }, - ], + ], roadmapSidebar: [ { diff --git a/docs/src/components/TutorialCard/CardHeader/index.js b/docs/src/components/TutorialCard/CardHeader/index.js index e8a81543e0e..6ff779bafe5 100644 --- 
a/docs/src/components/TutorialCard/CardHeader/index.js +++ b/docs/src/components/TutorialCard/CardHeader/index.js @@ -1,7 +1,7 @@ import React, { CSSProperties } from 'react'; // CSSProperties allows inline styling with better type checking. import clsx from 'clsx'; // clsx helps manage conditional className names in a clean and concise manner. const CardHeader = ({ - className, // classNamees for the container card + className, // classNames for the container card style, // Custom styles for the container card children, // Content to be included within the card textAlign, @@ -41,4 +41,4 @@ const CardHeader = ({ ); } -export default CardHeader; \ No newline at end of file +export default CardHeader; diff --git a/docs/static/img/authwit.png b/docs/static/img/authwit.png new file mode 100644 index 00000000000..bf941e6b4e0 Binary files /dev/null and b/docs/static/img/authwit.png differ diff --git a/docs/static/img/authwit2.png b/docs/static/img/authwit2.png new file mode 100644 index 00000000000..80cbf1ad8ee Binary files /dev/null and b/docs/static/img/authwit2.png differ diff --git a/docs/static/img/authwit3.png b/docs/static/img/authwit3.png new file mode 100644 index 00000000000..d6b375565c0 Binary files /dev/null and b/docs/static/img/authwit3.png differ diff --git a/docs/static/img/authwit4.png b/docs/static/img/authwit4.png new file mode 100644 index 00000000000..2cb6f4d14a0 Binary files /dev/null and b/docs/static/img/authwit4.png differ diff --git a/docs/static/img/aztec-high-level.png b/docs/static/img/aztec-high-level.png new file mode 100644 index 00000000000..cb4813f3257 Binary files /dev/null and b/docs/static/img/aztec-high-level.png differ diff --git a/docs/static/img/aztec_high_level_network_architecture.png b/docs/static/img/aztec_high_level_network_architecture.png deleted file mode 100644 index db7401112ff..00000000000 Binary files a/docs/static/img/aztec_high_level_network_architecture.png and /dev/null differ diff --git a/docs/static/img/aztec_high_level_network_architecture.svg b/docs/static/img/aztec_high_level_network_architecture.svg deleted file mode 100644 index 12998f66e10..00000000000 --- a/docs/static/img/aztec_high_level_network_architecture.svg +++ /dev/null @@ -1,21 +0,0 @@ - - - - - - - - Aztec NetworkPrivate Execution Environment (PXE)UserAztec.jsACIR SimulatorAztec NodeSequencerEthereumRollup ContractProver NetworkProverProverProverProverDatabasePrivate Kernel CircuitPrivate Execution (Client-side)Public Execution (Network-side) \ No newline at end of file diff --git a/docs/static/img/context/sender_context_change.png b/docs/static/img/context/sender_context_change.png index 5f36f91c488..e0ec94fa1ee 100644 Binary files a/docs/static/img/context/sender_context_change.png and b/docs/static/img/context/sender_context_change.png differ diff --git a/docs/static/img/how-does-aztec-work.webp b/docs/static/img/how-does-aztec-work.webp deleted file mode 100644 index 011378fee2d..00000000000 Binary files a/docs/static/img/how-does-aztec-work.webp and /dev/null differ diff --git a/docs/static/img/public-and-private-state-diagram.png b/docs/static/img/public-and-private-state-diagram.png new file mode 100644 index 00000000000..cb1f6ef5349 Binary files /dev/null and b/docs/static/img/public-and-private-state-diagram.png differ diff --git a/docs/static/img/pxe.png b/docs/static/img/pxe.png new file mode 100644 index 00000000000..2d34ebae78f Binary files /dev/null and b/docs/static/img/pxe.png differ diff --git a/docs/static/img/transaction-lifecycle.png 
b/docs/static/img/transaction-lifecycle.png index c92950d71d2..96852449fcc 100644 Binary files a/docs/static/img/transaction-lifecycle.png and b/docs/static/img/transaction-lifecycle.png differ diff --git a/iac/main.tf b/iac/main.tf index 46b145be06a..5e1dec466d6 100644 --- a/iac/main.tf +++ b/iac/main.tf @@ -125,52 +125,3 @@ resource "aws_route53_record" "static" { evaluate_target_health = true } } - -resource "aws_s3_bucket" "sp_testnet_redirect" { - bucket = "sp-testnet.aztec.network" - - website { - redirect_all_requests_to { - host_name = "github.com" - protocol = "https" - path = "/AztecProtocol/aztec-packages/refs/heads/master/spartan/releases/create-spartan.sh" - } - } -} - -resource "aws_s3_bucket_public_access_block" "sp_testnet_public_access" { - bucket = aws_s3_bucket.sp_testnet_redirect.id - - block_public_acls = false - block_public_policy = false - ignore_public_acls = false - restrict_public_buckets = false -} - -resource "aws_s3_bucket_policy" "sp_testnet_policy" { - bucket = aws_s3_bucket.sp_testnet_redirect.id - - policy = jsonencode({ - Version = "2012-10-17" - Statement = [ - { - Effect = "Allow" - Principal = "*" - Action = "s3:GetObject" - Resource = "arn:aws:s3:::${aws_s3_bucket.sp_testnet_redirect.id}/*" - } - ] - }) -} - -resource "aws_route53_record" "sp_testnet" { - zone_id = data.terraform_remote_state.aztec2_iac.outputs.aws_route53_zone_id - name = "sp-testnet.aztec.network" - type = "A" - - alias { - name = aws_s3_bucket.sp_testnet_redirect.website_domain - zone_id = aws_s3_bucket.sp_testnet_redirect.hosted_zone_id - evaluate_target_health = true - } -} diff --git a/l1-contracts/.solhint.json b/l1-contracts/.solhint.json index f3b1b7f84df..8c347972f9c 100644 --- a/l1-contracts/.solhint.json +++ b/l1-contracts/.solhint.json @@ -27,7 +27,7 @@ "no-unused-vars": "error", "state-visibility": "error", "var-name-mixedcase": "error", - "private-func-leading-underscore": "error", + "private-func-leading-underscore": "warn", "private-vars-no-leading-underscore": "error", "func-param-name-leading-underscore": "error", "func-param-name-mixedcase": "error", diff --git a/l1-contracts/src/core/Leonidas.sol b/l1-contracts/src/core/Leonidas.sol index 6602a0085e6..28113a64fc9 100644 --- a/l1-contracts/src/core/Leonidas.sol +++ b/l1-contracts/src/core/Leonidas.sol @@ -2,16 +2,16 @@ // Copyright 2024 Aztec Labs. 
pragma solidity >=0.8.27; -import {ILeonidas} from "@aztec/core/interfaces/ILeonidas.sol"; -import {SampleLib} from "@aztec/core/libraries/crypto/SampleLib.sol"; -import {SignatureLib} from "@aztec/core/libraries/crypto/SignatureLib.sol"; +import {ILeonidas, EpochData, LeonidasStorage} from "@aztec/core/interfaces/ILeonidas.sol"; +import {Signature} from "@aztec/core/libraries/crypto/SignatureLib.sol"; import {DataStructures} from "@aztec/core/libraries/DataStructures.sol"; import {Errors} from "@aztec/core/libraries/Errors.sol"; +import {LeonidasLib} from "@aztec/core/libraries/LeonidasLib/LeonidasLib.sol"; import { Timestamp, Slot, Epoch, SlotLib, EpochLib, TimeFns } from "@aztec/core/libraries/TimeMath.sol"; -import {Ownable} from "@oz/access/Ownable.sol"; -import {MessageHashUtils} from "@oz/utils/cryptography/MessageHashUtils.sol"; +import {Staking} from "@aztec/core/staking/Staking.sol"; +import {IERC20} from "@oz/token/ERC20/IERC20.sol"; import {EnumerableSet} from "@oz/utils/structs/EnumerableSet.sol"; /** @@ -21,33 +21,17 @@ import {EnumerableSet} from "@oz/utils/structs/EnumerableSet.sol"; * He define the structure needed for committee and leader selection and provides logic for validating that * the block and its "evidence" follows his rules. * - * @dev Leonidas is depending on Ares to add/remove warriors to/from his army competently. - * * @dev Leonidas have one thing in mind, he provide a reference of the LOGIC going on for the spartan selection. * He is not concerned about gas costs, he is a king, he just throw gas in the air like no-one cares. * It will be the duty of his successor (Pleistarchus) to optimize the costs with same functionality. * */ -contract Leonidas is Ownable, TimeFns, ILeonidas { +contract Leonidas is Staking, TimeFns, ILeonidas { using EnumerableSet for EnumerableSet.AddressSet; - using SignatureLib for SignatureLib.Signature; - using MessageHashUtils for bytes32; using SlotLib for Slot; using EpochLib for Epoch; - /** - * @notice The data structure for an epoch - * @param committee - The validator set for the epoch - * @param sampleSeed - The seed used to sample the validator set of the epoch - * @param nextSeed - The seed used to influence the NEXT epoch - */ - struct EpochData { - address[] committee; - uint256 sampleSeed; - uint256 nextSeed; - } - // The target number of validators in a committee // @todo #8021 uint256 public immutable TARGET_COMMITTEE_SIZE; @@ -55,57 +39,22 @@ contract Leonidas is Ownable, TimeFns, ILeonidas { // The time that the contract was deployed Timestamp public immutable GENESIS_TIME; - // An enumerable set of validators that are up to date - EnumerableSet.AddressSet private validatorSet; - - // A mapping to snapshots of the validator set - mapping(Epoch => EpochData) public epochs; - - // The last stored randao value, same value as `seed` in the last inserted epoch - uint256 private lastSeed; + LeonidasStorage private leonidasStore; constructor( address _ares, + IERC20 _stakingAsset, + uint256 _minimumStake, uint256 _slotDuration, uint256 _epochDuration, uint256 _targetCommitteeSize - ) Ownable(_ares) TimeFns(_slotDuration, _epochDuration) { + ) Staking(_ares, _stakingAsset, _minimumStake) TimeFns(_slotDuration, _epochDuration) { GENESIS_TIME = Timestamp.wrap(block.timestamp); SLOT_DURATION = _slotDuration; EPOCH_DURATION = _epochDuration; TARGET_COMMITTEE_SIZE = _targetCommitteeSize; } - /** - * @notice Adds a validator to the validator set - * - * @dev Only ARES can add validators - * - * @dev Will setup the epoch if 
needed BEFORE adding the validator. - * This means that the validator will effectively be added to the NEXT epoch. - * - * @param _validator - The validator to add - */ - function addValidator(address _validator) external override(ILeonidas) onlyOwner { - setupEpoch(); - _addValidator(_validator); - } - - /** - * @notice Removes a validator from the validator set - * - * @dev Only ARES can add validators - * - * @dev Will setup the epoch if needed BEFORE removing the validator. - * This means that the validator will effectively be removed from the NEXT epoch. - * - * @param _validator - The validator to remove - */ - function removeValidator(address _validator) external override(ILeonidas) onlyOwner { - setupEpoch(); - validatorSet.remove(_validator); - } - /** * @notice Get the validator set for a given epoch * @@ -121,7 +70,7 @@ override(ILeonidas) returns (address[] memory) { - return epochs[_epoch].committee; + return leonidasStore.epochs[_epoch].committee; } /** @@ -129,18 +78,32 @@ * @return The validator set for the current epoch */ function getCurrentEpochCommittee() external view override(ILeonidas) returns (address[] memory) { - return _getCommitteeAt(Timestamp.wrap(block.timestamp)); + return LeonidasLib.getCommitteeAt( + leonidasStore, stakingStore, getCurrentEpoch(), TARGET_COMMITTEE_SIZE + ); } - /** - * @notice Get the validator set - * - * @dev Consider removing this to replace with a `size` and individual getter. - * - * @return The validator set - */ - function getValidators() external view override(ILeonidas) returns (address[] memory) { - return validatorSet.values(); + function deposit(address _attester, address _proposer, address _withdrawer, uint256 _amount) + public + override(Staking) + { + setupEpoch(); + require( + _attester != address(0) && _proposer != address(0), + Errors.Leonidas__InvalidDeposit(_attester, _proposer) + ); + super.deposit(_attester, _proposer, _withdrawer, _amount); + } + + function initiateWithdraw(address _attester, address _recipient) + public + override(Staking) + returns (bool) + { + // @note The attester might be chosen for the epoch, so the delay must be long enough + // to allow for that. + setupEpoch(); + return super.initiateWithdraw(_attester, _recipient); } /** @@ -149,49 +112,31 @@ * - Set the seed for the epoch * - Update the last seed * - * @dev Since this is a reference optimising for simplicity, we store the actual validator set in the epoch structure. + * @dev Since this is a reference optimising for simplicity, we store the actual validator set in the epoch structure within `leonidasStore`.
* This is very heavy on gas, so start crying because the gas here will melt the poles * https://i.giphy.com/U1aN4HTfJ2SmgB2BBK.webp */ function setupEpoch() public override(ILeonidas) { Epoch epochNumber = getCurrentEpoch(); - EpochData storage epoch = epochs[epochNumber]; + EpochData storage epoch = leonidasStore.epochs[epochNumber]; if (epoch.sampleSeed == 0) { - epoch.sampleSeed = _getSampleSeed(epochNumber); - epoch.nextSeed = lastSeed = _computeNextSeed(epochNumber); - - epoch.committee = _sampleValidators(epoch.sampleSeed); + epoch.sampleSeed = LeonidasLib.getSampleSeed(leonidasStore, epochNumber); + epoch.nextSeed = leonidasStore.lastSeed = _computeNextSeed(epochNumber); + epoch.committee = + LeonidasLib.sampleValidators(stakingStore, epoch.sampleSeed, TARGET_COMMITTEE_SIZE); } } /** - * @notice Get the number of validators in the validator set - * - * @return The number of validators in the validator set - */ - function getValidatorCount() public view override(ILeonidas) returns (uint256) { - return validatorSet.length(); - } - - /** - * @notice Get the number of validators in the validator set - * - * @return The number of validators in the validator set - */ - function getValidatorAt(uint256 _index) public view override(ILeonidas) returns (address) { - return validatorSet.at(_index); - } - - /** - * @notice Checks if an address is in the validator set + * @notice Get the attester set * - * @param _validator - The address to check + * @dev Consider removing this to replace with a `size` and individual getter. * - * @return True if the address is in the validator set, false otherwise + * @return The validator set */ - function isValidator(address _validator) public view override(ILeonidas) returns (bool) { - return validatorSet.contains(_validator); + function getAttesters() public view override(ILeonidas) returns (address[] memory) { + return stakingStore.attesters.values(); } /** @@ -261,31 +206,11 @@ contract Leonidas is Ownable, TimeFns, ILeonidas { * @return The address of the proposer */ function getProposerAt(Timestamp _ts) public view override(ILeonidas) returns (address) { - Epoch epochNumber = getEpochAt(_ts); Slot slot = getSlotAt(_ts); - - EpochData storage epoch = epochs[epochNumber]; - - // If the epoch is setup, we can just return the proposer. 
Otherwise we have to emulate sampling - if (epoch.sampleSeed != 0) { - uint256 committeeSize = epoch.committee.length; - if (committeeSize == 0) { - return address(0); - } - - return - epoch.committee[_computeProposerIndex(epochNumber, slot, epoch.sampleSeed, committeeSize)]; - } - - // Allow anyone if there is no validator set - if (validatorSet.length() == 0) { - return address(0); - } - - // Emulate a sampling of the validators - uint256 sampleSeed = _getSampleSeed(epochNumber); - address[] memory committee = _sampleValidators(sampleSeed); - return committee[_computeProposerIndex(epochNumber, slot, sampleSeed, committee.length)]; + Epoch epochNumber = getEpochAtSlot(slot); + return LeonidasLib.getProposerAt( + leonidasStore, stakingStore, slot, epochNumber, TARGET_COMMITTEE_SIZE + ); } /** @@ -321,34 +246,19 @@ contract Leonidas is Ownable, TimeFns, ILeonidas { return Epoch.wrap(_slotNumber.unwrap() / EPOCH_DURATION); } - /** - * @notice Adds a validator to the set WITHOUT setting up the epoch - * @param _validator - The validator to add - */ - function _addValidator(address _validator) internal { - validatorSet.add(_validator); - } - - function _getCommitteeAt(Timestamp _ts) internal view returns (address[] memory) { - Epoch epochNumber = getEpochAt(_ts); - EpochData storage epoch = epochs[epochNumber]; - - if (epoch.sampleSeed != 0) { - uint256 committeeSize = epoch.committee.length; - if (committeeSize == 0) { - return new address[](0); - } - return epoch.committee; - } - - // Allow anyone if there is no validator set - if (validatorSet.length() == 0) { - return new address[](0); - } + // Can be used to add validators without setting up the epoch, useful for the initial set. + function _cheat__Deposit( + address _attester, + address _proposer, + address _withdrawer, + uint256 _amount + ) internal { + require( + _attester != address(0) && _proposer != address(0), + Errors.Leonidas__InvalidDeposit(_attester, _proposer) + ); - // Emulate a sampling of the validators - uint256 sampleSeed = _getSampleSeed(epochNumber); - return _sampleValidators(sampleSeed); + super.deposit(_attester, _proposer, _withdrawer, _amount); } /** @@ -369,56 +279,20 @@ contract Leonidas is Ownable, TimeFns, ILeonidas { */ function _validateLeonidas( Slot _slot, - SignatureLib.Signature[] memory _signatures, + Signature[] memory _signatures, bytes32 _digest, DataStructures.ExecutionFlags memory _flags ) internal view { - Timestamp ts = getTimestampForSlot(_slot); - address proposer = getProposerAt(ts); - - // @todo Consider getting rid of this option. - // If the proposer is open, we allow anyone to propose without needing any signatures - if (proposer == address(0)) { - return; - } - - // @todo We should allow to provide a signature instead of needing the proposer to broadcast. - require(proposer == msg.sender, Errors.Leonidas__InvalidProposer(proposer, msg.sender)); - - // @note This is NOT the efficient way to do it, but it is a very convenient way for us to do it - // that allows us to reduce the number of code paths. Also when changed with optimistic for - // pleistarchus, this will be changed, so we can live with it. 
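The proposer-selection rule itself does not change in this refactor; it only moves into LeonidasLib. As a minimal self-contained sketch of the index computation (mirroring the removed _computeProposerIndex further down; Epoch and Slot are simplified to uint256 and the library name is illustrative):

```solidity
// SPDX-License-Identifier: Apache-2.0
pragma solidity >=0.8.27;

/// Sketch only: Epoch and Slot are plain uint256 here instead of the
/// user-defined value types from TimeMath.sol.
library ProposerIndexSketch {
    /// The proposer for a slot is the committee member at a pseudo-random
    /// index derived from the epoch, the slot and the epoch's sample seed.
    function computeProposerIndex(uint256 _epoch, uint256 _slot, uint256 _seed, uint256 _size)
        internal
        pure
        returns (uint256)
    {
        return uint256(keccak256(abi.encode(_epoch, _slot, _seed))) % _size;
    }
}
```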
- - if (_flags.ignoreSignatures) { - return; - } - - address[] memory committee = _getCommitteeAt(ts); - - uint256 needed = committee.length * 2 / 3 + 1; - require( - _signatures.length >= needed, - Errors.Leonidas__InsufficientAttestationsProvided(needed, _signatures.length) - ); - - // Validate the attestations - uint256 validAttestations = 0; - - bytes32 digest = _digest.toEthSignedMessageHash(); - for (uint256 i = 0; i < _signatures.length; i++) { - SignatureLib.Signature memory signature = _signatures[i]; - if (signature.isEmpty) { - continue; - } - - // The verification will throw if invalid - signature.verify(committee[i], digest); - validAttestations++; - } - - require( - validAttestations >= needed, - Errors.Leonidas__InsufficientAttestations(needed, validAttestations) + Epoch epochNumber = getEpochAtSlot(_slot); + LeonidasLib.validateLeonidas( + leonidasStore, + stakingStore, + _slot, + epochNumber, + _signatures, + _digest, + _flags, + TARGET_COMMITTEE_SIZE ); } @@ -435,82 +309,4 @@ contract Leonidas is Ownable, TimeFns, ILeonidas { function _computeNextSeed(Epoch _epoch) private view returns (uint256) { return uint256(keccak256(abi.encode(_epoch, block.prevrandao))); } - - /** - * @notice Samples a validator set for a specific epoch - * - * @dev Only used internally, should never be called for anything but the "next" epoch - * Allowing us to always use `lastSeed`. - * - * @return The validators for the given epoch - */ - function _sampleValidators(uint256 _seed) private view returns (address[] memory) { - uint256 validatorSetSize = validatorSet.length(); - if (validatorSetSize == 0) { - return new address[](0); - } - - // If we have less validators than the target committee size, we just return the full set - if (validatorSetSize <= TARGET_COMMITTEE_SIZE) { - return validatorSet.values(); - } - - uint256[] memory indicies = - SampleLib.computeCommitteeClever(TARGET_COMMITTEE_SIZE, validatorSetSize, _seed); - - address[] memory committee = new address[](TARGET_COMMITTEE_SIZE); - for (uint256 i = 0; i < TARGET_COMMITTEE_SIZE; i++) { - committee[i] = validatorSet.at(indicies[i]); - } - return committee; - } - - /** - * @notice Get the sample seed for an epoch - * - * @dev This should behave as walking past the line, but it does not currently do that. - * If there are entire skips, e.g., 1, 2, 5 and we then go back and try executing - * for 4 we will get an invalid value because we will read lastSeed which is from 5. - * - * @dev The `_epoch` will never be 0 nor in the future - * - * @dev The return value will be equal to keccak256(n, block.prevrandao) for n being the last epoch - * setup. 
- * - * @return The sample seed for the epoch - */ - function _getSampleSeed(Epoch _epoch) private view returns (uint256) { - if (Epoch.unwrap(_epoch) == 0) { - return type(uint256).max; - } - uint256 sampleSeed = epochs[_epoch].sampleSeed; - if (sampleSeed != 0) { - return sampleSeed; - } - - sampleSeed = epochs[_epoch - Epoch.wrap(1)].nextSeed; - if (sampleSeed != 0) { - return sampleSeed; - } - - return lastSeed; - } - - /** - * @notice Computes the index of the committee member that acts as proposer for a given slot - * - * @param _epoch - The epoch to compute the proposer index for - * @param _slot - The slot to compute the proposer index for - * @param _seed - The seed to use for the computation - * @param _size - The size of the committee - * - * @return The index of the proposer - */ - function _computeProposerIndex(Epoch _epoch, Slot _slot, uint256 _seed, uint256 _size) - private - pure - returns (uint256) - { - return uint256(keccak256(abi.encode(_epoch, _slot, _seed))) % _size; - } } diff --git a/l1-contracts/src/core/Rollup.sol b/l1-contracts/src/core/Rollup.sol index ef98e18d6cc..11ee424ba4d 100644 --- a/l1-contracts/src/core/Rollup.sol +++ b/l1-contracts/src/core/Rollup.sol @@ -7,9 +7,13 @@ import {IProofCommitmentEscrow} from "@aztec/core/interfaces/IProofCommitmentEsc import { IRollup, ITestRollup, + CheatDepositArgs, FeeHeader, ManaBaseFeeComponents, BlockLog, + ChainTips, + RollupStore, + L1GasOracleValues, L1FeeData, SubmitEpochRootProofArgs } from "@aztec/core/interfaces/IRollup.sol"; @@ -19,46 +23,35 @@ import {IOutbox} from "@aztec/core/interfaces/messagebridge/IOutbox.sol"; import {Leonidas} from "@aztec/core/Leonidas.sol"; import {Constants} from "@aztec/core/libraries/ConstantsGen.sol"; import {MerkleLib} from "@aztec/core/libraries/crypto/MerkleLib.sol"; -import {SignatureLib} from "@aztec/core/libraries/crypto/SignatureLib.sol"; +import {Signature} from "@aztec/core/libraries/crypto/SignatureLib.sol"; import {DataStructures} from "@aztec/core/libraries/DataStructures.sol"; -import {EpochProofQuoteLib} from "@aztec/core/libraries/EpochProofQuoteLib.sol"; import {Errors} from "@aztec/core/libraries/Errors.sol"; -import {FeeMath} from "@aztec/core/libraries/FeeMath.sol"; -import {HeaderLib} from "@aztec/core/libraries/HeaderLib.sol"; -import {ProposeArgs, ProposeLib} from "@aztec/core/libraries/ProposeLib.sol"; +import { + ExtRollupLib, + ValidateHeaderArgs, + Header, + SignedEpochProofQuote, + SubmitEpochRootProofInterimValues +} from "@aztec/core/libraries/RollupLibs/ExtRollupLib.sol"; +import {IntRollupLib, EpochProofQuote} from "@aztec/core/libraries/RollupLibs/IntRollupLib.sol"; +import {ProposeArgs, ProposeLib} from "@aztec/core/libraries/RollupLibs/ProposeLib.sol"; import {Timestamp, Slot, Epoch, SlotLib, EpochLib} from "@aztec/core/libraries/TimeMath.sol"; -import {TxsDecoder} from "@aztec/core/libraries/TxsDecoder.sol"; import {Inbox} from "@aztec/core/messagebridge/Inbox.sol"; import {Outbox} from "@aztec/core/messagebridge/Outbox.sol"; import {ProofCommitmentEscrow} from "@aztec/core/ProofCommitmentEscrow.sol"; import {IRewardDistributor} from "@aztec/governance/interfaces/IRewardDistributor.sol"; import {MockVerifier} from "@aztec/mock/MockVerifier.sol"; +import {Ownable} from "@oz/access/Ownable.sol"; import {IERC20} from "@oz/token/ERC20/IERC20.sol"; -import {SafeERC20} from "@oz/token/ERC20/utils/SafeERC20.sol"; import {EIP712} from "@oz/utils/cryptography/EIP712.sol"; -import {Math} from "@oz/utils/math/Math.sol"; -import {SafeCast} from 
"@oz/utils/math/SafeCast.sol"; import {Vm} from "forge-std/Vm.sol"; -struct ChainTips { - uint256 pendingBlockNumber; - uint256 provenBlockNumber; -} - struct Config { uint256 aztecSlotDuration; uint256 aztecEpochDuration; uint256 targetCommitteeSize; uint256 aztecEpochProofClaimWindowInL2Slots; -} - -struct SubmitEpochRootProofInterimValues { - uint256 previousBlockNumber; - uint256 endBlockNumber; - Epoch epochToProve; - Epoch startEpoch; - bool isFeeCanonical; - bool isRewardDistributorCanonical; + uint256 minimumStake; } /** @@ -67,23 +60,12 @@ struct SubmitEpochRootProofInterimValues { * @notice Rollup contract that is concerned about readability and velocity of development * not giving a damn about gas costs. */ -contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { - using SafeCast for uint256; +contract Rollup is EIP712("Aztec Rollup", "1"), Ownable, Leonidas, IRollup, ITestRollup { using SlotLib for Slot; using EpochLib for Epoch; - using SafeERC20 for IERC20; using ProposeLib for ProposeArgs; - using FeeMath for uint256; - using FeeMath for ManaBaseFeeComponents; - - struct L1GasOracleValues { - L1FeeData pre; - L1FeeData post; - Slot slotOfChange; - } - - uint256 internal constant BLOB_GAS_PER_BLOB = 2 ** 17; - uint256 internal constant GAS_PER_BLOB_POINT_EVALUATION = 50_000; + using IntRollupLib for uint256; + using IntRollupLib for ManaBaseFeeComponents; Slot public constant LIFETIME = Slot.wrap(5); Slot public constant LAG = Slot.wrap(2); @@ -109,44 +91,32 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { IRewardDistributor public immutable REWARD_DISTRIBUTOR; IERC20 public immutable ASSET; - IVerifier public epochProofVerifier; - - ChainTips public tips; - DataStructures.EpochProofClaim public proofClaim; - - // @todo Validate assumption: - // Currently we assume that the archive root following a block is specific to the block - // e.g., changing any values in the block or header should in the end make its way to the archive - // - // More direct approach would be storing keccak256(header) as well - mapping(uint256 blockNumber => BlockLog log) internal blocks; - - bytes32 public vkTreeRoot; - bytes32 public protocolContractTreeRoot; + RollupStore internal rollupStore; // @note Assume that all blocks up to this value (inclusive) are automatically proven. Speeds up bootstrapping. // Testing only. This should be removed eventually. 
uint256 private assumeProvenThroughBlockNumber; - L1GasOracleValues public l1GasOracleValues; - constructor( IFeeJuicePortal _fpcJuicePortal, IRewardDistributor _rewardDistributor, + IERC20 _stakingAsset, bytes32 _vkTreeRoot, bytes32 _protocolContractTreeRoot, address _ares, - address[] memory _validators, Config memory _config ) + Ownable(_ares) Leonidas( _ares, + _stakingAsset, + _config.minimumStake, _config.aztecSlotDuration, _config.aztecEpochDuration, _config.targetCommitteeSize ) { - epochProofVerifier = new MockVerifier(); + rollupStore.epochProofVerifier = new MockVerifier(); FEE_JUICE_PORTAL = _fpcJuicePortal; REWARD_DISTRIBUTOR = _rewardDistributor; ASSET = _fpcJuicePortal.UNDERLYING(); @@ -155,8 +125,8 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { ); INBOX = IInbox(address(new Inbox(address(this), Constants.L1_TO_L2_MSG_SUBTREE_HEIGHT))); OUTBOX = IOutbox(address(new Outbox(address(this)))); - vkTreeRoot = _vkTreeRoot; - protocolContractTreeRoot = _protocolContractTreeRoot; + rollupStore.vkTreeRoot = _vkTreeRoot; + rollupStore.protocolContractTreeRoot = _protocolContractTreeRoot; VERSION = 1; L1_BLOCK_AT_GENESIS = block.number; CLAIM_DURATION_IN_L2_SLOTS = _config.aztecEpochProofClaimWindowInL2Slots; @@ -164,7 +134,7 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { IS_FOUNDRY_TEST = VM_ADDRESS.code.length > 0; // Genesis block - blocks[0] = BlockLog({ + rollupStore.blocks[0] = BlockLog({ feeHeader: FeeHeader({ excessMana: 0, feeAssetPriceNumerator: 0, @@ -176,13 +146,20 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { blockHash: bytes32(0), // TODO(palla/prover): The first block does not have hash zero slotNumber: Slot.wrap(0) }); - l1GasOracleValues = L1GasOracleValues({ + rollupStore.l1GasOracleValues = L1GasOracleValues({ pre: L1FeeData({baseFee: 1 gwei, blobFee: 1}), post: L1FeeData({baseFee: block.basefee, blobFee: _getBlobBaseFee()}), slotOfChange: LIFETIME }); - for (uint256 i = 0; i < _validators.length; i++) { - _addValidator(_validators[i]); + } + + function cheat__InitialiseValidatorSet(CheatDepositArgs[] memory _args) + external + override(ITestRollup) + onlyOwner + { + for (uint256 i = 0; i < _args.length; i++) { + _cheat__Deposit(_args[i].attester, _args[i].proposer, _args[i].withdrawer, _args[i].amount); } setupEpoch(); } @@ -218,7 +195,7 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { * @param _verifier - The new verifier contract */ function setEpochVerifier(address _verifier) external override(ITestRollup) onlyOwner { - epochProofVerifier = IVerifier(_verifier); + rollupStore.epochProofVerifier = IVerifier(_verifier); } /** @@ -229,7 +206,7 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { * @param _vkTreeRoot - The new vkTreeRoot to be used by proofs */ function setVkTreeRoot(bytes32 _vkTreeRoot) external override(ITestRollup) onlyOwner { - vkTreeRoot = _vkTreeRoot; + rollupStore.vkTreeRoot = _vkTreeRoot; } /** @@ -244,7 +221,7 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { override(ITestRollup) onlyOwner { - protocolContractTreeRoot = _protocolContractTreeRoot; + rollupStore.protocolContractTreeRoot = _protocolContractTreeRoot; } /** @@ -257,9 +234,9 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { */ function proposeAndClaim( ProposeArgs calldata _args, - SignatureLib.Signature[] memory _signatures, + 
Signature[] memory _signatures, bytes calldata _body, - EpochProofQuoteLib.SignedEpochProofQuote calldata _quote + SignedEpochProofQuote calldata _quote ) external override(IRollup) { propose(_args, _signatures, _body); claimEpochProofRight(_quote); @@ -291,99 +268,43 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { _prune(); } + // We want to compute the two epoch values beforehand. Could we do partial interim? + // We compute these in here to avoid a lot of pain with linking libraries and passing + // external functions into internal functions as args. SubmitEpochRootProofInterimValues memory interimValues; - - interimValues.previousBlockNumber = tips.provenBlockNumber; + interimValues.previousBlockNumber = rollupStore.tips.provenBlockNumber; interimValues.endBlockNumber = interimValues.previousBlockNumber + _args.epochSize; - // @note The getEpochForBlock is expected to revert if the block is beyond pending. + // @note The getEpochForBlock is expected to revert if the block is beyond pending. // If this changes you are gonna get so rekt you won't believe it. // I mean proving blocks that have been pruned rekt. - interimValues.epochToProve = getEpochForBlock(interimValues.endBlockNumber); interimValues.startEpoch = getEpochForBlock(interimValues.previousBlockNumber + 1); + interimValues.epochToProve = getEpochForBlock(interimValues.endBlockNumber); - // Ensure that the proof is not across epochs - require( - interimValues.startEpoch == interimValues.epochToProve, - Errors.Rollup__InvalidEpoch(interimValues.startEpoch, interimValues.epochToProve) + uint256 endBlockNumber = ExtRollupLib.submitEpochRootProof( + rollupStore, + _args, + interimValues, + PROOF_COMMITMENT_ESCROW, + FEE_JUICE_PORTAL, + REWARD_DISTRIBUTOR, + ASSET, + CUAUHXICALLI ); + emit L2ProofVerified(endBlockNumber, _args.args[6]); + } - bytes32[] memory publicInputs = - getEpochProofPublicInputs(_args.epochSize, _args.args, _args.fees, _args.aggregationObject); - - require(epochProofVerifier.verify(_args.proof, publicInputs), Errors.Rollup__InvalidProof()); - - if (proofClaim.epochToProve == interimValues.epochToProve) { - PROOF_COMMITMENT_ESCROW.unstakeBond(proofClaim.bondProvider, proofClaim.bondAmount); - } - - tips.provenBlockNumber = interimValues.endBlockNumber; - - // @note Only if the rollup is the canonical will it be able to meaningfully claim fees - // Otherwise, the fees are unbacked #7938. interimValues.isFeeCanonical = address(this) == FEE_JUICE_PORTAL.canonicalRollup(); - interimValues.isRewardDistributorCanonical = - address(this) == REWARD_DISTRIBUTOR.canonicalRollup(); - - uint256 totalProverReward = 0; - uint256 totalBurn = 0; - - if (interimValues.isFeeCanonical || interimValues.isRewardDistributorCanonical) { - for (uint256 i = 0; i < _args.epochSize; i++) { - address coinbase = address(uint160(uint256(publicInputs[9 + i * 2]))); - uint256 reward = 0; - uint256 toProver = 0; - uint256 burn = 0; - - if (interimValues.isFeeCanonical) { - uint256 fees = uint256(publicInputs[10 + i * 2]); - if (fees > 0) { - // This is insanely expensive, and will be fixed as part of the general storage cost reduction. - // See #9826.
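The 9 + i * 2 and 10 + i * 2 indexing in this removed loop walks the recipient-value fee pairs that getEpochProofPublicInputs lays out after the first nine root-rollup public inputs. A small sketch of that decoding (library and function names are illustrative, not part of the PR):

```solidity
// SPDX-License-Identifier: Apache-2.0
pragma solidity >=0.8.27;

/// Illustrative decoder for one (coinbase, fees) pair from the epoch proof
/// public inputs; pairs start at index 9, matching the loop above and the
/// layout assembled in getEpochProofPublicInputs.
library FeePairSketch {
    function feePair(bytes32[] memory _publicInputs, uint256 _i)
        internal
        pure
        returns (address coinbase, uint256 fees)
    {
        coinbase = address(uint160(uint256(_publicInputs[9 + _i * 2])));
        fees = uint256(_publicInputs[10 + _i * 2]);
    }
}
```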
- FeeHeader storage feeHeader = - blocks[interimValues.previousBlockNumber + 1 + i].feeHeader; - burn += feeHeader.congestionCost * feeHeader.manaUsed; - - reward += (fees - burn); - FEE_JUICE_PORTAL.distributeFees(address(this), fees); - } - } - - if (interimValues.isRewardDistributorCanonical) { - reward += REWARD_DISTRIBUTOR.claim(address(this)); - } - - if (coinbase == address(0)) { - toProver = reward; - } else { - // @note We are getting value from the `proofClaim`, which are not cleared. - // So if someone is posting the proof before a new claim is made, - // the reward will calculated based on the previous values. - toProver = Math.mulDiv(reward, proofClaim.basisPointFee, 10_000); - } - - uint256 toCoinbase = reward - toProver; - if (toCoinbase > 0) { - ASSET.safeTransfer(coinbase, toCoinbase); - } - - totalProverReward += toProver; - totalBurn += burn; - } - - if (totalProverReward > 0) { - // If there is a bond-provider give him the reward, otherwise give it to the submitter. - address proofRewardRecipient = - proofClaim.bondProvider == address(0) ? msg.sender : proofClaim.bondProvider; - ASSET.safeTransfer(proofRewardRecipient, totalProverReward); - } - - if (totalBurn > 0) { - ASSET.safeTransfer(CUAUHXICALLI, totalBurn); - } - } + function getProofClaim() + external + view + override(IRollup) + returns (DataStructures.EpochProofClaim memory) + { + return rollupStore.proofClaim; + } - emit L2ProofVerified(interimValues.endBlockNumber, _args.args[6]); + function getTips() external view override(IRollup) returns (ChainTips memory) { + return rollupStore.tips; } function status(uint256 _myHeaderBlockNumber) @@ -400,12 +321,35 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { ) { return ( - tips.provenBlockNumber, - blocks[tips.provenBlockNumber].archive, - tips.pendingBlockNumber, - blocks[tips.pendingBlockNumber].archive, + rollupStore.tips.provenBlockNumber, + rollupStore.blocks[rollupStore.tips.provenBlockNumber].archive, + rollupStore.tips.pendingBlockNumber, + rollupStore.blocks[rollupStore.tips.pendingBlockNumber].archive, archiveAt(_myHeaderBlockNumber), - getEpochForBlock(tips.provenBlockNumber) + getEpochForBlock(rollupStore.tips.provenBlockNumber) + ); + } + + /** + * @notice Returns the computed public inputs for the given epoch proof. + * + * @dev Useful for debugging and testing. Allows submitter to compare their + * own public inputs used for generating the proof vs the ones assembled + * by this contract when verifying it. + * + * @param _epochSize - The size of the epoch (to be promoted to a constant) + * @param _args - Array of public inputs to the proof (previousArchive, endArchive, previousBlockHash, endBlockHash, endTimestamp, outHash, proverId) + * @param _fees - Array of recipient-value pairs with fees to be distributed for the epoch + * @param _aggregationObject - The aggregation object for the proof + */ + function getEpochProofPublicInputs( + uint256 _epochSize, + bytes32[7] calldata _args, + bytes32[] calldata _fees, + bytes calldata _aggregationObject + ) external view override(IRollup) returns (bytes32[] memory) { + return ExtRollupLib.getEpochProofPublicInputs( + rollupStore, _epochSize, _args, _fees, _aggregationObject ); } @@ -428,17 +372,17 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { // Consider if a prune will hit in this slot uint256 pendingBlockNumber = - canPruneAtTime(_ts) ? tips.provenBlockNumber : tips.pendingBlockNumber; + canPruneAtTime(_ts) ? 
rollupStore.tips.provenBlockNumber : rollupStore.tips.pendingBlockNumber; - Slot lastSlot = blocks[pendingBlockNumber].slotNumber; + Slot lastSlot = rollupStore.blocks[pendingBlockNumber].slotNumber; require(slot > lastSlot, Errors.Rollup__SlotAlreadyInChain(lastSlot, slot)); // Make sure that the proposer is up to date and on the right chain (ie no reorgs) - bytes32 tipArchive = blocks[pendingBlockNumber].archive; + bytes32 tipArchive = rollupStore.blocks[pendingBlockNumber].archive; require(tipArchive == _archive, Errors.Rollup__InvalidArchive(tipArchive, _archive)); - SignatureLib.Signature[] memory sigs = new SignatureLib.Signature[](0); + Signature[] memory sigs = new Signature[](0); DataStructures.ExecutionFlags memory flags = DataStructures.ExecutionFlags({ignoreDA: true, ignoreSignatures: true}); _validateLeonidas(slot, sigs, _archive, flags); @@ -460,14 +404,14 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { */ function validateHeader( bytes calldata _header, - SignatureLib.Signature[] memory _signatures, + Signature[] memory _signatures, bytes32 _digest, Timestamp _currentTime, bytes32 _txsEffectsHash, DataStructures.ExecutionFlags memory _flags ) external view override(IRollup) { uint256 manaBaseFee = getManaBaseFeeAt(_currentTime, true); - HeaderLib.Header memory header = HeaderLib.decode(_header); + Header memory header = ExtRollupLib.decodeHeader(_header); _validateHeader( header, _signatures, _digest, _currentTime, manaBaseFee, _txsEffectsHash, _flags ); @@ -481,12 +425,12 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { Epoch epochToProve = getEpochToProve(); require( // If the epoch has been claimed, it cannot be claimed again - proofClaim.epochToProve != epochToProve + rollupStore.proofClaim.epochToProve != epochToProve // Edge case for if no claim has been made yet. // We know that the bondProvider is always set, // Since otherwise the claimEpochProofRight would have reverted, // because the zero address cannot have deposited funds into escrow. - || proofClaim.bondProvider == address(0), + || rollupStore.proofClaim.bondProvider == address(0), Errors.Rollup__ProofRightAlreadyClaimed() ); return epochToProve; @@ -498,13 +442,10 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { override(IRollup) returns (bytes32) { - return TxsDecoder.decode(_body); + return ExtRollupLib.computeTxsEffectsHash(_body); } - function claimEpochProofRight(EpochProofQuoteLib.SignedEpochProofQuote calldata _quote) - public - override(IRollup) - { + function claimEpochProofRight(SignedEpochProofQuote calldata _quote) public override(IRollup) { validateEpochProofRightClaimAtTime(Timestamp.wrap(block.timestamp), _quote); Slot currentSlot = getCurrentSlot(); @@ -515,7 +456,7 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { // Blocked on submitting epoch proofs to this contract. 
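The quote staked against here is EIP-712 typed data: validateEpochProofRightClaimAtTime checks the prover's signature against quoteToDigest(_quote.quote). A hedged sketch of rebuilding that digest off-chain, reusing the typehash from the (relocated) EpochProofQuoteLib shown later in this diff; _domainSeparator is an assumed input that would come from the rollup's EIP712("Aztec Rollup", "1") domain:

```solidity
// SPDX-License-Identifier: Apache-2.0
pragma solidity >=0.8.27;

/// Sketch: reconstructing the digest a prover signs over an EpochProofQuote.
/// The typehash string matches EpochProofQuoteLib (moved to RollupLibs by this
/// PR); _domainSeparator is an assumption, in practice derived from the
/// rollup's EIP-712 domain ("Aztec Rollup", "1", chainid, rollup address).
library QuoteDigestSketch {
    bytes32 internal constant EPOCH_PROOF_QUOTE_TYPEHASH = keccak256(
        "EpochProofQuote(uint256 epochToProve,uint256 validUntilSlot,uint256 bondAmount,address prover,uint32 basisPointFee)"
    );

    function digest(
        bytes32 _domainSeparator,
        uint256 _epochToProve,
        uint256 _validUntilSlot,
        uint256 _bondAmount,
        address _prover,
        uint32 _basisPointFee
    ) internal pure returns (bytes32) {
        bytes32 structHash = keccak256(
            abi.encode(
                EPOCH_PROOF_QUOTE_TYPEHASH,
                _epochToProve,
                _validUntilSlot,
                _bondAmount,
                _prover,
                _basisPointFee
            )
        );
        // standard EIP-712 envelope: keccak256("\x19\x01" || domain || struct)
        return keccak256(abi.encodePacked("\x19\x01", _domainSeparator, structHash));
    }
}
```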
PROOF_COMMITMENT_ESCROW.stakeBond(_quote.quote.prover, _quote.quote.bondAmount); - proofClaim = DataStructures.EpochProofClaim({ + rollupStore.proofClaim = DataStructures.EpochProofClaim({ epochToProve: epochToProve, basisPointFee: _quote.quote.basisPointFee, bondAmount: _quote.quote.bondAmount, @@ -536,11 +477,10 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { * @param _signatures - Signatures from the validators * @param _body - The body of the L2 block */ - function propose( - ProposeArgs calldata _args, - SignatureLib.Signature[] memory _signatures, - bytes calldata _body - ) public override(IRollup) { + function propose(ProposeArgs calldata _args, Signature[] memory _signatures, bytes calldata _body) + public + override(IRollup) + { if (canPrune()) { _prune(); } @@ -549,15 +489,15 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { // The `body` is passed outside the "args" as it does not directly need to be in the digest // as long as the `txsEffectsHash` is included and matches what is in the header. // Which we are checking in the `_validateHeader` call below. - bytes32 txsEffectsHash = TxsDecoder.decode(_body); + bytes32 txsEffectsHash = ExtRollupLib.computeTxsEffectsHash(_body); // Decode and validate header - HeaderLib.Header memory header = HeaderLib.decode(_args.header); + Header memory header = ExtRollupLib.decodeHeader(_args.header); setupEpoch(); ManaBaseFeeComponents memory components = getManaBaseFeeComponentsAt(Timestamp.wrap(block.timestamp), true); - uint256 manaBaseFee = FeeMath.summedBaseFee(components); + uint256 manaBaseFee = components.summedBaseFee(); _validateHeader({ _header: header, _signatures: _signatures, @@ -568,15 +508,13 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { _flags: DataStructures.ExecutionFlags({ignoreDA: false, ignoreSignatures: false}) }); - uint256 blockNumber = ++tips.pendingBlockNumber; + uint256 blockNumber = ++rollupStore.tips.pendingBlockNumber; { - FeeHeader memory parentFeeHeader = blocks[blockNumber - 1].feeHeader; - uint256 excessMana = (parentFeeHeader.excessMana + parentFeeHeader.manaUsed).clampedAdd( - -int256(FeeMath.MANA_TARGET) - ); + FeeHeader memory parentFeeHeader = rollupStore.blocks[blockNumber - 1].feeHeader; + uint256 excessMana = IntRollupLib.computeExcessMana(parentFeeHeader); - blocks[blockNumber] = BlockLog({ + rollupStore.blocks[blockNumber] = BlockLog({ archive: _args.archive, blockHash: _args.blockHash, slotNumber: Slot.wrap(header.globalVariables.slotNumber), @@ -639,15 +577,16 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { function updateL1GasFeeOracle() public override(IRollup) { Slot slot = getCurrentSlot(); // The slot where we find a new queued value acceptable - Slot acceptableSlot = l1GasOracleValues.slotOfChange + (LIFETIME - LAG); + Slot acceptableSlot = rollupStore.l1GasOracleValues.slotOfChange + (LIFETIME - LAG); if (slot < acceptableSlot) { return; } - l1GasOracleValues.pre = l1GasOracleValues.post; - l1GasOracleValues.post = L1FeeData({baseFee: block.basefee, blobFee: _getBlobBaseFee()}); - l1GasOracleValues.slotOfChange = slot + LAG; + rollupStore.l1GasOracleValues.pre = rollupStore.l1GasOracleValues.post; + rollupStore.l1GasOracleValues.post = + L1FeeData({baseFee: block.basefee, blobFee: _getBlobBaseFee()}); + rollupStore.l1GasOracleValues.slotOfChange = slot + LAG; } /** @@ -656,8 +595,8 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, 
IRollup, ITestRollup { * @return The fee asset price */ function getFeeAssetPrice() public view override(IRollup) returns (uint256) { - return FeeMath.feeAssetPriceModifier( - blocks[tips.pendingBlockNumber].feeHeader.feeAssetPriceNumerator + return IntRollupLib.feeAssetPriceModifier( + rollupStore.blocks[rollupStore.tips.pendingBlockNumber].feeHeader.feeAssetPriceNumerator ); } @@ -668,10 +607,10 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { returns (L1FeeData memory) { Slot slot = getSlotAt(_timestamp); - if (slot < l1GasOracleValues.slotOfChange) { - return l1GasOracleValues.pre; + if (slot < rollupStore.l1GasOracleValues.slotOfChange) { + return rollupStore.l1GasOracleValues.pre; } - return l1GasOracleValues.post; + return rollupStore.l1GasOracleValues.post; } /** @@ -709,244 +648,49 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { returns (ManaBaseFeeComponents memory) { // If we can prune, we use the proven block, otherwise the pending block - uint256 blockOfInterest = - canPruneAtTime(_timestamp) ? tips.provenBlockNumber : tips.pendingBlockNumber; - - FeeHeader storage parentFeeHeader = blocks[blockOfInterest].feeHeader; - uint256 excessMana = (parentFeeHeader.excessMana + parentFeeHeader.manaUsed).clampedAdd( - -int256(FeeMath.MANA_TARGET) - ); - - L1FeeData memory fees = getL1FeesAt(_timestamp); - uint256 dataCost = - Math.mulDiv(3 * BLOB_GAS_PER_BLOB, fees.blobFee, FeeMath.MANA_TARGET, Math.Rounding.Ceil); - uint256 gasUsed = FeeMath.L1_GAS_PER_BLOCK_PROPOSED + 3 * GAS_PER_BLOB_POINT_EVALUATION - + FeeMath.L1_GAS_PER_EPOCH_VERIFIED / EPOCH_DURATION; - uint256 gasCost = Math.mulDiv(gasUsed, fees.baseFee, FeeMath.MANA_TARGET, Math.Rounding.Ceil); - uint256 provingCost = FeeMath.provingCostPerMana( - blocks[tips.pendingBlockNumber].feeHeader.provingCostPerManaNumerator + uint256 blockOfInterest = canPruneAtTime(_timestamp) + ? rollupStore.tips.provenBlockNumber + : rollupStore.tips.pendingBlockNumber; + + return ExtRollupLib.getManaBaseFeeComponentsAt( + rollupStore.blocks[blockOfInterest].feeHeader, + getL1FeesAt(_timestamp), + _inFeeAsset ? getFeeAssetPrice() : 1e9, + EPOCH_DURATION ); - - uint256 congestionMultiplier = FeeMath.congestionMultiplier(excessMana); - uint256 total = dataCost + gasCost + provingCost; - uint256 congestionCost = Math.mulDiv( - total, congestionMultiplier, FeeMath.MINIMUM_CONGESTION_MULTIPLIER, Math.Rounding.Floor - ) - total; - - uint256 feeAssetPrice = _inFeeAsset ? getFeeAssetPrice() : 1e9; - - // @todo @lherskind. The following is a crime against humanity, but it makes it - // very neat to plot etc from python, #10004 will fix it across the board - return ManaBaseFeeComponents({ - dataCost: Math.mulDiv(dataCost, feeAssetPrice, 1e9, Math.Rounding.Ceil), - gasCost: Math.mulDiv(gasCost, feeAssetPrice, 1e9, Math.Rounding.Ceil), - provingCost: Math.mulDiv(provingCost, feeAssetPrice, 1e9, Math.Rounding.Ceil), - congestionCost: Math.mulDiv(congestionCost, feeAssetPrice, 1e9, Math.Rounding.Ceil), - congestionMultiplier: congestionMultiplier - }); } - function quoteToDigest(EpochProofQuoteLib.EpochProofQuote memory _quote) + function quoteToDigest(EpochProofQuote memory _quote) public view override(IRollup) returns (bytes32) { - return _hashTypedDataV4(EpochProofQuoteLib.hash(_quote)); - } - - /** - * @notice Returns the computed public inputs for the given epoch proof. - * - * @dev Useful for debugging and testing. 
Allows submitter to compare their - * own public inputs used for generating the proof vs the ones assembled - * by this contract when verifying it. - * - * @param _epochSize - The size of the epoch (to be promoted to a constant) - * @param _args - Array of public inputs to the proof (previousArchive, endArchive, previousBlockHash, endBlockHash, endTimestamp, outHash, proverId) - * @param _fees - Array of recipient-value pairs with fees to be distributed for the epoch - * @param _aggregationObject - The aggregation object for the proof - */ - function getEpochProofPublicInputs( - uint256 _epochSize, - bytes32[7] calldata _args, - bytes32[] calldata _fees, - bytes calldata _aggregationObject - ) public view override(IRollup) returns (bytes32[] memory) { - uint256 previousBlockNumber = tips.provenBlockNumber; - uint256 endBlockNumber = previousBlockNumber + _epochSize; - - // Args are defined as an array because Solidity complains with "stack too deep" otherwise - // 0 bytes32 _previousArchive, - // 1 bytes32 _endArchive, - // 2 bytes32 _previousBlockHash, - // 3 bytes32 _endBlockHash, - // 4 bytes32 _endTimestamp, - // 5 bytes32 _outHash, - // 6 bytes32 _proverId, - - // TODO(#7373): Public inputs are not fully verified - - { - // We do it this way to provide better error messages than passing along the storage values - bytes32 expectedPreviousArchive = blocks[previousBlockNumber].archive; - require( - expectedPreviousArchive == _args[0], - Errors.Rollup__InvalidPreviousArchive(expectedPreviousArchive, _args[0]) - ); - - bytes32 expectedEndArchive = blocks[endBlockNumber].archive; - require( - expectedEndArchive == _args[1], Errors.Rollup__InvalidArchive(expectedEndArchive, _args[1]) - ); - - bytes32 expectedPreviousBlockHash = blocks[previousBlockNumber].blockHash; - // TODO: Remove 0 check once we inject the proper genesis block hash - require( - expectedPreviousBlockHash == 0 || expectedPreviousBlockHash == _args[2], - Errors.Rollup__InvalidPreviousBlockHash(expectedPreviousBlockHash, _args[2]) - ); - - bytes32 expectedEndBlockHash = blocks[endBlockNumber].blockHash; - require( - expectedEndBlockHash == _args[3], - Errors.Rollup__InvalidBlockHash(expectedEndBlockHash, _args[3]) - ); - } - - bytes32[] memory publicInputs = new bytes32[]( - Constants.ROOT_ROLLUP_PUBLIC_INPUTS_LENGTH + Constants.AGGREGATION_OBJECT_LENGTH - ); - - // Structure of the root rollup public inputs we need to reassemble: - // - // struct RootRollupPublicInputs { - // previous_archive: AppendOnlyTreeSnapshot, - // end_archive: AppendOnlyTreeSnapshot, - // previous_block_hash: Field, - // end_block_hash: Field, - // end_timestamp: u64, - // end_block_number: Field, - // out_hash: Field, - // fees: [FeeRecipient; Constants.AZTEC_EPOCH_DURATION], - // vk_tree_root: Field, - // protocol_contract_tree_root: Field, - // prover_id: Field - // } - - // previous_archive.root: the previous archive tree root - publicInputs[0] = _args[0]; - - // previous_archive.next_available_leaf_index: the previous archive next available index - // normally this should be equal to the block number (since leaves are 0-indexed and blocks 1-indexed) - // but in yarn-project/merkle-tree/src/new_tree.ts we prefill the tree so that block N is in leaf N - publicInputs[1] = bytes32(previousBlockNumber + 1); - - // end_archive.root: the new archive tree root - publicInputs[2] = _args[1]; - - // end_archive.next_available_leaf_index: the new archive next available index - publicInputs[3] = bytes32(endBlockNumber + 1); - - // previous_block_hash: the 
block hash just preceding this epoch - publicInputs[4] = _args[2]; - - // end_block_hash: the last block hash in the epoch - publicInputs[5] = _args[3]; - - // end_timestamp: the timestamp of the last block in the epoch - publicInputs[6] = _args[4]; - - // end_block_number: last block number in the epoch - publicInputs[7] = bytes32(endBlockNumber); - - // out_hash: root of this epoch's l2 to l1 message tree - publicInputs[8] = _args[5]; - - uint256 feesLength = Constants.AZTEC_MAX_EPOCH_DURATION * 2; - // fees[9 to (9+feesLength-1)]: array of recipient-value pairs - for (uint256 i = 0; i < feesLength; i++) { - publicInputs[9 + i] = _fees[i]; - } - uint256 feesEnd = 9 + feesLength; - - // vk_tree_root - publicInputs[feesEnd] = vkTreeRoot; - - // protocol_contract_tree_root - publicInputs[feesEnd + 1] = protocolContractTreeRoot; - - // prover_id: id of current epoch's prover - publicInputs[feesEnd + 2] = _args[6]; - - // the block proof is recursive, which means it comes with an aggregation object - // this snippet copies it into the public inputs needed for verification - // it also guards against empty _aggregationObject used with mocked proofs - uint256 aggregationLength = _aggregationObject.length / 32; - for (uint256 i = 0; i < Constants.AGGREGATION_OBJECT_LENGTH && i < aggregationLength; i++) { - bytes32 part; - assembly { - part := calldataload(add(_aggregationObject.offset, mul(i, 32))) - } - publicInputs[i + feesEnd + 3] = part; - } - - return publicInputs; + return _hashTypedDataV4(IntRollupLib.computeQuoteHash(_quote)); } - function validateEpochProofRightClaimAtTime( - Timestamp _ts, - EpochProofQuoteLib.SignedEpochProofQuote calldata _quote - ) public view override(IRollup) { - SignatureLib.verify(_quote.signature, _quote.quote.prover, quoteToDigest(_quote.quote)); - + function validateEpochProofRightClaimAtTime(Timestamp _ts, SignedEpochProofQuote calldata _quote) + public + view + override(IRollup) + { Slot currentSlot = getSlotAt(_ts); address currentProposer = getProposerAt(_ts); Epoch epochToProve = getEpochToProve(); - - require( - _quote.quote.validUntilSlot >= currentSlot, - Errors.Rollup__QuoteExpired(currentSlot, _quote.quote.validUntilSlot) - ); - - require( - _quote.quote.basisPointFee <= 10_000, - Errors.Rollup__InvalidBasisPointFee(_quote.quote.basisPointFee) - ); - - require( - currentProposer == address(0) || currentProposer == msg.sender, - Errors.Leonidas__InvalidProposer(currentProposer, msg.sender) - ); - - require( - _quote.quote.epochToProve == epochToProve, - Errors.Rollup__NotClaimingCorrectEpoch(epochToProve, _quote.quote.epochToProve) - ); - - require( - positionInEpoch(currentSlot) < CLAIM_DURATION_IN_L2_SLOTS, - Errors.Rollup__NotInClaimPhase(positionInEpoch(currentSlot), CLAIM_DURATION_IN_L2_SLOTS) - ); - - // if the epoch to prove is not the one that has been claimed, - // then whatever is in the proofClaim is stale - require( - proofClaim.epochToProve != epochToProve || proofClaim.proposerClaimant == address(0), - Errors.Rollup__ProofRightAlreadyClaimed() - ); - - require( - _quote.quote.bondAmount >= PROOF_COMMITMENT_MIN_BOND_AMOUNT_IN_TST, - Errors.Rollup__InsufficientBondAmount( - PROOF_COMMITMENT_MIN_BOND_AMOUNT_IN_TST, _quote.quote.bondAmount - ) - ); - - uint256 availableFundsInEscrow = PROOF_COMMITMENT_ESCROW.deposits(_quote.quote.prover); - require( - _quote.quote.bondAmount <= availableFundsInEscrow, - Errors.Rollup__InsufficientFundsInEscrow(_quote.quote.bondAmount, availableFundsInEscrow) + uint256 posInEpoch = 
positionInEpoch(currentSlot); + bytes32 digest = quoteToDigest(_quote.quote); + + ExtRollupLib.validateEpochProofRightClaimAtTime( + currentSlot, + currentProposer, + epochToProve, + posInEpoch, + _quote, + digest, + rollupStore.proofClaim, + CLAIM_DURATION_IN_L2_SLOTS, + PROOF_COMMITMENT_MIN_BOND_AMOUNT_IN_TST, + PROOF_COMMITMENT_ESCROW ); } @@ -956,31 +700,31 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { * @return bytes32 - The current archive root */ function archive() public view override(IRollup) returns (bytes32) { - return blocks[tips.pendingBlockNumber].archive; + return rollupStore.blocks[rollupStore.tips.pendingBlockNumber].archive; } function getProvenBlockNumber() public view override(IRollup) returns (uint256) { - return tips.provenBlockNumber; + return rollupStore.tips.provenBlockNumber; } function getPendingBlockNumber() public view override(IRollup) returns (uint256) { - return tips.pendingBlockNumber; + return rollupStore.tips.pendingBlockNumber; } function getBlock(uint256 _blockNumber) public view override(IRollup) returns (BlockLog memory) { require( - _blockNumber <= tips.pendingBlockNumber, - Errors.Rollup__InvalidBlockNumber(tips.pendingBlockNumber, _blockNumber) + _blockNumber <= rollupStore.tips.pendingBlockNumber, + Errors.Rollup__InvalidBlockNumber(rollupStore.tips.pendingBlockNumber, _blockNumber) ); - return blocks[_blockNumber]; + return rollupStore.blocks[_blockNumber]; } function getEpochForBlock(uint256 _blockNumber) public view override(IRollup) returns (Epoch) { require( - _blockNumber <= tips.pendingBlockNumber, - Errors.Rollup__InvalidBlockNumber(tips.pendingBlockNumber, _blockNumber) + _blockNumber <= rollupStore.tips.pendingBlockNumber, + Errors.Rollup__InvalidBlockNumber(rollupStore.tips.pendingBlockNumber, _blockNumber) ); - return getEpochAt(getTimestampForSlot(blocks[_blockNumber].slotNumber)); + return getEpochAt(getTimestampForSlot(rollupStore.blocks[_blockNumber].slotNumber)); } /** @@ -993,8 +737,11 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { * @return uint256 - The epoch to prove */ function getEpochToProve() public view override(IRollup) returns (Epoch) { - require(tips.provenBlockNumber != tips.pendingBlockNumber, Errors.Rollup__NoEpochToProve()); - return getEpochForBlock(getProvenBlockNumber() + 1); + require( + rollupStore.tips.provenBlockNumber != rollupStore.tips.pendingBlockNumber, + Errors.Rollup__NoEpochToProve() + ); + return getEpochForBlock(rollupStore.tips.provenBlockNumber + 1); } /** @@ -1005,8 +752,8 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { * @return bytes32 - The archive root of the block */ function archiveAt(uint256 _blockNumber) public view override(IRollup) returns (bytes32) { - if (_blockNumber <= tips.pendingBlockNumber) { - return blocks[_blockNumber].archive; + if (_blockNumber <= rollupStore.tips.pendingBlockNumber) { + return rollupStore.blocks[_blockNumber].archive; } return bytes32(0); } @@ -1017,14 +764,14 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { function canPruneAtTime(Timestamp _ts) public view override(IRollup) returns (bool) { if ( - tips.pendingBlockNumber == tips.provenBlockNumber - || tips.pendingBlockNumber <= assumeProvenThroughBlockNumber + rollupStore.tips.pendingBlockNumber == rollupStore.tips.provenBlockNumber + || rollupStore.tips.pendingBlockNumber <= assumeProvenThroughBlockNumber ) { return false; } Slot currentSlot = getSlotAt(_ts); 
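Because tips, blocks and proofClaim now live inside rollupStore, external integrations read chain state through the new getters instead of public variables. An illustrative consumer (the contract and its logic are examples only, not part of the PR):

```solidity
// SPDX-License-Identifier: Apache-2.0
pragma solidity >=0.8.27;

import {IRollup, ChainTips} from "@aztec/core/interfaces/IRollup.sol";

/// Example consumer of the new getters; getTips() replaces the removed public
/// `tips` variable and getBlock() the internal `blocks` mapping.
contract RollupWatcher {
    IRollup public immutable ROLLUP;

    constructor(IRollup _rollup) {
        ROLLUP = _rollup;
    }

    /// Number of pending blocks that have not been proven yet.
    function unprovenBacklog() external view returns (uint256) {
        ChainTips memory tips = ROLLUP.getTips();
        return tips.pendingBlockNumber - tips.provenBlockNumber;
    }

    /// Archive root at the pending tip, mirroring archive() above.
    function pendingArchive() external view returns (bytes32) {
        ChainTips memory tips = ROLLUP.getTips();
        return ROLLUP.getBlock(tips.pendingBlockNumber).archive;
    }
}
```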
- Epoch oldestPendingEpoch = getEpochForBlock(tips.provenBlockNumber + 1); + Epoch oldestPendingEpoch = getEpochForBlock(rollupStore.tips.provenBlockNumber + 1); Slot startSlotOfPendingEpoch = toSlots(oldestPendingEpoch); // suppose epoch 1 is proven, epoch 2 is pending, epoch 3 is the current epoch. @@ -1035,7 +782,8 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { < startSlotOfPendingEpoch + toSlots(Epoch.wrap(1)) + Slot.wrap(CLAIM_DURATION_IN_L2_SLOTS); bool claimExists = currentSlot < startSlotOfPendingEpoch + toSlots(Epoch.wrap(2)) - && proofClaim.epochToProve == oldestPendingEpoch && proofClaim.proposerClaimant != address(0); + && rollupStore.proofClaim.epochToProve == oldestPendingEpoch + && rollupStore.proofClaim.proposerClaimant != address(0); if (inClaimPhase || claimExists) { // If we are in the claim phase, do not prune @@ -1046,17 +794,17 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { function _prune() internal { // TODO #8656 - delete proofClaim; + delete rollupStore.proofClaim; - uint256 pending = tips.pendingBlockNumber; + uint256 pending = rollupStore.tips.pendingBlockNumber; // @note We are not deleting the blocks, but we are "winding back" the pendingTip to the last block that was proven. // We can do because any new block proposed will overwrite a previous block in the block log, // so no values should "survive". // People must therefore read the chain using the pendingTip as a boundary. - tips.pendingBlockNumber = tips.provenBlockNumber; + rollupStore.tips.pendingBlockNumber = rollupStore.tips.provenBlockNumber; - emit PrunedPending(tips.provenBlockNumber, pending); + emit PrunedPending(rollupStore.tips.provenBlockNumber, pending); } /** @@ -1070,18 +818,31 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { * @param _flags - Flags specific to the execution, whether certain checks should be skipped */ function _validateHeader( - HeaderLib.Header memory _header, - SignatureLib.Signature[] memory _signatures, + Header memory _header, + Signature[] memory _signatures, bytes32 _digest, Timestamp _currentTime, uint256 _manaBaseFee, bytes32 _txEffectsHash, DataStructures.ExecutionFlags memory _flags ) internal view { - uint256 pendingBlockNumber = - canPruneAtTime(_currentTime) ? tips.provenBlockNumber : tips.pendingBlockNumber; - _validateHeaderForSubmissionBase( - _header, _currentTime, _manaBaseFee, _txEffectsHash, pendingBlockNumber, _flags + uint256 pendingBlockNumber = canPruneAtTime(_currentTime) + ? 
rollupStore.tips.provenBlockNumber + : rollupStore.tips.pendingBlockNumber; + + ExtRollupLib.validateHeaderForSubmissionBase( + ValidateHeaderArgs({ + header: _header, + currentTime: _currentTime, + manaBaseFee: _manaBaseFee, + txsEffectsHash: _txEffectsHash, + pendingBlockNumber: pendingBlockNumber, + flags: _flags, + version: VERSION, + feeJuicePortal: FEE_JUICE_PORTAL, + getTimestampForSlot: this.getTimestampForSlot + }), + rollupStore.blocks ); _validateHeaderForSubmissionSequencerSelection( Slot.wrap(_header.globalVariables.slotNumber), _signatures, _digest, _currentTime, _flags @@ -1106,7 +867,7 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { */ function _validateHeaderForSubmissionSequencerSelection( Slot _slot, - SignatureLib.Signature[] memory _signatures, + Signature[] memory _signatures, bytes32 _digest, Timestamp _currentTime, DataStructures.ExecutionFlags memory _flags @@ -1127,100 +888,21 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { _validateLeonidas(_slot, _signatures, _digest, _flags); } - /** - * @notice Validate a header for submission to the pending chain (base checks) - * Base checks here being the checks that we wish to do regardless of the sequencer - * selection mechanism. - * - * Each of the following validation checks must pass, otherwise an error is thrown and we revert. - * - The chain ID MUST match the current chain ID - * - The version MUST match the current version - * - The block id MUST be the next block in the chain - * - The last archive root in the header MUST match the current archive - * - The slot MUST be larger than the slot of the previous block (ensures single block per slot) - * - The timestamp MUST be equal to GENESIS_TIME + slot * SLOT_DURATION - * - The `txsEffectsHash` of the header must match the computed `_txsEffectsHash` - * - This can be relaxed to happen at the time of `submitProof` instead - * - * @param _header - The header to validate - */ - function _validateHeaderForSubmissionBase( - HeaderLib.Header memory _header, - Timestamp _currentTime, - uint256 _manaBaseFee, - bytes32 _txsEffectsHash, - uint256 _pendingBlockNumber, - DataStructures.ExecutionFlags memory _flags - ) internal view { - require( - block.chainid == _header.globalVariables.chainId, - Errors.Rollup__InvalidChainId(block.chainid, _header.globalVariables.chainId) - ); - - require( - _header.globalVariables.version == VERSION, - Errors.Rollup__InvalidVersion(VERSION, _header.globalVariables.version) - ); - - require( - _header.globalVariables.blockNumber == _pendingBlockNumber + 1, - Errors.Rollup__InvalidBlockNumber( - _pendingBlockNumber + 1, _header.globalVariables.blockNumber - ) - ); - - bytes32 tipArchive = blocks[_pendingBlockNumber].archive; - require( - tipArchive == _header.lastArchive.root, - Errors.Rollup__InvalidArchive(tipArchive, _header.lastArchive.root) - ); - - Slot slot = Slot.wrap(_header.globalVariables.slotNumber); - Slot lastSlot = blocks[_pendingBlockNumber].slotNumber; - require(slot > lastSlot, Errors.Rollup__SlotAlreadyInChain(lastSlot, slot)); - - Timestamp timestamp = getTimestampForSlot(slot); - require( - Timestamp.wrap(_header.globalVariables.timestamp) == timestamp, - Errors.Rollup__InvalidTimestamp(timestamp, Timestamp.wrap(_header.globalVariables.timestamp)) - ); - - // @note If you are hitting this error, it is likely because the chain you use have a blocktime that differs - // from the value that we have in the constants. 
- // When you are encountering this, it will likely be as the sequencer expects to be able to include - // an Aztec block in the "next" ethereum block based on a timestamp that is 12 seconds in the future - // from the last block. However, if the actual will only be 1 second in the future, you will end up - // expecting this value to be in the future. - require(timestamp <= _currentTime, Errors.Rollup__TimestampInFuture(_currentTime, timestamp)); - - // Check if the data is available - require( - _flags.ignoreDA || _header.contentCommitment.txsEffectsHash == _txsEffectsHash, - Errors.Rollup__UnavailableTxs(_header.contentCommitment.txsEffectsHash) - ); - - // If not canonical rollup, require that the fees are zero - if (address(this) != FEE_JUICE_PORTAL.canonicalRollup()) { - require(_header.globalVariables.gasFees.feePerDaGas == 0, Errors.Rollup__NonZeroDaFee()); - require(_header.globalVariables.gasFees.feePerL2Gas == 0, Errors.Rollup__NonZeroL2Fee()); - } else { - require(_header.globalVariables.gasFees.feePerDaGas == 0, Errors.Rollup__NonZeroDaFee()); - require( - _header.globalVariables.gasFees.feePerL2Gas == _manaBaseFee, - Errors.Rollup__InvalidManaBaseFee(_manaBaseFee, _header.globalVariables.gasFees.feePerL2Gas) - ); - } - } - function _fakeBlockNumberAsProven(uint256 _blockNumber) private { - if (_blockNumber > tips.provenBlockNumber && _blockNumber <= tips.pendingBlockNumber) { - tips.provenBlockNumber = _blockNumber; + if ( + _blockNumber > rollupStore.tips.provenBlockNumber + && _blockNumber <= rollupStore.tips.pendingBlockNumber + ) { + rollupStore.tips.provenBlockNumber = _blockNumber; // If this results on a new epoch, create a fake claim for it // Otherwise nextEpochToProve will report an old epoch Epoch epoch = getEpochForBlock(_blockNumber); - if (Epoch.unwrap(epoch) == 0 || Epoch.unwrap(epoch) > Epoch.unwrap(proofClaim.epochToProve)) { - proofClaim = DataStructures.EpochProofClaim({ + if ( + Epoch.unwrap(epoch) == 0 + || Epoch.unwrap(epoch) > Epoch.unwrap(rollupStore.proofClaim.epochToProve) + ) { + rollupStore.proofClaim = DataStructures.EpochProofClaim({ epochToProve: epoch, basisPointFee: 0, bondAmount: 0, diff --git a/l1-contracts/src/core/interfaces/ILeonidas.sol b/l1-contracts/src/core/interfaces/ILeonidas.sol index ece101d7277..9624ead5043 100644 --- a/l1-contracts/src/core/interfaces/ILeonidas.sol +++ b/l1-contracts/src/core/interfaces/ILeonidas.sol @@ -4,11 +4,26 @@ pragma solidity >=0.8.27; import {Timestamp, Slot, Epoch} from "@aztec/core/libraries/TimeMath.sol"; -interface ILeonidas { - // Changing depending on sybil mechanism and slashing enforcement - function addValidator(address _validator) external; - function removeValidator(address _validator) external; +/** + * @notice The data structure for an epoch + * @param committee - The attesters for the epoch + * @param sampleSeed - The seed used to sample the attesters of the epoch + * @param nextSeed - The seed used to influence the NEXT epoch + */ +struct EpochData { + address[] committee; + uint256 sampleSeed; + uint256 nextSeed; +} +struct LeonidasStorage { + // A mapping to snapshots of the validator set + mapping(Epoch => EpochData) epochs; + // The last stored randao value, same value as `seed` in the last inserted epoch + uint256 lastSeed; +} + +interface ILeonidas { // Likely changing to optimize in Pleistarchus function setupEpoch() external; function getCurrentProposer() external view returns (address); @@ -17,9 +32,6 @@ interface ILeonidas { // Stable function getCurrentEpoch() external view 
returns (Epoch); function getCurrentSlot() external view returns (Slot); - function isValidator(address _validator) external view returns (bool); - function getValidatorCount() external view returns (uint256); - function getValidatorAt(uint256 _index) external view returns (address); // Consider removing below this point function getTimestampForSlot(Slot _slotNumber) external view returns (Timestamp); @@ -28,7 +40,7 @@ interface ILeonidas { // Get the current epoch committee function getCurrentEpochCommittee() external view returns (address[] memory); function getEpochCommittee(Epoch _epoch) external view returns (address[] memory); - function getValidators() external view returns (address[] memory); + function getAttesters() external view returns (address[] memory); function getEpochAt(Timestamp _ts) external view returns (Epoch); function getSlotAt(Timestamp _ts) external view returns (Slot); diff --git a/l1-contracts/src/core/interfaces/IRollup.sol b/l1-contracts/src/core/interfaces/IRollup.sol index f52266dfe8a..fb22590932e 100644 --- a/l1-contracts/src/core/interfaces/IRollup.sol +++ b/l1-contracts/src/core/interfaces/IRollup.sol @@ -2,13 +2,19 @@ // Copyright 2024 Aztec Labs. pragma solidity >=0.8.27; +import {IVerifier} from "@aztec/core/interfaces/IVerifier.sol"; import {IInbox} from "@aztec/core/interfaces/messagebridge/IInbox.sol"; import {IOutbox} from "@aztec/core/interfaces/messagebridge/IOutbox.sol"; -import {SignatureLib} from "@aztec/core/libraries/crypto/SignatureLib.sol"; +import {Signature} from "@aztec/core/libraries/crypto/SignatureLib.sol"; import {DataStructures} from "@aztec/core/libraries/DataStructures.sol"; -import {EpochProofQuoteLib} from "@aztec/core/libraries/EpochProofQuoteLib.sol"; -import {ManaBaseFeeComponents} from "@aztec/core/libraries/FeeMath.sol"; -import {ProposeArgs} from "@aztec/core/libraries/ProposeLib.sol"; +import { + EpochProofQuote, + SignedEpochProofQuote +} from "@aztec/core/libraries/RollupLibs/EpochProofQuoteLib.sol"; +import { + FeeHeader, L1FeeData, ManaBaseFeeComponents +} from "@aztec/core/libraries/RollupLibs/FeeMath.sol"; +import {ProposeArgs} from "@aztec/core/libraries/RollupLibs/ProposeLib.sol"; import {Timestamp, Slot, Epoch} from "@aztec/core/libraries/TimeMath.sol"; struct SubmitEpochRootProofArgs { @@ -19,14 +25,6 @@ struct SubmitEpochRootProofArgs { bytes proof; } -struct FeeHeader { - uint256 excessMana; - uint256 feeAssetPriceNumerator; - uint256 manaUsed; - uint256 provingCostPerManaNumerator; - uint256 congestionCost; -} - struct BlockLog { FeeHeader feeHeader; bytes32 archive; @@ -34,9 +32,32 @@ struct BlockLog { Slot slotNumber; } -struct L1FeeData { - uint256 baseFee; - uint256 blobFee; +struct ChainTips { + uint256 pendingBlockNumber; + uint256 provenBlockNumber; +} + +struct L1GasOracleValues { + L1FeeData pre; + L1FeeData post; + Slot slotOfChange; +} + +struct RollupStore { + mapping(uint256 blockNumber => BlockLog log) blocks; + ChainTips tips; + bytes32 vkTreeRoot; + bytes32 protocolContractTreeRoot; + L1GasOracleValues l1GasOracleValues; + DataStructures.EpochProofClaim proofClaim; + IVerifier epochProofVerifier; +} + +struct CheatDepositArgs { + address attester; + address proposer; + address withdrawer; + uint256 amount; } interface ITestRollup { @@ -44,6 +65,7 @@ interface ITestRollup { function setVkTreeRoot(bytes32 _vkTreeRoot) external; function setProtocolContractTreeRoot(bytes32 _protocolContractTreeRoot) external; function setAssumeProvenThroughBlockNumber(uint256 _blockNumber) external; + function 
cheat__InitialiseValidatorSet(CheatDepositArgs[] memory _args) external; function getManaBaseFeeComponentsAt(Timestamp _timestamp, bool _inFeeAsset) external view @@ -65,19 +87,16 @@ interface IRollup { function prune() external; function updateL1GasFeeOracle() external; - function claimEpochProofRight(EpochProofQuoteLib.SignedEpochProofQuote calldata _quote) external; + function claimEpochProofRight(SignedEpochProofQuote calldata _quote) external; - function propose( - ProposeArgs calldata _args, - SignatureLib.Signature[] memory _signatures, - bytes calldata _body - ) external; + function propose(ProposeArgs calldata _args, Signature[] memory _signatures, bytes calldata _body) + external; function proposeAndClaim( ProposeArgs calldata _args, - SignatureLib.Signature[] memory _signatures, + Signature[] memory _signatures, bytes calldata _body, - EpochProofQuoteLib.SignedEpochProofQuote calldata _quote + SignedEpochProofQuote calldata _quote ) external; function submitEpochRootProof(SubmitEpochRootProofArgs calldata _args) external; @@ -86,7 +105,7 @@ interface IRollup { function validateHeader( bytes calldata _header, - SignatureLib.Signature[] memory _signatures, + Signature[] memory _signatures, bytes32 _digest, Timestamp _currentTime, bytes32 _txsEffecstHash, @@ -102,6 +121,9 @@ interface IRollup { // solhint-disable-next-line func-name-mixedcase function L1_BLOCK_AT_GENESIS() external view returns (uint256); + function getProofClaim() external view returns (DataStructures.EpochProofClaim memory); + function getTips() external view returns (ChainTips memory); + function status(uint256 _myHeaderBlockNumber) external view @@ -114,10 +136,7 @@ interface IRollup { Epoch provenEpochNumber ); - function quoteToDigest(EpochProofQuoteLib.EpochProofQuote memory _quote) - external - view - returns (bytes32); + function quoteToDigest(EpochProofQuote memory _quote) external view returns (bytes32); function getBlock(uint256 _blockNumber) external view returns (BlockLog memory); function getFeeAssetPrice() external view returns (uint256); function getManaBaseFeeAt(Timestamp _timestamp, bool _inFeeAsset) external view returns (uint256); @@ -131,10 +150,9 @@ interface IRollup { function getPendingBlockNumber() external view returns (uint256); function getEpochToProve() external view returns (Epoch); function getClaimableEpoch() external view returns (Epoch); - function validateEpochProofRightClaimAtTime( - Timestamp _ts, - EpochProofQuoteLib.SignedEpochProofQuote calldata _quote - ) external view; + function validateEpochProofRightClaimAtTime(Timestamp _ts, SignedEpochProofQuote calldata _quote) + external + view; function getEpochForBlock(uint256 _blockNumber) external view returns (Epoch); function getEpochProofPublicInputs( uint256 _epochSize, diff --git a/l1-contracts/src/core/interfaces/IStaking.sol b/l1-contracts/src/core/interfaces/IStaking.sol index 12d1cce4ab9..e2d469dd8a3 100644 --- a/l1-contracts/src/core/interfaces/IStaking.sol +++ b/l1-contracts/src/core/interfaces/IStaking.sol @@ -3,6 +3,7 @@ pragma solidity >=0.8.27; import {Timestamp} from "@aztec/core/libraries/TimeMath.sol"; +import {EnumerableSet} from "@oz/utils/structs/EnumerableSet.sol"; // None -> Does not exist in our setup // Validating -> Participating as validator @@ -33,6 +34,12 @@ struct Exit { address recipient; } +struct StakingStorage { + EnumerableSet.AddressSet attesters; + mapping(address attester => ValidatorInfo) info; + mapping(address attester => Exit) exits; +} + interface IStaking { event Deposit( address 
indexed attester, address indexed proposer, address indexed withdrawer, uint256 amount diff --git a/l1-contracts/src/core/libraries/EpochProofQuoteLib.sol b/l1-contracts/src/core/libraries/EpochProofQuoteLib.sol deleted file mode 100644 index be838b7a7ad..00000000000 --- a/l1-contracts/src/core/libraries/EpochProofQuoteLib.sol +++ /dev/null @@ -1,51 +0,0 @@ -// SPDX-License-Identifier: Apache-2.0 -// Copyright 2024 Aztec Labs. -pragma solidity >=0.8.27; - -import {SignatureLib} from "@aztec/core/libraries/crypto/SignatureLib.sol"; -import {Slot, Epoch} from "@aztec/core/libraries/TimeMath.sol"; - -library EpochProofQuoteLib { - /** - * @notice Struct encompassing an epoch proof quote - * @param epochToProve - The epoch number to prove - * @param validUntilSlot - The deadline of the quote, denoted in L2 slots - * @param bondAmount - The size of the bond - * @param prover - The address of the prover - * @param basisPointFee - The fee measured in basis points - */ - struct EpochProofQuote { - Epoch epochToProve; - Slot validUntilSlot; - uint256 bondAmount; - address prover; - uint32 basisPointFee; - } - - /** - * @notice A signed quote for the epoch proof - * @param quote - The Epoch Proof Quote - * @param signature - A signature on the quote - */ - struct SignedEpochProofQuote { - EpochProofQuote quote; - SignatureLib.Signature signature; - } - - bytes32 public constant EPOCH_PROOF_QUOTE_TYPEHASH = keccak256( - "EpochProofQuote(uint256 epochToProve,uint256 validUntilSlot,uint256 bondAmount,address prover,uint32 basisPointFee)" - ); - - function hash(EpochProofQuote memory _quote) internal pure returns (bytes32) { - return keccak256( - abi.encode( - EPOCH_PROOF_QUOTE_TYPEHASH, - _quote.epochToProve, - _quote.validUntilSlot, - _quote.bondAmount, - _quote.prover, - _quote.basisPointFee - ) - ); - } -} diff --git a/l1-contracts/src/core/libraries/Errors.sol b/l1-contracts/src/core/libraries/Errors.sol index 32d6e3a65ba..3b97bb534b5 100644 --- a/l1-contracts/src/core/libraries/Errors.sol +++ b/l1-contracts/src/core/libraries/Errors.sol @@ -98,6 +98,7 @@ library Errors { // Sequencer Selection (Leonidas) error Leonidas__EpochNotSetup(); // 0xcf4e597e error Leonidas__InvalidProposer(address expected, address actual); // 0xd02d278e + error Leonidas__InvalidDeposit(address attester, address proposer); // 0x1ef9a54b error Leonidas__InsufficientAttestations(uint256 minimumNeeded, uint256 provided); // 0xbf1ca4cb error Leonidas__InsufficientAttestationsProvided(uint256 minimumNeeded, uint256 provided); // 0xb3a697c2 diff --git a/l1-contracts/src/core/libraries/LeonidasLib/LeonidasLib.sol b/l1-contracts/src/core/libraries/LeonidasLib/LeonidasLib.sol new file mode 100644 index 00000000000..56b465bbbc6 --- /dev/null +++ b/l1-contracts/src/core/libraries/LeonidasLib/LeonidasLib.sol @@ -0,0 +1,268 @@ +// SPDX-License-Identifier: Apache-2.0 +// Copyright 2024 Aztec Labs. 
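The Errors.sol additions above pair with Solidity's `require(condition, CustomError(...))` form, which these libraries use throughout in place of revert strings. A minimal, self-contained sketch of the pattern (the contract and check below are hypothetical, not part of this PR; custom errors in `require` need solc >= 0.8.26, with legacy-codegen support from 0.8.27, matching the pragma used here):

```solidity
// SPDX-License-Identifier: Apache-2.0
pragma solidity >=0.8.27;

// Hypothetical error mirroring the style of the Errors.sol entries.
error Leonidas__InvalidDeposit(address attester, address proposer);

contract RequirePatternSketch {
  // Reverts with a typed, ABI-encoded error instead of a string, which is
  // cheaper to deploy and easier to decode off-chain.
  function checkDeposit(address _attester, address _proposer) external pure {
    require(
      _attester != address(0) && _proposer != address(0),
      Leonidas__InvalidDeposit(_attester, _proposer)
    );
  }
}
```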
+pragma solidity >=0.8.27; + +import {EpochData, LeonidasStorage} from "@aztec/core/interfaces/ILeonidas.sol"; +import {StakingStorage} from "@aztec/core/interfaces/IStaking.sol"; +import {SampleLib} from "@aztec/core/libraries/crypto/SampleLib.sol"; +import {SignatureLib, Signature} from "@aztec/core/libraries/crypto/SignatureLib.sol"; +import {DataStructures} from "@aztec/core/libraries/DataStructures.sol"; +import {Errors} from "@aztec/core/libraries/Errors.sol"; +import {Slot, Epoch} from "@aztec/core/libraries/TimeMath.sol"; +import {MessageHashUtils} from "@oz/utils/cryptography/MessageHashUtils.sol"; +import {EnumerableSet} from "@oz/utils/structs/EnumerableSet.sol"; + +library LeonidasLib { + using EnumerableSet for EnumerableSet.AddressSet; + using MessageHashUtils for bytes32; + using SignatureLib for Signature; + + /** + * @notice Samples a validator set for a specific epoch + * + * @dev Only used internally, should never be called for anything but the "next" epoch + * Allowing us to always use `lastSeed`. + * + * @return The validators for the given epoch + */ + function sampleValidators( + StakingStorage storage _stakingStore, + uint256 _seed, + uint256 _targetCommitteeSize + ) external view returns (address[] memory) { + return _sampleValidators(_stakingStore, _seed, _targetCommitteeSize); + } + + function getProposerAt( + LeonidasStorage storage _leonidasStore, + StakingStorage storage _stakingStore, + Slot _slot, + Epoch _epochNumber, + uint256 _targetCommitteeSize + ) external view returns (address) { + return _getProposerAt(_leonidasStore, _stakingStore, _slot, _epochNumber, _targetCommitteeSize); + } + + function getCommitteeAt( + LeonidasStorage storage _leonidasStore, + StakingStorage storage _stakingStore, + Epoch _epochNumber, + uint256 _targetCommitteeSize + ) external view returns (address[] memory) { + return _getCommitteeAt(_leonidasStore, _stakingStore, _epochNumber, _targetCommitteeSize); + } + + /** + * @notice Propose a pending block from the point-of-view of sequencer selection. Will: + * - Set up the epoch if needed (if epoch committee is empty skips the rest) + * - Validate that the proposer is the proposer of the slot + * - Validate that the signatures for attestations are indeed from the validator set + * - Validate that the number of valid attestations is sufficient + * + * @dev Cases where errors are thrown: + * - If the epoch is not set up + * - If the proposer is not the real proposer AND the proposer is not open + * - If the number of valid attestations is insufficient + * + * @param _slot - The slot of the block + * @param _signatures - The signatures of the committee members + * @param _digest - The digest of the block + */ + function validateLeonidas( + LeonidasStorage storage _leonidasStore, + StakingStorage storage _stakingStore, + Slot _slot, + Epoch _epochNumber, + Signature[] memory _signatures, + bytes32 _digest, + DataStructures.ExecutionFlags memory _flags, + uint256 _targetCommitteeSize + ) external view { + // Same logic as in getProposerAt + // Done here to avoid computing the committee twice + address[] memory committee = + _getCommitteeAt(_leonidasStore, _stakingStore, _epochNumber, _targetCommitteeSize); + address attester = committee.length == 0 + ? address(0) + : committee[computeProposerIndex( + _epochNumber, _slot, getSampleSeed(_leonidasStore, _epochNumber), committee.length + )]; + address proposer = _stakingStore.info[attester].proposer; + + // @todo Consider getting rid of this option.
+ // If the proposer is open, we allow anyone to propose without needing any signatures + if (proposer == address(0)) { + return; + } + + // @todo We should allow to provide a signature instead of needing the proposer to broadcast. + require(proposer == msg.sender, Errors.Leonidas__InvalidProposer(proposer, msg.sender)); + + if (_flags.ignoreSignatures) { + return; + } + + uint256 needed = committee.length * 2 / 3 + 1; + require( + _signatures.length >= needed, + Errors.Leonidas__InsufficientAttestationsProvided(needed, _signatures.length) + ); + + // Validate the attestations + uint256 validAttestations = 0; + + bytes32 digest = _digest.toEthSignedMessageHash(); + for (uint256 i = 0; i < _signatures.length; i++) { + // To avoid stack too deep errors + Signature memory signature = _signatures[i]; + if (signature.isEmpty) { + continue; + } + + // The verification will throw if invalid + signature.verify(committee[i], digest); + validAttestations++; + } + + require( + validAttestations >= needed, + Errors.Leonidas__InsufficientAttestations(needed, validAttestations) + ); + } + + /** + * @notice Get the sample seed for an epoch + * + * @dev This should behave as walking past the line, but it does not currently do that. + * If there are entire skips, e.g., 1, 2, 5 and we then go back and try executing + * for 4 we will get an invalid value because we will read lastSeed which is from 5. + * + * @dev The `_epoch` will never be 0 nor in the future + * + * @dev The return value will be equal to keccak256(n, block.prevrandao) for n being the last epoch + * setup. + * + * @return The sample seed for the epoch + */ + function getSampleSeed(LeonidasStorage storage _leonidasStore, Epoch _epoch) + internal + view + returns (uint256) + { + if (Epoch.unwrap(_epoch) == 0) { + return type(uint256).max; + } + uint256 sampleSeed = _leonidasStore.epochs[_epoch].sampleSeed; + if (sampleSeed != 0) { + return sampleSeed; + } + + sampleSeed = _leonidasStore.epochs[_epoch - Epoch.wrap(1)].nextSeed; + if (sampleSeed != 0) { + return sampleSeed; + } + + return _leonidasStore.lastSeed; + } + + /** + * @notice Samples a validator set for a specific epoch + * + * @dev Only used internally, should never be called for anything but the "next" epoch + * Allowing us to always use `lastSeed`. + * + * @return The validators for the given epoch + */ + function _sampleValidators( + StakingStorage storage _stakingStore, + uint256 _seed, + uint256 _targetCommitteeSize + ) private view returns (address[] memory) { + uint256 validatorSetSize = _stakingStore.attesters.length(); + if (validatorSetSize == 0) { + return new address[](0); + } + + // If we have less validators than the target committee size, we just return the full set + if (validatorSetSize <= _targetCommitteeSize) { + return _stakingStore.attesters.values(); + } + + uint256[] memory indices = + SampleLib.computeCommitteeClever(_targetCommitteeSize, validatorSetSize, _seed); + + address[] memory committee = new address[](_targetCommitteeSize); + for (uint256 i = 0; i < _targetCommitteeSize; i++) { + committee[i] = _stakingStore.attesters.at(indices[i]); + } + return committee; + } + + function _getProposerAt( + LeonidasStorage storage _leonidasStore, + StakingStorage storage _stakingStore, + Slot _slot, + Epoch _epochNumber, + uint256 _targetCommitteeSize + ) private view returns (address) { + // @note this is deliberately "bad" for the simple reason of code reduction. 
+ // it does not need to actually return the full committee and then draw from it + // it can just return the proposer directly, but then we duplicate the code + // which we just don't have room for right now... + address[] memory committee = + _getCommitteeAt(_leonidasStore, _stakingStore, _epochNumber, _targetCommitteeSize); + if (committee.length == 0) { + return address(0); + } + + address attester = committee[computeProposerIndex( + _epochNumber, _slot, getSampleSeed(_leonidasStore, _epochNumber), committee.length + )]; + + return _stakingStore.info[attester].proposer; + } + + function _getCommitteeAt( + LeonidasStorage storage _leonidasStore, + StakingStorage storage _stakingStore, + Epoch _epochNumber, + uint256 _targetCommitteeSize + ) private view returns (address[] memory) { + EpochData storage epoch = _leonidasStore.epochs[_epochNumber]; + + if (epoch.sampleSeed != 0) { + uint256 committeeSize = epoch.committee.length; + if (committeeSize == 0) { + return new address[](0); + } + return epoch.committee; + } + + // Allow anyone if there is no validator set + if (_stakingStore.attesters.length() == 0) { + return new address[](0); + } + + // Emulate a sampling of the validators + uint256 sampleSeed = getSampleSeed(_leonidasStore, _epochNumber); + return _sampleValidators(_stakingStore, sampleSeed, _targetCommitteeSize); + } + + /** + * @notice Computes the index of the committee member that acts as proposer for a given slot + * + * @param _epoch - The epoch to compute the proposer index for + * @param _slot - The slot to compute the proposer index for + * @param _seed - The seed to use for the computation + * @param _size - The size of the committee + * + * @return The index of the proposer + */ + function computeProposerIndex(Epoch _epoch, Slot _slot, uint256 _seed, uint256 _size) + private + pure + returns (uint256) + { + return uint256(keccak256(abi.encode(_epoch, _slot, _seed))) % _size; + } +} diff --git a/l1-contracts/src/core/libraries/RollupLibs/EpochProofLib.sol b/l1-contracts/src/core/libraries/RollupLibs/EpochProofLib.sol new file mode 100644 index 00000000000..6920c00cb4e --- /dev/null +++ b/l1-contracts/src/core/libraries/RollupLibs/EpochProofLib.sol @@ -0,0 +1,276 @@ +// SPDX-License-Identifier: Apache-2.0 +// Copyright 2024 Aztec Labs. 
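The proposer selection in LeonidasLib above boils down to a single deterministic expression, so every node can derive the same proposer offline from public values. A standalone sketch with `Epoch`/`Slot` flattened to `uint256` for illustration (the typed wrappers from TimeMath.sol are elided; the value types encode to the same 96 bytes under `abi.encode`):

```solidity
// SPDX-License-Identifier: Apache-2.0
pragma solidity >=0.8.27;

library ProposerIndexSketch {
  // Same expression as LeonidasLib.computeProposerIndex, minus the
  // user-defined value types: hash the (epoch, slot, seed) tuple and
  // reduce it modulo the committee size.
  function computeProposerIndex(uint256 _epoch, uint256 _slot, uint256 _seed, uint256 _size)
    internal
    pure
    returns (uint256)
  {
    return uint256(keccak256(abi.encode(_epoch, _slot, _seed))) % _size;
  }
}
```

Because the seed is fixed per epoch via getSampleSeed, the slot-to-proposer mapping is stable for the whole epoch without any extra coordination between nodes.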
+pragma solidity >=0.8.27; + +import {IFeeJuicePortal} from "@aztec/core/interfaces/IFeeJuicePortal.sol"; +import {IProofCommitmentEscrow} from "@aztec/core/interfaces/IProofCommitmentEscrow.sol"; +import { + RollupStore, SubmitEpochRootProofArgs, FeeHeader +} from "@aztec/core/interfaces/IRollup.sol"; +import {Constants} from "@aztec/core/libraries/ConstantsGen.sol"; +import {Errors} from "@aztec/core/libraries/Errors.sol"; +import {Epoch} from "@aztec/core/libraries/TimeMath.sol"; +import {IRewardDistributor} from "@aztec/governance/interfaces/IRewardDistributor.sol"; +import {IERC20} from "@oz/token/ERC20/IERC20.sol"; +import {SafeERC20} from "@oz/token/ERC20/utils/SafeERC20.sol"; +import {Math} from "@oz/utils/math/Math.sol"; + +struct SubmitEpochRootProofAddresses { + IProofCommitmentEscrow proofCommitmentEscrow; + IFeeJuicePortal feeJuicePortal; + IRewardDistributor rewardDistributor; + IERC20 asset; + address cuauhxicalli; +} + +struct SubmitEpochRootProofInterimValues { + uint256 previousBlockNumber; + uint256 endBlockNumber; + Epoch epochToProve; + Epoch startEpoch; + bool isFeeCanonical; + bool isRewardDistributorCanonical; + uint256 totalProverReward; + uint256 totalBurn; +} + +library EpochProofLib { + using SafeERC20 for IERC20; + + function submitEpochRootProof( + RollupStore storage _rollupStore, + SubmitEpochRootProofArgs calldata _args, + SubmitEpochRootProofInterimValues memory _interimValues, + SubmitEpochRootProofAddresses memory _addresses + ) internal returns (uint256) { + // Ensure that the proof is not across epochs + require( + _interimValues.startEpoch == _interimValues.epochToProve, + Errors.Rollup__InvalidEpoch(_interimValues.startEpoch, _interimValues.epochToProve) + ); + + bytes32[] memory publicInputs = getEpochProofPublicInputs( + _rollupStore, _args.epochSize, _args.args, _args.fees, _args.aggregationObject + ); + + require( + _rollupStore.epochProofVerifier.verify(_args.proof, publicInputs), + Errors.Rollup__InvalidProof() + ); + + if (_rollupStore.proofClaim.epochToProve == _interimValues.epochToProve) { + _addresses.proofCommitmentEscrow.unstakeBond( + _rollupStore.proofClaim.bondProvider, _rollupStore.proofClaim.bondAmount + ); + } + + _rollupStore.tips.provenBlockNumber = _interimValues.endBlockNumber; + + // @note Only if the rollup is the canonical will it be able to meaningfully claim fees + // Otherwise, the fees are unbacked #7938. + _interimValues.isFeeCanonical = address(this) == _addresses.feeJuicePortal.canonicalRollup(); + _interimValues.isRewardDistributorCanonical = + address(this) == _addresses.rewardDistributor.canonicalRollup(); + + _interimValues.totalProverReward = 0; + _interimValues.totalBurn = 0; + + if (_interimValues.isFeeCanonical || _interimValues.isRewardDistributorCanonical) { + for (uint256 i = 0; i < _args.epochSize; i++) { + address coinbase = address(uint160(uint256(publicInputs[9 + i * 2]))); + uint256 reward = 0; + uint256 toProver = 0; + uint256 burn = 0; + + if (_interimValues.isFeeCanonical) { + uint256 fees = uint256(publicInputs[10 + i * 2]); + if (fees > 0) { + // This is insanely expensive, and will be fixed as part of the general storage cost reduction. + // See #9826. 
+ FeeHeader storage feeHeader = + _rollupStore.blocks[_interimValues.previousBlockNumber + 1 + i].feeHeader; + burn += feeHeader.congestionCost * feeHeader.manaUsed; + + reward += (fees - burn); + _addresses.feeJuicePortal.distributeFees(address(this), fees); + } + } + + if (_interimValues.isRewardDistributorCanonical) { + reward += _addresses.rewardDistributor.claim(address(this)); + } + + if (coinbase == address(0)) { + toProver = reward; + } else { + // @note We are getting values from the `proofClaim`, which are not cleared. + // So if someone is posting the proof before a new claim is made, + // the reward will be calculated based on the previous values. + toProver = Math.mulDiv(reward, _rollupStore.proofClaim.basisPointFee, 10_000); + } + + uint256 toCoinbase = reward - toProver; + if (toCoinbase > 0) { + _addresses.asset.safeTransfer(coinbase, toCoinbase); + } + + _interimValues.totalProverReward += toProver; + _interimValues.totalBurn += burn; + } + + if (_interimValues.totalProverReward > 0) { + // If there is a bond provider, give them the reward; otherwise give it to the submitter. + address proofRewardRecipient = _rollupStore.proofClaim.bondProvider == address(0) + ? msg.sender + : _rollupStore.proofClaim.bondProvider; + _addresses.asset.safeTransfer(proofRewardRecipient, _interimValues.totalProverReward); + } + + if (_interimValues.totalBurn > 0) { + _addresses.asset.safeTransfer(_addresses.cuauhxicalli, _interimValues.totalBurn); + } + } + + return _interimValues.endBlockNumber; + } + + /** + * @notice Returns the computed public inputs for the given epoch proof. + * + * @dev Useful for debugging and testing. Allows the submitter to compare their + * own public inputs used for generating the proof vs the ones assembled + * by this contract when verifying it.
+ * + * @param _epochSize - The size of the epoch (to be promoted to a constant) + * @param _args - Array of public inputs to the proof (previousArchive, endArchive, previousBlockHash, endBlockHash, endTimestamp, outHash, proverId) + * @param _fees - Array of recipient-value pairs with fees to be distributed for the epoch + * @param _aggregationObject - The aggregation object for the proof + */ + function getEpochProofPublicInputs( + RollupStore storage _rollupStore, + uint256 _epochSize, + bytes32[7] calldata _args, + bytes32[] calldata _fees, + bytes calldata _aggregationObject + ) internal view returns (bytes32[] memory) { + uint256 previousBlockNumber = _rollupStore.tips.provenBlockNumber; + uint256 endBlockNumber = previousBlockNumber + _epochSize; + + // Args are defined as an array because Solidity complains with "stack too deep" otherwise + // 0 bytes32 _previousArchive, + // 1 bytes32 _endArchive, + // 2 bytes32 _previousBlockHash, + // 3 bytes32 _endBlockHash, + // 4 bytes32 _endTimestamp, + // 5 bytes32 _outHash, + // 6 bytes32 _proverId, + + // TODO(#7373): Public inputs are not fully verified + + { + // We do it this way to provide better error messages than passing along the storage values + bytes32 expectedPreviousArchive = _rollupStore.blocks[previousBlockNumber].archive; + require( + expectedPreviousArchive == _args[0], + Errors.Rollup__InvalidPreviousArchive(expectedPreviousArchive, _args[0]) + ); + + bytes32 expectedEndArchive = _rollupStore.blocks[endBlockNumber].archive; + require( + expectedEndArchive == _args[1], Errors.Rollup__InvalidArchive(expectedEndArchive, _args[1]) + ); + + bytes32 expectedPreviousBlockHash = _rollupStore.blocks[previousBlockNumber].blockHash; + // TODO: Remove 0 check once we inject the proper genesis block hash + require( + expectedPreviousBlockHash == 0 || expectedPreviousBlockHash == _args[2], + Errors.Rollup__InvalidPreviousBlockHash(expectedPreviousBlockHash, _args[2]) + ); + + bytes32 expectedEndBlockHash = _rollupStore.blocks[endBlockNumber].blockHash; + require( + expectedEndBlockHash == _args[3], + Errors.Rollup__InvalidBlockHash(expectedEndBlockHash, _args[3]) + ); + } + + bytes32[] memory publicInputs = new bytes32[]( + Constants.ROOT_ROLLUP_PUBLIC_INPUTS_LENGTH + Constants.AGGREGATION_OBJECT_LENGTH + ); + + // Structure of the root rollup public inputs we need to reassemble: + // + // struct RootRollupPublicInputs { + // previous_archive: AppendOnlyTreeSnapshot, + // end_archive: AppendOnlyTreeSnapshot, + // previous_block_hash: Field, + // end_block_hash: Field, + // end_timestamp: u64, + // end_block_number: Field, + // out_hash: Field, + // fees: [FeeRecipient; Constants.AZTEC_EPOCH_DURATION], + // vk_tree_root: Field, + // protocol_contract_tree_root: Field, + // prover_id: Field + // } + + // previous_archive.root: the previous archive tree root + publicInputs[0] = _args[0]; + + // previous_archive.next_available_leaf_index: the previous archive next available index + // normally this should be equal to the block number (since leaves are 0-indexed and blocks 1-indexed) + // but in yarn-project/merkle-tree/src/new_tree.ts we prefill the tree so that block N is in leaf N + publicInputs[1] = bytes32(previousBlockNumber + 1); + + // end_archive.root: the new archive tree root + publicInputs[2] = _args[1]; + + // end_archive.next_available_leaf_index: the new archive next available index + publicInputs[3] = bytes32(endBlockNumber + 1); + + // previous_block_hash: the block hash just preceding this epoch + publicInputs[4] = 
_args[2]; + + // end_block_hash: the last block hash in the epoch + publicInputs[5] = _args[3]; + + // end_timestamp: the timestamp of the last block in the epoch + publicInputs[6] = _args[4]; + + // end_block_number: last block number in the epoch + publicInputs[7] = bytes32(endBlockNumber); + + // out_hash: root of this epoch's l2 to l1 message tree + publicInputs[8] = _args[5]; + + uint256 feesLength = Constants.AZTEC_MAX_EPOCH_DURATION * 2; + // fees[9 to (9+feesLength-1)]: array of recipient-value pairs + for (uint256 i = 0; i < feesLength; i++) { + publicInputs[9 + i] = _fees[i]; + } + uint256 feesEnd = 9 + feesLength; + + // vk_tree_root + publicInputs[feesEnd] = _rollupStore.vkTreeRoot; + + // protocol_contract_tree_root + publicInputs[feesEnd + 1] = _rollupStore.protocolContractTreeRoot; + + // prover_id: id of current epoch's prover + publicInputs[feesEnd + 2] = _args[6]; + + // the block proof is recursive, which means it comes with an aggregation object + // this snippet copies it into the public inputs needed for verification + // it also guards against empty _aggregationObject used with mocked proofs + uint256 aggregationLength = _aggregationObject.length / 32; + for (uint256 i = 0; i < Constants.AGGREGATION_OBJECT_LENGTH && i < aggregationLength; i++) { + bytes32 part; + assembly { + part := calldataload(add(_aggregationObject.offset, mul(i, 32))) + } + publicInputs[i + feesEnd + 3] = part; + } + + return publicInputs; + } +} diff --git a/l1-contracts/src/core/libraries/RollupLibs/EpochProofQuoteLib.sol b/l1-contracts/src/core/libraries/RollupLibs/EpochProofQuoteLib.sol new file mode 100644 index 00000000000..bfa865d6d4e --- /dev/null +++ b/l1-contracts/src/core/libraries/RollupLibs/EpochProofQuoteLib.sol @@ -0,0 +1,51 @@ +// SPDX-License-Identifier: Apache-2.0 +// Copyright 2024 Aztec Labs. 
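The per-block reward split in submitEpochRootProof above reduces to basis-point arithmetic. A small illustrative sketch of just that split, using OpenZeppelin's mulDiv as the library does (not part of the PR):

```solidity
// SPDX-License-Identifier: Apache-2.0
pragma solidity >=0.8.27;

import {Math} from "@oz/utils/math/Math.sol";

library RewardSplitSketch {
  // The prover takes `_basisPointFee` basis points of the reward
  // (10_000 bps == 100%, the bound ValidationLib enforces on quotes);
  // the coinbase keeps the remainder.
  function split(uint256 _reward, uint256 _basisPointFee)
    internal
    pure
    returns (uint256 toProver, uint256 toCoinbase)
  {
    toProver = Math.mulDiv(_reward, _basisPointFee, 10_000);
    toCoinbase = _reward - toProver;
  }
}
```

For example, a 1e18 reward against a 500 bps quote sends 5e16 to the prover and the remaining 95e16 to the coinbase.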
+pragma solidity >=0.8.27; + +import {Signature} from "@aztec/core/libraries/crypto/SignatureLib.sol"; +import {Slot, Epoch} from "@aztec/core/libraries/TimeMath.sol"; + +/** + * @notice Struct encompassing an epoch proof quote + * @param epochToProve - The epoch number to prove + * @param validUntilSlot - The deadline of the quote, denoted in L2 slots + * @param bondAmount - The size of the bond + * @param prover - The address of the prover + * @param basisPointFee - The fee measured in basis points + */ +struct EpochProofQuote { + Epoch epochToProve; + Slot validUntilSlot; + uint256 bondAmount; + address prover; + uint32 basisPointFee; +} + +/** + * @notice A signed quote for the epoch proof + * @param quote - The Epoch Proof Quote + * @param signature - A signature on the quote + */ +struct SignedEpochProofQuote { + EpochProofQuote quote; + Signature signature; +} + +library EpochProofQuoteLib { + bytes32 public constant EPOCH_PROOF_QUOTE_TYPEHASH = keccak256( + "EpochProofQuote(uint256 epochToProve,uint256 validUntilSlot,uint256 bondAmount,address prover,uint32 basisPointFee)" + ); + + function hash(EpochProofQuote memory _quote) internal pure returns (bytes32) { + return keccak256( + abi.encode( + EPOCH_PROOF_QUOTE_TYPEHASH, + _quote.epochToProve, + _quote.validUntilSlot, + _quote.bondAmount, + _quote.prover, + _quote.basisPointFee + ) + ); + } +} diff --git a/l1-contracts/src/core/libraries/RollupLibs/ExtRollupLib.sol b/l1-contracts/src/core/libraries/RollupLibs/ExtRollupLib.sol new file mode 100644 index 00000000000..4b51f8efef0 --- /dev/null +++ b/l1-contracts/src/core/libraries/RollupLibs/ExtRollupLib.sol @@ -0,0 +1,113 @@ +// SPDX-License-Identifier: Apache-2.0 +// Copyright 2024 Aztec Labs. +pragma solidity >=0.8.27; + +import {IFeeJuicePortal} from "@aztec/core/interfaces/IFeeJuicePortal.sol"; +import {IProofCommitmentEscrow} from "@aztec/core/interfaces/IProofCommitmentEscrow.sol"; +import {BlockLog, RollupStore, SubmitEpochRootProofArgs} from "@aztec/core/interfaces/IRollup.sol"; +import {IRewardDistributor} from "@aztec/governance/interfaces/IRewardDistributor.sol"; +import {IERC20} from "@oz/token/ERC20/IERC20.sol"; +import {DataStructures} from "./../DataStructures.sol"; +import {Slot, Epoch} from "./../TimeMath.sol"; +import { + EpochProofLib, + SubmitEpochRootProofAddresses, + SubmitEpochRootProofInterimValues +} from "./EpochProofLib.sol"; +import {SignedEpochProofQuote} from "./EpochProofQuoteLib.sol"; +import {FeeMath, ManaBaseFeeComponents, FeeHeader, L1FeeData} from "./FeeMath.sol"; +import {HeaderLib, Header} from "./HeaderLib.sol"; +import {TxsDecoder} from "./TxsDecoder.sol"; +import {ValidationLib, ValidateHeaderArgs} from "./ValidationLib.sol"; +// We are using this library such that we can more easily "link" just a larger external library +// instead of a few smaller ones.
+ +library ExtRollupLib { + function submitEpochRootProof( + RollupStore storage _rollupStore, + SubmitEpochRootProofArgs calldata _args, + SubmitEpochRootProofInterimValues memory _interimValues, + IProofCommitmentEscrow _proofCommitmentEscrow, + IFeeJuicePortal _feeJuicePortal, + IRewardDistributor _rewardDistributor, + IERC20 _asset, + address _cuauhxicalli + ) external returns (uint256) { + return EpochProofLib.submitEpochRootProof( + _rollupStore, + _args, + _interimValues, + SubmitEpochRootProofAddresses({ + proofCommitmentEscrow: _proofCommitmentEscrow, + feeJuicePortal: _feeJuicePortal, + rewardDistributor: _rewardDistributor, + asset: _asset, + cuauhxicalli: _cuauhxicalli + }) + ); + } + + function validateHeaderForSubmissionBase( + ValidateHeaderArgs memory _args, + mapping(uint256 blockNumber => BlockLog log) storage _blocks + ) external view { + ValidationLib.validateHeaderForSubmissionBase(_args, _blocks); + } + + function validateEpochProofRightClaimAtTime( + Slot _currentSlot, + address _currentProposer, + Epoch _epochToProve, + uint256 _posInEpoch, + SignedEpochProofQuote calldata _quote, + bytes32 _digest, + DataStructures.EpochProofClaim storage _proofClaim, + uint256 _claimDurationInL2Slots, + uint256 _proofCommitmentMinBondAmountInTst, + IProofCommitmentEscrow _proofCommitmentEscrow + ) external view { + ValidationLib.validateEpochProofRightClaimAtTime( + _currentSlot, + _currentProposer, + _epochToProve, + _posInEpoch, + _quote, + _digest, + _proofClaim, + _claimDurationInL2Slots, + _proofCommitmentMinBondAmountInTst, + _proofCommitmentEscrow + ); + } + + function getManaBaseFeeComponentsAt( + FeeHeader storage _parentFeeHeader, + L1FeeData memory _fees, + uint256 _feeAssetPrice, + uint256 _epochDuration + ) external view returns (ManaBaseFeeComponents memory) { + return + FeeMath.getManaBaseFeeComponentsAt(_parentFeeHeader, _fees, _feeAssetPrice, _epochDuration); + } + + function getEpochProofPublicInputs( + RollupStore storage _rollupStore, + uint256 _epochSize, + bytes32[7] calldata _args, + bytes32[] calldata _fees, + bytes calldata _aggregationObject + ) external view returns (bytes32[] memory) { + return EpochProofLib.getEpochProofPublicInputs( + _rollupStore, _epochSize, _args, _fees, _aggregationObject + ); + } + + function decodeHeader(bytes calldata _header) external pure returns (Header memory) { + return HeaderLib.decode(_header); + } + + function computeTxsEffectsHash(bytes calldata _body) external pure returns (bytes32) { + return TxsDecoder.decode(_body); + } +} diff --git a/l1-contracts/src/core/libraries/FeeMath.sol b/l1-contracts/src/core/libraries/RollupLibs/FeeMath.sol similarity index 64% rename from l1-contracts/src/core/libraries/FeeMath.sol rename to l1-contracts/src/core/libraries/RollupLibs/FeeMath.sol index 215c2e4739a..ef3a27ffafb 100644 --- a/l1-contracts/src/core/libraries/FeeMath.sol +++ b/l1-contracts/src/core/libraries/RollupLibs/FeeMath.sol @@ -6,7 +6,26 @@ import {Math} from "@oz/utils/math/Math.sol"; import {SafeCast} from "@oz/utils/math/SafeCast.sol"; import {SignedMath} from "@oz/utils/math/SignedMath.sol"; -import {Errors} from "./Errors.sol"; +import {Errors} from "../Errors.sol"; + +// These values are taken from the model, but mostly pulled out of the ass +uint256 constant MINIMUM_PROVING_COST_PER_MANA = 5415357955; +uint256 constant MAX_PROVING_COST_MODIFIER = 1000000000; +uint256 constant PROVING_UPDATE_FRACTION = 100000000000; + +uint256 constant MINIMUM_FEE_ASSET_PRICE = 10000000000; +uint256 constant 
MAX_FEE_ASSET_PRICE_MODIFIER = 1000000000; +uint256 constant FEE_ASSET_PRICE_UPDATE_FRACTION = 100000000000; + +uint256 constant L1_GAS_PER_BLOCK_PROPOSED = 150000; +uint256 constant L1_GAS_PER_EPOCH_VERIFIED = 1000000; + +uint256 constant MINIMUM_CONGESTION_MULTIPLIER = 1000000000; +uint256 constant MANA_TARGET = 100000000; +uint256 constant CONGESTION_UPDATE_FRACTION = 854700854; + +uint256 constant BLOB_GAS_PER_BLOB = 2 ** 17; +uint256 constant GAS_PER_BLOB_POINT_EVALUATION = 50_000; struct OracleInput { int256 provingCostModifier; @@ -21,27 +40,58 @@ struct ManaBaseFeeComponents { uint256 provingCost; } +struct FeeHeader { + uint256 excessMana; + uint256 feeAssetPriceNumerator; + uint256 manaUsed; + uint256 provingCostPerManaNumerator; + uint256 congestionCost; +} + +struct L1FeeData { + uint256 baseFee; + uint256 blobFee; +} + library FeeMath { using Math for uint256; using SafeCast for int256; using SafeCast for uint256; using SignedMath for int256; - // These values are taken from the model, but mostly pulled out of the ass - uint256 internal constant MINIMUM_PROVING_COST_PER_MANA = 5415357955; - uint256 internal constant MAX_PROVING_COST_MODIFIER = 1000000000; - uint256 internal constant PROVING_UPDATE_FRACTION = 100000000000; - - uint256 internal constant MINIMUM_FEE_ASSET_PRICE = 10000000000; - uint256 internal constant MAX_FEE_ASSET_PRICE_MODIFIER = 1000000000; - uint256 internal constant FEE_ASSET_PRICE_UPDATE_FRACTION = 100000000000; - - uint256 internal constant L1_GAS_PER_BLOCK_PROPOSED = 150000; - uint256 internal constant L1_GAS_PER_EPOCH_VERIFIED = 1000000; + function getManaBaseFeeComponentsAt( + FeeHeader storage _parentFeeHeader, + L1FeeData memory _fees, + uint256 _feeAssetPrice, + uint256 _epochDuration + ) internal view returns (ManaBaseFeeComponents memory) { + uint256 excessMana = FeeMath.clampedAdd( + _parentFeeHeader.excessMana + _parentFeeHeader.manaUsed, -int256(MANA_TARGET) + ); - uint256 internal constant MINIMUM_CONGESTION_MULTIPLIER = 1000000000; - uint256 internal constant MANA_TARGET = 100000000; - uint256 internal constant CONGESTION_UPDATE_FRACTION = 854700854; + uint256 dataCost = + Math.mulDiv(3 * BLOB_GAS_PER_BLOB, _fees.blobFee, MANA_TARGET, Math.Rounding.Ceil); + uint256 gasUsed = L1_GAS_PER_BLOCK_PROPOSED + 3 * GAS_PER_BLOB_POINT_EVALUATION + + L1_GAS_PER_EPOCH_VERIFIED / _epochDuration; + uint256 gasCost = Math.mulDiv(gasUsed, _fees.baseFee, MANA_TARGET, Math.Rounding.Ceil); + uint256 provingCost = FeeMath.provingCostPerMana(_parentFeeHeader.provingCostPerManaNumerator); + + uint256 congestionMultiplier_ = congestionMultiplier(excessMana); + uint256 total = dataCost + gasCost + provingCost; + uint256 congestionCost = Math.mulDiv( + total, congestionMultiplier_, MINIMUM_CONGESTION_MULTIPLIER, Math.Rounding.Floor + ) - total; + + // @todo @lherskind. 
The following is a crime against humanity, but it makes it + // very neat to plot etc from python, #10004 will fix it across the board + return ManaBaseFeeComponents({ + dataCost: Math.mulDiv(dataCost, _feeAssetPrice, 1e9, Math.Rounding.Ceil), + gasCost: Math.mulDiv(gasCost, _feeAssetPrice, 1e9, Math.Rounding.Ceil), + provingCost: Math.mulDiv(provingCost, _feeAssetPrice, 1e9, Math.Rounding.Ceil), + congestionCost: Math.mulDiv(congestionCost, _feeAssetPrice, 1e9, Math.Rounding.Ceil), + congestionMultiplier: congestionMultiplier_ + }); + } function assertValid(OracleInput memory _self) internal pure returns (bool) { require( diff --git a/l1-contracts/src/core/libraries/HeaderLib.sol b/l1-contracts/src/core/libraries/RollupLibs/HeaderLib.sol similarity index 90% rename from l1-contracts/src/core/libraries/HeaderLib.sol rename to l1-contracts/src/core/libraries/RollupLibs/HeaderLib.sol index 4cade7f20e6..15d26b46e74 100644 --- a/l1-contracts/src/core/libraries/HeaderLib.sol +++ b/l1-contracts/src/core/libraries/RollupLibs/HeaderLib.sol @@ -5,6 +5,56 @@ pragma solidity >=0.8.27; import {Constants} from "@aztec/core/libraries/ConstantsGen.sol"; import {Errors} from "@aztec/core/libraries/Errors.sol"; +struct AppendOnlyTreeSnapshot { + bytes32 root; + uint32 nextAvailableLeafIndex; +} + +struct PartialStateReference { + AppendOnlyTreeSnapshot noteHashTree; + AppendOnlyTreeSnapshot nullifierTree; + AppendOnlyTreeSnapshot contractTree; + AppendOnlyTreeSnapshot publicDataTree; +} + +struct StateReference { + AppendOnlyTreeSnapshot l1ToL2MessageTree; + // Note: Can't use "partial" name here as in protocol specs because it is a reserved solidity keyword + PartialStateReference partialStateReference; +} + +struct GasFees { + uint256 feePerDaGas; + uint256 feePerL2Gas; +} + +struct GlobalVariables { + uint256 chainId; + uint256 version; + uint256 blockNumber; + uint256 slotNumber; + uint256 timestamp; + address coinbase; + bytes32 feeRecipient; + GasFees gasFees; +} + +struct ContentCommitment { + uint256 numTxs; + bytes32 txsEffectsHash; + bytes32 inHash; + bytes32 outHash; +} + +struct Header { + AppendOnlyTreeSnapshot lastArchive; + ContentCommitment contentCommitment; + StateReference stateReference; + GlobalVariables globalVariables; + uint256 totalFees; + uint256 totalManaUsed; +} + /** * @title Header Library * @author Aztec Labs @@ -56,56 +106,6 @@ import {Errors} from "@aztec/core/libraries/Errors.sol"; * | --- | --- | --- */ library HeaderLib { - struct AppendOnlyTreeSnapshot { - bytes32 root; - uint32 nextAvailableLeafIndex; - } - - struct PartialStateReference { - AppendOnlyTreeSnapshot noteHashTree; - AppendOnlyTreeSnapshot nullifierTree; - AppendOnlyTreeSnapshot contractTree; - AppendOnlyTreeSnapshot publicDataTree; - } - - struct StateReference { - AppendOnlyTreeSnapshot l1ToL2MessageTree; - // Note: Can't use "partial" name here as in protocol specs because it is a reserved solidity keyword - PartialStateReference partialStateReference; - } - - struct GasFees { - uint256 feePerDaGas; - uint256 feePerL2Gas; - } - - struct GlobalVariables { - uint256 chainId; - uint256 version; - uint256 blockNumber; - uint256 slotNumber; - uint256 timestamp; - address coinbase; - bytes32 feeRecipient; - GasFees gasFees; - } - - struct ContentCommitment { - uint256 numTxs; - bytes32 txsEffectsHash; - bytes32 inHash; - bytes32 outHash; - } - - struct Header { - AppendOnlyTreeSnapshot lastArchive; - ContentCommitment contentCommitment; - StateReference stateReference; - GlobalVariables globalVariables; - 
uint256 totalFees; - uint256 totalManaUsed; - } - uint256 private constant HEADER_LENGTH = 0x288; // Header byte length /** diff --git a/l1-contracts/src/core/libraries/RollupLibs/IntRollupLib.sol b/l1-contracts/src/core/libraries/RollupLibs/IntRollupLib.sol new file mode 100644 index 00000000000..f05eda331dd --- /dev/null +++ b/l1-contracts/src/core/libraries/RollupLibs/IntRollupLib.sol @@ -0,0 +1,31 @@ +// SPDX-License-Identifier: Apache-2.0 +// Copyright 2024 Aztec Labs. +pragma solidity >=0.8.27; + +import {EpochProofQuoteLib, EpochProofQuote} from "./EpochProofQuoteLib.sol"; + +import {FeeMath, ManaBaseFeeComponents, FeeHeader, MANA_TARGET} from "./FeeMath.sol"; + +// We are using this library such that we can more easily "link" just a larger external library +// instead of a few smaller ones. +library IntRollupLib { + function computeQuoteHash(EpochProofQuote memory _quote) internal pure returns (bytes32) { + return EpochProofQuoteLib.hash(_quote); + } + + function summedBaseFee(ManaBaseFeeComponents memory _components) internal pure returns (uint256) { + return FeeMath.summedBaseFee(_components); + } + + function clampedAdd(uint256 _a, int256 _b) internal pure returns (uint256) { + return FeeMath.clampedAdd(_a, _b); + } + + function feeAssetPriceModifier(uint256 _numerator) internal pure returns (uint256) { + return FeeMath.feeAssetPriceModifier(_numerator); + } + + function computeExcessMana(FeeHeader memory _feeHeader) internal pure returns (uint256) { + return clampedAdd(_feeHeader.excessMana + _feeHeader.manaUsed, -int256(MANA_TARGET)); + } +} diff --git a/l1-contracts/src/core/libraries/ProposeLib.sol b/l1-contracts/src/core/libraries/RollupLibs/ProposeLib.sol similarity index 88% rename from l1-contracts/src/core/libraries/ProposeLib.sol rename to l1-contracts/src/core/libraries/RollupLibs/ProposeLib.sol index ab5330661f7..59b4e10a7a6 100644 --- a/l1-contracts/src/core/libraries/ProposeLib.sol +++ b/l1-contracts/src/core/libraries/RollupLibs/ProposeLib.sol @@ -3,7 +3,7 @@ pragma solidity >=0.8.27; import {SignatureLib} from "@aztec/core/libraries/crypto/SignatureLib.sol"; -import {OracleInput} from "@aztec/core/libraries/FeeMath.sol"; +import {OracleInput} from "./FeeMath.sol"; struct ProposeArgs { bytes32 archive; diff --git a/l1-contracts/src/core/libraries/TxsDecoder.sol b/l1-contracts/src/core/libraries/RollupLibs/TxsDecoder.sol similarity index 99% rename from l1-contracts/src/core/libraries/TxsDecoder.sol rename to l1-contracts/src/core/libraries/RollupLibs/TxsDecoder.sol index 4a7da2a7720..b3a12b47c2c 100644 --- a/l1-contracts/src/core/libraries/TxsDecoder.sol +++ b/l1-contracts/src/core/libraries/RollupLibs/TxsDecoder.sol @@ -82,7 +82,7 @@ library TxsDecoder { * @param _body - The L2 block body calldata. * @return The txs effects hash. */ - function decode(bytes calldata _body) external pure returns (bytes32) { + function decode(bytes calldata _body) internal pure returns (bytes32) { ArrayOffsets memory offsets; Counts memory counts; ConsumablesVars memory vars; diff --git a/l1-contracts/src/core/libraries/RollupLibs/ValidationLib.sol b/l1-contracts/src/core/libraries/RollupLibs/ValidationLib.sol new file mode 100644 index 00000000000..5db4c00b5a1 --- /dev/null +++ b/l1-contracts/src/core/libraries/RollupLibs/ValidationLib.sol @@ -0,0 +1,157 @@ +// SPDX-License-Identifier: Apache-2.0 +// Copyright 2024 Aztec Labs. 
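IntRollupLib's computeExcessMana above is the accumulator behind the congestion multiplier: whatever mana was used above MANA_TARGET carries over to the next block, clamped at zero. The body of FeeMath.clampedAdd is not shown in this diff, so the sketch below assumes saturating semantics; treat it as an illustration under that assumption, not the canonical implementation:

```solidity
// SPDX-License-Identifier: Apache-2.0
pragma solidity >=0.8.27;

library ExcessManaSketch {
  uint256 internal constant MANA_TARGET = 100000000;

  // Assumed semantics of FeeMath.clampedAdd: saturate at zero on
  // underflow rather than reverting.
  function clampedAdd(uint256 _a, int256 _b) internal pure returns (uint256) {
    if (_b < 0) {
      uint256 sub = uint256(-_b);
      return _a > sub ? _a - sub : 0;
    }
    return _a + uint256(_b);
  }

  // Mirrors IntRollupLib.computeExcessMana: usage above target rolls into
  // the next block's excess; usage below it bleeds the excess back off.
  function computeExcessMana(uint256 _excessMana, uint256 _manaUsed)
    internal
    pure
    returns (uint256)
  {
    return clampedAdd(_excessMana + _manaUsed, -int256(MANA_TARGET));
  }
}
```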
+pragma solidity >=0.8.27; + +import {IFeeJuicePortal} from "@aztec/core/interfaces/IFeeJuicePortal.sol"; +import {IProofCommitmentEscrow} from "@aztec/core/interfaces/IProofCommitmentEscrow.sol"; +import {BlockLog} from "@aztec/core/interfaces/IRollup.sol"; +import {SignatureLib} from "@aztec/core/libraries/crypto/SignatureLib.sol"; +import {DataStructures} from "./../DataStructures.sol"; +import {Errors} from "./../Errors.sol"; +import {Timestamp, Slot, Epoch} from "./../TimeMath.sol"; +import {SignedEpochProofQuote} from "./EpochProofQuoteLib.sol"; +import {Header} from "./HeaderLib.sol"; + +struct ValidateHeaderArgs { + Header header; + Timestamp currentTime; + uint256 manaBaseFee; + bytes32 txsEffectsHash; + uint256 pendingBlockNumber; + DataStructures.ExecutionFlags flags; + uint256 version; + IFeeJuicePortal feeJuicePortal; + function(Slot) external view returns (Timestamp) getTimestampForSlot; +} + +library ValidationLib { + function validateHeaderForSubmissionBase( + ValidateHeaderArgs memory _args, + mapping(uint256 blockNumber => BlockLog log) storage _blocks + ) internal view { + require( + block.chainid == _args.header.globalVariables.chainId, + Errors.Rollup__InvalidChainId(block.chainid, _args.header.globalVariables.chainId) + ); + + require( + _args.header.globalVariables.version == _args.version, + Errors.Rollup__InvalidVersion(_args.version, _args.header.globalVariables.version) + ); + + require( + _args.header.globalVariables.blockNumber == _args.pendingBlockNumber + 1, + Errors.Rollup__InvalidBlockNumber( + _args.pendingBlockNumber + 1, _args.header.globalVariables.blockNumber + ) + ); + + bytes32 tipArchive = _blocks[_args.pendingBlockNumber].archive; + require( + tipArchive == _args.header.lastArchive.root, + Errors.Rollup__InvalidArchive(tipArchive, _args.header.lastArchive.root) + ); + + Slot slot = Slot.wrap(_args.header.globalVariables.slotNumber); + Slot lastSlot = _blocks[_args.pendingBlockNumber].slotNumber; + require(slot > lastSlot, Errors.Rollup__SlotAlreadyInChain(lastSlot, slot)); + + Timestamp timestamp = _args.getTimestampForSlot(slot); + require( + Timestamp.wrap(_args.header.globalVariables.timestamp) == timestamp, + Errors.Rollup__InvalidTimestamp( + timestamp, Timestamp.wrap(_args.header.globalVariables.timestamp) + ) + ); + + // @note If you are hitting this error, it is likely because the chain you are using has a block time that differs + // from the value we have in the constants. + // When you encounter this, it is likely because the sequencer expects to be able to include + // an Aztec block in the "next" ethereum block based on a timestamp that is 12 seconds in the future + // from the last block. However, if the next block actually lands only 1 second later, the + // expected timestamp will still be in the future.
+ require( + timestamp <= _args.currentTime, Errors.Rollup__TimestampInFuture(_args.currentTime, timestamp) + ); + + // Check if the data is available + require( + _args.flags.ignoreDA || _args.header.contentCommitment.txsEffectsHash == _args.txsEffectsHash, + Errors.Rollup__UnavailableTxs(_args.header.contentCommitment.txsEffectsHash) + ); + + // If not canonical rollup, require that the fees are zero + if (address(this) != _args.feeJuicePortal.canonicalRollup()) { + require(_args.header.globalVariables.gasFees.feePerDaGas == 0, Errors.Rollup__NonZeroDaFee()); + require(_args.header.globalVariables.gasFees.feePerL2Gas == 0, Errors.Rollup__NonZeroL2Fee()); + } else { + require(_args.header.globalVariables.gasFees.feePerDaGas == 0, Errors.Rollup__NonZeroDaFee()); + require( + _args.header.globalVariables.gasFees.feePerL2Gas == _args.manaBaseFee, + Errors.Rollup__InvalidManaBaseFee( + _args.manaBaseFee, _args.header.globalVariables.gasFees.feePerL2Gas + ) + ); + } + } + + function validateEpochProofRightClaimAtTime( + Slot _currentSlot, + address _currentProposer, + Epoch _epochToProve, + uint256 _posInEpoch, + SignedEpochProofQuote calldata _quote, + bytes32 _digest, + DataStructures.EpochProofClaim storage _proofClaim, + uint256 _claimDurationInL2Slots, + uint256 _proofCommitmentMinBondAmountInTst, + IProofCommitmentEscrow _proofCommitmentEscrow + ) internal view { + SignatureLib.verify(_quote.signature, _quote.quote.prover, _digest); + + require( + _quote.quote.validUntilSlot >= _currentSlot, + Errors.Rollup__QuoteExpired(_currentSlot, _quote.quote.validUntilSlot) + ); + + require( + _quote.quote.basisPointFee <= 10_000, + Errors.Rollup__InvalidBasisPointFee(_quote.quote.basisPointFee) + ); + + require( + _currentProposer == address(0) || _currentProposer == msg.sender, + Errors.Leonidas__InvalidProposer(_currentProposer, msg.sender) + ); + + require( + _quote.quote.epochToProve == _epochToProve, + Errors.Rollup__NotClaimingCorrectEpoch(_epochToProve, _quote.quote.epochToProve) + ); + + require( + _posInEpoch < _claimDurationInL2Slots, + Errors.Rollup__NotInClaimPhase(_posInEpoch, _claimDurationInL2Slots) + ); + + // if the epoch to prove is not the one that has been claimed, + // then whatever is in the proofClaim is stale + require( + _proofClaim.epochToProve != _epochToProve || _proofClaim.proposerClaimant == address(0), + Errors.Rollup__ProofRightAlreadyClaimed() + ); + + require( + _quote.quote.bondAmount >= _proofCommitmentMinBondAmountInTst, + Errors.Rollup__InsufficientBondAmount( + _proofCommitmentMinBondAmountInTst, _quote.quote.bondAmount + ) + ); + + uint256 availableFundsInEscrow = _proofCommitmentEscrow.deposits(_quote.quote.prover); + require( + _quote.quote.bondAmount <= availableFundsInEscrow, + Errors.Rollup__InsufficientFundsInEscrow(_quote.quote.bondAmount, availableFundsInEscrow) + ); + } +} diff --git a/l1-contracts/src/core/libraries/crypto/SampleLib.sol b/l1-contracts/src/core/libraries/crypto/SampleLib.sol index a790dc6e56f..721aa7bd1be 100644 --- a/l1-contracts/src/core/libraries/crypto/SampleLib.sol +++ b/l1-contracts/src/core/libraries/crypto/SampleLib.sol @@ -33,7 +33,7 @@ library SampleLib { * @return indices - The indices of the committee */ function computeCommitteeStupid(uint256 _committeeSize, uint256 _indexCount, uint256 _seed) - external + internal pure returns (uint256[] memory) { @@ -63,7 +63,7 @@ library SampleLib { * @return indices - The indices of the committee */ function computeCommitteeClever(uint256 _committeeSize, uint256 _indexCount, uint256 
_seed) - external + internal pure returns (uint256[] memory) { diff --git a/l1-contracts/src/core/libraries/crypto/SignatureLib.sol b/l1-contracts/src/core/libraries/crypto/SignatureLib.sol index 29e37357bc8..4223f5ddafe 100644 --- a/l1-contracts/src/core/libraries/crypto/SignatureLib.sol +++ b/l1-contracts/src/core/libraries/crypto/SignatureLib.sol @@ -4,14 +4,14 @@ pragma solidity ^0.8.27; import {Errors} from "@aztec/core/libraries/Errors.sol"; -library SignatureLib { - struct Signature { - bool isEmpty; - uint8 v; - bytes32 r; - bytes32 s; - } +struct Signature { + bool isEmpty; + uint8 v; + bytes32 r; + bytes32 s; +} +library SignatureLib { /** * @notice The domain separator for the signatures */ diff --git a/l1-contracts/src/core/staking/Staking.sol b/l1-contracts/src/core/staking/Staking.sol index 7f0a0c3b446..0d75e74e1c1 100644 --- a/l1-contracts/src/core/staking/Staking.sol +++ b/l1-contracts/src/core/staking/Staking.sol @@ -3,7 +3,12 @@ pragma solidity >=0.8.27; import { - IStaking, ValidatorInfo, Exit, Status, OperatorInfo + IStaking, + ValidatorInfo, + Exit, + Status, + OperatorInfo, + StakingStorage } from "@aztec/core/interfaces/IStaking.sol"; import {Errors} from "@aztec/core/libraries/Errors.sol"; import {Timestamp} from "@aztec/core/libraries/TimeMath.sol"; @@ -22,11 +27,7 @@ contract Staking is IStaking { IERC20 public immutable STAKING_ASSET; uint256 public immutable MINIMUM_STAKE; - // address <=> index - EnumerableSet.AddressSet internal attesters; - - mapping(address attester => ValidatorInfo) internal info; - mapping(address attester => Exit) internal exits; + StakingStorage internal stakingStore; constructor(address _slasher, IERC20 _stakingAsset, uint256 _minimumStake) { SLASHER = _slasher; @@ -35,10 +36,10 @@ contract Staking is IStaking { } function finaliseWithdraw(address _attester) external override(IStaking) { - ValidatorInfo storage validator = info[_attester]; + ValidatorInfo storage validator = stakingStore.info[_attester]; require(validator.status == Status.EXITING, Errors.Staking__NotExiting(_attester)); - Exit storage exit = exits[_attester]; + Exit storage exit = stakingStore.exits[_attester]; require( exit.exitableAt <= Timestamp.wrap(block.timestamp), Errors.Staking__WithdrawalNotUnlockedYet(Timestamp.wrap(block.timestamp), exit.exitableAt) @@ -47,8 +48,8 @@ contract Staking is IStaking { uint256 amount = validator.stake; address recipient = exit.recipient; - delete exits[_attester]; - delete info[_attester]; + delete stakingStore.exits[_attester]; + delete stakingStore.info[_attester]; STAKING_ASSET.transfer(recipient, amount); @@ -58,14 +59,14 @@ contract Staking is IStaking { function slash(address _attester, uint256 _amount) external override(IStaking) { require(msg.sender == SLASHER, Errors.Staking__NotSlasher(SLASHER, msg.sender)); - ValidatorInfo storage validator = info[_attester]; + ValidatorInfo storage validator = stakingStore.info[_attester]; require(validator.status != Status.NONE, Errors.Staking__NoOneToSlash(_attester)); // There is a special case: if exiting and past the limit, it is untouchable!
require( !( validator.status == Status.EXITING - && exits[_attester].exitableAt <= Timestamp.wrap(block.timestamp) + && stakingStore.exits[_attester].exitableAt <= Timestamp.wrap(block.timestamp) ), Errors.Staking__CannotSlashExitedStake(_attester) ); @@ -75,7 +76,7 @@ contract Staking is IStaking { // When LIVING, he can only start exiting, we don't "really" exit him, because that costs // gas and creates edge cases around the recipient, so let's just avoid that. if (validator.status == Status.VALIDATING && validator.stake < MINIMUM_STAKE) { - require(attesters.remove(_attester), Errors.Staking__FailedToRemove(_attester)); + require(stakingStore.attesters.remove(_attester), Errors.Staking__FailedToRemove(_attester)); validator.status = Status.LIVING; } @@ -88,28 +89,11 @@ contract Staking is IStaking { override(IStaking) returns (ValidatorInfo memory) { - return info[_attester]; - } - - function getProposerForAttester(address _attester) - external - view - override(IStaking) - returns (address) - { - return info[_attester].proposer; + return stakingStore.info[_attester]; } function getExit(address _attester) external view override(IStaking) returns (Exit memory) { - return exits[_attester]; - } - - function getAttesterAtIndex(uint256 _index) external view override(IStaking) returns (address) { - return attesters.at(_index); - } - - function getProposerAtIndex(uint256 _index) external view override(IStaking) returns (address) { - return info[attesters.at(_index)].proposer; + return stakingStore.exits[_attester]; } function getOperatorAtIndex(uint256 _index) @@ -118,8 +102,8 @@ contract Staking is IStaking { override(IStaking) returns (OperatorInfo memory) { - address attester = attesters.at(_index); - return OperatorInfo({proposer: info[attester].proposer, attester: attester}); + address attester = stakingStore.attesters.at(_index); + return OperatorInfo({proposer: stakingStore.info[attester].proposer, attester: attester}); } function deposit(address _attester, address _proposer, address _withdrawer, uint256 _amount) @@ -129,12 +113,15 @@ contract Staking is IStaking { { require(_amount >= MINIMUM_STAKE, Errors.Staking__InsufficientStake(_amount, MINIMUM_STAKE)); STAKING_ASSET.transferFrom(msg.sender, address(this), _amount); - require(info[_attester].status == Status.NONE, Errors.Staking__AlreadyRegistered(_attester)); - require(attesters.add(_attester), Errors.Staking__AlreadyActive(_attester)); + require( + stakingStore.info[_attester].status == Status.NONE, + Errors.Staking__AlreadyRegistered(_attester) + ); + require(stakingStore.attesters.add(_attester), Errors.Staking__AlreadyActive(_attester)); // If BLS, need to check possession of private key to avoid attacks. - info[_attester] = ValidatorInfo({ + stakingStore.info[_attester] = ValidatorInfo({ stake: _amount, withdrawer: _withdrawer, proposer: _proposer, @@ -150,7 +137,7 @@ contract Staking is IStaking { override(IStaking) returns (bool) { - ValidatorInfo storage validator = info[_attester]; + ValidatorInfo storage validator = stakingStore.info[_attester]; require( msg.sender == validator.withdrawer, @@ -161,12 +148,12 @@ contract Staking is IStaking { Errors.Staking__NothingToExit(_attester) ); if (validator.status == Status.VALIDATING) { - require(attesters.remove(_attester), Errors.Staking__FailedToRemove(_attester)); + require(stakingStore.attesters.remove(_attester), Errors.Staking__FailedToRemove(_attester)); } // Note that the "amount" is not stored here; we reuse the `validators` entry. // We always exit fully.
- exits[_attester] = + stakingStore.exits[_attester] = Exit({exitableAt: Timestamp.wrap(block.timestamp) + EXIT_DELAY, recipient: _recipient}); validator.status = Status.EXITING; @@ -176,6 +163,23 @@ contract Staking is IStaking { } function getActiveAttesterCount() public view override(IStaking) returns (uint256) { - return attesters.length(); + return stakingStore.attesters.length(); + } + + function getProposerForAttester(address _attester) + public + view + override(IStaking) + returns (address) + { + return stakingStore.info[_attester].proposer; + } + + function getAttesterAtIndex(uint256 _index) public view override(IStaking) returns (address) { + return stakingStore.attesters.at(_index); + } + + function getProposerAtIndex(uint256 _index) public view override(IStaking) returns (address) { + return stakingStore.info[stakingStore.attesters.at(_index)].proposer; } } diff --git a/l1-contracts/src/mock/MockFeeJuicePortal.sol b/l1-contracts/src/mock/MockFeeJuicePortal.sol index a7d56cae0e8..0d180556754 100644 --- a/l1-contracts/src/mock/MockFeeJuicePortal.sol +++ b/l1-contracts/src/mock/MockFeeJuicePortal.sol @@ -13,7 +13,7 @@ contract MockFeeJuicePortal is IFeeJuicePortal { IRegistry public constant REGISTRY = IRegistry(address(0)); constructor() { - UNDERLYING = new TestERC20(); + UNDERLYING = new TestERC20("test", "TEST", msg.sender); } function initialize() external override(IFeeJuicePortal) {} diff --git a/l1-contracts/src/mock/TestERC20.sol b/l1-contracts/src/mock/TestERC20.sol index 6236f94d758..3883b407347 100644 --- a/l1-contracts/src/mock/TestERC20.sol +++ b/l1-contracts/src/mock/TestERC20.sol @@ -2,13 +2,31 @@ // docs:start:contract pragma solidity >=0.8.27; +import {Ownable} from "@oz/access/Ownable.sol"; import {ERC20} from "@oz/token/ERC20/ERC20.sol"; -import {IMintableERC20} from "../governance/interfaces/IMintableERC20.sol"; +import {IMintableERC20} from "./../governance/interfaces/IMintableERC20.sol"; -contract TestERC20 is ERC20, IMintableERC20 { - constructor() ERC20("Portal", "PORTAL") {} +contract TestERC20 is ERC20, IMintableERC20, Ownable { + bool public freeForAll = false; - function mint(address _to, uint256 _amount) external override(IMintableERC20) { + modifier ownerOrFreeForAll() { + if (msg.sender != owner() && !freeForAll) { + revert("Not owner or free for all"); + } + _; + } + + constructor(string memory _name, string memory _symbol, address _owner) + ERC20(_name, _symbol) + Ownable(_owner) + {} + + // solhint-disable-next-line comprehensive-interface + function setFreeForAll(bool _freeForAll) external onlyOwner { + freeForAll = _freeForAll; + } + + function mint(address _to, uint256 _amount) external override(IMintableERC20) ownerOrFreeForAll { _mint(_to, _amount); } } diff --git a/l1-contracts/terraform/main.tf b/l1-contracts/terraform/main.tf index 5a720d5c204..d619a827877 100644 --- a/l1-contracts/terraform/main.tf +++ b/l1-contracts/terraform/main.tf @@ -57,6 +57,15 @@ output "fee_juice_contract_address" { value = var.FEE_JUICE_CONTRACT_ADDRESS } +variable "STAKING_ASSET_CONTRACT_ADDRESS" { + type = string + default = "" +} + +output "staking_asset_contract_address" { + value = var.STAKING_ASSET_CONTRACT_ADDRESS +} + variable "FEE_JUICE_PORTAL_CONTRACT_ADDRESS" { type = string default = "" diff --git a/l1-contracts/test/Rollup.t.sol b/l1-contracts/test/Rollup.t.sol index 6c5abf747ee..504db52ae57 100644 --- a/l1-contracts/test/Rollup.t.sol +++ b/l1-contracts/test/Rollup.t.sol @@ -6,8 +6,11 @@ import {DecoderBase} from "./decoders/Base.sol"; import 
{DataStructures} from "@aztec/core/libraries/DataStructures.sol"; import {Constants} from "@aztec/core/libraries/ConstantsGen.sol"; -import {SignatureLib} from "@aztec/core/libraries/crypto/SignatureLib.sol"; -import {EpochProofQuoteLib} from "@aztec/core/libraries/EpochProofQuoteLib.sol"; +import {Signature} from "@aztec/core/libraries/crypto/SignatureLib.sol"; +import { + EpochProofQuote, + SignedEpochProofQuote +} from "@aztec/core/libraries/RollupLibs/EpochProofQuoteLib.sol"; import {Math} from "@oz/utils/math/Math.sol"; import {Registry} from "@aztec/governance/Registry.sol"; @@ -26,7 +29,9 @@ import {TestConstants} from "./harnesses/TestConstants.sol"; import {RewardDistributor} from "@aztec/governance/RewardDistributor.sol"; import {TxsDecoderHelper} from "./decoders/helpers/TxsDecoderHelper.sol"; import {IERC20Errors} from "@oz/interfaces/draft-IERC6093.sol"; -import {ProposeArgs, OracleInput, ProposeLib} from "@aztec/core/libraries/ProposeLib.sol"; +import { + ProposeArgs, OracleInput, ProposeLib +} from "@aztec/core/libraries/RollupLibs/ProposeLib.sol"; import { Timestamp, Slot, Epoch, SlotLib, EpochLib, TimeFns @@ -54,10 +59,10 @@ contract RollupTest is DecoderBase, TimeFns { FeeJuicePortal internal feeJuicePortal; IProofCommitmentEscrow internal proofCommitmentEscrow; RewardDistributor internal rewardDistributor; - SignatureLib.Signature[] internal signatures; + Signature[] internal signatures; - EpochProofQuoteLib.EpochProofQuote internal quote; - EpochProofQuoteLib.SignedEpochProofQuote internal signedQuote; + EpochProofQuote internal quote; + SignedEpochProofQuote internal signedQuote; uint256 internal privateKey; address internal signer; @@ -69,8 +74,12 @@ contract RollupTest is DecoderBase, TimeFns { */ modifier setUpFor(string memory _name) { { + testERC20 = new TestERC20("test", "TEST", address(this)); + leo = new Leonidas( address(1), + testERC20, + TestConstants.AZTEC_MINIMUM_STAKE, TestConstants.AZTEC_SLOT_DURATION, TestConstants.AZTEC_EPOCH_DURATION, TestConstants.AZTEC_TARGET_COMMITTEE_SIZE @@ -83,7 +92,6 @@ contract RollupTest is DecoderBase, TimeFns { } registry = new Registry(address(this)); - testERC20 = new TestERC20(); feeJuicePortal = new FeeJuicePortal( address(registry), address(testERC20), bytes32(Constants.FEE_JUICE_ADDRESS) ); @@ -93,7 +101,7 @@ contract RollupTest is DecoderBase, TimeFns { testERC20.mint(address(rewardDistributor), 1e6 ether); rollup = new Rollup( - feeJuicePortal, rewardDistributor, bytes32(0), bytes32(0), address(this), new address[](0) + feeJuicePortal, rewardDistributor, testERC20, bytes32(0), bytes32(0), address(this) ); inbox = Inbox(address(rollup.INBOX())); outbox = Outbox(address(rollup.OUTBOX())); @@ -107,7 +115,7 @@ contract RollupTest is DecoderBase, TimeFns { privateKey = 0x123456789abcdef123456789abcdef123456789abcdef123456789abcdef1234; signer = vm.addr(privateKey); uint256 bond = rollup.PROOF_COMMITMENT_MIN_BOND_AMOUNT_IN_TST(); - quote = EpochProofQuoteLib.EpochProofQuote({ + quote = EpochProofQuote({ epochToProve: Epoch.wrap(0), validUntilSlot: Slot.wrap(1), bondAmount: bond, @@ -231,18 +239,14 @@ contract RollupTest is DecoderBase, TimeFns { ); rollup.claimEpochProofRight(signedQuote); - ( - Epoch epochToProve, - uint256 basisPointFee, - uint256 bondAmount, - address bondProvider, - address proposerClaimant - ) = rollup.proofClaim(); - assertEq(epochToProve, signedQuote.quote.epochToProve, "Invalid epoch to prove"); - assertEq(basisPointFee, signedQuote.quote.basisPointFee, "Invalid basis point fee"); - 
assertEq(bondAmount, signedQuote.quote.bondAmount, "Invalid bond amount"); - assertEq(bondProvider, quote.prover, "Invalid bond provider"); - assertEq(proposerClaimant, address(this), "Invalid proposer claimant"); + DataStructures.EpochProofClaim memory epochProofClaim = rollup.getProofClaim(); + assertEq(epochProofClaim.epochToProve, signedQuote.quote.epochToProve, "Invalid epoch to prove"); + assertEq( + epochProofClaim.basisPointFee, signedQuote.quote.basisPointFee, "Invalid basis point fee" + ); + assertEq(epochProofClaim.bondAmount, signedQuote.quote.bondAmount, "Invalid bond amount"); + assertEq(epochProofClaim.bondProvider, quote.prover, "Invalid bond provider"); + assertEq(epochProofClaim.proposerClaimant, address(this), "Invalid proposer claimant"); assertEq( proofCommitmentEscrow.deposits(quote.prover), quote.bondAmount * 9, "Invalid escrow balance" ); @@ -1181,16 +1185,16 @@ contract RollupTest is DecoderBase, TimeFns { ); } - function _quoteToSignedQuote(EpochProofQuoteLib.EpochProofQuote memory _quote) + function _quoteToSignedQuote(EpochProofQuote memory _quote) internal view - returns (EpochProofQuoteLib.SignedEpochProofQuote memory) + returns (SignedEpochProofQuote memory) { bytes32 digest = rollup.quoteToDigest(_quote); (uint8 v, bytes32 r, bytes32 s) = vm.sign(privateKey, digest); - return EpochProofQuoteLib.SignedEpochProofQuote({ + return SignedEpochProofQuote({ quote: _quote, - signature: SignatureLib.Signature({isEmpty: false, v: v, r: r, s: s}) + signature: Signature({isEmpty: false, v: v, r: r, s: s}) }); } } diff --git a/l1-contracts/test/TestERC20.t.sol b/l1-contracts/test/TestERC20.t.sol index 3b7abc4cfa7..b95dcd9d49d 100644 --- a/l1-contracts/test/TestERC20.t.sol +++ b/l1-contracts/test/TestERC20.t.sol @@ -7,11 +7,18 @@ contract TestERC20Test is Test { TestERC20 testERC20; function setUp() public { - testERC20 = new TestERC20(); + testERC20 = new TestERC20("test", "TEST", address(this)); } function test_mint() public { testERC20.mint(address(this), 100); assertEq(testERC20.balanceOf(address(this)), 100); } + + function test_mint_only_owner(address _caller) public { + vm.assume(_caller != address(this)); + vm.expectRevert(); + vm.prank(_caller); + testERC20.mint(address(this), 100); + } } diff --git a/l1-contracts/test/decoders/Decoders.t.sol b/l1-contracts/test/decoders/Decoders.t.sol index c3d47db7bb8..2165f759911 100644 --- a/l1-contracts/test/decoders/Decoders.t.sol +++ b/l1-contracts/test/decoders/Decoders.t.sol @@ -8,7 +8,7 @@ import {Hash} from "@aztec/core/libraries/crypto/Hash.sol"; import {HeaderLibHelper} from "./helpers/HeaderLibHelper.sol"; import {TxsDecoderHelper} from "./helpers/TxsDecoderHelper.sol"; -import {HeaderLib} from "@aztec/core/libraries/HeaderLib.sol"; +import {HeaderLib, Header} from "@aztec/core/libraries/RollupLibs/HeaderLib.sol"; import {Constants} from "@aztec/core/libraries/ConstantsGen.sol"; /** @@ -39,7 +39,7 @@ contract DecodersTest is DecoderBase { // Header { DecoderBase.DecodedHeader memory referenceHeader = data.block.decodedHeader; - HeaderLib.Header memory header = headerHelper.decode(data.block.header); + Header memory header = headerHelper.decode(data.block.header); // GlobalVariables { diff --git a/l1-contracts/test/decoders/helpers/HeaderLibHelper.sol b/l1-contracts/test/decoders/helpers/HeaderLibHelper.sol index 02528023a7c..8b81756fa77 100644 --- a/l1-contracts/test/decoders/helpers/HeaderLibHelper.sol +++ b/l1-contracts/test/decoders/helpers/HeaderLibHelper.sol @@ -2,11 +2,11 @@ // Copyright 2024 Aztec Labs. 
pragma solidity >=0.8.27; -import {HeaderLib} from "@aztec/core/libraries/HeaderLib.sol"; +import {HeaderLib, Header} from "@aztec/core/libraries/RollupLibs/HeaderLib.sol"; contract HeaderLibHelper { // A wrapper used such that we get "calldata" and not memory - function decode(bytes calldata _header) public pure returns (HeaderLib.Header memory) { + function decode(bytes calldata _header) public pure returns (Header memory) { return HeaderLib.decode(_header); } } diff --git a/l1-contracts/test/decoders/helpers/TxsDecoderHelper.sol b/l1-contracts/test/decoders/helpers/TxsDecoderHelper.sol index 2f8db8d3378..45cd04ad139 100644 --- a/l1-contracts/test/decoders/helpers/TxsDecoderHelper.sol +++ b/l1-contracts/test/decoders/helpers/TxsDecoderHelper.sol @@ -2,7 +2,7 @@ // Copyright 2024 Aztec Labs. pragma solidity >=0.8.27; -import {TxsDecoder} from "@aztec/core/libraries/TxsDecoder.sol"; +import {TxsDecoder} from "@aztec/core/libraries/RollupLibs/TxsDecoder.sol"; import {MerkleLib} from "@aztec/core/libraries/crypto/MerkleLib.sol"; contract TxsDecoderHelper { diff --git a/l1-contracts/test/fee_portal/depositToAztecPublic.t.sol b/l1-contracts/test/fee_portal/depositToAztecPublic.t.sol index fc68df1a444..8eb7bcd301c 100644 --- a/l1-contracts/test/fee_portal/depositToAztecPublic.t.sol +++ b/l1-contracts/test/fee_portal/depositToAztecPublic.t.sol @@ -27,16 +27,15 @@ contract DepositToAztecPublic is Test { function setUp() public { registry = new Registry(OWNER); - token = new TestERC20(); + token = new TestERC20("test", "TEST", address(this)); feeJuicePortal = new FeeJuicePortal(address(registry), address(token), bytes32(Constants.FEE_JUICE_ADDRESS)); token.mint(address(feeJuicePortal), Constants.FEE_JUICE_INITIAL_MINT); feeJuicePortal.initialize(); rewardDistributor = new RewardDistributor(token, registry, address(this)); - rollup = new Rollup( - feeJuicePortal, rewardDistributor, bytes32(0), bytes32(0), address(this), new address[](0) - ); + rollup = + new Rollup(feeJuicePortal, rewardDistributor, token, bytes32(0), bytes32(0), address(this)); vm.prank(OWNER); registry.upgrade(address(rollup)); @@ -67,9 +66,8 @@ contract DepositToAztecPublic is Test { uint256 numberOfRollups = bound(_numberOfRollups, 1, 5); for (uint256 i = 0; i < numberOfRollups; i++) { - Rollup freshRollup = new Rollup( - feeJuicePortal, rewardDistributor, bytes32(0), bytes32(0), address(this), new address[](0) - ); + Rollup freshRollup = + new Rollup(feeJuicePortal, rewardDistributor, token, bytes32(0), bytes32(0), address(this)); vm.prank(OWNER); registry.upgrade(address(freshRollup)); } diff --git a/l1-contracts/test/fee_portal/distributeFees.t.sol b/l1-contracts/test/fee_portal/distributeFees.t.sol index bfb366e21c7..0308b2d9433 100644 --- a/l1-contracts/test/fee_portal/distributeFees.t.sol +++ b/l1-contracts/test/fee_portal/distributeFees.t.sol @@ -26,16 +26,15 @@ contract DistributeFees is Test { function setUp() public { registry = new Registry(OWNER); - token = new TestERC20(); + token = new TestERC20("test", "TEST", address(this)); feeJuicePortal = new FeeJuicePortal(address(registry), address(token), bytes32(Constants.FEE_JUICE_ADDRESS)); token.mint(address(feeJuicePortal), Constants.FEE_JUICE_INITIAL_MINT); feeJuicePortal.initialize(); rewardDistributor = new RewardDistributor(token, registry, address(this)); - rollup = new Rollup( - feeJuicePortal, rewardDistributor, bytes32(0), bytes32(0), address(this), new address[](0) - ); + rollup = + new Rollup(feeJuicePortal, rewardDistributor, token, bytes32(0), bytes32(0), 
address(this)); vm.prank(OWNER); registry.upgrade(address(rollup)); @@ -74,9 +73,8 @@ contract DistributeFees is Test { uint256 numberOfRollups = bound(_numberOfRollups, 1, 5); for (uint256 i = 0; i < numberOfRollups; i++) { - Rollup freshRollup = new Rollup( - feeJuicePortal, rewardDistributor, bytes32(0), bytes32(0), address(this), new address[](0) - ); + Rollup freshRollup = + new Rollup(feeJuicePortal, rewardDistributor, token, bytes32(0), bytes32(0), address(this)); vm.prank(OWNER); registry.upgrade(address(freshRollup)); } diff --git a/l1-contracts/test/fees/FeeModelTestPoints.t.sol b/l1-contracts/test/fees/FeeModelTestPoints.t.sol index 3dd5b0de248..368df77e602 100644 --- a/l1-contracts/test/fees/FeeModelTestPoints.t.sol +++ b/l1-contracts/test/fees/FeeModelTestPoints.t.sol @@ -4,7 +4,7 @@ pragma solidity >=0.8.27; import {TestBase} from "../base/Base.sol"; -import {OracleInput as FeeMathOracleInput} from "@aztec/core/libraries/FeeMath.sol"; +import {OracleInput as FeeMathOracleInput} from "@aztec/core/libraries/RollupLibs/FeeMath.sol"; // Remember that foundry json parsing is alphabetically done, so you MUST // sort the struct fields alphabetically or prepare for a headache. diff --git a/l1-contracts/test/fees/FeeRollup.t.sol b/l1-contracts/test/fees/FeeRollup.t.sol index 7f131fb9da5..8331d66d7ee 100644 --- a/l1-contracts/test/fees/FeeRollup.t.sol +++ b/l1-contracts/test/fees/FeeRollup.t.sol @@ -6,8 +6,8 @@ import {DecoderBase} from "../decoders/Base.sol"; import {DataStructures} from "@aztec/core/libraries/DataStructures.sol"; import {Constants} from "@aztec/core/libraries/ConstantsGen.sol"; -import {SignatureLib} from "@aztec/core/libraries/crypto/SignatureLib.sol"; -import {EpochProofQuoteLib} from "@aztec/core/libraries/EpochProofQuoteLib.sol"; +import {SignatureLib, Signature} from "@aztec/core/libraries/crypto/SignatureLib.sol"; +import {EpochProofQuoteLib} from "@aztec/core/libraries/RollupLibs/EpochProofQuoteLib.sol"; import {Math} from "@oz/utils/math/Math.sol"; import {Registry} from "@aztec/governance/Registry.sol"; @@ -36,10 +36,11 @@ import {TxsDecoderHelper} from "../decoders/helpers/TxsDecoderHelper.sol"; import {IERC20Errors} from "@oz/interfaces/draft-IERC6093.sol"; import {IFeeJuicePortal} from "@aztec/core/interfaces/IFeeJuicePortal.sol"; import {IRewardDistributor} from "@aztec/governance/interfaces/IRewardDistributor.sol"; -import {OracleInput} from "@aztec/core/libraries/FeeMath.sol"; -import {ProposeArgs, OracleInput, ProposeLib} from "@aztec/core/libraries/ProposeLib.sol"; +import { + ProposeArgs, OracleInput, ProposeLib +} from "@aztec/core/libraries/RollupLibs/ProposeLib.sol"; import {IERC20} from "@oz/token/ERC20/IERC20.sol"; -import {FeeMath} from "@aztec/core/libraries/FeeMath.sol"; +import {FeeMath, MANA_TARGET} from "@aztec/core/libraries/RollupLibs/FeeMath.sol"; import { FeeHeader as FeeHeaderModel, @@ -92,7 +93,7 @@ contract FeeRollupTest is FeeModelTestPoints, DecoderBase { bytes header; bytes body; bytes32[] txHashes; - SignatureLib.Signature[] signatures; + Signature[] signatures; } DecoderBase.Full full = load("empty_block_1"); @@ -113,21 +114,24 @@ contract FeeRollupTest is FeeModelTestPoints, DecoderBase { vm.fee(l1Metadata[0].base_fee); vm.blobBaseFee(l1Metadata[0].blob_fee); - asset = new TestERC20(); + asset = new TestERC20("test", "TEST", address(this)); fakeCanonical = new FakeCanonical(IERC20(address(asset))); + asset.transferOwnership(address(fakeCanonical)); + rollup = new Rollup( IFeeJuicePortal(address(fakeCanonical)), 
IRewardDistributor(address(fakeCanonical)), + asset, bytes32(0), bytes32(0), address(this), - new address[](0), Config({ aztecSlotDuration: SLOT_DURATION, aztecEpochDuration: EPOCH_DURATION, targetCommitteeSize: 48, - aztecEpochProofClaimWindowInL2Slots: 16 + aztecEpochProofClaimWindowInL2Slots: 16, + minimumStake: 100 ether }) ); fakeCanonical.setCanonicalRollup(address(rollup)); @@ -156,7 +160,7 @@ contract FeeRollupTest is FeeModelTestPoints, DecoderBase { bytes32 blockHash = 0x267f79fe7e757b20e924fac9f78264a0d1c8c4b481fea21d0bbe74650d87a1f1; bytes32[] memory txHashes = new bytes32[](0); - SignatureLib.Signature[] memory signatures = new SignatureLib.Signature[](0); + Signature[] memory signatures = new Signature[](0); bytes memory body = full.block.body; bytes memory header = full.block.header; @@ -252,11 +256,11 @@ contract FeeRollupTest is FeeModelTestPoints, DecoderBase { rollup.getBlock(rollup.getPendingBlockNumber()).feeHeader; uint256 excessManaNoPrune = ( parentFeeHeaderNoPrune.excessMana + parentFeeHeaderNoPrune.manaUsed - ).clampedAdd(-int256(FeeMath.MANA_TARGET)); + ).clampedAdd(-int256(MANA_TARGET)); FeeHeader memory parentFeeHeaderPrune = rollup.getBlock(rollup.getProvenBlockNumber()).feeHeader; uint256 excessManaPrune = (parentFeeHeaderPrune.excessMana + parentFeeHeaderPrune.manaUsed) - .clampedAdd(-int256(FeeMath.MANA_TARGET)); + .clampedAdd(-int256(MANA_TARGET)); assertGt(excessManaNoPrune, excessManaPrune, "excess mana should be lower if we prune"); diff --git a/l1-contracts/test/fees/MinimalFeeModel.sol b/l1-contracts/test/fees/MinimalFeeModel.sol index 0e5284d0b43..90849e20248 100644 --- a/l1-contracts/test/fees/MinimalFeeModel.sol +++ b/l1-contracts/test/fees/MinimalFeeModel.sol @@ -2,7 +2,14 @@ // Copyright 2024 Aztec Labs. pragma solidity >=0.8.27; -import {FeeMath, OracleInput} from "@aztec/core/libraries/FeeMath.sol"; +import { + FeeMath, + OracleInput, + MANA_TARGET, + L1_GAS_PER_BLOCK_PROPOSED, + L1_GAS_PER_EPOCH_VERIFIED, + MINIMUM_CONGESTION_MULTIPLIER +} from "@aztec/core/libraries/RollupLibs/FeeMath.sol"; import {Timestamp, TimeFns, Slot, SlotLib} from "@aztec/core/libraries/TimeMath.sol"; import {Vm} from "forge-std/Vm.sol"; import { @@ -60,19 +67,17 @@ contract MinimalFeeModel is TimeFns { returns (ManaBaseFeeComponents memory) { L1Fees memory fees = getCurrentL1Fees(); - uint256 dataCost = Math.mulDiv( - _blobsUsed * BLOB_GAS_PER_BLOB, fees.blob_fee, FeeMath.MANA_TARGET, Math.Rounding.Ceil - ); - uint256 gasUsed = FeeMath.L1_GAS_PER_BLOCK_PROPOSED + _blobsUsed * GAS_PER_BLOB_POINT_EVALUATION - + FeeMath.L1_GAS_PER_EPOCH_VERIFIED / EPOCH_DURATION; - uint256 gasCost = Math.mulDiv(gasUsed, fees.base_fee, FeeMath.MANA_TARGET, Math.Rounding.Ceil); + uint256 dataCost = + Math.mulDiv(_blobsUsed * BLOB_GAS_PER_BLOB, fees.blob_fee, MANA_TARGET, Math.Rounding.Ceil); + uint256 gasUsed = L1_GAS_PER_BLOCK_PROPOSED + _blobsUsed * GAS_PER_BLOB_POINT_EVALUATION + + L1_GAS_PER_EPOCH_VERIFIED / EPOCH_DURATION; + uint256 gasCost = Math.mulDiv(gasUsed, fees.base_fee, MANA_TARGET, Math.Rounding.Ceil); uint256 provingCost = getProvingCost(); uint256 congestionMultiplier = FeeMath.congestionMultiplier(calcExcessMana()); uint256 total = dataCost + gasCost + provingCost; - uint256 congestionCost = - (total * congestionMultiplier / FeeMath.MINIMUM_CONGESTION_MULTIPLIER) - total; + uint256 congestionCost = (total * congestionMultiplier / MINIMUM_CONGESTION_MULTIPLIER) - total; uint256 feeAssetPrice = _inFeeAsset ? 
getFeeAssetPrice() : 1e9; @@ -91,7 +96,7 @@ contract MinimalFeeModel is TimeFns { function calcExcessMana() internal view returns (uint256) { FeeHeader storage parent = feeHeaders[populatedThrough]; - return (parent.excess_mana + parent.mana_used).clampedAdd(-int256(FeeMath.MANA_TARGET)); + return (parent.excess_mana + parent.mana_used).clampedAdd(-int256(MANA_TARGET)); } function addSlot(OracleInput memory _oracleInput) public { diff --git a/l1-contracts/test/fees/MinimalFeeModel.t.sol b/l1-contracts/test/fees/MinimalFeeModel.t.sol index cbc0149deff..f2b79e42f24 100644 --- a/l1-contracts/test/fees/MinimalFeeModel.t.sol +++ b/l1-contracts/test/fees/MinimalFeeModel.t.sol @@ -1,7 +1,7 @@ // SPDX-License-Identifier: UNLICENSED pragma solidity >=0.8.27; -import {OracleInput, FeeMath} from "@aztec/core/libraries/FeeMath.sol"; +import {OracleInput, FeeMath} from "@aztec/core/libraries/RollupLibs/FeeMath.sol"; import { FeeModelTestPoints, TestPoint, @@ -12,6 +12,10 @@ import { import {MinimalFeeModel} from "./MinimalFeeModel.sol"; import {Errors} from "@aztec/core/libraries/Errors.sol"; import {SlotLib, Slot} from "@aztec/core/libraries/TimeMath.sol"; +import { + MAX_PROVING_COST_MODIFIER, + MAX_FEE_ASSET_PRICE_MODIFIER +} from "@aztec/core/libraries/RollupLibs/FeeMath.sol"; contract MinimalFeeModelTest is FeeModelTestPoints { using SlotLib for Slot; @@ -78,8 +82,8 @@ contract MinimalFeeModelTest is FeeModelTestPoints { } function test_invalidOracleInput() public { - uint256 provingBoundary = FeeMath.MAX_PROVING_COST_MODIFIER + 1; - uint256 feeAssetPriceBoundary = FeeMath.MAX_FEE_ASSET_PRICE_MODIFIER + 1; + uint256 provingBoundary = MAX_PROVING_COST_MODIFIER + 1; + uint256 feeAssetPriceBoundary = MAX_FEE_ASSET_PRICE_MODIFIER + 1; vm.expectRevert(abi.encodeWithSelector(Errors.FeeMath__InvalidProvingCostModifier.selector)); model.addSlot( diff --git a/l1-contracts/test/governance/coin-issuer/Base.t.sol b/l1-contracts/test/governance/coin-issuer/Base.t.sol index b2812e4c7e9..ada73d4d4ac 100644 --- a/l1-contracts/test/governance/coin-issuer/Base.t.sol +++ b/l1-contracts/test/governance/coin-issuer/Base.t.sol @@ -14,7 +14,9 @@ contract CoinIssuerBase is Test { CoinIssuer internal nom; function _deploy(uint256 _rate) internal { - token = IMintableERC20(address(new TestERC20())); + TestERC20 testERC20 = new TestERC20("test", "TEST", address(this)); + token = IMintableERC20(address(testERC20)); nom = new CoinIssuer(token, _rate, address(this)); + testERC20.transferOwnership(address(nom)); } } diff --git a/l1-contracts/test/governance/governance/base.t.sol b/l1-contracts/test/governance/governance/base.t.sol index 28e51b0e934..cc5a9878a06 100644 --- a/l1-contracts/test/governance/governance/base.t.sol +++ b/l1-contracts/test/governance/governance/base.t.sol @@ -35,7 +35,7 @@ contract GovernanceBase is TestBase { uint256 proposalId; function setUp() public virtual { - token = IMintableERC20(address(new TestERC20())); + token = IMintableERC20(address(new TestERC20("test", "TEST", address(this)))); registry = new Registry(address(this)); governanceProposer = new GovernanceProposer(registry, 677, 1000); diff --git a/l1-contracts/test/governance/reward-distributor/Base.t.sol b/l1-contracts/test/governance/reward-distributor/Base.t.sol index 8b3c6c511b1..4c6014d5a2c 100644 --- a/l1-contracts/test/governance/reward-distributor/Base.t.sol +++ b/l1-contracts/test/governance/reward-distributor/Base.t.sol @@ -16,7 +16,7 @@ contract RewardDistributorBase is Test { RewardDistributor internal rewardDistributor; 
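// TestERC20 now takes a name, a symbol, and an owner; the test contract takes ownership so it keeps the right to mint under the new owner check.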
function setUp() public { - token = IMintableERC20(address(new TestERC20())); + token = IMintableERC20(address(new TestERC20("test", "TEST", address(this)))); registry = new Registry(address(this)); rewardDistributor = new RewardDistributor(token, registry, address(this)); } diff --git a/l1-contracts/test/governance/scenario/UpgradeGovernanceProposerTest.t.sol b/l1-contracts/test/governance/scenario/UpgradeGovernanceProposerTest.t.sol index aea558c9f56..8504653da17 100644 --- a/l1-contracts/test/governance/scenario/UpgradeGovernanceProposerTest.t.sol +++ b/l1-contracts/test/governance/scenario/UpgradeGovernanceProposerTest.t.sol @@ -18,6 +18,8 @@ import {ProposalLib} from "@aztec/governance/libraries/ProposalLib.sol"; import {Errors} from "@aztec/governance/libraries/Errors.sol"; import {NewGovernanceProposerPayload} from "./NewGovernanceProposerPayload.sol"; import {RewardDistributor} from "@aztec/governance/RewardDistributor.sol"; +import {CheatDepositArgs} from "@aztec/core/interfaces/IRollup.sol"; +import {TestConstants} from "../../harnesses/TestConstants.sol"; /** * @title UpgradeGovernanceProposerTest @@ -44,32 +46,36 @@ contract UpgradeGovernanceProposerTest is TestBase { address internal constant EMPEROR = address(uint160(bytes20("EMPEROR"))); function setUp() external { - token = IMintableERC20(address(new TestERC20())); + token = IMintableERC20(address(new TestERC20("test", "TEST", address(this)))); registry = new Registry(address(this)); governanceProposer = new GovernanceProposer(registry, 7, 10); governance = new Governance(token, address(governanceProposer)); - address[] memory initialValidators = new address[](VALIDATOR_COUNT); + CheatDepositArgs[] memory initialValidators = new CheatDepositArgs[](VALIDATOR_COUNT); for (uint256 i = 1; i <= VALIDATOR_COUNT; i++) { uint256 privateKey = uint256(keccak256(abi.encode("validator", i))); address validator = vm.addr(privateKey); privateKeys[validator] = privateKey; validators[i - 1] = validator; - initialValidators[i - 1] = validator; + initialValidators[i - 1] = CheatDepositArgs({ + attester: validator, + proposer: validator, + withdrawer: validator, + amount: TestConstants.AZTEC_MINIMUM_STAKE + }); } RewardDistributor rewardDistributor = new RewardDistributor(token, registry, address(this)); rollup = new Rollup( - new MockFeeJuicePortal(), - rewardDistributor, - bytes32(0), - bytes32(0), - address(this), - initialValidators + new MockFeeJuicePortal(), rewardDistributor, token, bytes32(0), bytes32(0), address(this) ); + token.mint(address(this), TestConstants.AZTEC_MINIMUM_STAKE * VALIDATOR_COUNT); + token.approve(address(rollup), TestConstants.AZTEC_MINIMUM_STAKE * VALIDATOR_COUNT); + rollup.cheat__InitialiseValidatorSet(initialValidators); + registry.upgrade(address(rollup)); registry.transferOwnership(address(governance)); diff --git a/l1-contracts/test/harnesses/Leonidas.sol b/l1-contracts/test/harnesses/Leonidas.sol index f19deae2550..a7c78f304b1 100644 --- a/l1-contracts/test/harnesses/Leonidas.sol +++ b/l1-contracts/test/harnesses/Leonidas.sol @@ -4,11 +4,14 @@ pragma solidity >=0.8.27; import {Leonidas as RealLeonidas} from "@aztec/core/Leonidas.sol"; import {TestConstants} from "./TestConstants.sol"; +import {TestERC20} from "@aztec/mock/TestERC20.sol"; contract Leonidas is RealLeonidas { constructor(address _ares) RealLeonidas( _ares, + new TestERC20("test", "TEST", address(this)), + 100e18, TestConstants.AZTEC_SLOT_DURATION, TestConstants.AZTEC_EPOCH_DURATION, TestConstants.AZTEC_TARGET_COMMITTEE_SIZE diff --git 
a/l1-contracts/test/harnesses/Rollup.sol b/l1-contracts/test/harnesses/Rollup.sol index 27f78d3864d..41d72b20de9 100644 --- a/l1-contracts/test/harnesses/Rollup.sol +++ b/l1-contracts/test/harnesses/Rollup.sol @@ -6,28 +6,30 @@ import {IFeeJuicePortal} from "@aztec/core/interfaces/IFeeJuicePortal.sol"; import {IRewardDistributor} from "@aztec/governance/interfaces/IRewardDistributor.sol"; import {Rollup as RealRollup, Config} from "@aztec/core/Rollup.sol"; import {TestConstants} from "./TestConstants.sol"; +import {IERC20} from "@oz/token/ERC20/IERC20.sol"; contract Rollup is RealRollup { constructor( IFeeJuicePortal _fpcJuicePortal, IRewardDistributor _rewardDistributor, + IERC20 _stakingAsset, bytes32 _vkTreeRoot, bytes32 _protocolContractTreeRoot, - address _ares, - address[] memory _validators + address _ares ) RealRollup( _fpcJuicePortal, _rewardDistributor, + _stakingAsset, _vkTreeRoot, _protocolContractTreeRoot, _ares, - _validators, Config({ aztecSlotDuration: TestConstants.AZTEC_SLOT_DURATION, aztecEpochDuration: TestConstants.AZTEC_EPOCH_DURATION, targetCommitteeSize: TestConstants.AZTEC_TARGET_COMMITTEE_SIZE, - aztecEpochProofClaimWindowInL2Slots: TestConstants.AZTEC_EPOCH_PROOF_CLAIM_WINDOW_IN_L2_SLOTS + aztecEpochProofClaimWindowInL2Slots: TestConstants.AZTEC_EPOCH_PROOF_CLAIM_WINDOW_IN_L2_SLOTS, + minimumStake: TestConstants.AZTEC_MINIMUM_STAKE }) ) {} diff --git a/l1-contracts/test/harnesses/TestConstants.sol b/l1-contracts/test/harnesses/TestConstants.sol index 4a79b3c97e7..371a2d8f594 100644 --- a/l1-contracts/test/harnesses/TestConstants.sol +++ b/l1-contracts/test/harnesses/TestConstants.sol @@ -9,4 +9,5 @@ library TestConstants { uint256 internal constant AZTEC_EPOCH_DURATION = 16; uint256 internal constant AZTEC_TARGET_COMMITTEE_SIZE = 48; uint256 internal constant AZTEC_EPOCH_PROOF_CLAIM_WINDOW_IN_L2_SLOTS = 13; + uint256 internal constant AZTEC_MINIMUM_STAKE = 100e18; } diff --git a/l1-contracts/test/portals/TokenPortal.t.sol b/l1-contracts/test/portals/TokenPortal.t.sol index da7af0eb534..c043d69d0cd 100644 --- a/l1-contracts/test/portals/TokenPortal.t.sol +++ b/l1-contracts/test/portals/TokenPortal.t.sol @@ -21,9 +21,11 @@ import {TestERC20} from "@aztec/mock/TestERC20.sol"; import {NaiveMerkle} from "../merkle/Naive.sol"; import {MockFeeJuicePortal} from "@aztec/mock/MockFeeJuicePortal.sol"; import {RewardDistributor} from "@aztec/governance/RewardDistributor.sol"; +import {stdStorage, StdStorage} from "forge-std/Test.sol"; contract TokenPortalTest is Test { using Hash for DataStructures.L1ToL2Msg; + using stdStorage for StdStorage; event MessageConsumed(bytes32 indexed messageHash, address indexed recipient); @@ -58,15 +60,10 @@ contract TokenPortalTest is Test { function setUp() public { registry = new Registry(address(this)); - testERC20 = new TestERC20(); + testERC20 = new TestERC20("test", "TEST", address(this)); rewardDistributor = new RewardDistributor(testERC20, registry, address(this)); rollup = new Rollup( - new MockFeeJuicePortal(), - rewardDistributor, - bytes32(0), - bytes32(0), - address(this), - new address[](0) + new MockFeeJuicePortal(), rewardDistributor, testERC20, bytes32(0), bytes32(0), address(this) ); inbox = rollup.INBOX(); outbox = rollup.OUTBOX(); @@ -77,7 +74,7 @@ contract TokenPortalTest is Test { tokenPortal.initialize(address(registry), address(testERC20), l2TokenAddress); // Modify the proven block count - vm.store(address(rollup), bytes32(uint256(9)), bytes32(l2BlockNumber)); + 
stdstore.target(address(rollup)).sig("getProvenBlockNumber()").checked_write(l2BlockNumber); assertEq(rollup.getProvenBlockNumber(), l2BlockNumber); vm.deal(address(this), 100 ether); diff --git a/l1-contracts/test/portals/UniswapPortal.t.sol b/l1-contracts/test/portals/UniswapPortal.t.sol index ac646e17bac..fc91ef5d158 100644 --- a/l1-contracts/test/portals/UniswapPortal.t.sol +++ b/l1-contracts/test/portals/UniswapPortal.t.sol @@ -55,12 +55,7 @@ contract UniswapPortalTest is Test { registry = new Registry(address(this)); RewardDistributor rewardDistributor = new RewardDistributor(DAI, registry, address(this)); rollup = new Rollup( - new MockFeeJuicePortal(), - rewardDistributor, - bytes32(0), - bytes32(0), - address(this), - new address[](0) + new MockFeeJuicePortal(), rewardDistributor, DAI, bytes32(0), bytes32(0), address(this) ); registry.upgrade(address(rollup)); diff --git a/l1-contracts/test/prover-coordination/ProofCommitmentEscrow.t.sol b/l1-contracts/test/prover-coordination/ProofCommitmentEscrow.t.sol index 45178dc9b9e..18c07487dfe 100644 --- a/l1-contracts/test/prover-coordination/ProofCommitmentEscrow.t.sol +++ b/l1-contracts/test/prover-coordination/ProofCommitmentEscrow.t.sol @@ -31,7 +31,7 @@ contract TestProofCommitmentEscrow is Test { } function setUp() public { - TOKEN = new TestERC20(); + TOKEN = new TestERC20("test", "TEST", address(this)); ESCROW = new ProofCommitmentEscrow( TOKEN, address(this), TestConstants.AZTEC_SLOT_DURATION, TestConstants.AZTEC_EPOCH_DURATION ); diff --git a/l1-contracts/test/sparta/Sparta.t.sol b/l1-contracts/test/sparta/Sparta.t.sol index 9dff6c5babb..dc5340a39ae 100644 --- a/l1-contracts/test/sparta/Sparta.t.sol +++ b/l1-contracts/test/sparta/Sparta.t.sol @@ -6,21 +6,25 @@ import {DecoderBase} from "../decoders/Base.sol"; import {DataStructures} from "@aztec/core/libraries/DataStructures.sol"; import {Constants} from "@aztec/core/libraries/ConstantsGen.sol"; -import {SignatureLib} from "@aztec/core/libraries/crypto/SignatureLib.sol"; +import {Signature} from "@aztec/core/libraries/crypto/SignatureLib.sol"; import {Inbox} from "@aztec/core/messagebridge/Inbox.sol"; import {Outbox} from "@aztec/core/messagebridge/Outbox.sol"; import {Errors} from "@aztec/core/libraries/Errors.sol"; import {Registry} from "@aztec/governance/Registry.sol"; import {Rollup} from "../harnesses/Rollup.sol"; -import {Leonidas} from "../harnesses/Leonidas.sol"; +import {Leonidas} from "@aztec/core/Leonidas.sol"; import {NaiveMerkle} from "../merkle/Naive.sol"; import {MerkleTestUtil} from "../merkle/TestUtil.sol"; import {TestERC20} from "@aztec/mock/TestERC20.sol"; import {TxsDecoderHelper} from "../decoders/helpers/TxsDecoderHelper.sol"; import {MessageHashUtils} from "@oz/utils/cryptography/MessageHashUtils.sol"; import {MockFeeJuicePortal} from "@aztec/mock/MockFeeJuicePortal.sol"; -import {ProposeArgs, OracleInput, ProposeLib} from "@aztec/core/libraries/ProposeLib.sol"; +import { + ProposeArgs, OracleInput, ProposeLib +} from "@aztec/core/libraries/RollupLibs/ProposeLib.sol"; +import {TestConstants} from "../harnesses/TestConstants.sol"; +import {CheatDepositArgs} from "@aztec/core/interfaces/IRollup.sol"; import {Slot, Epoch, SlotLib, EpochLib} from "@aztec/core/libraries/TimeMath.sol"; import {RewardDistributor} from "@aztec/governance/RewardDistributor.sol"; @@ -48,8 +52,10 @@ contract SpartaTest is DecoderBase { TxsDecoderHelper internal txsHelper; TestERC20 internal testERC20; RewardDistributor internal rewardDistributor; - SignatureLib.Signature internal 
emptySignature; - mapping(address validator => uint256 privateKey) internal privateKeys; + Signature internal emptySignature; + mapping(address attester => uint256 privateKey) internal attesterPrivateKeys; + mapping(address proposer => uint256 privateKey) internal proposerPrivateKeys; + mapping(address proposer => address attester) internal proposerToAttester; mapping(address => bool) internal _seenValidators; mapping(address => bool) internal _seenCommittee; @@ -59,7 +65,15 @@ contract SpartaTest is DecoderBase { modifier setup(uint256 _validatorCount) { string memory _name = "mixed_block_1"; { - Leonidas leonidas = new Leonidas(address(1)); + Leonidas leonidas = new Leonidas( + address(1), + testERC20, + TestConstants.AZTEC_MINIMUM_STAKE, + TestConstants.AZTEC_SLOT_DURATION, + TestConstants.AZTEC_EPOCH_DURATION, + TestConstants.AZTEC_TARGET_COMMITTEE_SIZE + ); + DecoderBase.Full memory full = load(_name); uint256 slotNumber = full.block.decodedHeader.globalVariables.slotNumber; uint256 initialTime = @@ -67,25 +81,37 @@ contract SpartaTest is DecoderBase { vm.warp(initialTime); } - address[] memory initialValidators = new address[](_validatorCount); + CheatDepositArgs[] memory initialValidators = new CheatDepositArgs[](_validatorCount); + for (uint256 i = 1; i < _validatorCount + 1; i++) { - uint256 privateKey = uint256(keccak256(abi.encode("validator", i))); - address validator = vm.addr(privateKey); - privateKeys[validator] = privateKey; - initialValidators[i - 1] = validator; + uint256 attesterPrivateKey = uint256(keccak256(abi.encode("attester", i))); + address attester = vm.addr(attesterPrivateKey); + attesterPrivateKeys[attester] = attesterPrivateKey; + uint256 proposerPrivateKey = uint256(keccak256(abi.encode("proposer", i))); + address proposer = vm.addr(proposerPrivateKey); + proposerPrivateKeys[proposer] = proposerPrivateKey; + + proposerToAttester[proposer] = attester; + + initialValidators[i - 1] = CheatDepositArgs({ + attester: attester, + proposer: proposer, + withdrawer: address(this), + amount: TestConstants.AZTEC_MINIMUM_STAKE + }); } - testERC20 = new TestERC20(); + testERC20 = new TestERC20("test", "TEST", address(this)); Registry registry = new Registry(address(this)); rewardDistributor = new RewardDistributor(testERC20, registry, address(this)); rollup = new Rollup( - new MockFeeJuicePortal(), - rewardDistributor, - bytes32(0), - bytes32(0), - address(this), - initialValidators + new MockFeeJuicePortal(), rewardDistributor, testERC20, bytes32(0), bytes32(0), address(this) ); + + testERC20.mint(address(this), TestConstants.AZTEC_MINIMUM_STAKE * _validatorCount); + testERC20.approve(address(rollup), TestConstants.AZTEC_MINIMUM_STAKE * _validatorCount); + rollup.cheat__InitialiseValidatorSet(initialValidators); + inbox = Inbox(address(rollup.INBOX())); outbox = Outbox(address(rollup.OUTBOX())); @@ -95,15 +121,15 @@ contract SpartaTest is DecoderBase { _; } - function testInitialCommitteMatch() public setup(4) { - address[] memory validators = rollup.getValidators(); + function testInitialCommitteeMatch() public setup(4) { + address[] memory attesters = rollup.getAttesters(); address[] memory committee = rollup.getCurrentEpochCommittee(); assertEq(rollup.getCurrentEpoch(), 0); - assertEq(validators.length, 4, "Invalid validator set size"); + assertEq(attesters.length, 4, "Invalid validator set size"); assertEq(committee.length, 4, "invalid committee set size"); - for (uint256 i = 0; i < validators.length; i++) { - _seenValidators[validators[i]] = true; + for (uint256 i = 
0; i < attesters.length; i++) { + _seenValidators[attesters[i]] = true; } for (uint256 i = 0; i < committee.length; i++) { @@ -112,8 +138,10 @@ contract SpartaTest is DecoderBase { _seenCommittee[committee[i]] = true; } + // The proposer is not necessarily an attester, so we have to map it back. We can do this here + // because we created a 1:1 link. In practice, there could be multiple attesters for the same proposer. address proposer = rollup.getCurrentProposer(); - assertTrue(_seenCommittee[proposer]); + assertTrue(_seenCommittee[proposerToAttester[proposer]]); } function testProposerForNonSetupEpoch(uint8 _epochsToJump) public setup(4) { @@ -127,14 +155,18 @@ contract SpartaTest is DecoderBase { address expectedProposer = rollup.getCurrentProposer(); // Add a validator which will also setup the epoch - rollup.addValidator(address(0xdead)); + testERC20.mint(address(this), TestConstants.AZTEC_MINIMUM_STAKE); + testERC20.approve(address(rollup), TestConstants.AZTEC_MINIMUM_STAKE); + rollup.deposit( + address(0xdead), address(0xdead), address(0xdead), TestConstants.AZTEC_MINIMUM_STAKE + ); address actualProposer = rollup.getCurrentProposer(); assertEq(expectedProposer, actualProposer, "Invalid proposer"); } function testValidatorSetLargerThanCommittee(bool _insufficientSigs) public setup(100) { - assertGt(rollup.getValidators().length, rollup.TARGET_COMMITTEE_SIZE(), "Not enough validators"); + assertGt(rollup.getAttesters().length, rollup.TARGET_COMMITTEE_SIZE(), "Not enough validators"); uint256 committeeSize = rollup.TARGET_COMMITTEE_SIZE() * 2 / 3 + (_insufficientSigs ? 0 : 1); _testBlock("mixed_block_1", _insufficientSigs, committeeSize, false); @@ -199,7 +231,7 @@ contract SpartaTest is DecoderBase { address[] memory validators = rollup.getEpochCommittee(rollup.getCurrentEpoch()); ree.needed = validators.length * 2 / 3 + 1; - SignatureLib.Signature[] memory signatures = new SignatureLib.Signature[](_signatureCount); + Signature[] memory signatures = new Signature[](_signatureCount); bytes32 digest = ProposeLib.digest(args); for (uint256 i = 0; i < _signatureCount; i++) { @@ -239,7 +271,7 @@ contract SpartaTest is DecoderBase { return; } } else { - SignatureLib.Signature[] memory signatures = new SignatureLib.Signature[](0); + Signature[] memory signatures = new Signature[](0); rollup.propose(args, signatures, full.block.body); } @@ -298,13 +330,13 @@ contract SpartaTest is DecoderBase { function createSignature(address _signer, bytes32 _digest) internal view - returns (SignatureLib.Signature memory) + returns (Signature memory) { - uint256 privateKey = privateKeys[_signer]; + uint256 privateKey = attesterPrivateKeys[_signer]; bytes32 digest = _digest.toEthSignedMessageHash(); (uint8 v, bytes32 r, bytes32 s) = vm.sign(privateKey, digest); - return SignatureLib.Signature({isEmpty: false, v: v, r: r, s: s}); + return Signature({isEmpty: false, v: v, r: r, s: s}); } } diff --git a/l1-contracts/test/staking/StakingCheater.sol b/l1-contracts/test/staking/StakingCheater.sol index 224c732c6c9..ba89e1e07ab 100644 --- a/l1-contracts/test/staking/StakingCheater.sol +++ b/l1-contracts/test/staking/StakingCheater.sol @@ -14,14 +14,14 @@ contract StakingCheater is Staking { {} function cheat__SetStatus(address _attester, Status _status) external { - info[_attester].status = _status; + stakingStore.info[_attester].status = _status; } function cheat__AddAttester(address _attester) external { - attesters.add(_attester); + stakingStore.attesters.add(_attester); } function cheat__RemoveAttester(address
_attester) external { - attesters.remove(_attester); + stakingStore.attesters.remove(_attester); } } diff --git a/l1-contracts/test/staking/base.t.sol b/l1-contracts/test/staking/base.t.sol index e47b6e8d24a..6aa8eaa8ca4 100644 --- a/l1-contracts/test/staking/base.t.sol +++ b/l1-contracts/test/staking/base.t.sol @@ -19,7 +19,7 @@ contract StakingBase is TestBase { address internal constant SLASHER = address(bytes20("SLASHER")); function setUp() public virtual { - stakingAsset = new TestERC20(); + stakingAsset = new TestERC20("test", "TEST", address(this)); staking = new StakingCheater(SLASHER, stakingAsset, MINIMUM_STAKE); } } diff --git a/noir-projects/Earthfile b/noir-projects/Earthfile index 4627c747703..0a876ac39f7 100644 --- a/noir-projects/Earthfile +++ b/noir-projects/Earthfile @@ -20,6 +20,7 @@ source: RUN yarn COPY mega_honk_circuits.json . + COPY ivc_integration_circuits.json . COPY --dir aztec-nr noir-contracts noir-protocol-circuits mock-protocol-circuits scripts . build-contracts: diff --git a/noir-projects/aztec-nr/.gitrepo b/noir-projects/aztec-nr/.gitrepo index 7405293e1c3..c3fc2d761dd 100644 --- a/noir-projects/aztec-nr/.gitrepo +++ b/noir-projects/aztec-nr/.gitrepo @@ -6,7 +6,7 @@ [subrepo] remote = https://github.com/AztecProtocol/aztec-nr branch = master - commit = cd730ebea17805c9c25886d7d983d462a7232a8a + commit = fff58d7de36e6a81eec44ce0baad9eb29d4e70b4 method = merge cmdver = 0.4.6 - parent = 52936248d8455885f3576ccc4dec904a5d941ab4 + parent = 7ec0456e6f7a6227ae67c4b3b80668fb400d1570 diff --git a/noir-projects/aztec-nr/authwit/src/auth.nr b/noir-projects/aztec-nr/authwit/src/auth.nr index edbc461034c..6c21bfaecdb 100644 --- a/noir-projects/aztec-nr/authwit/src/auth.nr +++ b/noir-projects/aztec-nr/authwit/src/auth.nr @@ -193,12 +193,12 @@ use dep::aztec::protocol_types::{ global IS_VALID_SELECTOR: Field = 0x47dacd73; // 4 last bytes of poseidon2_hash_bytes("IS_VALID()") /** - * Assert that `on_behalf_of` have authorized the current call with a valid authentication witness + * Assert that `on_behalf_of` has authorized the current call with a valid authentication witness * - * Computing the `inner_hash` using the `msg_sender`, `selector` and `args_hash` and then making a call out to the + * Compute the `inner_hash` using the `msg_sender`, `selector` and `args_hash` and then make a call out to the * `on_behalf_of` contract to verify that the `inner_hash` is valid. * - * @param on_behalf_of The address that have authorized the current call + * @param on_behalf_of The address that has allegedly authorized the current call */ // docs:start:assert_current_call_valid_authwit pub fn assert_current_call_valid_authwit(context: &mut PrivateContext, on_behalf_of: AztecAddress) { @@ -217,7 +217,7 @@ pub fn assert_current_call_valid_authwit(context: &mut PrivateContext, on_behalf * Used as an internal function for `assert_current_call_valid_authwit` and can be used as a standalone function when * the `inner_hash` is from a different source, e.g., say a block of text etc. 
* - * @param on_behalf_of The address that have authorized the current call + * @param on_behalf_of The address that has allegedly authorized the current call * @param inner_hash The hash of the message to authorize */ pub fn assert_inner_hash_valid_authwit( @@ -235,21 +235,21 @@ pub fn assert_inner_hash_valid_authwit( .unpack_into(); assert(result == IS_VALID_SELECTOR, "Message not authorized by account"); // Compute the nullifier, similar computation to the outer hash, but without the chain_id and version. - // Those should already be handled in the verification, so we just need something to nullify, that allow same inner_hash for multiple actors. + // Those should already be handled in the verification, so we just need something to nullify, that allows the same inner_hash for multiple actors. let nullifier = compute_authwit_nullifier(on_behalf_of, inner_hash); context.push_nullifier(nullifier); } /** - * Assert that `on_behalf_of` have authorized the current call in the authentication registry + * Assert that `on_behalf_of` has authorized the current call in the authentication registry * - * Computing the `inner_hash` using the `msg_sender`, `selector` and `args_hash` and then making a call out to the + * Compute the `inner_hash` using the `msg_sender`, `selector` and `args_hash` and then make a call out to the * `on_behalf_of` contract to verify that the `inner_hash` is valid. * * Note that the authentication registry will take the `msg_sender` into account as the consumer, so this will only * work if the `msg_sender` is the same as the `consumer` when the `message_hash` was inserted into the registry. * - * @param on_behalf_of The address that have authorized the current call + * @param on_behalf_of The address that has allegedly authorized the current call */ // docs:start:assert_current_call_valid_authwit_public pub unconstrained fn assert_current_call_valid_authwit_public( @@ -266,15 +266,15 @@ pub unconstrained fn assert_current_call_valid_authwit_public( // docs:end:assert_current_call_valid_authwit_public /** - * Assert that `on_behalf_of` have authorized a speicifc `inner_hash` in the authentication registry + * Assert that `on_behalf_of` has authorized a specific `inner_hash` in the authentication registry * - * Computing the `inner_hash` using the `msg_sender`, `selector` and `args_hash` and then making a call out to the + * Compute the `inner_hash` using the `msg_sender`, `selector` and `args_hash` and then make a call out to the * `on_behalf_of` contract to verify that the `inner_hash` is valid. * * Note that the authentication registry will take the `msg_sender` into account as the consumer, so this will only * work if the `msg_sender` is the same as the `consumer` when the `message_hash` was inserted into the registry. * - * @param on_behalf_of The address that have authorized the `inner_hash` + * @param on_behalf_of The address that has allegedly authorized the `inner_hash` */ pub unconstrained fn assert_inner_hash_valid_authwit_public( context: &mut PublicContext, @@ -334,11 +334,11 @@ pub fn compute_inner_authwit_hash(args: [Field; N]) -> Field { } /** - * Computs the `authwit_nullifier` for a specific `on_behalf_of` and `inner_hash` + * Computes the `authwit_nullifier` for a specific `on_behalf_of` and `inner_hash` * * Using the `on_behalf_of` and the `inner_hash` to ensure that the nullifier is siloed for a specific `on_behalf_of`. 
* - * @param on_behalf_of The address that have authorized the `inner_hash` + * @param on_behalf_of The address that has authorized the `inner_hash` * @param inner_hash The hash of the message to authorize */ pub fn compute_authwit_nullifier(on_behalf_of: AztecAddress, inner_hash: Field) -> Field { diff --git a/noir-projects/ivc_integration_circuits.json b/noir-projects/ivc_integration_circuits.json new file mode 100644 index 00000000000..01971872d1f --- /dev/null +++ b/noir-projects/ivc_integration_circuits.json @@ -0,0 +1,8 @@ +[ + "mock_private_kernel_init", + "mock_private_kernel_inner", + "mock_private_kernel_reset.*", + "mock_private_kernel_tail.*", + "app_creator", + "app_reader" +] \ No newline at end of file diff --git a/noir-projects/mega_honk_circuits.json b/noir-projects/mega_honk_circuits.json index 37b25115596..1db696b11e6 100644 --- a/noir-projects/mega_honk_circuits.json +++ b/noir-projects/mega_honk_circuits.json @@ -1,6 +1,6 @@ [ - "private_kernel_init", - "private_kernel_inner", - "private_kernel_reset.*", - "private_kernel_tail.*" + "^private_kernel_init", + "^private_kernel_inner", + "^private_kernel_reset.*", + "^private_kernel_tail.*" ] \ No newline at end of file diff --git a/noir-projects/mock-protocol-circuits/crates/mock-private-kernel-init/src/main.nr b/noir-projects/mock-protocol-circuits/crates/mock-private-kernel-init/src/main.nr index 9a7b6ca0d63..f3ce82e3a98 100644 --- a/noir-projects/mock-protocol-circuits/crates/mock-private-kernel-init/src/main.nr +++ b/noir-projects/mock-protocol-circuits/crates/mock-private-kernel-init/src/main.nr @@ -1,11 +1,15 @@ use dep::mock_types::{ - AppPublicInputs, PrivateKernelPublicInputs, PrivateKernelPublicInputsBuilder, TxRequest, + AppPublicInputs, CLIENT_IVC_VERIFICATION_KEY_LENGTH_IN_FIELDS, PrivateKernelPublicInputs, + PrivateKernelPublicInputsBuilder, PROOF_TYPE_OINK, TxRequest, }; fn main( tx: TxRequest, app_inputs: call_data(1) AppPublicInputs, + app_vk: [Field; CLIENT_IVC_VERIFICATION_KEY_LENGTH_IN_FIELDS], ) -> return_data PrivateKernelPublicInputs { + std::verify_proof_with_type(app_vk, [], [], 0, PROOF_TYPE_OINK); + let mut private_kernel_inputs = PrivateKernelPublicInputsBuilder::from_tx(tx); private_kernel_inputs.ingest_app_inputs(app_inputs); private_kernel_inputs.finish() diff --git a/noir-projects/mock-protocol-circuits/crates/mock-private-kernel-inner/src/main.nr b/noir-projects/mock-protocol-circuits/crates/mock-private-kernel-inner/src/main.nr index 4dee3d46e75..707dab8d84f 100644 --- a/noir-projects/mock-protocol-circuits/crates/mock-private-kernel-inner/src/main.nr +++ b/noir-projects/mock-protocol-circuits/crates/mock-private-kernel-inner/src/main.nr @@ -1,9 +1,17 @@ -use dep::mock_types::{AppPublicInputs, PrivateKernelPublicInputs, PrivateKernelPublicInputsBuilder}; +use dep::mock_types::{ + AppPublicInputs, CLIENT_IVC_VERIFICATION_KEY_LENGTH_IN_FIELDS, PrivateKernelPublicInputs, + PrivateKernelPublicInputsBuilder, PROOF_TYPE_PG, +}; fn main( prev_kernel_public_inputs: call_data(0) PrivateKernelPublicInputs, + kernel_vk: [Field; CLIENT_IVC_VERIFICATION_KEY_LENGTH_IN_FIELDS], app_inputs: call_data(1) AppPublicInputs, + app_vk: [Field; CLIENT_IVC_VERIFICATION_KEY_LENGTH_IN_FIELDS], ) -> return_data PrivateKernelPublicInputs { + std::verify_proof_with_type(kernel_vk, [], [], 0, PROOF_TYPE_PG); + std::verify_proof_with_type(app_vk, [], [], 0, PROOF_TYPE_PG); + let mut private_kernel_inputs = PrivateKernelPublicInputsBuilder::from_previous_kernel(prev_kernel_public_inputs); 
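// Accumulate the current app call's public inputs on top of the folded kernel state.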
private_kernel_inputs.ingest_app_inputs(app_inputs); diff --git a/noir-projects/mock-protocol-circuits/crates/mock-private-kernel-reset/src/main.nr b/noir-projects/mock-protocol-circuits/crates/mock-private-kernel-reset/src/main.nr index 6c27e065204..1444732a0e6 100644 --- a/noir-projects/mock-protocol-circuits/crates/mock-private-kernel-reset/src/main.nr +++ b/noir-projects/mock-protocol-circuits/crates/mock-private-kernel-reset/src/main.nr @@ -1,13 +1,17 @@ use dep::mock_types::{ - MAX_COMMITMENT_READ_REQUESTS_PER_TX, MAX_COMMITMENTS_PER_TX, PrivateKernelPublicInputs, + CLIENT_IVC_VERIFICATION_KEY_LENGTH_IN_FIELDS, MAX_COMMITMENT_READ_REQUESTS_PER_TX, + MAX_COMMITMENTS_PER_TX, PrivateKernelPublicInputs, PROOF_TYPE_PG, }; // Mock reset kernel that resets read requests. // It needs hints to locate the commitment that matches the read requests. fn main( mut prev_kernel_public_inputs: call_data(0) PrivateKernelPublicInputs, + kernel_vk: [Field; CLIENT_IVC_VERIFICATION_KEY_LENGTH_IN_FIELDS], commitment_read_hints: [u32; MAX_COMMITMENT_READ_REQUESTS_PER_TX], ) -> return_data PrivateKernelPublicInputs { + std::verify_proof_with_type(kernel_vk, [], [], 0, PROOF_TYPE_PG); + for i in 0..MAX_COMMITMENT_READ_REQUESTS_PER_TX { if commitment_read_hints[i] != MAX_COMMITMENTS_PER_TX { assert_eq( diff --git a/noir-projects/mock-protocol-circuits/crates/mock-private-kernel-tail/src/main.nr b/noir-projects/mock-protocol-circuits/crates/mock-private-kernel-tail/src/main.nr index acac4a95543..082dd428a22 100644 --- a/noir-projects/mock-protocol-circuits/crates/mock-private-kernel-tail/src/main.nr +++ b/noir-projects/mock-protocol-circuits/crates/mock-private-kernel-tail/src/main.nr @@ -1,11 +1,15 @@ use dep::mock_types::{ - KernelPublicInputs, MAX_COMMITMENT_READ_REQUESTS_PER_TX, PrivateKernelPublicInputs, + CLIENT_IVC_VERIFICATION_KEY_LENGTH_IN_FIELDS, KernelPublicInputs, + MAX_COMMITMENT_READ_REQUESTS_PER_TX, PrivateKernelPublicInputs, PROOF_TYPE_PG, }; // The tail kernel finishes the client IVC chain, exposing the final public inputs with no remaining calls or unfulfilled read requests.
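// As in the other mock kernels, it now also verifies the previous kernel's proof (PROOF_TYPE_PG) against the passed-in verification key before exposing the outputs.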
fn main( prev_kernel_public_inputs: call_data(0) PrivateKernelPublicInputs, + kernel_vk: [Field; CLIENT_IVC_VERIFICATION_KEY_LENGTH_IN_FIELDS], ) -> pub KernelPublicInputs { + std::verify_proof_with_type(kernel_vk, [], [], 0, PROOF_TYPE_PG); + assert_eq(prev_kernel_public_inputs.remaining_calls, 0); for i in 0..MAX_COMMITMENT_READ_REQUESTS_PER_TX { assert_eq(prev_kernel_public_inputs.read_requests[i], 0); diff --git a/noir-projects/mock-protocol-circuits/crates/mock-types/Nargo.toml b/noir-projects/mock-protocol-circuits/crates/mock-types/Nargo.toml index d5f57873b0c..e9b0542224a 100644 --- a/noir-projects/mock-protocol-circuits/crates/mock-types/Nargo.toml +++ b/noir-projects/mock-protocol-circuits/crates/mock-types/Nargo.toml @@ -4,4 +4,5 @@ type = "lib" authors = [""] compiler_version = ">=0.32.0" -[dependencies] \ No newline at end of file +[dependencies] +protocol_types = { path = "../../../noir-protocol-circuits/crates/types" } diff --git a/noir-projects/mock-protocol-circuits/crates/mock-types/src/lib.nr b/noir-projects/mock-protocol-circuits/crates/mock-types/src/lib.nr index 933e812174a..c5f69e887a0 100644 --- a/noir-projects/mock-protocol-circuits/crates/mock-types/src/lib.nr +++ b/noir-projects/mock-protocol-circuits/crates/mock-types/src/lib.nr @@ -3,6 +3,10 @@ global MAX_COMMITMENTS_PER_TX: u32 = 4; global MAX_COMMITMENT_READ_REQUESTS_PER_CALL: u32 = 2; global MAX_COMMITMENT_READ_REQUESTS_PER_TX: u32 = 4; +pub use protocol_types::constants::{ + CLIENT_IVC_VERIFICATION_KEY_LENGTH_IN_FIELDS, PROOF_TYPE_OINK, PROOF_TYPE_PG, +}; + struct TxRequest { number_of_calls: u32, } diff --git a/noir-projects/noir-contracts/contracts/escrow_contract/src/main.nr b/noir-projects/noir-contracts/contracts/escrow_contract/src/main.nr index 4aa3759433a..219e8c8dd55 100644 --- a/noir-projects/noir-contracts/contracts/escrow_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/escrow_contract/src/main.nr @@ -43,7 +43,8 @@ contract Escrow { let note = storage.owner.get_note(); assert(note.address == sender); - + // docs:start:call_function Token::at(token).transfer(recipient, amount).call(&mut context); + // docs:end:call_function } } diff --git a/noir-projects/noir-contracts/contracts/lending_contract/src/main.nr b/noir-projects/noir-contracts/contracts/lending_contract/src/main.nr index b5781af3b20..d9d965cea05 100644 --- a/noir-projects/noir-contracts/contracts/lending_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/lending_contract/src/main.nr @@ -128,9 +128,11 @@ contract Lending { on_behalf_of: Field, collateral_asset: AztecAddress, ) { + // docs:start:public_to_public_call let _ = Token::at(collateral_asset) .transfer_in_public(context.msg_sender(), context.this_address(), amount, nonce) .call(&mut context); + // docs:end:public_to_public_call let _ = Lending::at(context.this_address()) ._deposit(AztecAddress::from_field(on_behalf_of), amount, collateral_asset) .call(&mut context); diff --git a/noir-projects/noir-contracts/contracts/schnorr_account_contract/Nargo.toml b/noir-projects/noir-contracts/contracts/schnorr_account_contract/Nargo.toml index 12cf4db0fe8..1211ad63c05 100644 --- a/noir-projects/noir-contracts/contracts/schnorr_account_contract/Nargo.toml +++ b/noir-projects/noir-contracts/contracts/schnorr_account_contract/Nargo.toml @@ -7,3 +7,4 @@ type = "contract" [dependencies] aztec = { path = "../../../aztec-nr/aztec" } authwit = { path = "../../../aztec-nr/authwit" } +schnorr = { tag = "v0.1.1", git = "https://github.com/noir-lang/schnorr" } 
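Aside on the new dependency: the Nargo.toml change above (repeated below for the other schnorr account contracts) pulls in the external noir-lang/schnorr library, and the following diffs swap the old std::schnorr calls for it. A minimal sketch of the new call shape, assuming the v0.1.1 API exactly as it is used in these diffs (the [u8; 64] signature type and the wrapper name are illustrative assumptions):

use std::embedded_curve_ops::EmbeddedCurvePoint;

// Returns true iff `signature` is a valid Schnorr signature over the
// 32-byte big-endian encoding of `hash` under `pub_key`.
fn is_signature_valid(pub_key: EmbeddedCurvePoint, signature: [u8; 64], hash: Field) -> bool {
    schnorr::verify_signature(pub_key, signature, hash.to_be_bytes::<32>())
}

For callers that do not need the bool, schnorr::assert_valid_signature (used in util.nr below) asserts validity directly.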
diff --git a/noir-projects/noir-contracts/contracts/schnorr_account_contract/src/main.nr b/noir-projects/noir-contracts/contracts/schnorr_account_contract/src/main.nr index fdd886d232e..b040ba5f1fe 100644 --- a/noir-projects/noir-contracts/contracts/schnorr_account_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/schnorr_account_contract/src/main.nr @@ -6,8 +6,6 @@ use dep::aztec::macros::aztec; #[aztec] contract SchnorrAccount { - use dep::std; - use dep::authwit::{ account::AccountActions, auth::{compute_authwit_message_hash, compute_authwit_nullifier}, @@ -83,7 +81,7 @@ contract SchnorrAccount { is_infinite: false, }; // Verify signature of the payload bytes - std::schnorr::verify_signature(pub_key, signature, outer_hash.to_be_bytes::<32>()) + schnorr::verify_signature(pub_key, signature, outer_hash.to_be_bytes::<32>()) // docs:end:is_valid_impl } diff --git a/noir-projects/noir-contracts/contracts/schnorr_hardcoded_account_contract/Nargo.toml b/noir-projects/noir-contracts/contracts/schnorr_hardcoded_account_contract/Nargo.toml index 877f369a800..771cfa8fd28 100644 --- a/noir-projects/noir-contracts/contracts/schnorr_hardcoded_account_contract/Nargo.toml +++ b/noir-projects/noir-contracts/contracts/schnorr_hardcoded_account_contract/Nargo.toml @@ -7,3 +7,4 @@ type = "contract" [dependencies] aztec = { path = "../../../aztec-nr/aztec" } authwit = { path = "../../../aztec-nr/authwit" } +schnorr = { tag = "v0.1.1", git = "https://github.com/noir-lang/schnorr" } diff --git a/noir-projects/noir-contracts/contracts/schnorr_hardcoded_account_contract/src/main.nr b/noir-projects/noir-contracts/contracts/schnorr_hardcoded_account_contract/src/main.nr index 1bed3b932e7..02582e3e097 100644 --- a/noir-projects/noir-contracts/contracts/schnorr_hardcoded_account_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/schnorr_hardcoded_account_contract/src/main.nr @@ -45,7 +45,7 @@ contract SchnorrHardcodedAccount { } // Verify signature using hardcoded public key - std::schnorr::verify_signature(public_key, signature, outer_hash.to_be_bytes::<32>()) + schnorr::verify_signature(public_key, signature, outer_hash.to_be_bytes::<32>()) } // docs:end:is-valid } diff --git a/noir-projects/noir-contracts/contracts/schnorr_single_key_account_contract/Nargo.toml b/noir-projects/noir-contracts/contracts/schnorr_single_key_account_contract/Nargo.toml index 80c39efcba2..161993c5a73 100644 --- a/noir-projects/noir-contracts/contracts/schnorr_single_key_account_contract/Nargo.toml +++ b/noir-projects/noir-contracts/contracts/schnorr_single_key_account_contract/Nargo.toml @@ -7,3 +7,4 @@ type = "contract" [dependencies] aztec = { path = "../../../aztec-nr/aztec" } authwit = { path = "../../../aztec-nr/authwit" } +schnorr = { tag = "v0.1.1", git = "https://github.com/noir-lang/schnorr" } diff --git a/noir-projects/noir-contracts/contracts/schnorr_single_key_account_contract/src/util.nr b/noir-projects/noir-contracts/contracts/schnorr_single_key_account_contract/src/util.nr index a3610085cea..e77e943006e 100644 --- a/noir-projects/noir-contracts/contracts/schnorr_single_key_account_contract/src/util.nr +++ b/noir-projects/noir-contracts/contracts/schnorr_single_key_account_contract/src/util.nr @@ -1,6 +1,6 @@ use crate::auth_oracle::AuthWitness; use dep::aztec::prelude::AztecAddress; -use std::{embedded_curve_ops::EmbeddedCurvePoint, schnorr::verify_signature}; +use std::embedded_curve_ops::EmbeddedCurvePoint; pub fn recover_address(message_hash: Field, witness: AuthWitness) 
-> AztecAddress { let message_bytes: [u8; 32] = message_hash.to_be_bytes(); @@ -11,8 +11,7 @@ pub fn recover_address(message_hash: Field, witness: AuthWitness) -> AztecAddres }; // In a single key account contract we re-used ivpk_m as signing key - let verification = verify_signature(public_key, witness.signature, message_bytes); - assert(verification == true); + schnorr::assert_valid_signature(public_key, witness.signature, message_bytes); AztecAddress::compute(witness.keys, witness.partial_address) } diff --git a/noir-projects/noir-contracts/contracts/token_contract/src/main.nr b/noir-projects/noir-contracts/contracts/token_contract/src/main.nr index fad92b5675a..2d70fde20a1 100644 --- a/noir-projects/noir-contracts/contracts/token_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/token_contract/src/main.nr @@ -252,7 +252,7 @@ contract Token { } let from_ovpk_m = get_public_keys(from).ovpk_m; - // TODO: constrain encryption below - we are using unconstrained here only becuase of the following Noir issue + // TODO: constrain encryption below - we are using unconstrained here only because of the following Noir issue // https://github.com/noir-lang/noir/issues/5771 storage.balances.at(from).sub(from, U128::from_integer(amount)).emit( encode_and_encrypt_note_unconstrained(&mut context, from_ovpk_m, from, from), diff --git a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/base/public_base_rollup.nr b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/base/public_base_rollup.nr index c994027d7ec..ce7add25489 100644 --- a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/base/public_base_rollup.nr +++ b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/base/public_base_rollup.nr @@ -97,6 +97,11 @@ impl PublicBaseRollupInputs { // self.tube_data.vk_data.validate_in_vk_tree([TUBE_VK_INDEX]); } + // Warning: Fake verification! TODO(#8470) + if !dep::std::runtime::is_unconstrained() { + self.avm_proof_data.fake_verify(); + } + // TODO(#8470) // if !dep::std::runtime::is_unconstrained() { // self.avm_proof_data.verify(); diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/avm_circuit_public_inputs.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/avm_circuit_public_inputs.nr index 7affd9031c9..b88a123a1fd 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/avm_circuit_public_inputs.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/avm_circuit_public_inputs.nr @@ -21,6 +21,8 @@ use crate::{ utils::reader::Reader, }; +use std::hash::{poseidon2, poseidon2_permutation}; + pub struct AvmCircuitPublicInputs { /////////////////////////////////// // Inputs. @@ -180,6 +182,37 @@ pub struct AvmProofData { pub vk_data: VkData, } +// The number of columns for the AVM recursive verifier we want to fake, i.e., the resulting +// verify() routine below will create a similar number of gates as a real AVM recursive verifier +// with the number of columns set by this constant. +pub global DUMMY_AVM_VERIFIER_NUM_COLUMNS: u32 = 2200; + +// Current AVM recursive verifier has 9500 gates per column. +// Note that the addition of a single column in AVM recursive verifier incurs 8500 gates. +// (some additional costs are due to lookups, relations, ...). 
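+// To reproduce a similar per-column cost with Poseidon2 permutations alone, the iteration count is sized as: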
+// 78 gates per Poseidon permutation +// 9500/78 = 121.8 +pub global DUMMY_AVM_VERIFIER_NUM_ITERATIONS: u32 = DUMMY_AVM_VERIFIER_NUM_COLUMNS * 122; + +// Warning: This is a fake avm recursive verification whose sole goal is to reproduce a similar +// computational effort (number of gates) as the real recursive verifier. +// TODO(#8470): Replace with the real AVM recursive verifier +impl AvmProofData { + pub fn fake_verify(self) { + let mut input_hash = poseidon2::Poseidon2::hash( + [self.public_inputs.transaction_fee, self.proof.fields[0], self.vk_data.vk.key[0]], + 3, + ); + + let mut result: [Field; 4] = [input_hash, 0, 0, 0]; + for i in 0..DUMMY_AVM_VERIFIER_NUM_ITERATIONS { + result = poseidon2_permutation(result, 4); + } + + assert(!result[0].lt(1)); + } +} + impl Verifiable for AvmProofData { fn verify(self) { // TODO(#8470) diff --git a/noir-projects/scripts/generate_vk_json.js b/noir-projects/scripts/generate_vk_json.js index c891d1f7ca4..a2942e69cde 100644 --- a/noir-projects/scripts/generate_vk_json.js +++ b/noir-projects/scripts/generate_vk_json.js @@ -4,6 +4,7 @@ const child_process = require("child_process"); const crypto = require("crypto"); const megaHonkPatterns = require("../mega_honk_circuits.json"); +const ivcIntegrationPatterns = require("../ivc_integration_circuits.json"); const { readVKFromS3, writeVKToS3, @@ -32,13 +33,19 @@ async function getBytecodeHash(artifactPath) { return crypto.createHash("md5").update(bytecode).digest("hex"); } -async function getArtifactHash(artifactPath, isMegaHonk, isRecursive) { +async function getArtifactHash( + artifactPath, + isMegaHonk, + isIvcIntegration, + isRecursive +) { const bytecodeHash = await getBytecodeHash(artifactPath); const barretenbergHash = await getBarretenbergHash(); return generateArtifactHash( barretenbergHash, bytecodeHash, isMegaHonk, + isIvcIntegration, isRecursive ); } @@ -66,14 +73,21 @@ function isMegaHonkCircuit(artifactName) { artifactName.match(new RegExp(pattern)) ); } +function isIvcIntegrationCircuit(artifactName) { + return ivcIntegrationPatterns.some((pattern) => + artifactName.match(new RegExp(pattern)) + ); +} async function processArtifact(artifactPath, artifactName, outputFolder) { const isMegaHonk = isMegaHonkCircuit(artifactName); + const isIvcIntegration = isIvcIntegrationCircuit(artifactName); const isRecursive = true; const artifactHash = await getArtifactHash( artifactPath, isMegaHonk, + isIvcIntegration, isRecursive ); @@ -93,6 +107,7 @@ async function processArtifact(artifactPath, artifactName, outputFolder) { artifactPath, artifactHash, isMegaHonk, + isIvcIntegration, isRecursive ); await writeVKToS3(artifactName, artifactHash, JSON.stringify(vkData)); @@ -109,10 +124,13 @@ async function generateVKData( artifactPath, artifactHash, isMegaHonk, + isIvcIntegration, isRecursive ) { if (isMegaHonk) { console.log("Generating new mega honk vk for", artifactName); + } else if (isIvcIntegration) { + console.log("Generating new IVC vk for", artifactName); } else { console.log("Generating new vk for", artifactName); } @@ -123,16 +141,22 @@ async function generateVKData( ); const jsonVkPath = vkJsonFileNameForArtifactName(outputFolder, artifactName); - const writeVkCommand = `${BB_BIN_PATH} ${ - isMegaHonk ? 
"write_vk_mega_honk" : "write_vk_ultra_honk" - } -h -b "${artifactPath}" -o "${binaryVkPath}" ${ + function getVkCommand() { + if (isMegaHonk) return "write_vk_mega_honk"; + if (isIvcIntegration) return "write_vk_for_ivc"; + return "write_vk_ultra_honk"; + } + + const writeVkCommand = `${BB_BIN_PATH} ${getVkCommand()} -h -b "${artifactPath}" -o "${binaryVkPath}" ${ isRecursive ? "--recursive" : "" }`; console.log("WRITE VK CMD: ", writeVkCommand); const vkAsFieldsCommand = `${BB_BIN_PATH} ${ - isMegaHonk ? "vk_as_fields_mega_honk" : "vk_as_fields_ultra_honk" + isMegaHonk || isIvcIntegration + ? "vk_as_fields_mega_honk" + : "vk_as_fields_ultra_honk" } -k "${binaryVkPath}" -o "${jsonVkPath}"`; await new Promise((resolve, reject) => { diff --git a/noir/noir-repo/.github/workflows/formatting.yml b/noir/noir-repo/.github/workflows/formatting.yml index 08c02af519f..ab92d452c79 100644 --- a/noir/noir-repo/.github/workflows/formatting.yml +++ b/noir/noir-repo/.github/workflows/formatting.yml @@ -15,18 +15,11 @@ concurrency: jobs: clippy: name: cargo clippy - runs-on: ${{ matrix.runner }} + runs-on: ubuntu-latest timeout-minutes: 30 env: RUSTFLAGS: -Dwarnings - strategy: - fail-fast: false - matrix: - include: - - runner: ubuntu-latest - target: x86_64-unknown-linux-gnu - steps: - name: Checkout uses: actions/checkout@v4 @@ -34,18 +27,41 @@ jobs: - name: Setup toolchain uses: dtolnay/rust-toolchain@1.74.1 with: - targets: ${{ matrix.target }} + targets: x86_64-unknown-linux-gnu components: clippy, rustfmt - uses: Swatinem/rust-cache@v2 with: - key: ${{ matrix.target }} + key: x86_64-unknown-linux-gnu cache-on-failure: true save-if: ${{ github.event_name != 'merge_group' }} - name: Run `cargo clippy` run: cargo clippy --all-targets --workspace --locked --release + rustfmt: + name: cargo fmt + runs-on: ubuntu-latest + timeout-minutes: 30 + env: + RUSTFLAGS: -Dwarnings + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Setup toolchain + uses: dtolnay/rust-toolchain@1.74.1 + with: + targets: x86_64-unknown-linux-gnu + components: clippy, rustfmt + + - uses: Swatinem/rust-cache@v2 + with: + key: x86_64-unknown-linux-gnu + cache-on-failure: true + save-if: ${{ github.event_name != 'merge_group' }} + - name: Run `cargo fmt` run: cargo fmt --all --check @@ -88,7 +104,6 @@ jobs: run: | mkdir dist cp ./target/release/nargo ./dist/nargo - 7z a -ttar -so -an ./dist/* | 7z a -si ./nargo-x86_64-unknown-linux-gnu.tar.gz - name: Upload artifact uses: actions/upload-artifact@v4 diff --git a/noir/noir-repo/.github/workflows/gates_report.yml b/noir/noir-repo/.github/workflows/gates_report.yml deleted file mode 100644 index 0b0a527b69e..00000000000 --- a/noir/noir-repo/.github/workflows/gates_report.yml +++ /dev/null @@ -1,94 +0,0 @@ -name: Report gates diff - -on: - push: - branches: - - master - pull_request: - -jobs: - build-nargo: - runs-on: ubuntu-latest - strategy: - matrix: - target: [x86_64-unknown-linux-gnu] - - steps: - - name: Checkout Noir repo - uses: actions/checkout@v4 - - - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.74.1 - - - uses: Swatinem/rust-cache@v2 - with: - key: ${{ matrix.target }} - cache-on-failure: true - save-if: ${{ github.event_name != 'merge_group' }} - - - name: Build Nargo - run: cargo build --package nargo_cli --release - - - name: Package artifacts - run: | - mkdir dist - cp ./target/release/nargo ./dist/nargo - 7z a -ttar -so -an ./dist/* | 7z a -si ./nargo-x86_64-unknown-linux-gnu.tar.gz - - - name: Upload artifact - uses: actions/upload-artifact@v4 
- with: - name: nargo - path: ./dist/* - retention-days: 3 - - - compare_gates_reports: - needs: [build-nargo] - runs-on: ubuntu-latest - permissions: - pull-requests: write - - steps: - - uses: actions/checkout@v4 - - - name: Install `bb` - run: | - ./scripts/install_bb.sh - echo "$HOME/.bb/" >> $GITHUB_PATH - - - name: Download nargo binary - uses: actions/download-artifact@v4 - with: - name: nargo - path: ./nargo - - - name: Set nargo on PATH - run: | - nargo_binary="${{ github.workspace }}/nargo/nargo" - chmod +x $nargo_binary - echo "$(dirname $nargo_binary)" >> $GITHUB_PATH - export PATH="$PATH:$(dirname $nargo_binary)" - nargo -V - - - name: Generate gates report - working-directory: ./test_programs - run: | - ./rebuild.sh - ./gates_report.sh - mv gates_report.json ../gates_report.json - - - name: Compare gates reports - id: gates_diff - uses: noir-lang/noir-gates-diff@1931aaaa848a1a009363d6115293f7b7fc72bb87 - with: - report: gates_report.json - summaryQuantile: 0.9 # only display the 10% most significant circuit size diffs in the summary (defaults to 20%) - - - name: Add gates diff to sticky comment - if: github.event_name == 'pull_request' || github.event_name == 'pull_request_target' - uses: marocchino/sticky-pull-request-comment@v2 - with: - # delete the comment in case changes no longer impact circuit sizes - delete: ${{ !steps.gates_diff.outputs.markdown }} - message: ${{ steps.gates_diff.outputs.markdown }} diff --git a/noir/noir-repo/.github/workflows/gates_report_brillig.yml b/noir/noir-repo/.github/workflows/gates_report_brillig.yml deleted file mode 100644 index e7ec30923f0..00000000000 --- a/noir/noir-repo/.github/workflows/gates_report_brillig.yml +++ /dev/null @@ -1,92 +0,0 @@ -name: Report Brillig bytecode size diff - -on: - push: - branches: - - master - pull_request: - -jobs: - build-nargo: - runs-on: ubuntu-latest - strategy: - matrix: - target: [x86_64-unknown-linux-gnu] - - steps: - - name: Checkout Noir repo - uses: actions/checkout@v4 - - - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.74.1 - - - uses: Swatinem/rust-cache@v2 - with: - key: ${{ matrix.target }} - cache-on-failure: true - save-if: ${{ github.event_name != 'merge_group' }} - - - name: Build Nargo - run: cargo build --package nargo_cli --release - - - name: Package artifacts - run: | - mkdir dist - cp ./target/release/nargo ./dist/nargo - 7z a -ttar -so -an ./dist/* | 7z a -si ./nargo-x86_64-unknown-linux-gnu.tar.gz - - - name: Upload artifact - uses: actions/upload-artifact@v4 - with: - name: nargo - path: ./dist/* - retention-days: 3 - - compare_brillig_bytecode_size_reports: - needs: [build-nargo] - runs-on: ubuntu-latest - permissions: - pull-requests: write - - steps: - - uses: actions/checkout@v4 - - - name: Download nargo binary - uses: actions/download-artifact@v4 - with: - name: nargo - path: ./nargo - - - name: Set nargo on PATH - run: | - nargo_binary="${{ github.workspace }}/nargo/nargo" - chmod +x $nargo_binary - echo "$(dirname $nargo_binary)" >> $GITHUB_PATH - export PATH="$PATH:$(dirname $nargo_binary)" - nargo -V - - - name: Generate Brillig bytecode size report - working-directory: ./test_programs - run: | - chmod +x gates_report_brillig.sh - ./gates_report_brillig.sh - mv gates_report_brillig.json ../gates_report_brillig.json - - - name: Compare Brillig bytecode size reports - id: brillig_bytecode_diff - uses: noir-lang/noir-gates-diff@d88f7523b013b9edd3f31c5cfddaef87a3fe1b48 - with: - report: gates_report_brillig.json - header: | - # Changes to Brillig bytecode sizes - 
brillig_report: true - summaryQuantile: 0.9 # only display the 10% most significant bytecode size diffs in the summary (defaults to 20%) - - - name: Add bytecode size diff to sticky comment - if: github.event_name == 'pull_request' || github.event_name == 'pull_request_target' - uses: marocchino/sticky-pull-request-comment@v2 - with: - header: brillig - # delete the comment in case changes no longer impact brillig bytecode sizes - delete: ${{ !steps.brillig_bytecode_diff.outputs.markdown }} - message: ${{ steps.brillig_bytecode_diff.outputs.markdown }} \ No newline at end of file diff --git a/noir/noir-repo/.github/workflows/gates_report_brillig_execution.yml b/noir/noir-repo/.github/workflows/gates_report_brillig_execution.yml deleted file mode 100644 index 0ef98f5045b..00000000000 --- a/noir/noir-repo/.github/workflows/gates_report_brillig_execution.yml +++ /dev/null @@ -1,92 +0,0 @@ -name: Report Brillig opcodes executed diff - -on: - push: - branches: - - master - pull_request: - -jobs: - build-nargo: - runs-on: ubuntu-latest - strategy: - matrix: - target: [x86_64-unknown-linux-gnu] - - steps: - - name: Checkout Noir repo - uses: actions/checkout@v4 - - - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.74.1 - - - uses: Swatinem/rust-cache@v2 - with: - key: ${{ matrix.target }} - cache-on-failure: true - save-if: ${{ github.event_name != 'merge_group' }} - - - name: Build Nargo - run: cargo build --package nargo_cli --release - - - name: Package artifacts - run: | - mkdir dist - cp ./target/release/nargo ./dist/nargo - 7z a -ttar -so -an ./dist/* | 7z a -si ./nargo-x86_64-unknown-linux-gnu.tar.gz - - - name: Upload artifact - uses: actions/upload-artifact@v4 - with: - name: nargo - path: ./dist/* - retention-days: 3 - - compare_brillig_execution_reports: - needs: [build-nargo] - runs-on: ubuntu-latest - permissions: - pull-requests: write - - steps: - - uses: actions/checkout@v4 - - - name: Download nargo binary - uses: actions/download-artifact@v4 - with: - name: nargo - path: ./nargo - - - name: Set nargo on PATH - run: | - nargo_binary="${{ github.workspace }}/nargo/nargo" - chmod +x $nargo_binary - echo "$(dirname $nargo_binary)" >> $GITHUB_PATH - export PATH="$PATH:$(dirname $nargo_binary)" - nargo -V - - - name: Generate Brillig execution report - working-directory: ./test_programs - run: | - chmod +x gates_report_brillig_execution.sh - ./gates_report_brillig_execution.sh - mv gates_report_brillig_execution.json ../gates_report_brillig_execution.json - - - name: Compare Brillig execution reports - id: brillig_execution_diff - uses: noir-lang/noir-gates-diff@d88f7523b013b9edd3f31c5cfddaef87a3fe1b48 - with: - report: gates_report_brillig_execution.json - header: | - # Changes to number of Brillig opcodes executed - brillig_report: true - summaryQuantile: 0.9 # only display the 10% most significant bytecode size diffs in the summary (defaults to 20%) - - - name: Add bytecode size diff to sticky comment - if: github.event_name == 'pull_request' || github.event_name == 'pull_request_target' - uses: marocchino/sticky-pull-request-comment@v2 - with: - header: brillig_execution - # delete the comment in case changes no longer impact brillig bytecode sizes - delete: ${{ !steps.brillig_execution_diff.outputs.markdown }} - message: ${{ steps.brillig_execution_diff.outputs.markdown }} \ No newline at end of file diff --git a/noir/noir-repo/.github/workflows/lockfile.yml b/noir/noir-repo/.github/workflows/lockfile.yml deleted file mode 100644 index 190e01745af..00000000000 --- 
a/noir/noir-repo/.github/workflows/lockfile.yml +++ /dev/null @@ -1,23 +0,0 @@ -name: Lockfile check - -on: - pull_request: - -# This will cancel previous runs when a branch or PR is updated -concurrency: - group: ${{ github.workflow }}-${{ github.head_ref || github.ref || github.run_id }} - cancel-in-progress: true - -jobs: - yarn-lock: - runs-on: ubuntu-latest - timeout-minutes: 30 - - steps: - - name: Checkout - uses: actions/checkout@v4 - - # Errors if installation would result in modifications to yarn.lock - - name: Install - run: yarn --immutable - shell: bash diff --git a/noir/noir-repo/.github/workflows/release.yml b/noir/noir-repo/.github/workflows/release.yml index 7e0909224e5..59c3d9a1415 100644 --- a/noir/noir-repo/.github/workflows/release.yml +++ b/noir/noir-repo/.github/workflows/release.yml @@ -15,7 +15,7 @@ jobs: steps: - name: Run release-please id: release - uses: google-github-actions/release-please-action@v4 + uses: googleapis/release-please-action@v4 with: token: ${{ secrets.NOIR_RELEASES_TOKEN }} diff --git a/noir/noir-repo/.github/workflows/reports.yml b/noir/noir-repo/.github/workflows/reports.yml new file mode 100644 index 00000000000..8f8aeabb65e --- /dev/null +++ b/noir/noir-repo/.github/workflows/reports.yml @@ -0,0 +1,235 @@ +name: Reports + +on: + push: + branches: + - master + pull_request: + +jobs: + build-nargo: + runs-on: ubuntu-latest + + steps: + - name: Checkout Noir repo + uses: actions/checkout@v4 + + - name: Setup toolchain + uses: dtolnay/rust-toolchain@1.74.1 + + - uses: Swatinem/rust-cache@v2 + with: + key: x86_64-unknown-linux-gnu + cache-on-failure: true + save-if: ${{ github.event_name != 'merge_group' }} + + - name: Build Nargo + run: cargo build --package nargo_cli --release + + - name: Package artifacts + run: | + mkdir dist + cp ./target/release/nargo ./dist/nargo + + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + name: nargo + path: ./dist/* + retention-days: 3 + + + compare_gates_reports: + name: Circuit sizes + needs: [build-nargo] + runs-on: ubuntu-latest + permissions: + pull-requests: write + + steps: + - uses: actions/checkout@v4 + + - name: Install `bb` + run: | + ./scripts/install_bb.sh + echo "$HOME/.bb/" >> $GITHUB_PATH + + - name: Download nargo binary + uses: actions/download-artifact@v4 + with: + name: nargo + path: ./nargo + + - name: Set nargo on PATH + run: | + nargo_binary="${{ github.workspace }}/nargo/nargo" + chmod +x $nargo_binary + echo "$(dirname $nargo_binary)" >> $GITHUB_PATH + export PATH="$PATH:$(dirname $nargo_binary)" + nargo -V + + - name: Generate gates report + working-directory: ./test_programs + run: | + ./rebuild.sh + ./gates_report.sh + mv gates_report.json ../gates_report.json + + - name: Compare gates reports + id: gates_diff + uses: noir-lang/noir-gates-diff@1931aaaa848a1a009363d6115293f7b7fc72bb87 + with: + report: gates_report.json + summaryQuantile: 0.9 # only display the 10% most significant circuit size diffs in the summary (defaults to 20%) + + - name: Add gates diff to sticky comment + if: github.event_name == 'pull_request' || github.event_name == 'pull_request_target' + uses: marocchino/sticky-pull-request-comment@v2 + with: + # delete the comment in case changes no longer impact circuit sizes + delete: ${{ !steps.gates_diff.outputs.markdown }} + message: ${{ steps.gates_diff.outputs.markdown }} + + compare_brillig_bytecode_size_reports: + name: Brillig bytecode sizes + needs: [build-nargo] + runs-on: ubuntu-latest + permissions: + pull-requests: write + + steps: + - 
uses: actions/checkout@v4 + + - name: Download nargo binary + uses: actions/download-artifact@v4 + with: + name: nargo + path: ./nargo + + - name: Set nargo on PATH + run: | + nargo_binary="${{ github.workspace }}/nargo/nargo" + chmod +x $nargo_binary + echo "$(dirname $nargo_binary)" >> $GITHUB_PATH + export PATH="$PATH:$(dirname $nargo_binary)" + nargo -V + + - name: Generate Brillig bytecode size report + working-directory: ./test_programs + run: | + ./gates_report_brillig.sh + mv gates_report_brillig.json ../gates_report_brillig.json + + - name: Compare Brillig bytecode size reports + id: brillig_bytecode_diff + uses: noir-lang/noir-gates-diff@d88f7523b013b9edd3f31c5cfddaef87a3fe1b48 + with: + report: gates_report_brillig.json + header: | + # Changes to Brillig bytecode sizes + brillig_report: true + summaryQuantile: 0.9 # only display the 10% most significant bytecode size diffs in the summary (defaults to 20%) + + - name: Add bytecode size diff to sticky comment + if: github.event_name == 'pull_request' || github.event_name == 'pull_request_target' + uses: marocchino/sticky-pull-request-comment@v2 + with: + header: brillig + # delete the comment in case changes no longer impact brillig bytecode sizes + delete: ${{ !steps.brillig_bytecode_diff.outputs.markdown }} + message: ${{ steps.brillig_bytecode_diff.outputs.markdown }} + + compare_brillig_execution_reports: + name: Brillig execution trace sizes + needs: [build-nargo] + runs-on: ubuntu-latest + permissions: + pull-requests: write + + steps: + - uses: actions/checkout@v4 + + - name: Download nargo binary + uses: actions/download-artifact@v4 + with: + name: nargo + path: ./nargo + + - name: Set nargo on PATH + run: | + nargo_binary="${{ github.workspace }}/nargo/nargo" + chmod +x $nargo_binary + echo "$(dirname $nargo_binary)" >> $GITHUB_PATH + export PATH="$PATH:$(dirname $nargo_binary)" + nargo -V + + - name: Generate Brillig execution report + working-directory: ./test_programs + run: | + ./gates_report_brillig_execution.sh + mv gates_report_brillig_execution.json ../gates_report_brillig_execution.json + + - name: Compare Brillig execution reports + id: brillig_execution_diff + uses: noir-lang/noir-gates-diff@d88f7523b013b9edd3f31c5cfddaef87a3fe1b48 + with: + report: gates_report_brillig_execution.json + header: | + # Changes to number of Brillig opcodes executed + brillig_report: true + summaryQuantile: 0.9 # only display the 10% most significant bytecode size diffs in the summary (defaults to 20%) + + - name: Add bytecode size diff to sticky comment + if: github.event_name == 'pull_request' || github.event_name == 'pull_request_target' + uses: marocchino/sticky-pull-request-comment@v2 + with: + header: brillig_execution + # delete the comment in case changes no longer impact brillig bytecode sizes + delete: ${{ !steps.brillig_execution_diff.outputs.markdown }} + message: ${{ steps.brillig_execution_diff.outputs.markdown }} + + generate_memory_report: + name: Peak memory usage + needs: [build-nargo] + runs-on: ubuntu-latest + permissions: + pull-requests: write + + steps: + - uses: actions/checkout@v4 + + - name: Download nargo binary + uses: actions/download-artifact@v4 + with: + name: nargo + path: ./nargo + + - name: Set nargo on PATH + run: | + nargo_binary="${{ github.workspace }}/nargo/nargo" + chmod +x $nargo_binary + echo "$(dirname $nargo_binary)" >> $GITHUB_PATH + export PATH="$PATH:$(dirname $nargo_binary)" + nargo -V + + - name: Generate Memory report + working-directory: ./test_programs + run: | + 
./memory_report.sh + mv memory_report.json ../memory_report.json + + - name: Parse memory report + id: memory_report + uses: noir-lang/noir-bench-report@ccb0d806a91d3bd86dba0ba3d580a814eed5673c + with: + report: memory_report.json + header: | + # Memory Report + memory_report: true + + - name: Add memory report to sticky comment + if: github.event_name == 'pull_request' || github.event_name == 'pull_request_target' + uses: marocchino/sticky-pull-request-comment@v2 + with: + header: memory + message: ${{ steps.memory_report.outputs.markdown }} diff --git a/noir/noir-repo/.github/workflows/test-js-packages.yml b/noir/noir-repo/.github/workflows/test-js-packages.yml index 4a5d0b8179b..6a9a918b955 100644 --- a/noir/noir-repo/.github/workflows/test-js-packages.yml +++ b/noir/noir-repo/.github/workflows/test-js-packages.yml @@ -13,6 +13,19 @@ concurrency: cancel-in-progress: true jobs: + yarn-lock: + runs-on: ubuntu-latest + timeout-minutes: 30 + + steps: + - name: Checkout + uses: actions/checkout@v4 + + # Errors if installation would result in modifications to yarn.lock + - name: Install + run: yarn --immutable + shell: bash + build-nargo: runs-on: ubuntu-22.04 timeout-minutes: 30 @@ -78,7 +91,6 @@ jobs: ./tooling/noirc_abi_wasm/web retention-days: 10 - build-noir-wasm: runs-on: ubuntu-latest timeout-minutes: 30 @@ -519,12 +531,25 @@ jobs: fail-fast: false matrix: project: - # Disabled as these are currently failing with many visibility errors - - { repo: AztecProtocol/aztec-nr, path: ./ } + - { repo: noir-lang/ec, path: ./ } + - { repo: noir-lang/eddsa, path: ./ } + - { repo: noir-lang/mimc, path: ./ } + - { repo: noir-lang/noir_sort, path: ./ } + - { repo: noir-lang/noir-edwards, path: ./ } + - { repo: noir-lang/noir-bignum, path: ./ } + - { repo: noir-lang/noir_bigcurve, path: ./ } + - { repo: noir-lang/noir_base64, path: ./ } + - { repo: noir-lang/noir_string_search, path: ./ } + - { repo: noir-lang/sparse_array, path: ./ } + - { repo: noir-lang/noir_rsa, path: ./lib } + - { repo: AztecProtocol/aztec-packages, path: ./noir-projects/aztec-nr } - { repo: AztecProtocol/aztec-packages, path: ./noir-projects/noir-contracts } - # Disabled as aztec-packages requires a setup-step in order to generate a `Nargo.toml` - #- { repo: AztecProtocol/aztec-packages, path: ./noir-projects/noir-protocol-circuits } - - { repo: noir-lang/noir-edwards, path: ./, ref: 3188ea74fe3b059219a2ea87899589c266256d74 } + - { repo: AztecProtocol/aztec-packages, path: ./noir-projects/noir-protocol-circuits/crates/parity-lib } + - { repo: AztecProtocol/aztec-packages, path: ./noir-projects/noir-protocol-circuits/crates/private-kernel-lib } + - { repo: AztecProtocol/aztec-packages, path: ./noir-projects/noir-protocol-circuits/crates/reset-kernel-lib } + - { repo: AztecProtocol/aztec-packages, path: ./noir-projects/noir-protocol-circuits/crates/rollup-lib } + - { repo: AztecProtocol/aztec-packages, path: ./noir-projects/noir-protocol-circuits/crates/types } + name: Check external repo - ${{ matrix.project.repo }} steps: - name: Checkout @@ -554,9 +579,12 @@ jobs: # Github actions seems to not expand "**" in globs by default. shopt -s globstar sed -i '/^compiler_version/d' ./**/Nargo.toml - - name: Run nargo check + + - name: Run nargo test working-directory: ./test-repo/${{ matrix.project.path }} - run: nargo check + run: nargo test --silence-warnings + env: + NARGO_IGNORE_TEST_FAILURES_FROM_FOREIGN_CALLS: true # This is a job which depends on all test jobs and reports the overall status. 
# This allows us to add/remove test jobs without having to update the required workflows. @@ -566,6 +594,7 @@ jobs: # We want this job to always run (even if the dependant jobs fail) as we want this job to fail rather than skipping. if: ${{ always() }} needs: + - yarn-lock - test-acvm_js-node - test-acvm_js-browser - test-noirc-abi diff --git a/noir/noir-repo/Cargo.lock b/noir/noir-repo/Cargo.lock index 94a84b89d05..96ceb94fcdd 100644 --- a/noir/noir-repo/Cargo.lock +++ b/noir/noir-repo/Cargo.lock @@ -13,7 +13,7 @@ dependencies = [ "criterion", "flate2", "fxhash", - "pprof 0.13.0", + "pprof", "serde", "serde-big-array", "serde-generate", @@ -158,6 +158,15 @@ dependencies = [ "memchr", ] +[[package]] +name = "aligned-vec" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e0966165eaf052580bd70eb1b32cb3d6245774c0104d1b2793e9650bf83b52a" +dependencies = [ + "equator", +] + [[package]] name = "android-tzdata" version = "0.1.1" @@ -604,7 +613,7 @@ dependencies = [ "lazy_static", "noir_grumpkin", "num-bigint", - "pprof 0.12.1", + "pprof", ] [[package]] @@ -1417,6 +1426,26 @@ dependencies = [ "log", ] +[[package]] +name = "equator" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c35da53b5a021d2484a7cc49b2ac7f2d840f8236a286f84202369bd338d761ea" +dependencies = [ + "equator-macro", +] + +[[package]] +name = "equator-macro" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3bf679796c0322556351f287a51b49e48f7c4986e727b5dd78c972d30e2e16cc" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.87", +] + [[package]] name = "equivalent" version = "1.0.1" @@ -2789,6 +2818,7 @@ dependencies = [ "dirs", "file-lock", "fm", + "fxhash", "iai", "iter-extended", "lazy_static", @@ -2806,7 +2836,7 @@ dependencies = [ "notify", "notify-debouncer-full", "paste", - "pprof 0.13.0", + "pprof", "predicates 2.1.5", "prettytable-rs", "proptest", @@ -3151,6 +3181,7 @@ dependencies = [ "serde_json", "serde_with", "similar-asserts", + "test-case", "thiserror", "tracing", ] @@ -3577,32 +3608,11 @@ checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" [[package]] name = "pprof" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "978385d59daf9269189d052ca8a84c1acfd0715c0599a5d5188d4acc078ca46a" -dependencies = [ - "backtrace", - "cfg-if 1.0.0", - "criterion", - "findshlibs", - "inferno", - "libc", - "log", - "nix 0.26.4", - "once_cell", - "parking_lot 0.12.3", - "smallvec", - "symbolic-demangle", - "tempfile", - "thiserror", -] - -[[package]] -name = "pprof" -version = "0.13.0" +version = "0.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef5c97c51bd34c7e742402e216abdeb44d415fbe6ae41d56b114723e953711cb" +checksum = "ebbe2f8898beba44815fdc9e5a4ae9c929e21c5dc29b0c774a15555f7f58d6d0" dependencies = [ + "aligned-vec", "backtrace", "cfg-if 1.0.0", "criterion", diff --git a/noir/noir-repo/Cargo.toml b/noir/noir-repo/Cargo.toml index 94ebe54fde1..4ce0ddd999f 100644 --- a/noir/noir-repo/Cargo.toml +++ b/noir/noir-repo/Cargo.toml @@ -126,7 +126,7 @@ codespan-reporting = "0.11.1" criterion = "0.5.0" # Note that using the "frame-pointer" feature breaks framegraphs on linux # https://github.com/tikv/pprof-rs/pull/172 -pprof = { version = "0.13", features = ["flamegraph", "criterion"] } +pprof = { version = "0.14", features = ["flamegraph", 
"criterion"] } cfg-if = "1.0.0" dirs = "4" diff --git a/noir/noir-repo/acvm-repo/acir/src/circuit/brillig.rs b/noir/noir-repo/acvm-repo/acir/src/circuit/brillig.rs index a9714ce29b2..ef75d088f8c 100644 --- a/noir/noir-repo/acvm-repo/acir/src/circuit/brillig.rs +++ b/noir/noir-repo/acvm-repo/acir/src/circuit/brillig.rs @@ -5,7 +5,7 @@ use serde::{Deserialize, Serialize}; /// Inputs for the Brillig VM. These are the initial inputs /// that the Brillig VM will use to start. -#[derive(Clone, PartialEq, Eq, Serialize, Deserialize, Debug)] +#[derive(Clone, PartialEq, Eq, Serialize, Deserialize, Debug, Hash)] pub enum BrilligInputs { Single(Expression), Array(Vec>), @@ -14,7 +14,7 @@ pub enum BrilligInputs { /// Outputs for the Brillig VM. Once the VM has completed /// execution, this will be the object that is returned. -#[derive(Clone, PartialEq, Eq, Serialize, Deserialize, Debug)] +#[derive(Clone, PartialEq, Eq, Serialize, Deserialize, Debug, Hash)] pub enum BrilligOutputs { Simple(Witness), Array(Vec), @@ -23,7 +23,7 @@ pub enum BrilligOutputs { /// This is purely a wrapper struct around a list of Brillig opcode's which represents /// a full Brillig function to be executed by the Brillig VM. /// This is stored separately on a program and accessed through a [BrilligPointer]. -#[derive(Clone, PartialEq, Eq, Serialize, Deserialize, Default, Debug)] +#[derive(Clone, PartialEq, Eq, Serialize, Deserialize, Default, Debug, Hash)] pub struct BrilligBytecode { pub bytecode: Vec>, } diff --git a/noir/noir-repo/acvm-repo/acir/src/circuit/mod.rs b/noir/noir-repo/acvm-repo/acir/src/circuit/mod.rs index 6282a33af6b..4ff581bf17a 100644 --- a/noir/noir-repo/acvm-repo/acir/src/circuit/mod.rs +++ b/noir/noir-repo/acvm-repo/acir/src/circuit/mod.rs @@ -25,7 +25,7 @@ use self::{brillig::BrilligBytecode, opcodes::BlockId}; /// Bounded Expressions are useful if you are eventually going to pass the ACIR /// into a proving system which supports PLONK, where arithmetic expressions have a /// finite fan-in. -#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Default)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Default, Hash)] pub enum ExpressionWidth { #[default] Unbounded, @@ -36,13 +36,13 @@ pub enum ExpressionWidth { /// A program represented by multiple ACIR circuits. The execution trace of these /// circuits is dictated by construction of the [crate::native_types::WitnessStack]. -#[derive(Clone, PartialEq, Eq, Serialize, Deserialize, Default)] +#[derive(Clone, PartialEq, Eq, Serialize, Deserialize, Default, Hash)] pub struct Program { pub functions: Vec>, pub unconstrained_functions: Vec>, } -#[derive(Clone, PartialEq, Eq, Serialize, Deserialize, Default)] +#[derive(Clone, PartialEq, Eq, Serialize, Deserialize, Default, Hash)] pub struct Circuit { // current_witness_index is the highest witness index in the circuit. The next witness to be added to this circuit // will take on this value. (The value is cached here as an optimization.) 
@@ -69,13 +69,13 @@ pub struct Circuit { pub assert_messages: Vec<(OpcodeLocation, AssertionPayload)>, } -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Hash)] pub enum ExpressionOrMemory { Expression(Expression), Memory(BlockId), } -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Hash)] pub struct AssertionPayload { pub error_selector: u64, pub payload: Vec>, @@ -355,7 +355,7 @@ impl std::fmt::Debug for Program { } } -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, Default)] +#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, Default, Hash)] pub struct PublicInputs(pub BTreeSet); impl PublicInputs { diff --git a/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes.rs b/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes.rs index 06effd3c5b6..f47c40b0dd7 100644 --- a/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes.rs +++ b/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes.rs @@ -15,7 +15,7 @@ pub use black_box_function_call::{ }; pub use memory_operation::{BlockId, MemOp}; -#[derive(Clone, PartialEq, Eq, Serialize, Deserialize)] +#[derive(Clone, PartialEq, Eq, Serialize, Deserialize, Hash)] pub enum BlockType { Memory, CallData(u32), @@ -29,7 +29,7 @@ impl BlockType { } #[allow(clippy::large_enum_variant)] -#[derive(Clone, PartialEq, Eq, Serialize, Deserialize)] +#[derive(Clone, PartialEq, Eq, Serialize, Deserialize, Hash)] pub enum Opcode { /// An `AssertZero` opcode adds the constraint that `P(w) = 0`, where /// `w=(w_1,..w_n)` is a tuple of `n` witnesses, and `P` is a multi-variate diff --git a/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs b/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs index dfdf9616306..9cf31e94eb4 100644 --- a/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs +++ b/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs @@ -9,13 +9,13 @@ use thiserror::Error; // Note: Some functions will not use all of the witness // So we need to supply how many bits of the witness is needed -#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)] +#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize, Hash)] pub enum ConstantOrWitnessEnum { Constant(F), Witness(Witness), } -#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)] +#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize, Hash)] pub struct FunctionInput { input: ConstantOrWitnessEnum, num_bits: u32, @@ -79,7 +79,7 @@ impl std::fmt::Display for FunctionInput { } } -#[derive(Clone, PartialEq, Eq, Serialize, Deserialize)] +#[derive(Clone, PartialEq, Eq, Serialize, Deserialize, Hash)] pub enum BlackBoxFuncCall { AES128Encrypt { inputs: Vec>, diff --git a/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes/memory_operation.rs b/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes/memory_operation.rs index 90e3ee0563a..c9a78983204 100644 --- a/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes/memory_operation.rs +++ b/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes/memory_operation.rs @@ -7,7 +7,7 @@ pub struct BlockId(pub u32); /// Operation on a block of memory /// We can either write or read at an index in memory -#[derive(Clone, PartialEq, Eq, Serialize, Deserialize, Debug)] +#[derive(Clone, PartialEq, Eq, Serialize, Deserialize, Debug, Hash)] pub struct MemOp { /// A constant expression 
that can be 0 (read) or 1 (write) pub operation: Expression, diff --git a/noir/noir-repo/acvm-repo/bn254_blackbox_solver/Cargo.toml b/noir/noir-repo/acvm-repo/bn254_blackbox_solver/Cargo.toml index 8829692b9b4..825a0ef0481 100644 --- a/noir/noir-repo/acvm-repo/bn254_blackbox_solver/Cargo.toml +++ b/noir/noir-repo/acvm-repo/bn254_blackbox_solver/Cargo.toml @@ -30,7 +30,7 @@ num-bigint.workspace = true [dev-dependencies] ark-std.workspace = true criterion = "0.5.0" -pprof = { version = "0.12", features = [ +pprof = { version = "0.14", features = [ "flamegraph", "frame-pointer", "criterion", diff --git a/noir/noir-repo/acvm-repo/brillig/src/black_box.rs b/noir/noir-repo/acvm-repo/brillig/src/black_box.rs index 9cc5349e45b..f185b36e6c8 100644 --- a/noir/noir-repo/acvm-repo/brillig/src/black_box.rs +++ b/noir/noir-repo/acvm-repo/brillig/src/black_box.rs @@ -3,7 +3,7 @@ use serde::{Deserialize, Serialize}; /// These opcodes provide an equivalent of ACIR blackbox functions. /// They are implemented as native functions in the VM. -#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Hash)] pub enum BlackBoxOp { /// Encrypts a message using AES128. AES128Encrypt { diff --git a/noir/noir-repo/acvm-repo/brillig/src/opcodes.rs b/noir/noir-repo/acvm-repo/brillig/src/opcodes.rs index 8b72b5a9b41..1cb31ca3d0a 100644 --- a/noir/noir-repo/acvm-repo/brillig/src/opcodes.rs +++ b/noir/noir-repo/acvm-repo/brillig/src/opcodes.rs @@ -56,7 +56,7 @@ impl MemoryAddress { } /// Describes the memory layout for an array/vector element -#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)] +#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize, Hash)] pub enum HeapValueType { // A single field element is enough to represent the value with a given bit size Simple(BitSize), @@ -81,7 +81,7 @@ impl HeapValueType { } /// A fixed-sized array starting from a Brillig memory location. -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Copy)] +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Copy, Hash)] pub struct HeapArray { pub pointer: MemoryAddress, pub size: usize, @@ -94,13 +94,13 @@ impl Default for HeapArray { } /// A memory-sized vector passed starting from a Brillig memory location and with a memory-held size -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Copy)] +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Copy, Hash)] pub struct HeapVector { pub pointer: MemoryAddress, pub size: MemoryAddress, } -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Copy, PartialOrd, Ord)] +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Copy, PartialOrd, Ord, Hash)] pub enum IntegerBitSize { U1, U8, @@ -152,7 +152,7 @@ impl std::fmt::Display for IntegerBitSize { } } -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Copy, PartialOrd, Ord)] +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Copy, PartialOrd, Ord, Hash)] pub enum BitSize { Field, Integer(IntegerBitSize), @@ -181,7 +181,7 @@ impl BitSize { /// While we are usually agnostic to how memory is passed within Brillig, /// this needs to be encoded somehow when dealing with an external system. /// For simplicity, the extra type information is given right in the ForeignCall instructions. 
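To make that concrete, here is an illustrative stand-in for the three layouts the following enum can describe (simplified field types; the real variants wrap `MemoryAddress` and the `HeapArray`/`HeapVector` structs from this file):

```rust
// Simplified sketch, not the real definition.
#[allow(dead_code)]
enum ValueOrArrayShape {
    // An immediate value used directly, without dereferencing.
    MemoryAddress(usize),
    // A fixed-size block: pointer plus a size known when the opcode is built.
    HeapArray { pointer: usize, size: usize },
    // A vector: pointer plus the address where the runtime size is held.
    HeapVector { pointer: usize, size_address: usize },
}
```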
-#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Copy)] +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Copy, Hash)] pub enum ValueOrArray { /// A single value passed to or from an external call /// It is an 'immediate' value - used without dereferencing. @@ -198,7 +198,7 @@ pub enum ValueOrArray { HeapVector(HeapVector), } -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Hash)] pub enum BrilligOpcode { /// Takes the fields in addresses `lhs` and `rhs` /// Performs the specified binary operation @@ -314,7 +314,7 @@ pub enum BrilligOpcode { } /// Binary fixed-length field expressions -#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Hash)] pub enum BinaryFieldOp { Add, Sub, @@ -332,7 +332,7 @@ pub enum BinaryFieldOp { } /// Binary fixed-length integer expressions -#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Hash)] pub enum BinaryIntOp { Add, Sub, diff --git a/noir/noir-repo/compiler/noirc_driver/src/debug.rs b/noir/noir-repo/compiler/noirc_driver/src/debug.rs index f5eaede89b2..6044e6c0e65 100644 --- a/noir/noir-repo/compiler/noirc_driver/src/debug.rs +++ b/noir/noir-repo/compiler/noirc_driver/src/debug.rs @@ -8,7 +8,7 @@ use std::{ /// For a given file, we store the source code and the path to the file /// so consumers of the debug artifact can reconstruct the original source code structure. -#[derive(Clone, Debug, Serialize, Deserialize)] +#[derive(Clone, Debug, Serialize, Deserialize, Hash)] pub struct DebugFile { pub source: String, pub path: PathBuf, diff --git a/noir/noir-repo/compiler/noirc_driver/src/lib.rs b/noir/noir-repo/compiler/noirc_driver/src/lib.rs index 72ea464805f..5bedefaf563 100644 --- a/noir/noir-repo/compiler/noirc_driver/src/lib.rs +++ b/noir/noir-repo/compiler/noirc_driver/src/lib.rs @@ -13,7 +13,7 @@ use noirc_abi::{AbiParameter, AbiType, AbiValue}; use noirc_errors::{CustomDiagnostic, FileDiagnostic}; use noirc_evaluator::create_program; use noirc_evaluator::errors::RuntimeError; -use noirc_evaluator::ssa::SsaProgramArtifact; +use noirc_evaluator::ssa::{SsaLogging, SsaProgramArtifact}; use noirc_frontend::debug::build_debug_crate_file; use noirc_frontend::hir::def_map::{Contract, CrateDefMap}; use noirc_frontend::hir::Context; @@ -70,6 +70,11 @@ pub struct CompileOptions { #[arg(long, hide = true)] pub show_ssa: bool, + /// Only show SSA passes whose name contains the provided string. + /// This setting takes precedence over `show_ssa` if it's not empty. + #[arg(long, hide = true)] + pub show_ssa_pass_name: Option, + /// Emit the unoptimized SSA IR to file. /// The IR will be dumped into the workspace target directory, /// under `[compiled-package].ssa.json`. @@ -126,11 +131,19 @@ pub struct CompileOptions { #[arg(long)] pub skip_underconstrained_check: bool, - /// Setting to decide on an inlining strategy for brillig functions. + /// Setting to decide on an inlining strategy for Brillig functions. /// A more aggressive inliner should generate larger programs but more optimized /// A less aggressive inliner should generate smaller programs #[arg(long, hide = true, allow_hyphen_values = true, default_value_t = i64::MAX)] pub inliner_aggressiveness: i64, + + /// Setting the maximum acceptable increase in Brillig bytecode size due to + /// unrolling small loops. 
When left empty, any change is accepted as long + /// as it requires fewer SSA instructions. + /// A higher value results in fewer jumps but a larger program. + /// A lower value keeps the original program if it was smaller, even if it has more jumps. + #[arg(long, hide = true, allow_hyphen_values = true)] + pub max_bytecode_increase_percent: Option<i32>, } pub fn parse_expression_width(input: &str) -> Result { @@ -321,6 +334,8 @@ pub fn compute_function_abi( /// /// On success this returns the compiled program alongside any warnings that were found. /// On error this returns the non-empty list of warnings and errors. + /// + /// See [compile_no_check] for further information about the use of `cached_program`. pub fn compile_main( context: &mut Context, crate_id: CrateId, @@ -542,6 +557,15 @@ pub const DEFAULT_EXPRESSION_WIDTH: ExpressionWidth = ExpressionWidth::Bounded { /// Compile the current crate using `main_function` as the entrypoint. /// /// This function assumes [`check_crate`] is called beforehand. + /// + /// If the program is not returned from cache, it is backend-agnostic and must go through a transformation + /// pass before usage in proof generation; if it's returned from cache these transformations might have + /// already been applied. + /// + /// The transformations are _not_ covered by the check that decides whether we can use the cached artifact. + /// That comparison is based on [CompiledProgram::hash] which is a persisted version of the hash of the input + /// [`ast::Program`][noirc_frontend::monomorphization::ast::Program], whereas the output [`circuit::Program`][acir::circuit::Program] + /// contains the final optimized ACIR opcodes, including the transformation done after this compilation. #[tracing::instrument(level = "trace", skip_all, fields(function_name = context.function_name(&main_function)))] pub fn compile_no_check( context: &mut Context, @@ -556,8 +580,6 @@ pub fn compile_no_check( monomorphize(main_function, &mut context.def_interner)? }; - let hash = fxhash::hash64(&program); - let hashes_match = cached_program.as_ref().map_or(false, |program| program.hash == hash); if options.show_monomorphized { println!("{program}"); } @@ -571,13 +593,28 @@ pub fn compile_no_check( || options.show_ssa || options.emit_ssa; - if !force_compile && hashes_match { - info!("Program matches existing artifact, returning early"); - return Ok(cached_program.expect("cache must exist for hashes to match")); + // Hash the AST program, which is going to be used to fingerprint the compilation artifact.
+ let hash = fxhash::hash64(&program); + + if let Some(cached_program) = cached_program { + if !force_compile && cached_program.hash == hash { + info!("Program matches existing artifact, returning early"); + return Ok(cached_program); + } } + let return_visibility = program.return_visibility; let ssa_evaluator_options = noirc_evaluator::ssa::SsaEvaluatorOptions { - enable_ssa_logging: options.show_ssa, + ssa_logging: match &options.show_ssa_pass_name { + Some(string) => SsaLogging::Contains(string.clone()), + None => { + if options.show_ssa { + SsaLogging::All + } else { + SsaLogging::None + } + } + }, enable_brillig_logging: options.show_brillig, force_brillig_output: options.force_brillig, print_codegen_timings: options.benchmark_codegen, @@ -589,6 +626,7 @@ pub fn compile_no_check( emit_ssa: if options.emit_ssa { Some(context.package_build_path.clone()) } else { None }, skip_underconstrained_check: options.skip_underconstrained_check, inliner_aggressiveness: options.inliner_aggressiveness, + max_bytecode_increase_percent: options.max_bytecode_increase_percent, }; let SsaProgramArtifact { program, debug, warnings, names, brillig_names, error_types, .. } = diff --git a/noir/noir-repo/compiler/noirc_driver/src/program.rs b/noir/noir-repo/compiler/noirc_driver/src/program.rs index 88460482928..4b4d6662e8e 100644 --- a/noir/noir-repo/compiler/noirc_driver/src/program.rs +++ b/noir/noir-repo/compiler/noirc_driver/src/program.rs @@ -9,7 +9,7 @@ use serde::{Deserialize, Serialize}; use super::debug::DebugFile; -#[derive(Debug, Serialize, Deserialize, Clone)] +#[derive(Debug, Serialize, Deserialize, Clone, Hash)] pub struct CompiledProgram { pub noir_version: String, /// Hash of the [`Program`][noirc_frontend::monomorphization::ast::Program] from which this [`CompiledProgram`] diff --git a/noir/noir-repo/compiler/noirc_errors/src/debug_info.rs b/noir/noir-repo/compiler/noirc_errors/src/debug_info.rs index 77028f739bd..a5e12b37712 100644 --- a/noir/noir-repo/compiler/noirc_errors/src/debug_info.rs +++ b/noir/noir-repo/compiler/noirc_errors/src/debug_info.rs @@ -94,7 +94,7 @@ impl ProgramDebugInfo { } #[serde_as] -#[derive(Default, Debug, Clone, Deserialize, Serialize)] +#[derive(Default, Debug, Clone, Deserialize, Serialize, Hash)] pub struct DebugInfo { /// Map opcode index of an ACIR circuit into the source code location /// Serde does not support mapping keys being enums for json, so we indicate diff --git a/noir/noir-repo/compiler/noirc_evaluator/Cargo.toml b/noir/noir-repo/compiler/noirc_evaluator/Cargo.toml index e25b5bf855a..bb8c62cfd95 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/Cargo.toml +++ b/noir/noir-repo/compiler/noirc_evaluator/Cargo.toml @@ -33,6 +33,7 @@ cfg-if.workspace = true proptest.workspace = true similar-asserts.workspace = true num-traits.workspace = true +test-case.workspace = true [features] bn254 = ["noirc_frontend/bn254"] diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/acir/acir_variable.rs b/noir/noir-repo/compiler/noirc_evaluator/src/acir/acir_variable.rs index a42426e6c04..9f2c649ee3e 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/acir/acir_variable.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/acir/acir_variable.rs @@ -92,7 +92,7 @@ impl<'a> From<&'a SsaType> for AcirType { SsaType::Numeric(numeric_type) => AcirType::NumericType(*numeric_type), SsaType::Array(elements, size) => { let elements = elements.iter().map(|e| e.into()).collect(); - AcirType::Array(elements, *size) + AcirType::Array(elements, *size as usize) } _ => 
unreachable!("The type {value} cannot be represented in ACIR"), } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/acir/mod.rs b/noir/noir-repo/compiler/noirc_evaluator/src/acir/mod.rs index 69679495b92..76f0dea95bb 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/acir/mod.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/acir/mod.rs @@ -571,7 +571,7 @@ impl<'a> Context<'a> { AcirValue::Array(_) => { let block_id = self.block_id(param_id); let len = if matches!(typ, Type::Array(_, _)) { - typ.flattened_size() + typ.flattened_size() as usize } else { return Err(InternalError::Unexpected { expected: "Block params should be an array".to_owned(), @@ -816,7 +816,9 @@ impl<'a> Context<'a> { let inputs = vecmap(arguments, |arg| self.convert_value(*arg, dfg)); let output_count = result_ids .iter() - .map(|result_id| dfg.type_of_value(*result_id).flattened_size()) + .map(|result_id| { + dfg.type_of_value(*result_id).flattened_size() as usize + }) .sum(); let Some(acir_function_id) = @@ -948,7 +950,7 @@ impl<'a> Context<'a> { let block_id = self.block_id(&array_id); let array_typ = dfg.type_of_value(array_id); let len = if matches!(array_typ, Type::Array(_, _)) { - array_typ.flattened_size() + array_typ.flattened_size() as usize } else { Self::flattened_value_size(&output) }; @@ -1444,7 +1446,7 @@ impl<'a> Context<'a> { // a separate SSA value and restrictions on slice indices should be generated elsewhere in the SSA. let array_typ = dfg.type_of_value(array); let array_len = if !array_typ.contains_slice_element() { - array_typ.flattened_size() + array_typ.flattened_size() as usize } else { self.flattened_slice_size(array, dfg) }; @@ -1539,7 +1541,7 @@ impl<'a> Context<'a> { let value = self.convert_value(array, dfg); let array_typ = dfg.type_of_value(array); let len = if !array_typ.contains_slice_element() { - array_typ.flattened_size() + array_typ.flattened_size() as usize } else { self.flattened_slice_size(array, dfg) }; @@ -1810,7 +1812,7 @@ impl<'a> Context<'a> { return_values .iter() - .fold(0, |acc, value_id| acc + dfg.type_of_value(*value_id).flattened_size()) + .fold(0, |acc, value_id| acc + dfg.type_of_value(*value_id).flattened_size() as usize) } /// Converts an SSA terminator's return values into their ACIR representations @@ -2156,7 +2158,7 @@ impl<'a> Context<'a> { let inputs = vecmap(&arguments_no_slice_len, |arg| self.convert_value(*arg, dfg)); let output_count = result_ids.iter().fold(0usize, |sum, result_id| { - sum + dfg.try_get_array_length(*result_id).unwrap_or(1) + sum + dfg.try_get_array_length(*result_id).unwrap_or(1) as usize }); let vars = self.acir_context.black_box_function(black_box, inputs, output_count)?; @@ -2180,7 +2182,7 @@ impl<'a> Context<'a> { endian, field, radix, - array_length as u32, + array_length, result_type[0].clone().into(), ) .map(|array| vec![array]) @@ -2194,12 +2196,7 @@ impl<'a> Context<'a> { }; self.acir_context - .bit_decompose( - endian, - field, - array_length as u32, - result_type[0].clone().into(), - ) + .bit_decompose(endian, field, array_length, result_type[0].clone().into()) .map(|array| vec![array]) } Intrinsic::ArrayLen => { @@ -2220,7 +2217,7 @@ impl<'a> Context<'a> { let acir_value = self.convert_value(slice_contents, dfg); let array_len = if !slice_typ.contains_slice_element() { - slice_typ.flattened_size() + slice_typ.flattened_size() as usize } else { self.flattened_slice_size(slice_contents, dfg) }; diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs 
b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs index 1fa4985295a..9c88c559b59 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs @@ -1823,7 +1823,7 @@ impl<'block> BrilligBlock<'block> { Type::Array(_, nested_size) => { let inner_array = BrilligArray { pointer: self.brillig_context.allocate_register(), - size: *nested_size, + size: *nested_size as usize, }; self.allocate_foreign_call_result_array(element_type, inner_array); diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block_variables.rs b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block_variables.rs index 393d4c967c2..bf0a1bc7347 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block_variables.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block_variables.rs @@ -142,7 +142,7 @@ pub(crate) fn allocate_value( } Type::Array(item_typ, elem_count) => BrilligVariable::BrilligArray(BrilligArray { pointer: brillig_context.allocate_register(), - size: compute_array_length(&item_typ, elem_count), + size: compute_array_length(&item_typ, elem_count as usize), }), Type::Slice(_) => BrilligVariable::BrilligVector(BrilligVector { pointer: brillig_context.allocate_register(), diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs index 2779be103cd..3dea7b3e7f5 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs @@ -59,7 +59,7 @@ impl FunctionContext { vecmap(item_type.iter(), |item_typ| { FunctionContext::ssa_type_to_parameter(item_typ) }), - *size, + *size as usize, ), Type::Slice(_) => { panic!("ICE: Slice parameters cannot be derived from type information") diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/brillig_variable.rs b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/brillig_variable.rs index 81d61e05cc4..0bb18448670 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/brillig_variable.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/brillig_variable.rs @@ -88,7 +88,7 @@ pub(crate) fn type_to_heap_value_type(typ: &Type) -> HeapValueType { ), Type::Array(elem_type, size) => HeapValueType::Array { value_types: elem_type.as_ref().iter().map(type_to_heap_value_type).collect(), - size: typ.element_size() * size, + size: typ.element_size() * *size as usize, }, Type::Slice(elem_type) => HeapValueType::Vector { value_types: elem_type.as_ref().iter().map(type_to_heap_value_type).collect(), diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/procedures/array_copy.rs b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/procedures/array_copy.rs index 67f7cf2dc34..0a6e8824223 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/procedures/array_copy.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/procedures/array_copy.rs @@ -69,6 +69,8 @@ pub(super) fn compile_array_copy_procedure( BRILLIG_MEMORY_ADDRESSING_BIT_SIZE, 1_usize.into(), ); + // Decrease the original ref count now that this copy is no longer pointing to it + ctx.codegen_usize_op(rc.address, 
rc.address, BrilligBinaryOp::Sub, 1); } }); } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/mod.rs b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/mod.rs index 1b61ae1a864..cb8c35cd8e0 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/mod.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/mod.rs @@ -12,7 +12,7 @@ use self::{ }, }; use crate::ssa::{ - ir::function::{Function, FunctionId, RuntimeType}, + ir::function::{Function, FunctionId}, ssa_gen::Ssa, }; use fxhash::FxHashMap as HashMap; @@ -59,7 +59,7 @@ impl std::ops::Index for Brillig { } impl Ssa { - /// Compile to brillig brillig functions and ACIR functions reachable from them + /// Compile Brillig functions and ACIR functions reachable from them #[tracing::instrument(level = "trace", skip_all)] pub(crate) fn to_brillig(&self, enable_debug_trace: bool) -> Brillig { // Collect all the function ids that are reachable from brillig @@ -67,9 +67,7 @@ impl Ssa { let brillig_reachable_function_ids = self .functions .iter() - .filter_map(|(id, func)| { - matches!(func.runtime(), RuntimeType::Brillig(_)).then_some(*id) - }) + .filter_map(|(id, func)| func.runtime().is_brillig().then_some(*id)) .collect::>(); let mut brillig = Brillig::default(); diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/errors.rs b/noir/noir-repo/compiler/noirc_evaluator/src/errors.rs index 994e97eabb8..75a3ceb3a72 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/errors.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/errors.rs @@ -63,7 +63,7 @@ pub enum RuntimeError { UnknownReference { call_stack: CallStack }, } -#[derive(Debug, Clone, Serialize, Deserialize)] +#[derive(Debug, Clone, Serialize, Deserialize, Hash)] pub enum SsaReport { Warning(InternalWarning), Bug(InternalBug), @@ -107,7 +107,7 @@ impl From for FileDiagnostic { } } -#[derive(Debug, PartialEq, Eq, Clone, Error, Serialize, Deserialize)] +#[derive(Debug, PartialEq, Eq, Clone, Error, Serialize, Deserialize, Hash)] pub enum InternalWarning { #[error("Return variable contains a constant value")] ReturnConstant { call_stack: CallStack }, @@ -115,7 +115,7 @@ pub enum InternalWarning { VerifyProof { call_stack: CallStack }, } -#[derive(Debug, PartialEq, Eq, Clone, Error, Serialize, Deserialize)] +#[derive(Debug, PartialEq, Eq, Clone, Error, Serialize, Deserialize, Hash)] pub enum InternalBug { #[error("Input to brillig function is in a separate subgraph to output")] IndependentSubgraph { call_stack: CallStack }, diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa.rs index 97c1760d87c..8f31023f790 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa.rs @@ -44,9 +44,16 @@ mod opt; pub(crate) mod parser; pub mod ssa_gen; +#[derive(Debug, Clone)] +pub enum SsaLogging { + None, + All, + Contains(String), +} + pub struct SsaEvaluatorOptions { /// Emit debug information for the intermediate SSA IR - pub enable_ssa_logging: bool, + pub ssa_logging: SsaLogging, pub enable_brillig_logging: bool, @@ -67,6 +74,11 @@ pub struct SsaEvaluatorOptions { /// The higher the value, the more inlined brillig functions will be. pub inliner_aggressiveness: i64, + + /// Maximum accepted percentage increase in the Brillig bytecode size after unrolling loops. + /// When `None` the size increase check is skipped altogether and any decrease in the SSA + /// instruction count is accepted. 
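A hedged sketch of the acceptance rule this doc comment describes; `unrolling_accepted` is a hypothetical helper, not the pass's actual code:

```rust
// Hypothetical helper illustrating the size-budget rule; the real check
// is implemented inside the loop-unrolling pass.
fn unrolling_accepted(size_before: usize, size_after: usize, max_percent: Option<i32>) -> bool {
    match max_percent {
        // No cap configured: any bytecode-size change is acceptable and the
        // SSA instruction-count criterion (checked elsewhere) decides.
        None => true,
        Some(max) => {
            let (before, after) = (size_before as i64, size_after as i64);
            // Growth must stay within `max` percent of the original size;
            // a negative `max` demands an actual size reduction.
            (after - before) * 100 <= before * i64::from(max)
        }
    }
}
```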
+ pub max_bytecode_increase_percent: Option, } pub(crate) struct ArtifactsAndWarnings(Artifacts, Vec); @@ -85,46 +97,49 @@ pub(crate) fn optimize_into_acir( let mut ssa = SsaBuilder::new( program, - options.enable_ssa_logging, + options.ssa_logging.clone(), options.force_brillig_output, options.print_codegen_timings, &options.emit_ssa, )? - .run_pass(Ssa::defunctionalize, "After Defunctionalization:") - .run_pass(Ssa::remove_paired_rc, "After Removing Paired rc_inc & rc_decs:") - .run_pass(Ssa::separate_runtime, "After Runtime Separation:") - .run_pass(Ssa::resolve_is_unconstrained, "After Resolving IsUnconstrained:") - .run_pass(|ssa| ssa.inline_functions(options.inliner_aggressiveness), "After Inlining (1st):") + .run_pass(Ssa::defunctionalize, "Defunctionalization") + .run_pass(Ssa::remove_paired_rc, "Removing Paired rc_inc & rc_decs") + .run_pass(Ssa::separate_runtime, "Runtime Separation") + .run_pass(Ssa::resolve_is_unconstrained, "Resolving IsUnconstrained") + .run_pass(|ssa| ssa.inline_functions(options.inliner_aggressiveness), "Inlining (1st)") // Run mem2reg with the CFG separated into blocks - .run_pass(Ssa::mem2reg, "After Mem2Reg (1st):") - .run_pass(Ssa::simplify_cfg, "After Simplifying (1st):") - .run_pass(Ssa::as_slice_optimization, "After `as_slice` optimization") + .run_pass(Ssa::mem2reg, "Mem2Reg (1st)") + .run_pass(Ssa::simplify_cfg, "Simplifying (1st)") + .run_pass(Ssa::as_slice_optimization, "`as_slice` optimization") .try_run_pass( Ssa::evaluate_static_assert_and_assert_constant, - "After `static_assert` and `assert_constant`:", + "`static_assert` and `assert_constant`", + )? + .run_pass(Ssa::loop_invariant_code_motion, "Loop Invariant Code Motion") + .try_run_pass( + |ssa| ssa.unroll_loops_iteratively(options.max_bytecode_increase_percent), + "Unrolling", )? - .run_pass(Ssa::loop_invariant_code_motion, "After Loop Invariant Code Motion:") - .try_run_pass(Ssa::unroll_loops_iteratively, "After Unrolling:")? - .run_pass(Ssa::simplify_cfg, "After Simplifying (2nd):") - .run_pass(Ssa::flatten_cfg, "After Flattening:") - .run_pass(Ssa::remove_bit_shifts, "After Removing Bit Shifts:") + .run_pass(Ssa::simplify_cfg, "Simplifying (2nd)") + .run_pass(Ssa::flatten_cfg, "Flattening") + .run_pass(Ssa::remove_bit_shifts, "After Removing Bit Shifts") // Run mem2reg once more with the flattened CFG to catch any remaining loads/stores - .run_pass(Ssa::mem2reg, "After Mem2Reg (2nd):") + .run_pass(Ssa::mem2reg, "Mem2Reg (2nd)") // Run the inlining pass again to handle functions with `InlineType::NoPredicates`. // Before flattening is run, we treat functions marked with the `InlineType::NoPredicates` as an entry point. // This pass must come immediately following `mem2reg` as the succeeding passes // may create an SSA which inlining fails to handle. 
.run_pass(
    |ssa| ssa.inline_functions_with_no_predicates(options.inliner_aggressiveness),
-   "After Inlining (2nd):",
+   "Inlining (2nd)",
)
- .run_pass(Ssa::remove_if_else, "After Remove IfElse:")
- .run_pass(Ssa::fold_constants, "After Constant Folding:")
- .run_pass(Ssa::remove_enable_side_effects, "After EnableSideEffectsIf removal:")
- .run_pass(Ssa::fold_constants_using_constraints, "After Constraint Folding:")
- .run_pass(Ssa::dead_instruction_elimination, "After Dead Instruction Elimination:")
- .run_pass(Ssa::simplify_cfg, "After Simplifying:")
- .run_pass(Ssa::array_set_optimization, "After Array Set Optimizations:")
+ .run_pass(Ssa::remove_if_else, "Remove IfElse")
+ .run_pass(Ssa::fold_constants, "Constant Folding")
+ .run_pass(Ssa::remove_enable_side_effects, "EnableSideEffectsIf removal")
+ .run_pass(Ssa::fold_constants_using_constraints, "Constraint Folding")
+ .run_pass(Ssa::dead_instruction_elimination, "Dead Instruction Elimination (1st)")
+ .run_pass(Ssa::simplify_cfg, "Simplifying (3rd)")
+ .run_pass(Ssa::array_set_optimization, "Array Set Optimizations")
.finish();

let ssa_level_warnings = if options.skip_underconstrained_check {
@@ -146,14 +161,11 @@ pub(crate) fn optimize_into_acir(

let ssa = SsaBuilder {
    ssa,
-   print_ssa_passes: options.enable_ssa_logging,
+   ssa_logging: options.ssa_logging.clone(),
    print_codegen_timings: options.print_codegen_timings,
}
- .run_pass(
-     |ssa| ssa.fold_constants_with_brillig(&brillig),
-     "After Constant Folding with Brillig:",
- )
- .run_pass(Ssa::dead_instruction_elimination, "After Dead Instruction Elimination:")
+ .run_pass(|ssa| ssa.fold_constants_with_brillig(&brillig), "Inlining Brillig Calls")
+ .run_pass(Ssa::dead_instruction_elimination, "Dead Instruction Elimination (2nd)")
.finish();

drop(ssa_gen_span_guard);
@@ -226,7 +238,7 @@ impl SsaProgramArtifact {
    }
}

-/// Compiles the [`Program`] into [`ACIR``][acvm::acir::circuit::Program].
+/// Compiles the [`Program`] into [`ACIR`][acvm::acir::circuit::Program].
///
/// The output ACIR is backend-agnostic and so must go through a transformation pass before usage in proof generation.
#[tracing::instrument(level = "trace", skip_all)]
@@ -411,14 +423,14 @@ fn split_public_and_private_inputs(
// This is just a convenience object to bundle the ssa with `print_ssa_passes` for debug printing.
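// Illustrative sketch (editor's aside, not part of this diff) of the pass-name matching the
// new `SsaLogging::Contains` filter performs in `SsaBuilder::print` below, assuming the
// `SsaLogging` enum introduced above. Both `Contains("Unrolling")` and a legacy
// `Contains("After Unrolling:")` select the same pass, since the filter strips the old
// "after " prefix and ":" suffix before matching case-insensitively.
fn should_print_pass(logging: &SsaLogging, msg: &str) -> bool {
    match logging {
        SsaLogging::None => false,
        SsaLogging::All => true,
        SsaLogging::Contains(string) => {
            let string = string.to_lowercase();
            let string = string.strip_prefix("after ").unwrap_or(&string);
            let string = string.strip_suffix(':').unwrap_or(string);
            msg.to_lowercase().contains(string)
        }
    }
}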
struct SsaBuilder { ssa: Ssa, - print_ssa_passes: bool, + ssa_logging: SsaLogging, print_codegen_timings: bool, } impl SsaBuilder { fn new( program: Program, - print_ssa_passes: bool, + ssa_logging: SsaLogging, force_brillig_runtime: bool, print_codegen_timings: bool, emit_ssa: &Option, @@ -433,7 +445,7 @@ impl SsaBuilder { let ssa_path = emit_ssa.with_extension("ssa.json"); write_to_file(&serde_json::to_vec(&ssa).unwrap(), &ssa_path); } - Ok(SsaBuilder { print_ssa_passes, print_codegen_timings, ssa }.print("Initial SSA:")) + Ok(SsaBuilder { ssa_logging, print_codegen_timings, ssa }.print("Initial SSA:")) } fn finish(self) -> Ssa { @@ -450,19 +462,28 @@ impl SsaBuilder { } /// The same as `run_pass` but for passes that may fail - fn try_run_pass( - mut self, - pass: fn(Ssa) -> Result, - msg: &str, - ) -> Result { + fn try_run_pass(mut self, pass: F, msg: &str) -> Result + where + F: FnOnce(Ssa) -> Result, + { self.ssa = time(msg, self.print_codegen_timings, || pass(self.ssa))?; Ok(self.print(msg)) } fn print(mut self, msg: &str) -> Self { - if self.print_ssa_passes { + let print_ssa_pass = match &self.ssa_logging { + SsaLogging::None => false, + SsaLogging::All => true, + SsaLogging::Contains(string) => { + let string = string.to_lowercase(); + let string = string.strip_prefix("after ").unwrap_or(&string); + let string = string.strip_suffix(':').unwrap_or(string); + msg.to_lowercase().contains(string) + } + }; + if print_ssa_pass { self.ssa.normalize_ids(); - println!("{msg}\n{}", self.ssa); + println!("After {msg}:\n{}", self.ssa); } self } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/function_builder/data_bus.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/function_builder/data_bus.rs index e4a2eeb8c22..bd2585a3bfa 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/function_builder/data_bus.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/function_builder/data_bus.rs @@ -160,7 +160,7 @@ impl FunctionBuilder { for value in values { self.add_to_data_bus(*value, &mut databus); } - let len = databus.values.len(); + let len = databus.values.len() as u32; let array = (len > 0 && matches!(self.current_function.runtime(), RuntimeType::Acir(_))) .then(|| { @@ -223,9 +223,11 @@ impl FunctionBuilder { ssa_params: &[ValueId], mut flattened_params_databus_visibility: Vec, ) -> Vec { - let ssa_param_sizes: Vec<_> = ssa_params + let ssa_param_sizes: Vec = ssa_params .iter() - .map(|ssa_param| self.current_function.dfg[*ssa_param].get_type().flattened_size()) + .map(|ssa_param| { + self.current_function.dfg[*ssa_param].get_type().flattened_size() as usize + }) .collect(); let mut is_ssa_params_databus = Vec::with_capacity(ssa_params.len()); diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs index 0479f8da0b7..0ae61404442 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs @@ -441,29 +441,38 @@ impl FunctionBuilder { /// Insert instructions to increment the reference count of any array(s) stored /// within the given value. If the given value is not an array and does not contain /// any arrays, this does nothing. - pub(crate) fn increment_array_reference_count(&mut self, value: ValueId) { - self.update_array_reference_count(value, true); + /// + /// Returns whether a reference count instruction was issued. 
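+    /// (Illustrative note, not part of this diff: an `inc_rc` is only issued for array-bearing
+    /// values, so calling this on a `[Field; 2]` issues the instruction and returns `true`,
+    /// while a plain `u32` issues nothing and returns `false`.)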
+ pub(crate) fn increment_array_reference_count(&mut self, value: ValueId) -> bool { + self.update_array_reference_count(value, true) } /// Insert instructions to decrement the reference count of any array(s) stored /// within the given value. If the given value is not an array and does not contain /// any arrays, this does nothing. - pub(crate) fn decrement_array_reference_count(&mut self, value: ValueId) { - self.update_array_reference_count(value, false); + /// + /// Returns whether a reference count instruction was issued. + pub(crate) fn decrement_array_reference_count(&mut self, value: ValueId) -> bool { + self.update_array_reference_count(value, false) } /// Increment or decrement the given value's reference count if it is an array. /// If it is not an array, this does nothing. Note that inc_rc and dec_rc instructions /// are ignored outside of unconstrained code. - fn update_array_reference_count(&mut self, value: ValueId, increment: bool) { + /// + /// Returns whether a reference count instruction was issued. + fn update_array_reference_count(&mut self, value: ValueId, increment: bool) -> bool { match self.type_of_value(value) { - Type::Numeric(_) => (), - Type::Function => (), + Type::Numeric(_) => false, + Type::Function => false, Type::Reference(element) => { if element.contains_an_array() { let reference = value; let value = self.insert_load(reference, element.as_ref().clone()); self.update_array_reference_count(value, increment); + true + } else { + false } } Type::Array(..) | Type::Slice(..) => { @@ -474,6 +483,7 @@ impl FunctionBuilder { } else { self.insert_dec_rc(value); } + true } } } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/dfg.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/dfg.rs index e3f3f33682b..827944e22d1 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/dfg.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/dfg.rs @@ -307,13 +307,13 @@ impl DataFlowGraph { instruction_id: InstructionId, ctrl_typevars: Option>, ) { - self.results.insert(instruction_id, Default::default()); + let result_types = self.instruction_result_types(instruction_id, ctrl_typevars); + let results = vecmap(result_types.into_iter().enumerate(), |(position, typ)| { + let instruction = instruction_id; + self.values.insert(Value::Instruction { typ, position, instruction }) + }); - // Get all of the types that this instruction produces - // and append them as results. - for typ in self.instruction_result_types(instruction_id, ctrl_typevars) { - self.append_result(instruction_id, typ); - } + self.results.insert(instruction_id, results); } /// Return the result types of this instruction. @@ -370,22 +370,6 @@ impl DataFlowGraph { matches!(self.values[value].get_type(), Type::Reference(_)) } - /// Appends a result type to the instruction. - pub(crate) fn append_result(&mut self, instruction_id: InstructionId, typ: Type) -> ValueId { - let results = self.results.get_mut(&instruction_id).unwrap(); - let expected_res_position = results.len(); - - let value_id = self.values.insert(Value::Instruction { - typ, - position: expected_res_position, - instruction: instruction_id, - }); - - // Add value to the list of results for this instruction - results.push(value_id); - value_id - } - /// Replaces an instruction result with a fresh id. pub(crate) fn replace_result( &mut self, @@ -463,7 +447,7 @@ impl DataFlowGraph { /// If this value is an array, return the length of the array as indicated by its type. /// Otherwise, return None. 
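/// (Illustrative note, not part of this diff: for a value of type `[Field; 3]` this now
/// returns `Some(3u32)`, matching the switch of array lengths from `usize` to `u32`.)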
- pub(crate) fn try_get_array_length(&self, value: ValueId) -> Option { + pub(crate) fn try_get_array_length(&self, value: ValueId) -> Option { match self.type_of_value(value) { Type::Array(_, length) => Some(length), _ => None, diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/function.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/function.rs index b1233e3063e..6413107c04a 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/function.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/function.rs @@ -197,6 +197,12 @@ impl Function { } } +impl Clone for Function { + fn clone(&self) -> Self { + Function::clone_with_id(self.id(), self) + } +} + impl std::fmt::Display for RuntimeType { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/function_inserter.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/function_inserter.rs index a0c23ad70aa..6ebd2aa1105 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/function_inserter.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/function_inserter.rs @@ -129,7 +129,7 @@ impl<'f> FunctionInserter<'f> { // another MakeArray instruction. Note that this assumes the function inserter is inserting // in control-flow order. Otherwise we could refer to ValueIds defined later in the program. let make_array = if let Instruction::MakeArray { elements, typ } = &instruction { - if self.array_is_constant(elements) { + if self.array_is_constant(elements) && self.function.runtime().is_acir() { if let Some(fetched_value) = self.get_cached_array(elements, typ) { assert_eq!(results.len(), 1); self.values.insert(results[0], fetched_value); diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction.rs index f606fffbf91..76409f6a20a 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction.rs @@ -11,7 +11,7 @@ use fxhash::FxHasher64; use iter_extended::vecmap; use noirc_frontend::hir_def::types::Type as HirType; -use crate::ssa::opt::flatten_cfg::value_merger::ValueMerger; +use crate::ssa::{ir::function::RuntimeType, opt::flatten_cfg::value_merger::ValueMerger}; use super::{ basic_block::BasicBlockId, @@ -315,7 +315,12 @@ pub(crate) enum Instruction { /// else_value /// } /// ``` - IfElse { then_condition: ValueId, then_value: ValueId, else_value: ValueId }, + IfElse { + then_condition: ValueId, + then_value: ValueId, + else_condition: ValueId, + else_value: ValueId, + }, /// Creates a new array or slice. /// @@ -389,9 +394,22 @@ impl Instruction { // This should never be side-effectful MakeArray { .. } => false, + // Some binary math can overflow or underflow + Binary(binary) => match binary.operator { + BinaryOp::Add | BinaryOp::Sub | BinaryOp::Mul | BinaryOp::Div | BinaryOp::Mod => { + true + } + BinaryOp::Eq + | BinaryOp::Lt + | BinaryOp::And + | BinaryOp::Or + | BinaryOp::Xor + | BinaryOp::Shl + | BinaryOp::Shr => false, + }, + // These can have different behavior depending on the EnableSideEffectsIf context. - Binary(_) - | Cast(_, _) + Cast(_, _) | Not(_) | Truncate { .. } | IfElse { .. } @@ -411,7 +429,7 @@ impl Instruction { /// conditional on whether the caller wants the predicate to be taken into account or not. 
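/// (Illustrative note, not part of this diff: under the arms below, a `make_array` in an
/// ACIR function may be deduplicated, while the same instruction in a Brillig function may
/// not, since unconstrained code can mutate arrays in place.)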
pub(crate) fn can_be_deduplicated( &self, - dfg: &DataFlowGraph, + function: &Function, deduplicate_with_predicate: bool, ) -> bool { use Instruction::*; @@ -425,7 +443,7 @@ impl Instruction { | IncrementRc { .. } | DecrementRc { .. } => false, - Call { func, .. } => match dfg[*func] { + Call { func, .. } => match function.dfg[*func] { Value::Intrinsic(intrinsic) => { intrinsic.can_be_deduplicated(deduplicate_with_predicate) } @@ -435,8 +453,11 @@ impl Instruction { // We can deduplicate these instructions if we know the predicate is also the same. Constrain(..) | RangeCheck { .. } => deduplicate_with_predicate, - // This should never be side-effectful - MakeArray { .. } => true, + // Arrays can be mutated in unconstrained code so code that handles this case must + // take care to track whether the array was possibly mutated or not before + // deduplicating. Since we don't know if the containing pass checks for this, we + // can only assume these are safe to deduplicate in constrained code. + MakeArray { .. } => function.runtime().is_acir(), // These can have different behavior depending on the EnableSideEffectsIf context. // Replacing them with a similar instruction potentially enables replacing an instruction @@ -449,7 +470,7 @@ impl Instruction { | IfElse { .. } | ArrayGet { .. } | ArraySet { .. } => { - deduplicate_with_predicate || !self.requires_acir_gen_predicate(dfg) + deduplicate_with_predicate || !self.requires_acir_gen_predicate(&function.dfg) } } } @@ -478,8 +499,19 @@ impl Instruction { | ArraySet { .. } | MakeArray { .. } => true, + // Store instructions must be removed by DIE in acir code, any load + // instructions should already be unused by that point. + // + // Note that this check assumes that it is being performed after the flattening + // pass and after the last mem2reg pass. This is currently the case for the DIE + // pass where this check is done, but does mean that we cannot perform mem2reg + // after the DIE pass. + Store { .. } => { + matches!(function.runtime(), RuntimeType::Acir(_)) + && function.reachable_blocks().len() == 1 + } + Constrain(..) - | Store { .. } | EnableSideEffectsIf { .. } | IncrementRc { .. } | DecrementRc { .. } @@ -608,11 +640,14 @@ impl Instruction { assert_message: assert_message.clone(), } } - Instruction::IfElse { then_condition, then_value, else_value } => Instruction::IfElse { - then_condition: f(*then_condition), - then_value: f(*then_value), - else_value: f(*else_value), - }, + Instruction::IfElse { then_condition, then_value, else_condition, else_value } => { + Instruction::IfElse { + then_condition: f(*then_condition), + then_value: f(*then_value), + else_condition: f(*else_condition), + else_value: f(*else_value), + } + } Instruction::MakeArray { elements, typ } => Instruction::MakeArray { elements: elements.iter().copied().map(f).collect(), typ: typ.clone(), @@ -671,9 +706,10 @@ impl Instruction { | Instruction::RangeCheck { value, .. 
} => { f(*value); } - Instruction::IfElse { then_condition, then_value, else_value } => { + Instruction::IfElse { then_condition, then_value, else_condition, else_value } => { f(*then_condition); f(*then_value); + f(*else_condition); f(*else_value); } Instruction::MakeArray { elements, typ: _ } => { @@ -836,7 +872,7 @@ impl Instruction { None } } - Instruction::IfElse { then_condition, then_value, else_value } => { + Instruction::IfElse { then_condition, then_value, else_condition, else_value } => { let typ = dfg.type_of_value(*then_value); if let Some(constant) = dfg.get_numeric_constant(*then_condition) { @@ -855,11 +891,13 @@ impl Instruction { if matches!(&typ, Type::Numeric(_)) { let then_condition = *then_condition; + let else_condition = *else_condition; let result = ValueMerger::merge_numeric_values( dfg, block, then_condition, + else_condition, then_value, else_value, ); diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs index 6ebe80128c0..a8db5e2ff94 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs @@ -56,11 +56,13 @@ pub(super) fn simplify_call( if let (Some(constant_args), Some(return_type)) = (constant_args, return_type.clone()) { let field = constant_args[0]; let limb_count = if let Type::Array(_, array_len) = return_type { - array_len as u32 + array_len } else { unreachable!("ICE: Intrinsic::ToRadix return type must be array") }; - constant_to_radix(endian, field, 2, limb_count, dfg, block, call_stack) + constant_to_radix(endian, field, 2, limb_count, |values| { + make_constant_array(dfg, values.into_iter(), Type::bool(), block, call_stack) + }) } else { SimplifyResult::None } @@ -71,11 +73,19 @@ pub(super) fn simplify_call( let field = constant_args[0]; let radix = constant_args[1].to_u128() as u32; let limb_count = if let Type::Array(_, array_len) = return_type { - array_len as u32 + array_len } else { unreachable!("ICE: Intrinsic::ToRadix return type must be array") }; - constant_to_radix(endian, field, radix, limb_count, dfg, block, call_stack) + constant_to_radix(endian, field, radix, limb_count, |values| { + make_constant_array( + dfg, + values.into_iter(), + Type::unsigned(8), + block, + call_stack, + ) + }) } else { SimplifyResult::None } @@ -351,7 +361,7 @@ pub(super) fn simplify_call( Intrinsic::IsUnconstrained => SimplifyResult::None, Intrinsic::DerivePedersenGenerators => { if let Some(Type::Array(_, len)) = return_type.clone() { - simplify_derive_generators(dfg, arguments, len as u32, block, call_stack) + simplify_derive_generators(dfg, arguments, len, block, call_stack) } else { unreachable!("Derive Pedersen Generators must return an array"); } @@ -432,8 +442,8 @@ fn simplify_slice_push_back( for elem in &arguments[2..] 
{ slice.push_back(*elem); } - let slice_size = slice.len(); - let element_size = element_type.element_size(); + let slice_size = slice.len() as u32; + let element_size = element_type.element_size() as u32; let new_slice = make_array(dfg, slice, element_type, block, &call_stack); let set_last_slice_value_instr = Instruction::ArraySet { @@ -455,8 +465,12 @@ fn simplify_slice_push_back( let mut value_merger = ValueMerger::new(dfg, block, &mut slice_sizes, unknown, None, call_stack); - let new_slice = - value_merger.merge_values(len_not_equals_capacity, set_last_slice_value, new_slice); + let new_slice = value_merger.merge_values( + len_not_equals_capacity, + len_equals_capacity, + set_last_slice_value, + new_slice, + ); SimplifyResult::SimplifiedToMultiple(vec![new_slice_length, new_slice]) } @@ -621,7 +635,7 @@ fn make_constant_array( let result_constants: im::Vector<_> = results.map(|element| dfg.make_constant(element, typ.clone())).collect(); - let typ = Type::Array(Arc::new(vec![typ]), result_constants.len()); + let typ = Type::Array(Arc::new(vec![typ]), result_constants.len() as u32); make_array(dfg, result_constants, typ, block, call_stack) } @@ -660,9 +674,7 @@ fn constant_to_radix( field: FieldElement, radix: u32, limb_count: u32, - dfg: &mut DataFlowGraph, - block: BasicBlockId, - call_stack: &CallStack, + mut make_array: impl FnMut(Vec) -> ValueId, ) -> SimplifyResult { let bit_size = u32::BITS - (radix - 1).leading_zeros(); let radix_big = BigUint::from(radix); @@ -683,13 +695,7 @@ fn constant_to_radix( if endian == Endian::Big { limbs.reverse(); } - let result_array = make_constant_array( - dfg, - limbs.into_iter(), - Type::unsigned(bit_size), - block, - call_stack, - ); + let result_array = make_array(limbs); SimplifyResult::SimplifiedTo(result_array) } } @@ -816,7 +822,7 @@ fn simplify_derive_generators( results.push(dfg.make_constant(y, Type::field())); results.push(is_infinite); } - let len = results.len(); + let len = results.len() as u32; let typ = Type::Array(vec![Type::field(), Type::field(), Type::unsigned(1)].into(), len / 3); let result = make_array(dfg, results.into(), typ, block, call_stack); @@ -835,27 +841,27 @@ mod tests { #[test] fn simplify_derive_generators_has_correct_type() { - let src = " + let src = r#" brillig(inline) fn main f0 { b0(): - v0 = make_array [u8 68, u8 69, u8 70, u8 65, u8 85, u8 76, u8 84, u8 95, u8 68, u8 79, u8 77, u8 65, u8 73, u8 78, u8 95, u8 83, u8 69, u8 80, u8 65, u8 82, u8 65, u8 84, u8 79, u8 82] : [u8; 24] + v0 = make_array b"DEFAULT_DOMAIN_SEPARATOR" // This call was previously incorrectly simplified to something that returned `[Field; 3]` v2 = call derive_pedersen_generators(v0, u32 0) -> [(Field, Field, u1); 1] return v2 } - "; + "#; let ssa = Ssa::from_str(src).unwrap(); - let expected = " + let expected = r#" brillig(inline) fn main f0 { b0(): - v15 = make_array [u8 68, u8 69, u8 70, u8 65, u8 85, u8 76, u8 84, u8 95, u8 68, u8 79, u8 77, u8 65, u8 73, u8 78, u8 95, u8 83, u8 69, u8 80, u8 65, u8 82, u8 65, u8 84, u8 79, u8 82] : [u8; 24] + v15 = make_array b"DEFAULT_DOMAIN_SEPARATOR" v19 = make_array [Field 3728882899078719075161482178784387565366481897740339799480980287259621149274, Field -9903063709032878667290627648209915537972247634463802596148419711785767431332, u1 0] : [(Field, Field, u1); 1] return v19 } - "; + "#; assert_normalized_ssa_equals(ssa, expected); } } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/printer.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/printer.rs index 
6bebd21fe61..aa2952d5abc 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/printer.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/printer.rs @@ -5,8 +5,11 @@ use std::{ }; use acvm::acir::AcirField; +use im::Vector; use iter_extended::vecmap; +use crate::ssa::ir::types::{NumericType, Type}; + use super::{ basic_block::BasicBlockId, dfg::DataFlowGraph, @@ -209,13 +212,39 @@ fn display_instruction_inner( Instruction::RangeCheck { value, max_bit_size, .. } => { writeln!(f, "range_check {} to {} bits", show(*value), *max_bit_size,) } - Instruction::IfElse { then_condition, then_value, else_value } => { + Instruction::IfElse { then_condition, then_value, else_condition, else_value } => { let then_condition = show(*then_condition); let then_value = show(*then_value); + let else_condition = show(*else_condition); let else_value = show(*else_value); - writeln!(f, "if {then_condition} then {then_value} else {else_value}") + writeln!( + f, + "if {then_condition} then {then_value} else (if {else_condition}) {else_value}" + ) } Instruction::MakeArray { elements, typ } => { + // If the array is a byte array, we check if all the bytes are printable ascii characters + // and, if so, we print the array as a string literal (easier to understand). + // It could happen that the byte array is a random byte sequence that happens to be printable + // (it didn't come from a string literal) but this still reduces the noise in the output + // and actually represents the same value. + let (element_types, is_slice) = match typ { + Type::Array(types, _) => (types, false), + Type::Slice(types) => (types, true), + _ => panic!("Expected array or slice type for MakeArray"), + }; + if element_types.len() == 1 + && element_types[0] == Type::Numeric(NumericType::Unsigned { bit_size: 8 }) + { + if let Some(string) = try_byte_array_to_string(elements, function) { + if is_slice { + return writeln!(f, "make_array &b{:?}", string); + } else { + return writeln!(f, "make_array b{:?}", string); + } + } + } + write!(f, "make_array [")?; for (i, element) in elements.iter().enumerate() { @@ -230,6 +259,25 @@ fn display_instruction_inner( } } +fn try_byte_array_to_string(elements: &Vector, function: &Function) -> Option { + let mut string = String::new(); + for element in elements { + let element = function.dfg.get_numeric_constant(*element)?; + let element = element.try_to_u32()?; + if element > 0xFF { + return None; + } + let byte = element as u8; + if byte.is_ascii_alphanumeric() || byte.is_ascii_punctuation() || byte.is_ascii_whitespace() + { + string.push(byte as char); + } else { + return None; + } + } + Some(string) +} + fn result_types(function: &Function, results: &[ValueId]) -> String { let types = vecmap(results, |result| function.dfg.type_of_value(*result).to_string()); if types.is_empty() { diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/types.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/types.rs index 130f1d59e46..4e4f7e8aa62 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/types.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/types.rs @@ -75,7 +75,7 @@ pub(crate) enum Type { Reference(Arc), /// An immutable array value with the given element type and length - Array(Arc, usize), + Array(Arc, u32), /// An immutable slice value with a given element type Slice(Arc), @@ -111,7 +111,7 @@ impl Type { } /// Creates the str type, of the given length N - pub(crate) fn str(length: usize) -> Type { + pub(crate) fn str(length: u32) -> Type { 
Type::Array(Arc::new(vec![Type::char()]), length) } @@ -161,7 +161,7 @@ impl Type { } /// Returns the flattened size of a Type - pub(crate) fn flattened_size(&self) -> usize { + pub(crate) fn flattened_size(&self) -> u32 { match self { Type::Array(elements, len) => { elements.iter().fold(0, |sum, elem| sum + (elem.flattened_size() * len)) @@ -190,6 +190,15 @@ impl Type { } } + /// Retrieves the array or slice type within this type, or panics if there is none. + pub(crate) fn get_contained_array(&self) -> &Type { + match self { + Type::Numeric(_) | Type::Function => panic!("Expected an array type"), + Type::Array(_, _) | Type::Slice(_) => self, + Type::Reference(element) => element.get_contained_array(), + } + } + pub(crate) fn element_types(self) -> Arc> { match self { Type::Array(element_types, _) | Type::Slice(element_types) => element_types, diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/as_slice_length.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/as_slice_length.rs index 76705dcc9db..75cdea349b7 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/as_slice_length.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/as_slice_length.rs @@ -33,7 +33,7 @@ impl Function { } } -fn known_slice_lengths(func: &Function) -> HashMap { +fn known_slice_lengths(func: &Function) -> HashMap { let mut known_slice_lengths = HashMap::default(); for block_id in func.reachable_blocks() { let block = &func.dfg[block_id]; @@ -61,7 +61,7 @@ fn known_slice_lengths(func: &Function) -> HashMap { fn replace_known_slice_lengths( func: &mut Function, - known_slice_lengths: HashMap, + known_slice_lengths: HashMap, ) { known_slice_lengths.into_iter().for_each(|(instruction_id, known_length)| { let call_returns = func.dfg.instruction_results(instruction_id); diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs index ceda0c6272f..93ca428c6d0 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs @@ -149,7 +149,8 @@ impl Function { use_constraint_info: bool, brillig_info: Option, ) { - let mut context = Context::new(self, use_constraint_info, brillig_info); + let mut context = Context::new(use_constraint_info, brillig_info); + let mut dom = DominatorTree::with_function(self); context.block_queue.push_back(self.entry_block()); while let Some(block) = context.block_queue.pop_front() { @@ -158,7 +159,7 @@ impl Function { } context.visited_blocks.insert(block); - context.fold_constants_in_block(self, block); + context.fold_constants_in_block(self, &mut dom, block); } } } @@ -172,22 +173,15 @@ struct Context<'a> { /// Contains sets of values which are constrained to be equivalent to each other. /// - /// The mapping's structure is `side_effects_enabled_var => (constrained_value => [(block, simplified_value)])`. + /// The mapping's structure is `side_effects_enabled_var => (constrained_value => simplified_value)`. /// /// We partition the maps of constrained values according to the side-effects flag at the point /// at which the values are constrained. This prevents constraints which are only sometimes enforced /// being used to modify the rest of the program. - /// - /// We also keep track of how a value was simplified to other values per block. 
That is,
-   /// the same ValueId could have been simplified to one value in one block and to another value
-   /// in another block.
-   constraint_simplification_mappings:
-       HashMap<ValueId, HashMap<ValueId, Vec<(BasicBlockId, ValueId)>>>,
+   constraint_simplification_mappings: ConstraintSimplificationCache,

    // Cache of instructions without any side-effects along with their outputs.
    cached_instruction_results: InstructionResultCache,
-
-   dom: DominatorTree,
}

#[derive(Copy, Clone)]
pub(crate) struct BrilligInfo<'a> {
    brillig_functions: &'a BTreeMap<FunctionId, Function>,
}

+/// Records simplified equivalents of an [`Instruction`] in the blocks
+/// where the constraint that advised the simplification has been encountered.
+///
+/// For more information see [`ConstraintSimplificationCache`].
+#[derive(Default)]
+struct SimplificationCache {
+    /// Simplified expressions where we found them.
+    ///
+    /// It will always have at least one value because `add` is called
+    /// after the default is constructed.
+    simplifications: HashMap<BasicBlockId, ValueId>,
+}
+
+impl SimplificationCache {
+    /// Called with a newly encountered simplification.
+    fn add(&mut self, dfg: &DataFlowGraph, simple: ValueId, block: BasicBlockId) {
+        self.simplifications
+            .entry(block)
+            .and_modify(|existing| {
+                // `SimplificationCache` may already hold a simplification in this block
+                // so we check whether `simple` is a better simplification than the current one.
+                if let Some((_, simpler)) = simplify(dfg, *existing, simple) {
+                    *existing = simpler;
+                };
+            })
+            .or_insert(simple);
+    }
+
+    /// Try to find a simplification in a visible block.
+    fn get(&self, block: BasicBlockId, dom: &DominatorTree) -> Option<ValueId> {
+        // Deterministically walk up the dominator chain until we encounter a block that contains a simplification.
+        dom.find_map_dominator(block, |b| self.simplifications.get(&b).cloned())
+    }
+}
+
+/// HashMap from `(side_effects_enabled_var, Instruction)` to a simplified expression that it can
+/// be replaced with based on constraints that testify to their equivalence, stored together
+/// with the set of blocks at which this constraint has been observed.
+///
+/// Only blocks dominated by one in the cache should have access to this information, otherwise
+/// we create a sort of time paradox where we replace an instruction with a constant we believe
+/// it _should_ equal, without ever actually producing and asserting the value.
+type ConstraintSimplificationCache = HashMap<ValueId, HashMap<ValueId, SimplificationCache>>;

/// HashMap from `(Instruction, side_effects_enabled_var)` to the results of the instruction.
/// Stored as a two-level map to avoid cloning Instructions during the `.get` call.
///
+/// The `side_effects_enabled_var` is optional because we only use it when `Instruction::requires_acir_gen_predicate`
+/// is true _and_ the constraint information is also taken into account.
+///
/// In addition to each result, the original BasicBlockId is stored as well. This allows us
/// to deduplicate instructions across blocks as long as the new block dominates the original.
type InstructionResultCache = HashMap<Instruction, HashMap<Option<ValueId>, ResultCache>>;
@@ -208,15 +249,11 @@ type InstructionResultCache = HashMap<Instruction, HashMap<Option<ValueId>, Resu
/// Cached results for a given instruction and block.
/// For more information see [`InstructionResultCache`].
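// Illustrative note (editor's aside, not part of this diff): the
// `deduplicate_across_non_dominated_blocks` test below exercises exactly this rule; a `shl`
// first cached in one branch is requested from a sibling branch that it does not dominate,
// so instead of reusing the cached result directly, the pass hoists the instruction to the
// two blocks' common dominator.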
#[derive(Default)] struct ResultCache { - results: Vec<(BasicBlockId, Vec)>, + result: Option<(BasicBlockId, Vec)>, } impl<'brillig> Context<'brillig> { - fn new( - function: &Function, - use_constraint_info: bool, - brillig_info: Option>, - ) -> Self { + fn new(use_constraint_info: bool, brillig_info: Option>) -> Self { Self { use_constraint_info, brillig_info, @@ -224,19 +261,25 @@ impl<'brillig> Context<'brillig> { block_queue: Default::default(), constraint_simplification_mappings: Default::default(), cached_instruction_results: Default::default(), - dom: DominatorTree::with_function(function), } } - fn fold_constants_in_block(&mut self, function: &mut Function, block: BasicBlockId) { + fn fold_constants_in_block( + &mut self, + function: &mut Function, + dom: &mut DominatorTree, + block: BasicBlockId, + ) { let instructions = function.dfg[block].take_instructions(); + // Default side effect condition variable with an enabled state. let mut side_effects_enabled_var = function.dfg.make_constant(FieldElement::one(), Type::bool()); for instruction_id in instructions { self.fold_constants_into_instruction( - &mut function.dfg, + function, + dom, block, instruction_id, &mut side_effects_enabled_var, @@ -247,48 +290,59 @@ impl<'brillig> Context<'brillig> { fn fold_constants_into_instruction( &mut self, - dfg: &mut DataFlowGraph, - block: BasicBlockId, + function: &mut Function, + dom: &mut DominatorTree, + mut block: BasicBlockId, id: InstructionId, side_effects_enabled_var: &mut ValueId, ) { - let constraint_simplification_mapping = - self.constraint_simplification_mappings.get(side_effects_enabled_var); - let instruction = Self::resolve_instruction( - id, - block, - dfg, - &mut self.dom, - constraint_simplification_mapping, - ); + let constraint_simplification_mapping = self.get_constraint_map(*side_effects_enabled_var); + let dfg = &mut function.dfg; + + let instruction = + Self::resolve_instruction(id, block, dfg, dom, constraint_simplification_mapping); + let old_results = dfg.instruction_results(id).to_vec(); // If a copy of this instruction exists earlier in the block, then reuse the previous results. - if let Some(cached_results) = - self.get_cached(dfg, &instruction, *side_effects_enabled_var, block) + if let Some(cache_result) = + self.get_cached(dfg, dom, &instruction, *side_effects_enabled_var, block) { - Self::replace_result_ids(dfg, &old_results, cached_results); - return; - } + match cache_result { + CacheResult::Cached(cached) => { + // We track whether we may mutate MakeArray instructions before we deduplicate + // them but we still need to issue an extra inc_rc in case they're mutated afterward. + if matches!(instruction, Instruction::MakeArray { .. }) { + let value = *cached.last().unwrap(); + let inc_rc = Instruction::IncrementRc { value }; + let call_stack = dfg.get_call_stack(id); + dfg.insert_instruction_and_results(inc_rc, block, None, call_stack); + } + + Self::replace_result_ids(dfg, &old_results, cached); + return; + } + CacheResult::NeedToHoistToCommonBlock(dominator) => { + // Just change the block to insert in the common dominator instead. + // This will only move the current instance of the instruction right now. + // When constant folding is run a second time later on, it'll catch + // that the previous instance can be deduplicated to this instance. + block = dominator; + } + } + }; - let new_results = // First try to inline a call to a brillig function with all constant arguments. 
- Self::try_inline_brillig_call_with_all_constants( + let new_results = Self::try_inline_brillig_call_with_all_constants( &instruction, &old_results, block, dfg, self.brillig_info, ) + // Otherwise, try inserting the instruction again to apply any optimizations using the newly resolved inputs. .unwrap_or_else(|| { - // Otherwise, try inserting the instruction again to apply any optimizations using the newly resolved inputs. - Self::push_instruction( - id, - instruction.clone(), - &old_results, - block, - dfg, - ) + Self::push_instruction(id, instruction.clone(), &old_results, block, dfg) }); Self::replace_result_ids(dfg, &old_results, &new_results); @@ -296,7 +350,7 @@ impl<'brillig> Context<'brillig> { self.cache_instruction( instruction.clone(), new_results, - dfg, + function, *side_effects_enabled_var, block, ); @@ -314,7 +368,7 @@ impl<'brillig> Context<'brillig> { block: BasicBlockId, dfg: &DataFlowGraph, dom: &mut DominatorTree, - constraint_simplification_mapping: Option<&HashMap>>, + constraint_simplification_mapping: &HashMap, ) -> Instruction { let instruction = dfg[instruction_id].clone(); @@ -324,30 +378,28 @@ impl<'brillig> Context<'brillig> { // This allows us to reach a stable final `ValueId` for each instruction input as we add more // constraints to the cache. fn resolve_cache( + block: BasicBlockId, dfg: &DataFlowGraph, dom: &mut DominatorTree, - cache: Option<&HashMap>>, + cache: &HashMap, value_id: ValueId, - block: BasicBlockId, ) -> ValueId { let resolved_id = dfg.resolve(value_id); - let Some(cached_values) = cache.and_then(|cache| cache.get(&resolved_id)) else { - return resolved_id; - }; - - for (cached_block, cached_value) in cached_values { - // We can only use the simplified value if it was simplified in a block that dominates the current one - if dom.dominates(*cached_block, block) { - return resolve_cache(dfg, dom, cache, *cached_value, block); + match cache.get(&resolved_id) { + Some(simplification_cache) => { + if let Some(simplified) = simplification_cache.get(block, dom) { + resolve_cache(block, dfg, dom, cache, simplified) + } else { + resolved_id + } } + None => resolved_id, } - - resolved_id } // Resolve any inputs to ensure that we're comparing like-for-like instructions. instruction.map_values(|value_id| { - resolve_cache(dfg, dom, constraint_simplification_mapping, value_id, block) + resolve_cache(block, dfg, dom, constraint_simplification_mapping, value_id) }) } @@ -385,7 +437,7 @@ impl<'brillig> Context<'brillig> { &mut self, instruction: Instruction, instruction_results: Vec, - dfg: &DataFlowGraph, + function: &Function, side_effects_enabled_var: ValueId, block: BasicBlockId, ) { @@ -394,21 +446,53 @@ impl<'brillig> Context<'brillig> { // to map from the more complex to the simpler value. if let Instruction::Constrain(lhs, rhs, _) = instruction { // These `ValueId`s should be fully resolved now. - if let Some((complex, simple)) = simplify(dfg, lhs, rhs) { + if let Some((complex, simple)) = simplify(&function.dfg, lhs, rhs) { self.get_constraint_map(side_effects_enabled_var) .entry(complex) .or_default() - .push((block, simple)); + .add(&function.dfg, simple, block); } } } + // If we have an array get whose value is from an array set on the same array at the same index, + // we can simplify that array get to the value of the previous array set. + // + // For example: + // v3 = array_set v0, index v1, value v2 + // v4 = array_get v3, index v1 -> Field + // + // We know that `v4` can be simplified to `v2`. 
+ // Thus, even if the index is dynamic (meaning the array get would have side effects), + // we can simplify the operation when we take into account the predicate. + if let Instruction::ArraySet { index, value, .. } = &instruction { + let use_predicate = + self.use_constraint_info && instruction.requires_acir_gen_predicate(&function.dfg); + let predicate = use_predicate.then_some(side_effects_enabled_var); + + let array_get = Instruction::ArrayGet { array: instruction_results[0], index: *index }; + + self.cached_instruction_results + .entry(array_get) + .or_default() + .entry(predicate) + .or_default() + .cache(block, vec![*value]); + } + + self.remove_possibly_mutated_cached_make_arrays(&instruction, function); + // If the instruction doesn't have side-effects and if it won't interact with enable_side_effects during acir_gen, // we cache the results so we can reuse them if the same instruction appears again later in the block. // Others have side effects representing failure, which are implicit in the ACIR code and can also be deduplicated. - if instruction.can_be_deduplicated(dfg, self.use_constraint_info) { + let can_be_deduplicated = + instruction.can_be_deduplicated(function, self.use_constraint_info); + + // We also allow deduplicating MakeArray instructions that we have tracked which haven't + // been mutated. + if can_be_deduplicated || matches!(instruction, Instruction::MakeArray { .. }) { let use_predicate = - self.use_constraint_info && instruction.requires_acir_gen_predicate(dfg); + self.use_constraint_info && instruction.requires_acir_gen_predicate(&function.dfg); let predicate = use_predicate.then_some(side_effects_enabled_var); self.cached_instruction_results @@ -420,10 +504,12 @@ impl<'brillig> Context<'brillig> { } } + /// Get the simplification mapping from complex to simpler instructions, + /// which all depend on the same side effect condition variable. fn get_constraint_map( &mut self, side_effects_enabled_var: ValueId, - ) -> &mut HashMap> { + ) -> &mut HashMap { self.constraint_simplification_mappings.entry(side_effects_enabled_var).or_default() } @@ -438,19 +524,20 @@ impl<'brillig> Context<'brillig> { } } - fn get_cached<'a>( - &'a mut self, + /// Get a cached result if it can be used in this context. + fn get_cached( + &self, dfg: &DataFlowGraph, + dom: &mut DominatorTree, instruction: &Instruction, side_effects_enabled_var: ValueId, block: BasicBlockId, - ) -> Option<&'a [ValueId]> { + ) -> Option { let results_for_instruction = self.cached_instruction_results.get(instruction)?; - let predicate = self.use_constraint_info && instruction.requires_acir_gen_predicate(dfg); let predicate = predicate.then_some(side_effects_enabled_var); - results_for_instruction.get(&predicate)?.get(block, &mut self.dom) + results_for_instruction.get(&predicate)?.get(block, dom, instruction.has_side_effects(dfg)) } /// Checks if the given instruction is a call to a brillig function with all constant arguments. @@ -612,12 +699,34 @@ impl<'brillig> Context<'brillig> { } } } + + fn remove_possibly_mutated_cached_make_arrays( + &mut self, + instruction: &Instruction, + function: &Function, + ) { + use Instruction::{ArraySet, Store}; + + // Should we consider calls to slice_push_back and similar to be mutating operations as well? + if let Store { value: array, .. } | ArraySet { array, .. } = instruction { + let instruction = match &function.dfg[*array] { + Value::Instruction { instruction, .. 
} => &function.dfg[*instruction], + _ => return, + }; + + if matches!(instruction, Instruction::MakeArray { .. }) { + self.cached_instruction_results.remove(instruction); + } + } + } } impl ResultCache { /// Records that an `Instruction` in block `block` produced the result values `results`. fn cache(&mut self, block: BasicBlockId, results: Vec) { - self.results.push((block, results)); + if self.result.is_none() { + self.result = Some((block, results)); + } } /// Returns a set of [`ValueId`]s produced from a copy of this [`Instruction`] which sits @@ -626,13 +735,23 @@ impl ResultCache { /// We require that the cached instruction's block dominates `block` in order to avoid /// cycles causing issues (e.g. two instructions being replaced with the results of each other /// such that neither instruction exists anymore.) - fn get(&self, block: BasicBlockId, dom: &mut DominatorTree) -> Option<&[ValueId]> { - for (origin_block, results) in &self.results { + fn get( + &self, + block: BasicBlockId, + dom: &mut DominatorTree, + has_side_effects: bool, + ) -> Option { + self.result.as_ref().and_then(|(origin_block, results)| { if dom.dominates(*origin_block, block) { - return Some(results); + Some(CacheResult::Cached(results)) + } else if !has_side_effects { + // Insert a copy of this instruction in the common dominator + let dominator = dom.common_dominator(*origin_block, block); + Some(CacheResult::NeedToHoistToCommonBlock(dominator)) + } else { + None } - } - None + }) } } @@ -663,7 +782,7 @@ pub(crate) fn type_to_brillig_parameter(typ: &Type) -> Option for item_typ in item_type.iter() { parameters.push(type_to_brillig_parameter(item_typ)?); } - Some(BrilligParameter::Array(parameters, *size)) + Some(BrilligParameter::Array(parameters, *size as usize)) } _ => None, } @@ -940,32 +1059,22 @@ mod test { // Regression for #4600 #[test] fn array_get_regression() { - // fn main f0 { - // b0(v0: u1, v1: u64): - // enable_side_effects_if v0 - // v2 = make_array [Field 0, Field 1] - // v3 = array_get v2, index v1 - // v4 = not v0 - // enable_side_effects_if v4 - // v5 = array_get v2, index v1 - // } - // // We want to make sure after constant folding both array_gets remain since they are // under different enable_side_effects_if contexts and thus one may be disabled while // the other is not. If one is removed, it is possible e.g. v4 is replaced with v2 which // is disabled (only gets from index 0) and thus returns the wrong result. let src = " - acir(inline) fn main f0 { - b0(v0: u1, v1: u64): - enable_side_effects v0 - v4 = make_array [Field 0, Field 1] : [Field; 2] - v5 = array_get v4, index v1 -> Field - v6 = not v0 - enable_side_effects v6 - v7 = array_get v4, index v1 -> Field - return - } - "; + acir(inline) fn main f0 { + b0(v0: u1, v1: u64): + enable_side_effects v0 + v4 = make_array [Field 0, Field 1] : [Field; 2] + v5 = array_get v4, index v1 -> Field + v6 = not v0 + enable_side_effects v6 + v7 = array_get v4, index v1 -> Field + return + } + "; let ssa = Ssa::from_str(src).unwrap(); // Expected output is unchanged @@ -1032,7 +1141,6 @@ mod test { // v5 = call keccakf1600(v1) // v6 = call keccakf1600(v2) // } - // // Here we're checking a situation where two identical arrays are being initialized twice and being assigned separate `ValueId`s. // This would result in otherwise identical instructions not being deduplicated. 
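// (After folding, a single `make_array` remains and an extra `inc_rc` is inserted for the
// reused array, which is why the expected instruction count below is 3 rather than 2.)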
let main_id = Id::test_new(0); @@ -1071,6 +1179,7 @@ mod test { // fn main f0 { // b0(v0: u64): // v1 = make_array [v0, u64 0, u64 0, u64 0, u64 0, u64 0, u64 0, u64 0, u64 0, u64 0, u64 0, u64 0, u64 0, u64 0, u64 0, u64 0, u64 0, u64 0, u64 0, u64 0, u64 0, u64 0, u64 0, u64 0, u64 0] + // inc_rc v1 // v5 = call keccakf1600(v1) // } let ssa = ssa.fold_constants(); @@ -1080,7 +1189,107 @@ mod test { let main = ssa.main(); let instructions = main.dfg[main.entry_block()].instructions(); let ending_instruction_count = instructions.len(); - assert_eq!(ending_instruction_count, 2); + assert_eq!(ending_instruction_count, 3); + } + + #[test] + fn deduplicate_across_blocks() { + // fn main f0 { + // b0(v0: u1): + // v1 = not v0 + // jmp b1() + // b1(): + // v2 = not v0 + // return v2 + // } + let main_id = Id::test_new(0); + + // Compiling main + let mut builder = FunctionBuilder::new("main".into(), main_id); + let b1 = builder.insert_block(); + + let v0 = builder.add_parameter(Type::bool()); + let _v1 = builder.insert_not(v0); + builder.terminate_with_jmp(b1, Vec::new()); + + builder.switch_to_block(b1); + let v2 = builder.insert_not(v0); + builder.terminate_with_return(vec![v2]); + + let ssa = builder.finish(); + let main = ssa.main(); + assert_eq!(main.dfg[main.entry_block()].instructions().len(), 1); + assert_eq!(main.dfg[b1].instructions().len(), 1); + + // Expected output: + // + // fn main f0 { + // b0(v0: u1): + // v1 = not v0 + // jmp b1() + // b1(): + // return v1 + // } + let ssa = ssa.fold_constants_using_constraints(); + let main = ssa.main(); + assert_eq!(main.dfg[main.entry_block()].instructions().len(), 1); + assert_eq!(main.dfg[b1].instructions().len(), 0); + } + + #[test] + fn deduplicate_across_non_dominated_blocks() { + let src = " + brillig(inline) fn main f0 { + b0(v0: u32): + v2 = lt u32 1000, v0 + jmpif v2 then: b1, else: b2 + b1(): + v4 = shl v0, u32 1 + v5 = lt v0, v4 + constrain v5 == u1 1 + jmp b2() + b2(): + v7 = lt u32 1000, v0 + jmpif v7 then: b3, else: b4 + b3(): + v8 = shl v0, u32 1 + v9 = lt v0, v8 + constrain v9 == u1 1 + jmp b4() + b4(): + return + } + "; + let ssa = Ssa::from_str(src).unwrap(); + + // v4 has been hoisted, although: + // - v5 has not yet been removed since it was encountered earlier in the program + // - v8 hasn't been recognized as a duplicate of v6 yet since they still reference v4 and + // v5 respectively + let expected = " + brillig(inline) fn main f0 { + b0(v0: u32): + v2 = lt u32 1000, v0 + v4 = shl v0, u32 1 + jmpif v2 then: b1, else: b2 + b1(): + v5 = shl v0, u32 1 + v6 = lt v0, v5 + constrain v6 == u1 1 + jmp b2() + b2(): + jmpif v2 then: b3, else: b4 + b3(): + v8 = lt v0, v4 + constrain v8 == u1 1 + jmp b4() + b4(): + return + } + "; + + let ssa = ssa.fold_constants_using_constraints(); + assert_normalized_ssa_equals(ssa, expected); } #[test] @@ -1260,46 +1469,158 @@ mod test { } #[test] - fn deduplicate_across_blocks() { - // fn main f0 { - // b0(v0: u1): - // v1 = not v0 - // jmp b1() - // b1(): - // v2 = not v0 - // return v2 - // } - let main_id = Id::test_new(0); + fn does_not_use_cached_constrain_in_block_that_is_not_dominated() { + let src = " + brillig(inline) fn main f0 { + b0(v0: Field, v1: Field): + v3 = eq v0, Field 0 + jmpif v3 then: b1, else: b2 + b1(): + v5 = eq v1, Field 1 + constrain v1 == Field 1 + jmp b2() + b2(): + v6 = eq v1, Field 0 + constrain v1 == Field 0 + return + } + "; + let ssa = Ssa::from_str(src).unwrap(); + let ssa = ssa.fold_constants_using_constraints(); + assert_normalized_ssa_equals(ssa, src); + } 
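// A self-contained sketch (editor's illustration, not part of this diff) of the two-way
// decision `fold_constants_into_instruction` makes on a cache hit, assuming `CacheResult`
// is the enum returned by `ResultCache::get` above; plain integers stand in for the real
// block and value ids. A result cached in a dominating block is reused as-is, while a
// side-effect-free instruction cached elsewhere is hoisted to the common dominator.
enum SketchCacheResult {
    Cached(Vec<u32>),
    NeedToHoistToCommonBlock(u32),
}

fn sketch_apply(result: SketchCacheResult, insertion_block: &mut u32) -> Option<Vec<u32>> {
    match result {
        // Reuse the previously computed results; the duplicate instruction is dropped.
        SketchCacheResult::Cached(results) => Some(results),
        // Retarget the insertion block; a later constant-folding run deduplicates the two
        // copies once they meet in the common dominator.
        SketchCacheResult::NeedToHoistToCommonBlock(dominator) => {
            *insertion_block = dominator;
            None
        }
    }
}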
- // Compiling main - let mut builder = FunctionBuilder::new("main".into(), main_id); - let b1 = builder.insert_block(); + #[test] + fn does_not_hoist_constrain_to_common_ancestor() { + let src = " + brillig(inline) fn main f0 { + b0(v0: Field, v1: Field): + v3 = eq v0, Field 0 + jmpif v3 then: b1, else: b2 + b1(): + constrain v1 == Field 1 + jmp b2() + b2(): + jmpif v0 then: b3, else: b4 + b3(): + constrain v1 == Field 1 // This was incorrectly hoisted to b0 but this condition is not valid when going b0 -> b2 -> b4 + jmp b4() + b4(): + return + } + "; + let ssa = Ssa::from_str(src).unwrap(); + let ssa = ssa.fold_constants_using_constraints(); + assert_normalized_ssa_equals(ssa, src); + } - let v0 = builder.add_parameter(Type::bool()); - let _v1 = builder.insert_not(v0); - builder.terminate_with_jmp(b1, Vec::new()); + #[test] + fn does_not_hoist_sub_to_common_ancestor() { + let src = " + acir(inline) fn main f0 { + b0(v0: u32): + v2 = eq v0, u32 0 + jmpif v2 then: b4, else: b1 + b4(): + v5 = sub v0, u32 1 + jmp b5() + b5(): + return + b1(): + jmpif v0 then: b3, else: b2 + b3(): + v4 = sub v0, u32 1 // We can't hoist this because v0 is zero here and it will lead to an underflow + jmp b5() + b2(): + jmp b5() + } + "; + let ssa = Ssa::from_str(src).unwrap(); + let ssa = ssa.fold_constants_using_constraints(); + assert_normalized_ssa_equals(ssa, src); + } - builder.switch_to_block(b1); - let v2 = builder.insert_not(v0); - builder.terminate_with_return(vec![v2]); + #[test] + fn deduplicates_side_effecting_intrinsics() { + let src = " + // After EnableSideEffectsIf removal: + acir(inline) fn main f0 { + b0(v0: Field, v1: Field, v2: u1): + v4 = call is_unconstrained() -> u1 + v7 = call to_be_radix(v0, u32 256) -> [u8; 1] // `a.to_be_radix(256)`; + inc_rc v7 + v8 = call to_be_radix(v0, u32 256) -> [u8; 1] // duplicate load of `a` + inc_rc v8 + v9 = cast v2 as Field // `if c { a.to_be_radix(256) }` + v10 = mul v0, v9 // attaching `c` to `a` + v11 = call to_be_radix(v10, u32 256) -> [u8; 1] // calling `to_radix(c * a)` + inc_rc v11 + enable_side_effects v2 // side effect var for `c` shifted down by removal + return + } + "; + let ssa = Ssa::from_str(src).unwrap(); + let expected = " + acir(inline) fn main f0 { + b0(v0: Field, v1: Field, v2: u1): + v4 = call is_unconstrained() -> u1 + v7 = call to_be_radix(v0, u32 256) -> [u8; 1] + inc_rc v7 + inc_rc v7 + v8 = cast v2 as Field + v9 = mul v0, v8 + v10 = call to_be_radix(v9, u32 256) -> [u8; 1] + inc_rc v10 + enable_side_effects v2 + return + } + "; + let ssa = ssa.fold_constants_using_constraints(); + assert_normalized_ssa_equals(ssa, expected); + } - let ssa = builder.finish(); - let main = ssa.main(); - assert_eq!(main.dfg[main.entry_block()].instructions().len(), 1); - assert_eq!(main.dfg[b1].instructions().len(), 1); + #[test] + fn array_get_from_array_set_with_different_predicates() { + let src = " + acir(inline) fn main f0 { + b0(v0: [Field; 3], v1: u32, v2: Field): + enable_side_effects u1 0 + v4 = array_set v0, index v1, value v2 + enable_side_effects u1 1 + v6 = array_get v4, index v1 -> Field + return v6 + } + "; + + let ssa = Ssa::from_str(src).unwrap(); - // Expected output: - // - // fn main f0 { - // b0(v0: u1): - // v1 = not v0 - // jmp b1() - // b1(): - // return v1 - // } let ssa = ssa.fold_constants_using_constraints(); - let main = ssa.main(); - assert_eq!(main.dfg[main.entry_block()].instructions().len(), 1); - assert_eq!(main.dfg[b1].instructions().len(), 0); + // We expect the code to be unchanged + 
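+        // (the array_set above executed under `enable_side_effects u1 0` while the array_get
+        // executes under `u1 1`; with differing predicates the value written by the set cannot
+        // be assumed to be in the array, so no simplification is sound)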
assert_normalized_ssa_equals(ssa, src); + } + + #[test] + fn array_get_from_array_set_same_predicates() { + let src = " + acir(inline) fn main f0 { + b0(v0: [Field; 3], v1: u32, v2: Field): + enable_side_effects u1 1 + v4 = array_set v0, index v1, value v2 + v6 = array_get v4, index v1 -> Field + return v6 + } + "; + let ssa = Ssa::from_str(src).unwrap(); + + let expected = " + acir(inline) fn main f0 { + b0(v0: [Field; 3], v1: u32, v2: Field): + enable_side_effects u1 1 + v4 = array_set v0, index v1, value v2 + return v2 + } + "; + let ssa = ssa.fold_constants_using_constraints(); + assert_normalized_ssa_equals(ssa, expected); } } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/die.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/die.rs index 8d3fa9cc615..f7ac6f7b313 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/die.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/die.rs @@ -18,8 +18,6 @@ use crate::ssa::{ ssa_gen::{Ssa, SSA_WORD_SIZE}, }; -use super::rc::{pop_rc_for, RcInstruction}; - impl Ssa { /// Performs Dead Instruction Elimination (DIE) to remove any instructions with /// unused results. @@ -108,8 +106,6 @@ impl Context { let instructions_len = block.instructions().len(); - let mut rc_tracker = RcTracker::default(); - // Indexes of instructions that might be out of bounds. // We'll remove those, but before that we'll insert bounds checks for them. let mut possible_index_out_of_bounds_indexes = Vec::new(); @@ -127,8 +123,9 @@ impl Context { .push(instructions_len - instruction_index - 1); } } else { - use Instruction::*; - if matches!(instruction, IncrementRc { .. } | DecrementRc { .. }) { + // We can't remove rc instructions if they're loaded from a reference + // since we'd have no way of knowing whether the reference is still used. + if Self::is_inc_dec_instruction_on_known_array(instruction, &function.dfg) { self.rc_instructions.push((*instruction_id, block_id)); } else { instruction.for_each_value(|value| { @@ -136,13 +133,8 @@ impl Context { }); } } - - rc_tracker.track_inc_rcs_to_remove(*instruction_id, function); } - self.instructions_to_remove.extend(rc_tracker.get_non_mutated_arrays()); - self.instructions_to_remove.extend(rc_tracker.rc_pairs_to_remove); - // If there are some instructions that might trigger an out of bounds error, // first add constrain checks. 
Then run the DIE pass again, which will remove those
        // but leave the constraints (and any values needed by those constraints)
@@ -201,19 +193,28 @@
     }

     fn remove_rc_instructions(self, dfg: &mut DataFlowGraph) {
-        for (rc, block) in self.rc_instructions {
-            let value = match &dfg[rc] {
-                Instruction::IncrementRc { value } => *value,
-                Instruction::DecrementRc { value } => *value,
-                other => {
-                    unreachable!("Expected IncrementRc or DecrementRc instruction, found {other:?}")
+        let unused_rc_values_by_block: HashMap<BasicBlockId, HashSet<InstructionId>> =
+            self.rc_instructions.into_iter().fold(HashMap::default(), |mut acc, (rc, block)| {
+                let value = match &dfg[rc] {
+                    Instruction::IncrementRc { value } => *value,
+                    Instruction::DecrementRc { value } => *value,
+                    other => {
+                        unreachable!(
+                            "Expected IncrementRc or DecrementRc instruction, found {other:?}"
+                        )
+                    }
+                };
+
+                if !self.used_values.contains(&value) {
+                    acc.entry(block).or_default().insert(rc);
                 }
-            };
+                acc
+            });

-            // This could be more efficient if we have to remove multiple instructions in a single block
-            if !self.used_values.contains(&value) {
-                dfg[block].instructions_mut().retain(|instruction| *instruction != rc);
-            }
+        for (block, instructions_to_remove) in unused_rc_values_by_block {
+            dfg[block]
+                .instructions_mut()
+                .retain(|instruction| !instructions_to_remove.contains(instruction));
         }
     }

@@ -337,6 +338,28 @@
         inserted_check
     }
+
+    /// True if this is an `Instruction::IncrementRc` or `Instruction::DecrementRc`
+    /// operating on an array directly from an `Instruction::MakeArray` or an
+    /// intrinsic known to return a fresh array.
+    fn is_inc_dec_instruction_on_known_array(
+        instruction: &Instruction,
+        dfg: &DataFlowGraph,
+    ) -> bool {
+        use Instruction::*;
+        if let IncrementRc { value } | DecrementRc { value } = instruction {
+            if let Value::Instruction { instruction, .. } = &dfg[*value] {
+                return match &dfg[*instruction] {
+                    MakeArray { .. } => true,
+                    Call { func, .. } => {
+                        matches!(&dfg[*func], Value::Intrinsic(_) | Value::ForeignFunction(_))
+                    }
+                    _ => false,
+                };
+            }
+        }
+        false
+    }
 }

 fn instruction_might_result_in_out_of_bounds(
@@ -499,103 +522,6 @@
     (lhs, rhs)
 }

-#[derive(Default)]
-struct RcTracker {
-    // We can track IncrementRc instructions per block to determine whether they are useless.
-    // IncrementRc and DecrementRc instructions are normally side effectual instructions, but we remove
-    // them if their value is not used anywhere in the function. However, even when their value is used, their existence
-    // is pointless logic if there is no array set between the increment and the decrement of the reference counter.
-    // We track per block whether an IncrementRc instruction has a paired DecrementRc instruction
-    // with the same value but no array set in between.
-    // If we see an inc/dec RC pair within a block we can safely remove both instructions.
-    rcs_with_possible_pairs: HashMap<Type, Vec<RcInstruction>>,
-    rc_pairs_to_remove: HashSet<InstructionId>,
-    // We also separately track all IncrementRc instructions and all arrays which have been mutably borrowed.
-    // If an array has not been mutably borrowed we can then safely remove all IncrementRc instructions on that array.
-    inc_rcs: HashMap<ValueId, HashSet<InstructionId>>,
-    mut_borrowed_arrays: HashSet<ValueId>,
-    // The SSA often creates patterns where after simplifications we end up with repeat
-    // IncrementRc instructions on the same value.
We track whether the previous instruction was an IncrementRc,
-    // and if the current instruction is also an IncrementRc on the same value we remove the current instruction.
-    // `None` if the previous instruction was anything other than an IncrementRc
-    previous_inc_rc: Option<ValueId>,
-}
-
-impl RcTracker {
-    fn track_inc_rcs_to_remove(&mut self, instruction_id: InstructionId, function: &Function) {
-        let instruction = &function.dfg[instruction_id];
-
-        if let Instruction::IncrementRc { value } = instruction {
-            if let Some(previous_value) = self.previous_inc_rc {
-                if previous_value == *value {
-                    self.rc_pairs_to_remove.insert(instruction_id);
-                }
-            }
-            self.previous_inc_rc = Some(*value);
-        } else {
-            self.previous_inc_rc = None;
-        }
-
-        // DIE loops over a block in reverse order, so we insert an RC instruction for possible removal
-        // when we see a DecrementRc and check whether it was possibly mutated when we see an IncrementRc.
-        match instruction {
-            Instruction::IncrementRc { value } => {
-                if let Some(inc_rc) =
-                    pop_rc_for(*value, function, &mut self.rcs_with_possible_pairs)
-                {
-                    if !inc_rc.possibly_mutated {
-                        self.rc_pairs_to_remove.insert(inc_rc.id);
-                        self.rc_pairs_to_remove.insert(instruction_id);
-                    }
-                }
-
-                self.inc_rcs.entry(*value).or_default().insert(instruction_id);
-            }
-            Instruction::DecrementRc { value } => {
-                let typ = function.dfg.type_of_value(*value);
-
-                // We assume arrays aren't mutated until we find an array_set
-                let dec_rc =
-                    RcInstruction { id: instruction_id, array: *value, possibly_mutated: false };
-                self.rcs_with_possible_pairs.entry(typ).or_default().push(dec_rc);
-            }
-            Instruction::ArraySet { array, .. } => {
-                let typ = function.dfg.type_of_value(*array);
-                if let Some(dec_rcs) = self.rcs_with_possible_pairs.get_mut(&typ) {
-                    for dec_rc in dec_rcs {
-                        dec_rc.possibly_mutated = true;
-                    }
-                }
-
-                self.mut_borrowed_arrays.insert(*array);
-            }
-            Instruction::Store { value, .. } => {
-                // We are very conservative and say that any store of an array value means it has the potential
-                // to be mutated. This is done due to the tracking of mutable borrows still being per block.
-                let typ = function.dfg.type_of_value(*value);
-                if matches!(&typ, Type::Array(..)
| Type::Slice(..)) {
-                    self.mut_borrowed_arrays.insert(*value);
-                }
-            }
-            _ => {}
-        }
-    }
-
-    fn get_non_mutated_arrays(&self) -> HashSet<InstructionId> {
-        self.inc_rcs
-            .keys()
-            .filter_map(|value| {
-                if !self.mut_borrowed_arrays.contains(value) {
-                    Some(&self.inc_rcs[value])
-                } else {
-                    None
-                }
-            })
-            .flatten()
-            .copied()
-            .collect()
-    }
-}
 #[cfg(test)]
 mod test {
     use std::sync::Arc;
@@ -604,7 +530,7 @@
     use crate::ssa::{
         function_builder::FunctionBuilder,
-        ir::{instruction::Instruction, map::Id, types::Type},
+        ir::{map::Id, types::Type},
         opt::assert_normalized_ssa_equals,
         Ssa,
     };
@@ -676,30 +602,6 @@
         assert_normalized_ssa_equals(ssa, expected);
     }

-    #[test]
-    fn remove_useless_paired_rcs_even_when_used() {
-        let src = "
-            acir(inline) fn main f0 {
-              b0(v0: [Field; 2]):
-                inc_rc v0
-                v2 = array_get v0, index u32 0 -> Field
-                dec_rc v0
-                return v2
-            }
-            ";
-        let ssa = Ssa::from_str(src).unwrap();
-
-        let expected = "
-            acir(inline) fn main f0 {
-              b0(v0: [Field; 2]):
-                v2 = array_get v0, index u32 0 -> Field
-                return v2
-            }
-            ";
-        let ssa = ssa.dead_instruction_elimination();
-        assert_normalized_ssa_equals(ssa, expected);
-    }
-
     #[test]
     fn keep_paired_rcs_with_array_set() {
         let src = "
@@ -770,92 +672,23 @@
     }

     #[test]
-    fn keep_inc_rc_on_borrowed_array_set() {
-        // acir(inline) fn main f0 {
-        //   b0(v0: [u32; 2]):
-        //     inc_rc v0
-        //     v3 = array_set v0, index u32 0, value u32 1
-        //     inc_rc v0
-        //     inc_rc v0
-        //     inc_rc v0
-        //     v4 = array_get v3, index u32 1
-        //     return v4
-        // }
-        let main_id = Id::test_new(0);
-
-        // Compiling main
-        let mut builder = FunctionBuilder::new("main".into(), main_id);
-        let array_type = Type::Array(Arc::new(vec![Type::unsigned(32)]), 2);
-        let v0 = builder.add_parameter(array_type.clone());
-        builder.increment_array_reference_count(v0);
-        let zero = builder.numeric_constant(0u128, Type::unsigned(32));
-        let one = builder.numeric_constant(1u128, Type::unsigned(32));
-        let v3 = builder.insert_array_set(v0, zero, one);
-        builder.increment_array_reference_count(v0);
-        builder.increment_array_reference_count(v0);
-        builder.increment_array_reference_count(v0);
-
-        let v4 = builder.insert_array_get(v3, one, Type::unsigned(32));
-
-        builder.terminate_with_return(vec![v4]);
-
-        let ssa = builder.finish();
-        let main = ssa.main();
-
-        // The instruction count never includes the terminator instruction
-        assert_eq!(main.dfg[main.entry_block()].instructions().len(), 6);
-
-        // We expect the output to be unchanged
-        // Expected output:
-        //
-        // acir(inline) fn main f0 {
-        //   b0(v0: [u32; 2]):
-        //     inc_rc v0
-        //     v3 = array_set v0, index u32 0, value u32 1
-        //     inc_rc v0
-        //     v4 = array_get v3, index u32 1
-        //     return v4
-        // }
-        let ssa = ssa.dead_instruction_elimination();
-        let main = ssa.main();
-
-        let instructions = main.dfg[main.entry_block()].instructions();
-        // We expect only the repeated inc_rc instructions to be collapsed into a single inc_rc.
-        assert_eq!(instructions.len(), 4);
-
-        assert!(matches!(&main.dfg[instructions[0]], Instruction::IncrementRc { .. }));
-        assert!(matches!(&main.dfg[instructions[1]], Instruction::ArraySet { .. }));
-        assert!(matches!(&main.dfg[instructions[2]], Instruction::IncrementRc { .. }));
-        assert!(matches!(&main.dfg[instructions[3]], Instruction::ArrayGet { ..
}));
-    }
-
-    #[test]
-    fn remove_inc_rcs_that_are_never_mutably_borrowed() {
+    fn does_not_remove_inc_or_dec_rc_of_if_they_are_loaded_from_a_reference() {
         let src = "
-            acir(inline) fn main f0 {
-              b0(v0: [Field; 2]):
-                inc_rc v0
-                inc_rc v0
-                inc_rc v0
-                v2 = array_get v0, index u32 0 -> Field
-                inc_rc v0
-                return v2
+            brillig(inline) fn borrow_mut f0 {
+              b0(v0: &mut [Field; 3]):
+                v1 = load v0 -> [Field; 3]
+                inc_rc v1 // this one shouldn't be removed
+                v2 = load v0 -> [Field; 3]
+                inc_rc v2 // this one shouldn't be removed
+                v3 = load v0 -> [Field; 3]
+                v6 = array_set v3, index u32 0, value Field 5
+                store v6 at v0
+                dec_rc v6
+                return
             }
             ";
         let ssa = Ssa::from_str(src).unwrap();
-        let main = ssa.main();
-
-        // The instruction count never includes the terminator instruction
-        assert_eq!(main.dfg[main.entry_block()].instructions().len(), 5);
-
-        let expected = "
-            acir(inline) fn main f0 {
-              b0(v0: [Field; 2]):
-                v2 = array_get v0, index u32 0 -> Field
-                return v2
-            }
-            ";
         let ssa = ssa.dead_instruction_elimination();
-        assert_normalized_ssa_equals(ssa, expected);
+        assert_normalized_ssa_equals(ssa, src);
     }
 }
diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs
index 5d114672a55..3fbccf93ec9 100644
--- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs
+++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs
@@ -131,8 +131,7 @@
 //! v11 = mul v4, Field 12
 //! v12 = add v10, v11
 //! store v12 at v5 (new store)
-use fxhash::FxHashMap as HashMap;
-use std::collections::{BTreeMap, HashSet};
+use fxhash::{FxHashMap as HashMap, FxHashSet as HashSet};

 use acvm::{acir::AcirField, acir::BlackBoxFunc, FieldElement};
 use iter_extended::vecmap;
@@ -186,18 +185,6 @@
 struct Context<'f> {
     /// Maps start of branch -> end of branch
     branch_ends: HashMap<BasicBlockId, BasicBlockId>,

-    /// Maps an address to the old and new value of the element at that address
-    /// These only hold stores for one block at a time and is cleared
-    /// between inlining of branches.
-    store_values: HashMap<ValueId, Store>,
-
-    /// Stores all allocations local to the current branch.
-    /// Since these branches are local to the current branch (ie. only defined within one branch of
-    /// an if expression), they should not be merged with their previous value or stored value in
-    /// the other branch since there is no such value. The ValueId here is that which is returned
-    /// by the allocate instruction.
-    local_allocations: HashSet<ValueId>,
-
     /// A stack of each jmpif condition that was taken to reach a particular point in the program.
     /// When two branches are merged back into one, this constitutes a join point, and is analogous
     /// to the rest of the program after an if statement. When such a join point / end block is
@@ -214,13 +201,15 @@ struct Context<'f> {
     /// When processing a block, we pop this stack to get its arguments
     /// and at the end we push the arguments for its successor
     arguments_stack: Vec<Vec<ValueId>>,
-}

-#[derive(Clone)]
-pub(crate) struct Store {
-    old_value: ValueId,
-    new_value: ValueId,
-    call_stack: CallStack,
+    /// Stores all allocations local to the current branch.
+    ///
+    /// Since these branches are local to the current branch (i.e. only defined within one branch of
+    /// an if expression), they should not be merged with their previous value or stored value in
+    /// the other branch since there is no such value.
+    ///
+    /// The `ValueId` here is that which is returned by the allocate instruction.
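+    ///
+    /// For example (hypothetical IDs): if a then-branch contains `v5 = allocate`
+    /// followed by `store Field 1 at v5`, that store needs no merging, because
+    /// `v5` does not exist at all on the else path.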
+    local_allocations: HashSet<ValueId>,
 }

 #[derive(Clone)]
@@ -231,8 +220,6 @@
 struct ConditionalBranch {
     // The old condition of the branch when it was created
     old_condition: ValueId,
     // The condition of the branch
     condition: ValueId,
-    // The store values accumulated when processing the branch
-    store_values: HashMap<ValueId, Store>,
     // The allocations accumulated when processing the branch
     local_allocations: HashSet<ValueId>,
 }

@@ -263,12 +250,11 @@ fn flatten_function_cfg(function: &mut Function, no_predicates: &HashMap<FunctionId, bool>)
 impl<'f> Context<'f> {
         let old_condition = *condition;
         let then_condition = self.inserter.resolve(old_condition);

-        let old_stores = std::mem::take(&mut self.store_values);
         let old_allocations = std::mem::take(&mut self.local_allocations);
         let branch = ConditionalBranch {
             old_condition,
             condition: self.link_condition(then_condition),
-            store_values: old_stores,
-            local_allocations: old_allocations,
             last_block: *then_destination,
+            local_allocations: old_allocations,
         };
         let cond_context = ConditionalContext {
             condition: then_condition,
@@ -473,19 +457,12 @@
         );
         let else_condition = self.link_condition(else_condition);

-        // Make sure the else branch sees the previous values of each store
-        // rather than any values created in the 'then' branch.
-        let old_stores = std::mem::take(&mut cond_context.then_branch.store_values);
-        cond_context.then_branch.store_values = std::mem::take(&mut self.store_values);
-        self.undo_stores_in_then_branch(&cond_context.then_branch.store_values);
-
         let old_allocations = std::mem::take(&mut self.local_allocations);
         let else_branch = ConditionalBranch {
             old_condition: cond_context.then_branch.old_condition,
             condition: else_condition,
-            store_values: old_stores,
-            local_allocations: old_allocations,
             last_block: *block,
+            local_allocations: old_allocations,
         };
         cond_context.then_branch.local_allocations.clear();
         cond_context.else_branch = Some(else_branch);
@@ -509,10 +486,8 @@
         }

         let mut else_branch = cond_context.else_branch.unwrap();
-        let stores_in_branch = std::mem::replace(&mut self.store_values, else_branch.store_values);
         self.local_allocations = std::mem::take(&mut else_branch.local_allocations);
         else_branch.last_block = *block;
-        else_branch.store_values = stores_in_branch;
         cond_context.else_branch = Some(else_branch);

         // We must remember to reset whether side effects are enabled when both branches
@@ -562,7 +537,11 @@
         let args = vecmap(then_args.iter().zip(else_args), |(then_arg, else_arg)| {
             (self.inserter.resolve(*then_arg), self.inserter.resolve(else_arg))
         });
-
+        let else_condition = if let Some(branch) = cond_context.else_branch {
+            branch.condition
+        } else {
+            self.inserter.function.dfg.make_constant(FieldElement::zero(), Type::bool())
+        };
         let block = self.inserter.function.entry_block();

         // Cannot include this in the previous vecmap since it requires exclusive access to self
@@ -570,6 +549,7 @@
             let instruction = Instruction::IfElse {
                 then_condition: cond_context.then_branch.condition,
                 then_value: then_arg,
+                else_condition,
                 else_value: else_arg,
             };
             let call_stack = cond_context.call_stack.clone();
@@ -580,8 +560,6 @@
                 .first()
         });

-        let call_stack = cond_context.call_stack;
-        self.merge_stores(cond_context.then_branch, cond_context.else_branch, call_stack);
         self.arguments_stack.pop();
         self.arguments_stack.pop();
         self.arguments_stack.push(args);
@@ -636,116 +614,29 @@
         self.insert_instruction_with_typevars(enable_side_effects, None, call_stack);
     }

-    /// Merge any store instructions found in each
branch.
-    ///
-    /// This function relies on the 'then' branch being merged before the 'else' branch of a jmpif
-    /// instruction. If this ordering is changed, the ordering that store values are merged within
-    /// this function also needs to be changed to reflect that.
-    fn merge_stores(
-        &mut self,
-        then_branch: ConditionalBranch,
-        else_branch: Option<ConditionalBranch>,
-        call_stack: CallStack,
-    ) {
-        // Address -> (then_value, else_value, value_before_the_if)
-        let mut new_map = BTreeMap::new();
-
-        for (address, store) in then_branch.store_values {
-            new_map.insert(address, (store.new_value, store.old_value, store.old_value));
-        }
-
-        if else_branch.is_some() {
-            for (address, store) in else_branch.clone().unwrap().store_values {
-                if let Some(entry) = new_map.get_mut(&address) {
-                    entry.1 = store.new_value;
-                } else {
-                    new_map.insert(address, (store.old_value, store.new_value, store.old_value));
-                }
-            }
-        }
-
-        let then_condition = then_branch.condition;
-        let block = self.inserter.function.entry_block();
-
-        // Merging must occur in a separate loop as we cannot borrow `self` as mutable while `value_merger` does
-        let mut new_values = HashMap::default();
-        for (address, (then_case, else_case, _)) in &new_map {
-            let instruction = Instruction::IfElse {
-                then_condition,
-                then_value: *then_case,
-                else_value: *else_case,
-            };
-            let dfg = &mut self.inserter.function.dfg;
-            let value = dfg
-                .insert_instruction_and_results(instruction, block, None, call_stack.clone())
-                .first();
-
-            new_values.insert(address, value);
-        }
-
-        // Replace stores with new merged values
-        for (address, (_, _, old_value)) in &new_map {
-            let value = new_values[address];
-            let address = *address;
-            self.insert_instruction_with_typevars(
-                Instruction::Store { address, value },
-                None,
-                call_stack.clone(),
-            );
-
-            if let Some(store) = self.store_values.get_mut(&address) {
-                store.new_value = value;
-            } else {
-                self.store_values.insert(
-                    address,
-                    Store {
-                        old_value: *old_value,
-                        new_value: value,
-                        call_stack: call_stack.clone(),
-                    },
-                );
-            }
-        }
-    }
-
-    fn remember_store(&mut self, address: ValueId, new_value: ValueId, call_stack: CallStack) {
-        if !self.local_allocations.contains(&address) {
-            if let Some(store_value) = self.store_values.get_mut(&address) {
-                store_value.new_value = new_value;
-            } else {
-                let load = Instruction::Load { address };
-
-                let load_type = Some(vec![self.inserter.function.dfg.type_of_value(new_value)]);
-                let old_value = self
-                    .insert_instruction_with_typevars(load.clone(), load_type, call_stack.clone())
-                    .first();
-
-                self.store_values.insert(address, Store { old_value, new_value, call_stack });
-            }
-        }
-    }
-
     /// Push the given instruction to the end of the entry block of the current function.
     ///
     /// Note that each ValueId of the instruction will be mapped via self.inserter.resolve.
     /// As a result, the instruction that will be pushed will actually be a new instruction
     /// with a different InstructionId from the original. The results of the given instruction
     /// will also be mapped to the results of the new instruction.
-    fn push_instruction(&mut self, id: InstructionId) -> Vec<ValueId> {
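+    ///
+    /// For example (hypothetical IDs): pushing `v1 = add v0, Field 1` from a
+    /// branch may be re-inserted as `v7 = add v3, Field 1` at the end of the
+    /// entry block, with `v1` mapped to `v7` for all later uses.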
+    fn push_instruction(&mut self, id: InstructionId) {
         let (instruction, call_stack) = self.inserter.map_instruction(id);
         let instruction = self.handle_instruction_side_effects(instruction, call_stack.clone());

-        let is_allocate = matches!(instruction, Instruction::Allocate);
+        let instruction_is_allocate = matches!(&instruction, Instruction::Allocate);
         let entry = self.inserter.function.entry_block();
         let results = self.inserter.push_instruction_value(instruction, id, entry, call_stack);

         // Remember an allocate was created local to this branch so that we do not try to merge store
         // values across branches for it later.
-        if is_allocate {
+        if instruction_is_allocate {
             self.local_allocations.insert(results.first());
         }
-
-        results.results().into_owned()
     }

     /// If we are currently in a branch, we need to modify constrain instructions
@@ -782,8 +673,35 @@
                 Instruction::Constrain(lhs, rhs, message)
             }
             Instruction::Store { address, value } => {
-                self.remember_store(address, value, call_stack);
-                Instruction::Store { address, value }
+                // If this instruction immediately follows an allocate, and stores to that
+                // address, there is no previous value to load and we don't need a merge anyway.
+                if self.local_allocations.contains(&address) {
+                    Instruction::Store { address, value }
+                } else {
+                    // Instead of storing `value`, store `if condition { value } else { previous_value }`
+                    let typ = self.inserter.function.dfg.type_of_value(value);
+                    let load = Instruction::Load { address };
+                    let previous_value = self
+                        .insert_instruction_with_typevars(
+                            load,
+                            Some(vec![typ]),
+                            call_stack.clone(),
+                        )
+                        .first();
+
+                    let else_condition = self
+                        .insert_instruction(Instruction::Not(condition), call_stack.clone());
+
+                    let instruction = Instruction::IfElse {
+                        then_condition: condition,
+                        then_value: value,
+                        else_condition,
+                        else_value: previous_value,
+                    };
+
+                    let updated_value = self.insert_instruction(instruction, call_stack);
+                    Instruction::Store { address, value: updated_value }
+                }
             }
             Instruction::RangeCheck { value, max_bit_size, assert_message } => {
                 // Replace value with `value * predicate` to zero out value when predicate is inactive.
@@ -905,23 +823,11 @@
             call_stack,
         )
     }
-
-    fn undo_stores_in_then_branch(&mut self, store_values: &HashMap<ValueId, Store>) {
-        for (address, store) in store_values {
-            let address = *address;
-            let value = store.old_value;
-            let instruction = Instruction::Store { address, value };
-            // Considering the location of undoing a store to be the same as the original store.
- self.insert_instruction_with_typevars(instruction, None, store.call_stack.clone()); - } - } } #[cfg(test)] mod test { - use std::sync::Arc; - - use acvm::{acir::AcirField, FieldElement}; + use acvm::acir::AcirField; use crate::ssa::{ function_builder::FunctionBuilder, @@ -961,9 +867,11 @@ mod test { v1 = not v0 enable_side_effects u1 1 v3 = cast v0 as Field - v5 = mul v3, Field -1 - v7 = add Field 4, v5 - return v7 + v4 = cast v1 as Field + v6 = mul v3, Field 3 + v8 = mul v4, Field 4 + v9 = add v6, v8 + return v9 } "; @@ -1023,15 +931,15 @@ mod test { b0(v0: u1, v1: &mut Field): enable_side_effects v0 v2 = load v1 -> Field - store Field 5 at v1 - v4 = not v0 - store v2 at v1 - enable_side_effects u1 1 - v6 = cast v0 as Field - v7 = sub Field 5, v2 - v8 = mul v6, v7 - v9 = add v2, v8 + v3 = not v0 + v4 = cast v0 as Field + v5 = cast v3 as Field + v7 = mul v4, Field 5 + v8 = mul v5, v2 + v9 = add v7, v8 store v9 at v1 + v10 = not v0 + enable_side_effects u1 1 return } "; @@ -1062,17 +970,23 @@ mod test { b0(v0: u1, v1: &mut Field): enable_side_effects v0 v2 = load v1 -> Field - store Field 5 at v1 - v4 = not v0 - store v2 at v1 - enable_side_effects v4 - v5 = load v1 -> Field - store Field 6 at v1 + v3 = not v0 + v4 = cast v0 as Field + v5 = cast v3 as Field + v7 = mul v4, Field 5 + v8 = mul v5, v2 + v9 = add v7, v8 + store v9 at v1 + v10 = not v0 + enable_side_effects v10 + v11 = load v1 -> Field + v12 = cast v10 as Field + v13 = cast v0 as Field + v15 = mul v12, Field 6 + v16 = mul v13, v11 + v17 = add v15, v16 + store v17 at v1 enable_side_effects u1 1 - v8 = cast v0 as Field - v10 = mul v8, Field -1 - v11 = add Field 6, v10 - store v11 at v1 return } "; @@ -1115,6 +1029,7 @@ mod test { // b7 b8 // ↘ ↙ // b9 + let src = " acir(inline) fn main f0 { b0(v0: u1, v1: u1): @@ -1165,33 +1080,49 @@ mod test { v20 = load v2 -> Field // call v1(Field 8, v20) jmp b9() - } - "; + }"; let ssa = Ssa::from_str(src).unwrap(); + let ssa = ssa.flatten_cfg().mem2reg(); - // Expected results after mem2reg removes the allocation and each load and store: let expected = " acir(inline) fn main f0 { b0(v0: u1, v1: u1): v2 = allocate -> &mut Field enable_side_effects v0 - v3 = mul v0, v1 - enable_side_effects v3 - v4 = not v1 - v5 = mul v0, v4 + v3 = not v0 + v4 = cast v0 as Field + v5 = cast v3 as Field + v7 = mul v4, Field 2 + v8 = add v7, v5 + v9 = mul v0, v1 + enable_side_effects v9 + v10 = not v9 + v11 = cast v9 as Field + v12 = cast v10 as Field + v14 = mul v11, Field 5 + v15 = mul v12, v8 + v16 = add v14, v15 + v17 = not v1 + v18 = mul v0, v17 + enable_side_effects v18 + v19 = not v18 + v20 = cast v18 as Field + v21 = cast v19 as Field + v23 = mul v20, Field 6 + v24 = mul v21, v16 + v25 = add v23, v24 enable_side_effects v0 - v6 = cast v3 as Field - v8 = mul v6, Field -1 - v10 = add Field 6, v8 - v11 = not v0 + v26 = not v0 + enable_side_effects v26 + v27 = cast v26 as Field + v28 = cast v0 as Field + v30 = mul v27, Field 3 + v31 = mul v28, v25 + v32 = add v30, v31 enable_side_effects u1 1 - v13 = cast v0 as Field - v15 = sub v10, Field 3 - v16 = mul v13, v15 - v17 = add Field 3, v16 - return v17 + return v32 }"; let main = ssa.main(); @@ -1201,10 +1132,7 @@ mod test { }; let merged_values = get_all_constants_reachable_from_instruction(&main.dfg, ret); - assert_eq!( - merged_values, - vec![FieldElement::from(3u128), FieldElement::from(6u128), -FieldElement::from(1u128)] - ); + assert_eq!(merged_values, vec![2, 3, 5, 6]); assert_normalized_ssa_equals(ssa, expected); } @@ -1287,7 +1215,7 @@ mod test { fn 
get_all_constants_reachable_from_instruction(
         dfg: &DataFlowGraph,
         value: ValueId,
-    ) -> Vec<FieldElement> {
+    ) -> Vec<u128> {
         match dfg[value] {
             Value::Instruction { instruction, .. } => {
                 let mut values = vec![];
@@ -1305,7 +1233,7 @@
                 values.dedup();
                 values
             }
-            Value::NumericConstant { constant, .. } => vec![constant],
+            Value::NumericConstant { constant, .. } => vec![constant.to_u128()],
             _ => Vec::new(),
         }
     }
@@ -1344,63 +1272,71 @@
     fn should_not_merge_incorrectly_to_false() {
         // Regression test for #1792
         // Tests that it does not simplify a true constraint into an always-false constraint
-        // acir(inline) fn main f1 {
-        //   b0(v0: [u8; 2]):
-        //     v5 = array_get v0, index u8 0
-        //     v6 = cast v5 as u32
-        //     v8 = truncate v6 to 1 bits, max_bit_size: 32
-        //     v9 = cast v8 as u1
-        //     v10 = allocate
-        //     store u8 0 at v10
-        //     jmpif v9 then: b2, else: b3
-        //   b2():
-        //     v12 = cast v5 as Field
-        //     v13 = add v12, Field 1
-        //     store v13 at v10
-        //     jmp b4()
-        //   b4():
-        //     constrain v9 == u1 1
-        //     return
-        //   b3():
-        //     store u8 0 at v10
-        //     jmp b4()
-        // }
-        let main_id = Id::test_new(1);
-        let mut builder = FunctionBuilder::new("main".into(), main_id);
-        builder.insert_block(); // b0
-        let b1 = builder.insert_block();
-        let b2 = builder.insert_block();
-        let b3 = builder.insert_block();
-        let element_type = Arc::new(vec![Type::unsigned(8)]);
-        let array_type = Type::Array(element_type.clone(), 2);
-        let array = builder.add_parameter(array_type);
-        let zero = builder.numeric_constant(0_u128, Type::unsigned(8));
-        let v5 = builder.insert_array_get(array, zero, Type::unsigned(8));
-        let v6 = builder.insert_cast(v5, Type::unsigned(32));
-        let i_two = builder.numeric_constant(2_u128, Type::unsigned(32));
-        let v8 = builder.insert_binary(v6, BinaryOp::Mod, i_two);
-        let v9 = builder.insert_cast(v8, Type::bool());
-        let v10 = builder.insert_allocate(Type::field());
-        builder.insert_store(v10, zero);
-        builder.terminate_with_jmpif(v9, b1, b2);
-        builder.switch_to_block(b1);
-        let one = builder.field_constant(1_u128);
-        let v5b = builder.insert_cast(v5, Type::field());
-        let v13: Id<Value> = builder.insert_binary(v5b, BinaryOp::Add, one);
-        let v14 = builder.insert_cast(v13, Type::unsigned(8));
-        builder.insert_store(v10, v14);
-        builder.terminate_with_jmp(b3, vec![]);
-        builder.switch_to_block(b2);
-        builder.insert_store(v10, zero);
-        builder.terminate_with_jmp(b3, vec![]);
-        builder.switch_to_block(b3);
-        let v_true = builder.numeric_constant(true, Type::bool());
-        let v12 = builder.insert_binary(v9, BinaryOp::Eq, v_true);
-        builder.insert_constrain(v12, v_true, None);
-        builder.terminate_with_return(vec![]);
-        let ssa = builder.finish();
+
+        let src = "
+            acir(inline) fn main f0 {
+              b0(v0: [u8; 2]):
+                v2 = array_get v0, index u8 0 -> u8
+                v3 = cast v2 as u32
+                v4 = truncate v3 to 1 bits, max_bit_size: 32
+                v5 = cast v4 as u1
+                v6 = allocate -> &mut Field
+                store u8 0 at v6
+                jmpif v5 then: b2, else: b1
+              b2():
+                v7 = cast v2 as Field
+                v9 = add v7, Field 1
+                v10 = cast v9 as u8
+                store v10 at v6
+                jmp b3()
+              b3():
+                constrain v5 == u1 1
+                return
+              b1():
+                store u8 0 at v6
+                jmp b3()
+            }
+            ";
+
+        let ssa = Ssa::from_str(src).unwrap();
+
+        let expected = "
+            acir(inline) fn main f0 {
+              b0(v0: [u8; 2]):
+                v2 = array_get v0, index u8 0 -> u8
+                v3 = cast v2 as u32
+                v4 = truncate v3 to 1 bits, max_bit_size: 32
+                v5 = cast v4 as u1
+                v6 = allocate -> &mut Field
+                store u8 0 at v6
+                enable_side_effects v5
+                v7 = cast v2 as Field
+                v9 = add v7, Field 1
+                v10 = cast v9 as u8
+                v11 = load v6 -> u8
+                v12 = not v5
+                v13 = cast v4 as u8
+                v14 = cast v12 as u8
+                v15 = mul v13, v10
+                v16 = mul v14, v11
+                v17 = add v15, v16
+                store v17 at v6
+                v18 = not v5
+                enable_side_effects v18
+                v19 = load v6 -> u8
+                v20 = cast v18 as u8
+                v21 = cast v4 as u8
+                v22 = mul v21, v19
+                store v22 at v6
+                enable_side_effects u1 1
+                constrain v5 == u1 1
+                return
+            }
+            ";
+
         let flattened_ssa = ssa.flatten_cfg();
         let main = flattened_ssa.main();
+
         // Now assert that there is not an always-false constraint after flattening:
         let mut constrain_count = 0;
         for instruction in main.dfg[main.entry_block()].instructions() {
@@ -1414,6 +1350,8 @@
             }
         }
         assert_eq!(constrain_count, 1);
+
+        assert_normalized_ssa_equals(flattened_ssa, expected);
     }

     #[test]
@@ -1549,7 +1487,7 @@
           b2():
             return
           b1():
-            jmp b2() 
+            jmp b2()
         }
         ";
         let merged_ssa = Ssa::from_str(src).unwrap();
diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/capacity_tracker.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/capacity_tracker.rs
index ddc8b0bfe6b..a01be691778 100644
--- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/capacity_tracker.rs
+++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/capacity_tracker.rs
@@ -21,7 +21,7 @@
 impl<'a> SliceCapacityTracker<'a> {
     pub(crate) fn collect_slice_information(
         &self,
         instruction: &Instruction,
-        slice_sizes: &mut HashMap<ValueId, usize>,
+        slice_sizes: &mut HashMap<ValueId, u32>,
         results: &[ValueId],
     ) {
         match instruction {
@@ -106,13 +106,12 @@
                     Intrinsic::ToBits(_) => {
                         // Compiler sanity check
                         assert!(matches!(self.dfg.type_of_value(result_slice), Type::Slice(_)));
-                        slice_sizes.insert(result_slice, FieldElement::max_num_bits() as usize);
+                        slice_sizes.insert(result_slice, FieldElement::max_num_bits());
                     }
                     Intrinsic::ToRadix(_) => {
                         // Compiler sanity check
                         assert!(matches!(self.dfg.type_of_value(result_slice), Type::Slice(_)));
-                        slice_sizes
-                            .insert(result_slice, FieldElement::max_num_bytes() as usize);
+                        slice_sizes.insert(result_slice, FieldElement::max_num_bytes());
                     }
                     Intrinsic::AsSlice => {
                         let array_size = self
@@ -157,7 +156,7 @@
     pub(crate) fn compute_slice_capacity(
         &self,
         array_id: ValueId,
-        slice_sizes: &mut HashMap<ValueId, usize>,
+        slice_sizes: &mut HashMap<ValueId, u32>,
     ) {
         if let Some((array, typ)) = self.dfg.get_array_constant(array_id) {
             // Compiler sanity check
@@ -165,7 +164,7 @@
             if let Type::Slice(_) = typ {
                 let element_size = typ.element_size();
                 let len = array.len() / element_size;
-                slice_sizes.insert(array_id, len);
+                slice_sizes.insert(array_id, len as u32);
             }
         }
     }
diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs
index 8ea26d4e96d..6ea235b9414 100644
--- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs
+++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs
@@ -17,7 +17,7 @@
 pub(crate) struct ValueMerger<'a> {
     // Maps SSA array values with a slice type to their size.
     // This must be computed before merging values.
-    slice_sizes: &'a mut HashMap<ValueId, usize>,
+    slice_sizes: &'a mut HashMap<ValueId, u32>,

     array_set_conditionals: &'a mut HashMap<ValueId, ValueId>,

@@ -28,7 +28,7 @@
 impl<'a> ValueMerger<'a> {
     pub(crate) fn new(
         dfg: &'a mut DataFlowGraph,
         block: BasicBlockId,
-        slice_sizes: &'a mut HashMap<ValueId, usize>,
+        slice_sizes: &'a mut HashMap<ValueId, u32>,
         array_set_conditionals: &'a mut HashMap<ValueId, ValueId>,
         current_condition: Option<ValueId>,
         call_stack: CallStack,
@@ -54,6 +54,7 @@
     pub(crate) fn merge_values(
         &mut self,
         then_condition: ValueId,
+        else_condition: ValueId,
         then_value: ValueId,
         else_value: ValueId,
     ) -> ValueId {
@@ -69,14 +70,15 @@
                 self.dfg,
                 self.block,
                 then_condition,
+                else_condition,
                 then_value,
                 else_value,
             ),
             typ @ Type::Array(_, _) => {
-                self.merge_array_values(typ, then_condition, then_value, else_value)
+                self.merge_array_values(typ, then_condition, else_condition, then_value, else_value)
             }
             typ @ Type::Slice(_) => {
-                self.merge_slice_values(typ, then_condition, then_value, else_value)
+                self.merge_slice_values(typ, then_condition, else_condition, then_value, else_value)
             }
             Type::Reference(_) => panic!("Cannot return references from an if expression"),
             Type::Function => panic!("Cannot return functions from an if expression"),
         }
     }
@@ -84,11 +86,12 @@
     /// Merge two numeric values a and b from separate basic blocks to a single value. This
-    /// function would return the result of `if c { a } else { b }` as `c * (a-b) + b`.
+    /// function would return the result of `if c { a } else { b }` as `c*a + (!c)*b`.
     pub(crate) fn merge_numeric_values(
         dfg: &mut DataFlowGraph,
         block: BasicBlockId,
         then_condition: ValueId,
+        else_condition: ValueId,
         then_value: ValueId,
         else_value: ValueId,
     ) -> ValueId {
@@ -111,38 +114,31 @@
         // We must cast the bool conditions to the actual numeric type used by each value.
let then_condition = dfg .insert_instruction_and_results( - Instruction::Cast(then_condition, Type::field()), + Instruction::Cast(then_condition, then_type), block, None, call_stack.clone(), ) .first(); - - let then_field = Instruction::Cast(then_value, Type::field()); - let then_field_value = - dfg.insert_instruction_and_results(then_field, block, None, call_stack.clone()).first(); - - let else_field = Instruction::Cast(else_value, Type::field()); - let else_field_value = - dfg.insert_instruction_and_results(else_field, block, None, call_stack.clone()).first(); - - let diff = Instruction::binary(BinaryOp::Sub, then_field_value, else_field_value); - let diff_value = - dfg.insert_instruction_and_results(diff, block, None, call_stack.clone()).first(); - - let conditional_diff = Instruction::binary(BinaryOp::Mul, then_condition, diff_value); - let conditional_diff_value = dfg - .insert_instruction_and_results(conditional_diff, block, None, call_stack.clone()) + let else_condition = dfg + .insert_instruction_and_results( + Instruction::Cast(else_condition, else_type), + block, + None, + call_stack.clone(), + ) .first(); - let merged_field = - Instruction::binary(BinaryOp::Add, else_field_value, conditional_diff_value); - let merged_field_value = dfg - .insert_instruction_and_results(merged_field, block, None, call_stack.clone()) - .first(); + let mul = Instruction::binary(BinaryOp::Mul, then_condition, then_value); + let then_value = + dfg.insert_instruction_and_results(mul, block, None, call_stack.clone()).first(); + + let mul = Instruction::binary(BinaryOp::Mul, else_condition, else_value); + let else_value = + dfg.insert_instruction_and_results(mul, block, None, call_stack.clone()).first(); - let merged = Instruction::Cast(merged_field_value, then_type); - dfg.insert_instruction_and_results(merged, block, None, call_stack).first() + let add = Instruction::binary(BinaryOp::Add, then_value, else_value); + dfg.insert_instruction_and_results(add, block, None, call_stack).first() } /// Given an if expression that returns an array: `if c { array1 } else { array2 }`, @@ -152,6 +148,7 @@ impl<'a> ValueMerger<'a> { &mut self, typ: Type, then_condition: ValueId, + else_condition: ValueId, then_value: ValueId, else_value: ValueId, ) -> ValueId { @@ -162,10 +159,11 @@ impl<'a> ValueMerger<'a> { _ => panic!("Expected array type"), }; - let actual_length = len * element_types.len(); + let actual_length = len * element_types.len() as u32; if let Some(result) = self.try_merge_only_changed_indices( then_condition, + else_condition, then_value, else_value, actual_length, @@ -175,7 +173,8 @@ impl<'a> ValueMerger<'a> { for i in 0..len { for (element_index, element_type) in element_types.iter().enumerate() { - let index = ((i * element_types.len() + element_index) as u128).into(); + let index = + ((i * element_types.len() as u32 + element_index as u32) as u128).into(); let index = self.dfg.make_constant(index, Type::field()); let typevars = Some(vec![element_type.clone()]); @@ -195,7 +194,12 @@ impl<'a> ValueMerger<'a> { let then_element = get_element(then_value, typevars.clone()); let else_element = get_element(else_value, typevars); - merged.push_back(self.merge_values(then_condition, then_element, else_element)); + merged.push_back(self.merge_values( + then_condition, + else_condition, + then_element, + else_element, + )); } } @@ -208,6 +212,7 @@ impl<'a> ValueMerger<'a> { &mut self, typ: Type, then_condition: ValueId, + else_condition: ValueId, then_value_id: ValueId, else_value_id: ValueId, ) -> ValueId { 
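
The hunk above is the heart of the encoding change: the removed code computed a numeric `if c { a } else { b }` as `c * (a - b) + b` after casting everything to `Field`, while the new code computes `then_condition * then_value + else_condition * else_value` (i.e. `c*a + (!c)*b`) directly in the values' own numeric type. A minimal standalone sketch of the two encodings (plain Rust for illustration only, not the compiler's API; `c` is assumed to be 0 or 1):

// Sketch of the two merge encodings; assumes c is boolean (0 or 1).
fn merge_old(c: i128, a: i128, b: i128) -> i128 {
    // previous encoding: c * (a - b) + b
    c * (a - b) + b
}

fn merge_new(c: i128, a: i128, b: i128) -> i128 {
    // new encoding: c * a + (1 - c) * b, where (1 - c) plays the role of
    // the explicit else_condition threaded through Instruction::IfElse
    c * a + (1 - c) * b
}

fn main() {
    for c in [0, 1] {
        assert_eq!(merge_old(c, 7, 3), merge_new(c, 7, 3));
    }
}

Both forms agree for a boolean condition; the new form simply mirrors the `else_condition` that flattening now carries alongside `then_condition`.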
@@ -222,22 +227,22 @@
             let (slice, typ) = self.dfg.get_array_constant(then_value_id).unwrap_or_else(|| {
                 panic!("ICE: Merging values during flattening encountered slice {then_value_id} without a preset size");
             });
-            slice.len() / typ.element_types().len()
+            (slice.len() / typ.element_types().len()) as u32
         });

         let else_len = self.slice_sizes.get(&else_value_id).copied().unwrap_or_else(|| {
             let (slice, typ) = self.dfg.get_array_constant(else_value_id).unwrap_or_else(|| {
                 panic!("ICE: Merging values during flattening encountered slice {else_value_id} without a preset size");
             });
-            slice.len() / typ.element_types().len()
+            (slice.len() / typ.element_types().len()) as u32
         });

         let len = then_len.max(else_len);

         for i in 0..len {
             for (element_index, element_type) in element_types.iter().enumerate() {
-                let index_usize = i * element_types.len() + element_index;
-                let index_value = (index_usize as u128).into();
+                let index_u32 = i * element_types.len() as u32 + element_index as u32;
+                let index_value = (index_u32 as u128).into();
                 let index = self.dfg.make_constant(index_value, Type::field());

                 let typevars = Some(vec![element_type.clone()]);
@@ -245,7 +250,7 @@
                 let mut get_element = |array, typevars, len| {
                     // The smaller slice is filled with placeholder data. Codegen for slice accesses must
                     // include checks against the dynamic slice length so that this placeholder data is not incorrectly accessed.
-                    if len <= index_usize {
+                    if len <= index_u32 {
                         self.make_slice_dummy_data(element_type)
                     } else {
                         let get = Instruction::ArrayGet { array, index };
@@ -260,12 +265,20 @@
                     }
                 };

-                let then_element =
-                    get_element(then_value_id, typevars.clone(), then_len * element_types.len());
+                let then_element = get_element(
+                    then_value_id,
+                    typevars.clone(),
+                    then_len * element_types.len() as u32,
+                );
                 let else_element =
-                    get_element(else_value_id, typevars, else_len * element_types.len());
-
-                merged.push_back(self.merge_values(then_condition, then_element, else_element));
+                    get_element(else_value_id, typevars, else_len * element_types.len() as u32);
+
+                merged.push_back(self.merge_values(
+                    then_condition,
+                    else_condition,
+                    then_element,
+                    else_element,
+                ));
             }
         }

@@ -314,9 +327,10 @@
     fn try_merge_only_changed_indices(
         &mut self,
         then_condition: ValueId,
+        else_condition: ValueId,
         then_value: ValueId,
         else_value: ValueId,
-        array_length: usize,
+        array_length: u32,
     ) -> Option<ValueId> {
         let mut found = false;
         let current_condition = self.current_condition?;
@@ -370,7 +384,7 @@
             .chain(seen_else.into_iter().map(|(_, index, typ, condition)| (index, typ, condition)))
             .collect();

-        if !found || changed_indices.len() >= array_length {
+        if !found || changed_indices.len() as u32 >= array_length {
             return None;
         }

@@ -397,7 +411,8 @@
             let then_element = get_element(then_value, typevars.clone());
             let else_element = get_element(else_value, typevars);

-            let value = self.merge_values(then_condition, then_element, else_element);
+            let value =
+                self.merge_values(then_condition, else_condition, then_element, else_element);

             array = self.insert_array_set(array, index, value, Some(condition)).first();
         }
diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/inlining.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/inlining.rs
index 6cf7070e65e..f91487fd73e 100644
--- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/inlining.rs
+++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/inlining.rs
@@ -1089,7 +1089,6 @@
     }

     #[test]
-    #[ignore]
     #[should_panic(
         expected = "Attempted to recur more than 1000 times during inlining function 'main': acir(inline) fn main f0 {"
    )]
diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/loop_invariant.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/loop_invariant.rs
index 14233ca73e5..290d2a33846 100644
--- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/loop_invariant.rs
+++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/loop_invariant.rs
@@ -7,14 +7,16 @@
 //! - Already marked as loop invariants
 //!
 //! We also check that we are not hoisting instructions with side effects.
-use fxhash::FxHashSet as HashSet;
+use acvm::{acir::AcirField, FieldElement};
+use fxhash::{FxHashMap as HashMap, FxHashSet as HashSet};

 use crate::ssa::{
     ir::{
         basic_block::BasicBlockId,
         function::{Function, RuntimeType},
         function_inserter::FunctionInserter,
-        instruction::InstructionId,
+        instruction::{Instruction, InstructionId},
+        types::Type,
         value::ValueId,
     },
     Ssa,
@@ -45,25 +47,51 @@
 }

 impl Loops {
-    fn hoist_loop_invariants(self, function: &mut Function) {
+    fn hoist_loop_invariants(mut self, function: &mut Function) {
         let mut context = LoopInvariantContext::new(function);

-        for loop_ in self.yet_to_unroll.iter() {
+        // The loops should be sorted by the number of blocks.
+        // We want to access outer nested loops first, which we do by popping
+        // from the top of the list.
+        while let Some(loop_) = self.yet_to_unroll.pop() {
             let Ok(pre_header) = loop_.get_pre_header(context.inserter.function, &self.cfg) else {
                 // If the loop does not have a preheader we skip hoisting loop invariants for this loop
                 continue;
             };
-            context.hoist_loop_invariants(loop_, pre_header);
+
+            context.hoist_loop_invariants(&loop_, pre_header);
         }

         context.map_dependent_instructions();
     }
 }

+impl Loop {
+    /// Find the value that controls whether to perform a loop iteration.
+    /// This is going to be the block parameter of the loop header.
+    ///
+    /// Consider the following example of a `for i in 0..4` loop:
+    /// ```text
+    /// brillig(inline) fn main f0 {
+    ///   b0(v0: u32):
+    ///     ...
+    ///     jmp b1(u32 0)
+    ///   b1(v1: u32):  // Loop header
+    ///     v5 = lt v1, u32 4  // Upper bound
+    ///     jmpif v5 then: b3, else: b2
+    /// ```
+    /// In the example above, `v1` is the induction variable.
+    fn get_induction_variable(&self, function: &Function) -> ValueId {
+        function.dfg.block_parameters(self.header)[0]
+    }
+}
+
 struct LoopInvariantContext<'f> {
     inserter: FunctionInserter<'f>,
     defined_in_loop: HashSet<ValueId>,
     loop_invariants: HashSet<ValueId>,
+    // Maps induction variable -> fixed upper loop bound
+    outer_induction_variables: HashMap<ValueId, FieldElement>,
 }

 impl<'f> LoopInvariantContext<'f> {
@@ -72,6 +100,7 @@
             inserter: FunctionInserter::new(function),
             defined_in_loop: HashSet::default(),
             loop_invariants: HashSet::default(),
+            outer_induction_variables: HashMap::default(),
         }
     }

@@ -88,13 +117,29 @@
                 self.inserter.push_instruction(instruction_id, *block);
             }

-            self.update_values_defined_in_loop_and_invariants(instruction_id, hoist_invariant);
+            self.extend_values_defined_in_loop_and_invariants(instruction_id, hoist_invariant);
         }
     }
+
+        // Keep track of a loop induction variable and its respective upper bound.
+        // This will be used by later loops to determine whether they have operations
+        // reliant upon the maximum induction variable.
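+        // For example (hypothetical SSA): in
+        //   for i in 0..4 { for j in 0..i { let _ = arr[i]; } }
+        // the outer loop records (i -> 4), which later lets an inner loop treat
+        // `array_get arr, index i` as hoistable whenever arr.len() >= 4.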
+        let upper_bound = loop_.get_const_upper_bound(self.inserter.function);
+        if let Some(upper_bound) = upper_bound {
+            let induction_variable = loop_.get_induction_variable(self.inserter.function);
+            let induction_variable = self.inserter.resolve(induction_variable);
+            self.outer_induction_variables.insert(induction_variable, upper_bound);
+        }
     }

     /// Gather the variables declared within the loop
     fn set_values_defined_in_loop(&mut self, loop_: &Loop) {
+        // Clear any values that may be defined in previous loops, as the context is per function.
+        self.defined_in_loop.clear();
+        // These are safe to keep per function, but we want to be clear that these values
+        // are used per loop.
+        self.loop_invariants.clear();
+
         for block in loop_.blocks.iter() {
             let params = self.inserter.function.dfg.block_parameters(*block);
             self.defined_in_loop.extend(params);
@@ -107,7 +152,7 @@

     /// Update any values defined in the loop and loop invariants after
     /// analyzing and re-inserting a loop's instruction.
-    fn update_values_defined_in_loop_and_invariants(
+    fn extend_values_defined_in_loop_and_invariants(
         &mut self,
         instruction_id: InstructionId,
         hoist_invariant: bool,
@@ -143,9 +188,44 @@
             is_loop_invariant &=
                 !self.defined_in_loop.contains(&value) || self.loop_invariants.contains(&value);
         });
-        is_loop_invariant && instruction.can_be_deduplicated(&self.inserter.function.dfg, false)
+
+        let can_be_deduplicated = instruction.can_be_deduplicated(self.inserter.function, false)
+            || self.can_be_deduplicated_from_upper_bound(&instruction);
+
+        is_loop_invariant && can_be_deduplicated
+    }
+
+    /// Certain instructions can take advantage of the fact that our induction variable has a fixed maximum.
+    ///
+    /// For example, an array access can usually only be safely deduplicated when we have a constant
+    /// index that is below the length of the array.
+    /// Checking an array get where the index is the loop's induction variable on its own
+    /// would determine that the instruction is not safe for hoisting.
+    /// However, if we know that the induction variable's upper bound will always be in bounds of the array
+    /// we can safely hoist the array access.
+    fn can_be_deduplicated_from_upper_bound(&self, instruction: &Instruction) -> bool {
+        match instruction {
+            Instruction::ArrayGet { array, index } => {
+                let array_typ = self.inserter.function.dfg.type_of_value(*array);
+                let upper_bound = self.outer_induction_variables.get(index);
+                if let (Type::Array(_, len), Some(upper_bound)) = (array_typ, upper_bound) {
+                    upper_bound.to_u128() <= len.into()
+                } else {
+                    false
+                }
+            }
+            _ => false,
+        }
     }

+    /// Loop invariant hoisting only operates over loop instructions.
+    /// The `FunctionInserter` is used for mapping old values to new values after
+    /// re-inserting loop invariant instructions.
+    /// However, there may be instructions which are not within loops that are
+    /// still reliant upon the instruction results altered during the pass.
+    /// This method re-inserts all instructions so that all instructions have
+    /// correct new value IDs based upon the `FunctionInserter` internal map.
+    /// Leaving out this mapping could lead to instructions with values that do not exist.
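+    ///
+    /// For example (hypothetical IDs): if `v5 = array_get v6, index v2` was
+    /// hoisted and re-inserted as `v10` in a loop's pre-header, a later
+    /// `constrain v5 == v0` outside the loop must become `constrain v10 == v0`.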
fn map_dependent_instructions(&mut self) { let blocks = self.inserter.function.reachable_blocks(); for block in blocks { @@ -375,4 +455,108 @@ mod test { // The code should be unchanged assert_normalized_ssa_equals(ssa, src); } + + #[test] + fn hoist_array_gets_using_induction_variable_with_const_bound() { + // SSA for the following program: + // + // fn triple_loop(x: u32) { + // let arr = [2; 5]; + // for i in 0..4 { + // for j in 0..4 { + // for _ in 0..4 { + // assert_eq(arr[i], x); + // assert_eq(arr[j], x); + // } + // } + // } + // } + // + // `arr[i]` and `arr[j]` are safe to hoist as we know the maximum possible index + // to be used for both array accesses. + // We want to make sure `arr[i]` is hoisted to the outermost loop body and that + // `arr[j]` is hoisted to the second outermost loop body. + let src = " + brillig(inline) fn main f0 { + b0(v0: u32, v1: u32): + v6 = make_array [u32 2, u32 2, u32 2, u32 2, u32 2] : [u32; 5] + inc_rc v6 + jmp b1(u32 0) + b1(v2: u32): + v9 = lt v2, u32 4 + jmpif v9 then: b3, else: b2 + b3(): + jmp b4(u32 0) + b4(v3: u32): + v10 = lt v3, u32 4 + jmpif v10 then: b6, else: b5 + b6(): + jmp b7(u32 0) + b7(v4: u32): + v13 = lt v4, u32 4 + jmpif v13 then: b9, else: b8 + b9(): + v15 = array_get v6, index v2 -> u32 + v16 = eq v15, v0 + constrain v15 == v0 + v17 = array_get v6, index v3 -> u32 + v18 = eq v17, v0 + constrain v17 == v0 + v19 = add v4, u32 1 + jmp b7(v19) + b8(): + v14 = add v3, u32 1 + jmp b4(v14) + b5(): + v12 = add v2, u32 1 + jmp b1(v12) + b2(): + return + } + "; + + let ssa = Ssa::from_str(src).unwrap(); + + let expected = " + brillig(inline) fn main f0 { + b0(v0: u32, v1: u32): + v6 = make_array [u32 2, u32 2, u32 2, u32 2, u32 2] : [u32; 5] + inc_rc v6 + jmp b1(u32 0) + b1(v2: u32): + v9 = lt v2, u32 4 + jmpif v9 then: b3, else: b2 + b3(): + v10 = array_get v6, index v2 -> u32 + v11 = eq v10, v0 + jmp b4(u32 0) + b4(v3: u32): + v12 = lt v3, u32 4 + jmpif v12 then: b6, else: b5 + b6(): + v15 = array_get v6, index v3 -> u32 + v16 = eq v15, v0 + jmp b7(u32 0) + b7(v4: u32): + v17 = lt v4, u32 4 + jmpif v17 then: b9, else: b8 + b9(): + constrain v10 == v0 + constrain v15 == v0 + v19 = add v4, u32 1 + jmp b7(v19) + b8(): + v18 = add v3, u32 1 + jmp b4(v18) + b5(): + v14 = add v2, u32 1 + jmp b1(v14) + b2(): + return + } + "; + + let ssa = ssa.loop_invariant_code_motion(); + assert_normalized_ssa_equals(ssa, expected); + } } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs index 0690dbbf204..53a31ae57c1 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs @@ -18,6 +18,7 @@ //! - A reference with 0 aliases means we were unable to find which reference this reference //! refers to. If such a reference is stored to, we must conservatively invalidate every //! reference in the current block. +//! - We also track the last load instruction to each address per block. //! //! From there, to figure out the value of each reference at the end of block, iterate each instruction: //! - On `Instruction::Allocate`: @@ -28,6 +29,13 @@ //! - Furthermore, if the result of the load is a reference, mark the result as an alias //! of the reference it dereferences to (if known). //! - If which reference it dereferences to is not known, this load result has no aliases. +//! - We also track the last instance of a load instruction to each address in a block. +//! 
If we see that the last load instruction was from the same address as the current load instruction,
+//!   we move to replace the result of the current load with the result of the previous load.
+//!   This removal requires a couple of conditions:
+//!   - No store occurs to that address before the next load,
+//!   - The address is not used as an argument to a call
+//!   This optimization helps us remove repeated loads for which there are no known values.
 //! - On `Instruction::Store { address, value }`:
 //!   - If the address of the store is known:
 //!     - If the address has exactly 1 alias:
@@ -40,11 +48,13 @@
 //!   - Conservatively mark every alias in the block to `Unknown`.
 //!   - Additionally, if there were no Loads to any alias of the address between this Store and
 //!     the previous Store to the same address, the previous store can be removed.
+//!   - Remove the instance of the last load instruction to the address and its aliases
 //! - On `Instruction::Call { arguments }`:
 //!   - If any argument of the call is a reference, set the value of each alias of that
 //!     reference to `Unknown`
 //!   - Any builtin functions that may return aliases if their input also contains a
 //!     reference should be tracked. Examples: `slice_push_back`, `slice_insert`, `slice_remove`, etc.
+//!   - Remove the instance of the last load instruction for any reference arguments and their aliases
 //!
 //! On a terminator instruction:
 //! - If the terminator is a `Jmp`:
@@ -274,6 +284,9 @@
         if let Some(first_predecessor) = predecessors.next() {
             let mut first = self.blocks.get(&first_predecessor).cloned().unwrap_or_default();
             first.last_stores.clear();
+            // Last loads are tracked per block. During unification we are creating a new block from the current one,
+            // so we must clear the last loads of the current block before we return the new block.
+            first.last_loads.clear();

             // Note that we have to start folding with the first block as the accumulator.
             // If we started with an empty block, an empty block union'd with any other block
@@ -410,6 +423,28 @@
                     self.last_loads.insert(address, (instruction, block_id));
                 }
+
+                // Check whether the block has a repeat load from the same address (w/ no calls or stores in between the loads).
+                // If we do have a repeat load, we can remove the current load and map its result to the previous load's result.
+                if let Some(last_load) = references.last_loads.get(&address) {
+                    let Instruction::Load { address: previous_address } =
+                        &self.inserter.function.dfg[*last_load]
+                    else {
+                        panic!("Expected a Load instruction here");
+                    };
+                    let result = self.inserter.function.dfg.instruction_results(instruction)[0];
+                    let previous_result =
+                        self.inserter.function.dfg.instruction_results(*last_load)[0];
+                    if *previous_address == address {
+                        self.inserter.map_value(result, previous_result);
+                        self.instructions_to_remove.insert(instruction);
+                    }
+                }
+                // We want to set the load for every load even if the address has a known value
+                // and the previous load instruction was removed.
+                // We are safe to still remove a repeat load in this case as we are mapping from the current load's
+                // result to the previous load, which if it was removed should already have a mapping to the known value.
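+                // For example (hypothetical IDs): given `v3 = load v1` and a
+                // later `v4 = load v1` with no store to v1 or call in between,
+                // `v4` is mapped to `v3` and the second load is removed.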
+                references.set_last_load(address, instruction);
             }
             Instruction::Store { address, value } => {
                 let address = self.inserter.function.dfg.resolve(*address);
@@ -435,6 +470,8 @@
                 }

                 references.set_known_value(address, value);
+                // If we see a store to an address, the last load to that address needs to remain.
+                references.keep_last_load_for(address, self.inserter.function);
                 references.last_stores.insert(address, instruction);
             }
             Instruction::Allocate => {
@@ -542,6 +579,9 @@
                 let value = self.inserter.function.dfg.resolve(*value);
                 references.set_unknown(value);
                 references.mark_value_used(value, self.inserter.function);
+
+                // If a reference is an argument to a call, the last load to that address and its aliases needs to remain.
+                references.keep_last_load_for(value, self.inserter.function);
             }
         }
     }
@@ -572,6 +612,12 @@
                 let destination_parameters = self.inserter.function.dfg[*destination].parameters();
                 assert_eq!(destination_parameters.len(), arguments.len());

+                // If we have multiple parameters that alias that same argument value,
+                // then those parameters also alias each other.
+                // We save parameters with repeat arguments to later mark those
+                // parameters as aliasing one another.
+                let mut arg_set: HashMap<ValueId, BTreeSet<ValueId>> = HashMap::default();
+
                 // Add an alias for each reference parameter
                 for (parameter, argument) in destination_parameters.iter().zip(arguments) {
                     if self.inserter.function.dfg.value_is_reference(*parameter) {
@@ -581,10 +627,27 @@
                             if let Some(aliases) = references.aliases.get_mut(expression) {
                                 // The argument reference is possibly aliased by this block parameter
                                 aliases.insert(*parameter);
+
+                                // Check if we have seen the same argument
+                                let seen_parameters = arg_set.entry(argument).or_default();
+                                // Add the current parameter to the parameters we have seen for this argument.
+                                // The previous parameters and the current one alias one another.
+                                seen_parameters.insert(*parameter);
                             }
                         }
                     }
                 }
+
+                // Set the aliases of the parameters
+                for (_, aliased_params) in arg_set {
+                    for param in aliased_params.iter() {
+                        self.set_aliases(
+                            references,
+                            *param,
+                            AliasSet::known_multiple(aliased_params.clone()),
+                        );
+                    }
+                }
             }
             TerminatorInstruction::Return { return_values, ..
} => { // Removing all `last_stores` for each returned reference is more important here @@ -612,6 +675,8 @@ mod tests { map::Id, types::Type, }, + opt::assert_normalized_ssa_equals, + Ssa, }; #[test] @@ -822,88 +887,53 @@ mod tests { // is later stored in a successor block #[test] fn load_aliases_in_predecessor_block() { - // fn main { - // b0(): - // v0 = allocate - // store Field 0 at v0 - // v2 = allocate - // store v0 at v2 - // v3 = load v2 - // v4 = load v2 - // jmp b1() - // b1(): - // store Field 1 at v3 - // store Field 2 at v4 - // v7 = load v3 - // v8 = eq v7, Field 2 - // return - // } - let main_id = Id::test_new(0); - let mut builder = FunctionBuilder::new("main".into(), main_id); - - let v0 = builder.insert_allocate(Type::field()); - - let zero = builder.field_constant(0u128); - builder.insert_store(v0, zero); - - let v2 = builder.insert_allocate(Type::Reference(Arc::new(Type::field()))); - builder.insert_store(v2, v0); - - let v3 = builder.insert_load(v2, Type::field()); - let v4 = builder.insert_load(v2, Type::field()); - let b1 = builder.insert_block(); - builder.terminate_with_jmp(b1, vec![]); - - builder.switch_to_block(b1); - - let one = builder.field_constant(1u128); - builder.insert_store(v3, one); - - let two = builder.field_constant(2u128); - builder.insert_store(v4, two); - - let v8 = builder.insert_load(v3, Type::field()); - let _ = builder.insert_binary(v8, BinaryOp::Eq, two); - - builder.terminate_with_return(vec![]); - - let ssa = builder.finish(); - assert_eq!(ssa.main().reachable_blocks().len(), 2); + let src = " + acir(inline) fn main f0 { + b0(): + v0 = allocate -> &mut Field + store Field 0 at v0 + v2 = allocate -> &mut &mut Field + store v0 at v2 + v3 = load v2 -> &mut Field + v4 = load v2 -> &mut Field + jmp b1() + b1(): + store Field 1 at v3 + store Field 2 at v4 + v7 = load v3 -> Field + v8 = eq v7, Field 2 + return + } + "; - // Expected result: - // acir fn main f0 { - // b0(): - // v9 = allocate - // store Field 0 at v9 - // v10 = allocate - // jmp b1() - // b1(): - // return - // } - let ssa = ssa.mem2reg(); - println!("{}", ssa); + let mut ssa = Ssa::from_str(src).unwrap(); + let main = ssa.main_mut(); - let main = ssa.main(); - assert_eq!(main.reachable_blocks().len(), 2); + let instructions = main.dfg[main.entry_block()].instructions(); + assert_eq!(instructions.len(), 6); // The final return is not counted // All loads should be removed - assert_eq!(count_loads(main.entry_block(), &main.dfg), 0); - assert_eq!(count_loads(b1, &main.dfg), 0); - // The first store is not removed as it is used as a nested reference in another store. - // We would need to track whether the store where `v9` is the store value gets removed to know whether + // We would need to track whether the store where `v0` is the store value gets removed to know whether // to remove it. - assert_eq!(count_stores(main.entry_block(), &main.dfg), 1); // The first store in b1 is removed since there is another store to the same reference // in the same block, and the store is not needed before the later store. // The rest of the stores are also removed as no loads are done within any blocks // to the stored values. 
-        assert_eq!(count_stores(b1, &main.dfg), 0);
-
-        let b1_instructions = main.dfg[b1].instructions();
+        let expected = "
+            acir(inline) fn main f0 {
+              b0():
+                v0 = allocate -> &mut Field
+                store Field 0 at v0
+                v2 = allocate -> &mut &mut Field
+                jmp b1()
+              b1():
+                return
+            }
+            ";

-        // We expect the last eq to be optimized out
-        assert_eq!(b1_instructions.len(), 0);
+        let ssa = ssa.mem2reg();
+        assert_normalized_ssa_equals(ssa, expected);
     }

     #[test]
@@ -933,7 +963,7 @@ mod tests {
     //     v10 = eq v9, Field 2
     //     constrain v9 == Field 2
     //     v11 = load v2
-    //     v12 = load v10
+    //     v12 = load v11
     //     v13 = eq v12, Field 2
     //     constrain v11 == Field 2
     //     return
@@ -992,7 +1022,7 @@ mod tests {
         let main = ssa.main();
         assert_eq!(main.reachable_blocks().len(), 4);

-        // The store from the original SSA should remain
+        // The stores from the original SSA should remain
         assert_eq!(count_stores(main.entry_block(), &main.dfg), 2);
         assert_eq!(count_stores(b2, &main.dfg), 1);

@@ -1039,4 +1069,160 @@ mod tests {
         let main = ssa.main();
         assert_eq!(count_loads(main.entry_block(), &main.dfg), 1);
     }
+
+    #[test]
+    fn remove_repeat_loads() {
+        // This test starts with two loads from the same unknown address.
+        // Specifically you should look for `load v2` in `b3`.
+        // We should be able to remove the second repeated load.
+        let src = "
+            acir(inline) fn main f0 {
+              b0():
+                v0 = allocate -> &mut Field
+                store Field 0 at v0
+                v2 = allocate -> &mut &mut Field
+                store v0 at v2
+                jmp b1(Field 0)
+              b1(v3: Field):
+                v4 = eq v3, Field 0
+                jmpif v4 then: b2, else: b3
+              b2():
+                v5 = load v2 -> &mut Field
+                store Field 2 at v5
+                v8 = add v3, Field 1
+                jmp b1(v8)
+              b3():
+                v9 = load v0 -> Field
+                v10 = eq v9, Field 2
+                constrain v9 == Field 2
+                v11 = load v2 -> &mut Field
+                v12 = load v2 -> &mut Field
+                v13 = load v12 -> Field
+                v14 = eq v13, Field 2
+                constrain v13 == Field 2
+                return
+            }
+            ";

+        let ssa = Ssa::from_str(src).unwrap();
+
+        // The repeated load from v2 (named v3 in the normalized SSA below) should be removed.
+        // b3 should only have three loads now rather than four previously.
+        //
+        // All stores are expected to remain.
+        let expected = "
+            acir(inline) fn main f0 {
+              b0():
+                v1 = allocate -> &mut Field
+                store Field 0 at v1
+                v3 = allocate -> &mut &mut Field
+                store v1 at v3
+                jmp b1(Field 0)
+              b1(v0: Field):
+                v4 = eq v0, Field 0
+                jmpif v4 then: b3, else: b2
+              b3():
+                v11 = load v3 -> &mut Field
+                store Field 2 at v11
+                v13 = add v0, Field 1
+                jmp b1(v13)
+              b2():
+                v5 = load v1 -> Field
+                v7 = eq v5, Field 2
+                constrain v5 == Field 2
+                v8 = load v3 -> &mut Field
+                v9 = load v8 -> Field
+                v10 = eq v9, Field 2
+                constrain v9 == Field 2
+                return
+            }
+            ";

+        let ssa = ssa.mem2reg();
+        assert_normalized_ssa_equals(ssa, expected);
+    }
+
+    #[test]
+    fn keep_repeat_loads_passed_to_a_call() {
+        // This test is exactly the same as `remove_repeat_loads` above, except with the call
+        // to `f1` between the repeated loads.
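+        // (Why the loads must stay: the callee receives the reference and may store
+        // through it, so the load after `call f1(v3)` cannot be assumed to return the
+        // same value as the load before it.)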
+        let src = "
+            acir(inline) fn main f0 {
+              b0():
+                v1 = allocate -> &mut Field
+                store Field 0 at v1
+                v3 = allocate -> &mut &mut Field
+                store v1 at v3
+                jmp b1(Field 0)
+              b1(v0: Field):
+                v4 = eq v0, Field 0
+                jmpif v4 then: b3, else: b2
+              b3():
+                v13 = load v3 -> &mut Field
+                store Field 2 at v13
+                v15 = add v0, Field 1
+                jmp b1(v15)
+              b2():
+                v5 = load v1 -> Field
+                v7 = eq v5, Field 2
+                constrain v5 == Field 2
+                v8 = load v3 -> &mut Field
+                call f1(v3)
+                v10 = load v3 -> &mut Field
+                v11 = load v10 -> Field
+                v12 = eq v11, Field 2
+                constrain v11 == Field 2
+                return
+            }
+            acir(inline) fn foo f1 {
+              b0(v0: &mut Field):
+                return
+            }
+            ";

+        let ssa = Ssa::from_str(src).unwrap();

+        let ssa = ssa.mem2reg();
+        // We expect the program to be unchanged
+        assert_normalized_ssa_equals(ssa, src);
+    }
+
+    #[test]
+    fn keep_repeat_loads_with_alias_store() {
+        // The block parameters v1, v2, and v3 alias one another. We want to make sure that a
+        // repeat load from v1 with a store to its aliases in between the repeat loads does not
+        // remove those loads.
+        let src = "
+            acir(inline) fn main f0 {
+              b0(v0: u1):
+                jmpif v0 then: b2, else: b1
+              b2():
+                v6 = allocate -> &mut Field
+                store Field 0 at v6
+                jmp b3(v6, v6, v6)
+              b3(v1: &mut Field, v2: &mut Field, v3: &mut Field):
+                v8 = load v1 -> Field
+                store Field 2 at v2
+                v10 = load v1 -> Field
+                store Field 1 at v3
+                v11 = load v1 -> Field
+                store Field 3 at v3
+                v13 = load v1 -> Field
+                constrain v8 == Field 0
+                constrain v10 == Field 2
+                constrain v11 == Field 1
+                constrain v13 == Field 3
+                return
+              b1():
+                v4 = allocate -> &mut Field
+                store Field 1 at v4
+                jmp b3(v4, v4, v4)
+            }
+            ";

+        let ssa = Ssa::from_str(src).unwrap();

+        let ssa = ssa.mem2reg();
+        // We expect the program to be unchanged
+        assert_normalized_ssa_equals(ssa, src);
+    }
 }

diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_bit_shifts.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_bit_shifts.rs
index cdbb1043232..ccf5bd9d9f8 100644
--- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_bit_shifts.rs
+++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_bit_shifts.rs
@@ -191,7 +191,7 @@ impl Context<'_> {
         let typ = self.function.dfg.type_of_value(rhs);
         if let Type::Numeric(NumericType::Unsigned { bit_size }) = typ {
             let to_bits = self.function.dfg.import_intrinsic(Intrinsic::ToBits(Endian::Little));
-            let result_types = vec![Type::Array(Arc::new(vec![Type::bool()]), bit_size as usize)];
+            let result_types = vec![Type::Array(Arc::new(vec![Type::bool()]), bit_size)];
             let rhs_bits = self.insert_call(to_bits, vec![rhs], result_types);

             let rhs_bits = rhs_bits[0];

diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_if_else.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_if_else.rs
index 8e25c3f0a35..02191801fcd 100644
--- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_if_else.rs
+++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_if_else.rs
@@ -48,7 +48,7 @@ impl Function {

 #[derive(Default)]
 struct Context {
-    slice_sizes: HashMap<ValueId, usize>,
+    slice_sizes: HashMap<ValueId, u32>,

     // Maps array_set result -> element that was overwritten by that instruction.
     // Used to undo array_sets while merging values
@@ -66,8 +66,9 @@ impl Context {
         for instruction in instructions {
             match &function.dfg[instruction] {
-                Instruction::IfElse { then_condition, then_value, else_value } => {
+                Instruction::IfElse { then_condition, then_value, else_condition, else_value } => {
                     let then_condition = *then_condition;
+                    let else_condition = *else_condition;
                     let then_value = *then_value;
                     let else_value = *else_value;

@@ -84,7 +85,12 @@ impl Context {
                         call_stack,
                     );

-                    let value = value_merger.merge_values(then_condition, then_value, else_value);
+                    let value = value_merger.merge_values(
+                        then_condition,
+                        else_condition,
+                        then_value,
+                        else_value,
+                    );

                     let _typ = function.dfg.type_of_value(value);
                     let results = function.dfg.instruction_results(instruction);
@@ -142,13 +148,13 @@ impl Context {
         }
     }

-    fn get_or_find_capacity(&mut self, dfg: &DataFlowGraph, value: ValueId) -> usize {
+    fn get_or_find_capacity(&mut self, dfg: &DataFlowGraph, value: ValueId) -> u32 {
         match self.slice_sizes.entry(value) {
             Entry::Occupied(entry) => return *entry.get(),
             Entry::Vacant(entry) => {
                 if let Some((array, typ)) = dfg.get_array_constant(value) {
                     let length = array.len() / typ.element_types().len();
-                    return *entry.insert(length);
+                    return *entry.insert(length as u32);
                 }

                 if let Type::Array(_, length) = dfg.type_of_value(value) {
@@ -164,7 +170,7 @@ impl Context {

 enum SizeChange {
     None,
-    SetTo(ValueId, usize),
+    SetTo(ValueId, u32),

     // These two variants store the old and new slice ids
     // not their lengths which should be old_len = new_len +/- 1

diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/unrolling.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/unrolling.rs
index 777c16dacd1..1a13acc5435 100644
--- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/unrolling.rs
+++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/unrolling.rs
@@ -19,8 +19,10 @@
 //! When unrolling ACIR code, we remove reference count instructions because they are
 //! only used by Brillig bytecode.
 use acvm::{acir::AcirField, FieldElement};
+use im::HashSet;

 use crate::{
+    brillig::brillig_gen::convert_ssa_function,
     errors::RuntimeError,
     ssa::{
         ir::{
@@ -37,38 +39,60 @@ use crate::{
         ssa_gen::Ssa,
     },
 };
-use fxhash::{FxHashMap as HashMap, FxHashSet as HashSet};
+use fxhash::FxHashMap as HashMap;

 impl Ssa {
     /// Loop unrolling can return errors, since ACIR functions need to be fully unrolled.
     /// This meta-pass will keep trying to unroll loops and simplifying the SSA until no more errors are found.
-    #[tracing::instrument(level = "trace", skip(ssa))]
-    pub(crate) fn unroll_loops_iteratively(mut ssa: Ssa) -> Result<Ssa, RuntimeError> {
-        for (_, function) in ssa.functions.iter_mut() {
+    ///
+    /// The `max_bytecode_increase_percent`, when given, is used to limit the growth of the Brillig bytecode
+    /// size after unrolling small loops to some percentage of the original loop. For example a value of 150
+    /// means the new loop can be 150% larger than the original loop (i.e. up to 2.5 times its size). It will
+    /// still contain fewer SSA instructions, but that can still result in more Brillig opcodes.
+    #[tracing::instrument(level = "trace", skip(self))]
+    pub(crate) fn unroll_loops_iteratively(
+        mut self: Ssa,
+        max_bytecode_increase_percent: Option<i32>,
+    ) -> Result<Ssa, RuntimeError> {
+        for (_, function) in self.functions.iter_mut() {
+            // Take a snapshot of the function to compare byte size increase,
+            // but only if the setting indicates we have to, otherwise skip it.
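+            // (`filter` keeps the limit only for Brillig functions and `map` pairs it
+            // with a clone of the function, so this is `None`, i.e. no snapshot to
+            // compare against later, unless a limit was given and the function is Brillig.)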
+            let orig_func_and_max_incr_pct = max_bytecode_increase_percent
+                .filter(|_| function.runtime().is_brillig())
+                .map(|max_incr_pct| (function.clone(), max_incr_pct));
+
             // Try to unroll loops first:
-            let mut unroll_errors = function.try_unroll_loops();
+            let (mut has_unrolled, mut unroll_errors) = function.try_unroll_loops();

             // Keep unrolling until no more errors are found
             while !unroll_errors.is_empty() {
                 let prev_unroll_err_count = unroll_errors.len();

                 // Simplify the SSA before retrying
-
-                // Do a mem2reg after the last unroll to aid simplify_cfg
-                function.mem2reg();
-                function.simplify_function();
-                // Do another mem2reg after simplify_cfg to aid the next unroll
-                function.mem2reg();
+                simplify_between_unrolls(function);

                 // Unroll again
-                unroll_errors = function.try_unroll_loops();
+                let (new_unrolled, new_errors) = function.try_unroll_loops();
+                unroll_errors = new_errors;
+                has_unrolled |= new_unrolled;
+
                 // If we didn't manage to unroll any more loops, exit
                 if unroll_errors.len() >= prev_unroll_err_count {
                     return Err(unroll_errors.swap_remove(0));
                 }
             }
+
+            if has_unrolled {
+                if let Some((orig_function, max_incr_pct)) = orig_func_and_max_incr_pct {
+                    let new_size = brillig_bytecode_size(function);
+                    let orig_size = brillig_bytecode_size(&orig_function);
+                    if !is_new_size_ok(orig_size, new_size, max_incr_pct) {
+                        *function = orig_function;
+                    }
+                }
+            }
         }
-        Ok(ssa)
+        Ok(self)
     }
 }

@@ -77,7 +101,7 @@ impl Function {
     // This can also be true for ACIR, but we have no alternative to unrolling in ACIR.
     // Brillig also generally prefers smaller code rather than faster code,
     // so we only attempt to unroll small loops, which we decide on a case-by-case basis.
-    fn try_unroll_loops(&mut self) -> Vec<RuntimeError> {
+    fn try_unroll_loops(&mut self) -> (bool, Vec<RuntimeError>) {
         Loops::find_all(self).unroll_each(self)
     }
 }

@@ -85,7 +109,7 @@ impl Function {
 pub(super) struct Loop {
     /// The header block of a loop is the block which dominates all the
     /// other blocks in the loop.
-    header: BasicBlockId,
+    pub(super) header: BasicBlockId,

     /// The start of the back_edge n -> d is the block n at the end of
     /// the loop that jumps back to the header block d which restarts the loop.
@@ -170,8 +194,10 @@ impl Loops {
     /// Unroll all loops within a given function.
     /// Any loops which fail to be unrolled (due to using non-constant indices) will be unmodified.
-    fn unroll_each(mut self, function: &mut Function) -> Vec<RuntimeError> {
+    /// Returns whether any blocks have been modified
+    fn unroll_each(mut self, function: &mut Function) -> (bool, Vec<RuntimeError>) {
         let mut unroll_errors = vec![];
+        let mut has_unrolled = false;
         while let Some(next_loop) = self.yet_to_unroll.pop() {
             if function.runtime().is_brillig() && !next_loop.is_small_loop(function, &self.cfg) {
                 continue;
@@ -181,13 +207,17 @@ impl Loops {
             if next_loop.blocks.iter().any(|block| self.modified_blocks.contains(block)) {
                 let mut new_loops = Self::find_all(function);
                 new_loops.failed_to_unroll = self.failed_to_unroll;
-                return unroll_errors.into_iter().chain(new_loops.unroll_each(function)).collect();
+                let (new_unrolled, new_errors) = new_loops.unroll_each(function);
+                return (has_unrolled || new_unrolled, [unroll_errors, new_errors].concat());
             }

             // Don't try to unroll the loop again if it is known to fail
             if !self.failed_to_unroll.contains(&next_loop.header) {
                 match next_loop.unroll(function, &self.cfg) {
-                    Ok(_) => self.modified_blocks.extend(next_loop.blocks),
+                    Ok(_) => {
+                        has_unrolled = true;
+                        self.modified_blocks.extend(next_loop.blocks);
+                    }
                     Err(call_stack) => {
                         self.failed_to_unroll.insert(next_loop.header);
                         unroll_errors.push(RuntimeError::UnknownLoopBound { call_stack });
@@ -195,7 +225,7 @@ impl Loops {
                 }
             }
         }
-        unroll_errors
+        (has_unrolled, unroll_errors)
     }
 }

@@ -269,7 +299,7 @@ impl Loop {
     ///   v5 = lt v1, u32 4 // Upper bound
     ///   jmpif v5 then: b3, else: b2
     /// ```
-    fn get_const_upper_bound(&self, function: &Function) -> Option<FieldElement> {
+    pub(super) fn get_const_upper_bound(&self, function: &Function) -> Option<FieldElement> {
         let block = &function.dfg[self.header];
         let instructions = block.instructions();
         assert_eq!(
@@ -947,21 +977,59 @@ impl<'f> LoopIteration<'f> {
     }
 }

+/// Unrolling leaves some duplicate instructions which can potentially be removed.
+fn simplify_between_unrolls(function: &mut Function) {
+    // Do a mem2reg after the last unroll to aid simplify_cfg
+    function.mem2reg();
+    function.simplify_function();
+    // Do another mem2reg after simplify_cfg to aid the next unroll
+    function.mem2reg();
+}
+
+/// Convert the function to Brillig bytecode and return the resulting size.
+fn brillig_bytecode_size(function: &Function) -> usize {
+    // We need to do some SSA passes in order for the conversion to be able to go ahead,
+    // otherwise we can hit `unreachable!()` instructions in `convert_ssa_instruction`.
+    // Creating a clone so as not to modify the originals.
+    let mut temp = function.clone();
+
+    // Might as well give it the best chance.
+    simplify_between_unrolls(&mut temp);
+
+    // This is to try to prevent hitting ICE.
+    temp.dead_instruction_elimination(false);
+
+    convert_ssa_function(&temp, false).byte_code.len()
+}
+
+/// Decide if the new bytecode size is acceptable, compared to the original.
+///
+/// The maximum increase can be expressed as a negative value if we demand a decrease.
+/// (Values -100 and under mean the new size should be 0).
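+///
+/// For example (a worked case matching the `test_is_new_size_ok` cases below): with
+/// `orig_size = 1000` and `max_incr_pct = 50`, `max_size_pct` is 150, so a `new_size`
+/// of 1500 passes (`1500 * 100 <= 1000 * 150`) while 1501 fails.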
+fn is_new_size_ok(orig_size: usize, new_size: usize, max_incr_pct: i32) -> bool { + let max_size_pct = 100i32.saturating_add(max_incr_pct).max(0) as usize; + let max_size = orig_size.saturating_mul(max_size_pct); + new_size.saturating_mul(100) <= max_size +} + #[cfg(test)] mod tests { use acvm::FieldElement; + use test_case::test_case; use crate::errors::RuntimeError; use crate::ssa::{ir::value::ValueId, opt::assert_normalized_ssa_equals, Ssa}; - use super::{BoilerplateStats, Loops}; + use super::{is_new_size_ok, BoilerplateStats, Loops}; - /// Tries to unroll all loops in each SSA function. + /// Tries to unroll all loops in each SSA function once, calling the `Function` directly, + /// bypassing the iterative loop done by the SSA which does further optimisations. + /// /// If any loop cannot be unrolled, it is left as-is or in a partially unrolled state. fn try_unroll_loops(mut ssa: Ssa) -> (Ssa, Vec) { let mut errors = vec![]; for function in ssa.functions.values_mut() { - errors.extend(function.try_unroll_loops()); + errors.extend(function.try_unroll_loops().1); } (ssa, errors) } @@ -1221,9 +1289,26 @@ mod tests { let (ssa, errors) = try_unroll_loops(ssa); assert_eq!(errors.len(), 0, "Unroll should have no errors"); + // Check that it's still the original assert_normalized_ssa_equals(ssa, parse_ssa().to_string().as_str()); } + #[test] + fn test_brillig_unroll_iteratively_respects_max_increase() { + let ssa = brillig_unroll_test_case(); + let ssa = ssa.unroll_loops_iteratively(Some(-90)).unwrap(); + // Check that it's still the original + assert_normalized_ssa_equals(ssa, brillig_unroll_test_case().to_string().as_str()); + } + + #[test] + fn test_brillig_unroll_iteratively_with_large_max_increase() { + let ssa = brillig_unroll_test_case(); + let ssa = ssa.unroll_loops_iteratively(Some(50)).unwrap(); + // Check that it did the unroll + assert_eq!(ssa.main().reachable_blocks().len(), 2, "The loop should be unrolled"); + } + /// Test that `break` and `continue` stop unrolling without any panic. 
 #[test]
 fn test_brillig_unroll_break_and_continue() {
@@ -1377,4 +1462,14 @@ mod tests {
         let loop0 = loops.yet_to_unroll.pop().expect("there should be a loop");
         loop0.boilerplate_stats(function, &loops.cfg).expect("there should be stats")
     }
+
+    #[test_case(1000, 700, 50, true; "size decreased")]
+    #[test_case(1000, 1500, 50, true; "size increased just by the max")]
+    #[test_case(1000, 1501, 50, false; "size increased over the max")]
+    #[test_case(1000, 700, -50, false; "size decreased but not enough")]
+    #[test_case(1000, 250, -50, true; "size decreased over expectations")]
+    #[test_case(1000, 250, -1250, false; "demanding more than minus 100 is handled")]
+    fn test_is_new_size_ok(old: usize, new: usize, max: i32, ok: bool) {
+        assert_eq!(is_new_size_ok(old, new, max), ok);
+    }
 }

diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/lexer.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/lexer.rs
index d89bc1e9e28..5b66810c641 100644
--- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/lexer.rs
+++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/lexer.rs
@@ -62,6 +62,7 @@ impl<'a> Lexer<'a> {
             Some('-') if self.peek_char() == Some('>') => self.double_char_token(Token::Arrow),
             Some('-') => self.single_char_token(Token::Dash),
             Some('"') => self.eat_string_literal(),
+            Some('b') if self.peek_char() == Some('"') => self.eat_byte_string_literal(),
             Some(ch) if ch.is_ascii_alphanumeric() || ch == '_' => self.eat_alpha_numeric(ch),
             Some(char) => Err(LexerError::UnexpectedCharacter {
                 char,
@@ -180,8 +181,23 @@ impl<'a> Lexer<'a> {
     fn eat_string_literal(&mut self) -> SpannedTokenResult {
         let start = self.position;
-        let mut string = String::new();
+        let string = self.eat_string(start)?;
+        let str_literal_token = Token::Str(string);
+        let end = self.position;
+        Ok(str_literal_token.into_span(start, end))
+    }
+
+    fn eat_byte_string_literal(&mut self) -> SpannedTokenResult {
+        let start = self.position;
+        self.next_char(); // skip the b
+        let string = self.eat_string(start)?;
+        let str_literal_token = Token::ByteStr(string);
+        let end = self.position;
+        Ok(str_literal_token.into_span(start, end))
+    }
+
+    fn eat_string(&mut self, start: u32) -> Result<String, LexerError> {
+        let mut string = String::new();
         while let Some(next) = self.next_char() {
             let char = match next {
                 '"' => break,
@@ -206,11 +222,7 @@ impl<'a> Lexer<'a> {

             string.push(char);
         }
-
-        let str_literal_token = Token::Str(string);
-
-        let end = self.position;
-        Ok(str_literal_token.into_span(start, end))
+        Ok(string)
     }

     fn eat_while<F: Fn(char) -> bool>(

diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/mod.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/mod.rs
index 3d8bd37dead..24a5ff43071 100644
--- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/mod.rs
+++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/mod.rs
@@ -4,7 +4,10 @@ use std::{
 };

 use super::{
-    ir::{instruction::BinaryOp, types::Type},
+    ir::{
+        instruction::BinaryOp,
+        types::{NumericType, Type},
+    },
     Ssa,
 };

@@ -448,12 +451,39 @@ impl<'a> Parser<'a> {
         }

         if self.eat_keyword(Keyword::MakeArray)? {
-            self.eat_or_error(Token::LeftBracket)?;
-            let elements = self.parse_comma_separated_values()?;
-            self.eat_or_error(Token::RightBracket)?;
-            self.eat_or_error(Token::Colon)?;
-            let typ = self.parse_type()?;
-            return Ok(ParsedInstruction::MakeArray { target, elements, typ });
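+            // Three forms are accepted here (an explanatory aside):
+            //   make_array &b"..."    : lowered to a slice of u8 constants,
+            //   make_array b"..."     : lowered to a [u8; N] array of the string's bytes,
+            //   make_array [...] : T  : the general bracketed form, unchanged.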
+            if self.eat(Token::Ampersand)? {
+                let Some(string) = self.eat_byte_str()? else {
+                    return self.expected_byte_string();
+                };
+                let u8 = Type::Numeric(NumericType::Unsigned { bit_size: 8 });
+                let typ = Type::Slice(Arc::new(vec![u8.clone()]));
+                let elements = string
+                    .bytes()
+                    .map(|byte| ParsedValue::NumericConstant {
+                        constant: FieldElement::from(byte as u128),
+                        typ: u8.clone(),
+                    })
+                    .collect();
+                return Ok(ParsedInstruction::MakeArray { target, elements, typ });
+            } else if let Some(string) = self.eat_byte_str()? {
+                let u8 = Type::Numeric(NumericType::Unsigned { bit_size: 8 });
+                let typ = Type::Array(Arc::new(vec![u8.clone()]), string.len() as u32);
+                let elements = string
+                    .bytes()
+                    .map(|byte| ParsedValue::NumericConstant {
+                        constant: FieldElement::from(byte as u128),
+                        typ: u8.clone(),
+                    })
+                    .collect();
+                return Ok(ParsedInstruction::MakeArray { target, elements, typ });
+            } else {
+                self.eat_or_error(Token::LeftBracket)?;
+                let elements = self.parse_comma_separated_values()?;
+                self.eat_or_error(Token::RightBracket)?;
+                self.eat_or_error(Token::Colon)?;
+                let typ = self.parse_type()?;
+                return Ok(ParsedInstruction::MakeArray { target, elements, typ });
+            }
         }

         if self.eat_keyword(Keyword::Not)? {
@@ -656,7 +686,7 @@ impl<'a> Parser<'a> {
             if self.eat(Token::Semicolon)? {
                 let length = self.eat_int_or_error()?;
                 self.eat_or_error(Token::RightBracket)?;
-                return Ok(Type::Array(Arc::new(element_types), length.to_u128() as usize));
+                return Ok(Type::Array(Arc::new(element_types), length.to_u128() as u32));
             } else {
                 self.eat_or_error(Token::RightBracket)?;
                 return Ok(Type::Slice(Arc::new(element_types)));
@@ -796,6 +826,18 @@ impl<'a> Parser<'a> {
         }
     }

+    fn eat_byte_str(&mut self) -> ParseResult<Option<String>> {
+        if matches!(self.token.token(), Token::ByteStr(..)) {
+            let token = self.bump()?;
+            match token.into_token() {
+                Token::ByteStr(string) => Ok(Some(string)),
+                _ => unreachable!(),
+            }
+        } else {
+            Ok(None)
+        }
+    }
+
     fn eat(&mut self, token: Token) -> ParseResult<bool> {
         if self.token.token() == &token {
             self.bump()?;
@@ -848,6 +890,13 @@ impl<'a> Parser<'a> {
         })
     }

+    fn expected_byte_string<T>(&mut self) -> ParseResult<T> {
+        Err(ParserError::ExpectedByteString {
+            found: self.token.token().clone(),
+            span: self.token.to_span(),
+        })
+    }
+
     fn expected_identifier<T>(&mut self) -> ParseResult<T> {
         Err(ParserError::ExpectedIdentifier {
             found: self.token.token().clone(),
@@ -911,6 +960,8 @@ pub(crate) enum ParserError {
     ExpectedInstructionOrTerminator { found: Token, span: Span },
     #[error("Expected a string literal or 'data', found '{found}'")]
     ExpectedStringOrData { found: Token, span: Span },
+    #[error("Expected a byte string literal, found '{found}'")]
+    ExpectedByteString { found: Token, span: Span },
     #[error("Expected a value, found '{found}'")]
     ExpectedValue { found: Token, span: Span },
     #[error("Multiple return values only allowed for call")]
@@ -928,6 +979,7 @@ impl ParserError {
             | ParserError::ExpectedType { span, .. }
             | ParserError::ExpectedInstructionOrTerminator { span, .. }
             | ParserError::ExpectedStringOrData { span, .. }
+            | ParserError::ExpectedByteString { span, .. }
             | ParserError::ExpectedValue { span, .. } => *span,
             ParserError::MultipleReturnValuesOnlyAllowedForCall { second_target, ..
} => { second_target.span diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/tests.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/tests.rs index 593b66d0c98..6318f9dc56e 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/tests.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/tests.rs @@ -89,6 +89,30 @@ fn test_make_composite_array() { assert_ssa_roundtrip(src); } +#[test] +fn test_make_byte_array_with_string_literal() { + let src = " + acir(inline) fn main f0 { + b0(): + v9 = make_array b\"Hello world!\" + return v9 + } + "; + assert_ssa_roundtrip(src); +} + +#[test] +fn test_make_byte_slice_with_string_literal() { + let src = " + acir(inline) fn main f0 { + b0(): + v9 = make_array &b\"Hello world!\" + return v9 + } + "; + assert_ssa_roundtrip(src); +} + #[test] fn test_block_parameters() { let src = " @@ -228,14 +252,14 @@ fn test_constrain_with_static_message() { #[test] fn test_constrain_with_dynamic_message() { - let src = " + let src = r#" acir(inline) fn main f0 { b0(v0: Field, v1: Field): - v7 = make_array [u8 123, u8 120, u8 125, u8 32, u8 123, u8 121, u8 125] : [u8; 7] + v7 = make_array b"{x} {y}" constrain v0 == Field 1, data v7, u32 2, v0, v1 return } - "; + "#; assert_ssa_roundtrip(src); } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/token.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/token.rs index d8dd4ec011e..83a2a1d1ed2 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/token.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/token.rs @@ -30,6 +30,7 @@ pub(crate) enum Token { Ident(String), Int(FieldElement), Str(String), + ByteStr(String), Keyword(Keyword), IntType(IntType), /// = @@ -79,6 +80,7 @@ impl Display for Token { Token::Ident(ident) => write!(f, "{}", ident), Token::Int(int) => write!(f, "{}", int), Token::Str(string) => write!(f, "{string:?}"), + Token::ByteStr(string) => write!(f, "{string:?}"), Token::Keyword(keyword) => write!(f, "{}", keyword), Token::IntType(int_type) => write!(f, "{}", int_type), Token::Assign => write!(f, "="), diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs index 0c6041029da..116e0de4ecd 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs @@ -20,7 +20,7 @@ use crate::ssa::ir::value::ValueId; use super::value::{Tree, Value, Values}; use super::SSA_WORD_SIZE; -use fxhash::FxHashMap as HashMap; +use fxhash::{FxHashMap as HashMap, FxHashSet as HashSet}; /// The FunctionContext is the main context object for translating a /// function into SSA form during the SSA-gen pass. @@ -159,7 +159,8 @@ impl<'a> FunctionContext<'a> { let parameter_value = Self::map_type(parameter_type, |typ| { let value = self.builder.add_parameter(typ); if mutable { - self.new_mutable_variable(value) + // This will wrap any `mut var: T` in a reference and increase the rc of an array if needed + self.new_mutable_variable(value, true) } else { value.into() } @@ -170,8 +171,17 @@ impl<'a> FunctionContext<'a> { /// Allocate a single slot of memory and store into it the given initial value of the variable. /// Always returns a Value::Mutable wrapping the allocate instruction. 
- pub(super) fn new_mutable_variable(&mut self, value_to_store: ValueId) -> Value { + pub(super) fn new_mutable_variable( + &mut self, + value_to_store: ValueId, + increment_array_rc: bool, + ) -> Value { let element_type = self.builder.current_function.dfg.type_of_value(value_to_store); + + if increment_array_rc { + self.builder.increment_array_reference_count(value_to_store); + } + let alloc = self.builder.insert_allocate(element_type); self.builder.insert_store(alloc, value_to_store); let typ = self.builder.type_of_value(value_to_store); @@ -236,12 +246,12 @@ impl<'a> FunctionContext<'a> { ast::Type::Field => Type::field(), ast::Type::Array(len, element) => { let element_types = Self::convert_type(element).flatten(); - Type::Array(Arc::new(element_types), *len as usize) + Type::Array(Arc::new(element_types), *len) } ast::Type::Integer(Signedness::Signed, bits) => Type::signed((*bits).into()), ast::Type::Integer(Signedness::Unsigned, bits) => Type::unsigned((*bits).into()), ast::Type::Bool => Type::unsigned(1), - ast::Type::String(len) => Type::str(*len as usize), + ast::Type::String(len) => Type::str(*len), ast::Type::FmtString(_, _) => { panic!("convert_non_tuple_type called on a fmt string: {typ}") } @@ -732,10 +742,6 @@ impl<'a> FunctionContext<'a> { let element_types = Self::convert_type(element_type); values.map_both(element_types, |value, element_type| { let reference = value.eval_reference(); - // Reference counting in brillig relies on us incrementing reference - // counts when arrays/slices are constructed or indexed. - // Thus, if we dereference an lvalue which happens to be array/slice we should increment its reference counter. - self.builder.increment_array_reference_count(reference); self.builder.insert_load(reference, element_type).into() }) } @@ -907,33 +913,55 @@ impl<'a> FunctionContext<'a> { } } - /// Increments the reference count of all parameters. Returns the entry block of the function. + /// Increments the reference count of mutable reference array parameters. + /// Any mutable-value (`mut a: [T; N]` versus `a: &mut [T; N]`) are already incremented + /// by `FunctionBuilder::add_parameter_to_scope`. + /// Returns each array id that was incremented. /// /// This is done on parameters rather than call arguments so that we can optimize out /// paired inc/dec instructions within brillig functions more easily. - pub(crate) fn increment_parameter_rcs(&mut self) -> BasicBlockId { + pub(crate) fn increment_parameter_rcs(&mut self) -> HashSet { let entry = self.builder.current_function.entry_block(); let parameters = self.builder.current_function.dfg.block_parameters(entry).to_vec(); + let mut incremented = HashSet::default(); + let mut seen_array_types = HashSet::default(); + for parameter in parameters { - self.builder.increment_array_reference_count(parameter); + // Avoid reference counts for immutable arrays that aren't behind references. + let typ = self.builder.current_function.dfg.type_of_value(parameter); + + if let Type::Reference(element) = typ { + if element.contains_an_array() { + // If we haven't already seen this array type, the value may be possibly + // aliased, so issue an inc_rc for it. + if !seen_array_types.insert(element.get_contained_array().clone()) + && self.builder.increment_array_reference_count(parameter) + { + incremented.insert(parameter); + } + } + } } - entry + incremented } /// Ends a local scope of a function. /// This will issue DecrementRc instructions for any arrays in the given starting scope /// block's parameters. 
Arrays that are also used in terminator instructions for the scope are /// ignored. - pub(crate) fn end_scope(&mut self, scope: BasicBlockId, terminator_args: &[ValueId]) { - let mut dropped_parameters = - self.builder.current_function.dfg.block_parameters(scope).to_vec(); - - dropped_parameters.retain(|parameter| !terminator_args.contains(parameter)); + pub(crate) fn end_scope( + &mut self, + mut incremented_params: HashSet, + terminator_args: &[ValueId], + ) { + incremented_params.retain(|parameter| !terminator_args.contains(parameter)); - for parameter in dropped_parameters { - self.builder.decrement_array_reference_count(parameter); + for parameter in incremented_params { + if self.builder.current_function.dfg.value_is_reference(parameter) { + self.builder.decrement_array_reference_count(parameter); + } } } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs index c50f0a7f45c..2fe0a38af00 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs @@ -125,10 +125,10 @@ impl<'a> FunctionContext<'a> { /// Codegen a function's body and set its return value to that of its last parameter. /// For functions returning nothing, this will be an empty list. fn codegen_function_body(&mut self, body: &Expression) -> Result<(), RuntimeError> { - let entry_block = self.increment_parameter_rcs(); + let incremented_params = self.increment_parameter_rcs(); let return_value = self.codegen_expression(body)?; let results = return_value.into_value_list(self); - self.end_scope(entry_block, &results); + self.end_scope(incremented_params, &results); self.builder.terminate_with_return(results); Ok(()) @@ -195,8 +195,7 @@ impl<'a> FunctionContext<'a> { fn codegen_literal(&mut self, literal: &ast::Literal) -> Result { match literal { ast::Literal::Array(array) => { - let elements = - try_vecmap(&array.contents, |element| self.codegen_expression(element))?; + let elements = self.codegen_array_elements(&array.contents)?; let typ = Self::convert_type(&array.typ).flatten(); Ok(match array.typ { @@ -207,8 +206,7 @@ impl<'a> FunctionContext<'a> { }) } ast::Literal::Slice(array) => { - let elements = - try_vecmap(&array.contents, |element| self.codegen_expression(element))?; + let elements = self.codegen_array_elements(&array.contents)?; let typ = Self::convert_type(&array.typ).flatten(); Ok(match array.typ { @@ -245,18 +243,33 @@ impl<'a> FunctionContext<'a> { } } + fn codegen_array_elements( + &mut self, + elements: &[Expression], + ) -> Result, RuntimeError> { + try_vecmap(elements, |element| { + let value = self.codegen_expression(element)?; + Ok((value, element.is_array_or_slice_literal())) + }) + } + fn codegen_string(&mut self, string: &str) -> Values { let elements = vecmap(string.as_bytes(), |byte| { - self.builder.numeric_constant(*byte as u128, Type::unsigned(8)).into() + let char = self.builder.numeric_constant(*byte as u128, Type::unsigned(8)); + (char.into(), false) }); let typ = Self::convert_non_tuple_type(&ast::Type::String(elements.len() as u32)); self.codegen_array(elements, typ) } // Codegen an array but make sure that we do not have a nested slice + /// + /// The bool aspect of each array element indicates whether the element is an array constant + /// or not. If it is, we avoid incrementing the reference count because we consider the + /// constant to be moved into this larger array constant. 
fn codegen_array_checked( &mut self, - elements: Vec, + elements: Vec<(Values, bool)>, typ: Type, ) -> Result { if typ.is_nested_slice() { @@ -273,11 +286,15 @@ impl<'a> FunctionContext<'a> { /// stored next to the other fields in memory. So an array such as [(1, 2), (3, 4)] is /// stored the same as the array [1, 2, 3, 4]. /// + /// The bool aspect of each array element indicates whether the element is an array constant + /// or not. If it is, we avoid incrementing the reference count because we consider the + /// constant to be moved into this larger array constant. + /// /// The value returned from this function is always that of the allocate instruction. - fn codegen_array(&mut self, elements: Vec, typ: Type) -> Values { + fn codegen_array(&mut self, elements: Vec<(Values, bool)>, typ: Type) -> Values { let mut array = im::Vector::new(); - for element in elements { + for (element, is_array_constant) in elements { element.for_each(|element| { let element = element.eval(self); @@ -286,7 +303,10 @@ impl<'a> FunctionContext<'a> { // pessimistic reference count (since some are likely moved rather than shared) // which is important for Brillig's copy on write optimization. This has no // effect in ACIR code. - self.builder.increment_array_reference_count(element); + if !is_array_constant { + self.builder.increment_array_reference_count(element); + } + array.push_back(element); }); } @@ -662,15 +682,22 @@ impl<'a> FunctionContext<'a> { fn codegen_let(&mut self, let_expr: &ast::Let) -> Result { let mut values = self.codegen_expression(&let_expr.expression)?; + // Don't mutate the reference count if we're assigning an array literal to a Let: + // `let mut foo = [1, 2, 3];` + // we consider the array to be moved, so we should have an initial rc of just 1. 
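+        // (Aside: only literals count as moves here. For `let mut bar = foo;`, where
+        // `foo` is an existing array, `should_inc_rc` below is true and the rc is still
+        // incremented, since `foo` may remain in use elsewhere.)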
+        let should_inc_rc = !let_expr.expression.is_array_or_slice_literal();
+
         values = values.map(|value| {
             let value = value.eval(self);

-            // Make sure to increment array reference counts on each let binding
-            self.builder.increment_array_reference_count(value);
-
             Tree::Leaf(if let_expr.mutable {
-                self.new_mutable_variable(value)
+                self.new_mutable_variable(value, should_inc_rc)
             } else {
+                // `new_mutable_variable` increments rcs internally so we have to
+                // handle it separately for the immutable case
+                if should_inc_rc {
+                    self.builder.increment_array_reference_count(value);
+                }
                 value::Value::Normal(value)
             })
         });
@@ -729,10 +756,14 @@ impl<'a> FunctionContext<'a> {
     fn codegen_assign(&mut self, assign: &ast::Assign) -> Result<Values, RuntimeError> {
         let lhs = self.extract_current_value(&assign.lvalue)?;
         let rhs = self.codegen_expression(&assign.expression)?;
+        let should_inc_rc = !assign.expression.is_array_or_slice_literal();

         rhs.clone().for_each(|value| {
             let value = value.eval(self);
-            self.builder.increment_array_reference_count(value);
+
+            if should_inc_rc {
+                self.builder.increment_array_reference_count(value);
+            }
         });

         self.assign_new_value(lhs, rhs);

diff --git a/noir/noir-repo/compiler/noirc_frontend/src/debug/mod.rs b/noir/noir-repo/compiler/noirc_frontend/src/debug/mod.rs
index fed3149118b..f05fc721581 100644
--- a/noir/noir-repo/compiler/noirc_frontend/src/debug/mod.rs
+++ b/noir/noir-repo/compiler/noirc_frontend/src/debug/mod.rs
@@ -67,12 +67,16 @@ impl DebugInstrumenter {
         self.insert_state_set_oracle(module, 8);
     }

-    fn insert_var(&mut self, var_name: &str) -> SourceVarId {
+    fn insert_var(&mut self, var_name: &str) -> Option<SourceVarId> {
+        if var_name == "_" {
+            return None;
+        }
+
         let var_id = SourceVarId(self.next_var_id);
         self.next_var_id += 1;
         self.variables.insert(var_id, var_name.to_string());
         self.scope.last_mut().unwrap().insert(var_name.to_string(), var_id);
-        var_id
+        Some(var_id)
     }

     fn lookup_var(&self, var_name: &str) -> Option<SourceVarId> {
@@ -107,9 +111,9 @@ impl DebugInstrumenter {
             .flat_map(|param| {
                 pattern_vars(&param.pattern)
                     .iter()
-                    .map(|(id, _is_mut)| {
-                        let var_id = self.insert_var(&id.0.contents);
-                        build_assign_var_stmt(var_id, id_expr(id))
+                    .filter_map(|(id, _is_mut)| {
+                        let var_id = self.insert_var(&id.0.contents)?;
+                        Some(build_assign_var_stmt(var_id, id_expr(id)))
                     })
                     .collect::<Vec<ast::Statement>>()
             })
@@ -225,13 +229,28 @@ impl DebugInstrumenter {
                 }
             })
             .collect();
-        let vars_exprs: Vec<ast::Expression> = vars.iter().map(|(id, _)| id_expr(id)).collect();
+        let vars_exprs: Vec<ast::Expression> = vars
+            .iter()
+            .map(|(id, _)| {
+                // We don't want to generate an expression to read from "_".
+                // And since this expression is going to be assigned to "_", it doesn't
+                // matter what it is, so we can use `()` for it.
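+                // (A sketch of the effect, assuming the usual `__debug_var_assign` oracle:
+                // for `let (_, y) = point;` only `y` gets a var id and an assign call,
+                // while the slot read for `_` is synthesized as `()`.)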
+ if id.0.contents == "_" { + ast::Expression { + kind: ast::ExpressionKind::Literal(ast::Literal::Unit), + span: id.span(), + } + } else { + id_expr(id) + } + }) + .collect(); let mut block_stmts = vec![ast::Statement { kind: ast::StatementKind::Let(let_stmt.clone()), span: *span }]; - block_stmts.extend(vars.iter().map(|(id, _)| { - let var_id = self.insert_var(&id.0.contents); - build_assign_var_stmt(var_id, id_expr(id)) + block_stmts.extend(vars.iter().filter_map(|(id, _)| { + let var_id = self.insert_var(&id.0.contents)?; + Some(build_assign_var_stmt(var_id, id_expr(id))) })); block_stmts.push(ast::Statement { kind: ast::StatementKind::Expression(ast::Expression { @@ -422,21 +441,31 @@ impl DebugInstrumenter { let var_name = &for_stmt.identifier.0.contents; let var_id = self.insert_var(var_name); - let set_stmt = build_assign_var_stmt(var_id, id_expr(&for_stmt.identifier)); - let drop_stmt = build_drop_var_stmt(var_id, Span::empty(for_stmt.span.end())); + let set_and_drop_stmt = var_id.map(|var_id| { + ( + build_assign_var_stmt(var_id, id_expr(&for_stmt.identifier)), + build_drop_var_stmt(var_id, Span::empty(for_stmt.span.end())), + ) + }); self.walk_expr(&mut for_stmt.block); + + let mut statements = Vec::new(); + let block_statement = ast::Statement { + kind: ast::StatementKind::Semi(for_stmt.block.clone()), + span: for_stmt.block.span, + }; + + if let Some((set_stmt, drop_stmt)) = set_and_drop_stmt { + statements.push(set_stmt); + statements.push(block_statement); + statements.push(drop_stmt); + } else { + statements.push(block_statement); + } + for_stmt.block = ast::Expression { - kind: ast::ExpressionKind::Block(ast::BlockExpression { - statements: vec![ - set_stmt, - ast::Statement { - kind: ast::StatementKind::Semi(for_stmt.block.clone()), - span: for_stmt.block.span, - }, - drop_stmt, - ], - }), + kind: ast::ExpressionKind::Block(ast::BlockExpression { statements }), span: for_stmt.span, }; } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/comptime.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/comptime.rs index a27e2bf0163..962356d6dd9 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/comptime.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/comptime.rs @@ -329,8 +329,6 @@ impl<'context> Elaborator<'context> { push_arg(Value::TraitDefinition(trait_id)); } else { let (expr_id, expr_type) = interpreter.elaborator.elaborate_expression(arg); - push_arg(interpreter.evaluate(expr_id)?); - if let Err(UnificationError) = expr_type.unify(param_type) { return Err(InterpreterError::TypeMismatch { expected: param_type.clone(), @@ -338,6 +336,7 @@ impl<'context> Elaborator<'context> { location: arg_location, }); } + push_arg(interpreter.evaluate(expr_id)?); }; } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/mod.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/mod.rs index 20d27fbc9ac..478504a79be 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/mod.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/mod.rs @@ -440,6 +440,9 @@ impl<'context> Elaborator<'context> { // so we need to reintroduce the same IDs into scope here. 
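        // (Aside: `_` is intentionally never in scope; the patterns.rs change later in
        // this diff stops adding it there, which also lets several `_` parameters coexist
        // without a DuplicateDefinition error.)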
for parameter in &func_meta.parameter_idents { let name = self.interner.definition_name(parameter.id).to_owned(); + if name == "_" { + continue; + } let warn_if_unused = !(func_meta.trait_impl.is_some() && name == "self"); self.add_existing_variable_to_scope(name, parameter.clone(), warn_if_unused); } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/patterns.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/patterns.rs index 3928362db11..3fbdadbbee8 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/patterns.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/patterns.rs @@ -331,16 +331,18 @@ impl<'context> Elaborator<'context> { let resolver_meta = ResolverMeta { num_times_used: 0, ident: ident.clone(), warn_if_unused }; - let scope = self.scopes.get_mut_scope(); - let old_value = scope.add_key_value(name.clone(), resolver_meta); - - if !allow_shadowing { - if let Some(old_value) = old_value { - self.push_err(ResolverError::DuplicateDefinition { - name, - first_span: old_value.ident.location.span, - second_span: location.span, - }); + if name != "_" { + let scope = self.scopes.get_mut_scope(); + let old_value = scope.add_key_value(name.clone(), resolver_meta); + + if !allow_shadowing { + if let Some(old_value) = old_value { + self.push_err(ResolverError::DuplicateDefinition { + name, + first_span: old_value.ident.location.span, + second_span: location.span, + }); + } } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/types.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/types.rs index 7e06964b563..0404ae3c2c0 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/types.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/types.rs @@ -1321,11 +1321,23 @@ impl<'context> Elaborator<'context> { { Some(method_id) => Some(HirMethodReference::FuncId(method_id)), None => { - self.push_err(TypeCheckError::UnresolvedMethodCall { - method_name: method_name.to_string(), - object_type: object_type.clone(), - span, - }); + let has_field_with_function_type = + typ.borrow().get_fields_as_written().into_iter().any(|field| { + field.name.0.contents == method_name && field.typ.is_function() + }); + if has_field_with_function_type { + self.push_err(TypeCheckError::CannotInvokeStructFieldFunctionType { + method_name: method_name.to_string(), + object_type: object_type.clone(), + span, + }); + } else { + self.push_err(TypeCheckError::UnresolvedMethodCall { + method_name: method_name.to_string(), + object_type: object_type.clone(), + span, + }); + } None } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/errors.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/errors.rs index 80bd5247ee6..5c8e0a1b53e 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/errors.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/errors.rs @@ -223,11 +223,21 @@ impl<'a> From<&'a ResolverError> for Diagnostic { *span, ) } - ResolverError::VariableNotDeclared { name, span } => Diagnostic::simple_error( - format!("cannot find `{name}` in this scope "), - "not found in this scope".to_string(), - *span, - ), + ResolverError::VariableNotDeclared { name, span } => { + if name == "_" { + Diagnostic::simple_error( + "in expressions, `_` can only be used on the left-hand side of an assignment".to_string(), + "`_` not allowed here".to_string(), + *span, + ) + } else { + Diagnostic::simple_error( + format!("cannot find `{name}` in this scope"), + "not found in this 
scope".to_string(), + *span, + ) + } + }, ResolverError::PathIsNotIdent { span } => Diagnostic::simple_error( "cannot use path as an identifier".to_string(), String::new(), diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/errors.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/errors.rs index a6b6120986e..dfa431157e3 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/errors.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/errors.rs @@ -99,6 +99,8 @@ pub enum TypeCheckError { CannotMutateImmutableVariable { name: String, span: Span }, #[error("No method named '{method_name}' found for type '{object_type}'")] UnresolvedMethodCall { method_name: String, object_type: Type, span: Span }, + #[error("Cannot invoke function field '{method_name}' on type '{object_type}' as a method")] + CannotInvokeStructFieldFunctionType { method_name: String, object_type: Type, span: Span }, #[error("Integers must have the same signedness LHS is {sign_x:?}, RHS is {sign_y:?}")] IntegerSignedness { sign_x: Signedness, sign_y: Signedness, span: Span }, #[error("Integers must have the same bit width LHS is {bit_width_x}, RHS is {bit_width_y}")] @@ -511,6 +513,13 @@ impl<'a> From<&'a TypeCheckError> for Diagnostic { TypeCheckError::CyclicType { typ: _, span } => { Diagnostic::simple_error(error.to_string(), "Cyclic types have unlimited size and are prohibited in Noir".into(), *span) } + TypeCheckError::CannotInvokeStructFieldFunctionType { method_name, object_type, span } => { + Diagnostic::simple_error( + format!("Cannot invoke function field '{method_name}' on type '{object_type}' as a method"), + format!("to call the function stored in '{method_name}', surround the field access with parentheses: '(', ')'"), + *span, + ) + }, } } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir_def/types.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir_def/types.rs index 659fafbbcbb..2c9a44c079d 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir_def/types.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir_def/types.rs @@ -1087,6 +1087,14 @@ impl Type { } } + pub fn is_function(&self) -> bool { + match self.follow_bindings_shallow().as_ref() { + Type::Function(..) => true, + Type::Alias(alias_type, _) => alias_type.borrow().typ.is_function(), + _ => false, + } + } + /// True if this type can be used as a parameter to `main` or a contract function. /// This is only false for unsized types like slices or slices that do not make sense /// as a program input such as named generics or mutable references. diff --git a/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/ast.rs b/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/ast.rs index 8f6817dc15d..5d9b66f4f96 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/ast.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/ast.rs @@ -48,6 +48,12 @@ pub enum Expression { Continue, } +impl Expression { + pub fn is_array_or_slice_literal(&self) -> bool { + matches!(self, Expression::Literal(Literal::Array(_) | Literal::Slice(_))) + } +} + /// A definition is either a local (variable), function, or is a built-in /// function that will be generated or referenced by the compiler later. 
#[derive(Debug, Clone, PartialEq, Eq, Hash)] diff --git a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/expression.rs b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/expression.rs index 06f51b16842..e1ecc972eeb 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/expression.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/expression.rs @@ -428,8 +428,8 @@ impl<'a> Parser<'a> { Some(if self.eat_colon() { let expression = self.parse_expression_or_error(); (ident, expression) - } else if self.at(Token::Assign) { - // If we find '=' instead of ':', assume the user meant ':`, error and continue + } else if self.at(Token::DoubleColon) || self.at(Token::Assign) { + // If we find '=' or '::' instead of ':', assume the user meant ':`, error and continue self.expected_token(Token::Colon); self.bump(); let expression = self.parse_expression_or_error(); @@ -1369,6 +1369,34 @@ mod tests { assert_eq!(expr.to_string(), "y"); } + #[test] + fn parses_constructor_recovers_if_double_colon_instead_of_colon() { + let src = " + Foo { x: 1, y:: z } + ^^ + "; + let (src, span) = get_source_with_error_span(src); + let mut parser = Parser::for_str(&src); + let expr = parser.parse_expression_or_error(); + + let error = get_single_error(&parser.errors, span); + assert_eq!(error.to_string(), "Expected a ':' but found '::'"); + + let ExpressionKind::Constructor(mut constructor) = expr.kind else { + panic!("Expected constructor"); + }; + assert_eq!(constructor.typ.to_string(), "Foo"); + assert_eq!(constructor.fields.len(), 2); + + let (name, expr) = constructor.fields.remove(0); + assert_eq!(name.to_string(), "x"); + assert_eq!(expr.to_string(), "1"); + + let (name, expr) = constructor.fields.remove(0); + assert_eq!(name.to_string(), "y"); + assert_eq!(expr.to_string(), "z"); + } + #[test] fn parses_parses_if_true() { let src = "if true { 1 }"; diff --git a/noir/noir-repo/compiler/noirc_frontend/src/tests.rs b/noir/noir-repo/compiler/noirc_frontend/src/tests.rs index 605236c8dda..cba29d58ea3 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/tests.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/tests.rs @@ -3752,6 +3752,35 @@ fn allows_struct_with_generic_infix_type_as_main_input_3() { assert_no_errors(src); } +#[test] +fn errors_with_better_message_when_trying_to_invoke_struct_field_that_is_a_function() { + let src = r#" + pub struct Foo { + wrapped: fn(Field) -> bool, + } + + impl Foo { + fn call(self) -> bool { + self.wrapped(1) + } + } + + fn main() {} + "#; + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + + let CompilationError::TypeError(TypeCheckError::CannotInvokeStructFieldFunctionType { + method_name, + .. + }) = &errors[0].0 + else { + panic!("Expected a 'CannotInvokeStructFieldFunctionType' error, got {:?}", errors[0].0); + }; + + assert_eq!(method_name, "wrapped"); +} + fn test_disallows_attribute_on_impl_method( attr: &str, check_error: impl FnOnce(&CompilationError), @@ -3845,3 +3874,33 @@ fn disallows_export_attribute_on_trait_impl_method() { )); }); } + +#[test] +fn allows_multiple_underscore_parameters() { + let src = r#" + pub fn foo(_: i32, _: i64) {} + + fn main() {} + "#; + assert_no_errors(src); +} + +#[test] +fn disallows_underscore_on_right_hand_side() { + let src = r#" + fn main() { + let _ = 1; + let _x = _; + } + "#; + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + + let CompilationError::ResolverError(ResolverError::VariableNotDeclared { name, .. 
}) = + &errors[0].0 + else { + panic!("Expected a VariableNotDeclared error, got {:?}", errors[0].0); + }; + + assert_eq!(name, "_"); +} diff --git a/noir/noir-repo/compiler/noirc_frontend/src/tests/metaprogramming.rs b/noir/noir-repo/compiler/noirc_frontend/src/tests/metaprogramming.rs index 82c40203244..89a049ebc9d 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/tests/metaprogramming.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/tests/metaprogramming.rs @@ -141,3 +141,23 @@ fn errors_if_macros_inject_functions_with_name_collisions() { ) if contents == "foo" )); } + +#[test] +fn uses_correct_type_for_attribute_arguments() { + let src = r#" + #[foo(32)] + comptime fn foo(_f: FunctionDefinition, i: u32) { + let y: u32 = 1; + let _ = y == i; + } + + #[bar([0; 2])] + comptime fn bar(_f: FunctionDefinition, i: [u32; 2]) { + let y: u32 = 1; + let _ = y == i[0]; + } + + fn main() {} + "#; + assert_no_errors(src); +} diff --git a/noir/noir-repo/compiler/noirc_printable_type/src/lib.rs b/noir/noir-repo/compiler/noirc_printable_type/src/lib.rs index 5ab04c6f576..838a2472125 100644 --- a/noir/noir-repo/compiler/noirc_printable_type/src/lib.rs +++ b/noir/noir-repo/compiler/noirc_printable_type/src/lib.rs @@ -69,6 +69,9 @@ pub enum PrintableValueDisplay { #[derive(Debug, Error)] pub enum ForeignCallError { + #[error("No handler could be found for foreign call `{0}`")] + NoHandler(String), + #[error("Foreign call inputs needed for execution are missing")] MissingForeignCallInputs, diff --git a/noir/noir-repo/compiler/wasm/Cargo.toml b/noir/noir-repo/compiler/wasm/Cargo.toml index c8b8c3bb06e..9951b23f609 100644 --- a/noir/noir-repo/compiler/wasm/Cargo.toml +++ b/noir/noir-repo/compiler/wasm/Cargo.toml @@ -1,10 +1,12 @@ [package] name = "noir_wasm" +description = "A JS interface to the Noir compiler" version.workspace = true authors.workspace = true edition.workspace = true rust-version.workspace = true license.workspace = true +repository.workspace = true [lints] workspace = true @@ -42,4 +44,4 @@ getrandom = { workspace = true, features = ["js"] } rust-embed = { workspace = true, features = ["debug-embed"] } [build-dependencies] -build-data.workspace = true \ No newline at end of file +build-data.workspace = true diff --git a/noir/noir-repo/compiler/wasm/LICENSE-APACHE b/noir/noir-repo/compiler/wasm/LICENSE-APACHE new file mode 100644 index 00000000000..261eeb9e9f8 --- /dev/null +++ b/noir/noir-repo/compiler/wasm/LICENSE-APACHE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
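Aside: the `ForeignCallError::NoHandler` variant added to `noirc_printable_type` above is what lets the decomposed executors in `nargo/src/foreign_calls` (added further below) be chained: a layer that does not recognise a call returns `NoHandler` instead of guessing, and the caller falls through to the next layer. A minimal, self-contained sketch of that dispatch pattern, with the generic parameters written out explicitly; `try_layers` is a hypothetical helper for illustration, not part of nargo:

```rust
use acvm::{acir::brillig::ForeignCallResult, pwg::ForeignCallWaitInfo, AcirField};
use nargo::foreign_calls::ForeignCallExecutor;
use noirc_printable_type::ForeignCallError;

/// Try `first`; if it reports that it has no handler for this call, fall
/// through to `second`. Any other outcome (a response or a real error)
/// is returned unchanged.
fn try_layers<F: AcirField>(
    first: &mut impl ForeignCallExecutor<F>,
    second: &mut impl ForeignCallExecutor<F>,
    call: &ForeignCallWaitInfo<F>,
) -> Result<ForeignCallResult<F>, ForeignCallError> {
    match first.execute(call) {
        Err(ForeignCallError::NoHandler(_)) => second.execute(call),
        response_or_error => response_or_error,
    }
}
```

This is the same fall-through that `DefaultForeignCallExecutor::execute` performs below, first against the mocker and then against any external RPC resolver.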
diff --git a/noir/noir-repo/compiler/wasm/LICENSE-MIT b/noir/noir-repo/compiler/wasm/LICENSE-MIT new file mode 100644 index 00000000000..a93d7f55c8e --- /dev/null +++ b/noir/noir-repo/compiler/wasm/LICENSE-MIT @@ -0,0 +1,21 @@ +MIT License + + Copyright (c) 2021-2023 noir-lang + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE. diff --git a/noir/noir-repo/compiler/wasm/tsconfig.json b/noir/noir-repo/compiler/wasm/tsconfig.json index d2ae58b8fc9..42c7396aa83 100644 --- a/noir/noir-repo/compiler/wasm/tsconfig.json +++ b/noir/noir-repo/compiler/wasm/tsconfig.json @@ -18,4 +18,4 @@ "allowJs": true, }, "exclude": ["node_modules"] -} \ No newline at end of file +} diff --git a/noir/noir-repo/cspell.json b/noir/noir-repo/cspell.json index 36bba737cd7..5c707e92e21 100644 --- a/noir/noir-repo/cspell.json +++ b/noir/noir-repo/cspell.json @@ -106,6 +106,7 @@ "Guillaume", "gzipped", "hasher", + "heaptrack", "hexdigit", "higher-kinded", "Hindley-Milner", @@ -154,6 +155,7 @@ "nargo", "neovim", "newtype", + "nextest", "nightlies", "nixpkgs", "noirc", diff --git a/noir/noir-repo/docs/docs/noir/concepts/data_types/integers.md b/noir/noir-repo/docs/docs/noir/concepts/data_types/integers.md index f3badde62be..41a823646dd 100644 --- a/noir/noir-repo/docs/docs/noir/concepts/data_types/integers.md +++ b/noir/noir-repo/docs/docs/noir/concepts/data_types/integers.md @@ -79,7 +79,7 @@ fn main() { You can construct a U128 from its limbs: ```rust fn main(x: u64, y: u64) { - let x = U128::from_u64s_be(x,y); + let z = U128::from_u64s_be(x,y); assert(z.hi == x as Field); assert(z.lo == y as Field); } diff --git a/noir/noir-repo/docs/docs/noir/standard_library/containers/boundedvec.md b/noir/noir-repo/docs/docs/noir/standard_library/containers/boundedvec.md index 98b7d584033..4efb1e4ea0f 100644 --- a/noir/noir-repo/docs/docs/noir/standard_library/containers/boundedvec.md +++ b/noir/noir-repo/docs/docs/noir/standard_library/containers/boundedvec.md @@ -246,6 +246,42 @@ Example: let bounded_vec: BoundedVec = BoundedVec::from_array([1, 2, 3]) ``` +### from_parts + +```rust +pub fn from_parts(mut array: [T; MaxLen], len: u32) -> Self +``` + +Creates a new BoundedVec from the given array and length. +The given length must be less than or equal to the length of the array. + +This function will zero out any elements at or past index `len` of `array`. +This incurs an extra runtime cost of O(MaxLen). If you are sure your array is +zeroed after that index, you can use `from_parts_unchecked` to remove the extra loop. 
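+For instance, because elements at or past `len` are zeroed, two vectors built
+from arrays that differ only in that region compare equal (this mirrors the
+stdlib test included below; the concrete values here are illustrative):
+
+```rust
+let a: BoundedVec<Field, 4> = BoundedVec::from_parts([1, 2, 3, 9], 3);
+let b: BoundedVec<Field, 4> = BoundedVec::from_parts([1, 2, 3, 7], 3);
+assert_eq(a, b); // equal: index 3 was zeroed out in both
+```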
+ +Example: + +#include_code from-parts noir_stdlib/src/collections/bounded_vec.nr rust + +### from_parts_unchecked + +```rust +pub fn from_parts_unchecked(array: [T; MaxLen], len: u32) -> Self +``` + +Creates a new BoundedVec from the given array and length. +The given length must be less than or equal to the length of the array. + +This function is unsafe because it expects all elements past the `len` index +of `array` to be zeroed, but does not check for this internally. Use `from_parts` +for a safe version of this function which does zero out any indices past the +given length. Invalidating this assumption can notably cause `BoundedVec::eq` +to give incorrect results since it will check even elements past `len`. + +Example: + +#include_code from-parts-unchecked noir_stdlib/src/collections/bounded_vec.nr rust + ### map ```rust diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.33.0/index.mdx b/noir/noir-repo/docs/versioned_docs/version-v0.33.0/index.mdx index a6bd306f91d..9ed9662b0b9 100644 --- a/noir/noir-repo/docs/versioned_docs/version-v0.33.0/index.mdx +++ b/noir/noir-repo/docs/versioned_docs/version-v0.33.0/index.mdx @@ -51,7 +51,7 @@ Noir can be used both in complex cloud-based backends and in user's smartphones, Aztec Contracts leverage Noir to allow for the storage and execution of private information. Writing an Aztec Contract is as easy as writing Noir, and Aztec developers can easily interact with the network storage and execution through the [Aztec.nr](https://docs.aztec.network/developers/contracts/main) library. - Soliditry Verifier Example + Solidity Verifier Example Noir can auto-generate Solidity verifier contracts that verify Noir proofs. This allows for non-interactive verification of proofs containing private information in an immutable system. This feature powers a multitude of use-case scenarios, from P2P chess tournaments, to [Aztec Layer-2 Blockchain](https://docs.aztec.network/) diff --git a/noir/noir-repo/noir_stdlib/src/collections/bounded_vec.nr b/noir/noir-repo/noir_stdlib/src/collections/bounded_vec.nr index f33890f197e..0ad39c518c4 100644 --- a/noir/noir-repo/noir_stdlib/src/collections/bounded_vec.nr +++ b/noir/noir-repo/noir_stdlib/src/collections/bounded_vec.nr @@ -420,6 +420,58 @@ impl BoundedVec { } ret } + + /// Creates a new BoundedVec from the given array and length. + /// The given length must be less than or equal to the length of the array. + /// + /// This function will zero out any elements at or past index `len` of `array`. + /// This incurs an extra runtime cost of O(MaxLen). If you are sure your array is + /// zeroed after that index, you can use `from_parts_unchecked` to remove the extra loop. + /// + /// Example: + /// + /// ```noir + /// let vec: BoundedVec = BoundedVec::from_parts([1, 2, 3, 0], 3); + /// assert_eq(vec.len(), 3); + /// ``` + pub fn from_parts(mut array: [T; MaxLen], len: u32) -> Self { + assert(len <= MaxLen); + let zeroed = crate::mem::zeroed(); + for i in 0..MaxLen { + if i >= len { + array[i] = zeroed; + } + } + BoundedVec { storage: array, len } + } + + /// Creates a new BoundedVec from the given array and length. + /// The given length must be less than or equal to the length of the array. + /// + /// This function is unsafe because it expects all elements past the `len` index + /// of `array` to be zeroed, but does not check for this internally. Use `from_parts` + /// for a safe version of this function which does zero out any indices past the + /// given length. 
Invalidating this assumption can notably cause `BoundedVec::eq` + /// to give incorrect results since it will check even elements past `len`. + /// + /// Example: + /// + /// ```noir + /// let vec: BoundedVec = BoundedVec::from_parts_unchecked([1, 2, 3, 0], 3); + /// assert_eq(vec.len(), 3); + /// + /// // invalid use! + /// let vec1: BoundedVec = BoundedVec::from_parts_unchecked([1, 2, 3, 1], 3); + /// let vec2: BoundedVec = BoundedVec::from_parts_unchecked([1, 2, 3, 2], 3); + /// + /// // both vecs have length 3 so we'd expect them to be equal, but this + /// // fails because elements past the length are still checked in eq + /// assert_eq(vec1, vec2); // fails + /// ``` + pub fn from_parts_unchecked(array: [T; MaxLen], len: u32) -> Self { + assert(len <= MaxLen); + BoundedVec { storage: array, len } + } } impl Eq for BoundedVec @@ -431,7 +483,11 @@ where // // We make the assumption that the user has used the proper interface for working with `BoundedVec`s // rather than directly manipulating the internal fields as this can result in an inconsistent internal state. - (self.len == other.len) & (self.storage == other.storage) + if self.len == other.len { + self.storage == other.storage + } else { + false + } } } @@ -598,4 +654,38 @@ mod bounded_vec_tests { assert(bounded_vec1 != bounded_vec2); } } + + mod from_parts { + use crate::collections::bounded_vec::BoundedVec; + + #[test] + fn from_parts() { + // docs:start:from-parts + let vec: BoundedVec = BoundedVec::from_parts([1, 2, 3, 0], 3); + assert_eq(vec.len(), 3); + + // Any elements past the given length are zeroed out, so these + // two BoundedVecs will be completely equal + let vec1: BoundedVec = BoundedVec::from_parts([1, 2, 3, 1], 3); + let vec2: BoundedVec = BoundedVec::from_parts([1, 2, 3, 2], 3); + assert_eq(vec1, vec2); + // docs:end:from-parts + } + + #[test] + fn from_parts_unchecked() { + // docs:start:from-parts-unchecked + let vec: BoundedVec = BoundedVec::from_parts_unchecked([1, 2, 3, 0], 3); + assert_eq(vec.len(), 3); + + // invalid use! 
+ let vec1: BoundedVec = BoundedVec::from_parts_unchecked([1, 2, 3, 1], 3); + let vec2: BoundedVec = BoundedVec::from_parts_unchecked([1, 2, 3, 2], 3); + + // both vecs have length 3 so we'd expect them to be equal, but this + // fails because elements past the length are still checked in eq + assert(vec1 != vec2); + // docs:end:from-parts-unchecked + } + } } diff --git a/noir/noir-repo/noir_stdlib/src/hash/poseidon2.nr b/noir/noir-repo/noir_stdlib/src/hash/poseidon2.nr index f2167c43c2c..419f07a2aca 100644 --- a/noir/noir-repo/noir_stdlib/src/hash/poseidon2.nr +++ b/noir/noir-repo/noir_stdlib/src/hash/poseidon2.nr @@ -13,11 +13,7 @@ pub struct Poseidon2 { impl Poseidon2 { #[no_predicates] pub fn hash(input: [Field; N], message_size: u32) -> Field { - if message_size == N { - Poseidon2::hash_internal(input, N, false) - } else { - Poseidon2::hash_internal(input, message_size, true) - } + Poseidon2::hash_internal(input, message_size, message_size != N) } pub(crate) fn new(iv: Field) -> Poseidon2 { diff --git a/noir/noir-repo/test_programs/execution_success/array_dedup_regression/Nargo.toml b/noir/noir-repo/test_programs/execution_success/array_dedup_regression/Nargo.toml new file mode 100644 index 00000000000..16a708743ed --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/array_dedup_regression/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "array_dedup_regression" +type = "bin" +authors = [""] + +[dependencies] diff --git a/noir/noir-repo/test_programs/execution_success/array_dedup_regression/Prover.toml b/noir/noir-repo/test_programs/execution_success/array_dedup_regression/Prover.toml new file mode 100644 index 00000000000..3aea0c58ce5 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/array_dedup_regression/Prover.toml @@ -0,0 +1 @@ +x = 0 diff --git a/noir/noir-repo/test_programs/execution_success/array_dedup_regression/src/main.nr b/noir/noir-repo/test_programs/execution_success/array_dedup_regression/src/main.nr new file mode 100644 index 00000000000..5506d55b9e7 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/array_dedup_regression/src/main.nr @@ -0,0 +1,21 @@ +unconstrained fn main(x: u32) { + let a1 = [1, 2, 3, 4, 5]; + + for i in 0..5 { + let mut a2 = [1, 2, 3, 4, 5]; + a2[x + i] = 128; + println(a2); + + if i != 0 { + assert(a2[x + i - 1] != 128); + } + } + + // Can't use `== [1, 2, 3, 4, 5]` here, that make_array may get + // deduplicated to equal a1 in the bugged version + assert_eq(a1[0], 1); + assert_eq(a1[1], 2); + assert_eq(a1[2], 3); + assert_eq(a1[3], 4); + assert_eq(a1[4], 5); +} diff --git a/noir/noir-repo/test_programs/execution_success/inline_decompose_hint_brillig_call/Nargo.toml b/noir/noir-repo/test_programs/execution_success/inline_decompose_hint_brillig_call/Nargo.toml new file mode 100644 index 00000000000..ecac2dfb197 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/inline_decompose_hint_brillig_call/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "inline_decompose_hint_brillig_call" +version = "0.1.0" +type = "bin" +authors = [""] + +[dependencies] diff --git a/noir/noir-repo/test_programs/execution_success/inline_decompose_hint_brillig_call/src/main.nr b/noir/noir-repo/test_programs/execution_success/inline_decompose_hint_brillig_call/src/main.nr new file mode 100644 index 00000000000..e500f0f976d --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/inline_decompose_hint_brillig_call/src/main.nr @@ -0,0 +1,15 @@ +use std::embedded_curve_ops::{EmbeddedCurvePoint, 
EmbeddedCurveScalar, fixed_base_scalar_mul}; + +fn main() -> pub Field { + let pre_address = 0x23d95e303879a5d0bbef78ecbc335e559da37431f6dcd11da54ed375c2846813; + let (a, b) = std::field::bn254::decompose(pre_address); + let curve = EmbeddedCurveScalar { lo: a, hi: b }; + let key = fixed_base_scalar_mul(curve); + let point = EmbeddedCurvePoint { + x: 0x111223493147f6785514b1c195bb37a2589f22a6596d30bb2bb145fdc9ca8f1e, + y: 0x273bbffd678edce8fe30e0deafc4f66d58357c06fd4a820285294b9746c3be95, + is_infinite: false, + }; + let address_point = key.add(point); + address_point.x +} diff --git a/noir/noir-repo/test_programs/execution_success/loop_invariant_regression/src/main.nr b/noir/noir-repo/test_programs/execution_success/loop_invariant_regression/src/main.nr index 25f6e92f868..c28ce063116 100644 --- a/noir/noir-repo/test_programs/execution_success/loop_invariant_regression/src/main.nr +++ b/noir/noir-repo/test_programs/execution_success/loop_invariant_regression/src/main.nr @@ -2,6 +2,7 @@ // to be hoisted to the loop's pre-header block. fn main(x: u32, y: u32) { loop(4, x, y); + array_read_loop(4, x); } fn loop(upper_bound: u32, x: u32, y: u32) { @@ -11,3 +12,15 @@ fn loop(upper_bound: u32, x: u32, y: u32) { assert_eq(z, 12); } } + +fn array_read_loop(upper_bound: u32, x: u32) { + let arr = [2; 5]; + for i in 0..upper_bound { + for j in 0..upper_bound { + for _ in 0..upper_bound { + assert_eq(arr[i], x); + assert_eq(arr[j], x); + } + } + } +} diff --git a/noir/noir-repo/test_programs/execution_success/reference_counts/src/main.nr b/noir/noir-repo/test_programs/execution_success/reference_counts/src/main.nr index 7ab7de893fa..8de4d0f2508 100644 --- a/noir/noir-repo/test_programs/execution_success/reference_counts/src/main.nr +++ b/noir/noir-repo/test_programs/execution_success/reference_counts/src/main.nr @@ -1,10 +1,19 @@ +use std::mem::array_refcount; + fn main() { let mut array = [0, 1, 2]; assert_refcount(array, 1); - borrow(array, std::mem::array_refcount(array)); - borrow_mut(&mut array, std::mem::array_refcount(array)); - copy_mut(array, std::mem::array_refcount(array)); + borrow(array, array_refcount(array)); + borrow_mut(&mut array, array_refcount(array)); + copy_mut(array, array_refcount(array)); + + borrow_mut_two(&mut array, &mut array, array_refcount(array)); + + let mut u32_array = [0, 1, 2]; + let rc1 = array_refcount(array); + let rc2 = array_refcount(u32_array); + borrow_mut_two_separate(&mut array, &mut u32_array, rc1, rc2); } fn borrow(array: [Field; 3], rc_before_call: u32) { @@ -13,19 +22,48 @@ fn borrow(array: [Field; 3], rc_before_call: u32) { } fn borrow_mut(array: &mut [Field; 3], rc_before_call: u32) { - assert_refcount(*array, rc_before_call + 0); // Issue! This should be rc_before_call + 1 - array[0] = 5; + // Optimization: inc_rc isn't needed since there is only one array (`array`) + // of the same type that `array` can be modified through + assert_refcount(*array, rc_before_call + 0); + array[0] = 3; println(array[0]); } fn copy_mut(mut array: [Field; 3], rc_before_call: u32) { - assert_refcount(array, rc_before_call + 0); // Issue! This should be rc_before_call + 1 - array[0] = 6; + assert_refcount(array, rc_before_call + 1); + array[0] = 4; println(array[0]); } -fn assert_refcount(array: [Field; 3], expected: u32) { - let count = std::mem::array_refcount(array); +/// Borrow the same array mutably through both parameters, inc_rc is necessary here, although +/// only one is needed to bring the rc from 1 to 2. 
+fn borrow_mut_two(array1: &mut [Field; 3], array2: &mut [Field; 3], rc_before_call: u32) { + assert_refcount(*array1, rc_before_call + 1); + assert_refcount(*array2, rc_before_call + 1); + array1[0] = 5; + array2[0] = 6; + println(array1[0]); // array1 & 2 alias, so this should also print 6 + println(array2[0]); +} + +/// Borrow a different array: we should be able to reason that these types cannot be mutably +/// aliased since they're different types so we don't need any inc_rc instructions. +fn borrow_mut_two_separate( + array1: &mut [Field; 3], + array2: &mut [u32; 3], + rc_before_call1: u32, + rc_before_call2: u32, +) { + assert_refcount(*array1, rc_before_call1 + 0); + assert_refcount(*array2, rc_before_call2 + 0); + array1[0] = 7; + array2[0] = 8; + println(array1[0]); + println(array2[0]); +} + +fn assert_refcount(array: [T; 3], expected: u32) { + let count = array_refcount(array); // All refcounts are zero when running this as a constrained program if std::runtime::is_unconstrained() { diff --git a/noir/noir-repo/test_programs/gates_report_brillig.sh b/noir/noir-repo/test_programs/gates_report_brillig.sh old mode 100644 new mode 100755 diff --git a/noir/noir-repo/test_programs/gates_report_brillig_execution.sh b/noir/noir-repo/test_programs/gates_report_brillig_execution.sh old mode 100644 new mode 100755 diff --git a/noir/noir-repo/test_programs/memory_report.sh b/noir/noir-repo/test_programs/memory_report.sh new file mode 100755 index 00000000000..1b8274b76cc --- /dev/null +++ b/noir/noir-repo/test_programs/memory_report.sh @@ -0,0 +1,48 @@ +#!/usr/bin/env bash +set -e + +sudo apt-get install heaptrack + +NARGO="nargo" + + +# Tests to be profiled for memory report +tests_to_profile=("keccak256" "workspace" "regression_4709" "ram_blowup_regression") + +current_dir=$(pwd) +execution_success_path="$current_dir/execution_success" +test_dirs=$(ls $execution_success_path) + +FIRST="1" + +echo "{\"memory_reports\": [ " > memory_report.json + + +for test_name in ${tests_to_profile[@]}; do + full_path=$execution_success_path"/"$test_name + cd $full_path + + if [ $FIRST = "1" ] + then + FIRST="0" + else + echo " ," >> $current_dir"/memory_report.json" + fi + heaptrack --output $current_dir/$test_name"_heap" $NARGO compile --force + if test -f $current_dir/$test_name"_heap.gz"; + then + heaptrack --analyze $current_dir/$test_name"_heap.gz" > $current_dir/$test_name"_heap_analysis.txt" + rm $current_dir/$test_name"_heap.gz" + else + heaptrack --analyze $current_dir/$test_name"_heap.zst" > $current_dir/$test_name"_heap_analysis.txt" + rm $current_dir/$test_name"_heap.zst" + fi + consumption="$(grep 'peak heap memory consumption' $current_dir/$test_name'_heap_analysis.txt')" + len=${#consumption}-30 + peak=${consumption:30:len} + rm $current_dir/$test_name"_heap_analysis.txt" + echo -e " {\n \"artifact_name\":\"$test_name\",\n \"peak_memory\":\"$peak\"\n }" >> $current_dir"/memory_report.json" +done + +echo "]}" >> $current_dir"/memory_report.json" + diff --git a/noir/noir-repo/tooling/acvm_cli/src/cli/execute_cmd.rs b/noir/noir-repo/tooling/acvm_cli/src/cli/execute_cmd.rs index c453936568c..bf5969718e5 100644 --- a/noir/noir-repo/tooling/acvm_cli/src/cli/execute_cmd.rs +++ b/noir/noir-repo/tooling/acvm_cli/src/cli/execute_cmd.rs @@ -8,7 +8,7 @@ use clap::Args; use crate::cli::fs::inputs::{read_bytecode_from_file, read_inputs_from_file}; use crate::errors::CliError; -use nargo::ops::{execute_program, DefaultForeignCallExecutor}; +use nargo::{foreign_calls::DefaultForeignCallExecutor, 
ops::execute_program}; use super::fs::witness::{create_output_witness_string, save_witness_to_dir}; diff --git a/noir/noir-repo/tooling/debugger/src/foreign_calls.rs b/noir/noir-repo/tooling/debugger/src/foreign_calls.rs index 6a773a4b348..ecf27a22f29 100644 --- a/noir/noir-repo/tooling/debugger/src/foreign_calls.rs +++ b/noir/noir-repo/tooling/debugger/src/foreign_calls.rs @@ -3,7 +3,7 @@ use acvm::{ pwg::ForeignCallWaitInfo, AcirField, FieldElement, }; -use nargo::ops::{DefaultForeignCallExecutor, ForeignCallExecutor}; +use nargo::foreign_calls::{DefaultForeignCallExecutor, ForeignCallExecutor}; use noirc_artifacts::debug::{DebugArtifact, DebugVars, StackFrame}; use noirc_errors::debug_info::{DebugFnId, DebugVarId}; use noirc_printable_type::ForeignCallError; diff --git a/noir/noir-repo/tooling/lsp/src/requests/completion/tests.rs b/noir/noir-repo/tooling/lsp/src/requests/completion/tests.rs index 9306e38a48a..97c7ad86d5a 100644 --- a/noir/noir-repo/tooling/lsp/src/requests/completion/tests.rs +++ b/noir/noir-repo/tooling/lsp/src/requests/completion/tests.rs @@ -1586,6 +1586,54 @@ fn main() { assert_eq!(changed, expected); } + #[test] + async fn test_auto_import_inserts_after_last_use_in_nested_module() { + let src = r#"mod foo { + pub mod bar { + pub fn hello_world() {} + } +} + +mod baz { + fn qux() {} +} + +mod other { + use baz::qux; + + fn main() { + hel>|< + } +}"#; + + let expected = r#"mod foo { + pub mod bar { + pub fn hello_world() {} + } +} + +mod baz { + fn qux() {} +} + +mod other { + use baz::qux; + use super::foo::bar::hello_world; + + fn main() { + hel + } +}"#; + let mut items = get_completions(src).await; + assert_eq!(items.len(), 1); + + let item = items.remove(0); + + let changed = + apply_text_edits(&src.replace(">|<", ""), &item.additional_text_edits.unwrap()); + assert_eq!(changed, expected); + } + #[test] async fn test_does_not_auto_import_test_functions() { let src = r#" diff --git a/noir/noir-repo/tooling/lsp/src/requests/test_run.rs b/noir/noir-repo/tooling/lsp/src/requests/test_run.rs index 50c699bb6a6..937fdcc0a5e 100644 --- a/noir/noir-repo/tooling/lsp/src/requests/test_run.rs +++ b/noir/noir-repo/tooling/lsp/src/requests/test_run.rs @@ -101,6 +101,11 @@ fn on_test_run_request_inner( result: "fail".to_string(), message: Some(message), }, + TestStatus::Skipped => NargoTestRunResult { + id: params.id.clone(), + result: "skipped".to_string(), + message: None, + }, TestStatus::CompileError(diag) => NargoTestRunResult { id: params.id.clone(), result: "error".to_string(), diff --git a/noir/noir-repo/tooling/lsp/src/use_segment_positions.rs b/noir/noir-repo/tooling/lsp/src/use_segment_positions.rs index f9a3f429029..246ff653245 100644 --- a/noir/noir-repo/tooling/lsp/src/use_segment_positions.rs +++ b/noir/noir-repo/tooling/lsp/src/use_segment_positions.rs @@ -318,7 +318,7 @@ fn new_use_completion_item_additional_text_edits( request: UseCompletionItemAdditionTextEditsRequest, ) -> Vec { let line = request.auto_import_line as u32; - let character = (request.nesting * 4) as u32; + let character = 0; let indent = " ".repeat(request.nesting * 4); let mut newlines = "\n"; @@ -331,6 +331,6 @@ fn new_use_completion_item_additional_text_edits( vec![TextEdit { range: Range { start: Position { line, character }, end: Position { line, character } }, - new_text: format!("use {};{}{}", request.full_path, newlines, indent), + new_text: format!("{}use {};{}", indent, request.full_path, newlines), }] } diff --git a/noir/noir-repo/tooling/nargo/src/foreign_calls/mocker.rs 
b/noir/noir-repo/tooling/nargo/src/foreign_calls/mocker.rs new file mode 100644 index 00000000000..c93d16bbaf6 --- /dev/null +++ b/noir/noir-repo/tooling/nargo/src/foreign_calls/mocker.rs @@ -0,0 +1,176 @@ +use acvm::{ + acir::brillig::{ForeignCallParam, ForeignCallResult}, + pwg::ForeignCallWaitInfo, + AcirField, +}; +use noirc_printable_type::{decode_string_value, ForeignCallError}; +use serde::{Deserialize, Serialize}; + +use super::{ForeignCall, ForeignCallExecutor}; + +/// This struct represents an oracle mock. It can be used for testing programs that use oracles. +#[derive(Debug, PartialEq, Eq, Clone)] +struct MockedCall { + /// The id of the mock, used to update or remove it + id: usize, + /// The oracle it's mocking + name: String, + /// Optionally match the parameters + params: Option>>, + /// The parameters with which the mock was last called + last_called_params: Option>>, + /// The result to return when this mock is called + result: ForeignCallResult, + /// How many times should this mock be called before it is removed + times_left: Option, +} + +impl MockedCall { + fn new(id: usize, name: String) -> Self { + Self { + id, + name, + params: None, + last_called_params: None, + result: ForeignCallResult { values: vec![] }, + times_left: None, + } + } +} + +impl MockedCall { + fn matches(&self, name: &str, params: &[ForeignCallParam]) -> bool { + self.name == name && (self.params.is_none() || self.params.as_deref() == Some(params)) + } +} + +#[derive(Debug, Default)] +pub(crate) struct MockForeignCallExecutor { + /// Mocks have unique ids used to identify them in Noir, allowing to update or remove them. + last_mock_id: usize, + /// The registered mocks + mocked_responses: Vec>, +} + +impl MockForeignCallExecutor { + fn extract_mock_id( + foreign_call_inputs: &[ForeignCallParam], + ) -> Result<(usize, &[ForeignCallParam]), ForeignCallError> { + let (id, params) = + foreign_call_inputs.split_first().ok_or(ForeignCallError::MissingForeignCallInputs)?; + let id = + usize::try_from(id.unwrap_field().try_to_u64().expect("value does not fit into u64")) + .expect("value does not fit into usize"); + Ok((id, params)) + } + + fn find_mock_by_id(&self, id: usize) -> Option<&MockedCall> { + self.mocked_responses.iter().find(|response| response.id == id) + } + + fn find_mock_by_id_mut(&mut self, id: usize) -> Option<&mut MockedCall> { + self.mocked_responses.iter_mut().find(|response| response.id == id) + } + + fn parse_string(param: &ForeignCallParam) -> String { + let fields: Vec<_> = param.fields().to_vec(); + decode_string_value(&fields) + } +} + +impl Deserialize<'a>> ForeignCallExecutor + for MockForeignCallExecutor +{ + fn execute( + &mut self, + foreign_call: &ForeignCallWaitInfo, + ) -> Result, ForeignCallError> { + let foreign_call_name = foreign_call.function.as_str(); + match ForeignCall::lookup(foreign_call_name) { + Some(ForeignCall::CreateMock) => { + let mock_oracle_name = Self::parse_string(&foreign_call.inputs[0]); + assert!(ForeignCall::lookup(&mock_oracle_name).is_none()); + let id = self.last_mock_id; + self.mocked_responses.push(MockedCall::new(id, mock_oracle_name)); + self.last_mock_id += 1; + + Ok(F::from(id).into()) + } + Some(ForeignCall::SetMockParams) => { + let (id, params) = Self::extract_mock_id(&foreign_call.inputs)?; + self.find_mock_by_id_mut(id) + .unwrap_or_else(|| panic!("Unknown mock id {}", id)) + .params = Some(params.to_vec()); + + Ok(ForeignCallResult::default()) + } + Some(ForeignCall::GetMockLastParams) => { + let (id, _) = 
Self::extract_mock_id(&foreign_call.inputs)?; + let mock = + self.find_mock_by_id(id).unwrap_or_else(|| panic!("Unknown mock id {}", id)); + + let last_called_params = mock + .last_called_params + .clone() + .unwrap_or_else(|| panic!("Mock {} was never called", mock.name)); + + Ok(last_called_params.into()) + } + Some(ForeignCall::SetMockReturns) => { + let (id, params) = Self::extract_mock_id(&foreign_call.inputs)?; + self.find_mock_by_id_mut(id) + .unwrap_or_else(|| panic!("Unknown mock id {}", id)) + .result = ForeignCallResult { values: params.to_vec() }; + + Ok(ForeignCallResult::default()) + } + Some(ForeignCall::SetMockTimes) => { + let (id, params) = Self::extract_mock_id(&foreign_call.inputs)?; + let times = + params[0].unwrap_field().try_to_u64().expect("Invalid bit size of times"); + + self.find_mock_by_id_mut(id) + .unwrap_or_else(|| panic!("Unknown mock id {}", id)) + .times_left = Some(times); + + Ok(ForeignCallResult::default()) + } + Some(ForeignCall::ClearMock) => { + let (id, _) = Self::extract_mock_id(&foreign_call.inputs)?; + self.mocked_responses.retain(|response| response.id != id); + Ok(ForeignCallResult::default()) + } + _ => { + let mock_response_position = self + .mocked_responses + .iter() + .position(|response| response.matches(foreign_call_name, &foreign_call.inputs)); + + if let Some(response_position) = mock_response_position { + // If the program has registered a mocked response to this oracle call then we prefer responding + // with that. + + let mock = self + .mocked_responses + .get_mut(response_position) + .expect("Invalid position of mocked response"); + + mock.last_called_params = Some(foreign_call.inputs.clone()); + + let result = mock.result.values.clone(); + + if let Some(times_left) = &mut mock.times_left { + *times_left -= 1; + if *times_left == 0 { + self.mocked_responses.remove(response_position); + } + } + + Ok(result.into()) + } else { + Err(ForeignCallError::NoHandler(foreign_call_name.to_string())) + } + } + } + } +} diff --git a/noir/noir-repo/tooling/nargo/src/foreign_calls/mod.rs b/noir/noir-repo/tooling/nargo/src/foreign_calls/mod.rs new file mode 100644 index 00000000000..16ed71e11e3 --- /dev/null +++ b/noir/noir-repo/tooling/nargo/src/foreign_calls/mod.rs @@ -0,0 +1,146 @@ +use std::path::PathBuf; + +use acvm::{acir::brillig::ForeignCallResult, pwg::ForeignCallWaitInfo, AcirField}; +use mocker::MockForeignCallExecutor; +use noirc_printable_type::ForeignCallError; +use print::PrintForeignCallExecutor; +use rand::Rng; +use rpc::RPCForeignCallExecutor; +use serde::{Deserialize, Serialize}; + +pub(crate) mod mocker; +pub(crate) mod print; +pub(crate) mod rpc; + +pub trait ForeignCallExecutor { + fn execute( + &mut self, + foreign_call: &ForeignCallWaitInfo, + ) -> Result, ForeignCallError>; +} + +/// This enumeration represents the Brillig foreign calls that are natively supported by nargo. 
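+/// (On the Noir side these mock entries are typically driven through the
+/// stdlib's `OracleMock` test helper, e.g. `OracleMock::mock("rand").returns(5)`,
+/// rather than by invoking the `create_mock`/`set_mock_*` oracles directly.)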
+/// After resolution of a foreign call, nargo will restart execution of the ACVM +pub enum ForeignCall { + Print, + CreateMock, + SetMockParams, + GetMockLastParams, + SetMockReturns, + SetMockTimes, + ClearMock, +} + +impl std::fmt::Display for ForeignCall { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.name()) + } +} + +impl ForeignCall { + pub(crate) fn name(&self) -> &'static str { + match self { + ForeignCall::Print => "print", + ForeignCall::CreateMock => "create_mock", + ForeignCall::SetMockParams => "set_mock_params", + ForeignCall::GetMockLastParams => "get_mock_last_params", + ForeignCall::SetMockReturns => "set_mock_returns", + ForeignCall::SetMockTimes => "set_mock_times", + ForeignCall::ClearMock => "clear_mock", + } + } + + pub(crate) fn lookup(op_name: &str) -> Option { + match op_name { + "print" => Some(ForeignCall::Print), + "create_mock" => Some(ForeignCall::CreateMock), + "set_mock_params" => Some(ForeignCall::SetMockParams), + "get_mock_last_params" => Some(ForeignCall::GetMockLastParams), + "set_mock_returns" => Some(ForeignCall::SetMockReturns), + "set_mock_times" => Some(ForeignCall::SetMockTimes), + "clear_mock" => Some(ForeignCall::ClearMock), + _ => None, + } + } +} + +#[derive(Debug, Default)] +pub struct DefaultForeignCallExecutor { + /// The executor for any [`ForeignCall::Print`] calls. + printer: Option, + mocker: MockForeignCallExecutor, + external: Option, +} + +impl DefaultForeignCallExecutor { + pub fn new( + show_output: bool, + resolver_url: Option<&str>, + root_path: Option, + package_name: Option, + ) -> Self { + let id = rand::thread_rng().gen(); + let printer = if show_output { Some(PrintForeignCallExecutor) } else { None }; + let external_resolver = resolver_url.map(|resolver_url| { + RPCForeignCallExecutor::new(resolver_url, id, root_path, package_name) + }); + DefaultForeignCallExecutor { + printer, + mocker: MockForeignCallExecutor::default(), + external: external_resolver, + } + } +} + +impl Deserialize<'a>> ForeignCallExecutor + for DefaultForeignCallExecutor +{ + fn execute( + &mut self, + foreign_call: &ForeignCallWaitInfo, + ) -> Result, ForeignCallError> { + let foreign_call_name = foreign_call.function.as_str(); + match ForeignCall::lookup(foreign_call_name) { + Some(ForeignCall::Print) => { + if let Some(printer) = &mut self.printer { + printer.execute(foreign_call) + } else { + Ok(ForeignCallResult::default()) + } + } + Some( + ForeignCall::CreateMock + | ForeignCall::SetMockParams + | ForeignCall::GetMockLastParams + | ForeignCall::SetMockReturns + | ForeignCall::SetMockTimes + | ForeignCall::ClearMock, + ) => self.mocker.execute(foreign_call), + + None => { + // First check if there's any defined mock responses for this foreign call. + match self.mocker.execute(foreign_call) { + Err(ForeignCallError::NoHandler(_)) => (), + response_or_error => return response_or_error, + }; + + if let Some(external_resolver) = &mut self.external { + // If the user has registered an external resolver then we forward any remaining oracle calls there. + match external_resolver.execute(foreign_call) { + Err(ForeignCallError::NoHandler(_)) => (), + response_or_error => return response_or_error, + }; + } + + // If all executors have no handler for the given foreign call then we cannot + // return a correct response to the ACVM. 
The best we can do is to return an empty response, + // this allows us to ignore any foreign calls which exist solely to pass information from inside + // the circuit to the environment (e.g. custom logging) as the execution will still be able to progress. + // + // We optimistically return an empty response for all oracle calls as the ACVM will error + // should a response have been required. + Ok(ForeignCallResult::default()) + } + } + } +} diff --git a/noir/noir-repo/tooling/nargo/src/foreign_calls/print.rs b/noir/noir-repo/tooling/nargo/src/foreign_calls/print.rs new file mode 100644 index 00000000000..92fcd65ae28 --- /dev/null +++ b/noir/noir-repo/tooling/nargo/src/foreign_calls/print.rs @@ -0,0 +1,36 @@ +use acvm::{acir::brillig::ForeignCallResult, pwg::ForeignCallWaitInfo, AcirField}; +use noirc_printable_type::{ForeignCallError, PrintableValueDisplay}; + +use super::{ForeignCall, ForeignCallExecutor}; + +#[derive(Debug, Default)] +pub(crate) struct PrintForeignCallExecutor; + +impl ForeignCallExecutor for PrintForeignCallExecutor { + fn execute( + &mut self, + foreign_call: &ForeignCallWaitInfo, + ) -> Result, ForeignCallError> { + let foreign_call_name = foreign_call.function.as_str(); + match ForeignCall::lookup(foreign_call_name) { + Some(ForeignCall::Print) => { + let skip_newline = foreign_call.inputs[0].unwrap_field().is_zero(); + + let foreign_call_inputs = foreign_call + .inputs + .split_first() + .ok_or(ForeignCallError::MissingForeignCallInputs)? + .1; + + let display_values: PrintableValueDisplay = foreign_call_inputs.try_into()?; + let display_string = + format!("{display_values}{}", if skip_newline { "" } else { "\n" }); + + print!("{display_string}"); + + Ok(ForeignCallResult::default()) + } + _ => Err(ForeignCallError::NoHandler(foreign_call_name.to_string())), + } + } +} diff --git a/noir/noir-repo/tooling/nargo/src/foreign_calls/rpc.rs b/noir/noir-repo/tooling/nargo/src/foreign_calls/rpc.rs new file mode 100644 index 00000000000..0653eb1c7e3 --- /dev/null +++ b/noir/noir-repo/tooling/nargo/src/foreign_calls/rpc.rs @@ -0,0 +1,227 @@ +use std::path::PathBuf; + +use acvm::{acir::brillig::ForeignCallResult, pwg::ForeignCallWaitInfo, AcirField}; +use jsonrpc::{arg as build_json_rpc_arg, minreq_http::Builder, Client}; +use noirc_printable_type::ForeignCallError; +use serde::{Deserialize, Serialize}; + +use super::ForeignCallExecutor; + +#[derive(Debug)] +pub(crate) struct RPCForeignCallExecutor { + /// A randomly generated id for this `DefaultForeignCallExecutor`. + /// + /// This is used so that a single `external_resolver` can distinguish between requests from multiple + /// instantiations of `DefaultForeignCallExecutor`. + id: u64, + /// JSON RPC client to resolve foreign calls + external_resolver: Client, + /// Root path to the program or workspace in execution. + root_path: Option, + /// Name of the package in execution + package_name: Option, +} + +#[derive(Debug, Serialize, Deserialize)] +struct ResolveForeignCallRequest { + /// A session ID which allows the external RPC server to link this foreign call request to other foreign calls + /// for the same program execution. + /// + /// This is intended to allow a single RPC server to maintain state related to multiple program executions being + /// performed in parallel. + session_id: u64, + + #[serde(flatten)] + /// The foreign call which the external RPC server is to provide a response for. 
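+ /// With `#[serde(flatten)]` the fields of the foreign call itself are
+ /// inlined into the request object, so a request serializes roughly as
+ /// `{"session_id": 1, "function": "foo", "inputs": [...]}` (hypothetical
+ /// values), with `root_path` and `package_name` present only when set.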
+ function_call: ForeignCallWaitInfo, + + #[serde(skip_serializing_if = "Option::is_none")] + /// Root path to the program or workspace in execution. + root_path: Option, + #[serde(skip_serializing_if = "Option::is_none")] + /// Name of the package in execution + package_name: Option, +} + +impl RPCForeignCallExecutor { + pub(crate) fn new( + resolver_url: &str, + id: u64, + root_path: Option, + package_name: Option, + ) -> Self { + let mut transport_builder = + Builder::new().url(resolver_url).expect("Invalid oracle resolver URL"); + + if let Some(Ok(timeout)) = + std::env::var("NARGO_FOREIGN_CALL_TIMEOUT").ok().map(|timeout| timeout.parse()) + { + let timeout_duration = std::time::Duration::from_millis(timeout); + transport_builder = transport_builder.timeout(timeout_duration); + }; + let oracle_resolver = Client::with_transport(transport_builder.build()); + + RPCForeignCallExecutor { external_resolver: oracle_resolver, id, root_path, package_name } + } +} + +impl Deserialize<'a>> ForeignCallExecutor + for RPCForeignCallExecutor +{ + fn execute( + &mut self, + foreign_call: &ForeignCallWaitInfo, + ) -> Result, ForeignCallError> { + let encoded_params = vec![build_json_rpc_arg(ResolveForeignCallRequest { + session_id: self.id, + function_call: foreign_call.clone(), + root_path: self.root_path.clone().map(|path| path.to_str().unwrap().to_string()), + package_name: self.package_name.clone(), + })]; + + let req = self.external_resolver.build_request("resolve_foreign_call", &encoded_params); + + let response = self.external_resolver.send_request(req)?; + + let parsed_response: ForeignCallResult = response.result()?; + + Ok(parsed_response) + } +} + +#[cfg(test)] +mod tests { + use acvm::{ + acir::brillig::ForeignCallParam, brillig_vm::brillig::ForeignCallResult, + pwg::ForeignCallWaitInfo, FieldElement, + }; + use jsonrpc_core::Result as RpcResult; + use jsonrpc_derive::rpc; + use jsonrpc_http_server::{Server, ServerBuilder}; + + use super::{ForeignCallExecutor, RPCForeignCallExecutor, ResolveForeignCallRequest}; + + #[allow(unreachable_pub)] + #[rpc] + pub trait OracleResolver { + #[rpc(name = "resolve_foreign_call")] + fn resolve_foreign_call( + &self, + req: ResolveForeignCallRequest, + ) -> RpcResult>; + } + + struct OracleResolverImpl; + + impl OracleResolverImpl { + fn echo(&self, param: ForeignCallParam) -> ForeignCallResult { + vec![param].into() + } + + fn sum(&self, array: ForeignCallParam) -> ForeignCallResult { + let mut res: FieldElement = 0_usize.into(); + + for value in array.fields() { + res += value; + } + + res.into() + } + } + + impl OracleResolver for OracleResolverImpl { + fn resolve_foreign_call( + &self, + req: ResolveForeignCallRequest, + ) -> RpcResult> { + let response = match req.function_call.function.as_str() { + "sum" => self.sum(req.function_call.inputs[0].clone()), + "echo" => self.echo(req.function_call.inputs[0].clone()), + "id" => FieldElement::from(req.session_id as u128).into(), + + _ => panic!("unexpected foreign call"), + }; + Ok(response) + } + } + + fn build_oracle_server() -> (Server, String) { + let mut io = jsonrpc_core::IoHandler::new(); + io.extend_with(OracleResolverImpl.to_delegate()); + + // Choosing port 0 results in a random port being assigned. 
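+ // (The OS then assigns a free ephemeral port, so parallel test runs do not
+ // race for a fixed one; `server.address()` below reports the port chosen.)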
+ let server = ServerBuilder::new(io) + .start_http(&"127.0.0.1:0".parse().expect("Invalid address")) + .expect("Could not start server"); + + let url = format!("http://{}", server.address()); + (server, url) + } + + #[test] + fn test_oracle_resolver_echo() { + let (server, url) = build_oracle_server(); + + let mut executor = RPCForeignCallExecutor::new(&url, 1, None, None); + + let foreign_call: ForeignCallWaitInfo = ForeignCallWaitInfo { + function: "echo".to_string(), + inputs: vec![ForeignCallParam::Single(1_u128.into())], + }; + + let result = executor.execute(&foreign_call); + assert_eq!(result.unwrap(), ForeignCallResult { values: foreign_call.inputs }); + + server.close(); + } + + #[test] + fn test_oracle_resolver_sum() { + let (server, url) = build_oracle_server(); + + let mut executor = RPCForeignCallExecutor::new(&url, 2, None, None); + + let foreign_call: ForeignCallWaitInfo = ForeignCallWaitInfo { + function: "sum".to_string(), + inputs: vec![ForeignCallParam::Array(vec![1_usize.into(), 2_usize.into()])], + }; + + let result = executor.execute(&foreign_call); + assert_eq!(result.unwrap(), FieldElement::from(3_usize).into()); + + server.close(); + } + + #[test] + fn foreign_call_executor_id_is_persistent() { + let (server, url) = build_oracle_server(); + + let mut executor = RPCForeignCallExecutor::new(&url, 3, None, None); + + let foreign_call: ForeignCallWaitInfo = + ForeignCallWaitInfo { function: "id".to_string(), inputs: Vec::new() }; + + let result_1 = executor.execute(&foreign_call).unwrap(); + let result_2 = executor.execute(&foreign_call).unwrap(); + assert_eq!(result_1, result_2); + + server.close(); + } + + #[test] + fn oracle_resolver_rpc_can_distinguish_executors() { + let (server, url) = build_oracle_server(); + + let mut executor_1 = RPCForeignCallExecutor::new(&url, 4, None, None); + let mut executor_2 = RPCForeignCallExecutor::new(&url, 5, None, None); + + let foreign_call: ForeignCallWaitInfo = + ForeignCallWaitInfo { function: "id".to_string(), inputs: Vec::new() }; + + let result_1 = executor_1.execute(&foreign_call).unwrap(); + let result_2 = executor_2.execute(&foreign_call).unwrap(); + assert_ne!(result_1, result_2); + + server.close(); + } +} diff --git a/noir/noir-repo/tooling/nargo/src/lib.rs b/noir/noir-repo/tooling/nargo/src/lib.rs index 88f07e0c292..74b7f54d860 100644 --- a/noir/noir-repo/tooling/nargo/src/lib.rs +++ b/noir/noir-repo/tooling/nargo/src/lib.rs @@ -9,6 +9,7 @@ pub mod constants; pub mod errors; +pub mod foreign_calls; pub mod ops; pub mod package; pub mod workspace; diff --git a/noir/noir-repo/tooling/nargo/src/ops/check.rs b/noir/noir-repo/tooling/nargo/src/ops/check.rs index 14d629ab0f6..707353ccdad 100644 --- a/noir/noir-repo/tooling/nargo/src/ops/check.rs +++ b/noir/noir-repo/tooling/nargo/src/ops/check.rs @@ -2,8 +2,8 @@ use acvm::compiler::CircuitSimulator; use noirc_driver::{CompiledProgram, ErrorsAndWarnings}; use noirc_errors::{CustomDiagnostic, FileDiagnostic}; +/// Run each function through a circuit simulator to check that they are solvable. 
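+/// Each ACIR function is checked independently; any function the simulator
+/// reports as unsolvable is surfaced through the returned `ErrorsAndWarnings`.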
pub fn check_program(compiled_program: &CompiledProgram) -> Result<(), ErrorsAndWarnings> { - // Check if the program is solvable for (i, circuit) in compiled_program.program.functions.iter().enumerate() { let mut simulator = CircuitSimulator::default(); if !simulator.check_circuit(circuit) { diff --git a/noir/noir-repo/tooling/nargo/src/ops/execute.rs b/noir/noir-repo/tooling/nargo/src/ops/execute.rs index 09ef554d2aa..57116ec2efd 100644 --- a/noir/noir-repo/tooling/nargo/src/ops/execute.rs +++ b/noir/noir-repo/tooling/nargo/src/ops/execute.rs @@ -10,10 +10,9 @@ use acvm::{acir::circuit::Circuit, acir::native_types::WitnessMap}; use acvm::{AcirField, BlackBoxFunctionSolver}; use crate::errors::ExecutionError; +use crate::foreign_calls::ForeignCallExecutor; use crate::NargoError; -use super::foreign_calls::ForeignCallExecutor; - struct ProgramExecutor<'a, F, B: BlackBoxFunctionSolver, E: ForeignCallExecutor> { functions: &'a [Circuit], diff --git a/noir/noir-repo/tooling/nargo/src/ops/foreign_calls.rs b/noir/noir-repo/tooling/nargo/src/ops/foreign_calls.rs deleted file mode 100644 index 30785949a46..00000000000 --- a/noir/noir-repo/tooling/nargo/src/ops/foreign_calls.rs +++ /dev/null @@ -1,494 +0,0 @@ -use std::path::PathBuf; - -use acvm::{ - acir::brillig::{ForeignCallParam, ForeignCallResult}, - pwg::ForeignCallWaitInfo, - AcirField, -}; -use jsonrpc::{arg as build_json_rpc_arg, minreq_http::Builder, Client}; -use noirc_printable_type::{decode_string_value, ForeignCallError, PrintableValueDisplay}; -use rand::Rng; -use serde::{Deserialize, Serialize}; - -pub trait ForeignCallExecutor { - fn execute( - &mut self, - foreign_call: &ForeignCallWaitInfo, - ) -> Result, ForeignCallError>; -} - -/// This enumeration represents the Brillig foreign calls that are natively supported by nargo. -/// After resolution of a foreign call, nargo will restart execution of the ACVM -pub enum ForeignCall { - Print, - CreateMock, - SetMockParams, - GetMockLastParams, - SetMockReturns, - SetMockTimes, - ClearMock, -} - -impl std::fmt::Display for ForeignCall { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "{}", self.name()) - } -} - -impl ForeignCall { - pub(crate) fn name(&self) -> &'static str { - match self { - ForeignCall::Print => "print", - ForeignCall::CreateMock => "create_mock", - ForeignCall::SetMockParams => "set_mock_params", - ForeignCall::GetMockLastParams => "get_mock_last_params", - ForeignCall::SetMockReturns => "set_mock_returns", - ForeignCall::SetMockTimes => "set_mock_times", - ForeignCall::ClearMock => "clear_mock", - } - } - - pub(crate) fn lookup(op_name: &str) -> Option { - match op_name { - "print" => Some(ForeignCall::Print), - "create_mock" => Some(ForeignCall::CreateMock), - "set_mock_params" => Some(ForeignCall::SetMockParams), - "get_mock_last_params" => Some(ForeignCall::GetMockLastParams), - "set_mock_returns" => Some(ForeignCall::SetMockReturns), - "set_mock_times" => Some(ForeignCall::SetMockTimes), - "clear_mock" => Some(ForeignCall::ClearMock), - _ => None, - } - } -} - -/// This struct represents an oracle mock. It can be used for testing programs that use oracles. 
-#[derive(Debug, PartialEq, Eq, Clone)] -struct MockedCall { - /// The id of the mock, used to update or remove it - id: usize, - /// The oracle it's mocking - name: String, - /// Optionally match the parameters - params: Option>>, - /// The parameters with which the mock was last called - last_called_params: Option>>, - /// The result to return when this mock is called - result: ForeignCallResult, - /// How many times should this mock be called before it is removed - times_left: Option, -} - -impl MockedCall { - fn new(id: usize, name: String) -> Self { - Self { - id, - name, - params: None, - last_called_params: None, - result: ForeignCallResult { values: vec![] }, - times_left: None, - } - } -} - -impl MockedCall { - fn matches(&self, name: &str, params: &[ForeignCallParam]) -> bool { - self.name == name && (self.params.is_none() || self.params.as_deref() == Some(params)) - } -} - -#[derive(Debug, Default)] -pub struct DefaultForeignCallExecutor { - /// A randomly generated id for this `DefaultForeignCallExecutor`. - /// - /// This is used so that a single `external_resolver` can distinguish between requests from multiple - /// instantiations of `DefaultForeignCallExecutor`. - id: u64, - - /// Mocks have unique ids used to identify them in Noir, allowing to update or remove them. - last_mock_id: usize, - /// The registered mocks - mocked_responses: Vec>, - /// Whether to print [`ForeignCall::Print`] output. - show_output: bool, - /// JSON RPC client to resolve foreign calls - external_resolver: Option, - /// Root path to the program or workspace in execution. - root_path: Option, - /// Name of the package in execution - package_name: Option, -} - -#[derive(Debug, Serialize, Deserialize)] -struct ResolveForeignCallRequest { - /// A session ID which allows the external RPC server to link this foreign call request to other foreign calls - /// for the same program execution. - /// - /// This is intended to allow a single RPC server to maintain state related to multiple program executions being - /// performed in parallel. - session_id: u64, - - #[serde(flatten)] - /// The foreign call which the external RPC server is to provide a response for. - function_call: ForeignCallWaitInfo, - - #[serde(skip_serializing_if = "Option::is_none")] - /// Root path to the program or workspace in execution. 
-#[derive(Debug, Serialize, Deserialize)]
-struct ResolveForeignCallRequest<F> {
-    /// A session ID which allows the external RPC server to link this foreign call request to other foreign calls
-    /// for the same program execution.
-    ///
-    /// This is intended to allow a single RPC server to maintain state related to multiple program executions being
-    /// performed in parallel.
-    session_id: u64,
-
-    #[serde(flatten)]
-    /// The foreign call which the external RPC server is to provide a response for.
-    function_call: ForeignCallWaitInfo<F>,
-
-    #[serde(skip_serializing_if = "Option::is_none")]
-    /// Root path to the program or workspace in execution.
-    root_path: Option<String>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    /// Name of the package in execution
-    package_name: Option<String>,
-}
-
-impl<F> DefaultForeignCallExecutor<F> {
-    pub fn new(
-        show_output: bool,
-        resolver_url: Option<&str>,
-        root_path: Option<PathBuf>,
-        package_name: Option<String>,
-    ) -> Self {
-        let oracle_resolver = resolver_url.map(|resolver_url| {
-            let mut transport_builder =
-                Builder::new().url(resolver_url).expect("Invalid oracle resolver URL");
-
-            if let Some(Ok(timeout)) =
-                std::env::var("NARGO_FOREIGN_CALL_TIMEOUT").ok().map(|timeout| timeout.parse())
-            {
-                let timeout_duration = std::time::Duration::from_millis(timeout);
-                transport_builder = transport_builder.timeout(timeout_duration);
-            };
-            Client::with_transport(transport_builder.build())
-        });
-        DefaultForeignCallExecutor {
-            show_output,
-            external_resolver: oracle_resolver,
-            id: rand::thread_rng().gen(),
-            mocked_responses: Vec::new(),
-            last_mock_id: 0,
-            root_path,
-            package_name,
-        }
-    }
-}
-
-impl<F: AcirField> DefaultForeignCallExecutor<F> {
-    fn extract_mock_id(
-        foreign_call_inputs: &[ForeignCallParam<F>],
-    ) -> Result<(usize, &[ForeignCallParam<F>]), ForeignCallError> {
-        let (id, params) =
-            foreign_call_inputs.split_first().ok_or(ForeignCallError::MissingForeignCallInputs)?;
-        let id =
-            usize::try_from(id.unwrap_field().try_to_u64().expect("value does not fit into u64"))
-                .expect("value does not fit into usize");
-        Ok((id, params))
-    }
-
-    fn find_mock_by_id(&self, id: usize) -> Option<&MockedCall<F>> {
-        self.mocked_responses.iter().find(|response| response.id == id)
-    }
-
-    fn find_mock_by_id_mut(&mut self, id: usize) -> Option<&mut MockedCall<F>> {
-        self.mocked_responses.iter_mut().find(|response| response.id == id)
-    }
-
-    fn parse_string(param: &ForeignCallParam<F>) -> String {
-        let fields: Vec<_> = param.fields().to_vec();
-        decode_string_value(&fields)
-    }
-
-    fn execute_print(foreign_call_inputs: &[ForeignCallParam<F>]) -> Result<(), ForeignCallError> {
-        let skip_newline = foreign_call_inputs[0].unwrap_field().is_zero();
-
-        let foreign_call_inputs =
-            foreign_call_inputs.split_first().ok_or(ForeignCallError::MissingForeignCallInputs)?.1;
-        let display_string = Self::format_printable_value(foreign_call_inputs, skip_newline)?;
-
-        print!("{display_string}");
-
-        Ok(())
-    }
-
-    fn format_printable_value(
-        foreign_call_inputs: &[ForeignCallParam<F>],
-        skip_newline: bool,
-    ) -> Result<String, ForeignCallError> {
-        let display_values: PrintableValueDisplay<F> = foreign_call_inputs.try_into()?;
-
-        let result = format!("{display_values}{}", if skip_newline { "" } else { "\n" });
-
-        Ok(result)
-    }
-}
-
-impl<F: AcirField + Serialize + for<'a> Deserialize<'a>> ForeignCallExecutor<F>
-    for DefaultForeignCallExecutor<F>
-{
-    fn execute(
-        &mut self,
-        foreign_call: &ForeignCallWaitInfo<F>,
-    ) -> Result<ForeignCallResult<F>, ForeignCallError> {
-        let foreign_call_name = foreign_call.function.as_str();
-        match ForeignCall::lookup(foreign_call_name) {
-            Some(ForeignCall::Print) => {
-                if self.show_output {
-                    Self::execute_print(&foreign_call.inputs)?;
-                }
-                Ok(ForeignCallResult::default())
-            }
-            Some(ForeignCall::CreateMock) => {
-                let mock_oracle_name = Self::parse_string(&foreign_call.inputs[0]);
-                assert!(ForeignCall::lookup(&mock_oracle_name).is_none());
-                let id = self.last_mock_id;
-                self.mocked_responses.push(MockedCall::new(id, mock_oracle_name));
-                self.last_mock_id += 1;
-
-                Ok(F::from(id).into())
-            }
-            Some(ForeignCall::SetMockParams) => {
-                let (id, params) = Self::extract_mock_id(&foreign_call.inputs)?;
-                self.find_mock_by_id_mut(id)
-                    .unwrap_or_else(|| panic!("Unknown mock id {}", id))
-                    .params =
-                    Some(params.to_vec());
-
-                Ok(ForeignCallResult::default())
-            }
-            Some(ForeignCall::GetMockLastParams) => {
-                let (id, _) = Self::extract_mock_id(&foreign_call.inputs)?;
-                let mock =
-                    self.find_mock_by_id(id).unwrap_or_else(|| panic!("Unknown mock id {}", id));
-
-                let last_called_params = mock
-                    .last_called_params
-                    .clone()
-                    .unwrap_or_else(|| panic!("Mock {} was never called", mock.name));
-
-                Ok(last_called_params.into())
-            }
-            Some(ForeignCall::SetMockReturns) => {
-                let (id, params) = Self::extract_mock_id(&foreign_call.inputs)?;
-                self.find_mock_by_id_mut(id)
-                    .unwrap_or_else(|| panic!("Unknown mock id {}", id))
-                    .result = ForeignCallResult { values: params.to_vec() };
-
-                Ok(ForeignCallResult::default())
-            }
-            Some(ForeignCall::SetMockTimes) => {
-                let (id, params) = Self::extract_mock_id(&foreign_call.inputs)?;
-                let times =
-                    params[0].unwrap_field().try_to_u64().expect("Invalid bit size of times");
-
-                self.find_mock_by_id_mut(id)
-                    .unwrap_or_else(|| panic!("Unknown mock id {}", id))
-                    .times_left = Some(times);
-
-                Ok(ForeignCallResult::default())
-            }
-            Some(ForeignCall::ClearMock) => {
-                let (id, _) = Self::extract_mock_id(&foreign_call.inputs)?;
-                self.mocked_responses.retain(|response| response.id != id);
-                Ok(ForeignCallResult::default())
-            }
-            None => {
-                let mock_response_position = self
-                    .mocked_responses
-                    .iter()
-                    .position(|response| response.matches(foreign_call_name, &foreign_call.inputs));
-
-                if let Some(response_position) = mock_response_position {
-                    // If the program has registered a mocked response to this oracle call then we prefer responding
-                    // with that.
-
-                    let mock = self
-                        .mocked_responses
-                        .get_mut(response_position)
-                        .expect("Invalid position of mocked response");
-
-                    mock.last_called_params = Some(foreign_call.inputs.clone());
-
-                    let result = mock.result.values.clone();
-
-                    if let Some(times_left) = &mut mock.times_left {
-                        *times_left -= 1;
-                        if *times_left == 0 {
-                            self.mocked_responses.remove(response_position);
-                        }
-                    }
-
-                    Ok(result.into())
-                } else if let Some(external_resolver) = &self.external_resolver {
-                    // If the user has registered an external resolver then we forward any remaining oracle calls there.
-
-                    let encoded_params = vec![build_json_rpc_arg(ResolveForeignCallRequest {
-                        session_id: self.id,
-                        function_call: foreign_call.clone(),
-                        root_path: self
-                            .root_path
-                            .clone()
-                            .map(|path| path.to_str().unwrap().to_string()),
-                        package_name: self.package_name.clone(),
-                    })];
-
-                    let req =
-                        external_resolver.build_request("resolve_foreign_call", &encoded_params);
-
-                    let response = external_resolver.send_request(req)?;
-
-                    let parsed_response: ForeignCallResult<F> = response.result()?;
-
-                    Ok(parsed_response)
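// A small sketch of the times_left bookkeeping above: a mock with a call budget is
// consumed on each hit and dropped when the budget reaches zero, while an unbudgeted
// mock (None) survives indefinitely. Simplified types; assumed to mirror the removed
// code's behaviour.
struct Mock {
    id: usize,
    times_left: Option<u64>,
}

fn consume(mocks: &mut Vec<Mock>, position: usize) {
    if let Some(times_left) = &mut mocks[position].times_left {
        *times_left -= 1;
        if *times_left == 0 {
            mocks.remove(position);
        }
    }
}

fn main() {
    let mut mocks = vec![Mock { id: 0, times_left: Some(2) }, Mock { id: 1, times_left: None }];
    consume(&mut mocks, 0); // budget 2 -> 1, mock stays
    assert_eq!(mocks[0].times_left, Some(1));
    consume(&mut mocks, 0); // budget 1 -> 0, mock removed
    assert_eq!(mocks.len(), 1);
    assert_eq!(mocks[0].id, 1); // the unbudgeted mock survives any number of hits
}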
-                } else {
-                    // If there's no registered mock oracle response and no registered resolver then we cannot
-                    // return a correct response to the ACVM. The best we can do is to return an empty response,
-                    // this allows us to ignore any foreign calls which exist solely to pass information from inside
-                    // the circuit to the environment (e.g. custom logging) as the execution will still be able to progress.
-                    //
-                    // We optimistically return an empty response for all oracle calls as the ACVM will error
-                    // should a response have been required.
-                    Ok(ForeignCallResult::default())
-                }
-            }
-        }
-    }
-}
-
-#[cfg(test)]
-mod tests {
-    use acvm::{
-        acir::brillig::ForeignCallParam, brillig_vm::brillig::ForeignCallResult,
-        pwg::ForeignCallWaitInfo, FieldElement,
-    };
-    use jsonrpc_core::Result as RpcResult;
-    use jsonrpc_derive::rpc;
-    use jsonrpc_http_server::{Server, ServerBuilder};
-
-    use crate::ops::{DefaultForeignCallExecutor, ForeignCallExecutor};
-
-    use super::ResolveForeignCallRequest;
-
-    #[allow(unreachable_pub)]
-    #[rpc]
-    pub trait OracleResolver {
-        #[rpc(name = "resolve_foreign_call")]
-        fn resolve_foreign_call(
-            &self,
-            req: ResolveForeignCallRequest<FieldElement>,
-        ) -> RpcResult<ForeignCallResult<FieldElement>>;
-    }
-
-    struct OracleResolverImpl;
-
-    impl OracleResolverImpl {
-        fn echo(&self, param: ForeignCallParam<FieldElement>) -> ForeignCallResult<FieldElement> {
-            vec![param].into()
-        }
-
-        fn sum(&self, array: ForeignCallParam<FieldElement>) -> ForeignCallResult<FieldElement> {
-            let mut res: FieldElement = 0_usize.into();
-
-            for value in array.fields() {
-                res += value;
-            }
-
-            res.into()
-        }
-    }
-
-    impl OracleResolver for OracleResolverImpl {
-        fn resolve_foreign_call(
-            &self,
-            req: ResolveForeignCallRequest<FieldElement>,
-        ) -> RpcResult<ForeignCallResult<FieldElement>> {
-            let response = match req.function_call.function.as_str() {
-                "sum" => self.sum(req.function_call.inputs[0].clone()),
-                "echo" => self.echo(req.function_call.inputs[0].clone()),
-                "id" => FieldElement::from(req.session_id as u128).into(),
-
-                _ => panic!("unexpected foreign call"),
-            };
-            Ok(response)
-        }
-    }
-
-    fn build_oracle_server() -> (Server, String) {
-        let mut io = jsonrpc_core::IoHandler::new();
-        io.extend_with(OracleResolverImpl.to_delegate());
-
-        // Choosing port 0 results in a random port being assigned.
-        let server = ServerBuilder::new(io)
-            .start_http(&"127.0.0.1:0".parse().expect("Invalid address"))
-            .expect("Could not start server");
-
-        let url = format!("http://{}", server.address());
-        (server, url)
-    }
-
-    #[test]
-    fn test_oracle_resolver_echo() {
-        let (server, url) = build_oracle_server();
-
-        let mut executor =
-            DefaultForeignCallExecutor::<FieldElement>::new(false, Some(&url), None, None);
-
-        let foreign_call = ForeignCallWaitInfo {
-            function: "echo".to_string(),
-            inputs: vec![ForeignCallParam::Single(1_u128.into())],
-        };
-
-        let result = executor.execute(&foreign_call);
-        assert_eq!(result.unwrap(), ForeignCallResult { values: foreign_call.inputs });
-
-        server.close();
-    }
-
-    #[test]
-    fn test_oracle_resolver_sum() {
-        let (server, url) = build_oracle_server();
-
-        let mut executor = DefaultForeignCallExecutor::new(false, Some(&url), None, None);
-
-        let foreign_call = ForeignCallWaitInfo {
-            function: "sum".to_string(),
-            inputs: vec![ForeignCallParam::Array(vec![1_usize.into(), 2_usize.into()])],
-        };
-
-        let result = executor.execute(&foreign_call);
-        assert_eq!(result.unwrap(), FieldElement::from(3_usize).into());
-
-        server.close();
-    }
-
-    #[test]
-    fn foreign_call_executor_id_is_persistent() {
-        let (server, url) = build_oracle_server();
-
-        let mut executor =
-            DefaultForeignCallExecutor::<FieldElement>::new(false, Some(&url), None, None);
-
-        let foreign_call = ForeignCallWaitInfo { function: "id".to_string(), inputs: Vec::new() };
-
-        let result_1 = executor.execute(&foreign_call).unwrap();
-        let result_2 = executor.execute(&foreign_call).unwrap();
-        assert_eq!(result_1, result_2);
-
-        server.close();
-    }
-
-    #[test]
-    fn oracle_resolver_rpc_can_distinguish_executors() {
-        let (server, url) = build_oracle_server();
-
-        let mut executor_1 =
-            DefaultForeignCallExecutor::<FieldElement>::new(false, Some(&url), None, None);
-        let mut executor_2 =
-            DefaultForeignCallExecutor::<FieldElement>::new(false, Some(&url), None, None);
-
-        let foreign_call = ForeignCallWaitInfo { function: "id".to_string(), inputs: Vec::new() };
-
-        let result_1 = executor_1.execute(&foreign_call).unwrap();
-        let result_2 = executor_2.execute(&foreign_call).unwrap();
-        assert_ne!(result_1, result_2);
-
-        server.close();
-    }
-}
diff --git a/noir/noir-repo/tooling/nargo/src/ops/mod.rs b/noir/noir-repo/tooling/nargo/src/ops/mod.rs
index f70577a14f1..04efeb5a9ec 100644
--- a/noir/noir-repo/tooling/nargo/src/ops/mod.rs
+++ b/noir/noir-repo/tooling/nargo/src/ops/mod.rs
@@ -4,7 +4,6 @@ pub use self::compile::{
     compile_workspace, report_errors,
 };
 pub use self::execute::{execute_program, execute_program_with_profiling};
-pub use self::foreign_calls::{DefaultForeignCallExecutor, ForeignCall, ForeignCallExecutor};
 pub use self::optimize::{optimize_contract, optimize_program};
 pub use self::transform::{transform_contract, transform_program};
@@ -13,7 +12,6 @@ pub use self::test::{run_test, TestStatus};
 mod check;
 mod compile;
 mod execute;
-mod foreign_calls;
 mod optimize;
 mod test;
 mod transform;
diff --git a/noir/noir-repo/tooling/nargo/src/ops/test.rs b/noir/noir-repo/tooling/nargo/src/ops/test.rs
index 370a4235f61..e258627b522 100644
--- a/noir/noir-repo/tooling/nargo/src/ops/test.rs
+++ b/noir/noir-repo/tooling/nargo/src/ops/test.rs
@@ -1,27 +1,42 @@
 use std::path::PathBuf;
 
 use acvm::{
-    acir::native_types::{WitnessMap, WitnessStack},
-    BlackBoxFunctionSolver, FieldElement,
+    acir::{
+        brillig::ForeignCallResult,
+        native_types::{WitnessMap, WitnessStack},
+    },
+    pwg::ForeignCallWaitInfo,
+    AcirField, BlackBoxFunctionSolver, FieldElement,
 };
 use noirc_abi::Abi;
 use noirc_driver::{compile_no_check, CompileError, CompileOptions};
 use noirc_errors::{debug_info::DebugInfo, FileDiagnostic};
 use noirc_frontend::hir::{def_map::TestFunction, Context};
+use noirc_printable_type::ForeignCallError;
+use rand::Rng;
+use serde::{Deserialize, Serialize};
 
-use crate::{errors::try_to_diagnose_runtime_error, NargoError};
+use crate::{
+    errors::try_to_diagnose_runtime_error,
+    foreign_calls::{
+        mocker::MockForeignCallExecutor, print::PrintForeignCallExecutor,
+        rpc::RPCForeignCallExecutor, ForeignCall, ForeignCallExecutor,
+    },
+    NargoError,
+};
 
-use super::{execute_program, DefaultForeignCallExecutor};
+use super::execute_program;
 
 pub enum TestStatus {
     Pass,
     Fail { message: String, error_diagnostic: Option<FileDiagnostic> },
+    Skipped,
     CompileError(FileDiagnostic),
 }
 
 impl TestStatus {
     pub fn failed(&self) -> bool {
-        !matches!(self, TestStatus::Pass)
+        !matches!(self, TestStatus::Pass | TestStatus::Skipped)
     }
 }
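// A sketch of the semantics introduced above: Skipped is a third, non-failing
// outcome, so failed() must treat it like Pass. Mirror enum for illustration only;
// the real TestStatus also carries messages and diagnostics.
#[derive(Debug)]
enum Status {
    Pass,
    Fail,
    Skipped,
}

impl Status {
    fn failed(&self) -> bool {
        !matches!(self, Status::Pass | Status::Skipped)
    }
}

fn main() {
    assert!(!Status::Pass.failed());
    assert!(!Status::Skipped.failed()); // skipped tests no longer fail the run
    assert!(Status::Fail.failed());
}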
@@ -48,23 +63,42 @@ pub fn run_test<B: BlackBoxFunctionSolver<FieldElement>>(
     if test_function_has_no_arguments {
         // Run the backend to ensure the PWG evaluates functions like std::hash::pedersen,
         // otherwise constraints involving these expressions will not error.
+        let mut foreign_call_executor = TestForeignCallExecutor::new(
+            show_output,
+            foreign_call_resolver_url,
+            root_path,
+            package_name,
+        );
+
         let circuit_execution = execute_program(
             &compiled_program.program,
             WitnessMap::new(),
             blackbox_solver,
-            &mut DefaultForeignCallExecutor::new(
-                show_output,
-                foreign_call_resolver_url,
-                root_path,
-                package_name,
-            ),
+            &mut foreign_call_executor,
         );
-        test_status_program_compile_pass(
+
+        let status = test_status_program_compile_pass(
             test_function,
             compiled_program.abi,
             compiled_program.debug,
             circuit_execution,
-        )
+        );
+
+        let ignore_foreign_call_failures =
+            std::env::var("NARGO_IGNORE_TEST_FAILURES_FROM_FOREIGN_CALLS")
+                .is_ok_and(|var| &var == "true");
+
+        if let TestStatus::Fail { .. } = status {
+            if ignore_foreign_call_failures
+                && foreign_call_executor.encountered_unknown_foreign_call
+            {
+                TestStatus::Skipped
+            } else {
+                status
+            }
+        } else {
+            status
+        }
     } else {
         #[cfg(target_arch = "wasm32")]
         {
@@ -90,7 +124,7 @@ pub fn run_test<B: BlackBoxFunctionSolver<FieldElement>>(
                 program,
                 initial_witness,
                 blackbox_solver,
-                &mut DefaultForeignCallExecutor::<FieldElement>::new(
+                &mut TestForeignCallExecutor::<FieldElement>::new(
                     false,
                     foreign_call_resolver_url,
                     root_path.clone(),
@@ -215,3 +249,93 @@ fn check_expected_failure_message(
         error_diagnostic,
     }
 }
+
+/// A specialized foreign call executor which tracks whether it has encountered any unknown foreign calls
+struct TestForeignCallExecutor<F> {
+    /// The executor for any [`ForeignCall::Print`] calls.
+    printer: Option<PrintForeignCallExecutor>,
+    mocker: MockForeignCallExecutor<F>,
+    external: Option<RPCForeignCallExecutor>,
+
+    encountered_unknown_foreign_call: bool,
+}
+
+impl<F: Default> TestForeignCallExecutor<F> {
+    fn new(
+        show_output: bool,
+        resolver_url: Option<&str>,
+        root_path: Option<PathBuf>,
+        package_name: Option<String>,
+    ) -> Self {
+        let id = rand::thread_rng().gen();
+        let printer = if show_output { Some(PrintForeignCallExecutor) } else { None };
+        let external_resolver = resolver_url.map(|resolver_url| {
+            RPCForeignCallExecutor::new(resolver_url, id, root_path, package_name)
+        });
+        TestForeignCallExecutor {
+            printer,
+            mocker: MockForeignCallExecutor::default(),
+            external: external_resolver,
+            encountered_unknown_foreign_call: false,
+        }
+    }
+}
+
+impl<F: AcirField + Serialize + for<'a> Deserialize<'a>> ForeignCallExecutor<F>
+    for TestForeignCallExecutor<F>
+{
+    fn execute(
+        &mut self,
+        foreign_call: &ForeignCallWaitInfo<F>,
+    ) -> Result<ForeignCallResult<F>, ForeignCallError> {
+        // If the circuit has reached a new foreign call opcode then it can't have failed from any previous unknown foreign calls.
+        self.encountered_unknown_foreign_call = false;
+
+        let foreign_call_name = foreign_call.function.as_str();
+        match ForeignCall::lookup(foreign_call_name) {
+            Some(ForeignCall::Print) => {
+                if let Some(printer) = &mut self.printer {
+                    printer.execute(foreign_call)
+                } else {
+                    Ok(ForeignCallResult::default())
+                }
+            }
+
+            Some(
+                ForeignCall::CreateMock
+                | ForeignCall::SetMockParams
+                | ForeignCall::GetMockLastParams
+                | ForeignCall::SetMockReturns
+                | ForeignCall::SetMockTimes
+                | ForeignCall::ClearMock,
+            ) => self.mocker.execute(foreign_call),
+
+            None => {
+                // First check if there's any defined mock responses for this foreign call.
+                match self.mocker.execute(foreign_call) {
+                    Err(ForeignCallError::NoHandler(_)) => (),
+                    response_or_error => return response_or_error,
+                };
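// A condensed model of the dispatch order implemented above: try the mock layer,
// fall through on NoHandler, then try the RPC layer, and only then give up with an
// empty result while flagging the call as unknown. Hypothetical simplified
// signatures; the real executors are generic over the field element.
#[derive(Debug, PartialEq)]
enum CallError {
    NoHandler,
}

fn try_layer(layer: Option<&str>, call: &str) -> Result<String, CallError> {
    match layer {
        Some(known) if known == call => Ok(format!("handled {call}")),
        _ => Err(CallError::NoHandler),
    }
}

fn execute(mock: Option<&str>, rpc: Option<&str>, call: &str, unknown: &mut bool) -> String {
    for layer in [mock, rpc] {
        match try_layer(layer, call) {
            Err(CallError::NoHandler) => (), // fall through to the next layer
            Ok(response) => return response,
        }
    }
    *unknown = true; // remembered so the test harness can decide to skip
    String::new() // optimistic empty response; the ACVM errors if one was required
}

fn main() {
    let mut unknown = false;
    assert_eq!(execute(Some("rand"), None, "rand", &mut unknown), "handled rand");
    assert!(!unknown);
    assert_eq!(execute(None, None, "log", &mut unknown), "");
    assert!(unknown);
}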
+
+                if let Some(external_resolver) = &mut self.external {
+                    // If the user has registered an external resolver then we forward any remaining oracle calls there.
+                    match external_resolver.execute(foreign_call) {
+                        Err(ForeignCallError::NoHandler(_)) => (),
+                        response_or_error => return response_or_error,
+                    };
+                }
+
+                self.encountered_unknown_foreign_call = true;
+
+                // If all executors have no handler for the given foreign call then we cannot
+                // return a correct response to the ACVM. The best we can do is to return an empty response,
+                // this allows us to ignore any foreign calls which exist solely to pass information from inside
+                // the circuit to the environment (e.g. custom logging) as the execution will still be able to progress.
+                //
+                // We optimistically return an empty response for all oracle calls as the ACVM will error
+                // should a response have been required.
+                Ok(ForeignCallResult::default())
+            }
+        }
+    }
+}
diff --git a/noir/noir-repo/tooling/nargo/src/ops/transform.rs b/noir/noir-repo/tooling/nargo/src/ops/transform.rs
index 9255ac3e0ec..fdda368d150 100644
--- a/noir/noir-repo/tooling/nargo/src/ops/transform.rs
+++ b/noir/noir-repo/tooling/nargo/src/ops/transform.rs
@@ -6,6 +6,7 @@ use iter_extended::vecmap;
 use noirc_driver::{CompiledContract, CompiledProgram};
 use noirc_errors::debug_info::DebugInfo;
 
+/// Apply ACVM optimizations on the circuit.
 pub fn transform_program(
     mut compiled_program: CompiledProgram,
     expression_width: ExpressionWidth,
@@ -18,6 +19,7 @@ pub fn transform_program(
     compiled_program
 }
 
+/// Apply the optimizing transformation on each function in the contract.
 pub fn transform_contract(
     contract: CompiledContract,
     expression_width: ExpressionWidth,
@@ -25,7 +27,6 @@ pub fn transform_contract(
     let functions = vecmap(contract.functions, |mut func| {
         func.bytecode =
             transform_program_internal(func.bytecode, &mut func.debug, expression_width);
-
         func
     });
diff --git a/noir/noir-repo/tooling/nargo_cli/Cargo.toml b/noir/noir-repo/tooling/nargo_cli/Cargo.toml
index 02e669f5c68..5603b7f4fca 100644
--- a/noir/noir-repo/tooling/nargo_cli/Cargo.toml
+++ b/noir/noir-repo/tooling/nargo_cli/Cargo.toml
@@ -25,6 +25,7 @@ toml.workspace = true
 [dependencies]
 clap.workspace = true
 fm.workspace = true
+fxhash.workspace = true
 iter-extended.workspace = true
 nargo.workspace = true
 nargo_fmt.workspace = true
diff --git a/noir/noir-repo/tooling/nargo_cli/benches/criterion.rs b/noir/noir-repo/tooling/nargo_cli/benches/criterion.rs
index 488cbfcd243..51de97df139 100644
--- a/noir/noir-repo/tooling/nargo_cli/benches/criterion.rs
+++ b/noir/noir-repo/tooling/nargo_cli/benches/criterion.rs
@@ -115,7 +115,7 @@ fn criterion_test_execution(c: &mut Criterion, test_program_dir: &Path, force_br
     let artifacts = RefCell::new(None);
 
     let mut foreign_call_executor =
-        nargo::ops::DefaultForeignCallExecutor::new(false, None, None, None);
+        nargo::foreign_calls::DefaultForeignCallExecutor::new(false, None, None, None);
 
     c.bench_function(&benchmark_name, |b| {
         b.iter_batched(
diff --git a/noir/noir-repo/tooling/nargo_cli/build.rs b/noir/noir-repo/tooling/nargo_cli/build.rs
index 740e5ed2052..41b3c0c9cf7 100644
--- a/noir/noir-repo/tooling/nargo_cli/build.rs
+++ b/noir/noir-repo/tooling/nargo_cli/build.rs
@@ -60,13 +60,9 @@ const IGNORED_BRILLIG_TESTS: [&str; 11] = [
 ];
 
 /// Tests which aren't expected to work with the default inliner cases.
-const INLINER_MIN_OVERRIDES: [(&str, i64); 2] = [
+const INLINER_MIN_OVERRIDES: [(&str, i64); 1] = [
     // 0 works if PoseidonHasher::write is tagged as `inline_always`, otherwise 22.
     ("eddsa", 0),
-    // (#6583): The RcTracker in the DIE SSA pass is removing inc_rcs that are still needed.
- // This triggers differently depending on the optimization level (although all are wrong), - // so we arbitrarily only run with the inlined versions. - ("reference_counts", 0), ]; /// Some tests are expected to have warnings @@ -213,8 +209,13 @@ fn test_{test_name}(force_brillig: ForceBrillig, inliner_aggressiveness: Inliner nargo.arg("--program-dir").arg(test_program_dir); nargo.arg("{test_command}").arg("--force"); nargo.arg("--inliner-aggressiveness").arg(inliner_aggressiveness.0.to_string()); + if force_brillig.0 {{ nargo.arg("--force-brillig"); + + // Set the maximum increase so that part of the optimization is exercised (it might fail). + nargo.arg("--max-bytecode-increase-percent"); + nargo.arg("50"); }} {test_content} diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/compile_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/compile_cmd.rs index 304988ed516..ff6009981c7 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/compile_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/compile_cmd.rs @@ -65,6 +65,7 @@ pub(crate) fn run(args: CompileCommand, config: NargoConfig) -> Result<(), CliEr Ok(()) } +/// Continuously recompile the workspace on any Noir file change event. fn watch_workspace(workspace: &Workspace, compile_options: &CompileOptions) -> notify::Result<()> { let (tx, rx) = std::sync::mpsc::channel(); @@ -108,6 +109,8 @@ fn watch_workspace(workspace: &Workspace, compile_options: &CompileOptions) -> n Ok(()) } +/// Parse and compile the entire workspace, then report errors. +/// This is the main entry point used by all other commands that need compilation. pub(super) fn compile_workspace_full( workspace: &Workspace, compile_options: &CompileOptions, @@ -129,6 +132,8 @@ pub(super) fn compile_workspace_full( Ok(()) } +/// Compile binary and contract packages. +/// Returns the merged warnings or errors. fn compile_workspace( file_manager: &FileManager, parsed_files: &ParsedFiles, @@ -144,6 +149,7 @@ fn compile_workspace( // Compile all of the packages in parallel. let program_warnings_or_errors: CompilationResult<()> = compile_programs(file_manager, parsed_files, workspace, &binary_packages, compile_options); + let contract_warnings_or_errors: CompilationResult<()> = compiled_contracts( file_manager, parsed_files, @@ -164,6 +170,7 @@ fn compile_workspace( } } +/// Compile the given binary packages in the workspace. fn compile_programs( file_manager: &FileManager, parsed_files: &ParsedFiles, @@ -171,6 +178,8 @@ fn compile_programs( binary_packages: &[Package], compile_options: &CompileOptions, ) -> CompilationResult<()> { + // Load any existing artifact for a given package, _iff_ it was compiled with the same nargo version. + // The loaded circuit includes backend specific transformations, which might be different from the current target. let load_cached_program = |package| { let program_artifact_path = workspace.package_build_path(package); read_program_from_file(program_artifact_path) @@ -180,19 +189,45 @@ fn compile_programs( }; let compile_package = |package| { + let cached_program = load_cached_program(package); + + // Hash over the entire compiled program, including any post-compile transformations. + // This is used to detect whether `cached_program` is returned by `compile_program`. + let cached_hash = cached_program.as_ref().map(fxhash::hash64); + + // Compile the program, or use the cached artifacts if it matches. 
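// A sketch of the cache-hit test described in the comments above: hash the artifact
// loaded from disk, recompile, and hash the result; identical hashes mean the
// compiler returned the cached artifact unchanged, so the (possibly non-idempotent)
// backend transformations can be skipped. Std's DefaultHasher stands in here for the
// fxhash::hash64 the real code uses.
use std::hash::{DefaultHasher, Hash, Hasher};

fn hash64<T: Hash>(value: &T) -> u64 {
    let mut hasher = DefaultHasher::new();
    value.hash(&mut hasher);
    hasher.finish()
}

fn main() {
    let cached_program = Some(vec![1u8, 2, 3]); // pretend artifact bytecode
    let cached_hash = cached_program.as_ref().map(hash64);

    let recompiled = vec![1u8, 2, 3]; // compiler returned the cached artifact as-is
    if cached_hash == Some(hash64(&recompiled)) {
        println!("cache hit: skip transform/check/save (if the target width also matches)");
    } else {
        println!("cache miss: transform, check solvability, overwrite artifact");
    }
}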
let (program, warnings) = compile_program( file_manager, parsed_files, workspace, package, compile_options, - load_cached_program(package), + cached_program, )?; + // Choose the target width for the final, backend specific transformation. let target_width = get_target_width(package.expression_width, compile_options.expression_width); + + // If the compiled program is the same as the cached one, we don't apply transformations again, unless the target width has changed. + // The transformations might not be idempotent, which would risk creating witnesses that don't work with earlier versions, + // based on which we might have generated a verifier already. + if cached_hash == Some(fxhash::hash64(&program)) { + let width_matches = program + .program + .functions + .iter() + .all(|circuit| circuit.expression_width == target_width); + + if width_matches { + return Ok(((), warnings)); + } + } + // Run ACVM optimizations and set the target width. let program = nargo::ops::transform_program(program, target_width); + // Check solvability. nargo::ops::check_program(&program)?; + // Overwrite the build artifacts with the final circuit, which includes the backend specific transformations. save_program_to_file(&program.into(), &package.name, workspace.target_directory_path()); Ok(((), warnings)) @@ -208,6 +243,7 @@ fn compile_programs( collect_errors(program_results).map(|(_, warnings)| ((), warnings)) } +/// Compile the given contracts in the workspace. fn compiled_contracts( file_manager: &FileManager, parsed_files: &ParsedFiles, diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/execute_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/execute_cmd.rs index 8dc71b1c7e5..fa95d3123c6 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/execute_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/execute_cmd.rs @@ -7,7 +7,7 @@ use clap::Args; use nargo::constants::PROVER_INPUT_FILE; use nargo::errors::try_to_diagnose_runtime_error; -use nargo::ops::DefaultForeignCallExecutor; +use nargo::foreign_calls::DefaultForeignCallExecutor; use nargo::package::{CrateName, Package}; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; use noirc_abi::input_parser::{Format, InputValue}; diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/info_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/info_cmd.rs index cf416b1fa5f..769a1f79d81 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/info_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/info_cmd.rs @@ -4,7 +4,7 @@ use clap::Args; use iter_extended::vecmap; use nargo::{ constants::PROVER_INPUT_FILE, - ops::DefaultForeignCallExecutor, + foreign_calls::DefaultForeignCallExecutor, package::{CrateName, Package}, }; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/test_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/test_cmd.rs index 7b0201226ef..aa0ee1bb94b 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/test_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/test_cmd.rs @@ -255,6 +255,12 @@ fn display_test_report( ); } } + TestStatus::Skipped { .. 
} => { + writer + .set_color(ColorSpec::new().set_fg(Some(Color::Yellow))) + .expect("Failed to set color"); + writeln!(writer, "skipped").expect("Failed to write to stderr"); + } TestStatus::CompileError(err) => { noirc_errors::reporter::report_all( file_manager.as_file_map(), diff --git a/noir/noir-repo/tooling/nargo_cli/tests/stdlib-props.rs b/noir/noir-repo/tooling/nargo_cli/tests/stdlib-props.rs index 0013a90b4ff..86c225831b9 100644 --- a/noir/noir-repo/tooling/nargo_cli/tests/stdlib-props.rs +++ b/noir/noir-repo/tooling/nargo_cli/tests/stdlib-props.rs @@ -2,10 +2,7 @@ use std::{cell::RefCell, collections::BTreeMap, path::Path}; use acvm::{acir::native_types::WitnessStack, AcirField, FieldElement}; use iter_extended::vecmap; -use nargo::{ - ops::{execute_program, DefaultForeignCallExecutor}, - parse_all, -}; +use nargo::{foreign_calls::DefaultForeignCallExecutor, ops::execute_program, parse_all}; use noirc_abi::input_parser::InputValue; use noirc_driver::{ compile_main, file_manager_with_stdlib, prepare_crate, CompilationResult, CompileOptions, @@ -64,6 +61,7 @@ fn prepare_and_compile_snippet( ) -> CompilationResult { let (mut context, root_crate_id) = prepare_snippet(source); let options = CompileOptions { force_brillig, ..Default::default() }; + // TODO: Run nargo::ops::transform_program? compile_main(&mut context, root_crate_id, &options, None) } diff --git a/noir/noir-repo/tooling/nargo_cli/tests/stdlib-tests.rs b/noir/noir-repo/tooling/nargo_cli/tests/stdlib-tests.rs index bdc92e625ab..99f0c9a2e7f 100644 --- a/noir/noir-repo/tooling/nargo_cli/tests/stdlib-tests.rs +++ b/noir/noir-repo/tooling/nargo_cli/tests/stdlib-tests.rs @@ -138,6 +138,12 @@ fn display_test_report( ); } } + TestStatus::Skipped { .. } => { + writer + .set_color(ColorSpec::new().set_fg(Some(Color::Yellow))) + .expect("Failed to set color"); + writeln!(writer, "skipped").expect("Failed to write to stderr"); + } TestStatus::CompileError(err) => { noirc_errors::reporter::report_all( file_manager.as_file_map(), diff --git a/noir/noir-repo/tooling/noirc_abi/proptest-regressions/input_parser/json.txt b/noir/noir-repo/tooling/noirc_abi/proptest-regressions/input_parser/json.txt new file mode 100644 index 00000000000..19de8eeaf48 --- /dev/null +++ b/noir/noir-repo/tooling/noirc_abi/proptest-regressions/input_parser/json.txt @@ -0,0 +1,7 @@ +# Seeds for failure cases proptest has generated in the past. It is +# automatically read and these particular cases re-run before any +# novel cases are generated. +# +# It is recommended to check this file in to source control so that +# everyone who runs the test benefits from these saved cases. +cc b3f9ae88d54944ca274764f4d99a2023d4b0ac09beb89bc599cbba1e45dd3620 # shrinks to (typ, value) = (Integer { sign: Signed, width: 1 }, -1) diff --git a/noir/noir-repo/tooling/noirc_abi/proptest-regressions/input_parser/toml.txt b/noir/noir-repo/tooling/noirc_abi/proptest-regressions/input_parser/toml.txt new file mode 100644 index 00000000000..1448cb67ef1 --- /dev/null +++ b/noir/noir-repo/tooling/noirc_abi/proptest-regressions/input_parser/toml.txt @@ -0,0 +1,9 @@ +# Seeds for failure cases proptest has generated in the past. It is +# automatically read and these particular cases re-run before any +# novel cases are generated. +# +# It is recommended to check this file in to source control so that +# everyone who runs the test benefits from these saved cases. 
+cc 9d200afb8f5c01e3414d24eebe1436a7eef5377a46a9a9235aaa7f81e0b33656 # shrinks to (typ, value) = (Integer { sign: Signed, width: 8 }, -1) +cc 7fd29637e5566d819992185c1a95438e9949a555928a911b3918eed2e3f7a1fd # shrinks to (typ, value) = (Integer { sign: Signed, width: 64 }, -1) +cc 8ecbda39d887674b53ca23a861ac30fbb10c123bb70c57e69b336c86a3d9dea8 # shrinks to (abi, input_map) = (Abi { parameters: [AbiParameter { name: "¡", typ: Struct { path: "�)\u{1b}=�?Ⱥ\u{59424}?{\u{e4d5e}%Ѩ/Q\u{36a17}/*\";\u{b}&iC_\u{d313f}S\u{1b}\u{9dfec}\r/\u{10530d}", fields: [("?p*\"/\u{202e}\u{6f038}\u{537ca}.y@~𘛶?4\u{1b}*", Field), (".Ⱥ/$\u{7f}\u{103c06}%\\\u{202e}][0\u{88479}]\"*~\u{36fd5}\u{5}\u{feff}]{/", Tuple { fields: [String { length: 937 }] }), ("r\u{ac3a5}&:", Boolean), ("$d6🕴/:|�\u{37f8b}\r\u{a13b7}C$𲁹\\&\u{f8712}?\u{db61c}t%\u{57be1}\0", Field), ("/\u{6378b}\u{a426c}¥\u{7}/\u{fcb29}$\u{53c6b}\u{12d6f}\u{12bd3}.\u{f2f82}\u{8613e}*$\u{fd32f}\u{e29f7}\0𨺉'¬\"1", Struct { path: "\\\u{4a5ac}<\u{9e505}\u{4f3af}🕴&?<:^\u{7}\u{88}\u{3e1ff}(¥\u{531f3}K{:¥𦺀", fields: [("n\0Ѩ/\u{1b}𥐰\u{a4906}�¥`{\u{389d4}`1\u{7708a})\u{3dac4}8\u{93e5f}㒭\\\"\u{e6824}\u{b}Ѩ\u{88946}Ⱥ{", Integer { sign: Signed, width: 127 })] }), ("¥🕴\u{1b}¥🕴=sR\0\u{35f36}\u{867dc}>ä\u{202e}f:BȺ?:``*·¥\u{74ca5}\"", Tuple { fields: [Boolean, Field, String { length: 205 }, String { length: 575 }, Integer { sign: Signed, width: 124 }, String { length: 923 }, String { length: 294 }] })] }, visibility: Public }], return_type: None, error_types: {} }, {"¡": Struct({"$d6🕴/:|�\u{37f8b}\r\u{a13b7}C$𲁹\\&\u{f8712}?\u{db61c}t%\u{57be1}\0": Field(-8275115097504119425402713293372777967031130481426075481525511323101167533940), ".Ⱥ/$\u{7f}\u{103c06}%\\\u{202e}][0\u{88479}]\"*~\u{36fd5}\u{5}\u{feff}]{/": Vec([String("A \0A 0 aA0 a0aa00 A\000 0 \0\0aA\0\0a \0 \0a 0A\0A\0 Aa0aAA0A\0aa\00 0\0\0\0\0\00a Aa0 \0 a A0 \0AA0A Aa Aa\00aAaAaaA0A0 aA0 \0 Aa\00 \0000AAA a \0AAaaA\0\0a A0a0AA\0aA00 aA a0A\0AAa0a\0A0a\0\0A0A \00Aaaaa a A AO.*D\r.`bD4a\n*\u{15}\\B\"ace.8&A\t[AV8w<\u{18}\"\u{f}4`^Q\u{1b}U*$Z/\0\u{b}]qw${`\"=X&A\\\u{e}%`\\:\"$\u{1}.(6_C:\u{7}a`V=N**\u{1b})#Y\u{7f}#\u{b}$l\t}.Mns5!\t*$g\u{18}\rC\u{11}\"$=\u{7}.?&\u{1}yW\t.Y|<6\u{12}\u{e}/4JJ*&/V$`\"&`x#R\np\\%'*\n:P\0K\u{b}*`\r7Ym\t_\u{b}=$\u{16}`0v\u{7f}'NV^N4J<9=G*A:!b\u{1c}:'c{ST&z![\u{7f}/.={E*pmaWC\u{7f}7p{<\"']\u{8}?`\u{1b}\"\\\u{1}$\u{18}/!\u{16}-\t:E7CUs%_qw*xf.S\t\u{4}'=\"&%t'\u{1f}\u{7f}\u{b}$.=f\u{6}\"$A}xV_$\u{1a}nH\n\u{1b}?<&\n\u{15}U\\-b\u{1d}|\u{b}\u{2}t \rwA{L\u{11}\u{6}\u{10}\0\u{1b}G[x?&Yi?&7\u{b}?\r\u{1f}b\\$=\u{b}x& Q/\t\u{4}|X\"7\"{\0\0j'.\0\\e1zR.\u{c}\n<\u{b}Q*R+y8\u{19}(o\u{1f}@m\nt+\u{7f}Q\\+.Rn?\u{17}UZ\"$\u{b}/\0B=9=\t{\u{8}qZ&`!:D{\u{6}IO.H\u{7f}:?/3@\r\u{1b}oä\u{202e}f:BȺ?:``*·¥\u{74ca5}\"": Vec([Field(1), Field(8822392870083219098626030699076694602179106416928939583840848325203494062169), String("*TXn;{}\"_)_9\nk\\#ts\u{10}%\\c\n/2._::Oj*\u{7f}\0\r&PUMl\u{10}$/u?L}\u{7f}*P&<%=\u{7}S#%A\n \u{e}\\#v!\"\nepRp.{vH{&@\t\u{1f}\u{b}?=T\u{f}\"B\u{11}\n/{HY.\u{16}\n\nj<&\u{3}{f\n/9J*&x.$/,\r\0\u{1c}'\u{5}\u{13}\u{1b}`T\0`\n&/&\u{15}\u{b}w:{SK\u{7f}\\apR%/'0`0\n'd$$\u{7f}Vs\t<{\nDTT\\F\n\u{15}y.\\\t*-)&D$*u\u{b}\u{1b}?{\u{b}/\n\u{7f}0*.7\0\n:\u{b}.rSk<6~>{#"), 
String(".\"JA%q6i\ra/:F\u{16}?q<\t\rN\\13?H<;?{`\u{1d}p{.\"5?*@'N\"\u{1a}P,\u{1b}\u{7f}c+dt5':Y\u{1b}k/G>k/eM$XIX')\u{1b}'&\u{7f}\\\r\u{1b}`'P_.\n.?\0p`Y\u{c}`._\u{b}B\0\ng/*v$jfJ:\u{c}\u{1b}Pv}xn7ph@#{_<{.JD?r%'E\n7s9n/],u![;%*\u{2}{y`MgRdok8\"%<*>*{GyFJ}?\0W%#\0\u{1b}\u{7f}\u{16}G:\t=w\u{7f}:q\u{7f}:{k?\u{b}(:ca{$*1X/cw\u{1b}Z6I\rX\0\u{1b}(.^14\r\\=s\u{1b}w\u{3}F~\n\u{1e})/$0:=[\u{1},\\\\\tg\u{16}:],J`\0N\n\u{1b}\u{1b}\u{1b}{.xb\u{1a}\r'12#?e\\#/\tA\u{7f}\".\\Ke=\\?!v+P\u{17}\r\u{12}x.=A.`0<&?\niR/*WW\rnV)5vY.~\n _h\0&5f#\r\u{2}-S%\t s..\u{7f}!X}\"=\"?\u{5}y\u{4}`fr&R&d: 1Ht\"4`y_/S.71#{|%$%&ehy\u{16}J_\u{e}=:.%'\"N=J:\r:{&.\u{12}\u{b})&N\u{10}R_3;11\u{b}Qd<`<{?xF:~\"%<=<<\03:t??&\r;{\u{13}?__Y\u{6})\\k,vs?\n`G(*\n!\u{1b}[@z\0$?*yKLJh_\u{13}FkY'\\?T^\u{1f}$1n`'[\n\u{7f}\0+l\u{b}\u{1a}E\u{b}&(/\u{b}\rr\t:&\0+N'N:oC:*``IN\u{b}*.:\t$7+'*U:\t Result { let json_value = match (value, abi_type) { + (InputValue::Field(f), AbiType::Integer { sign: crate::Sign::Signed, width }) => { + JsonTypes::String(field_to_signed_hex(*f, *width)) + } (InputValue::Field(f), AbiType::Field | AbiType::Integer { .. }) => { JsonTypes::String(Self::format_field_string(*f)) } @@ -143,6 +146,9 @@ impl InputValue { ) -> Result { let input_value = match (value, param_type) { (JsonTypes::String(string), AbiType::String { .. }) => InputValue::String(string), + (JsonTypes::String(string), AbiType::Integer { sign: crate::Sign::Signed, width }) => { + InputValue::Field(parse_str_to_signed(&string, *width)?) + } ( JsonTypes::String(string), AbiType::Field | AbiType::Integer { .. } | AbiType::Boolean, @@ -192,3 +198,40 @@ impl InputValue { Ok(input_value) } } + +#[cfg(test)] +mod test { + use proptest::prelude::*; + + use crate::{ + arbitrary::arb_abi_and_input_map, + input_parser::{arbitrary::arb_signed_integer_type_and_value, json::JsonTypes, InputValue}, + }; + + use super::{parse_json, serialize_to_json}; + + proptest! 
{
+        #[test]
+        fn serializing_and_parsing_returns_original_input((abi, input_map) in arb_abi_and_input_map()) {
+            let json = serialize_to_json(&input_map, &abi).expect("should be serializable");
+            let parsed_input_map = parse_json(&json, &abi).expect("should be parsable");
+
+            prop_assert_eq!(parsed_input_map, input_map);
+        }
+
+        #[test]
+        fn signed_integer_serialization_roundtrip((typ, value) in arb_signed_integer_type_and_value()) {
+            let string_input = JsonTypes::String(value.to_string());
+            let input_value = InputValue::try_from_json(string_input, &typ, "foo").expect("should be parsable");
+            let JsonTypes::String(output_string) = JsonTypes::try_from_input_value(&input_value, &typ).expect("should be serializable") else {
+                panic!("wrong type output");
+            };
+            let output_number = if let Some(output_string) = output_string.strip_prefix("-0x") {
+                -i64::from_str_radix(output_string, 16).unwrap()
+            } else {
+                i64::from_str_radix(output_string.strip_prefix("0x").unwrap(), 16).unwrap()
+            };
+            prop_assert_eq!(output_number, value);
+        }
+    }
+}
diff --git a/noir/noir-repo/tooling/noirc_abi/src/input_parser/mod.rs b/noir/noir-repo/tooling/noirc_abi/src/input_parser/mod.rs
index d7bbb0adfe3..b7732235eb2 100644
--- a/noir/noir-repo/tooling/noirc_abi/src/input_parser/mod.rs
+++ b/noir/noir-repo/tooling/noirc_abi/src/input_parser/mod.rs
@@ -248,6 +248,11 @@ mod serialization_tests {
                 typ: AbiType::Field,
                 visibility: AbiVisibility::Private,
             },
+            AbiParameter {
+                name: "signed_example".into(),
+                typ: AbiType::Integer { sign: Sign::Signed, width: 8 },
+                visibility: AbiVisibility::Private,
+            },
             AbiParameter {
                 name: "bar".into(),
                 typ: AbiType::Struct {
@@ -272,6 +277,7 @@
         let input_map: BTreeMap<String, InputValue> = BTreeMap::from([
             ("foo".into(), InputValue::Field(FieldElement::one())),
+            ("signed_example".into(), InputValue::Field(FieldElement::from(240u128))),
             (
                 "bar".into(),
                 InputValue::Struct(BTreeMap::from([
@@ -317,7 +323,9 @@
 }
 
 fn parse_str_to_signed(value: &str, width: u32) -> Result<FieldElement, InputParserError> {
-    let big_num = if let Some(hex) = value.strip_prefix("0x") {
+    let big_num = if let Some(hex) = value.strip_prefix("-0x") {
+        BigInt::from_str_radix(hex, 16).map(|value| -value)
+    } else if let Some(hex) = value.strip_prefix("0x") {
         BigInt::from_str_radix(hex, 16)
     } else {
         BigInt::from_str_radix(value, 10)
@@ -357,12 +365,23 @@ fn field_from_big_int(bigint: BigInt) -> FieldElement {
     }
 }
 
+fn field_to_signed_hex(f: FieldElement, bit_size: u32) -> String {
+    let f_u128 = f.to_u128();
+    let max = 2_u128.pow(bit_size - 1) - 1;
+    if f_u128 > max {
+        let f = FieldElement::from(2_u128.pow(bit_size) - f_u128);
+        format!("-0x{}", f.to_hex())
+    } else {
+        format!("0x{}", f.to_hex())
+    }
+}
+
 #[cfg(test)]
 mod test {
     use acvm::{AcirField, FieldElement};
     use num_bigint::BigUint;
 
-    use super::parse_str_to_field;
+    use super::{parse_str_to_field, parse_str_to_signed};
 
     fn big_uint_from_field(field: FieldElement) -> BigUint {
         BigUint::from_bytes_be(&field.to_be_bytes())
@@ -400,4 +419,38 @@
         let noncanonical_field = FieldElement::modulus().to_string();
         assert!(parse_str_to_field(&noncanonical_field).is_err());
     }
+
+    #[test]
+    fn test_parse_str_to_signed() {
+        let value = parse_str_to_signed("1", 8).unwrap();
+        assert_eq!(value, FieldElement::from(1_u128));
+
+        let value = parse_str_to_signed("-1", 8).unwrap();
+        assert_eq!(value, FieldElement::from(255_u128));
+
+        let value = parse_str_to_signed("-1", 16).unwrap();
+        assert_eq!(value, FieldElement::from(65535_u128));
+    }
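// A worked sketch of the signed mapping used by parse_str_to_signed and
// field_to_signed_hex above: a negative value v of bit width w is stored as the
// field value 2^w + v (two's complement) and decoded back by comparing against the
// positive maximum 2^(w-1) - 1. Plain integer arithmetic stands in for FieldElement.
fn encode(value: i128, width: u32) -> u128 {
    if value < 0 {
        (1u128 << width) - value.unsigned_abs()
    } else {
        value as u128
    }
}

fn decode(stored: u128, width: u32) -> i128 {
    let max = (1u128 << (width - 1)) - 1;
    if stored > max {
        -(((1u128 << width) - stored) as i128)
    } else {
        stored as i128
    }
}

fn main() {
    assert_eq!(encode(-1, 8), 255); // matches parse_str_to_signed("-1", 8)
    assert_eq!(encode(-1, 16), 65535); // matches parse_str_to_signed("-1", 16)
    for (value, width) in [(-1i128, 8u32), (-128, 8), (127, 8), (-42, 16)] {
        assert_eq!(decode(encode(value, width), width), value);
    }
}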
+} + +#[cfg(test)] +mod arbitrary { + use proptest::prelude::*; + + use crate::{AbiType, Sign}; + + pub(super) fn arb_signed_integer_type_and_value() -> BoxedStrategy<(AbiType, i64)> { + (2u32..=64) + .prop_flat_map(|width| { + let typ = Just(AbiType::Integer { width, sign: Sign::Signed }); + let value = if width == 64 { + // Avoid overflow + i64::MIN..i64::MAX + } else { + -(2i64.pow(width - 1))..(2i64.pow(width - 1) - 1) + }; + (typ, value) + }) + .boxed() + } } diff --git a/noir/noir-repo/tooling/noirc_abi/src/input_parser/toml.rs b/noir/noir-repo/tooling/noirc_abi/src/input_parser/toml.rs index 321d3511b5d..6f2be68a0c4 100644 --- a/noir/noir-repo/tooling/noirc_abi/src/input_parser/toml.rs +++ b/noir/noir-repo/tooling/noirc_abi/src/input_parser/toml.rs @@ -1,4 +1,4 @@ -use super::{parse_str_to_field, parse_str_to_signed, InputValue}; +use super::{field_to_signed_hex, parse_str_to_field, parse_str_to_signed, InputValue}; use crate::{errors::InputParserError, Abi, AbiType, MAIN_RETURN_NAME}; use acvm::{AcirField, FieldElement}; use iter_extended::{try_btree_map, try_vecmap}; @@ -60,7 +60,7 @@ pub(crate) fn serialize_to_toml( Ok(toml_string) } -#[derive(Debug, Deserialize, Serialize, Clone)] +#[derive(Debug, Deserialize, Serialize, Clone, PartialEq)] #[serde(untagged)] enum TomlTypes { // This is most likely going to be a hex string @@ -83,6 +83,9 @@ impl TomlTypes { abi_type: &AbiType, ) -> Result { let toml_value = match (value, abi_type) { + (InputValue::Field(f), AbiType::Integer { sign: crate::Sign::Signed, width }) => { + TomlTypes::String(field_to_signed_hex(*f, *width)) + } (InputValue::Field(f), AbiType::Field | AbiType::Integer { .. }) => { let f_str = format!("0x{}", f.to_hex()); TomlTypes::String(f_str) @@ -126,6 +129,7 @@ impl InputValue { ) -> Result { let input_value = match (value, param_type) { (TomlTypes::String(string), AbiType::String { .. }) => InputValue::String(string), + ( TomlTypes::String(string), AbiType::Field @@ -139,7 +143,7 @@ impl InputValue { TomlTypes::Integer(integer), AbiType::Field | AbiType::Integer { .. } | AbiType::Boolean, ) => { - let new_value = FieldElement::from(i128::from(integer)); + let new_value = FieldElement::from(u128::from(integer)); InputValue::Field(new_value) } @@ -179,3 +183,40 @@ impl InputValue { Ok(input_value) } } + +#[cfg(test)] +mod test { + use proptest::prelude::*; + + use crate::{ + arbitrary::arb_abi_and_input_map, + input_parser::{arbitrary::arb_signed_integer_type_and_value, toml::TomlTypes, InputValue}, + }; + + use super::{parse_toml, serialize_to_toml}; + + proptest! 
{ + #[test] + fn serializing_and_parsing_returns_original_input((abi, input_map) in arb_abi_and_input_map()) { + let toml = serialize_to_toml(&input_map, &abi).expect("should be serializable"); + let parsed_input_map = parse_toml(&toml, &abi).expect("should be parsable"); + + prop_assert_eq!(parsed_input_map, input_map); + } + + #[test] + fn signed_integer_serialization_roundtrip((typ, value) in arb_signed_integer_type_and_value()) { + let string_input = TomlTypes::String(value.to_string()); + let input_value = InputValue::try_from_toml(string_input.clone(), &typ, "foo").expect("should be parsable"); + let TomlTypes::String(output_string) = TomlTypes::try_from_input_value(&input_value, &typ).expect("should be serializable") else { + panic!("wrong type output"); + }; + let output_number = if let Some(output_string) = output_string.strip_prefix("-0x") { + -i64::from_str_radix(output_string, 16).unwrap() + } else { + i64::from_str_radix(output_string.strip_prefix("0x").unwrap(), 16).unwrap() + }; + prop_assert_eq!(output_number, value); + } + } +} diff --git a/noir/noir-repo/tooling/noirc_abi/src/lib.rs b/noir/noir-repo/tooling/noirc_abi/src/lib.rs index b1b199727c2..bd5674d64f1 100644 --- a/noir/noir-repo/tooling/noirc_abi/src/lib.rs +++ b/noir/noir-repo/tooling/noirc_abi/src/lib.rs @@ -49,6 +49,7 @@ pub const MAIN_RETURN_NAME: &str = "return"; /// depends on the types of programs that users want to do. I don't envision string manipulation /// in programs, however it is possible to support, with many complications like encoding character set /// support. +#[derive(Hash)] pub enum AbiType { Field, Array { @@ -77,7 +78,7 @@ pub enum AbiType { }, } -#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)] +#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize, Hash)] #[cfg_attr(test, derive(arbitrary::Arbitrary))] #[serde(rename_all = "lowercase")] /// Represents whether the parameter is public or known only to the prover. @@ -89,7 +90,7 @@ pub enum AbiVisibility { DataBus, } -#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)] +#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize, Hash)] #[cfg_attr(test, derive(arbitrary::Arbitrary))] #[serde(rename_all = "lowercase")] pub enum Sign { @@ -146,7 +147,7 @@ impl From<&AbiType> for PrintableType { } } -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] +#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Hash)] #[cfg_attr(test, derive(arbitrary::Arbitrary))] /// An argument or return value of the circuit's `main` function. pub struct AbiParameter { @@ -163,7 +164,7 @@ impl AbiParameter { } } -#[derive(Clone, Debug, Serialize, Deserialize)] +#[derive(Clone, Debug, Serialize, Deserialize, Hash)] #[cfg_attr(test, derive(arbitrary::Arbitrary))] pub struct AbiReturnType { #[cfg_attr(test, proptest(strategy = "arbitrary::arb_abi_type()"))] @@ -171,7 +172,7 @@ pub struct AbiReturnType { pub visibility: AbiVisibility, } -#[derive(Clone, Debug, Default, Serialize, Deserialize)] +#[derive(Clone, Debug, Default, Serialize, Deserialize, Hash)] #[cfg_attr(test, derive(arbitrary::Arbitrary))] pub struct Abi { /// An ordered list of the arguments to the program's `main` function, specifying their types and visibility. 
@@ -459,7 +460,7 @@ pub enum AbiValue { }, } -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, Hash)] #[serde(tag = "error_kind", rename_all = "lowercase")] pub enum AbiErrorType { FmtString { length: u32, item_types: Vec }, diff --git a/noir/noir-repo/tooling/noirc_abi_wasm/Cargo.toml b/noir/noir-repo/tooling/noirc_abi_wasm/Cargo.toml index daa619ca01d..b00d580515e 100644 --- a/noir/noir-repo/tooling/noirc_abi_wasm/Cargo.toml +++ b/noir/noir-repo/tooling/noirc_abi_wasm/Cargo.toml @@ -1,9 +1,11 @@ [package] name = "noirc_abi_wasm" +description = "An ABI encoder for the Noir language" version.workspace = true authors.workspace = true edition.workspace = true license.workspace = true +repository.workspace = true # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html diff --git a/noir/noir-repo/tooling/noirc_abi_wasm/test/browser/abi_encode.test.ts b/noir/noir-repo/tooling/noirc_abi_wasm/test/browser/abi_encode.test.ts index e1aaf0dc2c0..ac18495919c 100644 --- a/noir/noir-repo/tooling/noirc_abi_wasm/test/browser/abi_encode.test.ts +++ b/noir/noir-repo/tooling/noirc_abi_wasm/test/browser/abi_encode.test.ts @@ -15,7 +15,8 @@ it('recovers original inputs when abi encoding and decoding', async () => { const foo: Field = inputs.foo as Field; const bar: Field[] = inputs.bar as Field[]; expect(BigInt(decoded_inputs.inputs.foo)).to.be.equal(BigInt(foo)); - expect(BigInt(decoded_inputs.inputs.bar[0])).to.be.equal(BigInt(bar[0])); - expect(BigInt(decoded_inputs.inputs.bar[1])).to.be.equal(BigInt(bar[1])); + expect(parseInt(decoded_inputs.inputs.bar[0])).to.be.equal(parseInt(bar[0].toString())); + expect(parseInt(decoded_inputs.inputs.bar[1])).to.be.equal(parseInt(bar[1].toString())); + expect(parseInt(decoded_inputs.inputs.bar[2])).to.be.equal(parseInt(bar[2].toString())); expect(decoded_inputs.return_value).to.be.null; }); diff --git a/noir/noir-repo/tooling/noirc_abi_wasm/test/node/abi_encode.test.ts b/noir/noir-repo/tooling/noirc_abi_wasm/test/node/abi_encode.test.ts index a49c10b6ea6..e87618d84da 100644 --- a/noir/noir-repo/tooling/noirc_abi_wasm/test/node/abi_encode.test.ts +++ b/noir/noir-repo/tooling/noirc_abi_wasm/test/node/abi_encode.test.ts @@ -11,7 +11,8 @@ it('recovers original inputs when abi encoding and decoding', async () => { const foo: Field = inputs.foo as Field; const bar: Field[] = inputs.bar as Field[]; expect(BigInt(decoded_inputs.inputs.foo)).to.be.equal(BigInt(foo)); - expect(BigInt(decoded_inputs.inputs.bar[0])).to.be.equal(BigInt(bar[0])); - expect(BigInt(decoded_inputs.inputs.bar[1])).to.be.equal(BigInt(bar[1])); + expect(parseInt(decoded_inputs.inputs.bar[0])).to.be.equal(parseInt(bar[0].toString())); + expect(parseInt(decoded_inputs.inputs.bar[1])).to.be.equal(parseInt(bar[1].toString())); + expect(parseInt(decoded_inputs.inputs.bar[2])).to.be.equal(parseInt(bar[2].toString())); expect(decoded_inputs.return_value).to.be.null; }); diff --git a/noir/noir-repo/tooling/noirc_abi_wasm/test/shared/abi_encode.ts b/noir/noir-repo/tooling/noirc_abi_wasm/test/shared/abi_encode.ts index 62eb7658f43..b789bb05371 100644 --- a/noir/noir-repo/tooling/noirc_abi_wasm/test/shared/abi_encode.ts +++ b/noir/noir-repo/tooling/noirc_abi_wasm/test/shared/abi_encode.ts @@ -5,7 +5,7 @@ export const abi: Abi = { { name: 'foo', type: { kind: 'field' }, visibility: 'private' }, { name: 'bar', - type: { kind: 'array', length: 2, type: { kind: 'field' } }, + type: { kind: 'array', 
length: 3, type: { kind: 'integer', sign: 'signed', width: 32 } }, visibility: 'private', }, ], @@ -15,5 +15,5 @@ export const abi: Abi = { export const inputs: InputMap = { foo: '1', - bar: ['1', '2'], + bar: ['1', '2', '-1'], }; diff --git a/noir/noir-repo/tooling/profiler/src/cli/execution_flamegraph_cmd.rs b/noir/noir-repo/tooling/profiler/src/cli/execution_flamegraph_cmd.rs index 981d08a3eb1..6d6da89f660 100644 --- a/noir/noir-repo/tooling/profiler/src/cli/execution_flamegraph_cmd.rs +++ b/noir/noir-repo/tooling/profiler/src/cli/execution_flamegraph_cmd.rs @@ -8,7 +8,7 @@ use crate::flamegraph::{BrilligExecutionSample, FlamegraphGenerator, InfernoFlam use crate::fs::{read_inputs_from_file, read_program_from_file}; use crate::opcode_formatter::format_brillig_opcode; use bn254_blackbox_solver::Bn254BlackBoxSolver; -use nargo::ops::DefaultForeignCallExecutor; +use nargo::foreign_calls::DefaultForeignCallExecutor; use noirc_abi::input_parser::Format; use noirc_artifacts::debug::DebugArtifact; diff --git a/spartan/aztec-network/files/config/config-prover-env.sh b/spartan/aztec-network/files/config/config-prover-env.sh index 11c4ad5aef2..073547821d4 100644 --- a/spartan/aztec-network/files/config/config-prover-env.sh +++ b/spartan/aztec-network/files/config/config-prover-env.sh @@ -3,7 +3,7 @@ set -eu # Pass the bootnode url as an argument # Ask the bootnode for l1 contract addresses -output=$(node --no-warnings /usr/src/yarn-project/aztec/dest/bin/index.js get-node-info -u $1) +output=$(node --no-warnings /usr/src/yarn-project/aztec/dest/bin/index.js get-node-info --node-url $1) echo "$output" @@ -13,6 +13,7 @@ registry_address=$(echo "$output" | grep -oP 'Registry Address: \K0x[a-fA-F0-9]{ inbox_address=$(echo "$output" | grep -oP 'L1 -> L2 Inbox Address: \K0x[a-fA-F0-9]{40}') outbox_address=$(echo "$output" | grep -oP 'L2 -> L1 Outbox Address: \K0x[a-fA-F0-9]{40}') fee_juice_address=$(echo "$output" | grep -oP 'Fee Juice Address: \K0x[a-fA-F0-9]{40}') +staking_asset_address=$(echo "$output" | grep -oP 'Staking Asset Address: \K0x[a-fA-F0-9]{40}') fee_juice_portal_address=$(echo "$output" | grep -oP 'Fee Juice Portal Address: \K0x[a-fA-F0-9]{40}') coin_issuer_address=$(echo "$output" | grep -oP 'CoinIssuer Address: \K0x[a-fA-F0-9]{40}') reward_distributor_address=$(echo "$output" | grep -oP 'RewardDistributor Address: \K0x[a-fA-F0-9]{40}') @@ -20,13 +21,14 @@ governance_proposer_address=$(echo "$output" | grep -oP 'GovernanceProposer Addr governance_address=$(echo "$output" | grep -oP 'Governance Address: \K0x[a-fA-F0-9]{40}') # Write the addresses to a file in the shared volume -cat < /shared/contracts/contracts.env +cat </shared/contracts/contracts.env export BOOTSTRAP_NODES=$boot_node_enr export ROLLUP_CONTRACT_ADDRESS=$rollup_address export REGISTRY_CONTRACT_ADDRESS=$registry_address export INBOX_CONTRACT_ADDRESS=$inbox_address export OUTBOX_CONTRACT_ADDRESS=$outbox_address export FEE_JUICE_CONTRACT_ADDRESS=$fee_juice_address +export STAKING_ASSET_CONTRACT_ADDRESS=$staking_asset_address export FEE_JUICE_PORTAL_CONTRACT_ADDRESS=$fee_juice_portal_address export COIN_ISSUER_CONTRACT_ADDRESS=$coin_issuer_address export REWARD_DISTRIBUTOR_CONTRACT_ADDRESS=$reward_distributor_address diff --git a/spartan/aztec-network/files/config/config-validator-env.sh b/spartan/aztec-network/files/config/config-validator-env.sh index 71d03fbbc98..b2848f8e069 100644 --- a/spartan/aztec-network/files/config/config-validator-env.sh +++ b/spartan/aztec-network/files/config/config-validator-env.sh @@ -1,10 
+1,9 @@ #!/bin/bash set -eu - -# Pass the bootnode url as an argument -# Ask the bootnode for l1 contract addresses -output=$(node --no-warnings /usr/src/yarn-project/aztec/dest/bin/index.js get-node-info -u $1) +# Pass a PXE url as an argument +# Ask the PXE's node for l1 contract addresses +output=$(node --no-warnings /usr/src/yarn-project/aztec/dest/bin/index.js get-node-info -u $1 --node-url '') echo "$output" @@ -14,6 +13,7 @@ registry_address=$(echo "$output" | grep -oP 'Registry Address: \K0x[a-fA-F0-9]{ inbox_address=$(echo "$output" | grep -oP 'L1 -> L2 Inbox Address: \K0x[a-fA-F0-9]{40}') outbox_address=$(echo "$output" | grep -oP 'L2 -> L1 Outbox Address: \K0x[a-fA-F0-9]{40}') fee_juice_address=$(echo "$output" | grep -oP 'Fee Juice Address: \K0x[a-fA-F0-9]{40}') +staking_asset_address=$(echo "$output" | grep -oP 'Staking Asset Address: \K0x[a-fA-F0-9]{40}') fee_juice_portal_address=$(echo "$output" | grep -oP 'Fee Juice Portal Address: \K0x[a-fA-F0-9]{40}') coin_issuer_address=$(echo "$output" | grep -oP 'CoinIssuer Address: \K0x[a-fA-F0-9]{40}') reward_distributor_address=$(echo "$output" | grep -oP 'RewardDistributor Address: \K0x[a-fA-F0-9]{40}') @@ -25,15 +25,15 @@ governance_address=$(echo "$output" | grep -oP 'Governance Address: \K0x[a-fA-F0 INDEX=$(echo $POD_NAME | awk -F'-' '{print $NF}') private_key=$(jq -r ".[$INDEX]" /app/config/keys.json) - # Write the addresses to a file in the shared volume -cat < /shared/contracts/contracts.env +cat </shared/contracts/contracts.env export BOOTSTRAP_NODES=$boot_node_enr export ROLLUP_CONTRACT_ADDRESS=$rollup_address export REGISTRY_CONTRACT_ADDRESS=$registry_address export INBOX_CONTRACT_ADDRESS=$inbox_address export OUTBOX_CONTRACT_ADDRESS=$outbox_address export FEE_JUICE_CONTRACT_ADDRESS=$fee_juice_address +export STAKING_ASSET_CONTRACT_ADDRESS=$staking_asset_address export FEE_JUICE_PORTAL_CONTRACT_ADDRESS=$fee_juice_portal_address export COIN_ISSUER_CONTRACT_ADDRESS=$coin_issuer_address export REWARD_DISTRIBUTOR_CONTRACT_ADDRESS=$reward_distributor_address diff --git a/spartan/aztec-network/files/config/deploy-l1-contracts.sh b/spartan/aztec-network/files/config/deploy-l1-contracts.sh index 4d976821f04..74f8e3c6bfc 100644 --- a/spartan/aztec-network/files/config/deploy-l1-contracts.sh +++ b/spartan/aztec-network/files/config/deploy-l1-contracts.sh @@ -4,17 +4,21 @@ set -exu CHAIN_ID=$1 -# Use default account, it is funded on our dev machine -export PRIVATE_KEY="0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80" - # Run the deploy-l1-contracts command and capture the output output="" -# if INIT_VALIDATORS is true, then we need to pass the validators flag to the deploy-l1-contracts command -if [ "$INIT_VALIDATORS" = "true" ]; then - output=$(node --no-warnings /usr/src/yarn-project/aztec/dest/bin/index.js deploy-l1-contracts --validators $2 --l1-chain-id $CHAIN_ID) -else - output=$(node --no-warnings /usr/src/yarn-project/aztec/dest/bin/index.js deploy-l1-contracts --l1-chain-id $CHAIN_ID) -fi +MAX_RETRIES=5 +RETRY_DELAY=60 +for attempt in $(seq 1 $MAX_RETRIES); do + # if INIT_VALIDATORS is true, then we need to pass the validators flag to the deploy-l1-contracts command + if [ "${INIT_VALIDATORS:-false}" = "true" ]; then + output=$(node --no-warnings /usr/src/yarn-project/aztec/dest/bin/index.js deploy-l1-contracts --mnemonic "$MNEMONIC" --validators $2 --l1-chain-id $CHAIN_ID) && break + else + output=$(node --no-warnings /usr/src/yarn-project/aztec/dest/bin/index.js deploy-l1-contracts --mnemonic "$MNEMONIC" 
--l1-chain-id $CHAIN_ID) && break + fi + echo "Attempt $attempt failed. Retrying in $RETRY_DELAY seconds..." + sleep "$RETRY_DELAY" +done || { echo "All l1 contract deploy attempts failed."; exit 1; } + echo "$output" @@ -24,6 +28,7 @@ registry_address=$(echo "$output" | grep -oP 'Registry Address: \K0x[a-fA-F0-9]{ inbox_address=$(echo "$output" | grep -oP 'L1 -> L2 Inbox Address: \K0x[a-fA-F0-9]{40}') outbox_address=$(echo "$output" | grep -oP 'L2 -> L1 Outbox Address: \K0x[a-fA-F0-9]{40}') fee_juice_address=$(echo "$output" | grep -oP 'Fee Juice Address: \K0x[a-fA-F0-9]{40}') +staking_asset_address=$(echo "$output" | grep -oP 'Staking Asset Address: \K0x[a-fA-F0-9]{40}') fee_juice_portal_address=$(echo "$output" | grep -oP 'Fee Juice Portal Address: \K0x[a-fA-F0-9]{40}') coin_issuer_address=$(echo "$output" | grep -oP 'CoinIssuer Address: \K0x[a-fA-F0-9]{40}') reward_distributor_address=$(echo "$output" | grep -oP 'RewardDistributor Address: \K0x[a-fA-F0-9]{40}') @@ -37,6 +42,7 @@ export REGISTRY_CONTRACT_ADDRESS=$registry_address export INBOX_CONTRACT_ADDRESS=$inbox_address export OUTBOX_CONTRACT_ADDRESS=$outbox_address export FEE_JUICE_CONTRACT_ADDRESS=$fee_juice_address +export STAKING_ASSET_CONTRACT_ADDRESS=$staking_asset_address export FEE_JUICE_PORTAL_CONTRACT_ADDRESS=$fee_juice_portal_address export COIN_ISSUER_CONTRACT_ADDRESS=$coin_issuer_address export REWARD_DISTRIBUTOR_CONTRACT_ADDRESS=$reward_distributor_address diff --git a/spartan/aztec-network/files/config/setup-service-addresses.sh b/spartan/aztec-network/files/config/setup-service-addresses.sh index 5ca3bb5a248..063c84a16e5 100644 --- a/spartan/aztec-network/files/config/setup-service-addresses.sh +++ b/spartan/aztec-network/files/config/setup-service-addresses.sh @@ -81,17 +81,29 @@ fi if [ "${PROVER_BROKER_EXTERNAL_HOST}" != "" ]; then PROVER_BROKER_ADDR="${PROVER_BROKER_EXTERNAL_HOST}" -elif [ "${NETWORK_PUBLIC}" = "true" ]; then - PROVER_BROKER_ADDR=$(get_service_address "prover-broker" "${PROVER_BROKER_PORT}") else PROVER_BROKER_ADDR="http://${SERVICE_NAME}-prover-broker.${NAMESPACE}:${PROVER_BROKER_PORT}" fi +# Configure OTEL_COLLECTOR_ENDPOINT if not set in values file +if [ "${TELEMETRY:-false}" = "true" ] && [ "${OTEL_COLLECTOR_ENDPOINT}" = "" ]; then + OTEL_COLLECTOR_PORT=${OTEL_COLLECTOR_PORT:-4318} + OTEL_COLLECTOR_ENDPOINT="http://metrics-opentelemetry-collector.metrics:$OTEL_COLLECTOR_PORT" +fi # Write addresses to file for sourcing echo "export ETHEREUM_HOST=${ETHEREUM_ADDR}" >> /shared/config/service-addresses echo "export BOOT_NODE_HOST=${BOOT_NODE_ADDR}" >> /shared/config/service-addresses echo "export PROVER_NODE_HOST=${PROVER_NODE_ADDR}" >> /shared/config/service-addresses echo "export PROVER_BROKER_HOST=${PROVER_BROKER_ADDR}" >> /shared/config/service-addresses + +if [ "${OTEL_COLLECTOR_ENDPOINT}" != "" ]; then + echo "export OTEL_COLLECTOR_ENDPOINT=$OTEL_COLLECTOR_ENDPOINT" >> /shared/config/service-addresses + echo "export OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=$OTEL_COLLECTOR_ENDPOINT/v1/logs" >> /shared/config/service-addresses + echo "export OTEL_EXPORTER_OTLP_METRICS_ENDPOINT=$OTEL_COLLECTOR_ENDPOINT/v1/metrics" >> /shared/config/service-addresses + echo "export OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=$OTEL_COLLECTOR_ENDPOINT/v1/traces" >> /shared/config/service-addresses +fi + + echo "Addresses configured:" cat /shared/config/service-addresses diff --git a/spartan/aztec-network/templates/_helpers.tpl b/spartan/aztec-network/templates/_helpers.tpl index 3db484690a0..8581bda2d4f 100644 --- 
a/spartan/aztec-network/templates/_helpers.tpl +++ b/spartan/aztec-network/templates/_helpers.tpl @@ -68,30 +68,6 @@ http://{{ include "aztec-network.fullname" . }}-validator.{{ .Release.Namespace http://{{ include "aztec-network.fullname" . }}-metrics.{{ .Release.Namespace }} {{- end -}} -{{- define "aztec-network.otelCollectorMetricsEndpoint" -}} -{{- if .Values.telemetry.enabled -}} -{{- if .Values.telemetry.otelCollectorEndpoint -}} -{{- .Values.telemetry.otelCollectorEndpoint -}}/v1/metrics -{{- end -}} -{{- end -}} -{{- end -}} - -{{- define "aztec-network.otelCollectorTracesEndpoint" -}} -{{- if .Values.telemetry.enabled -}} -{{- if .Values.telemetry.otelCollectorEndpoint -}} -{{- .Values.telemetry.otelCollectorEndpoint -}}/v1/traces -{{- end -}} -{{- end -}} -{{- end -}} - -{{- define "aztec-network.otelCollectorLogsEndpoint" -}} -{{- if .Values.telemetry.enabled -}} -{{- if .Values.telemetry.otelCollectorEndpoint -}} -{{- .Values.telemetry.otelCollectorEndpoint -}}/v1/logs -{{- end -}} -{{- end -}} -{{- end -}} - {{- define "helpers.flag" -}} {{- $name := index . 0 -}} {{- $value := index . 1 -}} @@ -153,6 +129,10 @@ Service Address Setup Container value: "{{ .Values.network.public }}" - name: NAMESPACE value: {{ .Release.Namespace }} + - name: TELEMETRY + value: "{{ .Values.telemetry.enabled }}" + - name: OTEL_COLLECTOR_ENDPOINT + value: "{{ .Values.telemetry.otelCollectorEndpoint }}" - name: EXTERNAL_ETHEREUM_HOST value: "{{ .Values.ethereum.externalHost }}" - name: ETHEREUM_PORT diff --git a/spartan/aztec-network/templates/boot-node.yaml b/spartan/aztec-network/templates/boot-node.yaml index 00e585513a6..3a5d2103f9a 100644 --- a/spartan/aztec-network/templates/boot-node.yaml +++ b/spartan/aztec-network/templates/boot-node.yaml @@ -17,6 +17,7 @@ spec: {{- include "aztec-network.selectorLabels" . | nindent 8 }} app: boot-node spec: + dnsPolicy: ClusterFirstWithHostNet {{- if .Values.network.public }} hostNetwork: true {{- include "aztec-network.publicAntiAffinity" . | nindent 6 }} @@ -32,17 +33,18 @@ spec: - -c - | source /shared/config/service-addresses + cat /shared/config/service-addresses echo "Awaiting ethereum node at ${ETHEREUM_HOST}" until curl -s -X POST -H 'Content-Type: application/json' \ -d '{"jsonrpc":"2.0","method":"web3_clientVersion","params":[],"id":67}' \ ${ETHEREUM_HOST} | grep -q reth; do - echo "Waiting for Ethereum node..." + echo "Waiting for Ethereum node ${ETHEREUM_HOST}..." sleep 5 done echo "Ethereum node is ready!" {{- if .Values.telemetry.enabled }} - until curl --head --silent {{ include "aztec-network.otelCollectorMetricsEndpoint" . }} > /dev/null; do - echo "Waiting for OpenTelemetry collector..." + until curl --head --silent $OTEL_COLLECTOR_ENDPOINT > /dev/null; do + echo "Waiting for OpenTelemetry collector $OTEL_COLLECTOR_ENDPOINT..." sleep 5 done echo "OpenTelemetry collector is ready!" 
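Since the chart now resolves the collector endpoint once in setup-service-addresses.sh and the init containers poll `$OTEL_COLLECTOR_ENDPOINT` directly, one caveat is worth noting: these `until curl ...; do sleep 5; done` loops wait indefinitely if the collector never comes up. A bounded variant is sketched below; `wait_for_endpoint` is a hypothetical helper, not part of the chart, and uses only the `curl` and `sleep` already available in these images.

```bash
#!/bin/bash
# Sketch: readiness polling with a deadline, instead of looping forever.
# `wait_for_endpoint` is hypothetical and not part of the chart.
wait_for_endpoint() {
  local url=$1 deadline_secs=${2:-600} waited=0
  until curl --head --silent "$url" > /dev/null; do
    if [ "$waited" -ge "$deadline_secs" ]; then
      echo "Timed out after ${deadline_secs}s waiting for $url" >&2
      return 1
    fi
    echo "Waiting for $url..."
    sleep 5
    waited=$((waited + 5))
  done
  echo "$url is ready!"
}

# Example use in an init container: fail the pod instead of hanging.
# wait_for_endpoint "$OTEL_COLLECTOR_ENDPOINT" || exit 1
```

Failing fast lets Kubernetes restart the init container and surfaces a misconfigured endpoint in pod events rather than in a silently stuck rollout.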
@@ -70,8 +72,12 @@ spec: - name: scripts mountPath: /scripts env: + - name: TELEMETRY + value: "{{ .Values.telemetry.enabled }}" - name: INIT_VALIDATORS value: "true" + - name: MNEMONIC + value: "{{ .Values.aztec.l1DeploymentMnemonic }}" - name: ETHEREUM_SLOT_DURATION value: "{{ .Values.ethereum.blockTime }}" - name: AZTEC_SLOT_DURATION @@ -150,16 +156,10 @@ spec: value: "0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80" - name: OTEL_RESOURCE_ATTRIBUTES value: service.name={{ .Release.Name }},service.namespace={{ .Release.Namespace }},service.version={{ .Chart.AppVersion }},environment={{ .Values.environment | default "production" }} - - name: OTEL_EXPORTER_OTLP_METRICS_ENDPOINT - value: {{ include "aztec-network.otelCollectorMetricsEndpoint" . | quote }} - - name: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT - value: {{ include "aztec-network.otelCollectorTracesEndpoint" . | quote }} - - name: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT - value: {{ include "aztec-network.otelCollectorLogsEndpoint" . | quote }} - name: PROVER_REAL_PROOFS - value: "{{ .Values.bootNode.realProofs }}" + value: "{{ .Values.aztec.realProofs }}" - name: PXE_PROVER_ENABLED - value: "{{ .Values.bootNode.realProofs }}" + value: "{{ .Values.aztec.realProofs }}" - name: ETHEREUM_SLOT_DURATION value: "{{ .Values.ethereum.blockTime }}" - name: AZTEC_SLOT_DURATION @@ -208,6 +208,7 @@ data: export INBOX_CONTRACT_ADDRESS={{ .Values.bootNode.contracts.inboxAddress }} export OUTBOX_CONTRACT_ADDRESS={{ .Values.bootNode.contracts.outboxAddress }} export FEE_JUICE_CONTRACT_ADDRESS={{ .Values.bootNode.contracts.feeJuiceAddress }} + export STAKING_ASSET_CONTRACT_ADDRESS={{ .Values.bootNode.contracts.stakingAssetAddress }} export FEE_JUICE_PORTAL_CONTRACT_ADDRESS={{ .Values.bootNode.contracts.feeJuicePortalAddress }} {{- end }} {{if not .Values.network.public }} 
diff --git a/spartan/aztec-network/templates/deploy-l1-verifier.yaml b/spartan/aztec-network/templates/deploy-l1-verifier.yaml index 8866dd1ca09..c3edb5d42ff 100644 --- a/spartan/aztec-network/templates/deploy-l1-verifier.yaml +++ b/spartan/aztec-network/templates/deploy-l1-verifier.yaml @@ -44,6 +44,7 @@ spec: chmod +x /tmp/setup-service-addresses.sh /tmp/setup-service-addresses.sh source /shared/config/service-addresses + cat /shared/config/service-addresses until curl -s -X GET "$BOOT_NODE_HOST/status"; do echo "Waiting for Aztec node $BOOT_NODE_HOST..." @@ -51,19 +52,25 @@ done echo "Boot node is ready!" - export ROLLUP_CONTRACT_ADDRESS=$(curl -X POST -H 'Content-Type: application/json' \ + l1_contracts=$(curl -X POST -H 'Content-Type: application/json' \ -d '{"jsonrpc":"2.0","method":"node_getL1ContractAddresses","params":[],"id":1}' \ - "$BOOT_NODE_HOST" \ - | jq -r '.result.rollupAddress.value') - + "$BOOT_NODE_HOST") + echo "L1 Contracts" + echo $l1_contracts + export ROLLUP_CONTRACT_ADDRESS=$(echo $l1_contracts | jq -r '.result.rollupAddress') + [ -z "$ROLLUP_CONTRACT_ADDRESS" ] && echo "Could not retrieve rollup address!" && exit 1 echo "Rollup contract address: $ROLLUP_CONTRACT_ADDRESS" - node /usr/src/yarn-project/aztec/dest/bin/index.js deploy-l1-verifier --verifier real + MAX_RETRIES=5 + RETRY_DELAY=60 + for attempt in $(seq 1 $MAX_RETRIES); do + # break out of the retry loop as soon as one attempt succeeds + node /usr/src/yarn-project/aztec/dest/bin/index.js deploy-l1-verifier --verifier real && break + echo "Attempt $attempt failed. Retrying in $RETRY_DELAY seconds..." + sleep "$RETRY_DELAY" + done || { echo "All L1 verifier deploy attempts failed."
>&2; exit 1; } echo "L1 verifier deployed" env: - name: NODE_NO_WARNINGS value: "1" - - name: DEBUG - value: "aztec:*" - name: LOG_LEVEL value: "debug" - name: L1_CHAIN_ID diff --git a/spartan/aztec-network/templates/prover-agent.yaml b/spartan/aztec-network/templates/prover-agent.yaml index ef080501868..c27adb96eeb 100644 --- a/spartan/aztec-network/templates/prover-agent.yaml +++ b/spartan/aztec-network/templates/prover-agent.yaml @@ -34,6 +34,7 @@ spec: serviceAccountName: {{ include "aztec-network.fullname" . }}-node {{- if .Values.network.public }} hostNetwork: true + dnsPolicy: ClusterFirstWithHostNet {{- end }} volumes: - name: config @@ -50,14 +51,15 @@ spec: - -c - | source /shared/config/service-addresses + cat /shared/config/service-addresses until curl -s -X POST ${PROVER_BROKER_HOST}/status; do echo "Waiting for broker ${PROVER_BROKER_HOST} ..." sleep 5 done echo "Broker is ready!" {{- if .Values.telemetry.enabled }} - until curl --head --silent {{ include "aztec-network.otelCollectorMetricsEndpoint" . }} > /dev/null; do - echo "Waiting for OpenTelemetry collector..." + until curl --head --silent $OTEL_COLLECTOR_ENDPOINT > /dev/null; do + echo "Waiting for OpenTelemetry collector $OTEL_COLLECTOR_ENDPOINT..." sleep 5 done echo "OpenTelemetry collector is ready!" @@ -85,10 +87,8 @@ spec: value: "{{ .Values.proverAgent.logLevel }}" - name: LOG_JSON value: "1" - - name: DEBUG - value: "{{ .Values.proverAgent.debug }}" - name: PROVER_REAL_PROOFS - value: "{{ .Values.proverAgent.realProofs }}" + value: "{{ .Values.aztec.realProofs }}" - name: PROVER_AGENT_COUNT value: "1" - name: PROVER_AGENT_POLL_INTERVAL_MS @@ -97,12 +97,6 @@ spec: value: {{ join "," .Values.proverAgent.proofTypes | quote }} - name: OTEL_RESOURCE_ATTRIBUTES value: service.name={{ .Release.Name }},service.namespace={{ .Release.Namespace }},service.version={{ .Chart.AppVersion }},environment={{ .Values.environment | default "production" }} - - name: OTEL_EXPORTER_OTLP_METRICS_ENDPOINT - value: {{ include "aztec-network.otelCollectorMetricsEndpoint" . | quote }} - - name: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT - value: {{ include "aztec-network.otelCollectorTracesEndpoint" . | quote }} - - name: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT - value: {{ include "aztec-network.otelCollectorLogsEndpoint" . | quote }} resources: {{- toYaml .Values.proverAgent.resources | nindent 12 }} {{- end }} diff --git a/spartan/aztec-network/templates/prover-broker.yaml b/spartan/aztec-network/templates/prover-broker.yaml index 214b6720fce..850366997b2 100644 --- a/spartan/aztec-network/templates/prover-broker.yaml +++ b/spartan/aztec-network/templates/prover-broker.yaml @@ -20,6 +20,7 @@ spec: serviceAccountName: {{ include "aztec-network.fullname" . }}-node {{- if .Values.network.public }} hostNetwork: true + dnsPolicy: ClusterFirstWithHostNet {{- end }} volumes: - name: config @@ -36,9 +37,10 @@ spec: - -c - | source /shared/config/service-addresses + cat /shared/config/service-addresses {{- if .Values.telemetry.enabled }} - until curl --head --silent {{ include "aztec-network.otelCollectorMetricsEndpoint" . }} > /dev/null; do - echo "Waiting for OpenTelemetry collector..." + until curl --head --silent $OTEL_COLLECTOR_ENDPOINT > /dev/null; do + echo "Waiting for OpenTelemetry collector $OTEL_COLLECTOR_ENDPOINT..." sleep 5 done echo "OpenTelemetry collector is ready!" 
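One caution on the retry loops introduced above: a `done || handler` tail, as in the deploy-l1-verifier job, never actually fires, because a `for` loop exits with the status of its last command, which here is `sleep` and always succeeds; this is why deploy-l1-contracts.sh checks `$output` explicitly instead. A standalone sketch of the intended pattern follows, with a hypothetical `retry` helper that is not part of the repo:

```bash
#!/bin/bash
# Sketch of a retry wrapper whose exit status reliably reflects success.
# `retry` is a hypothetical helper, not part of the repo.
retry() {
  local max_retries=$1 delay=$2
  shift 2
  local attempt
  for attempt in $(seq 1 "$max_retries"); do
    "$@" && return 0  # success: stop retrying
    echo "Attempt $attempt failed. Retrying in $delay seconds..." >&2
    sleep "$delay"
  done
  return 1  # every attempt failed; let the caller abort
}

# Usage, mirroring the deploy-l1-verifier job:
retry 5 60 node /usr/src/yarn-project/aztec/dest/bin/index.js \
  deploy-l1-verifier --verifier real \
  || { echo "All L1 verifier deploy attempts failed." >&2; exit 1; }
```

Because the helper's return value carries success or failure out of the loop, the `|| { ...; exit 1; }` at the call site fires exactly when every attempt has failed.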
@@ -66,8 +68,6 @@ spec: value: "{{ .Values.proverBroker.logLevel }}" - name: LOG_JSON value: "1" - - name: DEBUG - value: "{{ .Values.proverBroker.debug }}" - name: PROVER_BROKER_POLL_INTERVAL_MS value: "{{ .Values.proverBroker.pollIntervalMs }}" - name: PROVER_BROKER_JOB_TIMEOUT_MS @@ -78,15 +78,11 @@ spec: value: "{{ .Values.proverBroker.dataDirectory }}" - name: OTEL_RESOURCE_ATTRIBUTES value: service.name={{ .Release.Name }},service.namespace={{ .Release.Namespace }},service.version={{ .Chart.AppVersion }},environment={{ .Values.environment | default "production" }} - - name: OTEL_EXPORTER_OTLP_METRICS_ENDPOINT - value: {{ include "aztec-network.otelCollectorMetricsEndpoint" . | quote }} - - name: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT - value: {{ include "aztec-network.otelCollectorTracesEndpoint" . | quote }} - - name: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT - value: {{ include "aztec-network.otelCollectorLogsEndpoint" . | quote }} resources: {{- toYaml .Values.proverBroker.resources | nindent 12 }} +{{- end }} --- +# Headless service for StatefulSet DNS entries apiVersion: v1 kind: Service metadata: @@ -101,4 +97,3 @@ spec: ports: - port: {{ .Values.proverBroker.service.nodePort }} name: node -{{ end }} diff --git a/spartan/aztec-network/templates/prover-node.yaml b/spartan/aztec-network/templates/prover-node.yaml index bf13dad1821..44984a2fb2a 100644 --- a/spartan/aztec-network/templates/prover-node.yaml +++ b/spartan/aztec-network/templates/prover-node.yaml @@ -19,6 +19,7 @@ spec: spec: {{- if .Values.network.public }} hostNetwork: true + dnsPolicy: ClusterFirstWithHostNet {{- include "aztec-network.publicAntiAffinity" . | nindent 6 }} {{- end }} serviceAccountName: {{ include "aztec-network.fullname" . }}-node @@ -35,7 +36,7 @@ spec: until curl -s -X POST -H 'Content-Type: application/json' \ -d '{"jsonrpc":"2.0","method":"web3_clientVersion","params":[],"id":67}' \ ${ETHEREUM_HOST} | grep -q reth; do - echo "Waiting for Ethereum node..." + echo "Waiting for Ethereum node ${ETHEREUM_HOST}..." sleep 5 done echo "Ethereum node is ready!" @@ -51,8 +52,8 @@ spec: fi {{- if .Values.telemetry.enabled }} - until curl --head --silent {{ include "aztec-network.otelCollectorMetricsEndpoint" . }} > /dev/null; do - echo "Waiting for OpenTelemetry collector..." + until curl --head --silent $OTEL_COLLECTOR_ENDPOINT > /dev/null; do + echo "Waiting for OpenTelemetry collector $OTEL_COLLECTOR_ENDPOINT..." sleep 5 done echo "OpenTelemetry collector is ready!" @@ -121,7 +122,7 @@ spec: - name: DEBUG value: "{{ .Values.proverNode.debug }}" - name: PROVER_REAL_PROOFS - value: "{{ .Values.proverNode.realProofs }}" + value: "{{ .Values.aztec.realProofs }}" - name: PROVER_AGENT_COUNT value: "{{ .Values.proverNode.proverAgent.count }}" - name: PROVER_AGENT_POLL_INTERVAL_MS @@ -142,12 +143,6 @@ spec: value: "0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80" - name: OTEL_RESOURCE_ATTRIBUTES value: service.name={{ .Release.Name }},service.namespace={{ .Release.Namespace }},service.version={{ .Chart.AppVersion }},environment={{ .Values.environment | default "production" }} - - name: OTEL_EXPORTER_OTLP_METRICS_ENDPOINT - value: {{ include "aztec-network.otelCollectorMetricsEndpoint" . | quote }} - - name: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT - value: {{ include "aztec-network.otelCollectorTracesEndpoint" . | quote }} - - name: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT - value: {{ include "aztec-network.otelCollectorLogsEndpoint" . 
| quote }} - name: L1_CHAIN_ID value: "{{ .Values.ethereum.chainId }}" - name: P2P_ENABLED diff --git a/spartan/aztec-network/templates/pxe.yaml b/spartan/aztec-network/templates/pxe.yaml index 94a8a87886c..d61df752190 100644 --- a/spartan/aztec-network/templates/pxe.yaml +++ b/spartan/aztec-network/templates/pxe.yaml @@ -19,6 +19,7 @@ spec: serviceAccountName: {{ include "aztec-network.fullname" . }}-node {{- if .Values.network.public }} hostNetwork: true + dnsPolicy: ClusterFirstWithHostNet {{- end }} volumes: - name: config @@ -37,6 +38,7 @@ spec: - -c - | source /shared/config/service-addresses + cat /shared/config/service-addresses until curl --head --silent ${BOOT_NODE_HOST}/status; do echo "Waiting for boot node..." sleep 5 @@ -68,6 +70,7 @@ spec: - "-c" - | source /shared/config/service-addresses + cat /shared/config/service-addresses {{- if .Values.network.public }} # If the network is public, we need to use the boot node URL export AZTEC_NODE_URL=${BOOT_NODE_HOST} @@ -84,10 +87,8 @@ spec: value: "1" - name: LOG_LEVEL value: "{{ .Values.pxe.logLevel }}" - - name: DEBUG - value: "{{ .Values.pxe.debug }}" - name: PXE_PROVER_ENABLED - value: "{{ .Values.pxe.proverEnabled }}" + value: "{{ .Values.aztec.realProofs }}" ports: - name: http containerPort: {{ .Values.pxe.service.nodePort }} diff --git a/spartan/aztec-network/templates/rbac.yaml b/spartan/aztec-network/templates/rbac.yaml index a0e8e68cd11..94f143f619e 100644 --- a/spartan/aztec-network/templates/rbac.yaml +++ b/spartan/aztec-network/templates/rbac.yaml @@ -55,4 +55,4 @@ roleRef: subjects: - kind: ServiceAccount name: {{ include "aztec-network.fullname" . }}-node - namespace: {{ .Release.Namespace }} + namespace: {{ .Release.Namespace }} \ No newline at end of file diff --git a/spartan/aztec-network/templates/reth.yaml b/spartan/aztec-network/templates/reth.yaml index d6230ecf0ad..8a3e28728bf 100644 --- a/spartan/aztec-network/templates/reth.yaml +++ b/spartan/aztec-network/templates/reth.yaml @@ -19,6 +19,43 @@ spec: {{- if .Values.network.public }} hostNetwork: true {{- end }} + initContainers: + - name: prepare-genesis + image: node:18-alpine + command: ["/bin/sh", "-c"] + args: + - | + cd /tmp + npm init -y + npm install ethers@6 + cat > derive.js << 'EOF' + const { ethers } = require('ethers'); + const fs = require('fs'); + + async function main() { + const mnemonic = process.env.DEPLOYMENT_MNEMONIC; + const wallet = ethers.Wallet.fromPhrase(mnemonic); + + const genesis = JSON.parse(fs.readFileSync('/genesis-template/genesis.json', 'utf8')); + + genesis.alloc[wallet.address] = { + balance: '0x3635c9adc5dea00000' // 1000 ETH in wei + }; + + fs.writeFileSync('/genesis-output/genesis.json', JSON.stringify(genesis, null, 2)); + } + + main().catch(console.error); + EOF + node derive.js + env: + - name: DEPLOYMENT_MNEMONIC + value: {{ .Values.aztec.l1DeploymentMnemonic }} + volumeMounts: + - name: genesis-template + mountPath: /genesis-template + - name: genesis-output + mountPath: /genesis-output containers: - name: ethereum image: "{{ .Values.images.reth.image }}" @@ -40,7 +77,7 @@ spec: volumeMounts: - name: shared-volume mountPath: /data - - name: genesis + - name: genesis-output mountPath: /genesis resources: {{- toYaml .Values.ethereum.resources | nindent 12 }} @@ -48,9 +85,11 @@ spec: - name: shared-volume persistentVolumeClaim: claimName: {{ include "aztec-network.fullname" . }}-ethereum-pvc - - name: genesis + - name: genesis-template configMap: name: {{ include "aztec-network.fullname" . 
}}-reth-genesis + - name: genesis-output + emptyDir: {} {{if not .Values.network.public }} --- apiVersion: v1 diff --git a/spartan/aztec-network/templates/setup-l2-contracts.yaml b/spartan/aztec-network/templates/setup-l2-contracts.yaml index 56cf8fc57f2..8afc65abb66 100644 --- a/spartan/aztec-network/templates/setup-l2-contracts.yaml +++ b/spartan/aztec-network/templates/setup-l2-contracts.yaml @@ -48,12 +48,13 @@ spec: chmod +x /tmp/setup-service-addresses.sh /tmp/setup-service-addresses.sh source /shared/config/service-addresses + cat /shared/config/service-addresses export AZTEC_NODE_URL=$BOOT_NODE_HOST export PXE_URL=$BOOT_NODE_HOST until curl -s -X POST -H 'content-type: application/json' \ -d '{"jsonrpc":"2.0","method":"pxe_getNodeInfo","params":[],"id":67}' \ $PXE_URL | grep -q '"enr:-'; do - echo "Waiting for PXE service..." + echo "Waiting for PXE service at ${PXE_URL}..." sleep 5 done echo "PXE service is ready!" @@ -61,8 +62,8 @@ spec: node --no-warnings /usr/src/yarn-project/aztec/dest/bin/index.js setup-protocol-contracts --skipProofWait --l1-chain-id {{ .Values.ethereum.chainId }} echo "L2 contracts initialized" env: - - name: DEBUG - value: "aztec:*" + - name: TELEMETRY + value: "{{ .Values.telemetry.enabled }}" - name: LOG_LEVEL value: "debug" - name: NETWORK_PUBLIC diff --git a/spartan/aztec-network/templates/transaction-bot.yaml b/spartan/aztec-network/templates/transaction-bot.yaml index cd5b88a13bd..06c6ce7048b 100644 --- a/spartan/aztec-network/templates/transaction-bot.yaml +++ b/spartan/aztec-network/templates/transaction-bot.yaml @@ -38,6 +38,7 @@ spec: - -c - | source /shared/config/service-addresses + cat /shared/config/service-addresses {{- if .Values.bot.nodeUrl }} export AZTEC_NODE_URL={{ .Values.bot.nodeUrl }} {{- else if .Values.network.public }} @@ -63,6 +64,7 @@ spec: - "-c" - | source /shared/config/service-addresses + cat /shared/config/service-addresses {{- if .Values.bot.nodeUrl }} export AZTEC_NODE_URL={{ .Values.bot.nodeUrl }} {{- else if .Values.network.public }} @@ -79,8 +81,6 @@ spec: value: "1" - name: LOG_LEVEL value: "{{ .Values.bot.logLevel }}" - - name: DEBUG - value: "{{ .Values.bot.debug }}" - name: BOT_PRIVATE_KEY value: "{{ .Values.bot.botPrivateKey }}" - name: BOT_TX_INTERVAL_SECONDS @@ -94,9 +94,9 @@ spec: - name: BOT_NO_START value: "{{ .Values.bot.botNoStart }}" - name: PXE_PROVER_ENABLED - value: "{{ .Values.bot.pxeProverEnabled }}" + value: "{{ .Values.aztec.realProofs }}" - name: PROVER_REAL_PROOFS - value: "{{ .Values.bot.proverRealProofs }}" + value: "{{ .Values.aztec.realProofs }}" - name: BOT_MAX_CONSECUTIVE_ERRORS value: "{{ .Values.bot.maxErrors }}" - name: BOT_STOP_WHEN_UNHEALTHY diff --git a/spartan/aztec-network/templates/validator.yaml b/spartan/aztec-network/templates/validator.yaml index 6f8aba191b2..1faa6823076 100644 --- a/spartan/aztec-network/templates/validator.yaml +++ b/spartan/aztec-network/templates/validator.yaml @@ -20,6 +20,7 @@ spec: spec: {{- if .Values.network.public }} hostNetwork: true + dnsPolicy: ClusterFirstWithHostNet {{- include "aztec-network.publicAntiAffinity" . | nindent 6 }} {{- end }} serviceAccountName: {{ include "aztec-network.fullname" . }}-node @@ -33,18 +34,19 @@ spec: - -c - | source /shared/config/service-addresses + cat /shared/config/service-addresses # First check ethereum node until curl -s -X POST -H 'Content-Type: application/json' \ -d '{"jsonrpc":"2.0","method":"web3_clientVersion","params":[],"id":67}' \ $ETHEREUM_HOST | grep -q reth; do - echo "Waiting for Ethereum node..." 
+ echo "Waiting for Ethereum node ${ETHEREUM_HOST}..." sleep 5 done echo "Ethereum node is ready!" {{- if .Values.telemetry.enabled }} - until curl --head --silent {{ include "aztec-network.otelCollectorMetricsEndpoint" . }} > /dev/null; do - echo "Waiting for OpenTelemetry collector..." + until curl --head --silent $OTEL_COLLECTOR_ENDPOINT > /dev/null; do + echo "Waiting for OpenTelemetry collector $OTEL_COLLECTOR_ENDPOINT..." sleep 5 done echo "OpenTelemetry collector is ready!" @@ -165,12 +167,6 @@ spec: value: "{{ .Values.ethereum.chainId }}" - name: OTEL_RESOURCE_ATTRIBUTES value: service.name={{ .Release.Name }},service.namespace={{ .Release.Namespace }},service.version={{ .Chart.AppVersion }},environment={{ .Values.environment | default "production" }} - - name: OTEL_EXPORTER_OTLP_METRICS_ENDPOINT - value: {{ include "aztec-network.otelCollectorMetricsEndpoint" . | quote }} - - name: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT - value: {{ include "aztec-network.otelCollectorTracesEndpoint" . | quote }} - - name: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT - value: {{ include "aztec-network.otelCollectorLogsEndpoint" . | quote }} - name: ETHEREUM_SLOT_DURATION value: "{{ .Values.ethereum.blockTime }}" - name: AZTEC_SLOT_DURATION diff --git a/spartan/aztec-network/values.yaml b/spartan/aztec-network/values.yaml index 005d3061137..a9596657a6f 100644 --- a/spartan/aztec-network/values.yaml +++ b/spartan/aztec-network/values.yaml @@ -33,6 +33,8 @@ aztec: slotDuration: 24 # in seconds, aka L2 slot duration. Must be a multiple of {{ ethereum.blockTime }} epochDuration: 16 # how many L2 slots in an epoch epochProofClaimWindow: 13 # in L2 slots + realProofs: false + l1DeploymentMnemonic: "test test test test test test test test test test test junk" # the mnemonic used when deploying contracts bootNode: peerIdPrivateKey: "" @@ -43,12 +45,11 @@ bootNode: p2pUdpPort: 40400 nodePort: 8080 logLevel: "debug" - debug: "aztec:*,-aztec:avm_simulator*,-aztec:libp2p_service*,-aztec:circuits:artifact_hash,-json-rpc*,-aztec:world-state:database,-aztec:l2_block_stream*" + debug: "" coinbaseAddress: "0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" sequencer: maxSecondsBetweenBlocks: 0 minTxsPerBlock: 1 - realProofs: false validator: disabled: true p2p: @@ -87,7 +88,7 @@ validator: p2pUdpPort: 40400 nodePort: 8080 logLevel: "debug" - debug: "aztec:*,-aztec:avm_simulator*,-aztec:libp2p_service*,-aztec:circuits:artifact_hash,-json-rpc*,-aztec:world-state:database,-aztec:l2_block_stream*" + debug: "" sequencer: maxSecondsBetweenBlocks: 0 minTxsPerBlock: 1 @@ -117,8 +118,7 @@ proverNode: p2pUdpPort: 40400 nodePort: 8080 logLevel: "debug" - debug: "aztec:*,-aztec:avm_simulator*,-aztec:libp2p_service*,-aztec:circuits:artifact_hash,-json-rpc*,-aztec:world-state:database,-aztec:l2_block_stream*" - realProofs: false + debug: "" proverAgent: count: 0 pollIntervalMs: 1000 @@ -136,10 +136,7 @@ proverNode: storage: "8Gi" pxe: - proverEnabled: false logLevel: "debug" - proverEnable: false - debug: "aztec:*,-aztec:avm_simulator*,-aztec:libp2p_service*,-aztec:circuits:artifact_hash,-json-rpc*,-aztec:world-state:database,-aztec:l2_block_stream*" replicas: 1 service: nodePort: 8081 @@ -158,7 +155,6 @@ bot: enabled: true nodeUrl: "" logLevel: "debug" - debug: "aztec:*,-aztec:avm_simulator*,-aztec:libp2p_service*,-aztec:circuits:artifact_hash,-json-rpc*,-aztec:l2_block_stream,-aztec:world-state:database" replicas: 1 botPrivateKey: "0xcafe" txIntervalSeconds: 24 @@ -167,8 +163,6 @@ bot: # Do not wait for transactions followChain: "NONE" botNoStart: 
false - pxeProverEnabled: false - proverRealProofs: false maxErrors: 3 stopIfUnhealthy: true service: @@ -221,8 +215,6 @@ proverAgent: gke: spotEnabled: false logLevel: "debug" - debug: "aztec:*,-aztec:avm_simulator*,-aztec:libp2p_service*,-aztec:circuits:artifact_hash,-json-rpc*,-aztec:world-state:database,-aztec:l2_block_stream*" - realProofs: false bb: hardwareConcurrency: "" nodeSelector: {} @@ -238,7 +230,6 @@ proverBroker: jobMaxRetries: 3 dataDirectory: "" logLevel: "debug" - debug: "aztec:*,-aztec:avm_simulator*,-aztec:libp2p_service*,-aztec:circuits:artifact_hash,-json-rpc*,-aztec:world-state:database,-aztec:l2_block_stream*" nodeSelector: {} resources: {} diff --git a/spartan/aztec-network/values/1-validator-with-proving-and-metrics.yaml b/spartan/aztec-network/values/1-validator-with-proving-and-metrics.yaml index 2a10cd3473f..43814e98963 100644 --- a/spartan/aztec-network/values/1-validator-with-proving-and-metrics.yaml +++ b/spartan/aztec-network/values/1-validator-with-proving-and-metrics.yaml @@ -1,3 +1,8 @@ +aztec: + realProofs: true + slotDuration: 36 + epochDuration: 32 + validator: replicas: 1 validatorKeys: @@ -11,12 +16,8 @@ bootNode: validator: disabled: true -proverNode: - realProofs: true - proverAgent: replicas: 6 - realProofs: true bb: hardwareConcurrency: 16 resources: @@ -27,22 +28,13 @@ proverAgent: memory: "96Gi" cpu: "16" -pxe: - proverEnabled: true - bot: enabled: true - pxeProverEnabled: true txIntervalSeconds: 200 jobs: deployL1Verifier: enable: true -aztec: - slotDuration: 36 - epochDuration: 32 - telemetry: - enabled: true - otelCollectorEndpoint: http://metrics-opentelemetry-collector.metrics:4318 + enabled: true \ No newline at end of file diff --git a/spartan/aztec-network/values/1-validator-with-proving.yaml b/spartan/aztec-network/values/1-validator-with-proving.yaml index 07f064561f5..caa0d4e5a15 100644 --- a/spartan/aztec-network/values/1-validator-with-proving.yaml +++ b/spartan/aztec-network/values/1-validator-with-proving.yaml @@ -1,3 +1,8 @@ +aztec: + realProofs: true + slotDuration: 36 + epochDuration: 32 + validator: replicas: 1 validatorKeys: @@ -11,12 +16,8 @@ bootNode: validator: disabled: true -proverNode: - realProofs: true - proverAgent: replicas: 6 - realProofs: true bb: hardwareConcurrency: 16 resources: @@ -27,18 +28,10 @@ proverAgent: memory: "96Gi" cpu: "16" -pxe: - proverEnabled: true - bot: enabled: true - pxeProverEnabled: true txIntervalSeconds: 200 jobs: deployL1Verifier: enable: true - -aztec: - slotDuration: 36 - epochDuration: 32 diff --git a/spartan/aztec-network/values/16-validators-with-metrics.yaml b/spartan/aztec-network/values/16-validators-with-metrics.yaml index 8bc8f2c115c..454ec8c2839 100644 --- a/spartan/aztec-network/values/16-validators-with-metrics.yaml +++ b/spartan/aztec-network/values/16-validators-with-metrics.yaml @@ -6,7 +6,6 @@ # (then `./forward.sh` if you want to see it) telemetry: enabled: true - otelCollectorEndpoint: http://metrics-opentelemetry-collector.metrics:4318 validator: replicas: 16 diff --git a/spartan/aztec-network/values/3-validators-with-metrics.yaml b/spartan/aztec-network/values/3-validators-with-metrics.yaml index b20b34b5194..c3a57e25228 100644 --- a/spartan/aztec-network/values/3-validators-with-metrics.yaml +++ b/spartan/aztec-network/values/3-validators-with-metrics.yaml @@ -6,7 +6,6 @@ # (then `./forward.sh` if you want to see it) telemetry: enabled: true - otelCollectorEndpoint: http://metrics-opentelemetry-collector.metrics:4318 validator: replicas: 3 diff --git 
a/spartan/aztec-network/values/4-validators-with-metrics.yaml b/spartan/aztec-network/values/4-validators-with-metrics.yaml index 47387cd89c1..6f59aa62708 100644 --- a/spartan/aztec-network/values/4-validators-with-metrics.yaml +++ b/spartan/aztec-network/values/4-validators-with-metrics.yaml @@ -6,7 +6,6 @@ # (then `./forward.sh` if you want to see it) telemetry: enabled: true - otelCollectorEndpoint: http://metrics-opentelemetry-collector.metrics:4318 validator: replicas: 4 diff --git a/spartan/aztec-network/values/48-validators.yaml b/spartan/aztec-network/values/48-validators.yaml index 31d48095681..4659655e4d2 100644 --- a/spartan/aztec-network/values/48-validators.yaml +++ b/spartan/aztec-network/values/48-validators.yaml @@ -6,7 +6,6 @@ # (then `./forward.sh` if you want to see it) telemetry: enabled: true - otelCollectorEndpoint: http://metrics-opentelemetry-collector.metrics:4318 validator: replicas: 48 diff --git a/spartan/aztec-network/values/gcp-proving-test.yaml b/spartan/aztec-network/values/gcp-proving-test.yaml index 6a361ecd025..546ffc61f4c 100644 --- a/spartan/aztec-network/values/gcp-proving-test.yaml +++ b/spartan/aztec-network/values/gcp-proving-test.yaml @@ -1,6 +1,5 @@ telemetry: enabled: true - otelCollectorEndpoint: http://metrics-opentelemetry-collector.metrics:4318 validator: replicas: 1 @@ -23,7 +22,6 @@ validator: storage: "128Gi" bootNode: - realProofs: true validator: disabled: true resources: @@ -33,7 +31,6 @@ bootNode: storage: "128Gi" proverNode: - realProofs: true resources: requests: memory: "8Gi" @@ -42,7 +39,6 @@ proverNode: proverAgent: replicas: 120 - realProofs: true bb: hardwareConcurrency: 31 resources: @@ -50,15 +46,10 @@ proverAgent: memory: "116Gi" cpu: "31" -pxe: - proverEnabled: true - bot: replicas: 16 enabled: true - pxeProverEnabled: true txIntervalSeconds: 1 - proverRealProofs: true botPrivateKey: "" privateTransfersPerTx: 1 publicTransfersPerTx: 1 @@ -80,3 +71,4 @@ fullnameOverride: sp aztec: slotDuration: 36 epochDuration: 32 + realProofs: true diff --git a/spartan/aztec-network/values/multicloud-demo.yaml b/spartan/aztec-network/values/multicloud-demo.yaml index 6ba49557253..f408059d69e 100644 --- a/spartan/aztec-network/values/multicloud-demo.yaml +++ b/spartan/aztec-network/values/multicloud-demo.yaml @@ -2,7 +2,6 @@ telemetry: enabled: false - otelCollectorEndpoint: http://metrics-opentelemetry-collector.metrics:4318 validator: replicas: 1 @@ -17,12 +16,8 @@ bootNode: validator: disabled: true -proverNode: - realProofs: false - proverAgent: replicas: 1 - realProofs: false bb: hardwareConcurrency: 16 diff --git a/spartan/aztec-network/values/prover-node-with-agents.yaml b/spartan/aztec-network/values/prover-node-with-agents.yaml index 4a58f419958..2f1e1454325 100644 --- a/spartan/aztec-network/values/prover-node-with-agents.yaml +++ b/spartan/aztec-network/values/prover-node-with-agents.yaml @@ -6,32 +6,25 @@ # (then `./forward.sh` if you want to see it) telemetry: enabled: true - otelCollectorEndpoint: http://metrics-opentelemetry-collector.metrics:4318 validator: external: true bootNode: - debug: "aztec:*,-aztec:avm_simulator:*,-aztec:world-state:database,discv5:*,-JsonProxy:*" + debug: "discv5:*" validator: disabled: true -proverNode: - realProofs: false - proverAgent: replicas: 1 - realProofs: false bb: hardwareConcurrency: 16 pxe: external: false - proverEnabled: true bot: enabled: false - pxeProverEnabled: false txIntervalSeconds: 200 jobs: diff --git a/spartan/aztec-network/values/release-devnet.yaml 
b/spartan/aztec-network/values/release-devnet.yaml new file mode 100644 index 00000000000..485e6462aeb --- /dev/null +++ b/spartan/aztec-network/values/release-devnet.yaml @@ -0,0 +1,50 @@ +########## +# BEWARE # +########## +# You need to deploy the metrics helm chart before using this values file. +# head to spartan/metrics and run `./install.sh` +# (then `./forward.sh` if you want to see it) +telemetry: + enabled: true + +validator: + replicas: 1 + validatorKeys: + - 0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80 + validatorAddresses: + - 0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266 + validator: + disabled: false + +bootNode: + validator: + disabled: true + +# use small provers to produce fake proofs +proverAgent: + replicas: 1 + resources: + requests: + memory: "4Gi" + cpu: "1" + +bot: + followChain: "PENDING" + enabled: true + txIntervalSeconds: 200 + +network: + public: true + +images: + aztec: + pullPolicy: Always + +aztec: + slotDuration: 36 + epochDuration: 32 + realProofs: false # devnet does not use real proofs + +jobs: + deployL1Verifier: + enable: false 
diff --git a/spartan/aztec-network/values/release.yaml b/spartan/aztec-network/values/release.yaml index b48f9cf2640..2f18e02fc51 100644 --- a/spartan/aztec-network/values/release.yaml +++ b/spartan/aztec-network/values/release.yaml @@ -1,6 +1,11 @@ network: public: true +aztec: + slotDuration: 36 + epochDuration: 32 + realProofs: true + images: aztec: pullPolicy: Always @@ -10,7 +15,6 @@ telemetry: otelCollectorEndpoint: http://34.150.160.154:4318 validator: - realProofs: true replicas: 48 validatorKeys: - 0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80 @@ -118,17 +122,12 @@ validator: disabled: false bootNode: - realProofs: true peerIdPrivateKey: 080212200ba8451c6d62b03c4441f0a466c0bce7a3a595f2cf50a055ded3305c77aa3af0 validator: disabled: true -proverNode: - realProofs: true - proverAgent: replicas: 4 - realProofs: true bb: hardwareConcurrency: 16 gke: @@ -141,19 +140,11 @@ proverAgent: memory: "96Gi" cpu: "16" -pxe: - proverEnabled: true - bot: followChain: "PENDING" enabled: true - pxeProverEnabled: true txIntervalSeconds: 200 jobs: deployL1Verifier: enable: true - -aztec: - slotDuration: 36 - epochDuration: 32 
diff --git a/spartan/releases/README.md b/spartan/releases/README.md index 527762ae112..7ab06050674 100644 --- a/spartan/releases/README.md +++ b/spartan/releases/README.md @@ -28,9 +28,13 @@ This will install `aztec-spartan.sh` in the current directory. You can now run i ``` ./aztec-spartan.sh config ``` -If you don't have Docker installed, the script will do it for you. It will then prompt for any required environment variables and output a `docker-compose.yml` file. +If you don't have Docker installed, the script will do it for you. It will then prompt for any required environment variables and output both a `docker-compose.yml` and an `.env` file. -You can run the command without any command to see all available options, and pass them as flags, i.e. `npx aztec-spartan config -p 8080 -p2p 40400 -n nameme`. +You will also be prompted to choose whether to use a [named volume](https://docs.docker.com/engine/storage/volumes/) (the default) or a local directory to store the node's data. + +Run `./aztec-spartan.sh` without any command to see all available options, and pass them as flags, e.g. `npx aztec-spartan config -p 8080 -p2p 40400`. + +If you want to use a different private key for the p2p peer id, pass it with `-pk <private-key>`. 
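+For example, a fully non-interactive configuration that combines these options might look like the following (all values are illustrative placeholders):

```
./aztec-spartan.sh config \
  -p 8080 \
  -p2p 40400 \
  -ip 1.2.3.4 \
  -k 0xYourValidatorPrivateKey \
  -d ./aztec-data \
  -pk 0xYourPeerIdPrivateKey
```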
## Running diff --git a/spartan/releases/rough-rhino/Earthfile b/spartan/releases/rough-rhino/Earthfile index 53e1f6365a7..a81d2453302 100644 --- a/spartan/releases/rough-rhino/Earthfile +++ b/spartan/releases/rough-rhino/Earthfile @@ -43,14 +43,13 @@ test-install: -p 8080 \ -p2p 40400 \ -ip 1.2.3.4 \ - -k 0x00 \ - -n test-validator + -k 0x00 # Verify docker-compose.yml was created and contains correct values - RUN test -f docker-compose.yml && \ - grep -q "name: test-validator" docker-compose.yml && \ - grep -q "P2P_UDP_ANNOUNCE_ADDR=1.2.3.4:40400" docker-compose.yml && \ - grep -q "AZTEC_PORT=8080" docker-compose.yml && \ - grep -q "VALIDATOR_PRIVATE_KEY=0x00" docker-compose.yml && \ + RUN test -f .env && \ + test -f docker-compose.yml && \ + grep -q "P2P_UDP_ANNOUNCE_ADDR=1.2.3.4:40400" .env && \ + grep -q "AZTEC_PORT=8080" .env && \ + grep -q "VALIDATOR_PRIVATE_KEY=0x00" .env && \ echo "✅ Config test passed" || \ (echo "❌ Config test failed" && exit 1) @@ -70,12 +69,11 @@ test-docker-check: test-start-stop: FROM +test-setup # First install with test configuration - RUN echo -e "\n\n" | ./aztec-spartan.sh config \ + RUN echo -e "\n" | ./aztec-spartan.sh config \ -p 8080 \ -p2p 40400 \ -ip 1.2.3.4 \ - -k 0x00 \ - -n test-validator + -k 0x00 # Test start command RUN ./aztec-spartan.sh start 2>&1 | grep -q "Starting containers" && \ echo "✅ Start command test passed" || \ @@ -91,6 +89,37 @@ test-update: echo "✅ Update command test passed" || \ (echo "❌ Update command test failed" && exit 1) +test-data-dir: + FROM +test-setup + # Test installation with data directory argument + RUN echo -e "\n\n" | ./aztec-spartan.sh config \ + -p 8080 \ + -p2p 40400 \ + -ip 1.2.3.4 \ + -k 0x00 \ + -d ./aztec-data + # Verify docker-compose.yml uses bind mount instead of named volume + RUN grep -q "volumes:" docker-compose.yml && \ + grep -q "./aztec-data:/var/lib/aztec" docker-compose.yml && \ + ! 
grep -q "volumes:\n aztec_data:" docker-compose.yml && \ + echo "✅ Data directory test passed" || \ + (echo "❌ Data directory test failed" && exit 1) + +test-p2p-key: + FROM +test-setup + # Test installation with P2P private key argument + RUN echo -e "\n\n" | ./aztec-spartan.sh config \ + -p 8080 \ + -p2p 40400 \ + -ip 1.2.3.4 \ + -k 0x00 \ + -pk 00000 + # Verify the P2P private key was set in the .env file + RUN test -f .env && \ + grep -q "PEER_ID_PRIVATE_KEY=00000" .env && \ + echo "✅ P2P private key test passed" || \ + (echo "❌ P2P private key test failed" && exit 1) + test-all: BUILD +test-help BUILD +test-no-config @@ -98,4 +127,6 @@ test-all: BUILD +test-docker-check BUILD +test-start-stop BUILD +test-update + BUILD +test-data-dir + BUILD +test-p2p-key diff --git a/spartan/releases/rough-rhino/aztec-spartan.sh b/spartan/releases/rough-rhino/aztec-spartan.sh index 5198a7bf78c..cb1823507a7 100755 --- a/spartan/releases/rough-rhino/aztec-spartan.sh +++ b/spartan/releases/rough-rhino/aztec-spartan.sh @@ -14,7 +14,7 @@ DEFAULT_PORT="8080" DEFAULT_KEY="0x0000000000000000000000000000000000000000000000000000000000000001" # Try to get default IP from ipify API, otherwise leave empty to require user input DEFAULT_IP=$(curl -s --connect-timeout 5 https://api.ipify.org?format=json | grep -o '"ip":"[^"]*' | cut -d'"' -f4 || echo "") -DEFAULT_NAME="validator-1" +DEFAULT_BIND_MOUNT_DIR="$HOME/aztec-data" # Parse command line arguments parse_args() { @@ -36,8 +36,12 @@ parse_args() { CLI_KEY="$2" shift 2 ;; - -n|--name) - CLI_NAME="$2" + -d|--data-dir) + BIND_MOUNT_DIR="$2" + shift 2 + ;; + -pk|--p2p-id-private-key) + PEER_ID_PRIVATE_KEY="$2" shift 2 ;; *) @@ -110,14 +114,6 @@ configure_environment() { echo -e "${BLUE}Configuring environment...${NC}" - # Use CLI arguments if provided, otherwise use defaults or prompt - if [ -n "$CLI_NAME" ]; then - NAME="$CLI_NAME" - else - read -p "Validator Name [$DEFAULT_NAME]: " NAME - NAME=${NAME:-$DEFAULT_NAME} - fi - if [ -n "$CLI_P2P_PORT" ]; then P2P_PORT="$CLI_P2P_PORT" else @@ -163,50 +159,89 @@ configure_environment() { fi fi + if [ -n "$BIND_MOUNT_DIR" ]; then + BIND_MOUNT_DIR="$BIND_MOUNT_DIR" + else + read -p "Use docker volume for data directory? 
[Y/n] " -n 1 -r + echo + if [[ $REPLY =~ ^[Nn]$ ]]; then + read -p "Relative path for data directory [${DEFAULT_BIND_MOUNT_DIR}]: " BIND_MOUNT_DIR + BIND_MOUNT_DIR=${BIND_MOUNT_DIR:-$DEFAULT_BIND_MOUNT_DIR} + fi + fi + + + # Generate .env file + cat > .env << EOF +P2P_UDP_ANNOUNCE_ADDR=${IP}:${P2P_PORT} +P2P_TCP_ANNOUNCE_ADDR=${IP}:${P2P_PORT} +COINBASE=0xbaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +VALIDATOR_DISABLED=false +VALIDATOR_PRIVATE_KEY=${KEY} +SEQ_PUBLISHER_PRIVATE_KEY=${KEY} +L1_PRIVATE_KEY=${KEY} +DEBUG=aztec:*,-aztec:avm_simulator*,-aztec:circuits:artifact_hash,-aztec:libp2p_service,-json-rpc*,-aztec:world-state:database,-aztec:l2_block_stream* +LOG_LEVEL=debug +AZTEC_PORT=${PORT} +P2P_ENABLED=true +L1_CHAIN_ID=1337 +PROVER_REAL_PROOFS=true +PXE_PROVER_ENABLED=true +ETHEREUM_SLOT_DURATION=12sec +AZTEC_SLOT_DURATION=36 +AZTEC_EPOCH_DURATION=32 +AZTEC_EPOCH_PROOF_CLAIM_WINDOW_IN_L2_SLOTS=13 +ETHEREUM_HOST=http://34.48.76.131:8545 +BOOTSTRAP_NODES=enr:-Jq4QO_3szmgtG2cbEdnFDIhpGAQkc1HwfNy4-M6sG9QmQbPTmp9PMOHR3xslfR23hORiU-GpA7uM9uXw49lFcnuuvYGjWF6dGVjX25ldHdvcmsBgmlkgnY0gmlwhCIwTIOJc2VjcDI1NmsxoQKQTN17XKCwjYSSwmTc-6YzCMhd3v6Ofl8TS-WunX6LCoN0Y3CCndCDdWRwgp3Q +REGISTRY_CONTRACT_ADDRESS=0x5fbdb2315678afecb367f032d93f642f64180aa3 +GOVERNANCE_PROPOSER_CONTRACT_ADDRESS=0x9fe46736679d2d9a65f0992f2272de9f3c7fa6e0 +FEE_JUICE_CONTRACT_ADDRESS=0xe7f1725e7734ce288f8367e1bb143e90bb3f0512 +ROLLUP_CONTRACT_ADDRESS=0x2279b7a0a67db372996a5fab50d91eaa73d2ebe6 +REWARD_DISTRIBUTOR_CONTRACT_ADDRESS=0x5fc8d32690cc91d4c39d9d3abcbd16989f875707 +GOVERNANCE_CONTRACT_ADDRESS=0xcf7ed3acca5a467e9e704c703e8d87f634fb0fc9 +COIN_ISSUER_CONTRACT_ADDRESS=0xdc64a140aa3e981100a9beca4e685f962f0cf6c9 +FEE_JUICE_PORTAL_CONTRACT_ADDRESS=0x0165878a594ca255338adfa4d48449f69242eb8f +INBOX_CONTRACT_ADDRESS=0xed179b78d5781f93eb169730d8ad1be7313123f4 +OUTBOX_CONTRACT_ADDRESS=0x1016b5aaa3270a65c315c664ecb238b6db270b64 +P2P_UDP_LISTEN_ADDR=0.0.0.0:${P2P_PORT} +P2P_TCP_LISTEN_ADDR=0.0.0.0:${P2P_PORT} +DATA_DIRECTORY=/var/lib/aztec +PEER_ID_PRIVATE_KEY=${PEER_ID_PRIVATE_KEY} +EOF + # Generate docker-compose.yml cat > docker-compose.yml << EOF -name: ${NAME} services: validator: network_mode: host restart: unless-stopped - environment: - - P2P_UDP_ANNOUNCE_ADDR=${IP}:${P2P_PORT} - - P2P_TCP_ANNOUNCE_ADDR=${IP}:${P2P_PORT} - - COINBASE=0xbaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa - - VALIDATOR_DISABLED=false - - VALIDATOR_PRIVATE_KEY=${KEY} - - SEQ_PUBLISHER_PRIVATE_KEY=${KEY} - - L1_PRIVATE_KEY=${KEY} - - DEBUG=aztec:*,-aztec:avm_simulator*,-aztec:circuits:artifact_hash,-aztec:libp2p_service,-json-rpc*,-aztec:world-state:database,-aztec:l2_block_stream* - - LOG_LEVEL=debug - - AZTEC_PORT=${PORT} - - P2P_ENABLED=true - - L1_CHAIN_ID=1337 - - PROVER_REAL_PROOFS=true - - PXE_PROVER_ENABLED=true - - ETHEREUM_SLOT_DURATION=12sec - - AZTEC_SLOT_DURATION=36 - - AZTEC_EPOCH_DURATION=32 - - AZTEC_EPOCH_PROOF_CLAIM_WINDOW_IN_L2_SLOTS=13 - - ETHEREUM_HOST=http://34.48.76.131:8545 - - BOOTSTRAP_NODES=enr:-Jq4QO_3szmgtG2cbEdnFDIhpGAQkc1HwfNy4-M6sG9QmQbPTmp9PMOHR3xslfR23hORiU-GpA7uM9uXw49lFcnuuvYGjWF6dGVjX25ldHdvcmsBgmlkgnY0gmlwhCIwTIOJc2VjcDI1NmsxoQKQTN17XKCwjYSSwmTc-6YzCMhd3v6Ofl8TS-WunX6LCoN0Y3CCndCDdWRwgp3Q - - REGISTRY_CONTRACT_ADDRESS=0x5fbdb2315678afecb367f032d93f642f64180aa3 - - GOVERNANCE_PROPOSER_CONTRACT_ADDRESS=0x9fe46736679d2d9a65f0992f2272de9f3c7fa6e0 - - FEE_JUICE_CONTRACT_ADDRESS=0xe7f1725e7734ce288f8367e1bb143e90bb3f0512 - - ROLLUP_CONTRACT_ADDRESS=0x2279b7a0a67db372996a5fab50d91eaa73d2ebe6 - - 
REWARD_DISTRIBUTOR_CONTRACT_ADDRESS=0x5fc8d32690cc91d4c39d9d3abcbd16989f875707 - - GOVERNANCE_CONTRACT_ADDRESS=0xcf7ed3acca5a467e9e704c703e8d87f634fb0fc9 - - COIN_ISSUER_CONTRACT_ADDRESS=0xdc64a140aa3e981100a9beca4e685f962f0cf6c9 - - FEE_JUICE_PORTAL_CONTRACT_ADDRESS=0x0165878a594ca255338adfa4d48449f69242eb8f - - INBOX_CONTRACT_ADDRESS=0xed179b78d5781f93eb169730d8ad1be7313123f4 - - OUTBOX_CONTRACT_ADDRESS=0x1016b5aaa3270a65c315c664ecb238b6db270b64 - - P2P_UDP_LISTEN_ADDR=0.0.0.0:${P2P_PORT} - - P2P_TCP_LISTEN_ADDR=0.0.0.0:${P2P_PORT} + env_file: .env image: aztecprotocol/aztec:698cd3d62680629a3f1bfc0f82604534cedbccf3-${ARCH} - command: start --node --archiver --sequencer + entrypoint: > + sh -c ' + + test -z "\$PEER_ID_PRIVATE_KEY" -a ! -f /var/lib/aztec/p2p-private-key && node /usr/src/yarn-project/aztec/dest/bin/index.js generate-p2p-private-key | head -1 | cut -d" " -f 3 | tee /var/lib/aztec/p2p-private-key || echo "Re-using existing P2P private key" + test -z "\$PEER_ID_PRIVATE_KEY" && export PEER_ID_PRIVATE_KEY=\$(cat /var/lib/aztec/p2p-private-key) + + node --no-warnings /usr/src/yarn-project/aztec/dest/bin/index.js start --node --archiver --sequencer' EOF + # Add volume configuration based on user choice + if [ -n "$BIND_MOUNT_DIR" ]; then + cat >> docker-compose.yml << EOF + volumes: + - ${BIND_MOUNT_DIR}:/var/lib/aztec +EOF + else + cat >> docker-compose.yml << EOF + volumes: + - aztec_data:/var/lib/aztec + +volumes: + aztec_data: +EOF + fi + echo -e "${GREEN}Configuration complete! Use './aztec-spartan.sh start' to launch your node.${NC}" } @@ -283,3 +318,4 @@ case "$1" in exit 1 ;; esac + diff --git a/spartan/scripts/deploy_spartan.sh b/spartan/scripts/deploy_spartan.sh index 96a8ef2c68d..16bb8c76628 100755 --- a/spartan/scripts/deploy_spartan.sh +++ b/spartan/scripts/deploy_spartan.sh @@ -8,7 +8,7 @@ NAMESPACE=${3:-spartan} PROD=${4:-true} PROD_ARGS="" if [ "$PROD" = "true" ] ; then - PROD_ARGS="--set network.public=true --set telemetry.enabled=true --set telemetry.otelCollectorEndpoint=http://metrics-opentelemetry-collector.metrics:4318" + PROD_ARGS="--set network.public=true" fi SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" @@ -69,14 +69,12 @@ function upgrade() { if ! upgrade | tee "$SCRIPT_DIR/logs/$NAMESPACE-helm.log" ; then if grep 'cannot patch "'$NAMESPACE'-aztec-network-setup-l2-contracts"' "$SCRIPT_DIR/logs/$NAMESPACE-helm.log" ; then kubectl delete job $NAMESPACE-aztec-network-setup-l2-contracts -n $NAMESPACE - upgrade fi -fi -if ! upgrade | tee "$SCRIPT_DIR/logs/$NAMESPACE-helm.log" ; then if grep 'cannot patch "'$NAMESPACE'-aztec-network-deploy-l1-verifier"' "$SCRIPT_DIR/logs/$NAMESPACE-helm.log" ; then kubectl delete job $NAMESPACE-aztec-network-deploy-l1-verifier -n $NAMESPACE - upgrade fi + + upgrade fi diff --git a/spartan/scripts/get_service_address b/spartan/scripts/get_service_address new file mode 100755 index 00000000000..3f3634faaef --- /dev/null +++ b/spartan/scripts/get_service_address @@ -0,0 +1,47 @@ +set -eu +SERVICE_LABEL=$1 +PORT=$2 +MAX_RETRIES=30 +RETRY_INTERVAL=2 +attempt=1 + +# Get pod name +while [ $attempt -le $MAX_RETRIES ]; do + POD_NAME=$(kubectl get pods -n ${NAMESPACE} -l app=${SERVICE_LABEL} -o jsonpath='{.items[0].metadata.name}') + if [ -n "$POD_NAME" ]; then + break + fi + echo "Attempt $attempt: Waiting for ${SERVICE_LABEL} pod to be available..." 
>&2 + sleep $RETRY_INTERVAL + attempt=$((attempt + 1)) +done + +if [ -z "$POD_NAME" ]; then + echo "Error: Failed to get ${SERVICE_LABEL} pod name after $MAX_RETRIES attempts" >&2 + return 1 +fi +echo "Pod name: [${POD_NAME}]" >&2 + +# Get node name +attempt=1 +NODE_NAME="" +while [ $attempt -le $MAX_RETRIES ]; do + NODE_NAME=$(kubectl get pod ${POD_NAME} -n ${NAMESPACE} -o jsonpath='{.spec.nodeName}') + if [ -n "$NODE_NAME" ]; then + break + fi + echo "Attempt $attempt: Waiting for node name to be available..." >&2 + sleep $RETRY_INTERVAL + attempt=$((attempt + 1)) +done + +if [ -z "$NODE_NAME" ]; then + echo "Error: Failed to get node name after $MAX_RETRIES attempts" >&2 + return 1 +fi +echo "Node name: ${NODE_NAME}" >&2 + +# Get the node's external IP +NODE_IP=$(kubectl get node ${NODE_NAME} -o jsonpath='{.status.addresses[?(@.type=="ExternalIP")].address}') +echo "Node IP: ${NODE_IP}" >&2 +echo "http://${NODE_IP}:${PORT}" \ No newline at end of file diff --git a/spartan/scripts/post_deploy_spartan.sh b/spartan/scripts/post_deploy_spartan.sh index bcf66bff49b..e268174f49c 100755 --- a/spartan/scripts/post_deploy_spartan.sh +++ b/spartan/scripts/post_deploy_spartan.sh @@ -5,7 +5,7 @@ set -o pipefail echo "Bootstrapping network with test contracts" -NAMESPACE=${1:-spartan} +export NAMESPACE=${1:-spartan} TAG=${2:-latest} SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" @@ -22,10 +22,11 @@ function get_load_balancer_url() { kubectl get svc -n $namespace -o jsonpath="{.items[?(@.metadata.name=='$service_name')].status.loadBalancer.ingress[0].hostname}" } + # Fetch the service URLs based on the namespace for injection in the test-transfer.sh -export BOOTNODE_URL=http://$(get_load_balancer_url $NAMESPACE "$NAMESPACE-aztec-network-boot-node-lb-tcp"):8080 -export PXE_URL=http://$(get_load_balancer_url $NAMESPACE "$NAMESPACE-aztec-network-pxe-lb"):8080 -export ETHEREUM_HOST=http://$(get_load_balancer_url $NAMESPACE "$NAMESPACE-aztec-network-ethereum-lb"):8545 +export BOOTNODE_URL=$($(dirname $0)/get_service_address boot-node 8080) +export PXE_URL=$($(dirname $0)/get_service_address pxe 8080) +export ETHEREUM_HOST=$($(dirname $0)/get_service_address ethereum 8545) echo "BOOTNODE_URL: $BOOTNODE_URL" echo "PXE_URL: $PXE_URL" @@ -36,6 +37,6 @@ echo "Bootstrapping contracts for test network. 
NOTE: This took one hour last ru docker run aztecprotocol/aztec:$TAG bootstrap-network \ --rpc-url $BOOTNODE_URL \ --l1-rpc-url $ETHEREUM_HOST \ - --l1-chain-id 31337 \ + --l1-chain-id 1337 \ --l1-private-key 0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80 \ --json | tee ./basic_contracts.json diff --git a/spartan/terraform/deploy-release/deploy.sh b/spartan/terraform/deploy-release/deploy.sh deleted file mode 100755 index e9574554524..00000000000 --- a/spartan/terraform/deploy-release/deploy.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/bash - -RELEASE_NAME="rough-rhino" -terraform init -backend-config="key=deploy-network/${RELEASE_NAME}/terraform.tfstate" -terraform apply -var-file="release.tfvars" diff --git a/spartan/terraform/deploy-release/main.tf b/spartan/terraform/deploy-release/main.tf index bd98f0897a8..4f525c1388f 100644 --- a/spartan/terraform/deploy-release/main.tf +++ b/spartan/terraform/deploy-release/main.tf @@ -18,32 +18,37 @@ terraform { provider "kubernetes" { alias = "gke-cluster" config_path = "~/.kube/config" - config_context = var.gke_cluster_context + config_context = var.GKE_CLUSTER_CONTEXT } provider "helm" { alias = "gke-cluster" kubernetes { config_path = "~/.kube/config" - config_context = var.gke_cluster_context + config_context = var.GKE_CLUSTER_CONTEXT } } # Aztec Helm release for gke-cluster resource "helm_release" "aztec-gke-cluster" { provider = helm.gke-cluster - name = var.release_name + name = var.RELEASE_NAME repository = "../../" chart = "aztec-network" - namespace = var.release_name + namespace = var.RELEASE_NAME create_namespace = true # base values file - values = [file("../../aztec-network/values/${var.values_file}")] + values = [file("../../aztec-network/values/${var.VALUES_FILE}")] set { name = "images.aztec.image" - value = var.aztec_docker_image + value = var.AZTEC_DOCKER_IMAGE + } + + set { + name = "aztec.l1DeploymentMnemonic" + value = var.l1_deployment_mnemonic } # Setting timeout and wait conditions diff --git a/spartan/terraform/deploy-release/release.tfvars b/spartan/terraform/deploy-release/release.tfvars deleted file mode 100644 index f3236423d9f..00000000000 --- a/spartan/terraform/deploy-release/release.tfvars +++ /dev/null @@ -1,4 +0,0 @@ -release_name = "rough-rhino" -values_file = "release.yaml" -aztec_docker_image = "aztecprotocol/aztec:698cd3d62680629a3f1bfc0f82604534cedbccf3-x86_64" - diff --git a/spartan/terraform/deploy-release/variables.tf b/spartan/terraform/deploy-release/variables.tf index ebccc9d3f67..369218d3ede 100644 --- a/spartan/terraform/deploy-release/variables.tf +++ b/spartan/terraform/deploy-release/variables.tf @@ -1,20 +1,26 @@ -variable "gke_cluster_context" { +variable "GKE_CLUSTER_CONTEXT" { description = "GKE cluster context" type = string default = "gke_testnet-440309_us-east4-a_spartan-gke" } -variable "release_name" { +variable "RELEASE_NAME" { description = "Name of helm deployment and k8s namespace" type = string } -variable "values_file" { +variable "VALUES_FILE" { description = "Name of the values file to use for deployment" type = string } -variable "aztec_docker_image" { +variable "AZTEC_DOCKER_IMAGE" { description = "Docker image to use for the aztec network" type = string } + +variable "l1_deployment_mnemonic" { + description = "Mnemonic to use for the L1 contract deployments" + type = string + sensitive = true +} diff --git a/spartan/terraform/gke-cluster-old/firewall.tf b/spartan/terraform/gke-cluster-old/firewall.tf new file mode 100644 index 00000000000..0dc4b406ce3 --- 
/dev/null +++ b/spartan/terraform/gke-cluster-old/firewall.tf @@ -0,0 +1,51 @@ +# Create ingress firewall rules for UDP +resource "google_compute_firewall" "udp_ingress" { + name = "allow-udp-ingress-custom" + network = "default" + allow { + protocol = "udp" + ports = ["40400-40499", "8080", "8545"] + } + direction = "INGRESS" + source_ranges = ["0.0.0.0/0"] + target_tags = ["gke-node", "aztec-gke-node"] +} + +# Create egress firewall rules for UDP +resource "google_compute_firewall" "udp_egress" { + name = "allow-udp-egress-custom" + network = "default" + allow { + protocol = "udp" + ports = ["40400-40499", "8080", "8545"] + } + direction = "EGRESS" + destination_ranges = ["0.0.0.0/0"] + target_tags = ["gke-node", "aztec-gke-node"] +} + +# Create ingress firewall rules for TCP +resource "google_compute_firewall" "tcp_ingress" { + name = "allow-tcp-ingress-custom" + network = "default" + allow { + protocol = "tcp" + ports = ["40400-40499", "8080", "8545"] + } + direction = "INGRESS" + source_ranges = ["0.0.0.0/0"] + target_tags = ["gke-node", "aztec-gke-node"] +} + +# Create egress firewall rules for TCP +resource "google_compute_firewall" "tcp_egress" { + name = "allow-tcp-egress-custom" + network = "default" + allow { + protocol = "tcp" + ports = ["40400-40499", "8080", "8545"] + } + direction = "EGRESS" + destination_ranges = ["0.0.0.0/0"] + target_tags = ["gke-node", "aztec-gke-node"] +} diff --git a/spartan/terraform/gke-cluster-old/main.tf b/spartan/terraform/gke-cluster-old/main.tf new file mode 100644 index 00000000000..6055ca52cc3 --- /dev/null +++ b/spartan/terraform/gke-cluster-old/main.tf @@ -0,0 +1,193 @@ +terraform { + backend "s3" { + bucket = "aztec-terraform" + key = "spartan-gke-cluster/terraform.tfstate" + region = "eu-west-2" + } + required_providers { + google = { + source = "hashicorp/google" + version = "~> 5.0" + } + } +} + +# Configure the Google Cloud provider +provider "google" { + project = var.project + region = var.region +} + +# Create the service account +resource "google_service_account" "gke_sa" { + account_id = "gke-nodes-sa" + display_name = "GKE Nodes Service Account" + description = "Service account for GKE nodes" +} + +# Add IAM roles to the service account +resource "google_project_iam_member" "gke_sa_roles" { + for_each = toset([ + "roles/logging.logWriter", + "roles/monitoring.metricWriter", + "roles/monitoring.viewer", + "roles/artifactregistry.reader" + ]) + project = var.project + role = each.key + member = "serviceAccount:${google_service_account.gke_sa.email}" +} + +# Create a new service account for Helm +resource "google_service_account" "helm_sa" { + account_id = "helm-sa" + display_name = "Helm Service Account" + description = "Service account for Helm operations" +} + +# Add IAM roles to the Helm service account +resource "google_project_iam_member" "helm_sa_roles" { + for_each = toset([ + "roles/container.admin", + "roles/storage.admin", + "roles/secretmanager.admin" + ]) + project = var.project + role = each.key + member = "serviceAccount:${google_service_account.helm_sa.email}" +} + +# Create a GKE cluster +resource "google_container_cluster" "primary" { + name = "spartan-gke" + location = var.zone + + initial_node_count = 1 + # Remove default node pool after cluster creation + remove_default_node_pool = true + + # Kubernetes version + min_master_version = "latest" + + # Network configuration + network = "default" + subnetwork = "default" + + # Master auth configuration + master_auth { + client_certificate_config { + 
issue_client_certificate = false + } + } +} + +# Create primary node pool with autoscaling +resource "google_container_node_pool" "primary_nodes" { + name = "primary-node-pool" + location = var.zone + cluster = google_container_cluster.primary.name + + # Enable autoscaling + autoscaling { + min_node_count = 1 + max_node_count = 5 + } + + # Node configuration + node_config { + machine_type = "t2d-standard-32" + + service_account = google_service_account.gke_sa.email + oauth_scopes = [ + "https://www.googleapis.com/auth/cloud-platform" + ] + + labels = { + env = "production" + } + tags = ["gke-node"] + } + + # Management configuration + management { + auto_repair = true + auto_upgrade = true + } +} + +# Create node pool for aztec nodes (validators, prover nodes, boot nodes) +resource "google_container_node_pool" "aztec_nodes" { + name = "aztec-node-pool" + location = var.zone + cluster = google_container_cluster.primary.name + + # Enable autoscaling + autoscaling { + min_node_count = 1 + max_node_count = 128 + } + + # Node configuration + node_config { + machine_type = "t2d-standard-8" + + service_account = google_service_account.gke_sa.email + oauth_scopes = [ + "https://www.googleapis.com/auth/cloud-platform" + ] + + labels = { + env = "production" + } + tags = ["gke-node", "aztec"] + } + + # Management configuration + management { + auto_repair = true + auto_upgrade = true + } +} + +# Create spot instance node pool with autoscaling +resource "google_container_node_pool" "spot_nodes" { + name = "spot-node-pool" + location = var.zone + cluster = google_container_cluster.primary.name + + # Enable autoscaling + autoscaling { + min_node_count = 0 + max_node_count = 10 + } + + # Node configuration + node_config { + machine_type = "t2d-standard-32" + spot = true + + service_account = google_service_account.gke_sa.email + oauth_scopes = [ + "https://www.googleapis.com/auth/cloud-platform" + ] + + labels = { + env = "production" + pool = "spot" + } + tags = ["gke-node", "spot"] + + # Spot instance termination handler + taint { + key = "cloud.google.com/gke-spot" + value = "true" + effect = "NO_SCHEDULE" + } + } + + # Management configuration + management { + auto_repair = true + auto_upgrade = true + } +} diff --git a/spartan/terraform/gke-cluster-old/outputs.tf b/spartan/terraform/gke-cluster-old/outputs.tf new file mode 100644 index 00000000000..befaa28092e --- /dev/null +++ b/spartan/terraform/gke-cluster-old/outputs.tf @@ -0,0 +1,17 @@ +output "cluster_endpoint" { + value = google_container_cluster.primary.endpoint +} + +output "service_account_email" { + value = google_service_account.gke_sa.email +} + +output "region" { + description = "Google cloud region" + value = var.region +} + +output "kubernetes_cluster_name" { + description = "GKE Cluster Name" + value = google_container_cluster.primary.name +} diff --git a/spartan/terraform/gke-cluster-old/variables.tf b/spartan/terraform/gke-cluster-old/variables.tf new file mode 100644 index 00000000000..555458daa5d --- /dev/null +++ b/spartan/terraform/gke-cluster-old/variables.tf @@ -0,0 +1,11 @@ +variable "project" { + default = "testnet-440309" +} + +variable "region" { + default = "us-east4" +} + +variable "zone" { + default = "us-east4-a" +} diff --git a/spartan/terraform/gke-cluster/firewall.tf b/spartan/terraform/gke-cluster/firewall.tf index 0c5741c8506..0dc4b406ce3 100644 --- a/spartan/terraform/gke-cluster/firewall.tf +++ b/spartan/terraform/gke-cluster/firewall.tf @@ -8,7 +8,7 @@ resource "google_compute_firewall" "udp_ingress" { } 
direction = "INGRESS" source_ranges = ["0.0.0.0/0"] - target_tags = ["gke-node"] + target_tags = ["gke-node", "aztec-gke-node"] } # Create egress firewall rules for UDP @@ -21,7 +21,7 @@ resource "google_compute_firewall" "udp_egress" { } direction = "EGRESS" destination_ranges = ["0.0.0.0/0"] - target_tags = ["gke-node"] + target_tags = ["gke-node", "aztec-gke-node"] } # Create ingress firewall rules for TCP @@ -34,7 +34,7 @@ resource "google_compute_firewall" "tcp_ingress" { } direction = "INGRESS" source_ranges = ["0.0.0.0/0"] - target_tags = ["gke-node"] + target_tags = ["gke-node", "aztec-gke-node"] } # Create egress firewall rules for TCP @@ -47,5 +47,5 @@ resource "google_compute_firewall" "tcp_egress" { } direction = "EGRESS" destination_ranges = ["0.0.0.0/0"] - target_tags = ["gke-node"] + target_tags = ["gke-node", "aztec-gke-node"] } diff --git a/spartan/terraform/gke-cluster/main.tf b/spartan/terraform/gke-cluster/main.tf index 971a4aacdbc..2c169144485 100644 --- a/spartan/terraform/gke-cluster/main.tf +++ b/spartan/terraform/gke-cluster/main.tf @@ -1,7 +1,7 @@ terraform { backend "s3" { bucket = "aztec-terraform" - key = "spartan-gke-cluster/terraform.tfstate" + key = "aztec-gke-cluster/terraform.tfstate" region = "eu-west-2" } required_providers { @@ -20,9 +20,9 @@ provider "google" { # Create the service account resource "google_service_account" "gke_sa" { - account_id = "gke-nodes-sa" - display_name = "GKE Nodes Service Account" - description = "Service account for GKE nodes" + account_id = "aztec-gke-nodes-sa" + display_name = "Aztec GKE Nodes Service Account" + description = "Service account for aztec GKE nodes" } # Add IAM roles to the service account @@ -49,7 +49,8 @@ resource "google_service_account" "helm_sa" { resource "google_project_iam_member" "helm_sa_roles" { for_each = toset([ "roles/container.admin", - "roles/storage.admin" + "roles/storage.admin", + "roles/secretmanager.admin" ]) project = var.project role = each.key @@ -58,7 +59,7 @@ resource "google_project_iam_member" "helm_sa_roles" { # Create a GKE cluster resource "google_container_cluster" "primary" { - name = "spartan-gke" + name = var.cluster_name location = var.zone initial_node_count = 1 @@ -89,7 +90,7 @@ resource "google_container_node_pool" "primary_nodes" { # Enable autoscaling autoscaling { min_node_count = 1 - max_node_count = 5 + max_node_count = 2 } # Node configuration @@ -104,7 +105,7 @@ resource "google_container_node_pool" "primary_nodes" { labels = { env = "production" } - tags = ["gke-node"] + tags = ["aztec-gke-node"] } # Management configuration @@ -128,7 +129,7 @@ resource "google_container_node_pool" "aztec_nodes" { # Node configuration node_config { - machine_type = "t2d-standard-8" + machine_type = "t2d-standard-4" service_account = google_service_account.gke_sa.email oauth_scopes = [ @@ -138,7 +139,7 @@ resource "google_container_node_pool" "aztec_nodes" { labels = { env = "production" } - tags = ["gke-node", "aztec"] + tags = ["aztec-gke-node", "aztec"] } # Management configuration @@ -150,7 +151,7 @@ resource "google_container_node_pool" "aztec_nodes" { # Create spot instance node pool with autoscaling resource "google_container_node_pool" "spot_nodes" { - name = "spot-node-pool" + name = "aztec-spot-node-pool" location = var.zone cluster = google_container_cluster.primary.name @@ -174,7 +175,7 @@ resource "google_container_node_pool" "spot_nodes" { env = "production" pool = "spot" } - tags = ["gke-node", "spot"] + tags = ["aztec-gke-node", "spot"] # Spot instance termination 
handler taint { diff --git a/spartan/terraform/gke-cluster/variables.tf b/spartan/terraform/gke-cluster/variables.tf index 555458daa5d..83e1925cbd4 100644 --- a/spartan/terraform/gke-cluster/variables.tf +++ b/spartan/terraform/gke-cluster/variables.tf @@ -3,9 +3,13 @@ variable "project" { } variable "region" { - default = "us-east4" + default = "us-west1" } variable "zone" { - default = "us-east4-a" + default = "us-west1-a" +} + +variable "cluster_name" { + default = "aztec-gke" } diff --git a/spartan/terraform/user-script/main.tf b/spartan/terraform/user-script/main.tf new file mode 100644 index 00000000000..c3291292c2e --- /dev/null +++ b/spartan/terraform/user-script/main.tf @@ -0,0 +1,144 @@ +terraform { + backend "s3" { + bucket = "aztec-terraform" + key = "spartan-script" + region = "eu-west-2" + } + required_providers { + aws = { + source = "hashicorp/aws" + version = "5.29.0" + } + } +} + +provider "aws" { + region = "eu-west-2" +} + +data "terraform_remote_state" "aztec2_iac" { + backend = "s3" + config = { + bucket = "aztec-terraform" + key = "aztec2/iac" + region = "eu-west-2" + } +} + +resource "aws_s3_bucket" "sp_testnet_script" { + bucket = "sp-testnet.aztec.network" +} + +resource "aws_s3_bucket_website_configuration" "sp_testnet_script" { + bucket = aws_s3_bucket.sp_testnet_script.id + + index_document { + suffix = "create-spartan.sh" + } +} + +resource "aws_s3_bucket_public_access_block" "sp_testnet_public_access" { + bucket = aws_s3_bucket.sp_testnet_script.id + + block_public_acls = false + block_public_policy = false + ignore_public_acls = false + restrict_public_buckets = false +} + +resource "aws_s3_bucket_policy" "sp_testnet_policy" { + bucket = aws_s3_bucket.sp_testnet_script.id + + policy = jsonencode({ + Version = "2012-10-17" + Statement = [ + { + Effect = "Allow" + Principal = "*" + Action = "s3:GetObject" + Resource = "arn:aws:s3:::${aws_s3_bucket.sp_testnet_script.id}/*" + } + ] + }) +} + +# Upload files to s3 bucket +resource "null_resource" "upload_script" { + triggers = { + always_run = "${timestamp()}" + } + + provisioner "local-exec" { + interpreter = ["/bin/bash", "-c"] + command = < { }, 10_000); it('skip event search if no changes found', async () => { - const loggerSpy = jest.spyOn((archiver as any).log, 'verbose'); + const loggerSpy = jest.spyOn((archiver as any).log, 'debug'); let latestBlockNum = await archiver.getBlockNumber(); expect(latestBlockNum).toEqual(0); @@ -294,15 +294,12 @@ describe('Archiver', () => { expect(latestBlockNum).toEqual(numL2BlocksInTest); // For some reason, this is 1-indexed. - expect(loggerSpy).toHaveBeenNthCalledWith( - 1, - `Retrieved no new L1 -> L2 messages between L1 blocks ${1n} and ${50}.`, - ); - expect(loggerSpy).toHaveBeenNthCalledWith(2, `No blocks to retrieve from ${1n} to ${50n}`); + expect(loggerSpy).toHaveBeenNthCalledWith(1, `Retrieved no new L1 to L2 messages between L1 blocks 1 and 50.`); + expect(loggerSpy).toHaveBeenNthCalledWith(2, `No blocks to retrieve from 1 to 50`); }, 10_000); it('handles L2 reorg', async () => { - const loggerSpy = jest.spyOn((archiver as any).log, 'verbose'); + const loggerSpy = jest.spyOn((archiver as any).log, 'debug'); let latestBlockNum = await archiver.getBlockNumber(); expect(latestBlockNum).toEqual(0); @@ -354,17 +351,13 @@ describe('Archiver', () => { expect(latestBlockNum).toEqual(numL2BlocksInTest); // For some reason, this is 1-indexed. 
- expect(loggerSpy).toHaveBeenNthCalledWith( - 1, - `Retrieved no new L1 -> L2 messages between L1 blocks ${1n} and ${50}.`, - ); - expect(loggerSpy).toHaveBeenNthCalledWith(2, `No blocks to retrieve from ${1n} to ${50n}`); + expect(loggerSpy).toHaveBeenNthCalledWith(1, `Retrieved no new L1 to L2 messages between L1 blocks 1 and 50.`); + expect(loggerSpy).toHaveBeenNthCalledWith(2, `No blocks to retrieve from 1 to 50`); // Lets take a look to see if we can find re-org stuff! await sleep(1000); - expect(loggerSpy).toHaveBeenNthCalledWith(6, `L2 prune have occurred, unwind state`); - expect(loggerSpy).toHaveBeenNthCalledWith(7, `Unwinding 1 block from block 2`); + expect(loggerSpy).toHaveBeenNthCalledWith(9, `L2 prune has been detected.`); // Should also see the block number be reduced latestBlockNum = await archiver.getBlockNumber(); diff --git a/yarn-project/archiver/src/archiver/archiver.ts b/yarn-project/archiver/src/archiver/archiver.ts index 94c664d0f11..831295da0be 100644 --- a/yarn-project/archiver/src/archiver/archiver.ts +++ b/yarn-project/archiver/src/archiver/archiver.ts @@ -176,7 +176,7 @@ export class Archiver implements ArchiveSource { config.l1Contracts.registryAddress, archiverStore, config.archiverPollingIntervalMS ?? 10_000, - new ArchiverInstrumentation(telemetry), + new ArchiverInstrumentation(telemetry, () => archiverStore.estimateSize()), { l1StartBlock, l1GenesisTime, epochDuration, slotDuration, ethereumSlotDuration }, ); await archiver.start(blockUntilSynced); @@ -193,7 +193,6 @@ export class Archiver implements ArchiveSource { } if (blockUntilSynced) { - this.log.info(`Performing initial chain sync to rollup contract ${this.rollupAddress.toString()}`); await this.sync(blockUntilSynced); } @@ -233,6 +232,15 @@ export class Archiver implements ArchiveSource { const { blocksSynchedTo = l1StartBlock, messagesSynchedTo = l1StartBlock } = await this.store.getSynchPoint(); const currentL1BlockNumber = await this.publicClient.getBlockNumber(); + if (blockUntilSynced) { + this.log.info( + `Starting archiver sync to rollup contract ${this.rollupAddress.toString()} from L1 block ${Math.min( + Number(blocksSynchedTo), + Number(messagesSynchedTo), + )} to current L1 block ${currentL1BlockNumber}`, + ); + } + // ********** Ensuring Consistency of data pulled from L1 ********** /** @@ -271,9 +279,10 @@ export class Archiver implements ArchiveSource { // the chain locally before we start unwinding stuff. This can be optimized by figuring out // up to which point we're pruning, and then requesting L2 blocks up to that point only. await this.handleEpochPrune(provenBlockNumber, currentL1BlockNumber); + } - const storeSizes = this.store.estimateSize(); - this.instrumentation.recordDBMetrics(storeSizes); + if (blockUntilSynced) { + this.log.info(`Initial archiver sync to L1 block ${currentL1BlockNumber} complete.`); } } @@ -289,13 +298,13 @@ export class Archiver implements ArchiveSource { if (canPrune) { const blocksToUnwind = localPendingBlockNumber - provenBlockNumber; - this.log.verbose( - `L2 prune will occur on next submission. ` + - `Unwinding ${count(blocksToUnwind, 'block')} from block ${localPendingBlockNumber} ` + - `to the last proven block ${provenBlockNumber}.`, - ); + this.log.debug(`L2 prune will occur on next block submission.`); await this.store.unwindBlocks(Number(localPendingBlockNumber), Number(blocksToUnwind)); - this.log.verbose(`Unwound ${count(blocksToUnwind, 'block')}. 
New L2 block is ${await this.getBlockNumber()}.`); + this.log.warn( + `Unwound ${count(blocksToUnwind, 'block')} from L2 block ${localPendingBlockNumber} ` + + `to ${provenBlockNumber} due to predicted reorg at L1 block ${currentL1BlockNumber}. ` + + `Updated L2 latest block is ${await this.getBlockNumber()}.`, + ); // TODO(palla/reorg): Do we need to set the block synched L1 block number here? // Seems like the next iteration should handle this. // await this.store.setBlockSynchedL1BlockNumber(currentL1BlockNumber); @@ -316,8 +325,8 @@ export class Archiver implements ArchiveSource { if (localTotalMessageCount === destinationTotalMessageCount) { await this.store.setMessageSynchedL1BlockNumber(currentL1BlockNumber); - this.log.verbose( - `Retrieved no new L1 -> L2 messages between L1 blocks ${messagesSynchedTo + 1n} and ${currentL1BlockNumber}.`, + this.log.debug( + `Retrieved no new L1 to L2 messages between L1 blocks ${messagesSynchedTo + 1n} and ${currentL1BlockNumber}.`, ); return; } @@ -332,7 +341,7 @@ export class Archiver implements ArchiveSource { await this.store.addL1ToL2Messages(retrievedL1ToL2Messages); this.log.verbose( - `Retrieved ${retrievedL1ToL2Messages.retrievedData.length} new L1 -> L2 messages between L1 blocks ${ + `Retrieved ${retrievedL1ToL2Messages.retrievedData.length} new L1 to L2 messages between L1 blocks ${ messagesSynchedTo + 1n } and ${currentL1BlockNumber}.`, ); @@ -359,10 +368,10 @@ export class Archiver implements ArchiveSource { localBlockForDestinationProvenBlockNumber && provenArchive === localBlockForDestinationProvenBlockNumber.archive.root.toString() ) { - this.log.verbose(`Updating the proven block number to ${provenBlockNumber} and epoch to ${provenEpochNumber}`); await this.store.setProvenL2BlockNumber(Number(provenBlockNumber)); // if we are here then we must have a valid proven epoch number await this.store.setProvenL2EpochNumber(Number(provenEpochNumber)); + this.log.info(`Updated proven chain`, { provenBlockNumber, provenEpochNumber }); } this.instrumentation.updateLastProvenBlock(Number(provenBlockNumber)); }; @@ -372,7 +381,7 @@ export class Archiver implements ArchiveSource { const noBlocks = localPendingBlockNumber === 0n && pendingBlockNumber === 0n; if (noBlocks) { await this.store.setBlockSynchedL1BlockNumber(currentL1BlockNumber); - this.log.verbose(`No blocks to retrieve from ${blocksSynchedTo + 1n} to ${currentL1BlockNumber}`); + this.log.debug(`No blocks to retrieve from ${blocksSynchedTo + 1n} to ${currentL1BlockNumber}`); return { provenBlockNumber }; } @@ -389,7 +398,7 @@ export class Archiver implements ArchiveSource { const noBlockSinceLast = localPendingBlock && pendingArchive === localPendingBlock.archive.root.toString(); if (noBlockSinceLast) { await this.store.setBlockSynchedL1BlockNumber(currentL1BlockNumber); - this.log.verbose(`No blocks to retrieve from ${blocksSynchedTo + 1n} to ${currentL1BlockNumber}`); + this.log.debug(`No blocks to retrieve from ${blocksSynchedTo + 1n} to ${currentL1BlockNumber}`); return { provenBlockNumber }; } @@ -399,7 +408,7 @@ export class Archiver implements ArchiveSource { // or the L1 have reorged. // In any case, we have to figure out how far into the past the action will take us. // For simplicity here, we will simply rewind until we end in a block that is also on the chain on L1. 
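The comment above sums up the recovery strategy: walk the local chain backwards until reaching a block whose archive root L1 still agrees with, then unwind everything above it. A condensed sketch of that loop, where `matchesL1ArchiveRoot` is a hypothetical helper standing in for the archive-root comparison the archiver performs against the rollup contract (the actual implementation follows in the hunk below):

```ts
// Step back from the local pending tip until the local archive root agrees
// with what the rollup contract reports, then unwind the blocks above it.
// (matchesL1ArchiveRoot is illustrative, not the archiver's actual API.)
let tipAfterUnwind = localPendingBlockNumber;
while (tipAfterUnwind > 0n && !(await matchesL1ArchiveRoot(tipAfterUnwind))) {
  tipAfterUnwind -= 1n;
}
const blocksToUnwind = localPendingBlockNumber - tipAfterUnwind;
await store.unwindBlocks(Number(localPendingBlockNumber), Number(blocksToUnwind));
```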
- this.log.verbose(`L2 prune have occurred, unwind state`); + this.log.debug(`L2 prune has been detected.`); let tipAfterUnwind = localPendingBlockNumber; while (true) { @@ -417,15 +426,18 @@ export class Archiver implements ArchiveSource { } const blocksToUnwind = localPendingBlockNumber - tipAfterUnwind; - this.log.verbose( - `Unwinding ${blocksToUnwind} block${blocksToUnwind > 1n ? 's' : ''} from block ${localPendingBlockNumber}`, - ); await this.store.unwindBlocks(Number(localPendingBlockNumber), Number(blocksToUnwind)); + + this.log.warn( + `Unwound ${count(blocksToUnwind, 'block')} from L2 block ${localPendingBlockNumber} ` + + `due to mismatched block hashes at L1 block ${currentL1BlockNumber}. ` + + `Updated L2 latest block is ${await this.getBlockNumber()}.`, + ); } } - this.log.debug(`Retrieving blocks from ${blocksSynchedTo + 1n} to ${currentL1BlockNumber}`); + // TODO(palla/log) Downgrade to trace + this.log.debug(`Retrieving L2 blocks from L1 block ${blocksSynchedTo + 1n} to ${currentL1BlockNumber}`); const retrievedBlocks = await retrieveBlockFromRollup( this.rollup, this.publicClient, @@ -438,34 +450,42 @@ export class Archiver implements ArchiveSource { if (retrievedBlocks.length === 0) { // We are not calling `setBlockSynchedL1BlockNumber` because it may cause sync issues if based off infura. // See further details in earlier comments. - this.log.verbose(`Retrieved no new L2 blocks from ${blocksSynchedTo + 1n} to ${currentL1BlockNumber}`); + // TODO(palla/log) Downgrade to trace + this.log.debug(`Retrieved no new L2 blocks from L1 block ${blocksSynchedTo + 1n} to ${currentL1BlockNumber}`); return { provenBlockNumber }; } + const lastProcessedL1BlockNumber = retrievedBlocks[retrievedBlocks.length - 1].l1.blockNumber; this.log.debug( `Retrieved ${retrievedBlocks.length} new L2 blocks between L1 blocks ${ blocksSynchedTo + 1n } and ${currentL1BlockNumber} with last processed L1 block ${lastProcessedL1BlockNumber}.`, ); - const lastProcessedL1BlockNumber = retrievedBlocks[retrievedBlocks.length - 1].l1.blockNumber; - - this.log.debug(`last processed L1 block: [${lastProcessedL1BlockNumber}]`); for (const block of retrievedBlocks) { - this.log.debug(`ingesting new L2 block`, block.data.header.globalVariables.toFriendlyJSON()); + this.log.debug(`Ingesting new L2 block ${block.data.number}`, { + ...block.data.header.globalVariables.toInspect(), + blockHash: block.data.hash(), + l1BlockNumber: block.l1.blockNumber, + }); } const timer = new Timer(); await this.store.addBlocks(retrievedBlocks); + for (const block of retrievedBlocks) { + this.log.info(`Downloaded L2 block ${block.data.number}`, { + blockHash: block.data.hash(), + blockNumber: block.data.number, + }); + } + // Important that we update AFTER inserting the blocks.
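+ // (Ordering note: updateProvenBlock() below compares the proven archive root reported by L1
+ // against the block stored locally at that height, so the proven tip can only advance once
+ // addBlocks() has persisted the newly retrieved blocks.)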
await updateProvenBlock(); this.instrumentation.processNewBlocks( timer.ms() / retrievedBlocks.length, retrievedBlocks.map(b => b.data), ); - const lastL2BlockNumber = retrievedBlocks[retrievedBlocks.length - 1].data.number; - this.log.verbose(`Processed ${retrievedBlocks.length} new L2 blocks up to ${lastL2BlockNumber}`); return { provenBlockNumber }; } @@ -828,7 +848,7 @@ class ArchiverStoreHelper .map(log => ContractClassRegisteredEvent.fromLog(log.data)) .map(e => e.toContractClassPublic()); if (contractClasses.length > 0) { - contractClasses.forEach(c => this.#log.verbose(`Registering contract class ${c.id.toString()}`)); + contractClasses.forEach(c => this.#log.verbose(`${Operation[operation]} contract class ${c.id.toString()}`)); if (operation == Operation.Store) { // TODO: Will probably want to create some worker threads to compute these bytecode commitments as they are expensive return await this.store.addContractClasses( diff --git a/yarn-project/archiver/src/archiver/instrumentation.ts b/yarn-project/archiver/src/archiver/instrumentation.ts index 1d6343b8f9d..7c44a9a4618 100644 --- a/yarn-project/archiver/src/archiver/instrumentation.ts +++ b/yarn-project/archiver/src/archiver/instrumentation.ts @@ -5,6 +5,7 @@ import { type Gauge, type Histogram, LmdbMetrics, + type LmdbStatsCallback, Metrics, type TelemetryClient, type UpDownCounter, @@ -23,7 +24,7 @@ export class ArchiverInstrumentation { private log = createDebugLogger('aztec:archiver:instrumentation'); - constructor(private telemetry: TelemetryClient) { + constructor(private telemetry: TelemetryClient, lmdbStats?: LmdbStatsCallback) { const meter = telemetry.getMeter('Archiver'); this.blockHeight = meter.createGauge(Metrics.ARCHIVER_BLOCK_HEIGHT, { description: 'The height of the latest block processed by the archiver', @@ -72,13 +73,10 @@ export class ArchiverInstrumentation { name: Metrics.ARCHIVER_DB_NUM_ITEMS, description: 'Num items in the archiver database', }, + lmdbStats, ); } - public recordDBMetrics(metrics: { mappingSize: number; numItems: number; actualSize: number }) { - this.dbMetrics.recordDBMetrics(metrics); - } - public isEnabled(): boolean { return this.telemetry.isEnabled(); } diff --git a/yarn-project/archiver/src/archiver/kv_archiver_store/log_store.ts b/yarn-project/archiver/src/archiver/kv_archiver_store/log_store.ts index efb4922d328..da6f4938883 100644 --- a/yarn-project/archiver/src/archiver/kv_archiver_store/log_store.ts +++ b/yarn-project/archiver/src/archiver/kv_archiver_store/log_store.ts @@ -94,7 +94,7 @@ export class LogStore { } const tag = new Fr(correctedBuffer); - this.#log.verbose(`Found tagged unencrypted log with tag ${tag.toString()} in block ${block.number}`); + this.#log.debug(`Found tagged unencrypted log with tag ${tag.toString()} in block ${block.number}`); const currentLogs = taggedLogs.get(tag.toString()) ?? 
[]; currentLogs.push( new TxScopedL2Log(txHash, dataStartIndexForTx, block.number, /* isFromPublic */ true, log.data).toBuffer(), diff --git a/yarn-project/aztec-faucet/terraform/main.tf b/yarn-project/aztec-faucet/terraform/main.tf index 2326d9e3e4e..d77dd205300 100644 --- a/yarn-project/aztec-faucet/terraform/main.tf +++ b/yarn-project/aztec-faucet/terraform/main.tf @@ -107,8 +107,8 @@ resource "aws_ecs_task_definition" "aztec-faucet" { value = "80" }, { - name = "DEBUG", - value = "aztec:*" + name = "LOG_LEVEL", + value = "verbose" }, { name = "RPC_URL", diff --git a/yarn-project/aztec-faucet/terraform/variables.tf b/yarn-project/aztec-faucet/terraform/variables.tf index f1d2fbf5c86..992719e667d 100644 --- a/yarn-project/aztec-faucet/terraform/variables.tf +++ b/yarn-project/aztec-faucet/terraform/variables.tf @@ -35,6 +35,10 @@ variable "FEE_JUICE_CONTRACT_ADDRESS" { type = string } +variable "STAKING_ASSET_CONTRACT_ADDRESS" { + type = string +} + variable "DEV_COIN_CONTRACT_ADDRESS" { type = string } diff --git a/yarn-project/aztec-node/src/aztec-node/server.ts b/yarn-project/aztec-node/src/aztec-node/server.ts index 587d97371a4..57690bd78d9 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.ts @@ -45,6 +45,7 @@ import { type L1_TO_L2_MSG_TREE_HEIGHT, type NOTE_HASH_TREE_HEIGHT, type NULLIFIER_TREE_HEIGHT, + type NodeInfo, type NullifierLeafPreimage, type PUBLIC_DATA_TREE_HEIGHT, type PrivateLog, @@ -237,6 +238,29 @@ export class AztecNodeService implements AztecNode { return Promise.resolve(this.p2pClient.isReady() ?? false); } + public async getNodeInfo(): Promise<NodeInfo> { + const [nodeVersion, protocolVersion, chainId, enr, contractAddresses, protocolContractAddresses] = + await Promise.all([ + this.getNodeVersion(), + this.getVersion(), + this.getChainId(), + this.getEncodedEnr(), + this.getL1ContractAddresses(), + this.getProtocolContractAddresses(), + ]); + + const nodeInfo: NodeInfo = { + nodeVersion, + l1ChainId: chainId, + protocolVersion, + enr, + l1ContractAddresses: contractAddresses, + protocolContractAddresses: protocolContractAddresses, + }; + + return nodeInfo; + } + /** * Get a block specified by its number. * @param number - The block number being requested.
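The new `getNodeInfo` implementation above fans out six reads in parallel with `Promise.all`; the destructuring relies on the resolved tuple keeping the same order as the input array. A sketch of consuming the endpoint over the node's RPC client (the URL, and `createAztecNodeClient` as the entry point, are assumptions for illustration):

```ts
import { createAztecNodeClient } from '@aztec/aztec.js';

// Assumes a node listening on the sandbox's default port.
const node = createAztecNodeClient('http://localhost:8080');
const info = await node.getNodeInfo();
console.log(`node ${info.nodeVersion} on L1 chain ${info.l1ChainId}`);
console.log(`rollup contract: ${info.l1ContractAddresses.rollupAddress.toString()}`);
```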
diff --git a/yarn-project/aztec.js/src/contract/contract.test.ts b/yarn-project/aztec.js/src/contract/contract.test.ts index 4e856779ad4..66a54e8cfb5 100644 --- a/yarn-project/aztec.js/src/contract/contract.test.ts +++ b/yarn-project/aztec.js/src/contract/contract.test.ts @@ -41,6 +41,7 @@ describe('Contract Class', () => { inboxAddress: EthAddress.random(), outboxAddress: EthAddress.random(), feeJuiceAddress: EthAddress.random(), + stakingAssetAddress: EthAddress.random(), feeJuicePortalAddress: EthAddress.random(), governanceAddress: EthAddress.random(), coinIssuerAddress: EthAddress.random(), diff --git a/yarn-project/aztec.js/src/index.ts b/yarn-project/aztec.js/src/index.ts index ca6f1011172..3d3ac097559 100644 --- a/yarn-project/aztec.js/src/index.ts +++ b/yarn-project/aztec.js/src/index.ts @@ -42,7 +42,6 @@ export { ContractDeployer } from './deployment/index.js'; export { AnvilTestWatcher, CheatCodes, - EthCheatCodes, L1FeeJuicePortalManager, L1ToL2TokenPortalManager, L1TokenManager, @@ -157,7 +156,7 @@ export { decodeFromAbi, encodeArguments, type AbiType } from '@aztec/foundation/ export { toBigIntBE } from '@aztec/foundation/bigint-buffer'; export { sha256 } from '@aztec/foundation/crypto'; export { makeFetch } from '@aztec/foundation/json-rpc/client'; -export { createDebugLogger, onLog, type DebugLogger } from '@aztec/foundation/log'; +export { createDebugLogger, type DebugLogger } from '@aztec/foundation/log'; export { retry, retryUntil } from '@aztec/foundation/retry'; export { to2Fields, toBigInt } from '@aztec/foundation/serialize'; export { sleep } from '@aztec/foundation/sleep'; @@ -165,7 +164,7 @@ export { elapsed } from '@aztec/foundation/timer'; export { type FieldsOf } from '@aztec/foundation/types'; export { fileURLToPath } from '@aztec/foundation/url'; -export { deployL1Contract, deployL1Contracts, type DeployL1Contracts } from '@aztec/ethereum'; +export { type DeployL1Contracts, EthCheatCodes, deployL1Contract, deployL1Contracts } from '@aztec/ethereum'; // Start of section that exports public api via granular api. // Here you *can* do `export *` as the granular api defacto exports things explicitly. diff --git a/yarn-project/aztec.js/src/utils/cheat_codes.ts b/yarn-project/aztec.js/src/utils/cheat_codes.ts index f35ae53c25f..10b837a9b04 100644 --- a/yarn-project/aztec.js/src/utils/cheat_codes.ts +++ b/yarn-project/aztec.js/src/utils/cheat_codes.ts @@ -1,13 +1,10 @@ import { type EpochProofClaim, type Note, type PXE } from '@aztec/circuit-types'; import { type AztecAddress, EthAddress, Fr } from '@aztec/circuits.js'; import { deriveStorageSlotInMap } from '@aztec/circuits.js/hash'; -import { type L1ContractAddresses } from '@aztec/ethereum'; -import { toBigIntBE, toHex } from '@aztec/foundation/bigint-buffer'; -import { keccak256 } from '@aztec/foundation/crypto'; +import { EthCheatCodes, type L1ContractAddresses } from '@aztec/ethereum'; import { createDebugLogger } from '@aztec/foundation/log'; import { RollupAbi } from '@aztec/l1-artifacts'; -import fs from 'fs'; import { type GetContractReturnType, type Hex, @@ -49,248 +46,6 @@ export class CheatCodes { } } -/** - * A class that provides utility functions for interacting with ethereum (L1). 
- */ -export class EthCheatCodes { - constructor( - /** - * The RPC URL to use for interacting with the chain - */ - public rpcUrl: string, - /** - * The logger to use for the eth cheatcodes - */ - public logger = createDebugLogger('aztec:cheat_codes:eth'), - ) {} - - async rpcCall(method: string, params: any[]) { - const paramsString = JSON.stringify(params); - const content = { - body: `{"jsonrpc":"2.0", "method": "${method}", "params": ${paramsString}, "id": 1}`, - method: 'POST', - headers: { 'Content-Type': 'application/json' }, - }; - return await (await fetch(this.rpcUrl, content)).json(); - } - - /** - * Get the auto mine status of the underlying chain - * @returns True if automine is on, false otherwise - */ - public async isAutoMining(): Promise<boolean> { - try { - const res = await this.rpcCall('anvil_getAutomine', []); - return res.result; - } catch (err) { - this.logger.error(`Calling "anvil_getAutomine" failed with:`, err); - } - return false; - } - - /** - * Get the current blocknumber - * @returns The current block number - */ - public async blockNumber(): Promise<number> { - const res = await this.rpcCall('eth_blockNumber', []); - return parseInt(res.result, 16); - } - - /** - * Get the current chainId - * @returns The current chainId - */ - public async chainId(): Promise<number> { - const res = await this.rpcCall('eth_chainId', []); - return parseInt(res.result, 16); - } - - /** - * Get the current timestamp - * @returns The current timestamp - */ - public async timestamp(): Promise<number> { - const res = await this.rpcCall('eth_getBlockByNumber', ['latest', true]); - return parseInt(res.result.timestamp, 16); - } - - /** - * Advance the chain by a number of blocks - * @param numberOfBlocks - The number of blocks to mine - * @returns The current chainId - */ - public async mine(numberOfBlocks = 1): Promise<void> { - const res = await this.rpcCall('hardhat_mine', [numberOfBlocks]); - if (res.error) { - throw new Error(`Error mining: ${res.error.message}`); - } - this.logger.verbose(`Mined ${numberOfBlocks} L1 blocks`); - } - - /** - * Set the balance of an account - * @param account - The account to set the balance for - * @param balance - The balance to set - */ - public async setBalance(account: EthAddress, balance: bigint): Promise<void> { - const res = await this.rpcCall('anvil_setBalance', [account.toString(), toHex(balance)]); - if (res.error) { - throw new Error(`Error setting balance for ${account}: ${res.error.message}`); - } - this.logger.verbose(`Set balance for ${account} to ${balance}`); - } - - /** - * Set the interval between blocks (block time) - * @param interval - The interval to use between blocks - */ - public async setBlockInterval(interval: number): Promise<void> { - const res = await this.rpcCall('anvil_setBlockTimestampInterval', [interval]); - if (res.error) { - throw new Error(`Error setting block interval: ${res.error.message}`); - } - this.logger.verbose(`Set L1 block interval to ${interval}`); - } - - /** - * Set the next block timestamp - * @param timestamp - The timestamp to set the next block to - */ - public async setNextBlockTimestamp(timestamp: number): Promise<void> { - const res = await this.rpcCall('evm_setNextBlockTimestamp', [timestamp]); - if (res.error) { - throw new Error(`Error setting next block timestamp: ${res.error.message}`); - } - this.logger.verbose(`Set L1 next block timestamp to ${timestamp}`); - } - - /** - * Set the next block timestamp and mines the block - * @param timestamp - The timestamp to set the next block to - */ - public async warp(timestamp: number | bigint): Promise<void> { - const res = await this.rpcCall('evm_setNextBlockTimestamp', [Number(timestamp)]); - if (res.error) { - throw new Error(`Error warping: ${res.error.message}`); - } - await this.mine(); - this.logger.verbose(`Warped L1 timestamp to ${timestamp}`); - } - - /** - * Dumps the current chain state to a file. - * @param fileName - The file name to dump state into - */ - public async dumpChainState(fileName: string): Promise<void> { - const res = await this.rpcCall('hardhat_dumpState', []); - if (res.error) { - throw new Error(`Error dumping state: ${res.error.message}`); - } - const jsonContent = JSON.stringify(res.result); - fs.writeFileSync(`${fileName}.json`, jsonContent, 'utf8'); - this.logger.verbose(`Dumped state to ${fileName}`); - } - - /** - * Loads the chain state from a file. - * @param fileName - The file name to load state from - */ - public async loadChainState(fileName: string): Promise<void> { - const data = JSON.parse(fs.readFileSync(`${fileName}.json`, 'utf8')); - const res = await this.rpcCall('hardhat_loadState', [data]); - if (res.error) { - throw new Error(`Error loading state: ${res.error.message}`); - } - this.logger.verbose(`Loaded state from ${fileName}`); - } - - /** - * Load the value at a storage slot of a contract address on eth - * @param contract - The contract address - * @param slot - The storage slot - * @returns - The value at the storage slot - */ - public async load(contract: EthAddress, slot: bigint): Promise<bigint> { - const res = await this.rpcCall('eth_getStorageAt', [contract.toString(), toHex(slot), 'latest']); - return BigInt(res.result); - } - - /** - * Set the value at a storage slot of a contract address on eth - * @param contract - The contract address - * @param slot - The storage slot - * @param value - The value to set the storage slot to - */ - public async store(contract: EthAddress, slot: bigint, value: bigint): Promise<void> { - // for the rpc call, we need to change value to be a 32 byte hex string. - const res = await this.rpcCall('hardhat_setStorageAt', [contract.toString(), toHex(slot), toHex(value, true)]); - if (res.error) { - throw new Error(`Error setting storage for contract ${contract} at ${slot}: ${res.error.message}`); - } - this.logger.verbose(`Set L1 storage for contract ${contract} at ${slot} to ${value}`); - } - - /** - * Computes the slot value for a given map and key. - * @param baseSlot - The base slot of the map (specified in Aztec.nr contract) - * @param key - The key to lookup in the map - * @returns The storage slot of the value in the map - */ - public keccak256(baseSlot: bigint, key: bigint): bigint { - // abi encode (removing the 0x) - concat key and baseSlot (both padded to 32 bytes) - const abiEncoded = toHex(key, true).substring(2) + toHex(baseSlot, true).substring(2); - return toBigIntBE(keccak256(Buffer.from(abiEncoded, 'hex'))); - } - - /** - * Send transactions impersonating an externally owned account or contract. - * @param who - The address to impersonate - */ - public async startImpersonating(who: EthAddress | Hex): Promise<void> { - const res = await this.rpcCall('hardhat_impersonateAccount', [who.toString()]); - if (res.error) { - throw new Error(`Error impersonating ${who}: ${res.error.message}`); - } - this.logger.verbose(`Impersonating ${who}`); - } - - /** - * Stop impersonating an account that you are currently impersonating. - * @param who - The address to stop impersonating - */ - public async stopImpersonating(who: EthAddress | Hex): Promise<void> { - const res = await this.rpcCall('hardhat_stopImpersonatingAccount', [who.toString()]); - if (res.error) { - throw new Error(`Error when stopping the impersonation of ${who}: ${res.error.message}`); - } - this.logger.verbose(`Stopped impersonating ${who}`); - } - - /** - * Set the bytecode for a contract - * @param contract - The contract address - * @param bytecode - The bytecode to set - */ - public async etch(contract: EthAddress, bytecode: `0x${string}`): Promise<void> { - const res = await this.rpcCall('hardhat_setCode', [contract.toString(), bytecode]); - if (res.error) { - throw new Error(`Error setting bytecode for ${contract}: ${res.error.message}`); - } - this.logger.verbose(`Set bytecode for ${contract} to ${bytecode}`); - } - - /** - * Get the bytecode for a contract - * @param contract - The contract address - * @returns The bytecode for the contract - */ - public async getBytecode(contract: EthAddress): Promise<`0x${string}`> { - const res = await this.rpcCall('eth_getCode', [contract.toString(), 'latest']); - return res.result; - } -} - /** Cheat codes for the L1 rollup contract. */ export class RollupCheatCodes { private client: WalletClient & PublicClient; @@ -327,8 +82,11 @@ export class RollupCheatCodes { /** The pending chain tip */ pending: bigint; /** The proven chain tip */ proven: bigint; }> { - const [pending, proven] = await this.rollup.read.tips(); - return { pending, proven }; + const res = await this.rollup.read.getTips(); + return { + pending: res.pendingBlockNumber, + proven: res.provenBlockNumber, + }; } /** Fetches the epoch and slot duration config from the rollup contract */ @@ -370,8 +128,13 @@ export class RollupCheatCodes { /** Returns the current proof claim (if any) */ public async getProofClaim(): Promise<EpochProofClaim | undefined> { // REFACTOR: This code is duplicated from l1-publisher - const [epochToProve, basisPointFee, bondAmount, bondProviderHex, proposerClaimantHex] = - await this.rollup.read.proofClaim(); + const { + epochToProve, + basisPointFee, + bondAmount, + bondProvider: bondProviderHex, + proposerClaimant: proposerClaimantHex, + } = await this.rollup.read.getProofClaim(); const bondProvider = EthAddress.fromString(bondProviderHex); const proposerClaimant = EthAddress.fromString(proposerClaimantHex); @@ -396,7 +159,7 @@ export class RollupCheatCodes { public async markAsProven(maybeBlockNumber?: number | bigint) { const blockNumber = maybeBlockNumber ? BigInt(maybeBlockNumber) - : await this.rollup.read.tips().then(([pending]) => pending); + : await this.rollup.read.getTips().then(({ pendingBlockNumber }) => pendingBlockNumber); await this.asOwner(async account => { await this.rollup.write.setAssumeProvenThroughBlockNumber([blockNumber], { account, chain: this.client.chain }); diff --git a/yarn-project/aztec.js/src/wallet/account_wallet.ts b/yarn-project/aztec.js/src/wallet/account_wallet.ts index 1a891160850..781b8ab454e 100644 --- a/yarn-project/aztec.js/src/wallet/account_wallet.ts +++ b/yarn-project/aztec.js/src/wallet/account_wallet.ts @@ -34,6 +34,10 @@ export class AccountWallet extends BaseWallet { return this.account.getVersion(); } + override isL1ToL2MessageSynced(l1ToL2Message: Fr): Promise<boolean> { + return this.pxe.isL1ToL2MessageSynced(l1ToL2Message); + } + /** * Computes an authentication witness from either a message hash or an intent.
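The `EthCheatCodes` class deleted above is not gone: as the `aztec.js` index hunk earlier in this diff shows, it is now exported from `@aztec/ethereum`. Existing callers only need a new import; a quick sketch against a local anvil node (the RPC URL is an assumption), using methods carried over from the removed implementation:

```ts
import { EthCheatCodes } from '@aztec/ethereum';

const eth = new EthCheatCodes('http://localhost:8545');
await eth.mine(2);                              // advance two L1 blocks
await eth.warp((await eth.timestamp()) + 3600); // jump one hour ahead and mine
```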
* diff --git a/yarn-project/aztec.js/src/wallet/base_wallet.ts b/yarn-project/aztec.js/src/wallet/base_wallet.ts index aaf2bea1954..98e0d1451ae 100644 --- a/yarn-project/aztec.js/src/wallet/base_wallet.ts +++ b/yarn-project/aztec.js/src/wallet/base_wallet.ts @@ -44,6 +44,8 @@ import { type IntentAction, type IntentInnerHash } from '../utils/authwit.js'; export abstract class BaseWallet implements Wallet { constructor(protected readonly pxe: PXE, private scopes?: AztecAddress[]) {} + abstract isL1ToL2MessageSynced(l1ToL2Message: Fr): Promise<boolean>; + abstract getCompleteAddress(): CompleteAddress; abstract getChainId(): Fr; diff --git a/yarn-project/aztec.js/src/wallet/signerless_wallet.ts b/yarn-project/aztec.js/src/wallet/signerless_wallet.ts index 0231d79b01a..2912d65e1d1 100644 --- a/yarn-project/aztec.js/src/wallet/signerless_wallet.ts +++ b/yarn-project/aztec.js/src/wallet/signerless_wallet.ts @@ -42,4 +42,8 @@ export class SignerlessWallet extends BaseWallet { createAuthWit(_intent: Fr | Buffer | IntentInnerHash | IntentAction): Promise<AuthWitness> { throw new Error('SignerlessWallet: Method createAuthWit not implemented.'); } + + override isL1ToL2MessageSynced(l1ToL2Message: Fr): Promise<boolean> { + return this.pxe.isL1ToL2MessageSynced(l1ToL2Message); + } } diff --git a/yarn-project/aztec.js/webpack.config.js b/yarn-project/aztec.js/webpack.config.js index d377a5fa056..3ba9561af4e 100644 --- a/yarn-project/aztec.js/webpack.config.js +++ b/yarn-project/aztec.js/webpack.config.js @@ -61,6 +61,7 @@ export default { fs: false, path: false, url: false, + tty: false, worker_threads: false, buffer: require.resolve('buffer/'), util: require.resolve('util/'), diff --git a/yarn-project/aztec/docker-compose.yml b/yarn-project/aztec/docker-compose.yml index f26a2e54828..3fe35cd42f8 100644 --- a/yarn-project/aztec/docker-compose.yml +++ b/yarn-project/aztec/docker-compose.yml @@ -21,7 +21,7 @@ services: ports: - '${SANDBOX_PXE_PORT:-8080}:8080' environment: - DEBUG: # Loaded from the user shell if explicitly set + LOG_LEVEL: # Loaded from the user shell if explicitly set HOST_WORKDIR: '${PWD}' # Loaded from the user shell to show log files absolute path in host ETHEREUM_HOST: http://ethereum:8545 L1_CHAIN_ID: 31337 diff --git a/yarn-project/aztec/package.json b/yarn-project/aztec/package.json index e9ff10afa77..f8732bff89b 100644 --- a/yarn-project/aztec/package.json +++ b/yarn-project/aztec/package.json @@ -22,7 +22,7 @@ "formatting:fix": "run -T eslint --fix ./src && run -T prettier -w ./src", "build:dev": "tsc -b --watch", "test": "NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --passWithNoTests", - "run:example:token": "DEBUG='aztec:*' node ./dest/examples/token.js" + "run:example:token": "LOG_LEVEL='verbose' node ./dest/examples/token.js" }, "inherits": [ "../package.common.json" ], @@ -55,16 +55,13 @@ "@aztec/telemetry-client": "workspace:^", "@aztec/txe": "workspace:^", "@aztec/types": "workspace:^", - "@opentelemetry/winston-transport": "^0.7.0", "@types/chalk": "^2.2.0", "abitype": "^0.8.11", "chalk": "^5.3.0", "commander": "^12.1.0", "koa": "^2.14.2", "koa-router": "^12.0.0", - "viem": "^2.7.15", - "winston": "^3.10.0", - "winston-daily-rotate-file": "^4.7.1" + "viem": "^2.7.15" }, "files": [ "dest", @@ -117,4 +114,4 @@ "engines": { "node": ">=18" } -} +} \ No newline at end of file diff --git a/yarn-project/aztec/src/cli/aztec_start_options.ts b/yarn-project/aztec/src/cli/aztec_start_options.ts index 90b0a970092..6e3b05512bc 100644 ---
a/yarn-project/aztec/src/cli/aztec_start_options.ts +++ b/yarn-project/aztec/src/cli/aztec_start_options.ts @@ -143,6 +143,12 @@ export const aztecStartOptions: { [key: string]: AztecStartOption[] } = { defaultValue: undefined, envVar: 'FEE_JUICE_CONTRACT_ADDRESS', }, + { + flag: '--staking-asset-address ', + description: 'The deployed L1 Staking Asset contract address', + defaultValue: undefined, + envVar: 'STAKING_ASSET_CONTRACT_ADDRESS', + }, { flag: '--fee-juice-portal-address ', description: 'The deployed L1 Fee Juice portal contract address', diff --git a/yarn-project/aztec/src/cli/cli.ts b/yarn-project/aztec/src/cli/cli.ts index 91d803851e4..b6b314bd63c 100644 --- a/yarn-project/aztec/src/cli/cli.ts +++ b/yarn-project/aztec/src/cli/cli.ts @@ -9,7 +9,6 @@ import { type DebugLogger, type LogFn } from '@aztec/foundation/log'; import { Command } from 'commander'; -import { setupConsoleJsonLog } from '../logging.js'; import { createSandbox } from '../sandbox.js'; import { github, splash } from '../splash.js'; import { aztecStartOptions } from './aztec_start_options.js'; @@ -39,11 +38,6 @@ export function injectAztecCommands(program: Command, userLog: LogFn, debugLogge startCmd.helpInformation = printAztecStartHelpText; startCmd.action(async options => { - // setup json logging - if (['1', 'true', 'TRUE'].includes(process.env.LOG_JSON ?? '')) { - setupConsoleJsonLog(); - } - // list of 'stop' functions to call when process ends const signalHandlers: Array<() => Promise> = []; const services: NamespacedApiHandlers = {}; diff --git a/yarn-project/aztec/src/cli/cmds/start_prover_agent.ts b/yarn-project/aztec/src/cli/cmds/start_prover_agent.ts index 3ae24df0ad9..df382eb6251 100644 --- a/yarn-project/aztec/src/cli/cmds/start_prover_agent.ts +++ b/yarn-project/aztec/src/cli/cmds/start_prover_agent.ts @@ -40,7 +40,18 @@ export async function startProverAgent( ); const prover = await buildServerCircuitProver(config, telemetry); const proofStore = new InlineProofStore(); - const agents = times(config.proverAgentCount, () => new ProvingAgent(broker, proofStore, prover)); + const agents = times( + config.proverAgentCount, + () => + new ProvingAgent( + broker, + proofStore, + prover, + telemetry, + config.proverAgentProofTypes, + config.proverAgentPollIntervalMs, + ), + ); await Promise.all(agents.map(agent => agent.start())); diff --git a/yarn-project/aztec/src/cli/cmds/start_prover_broker.ts b/yarn-project/aztec/src/cli/cmds/start_prover_broker.ts index 197d48971c9..ce5ef637ff6 100644 --- a/yarn-project/aztec/src/cli/cmds/start_prover_broker.ts +++ b/yarn-project/aztec/src/cli/cmds/start_prover_broker.ts @@ -3,6 +3,10 @@ import { type NamespacedApiHandlers } from '@aztec/foundation/json-rpc/server'; import { type LogFn } from '@aztec/foundation/log'; import { ProvingJobBrokerSchema, createAndStartProvingBroker } from '@aztec/prover-client/broker'; import { getProverNodeBrokerConfigFromEnv } from '@aztec/prover-node'; +import { + createAndStartTelemetryClient, + getConfigEnvVars as getTelemetryClientConfig, +} from '@aztec/telemetry-client/start'; import { extractRelevantOptions } from '../util.js'; @@ -22,7 +26,8 @@ export async function startProverBroker( ...extractRelevantOptions(options, proverBrokerConfigMappings, 'proverBroker'), // override with command line options }; - const broker = await createAndStartProvingBroker(config); + const client = await createAndStartTelemetryClient(getTelemetryClientConfig()); + const broker = await createAndStartProvingBroker(config, client); 
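Before the service registration that continues below, note the new wiring in `startProverBroker`: it now provisions a telemetry client itself and hands it to the broker explicitly. The same pattern, reduced to its essentials (imports as in the hunk above; `config` stands for the broker configuration assembled from env vars and CLI options, so treat this as a sketch rather than the exact bootstrap):

```ts
import { createAndStartProvingBroker } from '@aztec/prover-client/broker';
import {
  createAndStartTelemetryClient,
  getConfigEnvVars as getTelemetryClientConfig,
} from '@aztec/telemetry-client/start';

// Stand up telemetry first so the broker's metrics have a sink from the start.
const telemetry = await createAndStartTelemetryClient(getTelemetryClientConfig());
const broker = await createAndStartProvingBroker(config, telemetry);
```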
services.proverBroker = [broker, ProvingJobBrokerSchema]; signalHandlers.push(() => broker.stop()); diff --git a/yarn-project/aztec/src/logging.ts b/yarn-project/aztec/src/logging.ts deleted file mode 100644 index a7deed55ae5..00000000000 --- a/yarn-project/aztec/src/logging.ts +++ /dev/null @@ -1,69 +0,0 @@ -import { currentLevel, onLog, setLevel } from '@aztec/foundation/log'; - -import { OpenTelemetryTransportV3 } from '@opentelemetry/winston-transport'; -import * as path from 'path'; -import * as process from 'process'; -import * as winston from 'winston'; -import DailyRotateFile from 'winston-daily-rotate-file'; - -const { format } = winston; -const CURRENT_LOG_FILE_NAME = 'aztec.debug.log'; -const LOG_DIR = 'log'; - -/** Creates a winston logger that logs everything to a local rotating file */ -function createWinstonLocalFileLogger() { - // See https://www.npmjs.com/package/winston-daily-rotate-file#user-content-options - const transport: DailyRotateFile = new DailyRotateFile({ - filename: 'aztec-%DATE%.debug.log', - dirname: LOG_DIR, - datePattern: 'YYYY-MM-DD', - zippedArchive: true, - maxSize: '30m', - maxFiles: '5', - createSymlink: true, - symlinkName: CURRENT_LOG_FILE_NAME, - }); - - return winston.createLogger({ - level: 'debug', - transports: [transport], - format: format.combine(format.timestamp(), format.json()), - }); -} - -/** Creates a winston logger that logs everything to stdout in json format */ -function createWinstonJsonStdoutLogger() { - return winston.createLogger({ - level: currentLevel, - transports: [ - new winston.transports.Console({ - format: format.combine(format.timestamp(), format.json()), - }), - new OpenTelemetryTransportV3(), - ], - }); -} - -/** - * Hooks to all log statements and outputs them to a local rotating file. - * @returns Output log name. - */ -export function setupFileDebugLog() { - const logger = createWinstonLocalFileLogger(); - onLog((level, module, message, data) => { - logger.log({ ...data, level, module, message }); - }); - const workdir = process.env.HOST_WORKDIR ?? process.cwd(); - return path.join(workdir, LOG_DIR, CURRENT_LOG_FILE_NAME); -} - -/** - * Silences the foundation stdout logger and funnels all logs through a winston JSON logger. 
- */ -export function setupConsoleJsonLog() { - const logger = createWinstonJsonStdoutLogger(); - setLevel('silent'); - onLog((level, module, message, data) => { - logger.log({ ...data, level, module, message }); - }); -} diff --git a/yarn-project/aztec/terraform/node/main.tf b/yarn-project/aztec/terraform/node/main.tf index f419a3568a7..ebb85c4461d 100644 --- a/yarn-project/aztec/terraform/node/main.tf +++ b/yarn-project/aztec/terraform/node/main.tf @@ -252,10 +252,6 @@ resource "aws_ecs_task_definition" "aztec-node" { name = "AZTEC_PORT" value = "80" }, - { - name = "DEBUG" - value = "aztec:*,-json-rpc:json_proxy:*,-aztec:avm_simulator:*" - }, { name = "ETHEREUM_HOST" value = "${local.eth_host}" @@ -324,6 +320,10 @@ resource "aws_ecs_task_definition" "aztec-node" { name = "FEE_JUICE_CONTRACT_ADDRESS" value = data.terraform_remote_state.l1_contracts.outputs.fee_juice_contract_address }, + { + name = "STAKING_ASSET_CONTRACT_ADDRESS" + value = data.terraform_remote_state.l1_contracts.outputs.staking_asset_contract_address + }, { name = "FEE_JUICE_PORTAL_CONTRACT_ADDRESS" value = data.terraform_remote_state.l1_contracts.outputs.FEE_JUICE_PORTAL_CONTRACT_ADDRESS diff --git a/yarn-project/aztec/terraform/prover-node/main.tf b/yarn-project/aztec/terraform/prover-node/main.tf index 45bdfcb0be8..0577e0fa77f 100644 --- a/yarn-project/aztec/terraform/prover-node/main.tf +++ b/yarn-project/aztec/terraform/prover-node/main.tf @@ -235,7 +235,6 @@ resource "aws_ecs_task_definition" "aztec-prover-node" { { name = "NODE_ENV", value = "production" }, { name = "LOG_LEVEL", value = "verbose" }, { name = "LOG_JSON", value = "1" }, - { name = "DEBUG", value = "aztec:*,-json-rpc:json_proxy:*,-aztec:avm_simulator:*" }, { name = "DEPLOY_TAG", value = var.DEPLOY_TAG }, { name = "NETWORK_NAME", value = "${var.DEPLOY_TAG}" }, { name = "ETHEREUM_HOST", value = "${local.eth_host}" }, @@ -275,6 +274,7 @@ resource "aws_ecs_task_definition" "aztec-prover-node" { { name = "OUTBOX_CONTRACT_ADDRESS", value = data.terraform_remote_state.l1_contracts.outputs.outbox_contract_address }, { name = "REGISTRY_CONTRACT_ADDRESS", value = data.terraform_remote_state.l1_contracts.outputs.registry_contract_address }, { name = "FEE_JUICE_CONTRACT_ADDRESS", value = data.terraform_remote_state.l1_contracts.outputs.fee_juice_contract_address }, + { name = "STAKING_ASSET_CONTRACT_ADDRESS", value = data.terraform_remote_state.l1_contracts.outputs.staking_asset_contract_address }, { name = "FEE_JUICE_PORTAL_CONTRACT_ADDRESS", value = data.terraform_remote_state.l1_contracts.outputs.FEE_JUICE_PORTAL_CONTRACT_ADDRESS }, // P2P (disabled) diff --git a/yarn-project/aztec/terraform/prover/main.tf b/yarn-project/aztec/terraform/prover/main.tf index 72b48ff520b..97f62ee8995 100644 --- a/yarn-project/aztec/terraform/prover/main.tf +++ b/yarn-project/aztec/terraform/prover/main.tf @@ -250,8 +250,8 @@ resource "aws_ecs_task_definition" "aztec-proving-agent" { "value": "production" }, { - "name": "DEBUG", - "value": "aztec:*" + "name": "LOG_LEVEL", + "value": "verbose" }, { "name": "DEPLOY_TAG", diff --git a/yarn-project/bb-prover/src/avm_proving.test.ts b/yarn-project/bb-prover/src/avm_proving.test.ts index 3e0ae84cf22..6195eb0850a 100644 --- a/yarn-project/bb-prover/src/avm_proving.test.ts +++ b/yarn-project/bb-prover/src/avm_proving.test.ts @@ -30,7 +30,7 @@ async function proveAndVerifyAvmTestContract(functionName: string, calldata: Fr[ const bbWorkingDirectory = await fs.mkdtemp(path.join(tmpdir(), 'bb-')); // Then we prove. 
- const proofRes = await generateAvmProof(bbPath, bbWorkingDirectory, avmCircuitInputs, logger); + const proofRes = await generateAvmProof(bbPath, bbWorkingDirectory, avmCircuitInputs, internalLogger); if (proofRes.status === BB_RESULT.FAILURE) { internalLogger.error(`Proof generation failed: ${proofRes.reason}`); } diff --git a/yarn-project/bb-prover/src/bb/execute.ts b/yarn-project/bb-prover/src/bb/execute.ts index a14598863a9..b8acb266fd7 100644 --- a/yarn-project/bb-prover/src/bb/execute.ts +++ b/yarn-project/bb-prover/src/bb/execute.ts @@ -1,6 +1,6 @@ import { type AvmCircuitInputs } from '@aztec/circuits.js'; import { sha256 } from '@aztec/foundation/crypto'; -import { type LogFn, currentLevel as currentLogLevel } from '@aztec/foundation/log'; +import { type DebugLogger, type LogFn } from '@aztec/foundation/log'; import { Timer } from '@aztec/foundation/timer'; import { type NoirCompiledCircuit } from '@aztec/types/noir'; @@ -202,6 +202,7 @@ export async function executeBbClientIvcProof( bytecodeStackPath: string, witnessStackPath: string, log: LogFn, + noAutoVerify = false, ): Promise { // Check that the working directory exists try { @@ -238,6 +239,9 @@ export async function executeBbClientIvcProof( '--input_type', 'runtime_stack', ]; + if (noAutoVerify) { + args.push('--no_auto_verify'); + } const timer = new Timer(); const logFunction = (message: string) => { log(`bb - ${message}`); @@ -505,7 +509,7 @@ export async function generateAvmProof( pathToBB: string, workingDirectory: string, input: AvmCircuitInputs, - log: LogFn, + logger: DebugLogger, ): Promise { // Check that the working directory exists try { @@ -562,11 +566,11 @@ export async function generateAvmProof( avmHintsPath, '-o', outputPath, - currentLogLevel == 'debug' ? '-d' : currentLogLevel == 'verbose' ? '-v' : '', + logger.level === 'debug' || logger.level === 'trace' ? '-d' : logger.level === 'verbose' ? 
'-v' : '', ]; const timer = new Timer(); const logFunction = (message: string) => { - log(`AvmCircuit (prove) BB out - ${message}`); + logger.verbose(`AvmCircuit (prove) BB out - ${message}`); }; const result = await executeBB(pathToBB, 'avm_prove', args, logFunction); const duration = timer.ms(); diff --git a/yarn-project/bb-prover/src/prover/bb_prover.ts b/yarn-project/bb-prover/src/prover/bb_prover.ts index f737b093a38..57c957382c9 100644 --- a/yarn-project/bb-prover/src/prover/bb_prover.ts +++ b/yarn-project/bb-prover/src/prover/bb_prover.ts @@ -535,7 +535,7 @@ export class BBNativeRollupProver implements ServerCircuitProver { private async generateAvmProofWithBB(input: AvmCircuitInputs, workingDirectory: string): Promise { logger.info(`Proving avm-circuit for ${input.functionName}...`); - const provingResult = await generateAvmProof(this.config.bbBinaryPath, workingDirectory, input, logger.verbose); + const provingResult = await generateAvmProof(this.config.bbBinaryPath, workingDirectory, input, logger); if (provingResult.status === BB_RESULT.FAILURE) { logger.error(`Failed to generate AVM proof for ${input.functionName}: ${provingResult.reason}`); diff --git a/yarn-project/circuit-types/src/interfaces/aztec-node.test.ts b/yarn-project/circuit-types/src/interfaces/aztec-node.test.ts index c2db85e5250..30d729f4750 100644 --- a/yarn-project/circuit-types/src/interfaces/aztec-node.test.ts +++ b/yarn-project/circuit-types/src/interfaces/aztec-node.test.ts @@ -10,6 +10,7 @@ import { L1_TO_L2_MSG_TREE_HEIGHT, NOTE_HASH_TREE_HEIGHT, NULLIFIER_TREE_HEIGHT, + type NodeInfo, PUBLIC_DATA_TREE_HEIGHT, PrivateLog, type ProtocolContractAddresses, @@ -179,6 +180,19 @@ describe('AztecNodeApiSchema', () => { expect(response).toBe(true); }); + it('getNodeInfo', async () => { + const response = await context.client.getNodeInfo(); + expect(response).toEqual({ + ...(await handler.getNodeInfo()), + l1ContractAddresses: Object.fromEntries( + L1ContractsNames.map(name => [name, expect.any(EthAddress)]), + ) as L1ContractAddresses, + protocolContractAddresses: Object.fromEntries( + ProtocolContractsNames.map(name => [name, expect.any(AztecAddress)]), + ) as ProtocolContractAddresses, + }); + }); + it('getBlocks', async () => { const response = await context.client.getBlocks(1, 1); expect(response).toHaveLength(1); @@ -451,6 +465,20 @@ class MockAztecNode implements AztecNode { isReady(): Promise<boolean> { return Promise.resolve(true); } + getNodeInfo(): Promise<NodeInfo> { + return Promise.resolve({ + nodeVersion: '1.0', + l1ChainId: 1, + protocolVersion: 1, + enr: 'enr', + l1ContractAddresses: Object.fromEntries( + L1ContractsNames.map(name => [name, EthAddress.random()]), + ) as L1ContractAddresses, + protocolContractAddresses: Object.fromEntries( + ProtocolContractsNames.map(name => [name, AztecAddress.random()]), + ) as ProtocolContractAddresses, + }); + } getBlocks(from: number, limit: number): Promise<L2Block[]> { return Promise.resolve(times(limit, i => L2Block.random(from + i))); } diff --git a/yarn-project/circuit-types/src/interfaces/aztec-node.ts b/yarn-project/circuit-types/src/interfaces/aztec-node.ts index 457d188acf0..96ac1a1f3ed 100644 --- a/yarn-project/circuit-types/src/interfaces/aztec-node.ts +++ b/yarn-project/circuit-types/src/interfaces/aztec-node.ts @@ -9,6 +9,8 @@ import { L1_TO_L2_MSG_TREE_HEIGHT, NOTE_HASH_TREE_HEIGHT, NULLIFIER_TREE_HEIGHT, + type NodeInfo, + NodeInfoSchema, PUBLIC_DATA_TREE_HEIGHT, PrivateLog, type ProtocolContractAddresses, @@ -230,6 +232,13 @@ export interface AztecNode */ isReady(): Promise<boolean>; + /** + * Returns the information about the server's node. Includes current Node version, compatible Noir version, + * L1 chain identifier, protocol version, and L1 address of the rollup contract. + * @returns - The node information. + */ + getNodeInfo(): Promise<NodeInfo>; + /** * Method to request blocks. Will attempt to return all requested blocks but will return only those available. * @param from - The start of the range of blocks to return. @@ -508,6 +517,8 @@ export const AztecNodeApiSchema: ApiSchemaFor<AztecNode> = { isReady: z.function().returns(z.boolean()), + getNodeInfo: z.function().returns(NodeInfoSchema), + getBlocks: z.function().args(z.number(), z.number()).returns(z.array(L2Block.schema)), getCurrentBaseFees: z.function().returns(GasFees.schema), diff --git a/yarn-project/circuit-types/src/interfaces/pxe.test.ts b/yarn-project/circuit-types/src/interfaces/pxe.test.ts index 294ba519988..65976b07ac7 100644 --- a/yarn-project/circuit-types/src/interfaces/pxe.test.ts +++ b/yarn-project/circuit-types/src/interfaces/pxe.test.ts @@ -87,6 +87,10 @@ describe('PXESchema', () => { expect([...tested].sort()).toEqual(all.sort()); }); + it('isL1ToL2MessageSynced', async () => { + await context.client.isL1ToL2MessageSynced(Fr.random()); + }); + it('addAuthWitness', async () => { await context.client.addAuthWitness(AuthWitness.random()); }); @@ -326,6 +330,11 @@ class MockPXE implements PXE { private artifact: ContractArtifact, private instance: ContractInstanceWithAddress, ) {} + + isL1ToL2MessageSynced(_l1ToL2Message: Fr): Promise<boolean> { + return Promise.resolve(false); + } + addAuthWitness(authWitness: AuthWitness): Promise<void> { expect(authWitness).toBeInstanceOf(AuthWitness); return Promise.resolve(); diff --git a/yarn-project/circuit-types/src/interfaces/pxe.ts b/yarn-project/circuit-types/src/interfaces/pxe.ts index 0ff0f322ecf..2ed9f7bea75 100644 --- a/yarn-project/circuit-types/src/interfaces/pxe.ts +++ b/yarn-project/circuit-types/src/interfaces/pxe.ts @@ -53,6 +53,13 @@ import { type SyncStatus, SyncStatusSchema } from './sync-status.js'; * is exposed to dapps for interacting with the network on behalf of the user. */ export interface PXE { + /** + * Returns whether an L1 to L2 message is synced by the archiver and ready to be included in a block. + * @param l1ToL2Message - The L1 to L2 message to check. + * @returns Whether the message is synced and ready to be included in a block. + */ + isL1ToL2MessageSynced(l1ToL2Message: Fr): Promise<boolean>; + /** * Insert an auth witness for a given message hash. Auth witnesses are used to authorize actions on * behalf of a user.
For instance, a token transfer initiated by a different address may request @@ -470,6 +477,7 @@ const PXEInfoSchema = z.object({ }) satisfies ZodFor<PXEInfo>; export const PXESchema: ApiSchemaFor<PXE> = { + isL1ToL2MessageSynced: z.function().args(schemas.Fr).returns(z.boolean()), addAuthWitness: z.function().args(AuthWitness.schema).returns(z.void()), getAuthWitness: z .function() diff --git a/yarn-project/circuit-types/src/l2_block_downloader/l2_block_stream.ts b/yarn-project/circuit-types/src/l2_block_downloader/l2_block_stream.ts index 41eb4581346..bfe7e8eba6c 100644 --- a/yarn-project/circuit-types/src/l2_block_downloader/l2_block_stream.ts +++ b/yarn-project/circuit-types/src/l2_block_downloader/l2_block_stream.ts @@ -46,7 +46,7 @@ export class L2BlockStream { try { const sourceTips = await this.l2BlockSource.getL2Tips(); const localTips = await this.localData.getL2Tips(); - this.log.debug(`Running L2 block stream`, { + this.log.trace(`Running L2 block stream`, { sourceLatest: sourceTips.latest.number, localLatest: localTips.latest.number, sourceFinalized: sourceTips.finalized.number, @@ -80,7 +80,7 @@ export class L2BlockStream { while (latestBlockNumber < sourceTips.latest.number) { const from = latestBlockNumber + 1; const limit = Math.min(this.opts.batchSize ?? 20, sourceTips.latest.number - from + 1); - this.log.debug(`Requesting blocks from ${from} limit ${limit} proven=${this.opts.proven}`); + this.log.trace(`Requesting blocks from ${from} limit ${limit} proven=${this.opts.proven}`); const blocks = await this.l2BlockSource.getBlocks(from, limit, this.opts.proven); if (blocks.length === 0) { break; @@ -119,7 +119,7 @@ export class L2BlockStream { const sourceBlockHash = args.sourceCache.find(id => id.number === blockNumber && id.hash)?.hash ?? (await this.l2BlockSource.getBlockHeader(blockNumber).then(h => h?.hash().toString())); - this.log.debug(`Comparing block hashes for block ${blockNumber}`, { + this.log.trace(`Comparing block hashes for block ${blockNumber}`, { localBlockHash, sourceBlockHash, sourceCacheNumber: args.sourceCache[0]?.number, diff --git a/yarn-project/circuit-types/src/p2p/block_proposal.ts b/yarn-project/circuit-types/src/p2p/block_proposal.ts index 8e64f4c1fd9..5d2ad7f7f46 100644 --- a/yarn-project/circuit-types/src/p2p/block_proposal.ts +++ b/yarn-project/circuit-types/src/p2p/block_proposal.ts @@ -1,5 +1,5 @@ import { Buffer32 } from '@aztec/foundation/buffer'; -import { recoverAddress } from '@aztec/foundation/crypto'; +import { keccak256, recoverAddress } from '@aztec/foundation/crypto'; import { type EthAddress } from '@aztec/foundation/eth-address'; import { Signature } from '@aztec/foundation/eth-signature'; import { type Fr } from '@aztec/foundation/fields'; @@ -42,7 +42,7 @@ export class BlockProposal extends Gossipable { } override p2pMessageIdentifier(): Buffer32 { - return BlockProposalHash.fromField(this.payload.archive); + return new BlockProposalHash(keccak256(this.signature.toBuffer())); } get archive(): Fr { diff --git a/yarn-project/circuit-types/src/p2p/interface.ts b/yarn-project/circuit-types/src/p2p/interface.ts index 1e252250399..06a02794602 100644 --- a/yarn-project/circuit-types/src/p2p/interface.ts +++ b/yarn-project/circuit-types/src/p2p/interface.ts @@ -17,3 +17,15 @@ export const TopicTypeMap: Record<string, typeof Gossipable> = { [TopicType.block_attestation]: BlockAttestation as unknown as typeof Gossipable, [TopicType.epoch_proof_quote]: EpochProofQuote as unknown as typeof Gossipable, }; + +/** + * Map from topic to deserialiser + * + * Used in msgIdFn libp2p to
get the p2pMessageIdentifier from a message + */ +export const TopicToDeserializer = { + [Tx.p2pTopic]: Tx.fromBuffer, + [BlockProposal.p2pTopic]: BlockProposal.fromBuffer, + [BlockAttestation.p2pTopic]: BlockAttestation.fromBuffer, + [EpochProofQuote.p2pTopic]: EpochProofQuote.fromBuffer, +}; diff --git a/yarn-project/circuits.js/src/contract/artifact_hash.ts b/yarn-project/circuits.js/src/contract/artifact_hash.ts index a7bc52ae7ad..a170f49106d 100644 --- a/yarn-project/circuits.js/src/contract/artifact_hash.ts +++ b/yarn-project/circuits.js/src/contract/artifact_hash.ts @@ -47,7 +47,7 @@ export function computeArtifactHash( const preimage = computeArtifactHashPreimage(artifact); const artifactHash = computeArtifactHash(computeArtifactHashPreimage(artifact)); - getLogger().debug('Computed artifact hash', { artifactHash, ...preimage }); + getLogger().trace('Computed artifact hash', { artifactHash, ...preimage }); return artifactHash; } diff --git a/yarn-project/circuits.js/src/structs/global_variables.ts b/yarn-project/circuits.js/src/structs/global_variables.ts index 914dc7b5fb6..240b4d9abed 100644 --- a/yarn-project/circuits.js/src/structs/global_variables.ts +++ b/yarn-project/circuits.js/src/structs/global_variables.ts @@ -159,10 +159,22 @@ export class GlobalVariables { ); } + toInspect() { + return { + chainId: this.chainId.toNumber(), + version: this.version.toNumber(), + blockNumber: this.blockNumber.toNumber(), + slotNumber: this.slotNumber.toNumber(), + timestamp: this.timestamp.toNumber(), + coinbase: this.coinbase.toString(), + feeRecipient: this.feeRecipient.toString(), + feePerDaGas: this.gasFees.feePerDaGas.toNumber(), + feePerL2Gas: this.gasFees.feePerL2Gas.toNumber(), + }; + } + [inspect.custom]() { - return `GlobalVariables { chainId: ${this.chainId.toString()}, version: ${this.version.toString()}, blockNumber: ${this.blockNumber.toString()}, slotNumber: ${this.slotNumber.toString()}, timestamp: ${this.timestamp.toString()}, coinbase: ${this.coinbase.toString()}, feeRecipient: ${this.feeRecipient.toString()}, gasFees: ${inspect( - this.gasFees, - )} }`; + return `GlobalVariables ${inspect(this.toInspect())}`; } public equals(other: this): boolean { diff --git a/yarn-project/cli-wallet/package.json b/yarn-project/cli-wallet/package.json index e8fe9b83dfb..e33bf41c805 100644 --- a/yarn-project/cli-wallet/package.json +++ b/yarn-project/cli-wallet/package.json @@ -19,7 +19,7 @@ "scripts": { "start": "node --no-warnings ./dest/bin", "start:debug": "node --inspect=0.0.0.0:9221 --no-warnings ./dest/bin", - "dev": "DEBUG='aztec:*' LOG_LEVEL=debug && node ./dest/bin", + "dev": "LOG_LEVEL=debug && node ./dest/bin", "build": "yarn clean && tsc -b", "build:dev": "tsc -b --watch", "clean": "rm -rf ./dest .tsbuildinfo", @@ -100,4 +100,4 @@ "engines": { "node": ">=18" } -} +} \ No newline at end of file diff --git a/yarn-project/cli/src/cmds/devnet/bootstrap_network.ts b/yarn-project/cli/src/cmds/devnet/bootstrap_network.ts index e91e2948644..12073936e11 100644 --- a/yarn-project/cli/src/cmds/devnet/bootstrap_network.ts +++ b/yarn-project/cli/src/cmds/devnet/bootstrap_network.ts @@ -1,5 +1,5 @@ import { getSchnorrAccount } from '@aztec/accounts/schnorr'; -import { BatchCall, type PXE, type Wallet, createCompatibleClient } from '@aztec/aztec.js'; +import { BatchCall, type PXE, type WaitOpts, type Wallet, createCompatibleClient, retryUntil } from '@aztec/aztec.js'; import { L1FeeJuicePortalManager } from '@aztec/aztec.js'; import { type AztecAddress, type EthAddress, 
FEE_FUNDING_FOR_TESTER_ACCOUNT, Fq, Fr } from '@aztec/circuits.js'; import { @@ -20,6 +20,12 @@ type ContractDeploymentInfo = { salt: Fr; }; +const waitOpts: WaitOpts = { + timeout: 120, + provenTimeout: 1200, + interval: 1, +}; + export async function bootstrapNetwork( pxeUrl: string, l1Url: string, @@ -34,7 +40,7 @@ export async function bootstrapNetwork( // setup a one-off account contract const account = getSchnorrAccount(pxe, Fr.random(), Fq.random(), Fr.random()); - const wallet = await account.deploy().getWallet({ proven: true, provenTimeout: 600 }); + const wallet = await account.deploy().getWallet(); const l1Clients = createL1Clients( l1Url, @@ -90,9 +96,17 @@ export async function bootstrapNetwork( log(`DevCoin L1: ${erc20Address}`); log(`DevCoin L1 Portal: ${portalAddress}`); log(`DevCoin L2: ${token.address}`); + log(`DevCoin L2 init hash: ${token.initHash}`); + log(`DevCoin L2 salt: ${token.salt}`); log(`DevCoin L2 Bridge: ${bridge.address}`); + log(`DevCoin L2 Bridge init hash: ${bridge.initHash}`); + log(`DevCoin L2 Bridge salt: ${bridge.salt}`); log(`DevCoin FPC: ${fpc.address}`); + log(`DevCoin FPC init hash: ${fpc.initHash}`); + log(`DevCoin FPC salt: ${fpc.salt}`); log(`Counter: ${counter.address}`); + log(`Counter init hash: ${counter.initHash}`); + log(`Counter salt: ${counter.salt}`); } } @@ -142,17 +156,17 @@ async function deployToken( const { TokenContract, TokenBridgeContract } = await import('@aztec/noir-contracts.js'); const devCoin = await TokenContract.deploy(wallet, wallet.getAddress(), 'DevCoin', 'DEV', 18) .send({ universalDeploy: true }) - .deployed({ proven: true, provenTimeout: 600 }); + .deployed(waitOpts); const bridge = await TokenBridgeContract.deploy(wallet, devCoin.address, l1Portal) .send({ universalDeploy: true }) - .deployed({ proven: true, provenTimeout: 600 }); + .deployed(waitOpts); await new BatchCall(wallet, [ devCoin.methods.set_minter(bridge.address, true).request(), devCoin.methods.set_admin(bridge.address).request(), ]) .send() - .wait({ proven: true, provenTimeout: 600 }); + .wait(waitOpts); return { token: { @@ -204,7 +218,7 @@ async function deployFPC( const { FPCContract } = await import('@aztec/noir-contracts.js'); const fpc = await FPCContract.deploy(wallet, tokenAddress, feeRecipient) .send({ universalDeploy: true }) - .deployed({ proven: true, provenTimeout: 600 }); + .deployed(waitOpts); const info: ContractDeploymentInfo = { address: fpc.address, initHash: fpc.instance.initializationHash, @@ -219,7 +233,7 @@ async function deployCounter(wallet: Wallet): Promise<ContractDeploymentInfo> { const { CounterContract } = await import('@aztec/noir-contracts.js'); const counter = await CounterContract.deploy(wallet, 1, wallet.getAddress(), wallet.getAddress()) .send({ universalDeploy: true }) - .deployed({ proven: true, provenTimeout: 600 }); + .deployed(waitOpts); const info: ContractDeploymentInfo = { address: counter.address, initHash: counter.instance.initializationHash, @@ -253,27 +267,23 @@ async function fundFPC( ); const amount = FEE_FUNDING_FOR_TESTER_ACCOUNT; - const { claimAmount, claimSecret, messageLeafIndex } = await feeJuicePortal.bridgeTokensPublic( + const { claimAmount, claimSecret, messageLeafIndex, messageHash } = await feeJuicePortal.bridgeTokensPublic( fpcAddress, amount, true, ); + await retryUntil(async () => await wallet.isL1ToL2MessageSynced(Fr.fromString(messageHash)), 'message sync', 600, 1); + const counter = await CounterContract.at(counterAddress, wallet); // TODO (alexg) remove this once sequencer builds blocks continuously // 
advance the chain - await counter.methods - .increment(wallet.getAddress(), wallet.getAddress()) - .send() - .wait({ proven: true, provenTimeout: 600 }); - await counter.methods - .increment(wallet.getAddress(), wallet.getAddress()) - .send() - .wait({ proven: true, provenTimeout: 600 }); + await counter.methods.increment(wallet.getAddress(), wallet.getAddress()).send().wait(waitOpts); + await counter.methods.increment(wallet.getAddress(), wallet.getAddress()).send().wait(waitOpts); await feeJuiceContract.methods .claim(fpcAddress, claimAmount, claimSecret, messageLeafIndex) .send() - .wait({ proven: true, provenTimeout: 600 }); + .wait({ ...waitOpts, proven: true }); } diff --git a/yarn-project/cli/src/cmds/infrastructure/sequencers.ts b/yarn-project/cli/src/cmds/infrastructure/sequencers.ts index 341744f0a6c..3e82bcaacc2 100644 --- a/yarn-project/cli/src/cmds/infrastructure/sequencers.ts +++ b/yarn-project/cli/src/cmds/infrastructure/sequencers.ts @@ -1,7 +1,7 @@ import { createCompatibleClient } from '@aztec/aztec.js'; -import { createEthereumChain } from '@aztec/ethereum'; +import { MINIMUM_STAKE, createEthereumChain } from '@aztec/ethereum'; import { type DebugLogger, type LogFn } from '@aztec/foundation/log'; -import { RollupAbi } from '@aztec/l1-artifacts'; +import { RollupAbi, TestERC20Abi } from '@aztec/l1-artifacts'; import { createPublicClient, createWalletClient, getContract, http } from 'viem'; import { mnemonicToAccount } from 'viem/accounts'; @@ -49,7 +49,7 @@ export async function sequencers(opts: { const who = (maybeWho as `0x${string}`) ?? walletClient?.account.address.toString(); if (command === 'list') { - const sequencers = await rollup.read.getValidators(); + const sequencers = await rollup.read.getAttesters(); if (sequencers.length === 0) { log(`No sequencers registered on rollup`); } else { @@ -59,11 +59,26 @@ } } } else if (command === 'add') { - if (!who || !writeableRollup) { + if (!who || !writeableRollup || !walletClient) { throw new Error(`Missing sequencer address`); } + log(`Adding ${who} as sequencer`); - const hash = await writeableRollup.write.addValidator([who]); + + const stakingAsset = getContract({ + address: await rollup.read.STAKING_ASSET(), + abi: TestERC20Abi, + client: walletClient, + }); + + await Promise.all( + [ + await stakingAsset.write.mint([walletClient.account.address, MINIMUM_STAKE], {} as any), + await stakingAsset.write.approve([rollup.address, MINIMUM_STAKE], {} as any), + ].map(txHash => publicClient.waitForTransactionReceipt({ hash: txHash })), + ); + + const hash = await writeableRollup.write.deposit([who, who, who, MINIMUM_STAKE]); await publicClient.waitForTransactionReceipt({ hash }); log(`Added in tx ${hash}`); } else if (command === 'remove') { @@ -71,7 +86,7 @@ throw new Error(`Missing sequencer address`); } log(`Removing ${who} as sequencer`); - const hash = await writeableRollup.write.removeValidator([who]); + const hash = await writeableRollup.write.initiateWithdraw([who, who]); await publicClient.waitForTransactionReceipt({ hash }); log(`Removed in tx ${hash}`); } else if (command === 'who-next') { diff --git a/yarn-project/cli/src/cmds/l1/deploy_l1_contracts.ts b/yarn-project/cli/src/cmds/l1/deploy_l1_contracts.ts index 2d7165ff442..4052658acc9 100644 --- a/yarn-project/cli/src/cmds/l1/deploy_l1_contracts.ts +++ b/yarn-project/cli/src/cmds/l1/deploy_l1_contracts.ts @@ -42,6 +42,7 @@ export async function deployL1Contracts( log(`L1 -> L2 
Inbox Address: ${l1ContractAddresses.inboxAddress.toString()}`); log(`L2 -> L1 Outbox Address: ${l1ContractAddresses.outboxAddress.toString()}`); log(`Fee Juice Address: ${l1ContractAddresses.feeJuiceAddress.toString()}`); + log(`Staking Asset Address: ${l1ContractAddresses.stakingAssetAddress.toString()}`); log(`Fee Juice Portal Address: ${l1ContractAddresses.feeJuicePortalAddress.toString()}`); log(`CoinIssuer Address: ${l1ContractAddresses.coinIssuerAddress.toString()}`); log(`RewardDistributor Address: ${l1ContractAddresses.rewardDistributorAddress.toString()}`); diff --git a/yarn-project/cli/src/cmds/l1/index.ts b/yarn-project/cli/src/cmds/l1/index.ts index 80c0d514c3b..5bb1ff71240 100644 --- a/yarn-project/cli/src/cmds/l1/index.ts +++ b/yarn-project/cli/src/cmds/l1/index.ts @@ -109,7 +109,7 @@ export function injectCommands(program: Command, log: LogFn, debugLogger: DebugL 'test test test test test test test test test test test junk', ) .addOption(l1ChainIdOption) - .option('--validator ', 'ethereum address of the validator', parseEthereumAddress) + .option('--validator
<address>', 'ethereum address of the validator', parseEthereumAddress) .option('--rollup <address>
', 'ethereum address of the rollup contract', parseEthereumAddress) .action(async options => { const { removeL1Validator } = await import('./update_l1_validators.js'); diff --git a/yarn-project/cli/src/cmds/l1/update_l1_validators.ts b/yarn-project/cli/src/cmds/l1/update_l1_validators.ts index 9827721418e..916d0c351fe 100644 --- a/yarn-project/cli/src/cmds/l1/update_l1_validators.ts +++ b/yarn-project/cli/src/cmds/l1/update_l1_validators.ts @@ -1,8 +1,8 @@ import { EthCheatCodes } from '@aztec/aztec.js'; import { type EthAddress } from '@aztec/circuits.js'; -import { createEthereumChain, getL1ContractsConfigEnvVars } from '@aztec/ethereum'; +import { MINIMUM_STAKE, createEthereumChain, getL1ContractsConfigEnvVars, isAnvilTestChain } from '@aztec/ethereum'; import { type DebugLogger, type LogFn } from '@aztec/foundation/log'; -import { RollupAbi } from '@aztec/l1-artifacts'; +import { RollupAbi, TestERC20Abi } from '@aztec/l1-artifacts'; import { createPublicClient, createWalletClient, getContract, http } from 'viem'; import { generatePrivateKey, mnemonicToAccount, privateKeyToAccount } from 'viem/accounts'; @@ -49,13 +49,40 @@ export async function addL1Validator({ client: walletClient, }); + const stakingAsset = getContract({ + address: await rollup.read.STAKING_ASSET(), + abi: TestERC20Abi, + client: walletClient, + }); + + await Promise.all( + [ + await stakingAsset.write.mint([walletClient.account.address, MINIMUM_STAKE], {} as any), + await stakingAsset.write.approve([rollupAddress.toString(), MINIMUM_STAKE], {} as any), + ].map(txHash => publicClient.waitForTransactionReceipt({ hash: txHash })), + ); + dualLog(`Adding validator ${validatorAddress.toString()} to rollup ${rollupAddress.toString()}`); - const txHash = await rollup.write.addValidator([validatorAddress.toString()]); + const txHash = await rollup.write.deposit([ + validatorAddress.toString(), + validatorAddress.toString(), + validatorAddress.toString(), + MINIMUM_STAKE, + ]); dualLog(`Transaction hash: ${txHash}`); await publicClient.waitForTransactionReceipt({ hash: txHash }); - dualLog(`Funding validator on L1`); - const cheatCodes = new EthCheatCodes(rpcUrl, debugLogger); - await cheatCodes.setBalance(validatorAddress, 10n ** 20n); + if (isAnvilTestChain(chainId)) { + dualLog(`Funding validator on L1`); + const cheatCodes = new EthCheatCodes(rpcUrl, debugLogger); + await cheatCodes.setBalance(validatorAddress, 10n ** 20n); + } else { + const balance = await publicClient.getBalance({ address: validatorAddress.toString() }); + const balanceInEth = Number(balance) / 10 ** 18; + dualLog(`Validator balance: ${balanceInEth.toFixed(6)} ETH`); + if (balanceInEth === 0) { + dualLog(`WARNING: Validator has no balance. 
Remember to fund it!`); + } + } } export async function removeL1Validator({ @@ -78,7 +105,7 @@ export async function removeL1Validator({ }); dualLog(`Removing validator ${validatorAddress.toString()} from rollup ${rollupAddress.toString()}`); - const txHash = await rollup.write.removeValidator([validatorAddress.toString()]); + const txHash = await rollup.write.initiateWithdraw([validatorAddress.toString(), validatorAddress.toString()]); dualLog(`Transaction hash: ${txHash}`); await publicClient.waitForTransactionReceipt({ hash: txHash }); } @@ -154,7 +181,7 @@ export async function debugRollup({ rpcUrl, chainId, rollupAddress, log }: Rollu log(`Pending block num: ${pendingNum}`); const provenNum = await rollup.read.getProvenBlockNumber(); log(`Proven block num: ${provenNum}`); - const validators = await rollup.read.getValidators(); + const validators = await rollup.read.getAttesters(); log(`Validators: ${validators.map(v => v.toString()).join(', ')}`); const committee = await rollup.read.getCurrentEpochCommittee(); log(`Committee: ${committee.map(v => v.toString()).join(', ')}`); @@ -162,8 +189,6 @@ export async function debugRollup({ rpcUrl, chainId, rollupAddress, log }: Rollu log(`Archive: ${archive}`); const epochNum = await rollup.read.getCurrentEpoch(); log(`Current epoch: ${epochNum}`); - const epoch = await rollup.read.epochs([epochNum]); - log(`Epoch Sample Seed: ${epoch[0].toString()}, Next Seed: ${epoch[1].toString()}`); const slot = await rollup.read.getCurrentSlot(); log(`Current slot: ${slot}`); const proposerDuringPrevL1Block = await rollup.read.getCurrentProposer(); diff --git a/yarn-project/cli/src/cmds/pxe/get_node_info.ts b/yarn-project/cli/src/cmds/pxe/get_node_info.ts index dd5939277fb..ea13af3a6dd 100644 --- a/yarn-project/cli/src/cmds/pxe/get_node_info.ts +++ b/yarn-project/cli/src/cmds/pxe/get_node_info.ts @@ -1,8 +1,13 @@ -import { createCompatibleClient } from '@aztec/aztec.js'; +import { type AztecNode, type PXE, createAztecNodeClient, createCompatibleClient } from '@aztec/aztec.js'; import { type DebugLogger, type LogFn } from '@aztec/foundation/log'; -export async function getNodeInfo(rpcUrl: string, debugLogger: DebugLogger, log: LogFn) { - const client = await createCompatibleClient(rpcUrl, debugLogger); +export async function getNodeInfo(rpcUrl: string, pxeRequest: boolean, debugLogger: DebugLogger, log: LogFn) { + let client: AztecNode | PXE; + if (pxeRequest) { + client = await createCompatibleClient(rpcUrl, debugLogger); + } else { + client = createAztecNodeClient(rpcUrl); + } const info = await client.getNodeInfo(); log(`Node Version: ${info.nodeVersion}`); log(`Chain Id: ${info.l1ChainId}`); @@ -14,6 +19,7 @@ export async function getNodeInfo(rpcUrl: string, debugLogger: DebugLogger, log: log(` L1 -> L2 Inbox Address: ${info.l1ContractAddresses.inboxAddress.toString()}`); log(` L2 -> L1 Outbox Address: ${info.l1ContractAddresses.outboxAddress.toString()}`); log(` Fee Juice Address: ${info.l1ContractAddresses.feeJuiceAddress.toString()}`); + log(` Staking Asset Address: ${info.l1ContractAddresses.stakingAssetAddress.toString()}`); log(` Fee Juice Portal Address: ${info.l1ContractAddresses.feeJuicePortalAddress.toString()}`); log(` CoinIssuer Address: ${info.l1ContractAddresses.coinIssuerAddress.toString()}`); log(` RewardDistributor Address: ${info.l1ContractAddresses.rewardDistributorAddress.toString()}`); diff --git a/yarn-project/cli/src/cmds/pxe/index.ts b/yarn-project/cli/src/cmds/pxe/index.ts index ad1d9ed59c3..8bae3b79705 100644 --- 
a/yarn-project/cli/src/cmds/pxe/index.ts +++ b/yarn-project/cli/src/cmds/pxe/index.ts @@ -5,6 +5,7 @@ import { type Command } from 'commander'; import { logJson, + makePxeOption, parseAztecAddress, parseEthereumAddress, parseField, @@ -142,11 +143,18 @@ export function injectCommands(program: Command, log: LogFn, debugLogger: DebugL program .command('get-node-info') - .description('Gets the information of an aztec node at a URL.') - .addOption(pxeOption) + .description('Gets the information of an Aztec node from a PXE or directly from an Aztec node.') + .option('--node-url <url>', 'URL of the node.') + .addOption(makePxeOption(false)) .action(async options => { const { getNodeInfo } = await import('./get_node_info.js'); - await getNodeInfo(options.rpcUrl, debugLogger, log); + let url: string; + if (options.nodeUrl) { + url = options.nodeUrl; + } else { + url = options.rpcUrl; + } + await getNodeInfo(url, !options.nodeUrl, debugLogger, log); }); program diff --git a/yarn-project/end-to-end/package.json b/yarn-project/end-to-end/package.json index d9514c42dfb..047609c4015 100644 --- a/yarn-project/end-to-end/package.json +++ b/yarn-project/end-to-end/package.json @@ -15,11 +15,11 @@ "clean": "rm -rf ./dest .tsbuildinfo", "formatting": "run -T prettier --check ./src \"!src/web/main.js\" && run -T eslint ./src", "formatting:fix": "run -T eslint --fix ./src && run -T prettier -w ./src", - "test": "LOG_LEVEL=${LOG_LEVEL:-verbose} DEBUG_COLORS=1 NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --testTimeout=300000 --forceExit", + "test": "LOG_LEVEL=${LOG_LEVEL:-verbose} NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --testTimeout=300000 --forceExit", "test:with-alerts": "./scripts/test-with-alerts.sh", - "test:profile": "LOG_LEVEL=${LOG_LEVEL:-verbose} DEBUG_COLORS=1 NODE_NO_WARNINGS=1 0x --output-dir \"flame_graph/{pid}.0x\" -- node --experimental-vm-modules ../node_modules/jest/bin/jest.js --runInBand --testTimeout=300000 --forceExit", + "test:profile": "LOG_LEVEL=${LOG_LEVEL:-verbose} NODE_NO_WARNINGS=1 0x --output-dir \"flame_graph/{pid}.0x\" -- node --experimental-vm-modules ../node_modules/jest/bin/jest.js --runInBand --testTimeout=300000 --forceExit", "serve:flames": "python3 -m http.server --directory \"flame_graph\" 8000", - "test:debug": "LOG_LEVEL=${LOG_LEVEL:-verbose} DEBUG_COLORS=1 NODE_NO_WARNINGS=1 node --inspect --experimental-vm-modules ../node_modules/.bin/jest --testTimeout=300000 --forceExit", + "test:debug": "LOG_LEVEL=${LOG_LEVEL:-verbose} NODE_NO_WARNINGS=1 node --inspect --experimental-vm-modules ../node_modules/.bin/jest --testTimeout=300000 --forceExit", "test:integration": "concurrently -k -s first -c reset,dim -n test,anvil \"yarn test:integration:run\" \"anvil\"", "test:integration:run": "NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --no-cache --runInBand --config jest.integration.config.json", "test:unit": "NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest src/fixtures" @@ -93,7 +93,6 @@ "viem": "^2.7.15", "webpack": "^5.88.2", "webpack-cli": "^5.1.4", - "winston": "^3.10.0", "zod": "^3.23.8" }, "devDependencies": { @@ -157,4 +156,4 @@ "testRegex": "./src/.*\\.test\\.(js|mjs|ts)$", "rootDir": "./src" } -} +} \ No newline at end of file diff --git a/yarn-project/end-to-end/package.local.json b/yarn-project/end-to-end/package.local.json index a5214893419..cbb0987a9dc 100644 --- a/yarn-project/end-to-end/package.local.json +++ 
b/yarn-project/end-to-end/package.local.json @@ -2,7 +2,7 @@ "scripts": { "build": "yarn clean && tsc -b && webpack", "formatting": "run -T prettier --check ./src \"!src/web/main.js\" && run -T eslint ./src", - "test": "LOG_LEVEL=${LOG_LEVEL:-verbose} DEBUG_COLORS=1 NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --testTimeout=300000 --forceExit", + "test": "LOG_LEVEL=${LOG_LEVEL:-verbose} NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --testTimeout=300000 --forceExit", "test:unit": "NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest src/fixtures" } -} +} \ No newline at end of file diff --git a/yarn-project/end-to-end/scripts/docker-compose-devnet.yml b/yarn-project/end-to-end/scripts/docker-compose-devnet.yml index 4f6efe441ad..d8ce6a6b7b4 100644 --- a/yarn-project/end-to-end/scripts/docker-compose-devnet.yml +++ b/yarn-project/end-to-end/scripts/docker-compose-devnet.yml @@ -7,8 +7,7 @@ services: - aztec-node-url - faucet-url environment: - DEBUG: ${DEBUG:-'aztec:*'} - DEBUG_COLORS: 1 + LOG_LEVEL: ${LOG_LEVEL:-'verbose'} ETHEREUM_HOST: JOB_NAME: ${JOB_NAME:-''} PXE_PROVER_ENABLED: ${PXE_PROVER_ENABLED:-1} diff --git a/yarn-project/end-to-end/scripts/docker-compose-no-sandbox.yml b/yarn-project/end-to-end/scripts/docker-compose-no-sandbox.yml index 26aee913035..2fbbabb4d6d 100644 --- a/yarn-project/end-to-end/scripts/docker-compose-no-sandbox.yml +++ b/yarn-project/end-to-end/scripts/docker-compose-no-sandbox.yml @@ -11,8 +11,7 @@ services: image: aztecprotocol/end-to-end:${AZTEC_DOCKER_TAG:-latest} environment: BENCHMARK: 'true' - DEBUG: ${DEBUG:-'aztec:*'} - DEBUG_COLORS: 1 + LOG_LEVEL: ${LOG_LEVEL:-'verbose'} ETHEREUM_HOST: http://fork:8545 L1_CHAIN_ID: 31337 ARCHIVER_POLLING_INTERVAL_MS: 50 diff --git a/yarn-project/end-to-end/scripts/docker-compose-p2p.yml b/yarn-project/end-to-end/scripts/docker-compose-p2p.yml index f5c79b9c0c2..c16d467ac9e 100644 --- a/yarn-project/end-to-end/scripts/docker-compose-p2p.yml +++ b/yarn-project/end-to-end/scripts/docker-compose-p2p.yml @@ -14,8 +14,8 @@ services: - '40400:40400/tcp' - '40400:40400/udp' environment: - DEBUG: 'aztec:*' - DEBUG_COLORS: 1 + LOG_LEVEL: ${LOG_LEVEL:-'verbose'} + DEBUG: ${DEBUG:-""} P2P_TCP_LISTEN_ADDR: '0.0.0.0:40400' P2P_TCP_ANNOUNCE_ADDR: 'p2p-bootstrap:40400' P2P_UDP_ANNOUNCE_ADDR: 'p2p-bootstrap:40400' @@ -25,8 +25,8 @@ services: image: aztecprotocol/end-to-end:${AZTEC_DOCKER_TAG:-latest} environment: BENCHMARK: true - DEBUG: ${DEBUG:-'aztec:*'} - DEBUG_COLORS: 1 + LOG_LEVEL: ${LOG_LEVEL:-'verbose'} + DEBUG: ${DEBUG:-""} ETHEREUM_HOST: http://fork:8545 L1_CHAIN_ID: 31337 ARCHIVER_POLLING_INTERVAL_MS: 500 diff --git a/yarn-project/end-to-end/scripts/docker-compose-wallet.yml b/yarn-project/end-to-end/scripts/docker-compose-wallet.yml index 7aa1646aa72..b1a421ed575 100644 --- a/yarn-project/end-to-end/scripts/docker-compose-wallet.yml +++ b/yarn-project/end-to-end/scripts/docker-compose-wallet.yml @@ -11,8 +11,7 @@ services: image: aztecprotocol/aztec:${AZTEC_DOCKER_TAG:-latest} command: 'start --sandbox' environment: - DEBUG: 'aztec:*' - DEBUG_COLORS: 1 + LOG_LEVEL: ${LOG_LEVEL:-'verbose'} ETHEREUM_HOST: http://fork:8545 L1_CHAIN_ID: 31337 ARCHIVER_POLLING_INTERVAL_MS: 50 @@ -29,8 +28,7 @@ services: end-to-end: image: aztecprotocol/end-to-end:${AZTEC_DOCKER_TAG:-latest} environment: - DEBUG: ${DEBUG:-aztec:*} - DEBUG_COLORS: 1 + LOG_LEVEL: ${LOG_LEVEL:-'verbose'} ETHEREUM_HOST: http://fork:8545 L1_CHAIN_ID: 31337 PXE_URL: 
http://sandbox:8080 diff --git a/yarn-project/end-to-end/scripts/docker-compose.yml b/yarn-project/end-to-end/scripts/docker-compose.yml index 75b9cd0d36f..f1aa66cc7ff 100644 --- a/yarn-project/end-to-end/scripts/docker-compose.yml +++ b/yarn-project/end-to-end/scripts/docker-compose.yml @@ -11,8 +11,7 @@ services: image: aztecprotocol/aztec:${AZTEC_DOCKER_TAG:-latest} command: 'start --sandbox' environment: - DEBUG: ${DEBUG:-aztec:*,-aztec:avm_simulator:memory} - DEBUG_COLORS: 1 + LOG_LEVEL: ${LOG_LEVEL:-'verbose'} ETHEREUM_HOST: http://fork:8545 L1_CHAIN_ID: 31337 ARCHIVER_POLLING_INTERVAL_MS: 50 @@ -30,8 +29,7 @@ services: image: aztecprotocol/end-to-end:${AZTEC_DOCKER_TAG:-latest} environment: BENCHMARK: 'true' - DEBUG: ${DEBUG:-aztec:*,-aztec:avm_simulator:memory} - DEBUG_COLORS: 1 + LOG_LEVEL: ${LOG_LEVEL:-'verbose'} ETHEREUM_HOST: http://fork:8545 L1_CHAIN_ID: 31337 PXE_URL: http://sandbox:8080 diff --git a/yarn-project/end-to-end/scripts/e2e_compose_test.sh b/yarn-project/end-to-end/scripts/e2e_compose_test.sh index e6a74f58745..2cfdda8f712 100755 --- a/yarn-project/end-to-end/scripts/e2e_compose_test.sh +++ b/yarn-project/end-to-end/scripts/e2e_compose_test.sh @@ -3,7 +3,7 @@ # Usage: ./e2e_compose_test.sh # Optional environment variables: # COMPOSE_FILE (default: ./scripts/docker-compose.yml) -# DEBUG (default: "aztec:*") +# LOG_LEVEL (default: "verbose") # HARDWARE_CONCURRENCY (default: "") # ENABLE_GAS (default: "") # AZTEC_DOCKER_TAG (default: current git commit) @@ -16,7 +16,7 @@ set -eu export TEST="$1" # Variables with defaults COMPOSE_FILE="${COMPOSE_FILE:-./scripts/docker-compose.yml}" -export DEBUG="${DEBUG:-aztec:*,-aztec:avm_simulator:*}" +export LOG_LEVEL="${LOG_LEVEL:-verbose}" export HARDWARE_CONCURRENCY="${HARDWARE_CONCURRENCY:-}" export AZTEC_DOCKER_TAG="${AZTEC_DOCKER_TAG:-$(git rev-parse HEAD)}" diff --git a/yarn-project/end-to-end/scripts/e2e_test_config.yml b/yarn-project/end-to-end/scripts/e2e_test_config.yml index 8a65a011708..2fb7902c93f 100644 --- a/yarn-project/end-to-end/scripts/e2e_test_config.yml +++ b/yarn-project/end-to-end/scripts/e2e_test_config.yml @@ -4,19 +4,19 @@ tests: env: HARDWARE_CONCURRENCY: '32' COMPOSE_FILE: 'scripts/docker-compose-no-sandbox.yml' - DEBUG: 'aztec:benchmarks:*,aztec:sequencer,aztec:sequencer:*,aztec:world_state,aztec:merkle_trees' + LOG_LEVEL: 'verbose; debug: aztec:benchmarks,aztec:sequencer,aztec:world_state,aztec:merkle_trees' command: './scripts/e2e_compose_test.sh bench_prover' bench_publish_rollup: env: HARDWARE_CONCURRENCY: '32' COMPOSE_FILE: 'scripts/docker-compose-no-sandbox.yml' - DEBUG: 'aztec:benchmarks:*,aztec:sequencer,aztec:sequencer:*,aztec:world_state,aztec:merkle_trees' + LOG_LEVEL: 'verbose; debug: aztec:benchmarks,aztec:sequencer,aztec:world_state,aztec:merkle_trees' command: './scripts/e2e_compose_test.sh bench_publish_rollup' bench_tx_size: env: HARDWARE_CONCURRENCY: '32' COMPOSE_FILE: 'scripts/docker-compose-no-sandbox.yml' - DEBUG: 'aztec:benchmarks:*,aztec:sequencer,aztec:sequencer:*,aztec:world_state,aztec:merkle_trees' + LOG_LEVEL: 'verbose; debug: aztec:benchmarks,aztec:sequencer,aztec:world_state,aztec:merkle_trees' command: './scripts/e2e_compose_test.sh bench_tx_size' e2e_2_pxes: {} e2e_account_contracts: {} diff --git a/yarn-project/end-to-end/scripts/native-network/boot-node.sh b/yarn-project/end-to-end/scripts/native-network/boot-node.sh index 943bcdf4a4f..0a569f93b10 100755 --- a/yarn-project/end-to-end/scripts/native-network/boot-node.sh +++ 
b/yarn-project/end-to-end/scripts/native-network/boot-node.sh @@ -11,9 +11,9 @@ exec > >(tee -a "$(dirname $0)/logs/${SCRIPT_NAME}.log") 2> >(tee -a "$(dirname # Set environment variables export PORT=${PORT:-"8080"} -export DEBUG=${DEBUG:-"aztec:*,-aztec:avm_simulator*,-aztec:libp2p_service*,-aztec:circuits:artifact_hash,-json-rpc*,-aztec:l2_block_stream,-aztec:world-state:*"} -export LOG_LEVEL=${LOG_LEVEL:-"debug"} -export ETHEREUM_HOST="http://127.0.0.1:8545" +export DEBUG=${DEBUG:-""} +export LOG_LEVEL=${LOG_LEVEL:-"verbose"} +export ETHEREUM_HOST=${ETHEREUM_HOST:-"http://127.0.0.1:8545"} export P2P_ENABLED="true" export VALIDATOR_DISABLED="true" export SEQ_MAX_SECONDS_BETWEEN_BLOCKS="0" @@ -26,11 +26,11 @@ export OTEL_EXPORTER_OTLP_METRICS_ENDPOINT="${OTEL_EXPORTER_OTLP_METRICS_ENDPOIN export OTEL_EXPORTER_OTLP_TRACES_ENDPOINT="${OTEL_EXPORTER_OTLP_TRACES_ENDPOINT:-}" export OTEL_EXPORTER_OTLP_LOGS_ENDPOINT="${OTEL_EXPORTER_OTLP_LOGS_ENDPOINT:-}" export OTEL_RESOURCE_ATTRIBUTES="service.name=boot-node" -export VALIDATOR_PRIVATE_KEY="0x47e179ec197488593b187f80a00eb0da91f1b9d0b13f8733639f19c30a34926a" +export VALIDATOR_PRIVATE_KEY=${VALIDATOR_PRIVATE_KEY:-"0x47e179ec197488593b187f80a00eb0da91f1b9d0b13f8733639f19c30a34926a"} REPO=$(git rev-parse --show-toplevel) echo "Waiting for l1 contracts to be deployed..." -until [ -f "$REPO"/yarn-project/end-to-end/scripts/native-network/state/l1-contracts.env ] ; do +until [ -f "$REPO"/yarn-project/end-to-end/scripts/native-network/state/l1-contracts.env ]; do sleep 1 done echo "Done waiting." @@ -42,4 +42,4 @@ function filter_noise() { } # Start the Aztec node with the sequencer and archiver -node --no-warnings "$REPO"/yarn-project/aztec/dest/bin/index.js start --node --archiver --sequencer --pxe 2>&1 | filter_noise \ No newline at end of file +node --no-warnings "$REPO"/yarn-project/aztec/dest/bin/index.js start --node --archiver --sequencer 2>&1 | filter_noise diff --git a/yarn-project/end-to-end/scripts/native-network/deploy-l1-contracts.sh b/yarn-project/end-to-end/scripts/native-network/deploy-l1-contracts.sh index 9e9dad3f195..014a1b2b07d 100755 --- a/yarn-project/end-to-end/scripts/native-network/deploy-l1-contracts.sh +++ b/yarn-project/end-to-end/scripts/native-network/deploy-l1-contracts.sh @@ -18,21 +18,33 @@ else INIT_VALIDATORS="false" fi -echo "Waiting for Anvil to be up at port 8545..." +export ETHEREUM_HOST=${ETHEREUM_HOST:-"http://127.0.0.1:8545"} +export L1_CHAIN_ID=${L1_CHAIN_ID:-"31337"} +export PRIVATE_KEY=${PRIVATE_KEY:-""} +export SALT=${SALT:-"1337"} + +echo "Waiting for Ethereum node to be up..." until curl -s -X POST -H 'Content-Type: application/json' \ --data '{"jsonrpc":"2.0","method":"eth_blockNumber","params":[],"id":1}' \ - http://127.0.0.1:8545 2>/dev/null | grep -q 'result' ; do + $ETHEREUM_HOST 2>/dev/null | grep -q 'result'; do sleep 1 done echo "Done waiting." 
-# Run the deploy-l1-contracts command and capture the output -export ETHEREUM_HOST="http://127.0.0.1:8545" -if [ "$INIT_VALIDATORS" = "true" ]; then - output=$(node --no-warnings $(git rev-parse --show-toplevel)/yarn-project/aztec/dest/bin/index.js deploy-l1-contracts --validators "$VALIDATOR_ADDRESSES" --salt 1337) -else - output=$(node --no-warnings $(git rev-parse --show-toplevel)/yarn-project/aztec/dest/bin/index.js deploy-l1-contracts --salt 1337) -fi +# Construct base command +COMMAND="node --no-warnings $(git rev-parse --show-toplevel)/yarn-project/aztec/dest/bin/index.js \ + deploy-l1-contracts \ + --rpc-url $ETHEREUM_HOST \ + --l1-chain-id $L1_CHAIN_ID \ + --salt $SALT" + +# Add validators if specified +[ "$INIT_VALIDATORS" = "true" ] && COMMAND="$COMMAND --validators $VALIDATOR_ADDRESSES" + +# Add private key if provided +[ -n "$PRIVATE_KEY" ] && COMMAND="$COMMAND --private-key $PRIVATE_KEY" + +output=$($COMMAND) echo "$output" @@ -42,20 +54,21 @@ REGISTRY_CONTRACT_ADDRESS=$(echo "$output" | grep -oP 'Registry Address: \K0x[a- INBOX_CONTRACT_ADDRESS=$(echo "$output" | grep -oP 'L1 -> L2 Inbox Address: \K0x[a-fA-F0-9]{40}') OUTBOX_CONTRACT_ADDRESS=$(echo "$output" | grep -oP 'L2 -> L1 Outbox Address: \K0x[a-fA-F0-9]{40}') FEE_JUICE_CONTRACT_ADDRESS=$(echo "$output" | grep -oP 'Fee Juice Address: \K0x[a-fA-F0-9]{40}') +STAKING_ASSET_CONTRACT_ADDRESS=$(echo "$output" | grep -oP 'Staking Asset Address: \K0x[a-fA-F0-9]{40}') FEE_JUICE_PORTAL_CONTRACT_ADDRESS=$(echo "$output" | grep -oP 'Fee Juice Portal Address: \K0x[a-fA-F0-9]{40}') COIN_ISSUER_CONTRACT_ADDRESS=$(echo "$output" | grep -oP 'CoinIssuer Address: \K0x[a-fA-F0-9]{40}') REWARD_DISTRIBUTOR_CONTRACT_ADDRESS=$(echo "$output" | grep -oP 'RewardDistributor Address: \K0x[a-fA-F0-9]{40}') GOVERNANCE_PROPOSER_CONTRACT_ADDRESS=$(echo "$output" | grep -oP 'GovernanceProposer Address: \K0x[a-fA-F0-9]{40}') GOVERNANCE_CONTRACT_ADDRESS=$(echo "$output" | grep -oP 'Governance Address: \K0x[a-fA-F0-9]{40}') - # Save contract addresses to state/l1-contracts.env -cat << EOCONFIG > $(git rev-parse --show-toplevel)/yarn-project/end-to-end/scripts/native-network/state/l1-contracts.env +cat <<EOCONFIG >$(git rev-parse --show-toplevel)/yarn-project/end-to-end/scripts/native-network/state/l1-contracts.env export ROLLUP_CONTRACT_ADDRESS=$ROLLUP_CONTRACT_ADDRESS export REGISTRY_CONTRACT_ADDRESS=$REGISTRY_CONTRACT_ADDRESS export INBOX_CONTRACT_ADDRESS=$INBOX_CONTRACT_ADDRESS export OUTBOX_CONTRACT_ADDRESS=$OUTBOX_CONTRACT_ADDRESS export FEE_JUICE_CONTRACT_ADDRESS=$FEE_JUICE_CONTRACT_ADDRESS +export STAKING_ASSET_CONTRACT_ADDRESS=$STAKING_ASSET_CONTRACT_ADDRESS export FEE_JUICE_PORTAL_CONTRACT_ADDRESS=$FEE_JUICE_PORTAL_CONTRACT_ADDRESS export COIN_ISSUER_CONTRACT_ADDRESS=$COIN_ISSUER_CONTRACT_ADDRESS export REWARD_DISTRIBUTOR_CONTRACT_ADDRESS=$REWARD_DISTRIBUTOR_CONTRACT_ADDRESS diff --git a/yarn-project/end-to-end/scripts/native-network/prover-node.sh b/yarn-project/end-to-end/scripts/native-network/prover-node.sh index 1fa0ac6865c..08a4c748855 100755 --- a/yarn-project/end-to-end/scripts/native-network/prover-node.sh +++ b/yarn-project/end-to-end/scripts/native-network/prover-node.sh @@ -14,11 +14,11 @@ exec > >(tee -a "$(dirname $0)/logs/${SCRIPT_NAME}.log") 2> >(tee -a "$(dirname REPO=$(git rev-parse --show-toplevel) echo "Waiting for l1 contracts to be deployed..." 
-until [ -f "$REPO"/yarn-project/end-to-end/scripts/native-network/state/l1-contracts.env ] ; do +until [ -f "$REPO"/yarn-project/end-to-end/scripts/native-network/state/l1-contracts.env ]; do sleep 1 done echo "Waiting for Aztec Node..." -until curl -s http://127.0.0.1:8080/status >/dev/null ; do +until curl -s http://127.0.0.1:8080/status >/dev/null; do sleep 1 done echo "Done waiting." @@ -26,17 +26,18 @@ echo "Done waiting." source "$REPO"/yarn-project/end-to-end/scripts/native-network/state/l1-contracts.env # Get node info from the boot node -output=$(node --no-warnings "$REPO"/yarn-project/aztec/dest/bin/index.js get-node-info -u http://127.0.0.1:8080) +output=$(node --no-warnings "$REPO"/yarn-project/aztec/dest/bin/index.js get-node-info --node-url http://127.0.0.1:8080) # Extract boot node ENR export BOOTSTRAP_NODES=$(echo "$output" | grep -oP 'Node ENR: \K.*') # Set environment variables -export LOG_LEVEL=${LOG_LEVEL:-"debug"} -export DEBUG=${DEBUG:-"aztec:*,-aztec:avm_simulator*,-aztec:libp2p_service*,-aztec:circuits:artifact_hash,-json-rpc*,-aztec:l2_block_stream,-aztec:world-state:*"} -export ETHEREUM_HOST="http://127.0.0.1:8545" +export LOG_LEVEL=${LOG_LEVEL:-"verbose"} +export DEBUG=${DEBUG:-""} +export ETHEREUM_HOST=${ETHEREUM_HOST:-"http://127.0.0.1:8545"} export PROVER_AGENT_COUNT="1" -export PROVER_PUBLISHER_PRIVATE_KEY="0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80" +export PROVER_AGENT_ENABLED="true" +export PROVER_PUBLISHER_PRIVATE_KEY=${PROVER_PUBLISHER_PRIVATE_KEY:-"0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80"} export PROVER_COORDINATION_NODE_URL="http://127.0.0.1:8080" export AZTEC_NODE_URL="http://127.0.0.1:8080" export OTEL_RESOURCE_ATTRIBUTES="service.name=prover-node-${PORT}" diff --git a/yarn-project/end-to-end/scripts/native-network/pxe.sh b/yarn-project/end-to-end/scripts/native-network/pxe.sh index e02133cf943..ee1fbca03e5 100755 --- a/yarn-project/end-to-end/scripts/native-network/pxe.sh +++ b/yarn-project/end-to-end/scripts/native-network/pxe.sh @@ -9,19 +9,19 @@ exec > >(tee -a "$(dirname $0)/logs/${SCRIPT_NAME}.log") 2> >(tee -a "$(dirname # Starts the PXE (Private eXecution Environment) service # Set environment variables -export ETHEREUM_HOST="http://127.0.0.1:8545" -export AZTEC_NODE_URL="http://127.0.0.1:8080" -export LOG_LEVEL=${LOG_LEVEL:-"debug"} -export DEBUG="aztec:*" +export ETHEREUM_HOST=${ETHEREUM_HOST:-"http://127.0.0.1:8545"} +export AZTEC_NODE_URL=${AZTEC_NODE_URL:-"http://127.0.0.1:8080"} +export VALIDATOR_NODE_URL=${VALIDATOR_NODE_URL:-"http://127.0.0.1:8081"} +export LOG_LEVEL=${LOG_LEVEL:-"verbose"} echo "Waiting for Aztec Node..." -until curl -s http://127.0.0.1:8080/status >/dev/null ; do +until curl -s $AZTEC_NODE_URL/status >/dev/null; do sleep 1 done # We need to also wait for the validator, as the initial node cannot # Produce blocks on its own echo "Waiting for Validator 0..." -until curl -s http://127.0.0.1:8081/status >/dev/null ; do +until curl -s $VALIDATOR_NODE_URL/status >/dev/null; do sleep 1 done echo "Done waiting." 
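Each of these native-network scripts gates startup on the same readiness pattern: default an endpoint via `${VAR:-default}`, then poll its `/status` route until it answers. A minimal TypeScript sketch of that loop (a hypothetical `waitForStatus` helper, not part of this diff; assumes Node 18+ for the built-in `fetch`):

```typescript
// Sketch of the readiness loop the shell scripts implement with
// `until curl -s $URL/status >/dev/null; do sleep 1; done`.
async function waitForStatus(url: string, timeoutMs = 600_000, intervalMs = 1_000): Promise<void> {
  const deadline = Date.now() + timeoutMs;
  while (Date.now() < deadline) {
    try {
      // Any 2xx response from /status means the service is up.
      const res = await fetch(`${url}/status`);
      if (res.ok) {
        return;
      }
    } catch {
      // Connection refused: the service is not listening yet.
    }
    await new Promise(resolve => setTimeout(resolve, intervalMs));
  }
  throw new Error(`Timed out waiting for ${url}/status`);
}

// Usage mirroring pxe.sh: wait for the node, then the first validator.
// await waitForStatus(process.env.AZTEC_NODE_URL ?? 'http://127.0.0.1:8080');
// await waitForStatus(process.env.VALIDATOR_NODE_URL ?? 'http://127.0.0.1:8081');
```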
@@ -31,4 +31,4 @@ function filter_noise() { } # Start the PXE service -node --no-warnings $(git rev-parse --show-toplevel)/yarn-project/aztec/dest/bin/index.js start --port=8079 --pxe 2>&1 | filter_noise \ No newline at end of file +node --no-warnings $(git rev-parse --show-toplevel)/yarn-project/aztec/dest/bin/index.js start --port=8079 --pxe 2>&1 | filter_noise diff --git a/yarn-project/end-to-end/scripts/native-network/test-4epochs.sh b/yarn-project/end-to-end/scripts/native-network/test-4epochs.sh index 3edbcd48637..3e00718517c 100755 --- a/yarn-project/end-to-end/scripts/native-network/test-4epochs.sh +++ b/yarn-project/end-to-end/scripts/native-network/test-4epochs.sh @@ -31,7 +31,6 @@ until [ -f "$REPO"/yarn-project/end-to-end/scripts/native-network/state/l2-contr done echo "Done waiting." -export DEBUG="aztec:*" -export LOG_LEVEL=${LOG_LEVEL:-"debug"} +export LOG_LEVEL=${LOG_LEVEL:-"verbose"} cd $(git rev-parse --show-toplevel)/yarn-project/end-to-end -DEBUG=aztec:* yarn test src/spartan/4epochs.test.ts \ No newline at end of file +yarn test src/spartan/4epochs.test.ts \ No newline at end of file diff --git a/yarn-project/end-to-end/scripts/native-network/test-transfer.sh b/yarn-project/end-to-end/scripts/native-network/test-transfer.sh index 50790afbe3e..a58483c3fc0 100755 --- a/yarn-project/end-to-end/scripts/native-network/test-transfer.sh +++ b/yarn-project/end-to-end/scripts/native-network/test-transfer.sh @@ -11,6 +11,7 @@ exec > >(tee -a "$(dirname $0)/logs/${SCRIPT_NAME}.log") 2> >(tee -a "$(dirname export BOOTNODE_URL=${BOOTNODE_URL:-http://127.0.0.1:8080} export PXE_URL=${PXE_URL:-http://127.0.0.1:8079} export ETHEREUM_HOST=${ETHEREUM_HOST:-http://127.0.0.1:8545} +export K8S=${K8S:-false} REPO=$(git rev-parse --show-toplevel) # Run our test assuming the port in pxe.sh @@ -31,7 +32,6 @@ until [ -f "$REPO"/yarn-project/end-to-end/scripts/native-network/state/l2-contr done echo "Done waiting." -export DEBUG="aztec:*" -export LOG_LEVEL=${LOG_LEVEL:-"debug"} +export LOG_LEVEL=${LOG_LEVEL:-"verbose"} cd $(git rev-parse --show-toplevel)/yarn-project/end-to-end -DEBUG=aztec:* yarn test src/spartan/transfer.test.ts \ No newline at end of file +yarn test src/spartan/transfer.test.ts \ No newline at end of file diff --git a/yarn-project/end-to-end/scripts/native-network/transaction-bot.sh b/yarn-project/end-to-end/scripts/native-network/transaction-bot.sh index 722bfdcf0ce..038236dd63e 100755 --- a/yarn-project/end-to-end/scripts/native-network/transaction-bot.sh +++ b/yarn-project/end-to-end/scripts/native-network/transaction-bot.sh @@ -4,6 +4,10 @@ set -eu # Get the name of the script without the path and extension SCRIPT_NAME=$(basename "$0" .sh) +# Set the token contract to use +export BOT_TOKEN_CONTRACT=${BOT_TOKEN_CONTRACT:-"TokenContract"} +export BOT_PXE_URL=${BOT_PXE_URL:-"http://127.0.0.1:8079"} + # Redirect stdout and stderr to .log while also printing to the console exec > >(tee -a "$(dirname $0)/logs/${SCRIPT_NAME}.log") 2> >(tee -a "$(dirname $0)/logs/${SCRIPT_NAME}.log" >&2) @@ -11,26 +15,29 @@ exec > >(tee -a "$(dirname $0)/logs/${SCRIPT_NAME}.log") 2> >(tee -a "$(dirname REPO=$(git rev-parse --show-toplevel) echo "Waiting for Aztec Node..." -until curl -s http://127.0.0.1:8080/status >/dev/null ; do +until curl -s http://127.0.0.1:8080/status >/dev/null; do sleep 1 done echo "Waiting for PXE service..." 
until curl -s -X POST -H 'content-type: application/json' \ -d '{"jsonrpc":"2.0","method":"pxe_getNodeInfo","params":[],"id":67}' \ - http://127.0.0.1:8079 | grep -q '"enr:-'; do - sleep 1 -done -echo "Waiting for l2 contracts to be deployed..." -until [ -f "$REPO"/yarn-project/end-to-end/scripts/native-network/state/l2-contracts.env ] ; do + $BOT_PXE_URL | grep -q '"enr:-'; do sleep 1 done -echo "Done waiting." + +# Don't wait for l2 contracts if using EasyPrivateTokenContract +if [ "${BOT_TOKEN_CONTRACT:-TokenContract}" != "EasyPrivateTokenContract" ]; then + echo "Waiting for l2 contracts to be deployed..." + until [ -f "$REPO"/yarn-project/end-to-end/scripts/native-network/state/l2-contracts.env ]; do + sleep 1 + done + echo "Done waiting." +fi # Set environment variables -export ETHEREUM_HOST="http://127.0.0.1:8545" -export AZTEC_NODE_URL="http://127.0.0.1:8080" -export LOG_LEVEL=${LOG_LEVEL:-"debug"} -export DEBUG="aztec:*,-aztec:avm_simulator*,-aztec:libp2p_service*,-aztec:circuits:artifact_hash,-json-rpc*,-aztec:l2_block_stream,-aztec:world-state:*" +export ETHEREUM_HOST=${ETHEREUM_HOST:-"http://127.0.0.1:8545"} +export AZTEC_NODE_URL=${AZTEC_NODE_URL:-"http://127.0.0.1:8080"} +export LOG_LEVEL=${LOG_LEVEL:-"verbose"} export BOT_PRIVATE_KEY="0xcafe" export BOT_TX_INTERVAL_SECONDS="5" export BOT_PRIVATE_TRANSFERS_PER_TX="1" @@ -42,4 +49,5 @@ export PXE_PROVER_ENABLED="false" export PROVER_REAL_PROOFS="false" # Start the bot -node --no-warnings $(git rev-parse --show-toplevel)/yarn-project/aztec/dest/bin/index.js start --port=8077 --pxe --bot + +node --no-warnings $REPO/yarn-project/aztec/dest/bin/index.js start --port=8077 --bot --pxe diff --git a/yarn-project/end-to-end/scripts/native-network/validator.sh b/yarn-project/end-to-end/scripts/native-network/validator.sh index 518dbb9db97..207952f9b0d 100755 --- a/yarn-project/end-to-end/scripts/native-network/validator.sh +++ b/yarn-project/end-to-end/scripts/native-network/validator.sh @@ -10,19 +10,21 @@ exec > >(tee -a "$(dirname $0)/logs/${SCRIPT_NAME}.log") 2> >(tee -a "$(dirname # PORTS PORT="$1" P2P_PORT="$2" +ADDRESS="${3:-${ADDRESS:-}}" +export VALIDATOR_PRIVATE_KEY="${4:-${VALIDATOR_PRIVATE_KEY:-}}" # Starts the Validator Node REPO=$(git rev-parse --show-toplevel) echo "Waiting for l1 contracts to be deployed..." -until [ -f "$REPO"/yarn-project/end-to-end/scripts/native-network/state/l1-contracts.env ] ; do +until [ -f "$REPO"/yarn-project/end-to-end/scripts/native-network/state/l1-contracts.env ]; do sleep 1 done source "$REPO"/yarn-project/end-to-end/scripts/native-network/state/l1-contracts.env echo "Waiting for Aztec Node..." -until curl -s http://127.0.0.1:8080/status >/dev/null ; do +until curl -s http://127.0.0.1:8080/status >/dev/null; do sleep 1 done echo "Done waiting." @@ -31,21 +33,33 @@ echo "Done waiting." 
BOOT_NODE_URL="http://127.0.0.1:8080" # Get node info from the boot node -output=$(node --no-warnings "$REPO"/yarn-project/aztec/dest/bin/index.js get-node-info -u $BOOT_NODE_URL) +output=$(node --no-warnings "$REPO"/yarn-project/aztec/dest/bin/index.js get-node-info --node-url $BOOT_NODE_URL) # Extract boot node ENR export BOOTSTRAP_NODES=$(echo "$output" | grep -oP 'Node ENR: \K.*') echo "BOOTSTRAP_NODES: $BOOTSTRAP_NODES" -# Generate a private key for the validator -json_account=$(node --no-warnings "$REPO"/yarn-project/aztec/dest/bin/index.js generate-l1-account) -export ADDRESS=$(echo $json_account | jq -r '.address') -export LOG_LEVEL=${LOG_LEVEL:-"debug"} -export VALIDATOR_PRIVATE_KEY=$(echo $json_account | jq -r '.privateKey') +# Generate a private key for the validator only if not already set +if [ -z "${VALIDATOR_PRIVATE_KEY:-}" ] || [ -z "${ADDRESS:-}" ]; then + echo "Generating new L1 Validator account..." + json_account=$(node --no-warnings "$REPO"/yarn-project/aztec/dest/bin/index.js generate-l1-account) + export ADDRESS=$(echo $json_account | jq -r '.address') + export VALIDATOR_PRIVATE_KEY=$(echo $json_account | jq -r '.privateKey') +fi + export L1_PRIVATE_KEY=$VALIDATOR_PRIVATE_KEY export SEQ_PUBLISHER_PRIVATE_KEY=$VALIDATOR_PRIVATE_KEY -export DEBUG=${DEBUG:-"aztec:*,-aztec:avm_simulator*,-aztec:libp2p_service*,-aztec:circuits:artifact_hash,-json-rpc*,-aztec:l2_block_stream,-aztec:world-state:*"} -export ETHEREUM_HOST="http://127.0.0.1:8545" +export DEBUG=${DEBUG:-""} +export LOG_LEVEL=${LOG_LEVEL:-"verbose"} +export ETHEREUM_HOST=${ETHEREUM_HOST:-"http://127.0.0.1:8545"} + +# Automatically detect if we're using Anvil +if curl -s -H "Content-Type: application/json" -X POST --data '{"method":"web3_clientVersion","params":[],"id":49,"jsonrpc":"2.0"}' $ETHEREUM_HOST | jq .result | grep -q anvil; then + IS_ANVIL="true" +else + IS_ANVIL="false" +fi + export P2P_ENABLED="true" export VALIDATOR_DISABLED="false" export SEQ_MAX_SECONDS_BETWEEN_BLOCKS="0" @@ -59,15 +73,24 @@ export OTEL_EXPORTER_OTLP_METRICS_ENDPOINT="${OTEL_EXPORTER_OTLP_METRICS_ENDPOIN export OTEL_EXPORTER_OTLP_TRACES_ENDPOINT="${OTEL_EXPORTER_OTLP_TRACES_ENDPOINT:-}" export OTEL_EXPORTER_OTLP_LOGS_ENDPOINT="${OTEL_EXPORTER_OTLP_LOGS_ENDPOINT:-}" -# Add L1 validator -# this may fail, so try 3 times -for i in {1..3}; do - node --no-warnings "$REPO"/yarn-project/aztec/dest/bin/index.js add-l1-validator --validator $ADDRESS --rollup $ROLLUP_CONTRACT_ADDRESS && break - sleep 1 -done +# Check if validator is already registered +echo "Checking if validator is already registered..." +debug_output=$(node --no-warnings "$REPO"/yarn-project/aztec/dest/bin/index.js debug-rollup --rollup $ROLLUP_CONTRACT_ADDRESS) +if echo "$debug_output" | grep -q "Validators:.*$ADDRESS"; then + echo "Validator $ADDRESS is already registered" +else + # Add L1 validator + # this may fail, so try 3 times + echo "Adding validator $ADDRESS..." 
+ for i in {1..3}; do + node --no-warnings "$REPO"/yarn-project/aztec/dest/bin/index.js add-l1-validator --validator $ADDRESS --rollup $ROLLUP_CONTRACT_ADDRESS && break + sleep 1 + done +fi -# Fast forward epochs -node --no-warnings "$REPO"/yarn-project/aztec/dest/bin/index.js fast-forward-epochs --rollup $ROLLUP_CONTRACT_ADDRESS --count 1 +# Fast forward epochs if we're on an anvil chain +if [ "$IS_ANVIL" = "true" ]; then + node --no-warnings "$REPO"/yarn-project/aztec/dest/bin/index.js fast-forward-epochs --rollup $ROLLUP_CONTRACT_ADDRESS --count 1 +fi # Start the Validator Node with the sequencer and archiver node --no-warnings "$REPO"/yarn-project/aztec/dest/bin/index.js start --port="$PORT" --node --archiver --sequencer - diff --git a/yarn-project/end-to-end/scripts/native-network/validators.sh b/yarn-project/end-to-end/scripts/native-network/validators.sh index 6a9ac7f4f40..c2454b87481 100755 --- a/yarn-project/end-to-end/scripts/native-network/validators.sh +++ b/yarn-project/end-to-end/scripts/native-network/validators.sh @@ -16,19 +16,26 @@ cd "$(dirname "${BASH_SOURCE[0]}")" CMD=() # Generate validator commands -for ((i=0; i { // Expect the tx to revert await expect(publisher.proposeL2Block(block)).resolves.toEqual(false); - // Expect a proper error to be logged. Full message looks like: - // aztec:sequencer:publisher [ERROR] Rollup process tx reverted. The contract function "propose" reverted. Error: Rollup__InvalidInHash(bytes32 expected, bytes32 actual) (0x00089a9d421a82c4a25f7acbebe69e638d5b064fa8a60e018793dcb0be53752c, 0x00a5a12af159e0608de45d825718827a36d8a7cdfa9ecc7955bc62180ae78e51) blockNumber=1 slotNumber=49 blockHash=0x131c59ebc2ce21224de6473fe954b0d4eb918043432a3a95406bb7e7a4297fbd txHash=0xc01c3c26b6b67003a8cce352afe475faf7e0196a5a3bba963cfda3792750ed28 - expect(loggerErrorSpy).toHaveBeenCalledWith( - expect.stringMatching(/Rollup__InvalidInHash/), + // Test for both calls + expect(loggerErrorSpy).toHaveBeenCalledTimes(2); + + // Test first call + expect(loggerErrorSpy).toHaveBeenNthCalledWith( + 1, + expect.stringMatching(/^L1 Transaction 0x[a-f0-9]{64} reverted$/), + ); + + // Test second call + expect(loggerErrorSpy).toHaveBeenNthCalledWith( + 2, + expect.stringMatching( + /^Rollup process tx reverted\. The contract function "propose" reverted\. 
Error: Rollup__InvalidInHash/, + ), undefined, expect.objectContaining({ blockHash: expect.any(String), blockNumber: expect.any(Number), slotNumber: expect.any(BigInt), + txHash: expect.any(String), }), ); }); diff --git a/yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts b/yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts index 6b8401823a4..6f1c7096705 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts @@ -21,7 +21,7 @@ const DATA_DIR = './data/gossip'; const qosAlerts: AlertConfig[] = [ { alert: 'SequencerTimeToCollectAttestations', - expr: 'aztec_sequencer_time_to_collect_attestations > 2500', + expr: 'aztec_sequencer_time_to_collect_attestations > 3500', labels: { severity: 'error' }, for: '10m', annotations: {}, @@ -42,6 +42,7 @@ describe('e2e_p2p_network', () => { }); await t.applyBaseSnapshots(); await t.setup(); + await t.removeInitialNode(); }); afterEach(async () => { diff --git a/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts b/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts index 3289add932a..0fb208a4932 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts @@ -1,10 +1,9 @@ import { getSchnorrAccount } from '@aztec/accounts/schnorr'; import { type AztecNodeConfig, type AztecNodeService } from '@aztec/aztec-node'; import { type AccountWalletWithSecretKey, EthCheatCodes } from '@aztec/aztec.js'; -import { EthAddress } from '@aztec/circuits.js'; -import { getL1ContractsConfigEnvVars } from '@aztec/ethereum'; +import { MINIMUM_STAKE, getL1ContractsConfigEnvVars } from '@aztec/ethereum'; import { type DebugLogger, createDebugLogger } from '@aztec/foundation/log'; -import { RollupAbi } from '@aztec/l1-artifacts'; +import { RollupAbi, TestERC20Abi } from '@aztec/l1-artifacts'; import { SpamContract } from '@aztec/noir-contracts.js'; import { type BootstrapNode } from '@aztec/p2p'; import { createBootstrapNodeFromPrivateKey } from '@aztec/p2p/mocks'; @@ -14,9 +13,10 @@ import { getContract } from 'viem'; import { privateKeyToAccount } from 'viem/accounts'; import { - PRIVATE_KEYS_START_INDEX, + ATTESTER_PRIVATE_KEYS_START_INDEX, + PROPOSER_PRIVATE_KEYS_START_INDEX, createValidatorConfig, - generateNodePrivateKeys, + generatePrivateKeys, } from '../fixtures/setup_p2p_test.js'; import { type ISnapshotManager, @@ -39,8 +39,9 @@ export class P2PNetworkTest { public logger: DebugLogger; public ctx!: SubsystemsContext; - public nodePrivateKeys: `0x${string}`[] = []; - public nodePublicKeys: string[] = []; + public attesterPrivateKeys: `0x${string}`[] = []; + public attesterPublicKeys: string[] = []; + public proposerPrivateKeys: `0x${string}`[] = []; public peerIdPrivateKeys: string[] = []; public bootstrapNodeEnr: string = ''; @@ -54,7 +55,6 @@ export class P2PNetworkTest { public bootstrapNode: BootstrapNode, public bootNodePort: number, private numberOfNodes: number, - initialValidatorAddress: string, initialValidatorConfig: AztecNodeConfig, // If set enable metrics collection metricsPort?: number, @@ -63,18 +63,16 @@ export class P2PNetworkTest { // Set up the base account and node private keys for the initial network deployment this.baseAccount = privateKeyToAccount(`0x${getPrivateKeyFromIndex(0)!.toString('hex')}`); - this.nodePrivateKeys = generateNodePrivateKeys(PRIVATE_KEYS_START_INDEX, numberOfNodes); - this.nodePublicKeys = this.nodePrivateKeys.map(privateKey => privateKeyToAccount(privateKey).address); + 
this.proposerPrivateKeys = generatePrivateKeys(PROPOSER_PRIVATE_KEYS_START_INDEX, numberOfNodes); + this.attesterPrivateKeys = generatePrivateKeys(ATTESTER_PRIVATE_KEYS_START_INDEX, numberOfNodes); + this.attesterPublicKeys = this.attesterPrivateKeys.map(privateKey => privateKeyToAccount(privateKey).address); this.bootstrapNodeEnr = bootstrapNode.getENR().encodeTxt(); - const initialValidators = [EthAddress.fromString(initialValidatorAddress)]; - this.snapshotManager = createSnapshotManager(`e2e_p2p_network/${testName}`, process.env.E2E_DATA_PATH, { ...initialValidatorConfig, ethereumSlotDuration: l1ContractsConfig.ethereumSlotDuration, salt: 420, - initialValidators, metricsPort: metricsPort, }); } @@ -97,16 +95,8 @@ export class P2PNetworkTest { const bootstrapNodeEnr = bootstrapNode.getENR().encodeTxt(); const initialValidatorConfig = await createValidatorConfig({} as AztecNodeConfig, bootstrapNodeEnr); - const intiailValidatorAddress = privateKeyToAccount(initialValidatorConfig.publisherPrivateKey).address; - - return new P2PNetworkTest( - testName, - bootstrapNode, - port, - numberOfNodes, - intiailValidatorAddress, - initialValidatorConfig, - ); + + return new P2PNetworkTest(testName, bootstrapNode, port, numberOfNodes, initialValidatorConfig); } async applyBaseSnapshots() { @@ -119,25 +109,44 @@ export class P2PNetworkTest { this.logger.verbose(`Adding ${this.numberOfNodes} validators`); - const txHashes: `0x${string}`[] = []; - for (let i = 0; i < this.numberOfNodes; i++) { - const account = privateKeyToAccount(this.nodePrivateKeys[i]!); - this.logger.debug(`Adding ${account.address} as validator`); - const txHash = await rollup.write.addValidator([account.address]); - txHashes.push(txHash); - - this.logger.debug(`Adding ${account.address} as validator`); - } + const stakingAsset = getContract({ + address: deployL1ContractsValues.l1ContractAddresses.stakingAssetAddress.toString(), + abi: TestERC20Abi, + client: deployL1ContractsValues.walletClient, + }); - // Wait for all the transactions adding validators to be mined + const stakeNeeded = MINIMUM_STAKE * BigInt(this.numberOfNodes); await Promise.all( - txHashes.map(txHash => - deployL1ContractsValues.publicClient.waitForTransactionReceipt({ - hash: txHash, - }), - ), + [ + await stakingAsset.write.mint([deployL1ContractsValues.walletClient.account.address, stakeNeeded], {} as any), + await stakingAsset.write.approve( + [deployL1ContractsValues.l1ContractAddresses.rollupAddress.toString(), stakeNeeded], + {} as any, + ), + ].map(txHash => deployL1ContractsValues.publicClient.waitForTransactionReceipt({ hash: txHash })), ); + const validators = []; + + for (let i = 0; i < this.numberOfNodes; i++) { + const attester = privateKeyToAccount(this.attesterPrivateKeys[i]!); + const proposer = privateKeyToAccount(this.proposerPrivateKeys[i]!); + validators.push({ + attester: attester.address, + proposer: proposer.address, + withdrawer: attester.address, + amount: MINIMUM_STAKE, + } as const); + + this.logger.verbose( + `Adding (attester, proposer) pair: (${attester.address}, ${proposer.address}) as validator`, + ); + } + + await deployL1ContractsValues.publicClient.waitForTransactionReceipt({ + hash: await rollup.write.cheat__InitialiseValidatorSet([validators]), + }); + //@note Now we jump ahead to the next epoch such that the validator committee is picked // INTERVAL MINING: If we are using anvil interval mining this will NOT progress the time! // Which means that the validator set will still be empty! So anyone can propose. 
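Validator membership in this PR moves from plain `addValidator`/`removeValidator` calls to a staking flow, repeated in sequencers.ts, update_l1_validators.ts, and the fixture above: mint `MINIMUM_STAKE` of the staking asset, approve the rollup, then `deposit` an (attester, proposer, withdrawer) triple. A condensed sketch of that registration path with viem, assuming the `MINIMUM_STAKE`, `RollupAbi`, and `TestERC20Abi` exports this PR relies on (the `registerValidator` helper itself is illustrative, not part of the diff):

```typescript
import { type PublicClient, type WalletClient, getContract } from 'viem';

import { MINIMUM_STAKE } from '@aztec/ethereum';
import { RollupAbi, TestERC20Abi } from '@aztec/l1-artifacts';

async function registerValidator(
  walletClient: WalletClient, // must carry a hoisted account, as in the files above
  publicClient: PublicClient,
  rollupAddress: `0x${string}`,
  attester: `0x${string}`,
  proposer: `0x${string}`,
) {
  const rollup = getContract({ address: rollupAddress, abi: RollupAbi, client: walletClient });

  // The rollup exposes its staking token address; mint and approve the stake first.
  const stakingAsset = getContract({
    address: await rollup.read.STAKING_ASSET(),
    abi: TestERC20Abi,
    client: walletClient,
  });

  // `{} as any` sidesteps viem's strict write-option typing, mirroring the diff above.
  for (const hash of [
    await stakingAsset.write.mint([walletClient.account!.address, MINIMUM_STAKE], {} as any),
    await stakingAsset.write.approve([rollupAddress, MINIMUM_STAKE], {} as any),
  ]) {
    await publicClient.waitForTransactionReceipt({ hash });
  }

  // deposit(attester, proposer, withdrawer, amount): the attester doubles as the
  // withdrawer here, matching the CLI's deposit([who, who, who, MINIMUM_STAKE]).
  const txHash = await rollup.write.deposit([attester, proposer, attester, MINIMUM_STAKE], {} as any);
  await publicClient.waitForTransactionReceipt({ hash: txHash });
}
```

Removal is symmetric: `initiateWithdraw([attester, recipient])` replaces the old `removeValidator` call.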
@@ -195,47 +204,9 @@ export class P2PNetworkTest { } async removeInitialNode() { - await this.snapshotManager.snapshot( - 'remove-inital-validator', - async ({ deployL1ContractsValues, aztecNodeConfig }) => { - const rollup = getContract({ - address: deployL1ContractsValues.l1ContractAddresses.rollupAddress.toString(), - abi: RollupAbi, - client: deployL1ContractsValues.walletClient, - }); - - // Remove the setup validator - const initialValidatorAddress = privateKeyToAccount(`0x${getPrivateKeyFromIndex(0)!.toString('hex')}`).address; - const txHash = await rollup.write.removeValidator([initialValidatorAddress]); - - await deployL1ContractsValues.publicClient.waitForTransactionReceipt({ - hash: txHash, - }); - - //@note Now we jump ahead to the next epoch such that the validator committee is picked - // INTERVAL MINING: If we are using anvil interval mining this will NOT progress the time! - // Which means that the validator set will still be empty! So anyone can propose. - const slotsInEpoch = await rollup.read.EPOCH_DURATION(); - const timestamp = await rollup.read.getTimestampForSlot([slotsInEpoch]); - const cheatCodes = new EthCheatCodes(aztecNodeConfig.l1RpcUrl); - try { - await cheatCodes.warp(Number(timestamp)); - } catch (err) { - this.logger.debug('Warp failed, time already satisfied'); - } - - // Send and await a tx to make sure we mine a block for the warp to correctly progress. - await deployL1ContractsValues.publicClient.waitForTransactionReceipt({ - hash: await deployL1ContractsValues.walletClient.sendTransaction({ - to: this.baseAccount.address, - value: 1n, - account: this.baseAccount, - }), - }); - - await this.ctx.aztecNode.stop(); - }, - ); + await this.snapshotManager.snapshot('remove-inital-validator', async () => { + await this.ctx.aztecNode.stop(); + }); } async setup() { diff --git a/yarn-project/end-to-end/src/e2e_p2p/reqresp.test.ts b/yarn-project/end-to-end/src/e2e_p2p/reqresp.test.ts index c7644b77f3d..d9be627e861 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/reqresp.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/reqresp.test.ts @@ -123,6 +123,11 @@ describe('e2e_p2p_reqresp_tx', () => { client: t.ctx.deployL1ContractsValues.publicClient, }); + const attesters = await rollupContract.read.getAttesters(); + const mappedProposers = await Promise.all( + attesters.map(async attester => await rollupContract.read.getProposerForAttester([attester])), + ); + const currentTime = await t.ctx.cheatCodes.eth.timestamp(); const slotDuration = await rollupContract.read.SLOT_DURATION(); @@ -133,9 +138,11 @@ describe('e2e_p2p_reqresp_tx', () => { const proposer = await rollupContract.read.getProposerAt([nextSlot]); proposers.push(proposer); } - // Get the indexes of the nodes that are responsible for the next two slots - const proposerIndexes = proposers.map(proposer => t.nodePublicKeys.indexOf(proposer)); + const proposerIndexes = proposers.map(proposer => mappedProposers.indexOf(proposer as `0x${string}`)); + + t.logger.info('proposerIndexes: ' + proposerIndexes.join(', ')); + const nodesToTurnOffTxGossip = Array.from({ length: NUM_NODES }, (_, i) => i).filter( i => !proposerIndexes.includes(i), ); diff --git a/yarn-project/end-to-end/src/fixtures/logging.ts b/yarn-project/end-to-end/src/fixtures/logging.ts index 5e16e5585f8..8c2488158c5 100644 --- a/yarn-project/end-to-end/src/fixtures/logging.ts +++ b/yarn-project/end-to-end/src/fixtures/logging.ts @@ -1,10 +1,5 @@ -import { onLog } from '@aztec/aztec.js'; - import { mkdirpSync } from 'fs-extra'; import { dirname } 
from 'path'; -import * as winston from 'winston'; - -const { format, transports } = winston; let metricsLoggerSet = false; @@ -22,15 +17,6 @@ export function setupMetricsLogger(filename: string) { return; } mkdirpSync(dirname(filename)); - const logger = winston.createLogger({ - level: 'debug', - format: format.combine(format.timestamp(), format.json()), - transports: [new transports.File({ filename })], - }); - onLog((level, namespace, message, data) => { - if (data && data['eventName']) { - logger.log({ ...data, level, namespace, message }); - } - }); + // TODO(palla/log): Reenable or kill metrics logger metricsLoggerSet = true; } diff --git a/yarn-project/end-to-end/src/fixtures/setup_p2p_test.ts b/yarn-project/end-to-end/src/fixtures/setup_p2p_test.ts index f8d4fdacaaa..53c7a3dde62 100644 --- a/yarn-project/end-to-end/src/fixtures/setup_p2p_test.ts +++ b/yarn-project/end-to-end/src/fixtures/setup_p2p_test.ts @@ -13,7 +13,8 @@ import { getEndToEndTestTelemetryClient } from './with_telemetry_utils.js'; // Setup snapshots will create a node with index 0, so all of our loops here // need to start from 1 to avoid running validators with the same key -export const PRIVATE_KEYS_START_INDEX = 1; +export const PROPOSER_PRIVATE_KEYS_START_INDEX = 1; +export const ATTESTER_PRIVATE_KEYS_START_INDEX = 1001; export interface NodeContext { node: AztecNodeService; @@ -22,13 +23,13 @@ export interface NodeContext { account: AztecAddress; } -export function generateNodePrivateKeys(startIndex: number, numberOfNodes: number): `0x${string}`[] { - const nodePrivateKeys: `0x${string}`[] = []; +export function generatePrivateKeys(startIndex: number, numberOfKeys: number): `0x${string}`[] { + const privateKeys: `0x${string}`[] = []; // Do not start from 0 as it is used during setup - for (let i = startIndex; i < startIndex + numberOfNodes; i++) { - nodePrivateKeys.push(`0x${getPrivateKeyFromIndex(i)!.toString('hex')}`); + for (let i = startIndex; i < startIndex + numberOfKeys; i++) { + privateKeys.push(`0x${getPrivateKeyFromIndex(i)!.toString('hex')}`); } - return nodePrivateKeys; + return privateKeys; } export function createNodes( @@ -45,7 +46,7 @@ export function createNodes( const port = bootNodePort + i + 1; const dataDir = dataDirectory ? `${dataDirectory}-${i}` : undefined; - const nodePromise = createNode(config, port, bootstrapNodeEnr, i + PRIVATE_KEYS_START_INDEX, dataDir, metricsPort); + const nodePromise = createNode(config, port, bootstrapNodeEnr, i, dataDir, metricsPort); nodePromises.push(nodePromise); } return Promise.all(nodePromises); @@ -56,17 +57,11 @@ export async function createNode( config: AztecNodeConfig, tcpPort: number, bootstrapNode: string | undefined, - publisherAddressIndex: number, + accountIndex: number, dataDirectory?: string, metricsPort?: number, ) { - const validatorConfig = await createValidatorConfig( - config, - bootstrapNode, - tcpPort, - publisherAddressIndex, - dataDirectory, - ); + const validatorConfig = await createValidatorConfig(config, bootstrapNode, tcpPort, accountIndex, dataDirectory); const telemetryClient = await getEndToEndTestTelemetryClient(metricsPort, /*serviceName*/ `node:${tcpPort}`); @@ -85,11 +80,15 @@ export async function createValidatorConfig( ) { port = port ?? 
(await getPort()); - const privateKey = getPrivateKeyFromIndex(accountIndex); - const privateKeyHex: `0x${string}` = `0x${privateKey!.toString('hex')}`; + const attesterPrivateKey: `0x${string}` = `0x${getPrivateKeyFromIndex( + ATTESTER_PRIVATE_KEYS_START_INDEX + accountIndex, + )!.toString('hex')}`; + const proposerPrivateKey: `0x${string}` = `0x${getPrivateKeyFromIndex( + PROPOSER_PRIVATE_KEYS_START_INDEX + accountIndex, + )!.toString('hex')}`; - config.publisherPrivateKey = privateKeyHex; - config.validatorPrivateKey = privateKeyHex; + config.validatorPrivateKey = attesterPrivateKey; + config.publisherPrivateKey = proposerPrivateKey; const nodeConfig: AztecNodeConfig = { ...config, diff --git a/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts b/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts index 488e7291bda..8aa2a934b33 100644 --- a/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts +++ b/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts @@ -296,9 +296,9 @@ async function setupFromFresh( const deployL1ContractsValues = await setupL1Contracts(aztecNodeConfig.l1RpcUrl, hdAccount, logger, { salt: opts.salt, - initialValidators: opts.initialValidators, ...deployL1ContractsArgs, ...getL1ContractsConfigEnvVars(), + initialValidators: opts.initialValidators, }); aztecNodeConfig.l1Contracts = deployL1ContractsValues.l1ContractAddresses; aztecNodeConfig.l1PublishRetryIntervalMS = 100; @@ -317,7 +317,7 @@ async function setupFromFresh( const feeJuice = getContract({ address: deployL1ContractsValues.l1ContractAddresses.feeJuiceAddress.toString(), - abi: l1Artifacts.feeJuice.contractAbi, + abi: l1Artifacts.feeAsset.contractAbi, client: deployL1ContractsValues.walletClient, }); diff --git a/yarn-project/end-to-end/src/fixtures/utils.ts b/yarn-project/end-to-end/src/fixtures/utils.ts index db1825bb89c..482dfc15775 100644 --- a/yarn-project/end-to-end/src/fixtures/utils.ts +++ b/yarn-project/end-to-end/src/fixtures/utils.ts @@ -377,7 +377,7 @@ export async function setup( const feeJuice = getContract({ address: deployL1ContractsValues.l1ContractAddresses.feeJuiceAddress.toString(), - abi: l1Artifacts.feeJuice.contractAbi, + abi: l1Artifacts.feeAsset.contractAbi, client: deployL1ContractsValues.walletClient, }); diff --git a/yarn-project/end-to-end/src/guides/up_quick_start.test.ts b/yarn-project/end-to-end/src/guides/up_quick_start.test.ts index 15f31c364e2..635334127c3 100644 --- a/yarn-project/end-to-end/src/guides/up_quick_start.test.ts +++ b/yarn-project/end-to-end/src/guides/up_quick_start.test.ts @@ -9,9 +9,12 @@ describe('guides/up_quick_start', () => { // TODO: update to not use CLI it('works', async () => { await waitForPXE(createPXEClient(PXE_URL)); - execSync(`DEBUG="aztec:*" PXE_URL=\${PXE_URL:-http://localhost:8080} ./src/guides/up_quick_start.sh`, { - shell: '/bin/bash', - stdio: 'inherit', - }); + execSync( + `LOG_LEVEL=\${LOG_LEVEL:-verbose} PXE_URL=\${PXE_URL:-http://localhost:8080} ./src/guides/up_quick_start.sh`, + { + shell: '/bin/bash', + stdio: 'inherit', + }, + ); }); }); diff --git a/yarn-project/end-to-end/src/prover-coordination/e2e_prover_coordination.test.ts b/yarn-project/end-to-end/src/prover-coordination/e2e_prover_coordination.test.ts index bf0cf8934e0..899b52437f7 100644 --- a/yarn-project/end-to-end/src/prover-coordination/e2e_prover_coordination.test.ts +++ b/yarn-project/end-to-end/src/prover-coordination/e2e_prover_coordination.test.ts @@ -139,12 +139,13 @@ describe('e2e_prover_coordination', () => { proposer: EthAddress; prover: 
EthAddress; }) => { - const [epochToProve, basisPointFee, bondAmount, prover, proposer] = await rollupContract.read.proofClaim(); + const { epochToProve, basisPointFee, bondAmount, bondProvider, proposerClaimant } = + await rollupContract.read.getProofClaim(); expect(epochToProve).toEqual(expected.epochToProve); expect(basisPointFee).toEqual(BigInt(expected.basisPointFee)); expect(bondAmount).toEqual(expected.bondAmount); - expect(prover).toEqual(expected.prover.toChecksumString()); - expect(proposer).toEqual(expected.proposer.toChecksumString()); + expect(bondProvider).toEqual(expected.prover.toChecksumString()); + expect(proposerClaimant).toEqual(expected.proposer.toChecksumString()); }; const performEscrow = async (amount: bigint) => { diff --git a/yarn-project/end-to-end/src/shared/browser.ts b/yarn-project/end-to-end/src/shared/browser.ts index ea9dde82e56..50e09adf022 100644 --- a/yarn-project/end-to-end/src/shared/browser.ts +++ b/yarn-project/end-to-end/src/shared/browser.ts @@ -6,6 +6,7 @@ import * as AztecJs from '@aztec/aztec.js'; import { TokenContractArtifact } from '@aztec/noir-contracts.js/Token'; import { contractArtifactToBuffer } from '@aztec/types/abi'; +import getPort from 'get-port'; import { type Server } from 'http'; import Koa from 'koa'; import serve from 'koa-static'; @@ -77,16 +78,18 @@ export const browserTestSuite = ( app = new Koa(); app.use(serve(path.resolve(__dirname, './web'))); + const debuggingPort = await getPort({ port: 9222 }); browser = await launch({ executablePath: process.env.CHROME_BIN, headless: true, + debuggingPort, args: [ '--no-sandbox', '--headless', '--disable-gpu', '--disable-dev-shm-usage', '--disable-software-rasterizer', - '--remote-debugging-port=9222', + `--remote-debugging-port=${debuggingPort}`, ], }); page = await browser.newPage(); diff --git a/yarn-project/end-to-end/src/shared/cross_chain_test_harness.ts b/yarn-project/end-to-end/src/shared/cross_chain_test_harness.ts index d68750db230..a72729961bf 100644 --- a/yarn-project/end-to-end/src/shared/cross_chain_test_harness.ts +++ b/yarn-project/end-to-end/src/shared/cross_chain_test_harness.ts @@ -76,9 +76,11 @@ export async function deployAndInitializeTokenAndBridgeContracts( underlyingERC20: any; }> { if (!underlyingERC20Address) { - underlyingERC20Address = await deployL1Contract(walletClient, publicClient, TestERC20Abi, TestERC20Bytecode).then( - ({ address }) => address, - ); + underlyingERC20Address = await deployL1Contract(walletClient, publicClient, TestERC20Abi, TestERC20Bytecode, [ + 'Underlying', + 'UND', + walletClient.account.address, + ]).then(({ address }) => address); } const underlyingERC20 = getContract({ address: underlyingERC20Address!.toString(), @@ -86,6 +88,9 @@ export async function deployAndInitializeTokenAndBridgeContracts( client: walletClient, }); + // allow anyone to mint + await underlyingERC20.write.setFreeForAll([true], {} as any); + // deploy the token portal const { address: tokenPortalAddress } = await deployL1Contract( walletClient, diff --git a/yarn-project/end-to-end/src/spartan/gating-passive.test.ts b/yarn-project/end-to-end/src/spartan/gating-passive.test.ts index 6369f912a7a..2285dac373d 100644 --- a/yarn-project/end-to-end/src/spartan/gating-passive.test.ts +++ b/yarn-project/end-to-end/src/spartan/gating-passive.test.ts @@ -41,7 +41,7 @@ const { SPARTAN_DIR, INSTANCE_NAME, } = config; -const debugLogger = createDebugLogger('aztec:spartan-test:reorg'); +const debugLogger = createDebugLogger('aztec:spartan-test:gating-passive'); 
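// A worked example of the missed-slot bound checked in the test below (the numbers are
// illustrative assumptions, not values from this PR): with epochDuration = 32 slots and
// MAX_MISSED_SLOT_PERCENT = 0.5,
//   perfectPending = controlTips.pending + 32            // every slot produced a block
//   missedSlots    = perfectPending - newTips.pending
//   maxMissedSlots = floor(32 * 0.5) = 16
// so expect(missedSlots).toBeLessThanOrEqual(maxMissedSlots) fails only when more than half of
// the slots in the observed epoch went unfilled, i.e. a genuine regression rather than the odd
// slot lost to network chaos.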
describe('a test that passively observes the network in the presence of network chaos', () => { jest.setTimeout(60 * 60 * 1000); // 60 minutes @@ -126,14 +126,16 @@ describe('a test that passively observes the network in the presence of network await sleep(Number(epochDuration * slotDuration) * 1000); const newTips = await rollupCheatCodes.getTips(); - const expectedPending = - controlTips.pending + BigInt(Math.floor((1 - MAX_MISSED_SLOT_PERCENT) * Number(epochDuration))); - expect(newTips.pending).toBeGreaterThan(expectedPending); // calculate the percentage of slots missed const perfectPending = controlTips.pending + BigInt(Math.floor(Number(epochDuration))); const missedSlots = Number(perfectPending) - Number(newTips.pending); const missedSlotsPercentage = (missedSlots / Number(epochDuration)) * 100; debugLogger.info(`Missed ${missedSlots} slots, ${missedSlotsPercentage.toFixed(2)}%`); + + // Ensure we missed at most the max allowed slots + // This is in place to ensure that we don't have a bad regression in the network + const maxMissedSlots = Math.floor(Number(epochDuration) * MAX_MISSED_SLOT_PERCENT); + expect(missedSlots).toBeLessThanOrEqual(maxMissedSlots); } }); }); diff --git a/yarn-project/end-to-end/webpack.config.js b/yarn-project/end-to-end/webpack.config.js index 88f6bb5178c..3ae5808f82f 100644 --- a/yarn-project/end-to-end/webpack.config.js +++ b/yarn-project/end-to-end/webpack.config.js @@ -60,6 +60,7 @@ export default { fs: false, path: false, url: false, + tty: false, worker_threads: false, buffer: require.resolve('buffer/'), util: require.resolve('util/'), diff --git a/yarn-project/ethereum/package.json b/yarn-project/ethereum/package.json index 887ad01645d..f6be604435c 100644 --- a/yarn-project/ethereum/package.json +++ b/yarn-project/ethereum/package.json @@ -41,6 +41,8 @@ "@jest/globals": "^29.5.0", "@types/jest": "^29.5.0", "@types/node": "^18.14.6", + "@viem/anvil": "^0.0.10", + "get-port": "^7.1.0", "jest": "^29.5.0", "ts-node": "^10.9.1", "typescript": "^5.0.4" diff --git a/yarn-project/ethereum/src/constants.ts b/yarn-project/ethereum/src/constants.ts index c1f4b34d732..2fea0175aca 100644 --- a/yarn-project/ethereum/src/constants.ts +++ b/yarn-project/ethereum/src/constants.ts @@ -2,3 +2,4 @@ import { type Hex } from 'viem'; export const NULL_KEY: Hex = `0x${'0000000000000000000000000000000000000000000000000000000000000000'}`; export const AZTEC_TEST_CHAIN_ID = 677692; +export const MINIMUM_STAKE = BigInt(100e18); diff --git a/yarn-project/ethereum/src/deploy_l1_contracts.ts b/yarn-project/ethereum/src/deploy_l1_contracts.ts index e9d8522f636..52a7aed1907 100644 --- a/yarn-project/ethereum/src/deploy_l1_contracts.ts +++ b/yarn-project/ethereum/src/deploy_l1_contracts.ts @@ -5,6 +5,8 @@ import { type DebugLogger } from '@aztec/foundation/log'; import { CoinIssuerAbi, CoinIssuerBytecode, + ExtRollupLibAbi, + ExtRollupLibBytecode, FeeJuicePortalAbi, FeeJuicePortalBytecode, GovernanceAbi, @@ -13,6 +15,8 @@ import { GovernanceProposerBytecode, InboxAbi, InboxBytecode, + LeonidasLibAbi, + LeonidasLibBytecode, OutboxAbi, OutboxBytecode, RegistryAbi, @@ -22,12 +26,8 @@ import { RollupAbi, RollupBytecode, RollupLinkReferences, - SampleLibAbi, - SampleLibBytecode, TestERC20Abi, TestERC20Bytecode, - TxsDecoderAbi, - TxsDecoderBytecode, } from '@aztec/l1-artifacts'; import type { Abi, Narrow } from 'abitype'; @@ -53,8 +53,10 @@ import { type HDAccount, type PrivateKeyAccount, mnemonicToAccount, privateKeyTo import { foundry } from 'viem/chains'; import { type 
L1ContractsConfig } from './config.js'; +import { MINIMUM_STAKE } from './constants.js'; import { isAnvilTestChain } from './ethereum_chain.js'; import { type L1ContractAddresses } from './l1_contract_addresses.js'; +import { L1TxUtils } from './l1_tx_utils.js'; /** * Return type of the deployL1Contract function. @@ -126,10 +128,14 @@ export interface L1ContractArtifactsForDeployment { * Rollup contract artifacts */ rollup: ContractArtifacts; + /** + * The token to stake. + */ + stakingAsset: ContractArtifacts; /** * The token to pay for gas. This will be bridged to L2 via the feeJuicePortal below */ - feeJuice: ContractArtifacts; + feeAsset: ContractArtifacts; /** * Fee juice portal contract artifacts. Optional for now as gas is not strictly enforced */ @@ -171,18 +177,22 @@ export const l1Artifacts: L1ContractArtifactsForDeployment = { libraries: { linkReferences: RollupLinkReferences, libraryCode: { - TxsDecoder: { - contractAbi: TxsDecoderAbi, - contractBytecode: TxsDecoderBytecode, + LeonidasLib: { + contractAbi: LeonidasLibAbi, + contractBytecode: LeonidasLibBytecode, }, - SampleLib: { - contractAbi: SampleLibAbi, - contractBytecode: SampleLibBytecode, + ExtRollupLib: { + contractAbi: ExtRollupLibAbi, + contractBytecode: ExtRollupLibBytecode, }, }, }, }, - feeJuice: { + stakingAsset: { + contractAbi: TestERC20Abi, + contractBytecode: TestERC20Bytecode, + }, + feeAsset: { contractAbi: TestERC20Abi, contractBytecode: TestERC20Bytecode, }, @@ -306,8 +316,19 @@ export const deployL1Contracts = async ( const registryAddress = await govDeployer.deploy(l1Artifacts.registry, [account.address.toString()]); logger.info(`Deployed Registry at ${registryAddress}`); - const feeJuiceAddress = await govDeployer.deploy(l1Artifacts.feeJuice); - logger.info(`Deployed Fee Juice at ${feeJuiceAddress}`); + const feeAssetAddress = await govDeployer.deploy(l1Artifacts.feeAsset, [ + 'FeeJuice', + 'FEE', + account.address.toString(), + ]); + logger.info(`Deployed Fee Juice at ${feeAssetAddress}`); + + const stakingAssetAddress = await govDeployer.deploy(l1Artifacts.stakingAsset, [ + 'Staking', + 'STK', + account.address.toString(), + ]); + logger.info(`Deployed Staking Asset at ${stakingAssetAddress}`); // @todo #8084 // @note These numbers are just chosen to make testing simple. @@ -320,21 +341,23 @@ export const deployL1Contracts = async ( ]); logger.info(`Deployed GovernanceProposer at ${governanceProposerAddress}`); + // @note @LHerskind the assets are expected to be the same at some point, but for better + // configurability they are different for now. 
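// For illustration (not part of the diff): the fee asset and the staking asset are two
// independent TestERC20 deployments that differ only in constructor args, so a quick sanity
// check is to read their symbols back. Assumes a local anvil node, the addresses logged by the
// deployment below, and that TestERC20 exposes the standard ERC20 symbol() getter.
import { TestERC20Abi } from '@aztec/l1-artifacts';
import { createPublicClient, getContract, http } from 'viem';
import { foundry } from 'viem/chains';

const client = createPublicClient({ chain: foundry, transport: http('http://127.0.0.1:8545') });
const symbolOf = (address: `0x${string}`) =>
  getContract({ address, abi: TestERC20Abi, client }).read.symbol();
// Expected: symbolOf(feeAssetAddress) === 'FEE' and symbolOf(stakingAssetAddress) === 'STK'.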
const governanceAddress = await govDeployer.deploy(l1Artifacts.governance, [ - feeJuiceAddress.toString(), + feeAssetAddress.toString(), governanceProposerAddress.toString(), ]); logger.info(`Deployed Governance at ${governanceAddress}`); const coinIssuerAddress = await govDeployer.deploy(l1Artifacts.coinIssuer, [ - feeJuiceAddress.toString(), + feeAssetAddress.toString(), 1n * 10n ** 18n, // @todo #8084 governanceAddress.toString(), ]); logger.info(`Deployed CoinIssuer at ${coinIssuerAddress}`); const rewardDistributorAddress = await govDeployer.deploy(l1Artifacts.rewardDistributor, [ - feeJuiceAddress.toString(), + feeAssetAddress.toString(), registryAddress.toString(), governanceAddress.toString(), ]); @@ -347,27 +370,29 @@ export const deployL1Contracts = async ( const feeJuicePortalAddress = await deployer.deploy(l1Artifacts.feeJuicePortal, [ registryAddress.toString(), - feeJuiceAddress.toString(), + feeAssetAddress.toString(), args.l2FeeJuiceAddress.toString(), ]); logger.info(`Deployed Fee Juice Portal at ${feeJuicePortalAddress}`); - const rollupArgs = { + const rollupConfigArgs = { aztecSlotDuration: args.aztecSlotDuration, aztecEpochDuration: args.aztecEpochDuration, targetCommitteeSize: args.aztecTargetCommitteeSize, aztecEpochProofClaimWindowInL2Slots: args.aztecEpochProofClaimWindowInL2Slots, + minimumStake: MINIMUM_STAKE, }; - const rollupAddress = await deployer.deploy(l1Artifacts.rollup, [ + const rollupArgs = [ feeJuicePortalAddress.toString(), rewardDistributorAddress.toString(), + stakingAssetAddress.toString(), args.vkTreeRoot.toString(), args.protocolContractTreeRoot.toString(), account.address.toString(), - args.initialValidators?.map(v => v.toString()) ?? [], - rollupArgs, - ]); - logger.info(`Deployed Rollup at ${rollupAddress}`, rollupArgs); + rollupConfigArgs, + ]; + const rollupAddress = await deployer.deploy(l1Artifacts.rollup, rollupArgs); + logger.info(`Deployed Rollup at ${rollupAddress}`, rollupConfigArgs); await deployer.waitForDeployments(); logger.info(`All core contracts deployed`); @@ -378,9 +403,15 @@ export const deployL1Contracts = async ( client: walletClient, }); - const feeJuice = getContract({ - address: feeJuiceAddress.toString(), - abi: l1Artifacts.feeJuice.contractAbi, + const feeAsset = getContract({ + address: feeAssetAddress.toString(), + abi: l1Artifacts.feeAsset.contractAbi, + client: walletClient, + }); + + const stakingAsset = getContract({ + address: stakingAssetAddress.toString(), + abi: l1Artifacts.stakingAsset.contractAbi, client: walletClient, }); @@ -393,12 +424,40 @@ export const deployL1Contracts = async ( // Transaction hashes to await const txHashes: Hex[] = []; + { + const txHash = await feeAsset.write.setFreeForAll([true], {} as any); + logger.info(`Fee asset set to free for all in ${txHash}`); + txHashes.push(txHash); + } + + if (args.initialValidators && args.initialValidators.length > 0) { + // Mint tokens, approve them, use cheat code to initialise validator set without setting up the epoch. 
+ const stakeNeeded = MINIMUM_STAKE * BigInt(args.initialValidators.length); + await Promise.all( + [ + await stakingAsset.write.mint([walletClient.account.address, stakeNeeded], {} as any), + await stakingAsset.write.approve([rollupAddress.toString(), stakeNeeded], {} as any), + ].map(txHash => publicClient.waitForTransactionReceipt({ hash: txHash })), + ); + + const initiateValidatorSetTxHash = await rollup.write.cheat__InitialiseValidatorSet([ + args.initialValidators.map(v => ({ + attester: v.toString(), + proposer: v.toString(), + withdrawer: v.toString(), + amount: MINIMUM_STAKE, + })), + ]); + txHashes.push(initiateValidatorSetTxHash); + logger.info(`Initialized validator set (${args.initialValidators.join(', ')}) in tx ${initiateValidatorSetTxHash}`); + } + // @note This value MUST match what is in `constants.nr`. It is currently specified here instead of just importing // because there is circular dependency hell. This is a temporary solution. #3342 // @todo #8084 // fund the portal contract with Fee Juice - const FEE_JUICE_INITIAL_MINT = 200000000000000000000; - const mintTxHash = await feeJuice.write.mint([feeJuicePortalAddress.toString(), FEE_JUICE_INITIAL_MINT], {} as any); + const FEE_JUICE_INITIAL_MINT = 200000000000000000000n; + const mintTxHash = await feeAsset.write.mint([feeJuicePortalAddress.toString(), FEE_JUICE_INITIAL_MINT], {} as any); // @note This is used to ensure we fully wait for the transaction when running against a real chain // otherwise we execute subsequent transactions too soon @@ -414,7 +473,7 @@ export const deployL1Contracts = async ( } logger.info( - `Initialized Fee Juice Portal at ${feeJuicePortalAddress} to bridge between L1 ${feeJuiceAddress} to L2 ${args.l2FeeJuiceAddress}`, + `Initialized Fee Juice Portal at ${feeJuicePortalAddress} to bridge between L1 ${feeAssetAddress} to L2 ${args.l2FeeJuiceAddress}`, ); if (isAnvilTestChain(chain.id)) { @@ -492,7 +551,8 @@ export const deployL1Contracts = async ( registryAddress, inboxAddress, outboxAddress, - feeJuiceAddress, + feeJuiceAddress: feeAssetAddress, + stakingAssetAddress, feeJuicePortalAddress, coinIssuerAddress, rewardDistributorAddress, @@ -607,10 +667,21 @@ export async function deployL1Contract( logger?: DebugLogger, ): Promise<{ address: EthAddress; txHash: Hex | undefined }> { let txHash: Hex | undefined = undefined; - let address: Hex | null | undefined = undefined; + let resultingAddress: Hex | null | undefined = undefined; + + const l1TxUtils = new L1TxUtils(publicClient, walletClient, logger); if (libraries) { - // @note Assumes that we wont have nested external libraries. + // Note that this does NOT work well for linked libraries having linked libraries. 
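// For context: a Solidity link reference is a 40-character placeholder of the form
// __$<34-hex-chars>$__ baked into unlinked bytecode wherever a library address belongs. Since a
// bare (un-prefixed) address is also exactly 40 hex characters, linking is a straight textual
// substitution that leaves all bytecode offsets intact. A minimal sketch, assuming a single
// level of libraries as the note above requires (linkLibrary is a hypothetical helper, not the
// PR's implementation):
function linkLibrary(
  bytecode: `0x${string}`,
  placeholder: string, // e.g. '__$5f33f4dd3dcb9f92464a4e1b6de0c56b60$__'
  libraryAddress: `0x${string}`,
): `0x${string}` {
  // Drop the 0x prefix from the address and splice it over every occurrence of the placeholder.
  return bytecode.split(placeholder).join(libraryAddress.slice(2)) as `0x${string}`;
}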
+ + // Verify that all link references have corresponding code + for (const linkRef in libraries.linkReferences) { + for (const contractName in libraries.linkReferences[linkRef]) { + if (!libraries.libraryCode[contractName]) { + throw new Error(`Missing library code for ${contractName}`); + } + } + } const replacements: Record<string, EthAddress> = {}; @@ -659,21 +730,31 @@ export async function deployL1Contract( const salt = padHex(maybeSalt, { size: 32 }); const deployer: Hex = '0x4e59b44847b379578588920cA78FbF26c0B4956C'; const calldata = encodeDeployData({ abi, bytecode, args }); - address = getContractAddress({ from: deployer, salt, bytecode: calldata, opcode: 'CREATE2' }); - const existing = await publicClient.getBytecode({ address }); + resultingAddress = getContractAddress({ from: deployer, salt, bytecode: calldata, opcode: 'CREATE2' }); + const existing = await publicClient.getBytecode({ address: resultingAddress }); if (existing === undefined || existing === '0x') { - txHash = await walletClient.sendTransaction({ to: deployer, data: concatHex([salt, calldata]) }); - logger?.verbose(`Deploying contract with salt ${salt} to address ${address} in tx ${txHash}`); + const res = await l1TxUtils.sendTransaction({ + to: deployer, + data: concatHex([salt, calldata]), + }); + txHash = res.txHash; + + logger?.verbose(`Deployed contract with salt ${salt} to address ${resultingAddress} in tx ${txHash}.`); } else { - logger?.verbose(`Skipping existing deployment of contract with salt ${salt} to address ${address}`); + logger?.verbose(`Skipping existing deployment of contract with salt ${salt} to address ${resultingAddress}`); } } else { - txHash = await walletClient.deployContract({ abi, bytecode, args }); - logger?.verbose(`Deploying contract in tx ${txHash}`); - const receipt = await publicClient.waitForTransactionReceipt({ hash: txHash, pollingInterval: 100 }); - address = receipt.contractAddress; - if (!address) { + // Regular deployment path + const deployData = encodeDeployData({ abi, bytecode, args }); + const receipt = await l1TxUtils.sendAndMonitorTransaction({ + to: null, + data: deployData, + }); + + txHash = receipt.transactionHash; + resultingAddress = receipt.contractAddress; + if (!resultingAddress) { throw new Error( `No contract address found in receipt: ${JSON.stringify(receipt, (_, val) => typeof val === 'bigint' ? String(val) : val, @@ -682,6 +763,6 @@ } } - return { address: EthAddress.fromString(address!), txHash }; + return { address: EthAddress.fromString(resultingAddress!), txHash }; } // docs:end:deployL1Contract diff --git a/yarn-project/ethereum/src/eth_cheat_codes.ts b/yarn-project/ethereum/src/eth_cheat_codes.ts new file mode 100644 index 00000000000..74918bf4653 --- /dev/null +++ b/yarn-project/ethereum/src/eth_cheat_codes.ts @@ -0,0 +1,316 @@ +import { toBigIntBE, toHex } from '@aztec/foundation/bigint-buffer'; +import { keccak256 } from '@aztec/foundation/crypto'; +import { type EthAddress } from '@aztec/foundation/eth-address'; +import { createDebugLogger } from '@aztec/foundation/log'; + +import fs from 'fs'; +import { type Hex } from 'viem'; + +/** + * A class that provides utility functions for interacting with ethereum (L1).
+ */ +export class EthCheatCodes { + constructor( + /** + * The RPC URL to use for interacting with the chain + */ + public rpcUrl: string, + /** + * The logger to use for the eth cheatcodes + */ + public logger = createDebugLogger('aztec:cheat_codes:eth'), + ) {} + + async rpcCall(method: string, params: any[]) { + const paramsString = JSON.stringify(params); + const content = { + body: `{"jsonrpc":"2.0", "method": "${method}", "params": ${paramsString}, "id": 1}`, + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + }; + return await (await fetch(this.rpcUrl, content)).json(); + } + + /** + * Get the auto mine status of the underlying chain + * @returns True if automine is on, false otherwise + */ + public async isAutoMining(): Promise<boolean> { + try { + const res = await this.rpcCall('anvil_getAutomine', []); + return res.result; + } catch (err) { + this.logger.error(`Calling "anvil_getAutomine" failed with:`, err); + } + return false; + } + + /** + * Get the current block number + * @returns The current block number + */ + public async blockNumber(): Promise<number> { + const res = await this.rpcCall('eth_blockNumber', []); + return parseInt(res.result, 16); + } + + /** + * Get the current chainId + * @returns The current chainId + */ + public async chainId(): Promise<number> { + const res = await this.rpcCall('eth_chainId', []); + return parseInt(res.result, 16); + } + + /** + * Get the current timestamp + * @returns The current timestamp + */ + public async timestamp(): Promise<number> { + const res = await this.rpcCall('eth_getBlockByNumber', ['latest', true]); + return parseInt(res.result.timestamp, 16); + } + + /** + * Advance the chain by a number of blocks + * @param numberOfBlocks - The number of blocks to mine + */ + public async mine(numberOfBlocks = 1): Promise<void> { + const res = await this.rpcCall('hardhat_mine', [numberOfBlocks]); + if (res.error) { + throw new Error(`Error mining: ${res.error.message}`); + } + this.logger.verbose(`Mined ${numberOfBlocks} L1 blocks`); + } + + /** + * Mines a single block with evm_mine + */ + public async evmMine(): Promise<void> { + const res = await this.rpcCall('evm_mine', []); + if (res.error) { + throw new Error(`Error mining: ${res.error.message}`); + } + } + + /** + * Set the balance of an account + * @param account - The account to set the balance for + * @param balance - The balance to set + */ + public async setBalance(account: EthAddress, balance: bigint): Promise<void> { + const res = await this.rpcCall('anvil_setBalance', [account.toString(), toHex(balance)]); + if (res.error) { + throw new Error(`Error setting balance for ${account}: ${res.error.message}`); + } + this.logger.verbose(`Set balance for ${account} to ${balance}`); + } + + /** + * Set the interval between blocks (block time) + * @param interval - The interval to use between blocks + */ + public async setBlockInterval(interval: number): Promise<void> { + const res = await this.rpcCall('anvil_setBlockTimestampInterval', [interval]); + if (res.error) { + throw new Error(`Error setting block interval: ${res.error.message}`); + } + this.logger.verbose(`Set L1 block interval to ${interval}`); + } + + /** + * Set the next block base fee per gas + * @param baseFee - The base fee to set + */ + public async setNextBlockBaseFeePerGas(baseFee: bigint): Promise<void> { + const res = await this.rpcCall('anvil_setNextBlockBaseFeePerGas', [baseFee.toString()]); + if (res.error) { + throw new Error(`Error setting next block base fee per gas: ${res.error.message}`); + } + this.logger.verbose(`Set L1 next block base fee per gas to ${baseFee}`); + } + + /** + * Set the interval at which blocks get mined (interval mining) + * @param seconds - The interval to use between blocks + */ + public async setIntervalMining(seconds: number): Promise<void> { + const res = await this.rpcCall('anvil_setIntervalMining', [seconds]); + if (res.error) { + throw new Error(`Error setting interval mining: ${res.error.message}`); + } + this.logger.verbose(`Set L1 interval mining to ${seconds} seconds`); + } + + /** + * Set the automine status of the underlying anvil chain + * @param automine - The automine status to set + */ + public async setAutomine(automine: boolean): Promise<void> { + const res = await this.rpcCall('anvil_setAutomine', [automine]); + if (res.error) { + throw new Error(`Error setting automine: ${res.error.message}`); + } + this.logger.verbose(`Set L1 automine to ${automine}`); + } + + /** + * Drop a transaction from the mempool + * @param txHash - The transaction hash + */ + public async dropTransaction(txHash: Hex): Promise<void> { + const res = await this.rpcCall('anvil_dropTransaction', [txHash]); + if (res.error) { + throw new Error(`Error dropping transaction: ${res.error.message}`); + } + this.logger.verbose(`Dropped transaction ${txHash}`); + } + + /** + * Set the next block timestamp + * @param timestamp - The timestamp to set the next block to + */ + public async setNextBlockTimestamp(timestamp: number): Promise<void> { + const res = await this.rpcCall('evm_setNextBlockTimestamp', [timestamp]); + if (res.error) { + throw new Error(`Error setting next block timestamp: ${res.error.message}`); + } + this.logger.verbose(`Set L1 next block timestamp to ${timestamp}`); + } + + /** + * Set the next block timestamp and mine the block + * @param timestamp - The timestamp to set the next block to + */ + public async warp(timestamp: number | bigint): Promise<void> { + const res = await this.rpcCall('evm_setNextBlockTimestamp', [Number(timestamp)]); + if (res.error) { + throw new Error(`Error warping: ${res.error.message}`); + } + await this.mine(); + this.logger.verbose(`Warped L1 timestamp to ${timestamp}`); + } + + /** + * Dumps the current chain state to a file. + * @param fileName - The file name to dump state into + */ + public async dumpChainState(fileName: string): Promise<void> { + const res = await this.rpcCall('hardhat_dumpState', []); + if (res.error) { + throw new Error(`Error dumping state: ${res.error.message}`); + } + const jsonContent = JSON.stringify(res.result); + fs.writeFileSync(`${fileName}.json`, jsonContent, 'utf8'); + this.logger.verbose(`Dumped state to ${fileName}`); + } + + /** + * Loads the chain state from a file. + * @param fileName - The file name to load state from + */ + public async loadChainState(fileName: string): Promise<void> { + const data = JSON.parse(fs.readFileSync(`${fileName}.json`, 'utf8')); + const res = await this.rpcCall('hardhat_loadState', [data]); + if (res.error) { + throw new Error(`Error loading state: ${res.error.message}`); + } + this.logger.verbose(`Loaded state from ${fileName}`); + } + + /** + * Load the value at a storage slot of a contract address on eth + * @param contract - The contract address + * @param slot - The storage slot + * @returns - The value at the storage slot + */ + public async load(contract: EthAddress, slot: bigint): Promise<bigint> { + const res = await this.rpcCall('eth_getStorageAt', [contract.toString(), toHex(slot), 'latest']); + return BigInt(res.result); + } + + /** + * Set the value at a storage slot of a contract address on eth + * @param contract - The contract address + * @param slot - The storage slot + * @param value - The value to set the storage slot to + */ + public async store(contract: EthAddress, slot: bigint, value: bigint): Promise<void> { + // for the rpc call, we need to change value to be a 32 byte hex string. + const res = await this.rpcCall('hardhat_setStorageAt', [contract.toString(), toHex(slot), toHex(value, true)]); + if (res.error) { + throw new Error(`Error setting storage for contract ${contract} at ${slot}: ${res.error.message}`); + } + this.logger.verbose(`Set L1 storage for contract ${contract} at ${slot} to ${value}`); + } + + /** + * Computes the slot value for a given map and key. + * @param baseSlot - The base slot of the map (specified in Aztec.nr contract) + * @param key - The key to lookup in the map + * @returns The storage slot of the value in the map + */ + public keccak256(baseSlot: bigint, key: bigint): bigint { + // abi encode (removing the 0x) - concat key and baseSlot (both padded to 32 bytes) + const abiEncoded = toHex(key, true).substring(2) + toHex(baseSlot, true).substring(2); + return toBigIntBE(keccak256(Buffer.from(abiEncoded, 'hex'))); + } + + /** + * Send transactions impersonating an externally owned account or contract. + * @param who - The address to impersonate + */ + public async startImpersonating(who: EthAddress | Hex): Promise<void> { + const res = await this.rpcCall('hardhat_impersonateAccount', [who.toString()]); + if (res.error) { + throw new Error(`Error impersonating ${who}: ${res.error.message}`); + } + this.logger.verbose(`Impersonating ${who}`); + } + + /** + * Stop impersonating an account that you are currently impersonating. + * @param who - The address to stop impersonating + */ + public async stopImpersonating(who: EthAddress | Hex): Promise<void> { + const res = await this.rpcCall('hardhat_stopImpersonatingAccount', [who.toString()]); + if (res.error) { + throw new Error(`Error when stopping the impersonation of ${who}: ${res.error.message}`); + } + this.logger.verbose(`Stopped impersonating ${who}`); + } + + /** + * Set the bytecode for a contract + * @param contract - The contract address + * @param bytecode - The bytecode to set + */ + public async etch(contract: EthAddress, bytecode: `0x${string}`): Promise<void> { + const res = await this.rpcCall('hardhat_setCode', [contract.toString(), bytecode]); + if (res.error) { + throw new Error(`Error setting bytecode for ${contract}: ${res.error.message}`); + } + this.logger.verbose(`Set bytecode for ${contract} to ${bytecode}`); + } + + /** + * Get the bytecode for a contract + * @param contract - The contract address + * @returns The bytecode for the contract + */ + public async getBytecode(contract: EthAddress): Promise<`0x${string}`> { + const res = await this.rpcCall('eth_getCode', [contract.toString(), 'latest']); + return res.result; + } + + /** + * Get the raw transaction object for a given transaction hash + * @param txHash - The transaction hash + * @returns The raw transaction + */ + public async getRawTransaction(txHash: Hex): Promise<`0x${string}`> { + const res = await this.rpcCall('debug_getRawTransaction', [txHash]); + return res.result; + } +} diff --git a/yarn-project/ethereum/src/index.ts b/yarn-project/ethereum/src/index.ts index 30a990db651..d6393560093 100644 --- a/yarn-project/ethereum/src/index.ts +++ b/yarn-project/ethereum/src/index.ts @@ -1,8 +1,10 @@ export * from './constants.js'; export * from './deploy_l1_contracts.js'; +export * from './ethereum_chain.js'; +export * from './eth_cheat_codes.js'; +export * from './l1_tx_utils.js'; export * from './l1_contract_addresses.js'; export * from './l1_reader.js'; -export * from './ethereum_chain.js'; export * from './utils.js'; export * from './config.js'; export * from './types.js'; diff --git a/yarn-project/ethereum/src/l1_contract_addresses.ts b/yarn-project/ethereum/src/l1_contract_addresses.ts index 1733e15fc06..eca35f4edea 100644 --- a/yarn-project/ethereum/src/l1_contract_addresses.ts +++ b/yarn-project/ethereum/src/l1_contract_addresses.ts @@ -20,6 +20,7 @@ export const L1ContractsNames = [ 'rewardDistributorAddress', 'governanceProposerAddress', 'governanceAddress', + 'stakingAssetAddress', ] as const; /** Provides the directory of current L1 contract addresses */ @@ -33,6 +34,7 @@ export const L1ContractAddressesSchema = z.object({ inboxAddress: schemas.EthAddress, outboxAddress: schemas.EthAddress, feeJuiceAddress: schemas.EthAddress, + stakingAssetAddress: schemas.EthAddress, feeJuicePortalAddress: schemas.EthAddress, coinIssuerAddress: schemas.EthAddress, rewardDistributorAddress: schemas.EthAddress, @@ -68,6 +70,11 @@ export const l1ContractAddressesMapping: ConfigMappingsType<L1ContractAddresses> description: 'The deployed L1 Fee Juice contract address.', parseEnv, }, + stakingAssetAddress: { + env: 'STAKING_ASSET_CONTRACT_ADDRESS', + description: 'The deployed L1 staking asset contract address.', + parseEnv, + }, feeJuicePortalAddress: { env: 'FEE_JUICE_PORTAL_CONTRACT_ADDRESS', description: 'The deployed L1 Fee Juice portal contract address.', diff --git a/yarn-project/ethereum/src/l1_tx_utils.test.ts b/yarn-project/ethereum/src/l1_tx_utils.test.ts new file mode 100644 index 00000000000..7dffaf011ce ---
/dev/null +++ b/yarn-project/ethereum/src/l1_tx_utils.test.ts @@ -0,0 +1,302 @@ +import { EthAddress } from '@aztec/foundation/eth-address'; +import { createDebugLogger } from '@aztec/foundation/log'; +import { sleep } from '@aztec/foundation/sleep'; + +import { type Anvil } from '@viem/anvil'; +import { + type Account, + type Chain, + type HttpTransport, + type PublicClient, + type WalletClient, + createPublicClient, + createWalletClient, + http, +} from 'viem'; +import { mnemonicToAccount, privateKeyToAccount } from 'viem/accounts'; +import { foundry } from 'viem/chains'; + +import { EthCheatCodes } from './eth_cheat_codes.js'; +import { L1TxUtils, defaultL1TxUtilsConfig } from './l1_tx_utils.js'; +import { startAnvil } from './test/start_anvil.js'; + +const MNEMONIC = 'test test test test test test test test test test test junk'; +const WEI_CONST = 1_000_000_000n; +// Simple contract that just returns 42 +const SIMPLE_CONTRACT_BYTECODE = '0x69602a60005260206000f3600052600a6016f3'; + +export type PendingTransaction = { + hash: `0x${string}`; + maxFeePerGas: bigint; + maxPriorityFeePerGas: bigint; +}; + +describe('GasUtils', () => { + let gasUtils: L1TxUtils; + let walletClient: WalletClient<HttpTransport, Chain, Account>; + let publicClient: PublicClient<HttpTransport, Chain>; + let anvil: Anvil; + let cheatCodes: EthCheatCodes; + const initialBaseFee = WEI_CONST; // 1 gwei + const logger = createDebugLogger('l1_gas_test'); + + beforeAll(async () => { + const { anvil: anvilInstance, rpcUrl } = await startAnvil(1); + anvil = anvilInstance; + cheatCodes = new EthCheatCodes(rpcUrl); + const hdAccount = mnemonicToAccount(MNEMONIC, { addressIndex: 0 }); + const privKeyRaw = hdAccount.getHdKey().privateKey; + if (!privKeyRaw) { + throw new Error('Failed to get private key'); + } + const privKey = Buffer.from(privKeyRaw).toString('hex'); + const account = privateKeyToAccount(`0x${privKey}`); + + publicClient = createPublicClient({ + transport: http(rpcUrl), + chain: foundry, + }); + + walletClient = createWalletClient({ + transport: http(rpcUrl), + chain: foundry, + account, + }); + + // set base fee + await publicClient.transport.request({ + method: 'anvil_setNextBlockBaseFeePerGas', + params: [initialBaseFee.toString()], + }); + await cheatCodes.evmMine(); + + gasUtils = new L1TxUtils(publicClient, walletClient, logger, { + gasLimitBufferPercentage: 20n, + maxGwei: 500n, + minGwei: 1n, + maxAttempts: 3, + checkIntervalMs: 100, + stallTimeMs: 1000, + }); + }); + + afterEach(async () => { + // Reset base fee + await cheatCodes.setNextBlockBaseFeePerGas(initialBaseFee); + await cheatCodes.evmMine(); + }); + afterAll(async () => { + // disabling interval mining as it seems to cause issues with stopping anvil + await cheatCodes.setIntervalMining(0); // Disable interval mining + await anvil.stop(); + }, 5_000); + + it('sends and monitors a simple transaction', async () => { + const receipt = await gasUtils.sendAndMonitorTransaction({ + to: '0x1234567890123456789012345678901234567890', + data: '0x', + value: 0n, + }); + + expect(receipt.status).toBe('success'); + }, 10_000); + + it('handles gas price spikes by retrying with higher gas price', async () => { + // Disable all forms of mining + await cheatCodes.setAutomine(false); + await cheatCodes.setIntervalMining(0); + + // Ensure initial base fee is low + await cheatCodes.setNextBlockBaseFeePerGas(initialBaseFee); + + const request = { + to: '0x1234567890123456789012345678901234567890' as `0x${string}`, + data: '0x' as `0x${string}`, + value: 0n, + }; + + const estimatedGas = await
publicClient.estimateGas(request); + + const originalMaxFeePerGas = WEI_CONST * 10n; + const originalMaxPriorityFeePerGas = WEI_CONST; + + const txHash = await walletClient.sendTransaction({ + ...request, + gas: estimatedGas, + maxFeePerGas: originalMaxFeePerGas, + maxPriorityFeePerGas: originalMaxPriorityFeePerGas, + }); + + const rawTx = await cheatCodes.getRawTransaction(txHash); + + // Temporarily drop the transaction + await cheatCodes.dropTransaction(txHash); + + // Mine a block with higher base fee + await cheatCodes.setNextBlockBaseFeePerGas((WEI_CONST * 15n) / 10n); + await cheatCodes.evmMine(); + + // Re-add the original tx + await publicClient.transport.request({ + method: 'eth_sendRawTransaction', + params: [rawTx], + }); + + // keeping auto-mining disabled to simulate a stuck transaction + // The monitor should detect the stall and create a replacement tx + + // Monitor should detect stall and replace with higher gas price + const monitorFn = gasUtils.monitorTransaction(request, txHash, { gasLimit: estimatedGas }); + + await sleep(2000); + // re-enable mining + await cheatCodes.setIntervalMining(1); + const receipt = await monitorFn; + expect(receipt.status).toBe('success'); + // Verify that a replacement transaction was created + expect(receipt.transactionHash).not.toBe(txHash); + + // Get details of replacement tx to verify higher gas price + const replacementTx = await publicClient.getTransaction({ hash: receipt.transactionHash }); + + expect(replacementTx.maxFeePerGas!).toBeGreaterThan(originalMaxFeePerGas); + expect(replacementTx.maxPriorityFeePerGas!).toBeGreaterThan(originalMaxPriorityFeePerGas); + }, 20_000); + + it('respects max gas price limits during spikes', async () => { + const maxGwei = 500n; + const newBaseFee = (maxGwei - 10n) * WEI_CONST; + + // Set base fee high but still under our max + await cheatCodes.setNextBlockBaseFeePerGas(newBaseFee); + + // Mine a new block to make the base fee change take effect + await cheatCodes.evmMine(); + + const receipt = await gasUtils.sendAndMonitorTransaction({ + to: '0x1234567890123456789012345678901234567890', + data: '0x', + value: 0n, + }); + + expect(receipt.effectiveGasPrice).toBeLessThanOrEqual(maxGwei * WEI_CONST); + }, 60_000); + + it('adds appropriate buffer to gas estimation', async () => { + const stableBaseFee = WEI_CONST * 10n; + await cheatCodes.setNextBlockBaseFeePerGas(stableBaseFee); + await cheatCodes.evmMine(); + + // First deploy without any buffer + const baselineGasUtils = new L1TxUtils(publicClient, walletClient, logger, { + gasLimitBufferPercentage: 0n, + maxGwei: 500n, + minGwei: 10n, // Increased minimum gas price + maxAttempts: 5, + checkIntervalMs: 100, + stallTimeMs: 1000, + }); + + const baselineTx = await baselineGasUtils.sendAndMonitorTransaction({ + to: EthAddress.ZERO.toString(), + data: SIMPLE_CONTRACT_BYTECODE, + }); + + // Get the transaction details to see the gas limit + const baselineDetails = await publicClient.getTransaction({ + hash: baselineTx.transactionHash, + }); + + // Now deploy with 20% buffer + const bufferedGasUtils = new L1TxUtils(publicClient, walletClient, logger, { + gasLimitBufferPercentage: 20n, + maxGwei: 500n, + minGwei: 1n, + maxAttempts: 3, + checkIntervalMs: 100, + stallTimeMs: 1000, + }); + + const bufferedTx = await bufferedGasUtils.sendAndMonitorTransaction({ + to: EthAddress.ZERO.toString(), + data: SIMPLE_CONTRACT_BYTECODE, + }); + + const bufferedDetails = await publicClient.getTransaction({ + hash: bufferedTx.transactionHash, + }); + + // The gas limit 
should be ~20% higher + expect(bufferedDetails.gas).toBeGreaterThan(baselineDetails.gas); + expect(bufferedDetails.gas).toBeLessThanOrEqual((baselineDetails.gas * 120n) / 100n); + }, 20_000); + + it('calculates correct gas prices for initial attempt', async () => { + // Set base fee to 1 gwei + await cheatCodes.setNextBlockBaseFeePerGas(WEI_CONST); + await cheatCodes.evmMine(); + + const basePriorityFee = await publicClient.estimateMaxPriorityFeePerGas(); + const gasPrice = await gasUtils['getGasPrice'](); + + // With default config, priority fee should be bumped by 20% + const expectedPriorityFee = (basePriorityFee * 120n) / 100n; + + // Base fee should be bumped for potential stalls (1.125^(stallTimeMs/12000) = ~1.125 for default config) + const expectedMaxFee = (WEI_CONST * 1125n) / 1000n + expectedPriorityFee; + + expect(gasPrice.maxPriorityFeePerGas).toBe(expectedPriorityFee); + expect(gasPrice.maxFeePerGas).toBe(expectedMaxFee); + }); + + it('calculates correct gas prices for retry attempts', async () => { + await cheatCodes.setNextBlockBaseFeePerGas(WEI_CONST); + await cheatCodes.evmMine(); + + const initialGasPrice = await gasUtils['getGasPrice'](); + + // Get retry gas price for 2nd attempt + const retryGasPrice = await gasUtils['getGasPrice'](undefined, 1, initialGasPrice); + + // With default config, retry should bump fees by 50% + const expectedPriorityFee = (initialGasPrice.maxPriorityFeePerGas * 150n) / 100n; + const expectedMaxFee = (initialGasPrice.maxFeePerGas * 150n) / 100n; + + expect(retryGasPrice.maxPriorityFeePerGas).toBe(expectedPriorityFee); + expect(retryGasPrice.maxFeePerGas).toBe(expectedMaxFee); + }); + + it('respects minimum gas price bump for replacements', async () => { + const gasUtils = new L1TxUtils(publicClient, walletClient, logger, { + ...defaultL1TxUtilsConfig, + priorityFeeRetryBumpPercentage: 5n, // Set lower than minimum 10% + }); + + const initialGasPrice = await gasUtils['getGasPrice'](); + + // Get retry gas price with attempt = 1 + const retryGasPrice = await gasUtils['getGasPrice'](undefined, 1, initialGasPrice); + + // Should use 10% minimum bump even though config specified 5% + const expectedPriorityFee = (initialGasPrice.maxPriorityFeePerGas * 110n) / 100n; + const expectedMaxFee = (initialGasPrice.maxFeePerGas * 110n) / 100n; + + expect(retryGasPrice.maxPriorityFeePerGas).toBe(expectedPriorityFee); + expect(retryGasPrice.maxFeePerGas).toBe(expectedMaxFee); + }); + + it('adds correct buffer to gas estimation', async () => { + const request = { + to: '0x1234567890123456789012345678901234567890' as `0x${string}`, + data: '0x' as `0x${string}`, + value: 0n, + }; + + const baseEstimate = await publicClient.estimateGas(request); + const bufferedEstimate = await gasUtils.estimateGas(walletClient.account!, request); + + // adds 20% buffer + const expectedEstimate = baseEstimate + (baseEstimate * 20n) / 100n; + expect(bufferedEstimate).toBe(expectedEstimate); + }); +}); diff --git a/yarn-project/ethereum/src/l1_tx_utils.ts b/yarn-project/ethereum/src/l1_tx_utils.ts new file mode 100644 index 00000000000..f95610303b7 --- /dev/null +++ b/yarn-project/ethereum/src/l1_tx_utils.ts @@ -0,0 +1,400 @@ +import { + type ConfigMappingsType, + bigintConfigHelper, + getDefaultConfig, + numberConfigHelper, +} from '@aztec/foundation/config'; +import { type DebugLogger } from '@aztec/foundation/log'; +import { makeBackoff, retry } from '@aztec/foundation/retry'; +import { sleep } from '@aztec/foundation/sleep'; + +import { + type Account, + type Address, + type 
Chain, + type GetTransactionReturnType, + type Hex, + type HttpTransport, + type PublicClient, + type TransactionReceipt, + type WalletClient, + formatGwei, +} from 'viem'; + +// 1_000_000_000 Gwei = 1 ETH +// 1_000_000_000 Wei = 1 Gwei +// 1_000_000_000_000_000_000 Wei = 1 ETH + +const WEI_CONST = 1_000_000_000n; + +// setting a minimum bump percentage to 10% due to geth's implementation +// https://github.com/ethereum/go-ethereum/blob/e3d61e6db028c412f74bc4d4c7e117a9e29d0de0/core/txpool/legacypool/list.go#L298 +const MIN_REPLACEMENT_BUMP_PERCENTAGE = 10n; + +// Avg ethereum block time is ~12s +const BLOCK_TIME_MS = 12_000; + +export interface L1TxUtilsConfig { + /** + * How much to increase calculated gas limit. + */ + gasLimitBufferPercentage?: bigint; + /** + * Maximum gas price in gwei + */ + maxGwei?: bigint; + /** + * Minimum gas price in gwei + */ + minGwei?: bigint; + /** + * Priority fee bump percentage + */ + priorityFeeBumpPercentage?: bigint; + /** + * How much to increase priority fee by each attempt (percentage) + */ + priorityFeeRetryBumpPercentage?: bigint; + /** + * Maximum number of speed-up attempts + */ + maxAttempts?: number; + /** + * How often to check tx status + */ + checkIntervalMs?: number; + /** + * How long before considering tx stalled + */ + stallTimeMs?: number; + /** + * How long to wait for a tx to be mined before giving up + */ + txTimeoutMs?: number; +} + +export const l1TxUtilsConfigMappings: ConfigMappingsType = { + gasLimitBufferPercentage: { + description: 'How much to increase gas price by each attempt (percentage)', + env: 'L1_GAS_LIMIT_BUFFER_PERCENTAGE', + ...bigintConfigHelper(20n), + }, + minGwei: { + description: 'Minimum gas price in gwei', + env: 'L1_GAS_PRICE_MIN', + ...bigintConfigHelper(1n), + }, + maxGwei: { + description: 'Maximum gas price in gwei', + env: 'L1_GAS_PRICE_MAX', + ...bigintConfigHelper(100n), + }, + priorityFeeBumpPercentage: { + description: 'How much to increase priority fee by each attempt (percentage)', + env: 'L1_PRIORITY_FEE_BUMP_PERCENTAGE', + ...bigintConfigHelper(20n), + }, + priorityFeeRetryBumpPercentage: { + description: 'How much to increase priority fee by each retry attempt (percentage)', + env: 'L1_PRIORITY_FEE_RETRY_BUMP_PERCENTAGE', + ...bigintConfigHelper(50n), + }, + maxAttempts: { + description: 'Maximum number of speed-up attempts', + env: 'L1_TX_MONITOR_MAX_ATTEMPTS', + ...numberConfigHelper(3), + }, + checkIntervalMs: { + description: 'How often to check tx status', + env: 'L1_TX_MONITOR_CHECK_INTERVAL_MS', + ...numberConfigHelper(10_000), + }, + stallTimeMs: { + description: 'How long before considering tx stalled', + env: 'L1_TX_MONITOR_STALL_TIME_MS', + ...numberConfigHelper(30_000), + }, + txTimeoutMs: { + description: 'How long to wait for a tx to be mined before giving up. 
Set to 0 to disable.', + env: 'L1_TX_MONITOR_TX_TIMEOUT_MS', + ...numberConfigHelper(300_000), // 5 mins + }, +}; + +export const defaultL1TxUtilsConfig = getDefaultConfig(l1TxUtilsConfigMappings); + +export interface L1TxRequest { + to: Address | null; + data: Hex; + value?: bigint; +} + +interface GasPrice { + maxFeePerGas: bigint; + maxPriorityFeePerGas: bigint; +} + +export class L1TxUtils { + private readonly config: L1TxUtilsConfig; + + constructor( + private readonly publicClient: PublicClient, + private readonly walletClient: WalletClient, + private readonly logger?: DebugLogger, + config?: Partial, + ) { + this.config = { + ...defaultL1TxUtilsConfig, + ...(config || {}), + }; + } + + /** + * Sends a transaction with gas estimation and pricing + * @param request - The transaction request (to, data, value) + * @param gasConfig - Optional gas configuration + * @returns The transaction hash and parameters used + */ + public async sendTransaction( + request: L1TxRequest, + _gasConfig?: Partial & { fixedGas?: bigint }, + ): Promise<{ txHash: Hex; gasLimit: bigint; gasPrice: GasPrice }> { + const gasConfig = { ...this.config, ..._gasConfig }; + const account = this.walletClient.account; + let gasLimit: bigint; + + if (gasConfig.fixedGas) { + gasLimit = gasConfig.fixedGas; + } else { + gasLimit = await this.estimateGas(account, request); + } + + const gasPrice = await this.getGasPrice(gasConfig); + + const txHash = await this.walletClient.sendTransaction({ + ...request, + gas: gasLimit, + maxFeePerGas: gasPrice.maxFeePerGas, + maxPriorityFeePerGas: gasPrice.maxPriorityFeePerGas, + }); + + this.logger?.verbose( + `Sent L1 transaction ${txHash} with gas limit ${gasLimit} and price ${formatGwei(gasPrice.maxFeePerGas)} gwei`, + ); + + return { txHash, gasLimit, gasPrice }; + } + + /** + * Monitors a transaction until completion, handling speed-ups if needed + * @param request - Original transaction request (needed for speed-ups) + * @param initialTxHash - Hash of the initial transaction + * @param params - Parameters used in the initial transaction + * @param gasConfig - Optional gas configuration + */ + public async monitorTransaction( + request: L1TxRequest, + initialTxHash: Hex, + params: { gasLimit: bigint }, + _gasConfig?: Partial, + ): Promise { + const gasConfig = { ...this.config, ..._gasConfig }; + const account = this.walletClient.account; + + // Retry a few times, in case the tx is not yet propagated. 
+ const tx = await retry( + () => this.publicClient.getTransaction({ hash: initialTxHash }), + `Getting L1 transaction ${initialTxHash}`, + makeBackoff([1, 2, 3]), + this.logger, + true, + ); + + if (tx?.nonce === undefined || tx?.nonce === null) { + throw new Error(`Failed to get L1 transaction ${initialTxHash} nonce`); + } + const nonce = tx.nonce; + + const txHashes = new Set([initialTxHash]); + let currentTxHash = initialTxHash; + let attempts = 0; + let lastAttemptSent = Date.now(); + const initialTxTime = lastAttemptSent; + let txTimedOut = false; + + while (!txTimedOut) { + try { + const currentNonce = await this.publicClient.getTransactionCount({ address: account.address }); + if (currentNonce > nonce) { + for (const hash of txHashes) { + try { + const receipt = await this.publicClient.getTransactionReceipt({ hash }); + if (receipt) { + this.logger?.debug(`L1 Transaction ${hash} confirmed`); + if (receipt.status === 'reverted') { + this.logger?.error(`L1 Transaction ${hash} reverted`); + } + return receipt; + } + } catch (err) { + if (err instanceof Error && err.message.includes('reverted')) { + throw err; + } + } + } + } + + // Retry a few times, in case the tx is not yet propagated. + const tx = await retry( + () => this.publicClient.getTransaction({ hash: currentTxHash }), + `Getting L1 transaction ${currentTxHash}`, + makeBackoff([1, 2, 3]), + this.logger, + true, + ); + const timePassed = Date.now() - lastAttemptSent; + + if (tx && timePassed < gasConfig.stallTimeMs!) { + this.logger?.debug(`L1 Transaction ${currentTxHash} pending. Time passed: ${timePassed}ms`); + + // Check timeout before continuing + if (gasConfig.txTimeoutMs) { + txTimedOut = Date.now() - initialTxTime > gasConfig.txTimeoutMs; + if (txTimedOut) { + break; + } + } + + await sleep(gasConfig.checkIntervalMs!); + continue; + } + + if (timePassed > gasConfig.stallTimeMs! && attempts < gasConfig.maxAttempts!) { + attempts++; + const newGasPrice = await this.getGasPrice( + gasConfig, + attempts, + tx.maxFeePerGas && tx.maxPriorityFeePerGas + ? { maxFeePerGas: tx.maxFeePerGas, maxPriorityFeePerGas: tx.maxPriorityFeePerGas } + : undefined, + ); + + this.logger?.debug( + `L1 Transaction ${currentTxHash} appears stuck. Attempting speed-up ${attempts}/${gasConfig.maxAttempts} ` + + `with new priority fee ${formatGwei(newGasPrice.maxPriorityFeePerGas)} gwei`, + ); + + currentTxHash = await this.walletClient.sendTransaction({ + ...request, + nonce, + gas: params.gasLimit, + maxFeePerGas: newGasPrice.maxFeePerGas, + maxPriorityFeePerGas: newGasPrice.maxPriorityFeePerGas, + }); + + txHashes.add(currentTxHash); + lastAttemptSent = Date.now(); + } + await sleep(gasConfig.checkIntervalMs!); + } catch (err: any) { + this.logger?.warn(`Error monitoring tx ${currentTxHash}:`, err); + if (err.message?.includes('reverted')) { + throw err; + } + await sleep(gasConfig.checkIntervalMs!); + } + // Check if tx has timed out. 
+ if (gasConfig.txTimeoutMs) { + txTimedOut = Date.now() - initialTxTime > gasConfig.txTimeoutMs!; + } + } + throw new Error(`L1 Transaction ${currentTxHash} timed out`); + } + + /** + * Sends a transaction and monitors it until completion + * @param request - The transaction request (to, data, value) + * @param gasConfig - Optional gas configuration + * @returns The receipt of the successful transaction + */ + public async sendAndMonitorTransaction( + request: L1TxRequest, + gasConfig?: Partial & { fixedGas?: bigint }, + ): Promise { + const { txHash, gasLimit } = await this.sendTransaction(request, gasConfig); + return this.monitorTransaction(request, txHash, { gasLimit }, gasConfig); + } + + /** + * Gets the current gas price with bounds checking + */ + private async getGasPrice( + _gasConfig?: L1TxUtilsConfig, + attempt: number = 0, + previousGasPrice?: typeof attempt extends 0 ? never : GasPrice, + ): Promise { + const gasConfig = { ...this.config, ..._gasConfig }; + const block = await this.publicClient.getBlock({ blockTag: 'latest' }); + const baseFee = block.baseFeePerGas ?? 0n; + + // Get initial priority fee from the network + let priorityFee = await this.publicClient.estimateMaxPriorityFeePerGas(); + let maxFeePerGas = baseFee; + + // Bump base fee so it's valid for next blocks if it stalls + const numBlocks = Math.ceil(gasConfig.stallTimeMs! / BLOCK_TIME_MS); + for (let i = 0; i < numBlocks; i++) { + // each block can go up 12.5% from previous baseFee + maxFeePerGas = (maxFeePerGas * (1_000n + 125n)) / 1_000n; + } + + if (attempt > 0) { + const configBump = + gasConfig.priorityFeeRetryBumpPercentage ?? defaultL1TxUtilsConfig.priorityFeeRetryBumpPercentage!; + const bumpPercentage = + configBump > MIN_REPLACEMENT_BUMP_PERCENTAGE ? configBump : MIN_REPLACEMENT_BUMP_PERCENTAGE; + + // Calculate minimum required fees based on previous attempt + const minPriorityFee = (previousGasPrice!.maxPriorityFeePerGas * (100n + bumpPercentage)) / 100n; + const minMaxFee = (previousGasPrice!.maxFeePerGas * (100n + bumpPercentage)) / 100n; + + // Add priority fee to maxFeePerGas + maxFeePerGas += priorityFee; + + // Use maximum between current network values and minimum required values + priorityFee = priorityFee > minPriorityFee ? priorityFee : minPriorityFee; + maxFeePerGas = maxFeePerGas > minMaxFee ? maxFeePerGas : minMaxFee; + } else { + // first attempt, just bump priority fee + priorityFee = (priorityFee * (100n + (gasConfig.priorityFeeBumpPercentage || 0n))) / 100n; + maxFeePerGas += priorityFee; + } + + // Ensure we don't exceed maxGwei + const maxGweiInWei = gasConfig.maxGwei! * WEI_CONST; + maxFeePerGas = maxFeePerGas > maxGweiInWei ? maxGweiInWei : maxFeePerGas; + + // Ensure priority fee doesn't exceed max fee + const maxPriorityFeePerGas = priorityFee > maxFeePerGas ? 
maxFeePerGas : priorityFee; + + this.logger?.debug( + `Gas price calculation (attempt ${attempt}): baseFee=${formatGwei(baseFee)}, ` + + `maxPriorityFee=${formatGwei(maxPriorityFeePerGas)}, maxFee=${formatGwei(maxFeePerGas)}`, + ); + + return { maxFeePerGas, maxPriorityFeePerGas }; + } + + /** + * Estimates gas and adds buffer + */ + public async estimateGas(account: Account, request: L1TxRequest, _gasConfig?: L1TxUtilsConfig): Promise { + const gasConfig = { ...this.config, ..._gasConfig }; + const initialEstimate = await this.publicClient.estimateGas({ account, ...request }); + + // Add buffer based on either fixed amount or percentage + const withBuffer = initialEstimate + (initialEstimate * (gasConfig.gasLimitBufferPercentage ?? 0n)) / 100n; + + return withBuffer; + } +} diff --git a/yarn-project/ethereum/src/test/tx_delayer.test.ts b/yarn-project/ethereum/src/test/tx_delayer.test.ts index f85bcd453cf..1fc1435e80c 100644 --- a/yarn-project/ethereum/src/test/tx_delayer.test.ts +++ b/yarn-project/ethereum/src/test/tx_delayer.test.ts @@ -72,7 +72,11 @@ describe('tx_delayer', () => { }, 20000); it('delays a tx sent through a contract', async () => { - const deployTxHash = await client.deployContract({ abi: TestERC20Abi, bytecode: TestERC20Bytecode, args: [] }); + const deployTxHash = await client.deployContract({ + abi: TestERC20Abi, + bytecode: TestERC20Bytecode, + args: ['test', 'TST', account.address], + }); const { contractAddress, blockNumber } = await client.waitForTransactionReceipt({ hash: deployTxHash, pollingInterval: 100, diff --git a/yarn-project/ethereum/src/utils.ts b/yarn-project/ethereum/src/utils.ts index 9b8f7837e98..2e66f6119fe 100644 --- a/yarn-project/ethereum/src/utils.ts +++ b/yarn-project/ethereum/src/utils.ts @@ -49,7 +49,7 @@ function tryExtractEvent< logger?: DebugLogger, ): TEventType | undefined { for (const log of logs) { - if (log.address === address) { + if (log.address.toLowerCase() === address.toLowerCase()) { try { const decodedEvent = decodeEventLog({ abi, ...log }); if (decodedEvent.eventName === eventName) { diff --git a/yarn-project/foundation/package.json b/yarn-project/foundation/package.json index cdaaafa04e9..ae009fa8304 100644 --- a/yarn-project/foundation/package.json +++ b/yarn-project/foundation/package.json @@ -103,6 +103,7 @@ "@koa/cors": "^5.0.0", "@noble/curves": "^1.2.0", "bn.js": "^5.2.1", + "colorette": "^2.0.20", "debug": "^4.3.4", "detect-node": "^2.1.0", "elliptic": "^6.5.4", @@ -117,6 +118,8 @@ "lodash.clonedeepwith": "^4.5.0", "memdown": "^6.1.1", "pako": "^2.1.0", + "pino": "^9.5.0", + "pino-pretty": "^13.0.0", "sha3": "^2.1.4", "zod": "^3.23.8" }, diff --git a/yarn-project/foundation/src/config/env_var.ts b/yarn-project/foundation/src/config/env_var.ts index 44ffca4981f..b12e7c3ea78 100644 --- a/yarn-project/foundation/src/config/env_var.ts +++ b/yarn-project/foundation/src/config/env_var.ts @@ -145,6 +145,7 @@ export type EnvVar = | 'SEQ_REQUIRED_CONFIRMATIONS' | 'SEQ_TX_POLLING_INTERVAL_MS' | 'SEQ_ENFORCE_TIME_TABLE' + | 'STAKING_ASSET_CONTRACT_ADDRESS' | 'REWARD_DISTRIBUTOR_CONTRACT_ADDRESS' | 'TELEMETRY' | 'TEST_ACCOUNTS' @@ -170,4 +171,14 @@ export type EnvVar = | 'AZTEC_SLOT_DURATION' | 'AZTEC_EPOCH_DURATION' | 'AZTEC_TARGET_COMMITTEE_SIZE' - | 'AZTEC_EPOCH_PROOF_CLAIM_WINDOW_IN_L2_SLOTS'; + | 'AZTEC_EPOCH_PROOF_CLAIM_WINDOW_IN_L2_SLOTS' + | 'L1_GAS_LIMIT_BUFFER_PERCENTAGE' + | 'L1_GAS_LIMIT_BUFFER_FIXED' + | 'L1_GAS_PRICE_MIN' + | 'L1_GAS_PRICE_MAX' + | 'L1_PRIORITY_FEE_BUMP_PERCENTAGE' + | 
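The pricing logic above combines two rules worth seeing in isolation: the base fee is projected forward by the EIP-1559 per-block cap of 12.5% for each block the transaction may sit stalled, and any replacement must exceed the previous attempt's fees by a minimum bump or nodes drop it as underpriced. A standalone sketch of that arithmetic; the constant values here (12s blocks, 10% minimum bump) are assumptions for illustration, the real ones live alongside the class above:

```ts
// Illustrative stand-ins for the constants referenced above.
const BLOCK_TIME_MS = 12_000; // assumed L1 slot time
const MIN_REPLACEMENT_BUMP_PERCENTAGE = 10n; // assumed node replacement policy

// Worst case, the base fee rises 12.5% per block (EIP-1559), so compound it
// over every block that can elapse during the stall window.
function projectMaxBaseFee(baseFee: bigint, stallTimeMs: number): bigint {
  const numBlocks = Math.ceil(stallTimeMs / BLOCK_TIME_MS);
  let maxFee = baseFee;
  for (let i = 0; i < numBlocks; i++) {
    maxFee = (maxFee * 1_125n) / 1_000n;
  }
  return maxFee;
}

// A replacement tx must out-bid the previous attempt by at least the larger
// of the configured bump and the protocol minimum.
function minReplacementFee(previousFee: bigint, configBumpPct: bigint): bigint {
  const bump = configBumpPct > MIN_REPLACEMENT_BUMP_PERCENTAGE ? configBumpPct : MIN_REPLACEMENT_BUMP_PERCENTAGE;
  return (previousFee * (100n + bump)) / 100n;
}

// Worked example: a 24s stall window spans 2 blocks, so a 10 gwei base fee
// can grow to 10 * 1.125^2, roughly 12.65 gwei; a 15% retry bump lifts a
// 2 gwei priority fee to 2.3 gwei.
```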
'L1_PRIORITY_FEE_RETRY_BUMP_PERCENTAGE' + | 'L1_TX_MONITOR_MAX_ATTEMPTS' + | 'L1_TX_MONITOR_CHECK_INTERVAL_MS' + | 'L1_TX_MONITOR_STALL_TIME_MS' + | 'L1_TX_MONITOR_TX_TIMEOUT_MS'; diff --git a/yarn-project/foundation/src/crypto/random/randomness_singleton.ts b/yarn-project/foundation/src/crypto/random/randomness_singleton.ts index f226874a921..a848f85a606 100644 --- a/yarn-project/foundation/src/crypto/random/randomness_singleton.ts +++ b/yarn-project/foundation/src/crypto/random/randomness_singleton.ts @@ -1,4 +1,4 @@ -import { createDebugLogger } from '../../log/logger.js'; +import { createDebugLogger } from '../../log/pino-logger.js'; /** * A number generator which is used as a source of randomness in the system. If the SEED env variable is set, the diff --git a/yarn-project/foundation/src/json-rpc/client/safe_json_rpc_client.ts b/yarn-project/foundation/src/json-rpc/client/safe_json_rpc_client.ts index 3c2bf3c32ba..2de143063b6 100644 --- a/yarn-project/foundation/src/json-rpc/client/safe_json_rpc_client.ts +++ b/yarn-project/foundation/src/json-rpc/client/safe_json_rpc_client.ts @@ -1,6 +1,6 @@ import { format } from 'util'; -import { createDebugLogger } from '../../log/logger.js'; +import { createDebugLogger } from '../../log/pino-logger.js'; import { type ApiSchema, type ApiSchemaFor, schemaHasMethod } from '../../schemas/api.js'; import { defaultFetch } from './fetch.js'; diff --git a/yarn-project/foundation/src/log/index.ts b/yarn-project/foundation/src/log/index.ts index 2bf44ed88ba..8e61bc81782 100644 --- a/yarn-project/foundation/src/log/index.ts +++ b/yarn-project/foundation/src/log/index.ts @@ -1,5 +1,5 @@ export * from './console.js'; export * from './debug.js'; -export * from './logger.js'; +export * from './pino-logger.js'; export * from './log_history.js'; export * from './log_fn.js'; diff --git a/yarn-project/foundation/src/log/log-filters.test.ts b/yarn-project/foundation/src/log/log-filters.test.ts new file mode 100644 index 00000000000..11cabca8ed0 --- /dev/null +++ b/yarn-project/foundation/src/log/log-filters.test.ts @@ -0,0 +1,50 @@ +import { parseEnv } from './log-filters.js'; + +describe('parseEnv', () => { + const defaultLevel = 'info'; + + it('returns default level and empty filters when env is empty', () => { + const env = ''; + const [level, filters] = parseEnv(env, defaultLevel); + expect(level).toBe(defaultLevel); + expect(filters).toEqual([]); + }); + + it('parses level and filters from env string', () => { + const env = 'debug;warn:module1,module2;error:module3'; + const [level, filters] = parseEnv(env, defaultLevel); + expect(level).toBe('debug'); + expect(filters).toEqual([ + ['module3', 'error'], + ['module2', 'warn'], + ['module1', 'warn'], + ]); + }); + + it('handles spaces in env string', () => { + const env = 'debug; warn: module1, module2; error: module3'; + const [level, filters] = parseEnv(env, defaultLevel); + expect(level).toBe('debug'); + expect(filters).toEqual([ + ['module3', 'error'], + ['module2', 'warn'], + ['module1', 'warn'], + ]); + }); + + it('throws an error for invalid default log level', () => { + const env = 'invalid;module1:warn'; + expect(() => parseEnv(env, defaultLevel)).toThrow('Invalid log level: invalid'); + }); + + it('throws an error for invalid log level in filter', () => { + const env = 'invalid;warn:module'; + expect(() => parseEnv(env, defaultLevel)).toThrow('Invalid log level: invalid'); + }); + + it('throws an error for invalid log filter statement', () => { + const defaultLevel = 'info'; + const env = 
'debug;warn:module1;error:'; + expect(() => parseEnv(env, defaultLevel)).toThrow('Invalid log filter statement: error'); + }); +}); diff --git a/yarn-project/foundation/src/log/log-filters.ts b/yarn-project/foundation/src/log/log-filters.ts new file mode 100644 index 00000000000..808818c3fd5 --- /dev/null +++ b/yarn-project/foundation/src/log/log-filters.ts @@ -0,0 +1,49 @@ +import { type LogLevel, LogLevels } from './log-levels.js'; + +export type LogFilters = [string, LogLevel][]; + +export function getLogLevelFromFilters(filters: LogFilters, module: string): LogLevel | undefined { + for (const [filterModule, level] of filters) { + if (module.startsWith(filterModule)) { + return level as LogLevel; + } + } + return undefined; +} + +export function assertLogLevel(level: string): asserts level is LogLevel { + if (!LogLevels.includes(level as LogLevel)) { + throw new Error(`Invalid log level: ${level}`); + } +} + +export function parseEnv(env: string | undefined, defaultLevel: LogLevel): [LogLevel, LogFilters] { + if (!env) { + return [defaultLevel, []]; + } + const [level] = env.split(';', 1); + assertLogLevel(level); + return [level, parseFilters(env.slice(level.length + 1))]; +} + +export function parseFilters(definition: string | undefined): LogFilters { + if (!definition) { + return []; + } + + const statements = definition.split(';'); + const filters: LogFilters = []; + for (const statement of statements) { + const [level] = statement.split(':', 1); + const modules = statement.slice(level.length + 1); + if (!modules || !level) { + throw new Error(`Invalid log filter statement: ${statement}`); + } + const sanitizedLevel = level.trim().toLowerCase(); + assertLogLevel(sanitizedLevel); + for (const module of modules.split(',')) { + filters.push([module.trim().toLowerCase(), sanitizedLevel as LogLevel | 'silent']); + } + } + return filters.reverse(); +} diff --git a/yarn-project/foundation/src/log/log-levels.ts b/yarn-project/foundation/src/log/log-levels.ts new file mode 100644 index 00000000000..d2a630de9f3 --- /dev/null +++ b/yarn-project/foundation/src/log/log-levels.ts @@ -0,0 +1,3 @@ +export const LogLevels = ['silent', 'fatal', 'error', 'warn', 'info', 'verbose', 'debug', 'trace'] as const; + +export type LogLevel = (typeof LogLevels)[number]; diff --git a/yarn-project/foundation/src/log/logger.ts b/yarn-project/foundation/src/log/logger.ts deleted file mode 100644 index 2f5954f6eb0..00000000000 --- a/yarn-project/foundation/src/log/logger.ts +++ /dev/null @@ -1,179 +0,0 @@ -import debug from 'debug'; -import { inspect } from 'util'; - -import { type LogData, type LogFn } from './log_fn.js'; - -const LogLevels = ['silent', 'error', 'warn', 'info', 'verbose', 'debug'] as const; - -/** - * A valid log severity level. - */ -export type LogLevel = (typeof LogLevels)[number]; - -function getLogLevel() { - const envLogLevel = process.env.LOG_LEVEL?.toLowerCase() as LogLevel; - let defaultLogLevel: LogLevel = 'info'; - if (process.env.DEBUG) { - // if we set DEBUG to a non-empty string, use debug as default - defaultLogLevel = 'debug'; - } else if (process.env.NODE_ENV === 'test') { - // otherwise, be silent in tests as these are frequently ran en-masse - defaultLogLevel = 'silent'; - } - return LogLevels.includes(envLogLevel) ? envLogLevel : defaultLogLevel; -} - -export let currentLevel = getLogLevel(); - -const logElapsedTime = ['1', 'true'].includes(process.env.LOG_ELAPSED_TIME ?? 
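For reference while reading the parser above: LOG_LEVEL starts with a default level, optionally followed by semicolon-separated `level: module[, module]` statements. Filters match by module-name prefix, and the parsed list is reversed so statements appearing later in the string win on overlap. A small usage sketch; the module names are made up:

```ts
import { getLogLevelFromFilters, parseEnv } from './log-filters.js';

const [level, filters] = parseEnv('info; debug: p2p, prover; silent: p2p:gossip', 'info');

console.log(level); // 'info' (the default for unmatched modules)
// 'p2p:gossip' was declared later, so it beats the broader 'p2p' prefix:
console.log(getLogLevelFromFilters(filters, 'p2p:gossip:topic')); // 'silent'
console.log(getLogLevelFromFilters(filters, 'prover:agent')); // 'debug'
console.log(getLogLevelFromFilters(filters, 'sequencer')); // undefined, falls back to the default
```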
''); -const firstTimestamp: number = Date.now(); - -function filterNegativePatterns(debugString: string): string { - return debugString - .split(',') - .filter(p => !p.startsWith('-')) - .join(','); -} -function extractNegativePatterns(debugString: string): string[] { - return ( - debugString - .split(',') - .filter(p => p.startsWith('-')) - // Remove the leading '-' from the pattern - .map(p => p.slice(1)) - ); -} - -const namespaces = process.env.DEBUG ?? 'aztec:*'; -debug.enable(filterNegativePatterns(namespaces)); - -/** Log function that accepts an exception object */ -type ErrorLogFn = (msg: string, err?: Error | unknown, data?: LogData) => void; - -/** - * Logger that supports multiple severity levels. - */ -export type Logger = { [K in LogLevel]: LogFn } & { /** Error log function */ error: ErrorLogFn }; - -/** - * Logger that supports multiple severity levels and can be called directly to issue a debug statement. - * Intended as a drop-in replacement for the debug module. - */ -export type DebugLogger = Logger; - -/** - * Creates a new DebugLogger for the current module, defaulting to the LOG_LEVEL env var. - * If DEBUG="[module]" env is set, will enable debug logging if the module matches. - * Uses npm debug for debug level and console.error for other levels. - * @param name - Name of the module. - * @param fixedLogData - Additional data to include in the log message. - * @usage createDebugLogger('aztec:validator'); - * // will always add the validator address to the log labels - * @returns A debug logger. - */ - -export function createDebugLogger(name: string): DebugLogger { - const debugLogger = debug(name); - - const negativePatterns = extractNegativePatterns(namespaces); - const accepted = () => { - return !negativePatterns.some(pattern => name.match(pattern)); - }; - const log = (level: LogLevel, msg: string, data?: LogData) => { - if (accepted()) { - logWithDebug(debugLogger, level, msg, data); - } - }; - const logger = { - silent: () => {}, - error: (msg: string, err?: unknown, data?: LogData) => log('error', fmtErr(msg, err), data), - warn: (msg: string, data?: LogData) => log('warn', msg, data), - info: (msg: string, data?: LogData) => log('info', msg, data), - verbose: (msg: string, data?: LogData) => log('verbose', msg, data), - debug: (msg: string, data?: LogData) => log('debug', msg, data), - }; - return Object.assign((msg: string, data?: LogData) => log('debug', msg, data), logger); -} - -/** - * A function to create a logger that automatically includes fixed data in each log entry. - * @param debugLogger - The base DebugLogger instance to which we attach fixed log data. - * @param fixedLogData - The data to be included in every log entry. - * @returns A DebugLogger with log level methods (error, warn, info, verbose, debug) that - * automatically attach `fixedLogData` to every log message. - */ -export function attachedFixedDataToLogger(debugLogger: DebugLogger, fixedLogData: LogData): DebugLogger { - // Helper function to merge fixed data with additional data passed to log entries. - const attach = (data?: LogData) => ({ ...fixedLogData, ...data }); - // Define the logger with all the necessary log level methods. - const logger = { - // Silent log level does nothing. 
- silent: () => {}, - error: (msg: string, err?: unknown, data?: LogData) => debugLogger.error(fmtErr(msg, err), attach(data)), - warn: (msg: string, data?: LogData) => debugLogger.warn(msg, attach(data)), - info: (msg: string, data?: LogData) => debugLogger.info(msg, attach(data)), - verbose: (msg: string, data?: LogData) => debugLogger.verbose(msg, attach(data)), - debug: (msg: string, data?: LogData) => debugLogger.debug(msg, attach(data)), - }; - return Object.assign((msg: string, data?: LogData) => debugLogger.debug(msg, attach(data)), logger); -} - -/** A callback to capture all logs. */ -export type LogHandler = (level: LogLevel, namespace: string, msg: string, data?: LogData) => void; - -const logHandlers: LogHandler[] = []; - -/** - * Registers a callback for all logs, whether they are emitted in the current log level or not. - * @param handler - Callback to be called on every log. - */ -export function onLog(handler: LogHandler) { - logHandlers.push(handler); -} - -/** Overrides current log level. */ -export function setLevel(level: LogLevel) { - currentLevel = level; -} - -/** - * Logs args to npm debug if enabled or log level is debug, console.error otherwise. - * @param debug - Instance of npm debug. - * @param level - Intended log level. - * @param args - Args to log. - */ -function logWithDebug(debug: debug.Debugger, level: LogLevel, msg: string, data?: LogData) { - for (const handler of logHandlers) { - handler(level, debug.namespace, msg, data); - } - - msg = data ? `${msg} ${fmtLogData(data)}` : msg; - if (debug.enabled && LogLevels.indexOf(level) <= LogLevels.indexOf(currentLevel)) { - if (logElapsedTime) { - const ts = ((Date.now() - firstTimestamp) / 1000).toFixed(3); - debug('%ss [%s] %s', ts, level.toUpperCase(), msg); - } else { - debug('[%s] %s', level.toUpperCase(), msg); - } - } -} - -/** - * Concatenates a log message and an exception. - * @param msg - Log message - * @param err - Error to log - * @returns A string with both the log message and the error message. - */ -function fmtErr(msg: string, err?: Error | unknown): string { - return err ? `${msg}: ${inspect(err)}` : msg; -} - -/** - * Formats structured log data as a string for console output. - * @param data - Optional log data. - */ -export function fmtLogData(data?: LogData): string { - return Object.entries(data ?? {}) - .map(([key, value]) => `${key}=${typeof value === 'object' && 'toString' in value ? value.toString() : value}`) - .join(' '); -} diff --git a/yarn-project/foundation/src/log/pino-logger.ts b/yarn-project/foundation/src/log/pino-logger.ts new file mode 100644 index 00000000000..a4ed349522b --- /dev/null +++ b/yarn-project/foundation/src/log/pino-logger.ts @@ -0,0 +1,165 @@ +import { createColors } from 'colorette'; +import isNode from 'detect-node'; +import { type LoggerOptions, pino } from 'pino'; +import { inspect } from 'util'; + +import { compactArray } from '../collection/array.js'; +import { getLogLevelFromFilters, parseEnv } from './log-filters.js'; +import { type LogLevel } from './log-levels.js'; +import { type LogData, type LogFn } from './log_fn.js'; + +// TODO(palla/log): Rename to createLogger +export function createDebugLogger(module: string): DebugLogger { + // TODO(palla/log): Rename all module names to remove the aztec prefix + const pinoLogger = logger.child( + { module: module.replace(/^aztec:/, '') }, + { level: getLogLevelFromFilters(logFilters, module) }, + ); + + // We check manually for isLevelEnabled to avoid calling processLogData unnecessarily. 
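A quick usage note for the factory above, under an illustrative LOG_LEVEL: child loggers share the global pino instance but get a per-module level from the filters, which match against the full module name; only the displayed tag has the legacy `aztec:` prefix stripped. A minimal sketch:

```ts
import { createDebugLogger } from '@aztec/foundation/log';

// Assuming LOG_LEVEL='info; debug: aztec:sequencer' in the environment
// (illustrative), this logger runs at debug while the rest stays at info.
const log = createDebugLogger('aztec:sequencer');

log.debug('emitted thanks to the per-module filter');
log.verbose(`effective level: ${log.level}`);
```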
+ // Note that isLevelEnabled is missing from the browser version of pino. + const logFn = (level: LogLevel, msg: string, data?: LogData) => + isLevelEnabled(pinoLogger, level) && pinoLogger[level](processLogData(data ?? {}), msg); + + return { + silent: () => {}, + // TODO(palla/log): Should we move err to data instead of the text message? + /** Log as fatal. Use when an error has brought down the system. */ + fatal: (msg: string, err?: unknown, data?: LogData) => logFn('fatal', formatErr(msg, err), data), + /** Log as error. Use for errors in general. */ + error: (msg: string, err?: unknown, data?: LogData) => logFn('error', formatErr(msg, err), data), + /** Log as warn. Use for when we stray from the happy path. */ + warn: (msg: string, data?: LogData) => logFn('warn', msg, data), + /** Log as info. Use for providing an operator with info on what the system is doing. */ + info: (msg: string, data?: LogData) => logFn('info', msg, data), + /** Log as verbose. Use for when we need additional insight on what a subsystem is doing. */ + verbose: (msg: string, data?: LogData) => logFn('verbose', msg, data), + /** Log as debug. Use for when we need debugging info to troubleshoot an issue on a specific component. */ + debug: (msg: string, data?: LogData) => logFn('debug', msg, data), + /** Log as trace. Use for when we want to denial-of-service any recipient of the logs. */ + trace: (msg: string, data?: LogData) => logFn('trace', msg, data), + level: pinoLogger.level as LogLevel, + isLevelEnabled: (level: LogLevel) => isLevelEnabled(pinoLogger, level), + }; +} + +// Allow global hooks for processing log data. +// Used for injecting OTEL trace_id in telemetry client. +type LogDataHandler = (data: LogData) => LogData; +const logDataHandlers: LogDataHandler[] = []; + +export function addLogDataHandler(handler: LogDataHandler): void { + logDataHandlers.push(handler); +} + +function processLogData(data: LogData): LogData { + return logDataHandlers.reduce((accum, handler) => handler(accum), data); +} + +// Patch isLevelEnabled missing from pino/browser. +function isLevelEnabled(logger: pino.Logger<'verbose', boolean>, level: LogLevel): boolean { + return typeof logger.isLevelEnabled === 'function' + ? logger.isLevelEnabled(level) + : logger.levels.values[level] >= logger.levels.values[logger.level]; +} + +// Load log levels from environment variables. +const defaultLogLevel = process.env.NODE_ENV === 'test' ? 'silent' : 'info'; +const [logLevel, logFilters] = parseEnv(process.env.LOG_LEVEL, defaultLogLevel); + +// Transport options for pretty logging to stdout via pino-pretty. +const useColor = true; +const { bold, reset } = createColors({ useColor }); +const prettyTransport: LoggerOptions['transport'] = { + target: 'pino-pretty', + options: { + destination: 2, + sync: true, + colorize: useColor, + ignore: 'module,pid,hostname,trace_id,span_id,trace_flags', + messageFormat: `${bold('{module}')} ${reset('{msg}')}`, + customLevels: 'fatal:60,error:50,warn:40,info:30,verbose:25,debug:20,trace:10', + customColors: 'fatal:bgRed,error:red,warn:yellow,info:green,verbose:magenta,debug:blue,trace:gray', + }, +}; + +// Transport for vanilla stdio logging as JSON. +const stdioTransport: LoggerOptions['transport'] = { + target: 'pino/file', + options: { destination: 2 }, +}; + +// Define custom logging levels for pino. 
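The `addLogDataHandler` hook defined above is how OTEL trace correlation reaches the logs: the telemetry client can register a handler that stamps the active span's ids onto every structured entry (those keys already appear in the pretty transport's ignore list). A sketch of such a registration, assuming the standard `@opentelemetry/api` surface; the actual wiring in the telemetry client may differ:

```ts
import { addLogDataHandler, type LogData } from '@aztec/foundation/log';
import { context, trace } from '@opentelemetry/api';

addLogDataHandler((data: LogData): LogData => {
  const span = trace.getSpan(context.active());
  if (!span) {
    return data; // nothing to correlate outside of an active span
  }
  const { traceId, spanId } = span.spanContext();
  return { ...data, trace_id: traceId, span_id: spanId };
});
```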
+const customLevels = { verbose: 25 }; +const pinoOpts = { customLevels, useOnlyCustomLevels: false, level: logLevel }; +const levels = { + labels: { ...pino.levels.labels, ...Object.fromEntries(Object.entries(customLevels).map(e => e.reverse())) }, + values: { ...pino.levels.values, ...customLevels }, +}; + +// Transport for OpenTelemetry logging. While defining this here is an abstraction leakage since this +// should live in the telemetry-client, it is necessary to ensure that the logger is initialized with +// the correct transport. Tweaking transports of a live pino instance is tricky, and creating a new instance +// would mean that all child loggers created before the telemetry-client is initialized would not have +// this transport configured. Note that the target is defined as the export in the telemetry-client, +// since pino will load this transport separately on a worker thread, to minimize disruption to the main loop. + +const otelTransport: LoggerOptions['transport'] = { + target: '@aztec/telemetry-client/otel-pino-stream', + options: { levels, messageKey: 'msg' }, +}; + +// In nodejs, create a new pino instance with an stdout transport (either vanilla or json), and optionally +// an OTLP transport if the OTLP endpoint is provided. Note that transports are initialized in a worker thread. +// On the browser, we just log to the console. +const otlpEndpoint = process.env.OTEL_EXPORTER_OTLP_LOGS_ENDPOINT; +const logger = isNode + ? pino( + pinoOpts, + pino.transport({ + targets: compactArray([ + ['1', 'true', 'TRUE'].includes(process.env.LOG_JSON ?? '') ? stdioTransport : prettyTransport, + process.env.OTEL_EXPORTER_OTLP_LOGS_ENDPOINT ? otelTransport : undefined, + ]), + }), + ) + : pino({ ...pinoOpts, browser: { asObject: false } }); + +// Log the logger configuration. +logger.verbose( + { + module: 'logger', + ...logFilters.reduce((accum, [module, level]) => ({ ...accum, [`log.${module}`]: level }), {}), + }, + isNode + ? `Logger initialized with level ${logLevel}` + (otlpEndpoint ? ` with OTLP exporter to ${otlpEndpoint}` : '') + : `Browser console logger initialized with level ${logLevel}`, +); + +/** Log function that accepts an exception object */ +type ErrorLogFn = (msg: string, err?: Error | unknown, data?: LogData) => void; + +/** + * Logger that supports multiple severity levels. + */ +export type Logger = { [K in LogLevel]: LogFn } & { /** Error log function */ error: ErrorLogFn } & { + level: LogLevel; + isLevelEnabled: (level: LogLevel) => boolean; +}; + +/** + * Logger that supports multiple severity levels and can be called directly to issue a debug statement. + * Intended as a drop-in replacement for the debug module. + * TODO(palla/log): Remove this alias + */ +export type DebugLogger = Logger; + +/** + * Concatenates a log message and an exception. + * @param msg - Log message + * @param err - Error to log + * @returns A string with both the log message and the error message. + */ +function formatErr(msg: string, err?: Error | unknown): string { + return err ? 
`${msg}: ${inspect(err)}` : msg; +} diff --git a/yarn-project/foundation/src/queue/fifo_memory_queue.ts b/yarn-project/foundation/src/queue/fifo_memory_queue.ts index e2271143ac6..080133ed71c 100644 --- a/yarn-project/foundation/src/queue/fifo_memory_queue.ts +++ b/yarn-project/foundation/src/queue/fifo_memory_queue.ts @@ -1,4 +1,4 @@ -import { type DebugLogger } from '../log/logger.js'; +import { type DebugLogger } from '../log/index.js'; import { BaseMemoryQueue } from './base_memory_queue.js'; /** diff --git a/yarn-project/ivc-integration/src/avm_integration.test.ts b/yarn-project/ivc-integration/src/avm_integration.test.ts index 31a14eac16c..b9dd8bbd933 100644 --- a/yarn-project/ivc-integration/src/avm_integration.test.ts +++ b/yarn-project/ivc-integration/src/avm_integration.test.ts @@ -123,14 +123,13 @@ async function proveAvmTestContract(functionName: string, calldata: Fr[] = []): const avmCircuitInputs = await simulateAvmTestContractGenerateCircuitInputs(functionName, calldata); const internalLogger = createDebugLogger('aztec:avm-proving-test'); - const logger = (msg: string, _data?: any) => internalLogger.verbose(msg); // The paths for the barretenberg binary and the write path are hardcoded for now. const bbPath = path.resolve('../../barretenberg/cpp/build/bin/bb'); const bbWorkingDirectory = await fs.mkdtemp(path.join(tmpdir(), 'bb-')); // Then we prove. - const proofRes = await generateAvmProof(bbPath, bbWorkingDirectory, avmCircuitInputs, logger); + const proofRes = await generateAvmProof(bbPath, bbWorkingDirectory, avmCircuitInputs, internalLogger); if (proofRes.status === BB_RESULT.FAILURE) { internalLogger.error(`Proof generation failed: ${proofRes.reason}`); } diff --git a/yarn-project/ivc-integration/src/index.ts b/yarn-project/ivc-integration/src/index.ts index 93234564864..dab8804a187 100644 --- a/yarn-project/ivc-integration/src/index.ts +++ b/yarn-project/ivc-integration/src/index.ts @@ -1,3 +1,5 @@ +import { type CLIENT_IVC_VERIFICATION_KEY_LENGTH_IN_FIELDS } from '@aztec/circuits.js'; + import { type ForeignCallOutput, Noir } from '@noir-lang/noir_js'; import createDebug from 'debug'; import { ungzip } from 'pako'; @@ -5,6 +7,12 @@ import { type Page } from 'playwright'; import MockAppCreatorCircuit from '../artifacts/app_creator.json' assert { type: 'json' }; import MockAppReaderCircuit from '../artifacts/app_reader.json' assert { type: 'json' }; +import MockAppCreatorVk from '../artifacts/keys/app_creator.vk.data.json' assert { type: 'json' }; +import MockAppReaderVk from '../artifacts/keys/app_reader.vk.data.json' assert { type: 'json' }; +import MockPrivateKernelInitVk from '../artifacts/keys/mock_private_kernel_init.vk.data.json' assert { type: 'json' }; +import MockPrivateKernelInnerVk from '../artifacts/keys/mock_private_kernel_inner.vk.data.json' assert { type: 'json' }; +import MockPrivateKernelResetVk from '../artifacts/keys/mock_private_kernel_reset.vk.data.json' assert { type: 'json' }; +import MockPrivateKernelTailVk from '../artifacts/keys/mock_private_kernel_tail.vk.data.json' assert { type: 'json' }; import MockPrivateKernelInitCircuit from '../artifacts/mock_private_kernel_init.json' assert { type: 'json' }; import MockPrivateKernelInnerCircuit from '../artifacts/mock_private_kernel_inner.json' assert { type: 'json' }; import MockPrivateKernelResetCircuit from '../artifacts/mock_private_kernel_reset.json' assert { type: 'json' }; @@ -14,6 +22,7 @@ import type { AppCreatorInputType, AppPublicInputs, AppReaderInputType, + FixedLengthArray, 
KernelPublicInputs, MockPrivateKernelInitInputType, MockPrivateKernelInnerInputType, @@ -33,6 +42,12 @@ export { MockPrivateKernelResetCircuit, MockPrivateKernelTailCircuit, MockPublicBaseCircuit, + MockAppCreatorVk, + MockAppReaderVk, + MockPrivateKernelInitVk, + MockPrivateKernelInnerVk, + MockPrivateKernelResetVk, + MockPrivateKernelTailVk, }; createDebug.enable('*'); @@ -126,6 +141,13 @@ export async function witnessGenMockPublicBaseCircuit(args: MockPublicBaseInputT }; } +export function getVkAsFields(vk: { + keyAsBytes: string; + keyAsFields: string[]; +}): FixedLengthArray { + return vk.keyAsFields as FixedLengthArray; +} + export async function generate3FunctionTestingIVCStack(): Promise<[string[], Uint8Array[]]> { const tx = { number_of_calls: '0x1', @@ -138,11 +160,13 @@ export async function generate3FunctionTestingIVCStack(): Promise<[string[], Uin const initWitnessGenResult = await witnessGenMockPrivateKernelInitCircuit({ app_inputs: appWitnessGenResult.publicInputs, tx, + app_vk: getVkAsFields(MockAppCreatorVk), }); logger('generated mock private kernel init witness'); const tailWitnessGenResult = await witnessGenMockPrivateKernelTailCircuit({ prev_kernel_public_inputs: initWitnessGenResult.publicInputs, + kernel_vk: getVkAsFields(MockPrivateKernelResetVk), }); logger('generated mock private kernel tail witness'); @@ -168,10 +192,13 @@ export async function generate6FunctionTestingIVCStack(): Promise<[string[], Uin const initWitnessGenResult = await witnessGenMockPrivateKernelInitCircuit({ app_inputs: creatorAppWitnessGenResult.publicInputs, tx, + app_vk: getVkAsFields(MockAppCreatorVk), }); const innerWitnessGenResult = await witnessGenMockPrivateKernelInnerCircuit({ prev_kernel_public_inputs: initWitnessGenResult.publicInputs, app_inputs: readerAppWitnessGenResult.publicInputs, + app_vk: getVkAsFields(MockAppReaderVk), + kernel_vk: getVkAsFields(MockPrivateKernelInitVk), }); const resetWitnessGenResult = await witnessGenMockPrivateKernelResetCircuit({ @@ -182,10 +209,12 @@ export async function generate6FunctionTestingIVCStack(): Promise<[string[], Uin MOCK_MAX_COMMITMENTS_PER_TX.toString(), MOCK_MAX_COMMITMENTS_PER_TX.toString(), ], + kernel_vk: getVkAsFields(MockPrivateKernelInnerVk), }); const tailWitnessGenResult = await witnessGenMockPrivateKernelTailCircuit({ prev_kernel_public_inputs: resetWitnessGenResult.publicInputs, + kernel_vk: getVkAsFields(MockPrivateKernelResetVk), }); // Create client IVC proof diff --git a/yarn-project/ivc-integration/src/native_client_ivc_integration.test.ts b/yarn-project/ivc-integration/src/native_client_ivc_integration.test.ts index 6f413b19a94..2b891891043 100644 --- a/yarn-project/ivc-integration/src/native_client_ivc_integration.test.ts +++ b/yarn-project/ivc-integration/src/native_client_ivc_integration.test.ts @@ -40,6 +40,7 @@ describe('Client IVC Integration', () => { path.join(bbWorkingDirectory, 'acir.msgpack'), path.join(bbWorkingDirectory, 'witnesses.msgpack'), logger.info, + true, ); if (provingResult.status === BB_RESULT.FAILURE) { diff --git a/yarn-project/ivc-integration/src/wasm_client_ivc_integration.test.ts b/yarn-project/ivc-integration/src/wasm_client_ivc_integration.test.ts index 44c3b7ba234..5f105b7f71e 100644 --- a/yarn-project/ivc-integration/src/wasm_client_ivc_integration.test.ts +++ b/yarn-project/ivc-integration/src/wasm_client_ivc_integration.test.ts @@ -6,11 +6,17 @@ import { ungzip } from 'pako'; import { MOCK_MAX_COMMITMENTS_PER_TX, MockAppCreatorCircuit, + MockAppCreatorVk, MockAppReaderCircuit, + 
MockAppReaderVk, MockPrivateKernelInitCircuit, + MockPrivateKernelInitVk, MockPrivateKernelInnerCircuit, + MockPrivateKernelInnerVk, MockPrivateKernelResetCircuit, + MockPrivateKernelResetVk, MockPrivateKernelTailCircuit, + getVkAsFields, witnessGenCreatorAppMockCircuit, witnessGenMockPrivateKernelInitCircuit, witnessGenMockPrivateKernelInnerCircuit, @@ -70,11 +76,13 @@ describe('Client IVC Integration', () => { const initWitnessGenResult = await witnessGenMockPrivateKernelInitCircuit({ app_inputs: appWitnessGenResult.publicInputs, tx, + app_vk: getVkAsFields(MockAppCreatorVk), }); logger.debug('generated mock private kernel init witness'); const tailWitnessGenResult = await witnessGenMockPrivateKernelTailCircuit({ prev_kernel_public_inputs: initWitnessGenResult.publicInputs, + kernel_vk: getVkAsFields(MockPrivateKernelInitVk), }); logger.debug('generated mock private kernel tail witness'); @@ -112,10 +120,13 @@ describe('Client IVC Integration', () => { const initWitnessGenResult = await witnessGenMockPrivateKernelInitCircuit({ app_inputs: creatorAppWitnessGenResult.publicInputs, tx, + app_vk: getVkAsFields(MockAppCreatorVk), }); const innerWitnessGenResult = await witnessGenMockPrivateKernelInnerCircuit({ prev_kernel_public_inputs: initWitnessGenResult.publicInputs, app_inputs: readerAppWitnessGenResult.publicInputs, + app_vk: getVkAsFields(MockAppReaderVk), + kernel_vk: getVkAsFields(MockPrivateKernelInitVk), }); const resetWitnessGenResult = await witnessGenMockPrivateKernelResetCircuit({ @@ -126,10 +137,12 @@ describe('Client IVC Integration', () => { MOCK_MAX_COMMITMENTS_PER_TX.toString(), MOCK_MAX_COMMITMENTS_PER_TX.toString(), ], + kernel_vk: getVkAsFields(MockPrivateKernelInnerVk), }); const tailWitnessGenResult = await witnessGenMockPrivateKernelTailCircuit({ prev_kernel_public_inputs: resetWitnessGenResult.publicInputs, + kernel_vk: getVkAsFields(MockPrivateKernelResetVk), }); // Create client IVC proof diff --git a/yarn-project/ivc-integration/webpack.config.js b/yarn-project/ivc-integration/webpack.config.js index 679267bc82c..93ad5979167 100644 --- a/yarn-project/ivc-integration/webpack.config.js +++ b/yarn-project/ivc-integration/webpack.config.js @@ -30,6 +30,9 @@ export default { ], resolve: { plugins: [new ResolveTypeScriptPlugin()], + fallback: { + tty: false, + } }, devServer: { hot: false, diff --git a/yarn-project/kv-store/package.json b/yarn-project/kv-store/package.json index bacc49e1a38..96073106a08 100644 --- a/yarn-project/kv-store/package.json +++ b/yarn-project/kv-store/package.json @@ -15,8 +15,7 @@ "clean": "rm -rf ./dest .tsbuildinfo", "formatting": "run -T prettier --check ./src && run -T eslint ./src", "formatting:fix": "run -T eslint --fix ./src && run -T prettier -w ./src", - "test": "NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --passWithNoTests", - "start": "DEBUG='aztec:*' && node ./dest/bin/index.js" + "test": "NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --passWithNoTests" }, "inherits": [ "../package.common.json" @@ -80,4 +79,4 @@ "engines": { "node": ">=18" } -} +} \ No newline at end of file diff --git a/yarn-project/l1-artifacts/scripts/generate-artifacts.sh b/yarn-project/l1-artifacts/scripts/generate-artifacts.sh index ef0892de022..3b78a796f50 100755 --- a/yarn-project/l1-artifacts/scripts/generate-artifacts.sh +++ b/yarn-project/l1-artifacts/scripts/generate-artifacts.sh @@ -28,8 +28,8 @@ CONTRACTS=( "l1-contracts:GovernanceProposer" "l1-contracts:Governance" 
"l1-contracts:NewGovernanceProposerPayload" - "l1-contracts:TxsDecoder" - "l1-contracts:SampleLib" + "l1-contracts:LeonidasLib" + "l1-contracts:ExtRollupLib" ) diff --git a/yarn-project/p2p-bootstrap/scripts/docker-compose-bootstrap.yml b/yarn-project/p2p-bootstrap/scripts/docker-compose-bootstrap.yml index 1d6be73b473..508bbf6918f 100644 --- a/yarn-project/p2p-bootstrap/scripts/docker-compose-bootstrap.yml +++ b/yarn-project/p2p-bootstrap/scripts/docker-compose-bootstrap.yml @@ -6,6 +6,6 @@ services: ports: - '40400:40400' environment: - DEBUG: 'aztec:*' + LOG_LEVEL: 'verbose' P2P_TCP_LISTEN_ADDR: '0.0.0.0:40400' PEER_ID: '0a260024080112205ea53185db2e52dae74d0d4d6cadc494174810d0a713cd09b0ac517c38bc781e1224080112205ea53185db2e52dae74d0d4d6cadc494174810d0a713cd09b0ac517c38bc781e1a44080112402df8b977f356c6e34fa021c9647973234dff4df706c185794405aafb556723cf5ea53185db2e52dae74d0d4d6cadc494174810d0a713cd09b0ac517c38bc781e' diff --git a/yarn-project/p2p-bootstrap/terraform/main.tf b/yarn-project/p2p-bootstrap/terraform/main.tf index 31b76cb33cd..e48152f282d 100644 --- a/yarn-project/p2p-bootstrap/terraform/main.tf +++ b/yarn-project/p2p-bootstrap/terraform/main.tf @@ -137,7 +137,11 @@ resource "aws_ecs_task_definition" "p2p-bootstrap" { }, { "name": "DEBUG", - "value": "aztec:*,discv5:*" + "value": "discv5:*" + }, + { + "name": "LOG_LEVEL", + "value": "debug" }, { "name": "P2P_MIN_PEERS", diff --git a/yarn-project/p2p/package.json b/yarn-project/p2p/package.json index a458dcc94a5..9fffdafad29 100644 --- a/yarn-project/p2p/package.json +++ b/yarn-project/p2p/package.json @@ -91,7 +91,9 @@ "libp2p": "1.5.0", "semver": "^7.6.0", "sha3": "^2.1.4", - "tslib": "^2.4.0" + "snappy": "^7.2.2", + "tslib": "^2.4.0", + "xxhash-wasm": "^1.1.0" }, "devDependencies": { "@aztec/archiver": "workspace:^", diff --git a/yarn-project/p2p/src/mem_pools/instrumentation.ts b/yarn-project/p2p/src/mem_pools/instrumentation.ts index e4271029ba2..d80b2f69d55 100644 --- a/yarn-project/p2p/src/mem_pools/instrumentation.ts +++ b/yarn-project/p2p/src/mem_pools/instrumentation.ts @@ -3,6 +3,7 @@ import { Attributes, type Histogram, LmdbMetrics, + type LmdbStatsCallback, Metrics, type TelemetryClient, type UpDownCounter, @@ -58,7 +59,7 @@ export class PoolInstrumentation { private defaultAttributes; - constructor(telemetry: TelemetryClient, name: PoolName) { + constructor(telemetry: TelemetryClient, name: PoolName, dbStats?: LmdbStatsCallback) { const meter = telemetry.getMeter(name); this.defaultAttributes = { [Attributes.POOL_NAME]: name }; @@ -98,13 +99,10 @@ export class PoolInstrumentation { name: Metrics.MEMPOOL_DB_NUM_ITEMS, description: 'Num items in database for the Tx mempool', }, + dbStats, ); } - public recordDBMetrics(metrics: { mappingSize: number; numItems: number; actualSize: number }) { - this.dbMetrics.recordDBMetrics(metrics); - } - public recordSize(poolObject: PoolObject) { this.objectSize.record(poolObject.getSize()); } diff --git a/yarn-project/p2p/src/mem_pools/tx_pool/aztec_kv_tx_pool.ts b/yarn-project/p2p/src/mem_pools/tx_pool/aztec_kv_tx_pool.ts index 865fbd8fdf2..18ba3c5fc1d 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool/aztec_kv_tx_pool.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool/aztec_kv_tx_pool.ts @@ -37,7 +37,7 @@ export class AztecKVTxPool implements TxPool { this.#store = store; this.#log = log; - this.#metrics = new PoolInstrumentation(telemetry, PoolName.TX_POOL); + this.#metrics = new PoolInstrumentation(telemetry, PoolName.TX_POOL, () => store.estimateSize()); } public 
markAsMined(txHashes: TxHash[], blockNumber: number): Promise<void> { @@ -53,8 +53,6 @@ } this.#metrics.recordRemovedObjects(deleted, 'pending'); this.#metrics.recordAddedObjects(txHashes.length, 'mined'); - const storeSizes = this.#store.estimateSize(); - this.#metrics.recordDBMetrics(storeSizes); }); } diff --git a/yarn-project/p2p/src/mocks/index.ts b/yarn-project/p2p/src/mocks/index.ts index f0fc6cd2ecf..38127a88daf 100644 --- a/yarn-project/p2p/src/mocks/index.ts +++ b/yarn-project/p2p/src/mocks/index.ts @@ -148,7 +148,7 @@ export type ReqRespNode = { export const MOCK_SUB_PROTOCOL_HANDLERS: ReqRespSubProtocolHandlers = { [PING_PROTOCOL]: pingHandler, [STATUS_PROTOCOL]: statusHandler, - [TX_REQ_PROTOCOL]: (_msg: any) => Promise.resolve(Uint8Array.from(Buffer.from('tx'))), + [TX_REQ_PROTOCOL]: (_msg: any) => Promise.resolve(Buffer.from('tx')), }; // By default, all requests are valid diff --git a/yarn-project/p2p/src/service/encoding.ts b/yarn-project/p2p/src/service/encoding.ts new file mode 100644 index 00000000000..0713b7e8a26 --- /dev/null +++ b/yarn-project/p2p/src/service/encoding.ts @@ -0,0 +1,61 @@ +// Taken from lodestar: https://github.com/ChainSafe/lodestar +import { sha256 } from '@aztec/foundation/crypto'; + +import { type RPC } from '@chainsafe/libp2p-gossipsub/message'; +import { type DataTransform } from '@chainsafe/libp2p-gossipsub/types'; +import { type Message } from '@libp2p/interface'; +import { compressSync, uncompressSync } from 'snappy'; +import xxhashFactory from 'xxhash-wasm'; + +// Load WASM +const xxhash = await xxhashFactory(); + +// Use salt to prevent msgId from being mined for collisions +const h64Seed = BigInt(Math.floor(Math.random() * 1e9)); + +// Shared buffer to convert msgId to string +const sharedMsgIdBuf = Buffer.alloc(20); + +/** + * The function used to generate a fast gossipsub message id for deduplication + * We use a salted xxhash64 over the raw message data + */ +export function fastMsgIdFn(rpcMsg: RPC.Message): string { + if (rpcMsg.data) { + return xxhash.h64Raw(rpcMsg.data, h64Seed).toString(16); + } + return '0000000000000000'; + } + +export function msgIdToStrFn(msgId: Uint8Array): string { + // This happens serially, no need to reallocate the buffer + sharedMsgIdBuf.set(msgId); + return `0x${sharedMsgIdBuf.toString('hex')}`; + } + +/** + * Get the message identifier from a libp2p message + * + * Follows similarly to: + * https://github.com/ethereum/consensus-specs/blob/v1.1.0-alpha.7/specs/altair/p2p-interface.md#topics-and-messages + * + * @param message - The libp2p message + * @returns The message identifier + */ +export function getMsgIdFn(message: Message) { + const { topic } = message; + + const vec = [Buffer.from(topic), message.data]; + return sha256(Buffer.concat(vec)).subarray(0, 20); } + +export class SnappyTransform implements DataTransform { + inboundTransform(_topicStr: string, data: Uint8Array): Uint8Array { + const uncompressed = Buffer.from(uncompressSync(Buffer.from(data), { asBuffer: true })); + return new Uint8Array(uncompressed); + } + + outboundTransform(_topicStr: string, data: Uint8Array): Uint8Array { + return new Uint8Array(compressSync(Buffer.from(data))); + } } diff --git a/yarn-project/p2p/src/service/libp2p_service.ts b/yarn-project/p2p/src/service/libp2p_service.ts index 18d2d180a4a..f1e9df0c992 100644 --- a/yarn-project/p2p/src/service/libp2p_service.ts +++ b/yarn-project/p2p/src/service/libp2p_service.ts @@ -43,6 +43,7 @@ } from 
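Two properties of the encoding helpers above are worth pinning down with a sanity sketch: the snappy transform must round-trip exactly, and the canonical message id is the first 20 bytes of sha256 over topic plus payload (which is also why the shared id buffer is 20 bytes). The topic string below is illustrative:

```ts
import { sha256 } from '@aztec/foundation/crypto';
import { compressSync, uncompressSync } from 'snappy';

// Lossless round-trip: outbound compression then inbound decompression must
// reproduce the payload bit for bit, or gossip payloads would corrupt.
const payload = Buffer.from('example gossip payload');
const wire = compressSync(payload);
const restored = uncompressSync(wire, { asBuffer: true }) as Buffer;
console.assert(restored.equals(payload));

// Canonical id: sha256 over topic || data, truncated to 20 bytes.
const topic = '/aztec/tx/0.1.0'; // illustrative topic name
const msgId = sha256(Buffer.concat([Buffer.from(topic), payload])).subarray(0, 20);
console.assert(msgId.length === 20);
```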
'../tx_validator/index.js'; import { type PubSubLibp2p, convertToMultiaddr } from '../util.js'; import { AztecDatastore } from './data_store.js'; +import { SnappyTransform, fastMsgIdFn, getMsgIdFn, msgIdToStrFn } from './encoding.js'; import { PeerManager } from './peer_manager.js'; import { PeerErrorSeverity } from './peer_scoring.js'; import { pingHandler, statusHandler } from './reqresp/handlers.js'; @@ -139,7 +140,7 @@ export class LibP2PService extends WithTracer implements P2PService { // add GossipSub listener this.node.services.pubsub.addEventListener('gossipsub:message', async e => { const { msg, propagationSource: peerId } = e.detail; - this.logger.debug(`Received PUBSUB message.`); + this.logger.trace(`Received PUBSUB message.`); await this.jobQueue.put(() => this.handleNewGossipMessage(msg, peerId)); }); @@ -242,6 +243,10 @@ export class LibP2PService extends WithTracer implements P2PService { heartbeatInterval: config.gossipsubInterval, mcacheLength: config.gossipsubMcacheLength, mcacheGossip: config.gossipsubMcacheGossip, + msgIdFn: getMsgIdFn, + msgIdToStrFn: msgIdToStrFn, + fastMsgIdFn: fastMsgIdFn, + dataTransform: new SnappyTransform(), metricsRegister: otelMetricsAdapter, metricsTopicStrToLabel: metricsTopicStrToLabels(), scoreParams: createPeerScoreParams({ @@ -278,11 +283,11 @@ export class LibP2PService extends WithTracer implements P2PService { * @param msg - the tx request message * @returns the tx response message */ - const txHandler = (msg: Buffer): Promise<Uint8Array> => { + const txHandler = (msg: Buffer): Promise<Buffer> => { const txHash = TxHash.fromBuffer(msg); const foundTx = mempools.txPool.getTxByHash(txHash); - const asUint8Array = Uint8Array.from(foundTx ? foundTx.toBuffer() : Buffer.alloc(0)); - return Promise.resolve(asUint8Array); + const buf = foundTx ? foundTx.toBuffer() : Buffer.alloc(0); + return Promise.resolve(buf); }; const requestResponseHandlers = { @@ -451,7 +456,7 @@ export class LibP2PService extends WithTracer implements P2PService { * @param message - The message to propagate. */ public propagate<T extends Gossipable>(message: T): void { - this.logger.debug(`[${message.p2pMessageIdentifier()}] queued`); + this.logger.trace(`[${message.p2pMessageIdentifier()}] queued`); void this.jobQueue.put(async () => { await this.sendToPeers(message); }); diff --git a/yarn-project/p2p/src/service/reqresp/handlers.ts b/yarn-project/p2p/src/service/reqresp/handlers.ts index 688fab959e3..20a9163f88e 100644 --- a/yarn-project/p2p/src/service/reqresp/handlers.ts +++ b/yarn-project/p2p/src/service/reqresp/handlers.ts @@ -3,8 +3,8 @@ * @param _msg - The ping request message. * @returns A resolved promise with the pong response. */ -export function pingHandler(_msg: any): Promise<Uint8Array> { - return Promise.resolve(Uint8Array.from(Buffer.from('pong'))); +export function pingHandler(_msg: any): Promise<Buffer> { + return Promise.resolve(Buffer.from('pong')); } /** @@ -12,6 +12,6 @@ export function pingHandler(_msg: any): Promise<Uint8Array> { * @param _msg - The status request message. * @returns A resolved promise with the ok response. */ -export function statusHandler(_msg: any): Promise<Uint8Array> { - return Promise.resolve(Uint8Array.from(Buffer.from('ok'))); +export function statusHandler(_msg: any): Promise<Buffer> { + return Promise.resolve(Buffer.from('ok')); } diff --git a/yarn-project/p2p/src/service/reqresp/interface.ts b/yarn-project/p2p/src/service/reqresp/interface.ts index 8370b8a8a21..e23608c3665 100644 --- a/yarn-project/p2p/src/service/reqresp/interface.ts +++ b/yarn-project/p2p/src/service/reqresp/interface.ts @@ -16,7 +16,7 @@ export type ReqRespSubProtocol = typeof PING_PROTOCOL | typeof STATUS_PROTOCOL | * A handler for a sub protocol * The message will arrive as a buffer, and the handler must return a buffer */ -export type ReqRespSubProtocolHandler = (msg: Buffer) => Promise<Uint8Array>; +export type ReqRespSubProtocolHandler = (msg: Buffer) => Promise<Buffer>; /** * A type mapping from sub protocol to its rate limits @@ -83,8 +83,8 @@ export type SubProtocolMap = { * Default handler for unimplemented sub protocols; this SHOULD be overwritten * by the service, but is provided as a fallback */ -const defaultHandler = (_msg: any): Promise<Uint8Array> => { - return Promise.resolve(Uint8Array.from(Buffer.from('unimplemented'))); +const defaultHandler = (_msg: any): Promise<Buffer> => { + return Promise.resolve(Buffer.from('unimplemented')); }; /** diff --git a/yarn-project/p2p/src/service/reqresp/reqresp.test.ts b/yarn-project/p2p/src/service/reqresp/reqresp.test.ts index 1807a318522..349b3a8f6b5 100644 --- a/yarn-project/p2p/src/service/reqresp/reqresp.test.ts +++ b/yarn-project/p2p/src/service/reqresp/reqresp.test.ts @@ -8,6 +8,7 @@ import { CollectiveReqRespTimeoutError, IndiviualReqRespTimeoutError } from '../ import { MOCK_SUB_PROTOCOL_HANDLERS, MOCK_SUB_PROTOCOL_VALIDATORS, + type ReqRespNode, connectToPeers, createNodes, startNodes, @@ -23,15 +24,22 @@ const PING_REQUEST = RequestableBuffer.fromBuffer(Buffer.from('ping')); // and ask for specific data that they missed via the traditional gossip protocol. 
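With the signature change above, sub-protocol handlers now speak Buffer on both sides; snappy compression is applied by the ReqResp plumbing, not by the handlers themselves. A minimal handler under the new type (the echo behavior is hypothetical, not part of this PR):

```ts
import { type ReqRespSubProtocolHandler } from './interface.js';

// Receives the raw request Buffer and resolves to a response Buffer; the
// ReqResp layer compresses the response before it hits the stream.
const echoHandler: ReqRespSubProtocolHandler = (msg: Buffer) => {
  return Promise.resolve(Buffer.concat([Buffer.from('echo:'), msg]));
};
```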
describe('ReqResp', () => { let peerManager: MockProxy; + let nodes: ReqRespNode[]; beforeEach(() => { peerManager = mock(); }); + afterEach(async () => { + if (nodes) { + await stopNodes(nodes as ReqRespNode[]); + } + }); + it('Should perform a ping request', async () => { // Create two nodes // They need to discover each other - const nodes = await createNodes(peerManager, 2); + nodes = await createNodes(peerManager, 2); const { req: pinger } = nodes[0]; await startNodes(nodes); @@ -45,12 +53,10 @@ describe('ReqResp', () => { await sleep(500); expect(res?.toBuffer().toString('utf-8')).toEqual('pong'); - - await stopNodes(nodes); }); it('Should handle gracefully if a peer connected peer is offline', async () => { - const nodes = await createNodes(peerManager, 2); + nodes = await createNodes(peerManager, 2); const { req: pinger } = nodes[0]; const { req: ponger } = nodes[1]; @@ -66,12 +72,10 @@ describe('ReqResp', () => { const res = await pinger.sendRequest(PING_PROTOCOL, PING_REQUEST); expect(res).toBeUndefined(); - - await stopNodes(nodes); }); it('Should request from a later peer if other peers are offline', async () => { - const nodes = await createNodes(peerManager, 4); + nodes = await createNodes(peerManager, 4); await startNodes(nodes); await sleep(500); @@ -86,12 +90,10 @@ describe('ReqResp', () => { const res = await nodes[0].req.sendRequest(PING_PROTOCOL, PING_REQUEST); expect(res?.toBuffer().toString('utf-8')).toEqual('pong'); - - await stopNodes(nodes); }); it('Should hit a rate limit if too many requests are made in quick succession', async () => { - const nodes = await createNodes(peerManager, 2); + nodes = await createNodes(peerManager, 2); await startNodes(nodes); @@ -110,8 +112,6 @@ describe('ReqResp', () => { // Make sure the error message is logged const errorMessage = `Rate limit exceeded for ${PING_PROTOCOL} from ${nodes[0].p2p.peerId.toString()}`; expect(loggerSpy).toHaveBeenCalledWith(errorMessage); - - await stopNodes(nodes); }); describe('TX REQ PROTOCOL', () => { @@ -120,15 +120,15 @@ describe('ReqResp', () => { const txHash = tx.getTxHash(); const protocolHandlers = MOCK_SUB_PROTOCOL_HANDLERS; - protocolHandlers[TX_REQ_PROTOCOL] = (message: Buffer): Promise => { + protocolHandlers[TX_REQ_PROTOCOL] = (message: Buffer): Promise => { const receivedHash = TxHash.fromBuffer(message); if (txHash.equals(receivedHash)) { - return Promise.resolve(Uint8Array.from(tx.toBuffer())); + return Promise.resolve(tx.toBuffer()); } - return Promise.resolve(Uint8Array.from(Buffer.from(''))); + return Promise.resolve(Buffer.from('')); }; - const nodes = await createNodes(peerManager, 2); + nodes = await createNodes(peerManager, 2); await startNodes(nodes, protocolHandlers); await sleep(500); @@ -137,8 +137,6 @@ describe('ReqResp', () => { const res = await nodes[0].req.sendRequest(TX_REQ_PROTOCOL, txHash); expect(res).toEqual(tx); - - await stopNodes(nodes); }); it('Does not crash if tx hash returns undefined', async () => { @@ -147,11 +145,11 @@ describe('ReqResp', () => { const protocolHandlers = MOCK_SUB_PROTOCOL_HANDLERS; // Return nothing - protocolHandlers[TX_REQ_PROTOCOL] = (_message: Buffer): Promise => { - return Promise.resolve(Uint8Array.from(Buffer.from(''))); + protocolHandlers[TX_REQ_PROTOCOL] = (_message: Buffer): Promise => { + return Promise.resolve(Buffer.from('')); }; - const nodes = await createNodes(peerManager, 2); + nodes = await createNodes(peerManager, 2); await startNodes(nodes, protocolHandlers); await sleep(500); @@ -160,12 +158,10 @@ describe('ReqResp', 
() => { const res = await nodes[0].req.sendRequest(TX_REQ_PROTOCOL, txHash); expect(res).toBeUndefined(); - - await stopNodes(nodes); }); it('Should hit individual timeout if nothing is returned over the stream', async () => { - const nodes = await createNodes(peerManager, 2); + nodes = await createNodes(peerManager, 2); await startNodes(nodes); @@ -197,12 +193,10 @@ describe('ReqResp', () => { }), PeerErrorSeverity.HighToleranceError, ); - - await stopNodes(nodes); }); it('Should hit collective timeout if nothing is returned over the stream from multiple peers', async () => { - const nodes = await createNodes(peerManager, 4); + nodes = await createNodes(peerManager, 4); await startNodes(nodes); @@ -226,8 +220,6 @@ describe('ReqResp', () => { // Make sure the error message is logged const errorMessage = `${new CollectiveReqRespTimeoutError().message} | subProtocol: ${TX_REQ_PROTOCOL}`; expect(loggerSpy).toHaveBeenCalledWith(errorMessage); - - await stopNodes(nodes); }); it('Should penalize peer if transaction validation fails', async () => { @@ -236,12 +228,12 @@ describe('ReqResp', () => { // Mock that the node will respond with the tx const protocolHandlers = MOCK_SUB_PROTOCOL_HANDLERS; - protocolHandlers[TX_REQ_PROTOCOL] = (message: Buffer): Promise => { + protocolHandlers[TX_REQ_PROTOCOL] = (message: Buffer): Promise => { const receivedHash = TxHash.fromBuffer(message); if (txHash.equals(receivedHash)) { - return Promise.resolve(Uint8Array.from(tx.toBuffer())); + return Promise.resolve(tx.toBuffer()); } - return Promise.resolve(Uint8Array.from(Buffer.from(''))); + return Promise.resolve(Buffer.from('')); }; // Mock that the receiving node will find that the transaction is invalid @@ -251,7 +243,7 @@ describe('ReqResp', () => { return Promise.resolve(false); }; - const nodes = await createNodes(peerManager, 2); + nodes = await createNodes(peerManager, 2); await startNodes(nodes, protocolHandlers, protocolValidators); await sleep(500); @@ -268,8 +260,6 @@ describe('ReqResp', () => { }), PeerErrorSeverity.LowToleranceError, ); - - await stopNodes(nodes); }); }); }); diff --git a/yarn-project/p2p/src/service/reqresp/reqresp.ts b/yarn-project/p2p/src/service/reqresp/reqresp.ts index a2249015c2f..9d67de5c367 100644 --- a/yarn-project/p2p/src/service/reqresp/reqresp.ts +++ b/yarn-project/p2p/src/service/reqresp/reqresp.ts @@ -5,6 +5,7 @@ import { executeTimeoutWithCustomError } from '@aztec/foundation/timer'; import { type IncomingStreamData, type PeerId, type Stream } from '@libp2p/interface'; import { pipe } from 'it-pipe'; import { type Libp2p } from 'libp2p'; +import { compressSync, uncompressSync } from 'snappy'; import { type Uint8ArrayList } from 'uint8arraylist'; import { CollectiveReqRespTimeoutError, IndiviualReqRespTimeoutError } from '../../errors/reqresp.error.js'; @@ -31,6 +32,9 @@ import { RequestResponseRateLimiter } from './rate_limiter/rate_limiter.js'; * This service implements the request response sub protocol, it is heavily inspired from * ethereum implementations of the same name. 
* + * Note, responses get compressed in streamHandler + * so they get decompressed in readMessage + * * see: https://github.com/ethereum/consensus-specs/blob/dev/specs/phase0/p2p-interface.md#the-reqresp-domain */ export class ReqResp { @@ -232,7 +236,7 @@ export class ReqResp { chunks.push(chunk.subarray()); } const messageData = chunks.concat(); - return Buffer.concat(messageData); + return uncompressSync(Buffer.concat(messageData), { asBuffer: true }) as Buffer; } /** @@ -269,7 +273,8 @@ export class ReqResp { async function* (source: any) { for await (const chunkList of source) { const msg = Buffer.from(chunkList.subarray()); - yield handler(msg); + const response = await handler(msg); + yield new Uint8Array(compressSync(response)); } }, stream, diff --git a/yarn-project/prover-client/package.json b/yarn-project/prover-client/package.json index b8766542083..08b6b59a46d 100644 --- a/yarn-project/prover-client/package.json +++ b/yarn-project/prover-client/package.json @@ -28,8 +28,8 @@ "formatting": "run -T prettier --check ./src && run -T eslint ./src", "formatting:fix": "run -T eslint --fix ./src && run -T prettier -w ./src", "bb": "node --no-warnings ./dest/bb/index.js", - "test": "DEBUG_COLORS=1 NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --testTimeout=1500000 --forceExit", - "test:debug": "LOG_LEVEL=debug DEBUG_COLORS=1 NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --testTimeout=1500000 --forceExit --testNamePattern prover/bb_prover/parity" + "test": "NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --testTimeout=1500000 --forceExit", + "test:debug": "LOG_LEVEL=debug NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --testTimeout=1500000 --forceExit --testNamePattern prover/bb_prover/parity" }, "jest": { "moduleNameMapper": { @@ -104,4 +104,4 @@ "engines": { "node": ">=18" } -} +} \ No newline at end of file diff --git a/yarn-project/prover-client/package.local.json b/yarn-project/prover-client/package.local.json index bc11a5330d0..754bb34cec9 100644 --- a/yarn-project/prover-client/package.local.json +++ b/yarn-project/prover-client/package.local.json @@ -1,5 +1,5 @@ { "scripts": { - "test": "DEBUG_COLORS=1 NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --testTimeout=1500000 --forceExit" + "test": "NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --testTimeout=1500000 --forceExit" } -} +} \ No newline at end of file diff --git a/yarn-project/prover-client/src/prover-client/prover-client.ts b/yarn-project/prover-client/src/prover-client/prover-client.ts index 3cc5b9aa32b..d41e3ad8851 100644 --- a/yarn-project/prover-client/src/prover-client/prover-client.ts +++ b/yarn-project/prover-client/src/prover-client/prover-client.ts @@ -137,7 +137,15 @@ export class ProverClient implements EpochProverManager { const prover = await buildServerCircuitProver(this.config, this.telemetry); this.agents = times( this.config.proverAgentCount, - () => new ProvingAgent(this.agentClient!, proofStore, prover, [], this.config.proverAgentPollIntervalMs), + () => + new ProvingAgent( + this.agentClient!, + proofStore, + prover, + this.telemetry, + [], + this.config.proverAgentPollIntervalMs, + ), ); await Promise.all(this.agents.map(agent => agent.start())); diff --git a/yarn-project/prover-client/src/proving_broker/factory.ts b/yarn-project/prover-client/src/proving_broker/factory.ts index 02a5fcb314b..67295fb6011 100644 --- 
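To make the compression symmetry in the note above concrete: the responder side compresses inside streamHandler, the requester side decompresses inside readMessage, and the two must compose to the identity. A reduced sketch of that invariant with illustrative function names:

```ts
import { compressSync, uncompressSync } from 'snappy';

// Responder side (mirrors streamHandler): compress the handler's response.
function encodeResponse(response: Buffer): Uint8Array {
  return new Uint8Array(compressSync(response));
}

// Requester side (mirrors readMessage): reassemble chunks, then decompress.
function decodeResponse(chunks: Uint8Array[]): Buffer {
  return uncompressSync(Buffer.concat(chunks), { asBuffer: true }) as Buffer;
}

const original = Buffer.from('pong');
console.assert(decodeResponse([encodeResponse(original)]).equals(original));
```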
a/yarn-project/prover-client/src/proving_broker/factory.ts +++ b/yarn-project/prover-client/src/proving_broker/factory.ts @@ -1,16 +1,20 @@ import { type ProverBrokerConfig } from '@aztec/circuit-types'; import { AztecLmdbStore } from '@aztec/kv-store/lmdb'; +import { type TelemetryClient } from '@aztec/telemetry-client'; import { ProvingBroker } from './proving_broker.js'; import { InMemoryBrokerDatabase } from './proving_broker_database/memory.js'; import { KVBrokerDatabase } from './proving_broker_database/persisted.js'; -export async function createAndStartProvingBroker(config: ProverBrokerConfig): Promise { +export async function createAndStartProvingBroker( + config: ProverBrokerConfig, + client: TelemetryClient, +): Promise { const database = config.proverBrokerDataDirectory - ? new KVBrokerDatabase(AztecLmdbStore.open(config.proverBrokerDataDirectory)) + ? new KVBrokerDatabase(AztecLmdbStore.open(config.proverBrokerDataDirectory), client) : new InMemoryBrokerDatabase(); - const broker = new ProvingBroker(database, { + const broker = new ProvingBroker(database, client, { jobTimeoutMs: config.proverBrokerJobTimeoutMs, maxRetries: config.proverBrokerJobMaxRetries, timeoutIntervalMs: config.proverBrokerPollIntervalMs, diff --git a/yarn-project/prover-client/src/proving_broker/proving_agent.test.ts b/yarn-project/prover-client/src/proving_broker/proving_agent.test.ts index cc49057ab6d..5a33598a31d 100644 --- a/yarn-project/prover-client/src/proving_broker/proving_agent.test.ts +++ b/yarn-project/prover-client/src/proving_broker/proving_agent.test.ts @@ -19,6 +19,7 @@ import { makeBaseParityInputs, makeParityPublicInputs } from '@aztec/circuits.js import { randomBytes } from '@aztec/foundation/crypto'; import { AbortError } from '@aztec/foundation/error'; import { promiseWithResolvers } from '@aztec/foundation/promise'; +import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; import { jest } from '@jest/globals'; @@ -50,7 +51,7 @@ describe('ProvingAgent', () => { saveProofOutput: jest.fn(), }; - agent = new ProvingAgent(jobSource, proofDB, prover, [ProvingRequestType.BASE_PARITY]); + agent = new ProvingAgent(jobSource, proofDB, prover, new NoopTelemetryClient(), [ProvingRequestType.BASE_PARITY]); }); afterEach(async () => { diff --git a/yarn-project/prover-client/src/proving_broker/proving_agent.ts b/yarn-project/prover-client/src/proving_broker/proving_agent.ts index 6d17c8176b5..333ac91a4a9 100644 --- a/yarn-project/prover-client/src/proving_broker/proving_agent.ts +++ b/yarn-project/prover-client/src/proving_broker/proving_agent.ts @@ -10,8 +10,11 @@ import { } from '@aztec/circuit-types'; import { createDebugLogger } from '@aztec/foundation/log'; import { RunningPromise } from '@aztec/foundation/running-promise'; +import { Timer } from '@aztec/foundation/timer'; +import { type TelemetryClient } from '@aztec/telemetry-client'; import { type ProofStore } from './proof_store.js'; +import { ProvingAgentInstrumentation } from './proving_agent_instrumentation.js'; import { ProvingJobController, ProvingJobControllerStatus } from './proving_job_controller.js'; /** @@ -20,6 +23,8 @@ import { ProvingJobController, ProvingJobControllerStatus } from './proving_job_ export class ProvingAgent { private currentJobController?: ProvingJobController; private runningPromise: RunningPromise; + private instrumentation: ProvingAgentInstrumentation; + private idleTimer: Timer | undefined; constructor( /** The source of proving jobs */ @@ -28,12 +33,15 @@ export class ProvingAgent { private 
proofStore: ProofStore, /** The prover implementation to defer jobs to */ private circuitProver: ServerCircuitProver, + /** A telemetry client through which to emit metrics */ + client: TelemetryClient, /** Optional list of allowed proof types to build */ private proofAllowList: Array = [], /** How long to wait between jobs */ private pollIntervalMs = 1000, private log = createDebugLogger('aztec:prover-client:proving-agent'), ) { + this.instrumentation = new ProvingAgentInstrumentation(client); this.runningPromise = new RunningPromise(this.safeWork, this.pollIntervalMs); } @@ -46,6 +54,7 @@ export class ProvingAgent { } public start(): void { + this.idleTimer = new Timer(); this.runningPromise.start(); } @@ -114,6 +123,11 @@ export class ProvingAgent { ); } + if (this.idleTimer) { + this.instrumentation.recordIdleTime(this.idleTimer); + } + this.idleTimer = undefined; + this.currentJobController.start(); } catch (err) { this.log.error(`Error in ProvingAgent: ${String(err)}`); @@ -126,6 +140,7 @@ export class ProvingAgent { err: Error | undefined, result: ProvingJobResultsMap[T] | undefined, ) => { + this.idleTimer = new Timer(); if (err) { const retry = err.name === ProvingError.NAME ? (err as ProvingError).retry : false; this.log.error(`Job id=${jobId} type=${ProvingRequestType[type]} failed err=${err.message} retry=${retry}`, err); diff --git a/yarn-project/prover-client/src/proving_broker/proving_agent_instrumentation.ts b/yarn-project/prover-client/src/proving_broker/proving_agent_instrumentation.ts new file mode 100644 index 00000000000..573b71f2e93 --- /dev/null +++ b/yarn-project/prover-client/src/proving_broker/proving_agent_instrumentation.ts @@ -0,0 +1,21 @@ +import { type Timer } from '@aztec/foundation/timer'; +import { type Histogram, Metrics, type TelemetryClient, ValueType } from '@aztec/telemetry-client'; + +export class ProvingAgentInstrumentation { + private idleTime: Histogram; + + constructor(client: TelemetryClient, name = 'ProvingAgent') { + const meter = client.getMeter(name); + + this.idleTime = meter.createHistogram(Metrics.PROVING_AGENT_IDLE, { + description: 'Records how long an agent was idle', + unit: 'ms', + valueType: ValueType.INT, + }); + } + + recordIdleTime(msOrTimer: Timer | number) { + const duration = typeof msOrTimer === 'number' ? 
msOrTimer : Math.floor(msOrTimer.ms()); + this.idleTime.record(duration); + } +} diff --git a/yarn-project/prover-client/src/proving_broker/proving_broker.test.ts b/yarn-project/prover-client/src/proving_broker/proving_broker.test.ts index 543843a6e15..76eef870b21 100644 --- a/yarn-project/prover-client/src/proving_broker/proving_broker.test.ts +++ b/yarn-project/prover-client/src/proving_broker/proving_broker.test.ts @@ -1,6 +1,7 @@ import { type ProofUri, type ProvingJob, type ProvingJobId, ProvingRequestType } from '@aztec/circuit-types'; import { randomBytes } from '@aztec/foundation/crypto'; import { openTmpStore } from '@aztec/kv-store/utils'; +import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; import { jest } from '@jest/globals'; @@ -17,7 +18,7 @@ describe.each([ () => ({ database: new InMemoryBrokerDatabase(), cleanup: undefined }), () => { const store = openTmpStore(true); - const database = new KVBrokerDatabase(store); + const database = new KVBrokerDatabase(store, new NoopTelemetryClient()); const cleanup = () => store.close(); return { database, cleanup }; }, @@ -35,7 +36,7 @@ describe.each([ maxRetries = 2; ({ database, cleanup } = createDb()); - broker = new ProvingBroker(database, { + broker = new ProvingBroker(database, new NoopTelemetryClient(), { jobTimeoutMs, timeoutIntervalMs: jobTimeoutMs / 4, maxRetries, @@ -409,7 +410,7 @@ describe.each([ // fake some time passing while the broker restarts await jest.advanceTimersByTimeAsync(10_000); - broker = new ProvingBroker(database); + broker = new ProvingBroker(database, new NoopTelemetryClient()); await broker.start(); await assertJobStatus(job1.id, 'in-queue'); @@ -470,7 +471,7 @@ describe.each([ // fake some time passing while the broker restarts await jest.advanceTimersByTimeAsync(10_000); - broker = new ProvingBroker(database); + broker = new ProvingBroker(database, new NoopTelemetryClient()); await broker.start(); await assertJobStatus(job1.id, 'in-queue'); @@ -521,7 +522,7 @@ describe.each([ // fake some time passing while the broker restarts await jest.advanceTimersByTimeAsync(100 * jobTimeoutMs); - broker = new ProvingBroker(database); + broker = new ProvingBroker(database, new NoopTelemetryClient()); await broker.start(); await assertJobStatus(job1.id, 'in-queue'); diff --git a/yarn-project/prover-client/src/proving_broker/proving_broker.ts b/yarn-project/prover-client/src/proving_broker/proving_broker.ts index 62667821ec7..1c73b62b84a 100644 --- a/yarn-project/prover-client/src/proving_broker/proving_broker.ts +++ b/yarn-project/prover-client/src/proving_broker/proving_broker.ts @@ -12,10 +12,13 @@ import { import { createDebugLogger } from '@aztec/foundation/log'; import { type PromiseWithResolvers, RunningPromise, promiseWithResolvers } from '@aztec/foundation/promise'; import { PriorityMemoryQueue } from '@aztec/foundation/queue'; +import { Timer } from '@aztec/foundation/timer'; +import { type TelemetryClient } from '@aztec/telemetry-client'; import assert from 'assert'; import { type ProvingBrokerDatabase } from './proving_broker_database.js'; +import { type MonitorCallback, ProvingBrokerInstrumentation } from './proving_broker_instrumentation.js'; type InProgressMetadata = { id: ProvingJobId; @@ -58,6 +61,9 @@ export class ProvingBroker implements ProvingJobProducer, ProvingJobConsumer { // as above, but for results private resultsCache = new Map(); + // tracks when each job was enqueued + private enqueuedAt = new Map(); + // keeps track of which jobs are currently being processed // in the 
event of a crash this information is lost, but that's ok // the next time the broker starts it will recreate jobsCache and still @@ -75,18 +81,37 @@ export class ProvingBroker implements ProvingJobProducer, ProvingJobConsumer { private jobTimeoutMs: number; private maxRetries: number; + private instrumentation: ProvingBrokerInstrumentation; + public constructor( private database: ProvingBrokerDatabase, - { jobTimeoutMs = 30, timeoutIntervalMs = 10, maxRetries = 3 }: ProofRequestBrokerConfig = {}, + client: TelemetryClient, + { jobTimeoutMs = 30_000, timeoutIntervalMs = 10_000, maxRetries = 3 }: ProofRequestBrokerConfig = {}, private logger = createDebugLogger('aztec:prover-client:proving-broker'), ) { + this.instrumentation = new ProvingBrokerInstrumentation(client); this.timeoutPromise = new RunningPromise(this.timeoutCheck, timeoutIntervalMs); this.jobTimeoutMs = jobTimeoutMs; this.maxRetries = maxRetries; } - // eslint-disable-next-line require-await - public async start(): Promise { + private measureQueueDepth: MonitorCallback = (type: ProvingRequestType) => { + return this.queues[type].length(); + }; + + private countActiveJobs: MonitorCallback = (type: ProvingRequestType) => { + let count = 0; + for (const { id } of this.inProgress.values()) { + const job = this.jobsCache.get(id); + if (job?.type === type) { + count++; + } + } + + return count; + }; + + public start(): Promise { for (const [item, result] of this.database.allProvingJobs()) { this.logger.info(`Restoring proving job id=${item.id} settled=${!!result}`); @@ -103,6 +128,11 @@ export class ProvingBroker implements ProvingJobProducer, ProvingJobConsumer { } this.timeoutPromise.start(); + + this.instrumentation.monitorQueueDepth(this.measureQueueDepth); + this.instrumentation.monitorActiveJobs(this.countActiveJobs); + + return Promise.resolve(); } public stop(): Promise { @@ -187,6 +217,10 @@ export class ProvingBroker implements ProvingJobProducer, ProvingJobConsumer { startedAt: time, lastUpdatedAt: time, }); + const enqueuedAt = this.enqueuedAt.get(job.id); + if (enqueuedAt) { + this.instrumentation.recordJobWait(job.type, enqueuedAt); + } return { job, time }; } @@ -216,6 +250,7 @@ export class ProvingBroker implements ProvingJobProducer, ProvingJobConsumer { this.logger.info(`Retrying proving job id=${id} type=${ProvingRequestType[item.type]} retry=${retries + 1}`); this.retries.set(id, retries + 1); this.enqueueJobInternal(item); + this.instrumentation.incRetriedJobs(item.type); return; } @@ -228,6 +263,11 @@ export class ProvingBroker implements ProvingJobProducer, ProvingJobConsumer { const result: ProvingJobSettledResult = { status: 'rejected', reason: String(err) }; this.resultsCache.set(id, result); this.promises.get(id)!.resolve(result); + this.instrumentation.incRejectedJobs(item.type); + if (info) { + const duration = this.timeSource() - info.startedAt; + this.instrumentation.recordJobDuration(item.type, duration * 1000); + } } reportProvingJobProgress( @@ -303,6 +343,7 @@ export class ProvingBroker implements ProvingJobProducer, ProvingJobConsumer { const result: ProvingJobSettledResult = { status: 'fulfilled', value }; this.resultsCache.set(id, result); this.promises.get(id)!.resolve(result); + this.instrumentation.incResolvedJobs(item.type); } private timeoutCheck = () => { @@ -320,6 +361,7 @@ export class ProvingBroker implements ProvingJobProducer, ProvingJobConsumer { this.logger.warn(`Proving job id=${id} timed out. 
Adding it back to the queue.`); this.inProgress.delete(id); this.enqueueJobInternal(item); + this.instrumentation.incTimedOutJobs(item.type); } } }; @@ -329,6 +371,7 @@ export class ProvingBroker implements ProvingJobProducer, ProvingJobConsumer { this.promises.set(job.id, promiseWithResolvers()); } this.queues[job.type].put(job); + this.enqueuedAt.set(job.id, new Timer()); this.logger.debug(`Enqueued new proving job id=${job.id}`); } } diff --git a/yarn-project/prover-client/src/proving_broker/proving_broker_database/persisted.ts b/yarn-project/prover-client/src/proving_broker/proving_broker_database/persisted.ts index 909b2d6e4e1..61ca5232015 100644 --- a/yarn-project/prover-client/src/proving_broker/proving_broker_database/persisted.ts +++ b/yarn-project/prover-client/src/proving_broker/proving_broker_database/persisted.ts @@ -1,14 +1,29 @@ import { type ProofUri, ProvingJob, type ProvingJobId, ProvingJobSettledResult } from '@aztec/circuit-types'; import { jsonParseWithSchema, jsonStringify } from '@aztec/foundation/json-rpc'; import { type AztecKVStore, type AztecMap } from '@aztec/kv-store'; +import { LmdbMetrics, Metrics, type TelemetryClient } from '@aztec/telemetry-client'; import { type ProvingBrokerDatabase } from '../proving_broker_database.js'; export class KVBrokerDatabase implements ProvingBrokerDatabase { private jobs: AztecMap; private jobResults: AztecMap; - - constructor(private store: AztecKVStore) { + private metrics: LmdbMetrics; + + constructor(private store: AztecKVStore, client: TelemetryClient) { + this.metrics = new LmdbMetrics( + client.getMeter('KVBrokerDatabase'), + { + name: Metrics.PROVING_QUEUE_DB_MAP_SIZE, + description: 'Database map size for the proving broker', + }, + { + name: Metrics.PROVING_QUEUE_DB_USED_SIZE, + description: 'Database used size for the proving broker', + }, + { name: Metrics.PROVING_QUEUE_DB_NUM_ITEMS, description: 'Number of items in the broker database' }, + () => store.estimateSize(), + ); this.jobs = store.openMap('proving_jobs'); this.jobResults = store.openMap('proving_job_results'); } diff --git a/yarn-project/prover-client/src/proving_broker/proving_broker_instrumentation.ts b/yarn-project/prover-client/src/proving_broker/proving_broker_instrumentation.ts new file mode 100644 index 00000000000..2379bdd8a32 --- /dev/null +++ b/yarn-project/prover-client/src/proving_broker/proving_broker_instrumentation.ts @@ -0,0 +1,130 @@ +import { ProvingRequestType } from '@aztec/circuit-types'; +import { type Timer } from '@aztec/foundation/timer'; +import { + Attributes, + type Histogram, + Metrics, + type ObservableGauge, + type ObservableResult, + type TelemetryClient, + type UpDownCounter, + ValueType, + millisecondBuckets, +} from '@aztec/telemetry-client'; + +export type MonitorCallback = (proofType: ProvingRequestType) => number; + +export class ProvingBrokerInstrumentation { + private queueSize: ObservableGauge; + private activeJobs: ObservableGauge; + private resolvedJobs: UpDownCounter; + private rejectedJobs: UpDownCounter; + private timedOutJobs: UpDownCounter; + private jobWait: Histogram; + private jobDuration: Histogram; + private retriedJobs: UpDownCounter; + + constructor(client: TelemetryClient, name = 'ProvingBroker') { + const meter = client.getMeter(name); + + this.queueSize = meter.createObservableGauge(Metrics.PROVING_QUEUE_SIZE, { + valueType: ValueType.INT, + }); + + this.activeJobs = meter.createObservableGauge(Metrics.PROVING_QUEUE_ACTIVE_JOBS, { + valueType: ValueType.INT, + }); + + this.resolvedJobs = 
meter.createUpDownCounter(Metrics.PROVING_QUEUE_RESOLVED_JOBS, { + valueType: ValueType.INT, + }); + + this.rejectedJobs = meter.createUpDownCounter(Metrics.PROVING_QUEUE_REJECTED_JOBS, { + valueType: ValueType.INT, + }); + + this.retriedJobs = meter.createUpDownCounter(Metrics.PROVING_QUEUE_RETRIED_JOBS, { + valueType: ValueType.INT, + }); + + this.timedOutJobs = meter.createUpDownCounter(Metrics.PROVING_QUEUE_TIMED_OUT_JOBS, { + valueType: ValueType.INT, + }); + + this.jobWait = meter.createHistogram(Metrics.PROVING_QUEUE_JOB_WAIT, { + description: 'Records how long a job sits in the queue', + unit: 'ms', + valueType: ValueType.INT, + advice: { + explicitBucketBoundaries: millisecondBuckets(1), // 10ms -> ~327s + }, + }); + + this.jobDuration = meter.createHistogram(Metrics.PROVING_QUEUE_JOB_DURATION, { + description: 'Records how long a job takes to complete', + unit: 'ms', + valueType: ValueType.INT, + advice: { + explicitBucketBoundaries: millisecondBuckets(1), // 10ms -> ~327s + }, + }); + } + + monitorQueueDepth(fn: MonitorCallback) { + this.queueSize.addCallback(obs => this.observe(obs, fn)); + } + + monitorActiveJobs(fn: MonitorCallback) { + this.activeJobs.addCallback(obs => this.observe(obs, fn)); + } + + incResolvedJobs(proofType: ProvingRequestType) { + this.resolvedJobs.add(1, { + [Attributes.PROVING_JOB_TYPE]: ProvingRequestType[proofType], + }); + } + + incRejectedJobs(proofType: ProvingRequestType) { + this.rejectedJobs.add(1, { + [Attributes.PROVING_JOB_TYPE]: ProvingRequestType[proofType], + }); + } + + incRetriedJobs(proofType: ProvingRequestType) { + this.retriedJobs.add(1, { + [Attributes.PROVING_JOB_TYPE]: ProvingRequestType[proofType], + }); + } + + incTimedOutJobs(proofType: ProvingRequestType) { + this.timedOutJobs.add(1, { + [Attributes.PROVING_JOB_TYPE]: ProvingRequestType[proofType], + }); + } + + recordJobWait(proofType: ProvingRequestType, msOrTimer: Timer | number) { + const duration = typeof msOrTimer === 'number' ? msOrTimer : Math.floor(msOrTimer.ms()); + this.jobWait.record(duration, { + [Attributes.PROVING_JOB_TYPE]: ProvingRequestType[proofType], + }); + } + + recordJobDuration(proofType: ProvingRequestType, msOrTimer: Timer | number) { + const duration = typeof msOrTimer === 'number' ? 
msOrTimer : Math.floor(msOrTimer.ms()); + this.jobDuration.record(duration, { + [Attributes.PROVING_JOB_TYPE]: ProvingRequestType[proofType], + }); + } + + private observe(obs: ObservableResult, fn: MonitorCallback) { + for (const proofType of Object.values(ProvingRequestType)) { + // a type predicate for TypeScript to recognize that we're only iterating over enum values + if (typeof proofType !== 'number') { + continue; + } + obs.observe(fn(proofType), { + [Attributes.PROVING_JOB_TYPE]: ProvingRequestType[proofType], + }); + } + } +} diff --git a/yarn-project/prover-client/src/test/mock_prover.ts b/yarn-project/prover-client/src/test/mock_prover.ts index c0ea23c2643..30a26cd7838 100644 --- a/yarn-project/prover-client/src/test/mock_prover.ts +++ b/yarn-project/prover-client/src/test/mock_prover.ts @@ -43,6 +43,7 @@ import { makeRootRollupPublicInputs, } from '@aztec/circuits.js/testing'; import { times } from '@aztec/foundation/collection'; +import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; import { InlineProofStore, type ProofStore } from '../proving_broker/proof_store.js'; import { ProvingAgent } from '../proving_broker/proving_agent.js'; @@ -50,7 +51,7 @@ import { ProvingBroker } from '../proving_broker/proving_broker.js'; import { InMemoryBrokerDatabase } from '../proving_broker/proving_broker_database/memory.js'; export class TestBroker implements ProvingJobProducer { - private broker = new ProvingBroker(new InMemoryBrokerDatabase()); + private broker = new ProvingBroker(new InMemoryBrokerDatabase(), new NoopTelemetryClient()); private agents: ProvingAgent[]; constructor( @@ -58,7 +59,7 @@ export class TestBroker implements ProvingJobProducer { prover: ServerCircuitProver, private proofStore: ProofStore = new InlineProofStore(), ) { - this.agents = times(agentCount, () => new ProvingAgent(this.broker, proofStore, prover)); + this.agents = times(agentCount, () => new ProvingAgent(this.broker, proofStore, prover, new NoopTelemetryClient())); } public async start() { diff --git a/yarn-project/prover-node/src/factory.ts b/yarn-project/prover-node/src/factory.ts index 7190d81ee66..8d2db37c623 100644 --- a/yarn-project/prover-node/src/factory.ts +++ b/yarn-project/prover-node/src/factory.ts @@ -47,7 +47,7 @@ export async function createProverNode( const worldStateSynchronizer = await createWorldStateSynchronizer(worldStateConfig, archiver, telemetry); await worldStateSynchronizer.start(); - const broker = deps.broker ?? (await createAndStartProvingBroker(config)); + const broker = deps.broker ?? (await createAndStartProvingBroker(config, telemetry)); const prover = await createProverClient(config, worldStateSynchronizer, broker, telemetry); // REFACTOR: Move publisher out of sequencer package and into an L1-related package diff --git a/yarn-project/pxe/src/pxe_service/pxe_service.ts b/yarn-project/pxe/src/pxe_service/pxe_service.ts index 201d69ba2b0..eadab26de2d 100644 --- a/yarn-project/pxe/src/pxe_service/pxe_service.ts +++ b/yarn-project/pxe/src/pxe_service/pxe_service.ts @@ -137,6 +137,10 @@ export class PXEService implements PXE { this.log.info('Stopped Synchronizer'); } + isL1ToL2MessageSynced(l1ToL2Message: Fr): Promise { + return this.node.isL1ToL2MessageSynced(l1ToL2Message); + } + /** Returns an estimate of the db size in bytes. 
*/ public estimateDbSize() { return this.db.estimateSize(); diff --git a/yarn-project/pxe/src/pxe_service/test/pxe_service.test.ts b/yarn-project/pxe/src/pxe_service/test/pxe_service.test.ts index 4acbc87e2a4..5899e8af003 100644 --- a/yarn-project/pxe/src/pxe_service/test/pxe_service.test.ts +++ b/yarn-project/pxe/src/pxe_service/test/pxe_service.test.ts @@ -39,6 +39,7 @@ function createPXEService(): Promise { inboxAddress: EthAddress.random(), outboxAddress: EthAddress.random(), feeJuiceAddress: EthAddress.random(), + stakingAssetAddress: EthAddress.random(), feeJuicePortalAddress: EthAddress.random(), governanceAddress: EthAddress.random(), coinIssuerAddress: EthAddress.random(), diff --git a/yarn-project/sequencer-client/src/publisher/config.ts b/yarn-project/sequencer-client/src/publisher/config.ts index 561add17597..367f2aa6677 100644 --- a/yarn-project/sequencer-client/src/publisher/config.ts +++ b/yarn-project/sequencer-client/src/publisher/config.ts @@ -1,4 +1,4 @@ -import { type L1ReaderConfig, NULL_KEY } from '@aztec/ethereum'; +import { type L1ReaderConfig, type L1TxUtilsConfig, NULL_KEY, l1TxUtilsConfigMappings } from '@aztec/ethereum'; import { type ConfigMappingsType, getConfigFromMappings, numberConfigHelper } from '@aztec/foundation/config'; /** @@ -19,12 +19,12 @@ export type TxSenderConfig = L1ReaderConfig & { /** * Configuration of the L1Publisher. */ -export interface PublisherConfig { +export type PublisherConfig = L1TxUtilsConfig & { /** * The interval to wait between publish retries. */ l1PublishRetryIntervalMS: number; -} +}; export const getTxSenderConfigMappings: ( scope: 'PROVER' | 'SEQ', @@ -62,13 +62,16 @@ export function getTxSenderConfigFromEnv(scope: 'PROVER' | 'SEQ'): Omit ConfigMappingsType = scope => ({ +export const getPublisherConfigMappings: ( + scope: 'PROVER' | 'SEQ', +) => ConfigMappingsType = scope => ({ l1PublishRetryIntervalMS: { env: `${scope}_PUBLISH_RETRY_INTERVAL_MS`, parseEnv: (val: string) => +val, defaultValue: 1000, description: 'The interval to wait between publish retries.', }, + ...l1TxUtilsConfigMappings, }); export function getPublisherConfigFromEnv(scope: 'PROVER' | 'SEQ'): PublisherConfig { diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts index d1916020719..cedbfbe0d7d 100644 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts +++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts @@ -1,17 +1,30 @@ import { L2Block } from '@aztec/circuit-types'; import { EthAddress } from '@aztec/circuits.js'; -import { type L1ContractsConfig, getL1ContractsConfigEnvVars } from '@aztec/ethereum'; +import { + type L1ContractsConfig, + type L1TxRequest, + type L1TxUtilsConfig, + defaultL1TxUtilsConfig, + getL1ContractsConfigEnvVars, +} from '@aztec/ethereum'; import { type ViemSignature } from '@aztec/foundation/eth-signature'; import { sleep } from '@aztec/foundation/sleep'; import { RollupAbi } from '@aztec/l1-artifacts'; import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; import { type MockProxy, mock } from 'jest-mock-extended'; -import { type GetTransactionReceiptReturnType, type PrivateKeyAccount } from 'viem'; +import { + type GetTransactionReceiptReturnType, + type PrivateKeyAccount, + type TransactionReceipt, + encodeFunctionData, +} from 'viem'; import { type PublisherConfig, type TxSenderConfig } from './config.js'; import { L1Publisher } from './l1-publisher.js'; +const mockRollupAddress = 
'0xcafe';
+
 interface MockPublicClient {
   getTransactionReceipt: ({ hash }: { hash: '0x${string}' }) => Promise<GetTransactionReceiptReturnType>;
   getBlock(): Promise<{ timestamp: bigint }>;
@@ -19,6 +32,13 @@
   estimateGas: ({ to, data }: { to: '0x${string}'; data: '0x${string}' }) => Promise<bigint>;
 }

+interface MockL1TxUtils {
+  sendAndMonitorTransaction: (
+    request: L1TxRequest,
+    _gasConfig?: Partial<L1TxUtilsConfig>,
+  ) => Promise<TransactionReceipt>;
+}
+
 interface MockRollupContractWrite {
   propose: (
     args: readonly [`0x${string}`, `0x${string}`] | readonly [`0x${string}`, `0x${string}`, `0x${string}`],
@@ -42,6 +62,9 @@ interface MockRollupContractRead {
 class MockRollupContract {
   constructor(public write: MockRollupContractWrite, public read: MockRollupContractRead, public abi = RollupAbi) {}
+  get address() {
+    return mockRollupAddress;
+  }
 }

 describe('L1Publisher', () => {
@@ -50,6 +73,7 @@
   let rollupContract: MockRollupContract;
   let publicClient: MockProxy<MockPublicClient>;
+  let l1TxUtils: MockProxy<MockL1TxUtils>;
   let proposeTxHash: `0x${string}`;
   let proposeTxReceipt: GetTransactionReceiptReturnType;
@@ -60,8 +84,6 @@
   let blockHash: Buffer;
   let body: Buffer;

-  let account: PrivateKeyAccount;
-
   let publisher: L1Publisher;

   const GAS_GUESS = 300_000n;
@@ -87,7 +109,7 @@
     rollupContract = new MockRollupContract(rollupContractWrite, rollupContractRead);
     publicClient = mock<MockPublicClient>();
-
+    l1TxUtils = mock<MockL1TxUtils>();
     const config = {
       l1RpcUrl: `http://127.0.0.1:8545`,
       l1ChainId: 1,
@@ -95,26 +117,30 @@
       l1Contracts: { rollupAddress: EthAddress.ZERO.toString() },
       l1PublishRetryIntervalMS: 1,
       ethereumSlotDuration: getL1ContractsConfigEnvVars().ethereumSlotDuration,
-    } as unknown as TxSenderConfig & PublisherConfig & Pick<L1ContractsConfig, 'ethereumSlotDuration'>;
+      ...defaultL1TxUtilsConfig,
+    } as unknown as TxSenderConfig &
+      PublisherConfig &
+      Pick<L1ContractsConfig, 'ethereumSlotDuration'> &
+      L1TxUtilsConfig;

     publisher = new L1Publisher(config, new NoopTelemetryClient());
     (publisher as any)['rollupContract'] = rollupContract;
     (publisher as any)['publicClient'] = publicClient;
-
-    account = (publisher as any)['account'];
+    (publisher as any)['l1TxUtils'] = l1TxUtils;

     rollupContractRead.getCurrentSlot.mockResolvedValue(l2Block.header.globalVariables.slotNumber.toBigInt());
     publicClient.getBlock.mockResolvedValue({ timestamp: 12n });
     publicClient.estimateGas.mockResolvedValue(GAS_GUESS);
+    l1TxUtils.sendAndMonitorTransaction.mockResolvedValue(proposeTxReceipt);
+    (l1TxUtils as any).estimateGas.mockResolvedValue(GAS_GUESS);
   });

   it('publishes and proposes l2 block to l1', async () => {
     rollupContractRead.archive.mockResolvedValue(l2Block.header.lastArchive.root.toString() as `0x${string}`);
     rollupContractWrite.propose.mockResolvedValueOnce(proposeTxHash);
-    publicClient.getTransactionReceipt.mockResolvedValueOnce(proposeTxReceipt);
-
     const result = await publisher.proposeL2Block(l2Block);

     expect(result).toEqual(true);
@@ -133,21 +159,22 @@
       [],
       `0x${body.toString('hex')}`,
     ] as const;
-    expect(rollupContractWrite.propose).toHaveBeenCalledWith(args, {
-      account: account,
-      gas: L1Publisher.PROPOSE_GAS_GUESS + GAS_GUESS,
-    });
-    expect(publicClient.getTransactionReceipt).toHaveBeenCalledWith({ hash: proposeTxHash });
+    expect(l1TxUtils.sendAndMonitorTransaction).toHaveBeenCalledWith(
+      {
+        to: mockRollupAddress,
+        data: encodeFunctionData({ abi: rollupContract.abi, functionName: 'propose', args }),
+      },
+      { fixedGas: GAS_GUESS + L1Publisher.PROPOSE_GAS_GUESS },
+    );
   });

   it('does not retry
if sending a propose tx fails', async () => { rollupContractRead.archive.mockResolvedValue(l2Block.header.lastArchive.root.toString() as `0x${string}`); - rollupContractWrite.propose.mockRejectedValueOnce(new Error()).mockResolvedValueOnce(proposeTxHash); + l1TxUtils.sendAndMonitorTransaction.mockRejectedValueOnce(new Error()).mockResolvedValueOnce(proposeTxReceipt); const result = await publisher.proposeL2Block(l2Block); expect(result).toEqual(false); - expect(rollupContractWrite.propose).toHaveBeenCalledTimes(1); }); it('does not retry if simulating a publish and propose tx fails', async () => { @@ -157,45 +184,20 @@ describe('L1Publisher', () => { await expect(publisher.proposeL2Block(l2Block)).rejects.toThrow(); expect(rollupContractRead.validateHeader).toHaveBeenCalledTimes(1); - expect(rollupContractWrite.propose).toHaveBeenCalledTimes(0); }); it('does not retry if sending a publish and propose tx fails', async () => { rollupContractRead.archive.mockResolvedValue(l2Block.header.lastArchive.root.toString() as `0x${string}`); - rollupContractWrite.propose.mockRejectedValueOnce(new Error()); + l1TxUtils.sendAndMonitorTransaction.mockRejectedValueOnce(new Error()).mockResolvedValueOnce(proposeTxReceipt); const result = await publisher.proposeL2Block(l2Block); expect(result).toEqual(false); - expect(rollupContractWrite.propose).toHaveBeenCalledTimes(1); - }); - - it('retries if fetching the receipt fails (propose)', async () => { - rollupContractRead.archive.mockResolvedValue(l2Block.header.lastArchive.root.toString() as `0x${string}`); - rollupContractWrite.propose.mockResolvedValueOnce(proposeTxHash); - publicClient.getTransactionReceipt.mockRejectedValueOnce(new Error()).mockResolvedValueOnce(proposeTxReceipt); - - const result = await publisher.proposeL2Block(l2Block); - - expect(result).toEqual(true); - expect(publicClient.getTransactionReceipt).toHaveBeenCalledTimes(2); - }); - - it('retries if fetching the receipt fails (publish propose)', async () => { - rollupContractRead.archive.mockResolvedValue(l2Block.header.lastArchive.root.toString() as `0x${string}`); - rollupContractWrite.propose.mockResolvedValueOnce(proposeTxHash as `0x${string}`); - publicClient.getTransactionReceipt.mockRejectedValueOnce(new Error()).mockResolvedValueOnce(proposeTxReceipt); - - const result = await publisher.proposeL2Block(l2Block); - - expect(result).toEqual(true); - expect(publicClient.getTransactionReceipt).toHaveBeenCalledTimes(2); }); it('returns false if publish and propose tx reverts', async () => { rollupContractRead.archive.mockResolvedValue(l2Block.header.lastArchive.root.toString() as `0x${string}`); - rollupContractWrite.propose.mockResolvedValueOnce(proposeTxHash); - publicClient.getTransactionReceipt.mockResolvedValueOnce({ ...proposeTxReceipt, status: 'reverted' }); + l1TxUtils.sendAndMonitorTransaction.mockResolvedValueOnce({ ...proposeTxReceipt, status: 'reverted' }); const result = await publisher.proposeL2Block(l2Block); @@ -205,7 +207,7 @@ describe('L1Publisher', () => { it('returns false if propose tx reverts', async () => { rollupContractRead.archive.mockResolvedValue(l2Block.header.lastArchive.root.toString() as `0x${string}`); - publicClient.getTransactionReceipt.mockResolvedValueOnce({ ...proposeTxReceipt, status: 'reverted' }); + l1TxUtils.sendAndMonitorTransaction.mockResolvedValueOnce({ ...proposeTxReceipt, status: 'reverted' }); const result = await publisher.proposeL2Block(l2Block); @@ -214,8 +216,9 @@ describe('L1Publisher', () => { it('returns false if sending publish 
and progress tx is interrupted', async () => { rollupContractRead.archive.mockResolvedValue(l2Block.header.lastArchive.root.toString() as `0x${string}`); - rollupContractWrite.propose.mockImplementationOnce(() => sleep(10, proposeTxHash) as Promise<`0x${string}`>); - + l1TxUtils.sendAndMonitorTransaction.mockImplementationOnce( + () => sleep(10, proposeTxReceipt) as Promise, + ); const resultPromise = publisher.proposeL2Block(l2Block); publisher.interrupt(); const result = await resultPromise; @@ -226,7 +229,9 @@ describe('L1Publisher', () => { it('returns false if sending propose tx is interrupted', async () => { rollupContractRead.archive.mockResolvedValue(l2Block.header.lastArchive.root.toString() as `0x${string}`); - rollupContractWrite.propose.mockImplementationOnce(() => sleep(10, proposeTxHash) as Promise<`0x${string}`>); + l1TxUtils.sendAndMonitorTransaction.mockImplementationOnce( + () => sleep(10, proposeTxReceipt) as Promise, + ); const resultPromise = publisher.proposeL2Block(l2Block); publisher.interrupt(); diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts index b1e0aa5a50c..d7e139d4dde 100644 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts +++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts @@ -17,7 +17,13 @@ import { type Proof, type RootRollupPublicInputs, } from '@aztec/circuits.js'; -import { type EthereumChain, type L1ContractsConfig, createEthereumChain } from '@aztec/ethereum'; +import { + type EthereumChain, + type L1ContractsConfig, + L1TxUtils, + type L1TxUtilsConfig, + createEthereumChain, +} from '@aztec/ethereum'; import { makeTuple } from '@aztec/foundation/array'; import { areArraysEqual, compactArray, times } from '@aztec/foundation/collection'; import { type Signature } from '@aztec/foundation/eth-signature'; @@ -44,6 +50,7 @@ import { type PublicActions, type PublicClient, type PublicRpcSchema, + type TransactionReceipt, type WalletActions, type WalletClient, type WalletRpcSchema, @@ -161,8 +168,10 @@ export class L1Publisher { public static PROPOSE_GAS_GUESS: bigint = 12_000_000n; public static PROPOSE_AND_CLAIM_GAS_GUESS: bigint = this.PROPOSE_GAS_GUESS + 100_000n; + private readonly l1TxUtils: L1TxUtils; + constructor( - config: TxSenderConfig & PublisherConfig & Pick, + config: TxSenderConfig & PublisherConfig & Pick & L1TxUtilsConfig, client: TelemetryClient, ) { this.sleepTimeMs = config?.l1PublishRetryIntervalMS ?? 60_000; @@ -195,6 +204,8 @@ export class L1Publisher { client: this.walletClient, }); } + + this.l1TxUtils = new L1TxUtils(this.publicClient, this.walletClient, this.log, config); } protected createWalletClient( @@ -296,8 +307,13 @@ export class L1Publisher { } public async getProofClaim(): Promise { - const [epochToProve, basisPointFee, bondAmount, bondProviderHex, proposerClaimantHex] = - await this.rollupContract.read.proofClaim(); + const { + epochToProve, + basisPointFee, + bondAmount, + bondProvider: bondProviderHex, + proposerClaimant: proposerClaimantHex, + } = await this.rollupContract.read.getProofClaim(); const bondProvider = EthAddress.fromString(bondProviderHex); const proposerClaimant = EthAddress.fromString(proposerClaimantHex); @@ -503,36 +519,30 @@ export class L1Publisher { }); this.log.verbose(`Submitting propose transaction`); - - const tx = proofQuote + const result = proofQuote ? 
await this.sendProposeAndClaimTx(proposeTxArgs, proofQuote) : await this.sendProposeTx(proposeTxArgs); - if (!tx) { + if (!result?.receipt) { this.log.info(`Failed to publish block ${block.number} to L1`, ctx); return false; } - const { hash: txHash, args, functionName, gasLimit } = tx; - - const receipt = await this.getTransactionReceipt(txHash); - if (!receipt) { - this.log.info(`Failed to get receipt for tx ${txHash}`, ctx); - return false; - } + const { receipt, args, functionName } = result; // Tx was mined successfully - if (receipt.status) { - const tx = await this.getTransactionStats(txHash); + if (receipt.status === 'success') { + const tx = await this.getTransactionStats(receipt.transactionHash); const stats: L1PublishBlockStats = { - ...pick(receipt, 'gasPrice', 'gasUsed', 'transactionHash'), + gasPrice: receipt.effectiveGasPrice, + gasUsed: receipt.gasUsed, + transactionHash: receipt.transactionHash, ...pick(tx!, 'calldataGas', 'calldataSize', 'sender'), ...block.getStats(), eventName: 'rollup-published-to-l1', }; this.log.info(`Published L2 block to L1 rollup contract`, { ...stats, ...ctx }); this.metrics.recordProcessBlockTx(timer.ms(), stats); - return true; } @@ -541,7 +551,6 @@ export class L1Publisher { const errorMsg = await this.tryGetErrorFromRevertedTx({ args, functionName, - gasLimit, abi: RollupAbi, address: this.rollupContract.address, blockNumber: receipt.blockNumber, @@ -557,7 +566,6 @@ export class L1Publisher { private async tryGetErrorFromRevertedTx(args: { args: any[]; functionName: string; - gasLimit: bigint; abi: any; address: Hex; blockNumber: bigint | undefined; @@ -633,7 +641,7 @@ export class L1Publisher { const { fromBlock, toBlock, publicInputs, proof } = args; // Check that the block numbers match the expected epoch to be proven - const [pending, proven] = await this.rollupContract.read.tips(); + const { pendingBlockNumber: pending, provenBlockNumber: proven } = await this.rollupContract.read.getTips(); if (proven !== BigInt(fromBlock) - 1n) { throw new Error(`Cannot submit epoch proof for ${fromBlock}-${toBlock} as proven block is ${proven}`); } @@ -720,17 +728,25 @@ export class L1Publisher { ] as const; this.log.info(`SubmitEpochProof proofSize=${args.proof.withoutPublicInputs().length} bytes`); - await this.rollupContract.simulate.submitEpochRootProof(txArgs, { account: this.account }); - return await this.rollupContract.write.submitEpochRootProof(txArgs, { account: this.account }); + + const txReceipt = await this.l1TxUtils.sendAndMonitorTransaction({ + to: this.rollupContract.address, + data: encodeFunctionData({ + abi: this.rollupContract.abi, + functionName: 'submitEpochRootProof', + args: txArgs, + }), + }); + + return txReceipt.transactionHash; } catch (err) { this.log.error(`Rollup submit epoch proof failed`, err); return undefined; } } - private async prepareProposeTx(encodedData: L1ProcessArgs, gasGuess: bigint) { - // We have to jump a few hoops because viem is not happy around estimating gas for view functions - const computeTxsEffectsHashGas = await this.publicClient.estimateGas({ + private async prepareProposeTx(encodedData: L1ProcessArgs) { + const computeTxsEffectsHashGas = await this.l1TxUtils.estimateGas(this.account, { to: this.rollupContract.address, data: encodeFunctionData({ abi: this.rollupContract.abi, @@ -744,7 +760,7 @@ export class L1Publisher { // we will fail estimation in the case where we are simulating for the // first ethereum block within our slot (as current time is not in the // slot yet). 
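// ---------------------------------------------------------------------------
// Illustrative aside (standalone sketch, not part of the patch): the hunks
// above replace direct `rollupContract.write.propose(...)` calls with calldata
// that is pre-encoded via viem's `encodeFunctionData` and handed to
// `l1TxUtils.sendAndMonitorTransaction` together with a pinned gas limit. The
// sketch below shows that shape in isolation; `TxSender` is a hypothetical
// stand-in for the real L1TxUtils from `@aztec/ethereum`, and only
// `encodeFunctionData`/`parseAbi` are real viem APIs.
import { encodeFunctionData, parseAbi, type Hex } from 'viem';

interface TxSender {
  sendAndMonitorTransaction(
    request: { to: Hex; data: Hex },
    gasConfig?: { fixedGas?: bigint },
  ): Promise<{ status: 'success' | 'reverted'; transactionHash: Hex }>;
}

const rollupAbi = parseAbi(['function propose(bytes32 archive, bytes body)']);

async function sendPropose(sender: TxSender, rollup: Hex, archive: Hex, body: Hex): Promise<boolean> {
  // Encode once so the exact same bytes back estimation, simulation, and submission.
  const data = encodeFunctionData({ abi: rollupAbi, functionName: 'propose', args: [archive, body] });
  // Pinning `fixedGas` skips re-estimation, mirroring how the publisher reuses its propose gas guess.
  const receipt = await sender.sendAndMonitorTransaction({ to: rollup, data }, { fixedGas: 12_000_000n });
  return receipt.status === 'success';
}
// ---------------------------------------------------------------------------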
- const gasGuesstimate = computeTxsEffectsHashGas + gasGuess; + const gasGuesstimate = computeTxsEffectsHashGas + L1Publisher.PROPOSE_GAS_GUESS; const attestations = encodedData.attestations ? encodedData.attestations.map(attest => attest.toViemSignature()) @@ -766,7 +782,7 @@ export class L1Publisher { `0x${encodedData.body.toString('hex')}`, ] as const; - return { args, gasGuesstimate }; + return { args, gas: gasGuesstimate }; } private getSubmitEpochProofArgs(args: { @@ -797,25 +813,34 @@ export class L1Publisher { private async sendProposeTx( encodedData: L1ProcessArgs, - ): Promise<{ hash: string; args: any; functionName: string; gasLimit: bigint } | undefined> { + ): Promise<{ receipt: TransactionReceipt; args: any; functionName: string } | undefined> { if (this.interrupted) { return undefined; } try { - const { args, gasGuesstimate } = await this.prepareProposeTx(encodedData, L1Publisher.PROPOSE_GAS_GUESS); - + const { args, gas } = await this.prepareProposeTx(encodedData); + const receipt = await this.l1TxUtils.sendAndMonitorTransaction( + { + to: this.rollupContract.address, + data: encodeFunctionData({ + abi: this.rollupContract.abi, + functionName: 'propose', + args, + }), + }, + { + fixedGas: gas, + }, + ); return { - hash: await this.rollupContract.write.propose(args, { - account: this.account, - gas: gasGuesstimate, - }), + receipt, args, functionName: 'propose', - gasLimit: gasGuesstimate, }; } catch (err) { prettyLogViemError(err, this.log); - this.log.error(`Rollup publish failed`, err); + const errorMessage = err instanceof Error ? err.message : String(err); + this.log.error(`Rollup publish failed`, errorMessage); return undefined; } } @@ -823,30 +848,36 @@ export class L1Publisher { private async sendProposeAndClaimTx( encodedData: L1ProcessArgs, quote: EpochProofQuote, - ): Promise<{ hash: string; args: any; functionName: string; gasLimit: bigint } | undefined> { + ): Promise<{ receipt: TransactionReceipt; args: any; functionName: string } | undefined> { if (this.interrupted) { return undefined; } try { - const { args, gasGuesstimate } = await this.prepareProposeTx( - encodedData, - L1Publisher.PROPOSE_AND_CLAIM_GAS_GUESS, - ); this.log.info(`ProposeAndClaim`); this.log.info(inspect(quote.payload)); + const { args, gas } = await this.prepareProposeTx(encodedData); + const receipt = await this.l1TxUtils.sendAndMonitorTransaction( + { + to: this.rollupContract.address, + data: encodeFunctionData({ + abi: this.rollupContract.abi, + functionName: 'proposeAndClaim', + args: [...args, quote.toViemArgs()], + }), + }, + { fixedGas: gas }, + ); + return { - hash: await this.rollupContract.write.proposeAndClaim([...args, quote.toViemArgs()], { - account: this.account, - gas: gasGuesstimate, - }), - functionName: 'proposeAndClaim', + receipt, args, - gasLimit: gasGuesstimate, + functionName: 'proposeAndClaim', }; } catch (err) { prettyLogViemError(err, this.log); - this.log.error(`Rollup publish failed`, err); + const errorMessage = err instanceof Error ? 
err.message : String(err); + this.log.error(`Rollup publish failed`, errorMessage); return undefined; } } diff --git a/yarn-project/simulator/src/avm/avm_memory_types.ts b/yarn-project/simulator/src/avm/avm_memory_types.ts index 3acd3160083..974719f6a10 100644 --- a/yarn-project/simulator/src/avm/avm_memory_types.ts +++ b/yarn-project/simulator/src/avm/avm_memory_types.ts @@ -259,7 +259,7 @@ export class TaggedMemory implements TaggedMemoryInterface { public getAs(offset: number): T { assert(offset < TaggedMemory.MAX_MEMORY_SIZE); const word = this._mem[offset]; - TaggedMemory.log.debug(`get(${offset}) = ${word}`); + TaggedMemory.log.trace(`get(${offset}) = ${word}`); if (word === undefined) { TaggedMemory.log.debug(`WARNING: Memory at offset ${offset} is undefined!`); return new Field(0) as T; @@ -270,7 +270,7 @@ export class TaggedMemory implements TaggedMemoryInterface { public getSlice(offset: number, size: number): MemoryValue[] { assert(offset + size <= TaggedMemory.MAX_MEMORY_SIZE); const value = this._mem.slice(offset, offset + size); - TaggedMemory.log.debug(`getSlice(${offset}, ${size}) = ${value}`); + TaggedMemory.log.trace(`getSlice(${offset}, ${size}) = ${value}`); for (let i = 0; i < value.length; i++) { if (value[i] === undefined) { value[i] = new Field(0); @@ -293,7 +293,7 @@ export class TaggedMemory implements TaggedMemoryInterface { public set(offset: number, v: MemoryValue) { assert(offset < TaggedMemory.MAX_MEMORY_SIZE); this._mem[offset] = v; - TaggedMemory.log.debug(`set(${offset}, ${v})`); + TaggedMemory.log.trace(`set(${offset}, ${v})`); } public setSlice(offset: number, vs: MemoryValue[]) { @@ -303,7 +303,7 @@ export class TaggedMemory implements TaggedMemoryInterface { this._mem.length = offset + vs.length; } this._mem.splice(offset, vs.length, ...vs); - TaggedMemory.log.debug(`setSlice(${offset}, ${vs})`); + TaggedMemory.log.trace(`setSlice(${offset}, ${vs})`); } public getTag(offset: number): TypeTag { diff --git a/yarn-project/simulator/src/avm/avm_simulator.ts b/yarn-project/simulator/src/avm/avm_simulator.ts index 643fae72da0..480d668959f 100644 --- a/yarn-project/simulator/src/avm/avm_simulator.ts +++ b/yarn-project/simulator/src/avm/avm_simulator.ts @@ -55,7 +55,8 @@ export class AvmSimulator { `Cannot allocate more than ${MAX_L2_GAS_PER_ENQUEUED_CALL} to the AVM for execution of an enqueued call`, ); this.log = createDebugLogger(`aztec:avm_simulator:core(f:${context.environment.functionSelector.toString()})`); - if (process.env.LOG_LEVEL === 'debug') { + // TODO(palla/log): Should tallies be printed on debug, or only on trace? 
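// ---------------------------------------------------------------------------
// Illustrative aside (standalone sketch, not part of the patch): the change
// above gates opcode tallying on `this.log.isLevelEnabled('debug')` instead of
// sniffing process.env.LOG_LEVEL, so the check tracks whatever the logger was
// actually configured with. A minimal version of that gating pattern follows;
// `MiniLogger` is a hypothetical stand-in, not the @aztec/foundation/log API.
type LogLevel = 'error' | 'warn' | 'info' | 'verbose' | 'debug' | 'trace';
const LEVEL_ORDER: LogLevel[] = ['error', 'warn', 'info', 'verbose', 'debug', 'trace'];

class MiniLogger {
  constructor(private level: LogLevel = 'info') {}
  // A level is enabled iff it sits at or below the configured threshold.
  isLevelEnabled(level: LogLevel): boolean {
    return LEVEL_ORDER.indexOf(level) <= LEVEL_ORDER.indexOf(this.level);
  }
}

const log = new MiniLogger('debug');
const tallies = new Map<string, number>();
// Wire up the per-instruction tally only when it can ever be printed, so the
// hot interpreter loop pays nothing when debug logging is off.
const tallyInstruction: ((opcode: string) => void) | undefined = log.isLevelEnabled('debug')
  ? opcode => void tallies.set(opcode, (tallies.get(opcode) ?? 0) + 1)
  : undefined;
tallyInstruction?.('ADD');
// ---------------------------------------------------------------------------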
+ if (this.log.isLevelEnabled('debug')) { this.tallyPrintFunction = this.printOpcodeTallies; this.tallyInstructionFunction = this.tallyInstruction; } @@ -144,7 +145,7 @@ export class AvmSimulator { const instrStartGas = machineState.gasLeft; // Save gas before executing instruction (for profiling) const instrPc = machineState.pc; // Save PC before executing instruction (for profiling) - this.log.debug( + this.log.trace( `[PC:${machineState.pc}] [IC:${instrCounter++}] ${instruction.toString()} (gasLeft l2=${ machineState.l2GasLeft } da=${machineState.daGasLeft})`, @@ -185,7 +186,7 @@ export class AvmSimulator { } catch (err: any) { this.log.verbose('Exceptional halt (revert by something other than REVERT opcode)'); if (!(err instanceof AvmExecutionError || err instanceof SideEffectLimitReachedError)) { - this.log.verbose(`Unknown error thrown by AVM: ${err}`); + this.log.error(`Unknown error thrown by AVM: ${err}`); throw err; } diff --git a/yarn-project/simulator/src/avm/avm_tree.ts b/yarn-project/simulator/src/avm/avm_tree.ts index f9cc70f745e..5ac80dd87fd 100644 --- a/yarn-project/simulator/src/avm/avm_tree.ts +++ b/yarn-project/simulator/src/avm/avm_tree.ts @@ -680,7 +680,12 @@ export class EphemeralAvmTree { for (let i = 0; i < siblingPath.length; i++) { // Flip(XOR) the last bit because we are inserting siblings of the leaf const sibIndex = index ^ 1n; - this.updateLeaf(siblingPath[i], sibIndex, this.depth - i); + const node = this.getNode(sibIndex, this.depth - i); + // If we are inserting a sibling path and we already have a branch at that index in our + // ephemeral tree, we should not overwrite it + if (node === undefined || node.tag === TreeType.LEAF) { + this.updateLeaf(siblingPath[i], sibIndex, this.depth - i); + } index >>= 1n; } } diff --git a/yarn-project/telemetry-client/package.json b/yarn-project/telemetry-client/package.json index fdd9d252faf..52702de6db9 100644 --- a/yarn-project/telemetry-client/package.json +++ b/yarn-project/telemetry-client/package.json @@ -7,7 +7,8 @@ "exports": { ".": "./dest/index.js", "./start": "./dest/start.js", - "./noop": "./dest/noop.js" + "./noop": "./dest/noop.js", + "./otel-pino-stream": "./dest/vendor/otel-pino-stream.js" }, "scripts": { "build": "yarn clean && tsc -b", @@ -28,21 +29,20 @@ "dependencies": { "@aztec/foundation": "workspace:^", "@opentelemetry/api": "^1.9.0", - "@opentelemetry/api-logs": "^0.54.0", - "@opentelemetry/exporter-logs-otlp-http": "^0.54.0", - "@opentelemetry/exporter-metrics-otlp-http": "^0.52.0", - "@opentelemetry/exporter-trace-otlp-http": "^0.54.0", - "@opentelemetry/host-metrics": "^0.35.2", - "@opentelemetry/otlp-exporter-base": "^0.54.0", - "@opentelemetry/resource-detector-aws": "^1.5.2", - "@opentelemetry/resources": "^1.25.0", - "@opentelemetry/sdk-logs": "^0.54.0", - "@opentelemetry/sdk-metrics": "^1.25.0", - "@opentelemetry/sdk-trace-node": "^1.25.0", - "@opentelemetry/semantic-conventions": "^1.25.0", - "@opentelemetry/winston-transport": "^0.7.0", - "prom-client": "^15.1.3", - "winston": "^3.10.0" + "@opentelemetry/api-logs": "^0.55.0", + "@opentelemetry/core": "^1.28.0", + "@opentelemetry/exporter-logs-otlp-http": "^0.55.0", + "@opentelemetry/exporter-metrics-otlp-http": "^0.55.0", + "@opentelemetry/exporter-trace-otlp-http": "^0.55.0", + "@opentelemetry/host-metrics": "^0.35.4", + "@opentelemetry/otlp-exporter-base": "^0.55.0", + "@opentelemetry/resource-detector-aws": "^1.8.0", + "@opentelemetry/resources": "^1.28.0", + "@opentelemetry/sdk-logs": "^0.55.0", + "@opentelemetry/sdk-metrics": 
"^1.28.0", + "@opentelemetry/sdk-trace-node": "^1.28.0", + "@opentelemetry/semantic-conventions": "^1.28.0", + "prom-client": "^15.1.3" }, "devDependencies": { "@jest/globals": "^29.5.0", diff --git a/yarn-project/telemetry-client/src/config.ts b/yarn-project/telemetry-client/src/config.ts index 58c643c5076..dcb5d8a8a0c 100644 --- a/yarn-project/telemetry-client/src/config.ts +++ b/yarn-project/telemetry-client/src/config.ts @@ -14,17 +14,17 @@ export const telemetryClientConfigMappings: ConfigMappingsType new URL(val), + parseEnv: (val: string) => val && new URL(val), }, tracesCollectorUrl: { env: 'OTEL_EXPORTER_OTLP_TRACES_ENDPOINT', description: 'The URL of the telemetry collector for traces', - parseEnv: (val: string) => new URL(val), + parseEnv: (val: string) => val && new URL(val), }, logsCollectorUrl: { env: 'OTEL_EXPORTER_OTLP_LOGS_ENDPOINT', description: 'The URL of the telemetry collector for logs', - parseEnv: (val: string) => new URL(val), + parseEnv: (val: string) => val && new URL(val), }, serviceName: { env: 'OTEL_SERVICE_NAME', diff --git a/yarn-project/telemetry-client/src/lmdb_metrics.ts b/yarn-project/telemetry-client/src/lmdb_metrics.ts index c8efc91a801..a8e70662d65 100644 --- a/yarn-project/telemetry-client/src/lmdb_metrics.ts +++ b/yarn-project/telemetry-client/src/lmdb_metrics.ts @@ -1,38 +1,47 @@ -import { type Gauge, type Meter, type Metrics, ValueType } from './telemetry.js'; +import { type BatchObservableResult, type Meter, type Metrics, type ObservableGauge, ValueType } from './telemetry.js'; export type LmdbMetricDescriptor = { name: Metrics; description: string; }; +export type LmdbStatsCallback = () => { mappingSize: number; numItems: number; actualSize: number }; + export class LmdbMetrics { - private dbMapSize: Gauge; - private dbUsedSize: Gauge; - private dbNumItems: Gauge; + private dbMapSize: ObservableGauge; + private dbUsedSize: ObservableGauge; + private dbNumItems: ObservableGauge; constructor( meter: Meter, dbMapSizeDescriptor: LmdbMetricDescriptor, dbUsedSizeDescriptor: LmdbMetricDescriptor, dbNumItemsDescriptor: LmdbMetricDescriptor, + private getStats?: LmdbStatsCallback, ) { - this.dbMapSize = meter.createGauge(dbMapSizeDescriptor.name, { + this.dbMapSize = meter.createObservableGauge(dbMapSizeDescriptor.name, { description: dbMapSizeDescriptor.description, valueType: ValueType.INT, }); - this.dbUsedSize = meter.createGauge(dbUsedSizeDescriptor.name, { + this.dbUsedSize = meter.createObservableGauge(dbUsedSizeDescriptor.name, { description: dbUsedSizeDescriptor.description, valueType: ValueType.INT, }); - this.dbNumItems = meter.createGauge(dbNumItemsDescriptor.name, { + this.dbNumItems = meter.createObservableGauge(dbNumItemsDescriptor.name, { description: dbNumItemsDescriptor.description, valueType: ValueType.INT, }); - } - public recordDBMetrics(metrics: { mappingSize: number; numItems: number; actualSize: number }) { - this.dbMapSize.record(metrics.mappingSize); - this.dbNumItems.record(metrics.actualSize); - this.dbUsedSize.record(metrics.actualSize); + meter.addBatchObservableCallback(this.recordDBMetrics, [this.dbMapSize, this.dbUsedSize, this.dbNumItems]); } + + private recordDBMetrics = (observable: BatchObservableResult) => { + if (!this.getStats) { + return; + } + const metrics = this.getStats(); + observable.observe(this.dbMapSize, metrics.mappingSize); + observable.observe(this.dbNumItems, metrics.numItems); + observable.observe(this.dbUsedSize, metrics.actualSize); + }; } diff --git a/yarn-project/telemetry-client/src/metrics.ts 
b/yarn-project/telemetry-client/src/metrics.ts index 853ce0bb58f..d737e6dd863 100644 --- a/yarn-project/telemetry-client/src/metrics.ts +++ b/yarn-project/telemetry-client/src/metrics.ts @@ -79,6 +79,18 @@ export const PROVING_ORCHESTRATOR_BASE_ROLLUP_INPUTS_DURATION = export const PROVING_QUEUE_JOB_SIZE = 'aztec.proving_queue.job_size'; export const PROVING_QUEUE_SIZE = 'aztec.proving_queue.size'; +export const PROVING_QUEUE_ACTIVE_JOBS = 'aztec.proving_queue.active_jobs'; +export const PROVING_QUEUE_RESOLVED_JOBS = 'aztec.proving_queue.resolved_jobs'; +export const PROVING_QUEUE_REJECTED_JOBS = 'aztec.proving_queue.rejected_jobs'; +export const PROVING_QUEUE_RETRIED_JOBS = 'aztec.proving_queue.retried_jobs'; +export const PROVING_QUEUE_TIMED_OUT_JOBS = 'aztec.proving_queue.timed_out_jobs'; +export const PROVING_QUEUE_JOB_WAIT = 'aztec.proving_queue.job_wait'; +export const PROVING_QUEUE_JOB_DURATION = 'aztec.proving_queue.job_duration'; +export const PROVING_QUEUE_DB_NUM_ITEMS = 'aztec.proving_queue.db.num_items'; +export const PROVING_QUEUE_DB_MAP_SIZE = 'aztec.proving_queue.db.map_size'; +export const PROVING_QUEUE_DB_USED_SIZE = 'aztec.proving_queue.db.used_size'; + +export const PROVING_AGENT_IDLE = 'aztec.proving_queue.agent.idle'; export const PROVER_NODE_JOB_DURATION = 'aztec.prover_node.job_duration'; diff --git a/yarn-project/telemetry-client/src/otel.ts b/yarn-project/telemetry-client/src/otel.ts index f94d054cb31..46b0b8d0ff8 100644 --- a/yarn-project/telemetry-client/src/otel.ts +++ b/yarn-project/telemetry-client/src/otel.ts @@ -1,4 +1,4 @@ -import { type DebugLogger } from '@aztec/foundation/log'; +import { type DebugLogger, type LogData, addLogDataHandler } from '@aztec/foundation/log'; import { DiagConsoleLogger, @@ -6,28 +6,23 @@ import { type Meter, type Tracer, type TracerProvider, + context, diag, + isSpanContextValid, + trace, } from '@opentelemetry/api'; import { OTLPMetricExporter } from '@opentelemetry/exporter-metrics-otlp-http'; import { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-http'; import { HostMetrics } from '@opentelemetry/host-metrics'; -import { awsEc2Detector, awsEcsDetector } from '@opentelemetry/resource-detector-aws'; -import { - type IResource, - detectResourcesSync, - envDetectorSync, - osDetectorSync, - processDetectorSync, - serviceInstanceIdDetectorSync, -} from '@opentelemetry/resources'; +import { type IResource } from '@opentelemetry/resources'; import { type LoggerProvider } from '@opentelemetry/sdk-logs'; import { MeterProvider, PeriodicExportingMetricReader } from '@opentelemetry/sdk-metrics'; import { BatchSpanProcessor, NodeTracerProvider } from '@opentelemetry/sdk-trace-node'; -import { SEMRESATTRS_SERVICE_NAME, SEMRESATTRS_SERVICE_VERSION } from '@opentelemetry/semantic-conventions'; +import { ATTR_SERVICE_NAME, ATTR_SERVICE_VERSION } from '@opentelemetry/semantic-conventions'; -import { aztecDetector } from './aztec_resource_detector.js'; import { type TelemetryClientConfig } from './config.js'; -import { registerOtelLoggerProvider } from './otelLoggerProvider.js'; +import { registerOtelLoggerProvider } from './otel_logger_provider.js'; +import { getOtelResource } from './otel_resource.js'; import { type Gauge, type TelemetryClient } from './telemetry.js'; export class OpenTelemetryClient implements TelemetryClient { @@ -43,19 +38,33 @@ export class OpenTelemetryClient implements TelemetryClient { ) {} getMeter(name: string): Meter { - return this.meterProvider.getMeter(name, 
this.resource.attributes[SEMRESATTRS_SERVICE_VERSION] as string); + return this.meterProvider.getMeter(name, this.resource.attributes[ATTR_SERVICE_VERSION] as string); } getTracer(name: string): Tracer { - return this.traceProvider.getTracer(name, this.resource.attributes[SEMRESATTRS_SERVICE_VERSION] as string); + return this.traceProvider.getTracer(name, this.resource.attributes[ATTR_SERVICE_VERSION] as string); } public start() { this.log.info('Starting OpenTelemetry client'); diag.setLogger(new DiagConsoleLogger(), DiagLogLevel.INFO); + // Add a callback to the logger to set context data from current trace + // Adapted from open-telemetry/opentelemetry-js-contrib PinoInstrumentation._getMixinFunction + addLogDataHandler((data: LogData) => { + const spanContext = trace.getSpan(context.active())?.spanContext(); + return spanContext && isSpanContextValid(spanContext) + ? { + ...data, + ['trace_id']: spanContext.traceId, + ['span_id']: spanContext.spanId, + ['trace_flags']: `0${spanContext.traceFlags.toString(16)}`, + } + : data; + }); + this.hostMetrics = new HostMetrics({ - name: this.resource.attributes[SEMRESATTRS_SERVICE_NAME] as string, + name: this.resource.attributes[ATTR_SERVICE_NAME] as string, meterProvider: this.meterProvider, }); @@ -88,33 +97,16 @@ export class OpenTelemetryClient implements TelemetryClient { } public static async createAndStart(config: TelemetryClientConfig, log: DebugLogger): Promise { - const resource = detectResourcesSync({ - detectors: [ - osDetectorSync, - envDetectorSync, - processDetectorSync, - serviceInstanceIdDetectorSync, - awsEc2Detector, - awsEcsDetector, - aztecDetector, - ], - }); - - if (resource.asyncAttributesPending) { - await resource.waitForAsyncAttributes!(); - } + const resource = await getOtelResource(); + // TODO(palla/log): Should we show traces as logs in stdout when otel collection is disabled? const tracerProvider = new NodeTracerProvider({ resource, + spanProcessors: config.tracesCollectorUrl + ? 
[new BatchSpanProcessor(new OTLPTraceExporter({ url: config.tracesCollectorUrl.href }))] + : [], }); - // optionally push traces to an OTEL collector instance - if (config.tracesCollectorUrl) { - tracerProvider.addSpanProcessor( - new BatchSpanProcessor(new OTLPTraceExporter({ url: config.tracesCollectorUrl.href })), - ); - } - tracerProvider.register(); const meterProvider = new MeterProvider({ @@ -129,7 +121,8 @@ export class OpenTelemetryClient implements TelemetryClient { }), ], }); - const loggerProvider = registerOtelLoggerProvider(resource, config.logsCollectorUrl); + + const loggerProvider = await registerOtelLoggerProvider(resource, config.logsCollectorUrl); const service = new OpenTelemetryClient(resource, meterProvider, tracerProvider, loggerProvider, log); service.start(); diff --git a/yarn-project/telemetry-client/src/otelLoggerProvider.ts b/yarn-project/telemetry-client/src/otel_logger_provider.ts similarity index 85% rename from yarn-project/telemetry-client/src/otelLoggerProvider.ts rename to yarn-project/telemetry-client/src/otel_logger_provider.ts index e5289b606c5..7566520afba 100644 --- a/yarn-project/telemetry-client/src/otelLoggerProvider.ts +++ b/yarn-project/telemetry-client/src/otel_logger_provider.ts @@ -4,7 +4,11 @@ import { CompressionAlgorithm } from '@opentelemetry/otlp-exporter-base'; import { type IResource } from '@opentelemetry/resources'; import { BatchLogRecordProcessor, LoggerProvider } from '@opentelemetry/sdk-logs'; -export function registerOtelLoggerProvider(resource: IResource, otelLogsUrl?: URL) { +import { getOtelResource } from './otel_resource.js'; + +export async function registerOtelLoggerProvider(resource?: IResource, otelLogsUrl?: URL) { + resource ??= await getOtelResource(); + const loggerProvider = new LoggerProvider({ resource }); if (!otelLogsUrl) { // If no URL provided, return it disconnected. @@ -24,7 +28,7 @@ export function registerOtelLoggerProvider(resource: IResource, otelLogsUrl?: UR maxQueueSize: 4096, }), ); - otelLogs.setGlobalLoggerProvider(loggerProvider); + otelLogs.setGlobalLoggerProvider(loggerProvider); return loggerProvider; } diff --git a/yarn-project/telemetry-client/src/otel_resource.ts b/yarn-project/telemetry-client/src/otel_resource.ts new file mode 100644 index 00000000000..3810d3e73be --- /dev/null +++ b/yarn-project/telemetry-client/src/otel_resource.ts @@ -0,0 +1,32 @@ +import { awsEc2Detector, awsEcsDetector } from '@opentelemetry/resource-detector-aws'; +import { + type IResource, + detectResourcesSync, + envDetectorSync, + osDetectorSync, + processDetectorSync, + serviceInstanceIdDetectorSync, +} from '@opentelemetry/resources'; + +import { aztecDetector } from './aztec_resource_detector.js'; + +export async function getOtelResource(): Promise { + // TODO(palla/log): Do we really need *all* this info? 
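// ---------------------------------------------------------------------------
// Illustrative aside (standalone sketch, not part of the patch):
// `detectResourcesSync` merges the attributes produced by every detector, and
// detectors that do async work (like the AWS ones above) return a resource
// whose attributes settle later, leaving `asyncAttributesPending` set until
// `waitForAsyncAttributes()` resolves; that is why the helper awaits before
// returning. Below, a custom DetectorSync in roughly the shape `aztecDetector`
// presumably takes; `service.build` and BUILD_SHA are invented for the example.
import {
  Resource,
  type DetectorSync,
  type IResource,
  detectResourcesSync,
  envDetectorSync,
} from '@opentelemetry/resources';

const buildInfoDetector: DetectorSync = {
  // Static attributes resolve immediately, so this detector never leaves
  // async attributes pending.
  detect: () => new Resource({ 'service.build': process.env.BUILD_SHA ?? 'dev' }),
};

async function detectAll(): Promise<IResource> {
  const resource = detectResourcesSync({ detectors: [envDetectorSync, buildInfoDetector] });
  if (resource.asyncAttributesPending) {
    await resource.waitForAsyncAttributes?.();
  }
  return resource;
}
// ---------------------------------------------------------------------------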
+ const resource = detectResourcesSync({ + detectors: [ + osDetectorSync, + envDetectorSync, + processDetectorSync, + serviceInstanceIdDetectorSync, + awsEc2Detector, + awsEcsDetector, + aztecDetector, + ], + }); + + if (resource.asyncAttributesPending) { + await resource.waitForAsyncAttributes!(); + } + + return resource; +} diff --git a/yarn-project/telemetry-client/src/telemetry.ts b/yarn-project/telemetry-client/src/telemetry.ts index a481690f155..60e55b8b1c6 100644 --- a/yarn-project/telemetry-client/src/telemetry.ts +++ b/yarn-project/telemetry-client/src/telemetry.ts @@ -1,9 +1,13 @@ import { type AttributeValue, + type BatchObservableCallback, type MetricOptions, + type Observable, + type BatchObservableResult as OtelBatchObservableResult, type Gauge as OtelGauge, type Histogram as OtelHistogram, type ObservableGauge as OtelObservableGauge, + type ObservableResult as OtelObservableResult, type ObservableUpDownCounter as OtelObservableUpDownCounter, type UpDownCounter as OtelUpDownCounter, type Span, @@ -31,6 +35,8 @@ export type Histogram = OtelHistogram; export type UpDownCounter = OtelUpDownCounter; export type ObservableGauge = OtelObservableGauge; export type ObservableUpDownCounter = OtelObservableUpDownCounter; +export type ObservableResult = OtelObservableResult; +export type BatchObservableResult = OtelBatchObservableResult; export { Tracer }; @@ -53,6 +59,16 @@ export interface Meter { */ createObservableGauge(name: Metrics, options?: MetricOptions): ObservableGauge; + addBatchObservableCallback( + callback: BatchObservableCallback, + observables: Observable[], + ): void; + + removeBatchObservableCallback( + callback: BatchObservableCallback, + observables: Observable[], + ): void; + /** * Creates a new histogram instrument. A histogram is a metric that samples observations (usually things like request durations or response sizes) and counts them in configurable buckets. * @param name - The name of the histogram diff --git a/yarn-project/telemetry-client/src/vendor/otel-pino-stream.ts b/yarn-project/telemetry-client/src/vendor/otel-pino-stream.ts new file mode 100644 index 00000000000..5d2c053551a --- /dev/null +++ b/yarn-project/telemetry-client/src/vendor/otel-pino-stream.ts @@ -0,0 +1,280 @@ +/* + * Adapted from open-telemetry/opentelemetry-js-contrib + * All changes are prefixed with [aztec] to make them easy to identify + * + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { type Logger, SeverityNumber, logs } from '@opentelemetry/api-logs'; +import { millisToHrTime } from '@opentelemetry/core'; +import { Writable } from 'stream'; + +import { registerOtelLoggerProvider } from '../otel_logger_provider.js'; + +/* eslint-disable @typescript-eslint/ban-types */ +/* eslint-disable camelcase */ + +// This block is a copy (modulo code style and TypeScript types) of the Pino +// code that defines log level value and names. This file is part of +// *instrumenting* Pino, so we want to avoid a dependency on the library. 
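The `telemetry.ts` hunk above widens the wrapped `Meter` interface with OpenTelemetry's batch-observable API, which lets several observable instruments be fed from a single callback. A sketch of how a consumer might use it; the metric names, the cast, and the `readStats` source are invented for illustration:

```typescript
import { type BatchObservableResult, type Meter, type ObservableGauge } from './telemetry.js';

function trackStoreMetrics(meter: Meter, readStats: () => { sizeInBytes: number; numItems: number }) {
  // The Metrics name union is project-specific, hence the illustrative cast.
  const size: ObservableGauge = meter.createObservableGauge('example.store.size' as any);
  const items: ObservableGauge = meter.createObservableGauge('example.store.items' as any);

  // One callback observes both instruments, so both readings come from the
  // same snapshot within a single collection cycle.
  const callback = (result: BatchObservableResult) => {
    const stats = readStats();
    result.observe(size, stats.sizeInBytes);
    result.observe(items, stats.numItems);
  };
  meter.addBatchObservableCallback(callback, [size, items]);

  // Deregistration takes the same callback/instrument pair.
  return () => meter.removeBatchObservableCallback(callback, [size, items]);
}
```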
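In the vendored stream that follows, named pino levels map directly to OTel severities, and any other numeric level is projected linearly onto the eighteen "extra" severity numbers. A worked example of that fallback path, using a hypothetical custom level 35:

```typescript
// A custom level halfway between info (30) and warn (40).
const lvl = 35;
const relativeLevelWeight = (lvl - 10) / (70 - 10); // 25 / 60 ≈ 0.4167
const otelSevIdx = Math.floor(relativeLevelWeight * 18); // floor(7.5) = 7
// EXTRA_SEV_NUMS[7] is SeverityNumber.INFO3, so level 35 maps to INFO3.
// Aztec's own "verbose" level (25) never reaches this projection: it is
// special-cased to SeverityNumber.DEBUG3 in severityNumberFromAztecPinoLevel.
console.log(otelSevIdx); // 7
```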
+const DEFAULT_LEVELS = { + trace: 10, + debug: 20, + info: 30, + warn: 40, + error: 50, + fatal: 60, +}; + +const OTEL_SEV_NUM_FROM_PINO_LEVEL: { [level: number]: SeverityNumber } = { + [DEFAULT_LEVELS.trace]: SeverityNumber.TRACE, + [DEFAULT_LEVELS.debug]: SeverityNumber.DEBUG, + [DEFAULT_LEVELS.info]: SeverityNumber.INFO, + [DEFAULT_LEVELS.warn]: SeverityNumber.WARN, + [DEFAULT_LEVELS.error]: SeverityNumber.ERROR, + [DEFAULT_LEVELS.fatal]: SeverityNumber.FATAL, +}; + +const EXTRA_SEV_NUMS = [ + SeverityNumber.TRACE2, + SeverityNumber.TRACE3, + SeverityNumber.TRACE4, + SeverityNumber.DEBUG2, + SeverityNumber.DEBUG3, + SeverityNumber.DEBUG4, + SeverityNumber.INFO2, + SeverityNumber.INFO3, + SeverityNumber.INFO4, + SeverityNumber.WARN2, + SeverityNumber.WARN3, + SeverityNumber.WARN4, + SeverityNumber.ERROR2, + SeverityNumber.ERROR3, + SeverityNumber.ERROR4, + SeverityNumber.FATAL2, + SeverityNumber.FATAL3, + SeverityNumber.FATAL4, +]; + +function severityNumberFromPinoLevel(lvl: number) { + // Fast common case: one of the known levels + const sev = OTEL_SEV_NUM_FROM_PINO_LEVEL[lvl]; + if (sev !== undefined) { + return sev; + } + + // Otherwise, scale the Pino level range -- 10 (trace) to 70 (fatal+10) + // -- onto the extra OTel severity numbers (TRACE2, TRACE3, ..., FATAL4). + // Values below trace (10) map to SeverityNumber.TRACE2, which may be + // considered a bit weird, but it means the unnumbered levels are always + // just for exactly matching values. + const relativeLevelWeight = (lvl - 10) / (70 - 10); + const otelSevIdx = Math.floor(relativeLevelWeight * EXTRA_SEV_NUMS.length); + const cappedOTelIdx = Math.min(EXTRA_SEV_NUMS.length - 1, Math.max(0, otelSevIdx)); + const otelSevValue = EXTRA_SEV_NUMS[cappedOTelIdx]; + return otelSevValue; +} + +// [aztec] Custom function to map Aztec logging levels to OpenTelemetry severity numbers +function severityNumberFromAztecPinoLevel(lvl: number) { + return ( + OTEL_SEV_NUM_FROM_PINO_LEVEL[lvl] ?? + /* verbose */ (lvl === 25 ? SeverityNumber.DEBUG3 : undefined) ?? + severityNumberFromPinoLevel(lvl) + ); +} + +/** + * Return a function that knows how to convert the "time" field value on a + * Pino log record to an OTel LogRecord timestamp value. + * + * How to convert the serialized "time" on a Pino log record + * depends on the Logger's `Symbol(pino.time)` prop, configurable + * via https://getpino.io/#/docs/api?id=timestamp-boolean-function + * + * For example: + * const logger = pino({timestamp: pino.stdTimeFunctions.isoTime}) + * results in log record entries of the form: + * ,"time":"2024-05-17T22:03:25.969Z" + * `otelTimestampFromTime` will be given the value of the "time" field: + * "2024-05-17T22:03:25.969Z" + * which should be parsed to a number of milliseconds since the epoch. + */ +export function getTimeConverter(pinoLogger: any, pinoMod: any) { + const stdTimeFns = pinoMod.stdTimeFunctions; + const loggerTimeFn = pinoLogger[pinoMod.symbols.timeSym]; + if (loggerTimeFn === stdTimeFns.epochTime) { + return (time: number) => time; + } else if (loggerTimeFn === stdTimeFns.unixTime) { + return (time: number) => time * 1e3; + } else if (loggerTimeFn === stdTimeFns.isoTime) { + return (time: string) => new Date(time).getTime(); + } else if (loggerTimeFn === stdTimeFns.nullTime) { + return () => Date.now(); + } else { + // The logger has a custom time function. Don't guess. 
+ return () => NaN; + } +} + +interface OTelPinoStreamOptions { + messageKey: string; + levels: any; // Pino.LevelMapping + otelTimestampFromTime: (time: any) => number; +} + +/** + * A Pino stream for sending records to the OpenTelemetry Logs API. + * + * - This stream emits an 'unknown' event on an unprocessable pino record. + * The event arguments are: `logLine: string`, `err: string | Error`. + */ +export class OTelPinoStream extends Writable { + private _otelLogger: Logger; + private _messageKey: string; + private _levels; + private _otelTimestampFromTime; + + constructor(options: OTelPinoStreamOptions) { + super(); + + // Note: A PINO_CONFIG event was added to pino (2024-04-04) to send config + // to transports. Eventually OTelPinoStream might be able to use this + // for auto-configuration in newer pino versions. The event currently does + // not include the `timeSym` value that is needed here, however. + this._messageKey = options.messageKey; + this._levels = options.levels; + + // [aztec] The following will break if we set up a custom time function in our logger + this._otelTimestampFromTime = options.otelTimestampFromTime ?? ((time: number) => time); + + // Cannot use `instrumentation.logger` until we have a delegating LoggerProvider: + // https://github.com/open-telemetry/opentelemetry-js/issues/4399 + // [aztec] Use the name of this package + this._otelLogger = logs.getLogger('@aztec/telemetry-client/otel-pino-stream', '0.1.0'); + } + + override _write(s: string, _encoding: string, callback: Function) { + try { + /* istanbul ignore if */ + if (!s) { + return; + } + + // Parse, and handle edge cases similar to how `pino-abstract-transport` does: + // https://github.com/pinojs/pino-abstract-transport/blob/v1.2.0/index.js#L28-L45 + // - Emitting an 'unknown' event on parse error mimics pino-abstract-transport. + let recObj; + try { + recObj = JSON.parse(s); + } catch (parseErr) { + // Invalid JSON suggests a bug in Pino, or a logger configuration bug + // (a bogus `options.timestamp` or serializer). + this.emit('unknown', s.toString(), parseErr); + callback(); + return; + } + /* istanbul ignore if */ + if (recObj === null) { + this.emit('unknown', s.toString(), 'Null value ignored'); + callback(); + return; + } + /* istanbul ignore if */ + if (typeof recObj !== 'object') { + recObj = { + data: recObj, + }; + } + + const { + time, + [this._messageKey]: body, + level, // eslint-disable-line @typescript-eslint/no-unused-vars + + // The typical Pino `hostname` and `pid` fields are removed because they + // are redundant with the OpenTelemetry `host.name` and `process.pid` + // Resource attributes, respectively. This code cannot change the + // LoggerProvider's `resource`, so getting the OpenTelemetry equivalents + // depends on the user using the OpenTelemetry HostDetector and + // ProcessDetector. + // https://getpino.io/#/docs/api?id=opt-base + hostname, // eslint-disable-line @typescript-eslint/no-unused-vars + pid, // eslint-disable-line @typescript-eslint/no-unused-vars + + // The `trace_id` et al fields that may have been added by the + // "log correlation" feature are stripped, because they are redundant. + // trace_id, // eslint-disable-line @typescript-eslint/no-unused-vars + // span_id, // eslint-disable-line @typescript-eslint/no-unused-vars + // trace_flags, // eslint-disable-line @typescript-eslint/no-unused-vars + + // [aztec] They are not redundant, we depend on them for correlation.
+ // The instrumentation package seems to be adding these fields via a custom hook. + // We push them from the logger module in foundation, so we don't want to clear them here. + + ...attributes + } = recObj; + + let timestamp = this._otelTimestampFromTime(time); + if (isNaN(timestamp)) { + attributes['time'] = time; // save the unexpected "time" field to attributes + timestamp = Date.now(); + } + + // This avoids a possible subtle bug when a Pino logger uses + // `time: pino.stdTimeFunctions.unixTime` and logs in the first half-second + // since process start. The rounding involved results in: + // timestamp < performance.timeOrigin + // If that is passed to Logger.emit() it will be misinterpreted by + // `timeInputToHrTime` as a `performance.now()` value. + const timestampHrTime = millisToHrTime(timestamp); + + // Prefer using `stream.lastLevel`, because `recObj.level` can be customized + // to anything via `formatters.level` + // (https://getpino.io/#/docs/api?id=formatters-object). + // const lastLevel = (this as any).lastLevel; + + // [aztec] We do not prefer stream.lastLevel since it's undefined here, as we are running + // on a worker thread, so we use recObj.level because we know that we won't customize it. + const lastLevel = recObj.level; + + const otelRec = { + timestamp: timestampHrTime, + observedTimestamp: timestampHrTime, + severityNumber: severityNumberFromAztecPinoLevel(lastLevel), + severityText: this._levels.labels[lastLevel], + body, + attributes, + }; + + this._otelLogger.emit(otelRec); + } catch (err) { + // [aztec] Log errors to stderr + // eslint-disable-next-line no-console + console.error(`Error in OTelPinoStream: ${err}`); + } + callback(); + } +} + +// [aztec] Default export that loads the resource information and creates a new otel pino stream. +// Invoked by pino when creating a transport in a worker thread out of this stream. +// Note that the original open-telemetry/opentelemetry-js-contrib was set up to run on the main +// nodejs loop, as opposed to in a worker as pino recommends. +export default async function (options: OTelPinoStreamOptions) { + const url = process.env.OTEL_EXPORTER_OTLP_LOGS_ENDPOINT; + // We re-register here because this runs on a worker thread + await registerOtelLoggerProvider(undefined, url ? 
new URL(url) : undefined); + return new OTelPinoStream(options); +} diff --git a/yarn-project/txe/package.json b/yarn-project/txe/package.json index efa06e6bf57..ebae6269f3c 100644 --- a/yarn-project/txe/package.json +++ b/yarn-project/txe/package.json @@ -18,7 +18,7 @@ "formatting": "run -T prettier --check ./src && run -T eslint ./src", "formatting:fix": "run -T eslint --fix ./src && run -T prettier -w ./src", "test": "NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --passWithNoTests", - "dev": "DEBUG='aztec:*,-aztec:avm_simulator:*' LOG_LEVEL=debug node ./dest/bin/index.js", + "dev": "LOG_LEVEL=debug node ./dest/bin/index.js", "start": "node ./dest/bin/index.js" }, "inherits": [ @@ -93,4 +93,4 @@ "engines": { "node": ">=18" } -} +} \ No newline at end of file diff --git a/yarn-project/world-state/src/native/native_world_state_instance.ts b/yarn-project/world-state/src/native/native_world_state_instance.ts index a1fa6baed48..f2af45b2acb 100644 --- a/yarn-project/world-state/src/native/native_world_state_instance.ts +++ b/yarn-project/world-state/src/native/native_world_state_instance.ts @@ -10,7 +10,7 @@ import { NULLIFIER_TREE_HEIGHT, PUBLIC_DATA_TREE_HEIGHT, } from '@aztec/circuits.js'; -import { createDebugLogger, fmtLogData } from '@aztec/foundation/log'; +import { createDebugLogger } from '@aztec/foundation/log'; import { SerialQueue } from '@aztec/foundation/queue'; import { Timer } from '@aztec/foundation/timer'; @@ -203,9 +203,9 @@ export class NativeWorldState implements NativeWorldStateInstance { data['publicDataWritesCount'] = body.publicDataWrites.length; } - this.log.debug(`Calling messageId=${messageId} ${WorldStateMessageType[messageType]} with ${fmtLogData(data)}`); + this.log.trace(`Calling messageId=${messageId} ${WorldStateMessageType[messageType]}`, data); } else { - this.log.debug(`Calling messageId=${messageId} ${WorldStateMessageType[messageType]}`); + this.log.trace(`Calling messageId=${messageId} ${WorldStateMessageType[messageType]}`); } const timer = new Timer(); @@ -248,14 +248,12 @@ export class NativeWorldState implements NativeWorldStateInstance { const response = TypedMessage.fromMessagePack(decodedResponse); const decodingDuration = timer.ms() - callDuration; const totalDuration = timer.ms(); - this.log.debug( - `Call messageId=${messageId} ${WorldStateMessageType[messageType]} took (ms) ${fmtLogData({ - totalDuration, - encodingDuration, - callDuration, - decodingDuration, - })}`, - ); + this.log.trace(`Call messageId=${messageId} ${WorldStateMessageType[messageType]} took (ms)`, { + totalDuration, + encodingDuration, + callDuration, + decodingDuration, + }); if (response.header.requestId !== request.header.messageId) { throw new Error( diff --git a/yarn-project/yarn.lock b/yarn-project/yarn.lock index c53310ab30d..c6c51d2d87d 100644 --- a/yarn-project/yarn.lock +++ b/yarn-project/yarn.lock @@ -260,7 +260,6 @@ __metadata: "@aztec/txe": "workspace:^" "@aztec/types": "workspace:^" "@jest/globals": ^29.5.0 - "@opentelemetry/winston-transport": ^0.7.0 "@types/chalk": ^2.2.0 "@types/jest": ^29.5.0 "@types/koa": ^2.13.6 @@ -273,8 +272,6 @@ __metadata: ts-node: ^10.9.1 typescript: ^5.0.4 viem: ^2.7.15 - winston: ^3.10.0 - winston-daily-rotate-file: ^4.7.1 bin: aztec: ./dest/bin/index.js languageName: unknown @@ -592,7 +589,6 @@ __metadata: viem: ^2.7.15 webpack: ^5.88.2 webpack-cli: ^5.1.4 - winston: ^3.10.0 zod: ^3.23.8 languageName: unknown linkType: soft @@ -666,6 +662,7 @@ __metadata: "@typescript-eslint/eslint-plugin": ^6.2.1 
"@typescript-eslint/parser": ^6.2.1 bn.js: ^5.2.1 + colorette: ^2.0.20 comlink: ^4.4.1 debug: ^4.3.4 detect-node: ^2.1.0 @@ -687,6 +684,8 @@ __metadata: lodash.clonedeepwith: ^4.5.0 memdown: ^6.1.1 pako: ^2.1.0 + pino: ^9.5.0 + pino-pretty: ^13.0.0 prettier: ^2.7.1 sha3: ^2.1.4 supertest: ^6.3.3 @@ -919,11 +918,13 @@ __metadata: libp2p: 1.5.0 semver: ^7.6.0 sha3: ^2.1.4 + snappy: ^7.2.2 ts-node: ^10.9.1 tslib: ^2.4.0 typescript: ^5.0.4 uint8arrays: ^5.0.3 viem: ^2.7.15 + xxhash-wasm: ^1.1.0 languageName: unknown linkType: soft @@ -1198,25 +1199,24 @@ __metadata: "@aztec/foundation": "workspace:^" "@jest/globals": ^29.5.0 "@opentelemetry/api": ^1.9.0 - "@opentelemetry/api-logs": ^0.54.0 - "@opentelemetry/exporter-logs-otlp-http": ^0.54.0 - "@opentelemetry/exporter-metrics-otlp-http": ^0.52.0 - "@opentelemetry/exporter-trace-otlp-http": ^0.54.0 - "@opentelemetry/host-metrics": ^0.35.2 - "@opentelemetry/otlp-exporter-base": ^0.54.0 - "@opentelemetry/resource-detector-aws": ^1.5.2 - "@opentelemetry/resources": ^1.25.0 - "@opentelemetry/sdk-logs": ^0.54.0 - "@opentelemetry/sdk-metrics": ^1.25.0 - "@opentelemetry/sdk-trace-node": ^1.25.0 - "@opentelemetry/semantic-conventions": ^1.25.0 - "@opentelemetry/winston-transport": ^0.7.0 + "@opentelemetry/api-logs": ^0.55.0 + "@opentelemetry/core": ^1.28.0 + "@opentelemetry/exporter-logs-otlp-http": ^0.55.0 + "@opentelemetry/exporter-metrics-otlp-http": ^0.55.0 + "@opentelemetry/exporter-trace-otlp-http": ^0.55.0 + "@opentelemetry/host-metrics": ^0.35.4 + "@opentelemetry/otlp-exporter-base": ^0.55.0 + "@opentelemetry/resource-detector-aws": ^1.8.0 + "@opentelemetry/resources": ^1.28.0 + "@opentelemetry/sdk-logs": ^0.55.0 + "@opentelemetry/sdk-metrics": ^1.28.0 + "@opentelemetry/sdk-trace-node": ^1.28.0 + "@opentelemetry/semantic-conventions": ^1.28.0 "@types/jest": ^29.5.0 jest: ^29.5.0 prom-client: ^15.1.3 ts-node: ^10.9.1 typescript: ^5.0.4 - winston: ^3.10.0 languageName: unknown linkType: soft @@ -1903,13 +1903,6 @@ __metadata: languageName: node linkType: hard -"@colors/colors@npm:1.6.0, @colors/colors@npm:^1.6.0": - version: 1.6.0 - resolution: "@colors/colors@npm:1.6.0" - checksum: aa209963e0c3218e80a4a20553ba8c0fbb6fa13140540b4e5f97923790be06801fc90172c1114fc8b7e888b3d012b67298cde6b9e81521361becfaee400c662f - languageName: node - linkType: hard - "@cspotcode/source-map-support@npm:^0.8.0": version: 0.8.1 resolution: "@cspotcode/source-map-support@npm:0.8.1" @@ -1919,17 +1912,6 @@ __metadata: languageName: node linkType: hard -"@dabh/diagnostics@npm:^2.0.2": - version: 2.0.3 - resolution: "@dabh/diagnostics@npm:2.0.3" - dependencies: - colorspace: 1.1.x - enabled: 2.0.x - kuler: ^2.0.0 - checksum: 4879600c55c8315a0fb85fbb19057bad1adc08f0a080a8cb4e2b63f723c379bfc4283b68123a2b078d367b327dd8df12fcb27464efe791addc0a48b9df6d79a1 - languageName: node - linkType: hard - "@dependents/detective-less@npm:^3.0.1": version: 3.0.2 resolution: "@dependents/detective-less@npm:3.0.2" @@ -3401,6 +3383,97 @@ __metadata: languageName: node linkType: hard +"@napi-rs/snappy-android-arm-eabi@npm:7.2.2": + version: 7.2.2 + resolution: "@napi-rs/snappy-android-arm-eabi@npm:7.2.2" + conditions: os=android & cpu=arm + languageName: node + linkType: hard + +"@napi-rs/snappy-android-arm64@npm:7.2.2": + version: 7.2.2 + resolution: "@napi-rs/snappy-android-arm64@npm:7.2.2" + conditions: os=android & cpu=arm64 + languageName: node + linkType: hard + +"@napi-rs/snappy-darwin-arm64@npm:7.2.2": + version: 7.2.2 + resolution: "@napi-rs/snappy-darwin-arm64@npm:7.2.2" + conditions: 
os=darwin & cpu=arm64 + languageName: node + linkType: hard + +"@napi-rs/snappy-darwin-x64@npm:7.2.2": + version: 7.2.2 + resolution: "@napi-rs/snappy-darwin-x64@npm:7.2.2" + conditions: os=darwin & cpu=x64 + languageName: node + linkType: hard + +"@napi-rs/snappy-freebsd-x64@npm:7.2.2": + version: 7.2.2 + resolution: "@napi-rs/snappy-freebsd-x64@npm:7.2.2" + conditions: os=freebsd & cpu=x64 + languageName: node + linkType: hard + +"@napi-rs/snappy-linux-arm-gnueabihf@npm:7.2.2": + version: 7.2.2 + resolution: "@napi-rs/snappy-linux-arm-gnueabihf@npm:7.2.2" + conditions: os=linux & cpu=arm + languageName: node + linkType: hard + +"@napi-rs/snappy-linux-arm64-gnu@npm:7.2.2": + version: 7.2.2 + resolution: "@napi-rs/snappy-linux-arm64-gnu@npm:7.2.2" + conditions: os=linux & cpu=arm64 + languageName: node + linkType: hard + +"@napi-rs/snappy-linux-arm64-musl@npm:7.2.2": + version: 7.2.2 + resolution: "@napi-rs/snappy-linux-arm64-musl@npm:7.2.2" + conditions: os=linux & cpu=arm64 + languageName: node + linkType: hard + +"@napi-rs/snappy-linux-x64-gnu@npm:7.2.2": + version: 7.2.2 + resolution: "@napi-rs/snappy-linux-x64-gnu@npm:7.2.2" + conditions: os=linux & cpu=x64 + languageName: node + linkType: hard + +"@napi-rs/snappy-linux-x64-musl@npm:7.2.2": + version: 7.2.2 + resolution: "@napi-rs/snappy-linux-x64-musl@npm:7.2.2" + conditions: os=linux & cpu=x64 + languageName: node + linkType: hard + +"@napi-rs/snappy-win32-arm64-msvc@npm:7.2.2": + version: 7.2.2 + resolution: "@napi-rs/snappy-win32-arm64-msvc@npm:7.2.2" + conditions: os=win32 & cpu=arm64 + languageName: node + linkType: hard + +"@napi-rs/snappy-win32-ia32-msvc@npm:7.2.2": + version: 7.2.2 + resolution: "@napi-rs/snappy-win32-ia32-msvc@npm:7.2.2" + conditions: os=win32 & cpu=ia32 + languageName: node + linkType: hard + +"@napi-rs/snappy-win32-x64-msvc@npm:7.2.2": + version: 7.2.2 + resolution: "@napi-rs/snappy-win32-x64-msvc@npm:7.2.2" + conditions: os=win32 & cpu=x64 + languageName: node + linkType: hard + "@noble/ciphers@npm:^0.4.0": version: 0.4.1 resolution: "@noble/ciphers@npm:0.4.1" @@ -3548,37 +3621,28 @@ __metadata: languageName: node linkType: hard -"@opentelemetry/api-logs@npm:0.52.0": - version: 0.52.0 - resolution: "@opentelemetry/api-logs@npm:0.52.0" - dependencies: - "@opentelemetry/api": ^1.0.0 - checksum: 502f60fd3a4b08fb7e54eaf22d0415e34dcbc9995696945eff8a4a12910e933149900cc470fb476b9411b4bbb98f8b598e3f4d4a37137698fcf0a7ea6ab240d6 - languageName: node - linkType: hard - -"@opentelemetry/api-logs@npm:0.54.0, @opentelemetry/api-logs@npm:^0.54.0": - version: 0.54.0 - resolution: "@opentelemetry/api-logs@npm:0.54.0" +"@opentelemetry/api-logs@npm:0.55.0, @opentelemetry/api-logs@npm:^0.55.0": + version: 0.55.0 + resolution: "@opentelemetry/api-logs@npm:0.55.0" dependencies: "@opentelemetry/api": ^1.3.0 - checksum: 5fc91054a290663844049cd9eb66419ea06d191b82220f2513147acdbd82579d1d3703a7e09f58a0014118d52b96d8b6340f9b43dd33a2c4469a31f13b3abc62 + checksum: 07833624711b4146ea4450b4ca714ada33e07a3c354feb4df08e4312a69d9fd200726deb910c8aaba17b13c52645252845ca9aa7113b78d277a806a28d0b2b90 languageName: node linkType: hard -"@opentelemetry/api@npm:^1.0.0, @opentelemetry/api@npm:^1.3.0, @opentelemetry/api@npm:^1.4.0, @opentelemetry/api@npm:^1.9.0": +"@opentelemetry/api@npm:^1.3.0, @opentelemetry/api@npm:^1.4.0, @opentelemetry/api@npm:^1.9.0": version: 1.9.0 resolution: "@opentelemetry/api@npm:1.9.0" checksum: 
9e88e59d53ced668f3daaecfd721071c5b85a67dd386f1c6f051d1be54375d850016c881f656ffbe9a03bedae85f7e89c2f2b635313f9c9b195ad033cdc31020 languageName: node linkType: hard -"@opentelemetry/context-async-hooks@npm:1.25.0": - version: 1.25.0 - resolution: "@opentelemetry/context-async-hooks@npm:1.25.0" +"@opentelemetry/context-async-hooks@npm:1.28.0": + version: 1.28.0 + resolution: "@opentelemetry/context-async-hooks@npm:1.28.0" peerDependencies: "@opentelemetry/api": ">=1.0.0 <1.10.0" - checksum: f50f6ef621b6cfaa1d0919e4470b7c8326371beaf6be9a635c6f3221677bf9f5429a81a29b5518a41d3c002e35d4a89cb748ae61f650d61aa2ae3cbe123c0301 + checksum: 23288e78e25bb8d3af216825f7108a0380044d3ca3d9d427e6a33c8dbea3c67617e5024371190a9f09e171f13c4b40afc9135a807e40e866d9b98227c6b95a89 languageName: node linkType: hard @@ -3593,179 +3657,150 @@ __metadata: languageName: node linkType: hard -"@opentelemetry/core@npm:1.25.1, @opentelemetry/core@npm:^1.0.0": - version: 1.25.1 - resolution: "@opentelemetry/core@npm:1.25.1" +"@opentelemetry/core@npm:1.28.0, @opentelemetry/core@npm:^1.28.0": + version: 1.28.0 + resolution: "@opentelemetry/core@npm:1.28.0" dependencies: - "@opentelemetry/semantic-conventions": 1.25.1 + "@opentelemetry/semantic-conventions": 1.27.0 peerDependencies: "@opentelemetry/api": ">=1.0.0 <1.10.0" - checksum: ba1672fde4a1cfd9b55bf6070db71b808702fe59c4a70cda52a6156b2c813827954a6b4d3c3641283d394ff75a69b6359a0487459b4d26cd7d714ab3d21bc780 + checksum: ed80e0640df8ba8387e6f16ed3242891a08491f93d18106bd02ef0e6e75ad111e5f312ccf412edf8479e5800a6f27101a40d5023bd8f8566213a50a5a83e76ee languageName: node linkType: hard -"@opentelemetry/core@npm:1.27.0": - version: 1.27.0 - resolution: "@opentelemetry/core@npm:1.27.0" +"@opentelemetry/core@npm:^1.0.0": + version: 1.25.1 + resolution: "@opentelemetry/core@npm:1.25.1" dependencies: - "@opentelemetry/semantic-conventions": 1.27.0 + "@opentelemetry/semantic-conventions": 1.25.1 peerDependencies: "@opentelemetry/api": ">=1.0.0 <1.10.0" - checksum: 33ff551f89f0bb95830c9f9464c43b11adf88882ec1d3a03a5b9afcc89d2aafab33c36cb5047f18667d7929d6ab40ed0121649c42d0105f1cb33ffdca48f8b13 + checksum: ba1672fde4a1cfd9b55bf6070db71b808702fe59c4a70cda52a6156b2c813827954a6b4d3c3641283d394ff75a69b6359a0487459b4d26cd7d714ab3d21bc780 languageName: node linkType: hard -"@opentelemetry/exporter-logs-otlp-http@npm:^0.54.0": - version: 0.54.0 - resolution: "@opentelemetry/exporter-logs-otlp-http@npm:0.54.0" +"@opentelemetry/exporter-logs-otlp-http@npm:^0.55.0": + version: 0.55.0 + resolution: "@opentelemetry/exporter-logs-otlp-http@npm:0.55.0" dependencies: - "@opentelemetry/api-logs": 0.54.0 - "@opentelemetry/core": 1.27.0 - "@opentelemetry/otlp-exporter-base": 0.54.0 - "@opentelemetry/otlp-transformer": 0.54.0 - "@opentelemetry/sdk-logs": 0.54.0 + "@opentelemetry/api-logs": 0.55.0 + "@opentelemetry/core": 1.28.0 + "@opentelemetry/otlp-exporter-base": 0.55.0 + "@opentelemetry/otlp-transformer": 0.55.0 + "@opentelemetry/sdk-logs": 0.55.0 peerDependencies: "@opentelemetry/api": ^1.3.0 - checksum: 407cde2dd930aa19c0c826147d15aba84f94a58f1afbf86cfa1c41576be4492b689e1e9c7971a92805b051851cd6fab063bf24f29160b14c2d3b2cf1fded2bec + checksum: f639babc0bc62407577c1e34367a42577d242a43fb82f4c1af1fbb596fa805c6ea1b273bbfd3a49527fecfd51c42b4d43df25c308f592d5fb7dee056920e6297 languageName: node linkType: hard -"@opentelemetry/exporter-metrics-otlp-http@npm:^0.52.0": - version: 0.52.0 - resolution: "@opentelemetry/exporter-metrics-otlp-http@npm:0.52.0" +"@opentelemetry/exporter-metrics-otlp-http@npm:^0.55.0": + 
version: 0.55.0 + resolution: "@opentelemetry/exporter-metrics-otlp-http@npm:0.55.0" dependencies: - "@opentelemetry/core": 1.25.0 - "@opentelemetry/otlp-exporter-base": 0.52.0 - "@opentelemetry/otlp-transformer": 0.52.0 - "@opentelemetry/resources": 1.25.0 - "@opentelemetry/sdk-metrics": 1.25.0 + "@opentelemetry/core": 1.28.0 + "@opentelemetry/otlp-exporter-base": 0.55.0 + "@opentelemetry/otlp-transformer": 0.55.0 + "@opentelemetry/resources": 1.28.0 + "@opentelemetry/sdk-metrics": 1.28.0 peerDependencies: "@opentelemetry/api": ^1.3.0 - checksum: 8438733189879e3162ab4a374d7f22a4f9655257cbcde156f1041954cbc86bfab7299e696df49187684f1c219a76b263e6489c411b7008b81a05d5b0e7dcd92d + checksum: 236713540b5a5d0c6921cf0756ced5ffdd5952a4ec5d15d11363dff8ff4a576857d39007e99da45097b69bd09ae62119b1fa49de65edc1f5fa22037582c43cfe languageName: node linkType: hard -"@opentelemetry/exporter-trace-otlp-http@npm:^0.54.0": - version: 0.54.0 - resolution: "@opentelemetry/exporter-trace-otlp-http@npm:0.54.0" +"@opentelemetry/exporter-trace-otlp-http@npm:^0.55.0": + version: 0.55.0 + resolution: "@opentelemetry/exporter-trace-otlp-http@npm:0.55.0" dependencies: - "@opentelemetry/core": 1.27.0 - "@opentelemetry/otlp-exporter-base": 0.54.0 - "@opentelemetry/otlp-transformer": 0.54.0 - "@opentelemetry/resources": 1.27.0 - "@opentelemetry/sdk-trace-base": 1.27.0 + "@opentelemetry/core": 1.28.0 + "@opentelemetry/otlp-exporter-base": 0.55.0 + "@opentelemetry/otlp-transformer": 0.55.0 + "@opentelemetry/resources": 1.28.0 + "@opentelemetry/sdk-trace-base": 1.28.0 peerDependencies: "@opentelemetry/api": ^1.3.0 - checksum: e53263c3ffcfe62d7d299efac9515a977d284aabc4c89a961cec60853095f24e439abae348c326c7bd88911a85d08dd57833a64769e20254d02df8ac73e9b277 + checksum: e842ea7b24b6db7e0f9adc3ace81f682e7634c3ca25721c63313fea7711cf77085e77111cb113c0b7dad098332aa88a7ae96d7420e371cb1ff1b3116908b750f languageName: node linkType: hard -"@opentelemetry/host-metrics@npm:^0.35.2": - version: 0.35.2 - resolution: "@opentelemetry/host-metrics@npm:0.35.2" +"@opentelemetry/host-metrics@npm:^0.35.4": + version: 0.35.4 + resolution: "@opentelemetry/host-metrics@npm:0.35.4" dependencies: "@opentelemetry/sdk-metrics": ^1.8.0 systeminformation: 5.22.9 peerDependencies: "@opentelemetry/api": ^1.3.0 - checksum: 541df2585f9cbf8b6606f6782a2d351383f7a5b0a92b92ad4011ac46adac513474463d0c2474d6902d9d6d3b633be67c60ea0716ea2de277cebc1cb2538fa7a4 + checksum: d0be6116f5ffb81937820f887721da6a1ae841816d3c98159b94adc6146cf1ad1558527b9ed033b58db5439049bfde076842ac0b8ed57e819664f5a03f9e6c73 languageName: node linkType: hard -"@opentelemetry/otlp-exporter-base@npm:0.52.0": - version: 0.52.0 - resolution: "@opentelemetry/otlp-exporter-base@npm:0.52.0" +"@opentelemetry/otlp-exporter-base@npm:0.55.0, @opentelemetry/otlp-exporter-base@npm:^0.55.0": + version: 0.55.0 + resolution: "@opentelemetry/otlp-exporter-base@npm:0.55.0" dependencies: - "@opentelemetry/core": 1.25.0 - "@opentelemetry/otlp-transformer": 0.52.0 - peerDependencies: - "@opentelemetry/api": ^1.0.0 - checksum: 5230ba86d274f4d05fa2820a21e8278d796a299299e2af96150085c871427fe5ef4c6fa4954cdc1b8cdd0a87d5d6677ca0e547cc51253968572a6ede51f63ea2 - languageName: node - linkType: hard - -"@opentelemetry/otlp-exporter-base@npm:0.54.0, @opentelemetry/otlp-exporter-base@npm:^0.54.0": - version: 0.54.0 - resolution: "@opentelemetry/otlp-exporter-base@npm:0.54.0" - dependencies: - "@opentelemetry/core": 1.27.0 - "@opentelemetry/otlp-transformer": 0.54.0 + "@opentelemetry/core": 1.28.0 + "@opentelemetry/otlp-transformer": 
0.55.0 peerDependencies: "@opentelemetry/api": ^1.3.0 - checksum: ded78325f22cd98314971216eb18d8f021a6cf7f3b1f69d08b0d257880deb2d409d598bfc3a6016b0557a1ec3b0c50527ba9acf09d4e3902f48d003f763441c0 - languageName: node - linkType: hard - -"@opentelemetry/otlp-transformer@npm:0.52.0": - version: 0.52.0 - resolution: "@opentelemetry/otlp-transformer@npm:0.52.0" - dependencies: - "@opentelemetry/api-logs": 0.52.0 - "@opentelemetry/core": 1.25.0 - "@opentelemetry/resources": 1.25.0 - "@opentelemetry/sdk-logs": 0.52.0 - "@opentelemetry/sdk-metrics": 1.25.0 - "@opentelemetry/sdk-trace-base": 1.25.0 - protobufjs: ^7.3.0 - peerDependencies: - "@opentelemetry/api": ">=1.3.0 <1.10.0" - checksum: 5f75f41a710e5e536faecdec7b1687352e450d185d12613bbcbb206570d96ca2833db15e1d7945cb27040a04c017135b07df2f607ccf9ca9a061f86ad87e8c35 + checksum: 1d4806bd7d36565c5165699b98f99cf00a7aaed23938bf039082550b5b2f973cfb056f71a6ec0317b5113647434d17dd48d9ab0b5fd4c7103beef97c8aa3bec6 languageName: node linkType: hard -"@opentelemetry/otlp-transformer@npm:0.54.0": - version: 0.54.0 - resolution: "@opentelemetry/otlp-transformer@npm:0.54.0" +"@opentelemetry/otlp-transformer@npm:0.55.0": + version: 0.55.0 + resolution: "@opentelemetry/otlp-transformer@npm:0.55.0" dependencies: - "@opentelemetry/api-logs": 0.54.0 - "@opentelemetry/core": 1.27.0 - "@opentelemetry/resources": 1.27.0 - "@opentelemetry/sdk-logs": 0.54.0 - "@opentelemetry/sdk-metrics": 1.27.0 - "@opentelemetry/sdk-trace-base": 1.27.0 + "@opentelemetry/api-logs": 0.55.0 + "@opentelemetry/core": 1.28.0 + "@opentelemetry/resources": 1.28.0 + "@opentelemetry/sdk-logs": 0.55.0 + "@opentelemetry/sdk-metrics": 1.28.0 + "@opentelemetry/sdk-trace-base": 1.28.0 protobufjs: ^7.3.0 peerDependencies: "@opentelemetry/api": ^1.3.0 - checksum: 69451290ec2c65ee27f35b29d41a1b961d169ff928d231805c2694cbc4b4bda788027cf8149a6a1325da7c3bc2ca20dc939ef91a4f3e2af481ed187653386610 + checksum: eaca0c0a428e4b5fb24a770d89767d278f1fbfbafcec7e126d5f06fc090ef74af8b5feadc031682749bb9231862c412a7452029d16c281f7a9a3f791130c2ec5 languageName: node linkType: hard -"@opentelemetry/propagator-b3@npm:1.25.0": - version: 1.25.0 - resolution: "@opentelemetry/propagator-b3@npm:1.25.0" +"@opentelemetry/propagator-b3@npm:1.28.0": + version: 1.28.0 + resolution: "@opentelemetry/propagator-b3@npm:1.28.0" dependencies: - "@opentelemetry/core": 1.25.0 + "@opentelemetry/core": 1.28.0 peerDependencies: "@opentelemetry/api": ">=1.0.0 <1.10.0" - checksum: 5e8a0feec400ebb20644ee217f904ec8894ccad49b753e80c5e131a4f3390504ca3fd17de58ff546313dedc6498dbd198ff83acc3d8084a205e1d901cfc0bb2d + checksum: 793812d47fde1cd55239ebc941b2439e18c226c4d29eb92168cce89c9305865a7058f397ff2f7d9ef5dc3d9cd7ac29c52bcb6c7d25947269d3d02c05643e371b languageName: node linkType: hard -"@opentelemetry/propagator-jaeger@npm:1.25.0": - version: 1.25.0 - resolution: "@opentelemetry/propagator-jaeger@npm:1.25.0" +"@opentelemetry/propagator-jaeger@npm:1.28.0": + version: 1.28.0 + resolution: "@opentelemetry/propagator-jaeger@npm:1.28.0" dependencies: - "@opentelemetry/core": 1.25.0 + "@opentelemetry/core": 1.28.0 peerDependencies: "@opentelemetry/api": ">=1.0.0 <1.10.0" - checksum: c652b4285e254041654a5153649f822b8e2eaa526b67e0a8c56c4eb173d9d0d0efa41ffed3f7dcdd1c2c2b85365cd05e001ee145e8701e4af9d7eef79488ca18 + checksum: 887589595a906a309e9962efcbc7940f37b85b6934d4910141de73b034c155d1309d336e259743a23684a7aa4669d6eeee89639ed33f97d1a0e8d8394251857f languageName: node linkType: hard -"@opentelemetry/resource-detector-aws@npm:^1.5.2": - version: 1.5.2 - 
resolution: "@opentelemetry/resource-detector-aws@npm:1.5.2" +"@opentelemetry/resource-detector-aws@npm:^1.8.0": + version: 1.8.0 + resolution: "@opentelemetry/resource-detector-aws@npm:1.8.0" dependencies: "@opentelemetry/core": ^1.0.0 - "@opentelemetry/resources": ^1.0.0 - "@opentelemetry/semantic-conventions": ^1.22.0 + "@opentelemetry/resources": ^1.10.0 + "@opentelemetry/semantic-conventions": ^1.27.0 peerDependencies: "@opentelemetry/api": ^1.0.0 - checksum: c58274117fb6a44593aab1135f11d39770a69a1a14108a826086a36a7108de13d0d9df333cf5533e98d40f751b20d8a3284426bfcd5dcc941157458bbba7fe1c + checksum: 7f393a3b3a9e1c015db188ea4b7ee651c0d7dc196bd574eb6bebec0a7ff93cbd652afcf1cdd02e97e56c0c53b3987487483f73ddd3323f2ba427af5f752ff806 languageName: node linkType: hard -"@opentelemetry/resources@npm:1.25.0, @opentelemetry/resources@npm:^1.25.0": +"@opentelemetry/resources@npm:1.25.0": version: 1.25.0 resolution: "@opentelemetry/resources@npm:1.25.0" dependencies: @@ -3777,57 +3812,44 @@ __metadata: languageName: node linkType: hard -"@opentelemetry/resources@npm:1.27.0": - version: 1.27.0 - resolution: "@opentelemetry/resources@npm:1.27.0" +"@opentelemetry/resources@npm:1.28.0, @opentelemetry/resources@npm:^1.10.0, @opentelemetry/resources@npm:^1.28.0": + version: 1.28.0 + resolution: "@opentelemetry/resources@npm:1.28.0" dependencies: - "@opentelemetry/core": 1.27.0 + "@opentelemetry/core": 1.28.0 "@opentelemetry/semantic-conventions": 1.27.0 peerDependencies: "@opentelemetry/api": ">=1.0.0 <1.10.0" - checksum: 43d298afea7daf7524e6b98c1441bcce9fa73b76aecf17e36cabb1a4cfaae6818acf9759d3e42706b1fd91243644076d2291e78c3ed81641d3b351fcff6cb9a9 + checksum: b5cb13b75e5da1ef306885cef06e68dc41197c0a25f37fc3029941de8912b0efac089b084fd38c2819a70d01c3b70bc781a60f776bb68ec901b9dfd24eb3a834 languageName: node linkType: hard -"@opentelemetry/resources@npm:^1.0.0": - version: 1.25.1 - resolution: "@opentelemetry/resources@npm:1.25.1" +"@opentelemetry/sdk-logs@npm:0.55.0, @opentelemetry/sdk-logs@npm:^0.55.0": + version: 0.55.0 + resolution: "@opentelemetry/sdk-logs@npm:0.55.0" dependencies: - "@opentelemetry/core": 1.25.1 - "@opentelemetry/semantic-conventions": 1.25.1 - peerDependencies: - "@opentelemetry/api": ">=1.0.0 <1.10.0" - checksum: 806e5aabbc93afcab767dc84707f702ca51bbc93e4565eb69a8591ed2fe78439aca19c5ca0d9f044c85ed97b9efb35936fdb65bef01f5f3e68504002c8a07220 - languageName: node - linkType: hard - -"@opentelemetry/sdk-logs@npm:0.52.0": - version: 0.52.0 - resolution: "@opentelemetry/sdk-logs@npm:0.52.0" - dependencies: - "@opentelemetry/api-logs": 0.52.0 - "@opentelemetry/core": 1.25.0 - "@opentelemetry/resources": 1.25.0 + "@opentelemetry/api-logs": 0.55.0 + "@opentelemetry/core": 1.28.0 + "@opentelemetry/resources": 1.28.0 peerDependencies: "@opentelemetry/api": ">=1.4.0 <1.10.0" - checksum: 7bf7aed40a168866d76e2260237f6cec9c82acaebcc02a3597985b2be644e4aebf69e0f57739e7fd7cc8e75ecd0bdc98b0429ea985d7de6064148477ffd6432e + checksum: 7e8d05d302759341f10d2c853378a0556bea295660419103b2be906b933ca660704bf1cdac30fc803a4bd7ed852f0a626e8774a6307e57aafcf08e67d3fcd737 languageName: node linkType: hard -"@opentelemetry/sdk-logs@npm:0.54.0, @opentelemetry/sdk-logs@npm:^0.54.0": - version: 0.54.0 - resolution: "@opentelemetry/sdk-logs@npm:0.54.0" +"@opentelemetry/sdk-metrics@npm:1.28.0, @opentelemetry/sdk-metrics@npm:^1.28.0": + version: 1.28.0 + resolution: "@opentelemetry/sdk-metrics@npm:1.28.0" dependencies: - "@opentelemetry/api-logs": 0.54.0 - "@opentelemetry/core": 1.27.0 - "@opentelemetry/resources": 1.27.0 + 
"@opentelemetry/core": 1.28.0 + "@opentelemetry/resources": 1.28.0 peerDependencies: - "@opentelemetry/api": ">=1.4.0 <1.10.0" - checksum: fd6db65af6d7afdb454eac1df8a4029d3d287d37e9289a4d128bea07995e8843b7b1e5d1f39aa39538397ce1b6bf624cc2548f40dc18324ba3bbaec86dd845b9 + "@opentelemetry/api": ">=1.3.0 <1.10.0" + checksum: b1a42fbad2046f21e384185b1559e198bb23bdfcd5970fc7f3a3cc4cfe5fb37ab8a6f29deef1b6753eb6a68e2c3b19c6d8a4957be4024af0ac0165eea24c051f languageName: node linkType: hard -"@opentelemetry/sdk-metrics@npm:1.25.0, @opentelemetry/sdk-metrics@npm:^1.25.0, @opentelemetry/sdk-metrics@npm:^1.8.0": +"@opentelemetry/sdk-metrics@npm:^1.8.0": version: 1.25.0 resolution: "@opentelemetry/sdk-metrics@npm:1.25.0" dependencies: @@ -3840,68 +3862,43 @@ __metadata: languageName: node linkType: hard -"@opentelemetry/sdk-metrics@npm:1.27.0": - version: 1.27.0 - resolution: "@opentelemetry/sdk-metrics@npm:1.27.0" +"@opentelemetry/sdk-trace-base@npm:1.28.0": + version: 1.28.0 + resolution: "@opentelemetry/sdk-trace-base@npm:1.28.0" dependencies: - "@opentelemetry/core": 1.27.0 - "@opentelemetry/resources": 1.27.0 - peerDependencies: - "@opentelemetry/api": ">=1.3.0 <1.10.0" - checksum: c8776577063a3a5199d5717247270daf5820ce6636530b5ea4b5a8d6b40170cec9bb6b56dacb5c118d2e90588af83d0ebbb13f4d370c7efe50f69d22e5d13463 - languageName: node - linkType: hard - -"@opentelemetry/sdk-trace-base@npm:1.25.0": - version: 1.25.0 - resolution: "@opentelemetry/sdk-trace-base@npm:1.25.0" - dependencies: - "@opentelemetry/core": 1.25.0 - "@opentelemetry/resources": 1.25.0 - "@opentelemetry/semantic-conventions": 1.25.0 - peerDependencies: - "@opentelemetry/api": ">=1.0.0 <1.10.0" - checksum: 4c0ce40dbe9dcf5e5f79c60c44ffadb6806f1a8cf45c13d901ea6a2345f6cf26a83a1dad4358859fcf941e01f8bd8654f907f88137d5051e023211f8d645e959 - languageName: node - linkType: hard - -"@opentelemetry/sdk-trace-base@npm:1.27.0": - version: 1.27.0 - resolution: "@opentelemetry/sdk-trace-base@npm:1.27.0" - dependencies: - "@opentelemetry/core": 1.27.0 - "@opentelemetry/resources": 1.27.0 + "@opentelemetry/core": 1.28.0 + "@opentelemetry/resources": 1.28.0 "@opentelemetry/semantic-conventions": 1.27.0 peerDependencies: "@opentelemetry/api": ">=1.0.0 <1.10.0" - checksum: d28c36724aeaf4884f7957e2ab138d9a0ca715a68b2ad23e2935ff0e39cd438c57fd0c8cc85fd5e280464857ede1ae8f9c8e40a37088a1e34d2e625e77276fee + checksum: 13828679153d1690384a57e17709c18a76dcee680e92c7f64c85bf6dc5771cc05f1eb70f64c726859718fe494428aab049511d26bd39fa4d9ebd5270ca39eca0 languageName: node linkType: hard -"@opentelemetry/sdk-trace-node@npm:^1.25.0": - version: 1.25.0 - resolution: "@opentelemetry/sdk-trace-node@npm:1.25.0" +"@opentelemetry/sdk-trace-node@npm:^1.28.0": + version: 1.28.0 + resolution: "@opentelemetry/sdk-trace-node@npm:1.28.0" dependencies: - "@opentelemetry/context-async-hooks": 1.25.0 - "@opentelemetry/core": 1.25.0 - "@opentelemetry/propagator-b3": 1.25.0 - "@opentelemetry/propagator-jaeger": 1.25.0 - "@opentelemetry/sdk-trace-base": 1.25.0 + "@opentelemetry/context-async-hooks": 1.28.0 + "@opentelemetry/core": 1.28.0 + "@opentelemetry/propagator-b3": 1.28.0 + "@opentelemetry/propagator-jaeger": 1.28.0 + "@opentelemetry/sdk-trace-base": 1.28.0 semver: ^7.5.2 peerDependencies: "@opentelemetry/api": ">=1.0.0 <1.10.0" - checksum: 22a0a61a6c092841ef4438f914edd259d3025078cc9331aaac340c624c2963aa6fdc4970ade5a0e6647c64e92e893ebde0b8ecdd021abac5358ea3c814a5c01c + checksum: 
60868374d1eda9de8835b819a3fab2db2dae640a6253c0d12affe254dcdb02a7e79e7f151e2e09773bbf167e428c5582b810884870b8497dbd28886eb144241d languageName: node linkType: hard -"@opentelemetry/semantic-conventions@npm:1.25.0, @opentelemetry/semantic-conventions@npm:^1.25.0": +"@opentelemetry/semantic-conventions@npm:1.25.0": version: 1.25.0 resolution: "@opentelemetry/semantic-conventions@npm:1.25.0" checksum: 8c9d36f57f0d3d1d4945effe626894ffea860b4be4d5257666ee28b90843ce22694c5b01f9b25ed47a08043958b7e89a65b7ae8e4128f5ed72dcdfe71ac7a19a languageName: node linkType: hard -"@opentelemetry/semantic-conventions@npm:1.25.1, @opentelemetry/semantic-conventions@npm:^1.22.0": +"@opentelemetry/semantic-conventions@npm:1.25.1": version: 1.25.1 resolution: "@opentelemetry/semantic-conventions@npm:1.25.1" checksum: fea418a4b09c55121c6da11c49dd2105116533838c484aead17e8acf8029dad711e145849812f9c61f9e48fad8e2b6cf103d2c18847ca993032ce9b27c2f863d @@ -3915,13 +3912,10 @@ __metadata: languageName: node linkType: hard -"@opentelemetry/winston-transport@npm:^0.7.0": - version: 0.7.0 - resolution: "@opentelemetry/winston-transport@npm:0.7.0" - dependencies: - "@opentelemetry/api-logs": ^0.54.0 - winston-transport: 4.* - checksum: a75d1915e90ab9beaec842fe2f2ce053ea2b43001d8be7cfd47945fa6e1dee6e1d1b5850becb72c9553edb6904844b685df838a1a2cbea0f2f6edf6ce85dc3bb +"@opentelemetry/semantic-conventions@npm:^1.27.0, @opentelemetry/semantic-conventions@npm:^1.28.0": + version: 1.28.0 + resolution: "@opentelemetry/semantic-conventions@npm:1.28.0" + checksum: 1d708afa654990236cdb6b5da84f7ab899b70bff9f753bc49d93616a5c7f7f339ba1eba6a9fbb57dee596995334f4e7effa57a4624741882ab5b3c419c3511e2 languageName: node linkType: hard @@ -5195,13 +5189,6 @@ __metadata: languageName: node linkType: hard -"@types/triple-beam@npm:^1.3.2": - version: 1.3.5 - resolution: "@types/triple-beam@npm:1.3.5" - checksum: 519b6a1b30d4571965c9706ad5400a200b94e4050feca3e7856e3ea7ac00ec9903e32e9a10e2762d0f7e472d5d03e5f4b29c16c0bd8c1f77c8876c683b2231f1 - languageName: node - linkType: hard - "@types/wrap-ansi@npm:^3.0.0": version: 3.0.0 resolution: "@types/wrap-ansi@npm:3.0.0" @@ -5728,15 +5715,6 @@ __metadata: languageName: node linkType: hard -"abort-controller@npm:^3.0.0": - version: 3.0.0 - resolution: "abort-controller@npm:3.0.0" - dependencies: - event-target-shim: ^5.0.0 - checksum: 170bdba9b47b7e65906a28c8ce4f38a7a369d78e2271706f020849c1bfe0ee2067d4261df8bbb66eb84f79208fd5b710df759d64191db58cfba7ce8ef9c54b75 - languageName: node - linkType: hard - "abortable-iterator@npm:^5.0.1": version: 5.0.1 resolution: "abortable-iterator@npm:5.0.1" @@ -6274,13 +6252,6 @@ __metadata: languageName: node linkType: hard -"async@npm:^3.2.3": - version: 3.2.5 - resolution: "async@npm:3.2.5" - checksum: 5ec77f1312301dee02d62140a6b1f7ee0edd2a0f983b6fd2b0849b969f245225b990b47b8243e7b9ad16451a53e7f68e753700385b706198ced888beedba3af4 - languageName: node - linkType: hard - "asynckit@npm:^0.4.0": version: 0.4.0 resolution: "asynckit@npm:0.4.0" @@ -6288,6 +6259,13 @@ __metadata: languageName: node linkType: hard +"atomic-sleep@npm:^1.0.0": + version: 1.0.0 + resolution: "atomic-sleep@npm:1.0.0" + checksum: b95275afb2f80732f22f43a60178430c468906a415a7ff18bcd0feeebc8eec3930b51250aeda91a476062a90e07132b43a1794e8d8ffcf9b650e8139be75fa36 + languageName: node + linkType: hard + "available-typed-arrays@npm:^1.0.7": version: 1.0.7 resolution: "available-typed-arrays@npm:1.0.7" @@ -7336,7 +7314,7 @@ __metadata: languageName: node linkType: hard -"color-convert@npm:^1.9.0, 
color-convert@npm:^1.9.3": +"color-convert@npm:^1.9.0": version: 1.9.3 resolution: "color-convert@npm:1.9.3" dependencies: @@ -7361,50 +7339,20 @@ __metadata: languageName: node linkType: hard -"color-name@npm:^1.0.0, color-name@npm:^1.1.4, color-name@npm:~1.1.4": +"color-name@npm:^1.1.4, color-name@npm:~1.1.4": version: 1.1.4 resolution: "color-name@npm:1.1.4" checksum: b0445859521eb4021cd0fb0cc1a75cecf67fceecae89b63f62b201cca8d345baf8b952c966862a9d9a2632987d4f6581f0ec8d957dfacece86f0a7919316f610 languageName: node linkType: hard -"color-string@npm:^1.6.0": - version: 1.9.1 - resolution: "color-string@npm:1.9.1" - dependencies: - color-name: ^1.0.0 - simple-swizzle: ^0.2.2 - checksum: c13fe7cff7885f603f49105827d621ce87f4571d78ba28ef4a3f1a104304748f620615e6bf065ecd2145d0d9dad83a3553f52bb25ede7239d18e9f81622f1cc5 - languageName: node - linkType: hard - -"color@npm:^3.1.3": - version: 3.2.1 - resolution: "color@npm:3.2.1" - dependencies: - color-convert: ^1.9.3 - color-string: ^1.6.0 - checksum: f81220e8b774d35865c2561be921f5652117638dcda7ca4029262046e37fc2444ac7bbfdd110cf1fd9c074a4ee5eda8f85944ffbdda26186b602dd9bb05f6400 - languageName: node - linkType: hard - -"colorette@npm:^2.0.10, colorette@npm:^2.0.14": +"colorette@npm:^2.0.10, colorette@npm:^2.0.14, colorette@npm:^2.0.20, colorette@npm:^2.0.7": version: 2.0.20 resolution: "colorette@npm:2.0.20" checksum: 0c016fea2b91b733eb9f4bcdb580018f52c0bc0979443dad930e5037a968237ac53d9beb98e218d2e9235834f8eebce7f8e080422d6194e957454255bde71d3d languageName: node linkType: hard -"colorspace@npm:1.1.x": - version: 1.1.4 - resolution: "colorspace@npm:1.1.4" - dependencies: - color: ^3.1.3 - text-hex: 1.0.x - checksum: bb3934ef3c417e961e6d03d7ca60ea6e175947029bfadfcdb65109b01881a1c0ecf9c2b0b59abcd0ee4a0d7c1eae93beed01b0e65848936472270a0b341ebce8 - languageName: node - linkType: hard - "combine-source-map@npm:^0.8.0, combine-source-map@npm:~0.8.0": version: 0.8.0 resolution: "combine-source-map@npm:0.8.0" @@ -8161,6 +8109,13 @@ __metadata: languageName: node linkType: hard +"dateformat@npm:^4.6.3": + version: 4.6.3 + resolution: "dateformat@npm:4.6.3" + checksum: c3aa0617c0a5b30595122bc8d1bee6276a9221e4d392087b41cbbdf175d9662ae0e50d0d6dcdf45caeac5153c4b5b0844265f8cd2b2245451e3da19e39e3b65d + languageName: node + linkType: hard + "debounce@npm:^1.2.0": version: 1.2.1 resolution: "debounce@npm:1.2.1" @@ -8915,13 +8870,6 @@ __metadata: languageName: node linkType: hard -"enabled@npm:2.0.x": - version: 2.0.0 - resolution: "enabled@npm:2.0.0" - checksum: 9d256d89f4e8a46ff988c6a79b22fa814b4ffd82826c4fdacd9b42e9b9465709d3b748866d0ab4d442dfc6002d81de7f7b384146ccd1681f6a7f868d2acca063 - languageName: node - linkType: hard - "encodeurl@npm:^1.0.2, encodeurl@npm:~1.0.2": version: 1.0.2 resolution: "encodeurl@npm:1.0.2" @@ -9577,13 +9525,6 @@ __metadata: languageName: node linkType: hard -"event-target-shim@npm:^5.0.0": - version: 5.0.1 - resolution: "event-target-shim@npm:5.0.1" - checksum: 1ffe3bb22a6d51bdeb6bf6f7cf97d2ff4a74b017ad12284cc9e6a279e727dc30a5de6bb613e5596ff4dc3e517841339ad09a7eec44266eccb1aa201a30448166 - languageName: node - linkType: hard - "eventemitter3@npm:^4.0.0": version: 4.0.7 resolution: "eventemitter3@npm:4.0.7" @@ -9753,6 +9694,13 @@ __metadata: languageName: node linkType: hard +"fast-copy@npm:^3.0.2": + version: 3.0.2 + resolution: "fast-copy@npm:3.0.2" + checksum: 47f584bcede08ab3198559d3e0e093a547d567715b86be2198da6e3366c3c73eed550d97b86f9fb90dae179982b89c15d68187def960f522cdce14bacdfc6184 + languageName: node + linkType: hard + 
"fast-deep-equal@npm:^3.1.1, fast-deep-equal@npm:^3.1.3": version: 3.1.3 resolution: "fast-deep-equal@npm:3.1.3" @@ -9794,6 +9742,13 @@ __metadata: languageName: node linkType: hard +"fast-redact@npm:^3.1.1": + version: 3.5.0 + resolution: "fast-redact@npm:3.5.0" + checksum: ef03f0d1849da074a520a531ad299bf346417b790a643931ab4e01cb72275c8d55b60dc8512fb1f1818647b696790edefaa96704228db9f012da935faa1940af + languageName: node + linkType: hard + "fast-safe-stringify@npm:^2.0.7, fast-safe-stringify@npm:^2.1.1": version: 2.1.1 resolution: "fast-safe-stringify@npm:2.1.1" @@ -9851,13 +9806,6 @@ __metadata: languageName: node linkType: hard -"fecha@npm:^4.2.0": - version: 4.2.3 - resolution: "fecha@npm:4.2.3" - checksum: f94e2fb3acf5a7754165d04549460d3ae6c34830394d20c552197e3e000035d69732d74af04b9bed3283bf29fe2a9ebdcc0085e640b0be3cc3658b9726265e31 - languageName: node - linkType: hard - "fflate@npm:^0.8.0": version: 0.8.2 resolution: "fflate@npm:0.8.2" @@ -9874,15 +9822,6 @@ __metadata: languageName: node linkType: hard -"file-stream-rotator@npm:^0.6.1": - version: 0.6.1 - resolution: "file-stream-rotator@npm:0.6.1" - dependencies: - moment: ^2.29.1 - checksum: ebdf6a9e7ca886a50f4dafb2284d4569cefd5bdf4e4451ead25f4d68b7f9776b2620a3d110d534edd40935d1e17f37d818e2129303201870ff89c71b19b49ac1 - languageName: node - linkType: hard - "file-uri-to-path@npm:1.0.0": version: 1.0.0 resolution: "file-uri-to-path@npm:1.0.0" @@ -10009,13 +9948,6 @@ __metadata: languageName: node linkType: hard -"fn.name@npm:1.x.x": - version: 1.1.0 - resolution: "fn.name@npm:1.1.0" - checksum: e357144f48cfc9a7f52a82bbc6c23df7c8de639fce049cac41d41d62cabb740cdb9f14eddc6485e29c933104455bdd7a69bb14a9012cef9cd4fa252a4d0cf293 - languageName: node - linkType: hard - "follow-redirects@npm:^1.0.0, follow-redirects@npm:^1.12.1, follow-redirects@npm:^1.15.6": version: 1.15.6 resolution: "follow-redirects@npm:1.15.6" @@ -10639,6 +10571,13 @@ __metadata: languageName: node linkType: hard +"help-me@npm:^5.0.0": + version: 5.0.0 + resolution: "help-me@npm:5.0.0" + checksum: 474436627b6c7d2f406a2768453895889eb2712c8ded4c47658d5c6dd46c2ff3f742be4e4e8dedd57b7f1ac6b28803896a2e026a32a977f507222c16f23ab2e1 + languageName: node + linkType: hard + "hexoid@npm:^1.0.0": version: 1.0.0 resolution: "hexoid@npm:1.0.0" @@ -11173,13 +11112,6 @@ __metadata: languageName: node linkType: hard -"is-arrayish@npm:^0.3.1": - version: 0.3.2 - resolution: "is-arrayish@npm:0.3.2" - checksum: 977e64f54d91c8f169b59afcd80ff19227e9f5c791fa28fa2e5bce355cbaf6c2c356711b734656e80c9dd4a854dd7efcf7894402f1031dfc5de5d620775b4d5f - languageName: node - linkType: hard - "is-bigint@npm:^1.0.1": version: 1.0.4 resolution: "is-bigint@npm:1.0.4" @@ -12434,6 +12366,13 @@ __metadata: languageName: node linkType: hard +"joycon@npm:^3.1.1": + version: 3.1.1 + resolution: "joycon@npm:3.1.1" + checksum: 8003c9c3fc79c5c7602b1c7e9f7a2df2e9916f046b0dbad862aa589be78c15734d11beb9fe846f5e06138df22cb2ad29961b6a986ba81c4920ce2b15a7f11067 + languageName: node + linkType: hard + "js-sha3@npm:0.8.0": version: 0.8.0 resolution: "js-sha3@npm:0.8.0" @@ -12740,13 +12679,6 @@ __metadata: languageName: node linkType: hard -"kuler@npm:^2.0.0": - version: 2.0.0 - resolution: "kuler@npm:2.0.0" - checksum: 9e10b5a1659f9ed8761d38df3c35effabffbd19fc6107324095238e4ef0ff044392cae9ac64a1c2dda26e532426485342226b93806bd97504b174b0dcf04ed81 - languageName: node - linkType: hard - "labeled-stream-splicer@npm:^2.0.0": version: 2.0.2 resolution: "labeled-stream-splicer@npm:2.0.2" @@ -13076,34 +13008,6 @@ __metadata: 
   languageName: node
   linkType: hard
 
-"logform@npm:^2.3.2, logform@npm:^2.4.0":
-  version: 2.6.0
-  resolution: "logform@npm:2.6.0"
-  dependencies:
-    "@colors/colors": 1.6.0
-    "@types/triple-beam": ^1.3.2
-    fecha: ^4.2.0
-    ms: ^2.1.1
-    safe-stable-stringify: ^2.3.1
-    triple-beam: ^1.3.0
-  checksum: b9ea74bb75e55379ad0eb3e4d65ae6e8d02bc45b431c218162878bf663997ab9258a73104c2b30e09dd2db288bb83c8bf8748e46689d75f5e7e34cf69378d6df
-  languageName: node
-  linkType: hard
-
-"logform@npm:^2.6.1":
-  version: 2.6.1
-  resolution: "logform@npm:2.6.1"
-  dependencies:
-    "@colors/colors": 1.6.0
-    "@types/triple-beam": ^1.3.2
-    fecha: ^4.2.0
-    ms: ^2.1.1
-    safe-stable-stringify: ^2.3.1
-    triple-beam: ^1.3.0
-  checksum: 0c6b95fa8350ccc33c7c33d77de2a9920205399706fc1b125151c857b61eb90873f4670d9e0e58e58c165b68a363206ae670d6da8b714527c838da3c84449605
-  languageName: node
-  linkType: hard
-
 "long@npm:^5.0.0":
   version: 5.2.3
   resolution: "long@npm:5.2.3"
@@ -13751,13 +13655,6 @@ __metadata:
   languageName: node
   linkType: hard
 
-"moment@npm:^2.29.1":
-  version: 2.30.1
-  resolution: "moment@npm:2.30.1"
-  checksum: 859236bab1e88c3e5802afcf797fc801acdbd0ee509d34ea3df6eea21eb6bcc2abd4ae4e4e64aa7c986aa6cba563c6e62806218e6412a765010712e5fa121ba6
-  languageName: node
-  linkType: hard
-
 "morphdom@npm:^2.3.3":
   version: 2.7.4
   resolution: "morphdom@npm:2.7.4"
@@ -14200,13 +14097,6 @@ __metadata:
   languageName: node
   linkType: hard
 
-"object-hash@npm:^2.0.1":
-  version: 2.2.0
-  resolution: "object-hash@npm:2.2.0"
-  checksum: 55ba841e3adce9c4f1b9b46b41983eda40f854e0d01af2802d3ae18a7085a17168d6b81731d43fdf1d6bcbb3c9f9c56d22c8fea992203ad90a38d7d919bc28f1
-  languageName: node
-  linkType: hard
-
 "object-inspect@npm:^1.13.1":
   version: 1.13.1
   resolution: "object-inspect@npm:1.13.1"
@@ -14281,6 +14171,13 @@ __metadata:
   languageName: node
   linkType: hard
 
+"on-exit-leak-free@npm:^2.1.0":
+  version: 2.1.2
+  resolution: "on-exit-leak-free@npm:2.1.2"
+  checksum: 6ce7acdc7b9ceb51cf029b5239cbf41937ee4c8dcd9d4e475e1777b41702564d46caa1150a744e00da0ac6d923ab83471646a39a4470f97481cf6e2d8d253c3f
+  languageName: node
+  linkType: hard
+
 "on-finished@npm:2.4.1, on-finished@npm:^2.3.0, on-finished@npm:^2.4.1":
   version: 2.4.1
   resolution: "on-finished@npm:2.4.1"
@@ -14313,15 +14210,6 @@ __metadata:
   languageName: node
   linkType: hard
 
-"one-time@npm:^1.0.0":
-  version: 1.0.0
-  resolution: "one-time@npm:1.0.0"
-  dependencies:
-    fn.name: 1.x.x
-  checksum: fd008d7e992bdec1c67f53a2f9b46381ee12a9b8c309f88b21f0223546003fb47e8ad7c1fd5843751920a8d276c63bd4b45670ef80c61fb3e07dbccc962b5c7d
-  languageName: node
-  linkType: hard
-
 "onetime@npm:^5.1.0, onetime@npm:^5.1.2":
   version: 5.1.2
   resolution: "onetime@npm:5.1.2"
@@ -14802,6 +14690,66 @@ __metadata:
   languageName: node
   linkType: hard
 
+"pino-abstract-transport@npm:^2.0.0":
+  version: 2.0.0
+  resolution: "pino-abstract-transport@npm:2.0.0"
+  dependencies:
+    split2: ^4.0.0
+  checksum: 4db0cd8a1a7b6d13e76dbb58e6adc057c39e4591c70f601f4a427c030d57dff748ab53954e1ecd3aa6e21c1a22dd38de96432606c6d906a7b9f610543bf1d6e2
+  languageName: node
+  linkType: hard
+
+"pino-pretty@npm:^13.0.0":
+  version: 13.0.0
+  resolution: "pino-pretty@npm:13.0.0"
+  dependencies:
+    colorette: ^2.0.7
+    dateformat: ^4.6.3
+    fast-copy: ^3.0.2
+    fast-safe-stringify: ^2.1.1
+    help-me: ^5.0.0
+    joycon: ^3.1.1
+    minimist: ^1.2.6
+    on-exit-leak-free: ^2.1.0
+    pino-abstract-transport: ^2.0.0
+    pump: ^3.0.0
+    secure-json-parse: ^2.4.0
+    sonic-boom: ^4.0.1
+    strip-json-comments: ^3.1.1
+  bin:
+    pino-pretty: bin.js
+  checksum: a529219b3ccc99ed6a3e2de00ae6a8d4003344614bce39f836352317c962db8c3f4e9ee45843edc218cb9be618a7318b06fa6fab366d4314b9297d0130bc06f5
+  languageName: node
+  linkType: hard
+
+"pino-std-serializers@npm:^7.0.0":
+  version: 7.0.0
+  resolution: "pino-std-serializers@npm:7.0.0"
+  checksum: 08cd1d7b7adc4cfca39e42c2d5fd21bcf4513153734e7b8fa278b0e9e9f62df78c4c202886343fe882a462539c931cb8110b661775ad7f7217c96856795b5a86
+  languageName: node
+  linkType: hard
+
+"pino@npm:^9.5.0":
+  version: 9.5.0
+  resolution: "pino@npm:9.5.0"
+  dependencies:
+    atomic-sleep: ^1.0.0
+    fast-redact: ^3.1.1
+    on-exit-leak-free: ^2.1.0
+    pino-abstract-transport: ^2.0.0
+    pino-std-serializers: ^7.0.0
+    process-warning: ^4.0.0
+    quick-format-unescaped: ^4.0.3
+    real-require: ^0.2.0
+    safe-stable-stringify: ^2.3.1
+    sonic-boom: ^4.0.1
+    thread-stream: ^3.0.0
+  bin:
+    pino: bin.js
+  checksum: 650c3087619a619e92948641f0d9acc60cca594175b02fe1ce9c0923a8d07a8d120866f50b0848c26a5898837b8c1ae086adf67066180f686ea21e6e515a8558
+  languageName: node
+  linkType: hard
+
 "pirates@npm:^4.0.4":
   version: 4.0.6
   resolution: "pirates@npm:4.0.6"
@@ -15022,6 +14970,13 @@ __metadata:
   languageName: node
   linkType: hard
 
+"process-warning@npm:^4.0.0":
+  version: 4.0.0
+  resolution: "process-warning@npm:4.0.0"
+  checksum: 39d5cee53649132f12479965857cb01793d62ee1a702f06d079ee8aceee935cd0f79c250faab60e86705d8a5226856a61c419778d48ac67f72e160cceb60a1e5
+  languageName: node
+  linkType: hard
+
 "process@npm:^0.11.10, process@npm:~0.11.0":
   version: 0.11.10
   resolution: "process@npm:0.11.10"
@@ -15313,6 +15268,13 @@ __metadata:
   languageName: node
   linkType: hard
 
+"quick-format-unescaped@npm:^4.0.3":
+  version: 4.0.4
+  resolution: "quick-format-unescaped@npm:4.0.4"
+  checksum: 7bc32b99354a1aa46c089d2a82b63489961002bb1d654cee3e6d2d8778197b68c2d854fd23d8422436ee1fdfd0abaddc4d4da120afe700ade68bd357815b26fd
+  languageName: node
+  linkType: hard
+
 "quick-lru@npm:^4.0.1":
   version: 4.0.1
   resolution: "quick-lru@npm:4.0.1"
@@ -15465,19 +15427,6 @@ __metadata:
   languageName: node
   linkType: hard
 
-"readable-stream@npm:^4.5.2":
-  version: 4.5.2
-  resolution: "readable-stream@npm:4.5.2"
-  dependencies:
-    abort-controller: ^3.0.0
-    buffer: ^6.0.3
-    events: ^3.3.0
-    process: ^0.11.10
-    string_decoder: ^1.3.0
-  checksum: c4030ccff010b83e4f33289c535f7830190773e274b3fcb6e2541475070bdfd69c98001c3b0cb78763fc00c8b62f514d96c2b10a8bd35d5ce45203a25fa1d33a
-  languageName: node
-  linkType: hard
-
 "readdirp@npm:~3.6.0":
   version: 3.6.0
   resolution: "readdirp@npm:3.6.0"
@@ -15487,6 +15436,13 @@ __metadata:
   languageName: node
   linkType: hard
 
+"real-require@npm:^0.2.0":
+  version: 0.2.0
+  resolution: "real-require@npm:0.2.0"
+  checksum: fa060f19f2f447adf678d1376928c76379dce5f72bd334da301685ca6cdcb7b11356813332cc243c88470796bc2e2b1e2917fc10df9143dd93c2ea608694971d
+  languageName: node
+  linkType: hard
+
 "receptacle@npm:^1.3.2":
   version: 1.3.2
   resolution: "receptacle@npm:1.3.2"
@@ -15933,6 +15889,13 @@ __metadata:
   languageName: node
   linkType: hard
 
+"secure-json-parse@npm:^2.4.0":
+  version: 2.7.0
+  resolution: "secure-json-parse@npm:2.7.0"
+  checksum: d9d7d5a01fc6db6115744ba23cf9e67ecfe8c524d771537c062ee05ad5c11b64c730bc58c7f33f60bd6877f96b86f0ceb9ea29644e4040cb757f6912d4dd6737
+  languageName: node
+  linkType: hard
+
 "select-hose@npm:^2.0.0":
   version: 2.0.0
   resolution: "select-hose@npm:2.0.0"
@@ -16265,15 +16228,6 @@ __metadata:
   languageName: node
   linkType: hard
 
-"simple-swizzle@npm:^0.2.2":
-  version: 0.2.2
-  resolution: "simple-swizzle@npm:0.2.2"
-  dependencies:
-    is-arrayish: ^0.3.1
-  checksum: a7f3f2ab5c76c4472d5c578df892e857323e452d9f392e1b5cf74b74db66e6294a1e1b8b390b519fa1b96b5b613f2a37db6cffef52c3f1f8f3c5ea64eb2d54c0
-  languageName: node
-  linkType: hard
-
 "single-line-log@npm:^1.0.1":
   version: 1.1.2
   resolution: "single-line-log@npm:1.1.2"
@@ -16311,6 +16265,54 @@ __metadata:
   languageName: node
   linkType: hard
 
+"snappy@npm:^7.2.2":
+  version: 7.2.2
+  resolution: "snappy@npm:7.2.2"
+  dependencies:
+    "@napi-rs/snappy-android-arm-eabi": 7.2.2
+    "@napi-rs/snappy-android-arm64": 7.2.2
+    "@napi-rs/snappy-darwin-arm64": 7.2.2
+    "@napi-rs/snappy-darwin-x64": 7.2.2
+    "@napi-rs/snappy-freebsd-x64": 7.2.2
+    "@napi-rs/snappy-linux-arm-gnueabihf": 7.2.2
+    "@napi-rs/snappy-linux-arm64-gnu": 7.2.2
+    "@napi-rs/snappy-linux-arm64-musl": 7.2.2
+    "@napi-rs/snappy-linux-x64-gnu": 7.2.2
+    "@napi-rs/snappy-linux-x64-musl": 7.2.2
+    "@napi-rs/snappy-win32-arm64-msvc": 7.2.2
+    "@napi-rs/snappy-win32-ia32-msvc": 7.2.2
+    "@napi-rs/snappy-win32-x64-msvc": 7.2.2
+  dependenciesMeta:
+    "@napi-rs/snappy-android-arm-eabi":
+      optional: true
+    "@napi-rs/snappy-android-arm64":
+      optional: true
+    "@napi-rs/snappy-darwin-arm64":
+      optional: true
+    "@napi-rs/snappy-darwin-x64":
+      optional: true
+    "@napi-rs/snappy-freebsd-x64":
+      optional: true
+    "@napi-rs/snappy-linux-arm-gnueabihf":
+      optional: true
+    "@napi-rs/snappy-linux-arm64-gnu":
+      optional: true
+    "@napi-rs/snappy-linux-arm64-musl":
+      optional: true
+    "@napi-rs/snappy-linux-x64-gnu":
+      optional: true
+    "@napi-rs/snappy-linux-x64-musl":
+      optional: true
+    "@napi-rs/snappy-win32-arm64-msvc":
+      optional: true
+    "@napi-rs/snappy-win32-ia32-msvc":
+      optional: true
+    "@napi-rs/snappy-win32-x64-msvc":
+      optional: true
+  checksum: cc6ee627d32325c3b3a7220f57bf7f87906372431072b77dfacf5d875a21c54043df8d6f328eadf8d58bda3d9bb558b3f00e1daaa757441cfa1ec20004f715f1
+  languageName: node
+  linkType: hard
+
 "sockjs@npm:^0.3.24":
   version: 0.3.24
   resolution: "sockjs@npm:0.3.24"
@@ -16360,6 +16362,15 @@ __metadata:
   languageName: node
   linkType: hard
 
+"sonic-boom@npm:^4.0.1":
+  version: 4.2.0
+  resolution: "sonic-boom@npm:4.2.0"
+  dependencies:
+    atomic-sleep: ^1.0.0
+  checksum: e5e1ffdd3bcb0dee3bf6f7b2ff50dd3ffa2df864dc9d53463f33e225021a28601e91d0ec7e932739824bafd6f4ff3b7090939ac3e34ab1022e01692b41f7e8a3
+  languageName: node
+  linkType: hard
+
 "source-map-js@npm:^1.2.0":
   version: 1.2.0
   resolution: "source-map-js@npm:1.2.0"
@@ -16522,13 +16533,6 @@ __metadata:
   languageName: node
   linkType: hard
 
-"stack-trace@npm:0.0.x":
-  version: 0.0.10
-  resolution: "stack-trace@npm:0.0.10"
-  checksum: 473036ad32f8c00e889613153d6454f9be0536d430eb2358ca51cad6b95cea08a3cc33cc0e34de66b0dad221582b08ed2e61ef8e13f4087ab690f388362d6610
-  languageName: node
-  linkType: hard
-
 "stack-utils@npm:^2.0.3":
   version: 2.0.6
   resolution: "stack-utils@npm:2.0.6"
@@ -16740,7 +16744,7 @@ __metadata:
   languageName: node
   linkType: hard
 
-"string_decoder@npm:^1.1.1, string_decoder@npm:^1.3.0":
+"string_decoder@npm:^1.1.1":
   version: 1.3.0
   resolution: "string_decoder@npm:1.3.0"
   dependencies:
@@ -17111,13 +17115,6 @@ __metadata:
   languageName: node
   linkType: hard
 
-"text-hex@npm:1.0.x":
-  version: 1.0.0
-  resolution: "text-hex@npm:1.0.0"
-  checksum: 1138f68adc97bf4381a302a24e2352f04992b7b1316c5003767e9b0d3367ffd0dc73d65001ea02b07cd0ecc2a9d186de0cf02f3c2d880b8a522d4ccb9342244a
-  languageName: node
-  linkType: hard
-
 "text-table@npm:^0.2.0":
   version: 0.2.0
   resolution: "text-table@npm:0.2.0"
@@ -17134,6 +17131,15 @@ __metadata:
   languageName: node
   linkType: hard
 
+"thread-stream@npm:^3.0.0":
+  version: 3.1.0
+  resolution: "thread-stream@npm:3.1.0"
+  dependencies:
+    real-require: ^0.2.0
+  checksum: 3c5b494ce776f832dfd696792cc865f78c1e850db93e07979349bbc1a5845857cd447aea95808892906cc0178a2fd3233907329f3376e7fc9951e2833f5b7896
+  languageName: node
+  linkType: hard
+
 "through2@npm:^2.0.0, through2@npm:^2.0.3":
   version: 2.0.5
   resolution: "through2@npm:2.0.5"
@@ -17265,13 +17271,6 @@ __metadata:
   languageName: node
   linkType: hard
 
-"triple-beam@npm:^1.3.0":
-  version: 1.4.1
-  resolution: "triple-beam@npm:1.4.1"
-  checksum: 2e881a3e8e076b6f2b85b9ec9dd4a900d3f5016e6d21183ed98e78f9abcc0149e7d54d79a3f432b23afde46b0885bdcdcbff789f39bc75de796316961ec07f61
-  languageName: node
-  linkType: hard
-
 "ts-api-utils@npm:^1.0.1":
   version: 1.3.0
   resolution: "ts-api-utils@npm:1.3.0"
@@ -18454,61 +18453,6 @@ __metadata:
   languageName: node
   linkType: hard
 
-"winston-daily-rotate-file@npm:^4.7.1":
-  version: 4.7.1
-  resolution: "winston-daily-rotate-file@npm:4.7.1"
-  dependencies:
-    file-stream-rotator: ^0.6.1
-    object-hash: ^2.0.1
-    triple-beam: ^1.3.0
-    winston-transport: ^4.4.0
-  peerDependencies:
-    winston: ^3
-  checksum: 227daea41f722caa017fc7d6f1f80d0e6c428491e57693e6bebc8312b85bcf3aace53cb3a925bda72fab59a6898fa127411d29348ec4b295e2263a7544cda611
-  languageName: node
-  linkType: hard
-
-"winston-transport@npm:4.*":
-  version: 4.8.0
-  resolution: "winston-transport@npm:4.8.0"
-  dependencies:
-    logform: ^2.6.1
-    readable-stream: ^4.5.2
-    triple-beam: ^1.3.0
-  checksum: f84092188176d49a6f4f75321ba3e50107ac0942a51a6d7e36b80af19dafb22b57258aaa6d8220763044ea23e30bffd597d3280d2a2298e6a491fe424896bac7
-  languageName: node
-  linkType: hard
-
-"winston-transport@npm:^4.4.0, winston-transport@npm:^4.7.0":
-  version: 4.7.0
-  resolution: "winston-transport@npm:4.7.0"
-  dependencies:
-    logform: ^2.3.2
-    readable-stream: ^3.6.0
-    triple-beam: ^1.3.0
-  checksum: ce074b5c76a99bee5236cf2b4d30fadfaf1e551d566f654f1eba303dc5b5f77169c21545ff5c5e4fdad9f8e815fc6d91b989f1db34161ecca6e860e62fd3a862
-  languageName: node
-  linkType: hard
-
-"winston@npm:^3.10.0":
-  version: 3.13.0
-  resolution: "winston@npm:3.13.0"
-  dependencies:
-    "@colors/colors": ^1.6.0
-    "@dabh/diagnostics": ^2.0.2
-    async: ^3.2.3
-    is-stream: ^2.0.0
-    logform: ^2.4.0
-    one-time: ^1.0.0
-    readable-stream: ^3.4.0
-    safe-stable-stringify: ^2.3.1
-    stack-trace: 0.0.x
-    triple-beam: ^1.3.0
-    winston-transport: ^4.7.0
-  checksum: 66f9fbbadb58e1632701e9c89391f217310c9455462148e163e060dcd25aed21351b0413bdbbf90e5c5fe9bc945fc5de6f53875ac7c7ef3061133a354fc678c0
-  languageName: node
-  linkType: hard
-
 "word-wrap@npm:^1.2.5":
   version: 1.2.5
   resolution: "word-wrap@npm:1.2.5"
@@ -18628,6 +18572,13 @@ __metadata:
   languageName: node
   linkType: hard
 
+"xxhash-wasm@npm:^1.1.0":
+  version: 1.1.0
+  resolution: "xxhash-wasm@npm:1.1.0"
+  checksum: 2ccecb3b1dac5fefe11002d5ff5d106bbb5b506f9ee817ecf1bda65e132ebff3c82701c6727df3cb90b94a6dc1d8b294337678606f2304bcb0fd6b8dc68afe0d
+  languageName: node
+  linkType: hard
+
 "y18n@npm:^5.0.5":
   version: 5.0.8
   resolution: "y18n@npm:5.0.8"