diff --git a/.github/workflows/devnet-deploy.yml b/.github/workflows/devnet-deploy.yml new file mode 100644 index 000000000000..4c4e5a7384f5 --- /dev/null +++ b/.github/workflows/devnet-deploy.yml @@ -0,0 +1,133 @@ +name: Deploy devnet + +on: + workflow_dispatch: + inputs: + namespace: + description: The namespace to deploy to, e.g. smoke + required: true + aztec_docker_image: + description: The Aztec Docker image to use + required: true + deployment_mnemonic_secret_name: + description: The name of the secret which holds the boot node's contract deployment mnemonic + required: true + default: testnet-deployment-mnemonic + respect_tf_lock: + description: Whether to respect the Terraform lock + required: false + default: "true" + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: false + +env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + CONTRACT_S3_BUCKET: s3://static.aztec.network + CLUSTER_NAME: aztec-gke + REGION: us-west1-a + NAMESPACE: ${{ inputs.namespace }} + AZTEC_DOCKER_IMAGE: ${{ inputs.aztec_docker_image }} + +jobs: + deploy-network: + uses: ./.github/workflows/network-deploy.yml + with: + namespace: ${{ github.event.inputs.namespace }} + values_file: release-devnet.yaml + aztec_docker_image: ${{ github.event.inputs.aztec_docker_image }} + deployment_mnemonic_secret_name: ${{ github.event.inputs.deployment_mnemonic_secret_name }} + respect_tf_lock: ${{ github.event.inputs.respect_tf_lock }} + secrets: + GCP_SA_KEY: ${{ secrets.GCP_SA_KEY }} + + + bootstrap-network: + runs-on: ubuntu-latest + needs: deploy-network + steps: + - name: Configure AWS credentials + uses: aws-actions/configure-aws-credentials@v1 + with: + aws-access-key-id: ${{ env.AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ env.AWS_SECRET_ACCESS_KEY }} + aws-region: eu-west-2 + + - name: Authenticate to Google Cloud + uses: google-github-actions/auth@v2 + with: + credentials_json: ${{ secrets.GCP_SA_KEY }} + + - name: Set up Cloud SDK + uses: google-github-actions/setup-gcloud@v2 + + - name: Setup kubectl access + run: | + gcloud components install kubectl gke-gcloud-auth-plugin --quiet + gcloud container clusters get-credentials ${{ env.CLUSTER_NAME }} --region ${{ env.REGION }} + + - name: Setup helm + run: | + curl -fsSL -o get_helm.sh https://raw.githubusercontent.com/helm/helm/main/scripts/get-helm-3 + chmod +x get_helm.sh + sudo ./get_helm.sh + rm get_helm.sh + + - name: Bootstrap network + run: | + set -eu -o pipefail + + pxe_port_forward_pid="" + ethereum_port_forward_pid="" + + cleanup() { + echo "Cleaning up port-forward processes..." + if [ -n "$pxe_port_forward_pid" ]; then + kill $pxe_port_forward_pid 2>/dev/null || true + fi + if [ -n "$ethereum_port_forward_pid" ]; then + kill $ethereum_port_forward_pid 2>/dev/null || true + fi + } + + trap cleanup EXIT + + echo "Waiting for PXE pods to be ready..." + if ! kubectl wait -n $NAMESPACE --for=condition=ready pod -l app=pxe --timeout=10m; then + echo "Error: PXE pods did not become ready within timeout" + exit 1 + fi + + helm get values $NAMESPACE -n $NAMESPACE -o json --all > helm_values.json + + PXE_PORT="$(jq -r .pxe.service.nodePort helm_values.json)" + ETHEREUM_PORT="$(jq -r .ethereum.service.port helm_values.json)" + L1_CHAIN_ID="$(jq -r .ethereum.chainId helm_values.json)" + + MNEMONIC="$(jq -r .aztec.l1DeploymentMnemonic helm_values.json)" + echo "::add-mask::$MNEMONIC" + + rm helm_values.json + + kubectl port-forward -n $NAMESPACE svc/$NAMESPACE-aztec-network-pxe $PXE_PORT & + pxe_port_forward_pid=$! + + # port-forward directly to the pod because the Eth node does not have a service definition + ETH_POD_NAME=$(kubectl get pods -n $NAMESPACE -l app=ethereum -o jsonpath='{.items[0].metadata.name}') + kubectl port-forward -n $NAMESPACE pod/$ETH_POD_NAME $ETHEREUM_PORT & + ethereum_port_forward_pid=$! + + # wait for port-forwards to establish + sleep 5 + + docker run --rm --network host $AZTEC_DOCKER_IMAGE bootstrap-network \ --rpc-url http://127.0.0.1:$PXE_PORT \ --l1-rpc-url http://127.0.0.1:$ETHEREUM_PORT \ --l1-chain-id "$L1_CHAIN_ID" \ --mnemonic "$MNEMONIC" \ --json | tee ./basic_contracts.json + + aws s3 cp ./basic_contracts.json ${{ env.CONTRACT_S3_BUCKET }}/devnet/basic_contracts.json +
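The bootstrap step above derives every port and the chain id from the Helm release via `jq -r`, so a missing chart key silently becomes the string `null` and only fails later inside `kubectl port-forward`. A minimal hardening sketch, assuming the same value layout as the script above (`require_value` is a hypothetical helper, not part of the workflow):

```bash
#!/usr/bin/env bash
set -eu -o pipefail

# Sketch: fail fast on missing Helm values instead of port-forwarding with "null".
# jq -e exits non-zero when the result is null or false; -r strips JSON quotes.
require_value() {
  jq -er "$1" helm_values.json \
    || { echo "missing $1 in helm values" >&2; exit 1; }
}

PXE_PORT="$(require_value .pxe.service.nodePort)"
ETHEREUM_PORT="$(require_value .ethereum.service.port)"
L1_CHAIN_ID="$(require_value .ethereum.chainId)"
```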
diff --git a/.github/workflows/devnet-deploys.yml b/.github/workflows/devnet-deploys.yml deleted file mode 100644 index 1561702cb018..000000000000 --- a/.github/workflows/devnet-deploys.yml +++ /dev/null @@ -1,787 +0,0 @@ -name: Deploy to network -on: - # push: - # branches: [devnet, provernet, alphanet] - workflow_dispatch: - inputs: - no_rebuild_images: - description: "Don't rebuild images" - required: false - type: boolean - no_deploy: - description: "Skip deployment (only release images)" - required: false - type: boolean - -concurrency: - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: true - -# We only reference secret variables here where we put them into the environment so as to not create confusion - -# Anvil Accounts. Anvil provides 10 pre-funded accounts for the mnemonic we have specified in FORK_MNEMONIC. We are using: -# 1. The first account (index 0) is used in SEQ_1_PUBLISHER_PRIVATE_KEY -# 2. The 3rd account (index 2) is used in PROVER_1_PUBLISHER_PRIVATE_KEY -# 3. The 9th account (index 8) is used in this workflow for deploying contracts etc -# 4.
The 10th account (index 9) is used by the deployed faucet -# TODO: Convert all this so we take the provided mnemonic and derive the keys from the above indices -env: - DOCKERHUB_PASSWORD: ${{ secrets.DOCKERHUB_PASSWORD }} - GIT_COMMIT: ${{ github.sha }} - DEPLOY_TAG: none - L1_CHAIN_ID: 677692 - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - API_KEY: ${{ secrets.DEVNET_API_KEY }} - FORK_ADMIN_API_KEY: ${{ secrets.DEVNET_API_KEY }} - FORK_MNEMONIC: ${{ secrets.FORK_MNEMONIC }} - CONTRACT_PUBLISHER_PRIVATE_KEY: ${{ secrets.CONTRACT_PUBLISHER_PRIVATE_KEY }} - CONTRACT_S3_BUCKET: s3://static.aztec.network - - # TF Vars - TF_VAR_DOCKERHUB_ACCOUNT: aztecprotocol - TF_VAR_L1_CHAIN_ID: 677692 - TF_VAR_DEPLOY_TAG: none - TF_VAR_IMAGE_TAG: ${{ github.sha }} - TF_VAR_API_KEY: ${{ secrets.DEVNET_API_KEY }} - - # Node / Sequencer - TF_VAR_BOOTSTRAP_NODES: "" - TF_VAR_P2P_ENABLED: "false" - TF_VAR_SEQUENCER_PRIVATE_KEYS: '["${{ secrets.SEQ_1_PUBLISHER_PRIVATE_KEY }}"]' - TF_VAR_NODE_P2P_PRIVATE_KEYS: '[""]' - TF_VAR_SEQ_MAX_SECONDS_BETWEEN_BLOCKS: 0 # disable auto block building - TF_VAR_SEQ_MIN_SECONDS_BETWEEN_BLOCKS: 0 # disable auto block building - TF_VAR_SEQ_MIN_TX_PER_BLOCK: 1 - TF_VAR_SEQ_MAX_TX_PER_BLOCK: 64 - TF_VAR_NODE_P2P_TCP_PORT: 40000 - TF_VAR_NODE_P2P_UDP_PORT: 45000 - TF_VAR_NODE_LB_RULE_PRIORITY: 500 - - # Prover Node - TF_VAR_PROVER_NODE_LB_RULE_PRIORITY: 6000 - TF_VAR_PROVER_PRIVATE_KEYS: '["${{ secrets.PROVER_1_PUBLISHER_PRIVATE_KEY }}"]' - - # Anvil - TF_VAR_FORK_MNEMONIC: ${{ secrets.FORK_MNEMONIC }} - TF_VAR_INFURA_API_KEY: ${{ secrets.INFURA_API_KEY }} - TF_VAR_FORK_ADMIN_API_KEY: ${{ secrets.DEVNET_API_KEY }} - TF_VAR_MAINNET_FORK_CPU_UNITS: 2048 - TF_VAR_MAINNET_FORK_MEMORY_UNITS: 4096 - - # Faucet - TF_VAR_FAUCET_ACCOUNT_INDEX: 9 - TF_VAR_FAUCET_LB_RULE_PRIORITY: 600 - - # Prover - TF_VAR_AGENTS_PER_PROVER: 1 - TF_VAR_PROVING_ENABLED: false - - # Transaction Bot - TF_VAR_BOT_API_KEY: ${{ secrets.BOT_API_KEY }} - TF_VAR_BOT_PRIVATE_KEY: "" - TF_VAR_BOT_NO_START: true - TF_VAR_BOT_PRIVATE_TRANSFERS_PER_TX: 0 # no private transfers - TF_VAR_BOT_PUBLIC_TRANSFERS_PER_TX: 1 - TF_VAR_BOT_TX_MINED_WAIT_SECONDS: 4800 - TF_VAR_BOT_FOLLOW_CHAIN: "PROVEN" - TF_VAR_BOT_TX_INTERVAL_SECONDS: 180 - TF_VAR_BOT_COUNT: 1 - TF_VAR_BOT_FLUSH_SETUP_TRANSACTIONS: false - TF_VAR_BOT_MAX_PENDING_TXS: 1 - - # PXE - TF_VAR_PXE_LB_RULE_PRIORITY: 4000 - -jobs: - setup: - uses: ./.github/workflows/setup-runner.yml - with: - username: ${{ github.actor }} - runner_type: builder-x86 - secrets: inherit - - # Set network specific variables as outputs from this job to be referenced in later jobs - set-network: - needs: setup - runs-on: ${{ github.actor }}-x86 - outputs: - deploy_tag: ${{ steps.set_network_vars.outputs.deploy_tag }} - branch_name: ${{ steps.set_network_vars.outputs.branch_name }} - network_api_key: ${{ steps.set_network_vars.outputs.network_api_key }} - network_fork_admin_api_key: ${{ steps.set_network_vars.outputs.network_fork_admin_api_key }} - agents_per_prover: ${{ steps.set_network_vars.outputs.agents_per_prover }} - bot_interval: ${{ steps.set_network_vars.outputs.bot_interval }} - node_tcp_range_start: ${{ steps.set_network_vars.outputs.node_tcp_range_start }} - node_udp_range_start: ${{ steps.set_network_vars.outputs.node_udp_range_start }} - prover_node_tcp_range_start: ${{ steps.set_network_vars.outputs.prover_node_tcp_range_start }} - prover_node_udp_range_start: ${{ 
steps.set_network_vars.outputs.prover_node_udp_range_start }} - node_lb_priority_range_start: ${{ steps.set_network_vars.outputs.node_lb_priority_range_start }} - pxe_lb_priority_range_start: ${{ steps.set_network_vars.outputs.pxe_lb_priority_range_start }} - prover_node_lb_priority_range_start: ${{ steps.set_network_vars.outputs.prover_node_lb_priority_range_start }} - faucet_lb_priority: ${{ steps.set_network_vars.outputs.faucet_lb_priority }} - max_txs_per_block: ${{ steps.set_network_vars.outputs.max_txs_per_block }} - bot_follow_chain: ${{ steps.set_network_vars.outputs.bot_follow_chain }} - min_txs_per_block: ${{ steps.set_network_vars.outputs.min_txs_per_block }} - bot_flush_setup_txs: ${{ steps.set_network_vars.outputs.bot_flush_setup_txs }} - bot_max_pending_txs: ${{ steps.set_network_vars.outputs.bot_max_pending_txs }} - mainnet_fork_cpu_units: ${{ steps.set_network_vars.outputs.mainnet_fork_cpu_units }} - mainnet_fork_memory_units: ${{ steps.set_network_vars.outputs.mainnet_fork_memory_units }} - bot_skip_simulation: ${{ steps.set_network_vars.outputs.bot_skip_simulation }} - bot_l2_gas_limit: ${{ steps.set_network_vars.outputs.bot_l2_gas_limit }} - bot_da_gas_limit: ${{ steps.set_network_vars.outputs.bot_da_gas_limit }} - bot_count: ${{ steps.set_network_vars.outputs.bot_count }} - steps: - - name: Set network vars - shell: bash - run: | - env - export BRANCH_NAME=${GITHUB_HEAD_REF:-${GITHUB_REF#refs/heads/}} - if [ "$BRANCH_NAME" = "devnet" ] - then - echo "deploy_tag=devnet" >> $GITHUB_OUTPUT - echo "branch_name=devnet" >> $GITHUB_OUTPUT - echo "network_api_key=DEVNET_API_KEY" >> $GITHUB_OUTPUT - echo "network_fork_admin_api_key=DEVNET_API_KEY" >> $GITHUB_OUTPUT - echo "agents_per_prover=2" >> $GITHUB_OUTPUT - echo "bot_interval=180" >> $GITHUB_OUTPUT - echo "node_tcp_range_start=40100" >> $GITHUB_OUTPUT - echo "node_udp_range_start=45100" >> $GITHUB_OUTPUT - echo "prover_node_tcp_range_start=41100" >> $GITHUB_OUTPUT - echo "prover_node_udp_range_start=46100" >> $GITHUB_OUTPUT - echo "node_lb_priority_range_start=4100" >> $GITHUB_OUTPUT - echo "pxe_lb_priority_range_start=5100" >> $GITHUB_OUTPUT - echo "prover_node_lb_priority_range_start=6100" >> $GITHUB_OUTPUT - echo "faucet_lb_priority=601" >> $GITHUB_OUTPUT - echo "min_txs_per_block=1" >> $GITHUB_OUTPUT - echo "max_txs_per_block=64" >> $GITHUB_OUTPUT - echo "bot_follow_chain=PROVEN" >> $GITHUB_OUTPUT - echo "bot_flush_setup_txs=false" >> $GITHUB_OUTPUT - echo "bot_max_pending_txs=1" >> $GITHUB_OUTPUT - echo "mainnet_fork_cpu_units=2048" >> $GITHUB_OUTPUT - echo "mainnet_fork_memory_units=4096" >> $GITHUB_OUTPUT - echo "bot_skip_simulation=false" >> $GITHUB_OUTPUT - echo "bot_l2_gas_limit=" >> $GITHUB_OUTPUT - echo "bot_da_gas_limit=" >> $GITHUB_OUTPUT - echo "bot_count=1" >> $GITHUB_OUTPUT - elif [ "$BRANCH_NAME" = "provernet" ] - then - echo "deploy_tag=provernet" >> $GITHUB_OUTPUT - echo "branch_name=provernet" >> $GITHUB_OUTPUT - echo "network_api_key=PROVERNET_API_KEY" >> $GITHUB_OUTPUT - echo "network_fork_admin_api_key=PROVERNET_FORK_ADMIN_API_KEY" >> $GITHUB_OUTPUT - echo "agents_per_prover=8" >> $GITHUB_OUTPUT - echo "bot_interval=10" >> $GITHUB_OUTPUT - echo "node_tcp_range_start=40200" >> $GITHUB_OUTPUT - echo "node_udp_range_start=45200" >> $GITHUB_OUTPUT - echo "prover_node_tcp_range_start=41200" >> $GITHUB_OUTPUT - echo "prover_node_udp_range_start=46200" >> $GITHUB_OUTPUT - echo "node_lb_priority_range_start=4200" >> $GITHUB_OUTPUT - echo "pxe_lb_priority_range_start=5200" >> $GITHUB_OUTPUT - echo 
"prover_node_lb_priority_range_start=6200" >> $GITHUB_OUTPUT - echo "faucet_lb_priority=602" >> $GITHUB_OUTPUT - echo "min_txs_per_block=4" >> $GITHUB_OUTPUT - echo "max_txs_per_block=4" >> $GITHUB_OUTPUT - echo "bot_follow_chain=NONE" >> $GITHUB_OUTPUT - echo "bot_flush_setup_txs=true" >> $GITHUB_OUTPUT - echo "bot_max_pending_txs=32" >> $GITHUB_OUTPUT - echo "mainnet_fork_cpu_units=8192" >> $GITHUB_OUTPUT - echo "mainnet_fork_memory_units=32768" >> $GITHUB_OUTPUT - echo "bot_skip_simulation=true" >> $GITHUB_OUTPUT - echo "bot_l2_gas_limit=1000000000" >> $GITHUB_OUTPUT - echo "bot_da_gas_limit=1000000000" >> $GITHUB_OUTPUT - echo "bot_count=1" >> $GITHUB_OUTPUT - elif [ "$BRANCH_NAME" = "alphanet" ] - then - echo "deploy_tag=alphanet" >> $GITHUB_OUTPUT - echo "branch_name=alphanet" >> $GITHUB_OUTPUT - echo "network_api_key=ALPHANET_API_KEY" >> $GITHUB_OUTPUT - echo "network_fork_admin_api_key=ALPHANET_API_KEY" >> $GITHUB_OUTPUT - echo "agents_per_prover=1" >> $GITHUB_OUTPUT - echo "bot_interval=10" >> $GITHUB_OUTPUT - echo "node_tcp_range_start=40000" >> $GITHUB_OUTPUT - echo "node_udp_range_start=45000" >> $GITHUB_OUTPUT - echo "prover_node_tcp_range_start=41000" >> $GITHUB_OUTPUT - echo "prover_node_udp_range_start=46000" >> $GITHUB_OUTPUT - echo "node_lb_priority_range_start=4000" >> $GITHUB_OUTPUT - echo "pxe_lb_priority_range_start=5000" >> $GITHUB_OUTPUT - echo "prover_node_lb_priority_range_start=6000" >> $GITHUB_OUTPUT - echo "faucet_lb_priority=600" >> $GITHUB_OUTPUT - echo "min_txs_per_block=1" >> $GITHUB_OUTPUT - echo "max_txs_per_block=64" >> $GITHUB_OUTPUT - echo "bot_follow_chain=PROVEN" >> $GITHUB_OUTPUT - echo "bot_flush_setup_txs=false" >> $GITHUB_OUTPUT - echo "bot_max_pending_txs=1" >> $GITHUB_OUTPUT - echo "mainnet_fork_cpu_units=2048" >> $GITHUB_OUTPUT - echo "mainnet_fork_memory_units=4096" >> $GITHUB_OUTPUT - echo "bot_skip_simulation=false" >> $GITHUB_OUTPUT - echo "bot_l2_gas_limit=" >> $GITHUB_OUTPUT - echo "bot_da_gas_limit=" >> $GITHUB_OUTPUT - echo "bot_count=1" >> $GITHUB_OUTPUT - else - echo "Unrecognized Branch!!" 
- exit 1 - fi - id: set_network_vars - - build-mainnet-fork: - needs: set-network - env: - BRANCH_NAME: ${{ needs.set-network.outputs.branch_name }} - DEPLOY_TAG: ${{ needs.set-network.outputs.deploy_tag }} - TF_VAR_DEPLOY_TAG: ${{ needs.set-network.outputs.deploy_tag }} - API_KEY: ${{ secrets[needs.set-network.outputs.network_api_key] }} - TF_VAR_API_KEY: ${{ secrets[needs.set-network.outputs.network_api_key] }} - FORK_ADMIN_API_KEY: ${{ secrets[needs.set-network.outputs.network_fork_admin_api_key] }} - TF_VAR_FORK_ADMIN_API_KEY: ${{ secrets[needs.set-network.outputs.network_fork_admin_api_key] }} - API_KEY_NAME: ${{ needs.set-network.outputs.network_api_key }} - runs-on: ${{ github.actor }}-x86 - steps: - - uses: actions/checkout@v4 - with: - ref: "${{ env.GIT_COMMIT }}" - fetch-depth: 0 - - uses: ./.github/ci-setup-action - with: - concurrency_key: build-mainnet-fork-${{ github.actor }} - dockerhub_password: "${{ env.DOCKERHUB_PASSWORD }}" - - - name: Build & push mainnet fork image - if: ${{ github.event.inputs.no_rebuild_images == 'false' }} - run: | - earthly-ci --no-output --push ./iac/mainnet-fork+export-mainnet-fork --DIST_TAG=${{ env.DEPLOY_TAG }} - earthly-ci --no-output --push ./iac/mainnet-fork+export-mainnet-fork --DIST_TAG=${{ github.sha }} - - build-aztec: - needs: set-network - env: - BRANCH_NAME: ${{ needs.set-network.outputs.branch_name }} - DEPLOY_TAG: ${{ needs.set-network.outputs.deploy_tag }} - TF_VAR_DEPLOY_TAG: ${{ needs.set-network.outputs.deploy_tag }} - API_KEY: ${{ secrets[needs.set-network.outputs.network_api_key] }} - TF_VAR_API_KEY: ${{ secrets[needs.set-network.outputs.network_api_key] }} - API_KEY_NAME: ${{ needs.set-network.outputs.network_api_key }} - runs-on: ${{ github.actor }}-x86 - steps: - - uses: actions/checkout@v4 - with: - ref: "${{ env.GIT_COMMIT }}" - fetch-depth: 0 - - uses: ./.github/ci-setup-action - with: - concurrency_key: build-aztec-${{ github.actor }} - dockerhub_password: "${{ env.DOCKERHUB_PASSWORD }}" - - name: "Build & Push aztec images" - timeout-minutes: 40 - if: ${{ github.event.inputs.no_rebuild_images == 'false' }} - run: | - env - earthly-ci --no-output --push ./yarn-project+export-aztec-arch --DIST_TAG=${{ github.sha }} --ARCH=x86_64 - earthly-ci --no-output --push ./yarn-project+export-aztec-arch --DIST_TAG=${{ env.DEPLOY_TAG }} --ARCH=x86_64 - - - name: "Re-tag Aztec image" - if: ${{ github.event.inputs.no_rebuild_images == 'true' }} - run: | - env - docker pull aztecprotocol/aztec:${{ env.DEPLOY_TAG }}-x86_64 - docker tag aztecprotocol/aztec:${{ env.DEPLOY_TAG }}-x86_64 aztecprotocol/aztec:${{ github.sha }}-x86_64 - docker push aztecprotocol/aztec:${{ github.sha }}-x86_64 - - build-aztec-nargo: - needs: [set-network, build-aztec] - env: - BRANCH_NAME: ${{ needs.set-network.outputs.branch_name }} - DEPLOY_TAG: ${{ needs.set-network.outputs.deploy_tag }} - TF_VAR_DEPLOY_TAG: ${{ needs.set-network.outputs.deploy_tag }} - API_KEY: ${{ secrets[needs.set-network.outputs.network_api_key] }} - TF_VAR_API_KEY: ${{ secrets[needs.set-network.outputs.network_api_key] }} - API_KEY_NAME: ${{ needs.set-network.outputs.network_api_key }} - runs-on: ${{ github.actor }}-x86 - steps: - - uses: actions/checkout@v4 - with: - ref: "${{ env.GIT_COMMIT }}" - fetch-depth: 0 - - uses: ./.github/ci-setup-action - with: - concurrency_key: build-aztec-nargo-${{ github.actor }} - dockerhub_password: "${{ env.DOCKERHUB_PASSWORD }}" - - - name: Build & push aztec nargo image - if: ${{ github.event.inputs.no_rebuild_images == 'false' }} - run: | - 
earthly-ci --no-output --push ./aztec-nargo+export-aztec-nargo --DIST_TAG=${{ github.sha }} --ARCH=x86_64 - earthly-ci --no-output --push ./aztec-nargo+export-aztec-nargo --DIST_TAG=${{ env.DEPLOY_TAG }} --ARCH=x86_64 - - publish-aztec-manifests: - needs: [set-network, build-aztec, build-aztec-nargo] - env: - DEPLOY_TAG: ${{ needs.set-network.outputs.deploy_tag }} - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - with: - ref: "${{ env.GIT_COMMIT }}" - - uses: ./.github/ci-setup-action - with: - dockerhub_password: "${{ env.DOCKERHUB_PASSWORD }}" - - - name: "Publish aztec manifests" - run: | - env - docker pull aztecprotocol/aztec:${{ github.sha }}-x86_64 - - docker manifest create aztecprotocol/aztec:${{ env.DEPLOY_TAG }} \ - aztecprotocol/aztec:${{ github.sha }}-x86_64 - docker manifest create aztecprotocol/aztec:${{ github.sha }} \ - aztecprotocol/aztec:${{ github.sha }}-x86_64 - - docker manifest push aztecprotocol/aztec:${{ env.DEPLOY_TAG }} - docker manifest push aztecprotocol/aztec:${{ github.sha }} - - - name: "Publish aztec-nargo manifests" - if: ${{ github.event.inputs.no_rebuild_images == 'false' }} - run: | - docker pull aztecprotocol/aztec-nargo:${{ github.sha }}-x86_64 - - docker manifest create aztecprotocol/aztec-nargo:${{ env.DEPLOY_TAG }} \ - aztecprotocol/aztec-nargo:${{ github.sha }}-x86_64 - docker manifest create aztecprotocol/aztec-nargo:${{ github.sha }} \ - aztecprotocol/aztec-nargo:${{ github.sha }}-x86_64 - - docker manifest push aztecprotocol/aztec-nargo:${{ env.DEPLOY_TAG }} - docker manifest push aztecprotocol/aztec-nargo:${{ github.sha }} - - build-faucet: - needs: [set-network, build-aztec] - env: - BRANCH_NAME: ${{ needs.set-network.outputs.branch_name }} - DEPLOY_TAG: ${{ needs.set-network.outputs.deploy_tag }} - TF_VAR_DEPLOY_TAG: ${{ needs.set-network.outputs.deploy_tag }} - API_KEY: ${{ secrets[needs.set-network.outputs.network_api_key] }} - TF_VAR_API_KEY: ${{ secrets[needs.set-network.outputs.network_api_key] }} - FORK_ADMIN_API_KEY: ${{ secrets[needs.set-network.outputs.network_fork_admin_api_key] }} - TF_VAR_FORK_ADMIN_API_KEY: ${{ secrets[needs.set-network.outputs.network_fork_admin_api_key] }} - API_KEY_NAME: ${{ needs.set-network.outputs.network_api_key }} - runs-on: ${{ github.actor }}-x86 - steps: - - uses: actions/checkout@v4 - with: - ref: "${{ env.GIT_COMMIT }}" - fetch-depth: 0 - - uses: ./.github/ci-setup-action - with: - concurrency_key: build-faucet-${{ github.actor }} - dockerhub_password: "${{ env.DOCKERHUB_PASSWORD }}" - - name: "Build & Push aztec images" - timeout-minutes: 40 - if: ${{ github.event.inputs.no_rebuild_images == 'false' }} - run: | - env - earthly-ci --no-output --push ./yarn-project+export-aztec-faucet --DIST_TAG=${{ env.DEPLOY_TAG }} - earthly-ci --no-output --push ./yarn-project+export-aztec-faucet --DIST_TAG=${{ github.sha }} - - - name: "Re-tag Aztec image" - if: ${{ github.event.inputs.no_rebuild_images == 'true' }} - run: | - env - docker pull aztecprotocol/aztec-faucet:${{ env.DEPLOY_TAG }} - docker tag aztecprotocol/aztec-faucet:${{ env.DEPLOY_TAG }} aztecprotocol/aztec-faucet:${{ github.sha }} - docker push aztecprotocol/aztec-faucet:${{ github.sha }} - - build-cli-wallet: - needs: [set-network, build-aztec] - env: - BRANCH_NAME: ${{ needs.set-network.outputs.branch_name }} - DEPLOY_TAG: ${{ needs.set-network.outputs.deploy_tag }} - TF_VAR_DEPLOY_TAG: ${{ needs.set-network.outputs.deploy_tag }} - API_KEY: ${{ secrets[needs.set-network.outputs.network_api_key] }} - TF_VAR_API_KEY: ${{ 
secrets[needs.set-network.outputs.network_api_key] }} - API_KEY_NAME: ${{ needs.set-network.outputs.network_api_key }} - runs-on: ${{ github.actor }}-x86 - steps: - - uses: actions/checkout@v4 - with: - ref: "${{ env.GIT_COMMIT }}" - fetch-depth: 0 - - uses: ./.github/ci-setup-action - with: - concurrency_key: build-cli-wallet-${{ github.actor }} - dockerhub_password: "${{ env.DOCKERHUB_PASSWORD }}" - - name: "Build & Push aztec images" - timeout-minutes: 40 - if: ${{ github.event.inputs.no_rebuild_images == 'false' }} - run: | - env - earthly-ci --no-output --push ./yarn-project+export-cli-wallet --DIST_TAG=${{ env.DEPLOY_TAG }} --ARCH=x86_64 - earthly-ci --no-output --push ./yarn-project+export-cli-wallet --DIST_TAG=${{ github.sha }} --ARCH=x86_64 - - publish-cli-wallet-manifest: - needs: [set-network, build-cli-wallet] - env: - DEPLOY_TAG: ${{ needs.set-network.outputs.deploy_tag }} - runs-on: ${{ github.actor }}-x86 - steps: - - uses: actions/checkout@v4 - with: - ref: "${{ env.GIT_COMMIT }}" - fetch-depth: 0 - - uses: ./.github/ci-setup-action - with: - dockerhub_password: "${{ env.DOCKERHUB_PASSWORD }}" - - - name: "Publish aztec CLI wallet manifests" - if: ${{ github.event.inputs.no_rebuild_images == 'false' }} - run: | - env - docker pull aztecprotocol/cli-wallet:${{ github.sha }}-x86_64 - - docker manifest create aztecprotocol/cli-wallet:${{ env.DEPLOY_TAG }} \ - aztecprotocol/cli-wallet:${{ github.sha }}-x86_64 - docker manifest create aztecprotocol/cli-wallet:${{ github.sha }} \ - aztecprotocol/cli-wallet:${{ github.sha }}-x86_64 - - docker manifest push aztecprotocol/cli-wallet:${{ env.DEPLOY_TAG }} - docker manifest push aztecprotocol/cli-wallet:${{ github.sha }} - - build-end: - runs-on: ubuntu-latest - needs: - [ - set-network, - build-faucet, - build-mainnet-fork, - publish-aztec-manifests, - publish-cli-wallet-manifest, - ] - env: - BRANCH_NAME: ${{ needs.set-network.outputs.branch_name }} - DEPLOY_TAG: ${{ needs.set-network.outputs.deploy_tag }} - TF_VAR_DEPLOY_TAG: ${{ needs.set-network.outputs.deploy_tag }} - API_KEY: ${{ secrets[needs.set-network.outputs.network_api_key] }} - TF_VAR_API_KEY: ${{ secrets[needs.set-network.outputs.network_api_key] }} - steps: - - uses: actions/checkout@v4 - with: - ref: "${{ env.GIT_COMMIT }}" - - uses: ./.github/ci-setup-action - - uses: hashicorp/setup-terraform@v3 - with: - terraform_version: 1.7.5 - - terraform-deploy: - if: ${{ github.event.inputs.no_deploy == 'false' }} - runs-on: ubuntu-latest - needs: [build-end, set-network] - env: - BRANCH_NAME: ${{ needs.set-network.outputs.branch_name }} - DEPLOY_TAG: ${{ needs.set-network.outputs.deploy_tag }} - TF_VAR_DEPLOY_TAG: ${{ needs.set-network.outputs.deploy_tag }} - API_KEY: ${{ secrets[needs.set-network.outputs.network_api_key] }} - TF_VAR_API_KEY: ${{ secrets[needs.set-network.outputs.network_api_key] }} - FORK_ADMIN_API_KEY: ${{ secrets[needs.set-network.outputs.network_fork_admin_api_key] }} - TF_VAR_FORK_ADMIN_API_KEY: ${{ secrets[needs.set-network.outputs.network_fork_admin_api_key] }} - TF_VAR_AGENTS_PER_PROVER: ${{ needs.set-network.outputs.agents_per_prover }} - TF_VAR_BOT_TX_INTERVAL_SECONDS: ${{ needs.set-network.outputs.bot_interval }} - TF_VAR_NODE_LB_RULE_PRIORITY: ${{ needs.set-network.outputs.node_lb_priority_range_start }} - TF_VAR_PXE_LB_RULE_PRIORITY: ${{ needs.set-network.outputs.pxe_lb_priority_range_start }} - TF_VAR_PROVER_NODE_LB_RULE_PRIORITY: ${{ needs.set-network.outputs.prover_node_lb_priority_range_start }} - TF_VAR_SEQ_MIN_TX_PER_BLOCK: 1 - 
TF_VAR_SEQ_MAX_TX_PER_BLOCK: ${{ needs.set-network.outputs.max_txs_per_block }} - TF_VAR_MAINNET_FORK_CPU_UNITS: ${{ needs.set-network.outputs.mainnet_fork_cpu_units }} - TF_VAR_MAINNET_FORK_MEMORY_UNITS: ${{ needs.set-network.outputs.mainnet_fork_memory_units }} - TF_VAR_BOT_SKIP_PUBLIC_SIMULATION: ${{ needs.set-network.outputs.bot_skip_simulation }} - TF_VAR_BOT_L2_GAS_LIMIT: ${{ needs.set-network.outputs.bot_l2_gas_limit }} - TF_VAR_BOT_DA_GAS_LIMIT: ${{ needs.set-network.outputs.bot_da_gas_limit }} - TF_VAR_BOT_COUNT: ${{ needs.set-network.outputs.bot_count }} - steps: - - uses: actions/checkout@v4 - with: - ref: "${{ env.GIT_COMMIT }}" - - uses: ./.github/ci-setup-action - - uses: hashicorp/setup-terraform@v3 - with: - terraform_version: 1.7.5 - - - name: Configure AWS credentials - uses: aws-actions/configure-aws-credentials@v1 - with: - aws-access-key-id: ${{ env.AWS_ACCESS_KEY_ID }} - aws-secret-access-key: ${{ env.AWS_SECRET_ACCESS_KEY }} - aws-region: eu-west-2 - - - name: Deploy mainnet fork - working-directory: ./iac/mainnet-fork/terraform - run: | - env - terraform init -input=false -backend-config="key=${{ env.DEPLOY_TAG }}/mainnet-fork" - terraform apply -input=false -auto-approve - - - name: Wait for mainnet fork deployment - run: | - ./.github/scripts/wait_for_infra.sh mainnet-fork ${{ env.DEPLOY_TAG }} ${{ env.API_KEY }} - - - name: Deploy L1 Contracts - run: | - set -e - set -o pipefail - - docker pull aztecprotocol/aztec:${{ env.DEPLOY_TAG }} - docker run aztecprotocol/aztec:${{ env.DEPLOY_TAG }} deploy-l1-contracts \ - --private-key ${{ env.CONTRACT_PUBLISHER_PRIVATE_KEY }} \ - --rpc-url https://${{ env.DEPLOY_TAG }}-mainnet-fork.aztec.network:8545/admin-${{ env.FORK_ADMIN_API_KEY }} \ - --l1-chain-id ${{ env.L1_CHAIN_ID }} \ - --salt ${{ github.run_id }} \ - --json | tee ./l1_contracts.json - - # upload contract addresses to S3 - aws s3 cp ./l1_contracts.json ${{ env.CONTRACT_S3_BUCKET }}/${{ env.DEPLOY_TAG }}/l1_contracts.json - - # export contract addresses so they can be used by subsequent terraform deployments - function extract() { - jq -r ".$1" ./l1_contracts.json - } - - echo "TF_VAR_ROLLUP_CONTRACT_ADDRESS=$(extract rollupAddress)" >>$GITHUB_ENV - echo "TF_VAR_REGISTRY_CONTRACT_ADDRESS=$(extract registryAddress)" >>$GITHUB_ENV - echo "TF_VAR_INBOX_CONTRACT_ADDRESS=$(extract inboxAddress)" >>$GITHUB_ENV - echo "TF_VAR_OUTBOX_CONTRACT_ADDRESS=$(extract outboxAddress)" >>$GITHUB_ENV - echo "TF_VAR_FEE_JUICE_CONTRACT_ADDRESS=$(extract feeJuiceAddress)" >>$GITHUB_ENV - echo "TF_VAR_FEE_JUICE_PORTAL_CONTRACT_ADDRESS=$(extract feeJuicePortalAddress)" >>$GITHUB_ENV - - - name: Apply l1-contracts Terraform - working-directory: ./l1-contracts/terraform - run: | - env - terraform init -input=false -backend-config="key=${{ env.DEPLOY_TAG }}/l1-contracts" - terraform apply -input=false -auto-approve - - - name: Disable transactions bot - working-directory: ./yarn-project/aztec/terraform/bot - run: | - terraform init -input=false -backend-config="key=${{ env.DEPLOY_TAG }}/bot" - terraform apply -input=false -auto-approve - - - name: Deploy Aztec Nodes - working-directory: ./yarn-project/aztec/terraform/node - run: | - terraform init -input=false -backend-config="key=${{ env.DEPLOY_TAG }}/aztec-node" - terraform apply -input=false -auto-approve -var="NODE_P2P_TCP_PORT=${{ needs.set-network.outputs.node_tcp_range_start }}" -var="NODE_P2P_UDP_PORT=${{ needs.set-network.outputs.node_udp_range_start }}" - - - name: Deploy Aztec Prover Nodes - working-directory: 
./yarn-project/aztec/terraform/prover-node - run: | - terraform init -input=false -backend-config="key=${{ env.DEPLOY_TAG }}/aztec-prover-node" - terraform apply -input=false -auto-approve -var="NODE_P2P_TCP_PORT=${{ needs.set-network.outputs.prover_node_tcp_range_start }}" -var="NODE_P2P_UDP_PORT=${{ needs.set-network.outputs.prover_node_udp_range_start }}" - - - name: Deploy Provers - working-directory: ./yarn-project/aztec/terraform/prover - run: | - terraform init -input=false -backend-config="key=${{ env.DEPLOY_TAG }}/prover" - terraform apply -input=false -auto-approve - - - name: Deploy PXE - working-directory: ./yarn-project/aztec/terraform/pxe - run: | - terraform init -input=false -backend-config="key=${{ env.DEPLOY_TAG }}/pxe" - terraform apply -input=false -auto-approve -replace="aws_efs_file_system.pxe_data_store" - - bootstrap: - if: ${{ github.event.inputs.no_deploy == 'false' }} - runs-on: ubuntu-latest - needs: [terraform-deploy, set-network] - env: - BRANCH_NAME: ${{ needs.set-network.outputs.branch_name }} - DEPLOY_TAG: ${{ needs.set-network.outputs.deploy_tag }} - TF_VAR_DEPLOY_TAG: ${{ needs.set-network.outputs.deploy_tag }} - API_KEY: ${{ secrets[needs.set-network.outputs.network_api_key] }} - TF_VAR_API_KEY: ${{ secrets[needs.set-network.outputs.network_api_key] }} - FORK_ADMIN_API_KEY: ${{ secrets[needs.set-network.outputs.network_fork_admin_api_key] }} - TF_VAR_FORK_ADMIN_API_KEY: ${{ secrets[needs.set-network.outputs.network_fork_admin_api_key] }} - steps: - - uses: actions/checkout@v4 - with: - ref: "${{ env.GIT_COMMIT }}" - - - uses: ./.github/ci-setup-action - - - name: Configure AWS credentials - uses: aws-actions/configure-aws-credentials@v1 - with: - aws-access-key-id: ${{ env.AWS_ACCESS_KEY_ID }} - aws-secret-access-key: ${{ env.AWS_SECRET_ACCESS_KEY }} - aws-region: eu-west-2 - - - name: Wait for PXE to be available - run: | - env - ./.github/scripts/wait_for_infra.sh pxe ${{ env.DEPLOY_TAG }} ${{ env.API_KEY }} - - - name: Setup protocol contracts - run: | - set -e - set -o pipefail - docker pull aztecprotocol/aztec:${{ env.DEPLOY_TAG }} - docker run aztecprotocol/aztec:${{ env.DEPLOY_TAG }} setup-protocol-contracts \ - --rpc-url https://api.aztec.network/${{ env.DEPLOY_TAG }}/aztec-pxe/${{ env.API_KEY }} \ - --l1-chain-id ${{ env.L1_CHAIN_ID }} \ - --json | tee ./protocol_contracts.json - - aws s3 cp ./protocol_contracts.json ${{ env.CONTRACT_S3_BUCKET }}/${{ env.DEPLOY_TAG }}/protocol_contracts.json - - - name: Bootstrap network - run: | - set -e - set -o pipefail - docker run aztecprotocol/aztec:${{ env.DEPLOY_TAG }} bootstrap-network \ - --rpc-url https://api.aztec.network/${{ env.DEPLOY_TAG }}/aztec-pxe/${{ env.API_KEY }} \ - --l1-rpc-url https://${{ env.DEPLOY_TAG }}-mainnet-fork.aztec.network:8545/admin-${{ env.FORK_ADMIN_API_KEY }} \ - --l1-chain-id ${{ env.L1_CHAIN_ID }} \ - --l1-private-key ${{ env.CONTRACT_PUBLISHER_PRIVATE_KEY }} \ - --json | tee ./basic_contracts.json - - aws s3 cp ./basic_contracts.json ${{ env.CONTRACT_S3_BUCKET }}/${{ env.DEPLOY_TAG }}/basic_contracts.json - - deploy-faucet: - if: ${{ github.event.inputs.no_deploy == 'false' }} - runs-on: ubuntu-latest - needs: [terraform-deploy, bootstrap, set-network] - env: - BRANCH_NAME: ${{ needs.set-network.outputs.branch_name }} - DEPLOY_TAG: ${{ needs.set-network.outputs.deploy_tag }} - TF_VAR_DEPLOY_TAG: ${{ needs.set-network.outputs.deploy_tag }} - API_KEY: ${{ secrets[needs.set-network.outputs.network_api_key] }} - TF_VAR_API_KEY: ${{ 
secrets[needs.set-network.outputs.network_api_key] }} - FORK_ADMIN_API_KEY: ${{ secrets[needs.set-network.outputs.network_fork_admin_api_key] }} - TF_VAR_FORK_ADMIN_API_KEY: ${{ secrets[needs.set-network.outputs.network_fork_admin_api_key] }} - TF_VAR_FAUCET_LB_RULE_PRIORITY: ${{ needs.set-network.outputs.faucet_lb_priority }} - steps: - - uses: actions/checkout@v4 - with: - ref: "${{ env.GIT_COMMIT }}" - - uses: ./.github/ci-setup-action - - uses: hashicorp/setup-terraform@v3 - with: - terraform_version: 1.7.5 - - - name: Configure AWS credentials - uses: aws-actions/configure-aws-credentials@v1 - with: - aws-access-key-id: ${{ env.AWS_ACCESS_KEY_ID }} - aws-secret-access-key: ${{ env.AWS_SECRET_ACCESS_KEY }} - aws-region: eu-west-2 - - - name: Retrieve contract addresses - run: | - set -e - aws s3 cp ${{ env.CONTRACT_S3_BUCKET }}/${{ env.DEPLOY_TAG }}/l1_contracts.json ./l1_contracts.json - aws s3 cp ${{ env.CONTRACT_S3_BUCKET }}/${{ env.DEPLOY_TAG }}/basic_contracts.json ./basic_contracts.json - - echo "TF_VAR_FEE_JUICE_CONTRACT_ADDRESS=$(jq -r '.feeJuiceAddress' ./l1_contracts.json)" >>$GITHUB_ENV - echo "TF_VAR_DEV_COIN_CONTRACT_ADDRESS=$(jq -r '.devCoinL1' ./basic_contracts.json)" >>$GITHUB_ENV - - - name: Deploy Faucet - working-directory: ./yarn-project/aztec-faucet/terraform - run: | - terraform init -input=false -backend-config="key=${{ env.DEPLOY_TAG }}/aztec-faucet" - terraform apply -input=false -auto-approve - - enable-proving: - if: ${{ github.event.inputs.no_deploy == 'false' }} - runs-on: ubuntu-latest - needs: [deploy-faucet, set-network] - env: - BRANCH_NAME: ${{ needs.set-network.outputs.branch_name }} - DEPLOY_TAG: ${{ needs.set-network.outputs.deploy_tag }} - TF_VAR_DEPLOY_TAG: ${{ needs.set-network.outputs.deploy_tag }} - API_KEY: ${{ secrets[needs.set-network.outputs.network_api_key] }} - TF_VAR_API_KEY: ${{ secrets[needs.set-network.outputs.network_api_key] }} - FORK_ADMIN_API_KEY: ${{ secrets[needs.set-network.outputs.network_fork_admin_api_key] }} - TF_VAR_FORK_ADMIN_API_KEY: ${{ secrets[needs.set-network.outputs.network_fork_admin_api_key] }} - TF_VAR_AGENTS_PER_PROVER: ${{ needs.set-network.outputs.agents_per_prover }} - TF_VAR_BOT_TX_INTERVAL_SECONDS: ${{ needs.set-network.outputs.bot_interval }} - TF_VAR_NODE_LB_RULE_PRIORITY: ${{ needs.set-network.outputs.node_lb_priority_range_start }} - TF_VAR_PXE_LB_RULE_PRIORITY: ${{ needs.set-network.outputs.pxe_lb_priority_range_start }} - TF_VAR_PROVER_NODE_LB_RULE_PRIORITY: ${{ needs.set-network.outputs.prover_node_lb_priority_range_start }} - TF_VAR_BOT_FLUSH_SETUP_TRANSACTIONS: ${{ needs.set-network.outputs.bot_flush_setup_txs }} - TF_VAR_BOT_MAX_PENDING_TXS: ${{ needs.set-network.outputs.bot_max_pending_txs }} - TF_VAR_SEQ_MIN_TX_PER_BLOCK: ${{ needs.set-network.outputs.min_txs_per_block }} - TF_VAR_SEQ_MAX_TX_PER_BLOCK: ${{ needs.set-network.outputs.max_txs_per_block }} - TF_VAR_BOT_FOLLOW_CHAIN: ${{ needs.set-network.outputs.bot_follow_chain }} - TF_VAR_PROVING_ENABLED: true - TF_VAR_BOT_NO_START: false - TF_VAR_BOT_SKIP_PUBLIC_SIMULATION: ${{ needs.set-network.outputs.bot_skip_simulation }} - TF_VAR_BOT_L2_GAS_LIMIT: ${{ needs.set-network.outputs.bot_l2_gas_limit }} - TF_VAR_BOT_DA_GAS_LIMIT: ${{ needs.set-network.outputs.bot_da_gas_limit }} - TF_VAR_BOT_COUNT: ${{ needs.set-network.outputs.bot_count }} - steps: - - uses: actions/checkout@v4 - with: - ref: "${{ env.GIT_COMMIT }}" - - uses: ./.github/ci-setup-action - - uses: hashicorp/setup-terraform@v3 - with: - terraform_version: 1.7.5 - - - name: Configure 
AWS credentials - uses: aws-actions/configure-aws-credentials@v1 - with: - aws-access-key-id: ${{ env.AWS_ACCESS_KEY_ID }} - aws-secret-access-key: ${{ env.AWS_SECRET_ACCESS_KEY }} - aws-region: eu-west-2 - - # Unneeded for now, since the prover-node runs with simulated proofs and submits them to L1, which does not verify them yet. - # - name: Set latest block as proven - # working-directory: ./yarn-project/aztec/terraform/pxe - # run: | - # set -eo pipefail - # docker run aztecprotocol/aztec:${{ env.DEPLOY_TAG }} set-proven-through \ - # --rpc-url https://api.aztec.network/${{ env.DEPLOY_TAG }}/aztec-pxe/${{ env.API_KEY }} \ - # --l1-rpc-url https://${{ env.DEPLOY_TAG }}-mainnet-fork.aztec.network:8545/admin-${{ env.FORK_ADMIN_API_KEY }} \ - # --l1-chain-id ${{ env.L1_CHAIN_ID }} \ - # --l1-private-key ${{ env.CONTRACT_PUBLISHER_PRIVATE_KEY }} - - - name: Deploy PXE - working-directory: ./yarn-project/aztec/terraform/pxe - run: | - terraform init -input=false -backend-config="key=${{ env.DEPLOY_TAG }}/pxe" - terraform apply -input=false -auto-approve - - - name: Deploy Aztec Nodes - working-directory: ./yarn-project/aztec/terraform/node - run: | - env - terraform init -input=false -backend-config="key=${{ env.DEPLOY_TAG }}/aztec-node" - terraform apply -input=false -auto-approve -var="NODE_P2P_TCP_PORT=${{ needs.set-network.outputs.node_tcp_range_start }}" -var="NODE_P2P_UDP_PORT=${{ needs.set-network.outputs.node_udp_range_start }}" - - - name: Deploy Aztec Prover Nodes - working-directory: ./yarn-project/aztec/terraform/prover-node - run: | - terraform init -input=false -backend-config="key=${{ env.DEPLOY_TAG }}/aztec-prover-node" - terraform apply -input=false -auto-approve -var="NODE_P2P_TCP_PORT=${{ needs.set-network.outputs.prover_node_tcp_range_start }}" -var="NODE_P2P_UDP_PORT=${{ needs.set-network.outputs.prover_node_udp_range_start }}" - - - name: Deploy Provers - working-directory: ./yarn-project/aztec/terraform/prover - run: | - terraform init -input=false -backend-config="key=${{ env.DEPLOY_TAG }}/prover" - terraform apply -input=false -auto-approve - - - name: Wait for PXE to be available - run: | - ./.github/scripts/wait_for_infra.sh pxe ${{ env.DEPLOY_TAG }} ${{ env.API_KEY }} - - - name: Deploy verifier - working-directory: ./yarn-project/aztec/terraform/pxe - run: | - set -eo pipefail - docker run aztecprotocol/aztec:${{ env.DEPLOY_TAG }} deploy-l1-verifier \ - --rpc-url https://api.aztec.network/${{ env.DEPLOY_TAG }}/aztec-pxe/${{ env.API_KEY }} \ - --l1-rpc-url https://${{ env.DEPLOY_TAG }}-mainnet-fork.aztec.network:8545/admin-${{ env.FORK_ADMIN_API_KEY }} \ - --l1-chain-id ${{ env.L1_CHAIN_ID }} \ - --l1-private-key ${{ env.CONTRACT_PUBLISHER_PRIVATE_KEY }} - - - name: Enable transactions bot - working-directory: ./yarn-project/aztec/terraform/bot - run: | - terraform init -input=false -backend-config="key=${{ env.DEPLOY_TAG }}/bot" - terraform apply -input=false -auto-approve
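The deleted workflow above pinned each publisher key as a separate secret and carried a TODO to derive them from `FORK_MNEMONIC` at the indices listed in its header comment (0 for the sequencer, 2 for the prover, 8 for contract deployment, 9 for the faucet). A sketch of that derivation using Foundry's `cast`; the exact subcommand and its output format vary by `cast` version, so treat this as an assumption to check against `cast wallet --help`:

```bash
# Sketch: derive per-role private keys from one mnemonic rather than storing
# each key as its own secret. Index mapping follows the header comment above.
MNEMONIC="test test test test test test test test test test test junk"  # placeholder

SEQ_1_PUBLISHER_PRIVATE_KEY="$(cast wallet private-key "$MNEMONIC" 0)"
PROVER_1_PUBLISHER_PRIVATE_KEY="$(cast wallet private-key "$MNEMONIC" 2)"
CONTRACT_PUBLISHER_PRIVATE_KEY="$(cast wallet private-key "$MNEMONIC" 8)"
FAUCET_ACCOUNT_PRIVATE_KEY="$(cast wallet private-key "$MNEMONIC" 9)"
```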
diff --git a/.github/workflows/network-deploy.yml b/.github/workflows/network-deploy.yml index 8ceba6151413..2d48e35c9e1e 100644 --- a/.github/workflows/network-deploy.yml +++ b/.github/workflows/network-deploy.yml @@ -1,6 +1,33 @@ name: Aztec Network Deployment on: + workflow_call: + inputs: + namespace: + description: The namespace to deploy to, e.g. smoke + required: true + type: string + values_file: + description: The values file to use, e.g. 1-validators.yaml + required: true + type: string + aztec_docker_image: + description: The Aztec Docker image to use, e.g. aztecprotocol/aztec:da809c58290f9590836f45ec59376cbf04d3c4ce-x86_64 + required: true + type: string + deployment_mnemonic_secret_name: + description: The name of the secret which holds the boot node's contract deployment mnemonic + required: true + type: string + default: testnet-deployment-mnemonic + respect_tf_lock: + description: Whether to respect the Terraform lock + required: false + type: string + default: "true" + secrets: + GCP_SA_KEY: + required: true workflow_dispatch: inputs: namespace: @@ -12,6 +39,14 @@ on: aztec_docker_image: description: The Aztec Docker image to use, e.g. aztecprotocol/aztec:da809c58290f9590836f45ec59376cbf04d3c4ce-x86_64 required: true + deployment_mnemonic_secret_name: + description: The name of the secret which holds the boot node's contract deployment mnemonic + required: true + default: testnet-deployment-mnemonic + respect_tf_lock: + description: Whether to respect the Terraform lock + required: false + default: "true" jobs: network_deployment: @@ -26,6 +61,7 @@ jobs: AZTEC_DOCKER_IMAGE: ${{ inputs.aztec_docker_image }} NAMESPACE: ${{ inputs.namespace }} VALUES_FILE: ${{ inputs.values_file }} + DEPLOYMENT_MNEMONIC_SECRET_NAME: ${{ inputs.deployment_mnemonic_secret_name }} CHART_PATH: ./spartan/aztec-network CLUSTER_NAME: aztec-gke REGION: us-west1-a @@ -62,6 +98,12 @@ jobs: echo "Terraform state bucket already exists" fi + - name: Grab the boot node deployment mnemonic + id: get-mnemonic + run: | + echo "::add-mask::$(gcloud secrets versions access latest --secret=${{ env.DEPLOYMENT_MNEMONIC_SECRET_NAME }})" + echo "mnemonic=$(gcloud secrets versions access latest --secret=${{ env.DEPLOYMENT_MNEMONIC_SECRET_NAME }})" >> "$GITHUB_OUTPUT" + - name: Setup Terraform uses: hashicorp/setup-terraform@v2 with: @@ -78,12 +120,14 @@ jobs: working-directory: ./spartan/terraform/deploy-release run: | terraform plan \ - -var="release_name=${{ env.NAMESPACE }}" \ - -var="values_file=${{ env.VALUES_FILE }}" \ - -var="gke_cluster_context=${{ env.GKE_CLUSTER_CONTEXT }}" \ - -var="aztec_docker_image=${{ env.AZTEC_DOCKER_IMAGE }}" \ - -out=tfplan + -var="RELEASE_NAME=${{ env.NAMESPACE }}" \ + -var="VALUES_FILE=${{ env.VALUES_FILE }}" \ + -var="GKE_CLUSTER_CONTEXT=${{ env.GKE_CLUSTER_CONTEXT }}" \ + -var="AZTEC_DOCKER_IMAGE=${{ env.AZTEC_DOCKER_IMAGE }}" \ + -var="L1_DEPLOYMENT_MNEMONIC=${{ steps.get-mnemonic.outputs.mnemonic }}" \ + -out=tfplan \ + -lock=${{ inputs.respect_tf_lock }} - name: Terraform Apply working-directory: ./spartan/terraform/deploy-release - run: terraform apply -auto-approve tfplan + run: terraform apply -lock=${{ inputs.respect_tf_lock }} -auto-approve tfplan
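The new get-mnemonic step in network-deploy.yml pulls the mnemonic from Google Secret Manager and masks it before writing it to the step output. Worth noting: `::add-mask::` only redacts log lines printed after it runs, and it matches the exact string registered, so the secret must be masked before anything else echoes it (multi-line secrets need each line masked separately). The pattern in isolation, with one fetch instead of two (secret name illustrative):

```bash
# Sketch: fetch once, mask, then expose to later steps via the step output.
SECRET_VALUE="$(gcloud secrets versions access latest --secret=testnet-deployment-mnemonic)"
echo "::add-mask::$SECRET_VALUE"
echo "mnemonic=$SECRET_VALUE" >> "$GITHUB_OUTPUT"
```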
diff --git a/.github/workflows/publish-aztec-packages.yml b/.github/workflows/publish-aztec-packages.yml index c2fe77d9b882..9da6aad15897 100644 --- a/.github/workflows/publish-aztec-packages.yml +++ b/.github/workflows/publish-aztec-packages.yml @@ -4,7 +4,6 @@ on: push: branches: - master - - release/testnet* - "*/release-master*" workflow_dispatch: inputs: @@ -212,7 +211,11 @@ jobs: - name: Publish aztec manifests if: ${{ env.SHOULD_PUBLISH_DOCKER_IMAGES == 'true' }} run: | - if [ "${{ github.event_name }}" == "workflow_dispatch" ]; then + if [[ "${{ github.ref_name }}" == release/* ]]; then + TAG="${{ github.ref_name }}" + VERSION="${TAG#release/}" + DIST_TAG=devnet + elif [ "${{ github.event_name }}" == "workflow_dispatch" ]; then TAG=${{ env.DEPLOY_TAG }} VERSION=${TAG#aztec-packages-v} DIST_TAG=latest @@ -276,52 +279,59 @@ jobs: concurrency_key: publish-npm dockerhub_password: "${{ env.DOCKERHUB_PASSWORD }}" + - name: Set tags and versions + id: version_step + run: | + if [[ "${{ github.ref_name }}" == release/* ]]; then + DIST_TAG=devnet + TAG=${{ env.DEPLOY_TAG }} + VERSION=${TAG#aztec-packages-v}-devnet + else + DIST_TAG=latest + TAG=${{ env.DEPLOY_TAG }} + VERSION=${TAG#aztec-packages-v} + fi + echo "VERSION=$VERSION" >> $GITHUB_OUTPUT + echo "DIST_TAG=$DIST_TAG" >> $GITHUB_OUTPUT + - name: Publish bb.js NPM package run: | - DEPLOY_TAG=${{ env.DEPLOY_TAG }} - VERSION=${DEPLOY_TAG#aztec-packages-v} earthly-ci \ --no-output \ --secret NPM_TOKEN=${{ env.NPM_TOKEN }} \ ./barretenberg/ts+publish-npm \ - --DIST_TAG=latest \ - --VERSION=$VERSION \ + --DIST_TAG=${{ steps.version_step.outputs.DIST_TAG }} \ + --VERSION=${{ steps.version_step.outputs.VERSION }} \ --DRY_RUN=${{ (github.event.inputs.publish == 'false') && '1' || '0' }} - name: Publish yarn-project NPM packages run: | - DEPLOY_TAG=${{ env.DEPLOY_TAG }} - VERSION=${DEPLOY_TAG#aztec-packages-v} earthly-ci \ --no-output \ --secret NPM_TOKEN=${{ env.NPM_TOKEN }} \ ./yarn-project+publish-npm \ - --DIST_TAG=latest \ - --VERSION=$VERSION \ + --DIST_TAG=${{ steps.version_step.outputs.DIST_TAG }} \ + --VERSION=${{ steps.version_step.outputs.VERSION }} \ --DRY_RUN=${{ (github.event.inputs.publish == 'false') && '1' || '0' }} - name: Publish l1-contracts NPM package run: | - DEPLOY_TAG=${{ env.DEPLOY_TAG }} - VERSION=${DEPLOY_TAG#aztec-packages-v} earthly-ci \ --no-output \ --secret NPM_TOKEN=${{ env.NPM_TOKEN }} \ ./l1-contracts+publish-npm \ - --DIST_TAG=latest \ - --VERSION=$VERSION \ + --DIST_TAG=${{ steps.version_step.outputs.DIST_TAG }} \ + --VERSION=${{ steps.version_step.outputs.VERSION }} \ --DRY_RUN=${{ (github.event.inputs.publish == 'false') && '1' || '0' }} - name: Publish spartan NPM package run: | - DEPLOY_TAG=${{ env.DEPLOY_TAG }} - VERSION=${DEPLOY_TAG#aztec-packages-v} earthly-ci \ --no-output \ --secret NPM_TOKEN=${{ env.NPM_TOKEN }} \ ./spartan/releases/rough-rhino+publish-npm \ - --DIST_TAG=latest \ - --VERSION=$VERSION \ + --DIST_TAG=${{ steps.version_step.outputs.DIST_TAG }} \ + --VERSION=${{ steps.version_step.outputs.VERSION }} \ --DRY_RUN=${{ (github.event.inputs.publish == 'false') && '1' || '0' }} publish-aztec-up: @@ -346,13 +356,17 @@ jobs: - name: Publish aztec-up working-directory: ./aztec-up/terraform run: | - terraform init - if [ -n "${{ env.DEPLOY_TAG }}" ]; then + if [[ "${{ github.ref_name }}" == release/* ]]; then + TAG="${{ github.ref_name }}" + VERSION="${TAG#release/}" + elif [ -n "${{ env.DEPLOY_TAG }}" ]; then TAG=${{ env.DEPLOY_TAG }} + VERSION=${TAG#aztec-packages-v} else - TAG=${{ github.ref_name }} + VERSION=${{ github.ref_name }} fi - export TF_VAR_VERSION=${TAG#aztec-packages-v} + terraform init + export TF_VAR_VERSION=${VERSION} terraform apply -auto-approve # Sometimes runners get killed because they can be spot, we try once more for good measure
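The publish steps above branch on the ref name and then strip prefixes with `${TAG#...}`. Two bash details this depends on: inside single brackets, `[ "$x" == "release/*" ]` compares against the literal string `release/*` (which is why the conditions above use `[[ ... ]]` with an unquoted pattern), and `${TAG#prefix}` removes the shortest matching prefix. A standalone sketch:

```bash
# Sketch: ref name -> dist-tag and version, mirroring the logic above.
TAG="release/0.66.0"

if [[ "$TAG" == release/* ]]; then   # [[ ]] glob-matches; [ ] would compare literally
  DIST_TAG=devnet
  VERSION="${TAG#release/}"          # shortest-prefix strip -> 0.66.0
else
  DIST_TAG=latest
  VERSION="${TAG#aztec-packages-v}"
fi

echo "DIST_TAG=$DIST_TAG VERSION=$VERSION"
```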
steps.release.outputs.tag_name }}", "publish": true }' @@ -36,7 +39,7 @@ jobs: if: ${{ steps.release.outputs.tag_name }} with: workflow: publish-bb.yml - ref: master + ref: ${{ env.BRANCH_NAME }} token: ${{ secrets.AZTEC_BOT_GITHUB_TOKEN }} inputs: '{ "tag": "${{ steps.release.outputs.tag_name }}", "publish": true }' @@ -45,6 +48,6 @@ jobs: if: ${{ steps.release.outputs.tag_name }} with: workflow: publish-docs.yml - ref: master + ref: ${{ env.BRANCH_NAME }} token: ${{ secrets.AZTEC_BOT_GITHUB_TOKEN }} inputs: '{ "tag": "${{ steps.release.outputs.tag_name }}" }' diff --git a/.github/workflows/sepolia-deploy.yml b/.github/workflows/sepolia-deploy.yml index d908f89827de..4f736c56dd85 100644 --- a/.github/workflows/sepolia-deploy.yml +++ b/.github/workflows/sepolia-deploy.yml @@ -85,6 +85,7 @@ jobs: echo "TF_VAR_OUTBOX_CONTRACT_ADDRESS=$(extract outboxAddress)" >>$GITHUB_ENV echo "TF_VAR_AVAILABILITY_ORACLE_CONTRACT_ADDRESS=$(extract availabilityOracleAddress)" >>$GITHUB_ENV echo "TF_VAR_FEE_JUICE_CONTRACT_ADDRESS=$(extract feeJuiceAddress)" >>$GITHUB_ENV + echo "TF_VAR_STAKING_ASSET_CONTRACT_ADDRESS=$(extract stakingAssetAddress)" >>$GITHUB_ENV echo "TF_VAR_FEE_JUICE_PORTAL_CONTRACT_ADDRESS=$(extract feeJuicePortalAddress)" >>$GITHUB_ENV - name: Apply l1-contracts Terraform diff --git a/.noir-sync-commit b/.noir-sync-commit index 9bbde85e56b5..29560ec97973 100644 --- a/.noir-sync-commit +++ b/.noir-sync-commit @@ -1 +1 @@ -68c32b4ffd9b069fe4b119327dbf4018c17ab9d4 +6d0f86ba389a5b59b1d7fdcadcbce3e40eecaa48 diff --git a/.release-please-manifest.json b/.release-please-manifest.json index fe3c2693b52f..9b0c023a62d6 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,7 +1,7 @@ { - ".": "0.65.2", + ".": "0.66.0", "yarn-project/cli": "0.35.1", - "yarn-project/aztec": "0.65.2", - "barretenberg": "0.65.2", - "barretenberg/ts": "0.65.2" + "yarn-project/aztec": "0.66.0", + "barretenberg": "0.66.0", + "barretenberg/ts": "0.66.0" } diff --git a/CHANGELOG.md b/CHANGELOG.md index 76f4e5e34cdc..d98f25dc01f0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,199 @@ # Changelog +## [0.66.0](https://github.com/AztecProtocol/aztec-packages/compare/aztec-packages-v0.65.2...aztec-packages-v0.66.0) (2024-12-06) + + +### ⚠ BREAKING CHANGES + +* remove `ec` module from stdlib (https://github.com/noir-lang/noir/pull/6612) +* Disallow `#[export]` on associated methods (https://github.com/noir-lang/noir/pull/6626) +* Require types of globals to be specified (https://github.com/noir-lang/noir/pull/6592) +* remove eddsa from stdlib (https://github.com/noir-lang/noir/pull/6591) +* Remove debug and winston in favor of pino ([#10355](https://github.com/AztecProtocol/aztec-packages/issues/10355)) +* remove SchnorrVerify opcode ([#9897](https://github.com/AztecProtocol/aztec-packages/issues/9897)) + +### Features + +* Add `array_refcount` and `slice_refcount` builtins for debugging (https://github.com/noir-lang/noir/pull/6584) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* Add `BoundedVec::from_parts` and `BoundedVec::from_parts_unchecked` (https://github.com/noir-lang/noir/pull/6691) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* Add memory report into the CI (https://github.com/noir-lang/noir/pull/6630) 
([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* Add workflow_call trigger to network-deploy ([#10451](https://github.com/AztecProtocol/aztec-packages/issues/10451)) ([18254e6](https://github.com/AztecProtocol/aztec-packages/commit/18254e6518bdcb93006d8f4c7cac2c4e8da05cbf)) +* Adding configurable data dir and p2p pk for testnet nodes ([#10422](https://github.com/AztecProtocol/aztec-packages/issues/10422)) ([77b0039](https://github.com/AztecProtocol/aztec-packages/commit/77b0039925ccdb322c8fa224cb05f91d82d8c0f1)) +* Agent and broker expose OTEL metrics ([#10264](https://github.com/AztecProtocol/aztec-packages/issues/10264)) ([c2c8cc6](https://github.com/AztecProtocol/aztec-packages/commit/c2c8cc6f7336cf4b2fa14d9a7f1af1a30f1b8f79)) +* Allow filtering which SSA passes are printed (https://github.com/noir-lang/noir/pull/6636) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* Allow ignoring test failures from foreign calls (https://github.com/noir-lang/noir/pull/6660) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* Allow querying block number for tree indices ([#10332](https://github.com/AztecProtocol/aztec-packages/issues/10332)) ([cf05a7a](https://github.com/AztecProtocol/aztec-packages/commit/cf05a7a346ea11853e940d5e9ac105ef0d629d35)) +* AMM ([#10153](https://github.com/AztecProtocol/aztec-packages/issues/10153)) ([90668c3](https://github.com/AztecProtocol/aztec-packages/commit/90668c35a8556c4e77fce9fb4e6e0de931c7f872)) +* Avoid incrementing reference counts in some cases (https://github.com/noir-lang/noir/pull/6568) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* Avoid inserting an empty leaf in indexed trees on update ([#10281](https://github.com/AztecProtocol/aztec-packages/issues/10281)) ([5a04ca8](https://github.com/AztecProtocol/aztec-packages/commit/5a04ca880ae2a0f285b6a5a110286ba10bc4a6c3)) +* Avoid inserting an empty leaf in indexed trees on update ([#10334](https://github.com/AztecProtocol/aztec-packages/issues/10334)) ([80fad45](https://github.com/AztecProtocol/aztec-packages/commit/80fad4544a4d8c1b488f8b4b4f86fe508ed1f4cc)) +* **bb:** Define std::hash for field ([#10312](https://github.com/AztecProtocol/aztec-packages/issues/10312)) ([752bc59](https://github.com/AztecProtocol/aztec-packages/commit/752bc59c579710c21acf6cc97164e377f72c256c)) +* Better error message when trying to invoke struct function field (https://github.com/noir-lang/noir/pull/6661) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* Client IVC API ([#10217](https://github.com/AztecProtocol/aztec-packages/issues/10217)) ([cc54a1e](https://github.com/AztecProtocol/aztec-packages/commit/cc54a1e1ef75b29d160a02d03cf9b29e28d3e4ca)) +* **comptime:** Implement blackbox functions in comptime interpreter (https://github.com/noir-lang/noir/pull/6551) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* Delete attestations older than a slot ([#10326](https://github.com/AztecProtocol/aztec-packages/issues/10326)) 
([24abcfe](https://github.com/AztecProtocol/aztec-packages/commit/24abcfeba2cbf506cae79246a545c65913ea5c2f)) +* Deploy devnet to k8s ([#10449](https://github.com/AztecProtocol/aztec-packages/issues/10449)) ([27506c1](https://github.com/AztecProtocol/aztec-packages/commit/27506c1112a224482f3b0479d92b2053dbf13512)) +* Deploy networks via github actions ([#10381](https://github.com/AztecProtocol/aztec-packages/issues/10381)) ([7e19b39](https://github.com/AztecProtocol/aztec-packages/commit/7e19b3991ca34bcf9dd43284d4d21ded87824366)) +* **docs:** Applied structure feedback ([#9288](https://github.com/AztecProtocol/aztec-packages/issues/9288)) ([5b0b721](https://github.com/AztecProtocol/aztec-packages/commit/5b0b721ec00545794b5e54e0e24dbc0e14b1fdd8)) +* Epoch cache, do not attest if not in committee or from current proposer ([#10327](https://github.com/AztecProtocol/aztec-packages/issues/10327)) ([9ebaa65](https://github.com/AztecProtocol/aztec-packages/commit/9ebaa65ce290481e5dc00174e92137561360549a)) +* Gas Utils for L1 operations ([#9834](https://github.com/AztecProtocol/aztec-packages/issues/9834)) ([17fa214](https://github.com/AztecProtocol/aztec-packages/commit/17fa214a5af4eb8364b09fc3e148fcd3a8949779)) +* Improve parser recovery of constructor field with '::' instead of ':' (https://github.com/noir-lang/noir/pull/6701) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* Integrate verify_proof calls in mock protocol circuits ([#9253](https://github.com/AztecProtocol/aztec-packages/issues/9253)) ([7ed89aa](https://github.com/AztecProtocol/aztec-packages/commit/7ed89aaa9d0968af6334c1c8abf6c06a42754c52)) +* Making testnet script write a docker compose file ([#10333](https://github.com/AztecProtocol/aztec-packages/issues/10333)) ([be54cc3](https://github.com/AztecProtocol/aztec-packages/commit/be54cc3e2e58b809c3795a2b85e76711cdff2216)) +* Manage enqueued calls & phases in AVM witgen ([#10310](https://github.com/AztecProtocol/aztec-packages/issues/10310)) ([e7ebef8](https://github.com/AztecProtocol/aztec-packages/commit/e7ebef8d09744fdc24a79cb0bf74638b0a8f5dc8)) +* Mock IVC state from arbitrary acir IVC recursion constraints ([#10314](https://github.com/AztecProtocol/aztec-packages/issues/10314)) ([ac7c0da](https://github.com/AztecProtocol/aztec-packages/commit/ac7c0da38ff05d6f11c4d6a6244c4526ac00232e)) +* Optionally emit public bytecode ([#10365](https://github.com/AztecProtocol/aztec-packages/issues/10365)) ([84ff623](https://github.com/AztecProtocol/aztec-packages/commit/84ff623ea00d0c6da4db960653655d7d485bccb1)) +* **p2p:** Persist node private p2p keys ([#10324](https://github.com/AztecProtocol/aztec-packages/issues/10324)) ([1c32eda](https://github.com/AztecProtocol/aztec-packages/commit/1c32eda798158682db204a9e5efcd867694a6bd2)) +* **p2p:** Snappy compress p2p messages ([#10417](https://github.com/AztecProtocol/aztec-packages/issues/10417)) ([c643a54](https://github.com/AztecProtocol/aztec-packages/commit/c643a540262dcfe3106d03da3c3ca9bbaef338f0)) +* **perf:** Track last loads per block in mem2reg and remove them if possible (https://github.com/noir-lang/noir/pull/6088) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* Process blocks in 
parallel during epoch proving ([#10263](https://github.com/AztecProtocol/aztec-packages/issues/10263)) ([a9d418c](https://github.com/AztecProtocol/aztec-packages/commit/a9d418c07268a38e0c5432983438ea00b97d233b)) +* Reduce memory consumption by storing array length as `u32` during SSA (https://github.com/noir-lang/noir/pull/6606) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* Release please for release branch ([#10467](https://github.com/AztecProtocol/aztec-packages/issues/10467)) ([38941bf](https://github.com/AztecProtocol/aztec-packages/commit/38941bfec92ab2c61d2db25ac45c3c9f3312ee31)) +* Replace quadratic removal of `rc` instructions (https://github.com/noir-lang/noir/pull/6705) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* Replace quadratic removal of rc instructions ([#10416](https://github.com/AztecProtocol/aztec-packages/issues/10416)) ([9d833c5](https://github.com/AztecProtocol/aztec-packages/commit/9d833c53dea362599374802e5d64c7c9d62f76be)) +* Revert changes to `ValueMerger` and `Instruction::IfElse` (https://github.com/noir-lang/noir/pull/6673) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* Show printable byte arrays as byte strings in SSA (https://github.com/noir-lang/noir/pull/6709) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* Simplify `jmpif`s by reversing branches if condition is negated (https://github.com/noir-lang/noir/pull/5891) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* **ssa:** Deduplicate intrinsics with predicates (https://github.com/noir-lang/noir/pull/6615) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* **ssa:** Hoisting of array get using known induction variable maximum (https://github.com/noir-lang/noir/pull/6639) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* **ssa:** Loop invariant code motion (https://github.com/noir-lang/noir/pull/6563) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* **ssa:** Option to set the maximum acceptable Brillig bytecode increase in unrolling (https://github.com/noir-lang/noir/pull/6641) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* **ssa:** Simplify array get from set that writes to the same dynamic index (https://github.com/noir-lang/noir/pull/6684) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* Staking integration ([#10403](https://github.com/AztecProtocol/aztec-packages/issues/10403)) ([ecd6c4f](https://github.com/AztecProtocol/aztec-packages/commit/ecd6c4ff914129236b23ab6f4924e4faa3e9d523)) +* Standalone ssd ([#10317](https://github.com/AztecProtocol/aztec-packages/issues/10317)) ([c324781](https://github.com/AztecProtocol/aztec-packages/commit/c3247819751b8efab646ed05b3b781be403653e1)) +* Switch to using an external noir implementation of Schnorr 
([#10330](https://github.com/AztecProtocol/aztec-packages/issues/10330)) ([6cbd375](https://github.com/AztecProtocol/aztec-packages/commit/6cbd375c4fddc0108b72a3092fcd75816305adde)) +* Sync from aztec-packages (https://github.com/noir-lang/noir/pull/6576) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* Sync from aztec-packages (https://github.com/noir-lang/noir/pull/6634) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* Sync from aztec-packages (https://github.com/noir-lang/noir/pull/6656) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* Terraform for alerting on metrics ([#10192](https://github.com/AztecProtocol/aztec-packages/issues/10192)) ([05c9e5d](https://github.com/AztecProtocol/aztec-packages/commit/05c9e5df89f4f4185490a940d1d9daa2751e7219)), closes [#9956](https://github.com/AztecProtocol/aztec-packages/issues/9956) +* Test release network via ci workflow ([#10388](https://github.com/AztecProtocol/aztec-packages/issues/10388)) ([e6060ec](https://github.com/AztecProtocol/aztec-packages/commit/e6060ecca318ca4cdc60f1df77c1e7639a745f79)), closes [#10383](https://github.com/AztecProtocol/aztec-packages/issues/10383) +* **tooling:** Skip program transformation when loaded from cache (https://github.com/noir-lang/noir/pull/6689) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* Try to inline brillig calls with all constant arguments (https://github.com/noir-lang/noir/pull/6548) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* Ultra rollup flows ([#10162](https://github.com/AztecProtocol/aztec-packages/issues/10162)) ([c53f4cf](https://github.com/AztecProtocol/aztec-packages/commit/c53f4cf84c60b8d81cc62d5827ec4408da88cc4e)) +* Zip and propagate private logs ([#10210](https://github.com/AztecProtocol/aztec-packages/issues/10210)) ([5c32747](https://github.com/AztecProtocol/aztec-packages/commit/5c327473994b9dd983f936809529c2bc07691130)) +* Zip and silo and propagate private logs ([#10308](https://github.com/AztecProtocol/aztec-packages/issues/10308)) ([90d4385](https://github.com/AztecProtocol/aztec-packages/commit/90d43858532712a2b7182bdd06f9073e10fa5d41)) + + +### Bug Fixes + +* Add helm ([#10454](https://github.com/AztecProtocol/aztec-packages/issues/10454)) ([2eb9ade](https://github.com/AztecProtocol/aztec-packages/commit/2eb9ade6e778d247557fde534cd101391d3c3307)) +* Add secret ([#10453](https://github.com/AztecProtocol/aztec-packages/issues/10453)) ([95601df](https://github.com/AztecProtocol/aztec-packages/commit/95601df9a38590e1d6acf499b5aa2d8dcfb84b0f)) +* Add type ([#10452](https://github.com/AztecProtocol/aztec-packages/issues/10452)) ([cd9699f](https://github.com/AztecProtocol/aztec-packages/commit/cd9699fdadaa1123aebcad35535b7e4bd0b06193)) +* Allow multiple `_` parameters, and disallow `_` as an expression you can read from (https://github.com/noir-lang/noir/pull/6657) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* Always return an array of `u8`s when 
simplifying `Intrinsic::ToRadix` calls (https://github.com/noir-lang/noir/pull/6663) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* Await block unwind when a reorg happens ([#10380](https://github.com/AztecProtocol/aztec-packages/issues/10380)) ([5a02480](https://github.com/AztecProtocol/aztec-packages/commit/5a024803648e8a645cbafdeb4e2ab9f6bfa26117)) +* Bbup cleanup and fix ([#10067](https://github.com/AztecProtocol/aztec-packages/issues/10067)) ([0ff8177](https://github.com/AztecProtocol/aztec-packages/commit/0ff81773da58f7c28621d4e5711ce130afd3e51b)) +* Bootstrapping devnet ([#10396](https://github.com/AztecProtocol/aztec-packages/issues/10396)) ([f3c7294](https://github.com/AztecProtocol/aztec-packages/commit/f3c72942370a3ce01b73807bd729bb0d7500c177)) +* Bot waits for pxe synch ([#10316](https://github.com/AztecProtocol/aztec-packages/issues/10316)) ([ebd4165](https://github.com/AztecProtocol/aztec-packages/commit/ebd41651f5912fc2e0d1aa5d0df154620341c755)) +* Consider prereleases to be compatible with pre-1.0.0 releases (https://github.com/noir-lang/noir/pull/6580) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* Correct signed integer handling in `noirc_abi` (https://github.com/noir-lang/noir/pull/6638) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* Correct type when simplifying `derive_pedersen_generators` (https://github.com/noir-lang/noir/pull/6579) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* Correct types returned by constant EC operations simplified within SSA (https://github.com/noir-lang/noir/pull/6652) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* Disallow `#[export]` on associated methods (https://github.com/noir-lang/noir/pull/6626) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* Do not warn on unused functions marked with #[export] (https://github.com/noir-lang/noir/pull/6625) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* Don't pass default value for --node-url ([#10427](https://github.com/AztecProtocol/aztec-packages/issues/10427)) ([5299481](https://github.com/AztecProtocol/aztec-packages/commit/5299481bb631fa57b9e59cb923139d161b71e6b6)), closes [#10419](https://github.com/AztecProtocol/aztec-packages/issues/10419) +* Don't remove necessary RC instructions in DIE pass (https://github.com/noir-lang/noir/pull/6585) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* Don't report visibility errors when elaborating comptime value (https://github.com/noir-lang/noir/pull/6498) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* Get node info from a PXE ([#10420](https://github.com/AztecProtocol/aztec-packages/issues/10420)) ([ed972f3](https://github.com/AztecProtocol/aztec-packages/commit/ed972f320c350c37628b583b0913a554ee1745df)) +* Increase timeouts 
([#10412](https://github.com/AztecProtocol/aztec-packages/issues/10412)) ([d3b8838](https://github.com/AztecProtocol/aztec-packages/commit/d3b883877620783d2e818650b5435cb243c56c96)) +* LSP auto-import text indent (https://github.com/noir-lang/noir/pull/6699) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* LSP code action wasn't triggering on beginning or end of identifier (https://github.com/noir-lang/noir/pull/6616) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* **LSP:** Use generic self type to narrow down methods to complete (https://github.com/noir-lang/noir/pull/6617) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* Move spartan-script tf to spartan, use file in bucket ([#10395](https://github.com/AztecProtocol/aztec-packages/issues/10395)) ([5cef628](https://github.com/AztecProtocol/aztec-packages/commit/5cef62834e76f57514d0d09c24e4a2c98ea05485)) +* Nit ([#10392](https://github.com/AztecProtocol/aztec-packages/issues/10392)) ([d6985a8](https://github.com/AztecProtocol/aztec-packages/commit/d6985a80e82ee671a562866d7ed978c6f6e1b659)) +* Optimize array ref counts to copy arrays much less often (https://github.com/noir-lang/noir/pull/6685) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* **p2p:** Override msg Id ([#10415](https://github.com/AztecProtocol/aztec-packages/issues/10415)) ([990d11b](https://github.com/AztecProtocol/aztec-packages/commit/990d11b1d70126bb545e834724e51a5f8e46e64a)) +* Parse a bit more SSA stuff (https://github.com/noir-lang/noir/pull/6599) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* Preserve newlines between comments when formatting statements (https://github.com/noir-lang/noir/pull/6601) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* Prevent hoisting binary instructions which can overflow (https://github.com/noir-lang/noir/pull/6672) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* **prover:** Handle starting blocks out of order in prover ([#10350](https://github.com/AztecProtocol/aztec-packages/issues/10350)) ([9106102](https://github.com/AztecProtocol/aztec-packages/commit/910610251e04bd9e50a4cc6da8a3230c20e49be6)) +* Publicly register contract classes ([#10385](https://github.com/AztecProtocol/aztec-packages/issues/10385)) ([94e6e1a](https://github.com/AztecProtocol/aztec-packages/commit/94e6e1a954911b81e6af85edff55c64f13595b20)) +* Remove `compiler_version` from new `Nargo.toml` (https://github.com/noir-lang/noir/pull/6590) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* Revert "feat: Avoid inserting an empty leaf in indexed trees on update" ([#10319](https://github.com/AztecProtocol/aztec-packages/issues/10319)) ([887c011](https://github.com/AztecProtocol/aztec-packages/commit/887c01103255ea4cbbb6cb33c8771d47123b3bff)) +* Revert "feat: zip and propagate private logs" 
([#10302](https://github.com/AztecProtocol/aztec-packages/issues/10302)) ([9d70728](https://github.com/AztecProtocol/aztec-packages/commit/9d70728f0e494bbe63ecf7875877344de776d438)) +* Safely insert sibling paths ([#10423](https://github.com/AztecProtocol/aztec-packages/issues/10423)) ([41f7645](https://github.com/AztecProtocol/aztec-packages/commit/41f76457355fc10781613cdee7bfe0b7207f2fb4)) +* **ssa:** Don't deduplicate constraints in blocks that are not dominated (https://github.com/noir-lang/noir/pull/6627) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* **ssa:** Remove RC tracker in DIE (https://github.com/noir-lang/noir/pull/6700) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* **ssa:** Track all local allocations during flattening (https://github.com/noir-lang/noir/pull/6619) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* Tf vars ([#10457](https://github.com/AztecProtocol/aztec-packages/issues/10457)) ([00aaef6](https://github.com/AztecProtocol/aztec-packages/commit/00aaef6a544580d8ec8a0bb64ca4c40a185b6410)) +* Typo in u128 docs (https://github.com/noir-lang/noir/pull/6711) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* Url in bbup install ([#10456](https://github.com/AztecProtocol/aztec-packages/issues/10456)) ([1b0dfb7](https://github.com/AztecProtocol/aztec-packages/commit/1b0dfb77612cae9fa026da1d453bdf0d89442200)) +* Use correct type for attribute arguments (https://github.com/noir-lang/noir/pull/6640) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* Used signed division for signed modulo (https://github.com/noir-lang/noir/pull/6635) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* Witness changes in file sponge.hpp ([#10345](https://github.com/AztecProtocol/aztec-packages/issues/10345)) ([4a38edf](https://github.com/AztecProtocol/aztec-packages/commit/4a38edfc1580aa1cb5113993ff8a2e5574076226)) + + +### Miscellaneous + +* Add `ram_blowup_regression` to memory report (https://github.com/noir-lang/noir/pull/6683) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* Add panic for compiler error described in [#6620](https://github.com/AztecProtocol/aztec-packages/issues/6620) (https://github.com/noir-lang/noir/pull/6621) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* **avm:** Fake verification routine for avm recursion in public base rollup ([#10382](https://github.com/AztecProtocol/aztec-packages/issues/10382)) ([a1e5966](https://github.com/AztecProtocol/aztec-packages/commit/a1e5966ffe98351d848bfa47608a2f22c381acfb)), closes [#10243](https://github.com/AztecProtocol/aztec-packages/issues/10243) +* **avm:** Remove function selector type of getenv opcode ([#10406](https://github.com/AztecProtocol/aztec-packages/issues/10406)) ([38c0c14](https://github.com/AztecProtocol/aztec-packages/commit/38c0c14fe90a1a920818f2f99a7d3204f0211091)), 
closes [#9396](https://github.com/AztecProtocol/aztec-packages/issues/9396) +* Batch archiver requests ([#10442](https://github.com/AztecProtocol/aztec-packages/issues/10442)) ([9443e8e](https://github.com/AztecProtocol/aztec-packages/commit/9443e8ea62237201342f111d846d321612fa2bb3)) +* Boot node has fixed peer id private key ([#10352](https://github.com/AztecProtocol/aztec-packages/issues/10352)) ([cae1203](https://github.com/AztecProtocol/aztec-packages/commit/cae1203ec4263d3b64fbc3fba5cfa281922004bd)) +* Bump alert in gossip_network.test.ts ([#10430](https://github.com/AztecProtocol/aztec-packages/issues/10430)) ([2c2169b](https://github.com/AztecProtocol/aztec-packages/commit/2c2169be46d489a1b2023b80e5426a13702c32ab)) +* Centralized helm flag for proving and clean release tf deploys ([#10221](https://github.com/AztecProtocol/aztec-packages/issues/10221)) ([c2c1744](https://github.com/AztecProtocol/aztec-packages/commit/c2c1744cb40f91773988476b23e61eb00babdc84)) +* **ci:** Move playwright install to `+deps` ([#10293](https://github.com/AztecProtocol/aztec-packages/issues/10293)) ([d7bd306](https://github.com/AztecProtocol/aztec-packages/commit/d7bd306ad85b663b96c022048840c51370da99ef)) +* Clean up archiver logs ([#10429](https://github.com/AztecProtocol/aztec-packages/issues/10429)) ([4fcbc59](https://github.com/AztecProtocol/aztec-packages/commit/4fcbc592c963389a132b5b72f0f68d1f6526943b)) +* Consolidate some CI workflows to reduce sprawl (https://github.com/noir-lang/noir/pull/6696) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* Contracts on a diet ([#10389](https://github.com/AztecProtocol/aztec-packages/issues/10389)) ([dddb008](https://github.com/AztecProtocol/aztec-packages/commit/dddb008d0fe69da64574df9a21e0e91533f9ab15)) +* Deduplicate constants across blocks ([#9972](https://github.com/AztecProtocol/aztec-packages/issues/9972)) ([69bb64f](https://github.com/AztecProtocol/aztec-packages/commit/69bb64fa34667810e96ea85c7594595522ccdce1)) +* Derive PartialEq and Hash for FieldElement (https://github.com/noir-lang/noir/pull/6610) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* **docs:** Remove additional `DEBUG` references, add note on `LOG_LEVEL`s ([#10434](https://github.com/AztecProtocol/aztec-packages/issues/10434)) ([e1e5906](https://github.com/AztecProtocol/aztec-packages/commit/e1e5906c1dd1af4c3865572111438185c6ec8a41)) +* Don't generate proofs of verifier circuits in test ([#10405](https://github.com/AztecProtocol/aztec-packages/issues/10405)) ([c00ebdd](https://github.com/AztecProtocol/aztec-packages/commit/c00ebdd60373aa579587b03eeb4b44ada0bb1155)) +* Fix sassy-salamander chores v1 ([#10218](https://github.com/AztecProtocol/aztec-packages/issues/10218)) ([7227b48](https://github.com/AztecProtocol/aztec-packages/commit/7227b487f97e26a3f8f2aa8086fb7c2c7b0de557)), closes [#10074](https://github.com/AztecProtocol/aztec-packages/issues/10074) [#10075](https://github.com/AztecProtocol/aztec-packages/issues/10075) [#10077](https://github.com/AztecProtocol/aztec-packages/issues/10077) +* Fix tests in `noirc_abi_wasm` (https://github.com/noir-lang/noir/pull/6688) 
([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* Fix traces endpoint url in native testnet script ([#10309](https://github.com/AztecProtocol/aztec-packages/issues/10309)) ([2367c62](https://github.com/AztecProtocol/aztec-packages/commit/2367c629de001f70e455abdcb7984851bf19458c)) +* Fix typo in test name (https://github.com/noir-lang/noir/pull/6589) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* Fix warning when compiling `noir_wasm` (https://github.com/noir-lang/noir/pull/6686) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* Ignore almost-empty directories in nargo_cli tests (https://github.com/noir-lang/noir/pull/6611) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* Improve error message of `&T` (https://github.com/noir-lang/noir/pull/6633) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* Log manual contract class registrations ([#10354](https://github.com/AztecProtocol/aztec-packages/issues/10354)) ([da1470d](https://github.com/AztecProtocol/aztec-packages/commit/da1470d074f4884e61b51e450a661432c6f0a10f)) +* Making bbup a shell script ([#10426](https://github.com/AztecProtocol/aztec-packages/issues/10426)) ([1c29554](https://github.com/AztecProtocol/aztec-packages/commit/1c29554929268fe9f53961325ae6af3f9b799b1c)) +* **network_test.sh:** Work around 143 by disabling stern ([#10436](https://github.com/AztecProtocol/aztec-packages/issues/10436)) ([64f6dad](https://github.com/AztecProtocol/aztec-packages/commit/64f6dad8f95e4972ee4bef26b9e5da6d6b577f13)) +* Parallelise construction of perturbator coefficients at each level ([#10304](https://github.com/AztecProtocol/aztec-packages/issues/10304)) ([ba335bd](https://github.com/AztecProtocol/aztec-packages/commit/ba335bdff645398d20241ce7baab02f63b20f55c)) +* Parallelise inverse polynomial construction for lookup relations ([#10413](https://github.com/AztecProtocol/aztec-packages/issues/10413)) ([427cf59](https://github.com/AztecProtocol/aztec-packages/commit/427cf594ec9ca4b472ec5d4a249c7b49805c78e2)) +* Pin foundry version in CI (https://github.com/noir-lang/noir/pull/6642) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* Public inputs in unit tests with proving were incorrectly set ([#10300](https://github.com/AztecProtocol/aztec-packages/issues/10300)) ([0311bf3](https://github.com/AztecProtocol/aztec-packages/commit/0311bf333acb2def3be1373b36514b99b132623a)) +* Pull out cfg simplification changes ([#10279](https://github.com/AztecProtocol/aztec-packages/issues/10279)) ([c48ae90](https://github.com/AztecProtocol/aztec-packages/commit/c48ae90c5d72450a3a19b76e552df1607ff79953)) +* Pull out constant brillig inliner ([#10291](https://github.com/AztecProtocol/aztec-packages/issues/10291)) ([0577c1a](https://github.com/AztecProtocol/aztec-packages/commit/0577c1a70e9746bd06f07d2813af1be39e01ca02)) +* Pull out loop invariant optimization ([#10277](https://github.com/AztecProtocol/aztec-packages/issues/10277)) 
([94cba37](https://github.com/AztecProtocol/aztec-packages/commit/94cba373c0807e66a2633e2bdaacea538838e2e7)) +* Pull out sync changes ([#10292](https://github.com/AztecProtocol/aztec-packages/issues/10292)) ([49f80b3](https://github.com/AztecProtocol/aztec-packages/commit/49f80b30db59e2454347c4b742d536e317305f2e)) +* Random typos ([#10393](https://github.com/AztecProtocol/aztec-packages/issues/10393)) ([ed47a42](https://github.com/AztecProtocol/aztec-packages/commit/ed47a42e838ffb75e17a7897bc0b77658f6e4b15)) +* Redo typo PR by Dimitrolito ([#10364](https://github.com/AztecProtocol/aztec-packages/issues/10364)) ([da809c5](https://github.com/AztecProtocol/aztec-packages/commit/da809c58290f9590836f45ec59376cbf04d3c4ce)) +* Redo typo PR by Dimitrolito (https://github.com/noir-lang/noir/pull/6614) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* Redo typo PR by donatik27 (https://github.com/noir-lang/noir/pull/6575) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* Redo typo PR by leopardracer ([#10363](https://github.com/AztecProtocol/aztec-packages/issues/10363)) ([0d1b722](https://github.com/AztecProtocol/aztec-packages/commit/0d1b722ef7fdc501ca78cfca8f46009a29504c8f)) +* Redo typo PR by leopardracer ([#10444](https://github.com/AztecProtocol/aztec-packages/issues/10444)) ([3653c4c](https://github.com/AztecProtocol/aztec-packages/commit/3653c4c78e8ba3ab2036c6467e60c2c496db5811)) +* Refactor foreign call executors (https://github.com/noir-lang/noir/pull/6659) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* Refactor indexed tree to use traits ([#10361](https://github.com/AztecProtocol/aztec-packages/issues/10361)) ([621cbaf](https://github.com/AztecProtocol/aztec-packages/commit/621cbafc49acee6fa4422fd5ebcccd6c27507670)) +* Refactor poseidon2 (https://github.com/noir-lang/noir/pull/6655) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* Release Noir(1.0.0-beta.0) (https://github.com/noir-lang/noir/pull/6562) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* Remove `ec` module from stdlib (https://github.com/noir-lang/noir/pull/6612) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* Remove debug and winston in favor of pino ([#10355](https://github.com/AztecProtocol/aztec-packages/issues/10355)) ([c246aba](https://github.com/AztecProtocol/aztec-packages/commit/c246aba5dd51391e2b8a3bd8cdc67f0115b85a7a)) +* Remove eddsa from stdlib (https://github.com/noir-lang/noir/pull/6591) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* Remove inliner override for `reference_counts` test (https://github.com/noir-lang/noir/pull/6714) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* Remove SchnorrVerify opcode ([#9897](https://github.com/AztecProtocol/aztec-packages/issues/9897)) 
([93cd323](https://github.com/AztecProtocol/aztec-packages/commit/93cd323e493118ce91097934216a364855a991db)) +* Remove temporary allocations from `num_bits` (https://github.com/noir-lang/noir/pull/6600) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* Remove unused dep ([#10295](https://github.com/AztecProtocol/aztec-packages/issues/10295)) ([2a07355](https://github.com/AztecProtocol/aztec-packages/commit/2a0735583eb1dfb8aad47daf6f70b267fc2eca20)) +* Replace relative paths to noir-protocol-circuits ([8fd8236](https://github.com/AztecProtocol/aztec-packages/commit/8fd823689482c4ead689f24927ca57d7206c93a7)) +* Replace relative paths to noir-protocol-circuits ([5d11e24](https://github.com/AztecProtocol/aztec-packages/commit/5d11e24fa1bcdef097d4af0693f3f8556dbd4372)) +* Replace relative paths to noir-protocol-circuits ([e7a99f2](https://github.com/AztecProtocol/aztec-packages/commit/e7a99f28cdb54c7d462a43c8e971fa59696900f2)) +* Replace relative paths to noir-protocol-circuits ([2496118](https://github.com/AztecProtocol/aztec-packages/commit/2496118908db955d82222fe98514f4a55ff61e33)) +* Replace relative paths to noir-protocol-circuits ([d77dc96](https://github.com/AztecProtocol/aztec-packages/commit/d77dc96e699b3338ff624665be5f831b0d21afb7)) +* Replace relative paths to noir-protocol-circuits ([46d12e3](https://github.com/AztecProtocol/aztec-packages/commit/46d12e30bf9e4b523ccd5f5f4b2771498a72b8a5)) +* Require types of globals to be specified (https://github.com/noir-lang/noir/pull/6592) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* Revert "fix: Don't remove necessary RC instructions in DIE pass (https://github.com/noir-lang/noir/pull/6585)" (https://github.com/noir-lang/noir/pull/6693) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* Sassy network ([#10468](https://github.com/AztecProtocol/aztec-packages/issues/10468)) ([92eb377](https://github.com/AztecProtocol/aztec-packages/commit/92eb377ffb1ce192b608536fc39c85c5aa9ccfc4)) +* Simplify otel config, 1val setup, fix pod dns, retries ([#10344](https://github.com/AztecProtocol/aztec-packages/issues/10344)) ([be91d80](https://github.com/AztecProtocol/aztec-packages/commit/be91d807c91fbd829181c8b5935f93308fef6dbb)) +* Skip A->B B->A e2e_2_pxes test ([#10297](https://github.com/AztecProtocol/aztec-packages/issues/10297)) ([b75bfd0](https://github.com/AztecProtocol/aztec-packages/commit/b75bfd0a40547eab1d4700da80819d51e15a4428)) +* Sync logging with jest ([#10459](https://github.com/AztecProtocol/aztec-packages/issues/10459)) ([6e33cb9](https://github.com/AztecProtocol/aztec-packages/commit/6e33cb916643eadb62159421ba00c829e5162386)) +* Typo in oracles how to (https://github.com/noir-lang/noir/pull/6598) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* Update noir-bench-report version (https://github.com/noir-lang/noir/pull/6675) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* Update pprof (https://github.com/noir-lang/noir/pull/6710) 
([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* Update release-please action (https://github.com/noir-lang/noir/pull/6704) ([3304046](https://github.com/AztecProtocol/aztec-packages/commit/3304046704e257902e32b86baf1aafc8b23bcaf6)) +* Use non default mnemonic for releases ([#10400](https://github.com/AztecProtocol/aztec-packages/issues/10400)) ([bb5f364](https://github.com/AztecProtocol/aztec-packages/commit/bb5f364e4a086f7308137ccb8f77668d33367f3a)) + ## [0.65.2](https://github.com/AztecProtocol/aztec-packages/compare/aztec-packages-v0.65.1...aztec-packages-v0.65.2) (2024-11-28) diff --git a/README.md b/README.md index 83a2d819572c..7b2c6d3c5ea4 100644 --- a/README.md +++ b/README.md @@ -47,7 +47,7 @@ This provide an interactive environment for debugging the CI test. ## Debugging -Logging goes through the [DebugLogger](yarn-project/foundation/src/log/debug.ts) module in Typescript. To see the log output, set a `DEBUG` environment variable to the name of the module you want to debug, to `aztec:*`, or to `*` to see all logs. +Logging goes through the [DebugLogger](yarn-project/foundation/src/log/debug.ts) module in Typescript. `LOG_LEVEL` controls the default log level, and one can set alternate levels for specific modules, such as `debug; warn: module1, module2; error: module3`. ## Releases diff --git a/avm-transpiler/Cargo.lock b/avm-transpiler/Cargo.lock index bdac1771a70e..4da74e41190b 100644 --- a/avm-transpiler/Cargo.lock +++ b/avm-transpiler/Cargo.lock @@ -948,7 +948,6 @@ dependencies = [ "acvm", "iter-extended", "jsonrpc", - "regex", "serde", "serde_json", "thiserror", diff --git a/avm-transpiler/src/transpile.rs b/avm-transpiler/src/transpile.rs index 5e302c08020f..79f4a3f02bb6 100644 --- a/avm-transpiler/src/transpile.rs +++ b/avm-transpiler/src/transpile.rs @@ -786,7 +786,6 @@ fn handle_getter_instruction( enum EnvironmentVariable { ADDRESS, SENDER, - FUNCTIONSELECTOR, TRANSACTIONFEE, CHAINID, VERSION, @@ -821,7 +820,6 @@ fn handle_getter_instruction( "avmOpcodeTimestamp" => EnvironmentVariable::TIMESTAMP, "avmOpcodeL2GasLeft" => EnvironmentVariable::L2GASLEFT, "avmOpcodeDaGasLeft" => EnvironmentVariable::DAGASLEFT, - "avmOpcodeFunctionSelector" => EnvironmentVariable::FUNCTIONSELECTOR, "avmOpcodeIsStaticCall" => EnvironmentVariable::ISSTATICCALL, _ => panic!("Transpiler doesn't know how to process getter {:?}", function), }; diff --git a/aztec-up/bin/.aztec-run b/aztec-up/bin/.aztec-run index a7ab48a53131..142e71fb75ca 100755 --- a/aztec-up/bin/.aztec-run +++ b/aztec-up/bin/.aztec-run @@ -104,8 +104,8 @@ while [[ "$#" -gt 0 ]]; do done DOCKER_ENV="-e HOME=$HOME" -if ! [ -z "${DEBUG:-}" ] ; then - DOCKER_ENV="-e DEBUG=$DEBUG" +if ! 
[ -z "${LOG_LEVEL:-}" ] ; then + DOCKER_ENV="-e LOG_LEVEL=$LOG_LEVEL" fi for env in ${ENV_VARS_TO_INJECT:-}; do # SSH_AUTH_SOCK must be handled separately diff --git a/aztec-up/bin/docker-compose.sandbox.yml b/aztec-up/bin/docker-compose.sandbox.yml index 39d6ff203199..186ba3dec698 100644 --- a/aztec-up/bin/docker-compose.sandbox.yml +++ b/aztec-up/bin/docker-compose.sandbox.yml @@ -20,7 +20,7 @@ services: ports: - "${PXE_PORT:-8080}:${PXE_PORT:-8080}" environment: - DEBUG: # Loaded from the user shell if explicitly set + LOG_LEVEL: # Loaded from the user shell if explicitly set HOST_WORKDIR: "${PWD}" # Loaded from the user shell to show log files absolute path in host ETHEREUM_HOST: ${ETHEREUM_HOST:-http://ethereum}:${ANVIL_PORT:-8545} L1_CHAIN_ID: 31337 diff --git a/aztec-up/bin/docker-compose.test.yml b/aztec-up/bin/docker-compose.test.yml index 796e4c69b5a2..d3ad459b9de8 100644 --- a/aztec-up/bin/docker-compose.test.yml +++ b/aztec-up/bin/docker-compose.test.yml @@ -2,7 +2,6 @@ services: txe: image: "aztecprotocol/aztec" environment: - DEBUG: # Loaded from the user shell if explicitly set LOG_LEVEL: # Loaded from the user shell if explicitly set HOST_WORKDIR: "${PWD}" # Loaded from the user shell to show log files absolute path in host volumes: diff --git a/barretenberg/.gitrepo b/barretenberg/.gitrepo index c732750c3cea..f5f1d5fa3422 100644 --- a/barretenberg/.gitrepo +++ b/barretenberg/.gitrepo @@ -6,7 +6,7 @@ [subrepo] remote = https://github.com/AztecProtocol/barretenberg branch = master - commit = be0ed398422ef6c064d187f35f6514c327553b91 - parent = ed47a42e838ffb75e17a7897bc0b77658f6e4b15 + commit = f715310d31a164f2b5e7e7fd00e6760ef3e59aad + parent = 9301253f0488e6d96ed12a8c9bde72a653aa7d36 method = merge cmdver = 0.4.6 diff --git a/barretenberg/CHANGELOG.md b/barretenberg/CHANGELOG.md index 3a08bec70363..c0b8a4b35503 100644 --- a/barretenberg/CHANGELOG.md +++ b/barretenberg/CHANGELOG.md @@ -1,5 +1,46 @@ # Changelog +## [0.66.0](https://github.com/AztecProtocol/aztec-packages/compare/barretenberg-v0.65.2...barretenberg-v0.66.0) (2024-12-06) + + +### ⚠ BREAKING CHANGES + +* remove SchnorrVerify opcode ([#9897](https://github.com/AztecProtocol/aztec-packages/issues/9897)) + +### Features + +* Allow querying block number for tree indices ([#10332](https://github.com/AztecProtocol/aztec-packages/issues/10332)) ([cf05a7a](https://github.com/AztecProtocol/aztec-packages/commit/cf05a7a346ea11853e940d5e9ac105ef0d629d35)) +* Avoid inserting an empty leaf in indexed trees on update ([#10281](https://github.com/AztecProtocol/aztec-packages/issues/10281)) ([5a04ca8](https://github.com/AztecProtocol/aztec-packages/commit/5a04ca880ae2a0f285b6a5a110286ba10bc4a6c3)) +* Avoid inserting an empty leaf in indexed trees on update ([#10334](https://github.com/AztecProtocol/aztec-packages/issues/10334)) ([80fad45](https://github.com/AztecProtocol/aztec-packages/commit/80fad4544a4d8c1b488f8b4b4f86fe508ed1f4cc)) +* **bb:** Define std::hash for field ([#10312](https://github.com/AztecProtocol/aztec-packages/issues/10312)) ([752bc59](https://github.com/AztecProtocol/aztec-packages/commit/752bc59c579710c21acf6cc97164e377f72c256c)) +* Client IVC API ([#10217](https://github.com/AztecProtocol/aztec-packages/issues/10217)) ([cc54a1e](https://github.com/AztecProtocol/aztec-packages/commit/cc54a1e1ef75b29d160a02d03cf9b29e28d3e4ca)) +* Integrate verify_proof calls in mock protocol circuits 
([#9253](https://github.com/AztecProtocol/aztec-packages/issues/9253)) ([7ed89aa](https://github.com/AztecProtocol/aztec-packages/commit/7ed89aaa9d0968af6334c1c8abf6c06a42754c52)) +* Manage enqueued calls & phases in AVM witgen ([#10310](https://github.com/AztecProtocol/aztec-packages/issues/10310)) ([e7ebef8](https://github.com/AztecProtocol/aztec-packages/commit/e7ebef8d09744fdc24a79cb0bf74638b0a8f5dc8)) +* Mock IVC state from arbitrary acir IVC recursion constraints ([#10314](https://github.com/AztecProtocol/aztec-packages/issues/10314)) ([ac7c0da](https://github.com/AztecProtocol/aztec-packages/commit/ac7c0da38ff05d6f11c4d6a6244c4526ac00232e)) +* Ultra rollup flows ([#10162](https://github.com/AztecProtocol/aztec-packages/issues/10162)) ([c53f4cf](https://github.com/AztecProtocol/aztec-packages/commit/c53f4cf84c60b8d81cc62d5827ec4408da88cc4e)) + + +### Bug Fixes + +* Bbup cleanup and fix ([#10067](https://github.com/AztecProtocol/aztec-packages/issues/10067)) ([0ff8177](https://github.com/AztecProtocol/aztec-packages/commit/0ff81773da58f7c28621d4e5711ce130afd3e51b)) +* Revert "feat: Avoid inserting an empty leaf in indexed trees on update" ([#10319](https://github.com/AztecProtocol/aztec-packages/issues/10319)) ([887c011](https://github.com/AztecProtocol/aztec-packages/commit/887c01103255ea4cbbb6cb33c8771d47123b3bff)) +* Url in bbup install ([#10456](https://github.com/AztecProtocol/aztec-packages/issues/10456)) ([1b0dfb7](https://github.com/AztecProtocol/aztec-packages/commit/1b0dfb77612cae9fa026da1d453bdf0d89442200)) +* Witness changes in file sponge.hpp ([#10345](https://github.com/AztecProtocol/aztec-packages/issues/10345)) ([4a38edf](https://github.com/AztecProtocol/aztec-packages/commit/4a38edfc1580aa1cb5113993ff8a2e5574076226)) + + +### Miscellaneous + +* **avm:** Fake verification routine for avm recursion in public base rollup ([#10382](https://github.com/AztecProtocol/aztec-packages/issues/10382)) ([a1e5966](https://github.com/AztecProtocol/aztec-packages/commit/a1e5966ffe98351d848bfa47608a2f22c381acfb)), closes [#10243](https://github.com/AztecProtocol/aztec-packages/issues/10243) +* **avm:** Remove function selector type of getenv opcode ([#10406](https://github.com/AztecProtocol/aztec-packages/issues/10406)) ([38c0c14](https://github.com/AztecProtocol/aztec-packages/commit/38c0c14fe90a1a920818f2f99a7d3204f0211091)), closes [#9396](https://github.com/AztecProtocol/aztec-packages/issues/9396) +* Don't generate proofs of verifier circuits in test ([#10405](https://github.com/AztecProtocol/aztec-packages/issues/10405)) ([c00ebdd](https://github.com/AztecProtocol/aztec-packages/commit/c00ebdd60373aa579587b03eeb4b44ada0bb1155)) +* Making bbup a shell script ([#10426](https://github.com/AztecProtocol/aztec-packages/issues/10426)) ([1c29554](https://github.com/AztecProtocol/aztec-packages/commit/1c29554929268fe9f53961325ae6af3f9b799b1c)) +* Parallelise construction of perturbator coefficients at each level ([#10304](https://github.com/AztecProtocol/aztec-packages/issues/10304)) ([ba335bd](https://github.com/AztecProtocol/aztec-packages/commit/ba335bdff645398d20241ce7baab02f63b20f55c)) +* Parallelise inverse polynomial construction for lookup relations ([#10413](https://github.com/AztecProtocol/aztec-packages/issues/10413)) 
([427cf59](https://github.com/AztecProtocol/aztec-packages/commit/427cf594ec9ca4b472ec5d4a249c7b49805c78e2)) +* Public inputs in unit tests with proving were incorrectly set ([#10300](https://github.com/AztecProtocol/aztec-packages/issues/10300)) ([0311bf3](https://github.com/AztecProtocol/aztec-packages/commit/0311bf333acb2def3be1373b36514b99b132623a)) +* Redo typo PR by Dimitrolito ([#10364](https://github.com/AztecProtocol/aztec-packages/issues/10364)) ([da809c5](https://github.com/AztecProtocol/aztec-packages/commit/da809c58290f9590836f45ec59376cbf04d3c4ce)) +* Redo typo PR by leopardracer ([#10363](https://github.com/AztecProtocol/aztec-packages/issues/10363)) ([0d1b722](https://github.com/AztecProtocol/aztec-packages/commit/0d1b722ef7fdc501ca78cfca8f46009a29504c8f)) +* Remove SchnorrVerify opcode ([#9897](https://github.com/AztecProtocol/aztec-packages/issues/9897)) ([93cd323](https://github.com/AztecProtocol/aztec-packages/commit/93cd323e493118ce91097934216a364855a991db)) + ## [0.65.2](https://github.com/AztecProtocol/aztec-packages/compare/barretenberg-v0.65.1...barretenberg-v0.65.2) (2024-11-28) diff --git a/barretenberg/bbup/.gitignore b/barretenberg/bbup/.gitignore deleted file mode 100644 index 21114ccaaac3..000000000000 --- a/barretenberg/bbup/.gitignore +++ /dev/null @@ -1,5 +0,0 @@ -node_modules -yarn.lock -*.js -.yarn -bun.lockb diff --git a/barretenberg/bbup/.npmignore b/barretenberg/bbup/.npmignore deleted file mode 100644 index 7e4089baa7cb..000000000000 --- a/barretenberg/bbup/.npmignore +++ /dev/null @@ -1,4 +0,0 @@ -node_modules -yarn.lock -*.ts -.yarn diff --git a/barretenberg/bbup/README.md b/barretenberg/bbup/README.md index a2c009fe5d05..6e8ce8343d5e 100644 --- a/barretenberg/bbup/README.md +++ b/barretenberg/bbup/README.md @@ -6,12 +6,6 @@ It assumes you are using [Noir](https://noir-lang.org) as the frontend language. ## Installation -### Dependencies - -TODO - -### Installation script - BBup is an installer for whatever version of BB you may want. Install BBup with: ```bash @@ -19,7 +13,7 @@ curl -L bbup.dev | bash ``` > [!IMPORTANT] -> *Always* check what scripts do. The above one redirects to [the install script](https://github.com/AztecProtocol/aztec-packages/blob/master/barretenberg/bbup/install) which checks if you have `npm`, installing it with `nvm` otherwise. It then installs [bbup](https://github.com/AztecProtocol/aztec-packages/blob/master/barretenberg/bbup/bbup.ts) globally. +> *Always* check what scripts do. 
The above one redirects to [the install script](https://github.com/AztecProtocol/aztec-packages/blob/master/barretenberg/bbup/install) which installs [bbup](https://github.com/AztecProtocol/aztec-packages/blob/master/barretenberg/bbup/bbup) in your system's PATH. ## Usage diff --git a/barretenberg/bbup/bbup b/barretenberg/bbup/bbup new file mode 100755 index 000000000000..c22a182c0323 --- /dev/null +++ b/barretenberg/bbup/bbup @@ -0,0 +1,188 @@ +#!/usr/bin/env bash + +set -e + +# Colors and symbols +RED='\033[0;31m' +GREEN='\033[0;32m' +BLUE='\033[0;34m' +NC='\033[0m' +SUCCESS="✓" +ERROR="✗" + +# Utility functions +print_spinner() { + local pid=$1 + local delay=0.1 + local spinstr='|/-\' + while [ "$(ps a | awk '{print $1}' | grep -w "$pid")" ]; do + local temp=${spinstr#?} + printf " [%c] " "$spinstr" + local spinstr=$temp${spinstr%"$temp"} + sleep $delay + printf "\b\b\b\b\b\b" + done + printf " \b\b\b\b" +} + +get_bb_version_for_noir() { + local noir_version=$1 + local url="" + local resolved_version="" + + if [ "$noir_version" = "stable" ] || [ "$noir_version" = "nightly" ]; then + # Get releases from GitHub API + local releases=$(curl -s "https://api.github.com/repos/noir-lang/noir/releases") + + if [ "$noir_version" = "stable" ]; then + resolved_version=$(echo "$releases" | grep -o '"tag_name": "[^"]*"' | grep -v "aztec\|nightly" | head -1 | cut -d'"' -f4) + else + resolved_version=$(echo "$releases" | grep -o '"tag_name": "nightly[^"]*"' | head -1 | cut -d'"' -f4) + fi + + url="https://raw.githubusercontent.com/noir-lang/noir/${resolved_version}/scripts/install_bb.sh" + else + url="https://raw.githubusercontent.com/noir-lang/noir/v${noir_version}/scripts/install_bb.sh" + fi + + # Extract BB version from install script + local install_script=$(curl -s "$url") + local bb_version=$(echo "$install_script" | grep 'VERSION=' | cut -d'"' -f2) + echo "$bb_version" +} + +install_bb() { + local version=$1 + local architecture=$(uname -m) + local platform="" + + # Convert architecture names + if [ "$architecture" = "arm64" ]; then + architecture="aarch64" + elif [ "$architecture" = "x86_64" ]; then + architecture="x86_64" + else + printf "${RED}${ERROR} Unsupported architecture: ${architecture}${NC}\n" + exit 1 + fi + + # Determine platform + if [ "$(uname)" = "Darwin" ]; then + platform="apple-darwin" + elif [ "$(uname)" = "Linux" ]; then + platform="linux-gnu" + else + printf "${RED}${ERROR} Unsupported platform: $(uname)${NC}\n" + exit 1 + fi + + local home_dir=$HOME + local bb_path="${home_dir}/.bb" + + printf "${BLUE}Installing to ${bb_path}${NC}\n" + + # Create temporary directory + local temp_dir=$(mktemp -d) + local temp_tar="${temp_dir}/temp.tar.gz" + + # Download and extract + local release_url="https://github.com/AztecProtocol/aztec-packages/releases/download/aztec-packages-v${version}" + local binary_url="${release_url}/barretenberg-${architecture}-${platform}.tar.gz" + + curl -L "$binary_url" -o "$temp_tar" + mkdir -p "$bb_path" + tar xzf "$temp_tar" -C "$bb_path" + rm -rf "$temp_dir" + + # Update shell configuration + update_shell_config "$bb_path" + + printf "${GREEN}${SUCCESS} Installed barretenberg to ${bb_path}${NC}\n" +} + +update_shell_config() { + local bb_bin_path=$1 + local path_entry="export PATH=\"${bb_bin_path}:\$PATH\"" + + # Update various shell configs if they exist + if [ -f "${HOME}/.bashrc" ]; then + echo "$path_entry" >> "${HOME}/.bashrc" + fi + + if [ -f "${HOME}/.zshrc" ]; then + echo "$path_entry" >> "${HOME}/.zshrc" + fi + + if [ -f 
"${HOME}/.config/fish/config.fish" ]; then + echo "set -gx PATH ${bb_bin_path} \$PATH" >> "${HOME}/.config/fish/config.fish" + fi + + # Update current session's PATH + export PATH="${bb_bin_path}:$PATH" +} + +# Main script +main() { + local version="" + local noir_version="" + + # Parse arguments + while [[ $# -gt 0 ]]; do + case $1 in + -v|--version) + version="$2" + shift 2 + ;; + -nv|--noir-version) + noir_version="$2" + shift 2 + ;; + *) + printf "${RED}${ERROR} Unknown option: $1${NC}\n" + exit 1 + ;; + esac + done + + # If no version specified, try to get current noir version + if [ -z "$version" ] && [ -z "$noir_version" ]; then + noir_version="current" + fi + + if [ "$noir_version" = "current" ]; then + printf "${BLUE}Querying noir version from nargo${NC}\n" + if ! command -v nargo &> /dev/null; then + printf "${RED}${ERROR} Could not get noir version from nargo --version. Please specify a version.${NC}\n" + exit 1 + fi + noir_version=$(nargo --version | grep -o 'nargo version = [0-9]\+\.[0-9]\+\.[0-9]\+\(-[a-zA-Z]\+\.[0-9]\+\)\?' | cut -d' ' -f4) + printf "${GREEN}${SUCCESS} Resolved noir version ${noir_version} from nargo${NC}\n" + fi + + if [ -n "$noir_version" ]; then + printf "${BLUE}Getting compatible barretenberg version for noir version ${noir_version}${NC}\n" + if [ "$noir_version" = "stable" ] || [ "$noir_version" = "nightly" ]; then + printf "${BLUE}Resolving noir version ${noir_version}...${NC}\n" + # Get releases from GitHub API to show the resolved version + local releases=$(curl -s "https://api.github.com/repos/noir-lang/noir/releases") + local resolved_version="" + if [ "$noir_version" = "stable" ]; then + resolved_version=$(echo "$releases" | grep -o '"tag_name": "[^"]*"' | grep -v "aztec\|nightly" | head -1 | cut -d'"' -f4) + else + resolved_version=$(echo "$releases" | grep -o '"tag_name": "nightly[^"]*"' | head -1 | cut -d'"' -f4) + fi + printf "${GREEN}${SUCCESS} Resolved noir version ${noir_version} to ${resolved_version}${NC}\n" + fi + version=$(get_bb_version_for_noir "$noir_version") + printf "${GREEN}${SUCCESS} Resolved to barretenberg version ${version}${NC}\n" + fi + + if [ -z "$version" ]; then + printf "${RED}${ERROR} No version specified and couldn't determine version from noir${NC}\n" + exit 1 + fi + + install_bb "$version" +} + +main "$@" diff --git a/barretenberg/bbup/bbup.js b/barretenberg/bbup/bbup.js deleted file mode 100755 index 6b6fac09db8f..000000000000 --- a/barretenberg/bbup/bbup.js +++ /dev/null @@ -1,50 +0,0 @@ -#!/usr/bin/env node -import { Command, Option } from "commander"; -const program = new Command(); -import { installBB } from "./shell.js"; -import ora from "ora"; -import logSymbols from "log-symbols"; -import { getBbVersionForNoir } from "./versions.js"; -import { execSync } from "child_process"; -const spinner = ora({ color: "blue", discardStdin: false }); -const bbup = program - .command("install", { isDefault: true }) - .description("Installs Barretenberg.") - .addOption(new Option("-v, --version ", "The Barretenberg version to install").implies({ noirVersion: null })) - .addOption(new Option("-nv, --noir-version ", "The Noir version to match").default("current")) - .action(async ({ version, noirVersion }) => { - let resolvedBBVersion = ""; - if (noirVersion) { - let resolvedNoirVersion = noirVersion; - if (noirVersion === "current") { - spinner.start(`Querying noir version from nargo`); - try { - const output = execSync("nargo --version", { encoding: "utf-8" }); - resolvedNoirVersion = output.match(/nargo version = 
(\d+\.\d+\.\d+)/)[1]; - spinner.stopAndPersist({ - text: `Resolved noir version ${resolvedNoirVersion} from nargo`, - symbol: logSymbols.success, - }); - } - catch (e) { - spinner.stopAndPersist({ - text: `Could not get noir version from nargo --version. Please specify a version.`, - symbol: logSymbols.error, - }); - process.exit(1); - } - } - spinner.start(`Getting compatible barretenberg version for noir version ${resolvedNoirVersion}`); - resolvedBBVersion = await getBbVersionForNoir(resolvedNoirVersion, spinner); - spinner.stopAndPersist({ - text: `Resolved to barretenberg version ${resolvedBBVersion}`, - symbol: logSymbols.success, - }); - } - else if (version) { - resolvedBBVersion = version; - } - spinner.start(`Installing barretenberg`); - await installBB(resolvedBBVersion, spinner); -}); -bbup.parse(); diff --git a/barretenberg/bbup/bbup.ts b/barretenberg/bbup/bbup.ts deleted file mode 100755 index 98608ae50399..000000000000 --- a/barretenberg/bbup/bbup.ts +++ /dev/null @@ -1,72 +0,0 @@ -#!/usr/bin/env node -import { Command, Option } from "commander"; -const program = new Command(); -import { installBB } from "./shell.js"; -import ora from "ora"; -import logSymbols from "log-symbols"; -import { getBbVersionForNoir } from "./versions.js"; -import { execSync } from "child_process"; - -const spinner = ora({ color: "blue", discardStdin: false }); - -const bbup = program - .command("install", { isDefault: true }) - .description("Installs Barretenberg.") - .addOption( - new Option( - "-v, --version ", - "The Barretenberg version to install" - ).implies({ noirVersion: null }) - ) - .addOption( - new Option( - "-nv, --noir-version ", - "The Noir version to match" - ).default("current") - ) - .action(async ({ version, noirVersion }) => { - let resolvedBBVersion = ""; - if (noirVersion) { - let resolvedNoirVersion = noirVersion; - if (noirVersion === "current") { - spinner.start(`Querying noir version from nargo`); - try { - const output = execSync("nargo --version", { encoding: "utf-8" }); - resolvedNoirVersion = output.match( - /nargo version = (\d+\.\d+\.\d+(-\w+\.\d+)?)/ - )![1]; - console.log(resolvedNoirVersion); - spinner.stopAndPersist({ - text: `Resolved noir version ${resolvedNoirVersion} from nargo`, - symbol: logSymbols.success, - }); - } catch (e) { - spinner.stopAndPersist({ - text: `Could not get noir version from nargo --version. 
Please specify a version.`, - symbol: logSymbols.error, - }); - process.exit(1); - } - } - - spinner.start( - `Getting compatible barretenberg version for noir version ${resolvedNoirVersion}` - ); - resolvedBBVersion = await getBbVersionForNoir( - resolvedNoirVersion, - spinner - ); - spinner.stopAndPersist({ - text: `Resolved to barretenberg version ${resolvedBBVersion}`, - symbol: logSymbols.success, - }); - } else if (version) { - resolvedBBVersion = version; - } - - spinner.start(`Installing barretenberg`); - - await installBB(resolvedBBVersion, spinner); - }); - -bbup.parse(); diff --git a/barretenberg/bbup/install b/barretenberg/bbup/install index 848086087c41..030c5f6c5c54 100755 --- a/barretenberg/bbup/install +++ b/barretenberg/bbup/install @@ -1,42 +1,72 @@ -#!/bin/bash +#!/usr/bin/env bash set -e -# Function to check if a command exists -command_exists() { - command -v "$1" >/dev/null 2>&1 -} +# Colors and symbols +RED='\033[0;31m' +GREEN='\033[0;32m' +BLUE='\033[0;34m' +NC='\033[0m' +SUCCESS="✓" +ERROR="✗" + +BB_DIR="${HOME}/.bb" +INSTALL_PATH="${BB_DIR}/bbup" +BBUP_URL="https://raw.githubusercontent.com/AztecProtocol/aztec-packages/master/barretenberg/bbup/bbup" -# Function to install NVM and Node.js -install_nvm_and_node() { - echo "Installing NVM..." - wget -qO- https://raw.githubusercontent.com/nvm-sh/nvm/v0.40.1/install.sh | bash +# Create .bb directory if it doesn't exist +mkdir -p "$BB_DIR" - # Load NVM - export NVM_DIR="$HOME/.nvm" - [ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh" +# Download bbup +printf "${BLUE}Downloading bbup...${NC}\n" +if command -v curl &> /dev/null; then + curl -fsSL "$BBUP_URL" -o "$INSTALL_PATH" +elif command -v wget &> /dev/null; then + wget -q "$BBUP_URL" -O "$INSTALL_PATH" +else + printf "${RED}${ERROR} Neither curl nor wget found. Please install either curl or wget.${NC}\n" + exit 1 +fi + +if [ ! -f "$INSTALL_PATH" ]; then + printf "${RED}${ERROR} Failed to download bbup${NC}\n" + exit 1 +fi - # Install the latest LTS version of Node.js - echo "Installing the latest LTS version of Node.js..." - nvm install --lts +chmod 755 "$INSTALL_PATH" - # Use the installed version - nvm use --lts +# Add to shell config files if not already present +PATH_ENTRY="export PATH=\"\${HOME}/.bb:\${PATH}\"" +FISH_PATH_ENTRY="set -gx PATH \${HOME}/.bb \$PATH" - # Verify installation - node --version - npm --version +add_to_config() { + local config_file="$1" + local entry="$2" + if [ -f "$config_file" ] && ! grep -q "/.bb:" "$config_file"; then + echo "$entry" >> "$config_file" + return 0 + fi + return 1 } -# Check if NPM is installed -if ! command_exists npm; then - install_nvm_and_node +SHELL_UPDATED=false + +if add_to_config "${HOME}/.bashrc" "$PATH_ENTRY"; then + SHELL_UPDATED=true fi +if add_to_config "${HOME}/.zshrc" "$PATH_ENTRY"; then + SHELL_UPDATED=true +fi -# Install bbup globally -echo "Installing bbup..." -npm install -g bbup +if [ -f "${HOME}/.config/fish/config.fish" ] && ! grep -q "/.bb " "${HOME}/.config/fish/config.fish"; then + echo "$FISH_PATH_ENTRY" >> "${HOME}/.config/fish/config.fish" + SHELL_UPDATED=true +fi -echo "Installation complete. You can now use the 'bbup' command." -echo "Please restart your terminal or run 'source ~/.bashrc' (or your shell's equivalent) to start using bbup." 
+printf "${GREEN}${SUCCESS} Successfully installed bbup${NC}\n" +if [ "$SHELL_UPDATED" = true ]; then + printf "${BLUE}Please run 'source ~/.bashrc' or restart your terminal to use bbup${NC}\n" +else + printf "${BLUE}Your PATH already includes ~/.bb - you can run 'bbup' from anywhere${NC}\n" +fi diff --git a/barretenberg/bbup/package.json b/barretenberg/bbup/package.json deleted file mode 100644 index 7283db443e5d..000000000000 --- a/barretenberg/bbup/package.json +++ /dev/null @@ -1,25 +0,0 @@ -{ - "name": "bbup", - "type": "module", - "description": "Barretenberg installation script", - "bin": "bbup.js", - "version": "0.0.12", - "license": "ISC", - "scripts": { - "start": "npx tsx bbup.ts", - "compile": "tsc bbup.ts --esModuleInterop true --module nodenext && chmod +x bbup.js", - "publish": "yarn compile && yarn npm publish --access public" - }, - "dependencies": { - "@inquirer/input": "^1.2.16", - "@inquirer/select": "^1.3.3", - "@types/node": "^22.9.1", - "axios": "^1.7.7", - "commander": "^11.1.0", - "log-symbols": "^7.0.0", - "ora": "^8.1.0", - "tar-fs": "^3.0.6", - "tiged": "^2.12.6" - }, - "packageManager": "yarn@4.5.0" -} diff --git a/barretenberg/bbup/shell.js b/barretenberg/bbup/shell.js deleted file mode 100644 index b8fd1418cb9d..000000000000 --- a/barretenberg/bbup/shell.js +++ /dev/null @@ -1,73 +0,0 @@ -import { execSync } from "child_process"; -import logSymbols from "log-symbols"; -import os from "os"; -import axios from "axios"; -import fs from "fs"; -import { createGunzip } from "zlib"; -import tar from "tar-fs"; -import { promisify } from "util"; -import { pipeline } from "stream"; -import path from "path"; -import { appendFileSync, existsSync } from "fs"; -export function sourceShellConfig() { - const home = os.homedir(); - const bbBinPath = path.join(home, ".bb"); - const pathEntry = `export PATH="${bbBinPath}:$PATH"\n`; - if (existsSync(path.join(home, ".bashrc"))) { - const bashrcPath = path.join(home, ".bashrc"); - appendFileSync(bashrcPath, pathEntry); - } - if (existsSync(path.join(home, ".zshrc"))) { - const zshrcPath = path.join(home, ".zshrc"); - appendFileSync(zshrcPath, pathEntry); - } - if (existsSync(path.join(home, ".config", "fish", "config.fish"))) { - const fishConfigPath = path.join(home, ".config", "fish", "config.fish"); - appendFileSync(fishConfigPath, `set -gx PATH ${bbBinPath} $PATH\n`); - } - // Update the current session's PATH - process.env.PATH = `${bbBinPath}:${process.env.PATH}`; -} -export function exec(cmd, options = {}) { - return execSync(cmd, { - encoding: "utf-8", - stdio: "pipe", - ...options, - }); -} -export async function installBB(version, spinner) { - let architecture = os.arch(); - if (architecture === "arm64") { - architecture = "aarch64"; - } - else if (architecture === "x64") { - architecture = "x86_64"; - } - let platform = os.platform(); - if (platform === "darwin") { - platform = "apple-darwin"; - } - else if (platform === "linux") { - platform = "linux-gnu"; - } - const home = os.homedir(); - const bbPath = path.join(home, ".bb"); - spinner.start(`Installing to ${bbPath}`); - const tempTarPath = path.join(fs.mkdtempSync("bb-"), "temp.tar.gz"); - if (!["x86_64", "aarch64"].includes(architecture) || - !["linux-gnu", "apple-darwin"].includes(platform)) { - throw new Error(`Unsupported architecture ${architecture} and platform ${platform}`); - } - const releaseUrl = `https://github.com/AztecProtocol/aztec-packages/releases/download/aztec-packages-v${version}`; - const binaryUrl = 
`${releaseUrl}/barretenberg-${architecture}-${platform}.tar.gz`; - const response = await axios.get(binaryUrl, { responseType: "stream" }); - const pipelineAsync = promisify(pipeline); - await pipelineAsync(response.data, fs.createWriteStream(tempTarPath)); - await pipelineAsync(fs.createReadStream(tempTarPath), createGunzip(), tar.extract(bbPath)); - fs.rmSync(path.dirname(tempTarPath), { recursive: true }); - spinner.stopAndPersist({ - text: `Installed barretenberg to ${bbPath}`, - symbol: logSymbols.success, - }); - sourceShellConfig(); -} diff --git a/barretenberg/bbup/shell.ts b/barretenberg/bbup/shell.ts deleted file mode 100644 index c2e8a6945f3a..000000000000 --- a/barretenberg/bbup/shell.ts +++ /dev/null @@ -1,94 +0,0 @@ -import { execSync } from "child_process"; -import logSymbols from "log-symbols"; -import { Ora } from "ora"; -import os from "os"; -import axios from "axios"; -import fs from "fs"; -import { createGunzip } from "zlib"; -import tar from "tar-fs"; -import { promisify } from "util"; - -import { pipeline } from "stream"; -import path from "path"; - -import { appendFileSync, existsSync } from "fs"; - -export function sourceShellConfig() { - const home = os.homedir(); - const bbBinPath = path.join(home, ".bb"); - const pathEntry = `export PATH="${bbBinPath}:$PATH"\n`; - - if (existsSync(path.join(home, ".bashrc"))) { - const bashrcPath = path.join(home, ".bashrc"); - appendFileSync(bashrcPath, pathEntry); - } - if (existsSync(path.join(home, ".zshrc"))) { - const zshrcPath = path.join(home, ".zshrc"); - appendFileSync(zshrcPath, pathEntry); - } - if (existsSync(path.join(home, ".config", "fish", "config.fish"))) { - const fishConfigPath = path.join(home, ".config", "fish", "config.fish"); - appendFileSync(fishConfigPath, `set -gx PATH ${bbBinPath} $PATH\n`); - } - - // Update the current session's PATH - process.env.PATH = `${bbBinPath}:${process.env.PATH}`; -} - -export function exec(cmd: string, options = {}) { - return execSync(cmd, { - encoding: "utf-8", - stdio: "pipe", - ...options, - }); -} -export async function installBB(version: string, spinner: Ora) { - let architecture = os.arch(); - if (architecture === "arm64") { - architecture = "aarch64"; - } else if (architecture === "x64") { - architecture = "x86_64"; - } - - let platform: string = os.platform(); - if (platform === "darwin") { - platform = "apple-darwin"; - } else if (platform === "linux") { - platform = "linux-gnu"; - } - - const home = os.homedir(); - const bbPath = path.join(home, ".bb"); - - spinner.start(`Installing to ${bbPath}`); - const tempTarPath = path.join(fs.mkdtempSync("bb-"), "temp.tar.gz"); - - if ( - !["x86_64", "aarch64"].includes(architecture) || - !["linux-gnu", "apple-darwin"].includes(platform) - ) { - throw new Error( - `Unsupported architecture ${architecture} and platform ${platform}` - ); - } - - const releaseUrl = `https://github.com/AztecProtocol/aztec-packages/releases/download/aztec-packages-v${version}`; - const binaryUrl = `${releaseUrl}/barretenberg-${architecture}-${platform}.tar.gz`; - - const response = await axios.get(binaryUrl, { responseType: "stream" }); - - const pipelineAsync = promisify(pipeline); - await pipelineAsync(response.data, fs.createWriteStream(tempTarPath)); - await pipelineAsync( - fs.createReadStream(tempTarPath), - createGunzip(), - tar.extract(bbPath) - ); - - fs.rmSync(path.dirname(tempTarPath), { recursive: true }); - spinner.stopAndPersist({ - text: `Installed barretenberg to ${bbPath}`, - symbol: logSymbols.success, - }); - 
sourceShellConfig(); -} diff --git a/barretenberg/bbup/versions.js b/barretenberg/bbup/versions.js deleted file mode 100644 index 5f6dee81f7f0..000000000000 --- a/barretenberg/bbup/versions.js +++ /dev/null @@ -1,44 +0,0 @@ -import axios from "axios"; -import logSymbols from "log-symbols"; -async function getNamedVersions(githubToken) { - const fetchOpts = { - // eslint-disable-next-line camelcase - params: { per_page: 100 }, - headers: {}, - }; - if (githubToken) - fetchOpts.headers = { Authorization: `token ${githubToken}` }; - const { data } = await axios.get(`https://api.github.com/repos/noir-lang/noir/releases`, fetchOpts); - const stable = data.filter((release) => !release.tag_name.includes("aztec") && - !release.tag_name.includes("nightly") && - !release.prerelease)[0].tag_name; - const nightly = data.filter((release) => release.tag_name.startsWith("nightly"))[0].tag_name; - return { - stable, - nightly, - }; -} -export async function getBbVersionForNoir(noirVersion, spinner, githubToken) { - let url = ""; - if (noirVersion === "stable" || noirVersion === "nightly") { - spinner.start(`Resolving noir version ${noirVersion}...`); - const resolvedVersions = await getNamedVersions(githubToken); - spinner.stopAndPersist({ - text: `Resolved noir version ${noirVersion} to ${resolvedVersions[noirVersion]}`, - symbol: logSymbols.success, - }); - url = `https://raw.githubusercontent.com/noir-lang/noir/${resolvedVersions[noirVersion]}/scripts/install_bb.sh`; - } - else { - url = `https://raw.githubusercontent.com/noir-lang/noir/v${noirVersion}/scripts/install_bb.sh`; - } - try { - const { data } = await axios.get(url); - const versionMatch = data.match(/VERSION="([\d.]+)"/); - const version = versionMatch ? versionMatch[1] : null; - return version; - } - catch (e) { - throw new Error(e.message || e); - } -} diff --git a/barretenberg/bbup/versions.ts b/barretenberg/bbup/versions.ts deleted file mode 100644 index 36b979b54f9f..000000000000 --- a/barretenberg/bbup/versions.ts +++ /dev/null @@ -1,64 +0,0 @@ -import axios from "axios"; -import logSymbols from "log-symbols"; -import { Ora } from "ora"; - -async function getNamedVersions(githubToken?: string) { - const fetchOpts = { - // eslint-disable-next-line camelcase - params: { per_page: 100 }, - headers: {}, - }; - - if (githubToken) - fetchOpts.headers = { Authorization: `token ${githubToken}` }; - - const { data } = await axios.get( - `https://api.github.com/repos/noir-lang/noir/releases`, - fetchOpts - ); - - const stable = data.filter( - (release: any) => - !release.tag_name.includes("aztec") && - !release.tag_name.includes("nightly") && - !release.prerelease - )[0].tag_name; - const nightly = data.filter((release: any) => - release.tag_name.startsWith("nightly") - )[0].tag_name; - - return { - stable, - nightly, - }; -} - -export async function getBbVersionForNoir( - noirVersion: string, - spinner: Ora, - githubToken?: string -) { - let url = ""; - - if (noirVersion === "stable" || noirVersion === "nightly") { - spinner.start(`Resolving noir version ${noirVersion}...`); - const resolvedVersions = await getNamedVersions(githubToken); - spinner.stopAndPersist({ - text: `Resolved noir version ${noirVersion} to ${resolvedVersions[noirVersion]}`, - symbol: logSymbols.success, - }); - url = `https://raw.githubusercontent.com/noir-lang/noir/${resolvedVersions[noirVersion]}/scripts/install_bb.sh`; - } else { - url = `https://raw.githubusercontent.com/noir-lang/noir/v${noirVersion}/scripts/install_bb.sh`; - } - - try { - const { data } = await 
axios.get(url); - const versionMatch = data.match(/VERSION="([\d.]+)"/); - const version = versionMatch ? versionMatch[1] : null; - - return version; - } catch (e: any) { - throw new Error(e.message || e); - } -} diff --git a/barretenberg/bbup/yarn.lock b/barretenberg/bbup/yarn.lock deleted file mode 100644 index caccde246a97..000000000000 --- a/barretenberg/bbup/yarn.lock +++ /dev/null @@ -1,999 +0,0 @@ -# This file is generated by running "yarn install" inside your project. -# Manual changes might be lost - proceed with caution! - -__metadata: - version: 8 - cacheKey: 10c0 - -"@inquirer/core@npm:^6.0.0": - version: 6.0.0 - resolution: "@inquirer/core@npm:6.0.0" - dependencies: - "@inquirer/type": "npm:^1.1.6" - "@types/mute-stream": "npm:^0.0.4" - "@types/node": "npm:^20.10.7" - "@types/wrap-ansi": "npm:^3.0.0" - ansi-escapes: "npm:^4.3.2" - chalk: "npm:^4.1.2" - cli-spinners: "npm:^2.9.2" - cli-width: "npm:^4.1.0" - figures: "npm:^3.2.0" - mute-stream: "npm:^1.0.0" - run-async: "npm:^3.0.0" - signal-exit: "npm:^4.1.0" - strip-ansi: "npm:^6.0.1" - wrap-ansi: "npm:^6.2.0" - checksum: 10c0/0663330936c9baea58d8a10e93de6c3446ab84ed909c41d7b3f6762842473b8f88e10d776326d89a278abfb3c4083240d0f5876293908eb1005d0026aa2cfb7d - languageName: node - linkType: hard - -"@inquirer/input@npm:^1.2.16": - version: 1.2.16 - resolution: "@inquirer/input@npm:1.2.16" - dependencies: - "@inquirer/core": "npm:^6.0.0" - "@inquirer/type": "npm:^1.1.6" - chalk: "npm:^4.1.2" - checksum: 10c0/89f612119ba208b34d693e013432898e5de4ddb61dde4b1cd326fb421a0bd16353872da915ec58f34ca5503b77081faf402bbea15033f84b7be8ac5e0672e4a8 - languageName: node - linkType: hard - -"@inquirer/select@npm:^1.3.3": - version: 1.3.3 - resolution: "@inquirer/select@npm:1.3.3" - dependencies: - "@inquirer/core": "npm:^6.0.0" - "@inquirer/type": "npm:^1.1.6" - ansi-escapes: "npm:^4.3.2" - chalk: "npm:^4.1.2" - figures: "npm:^3.2.0" - checksum: 10c0/695de7dc85bf1b4ae4d13bbacb39e73cf4ff12f04da5cff4f0cc046db6bb32ff6051d30753a94299370908051133535e0db7e011e3b61e9806908eb1a7ef6b39 - languageName: node - linkType: hard - -"@inquirer/type@npm:^1.1.6": - version: 1.5.5 - resolution: "@inquirer/type@npm:1.5.5" - dependencies: - mute-stream: "npm:^1.0.0" - checksum: 10c0/4c41736c09ba9426b5a9e44993bdd54e8f532e791518802e33866f233a2a6126a25c1c82c19d1abbf1df627e57b1b957dd3f8318ea96073d8bfc32193943bcb3 - languageName: node - linkType: hard - -"@types/mute-stream@npm:^0.0.4": - version: 0.0.4 - resolution: "@types/mute-stream@npm:0.0.4" - dependencies: - "@types/node": "npm:*" - checksum: 10c0/944730fd7b398c5078de3c3d4d0afeec8584283bc694da1803fdfca14149ea385e18b1b774326f1601baf53898ce6d121a952c51eb62d188ef6fcc41f725c0dc - languageName: node - linkType: hard - -"@types/node@npm:*": - version: 22.7.4 - resolution: "@types/node@npm:22.7.4" - dependencies: - undici-types: "npm:~6.19.2" - checksum: 10c0/c22bf54515c78ff3170142c1e718b90e2a0003419dc2d55f79c9c9362edd590a6ab1450deb09ff6e1b32d1b4698da407930b16285e8be3a009ea6cd2695cac01 - languageName: node - linkType: hard - -"@types/node@npm:^20.10.7": - version: 20.16.10 - resolution: "@types/node@npm:20.16.10" - dependencies: - undici-types: "npm:~6.19.2" - checksum: 10c0/c0c0c7ecb083ec638c2118e54b5242bb4c39a75608cbac9475cf15aaceb64b8bc997a87a0798e700a81d61651c8a7750ae0455be0f0996ada6e8b2bb818d90c5 - languageName: node - linkType: hard - -"@types/wrap-ansi@npm:^3.0.0": - version: 3.0.0 - resolution: "@types/wrap-ansi@npm:3.0.0" - checksum: 
10c0/8d8f53363f360f38135301a06b596c295433ad01debd082078c33c6ed98b05a5c8fe8853a88265432126096084f4a135ec1564e3daad631b83296905509f90b3 - languageName: node - linkType: hard - -"agent-base@npm:6": - version: 6.0.2 - resolution: "agent-base@npm:6.0.2" - dependencies: - debug: "npm:4" - checksum: 10c0/dc4f757e40b5f3e3d674bc9beb4f1048f4ee83af189bae39be99f57bf1f48dde166a8b0a5342a84b5944ee8e6ed1e5a9d801858f4ad44764e84957122fe46261 - languageName: node - linkType: hard - -"ansi-colors@npm:^4.1.1": - version: 4.1.3 - resolution: "ansi-colors@npm:4.1.3" - checksum: 10c0/ec87a2f59902f74e61eada7f6e6fe20094a628dab765cfdbd03c3477599368768cffccdb5d3bb19a1b6c99126783a143b1fee31aab729b31ffe5836c7e5e28b9 - languageName: node - linkType: hard - -"ansi-escapes@npm:^4.3.2": - version: 4.3.2 - resolution: "ansi-escapes@npm:4.3.2" - dependencies: - type-fest: "npm:^0.21.3" - checksum: 10c0/da917be01871525a3dfcf925ae2977bc59e8c513d4423368645634bf5d4ceba5401574eb705c1e92b79f7292af5a656f78c5725a4b0e1cec97c4b413705c1d50 - languageName: node - linkType: hard - -"ansi-regex@npm:^5.0.1": - version: 5.0.1 - resolution: "ansi-regex@npm:5.0.1" - checksum: 10c0/9a64bb8627b434ba9327b60c027742e5d17ac69277960d041898596271d992d4d52ba7267a63ca10232e29f6107fc8a835f6ce8d719b88c5f8493f8254813737 - languageName: node - linkType: hard - -"ansi-regex@npm:^6.0.1": - version: 6.1.0 - resolution: "ansi-regex@npm:6.1.0" - checksum: 10c0/a91daeddd54746338478eef88af3439a7edf30f8e23196e2d6ed182da9add559c601266dbef01c2efa46a958ad6f1f8b176799657616c702b5b02e799e7fd8dc - languageName: node - linkType: hard - -"ansi-styles@npm:^4.0.0, ansi-styles@npm:^4.1.0": - version: 4.3.0 - resolution: "ansi-styles@npm:4.3.0" - dependencies: - color-convert: "npm:^2.0.1" - checksum: 10c0/895a23929da416f2bd3de7e9cb4eabd340949328ab85ddd6e484a637d8f6820d485f53933446f5291c3b760cbc488beb8e88573dd0f9c7daf83dccc8fe81b041 - languageName: node - linkType: hard - -"asynckit@npm:^0.4.0": - version: 0.4.0 - resolution: "asynckit@npm:0.4.0" - checksum: 10c0/d73e2ddf20c4eb9337e1b3df1a0f6159481050a5de457c55b14ea2e5cb6d90bb69e004c9af54737a5ee0917fcf2c9e25de67777bbe58261847846066ba75bc9d - languageName: node - linkType: hard - -"axios@npm:^1.7.7": - version: 1.7.7 - resolution: "axios@npm:1.7.7" - dependencies: - follow-redirects: "npm:^1.15.6" - form-data: "npm:^4.0.0" - proxy-from-env: "npm:^1.1.0" - checksum: 10c0/4499efc89e86b0b49ffddc018798de05fab26e3bf57913818266be73279a6418c3ce8f9e934c7d2d707ab8c095e837fc6c90608fb7715b94d357720b5f568af7 - languageName: node - linkType: hard - -"b4a@npm:^1.6.4, b4a@npm:^1.6.6": - version: 1.6.7 - resolution: "b4a@npm:1.6.7" - checksum: 10c0/ec2f004d1daae04be8c5a1f8aeb7fea213c34025e279db4958eb0b82c1729ee25f7c6e89f92a5f65c8a9cf2d017ce27e3dda912403341d1781bd74528a4849d4 - languageName: node - linkType: hard - -"balanced-match@npm:^1.0.0": - version: 1.0.2 - resolution: "balanced-match@npm:1.0.2" - checksum: 10c0/9308baf0a7e4838a82bbfd11e01b1cb0f0cf2893bc1676c27c2a8c0e70cbae1c59120c3268517a8ae7fb6376b4639ef81ca22582611dbee4ed28df945134aaee - languageName: node - linkType: hard - -"bare-events@npm:^2.0.0, bare-events@npm:^2.2.0": - version: 2.5.0 - resolution: "bare-events@npm:2.5.0" - checksum: 10c0/afbeec4e8be4d93fb4a3be65c3b4a891a2205aae30b5a38fafd42976cc76cf30dad348963fe330a0d70186e15dc507c11af42c89af5dddab2a54e5aff02e2896 - languageName: node - linkType: hard - -"bare-fs@npm:^2.1.1": - version: 2.3.5 - resolution: "bare-fs@npm:2.3.5" - dependencies: - bare-events: "npm:^2.0.0" - bare-path: "npm:^2.0.0" - bare-stream: "npm:^2.0.0" - 
checksum: 10c0/ff18cc9be7c557c38e0342681ba3672ae4b01e5696b567d4035e5995255dc6bc7d4df88ed210fa4d3eb940eb29512e924ebb42814c87fc59a2bee8cf83b7c2f9 - languageName: node - linkType: hard - -"bare-os@npm:^2.1.0": - version: 2.4.4 - resolution: "bare-os@npm:2.4.4" - checksum: 10c0/e7d1a7b2100c05da8d25b60d0d48cf850c6f57064577a3f2f51cf18d417fbcfd6967ed2d8314320914ed69e0f2ebcf54eb1b36092dd172d8e8f969cf8cccf041 - languageName: node - linkType: hard - -"bare-path@npm:^2.0.0, bare-path@npm:^2.1.0": - version: 2.1.3 - resolution: "bare-path@npm:2.1.3" - dependencies: - bare-os: "npm:^2.1.0" - checksum: 10c0/35587e177fc8fa5b13fb90bac8779b5ce49c99016d221ddaefe2232d02bd4295d79b941e14ae19fda75ec42a6fe5fb66c07d83ae7ec11462178e66b7be65ca74 - languageName: node - linkType: hard - -"bare-stream@npm:^2.0.0": - version: 2.3.0 - resolution: "bare-stream@npm:2.3.0" - dependencies: - b4a: "npm:^1.6.6" - streamx: "npm:^2.20.0" - checksum: 10c0/374a517542e6a0c3c07f3a1d567db612685e66708f79781112aa0e81c1f117ec561cc1ff3926144f15a2200316a77030c95dcc13a1b96d5303f0748798b764cf - languageName: node - linkType: hard - -"bbup@workspace:.": - version: 0.0.0-use.local - resolution: "bbup@workspace:." - dependencies: - "@inquirer/input": "npm:^1.2.16" - "@inquirer/select": "npm:^1.3.3" - axios: "npm:^1.7.7" - commander: "npm:^11.1.0" - log-symbols: "npm:^7.0.0" - ora: "npm:^8.1.0" - tar-fs: "npm:^3.0.6" - tiged: "npm:^2.12.6" - bin: - bbup: ./bbup.js - languageName: unknown - linkType: soft - -"brace-expansion@npm:^1.1.7": - version: 1.1.11 - resolution: "brace-expansion@npm:1.1.11" - dependencies: - balanced-match: "npm:^1.0.0" - concat-map: "npm:0.0.1" - checksum: 10c0/695a56cd058096a7cb71fb09d9d6a7070113c7be516699ed361317aca2ec169f618e28b8af352e02ab4233fb54eb0168460a40dc320bab0034b36ab59aaad668 - languageName: node - linkType: hard - -"chalk@npm:^4.1.2": - version: 4.1.2 - resolution: "chalk@npm:4.1.2" - dependencies: - ansi-styles: "npm:^4.1.0" - supports-color: "npm:^7.1.0" - checksum: 10c0/4a3fef5cc34975c898ffe77141450f679721df9dde00f6c304353fa9c8b571929123b26a0e4617bde5018977eb655b31970c297b91b63ee83bb82aeb04666880 - languageName: node - linkType: hard - -"chalk@npm:^5.3.0": - version: 5.3.0 - resolution: "chalk@npm:5.3.0" - checksum: 10c0/8297d436b2c0f95801103ff2ef67268d362021b8210daf8ddbe349695333eb3610a71122172ff3b0272f1ef2cf7cc2c41fdaa4715f52e49ffe04c56340feed09 - languageName: node - linkType: hard - -"chownr@npm:^2.0.0": - version: 2.0.0 - resolution: "chownr@npm:2.0.0" - checksum: 10c0/594754e1303672171cc04e50f6c398ae16128eb134a88f801bf5354fd96f205320f23536a045d9abd8b51024a149696e51231565891d4efdab8846021ecf88e6 - languageName: node - linkType: hard - -"cli-cursor@npm:^5.0.0": - version: 5.0.0 - resolution: "cli-cursor@npm:5.0.0" - dependencies: - restore-cursor: "npm:^5.0.0" - checksum: 10c0/7ec62f69b79f6734ab209a3e4dbdc8af7422d44d360a7cb1efa8a0887bbe466a6e625650c466fe4359aee44dbe2dc0b6994b583d40a05d0808a5cb193641d220 - languageName: node - linkType: hard - -"cli-spinners@npm:^2.9.2": - version: 2.9.2 - resolution: "cli-spinners@npm:2.9.2" - checksum: 10c0/907a1c227ddf0d7a101e7ab8b300affc742ead4b4ebe920a5bf1bc6d45dce2958fcd195eb28fa25275062fe6fa9b109b93b63bc8033396ed3bcb50297008b3a3 - languageName: node - linkType: hard - -"cli-width@npm:^4.1.0": - version: 4.1.0 - resolution: "cli-width@npm:4.1.0" - checksum: 10c0/1fbd56413578f6117abcaf858903ba1f4ad78370a4032f916745fa2c7e390183a9d9029cf837df320b0fdce8137668e522f60a30a5f3d6529ff3872d265a955f - languageName: node - linkType: hard - -"color-convert@npm:^2.0.1": - 
version: 2.0.1 - resolution: "color-convert@npm:2.0.1" - dependencies: - color-name: "npm:~1.1.4" - checksum: 10c0/37e1150172f2e311fe1b2df62c6293a342ee7380da7b9cfdba67ea539909afbd74da27033208d01d6d5cfc65ee7868a22e18d7e7648e004425441c0f8a15a7d7 - languageName: node - linkType: hard - -"color-name@npm:~1.1.4": - version: 1.1.4 - resolution: "color-name@npm:1.1.4" - checksum: 10c0/a1a3f914156960902f46f7f56bc62effc6c94e84b2cae157a526b1c1f74b677a47ec602bf68a61abfa2b42d15b7c5651c6dbe72a43af720bc588dff885b10f95 - languageName: node - linkType: hard - -"colorette@npm:1.2.1": - version: 1.2.1 - resolution: "colorette@npm:1.2.1" - checksum: 10c0/993422e8ef02c3e267ac49ea7f3457839ec261a27a9bf00c4eb1fab5eec40bc1e992972e6e4c392a488838c905c80410736dfc94109be6ae53f19434461022a6 - languageName: node - linkType: hard - -"combined-stream@npm:^1.0.8": - version: 1.0.8 - resolution: "combined-stream@npm:1.0.8" - dependencies: - delayed-stream: "npm:~1.0.0" - checksum: 10c0/0dbb829577e1b1e839fa82b40c07ffaf7de8a09b935cadd355a73652ae70a88b4320db322f6634a4ad93424292fa80973ac6480986247f1734a1137debf271d5 - languageName: node - linkType: hard - -"commander@npm:^11.1.0": - version: 11.1.0 - resolution: "commander@npm:11.1.0" - checksum: 10c0/13cc6ac875e48780250f723fb81c1c1178d35c5decb1abb1b628b3177af08a8554e76b2c0f29de72d69eef7c864d12613272a71fabef8047922bc622ab75a179 - languageName: node - linkType: hard - -"concat-map@npm:0.0.1": - version: 0.0.1 - resolution: "concat-map@npm:0.0.1" - checksum: 10c0/c996b1cfdf95b6c90fee4dae37e332c8b6eb7d106430c17d538034c0ad9a1630cb194d2ab37293b1bdd4d779494beee7786d586a50bd9376fd6f7bcc2bd4c98f - languageName: node - linkType: hard - -"debug@npm:4": - version: 4.3.7 - resolution: "debug@npm:4.3.7" - dependencies: - ms: "npm:^2.1.3" - peerDependenciesMeta: - supports-color: - optional: true - checksum: 10c0/1471db19c3b06d485a622d62f65947a19a23fbd0dd73f7fd3eafb697eec5360cde447fb075919987899b1a2096e85d35d4eb5a4de09a57600ac9cf7e6c8e768b - languageName: node - linkType: hard - -"delayed-stream@npm:~1.0.0": - version: 1.0.0 - resolution: "delayed-stream@npm:1.0.0" - checksum: 10c0/d758899da03392e6712f042bec80aa293bbe9e9ff1b2634baae6a360113e708b91326594c8a486d475c69d6259afb7efacdc3537bfcda1c6c648e390ce601b19 - languageName: node - linkType: hard - -"emoji-regex@npm:^10.3.0": - version: 10.4.0 - resolution: "emoji-regex@npm:10.4.0" - checksum: 10c0/a3fcedfc58bfcce21a05a5f36a529d81e88d602100145fcca3dc6f795e3c8acc4fc18fe773fbf9b6d6e9371205edb3afa2668ec3473fa2aa7fd47d2a9d46482d - languageName: node - linkType: hard - -"emoji-regex@npm:^8.0.0": - version: 8.0.0 - resolution: "emoji-regex@npm:8.0.0" - checksum: 10c0/b6053ad39951c4cf338f9092d7bfba448cdfd46fe6a2a034700b149ac9ffbc137e361cbd3c442297f86bed2e5f7576c1b54cc0a6bf8ef5106cc62f496af35010 - languageName: node - linkType: hard - -"end-of-stream@npm:^1.1.0": - version: 1.4.4 - resolution: "end-of-stream@npm:1.4.4" - dependencies: - once: "npm:^1.4.0" - checksum: 10c0/870b423afb2d54bb8d243c63e07c170409d41e20b47eeef0727547aea5740bd6717aca45597a9f2745525667a6b804c1e7bede41f856818faee5806dd9ff3975 - languageName: node - linkType: hard - -"enquirer@npm:2.3.6": - version: 2.3.6 - resolution: "enquirer@npm:2.3.6" - dependencies: - ansi-colors: "npm:^4.1.1" - checksum: 10c0/8e070e052c2c64326a2803db9084d21c8aaa8c688327f133bf65c4a712586beb126fd98c8a01cfb0433e82a4bd3b6262705c55a63e0f7fb91d06b9cedbde9a11 - languageName: node - linkType: hard - -"escape-string-regexp@npm:^1.0.5": - version: 1.0.5 - resolution: "escape-string-regexp@npm:1.0.5" - checksum: 
10c0/a968ad453dd0c2724e14a4f20e177aaf32bb384ab41b674a8454afe9a41c5e6fe8903323e0a1052f56289d04bd600f81278edf140b0fcc02f5cac98d0f5b5371 - languageName: node - linkType: hard - -"fast-fifo@npm:^1.2.0, fast-fifo@npm:^1.3.2": - version: 1.3.2 - resolution: "fast-fifo@npm:1.3.2" - checksum: 10c0/d53f6f786875e8b0529f784b59b4b05d4b5c31c651710496440006a398389a579c8dbcd2081311478b5bf77f4b0b21de69109c5a4eabea9d8e8783d1eb864e4c - languageName: node - linkType: hard - -"figures@npm:^3.2.0": - version: 3.2.0 - resolution: "figures@npm:3.2.0" - dependencies: - escape-string-regexp: "npm:^1.0.5" - checksum: 10c0/9c421646ede432829a50bc4e55c7a4eb4bcb7cc07b5bab2f471ef1ab9a344595bbebb6c5c21470093fbb730cd81bbca119624c40473a125293f656f49cb47629 - languageName: node - linkType: hard - -"follow-redirects@npm:^1.15.6": - version: 1.15.9 - resolution: "follow-redirects@npm:1.15.9" - peerDependenciesMeta: - debug: - optional: true - checksum: 10c0/5829165bd112c3c0e82be6c15b1a58fa9dcfaede3b3c54697a82fe4a62dd5ae5e8222956b448d2f98e331525f05d00404aba7d696de9e761ef6e42fdc780244f - languageName: node - linkType: hard - -"form-data@npm:^4.0.0": - version: 4.0.0 - resolution: "form-data@npm:4.0.0" - dependencies: - asynckit: "npm:^0.4.0" - combined-stream: "npm:^1.0.8" - mime-types: "npm:^2.1.12" - checksum: 10c0/cb6f3ac49180be03ff07ba3ff125f9eba2ff0b277fb33c7fc47569fc5e616882c5b1c69b9904c4c4187e97dd0419dd03b134174756f296dec62041e6527e2c6e - languageName: node - linkType: hard - -"fs-extra@npm:10.1.0": - version: 10.1.0 - resolution: "fs-extra@npm:10.1.0" - dependencies: - graceful-fs: "npm:^4.2.0" - jsonfile: "npm:^6.0.1" - universalify: "npm:^2.0.0" - checksum: 10c0/5f579466e7109719d162a9249abbeffe7f426eb133ea486e020b89bc6d67a741134076bf439983f2eb79276ceaf6bd7b7c1e43c3fd67fe889863e69072fb0a5e - languageName: node - linkType: hard - -"fs-minipass@npm:^2.0.0": - version: 2.1.0 - resolution: "fs-minipass@npm:2.1.0" - dependencies: - minipass: "npm:^3.0.0" - checksum: 10c0/703d16522b8282d7299337539c3ed6edddd1afe82435e4f5b76e34a79cd74e488a8a0e26a636afc2440e1a23b03878e2122e3a2cfe375a5cf63c37d92b86a004 - languageName: node - linkType: hard - -"fs.realpath@npm:^1.0.0": - version: 1.0.0 - resolution: "fs.realpath@npm:1.0.0" - checksum: 10c0/444cf1291d997165dfd4c0d58b69f0e4782bfd9149fd72faa4fe299e68e0e93d6db941660b37dd29153bf7186672ececa3b50b7e7249477b03fdf850f287c948 - languageName: node - linkType: hard - -"fuzzysearch@npm:1.0.3": - version: 1.0.3 - resolution: "fuzzysearch@npm:1.0.3" - checksum: 10c0/de6ab4a84cb0d570d1b55c9b9c2bb435b2a781452d23e63911e95d333e3dd1badea743a1d1ab0cac6f28d7e262347dfce10632f0aa9e5df0baaae0270f49578f - languageName: node - linkType: hard - -"get-east-asian-width@npm:^1.0.0": - version: 1.2.0 - resolution: "get-east-asian-width@npm:1.2.0" - checksum: 10c0/914b1e217cf38436c24b4c60b4c45289e39a45bf9e65ef9fd343c2815a1a02b8a0215aeec8bf9c07c516089004b6e3826332481f40a09529fcadbf6e579f286b - languageName: node - linkType: hard - -"glob@npm:^7.1.3": - version: 7.2.3 - resolution: "glob@npm:7.2.3" - dependencies: - fs.realpath: "npm:^1.0.0" - inflight: "npm:^1.0.4" - inherits: "npm:2" - minimatch: "npm:^3.1.1" - once: "npm:^1.3.0" - path-is-absolute: "npm:^1.0.0" - checksum: 10c0/65676153e2b0c9095100fe7f25a778bf45608eeb32c6048cf307f579649bcc30353277b3b898a3792602c65764e5baa4f643714dfbdfd64ea271d210c7a425fe - languageName: node - linkType: hard - -"globalyzer@npm:0.1.0": - version: 0.1.0 - resolution: "globalyzer@npm:0.1.0" - checksum: 
10c0/e16e47a5835cbe8a021423d4c7fcd9f5f85815b4190a7f50c1fdb95fc559d72e4fb30be96f106c66a99413f36d72da0f8323d19d27f60a8feec9d936139ec5a8 - languageName: node - linkType: hard - -"globrex@npm:^0.1.2": - version: 0.1.2 - resolution: "globrex@npm:0.1.2" - checksum: 10c0/a54c029520cf58bda1d8884f72bd49b4cd74e977883268d931fd83bcbd1a9eb96d57c7dbd4ad80148fb9247467ebfb9b215630b2ed7563b2a8de02e1ff7f89d1 - languageName: node - linkType: hard - -"graceful-fs@npm:^4.1.6, graceful-fs@npm:^4.2.0": - version: 4.2.11 - resolution: "graceful-fs@npm:4.2.11" - checksum: 10c0/386d011a553e02bc594ac2ca0bd6d9e4c22d7fa8cfbfc448a6d148c59ea881b092db9dbe3547ae4b88e55f1b01f7c4a2ecc53b310c042793e63aa44cf6c257f2 - languageName: node - linkType: hard - -"has-flag@npm:^4.0.0": - version: 4.0.0 - resolution: "has-flag@npm:4.0.0" - checksum: 10c0/2e789c61b7888d66993e14e8331449e525ef42aac53c627cc53d1c3334e768bcb6abdc4f5f0de1478a25beec6f0bd62c7549058b7ac53e924040d4f301f02fd1 - languageName: node - linkType: hard - -"https-proxy-agent@npm:5.0.0": - version: 5.0.0 - resolution: "https-proxy-agent@npm:5.0.0" - dependencies: - agent-base: "npm:6" - debug: "npm:4" - checksum: 10c0/670c04f7f0effb5a449c094ea037cbcfb28a5ab93ed22e8c343095202cc7288027869a5a21caf4ee3b8ea06f9624ef1e1fc9044669c0fd92617654ff39f30806 - languageName: node - linkType: hard - -"inflight@npm:^1.0.4": - version: 1.0.6 - resolution: "inflight@npm:1.0.6" - dependencies: - once: "npm:^1.3.0" - wrappy: "npm:1" - checksum: 10c0/7faca22584600a9dc5b9fca2cd5feb7135ac8c935449837b315676b4c90aa4f391ec4f42240178244b5a34e8bede1948627fda392ca3191522fc46b34e985ab2 - languageName: node - linkType: hard - -"inherits@npm:2": - version: 2.0.4 - resolution: "inherits@npm:2.0.4" - checksum: 10c0/4e531f648b29039fb7426fb94075e6545faa1eb9fe83c29f0b6d9e7263aceb4289d2d4557db0d428188eeb449cc7c5e77b0a0b2c4e248ff2a65933a0dee49ef2 - languageName: node - linkType: hard - -"is-fullwidth-code-point@npm:^3.0.0": - version: 3.0.0 - resolution: "is-fullwidth-code-point@npm:3.0.0" - checksum: 10c0/bb11d825e049f38e04c06373a8d72782eee0205bda9d908cc550ccb3c59b99d750ff9537982e01733c1c94a58e35400661f57042158ff5e8f3e90cf936daf0fc - languageName: node - linkType: hard - -"is-interactive@npm:^2.0.0": - version: 2.0.0 - resolution: "is-interactive@npm:2.0.0" - checksum: 10c0/801c8f6064f85199dc6bf99b5dd98db3282e930c3bc197b32f2c5b89313bb578a07d1b8a01365c4348c2927229234f3681eb861b9c2c92bee72ff397390fa600 - languageName: node - linkType: hard - -"is-unicode-supported@npm:^1.3.0": - version: 1.3.0 - resolution: "is-unicode-supported@npm:1.3.0" - checksum: 10c0/b8674ea95d869f6faabddc6a484767207058b91aea0250803cbf1221345cb0c56f466d4ecea375dc77f6633d248d33c47bd296fb8f4cdba0b4edba8917e83d8a - languageName: node - linkType: hard - -"is-unicode-supported@npm:^2.0.0": - version: 2.1.0 - resolution: "is-unicode-supported@npm:2.1.0" - checksum: 10c0/a0f53e9a7c1fdbcf2d2ef6e40d4736fdffff1c9f8944c75e15425118ff3610172c87bf7bc6c34d3903b04be59790bb2212ddbe21ee65b5a97030fc50370545a5 - languageName: node - linkType: hard - -"jsonfile@npm:^6.0.1": - version: 6.1.0 - resolution: "jsonfile@npm:6.1.0" - dependencies: - graceful-fs: "npm:^4.1.6" - universalify: "npm:^2.0.0" - dependenciesMeta: - graceful-fs: - optional: true - checksum: 10c0/4f95b5e8a5622b1e9e8f33c96b7ef3158122f595998114d1e7f03985649ea99cb3cd99ce1ed1831ae94c8c8543ab45ebd044207612f31a56fd08462140e46865 - languageName: node - linkType: hard - -"log-symbols@npm:^6.0.0": - version: 6.0.0 - resolution: "log-symbols@npm:6.0.0" - dependencies: - chalk: "npm:^5.3.0" - 
is-unicode-supported: "npm:^1.3.0" - checksum: 10c0/36636cacedba8f067d2deb4aad44e91a89d9efb3ead27e1846e7b82c9a10ea2e3a7bd6ce28a7ca616bebc60954ff25c67b0f92d20a6a746bb3cc52c3701891f6 - languageName: node - linkType: hard - -"log-symbols@npm:^7.0.0": - version: 7.0.0 - resolution: "log-symbols@npm:7.0.0" - dependencies: - is-unicode-supported: "npm:^2.0.0" - yoctocolors: "npm:^2.1.1" - checksum: 10c0/209eeb0009da6c3f9ebb736d3d65ff1ad3cb757b0c3ba66a5089d7463f77155ade88084c4db31b53341c89aeae3dc89dbc56888d2ae6ffd082bf96c4d2ac429d - languageName: node - linkType: hard - -"mime-db@npm:1.52.0": - version: 1.52.0 - resolution: "mime-db@npm:1.52.0" - checksum: 10c0/0557a01deebf45ac5f5777fe7740b2a5c309c6d62d40ceab4e23da9f821899ce7a900b7ac8157d4548ddbb7beffe9abc621250e6d182b0397ec7f10c7b91a5aa - languageName: node - linkType: hard - -"mime-types@npm:^2.1.12": - version: 2.1.35 - resolution: "mime-types@npm:2.1.35" - dependencies: - mime-db: "npm:1.52.0" - checksum: 10c0/82fb07ec56d8ff1fc999a84f2f217aa46cb6ed1033fefaabd5785b9a974ed225c90dc72fff460259e66b95b73648596dbcc50d51ed69cdf464af2d237d3149b2 - languageName: node - linkType: hard - -"mimic-function@npm:^5.0.0": - version: 5.0.1 - resolution: "mimic-function@npm:5.0.1" - checksum: 10c0/f3d9464dd1816ecf6bdf2aec6ba32c0728022039d992f178237d8e289b48764fee4131319e72eedd4f7f094e22ded0af836c3187a7edc4595d28dd74368fd81d - languageName: node - linkType: hard - -"minimatch@npm:^3.1.1": - version: 3.1.2 - resolution: "minimatch@npm:3.1.2" - dependencies: - brace-expansion: "npm:^1.1.7" - checksum: 10c0/0262810a8fc2e72cca45d6fd86bd349eee435eb95ac6aa45c9ea2180e7ee875ef44c32b55b5973ceabe95ea12682f6e3725cbb63d7a2d1da3ae1163c8b210311 - languageName: node - linkType: hard - -"minipass@npm:^3.0.0": - version: 3.3.6 - resolution: "minipass@npm:3.3.6" - dependencies: - yallist: "npm:^4.0.0" - checksum: 10c0/a114746943afa1dbbca8249e706d1d38b85ed1298b530f5808ce51f8e9e941962e2a5ad2e00eae7dd21d8a4aae6586a66d4216d1a259385e9d0358f0c1eba16c - languageName: node - linkType: hard - -"minipass@npm:^5.0.0": - version: 5.0.0 - resolution: "minipass@npm:5.0.0" - checksum: 10c0/a91d8043f691796a8ac88df039da19933ef0f633e3d7f0d35dcd5373af49131cf2399bfc355f41515dc495e3990369c3858cd319e5c2722b4753c90bf3152462 - languageName: node - linkType: hard - -"minizlib@npm:^2.1.1": - version: 2.1.2 - resolution: "minizlib@npm:2.1.2" - dependencies: - minipass: "npm:^3.0.0" - yallist: "npm:^4.0.0" - checksum: 10c0/64fae024e1a7d0346a1102bb670085b17b7f95bf6cfdf5b128772ec8faf9ea211464ea4add406a3a6384a7d87a0cd1a96263692134323477b4fb43659a6cab78 - languageName: node - linkType: hard - -"mkdirp@npm:^1.0.3": - version: 1.0.4 - resolution: "mkdirp@npm:1.0.4" - bin: - mkdirp: bin/cmd.js - checksum: 10c0/46ea0f3ffa8bc6a5bc0c7081ffc3907777f0ed6516888d40a518c5111f8366d97d2678911ad1a6882bf592fa9de6c784fea32e1687bb94e1f4944170af48a5cf - languageName: node - linkType: hard - -"mri@npm:1.1.6": - version: 1.1.6 - resolution: "mri@npm:1.1.6" - checksum: 10c0/dd29640dd5d4d3abc959156806adc2e7c6233b010609727499616f2047e9481dcbd3ba9b0bc7135428f1c42cca6b0475cee6f898b41ff8ccec730e4fa80de40d - languageName: node - linkType: hard - -"ms@npm:^2.1.3": - version: 2.1.3 - resolution: "ms@npm:2.1.3" - checksum: 10c0/d924b57e7312b3b63ad21fc5b3dc0af5e78d61a1fc7cfb5457edaf26326bf62be5307cc87ffb6862ef1c2b33b0233cdb5d4f01c4c958cc0d660948b65a287a48 - languageName: node - linkType: hard - -"mute-stream@npm:^1.0.0": - version: 1.0.0 - resolution: "mute-stream@npm:1.0.0" - checksum: 
10c0/dce2a9ccda171ec979a3b4f869a102b1343dee35e920146776780de182f16eae459644d187e38d59a3d37adf85685e1c17c38cf7bfda7e39a9880f7a1d10a74c - languageName: node - linkType: hard - -"once@npm:^1.3.0, once@npm:^1.3.1, once@npm:^1.4.0": - version: 1.4.0 - resolution: "once@npm:1.4.0" - dependencies: - wrappy: "npm:1" - checksum: 10c0/5d48aca287dfefabd756621c5dfce5c91a549a93e9fdb7b8246bc4c4790aa2ec17b34a260530474635147aeb631a2dcc8b32c613df0675f96041cbb8244517d0 - languageName: node - linkType: hard - -"onetime@npm:^7.0.0": - version: 7.0.0 - resolution: "onetime@npm:7.0.0" - dependencies: - mimic-function: "npm:^5.0.0" - checksum: 10c0/5cb9179d74b63f52a196a2e7037ba2b9a893245a5532d3f44360012005c9cadb60851d56716ebff18a6f47129dab7168022445df47c2aff3b276d92585ed1221 - languageName: node - linkType: hard - -"ora@npm:^8.1.0": - version: 8.1.0 - resolution: "ora@npm:8.1.0" - dependencies: - chalk: "npm:^5.3.0" - cli-cursor: "npm:^5.0.0" - cli-spinners: "npm:^2.9.2" - is-interactive: "npm:^2.0.0" - is-unicode-supported: "npm:^2.0.0" - log-symbols: "npm:^6.0.0" - stdin-discarder: "npm:^0.2.2" - string-width: "npm:^7.2.0" - strip-ansi: "npm:^7.1.0" - checksum: 10c0/4ac9a6dd7fe915a354680f33ced21ee96d13d3c5ab0dc00b3c3ba9e3695ed141b1d045222990f5a71a9a91f801042a0b0d32e58dfc5509ff9b81efdd3fcf6339 - languageName: node - linkType: hard - -"path-is-absolute@npm:^1.0.0": - version: 1.0.1 - resolution: "path-is-absolute@npm:1.0.1" - checksum: 10c0/127da03c82172a2a50099cddbf02510c1791fc2cc5f7713ddb613a56838db1e8168b121a920079d052e0936c23005562059756d653b7c544c53185efe53be078 - languageName: node - linkType: hard - -"proxy-from-env@npm:^1.1.0": - version: 1.1.0 - resolution: "proxy-from-env@npm:1.1.0" - checksum: 10c0/fe7dd8b1bdbbbea18d1459107729c3e4a2243ca870d26d34c2c1bcd3e4425b7bcc5112362df2d93cc7fb9746f6142b5e272fd1cc5c86ddf8580175186f6ad42b - languageName: node - linkType: hard - -"pump@npm:^3.0.0": - version: 3.0.2 - resolution: "pump@npm:3.0.2" - dependencies: - end-of-stream: "npm:^1.1.0" - once: "npm:^1.3.1" - checksum: 10c0/5ad655cb2a7738b4bcf6406b24ad0970d680649d996b55ad20d1be8e0c02394034e4c45ff7cd105d87f1e9b96a0e3d06fd28e11fae8875da26e7f7a8e2c9726f - languageName: node - linkType: hard - -"queue-tick@npm:^1.0.1": - version: 1.0.1 - resolution: "queue-tick@npm:1.0.1" - checksum: 10c0/0db998e2c9b15215317dbcf801e9b23e6bcde4044e115155dae34f8e7454b9a783f737c9a725528d677b7a66c775eb7a955cf144fe0b87f62b575ce5bfd515a9 - languageName: node - linkType: hard - -"restore-cursor@npm:^5.0.0": - version: 5.1.0 - resolution: "restore-cursor@npm:5.1.0" - dependencies: - onetime: "npm:^7.0.0" - signal-exit: "npm:^4.1.0" - checksum: 10c0/c2ba89131eea791d1b25205bdfdc86699767e2b88dee2a590b1a6caa51737deac8bad0260a5ded2f7c074b7db2f3a626bcf1fcf3cdf35974cbeea5e2e6764f60 - languageName: node - linkType: hard - -"rimraf@npm:3.0.2": - version: 3.0.2 - resolution: "rimraf@npm:3.0.2" - dependencies: - glob: "npm:^7.1.3" - bin: - rimraf: bin.js - checksum: 10c0/9cb7757acb489bd83757ba1a274ab545eafd75598a9d817e0c3f8b164238dd90eba50d6b848bd4dcc5f3040912e882dc7ba71653e35af660d77b25c381d402e8 - languageName: node - linkType: hard - -"run-async@npm:^3.0.0": - version: 3.0.0 - resolution: "run-async@npm:3.0.0" - checksum: 10c0/b18b562ae37c3020083dcaae29642e4cc360c824fbfb6b7d50d809a9d5227bb986152d09310255842c8dce40526e82ca768f02f00806c91ba92a8dfa6159cb85 - languageName: node - linkType: hard - -"signal-exit@npm:^4.1.0": - version: 4.1.0 - resolution: "signal-exit@npm:4.1.0" - checksum: 
10c0/41602dce540e46d599edba9d9860193398d135f7ff72cab629db5171516cfae628d21e7bfccde1bbfdf11c48726bc2a6d1a8fb8701125852fbfda7cf19c6aa83 - languageName: node - linkType: hard - -"stdin-discarder@npm:^0.2.2": - version: 0.2.2 - resolution: "stdin-discarder@npm:0.2.2" - checksum: 10c0/c78375e82e956d7a64be6e63c809c7f058f5303efcaf62ea48350af072bacdb99c06cba39209b45a071c1acbd49116af30df1df9abb448df78a6005b72f10537 - languageName: node - linkType: hard - -"streamx@npm:^2.15.0, streamx@npm:^2.20.0": - version: 2.20.1 - resolution: "streamx@npm:2.20.1" - dependencies: - bare-events: "npm:^2.2.0" - fast-fifo: "npm:^1.3.2" - queue-tick: "npm:^1.0.1" - text-decoder: "npm:^1.1.0" - dependenciesMeta: - bare-events: - optional: true - checksum: 10c0/34ffa2ee9465d70e18c7e2ba70189720c166d150ab83eb7700304620fa23ff42a69cb37d712ea4b5fc6234d8e74346a88bb4baceb873c6b05e52ac420f8abb4d - languageName: node - linkType: hard - -"string-width@npm:^4.1.0": - version: 4.2.3 - resolution: "string-width@npm:4.2.3" - dependencies: - emoji-regex: "npm:^8.0.0" - is-fullwidth-code-point: "npm:^3.0.0" - strip-ansi: "npm:^6.0.1" - checksum: 10c0/1e525e92e5eae0afd7454086eed9c818ee84374bb80328fc41217ae72ff5f065ef1c9d7f72da41de40c75fa8bb3dee63d92373fd492c84260a552c636392a47b - languageName: node - linkType: hard - -"string-width@npm:^7.2.0": - version: 7.2.0 - resolution: "string-width@npm:7.2.0" - dependencies: - emoji-regex: "npm:^10.3.0" - get-east-asian-width: "npm:^1.0.0" - strip-ansi: "npm:^7.1.0" - checksum: 10c0/eb0430dd43f3199c7a46dcbf7a0b34539c76fe3aa62763d0b0655acdcbdf360b3f66f3d58ca25ba0205f42ea3491fa00f09426d3b7d3040e506878fc7664c9b9 - languageName: node - linkType: hard - -"strip-ansi@npm:^6.0.0, strip-ansi@npm:^6.0.1": - version: 6.0.1 - resolution: "strip-ansi@npm:6.0.1" - dependencies: - ansi-regex: "npm:^5.0.1" - checksum: 10c0/1ae5f212a126fe5b167707f716942490e3933085a5ff6c008ab97ab2f272c8025d3aa218b7bd6ab25729ca20cc81cddb252102f8751e13482a5199e873680952 - languageName: node - linkType: hard - -"strip-ansi@npm:^7.1.0": - version: 7.1.0 - resolution: "strip-ansi@npm:7.1.0" - dependencies: - ansi-regex: "npm:^6.0.1" - checksum: 10c0/a198c3762e8832505328cbf9e8c8381de14a4fa50a4f9b2160138158ea88c0f5549fb50cb13c651c3088f47e63a108b34622ec18c0499b6c8c3a5ddf6b305ac4 - languageName: node - linkType: hard - -"supports-color@npm:^7.1.0": - version: 7.2.0 - resolution: "supports-color@npm:7.2.0" - dependencies: - has-flag: "npm:^4.0.0" - checksum: 10c0/afb4c88521b8b136b5f5f95160c98dee7243dc79d5432db7efc27efb219385bbc7d9427398e43dd6cc730a0f87d5085ce1652af7efbe391327bc0a7d0f7fc124 - languageName: node - linkType: hard - -"tar-fs@npm:^3.0.6": - version: 3.0.6 - resolution: "tar-fs@npm:3.0.6" - dependencies: - bare-fs: "npm:^2.1.1" - bare-path: "npm:^2.1.0" - pump: "npm:^3.0.0" - tar-stream: "npm:^3.1.5" - dependenciesMeta: - bare-fs: - optional: true - bare-path: - optional: true - checksum: 10c0/207b7c0f193495668bd9dbad09a0108ce4ffcfec5bce2133f90988cdda5c81fad83c99f963d01e47b565196594f7a17dbd063ae55b97b36268fcc843975278ee - languageName: node - linkType: hard - -"tar-stream@npm:^3.1.5": - version: 3.1.7 - resolution: "tar-stream@npm:3.1.7" - dependencies: - b4a: "npm:^1.6.4" - fast-fifo: "npm:^1.2.0" - streamx: "npm:^2.15.0" - checksum: 10c0/a09199d21f8714bd729993ac49b6c8efcb808b544b89f23378ad6ffff6d1cb540878614ba9d4cfec11a64ef39e1a6f009a5398371491eb1fda606ffc7f70f718 - languageName: node - linkType: hard - -"tar@npm:^6.1.11": - version: 6.2.1 - resolution: "tar@npm:6.2.1" - dependencies: - chownr: "npm:^2.0.0" - fs-minipass: 
"npm:^2.0.0" - minipass: "npm:^5.0.0" - minizlib: "npm:^2.1.1" - mkdirp: "npm:^1.0.3" - yallist: "npm:^4.0.0" - checksum: 10c0/a5eca3eb50bc11552d453488344e6507156b9193efd7635e98e867fab275d527af53d8866e2370cd09dfe74378a18111622ace35af6a608e5223a7d27fe99537 - languageName: node - linkType: hard - -"text-decoder@npm:^1.1.0": - version: 1.2.0 - resolution: "text-decoder@npm:1.2.0" - dependencies: - b4a: "npm:^1.6.4" - checksum: 10c0/398171bef376e06864cd6ba24e0787cc626bebc84a1bbda758d06a6e9b729cc8613f7923dd0d294abd88e8bb5cd7261aad5fda7911fb87253fe71b2b5ac6e507 - languageName: node - linkType: hard - -"tiged@npm:^2.12.6": - version: 2.12.7 - resolution: "tiged@npm:2.12.7" - dependencies: - colorette: "npm:1.2.1" - enquirer: "npm:2.3.6" - fs-extra: "npm:10.1.0" - fuzzysearch: "npm:1.0.3" - https-proxy-agent: "npm:5.0.0" - mri: "npm:1.1.6" - rimraf: "npm:3.0.2" - tar: "npm:^6.1.11" - tiny-glob: "npm:0.2.8" - bin: - degit: bin.js - tiged: bin.js - checksum: 10c0/925ef35312b956a88d87a707193288deb99ef4dcb9097e0b53f3f6e6bd6cfc20d5aff7d32848b67892d8092923936d53b9473643c9d715c3b757a105a4f0191a - languageName: node - linkType: hard - -"tiny-glob@npm:0.2.8": - version: 0.2.8 - resolution: "tiny-glob@npm:0.2.8" - dependencies: - globalyzer: "npm:0.1.0" - globrex: "npm:^0.1.2" - checksum: 10c0/3055a11a94e35a26630262a80404bc81e987d567824ea51dc6d937a8ea5b0e87088ac0c4acc3edcb34b94f074d103b78523293ccc737abc8cc5024eb227a9790 - languageName: node - linkType: hard - -"type-fest@npm:^0.21.3": - version: 0.21.3 - resolution: "type-fest@npm:0.21.3" - checksum: 10c0/902bd57bfa30d51d4779b641c2bc403cdf1371fb9c91d3c058b0133694fcfdb817aef07a47f40faf79039eecbaa39ee9d3c532deff244f3a19ce68cea71a61e8 - languageName: node - linkType: hard - -"undici-types@npm:~6.19.2": - version: 6.19.8 - resolution: "undici-types@npm:6.19.8" - checksum: 10c0/078afa5990fba110f6824823ace86073b4638f1d5112ee26e790155f481f2a868cc3e0615505b6f4282bdf74a3d8caad715fd809e870c2bb0704e3ea6082f344 - languageName: node - linkType: hard - -"universalify@npm:^2.0.0": - version: 2.0.1 - resolution: "universalify@npm:2.0.1" - checksum: 10c0/73e8ee3809041ca8b818efb141801a1004e3fc0002727f1531f4de613ea281b494a40909596dae4a042a4fb6cd385af5d4db2e137b1362e0e91384b828effd3a - languageName: node - linkType: hard - -"wrap-ansi@npm:^6.2.0": - version: 6.2.0 - resolution: "wrap-ansi@npm:6.2.0" - dependencies: - ansi-styles: "npm:^4.0.0" - string-width: "npm:^4.1.0" - strip-ansi: "npm:^6.0.0" - checksum: 10c0/baad244e6e33335ea24e86e51868fe6823626e3a3c88d9a6674642afff1d34d9a154c917e74af8d845fd25d170c4ea9cf69a47133c3f3656e1252b3d462d9f6c - languageName: node - linkType: hard - -"wrappy@npm:1": - version: 1.0.2 - resolution: "wrappy@npm:1.0.2" - checksum: 10c0/56fece1a4018c6a6c8e28fbc88c87e0fbf4ea8fd64fc6c63b18f4acc4bd13e0ad2515189786dd2c30d3eec9663d70f4ecf699330002f8ccb547e4a18231fc9f0 - languageName: node - linkType: hard - -"yallist@npm:^4.0.0": - version: 4.0.0 - resolution: "yallist@npm:4.0.0" - checksum: 10c0/2286b5e8dbfe22204ab66e2ef5cc9bbb1e55dfc873bbe0d568aa943eb255d131890dfd5bf243637273d31119b870f49c18fcde2c6ffbb7a7a092b870dc90625a - languageName: node - linkType: hard - -"yoctocolors@npm:^2.1.1": - version: 2.1.1 - resolution: "yoctocolors@npm:2.1.1" - checksum: 10c0/85903f7fa96f1c70badee94789fade709f9d83dab2ec92753d612d84fcea6d34c772337a9f8914c6bed2f5fc03a428ac5d893e76fab636da5f1236ab725486d0 - languageName: node - linkType: hard diff --git a/barretenberg/cpp/CMakeLists.txt b/barretenberg/cpp/CMakeLists.txt index 443ec3af6781..9448d66a9518 100644 --- 
a/barretenberg/cpp/CMakeLists.txt +++ b/barretenberg/cpp/CMakeLists.txt @@ -6,7 +6,7 @@ cmake_minimum_required(VERSION 3.24 FATAL_ERROR) project( Barretenberg DESCRIPTION "BN254 elliptic curve library, and PLONK SNARK prover" - VERSION 0.65.2 # x-release-please-version + VERSION 0.66.0 # x-release-please-version LANGUAGES CXX C ) # Insert version into `bb` config file diff --git a/barretenberg/cpp/pil/avm/constants_gen.pil b/barretenberg/cpp/pil/avm/constants_gen.pil index 0aa1c5193557..942924d0d9e9 100644 --- a/barretenberg/cpp/pil/avm/constants_gen.pil +++ b/barretenberg/cpp/pil/avm/constants_gen.pil @@ -21,7 +21,6 @@ namespace constants(256); pol MEM_TAG_U128 = 6; pol SENDER_KERNEL_INPUTS_COL_OFFSET = 0; pol ADDRESS_KERNEL_INPUTS_COL_OFFSET = 1; - pol FUNCTION_SELECTOR_KERNEL_INPUTS_COL_OFFSET = 2; pol IS_STATIC_CALL_KERNEL_INPUTS_COL_OFFSET = 3; pol CHAIN_ID_KERNEL_INPUTS_COL_OFFSET = 4; pol VERSION_KERNEL_INPUTS_COL_OFFSET = 5; diff --git a/barretenberg/cpp/pil/avm/kernel.pil b/barretenberg/cpp/pil/avm/kernel.pil index 03da4e3d1f7b..ee7579ab7be9 100644 --- a/barretenberg/cpp/pil/avm/kernel.pil +++ b/barretenberg/cpp/pil/avm/kernel.pil @@ -95,9 +95,6 @@ namespace main(256); #[SENDER_KERNEL] sel_op_sender * (kernel_in_offset - constants.SENDER_KERNEL_INPUTS_COL_OFFSET) = 0; - #[FUNCTION_SELECTOR_KERNEL] - sel_op_function_selector * (kernel_in_offset - constants.FUNCTION_SELECTOR_KERNEL_INPUTS_COL_OFFSET) = 0; - #[FEE_TRANSACTION_FEE_KERNEL] sel_op_transaction_fee * (kernel_in_offset - constants.TRANSACTION_FEE_KERNEL_INPUTS_COL_OFFSET) = 0; @@ -172,7 +169,7 @@ namespace main(256); //===== LOOKUPS INTO THE PUBLIC INPUTS =========================================== pol KERNEL_INPUT_SELECTORS = sel_op_address + sel_op_sender - + sel_op_function_selector + sel_op_transaction_fee + sel_op_chain_id + + sel_op_transaction_fee + sel_op_chain_id + sel_op_version + sel_op_block_number + sel_op_timestamp + sel_op_fee_per_l2_gas + sel_op_fee_per_da_gas + sel_op_is_static_call; // Ensure that only one kernel lookup is active when the kernel_in_offset is active diff --git a/barretenberg/cpp/pil/avm/main.pil b/barretenberg/cpp/pil/avm/main.pil index d264a8f8ce29..319216212164 100644 --- a/barretenberg/cpp/pil/avm/main.pil +++ b/barretenberg/cpp/pil/avm/main.pil @@ -61,7 +61,6 @@ namespace main(256); // CONTEXT - ENVIRONMENT pol commit sel_op_address; pol commit sel_op_sender; - pol commit sel_op_function_selector; pol commit sel_op_transaction_fee; pol commit sel_op_is_static_call; @@ -234,7 +233,6 @@ namespace main(256); // opcode decomposition. 
    sel_op_address * (1 - sel_op_address) = 0;
    sel_op_sender * (1 - sel_op_sender) = 0;
-   sel_op_function_selector * (1 - sel_op_function_selector) = 0;
    sel_op_transaction_fee * (1 - sel_op_transaction_fee) = 0;
    sel_op_chain_id * (1 - sel_op_chain_id) = 0;
    sel_op_version * (1 - sel_op_version) = 0;
@@ -445,7 +443,7 @@ namespace main(256);
        + sel_op_ecadd + sel_op_msm;
    pol SEL_ALL_MEMORY = sel_op_mov + sel_op_set;
    pol KERNEL_INPUT_SELECTORS = sel_op_address + sel_op_sender
-       + sel_op_function_selector + sel_op_transaction_fee + sel_op_chain_id
+       + sel_op_transaction_fee + sel_op_chain_id
        + sel_op_version + sel_op_block_number + sel_op_timestamp
        + sel_op_fee_per_l2_gas + sel_op_fee_per_da_gas + sel_op_is_static_call;
    pol KERNEL_OUTPUT_SELECTORS = sel_op_note_hash_exists + sel_op_emit_note_hash + sel_op_nullifier_exists
diff --git a/barretenberg/cpp/src/barretenberg/bb/api.hpp b/barretenberg/cpp/src/barretenberg/bb/api.hpp
index f33568f18699..ab1a3628a4fb 100644
--- a/barretenberg/cpp/src/barretenberg/bb/api.hpp
+++ b/barretenberg/cpp/src/barretenberg/bb/api.hpp
@@ -8,6 +8,7 @@ class API {
     struct Flags {
         std::optional<std::string> output_type; // bytes, fields, bytes_and_fields, fields_msgpack
         std::optional<std::string> input_type;  // compiletime_stack, runtime_stack
+        bool no_auto_verify; // TODO(https://github.com/AztecProtocol/barretenberg/issues/1101): remove
     };

     virtual void prove(const Flags& flags,
diff --git a/barretenberg/cpp/src/barretenberg/bb/api_client_ivc.hpp b/barretenberg/cpp/src/barretenberg/bb/api_client_ivc.hpp
index 37b251bd8cf9..8576f918d40f 100644
--- a/barretenberg/cpp/src/barretenberg/bb/api_client_ivc.hpp
+++ b/barretenberg/cpp/src/barretenberg/bb/api_client_ivc.hpp
@@ -141,8 +141,7 @@ class ClientIVCAPI : public API {
         ClientIVC ivc{ { E2E_FULL_TEST_STRUCTURE }, /*auto_verify_mode=*/true };

         // Accumulate the entire program stack into the IVC
-        // TODO(https://github.com/AztecProtocol/barretenberg/issues/1116): remove manual setting of is_kernel once
-        // databus has been integrated into noir kernel programs
+        // TODO(https://github.com/AztecProtocol/barretenberg/issues/1116): remove manual setting of is_kernel
         bool is_kernel = false;
         for (Program& program : folding_stack) {
             // Construct a bberg circuit from the acir representation then accumulate it into the IVC
@@ -163,6 +162,47 @@ class ClientIVCAPI : public API {
         return ivc;
     };

+    static ClientIVC _accumulate_without_auto_verify(std::vector<acir_format::AcirProgram>& folding_stack)
+    {
+        using Builder = MegaCircuitBuilder;
+        using Program = acir_format::AcirProgram;
+
+        using namespace acir_format;
+
+        // TODO(https://github.com/AztecProtocol/barretenberg/issues/1163) set these dynamically
+        init_bn254_crs(1 << 20);
+        init_grumpkin_crs(1 << 15);
+
+        // TODO(#7371) dedupe this with the rest of the similar code
+        // TODO(https://github.com/AztecProtocol/barretenberg/issues/1101): remove use of auto_verify_mode
+        ClientIVC ivc{ { E2E_FULL_TEST_STRUCTURE }, /*auto_verify_mode=*/false };
+
+        // Accumulate the entire program stack into the IVC
+        // TODO(https://github.com/AztecProtocol/barretenberg/issues/1116): remove manual setting of is_kernel once
+        // databus has been integrated into noir kernel programs
+        bool is_kernel = false;
+        for (Program& program : folding_stack) {
+
+            Builder circuit;
+
+            is_kernel = !program.constraints.ivc_recursion_constraints.empty();
+            if (is_kernel) {
+                vinfo("Accumulating KERNEL.");
+                circuit = create_kernel_circuit(program.constraints, ivc, program.witness);
+            } else {
+                vinfo("Accumulating APP.");
APP."); + circuit = create_circuit( + program.constraints, /*recursive=*/false, 0, program.witness, false, ivc.goblin.op_queue); + } + + // Do one step of ivc accumulator or, if there is only one circuit in the stack, prove that circuit. In this + // case, no work is added to the Goblin opqueue, but VM proofs for trivial inputs are produced. + ivc.accumulate(circuit, /*one_circuit=*/folding_stack.size() == 1); + } + + return ivc; + }; + public: void prove(const API::Flags& flags, const std::filesystem::path& bytecode_path, @@ -179,7 +219,19 @@ class ClientIVCAPI : public API { std::vector folding_stack = _build_folding_stack(*flags.input_type, bytecode_path, witness_path); - ClientIVC ivc = _accumulate(folding_stack); + + // TODO(https://github.com/AztecProtocol/barretenberg/issues/1163) set these dynamically + init_bn254_crs(1 << 20); + init_grumpkin_crs(1 << 15); + + ClientIVC ivc; + if (flags.no_auto_verify) { + vinfo("performing accumulation WITHOUT auto-verify"); + ivc = _accumulate_without_auto_verify(folding_stack); + } else { + vinfo("performing accumulation with auto-verify"); + ivc = _accumulate(folding_stack); + } ClientIVC::Proof proof = ivc.prove(); // Write the proof and verification keys into the working directory in 'binary' format (in practice it seems diff --git a/barretenberg/cpp/src/barretenberg/bb/main.cpp b/barretenberg/cpp/src/barretenberg/bb/main.cpp index 00b0b8a68e06..a1e2f0ff3c47 100644 --- a/barretenberg/cpp/src/barretenberg/bb/main.cpp +++ b/barretenberg/cpp/src/barretenberg/bb/main.cpp @@ -9,6 +9,7 @@ #include "barretenberg/constants.hpp" #include "barretenberg/dsl/acir_format/acir_format.hpp" #include "barretenberg/dsl/acir_format/acir_to_constraint_buf.hpp" +#include "barretenberg/dsl/acir_format/ivc_recursion_constraint.hpp" #include "barretenberg/dsl/acir_format/proof_surgeon.hpp" #include "barretenberg/dsl/acir_proofs/acir_composer.hpp" #include "barretenberg/dsl/acir_proofs/honk_contract.hpp" @@ -586,28 +587,27 @@ void vk_as_fields(const std::string& vk_path, const std::string& output_path) * Communication: * - Filesystem: The proof and vk are written to the paths output_path/proof and output_path/{vk, vk_fields.json} * - * @param bytecode_path Path to the file containing the serialised bytecode - * @param calldata_path Path to the file containing the serialised calldata (could be empty) * @param public_inputs_path Path to the file containing the serialised avm public inputs * @param hints_path Path to the file containing the serialised avm circuit hints * @param output_path Path (directory) to write the output proof and verification keys */ -void avm_prove(const std::filesystem::path& calldata_path, - const std::filesystem::path& public_inputs_path, +void avm_prove(const std::filesystem::path& public_inputs_path, const std::filesystem::path& hints_path, const std::filesystem::path& output_path) { - std::vector const calldata = many_from_buffer(read_file(calldata_path)); - auto const avm_new_public_inputs = AvmPublicInputs::from(read_file(public_inputs_path)); - auto const avm_hints = bb::avm_trace::ExecutionHints::from(read_file(hints_path)); + + const auto avm_public_inputs = AvmPublicInputs::from(read_file(public_inputs_path)); + const auto avm_hints = bb::avm_trace::ExecutionHints::from(read_file(hints_path)); // Using [0] is fine now for the top-level call, but we might need to index by address in future vinfo("bytecode size: ", avm_hints.all_contract_bytecode[0].bytecode.size()); - vinfo("calldata size: ", calldata.size()); - 
vinfo("hints.storage_value_hints size: ", avm_hints.storage_value_hints.size()); - vinfo("hints.note_hash_exists_hints size: ", avm_hints.note_hash_exists_hints.size()); - vinfo("hints.nullifier_exists_hints size: ", avm_hints.nullifier_exists_hints.size()); - vinfo("hints.l1_to_l2_message_exists_hints size: ", avm_hints.l1_to_l2_message_exists_hints.size()); + vinfo("hints.storage_read_hints size: ", avm_hints.storage_read_hints.size()); + vinfo("hints.storage_write_hints size: ", avm_hints.storage_write_hints.size()); + vinfo("hints.nullifier_read_hints size: ", avm_hints.nullifier_read_hints.size()); + vinfo("hints.nullifier_write_hints size: ", avm_hints.nullifier_write_hints.size()); + vinfo("hints.note_hash_read_hints size: ", avm_hints.note_hash_read_hints.size()); + vinfo("hints.note_hash_write_hints size: ", avm_hints.note_hash_write_hints.size()); + vinfo("hints.l1_to_l2_message_read_hints size: ", avm_hints.l1_to_l2_message_read_hints.size()); vinfo("hints.externalcall_hints size: ", avm_hints.externalcall_hints.size()); vinfo("hints.contract_instance_hints size: ", avm_hints.contract_instance_hints.size()); vinfo("hints.contract_bytecode_hints size: ", avm_hints.all_contract_bytecode.size()); @@ -617,7 +617,7 @@ void avm_prove(const std::filesystem::path& calldata_path, // Prove execution and return vk auto const [verification_key, proof] = - AVM_TRACK_TIME_V("prove/all", avm_trace::Execution::prove(calldata, avm_new_public_inputs, avm_hints)); + AVM_TRACK_TIME_V("prove/all", avm_trace::Execution::prove(avm_public_inputs, avm_hints)); std::vector vk_as_fields = verification_key.to_field_elements(); @@ -833,6 +833,62 @@ void write_vk_honk(const std::string& bytecodePath, const std::string& outputPat } } +/** + * @brief Compute and write to file a MegaHonk VK for a circuit to be accumulated in the IVC + * @note This method differes from write_vk_honk in that it handles kernel circuits which require special + * treatment (i.e. construction of mock IVC state to correctly complete the kernel logic). 
+ * + * @param bytecodePath + * @param outputPath + */ +void write_vk_for_ivc(const std::string& bytecodePath, const std::string& outputPath) +{ + using Builder = ClientIVC::ClientCircuit; + using Prover = ClientIVC::MegaProver; + using DeciderProvingKey = ClientIVC::DeciderProvingKey; + using VerificationKey = ClientIVC::MegaVerificationKey; + + // TODO(https://github.com/AztecProtocol/barretenberg/issues/1163) set these dynamically + init_bn254_crs(1 << 20); + init_grumpkin_crs(1 << 15); + + auto constraints = get_constraint_system(bytecodePath, /*honk_recursion=*/false); + acir_format::WitnessVector witness = {}; + + TraceSettings trace_settings{ E2E_FULL_TEST_STRUCTURE }; + + // The presence of ivc recursion constraints determines whether or not the program is a kernel + bool is_kernel = !constraints.ivc_recursion_constraints.empty(); + + Builder builder; + if (is_kernel) { + // Create a mock IVC instance based on the IVC recursion constraints in the kernel program + ClientIVC mock_ivc = create_mock_ivc_from_constraints(constraints.ivc_recursion_constraints, trace_settings); + builder = acir_format::create_kernel_circuit(constraints, mock_ivc, witness); + } else { + builder = acir_format::create_circuit<Builder>( + constraints, /*recursive=*/false, 0, witness, /*honk_recursion=*/false); + } + // Add public inputs corresponding to pairing point accumulator + builder.add_pairing_point_accumulator(stdlib::recursion::init_default_agg_obj_indices(builder)); + + // Construct the verification key via the prover-constructed proving key with the proper trace settings + auto proving_key = std::make_shared<DeciderProvingKey>(builder, trace_settings); + Prover prover{ proving_key }; + init_bn254_crs(prover.proving_key->proving_key.circuit_size); + VerificationKey vk(prover.proving_key->proving_key); + + // Write the VK to file as a buffer + auto serialized_vk = to_buffer(vk); + if (outputPath == "-") { + writeRawBytesToStdout(serialized_vk); + vinfo("vk written to stdout"); + } else { + write_file(outputPath, serialized_vk); + vinfo("vk written to: ", outputPath); + } +} + /** * @brief Write a toml file containing recursive verifier inputs for a given program + witness * @@ -1073,7 +1129,8 @@ int main(int argc, char* argv[]) const API::Flags flags = [&args]() { return API::Flags{ .output_type = get_option(args, "--output_type", "fields_msgpack"), - .input_type = get_option(args, "--input_type", "compiletime_stack") }; + .input_type = get_option(args, "--input_type", "compiletime_stack"), + .no_auto_verify = flag_present(args, "--no_auto_verify") }; }(); const std::string command = args[0]; @@ -1185,7 +1242,6 @@ int main(int argc, char* argv[]) write_recursion_inputs_honk(bytecode_path, witness_path, output_path, recursive); #ifndef DISABLE_AZTEC_VM } else if (command == "avm_prove") { - std::filesystem::path avm_calldata_path = get_option(args, "--avm-calldata", "./target/avm_calldata.bin"); std::filesystem::path avm_public_inputs_path = get_option(args, "--avm-public-inputs", "./target/avm_public_inputs.bin"); std::filesystem::path avm_hints_path = get_option(args, "--avm-hints", "./target/avm_hints.bin"); @@ -1193,7 +1249,7 @@ int main(int argc, char* argv[]) std::filesystem::path output_path = get_option(args, "-o", "./proofs"); extern std::filesystem::path avm_dump_trace_path; avm_dump_trace_path = get_option(args, "--avm-dump-trace", ""); - avm_prove(avm_calldata_path, avm_public_inputs_path, avm_hints_path, output_path); + avm_prove(avm_public_inputs_path, avm_hints_path, output_path); } else if (command ==
"avm_verify") { return avm_verify(proof_path, vk_path) ? 0 : 1; #endif @@ -1227,6 +1283,9 @@ int main(int argc, char* argv[]) } else if (command == "write_vk_mega_honk") { std::string output_path = get_option(args, "-o", "./target/vk"); write_vk_honk(bytecode_path, output_path, recursive); + } else if (command == "write_vk_for_ivc") { + std::string output_path = get_option(args, "-o", "./target/vk"); + write_vk_for_ivc(bytecode_path, output_path); } else if (command == "proof_as_fields_honk") { std::string output_path = get_option(args, "-o", proof_path + "_fields.json"); proof_as_fields_honk(proof_path, output_path); diff --git a/barretenberg/cpp/src/barretenberg/commitment_schemes/commitment_key.hpp b/barretenberg/cpp/src/barretenberg/commitment_schemes/commitment_key.hpp index 6440c016a627..81ef54a2b9da 100644 --- a/barretenberg/cpp/src/barretenberg/commitment_schemes/commitment_key.hpp +++ b/barretenberg/cpp/src/barretenberg/commitment_schemes/commitment_key.hpp @@ -85,7 +85,7 @@ template class CommitmentKey { */ Commitment commit(PolynomialSpan polynomial) { - PROFILE_THIS(); + PROFILE_THIS_NAME("commit"); // We must have a power-of-2 SRS points *after* subtracting by start_index. size_t dyadic_poly_size = numeric::round_up_power_2(polynomial.size()); // Because pippenger prefers a power-of-2 size, we must choose a starting index for the points so that we don't @@ -133,7 +133,7 @@ template class CommitmentKey { */ Commitment commit_sparse(PolynomialSpan polynomial) { - PROFILE_THIS(); + PROFILE_THIS_NAME("commit_sparse"); const size_t poly_size = polynomial.size(); ASSERT(polynomial.end_index() <= srs->get_monomial_size()); @@ -204,21 +204,24 @@ template class CommitmentKey { * @return Commitment */ Commitment commit_structured(PolynomialSpan polynomial, - const std::vector>& active_ranges) + const std::vector>& active_ranges, + size_t final_active_wire_idx = 0) { - BB_OP_COUNT_TIME(); + PROFILE_THIS_NAME("commit_structured"); ASSERT(polynomial.end_index() <= srs->get_monomial_size()); // Percentage of nonzero coefficients beyond which we resort to the conventional commit method constexpr size_t NONZERO_THRESHOLD = 75; + // Compute the number of non-zero coefficients in the polynomial size_t total_num_scalars = 0; - for (const auto& range : active_ranges) { - total_num_scalars += range.second - range.first; + for (const auto& [first, second] : active_ranges) { + total_num_scalars += second - first; } // Compute "active" percentage of polynomial; resort to standard commit if appropriate - size_t percentage_nonzero = total_num_scalars * 100 / polynomial.size(); + size_t polynomial_size = final_active_wire_idx != 0 ? final_active_wire_idx : polynomial.size(); + size_t percentage_nonzero = total_num_scalars * 100 / polynomial_size; if (percentage_nonzero > NONZERO_THRESHOLD) { return commit(polynomial); } @@ -259,9 +262,10 @@ template class CommitmentKey { * @return Commitment */ Commitment commit_structured_with_nonzero_complement(PolynomialSpan polynomial, - const std::vector>& active_ranges) + const std::vector>& active_ranges, + size_t final_active_wire_idx = 0) { - BB_OP_COUNT_TIME(); + PROFILE_THIS_NAME("commit_structured_with_nonzero_complement"); ASSERT(polynomial.end_index() <= srs->get_monomial_size()); using BatchedAddition = BatchedAffineAddition; @@ -273,20 +277,21 @@ template class CommitmentKey { // Note: the range from the end of the last active range to the end of the polynomial is excluded from the // complement since the polynomial is assumed to be zero there. 
std::vector<std::pair<size_t, size_t>> active_ranges_complement; + // Also compute total number of scalars in the constant regions + size_t total_num_complement_scalars = 0; for (size_t i = 0; i < active_ranges.size() - 1; ++i) { const size_t start = active_ranges[i].second; const size_t end = active_ranges[i + 1].first; - active_ranges_complement.emplace_back(start, end); - } - - // Compute the total number of scalars in the constant regions - size_t total_num_complement_scalars = 0; - for (const auto& range : active_ranges_complement) { - total_num_complement_scalars += range.second - range.first; + if (end > start) { + active_ranges_complement.emplace_back(start, end); + total_num_complement_scalars += end - start; + } } + size_t polynomial_size = final_active_wire_idx != 0 ? final_active_wire_idx : polynomial.size(); // Compute percentage of polynomial comprised of constant blocks; resort to standard commit if appropriate - size_t percentage_constant = total_num_complement_scalars * 100 / polynomial.size(); + size_t percentage_constant = total_num_complement_scalars * 100 / polynomial_size; + if (percentage_constant < CONSTANT_THRESHOLD) { return commit(polynomial); } @@ -299,12 +304,11 @@ template <class Curve> class CommitmentKey { // TODO(https://github.com/AztecProtocol/barretenberg/issues/1131): Peak memory usage could be improved by // performing this copy and the subsequent summation as a precomputation prior to constructing the point table. std::vector points; - points.reserve(2 * total_num_complement_scalars); - for (const auto& range : active_ranges_complement) { - const size_t start = 2 * range.first; - const size_t end = 2 * range.second; - for (size_t i = start; i < end; i += 2) { - points.emplace_back(point_table[i]); + + points.reserve(total_num_complement_scalars); + for (const auto& [start, end] : active_ranges_complement) { + for (size_t i = start; i < end; i++) { + points.emplace_back(point_table[2 * i]); } } @@ -313,17 +317,16 @@ template <class Curve> class CommitmentKey { std::vector<Fr> unique_scalars; std::vector<size_t> sequence_counts; for (const auto& range : active_ranges_complement) { - if (range.second - range.first > 0) { // only ranges with nonzero length - unique_scalars.emplace_back(polynomial.span[range.first]); - sequence_counts.emplace_back(range.second - range.first); - } + unique_scalars.emplace_back(polynomial.span[range.first]); + sequence_counts.emplace_back(range.second - range.first); } // Reduce each sequence to a single point auto reduced_points = BatchedAddition::add_in_place(points, sequence_counts); // Compute the full commitment as the sum of the "active" region commitment and the constant region contribution - Commitment result = commit_structured(polynomial, active_ranges); + Commitment result = commit_structured(polynomial, active_ranges, final_active_wire_idx); + for (auto [scalar, point] : zip_view(unique_scalars, reduced_points)) { result = result + point * scalar; } diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_to_constraint_buf.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_to_constraint_buf.cpp index a0d24e70e0b0..cae59733e3ba 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_to_constraint_buf.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_to_constraint_buf.cpp @@ -656,6 +656,11 @@ void handle_blackbox_func_call(Program::Opcode::BlackBoxFuncCall const& arg, af.honk_recursion_constraints.push_back(c); af.original_opcode_indices.honk_recursion_constraints.push_back(opcode_index); break; + case OINK: + case PG: + 
af.ivc_recursion_constraints.push_back(c); + af.original_opcode_indices.ivc_recursion_constraints.push_back(opcode_index); + break; case AVM: af.avm_recursion_constraints.push_back(c); af.original_opcode_indices.avm_recursion_constraints.push_back(opcode_index); break; diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/block_constraint.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/block_constraint.cpp index 0549a1bbeedb..d6bf0d93323d 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/block_constraint.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/block_constraint.cpp @@ -86,6 +86,8 @@ void create_block_constraints(MegaCircuitBuilder& builder, process_call_data_operations(builder, constraint, has_valid_witness_assignments, init); // The presence of calldata is used to indicate that the present circuit is a kernel. This is needed in the // databus consistency checks to indicate that the corresponding return data belongs to a kernel (else an app). + // TODO(https://github.com/AztecProtocol/barretenberg/issues/1165): is_kernel must be known prior to this stage + // since we must determine whether to use create_circuit or create_kernel_circuit. Resolve. builder.databus_propagation_data.is_kernel = true; } break; case BlockType::ReturnData: { diff --git a/barretenberg/cpp/src/barretenberg/ecc/batched_affine_addition/batched_affine_addition.cpp b/barretenberg/cpp/src/barretenberg/ecc/batched_affine_addition/batched_affine_addition.cpp index 447e6039c559..058bce377388 100644 --- a/barretenberg/cpp/src/barretenberg/ecc/batched_affine_addition/batched_affine_addition.cpp +++ b/barretenberg/cpp/src/barretenberg/ecc/batched_affine_addition/batched_affine_addition.cpp @@ -10,6 +10,7 @@ template <typename Curve> std::vector<typename BatchedAffineAddition<Curve>::G1> BatchedAffineAddition<Curve>::add_in_place( const std::span<G1>& points, const std::vector<size_t>& sequence_counts) { + PROFILE_THIS_NAME("BatchedAffineAddition::add_in_place"); // Instantiate scratch space for point addition denominators and their calculation std::vector scratch_space_vector(points.size()); std::span scratch_space(scratch_space_vector); diff --git a/barretenberg/cpp/src/barretenberg/ecc/fields/field_impl.hpp b/barretenberg/cpp/src/barretenberg/ecc/fields/field_impl.hpp index 0ed0c481dec0..5a747150eb15 100644 --- a/barretenberg/cpp/src/barretenberg/ecc/fields/field_impl.hpp +++ b/barretenberg/cpp/src/barretenberg/ecc/fields/field_impl.hpp @@ -404,9 +404,10 @@ template <class T> void field<T>::batch_invert(field* coeffs, const size_t n) no batch_invert(std::span{ coeffs, n }); } +// TODO(https://github.com/AztecProtocol/barretenberg/issues/1166) template <class T> void field<T>::batch_invert(std::span<field> coeffs) noexcept { - BB_OP_COUNT_TRACK_NAME("fr::batch_invert"); + PROFILE_THIS_NAME("fr::batch_invert"); const size_t n = coeffs.size(); auto temporaries_ptr = std::static_pointer_cast(get_mem_slab(n * sizeof(field))); diff --git a/barretenberg/cpp/src/barretenberg/flavor/flavor.hpp b/barretenberg/cpp/src/barretenberg/flavor/flavor.hpp index b9230e4675ab..e6fe7ef2aa36 100644 --- a/barretenberg/cpp/src/barretenberg/flavor/flavor.hpp +++ b/barretenberg/cpp/src/barretenberg/flavor/flavor.hpp @@ -123,7 +123,7 @@ template class ProvingKey_ { // folded element by element.
std::vector<FF> public_inputs; - // Ranges of the form [start, end) over which the execution trace is "active" + // Ranges of the form [start, end) where witnesses have non-zero values (hence the execution trace is "active") std::vector<std::pair<size_t, size_t>> active_block_ranges; ProvingKey_() = default; diff --git a/barretenberg/cpp/src/barretenberg/plonk_honk_shared/execution_trace/execution_trace_usage_tracker.hpp b/barretenberg/cpp/src/barretenberg/plonk_honk_shared/execution_trace/execution_trace_usage_tracker.hpp index 683a20a3d635..91837b267dda 100644 --- a/barretenberg/cpp/src/barretenberg/plonk_honk_shared/execution_trace/execution_trace_usage_tracker.hpp +++ b/barretenberg/cpp/src/barretenberg/plonk_honk_shared/execution_trace/execution_trace_usage_tracker.hpp @@ -22,6 +22,7 @@ struct ExecutionTraceUsageTracker { MegaTraceFixedBlockSizes fixed_sizes; // fixed size of each block prescribed by structuring // Store active ranges based on the most current accumulator and those based on all but the most recently // accumulated circuit. The former is needed for the combiner calculation and the latter for the perturbator. + // The ranges cover all areas in the trace where relations have nontrivial values. std::vector active_ranges; std::vector previous_active_ranges; diff --git a/barretenberg/cpp/src/barretenberg/plonk_honk_shared/library/grand_product_library.hpp b/barretenberg/cpp/src/barretenberg/plonk_honk_shared/library/grand_product_library.hpp index 58b1d35630a5..ee7eeb3c4379 100644 --- a/barretenberg/cpp/src/barretenberg/plonk_honk_shared/library/grand_product_library.hpp +++ b/barretenberg/cpp/src/barretenberg/plonk_honk_shared/library/grand_product_library.hpp @@ -56,8 +56,11 @@ namespace bb { template <typename Flavor, typename GrandProdRelation> void compute_grand_product(typename Flavor::ProverPolynomials& full_polynomials, bb::RelationParameters<typename Flavor::FF>& relation_parameters, - size_t size_override = 0) + size_t size_override = 0, + std::vector<std::pair<size_t, size_t>> active_block_ranges = {}) { + PROFILE_THIS_NAME("compute_grand_product"); + using FF = typename Flavor::FF; using Polynomial = typename Flavor::Polynomial; using Accumulator = std::tuple_element_t<0, typename GrandProdRelation::SumcheckArrayOfValuesOverSubrelations>; @@ -84,22 +87,34 @@ void compute_grand_product(typename Flavor::ProverPolynomials& full_polynomials, Polynomial numerator{ domain_size, domain_size }; Polynomial denominator{ domain_size, domain_size }; + auto check_is_active = [&](size_t idx) { + if (active_block_ranges.empty()) { + return true; + } + return std::any_of(active_block_ranges.begin(), active_block_ranges.end(), [idx](const auto& range) { + return idx >= range.first && idx < range.second; + }); + }; + // Step (1) // Populate `numerator` and `denominator` with the algebra described by Relation + FF gamma_fourth = relation_parameters.gamma.pow(4); parallel_for(num_threads, [&](size_t thread_idx) { - typename Flavor::AllValues evaluations; - // TODO(https://github.com/AztecProtocol/barretenberg/issues/940): construction of evaluations is equivalent to - // calling get_row which creates full copies. avoid? + typename Flavor::AllValues row; const size_t start = idx_bounds[thread_idx].first; const size_t end = idx_bounds[thread_idx].second; for (size_t i = start; i < end; ++i) { - for (auto [eval, full_poly] : zip_view(evaluations.get_all(), full_polynomials.get_all())) { - eval = full_poly.size() > i ? full_poly[i] : 0; + if (check_is_active(i)) { + // TODO(https://github.com/AztecProtocol/barretenberg/issues/940): consider avoiding get_row if possible.
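+ // On active rows the usual grand product numerator/denominator terms are computed; on inactive rows both
+ // are set to gamma_fourth below, so their ratio is 1 and the running grand product is unchanged across the
+ // inactive region.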
+ row = full_polynomials.get_row(i); + numerator.at(i) = + GrandProdRelation::template compute_grand_product_numerator<Accumulator>(row, relation_parameters); + denominator.at(i) = GrandProdRelation::template compute_grand_product_denominator<Accumulator>( + row, relation_parameters); + } else { + numerator.at(i) = gamma_fourth; + denominator.at(i) = gamma_fourth; } - numerator.at(i) = GrandProdRelation::template compute_grand_product_numerator<Accumulator>( - evaluations, relation_parameters); - denominator.at(i) = GrandProdRelation::template compute_grand_product_denominator<Accumulator>( - evaluations, relation_parameters); } }); @@ -163,6 +178,7 @@ void compute_grand_product(typename Flavor::ProverPolynomials& full_polynomials, auto& grand_product_polynomial = GrandProdRelation::get_grand_product_polynomial(full_polynomials); // We have a 'virtual' 0 at the start (as this is a to-be-shifted polynomial) ASSERT(grand_product_polynomial.start_index() == 1); + parallel_for(num_threads, [&](size_t thread_idx) { const size_t start = idx_bounds[thread_idx].first; const size_t end = idx_bounds[thread_idx].second; diff --git a/barretenberg/cpp/src/barretenberg/relations/databus_lookup_relation.hpp b/barretenberg/cpp/src/barretenberg/relations/databus_lookup_relation.hpp index bb29d9bc8a90..d51062f991f0 100644 --- a/barretenberg/cpp/src/barretenberg/relations/databus_lookup_relation.hpp +++ b/barretenberg/cpp/src/barretenberg/relations/databus_lookup_relation.hpp @@ -3,6 +3,7 @@ #include #include "barretenberg/common/constexpr_utils.hpp" +#include "barretenberg/common/thread.hpp" #include "barretenberg/polynomials/univariate.hpp" #include "barretenberg/relations/relation_types.hpp" @@ -230,33 +231,44 @@ template class DatabusLookupRelationImpl { auto& relation_parameters, const size_t circuit_size) { + PROFILE_THIS_NAME("Databus::compute_logderivative_inverse"); auto& inverse_polynomial = BusData::inverses(polynomials); - bool is_read = false; - bool nonzero_read_count = false; - for (size_t i = 0; i < circuit_size; ++i) { - // Determine if the present row contains a databus operation - auto q_busread = polynomials.q_busread[i]; - if constexpr (bus_idx == 0) { // calldata - is_read = q_busread == 1 && polynomials.q_l[i] == 1; - nonzero_read_count = polynomials.calldata_read_counts[i] > 0; - } - if constexpr (bus_idx == 1) { // secondary_calldata - is_read = q_busread == 1 && polynomials.q_r[i] == 1; - nonzero_read_count = polynomials.secondary_calldata_read_counts[i] > 0; - } - if constexpr (bus_idx == 2) { // return data - is_read = q_busread == 1 && polynomials.q_o[i] == 1; - nonzero_read_count = polynomials.return_data_read_counts[i] > 0; - } - // We only compute the inverse if this row contains a read gate or data that has been read - if (is_read || nonzero_read_count) { - // TODO(https://github.com/AztecProtocol/barretenberg/issues/940): avoid get_row if possible. - auto row = polynomials.get_row(i); // Note: this is a copy. use sparingly!
- auto value = compute_read_term(row, relation_parameters) * - compute_write_term(row, relation_parameters); - inverse_polynomial.at(i) = value; + + size_t min_iterations_per_thread = 1 << 6; // min number of iterations for which we'll spin up a unique thread + size_t num_threads = bb::calculate_num_threads_pow2(circuit_size, min_iterations_per_thread); + size_t iterations_per_thread = circuit_size / num_threads; // actual iterations per thread + + parallel_for(num_threads, [&](size_t thread_idx) { + size_t start = thread_idx * iterations_per_thread; + size_t end = (thread_idx + 1) * iterations_per_thread; + bool is_read = false; + bool nonzero_read_count = false; + for (size_t i = start; i < end; ++i) { + // Determine if the present row contains a databus operation + auto q_busread = polynomials.q_busread[i]; + if constexpr (bus_idx == 0) { // calldata + is_read = q_busread == 1 && polynomials.q_l[i] == 1; + nonzero_read_count = polynomials.calldata_read_counts[i] > 0; + } + if constexpr (bus_idx == 1) { // secondary_calldata + is_read = q_busread == 1 && polynomials.q_r[i] == 1; + nonzero_read_count = polynomials.secondary_calldata_read_counts[i] > 0; + } + if constexpr (bus_idx == 2) { // return data + is_read = q_busread == 1 && polynomials.q_o[i] == 1; + nonzero_read_count = polynomials.return_data_read_counts[i] > 0; + } + // We only compute the inverse if this row contains a read gate or data that has been read + if (is_read || nonzero_read_count) { + // TODO(https://github.com/AztecProtocol/barretenberg/issues/940): avoid get_row if possible. + auto row = polynomials.get_row(i); // Note: this is a copy. use sparingly! + auto value = compute_read_term(row, relation_parameters) * + compute_write_term(row, relation_parameters); + inverse_polynomial.at(i) = value; + } } - } + }); + // Compute inverse polynomial I in place by inverting the product at each row // Note: zeroes are ignored as they are not used anyway FF::batch_invert(inverse_polynomial.coeffs()); @@ -299,8 +311,8 @@ template class DatabusLookupRelationImpl { constexpr size_t subrel_idx_1 = 2 * bus_idx; constexpr size_t subrel_idx_2 = 2 * bus_idx + 1; - // Establish the correctness of the polynomial of inverses I. Note: inverses is computed so that the value is 0 - // if !inverse_exists. Degree 3 (5) + // Establish the correctness of the polynomial of inverses I. Note: inverses is computed so that the value + // is 0 if !inverse_exists. Degree 3 (5) std::get<subrel_idx_1>(accumulator) += (read_term * write_term * inverses - inverse_exists) * scaling_factor; // Establish validity of the read.
Note: no scaling factor here since this constraint is enforced across the diff --git a/barretenberg/cpp/src/barretenberg/relations/logderiv_lookup_relation.hpp b/barretenberg/cpp/src/barretenberg/relations/logderiv_lookup_relation.hpp index afd1326a91da..dbd45cc2ed5c 100644 --- a/barretenberg/cpp/src/barretenberg/relations/logderiv_lookup_relation.hpp +++ b/barretenberg/cpp/src/barretenberg/relations/logderiv_lookup_relation.hpp @@ -3,6 +3,7 @@ #include #include "barretenberg/common/constexpr_utils.hpp" +#include "barretenberg/common/thread.hpp" #include "barretenberg/honk/proof_system/logderivative_library.hpp" #include "barretenberg/polynomials/univariate.hpp" #include "barretenberg/relations/relation_types.hpp" @@ -155,18 +156,28 @@ template class LogDerivLookupRelationImpl { auto& relation_parameters, const size_t circuit_size) { + PROFILE_THIS_NAME("Lookup::compute_logderivative_inverse"); auto& inverse_polynomial = get_inverse_polynomial(polynomials); - for (size_t i = 0; i < circuit_size; ++i) { - // We only compute the inverse if this row contains a lookup gate or data that has been looked up - if (polynomials.q_lookup.get(i) == 1 || polynomials.lookup_read_tags.get(i) == 1) { - // TODO(https://github.com/AztecProtocol/barretenberg/issues/940): avoid get_row if possible. - auto row = polynomials.get_row(i); // Note: this is a copy. use sparingly! - auto value = compute_read_term(row, relation_parameters) * - compute_write_term(row, relation_parameters); - inverse_polynomial.at(i) = value; + size_t min_iterations_per_thread = 1 << 6; // min number of iterations for which we'll spin up a unique thread + size_t num_threads = bb::calculate_num_threads_pow2(circuit_size, min_iterations_per_thread); + size_t iterations_per_thread = circuit_size / num_threads; // actual iterations per thread + + parallel_for(num_threads, [&](size_t thread_idx) { + size_t start = thread_idx * iterations_per_thread; + size_t end = (thread_idx + 1) * iterations_per_thread; + for (size_t i = start; i < end; ++i) { + // We only compute the inverse if this row contains a lookup gate or data that has been looked up + if (polynomials.q_lookup.get(i) == 1 || polynomials.lookup_read_tags.get(i) == 1) { + // TODO(https://github.com/AztecProtocol/barretenberg/issues/940): avoid get_row if possible. + auto row = polynomials.get_row(i); // Note: this is a copy. use sparingly! 
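+ // The read and write terms are combined here and inverted later in a single batch; each thread writes only
+ // its own disjoint [start, end) slice, so no synchronization is needed, and rows that never take this branch
+ // remain zero and are skipped by the batch inversion (zeroes are ignored).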
+ auto value = compute_read_term(row, relation_parameters) * + compute_write_term(row, relation_parameters); + inverse_polynomial.at(i) = value; + } } - } + }); + // Compute inverse polynomial I in place by inverting the product at each row FF::batch_invert(inverse_polynomial.coeffs()); }; diff --git a/barretenberg/cpp/src/barretenberg/stdlib/plonk_recursion/verifier/verifier.test.cpp b/barretenberg/cpp/src/barretenberg/stdlib/plonk_recursion/verifier/verifier.test.cpp index 02efc92c2cdf..c1772d50f7ce 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/plonk_recursion/verifier/verifier.test.cpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/plonk_recursion/verifier/verifier.test.cpp @@ -345,14 +345,8 @@ template class stdlib_verifier : public testing::Test { static void check_recursive_verification_circuit(OuterBuilder& outer_circuit, bool expected_result) { info("number of gates in recursive verification circuit = ", outer_circuit.get_estimated_num_finalized_gates()); - OuterComposer outer_composer; - auto prover = outer_composer.create_prover(outer_circuit); - auto verifier = outer_composer.create_verifier(outer_circuit); - auto proof = prover.construct_proof(); - auto result = verifier.verify_proof(proof); - // bool result = CircuitChecker::check(outer_circuit); + const bool result = CircuitChecker::check(outer_circuit); EXPECT_EQ(result, expected_result); - static_cast<void>(expected_result); auto g2_lines = srs::get_bn254_crs_factory()->get_verifier_crs()->get_precomputed_g2_lines(); EXPECT_EQ(check_pairing_point_accum_public_inputs(outer_circuit, g2_lines), true); } diff --git a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/mega_flavor.hpp b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/mega_flavor.hpp index f4129c363a37..24919feb2577 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/mega_flavor.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/mega_flavor.hpp @@ -492,6 +492,8 @@ class MegaFlavor { */ void compute_logderivative_inverses(const RelationParameters<FF>& relation_parameters) { + PROFILE_THIS_NAME("compute_logderivative_inverses"); + // Compute inverses for conventional lookups LogDerivLookupRelation<FF>::compute_logderivative_inverse( this->polynomials, relation_parameters, this->circuit_size); @@ -525,7 +527,7 @@ class MegaFlavor { // Compute permutation grand product polynomial compute_grand_product<MegaFlavor, UltraPermutationRelation<FF>>( - this->polynomials, relation_parameters, size_override); + this->polynomials, relation_parameters, size_override, this->active_block_ranges); } uint64_t estimate_memory() diff --git a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_flavor.hpp b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_flavor.hpp index 6ff45fb338d5..f34d0b487330 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_flavor.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_flavor.hpp @@ -328,7 +328,7 @@ class UltraFlavor { [[nodiscard]] size_t get_polynomial_size() const { return q_c.size(); } [[nodiscard]] AllValues get_row(const size_t row_idx) const { - PROFILE_THIS(); + PROFILE_THIS_NAME("UltraFlavor::get_row"); AllValues result; for (auto [result_field, polynomial] : zip_view(result.get_all(), get_all())) { result_field = polynomial[row_idx]; diff --git a/barretenberg/cpp/src/barretenberg/trace_to_polynomials/trace_to_polynomials.cpp b/barretenberg/cpp/src/barretenberg/trace_to_polynomials/trace_to_polynomials.cpp index f778a03c5033..39eccd6ef01a 100644 ---
a/barretenberg/cpp/src/barretenberg/trace_to_polynomials/trace_to_polynomials.cpp +++ b/barretenberg/cpp/src/barretenberg/trace_to_polynomials/trace_to_polynomials.cpp @@ -102,7 +102,7 @@ typename TraceToPolynomials::TraceData TraceToPolynomials::const auto block_size = static_cast(block.size()); // Save ranges over which the blocks are "active" for use in structured commitments - if constexpr (IsUltraFlavor) { + if constexpr (IsUltraFlavor) { // Mega and Ultra if (block.size() > 0) { proving_key.active_block_ranges.emplace_back(offset, offset + block.size()); } @@ -150,6 +150,7 @@ typename TraceToPolynomials::TraceData TraceToPolynomials::const // otherwise, the next block starts immediately following the previous one offset += block.get_fixed_size(is_structured); } + return trace_data; } diff --git a/barretenberg/cpp/src/barretenberg/ultra_honk/oink_prover.cpp b/barretenberg/cpp/src/barretenberg/ultra_honk/oink_prover.cpp index 111497e91759..7377c7b31c72 100644 --- a/barretenberg/cpp/src/barretenberg/ultra_honk/oink_prover.cpp +++ b/barretenberg/cpp/src/barretenberg/ultra_honk/oink_prover.cpp @@ -206,8 +206,8 @@ template void OinkProver::execute_log_derivative_ { PROFILE_THIS_NAME("COMMIT::lookup_inverses"); - witness_commitments.lookup_inverses = - proving_key->proving_key.commitment_key->commit(proving_key->proving_key.polynomials.lookup_inverses); + witness_commitments.lookup_inverses = proving_key->proving_key.commitment_key->commit_sparse( + proving_key->proving_key.polynomials.lookup_inverses); } transcript->send_to_verifier(domain_separator + commitment_labels.lookup_inverses, witness_commitments.lookup_inverses); @@ -235,6 +235,7 @@ template void OinkProver::execute_grand_product_c { PROFILE_THIS_NAME("OinkProver::execute_grand_product_computation_round"); // Compute the permutation grand product polynomial + proving_key->proving_key.compute_grand_product_polynomial(proving_key->relation_parameters, proving_key->final_active_wire_idx + 1); @@ -243,7 +244,9 @@ template void OinkProver::execute_grand_product_c if (proving_key->get_is_structured()) { witness_commitments.z_perm = proving_key->proving_key.commitment_key->commit_structured_with_nonzero_complement( - proving_key->proving_key.polynomials.z_perm, proving_key->proving_key.active_block_ranges); + proving_key->proving_key.polynomials.z_perm, + proving_key->proving_key.active_block_ranges, + proving_key->final_active_wire_idx + 1); } else { witness_commitments.z_perm = proving_key->proving_key.commitment_key->commit(proving_key->proving_key.polynomials.z_perm); diff --git a/barretenberg/cpp/src/barretenberg/ultra_honk/oink_prover.hpp b/barretenberg/cpp/src/barretenberg/ultra_honk/oink_prover.hpp index 8ab9fed7aa1d..e6db742510b9 100644 --- a/barretenberg/cpp/src/barretenberg/ultra_honk/oink_prover.hpp +++ b/barretenberg/cpp/src/barretenberg/ultra_honk/oink_prover.hpp @@ -19,6 +19,7 @@ // clang-format on #include +#include "barretenberg/plonk_honk_shared/execution_trace/execution_trace_usage_tracker.hpp" #include "barretenberg/ultra_honk/decider_proving_key.hpp" namespace bb { @@ -40,16 +41,20 @@ template class OinkProver { std::shared_ptr proving_key; std::shared_ptr transcript; std::string domain_separator; + ExecutionTraceUsageTracker trace_usage_tracker; + typename Flavor::WitnessCommitments witness_commitments; typename Flavor::CommitmentLabels commitment_labels; using RelationSeparator = typename Flavor::RelationSeparator; OinkProver(std::shared_ptr proving_key, const std::shared_ptr& transcript = std::make_shared(), - 
std::string domain_separator = "") + std::string domain_separator = "", + const ExecutionTraceUsageTracker& trace_usage_tracker = ExecutionTraceUsageTracker{}) : proving_key(proving_key) , transcript(transcript) , domain_separator(std::move(domain_separator)) + , trace_usage_tracker(trace_usage_tracker) {} void prove(); diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/generated/circuit_builder.cpp b/barretenberg/cpp/src/barretenberg/vm/avm/generated/circuit_builder.cpp index 53f2cbf9bdf6..cc096543dc70 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/generated/circuit_builder.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/generated/circuit_builder.cpp @@ -323,7 +323,6 @@ AvmCircuitBuilder::ProverPolynomials AvmCircuitBuilder::compute_polynomials() co polys.main_sel_op_fdiv.set_if_valid_index(i, rows[i].main_sel_op_fdiv); polys.main_sel_op_fee_per_da_gas.set_if_valid_index(i, rows[i].main_sel_op_fee_per_da_gas); polys.main_sel_op_fee_per_l2_gas.set_if_valid_index(i, rows[i].main_sel_op_fee_per_l2_gas); - polys.main_sel_op_function_selector.set_if_valid_index(i, rows[i].main_sel_op_function_selector); polys.main_sel_op_get_contract_instance.set_if_valid_index(i, rows[i].main_sel_op_get_contract_instance); polys.main_sel_op_internal_call.set_if_valid_index(i, rows[i].main_sel_op_internal_call); diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/generated/flavor.cpp b/barretenberg/cpp/src/barretenberg/vm/avm/generated/flavor.cpp index a1a7ca40675c..59c37874fe6b 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/generated/flavor.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/generated/flavor.cpp @@ -230,598 +230,597 @@ AvmFlavor::AllConstRefValues::AllConstRefValues( , main_sel_op_fdiv(il[222]) , main_sel_op_fee_per_da_gas(il[223]) , main_sel_op_fee_per_l2_gas(il[224]) - , main_sel_op_function_selector(il[225]) - , main_sel_op_get_contract_instance(il[226]) - , main_sel_op_internal_call(il[227]) - , main_sel_op_internal_return(il[228]) - , main_sel_op_is_static_call(il[229]) - , main_sel_op_jump(il[230]) - , main_sel_op_jumpi(il[231]) - , main_sel_op_keccak(il[232]) - , main_sel_op_l1_to_l2_msg_exists(il[233]) - , main_sel_op_l2gasleft(il[234]) - , main_sel_op_lt(il[235]) - , main_sel_op_lte(il[236]) - , main_sel_op_mov(il[237]) - , main_sel_op_msm(il[238]) - , main_sel_op_mul(il[239]) - , main_sel_op_not(il[240]) - , main_sel_op_note_hash_exists(il[241]) - , main_sel_op_nullifier_exists(il[242]) - , main_sel_op_or(il[243]) - , main_sel_op_poseidon2(il[244]) - , main_sel_op_radix_be(il[245]) - , main_sel_op_returndata_copy(il[246]) - , main_sel_op_returndata_size(il[247]) - , main_sel_op_sender(il[248]) - , main_sel_op_set(il[249]) - , main_sel_op_sha256(il[250]) - , main_sel_op_shl(il[251]) - , main_sel_op_shr(il[252]) - , main_sel_op_sload(il[253]) - , main_sel_op_sstore(il[254]) - , main_sel_op_static_call(il[255]) - , main_sel_op_sub(il[256]) - , main_sel_op_timestamp(il[257]) - , main_sel_op_transaction_fee(il[258]) - , main_sel_op_version(il[259]) - , main_sel_op_xor(il[260]) - , main_sel_q_kernel_lookup(il[261]) - , main_sel_q_kernel_output_lookup(il[262]) - , main_sel_resolve_ind_addr_a(il[263]) - , main_sel_resolve_ind_addr_b(il[264]) - , main_sel_resolve_ind_addr_c(il[265]) - , main_sel_resolve_ind_addr_d(il[266]) - , main_sel_returndata(il[267]) - , main_sel_rng_16(il[268]) - , main_sel_rng_8(il[269]) - , main_sel_slice_gadget(il[270]) - , main_space_id(il[271]) - , main_tag_err(il[272]) - , main_w_in_tag(il[273]) - , mem_addr(il[274]) - , mem_clk(il[275]) - , 
mem_diff(il[276]) - , mem_glob_addr(il[277]) - , mem_last(il[278]) - , mem_lastAccess(il[279]) - , mem_one_min_inv(il[280]) - , mem_r_in_tag(il[281]) - , mem_rw(il[282]) - , mem_sel_mem(il[283]) - , mem_sel_mov_ia_to_ic(il[284]) - , mem_sel_mov_ib_to_ic(il[285]) - , mem_sel_op_a(il[286]) - , mem_sel_op_b(il[287]) - , mem_sel_op_c(il[288]) - , mem_sel_op_d(il[289]) - , mem_sel_op_poseidon_read_a(il[290]) - , mem_sel_op_poseidon_read_b(il[291]) - , mem_sel_op_poseidon_read_c(il[292]) - , mem_sel_op_poseidon_read_d(il[293]) - , mem_sel_op_poseidon_write_a(il[294]) - , mem_sel_op_poseidon_write_b(il[295]) - , mem_sel_op_poseidon_write_c(il[296]) - , mem_sel_op_poseidon_write_d(il[297]) - , mem_sel_op_slice(il[298]) - , mem_sel_resolve_ind_addr_a(il[299]) - , mem_sel_resolve_ind_addr_b(il[300]) - , mem_sel_resolve_ind_addr_c(il[301]) - , mem_sel_resolve_ind_addr_d(il[302]) - , mem_sel_rng_chk(il[303]) - , mem_skip_check_tag(il[304]) - , mem_space_id(il[305]) - , mem_tag(il[306]) - , mem_tag_err(il[307]) - , mem_tsp(il[308]) - , mem_u16_r0(il[309]) - , mem_u16_r1(il[310]) - , mem_u8_r0(il[311]) - , mem_val(il[312]) - , mem_w_in_tag(il[313]) - , merkle_tree_clk(il[314]) - , merkle_tree_expected_tree_root(il[315]) - , merkle_tree_latch(il[316]) - , merkle_tree_leaf_index(il[317]) - , merkle_tree_leaf_index_is_even(il[318]) - , merkle_tree_leaf_value(il[319]) - , merkle_tree_left_hash(il[320]) - , merkle_tree_output_hash(il[321]) - , merkle_tree_path_len(il[322]) - , merkle_tree_path_len_inv(il[323]) - , merkle_tree_right_hash(il[324]) - , merkle_tree_sel_merkle_tree(il[325]) - , merkle_tree_sibling_value(il[326]) - , poseidon2_B_10_0(il[327]) - , poseidon2_B_10_1(il[328]) - , poseidon2_B_10_2(il[329]) - , poseidon2_B_10_3(il[330]) - , poseidon2_B_11_0(il[331]) - , poseidon2_B_11_1(il[332]) - , poseidon2_B_11_2(il[333]) - , poseidon2_B_11_3(il[334]) - , poseidon2_B_12_0(il[335]) - , poseidon2_B_12_1(il[336]) - , poseidon2_B_12_2(il[337]) - , poseidon2_B_12_3(il[338]) - , poseidon2_B_13_0(il[339]) - , poseidon2_B_13_1(il[340]) - , poseidon2_B_13_2(il[341]) - , poseidon2_B_13_3(il[342]) - , poseidon2_B_14_0(il[343]) - , poseidon2_B_14_1(il[344]) - , poseidon2_B_14_2(il[345]) - , poseidon2_B_14_3(il[346]) - , poseidon2_B_15_0(il[347]) - , poseidon2_B_15_1(il[348]) - , poseidon2_B_15_2(il[349]) - , poseidon2_B_15_3(il[350]) - , poseidon2_B_16_0(il[351]) - , poseidon2_B_16_1(il[352]) - , poseidon2_B_16_2(il[353]) - , poseidon2_B_16_3(il[354]) - , poseidon2_B_17_0(il[355]) - , poseidon2_B_17_1(il[356]) - , poseidon2_B_17_2(il[357]) - , poseidon2_B_17_3(il[358]) - , poseidon2_B_18_0(il[359]) - , poseidon2_B_18_1(il[360]) - , poseidon2_B_18_2(il[361]) - , poseidon2_B_18_3(il[362]) - , poseidon2_B_19_0(il[363]) - , poseidon2_B_19_1(il[364]) - , poseidon2_B_19_2(il[365]) - , poseidon2_B_19_3(il[366]) - , poseidon2_B_20_0(il[367]) - , poseidon2_B_20_1(il[368]) - , poseidon2_B_20_2(il[369]) - , poseidon2_B_20_3(il[370]) - , poseidon2_B_21_0(il[371]) - , poseidon2_B_21_1(il[372]) - , poseidon2_B_21_2(il[373]) - , poseidon2_B_21_3(il[374]) - , poseidon2_B_22_0(il[375]) - , poseidon2_B_22_1(il[376]) - , poseidon2_B_22_2(il[377]) - , poseidon2_B_22_3(il[378]) - , poseidon2_B_23_0(il[379]) - , poseidon2_B_23_1(il[380]) - , poseidon2_B_23_2(il[381]) - , poseidon2_B_23_3(il[382]) - , poseidon2_B_24_0(il[383]) - , poseidon2_B_24_1(il[384]) - , poseidon2_B_24_2(il[385]) - , poseidon2_B_24_3(il[386]) - , poseidon2_B_25_0(il[387]) - , poseidon2_B_25_1(il[388]) - , poseidon2_B_25_2(il[389]) - , poseidon2_B_25_3(il[390]) - 
, poseidon2_B_26_0(il[391]) - , poseidon2_B_26_1(il[392]) - , poseidon2_B_26_2(il[393]) - , poseidon2_B_26_3(il[394]) - , poseidon2_B_27_0(il[395]) - , poseidon2_B_27_1(il[396]) - , poseidon2_B_27_2(il[397]) - , poseidon2_B_27_3(il[398]) - , poseidon2_B_28_0(il[399]) - , poseidon2_B_28_1(il[400]) - , poseidon2_B_28_2(il[401]) - , poseidon2_B_28_3(il[402]) - , poseidon2_B_29_0(il[403]) - , poseidon2_B_29_1(il[404]) - , poseidon2_B_29_2(il[405]) - , poseidon2_B_29_3(il[406]) - , poseidon2_B_30_0(il[407]) - , poseidon2_B_30_1(il[408]) - , poseidon2_B_30_2(il[409]) - , poseidon2_B_30_3(il[410]) - , poseidon2_B_31_0(il[411]) - , poseidon2_B_31_1(il[412]) - , poseidon2_B_31_2(il[413]) - , poseidon2_B_31_3(il[414]) - , poseidon2_B_32_0(il[415]) - , poseidon2_B_32_1(il[416]) - , poseidon2_B_32_2(il[417]) - , poseidon2_B_32_3(il[418]) - , poseidon2_B_33_0(il[419]) - , poseidon2_B_33_1(il[420]) - , poseidon2_B_33_2(il[421]) - , poseidon2_B_33_3(il[422]) - , poseidon2_B_34_0(il[423]) - , poseidon2_B_34_1(il[424]) - , poseidon2_B_34_2(il[425]) - , poseidon2_B_34_3(il[426]) - , poseidon2_B_35_0(il[427]) - , poseidon2_B_35_1(il[428]) - , poseidon2_B_35_2(il[429]) - , poseidon2_B_35_3(il[430]) - , poseidon2_B_36_0(il[431]) - , poseidon2_B_36_1(il[432]) - , poseidon2_B_36_2(il[433]) - , poseidon2_B_36_3(il[434]) - , poseidon2_B_37_0(il[435]) - , poseidon2_B_37_1(il[436]) - , poseidon2_B_37_2(il[437]) - , poseidon2_B_37_3(il[438]) - , poseidon2_B_38_0(il[439]) - , poseidon2_B_38_1(il[440]) - , poseidon2_B_38_2(il[441]) - , poseidon2_B_38_3(il[442]) - , poseidon2_B_39_0(il[443]) - , poseidon2_B_39_1(il[444]) - , poseidon2_B_39_2(il[445]) - , poseidon2_B_39_3(il[446]) - , poseidon2_B_40_0(il[447]) - , poseidon2_B_40_1(il[448]) - , poseidon2_B_40_2(il[449]) - , poseidon2_B_40_3(il[450]) - , poseidon2_B_41_0(il[451]) - , poseidon2_B_41_1(il[452]) - , poseidon2_B_41_2(il[453]) - , poseidon2_B_41_3(il[454]) - , poseidon2_B_42_0(il[455]) - , poseidon2_B_42_1(il[456]) - , poseidon2_B_42_2(il[457]) - , poseidon2_B_42_3(il[458]) - , poseidon2_B_43_0(il[459]) - , poseidon2_B_43_1(il[460]) - , poseidon2_B_43_2(il[461]) - , poseidon2_B_43_3(il[462]) - , poseidon2_B_44_0(il[463]) - , poseidon2_B_44_1(il[464]) - , poseidon2_B_44_2(il[465]) - , poseidon2_B_44_3(il[466]) - , poseidon2_B_45_0(il[467]) - , poseidon2_B_45_1(il[468]) - , poseidon2_B_45_2(il[469]) - , poseidon2_B_45_3(il[470]) - , poseidon2_B_46_0(il[471]) - , poseidon2_B_46_1(il[472]) - , poseidon2_B_46_2(il[473]) - , poseidon2_B_46_3(il[474]) - , poseidon2_B_47_0(il[475]) - , poseidon2_B_47_1(il[476]) - , poseidon2_B_47_2(il[477]) - , poseidon2_B_47_3(il[478]) - , poseidon2_B_48_0(il[479]) - , poseidon2_B_48_1(il[480]) - , poseidon2_B_48_2(il[481]) - , poseidon2_B_48_3(il[482]) - , poseidon2_B_49_0(il[483]) - , poseidon2_B_49_1(il[484]) - , poseidon2_B_49_2(il[485]) - , poseidon2_B_49_3(il[486]) - , poseidon2_B_4_0(il[487]) - , poseidon2_B_4_1(il[488]) - , poseidon2_B_4_2(il[489]) - , poseidon2_B_4_3(il[490]) - , poseidon2_B_50_0(il[491]) - , poseidon2_B_50_1(il[492]) - , poseidon2_B_50_2(il[493]) - , poseidon2_B_50_3(il[494]) - , poseidon2_B_51_0(il[495]) - , poseidon2_B_51_1(il[496]) - , poseidon2_B_51_2(il[497]) - , poseidon2_B_51_3(il[498]) - , poseidon2_B_52_0(il[499]) - , poseidon2_B_52_1(il[500]) - , poseidon2_B_52_2(il[501]) - , poseidon2_B_52_3(il[502]) - , poseidon2_B_53_0(il[503]) - , poseidon2_B_53_1(il[504]) - , poseidon2_B_53_2(il[505]) - , poseidon2_B_53_3(il[506]) - , poseidon2_B_54_0(il[507]) - , poseidon2_B_54_1(il[508]) - , 
poseidon2_B_54_2(il[509]) - , poseidon2_B_54_3(il[510]) - , poseidon2_B_55_0(il[511]) - , poseidon2_B_55_1(il[512]) - , poseidon2_B_55_2(il[513]) - , poseidon2_B_55_3(il[514]) - , poseidon2_B_56_0(il[515]) - , poseidon2_B_56_1(il[516]) - , poseidon2_B_56_2(il[517]) - , poseidon2_B_56_3(il[518]) - , poseidon2_B_57_0(il[519]) - , poseidon2_B_57_1(il[520]) - , poseidon2_B_57_2(il[521]) - , poseidon2_B_57_3(il[522]) - , poseidon2_B_58_0(il[523]) - , poseidon2_B_58_1(il[524]) - , poseidon2_B_58_2(il[525]) - , poseidon2_B_58_3(il[526]) - , poseidon2_B_59_0(il[527]) - , poseidon2_B_59_1(il[528]) - , poseidon2_B_59_2(il[529]) - , poseidon2_B_59_3(il[530]) - , poseidon2_B_5_0(il[531]) - , poseidon2_B_5_1(il[532]) - , poseidon2_B_5_2(il[533]) - , poseidon2_B_5_3(il[534]) - , poseidon2_B_6_0(il[535]) - , poseidon2_B_6_1(il[536]) - , poseidon2_B_6_2(il[537]) - , poseidon2_B_6_3(il[538]) - , poseidon2_B_7_0(il[539]) - , poseidon2_B_7_1(il[540]) - , poseidon2_B_7_2(il[541]) - , poseidon2_B_7_3(il[542]) - , poseidon2_B_8_0(il[543]) - , poseidon2_B_8_1(il[544]) - , poseidon2_B_8_2(il[545]) - , poseidon2_B_8_3(il[546]) - , poseidon2_B_9_0(il[547]) - , poseidon2_B_9_1(il[548]) - , poseidon2_B_9_2(il[549]) - , poseidon2_B_9_3(il[550]) - , poseidon2_EXT_LAYER_4(il[551]) - , poseidon2_EXT_LAYER_5(il[552]) - , poseidon2_EXT_LAYER_6(il[553]) - , poseidon2_EXT_LAYER_7(il[554]) - , poseidon2_T_0_4(il[555]) - , poseidon2_T_0_5(il[556]) - , poseidon2_T_0_6(il[557]) - , poseidon2_T_0_7(il[558]) - , poseidon2_T_1_4(il[559]) - , poseidon2_T_1_5(il[560]) - , poseidon2_T_1_6(il[561]) - , poseidon2_T_1_7(il[562]) - , poseidon2_T_2_4(il[563]) - , poseidon2_T_2_5(il[564]) - , poseidon2_T_2_6(il[565]) - , poseidon2_T_2_7(il[566]) - , poseidon2_T_3_4(il[567]) - , poseidon2_T_3_5(il[568]) - , poseidon2_T_3_6(il[569]) - , poseidon2_T_3_7(il[570]) - , poseidon2_T_60_4(il[571]) - , poseidon2_T_60_5(il[572]) - , poseidon2_T_60_6(il[573]) - , poseidon2_T_60_7(il[574]) - , poseidon2_T_61_4(il[575]) - , poseidon2_T_61_5(il[576]) - , poseidon2_T_61_6(il[577]) - , poseidon2_T_61_7(il[578]) - , poseidon2_T_62_4(il[579]) - , poseidon2_T_62_5(il[580]) - , poseidon2_T_62_6(il[581]) - , poseidon2_T_62_7(il[582]) - , poseidon2_T_63_4(il[583]) - , poseidon2_T_63_5(il[584]) - , poseidon2_T_63_6(il[585]) - , poseidon2_T_63_7(il[586]) - , poseidon2_a_0(il[587]) - , poseidon2_a_1(il[588]) - , poseidon2_a_2(il[589]) - , poseidon2_a_3(il[590]) - , poseidon2_b_0(il[591]) - , poseidon2_b_1(il[592]) - , poseidon2_b_2(il[593]) - , poseidon2_b_3(il[594]) - , poseidon2_clk(il[595]) - , poseidon2_full_a_0(il[596]) - , poseidon2_full_a_1(il[597]) - , poseidon2_full_a_2(il[598]) - , poseidon2_full_a_3(il[599]) - , poseidon2_full_b_0(il[600]) - , poseidon2_full_b_1(il[601]) - , poseidon2_full_b_2(il[602]) - , poseidon2_full_b_3(il[603]) - , poseidon2_full_clk(il[604]) - , poseidon2_full_end_poseidon(il[605]) - , poseidon2_full_execute_poseidon_perm(il[606]) - , poseidon2_full_input_0(il[607]) - , poseidon2_full_input_1(il[608]) - , poseidon2_full_input_2(il[609]) - , poseidon2_full_input_len(il[610]) - , poseidon2_full_num_perm_rounds_rem(il[611]) - , poseidon2_full_num_perm_rounds_rem_inv(il[612]) - , poseidon2_full_output(il[613]) - , poseidon2_full_padding(il[614]) - , poseidon2_full_sel_merkle_tree(il[615]) - , poseidon2_full_sel_poseidon(il[616]) - , poseidon2_full_start_poseidon(il[617]) - , poseidon2_input_addr(il[618]) - , poseidon2_mem_addr_read_a(il[619]) - , poseidon2_mem_addr_read_b(il[620]) - , poseidon2_mem_addr_read_c(il[621]) - , 
poseidon2_mem_addr_read_d(il[622]) - , poseidon2_mem_addr_write_a(il[623]) - , poseidon2_mem_addr_write_b(il[624]) - , poseidon2_mem_addr_write_c(il[625]) - , poseidon2_mem_addr_write_d(il[626]) - , poseidon2_output_addr(il[627]) - , poseidon2_sel_poseidon_perm(il[628]) - , poseidon2_sel_poseidon_perm_immediate(il[629]) - , poseidon2_sel_poseidon_perm_mem_op(il[630]) - , poseidon2_space_id(il[631]) - , range_check_alu_rng_chk(il[632]) - , range_check_clk(il[633]) - , range_check_cmp_hi_bits_rng_chk(il[634]) - , range_check_cmp_lo_bits_rng_chk(il[635]) - , range_check_cmp_non_ff_rng_chk(il[636]) - , range_check_dyn_diff(il[637]) - , range_check_dyn_rng_chk_bits(il[638]) - , range_check_dyn_rng_chk_pow_2(il[639]) - , range_check_gas_da_rng_chk(il[640]) - , range_check_gas_l2_rng_chk(il[641]) - , range_check_is_lte_u112(il[642]) - , range_check_is_lte_u128(il[643]) - , range_check_is_lte_u16(il[644]) - , range_check_is_lte_u32(il[645]) - , range_check_is_lte_u48(il[646]) - , range_check_is_lte_u64(il[647]) - , range_check_is_lte_u80(il[648]) - , range_check_is_lte_u96(il[649]) - , range_check_rng_chk_bits(il[650]) - , range_check_sel_lookup_0(il[651]) - , range_check_sel_lookup_1(il[652]) - , range_check_sel_lookup_2(il[653]) - , range_check_sel_lookup_3(il[654]) - , range_check_sel_lookup_4(il[655]) - , range_check_sel_lookup_5(il[656]) - , range_check_sel_lookup_6(il[657]) - , range_check_sel_rng_chk(il[658]) - , range_check_u16_r0(il[659]) - , range_check_u16_r1(il[660]) - , range_check_u16_r2(il[661]) - , range_check_u16_r3(il[662]) - , range_check_u16_r4(il[663]) - , range_check_u16_r5(il[664]) - , range_check_u16_r6(il[665]) - , range_check_u16_r7(il[666]) - , range_check_value(il[667]) - , sha256_clk(il[668]) - , sha256_input(il[669]) - , sha256_output(il[670]) - , sha256_sel_sha256_compression(il[671]) - , sha256_state(il[672]) - , slice_addr(il[673]) - , slice_clk(il[674]) - , slice_cnt(il[675]) - , slice_col_offset(il[676]) - , slice_one_min_inv(il[677]) - , slice_sel_cd_cpy(il[678]) - , slice_sel_mem_active(il[679]) - , slice_sel_return(il[680]) - , slice_sel_start(il[681]) - , slice_space_id(il[682]) - , slice_val(il[683]) - , lookup_rng_chk_pow_2_counts(il[684]) - , lookup_rng_chk_diff_counts(il[685]) - , lookup_rng_chk_0_counts(il[686]) - , lookup_rng_chk_1_counts(il[687]) - , lookup_rng_chk_2_counts(il[688]) - , lookup_rng_chk_3_counts(il[689]) - , lookup_rng_chk_4_counts(il[690]) - , lookup_rng_chk_5_counts(il[691]) - , lookup_rng_chk_6_counts(il[692]) - , lookup_rng_chk_7_counts(il[693]) - , lookup_mem_rng_chk_0_counts(il[694]) - , lookup_mem_rng_chk_1_counts(il[695]) - , lookup_mem_rng_chk_2_counts(il[696]) - , lookup_pow_2_0_counts(il[697]) - , lookup_pow_2_1_counts(il[698]) - , lookup_byte_lengths_counts(il[699]) - , lookup_byte_operations_counts(il[700]) - , lookup_opcode_gas_counts(il[701]) - , lookup_l2_gas_rng_chk_0_counts(il[702]) - , lookup_l2_gas_rng_chk_1_counts(il[703]) - , lookup_da_gas_rng_chk_0_counts(il[704]) - , lookup_da_gas_rng_chk_1_counts(il[705]) - , lookup_cd_value_counts(il[706]) - , lookup_ret_value_counts(il[707]) - , incl_main_tag_err_counts(il[708]) - , incl_mem_tag_err_counts(il[709]) - , perm_rng_non_ff_cmp_inv(il[710]) - , perm_rng_cmp_lo_inv(il[711]) - , perm_rng_cmp_hi_inv(il[712]) - , perm_rng_alu_inv(il[713]) - , perm_cmp_alu_inv(il[714]) - , perm_pos_mem_read_a_inv(il[715]) - , perm_pos_mem_read_b_inv(il[716]) - , perm_pos_mem_read_c_inv(il[717]) - , perm_pos_mem_read_d_inv(il[718]) - , perm_pos_mem_write_a_inv(il[719]) - , 
perm_pos_mem_write_b_inv(il[720]) - , perm_pos_mem_write_c_inv(il[721]) - , perm_pos_mem_write_d_inv(il[722]) - , perm_pos2_fixed_pos2_perm_inv(il[723]) - , perm_slice_mem_inv(il[724]) - , perm_merkle_poseidon2_inv(il[725]) - , perm_main_alu_inv(il[726]) - , perm_main_bin_inv(il[727]) - , perm_main_conv_inv(il[728]) - , perm_main_sha256_inv(il[729]) - , perm_main_pos2_perm_inv(il[730]) - , perm_main_slice_inv(il[731]) - , perm_main_mem_a_inv(il[732]) - , perm_main_mem_b_inv(il[733]) - , perm_main_mem_c_inv(il[734]) - , perm_main_mem_d_inv(il[735]) - , perm_main_mem_ind_addr_a_inv(il[736]) - , perm_main_mem_ind_addr_b_inv(il[737]) - , perm_main_mem_ind_addr_c_inv(il[738]) - , perm_main_mem_ind_addr_d_inv(il[739]) - , lookup_rng_chk_pow_2_inv(il[740]) - , lookup_rng_chk_diff_inv(il[741]) - , lookup_rng_chk_0_inv(il[742]) - , lookup_rng_chk_1_inv(il[743]) - , lookup_rng_chk_2_inv(il[744]) - , lookup_rng_chk_3_inv(il[745]) - , lookup_rng_chk_4_inv(il[746]) - , lookup_rng_chk_5_inv(il[747]) - , lookup_rng_chk_6_inv(il[748]) - , lookup_rng_chk_7_inv(il[749]) - , lookup_mem_rng_chk_0_inv(il[750]) - , lookup_mem_rng_chk_1_inv(il[751]) - , lookup_mem_rng_chk_2_inv(il[752]) - , lookup_pow_2_0_inv(il[753]) - , lookup_pow_2_1_inv(il[754]) - , lookup_byte_lengths_inv(il[755]) - , lookup_byte_operations_inv(il[756]) - , lookup_opcode_gas_inv(il[757]) - , lookup_l2_gas_rng_chk_0_inv(il[758]) - , lookup_l2_gas_rng_chk_1_inv(il[759]) - , lookup_da_gas_rng_chk_0_inv(il[760]) - , lookup_da_gas_rng_chk_1_inv(il[761]) - , lookup_cd_value_inv(il[762]) - , lookup_ret_value_inv(il[763]) - , incl_main_tag_err_inv(il[764]) - , incl_mem_tag_err_inv(il[765]) - , binary_acc_ia_shift(il[766]) - , binary_acc_ib_shift(il[767]) - , binary_acc_ic_shift(il[768]) - , binary_mem_tag_ctr_shift(il[769]) - , binary_op_id_shift(il[770]) - , cmp_a_hi_shift(il[771]) - , cmp_a_lo_shift(il[772]) - , cmp_b_hi_shift(il[773]) - , cmp_b_lo_shift(il[774]) - , cmp_cmp_rng_ctr_shift(il[775]) - , cmp_op_gt_shift(il[776]) - , cmp_p_sub_a_hi_shift(il[777]) - , cmp_p_sub_a_lo_shift(il[778]) - , cmp_p_sub_b_hi_shift(il[779]) - , cmp_p_sub_b_lo_shift(il[780]) - , cmp_sel_rng_chk_shift(il[781]) - , main_da_gas_remaining_shift(il[782]) - , main_internal_return_ptr_shift(il[783]) - , main_l2_gas_remaining_shift(il[784]) - , main_pc_shift(il[785]) - , main_sel_execution_end_shift(il[786]) - , main_sel_execution_row_shift(il[787]) - , mem_glob_addr_shift(il[788]) - , mem_rw_shift(il[789]) - , mem_sel_mem_shift(il[790]) - , mem_tag_shift(il[791]) - , mem_tsp_shift(il[792]) - , mem_val_shift(il[793]) - , merkle_tree_leaf_index_shift(il[794]) - , merkle_tree_leaf_value_shift(il[795]) - , merkle_tree_path_len_shift(il[796]) - , poseidon2_full_a_0_shift(il[797]) - , poseidon2_full_a_1_shift(il[798]) - , poseidon2_full_a_2_shift(il[799]) - , poseidon2_full_a_3_shift(il[800]) - , poseidon2_full_execute_poseidon_perm_shift(il[801]) - , poseidon2_full_input_0_shift(il[802]) - , poseidon2_full_input_1_shift(il[803]) - , poseidon2_full_input_2_shift(il[804]) - , poseidon2_full_num_perm_rounds_rem_shift(il[805]) - , poseidon2_full_sel_poseidon_shift(il[806]) - , poseidon2_full_start_poseidon_shift(il[807]) - , slice_addr_shift(il[808]) - , slice_clk_shift(il[809]) - , slice_cnt_shift(il[810]) - , slice_col_offset_shift(il[811]) - , slice_sel_cd_cpy_shift(il[812]) - , slice_sel_mem_active_shift(il[813]) - , slice_sel_return_shift(il[814]) - , slice_sel_start_shift(il[815]) - , slice_space_id_shift(il[816]) + , main_sel_op_get_contract_instance(il[225]) + , 
main_sel_op_internal_call(il[226]) + , main_sel_op_internal_return(il[227]) + , main_sel_op_is_static_call(il[228]) + , main_sel_op_jump(il[229]) + , main_sel_op_jumpi(il[230]) + , main_sel_op_keccak(il[231]) + , main_sel_op_l1_to_l2_msg_exists(il[232]) + , main_sel_op_l2gasleft(il[233]) + , main_sel_op_lt(il[234]) + , main_sel_op_lte(il[235]) + , main_sel_op_mov(il[236]) + , main_sel_op_msm(il[237]) + , main_sel_op_mul(il[238]) + , main_sel_op_not(il[239]) + , main_sel_op_note_hash_exists(il[240]) + , main_sel_op_nullifier_exists(il[241]) + , main_sel_op_or(il[242]) + , main_sel_op_poseidon2(il[243]) + , main_sel_op_radix_be(il[244]) + , main_sel_op_returndata_copy(il[245]) + , main_sel_op_returndata_size(il[246]) + , main_sel_op_sender(il[247]) + , main_sel_op_set(il[248]) + , main_sel_op_sha256(il[249]) + , main_sel_op_shl(il[250]) + , main_sel_op_shr(il[251]) + , main_sel_op_sload(il[252]) + , main_sel_op_sstore(il[253]) + , main_sel_op_static_call(il[254]) + , main_sel_op_sub(il[255]) + , main_sel_op_timestamp(il[256]) + , main_sel_op_transaction_fee(il[257]) + , main_sel_op_version(il[258]) + , main_sel_op_xor(il[259]) + , main_sel_q_kernel_lookup(il[260]) + , main_sel_q_kernel_output_lookup(il[261]) + , main_sel_resolve_ind_addr_a(il[262]) + , main_sel_resolve_ind_addr_b(il[263]) + , main_sel_resolve_ind_addr_c(il[264]) + , main_sel_resolve_ind_addr_d(il[265]) + , main_sel_returndata(il[266]) + , main_sel_rng_16(il[267]) + , main_sel_rng_8(il[268]) + , main_sel_slice_gadget(il[269]) + , main_space_id(il[270]) + , main_tag_err(il[271]) + , main_w_in_tag(il[272]) + , mem_addr(il[273]) + , mem_clk(il[274]) + , mem_diff(il[275]) + , mem_glob_addr(il[276]) + , mem_last(il[277]) + , mem_lastAccess(il[278]) + , mem_one_min_inv(il[279]) + , mem_r_in_tag(il[280]) + , mem_rw(il[281]) + , mem_sel_mem(il[282]) + , mem_sel_mov_ia_to_ic(il[283]) + , mem_sel_mov_ib_to_ic(il[284]) + , mem_sel_op_a(il[285]) + , mem_sel_op_b(il[286]) + , mem_sel_op_c(il[287]) + , mem_sel_op_d(il[288]) + , mem_sel_op_poseidon_read_a(il[289]) + , mem_sel_op_poseidon_read_b(il[290]) + , mem_sel_op_poseidon_read_c(il[291]) + , mem_sel_op_poseidon_read_d(il[292]) + , mem_sel_op_poseidon_write_a(il[293]) + , mem_sel_op_poseidon_write_b(il[294]) + , mem_sel_op_poseidon_write_c(il[295]) + , mem_sel_op_poseidon_write_d(il[296]) + , mem_sel_op_slice(il[297]) + , mem_sel_resolve_ind_addr_a(il[298]) + , mem_sel_resolve_ind_addr_b(il[299]) + , mem_sel_resolve_ind_addr_c(il[300]) + , mem_sel_resolve_ind_addr_d(il[301]) + , mem_sel_rng_chk(il[302]) + , mem_skip_check_tag(il[303]) + , mem_space_id(il[304]) + , mem_tag(il[305]) + , mem_tag_err(il[306]) + , mem_tsp(il[307]) + , mem_u16_r0(il[308]) + , mem_u16_r1(il[309]) + , mem_u8_r0(il[310]) + , mem_val(il[311]) + , mem_w_in_tag(il[312]) + , merkle_tree_clk(il[313]) + , merkle_tree_expected_tree_root(il[314]) + , merkle_tree_latch(il[315]) + , merkle_tree_leaf_index(il[316]) + , merkle_tree_leaf_index_is_even(il[317]) + , merkle_tree_leaf_value(il[318]) + , merkle_tree_left_hash(il[319]) + , merkle_tree_output_hash(il[320]) + , merkle_tree_path_len(il[321]) + , merkle_tree_path_len_inv(il[322]) + , merkle_tree_right_hash(il[323]) + , merkle_tree_sel_merkle_tree(il[324]) + , merkle_tree_sibling_value(il[325]) + , poseidon2_B_10_0(il[326]) + , poseidon2_B_10_1(il[327]) + , poseidon2_B_10_2(il[328]) + , poseidon2_B_10_3(il[329]) + , poseidon2_B_11_0(il[330]) + , poseidon2_B_11_1(il[331]) + , poseidon2_B_11_2(il[332]) + , poseidon2_B_11_3(il[333]) + , poseidon2_B_12_0(il[334]) + , 
poseidon2_B_12_1(il[335]) + , poseidon2_B_12_2(il[336]) + , poseidon2_B_12_3(il[337]) + , poseidon2_B_13_0(il[338]) + , poseidon2_B_13_1(il[339]) + , poseidon2_B_13_2(il[340]) + , poseidon2_B_13_3(il[341]) + , poseidon2_B_14_0(il[342]) + , poseidon2_B_14_1(il[343]) + , poseidon2_B_14_2(il[344]) + , poseidon2_B_14_3(il[345]) + , poseidon2_B_15_0(il[346]) + , poseidon2_B_15_1(il[347]) + , poseidon2_B_15_2(il[348]) + , poseidon2_B_15_3(il[349]) + , poseidon2_B_16_0(il[350]) + , poseidon2_B_16_1(il[351]) + , poseidon2_B_16_2(il[352]) + , poseidon2_B_16_3(il[353]) + , poseidon2_B_17_0(il[354]) + , poseidon2_B_17_1(il[355]) + , poseidon2_B_17_2(il[356]) + , poseidon2_B_17_3(il[357]) + , poseidon2_B_18_0(il[358]) + , poseidon2_B_18_1(il[359]) + , poseidon2_B_18_2(il[360]) + , poseidon2_B_18_3(il[361]) + , poseidon2_B_19_0(il[362]) + , poseidon2_B_19_1(il[363]) + , poseidon2_B_19_2(il[364]) + , poseidon2_B_19_3(il[365]) + , poseidon2_B_20_0(il[366]) + , poseidon2_B_20_1(il[367]) + , poseidon2_B_20_2(il[368]) + , poseidon2_B_20_3(il[369]) + , poseidon2_B_21_0(il[370]) + , poseidon2_B_21_1(il[371]) + , poseidon2_B_21_2(il[372]) + , poseidon2_B_21_3(il[373]) + , poseidon2_B_22_0(il[374]) + , poseidon2_B_22_1(il[375]) + , poseidon2_B_22_2(il[376]) + , poseidon2_B_22_3(il[377]) + , poseidon2_B_23_0(il[378]) + , poseidon2_B_23_1(il[379]) + , poseidon2_B_23_2(il[380]) + , poseidon2_B_23_3(il[381]) + , poseidon2_B_24_0(il[382]) + , poseidon2_B_24_1(il[383]) + , poseidon2_B_24_2(il[384]) + , poseidon2_B_24_3(il[385]) + , poseidon2_B_25_0(il[386]) + , poseidon2_B_25_1(il[387]) + , poseidon2_B_25_2(il[388]) + , poseidon2_B_25_3(il[389]) + , poseidon2_B_26_0(il[390]) + , poseidon2_B_26_1(il[391]) + , poseidon2_B_26_2(il[392]) + , poseidon2_B_26_3(il[393]) + , poseidon2_B_27_0(il[394]) + , poseidon2_B_27_1(il[395]) + , poseidon2_B_27_2(il[396]) + , poseidon2_B_27_3(il[397]) + , poseidon2_B_28_0(il[398]) + , poseidon2_B_28_1(il[399]) + , poseidon2_B_28_2(il[400]) + , poseidon2_B_28_3(il[401]) + , poseidon2_B_29_0(il[402]) + , poseidon2_B_29_1(il[403]) + , poseidon2_B_29_2(il[404]) + , poseidon2_B_29_3(il[405]) + , poseidon2_B_30_0(il[406]) + , poseidon2_B_30_1(il[407]) + , poseidon2_B_30_2(il[408]) + , poseidon2_B_30_3(il[409]) + , poseidon2_B_31_0(il[410]) + , poseidon2_B_31_1(il[411]) + , poseidon2_B_31_2(il[412]) + , poseidon2_B_31_3(il[413]) + , poseidon2_B_32_0(il[414]) + , poseidon2_B_32_1(il[415]) + , poseidon2_B_32_2(il[416]) + , poseidon2_B_32_3(il[417]) + , poseidon2_B_33_0(il[418]) + , poseidon2_B_33_1(il[419]) + , poseidon2_B_33_2(il[420]) + , poseidon2_B_33_3(il[421]) + , poseidon2_B_34_0(il[422]) + , poseidon2_B_34_1(il[423]) + , poseidon2_B_34_2(il[424]) + , poseidon2_B_34_3(il[425]) + , poseidon2_B_35_0(il[426]) + , poseidon2_B_35_1(il[427]) + , poseidon2_B_35_2(il[428]) + , poseidon2_B_35_3(il[429]) + , poseidon2_B_36_0(il[430]) + , poseidon2_B_36_1(il[431]) + , poseidon2_B_36_2(il[432]) + , poseidon2_B_36_3(il[433]) + , poseidon2_B_37_0(il[434]) + , poseidon2_B_37_1(il[435]) + , poseidon2_B_37_2(il[436]) + , poseidon2_B_37_3(il[437]) + , poseidon2_B_38_0(il[438]) + , poseidon2_B_38_1(il[439]) + , poseidon2_B_38_2(il[440]) + , poseidon2_B_38_3(il[441]) + , poseidon2_B_39_0(il[442]) + , poseidon2_B_39_1(il[443]) + , poseidon2_B_39_2(il[444]) + , poseidon2_B_39_3(il[445]) + , poseidon2_B_40_0(il[446]) + , poseidon2_B_40_1(il[447]) + , poseidon2_B_40_2(il[448]) + , poseidon2_B_40_3(il[449]) + , poseidon2_B_41_0(il[450]) + , poseidon2_B_41_1(il[451]) + , poseidon2_B_41_2(il[452]) + , 
poseidon2_B_41_3(il[453]) + , poseidon2_B_42_0(il[454]) + , poseidon2_B_42_1(il[455]) + , poseidon2_B_42_2(il[456]) + , poseidon2_B_42_3(il[457]) + , poseidon2_B_43_0(il[458]) + , poseidon2_B_43_1(il[459]) + , poseidon2_B_43_2(il[460]) + , poseidon2_B_43_3(il[461]) + , poseidon2_B_44_0(il[462]) + , poseidon2_B_44_1(il[463]) + , poseidon2_B_44_2(il[464]) + , poseidon2_B_44_3(il[465]) + , poseidon2_B_45_0(il[466]) + , poseidon2_B_45_1(il[467]) + , poseidon2_B_45_2(il[468]) + , poseidon2_B_45_3(il[469]) + , poseidon2_B_46_0(il[470]) + , poseidon2_B_46_1(il[471]) + , poseidon2_B_46_2(il[472]) + , poseidon2_B_46_3(il[473]) + , poseidon2_B_47_0(il[474]) + , poseidon2_B_47_1(il[475]) + , poseidon2_B_47_2(il[476]) + , poseidon2_B_47_3(il[477]) + , poseidon2_B_48_0(il[478]) + , poseidon2_B_48_1(il[479]) + , poseidon2_B_48_2(il[480]) + , poseidon2_B_48_3(il[481]) + , poseidon2_B_49_0(il[482]) + , poseidon2_B_49_1(il[483]) + , poseidon2_B_49_2(il[484]) + , poseidon2_B_49_3(il[485]) + , poseidon2_B_4_0(il[486]) + , poseidon2_B_4_1(il[487]) + , poseidon2_B_4_2(il[488]) + , poseidon2_B_4_3(il[489]) + , poseidon2_B_50_0(il[490]) + , poseidon2_B_50_1(il[491]) + , poseidon2_B_50_2(il[492]) + , poseidon2_B_50_3(il[493]) + , poseidon2_B_51_0(il[494]) + , poseidon2_B_51_1(il[495]) + , poseidon2_B_51_2(il[496]) + , poseidon2_B_51_3(il[497]) + , poseidon2_B_52_0(il[498]) + , poseidon2_B_52_1(il[499]) + , poseidon2_B_52_2(il[500]) + , poseidon2_B_52_3(il[501]) + , poseidon2_B_53_0(il[502]) + , poseidon2_B_53_1(il[503]) + , poseidon2_B_53_2(il[504]) + , poseidon2_B_53_3(il[505]) + , poseidon2_B_54_0(il[506]) + , poseidon2_B_54_1(il[507]) + , poseidon2_B_54_2(il[508]) + , poseidon2_B_54_3(il[509]) + , poseidon2_B_55_0(il[510]) + , poseidon2_B_55_1(il[511]) + , poseidon2_B_55_2(il[512]) + , poseidon2_B_55_3(il[513]) + , poseidon2_B_56_0(il[514]) + , poseidon2_B_56_1(il[515]) + , poseidon2_B_56_2(il[516]) + , poseidon2_B_56_3(il[517]) + , poseidon2_B_57_0(il[518]) + , poseidon2_B_57_1(il[519]) + , poseidon2_B_57_2(il[520]) + , poseidon2_B_57_3(il[521]) + , poseidon2_B_58_0(il[522]) + , poseidon2_B_58_1(il[523]) + , poseidon2_B_58_2(il[524]) + , poseidon2_B_58_3(il[525]) + , poseidon2_B_59_0(il[526]) + , poseidon2_B_59_1(il[527]) + , poseidon2_B_59_2(il[528]) + , poseidon2_B_59_3(il[529]) + , poseidon2_B_5_0(il[530]) + , poseidon2_B_5_1(il[531]) + , poseidon2_B_5_2(il[532]) + , poseidon2_B_5_3(il[533]) + , poseidon2_B_6_0(il[534]) + , poseidon2_B_6_1(il[535]) + , poseidon2_B_6_2(il[536]) + , poseidon2_B_6_3(il[537]) + , poseidon2_B_7_0(il[538]) + , poseidon2_B_7_1(il[539]) + , poseidon2_B_7_2(il[540]) + , poseidon2_B_7_3(il[541]) + , poseidon2_B_8_0(il[542]) + , poseidon2_B_8_1(il[543]) + , poseidon2_B_8_2(il[544]) + , poseidon2_B_8_3(il[545]) + , poseidon2_B_9_0(il[546]) + , poseidon2_B_9_1(il[547]) + , poseidon2_B_9_2(il[548]) + , poseidon2_B_9_3(il[549]) + , poseidon2_EXT_LAYER_4(il[550]) + , poseidon2_EXT_LAYER_5(il[551]) + , poseidon2_EXT_LAYER_6(il[552]) + , poseidon2_EXT_LAYER_7(il[553]) + , poseidon2_T_0_4(il[554]) + , poseidon2_T_0_5(il[555]) + , poseidon2_T_0_6(il[556]) + , poseidon2_T_0_7(il[557]) + , poseidon2_T_1_4(il[558]) + , poseidon2_T_1_5(il[559]) + , poseidon2_T_1_6(il[560]) + , poseidon2_T_1_7(il[561]) + , poseidon2_T_2_4(il[562]) + , poseidon2_T_2_5(il[563]) + , poseidon2_T_2_6(il[564]) + , poseidon2_T_2_7(il[565]) + , poseidon2_T_3_4(il[566]) + , poseidon2_T_3_5(il[567]) + , poseidon2_T_3_6(il[568]) + , poseidon2_T_3_7(il[569]) + , poseidon2_T_60_4(il[570]) + , poseidon2_T_60_5(il[571]) + , 
poseidon2_T_60_6(il[572]) + , poseidon2_T_60_7(il[573]) + , poseidon2_T_61_4(il[574]) + , poseidon2_T_61_5(il[575]) + , poseidon2_T_61_6(il[576]) + , poseidon2_T_61_7(il[577]) + , poseidon2_T_62_4(il[578]) + , poseidon2_T_62_5(il[579]) + , poseidon2_T_62_6(il[580]) + , poseidon2_T_62_7(il[581]) + , poseidon2_T_63_4(il[582]) + , poseidon2_T_63_5(il[583]) + , poseidon2_T_63_6(il[584]) + , poseidon2_T_63_7(il[585]) + , poseidon2_a_0(il[586]) + , poseidon2_a_1(il[587]) + , poseidon2_a_2(il[588]) + , poseidon2_a_3(il[589]) + , poseidon2_b_0(il[590]) + , poseidon2_b_1(il[591]) + , poseidon2_b_2(il[592]) + , poseidon2_b_3(il[593]) + , poseidon2_clk(il[594]) + , poseidon2_full_a_0(il[595]) + , poseidon2_full_a_1(il[596]) + , poseidon2_full_a_2(il[597]) + , poseidon2_full_a_3(il[598]) + , poseidon2_full_b_0(il[599]) + , poseidon2_full_b_1(il[600]) + , poseidon2_full_b_2(il[601]) + , poseidon2_full_b_3(il[602]) + , poseidon2_full_clk(il[603]) + , poseidon2_full_end_poseidon(il[604]) + , poseidon2_full_execute_poseidon_perm(il[605]) + , poseidon2_full_input_0(il[606]) + , poseidon2_full_input_1(il[607]) + , poseidon2_full_input_2(il[608]) + , poseidon2_full_input_len(il[609]) + , poseidon2_full_num_perm_rounds_rem(il[610]) + , poseidon2_full_num_perm_rounds_rem_inv(il[611]) + , poseidon2_full_output(il[612]) + , poseidon2_full_padding(il[613]) + , poseidon2_full_sel_merkle_tree(il[614]) + , poseidon2_full_sel_poseidon(il[615]) + , poseidon2_full_start_poseidon(il[616]) + , poseidon2_input_addr(il[617]) + , poseidon2_mem_addr_read_a(il[618]) + , poseidon2_mem_addr_read_b(il[619]) + , poseidon2_mem_addr_read_c(il[620]) + , poseidon2_mem_addr_read_d(il[621]) + , poseidon2_mem_addr_write_a(il[622]) + , poseidon2_mem_addr_write_b(il[623]) + , poseidon2_mem_addr_write_c(il[624]) + , poseidon2_mem_addr_write_d(il[625]) + , poseidon2_output_addr(il[626]) + , poseidon2_sel_poseidon_perm(il[627]) + , poseidon2_sel_poseidon_perm_immediate(il[628]) + , poseidon2_sel_poseidon_perm_mem_op(il[629]) + , poseidon2_space_id(il[630]) + , range_check_alu_rng_chk(il[631]) + , range_check_clk(il[632]) + , range_check_cmp_hi_bits_rng_chk(il[633]) + , range_check_cmp_lo_bits_rng_chk(il[634]) + , range_check_cmp_non_ff_rng_chk(il[635]) + , range_check_dyn_diff(il[636]) + , range_check_dyn_rng_chk_bits(il[637]) + , range_check_dyn_rng_chk_pow_2(il[638]) + , range_check_gas_da_rng_chk(il[639]) + , range_check_gas_l2_rng_chk(il[640]) + , range_check_is_lte_u112(il[641]) + , range_check_is_lte_u128(il[642]) + , range_check_is_lte_u16(il[643]) + , range_check_is_lte_u32(il[644]) + , range_check_is_lte_u48(il[645]) + , range_check_is_lte_u64(il[646]) + , range_check_is_lte_u80(il[647]) + , range_check_is_lte_u96(il[648]) + , range_check_rng_chk_bits(il[649]) + , range_check_sel_lookup_0(il[650]) + , range_check_sel_lookup_1(il[651]) + , range_check_sel_lookup_2(il[652]) + , range_check_sel_lookup_3(il[653]) + , range_check_sel_lookup_4(il[654]) + , range_check_sel_lookup_5(il[655]) + , range_check_sel_lookup_6(il[656]) + , range_check_sel_rng_chk(il[657]) + , range_check_u16_r0(il[658]) + , range_check_u16_r1(il[659]) + , range_check_u16_r2(il[660]) + , range_check_u16_r3(il[661]) + , range_check_u16_r4(il[662]) + , range_check_u16_r5(il[663]) + , range_check_u16_r6(il[664]) + , range_check_u16_r7(il[665]) + , range_check_value(il[666]) + , sha256_clk(il[667]) + , sha256_input(il[668]) + , sha256_output(il[669]) + , sha256_sel_sha256_compression(il[670]) + , sha256_state(il[671]) + , slice_addr(il[672]) + , slice_clk(il[673]) + , 
slice_cnt(il[674]) + , slice_col_offset(il[675]) + , slice_one_min_inv(il[676]) + , slice_sel_cd_cpy(il[677]) + , slice_sel_mem_active(il[678]) + , slice_sel_return(il[679]) + , slice_sel_start(il[680]) + , slice_space_id(il[681]) + , slice_val(il[682]) + , lookup_rng_chk_pow_2_counts(il[683]) + , lookup_rng_chk_diff_counts(il[684]) + , lookup_rng_chk_0_counts(il[685]) + , lookup_rng_chk_1_counts(il[686]) + , lookup_rng_chk_2_counts(il[687]) + , lookup_rng_chk_3_counts(il[688]) + , lookup_rng_chk_4_counts(il[689]) + , lookup_rng_chk_5_counts(il[690]) + , lookup_rng_chk_6_counts(il[691]) + , lookup_rng_chk_7_counts(il[692]) + , lookup_mem_rng_chk_0_counts(il[693]) + , lookup_mem_rng_chk_1_counts(il[694]) + , lookup_mem_rng_chk_2_counts(il[695]) + , lookup_pow_2_0_counts(il[696]) + , lookup_pow_2_1_counts(il[697]) + , lookup_byte_lengths_counts(il[698]) + , lookup_byte_operations_counts(il[699]) + , lookup_opcode_gas_counts(il[700]) + , lookup_l2_gas_rng_chk_0_counts(il[701]) + , lookup_l2_gas_rng_chk_1_counts(il[702]) + , lookup_da_gas_rng_chk_0_counts(il[703]) + , lookup_da_gas_rng_chk_1_counts(il[704]) + , lookup_cd_value_counts(il[705]) + , lookup_ret_value_counts(il[706]) + , incl_main_tag_err_counts(il[707]) + , incl_mem_tag_err_counts(il[708]) + , perm_rng_non_ff_cmp_inv(il[709]) + , perm_rng_cmp_lo_inv(il[710]) + , perm_rng_cmp_hi_inv(il[711]) + , perm_rng_alu_inv(il[712]) + , perm_cmp_alu_inv(il[713]) + , perm_pos_mem_read_a_inv(il[714]) + , perm_pos_mem_read_b_inv(il[715]) + , perm_pos_mem_read_c_inv(il[716]) + , perm_pos_mem_read_d_inv(il[717]) + , perm_pos_mem_write_a_inv(il[718]) + , perm_pos_mem_write_b_inv(il[719]) + , perm_pos_mem_write_c_inv(il[720]) + , perm_pos_mem_write_d_inv(il[721]) + , perm_pos2_fixed_pos2_perm_inv(il[722]) + , perm_slice_mem_inv(il[723]) + , perm_merkle_poseidon2_inv(il[724]) + , perm_main_alu_inv(il[725]) + , perm_main_bin_inv(il[726]) + , perm_main_conv_inv(il[727]) + , perm_main_sha256_inv(il[728]) + , perm_main_pos2_perm_inv(il[729]) + , perm_main_slice_inv(il[730]) + , perm_main_mem_a_inv(il[731]) + , perm_main_mem_b_inv(il[732]) + , perm_main_mem_c_inv(il[733]) + , perm_main_mem_d_inv(il[734]) + , perm_main_mem_ind_addr_a_inv(il[735]) + , perm_main_mem_ind_addr_b_inv(il[736]) + , perm_main_mem_ind_addr_c_inv(il[737]) + , perm_main_mem_ind_addr_d_inv(il[738]) + , lookup_rng_chk_pow_2_inv(il[739]) + , lookup_rng_chk_diff_inv(il[740]) + , lookup_rng_chk_0_inv(il[741]) + , lookup_rng_chk_1_inv(il[742]) + , lookup_rng_chk_2_inv(il[743]) + , lookup_rng_chk_3_inv(il[744]) + , lookup_rng_chk_4_inv(il[745]) + , lookup_rng_chk_5_inv(il[746]) + , lookup_rng_chk_6_inv(il[747]) + , lookup_rng_chk_7_inv(il[748]) + , lookup_mem_rng_chk_0_inv(il[749]) + , lookup_mem_rng_chk_1_inv(il[750]) + , lookup_mem_rng_chk_2_inv(il[751]) + , lookup_pow_2_0_inv(il[752]) + , lookup_pow_2_1_inv(il[753]) + , lookup_byte_lengths_inv(il[754]) + , lookup_byte_operations_inv(il[755]) + , lookup_opcode_gas_inv(il[756]) + , lookup_l2_gas_rng_chk_0_inv(il[757]) + , lookup_l2_gas_rng_chk_1_inv(il[758]) + , lookup_da_gas_rng_chk_0_inv(il[759]) + , lookup_da_gas_rng_chk_1_inv(il[760]) + , lookup_cd_value_inv(il[761]) + , lookup_ret_value_inv(il[762]) + , incl_main_tag_err_inv(il[763]) + , incl_mem_tag_err_inv(il[764]) + , binary_acc_ia_shift(il[765]) + , binary_acc_ib_shift(il[766]) + , binary_acc_ic_shift(il[767]) + , binary_mem_tag_ctr_shift(il[768]) + , binary_op_id_shift(il[769]) + , cmp_a_hi_shift(il[770]) + , cmp_a_lo_shift(il[771]) + , cmp_b_hi_shift(il[772]) + , 
cmp_b_lo_shift(il[773]) + , cmp_cmp_rng_ctr_shift(il[774]) + , cmp_op_gt_shift(il[775]) + , cmp_p_sub_a_hi_shift(il[776]) + , cmp_p_sub_a_lo_shift(il[777]) + , cmp_p_sub_b_hi_shift(il[778]) + , cmp_p_sub_b_lo_shift(il[779]) + , cmp_sel_rng_chk_shift(il[780]) + , main_da_gas_remaining_shift(il[781]) + , main_internal_return_ptr_shift(il[782]) + , main_l2_gas_remaining_shift(il[783]) + , main_pc_shift(il[784]) + , main_sel_execution_end_shift(il[785]) + , main_sel_execution_row_shift(il[786]) + , mem_glob_addr_shift(il[787]) + , mem_rw_shift(il[788]) + , mem_sel_mem_shift(il[789]) + , mem_tag_shift(il[790]) + , mem_tsp_shift(il[791]) + , mem_val_shift(il[792]) + , merkle_tree_leaf_index_shift(il[793]) + , merkle_tree_leaf_value_shift(il[794]) + , merkle_tree_path_len_shift(il[795]) + , poseidon2_full_a_0_shift(il[796]) + , poseidon2_full_a_1_shift(il[797]) + , poseidon2_full_a_2_shift(il[798]) + , poseidon2_full_a_3_shift(il[799]) + , poseidon2_full_execute_poseidon_perm_shift(il[800]) + , poseidon2_full_input_0_shift(il[801]) + , poseidon2_full_input_1_shift(il[802]) + , poseidon2_full_input_2_shift(il[803]) + , poseidon2_full_num_perm_rounds_rem_shift(il[804]) + , poseidon2_full_sel_poseidon_shift(il[805]) + , poseidon2_full_start_poseidon_shift(il[806]) + , slice_addr_shift(il[807]) + , slice_clk_shift(il[808]) + , slice_cnt_shift(il[809]) + , slice_col_offset_shift(il[810]) + , slice_sel_cd_cpy_shift(il[811]) + , slice_sel_mem_active_shift(il[812]) + , slice_sel_return_shift(il[813]) + , slice_sel_start_shift(il[814]) + , slice_space_id_shift(il[815]) {} AvmFlavor::ProverPolynomials::ProverPolynomials(ProvingKey& proving_key) @@ -1063,7 +1062,6 @@ AvmFlavor::AllConstRefValues AvmFlavor::ProverPolynomials::get_row(size_t row_id main_sel_op_fdiv[row_idx], main_sel_op_fee_per_da_gas[row_idx], main_sel_op_fee_per_l2_gas[row_idx], - main_sel_op_function_selector[row_idx], main_sel_op_get_contract_instance[row_idx], main_sel_op_internal_call[row_idx], main_sel_op_internal_return[row_idx], @@ -1884,7 +1882,6 @@ AvmFlavor::CommitmentLabels::CommitmentLabels() Base::main_sel_op_fdiv = "MAIN_SEL_OP_FDIV"; Base::main_sel_op_fee_per_da_gas = "MAIN_SEL_OP_FEE_PER_DA_GAS"; Base::main_sel_op_fee_per_l2_gas = "MAIN_SEL_OP_FEE_PER_L2_GAS"; - Base::main_sel_op_function_selector = "MAIN_SEL_OP_FUNCTION_SELECTOR"; Base::main_sel_op_get_contract_instance = "MAIN_SEL_OP_GET_CONTRACT_INSTANCE"; Base::main_sel_op_internal_call = "MAIN_SEL_OP_INTERNAL_CALL"; Base::main_sel_op_internal_return = "MAIN_SEL_OP_INTERNAL_RETURN"; diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/generated/flavor.hpp b/barretenberg/cpp/src/barretenberg/vm/avm/generated/flavor.hpp index f82d78abe6ed..ae4c6a9dec10 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/generated/flavor.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/generated/flavor.hpp @@ -96,7 +96,7 @@ template using tuple_cat_t = decltype(std::tuple_cat(std:: // The entities that will be used in the flavor. 
// clang-format off #define PRECOMPUTED_ENTITIES byte_lookup_sel_bin, byte_lookup_table_byte_lengths, byte_lookup_table_in_tags, byte_lookup_table_input_a, byte_lookup_table_input_b, byte_lookup_table_op_id, byte_lookup_table_output, gas_base_da_gas_fixed_table, gas_base_l2_gas_fixed_table, gas_dyn_da_gas_fixed_table, gas_dyn_l2_gas_fixed_table, gas_sel_gas_cost, main_clk, main_sel_da_end_gas_kernel_input, main_sel_da_start_gas_kernel_input, main_sel_first, main_sel_l2_end_gas_kernel_input, main_sel_l2_start_gas_kernel_input, main_sel_start_exec, main_zeroes, powers_power_of_2 -#define WIRE_ENTITIES main_kernel_inputs, main_kernel_value_out, main_kernel_side_effect_out, main_kernel_metadata_out, main_calldata, main_returndata, alu_a_hi, alu_a_lo, alu_b_hi, alu_b_lo, alu_b_pow, alu_c_hi, alu_c_lo, alu_cf, alu_clk, alu_cmp_gadget_gt, alu_cmp_gadget_input_a, alu_cmp_gadget_input_b, alu_cmp_gadget_non_ff_gt, alu_cmp_gadget_result, alu_cmp_gadget_sel, alu_ff_tag, alu_ia, alu_ib, alu_ic, alu_in_tag, alu_max_bits_sub_b_bits, alu_max_bits_sub_b_pow, alu_op_add, alu_op_cast, alu_op_div, alu_op_eq, alu_op_lt, alu_op_lte, alu_op_mul, alu_op_not, alu_op_shl, alu_op_shr, alu_op_sub, alu_partial_prod_hi, alu_partial_prod_lo, alu_range_check_input_value, alu_range_check_num_bits, alu_range_check_sel, alu_remainder, alu_sel_alu, alu_sel_cmp, alu_sel_shift_which, alu_u128_tag, alu_u16_tag, alu_u1_tag, alu_u32_tag, alu_u64_tag, alu_u8_tag, alu_zero_shift, binary_acc_ia, binary_acc_ib, binary_acc_ic, binary_clk, binary_ia_bytes, binary_ib_bytes, binary_ic_bytes, binary_in_tag, binary_mem_tag_ctr, binary_mem_tag_ctr_inv, binary_op_id, binary_sel_bin, binary_start, bytecode_arifact_hash, bytecode_as_fields, bytecode_bytes, bytecode_bytes_pc, bytecode_class_id, bytecode_contract_address, bytecode_decomposed, bytecode_deployer_addr, bytecode_end_latch, bytecode_incoming_viewing_key_x, bytecode_incoming_viewing_key_y, bytecode_initialization_hash, bytecode_length_remaining, bytecode_nullifier_key_x, bytecode_nullifier_key_y, bytecode_outgoing_viewing_key_x, bytecode_outgoing_viewing_key_y, bytecode_private_fn_root, bytecode_public_key_hash, bytecode_running_hash, bytecode_salt, bytecode_tagging_key_x, bytecode_tagging_key_y, cmp_a_hi, cmp_a_lo, cmp_b_hi, cmp_b_lo, cmp_borrow, cmp_clk, cmp_cmp_rng_ctr, cmp_diff, cmp_input_a, cmp_input_b, cmp_op_eq, cmp_op_eq_diff_inv, cmp_op_gt, cmp_op_non_ff_gt, cmp_p_a_borrow, cmp_p_b_borrow, cmp_p_sub_a_hi, cmp_p_sub_a_lo, cmp_p_sub_b_hi, cmp_p_sub_b_lo, cmp_range_chk_clk, cmp_res_hi, cmp_res_lo, cmp_result, cmp_sel_cmp, cmp_sel_rng_chk, cmp_shift_sel, conversion_clk, conversion_input, conversion_num_limbs, conversion_output_bits, conversion_radix, conversion_sel_to_radix_be, keccakf1600_clk, keccakf1600_input, keccakf1600_output, keccakf1600_sel_keccakf1600, main_abs_da_rem_gas, main_abs_l2_rem_gas, main_alu_in_tag, main_base_da_gas_op_cost, main_base_l2_gas_op_cost, main_bin_op_id, main_call_ptr, main_da_gas_remaining, main_da_gas_u16_r0, main_da_gas_u16_r1, main_da_out_of_gas, main_dyn_da_gas_op_cost, main_dyn_gas_multiplier, main_dyn_l2_gas_op_cost, main_ia, main_ib, main_ic, main_id, main_id_zero, main_ind_addr_a, main_ind_addr_b, main_ind_addr_c, main_ind_addr_d, main_internal_return_ptr, main_inv, main_is_fake_row, main_is_gas_accounted, main_l2_gas_remaining, main_l2_gas_u16_r0, main_l2_gas_u16_r1, main_l2_out_of_gas, main_mem_addr_a, main_mem_addr_b, main_mem_addr_c, main_mem_addr_d, main_op_err, main_opcode_val, main_pc, main_r_in_tag, main_rwa, main_rwb, main_rwc, 
main_rwd, main_sel_alu, main_sel_bin, main_sel_calldata, main_sel_execution_end, main_sel_execution_row, main_sel_mem_op_a, main_sel_mem_op_b, main_sel_mem_op_c, main_sel_mem_op_d, main_sel_mov_ia_to_ic, main_sel_mov_ib_to_ic, main_sel_op_add, main_sel_op_address, main_sel_op_and, main_sel_op_block_number, main_sel_op_calldata_copy, main_sel_op_cast, main_sel_op_chain_id, main_sel_op_dagasleft, main_sel_op_debug_log, main_sel_op_div, main_sel_op_ecadd, main_sel_op_emit_l2_to_l1_msg, main_sel_op_emit_note_hash, main_sel_op_emit_nullifier, main_sel_op_emit_unencrypted_log, main_sel_op_eq, main_sel_op_external_call, main_sel_op_external_return, main_sel_op_external_revert, main_sel_op_fdiv, main_sel_op_fee_per_da_gas, main_sel_op_fee_per_l2_gas, main_sel_op_function_selector, main_sel_op_get_contract_instance, main_sel_op_internal_call, main_sel_op_internal_return, main_sel_op_is_static_call, main_sel_op_jump, main_sel_op_jumpi, main_sel_op_keccak, main_sel_op_l1_to_l2_msg_exists, main_sel_op_l2gasleft, main_sel_op_lt, main_sel_op_lte, main_sel_op_mov, main_sel_op_msm, main_sel_op_mul, main_sel_op_not, main_sel_op_note_hash_exists, main_sel_op_nullifier_exists, main_sel_op_or, main_sel_op_poseidon2, main_sel_op_radix_be, main_sel_op_returndata_copy, main_sel_op_returndata_size, main_sel_op_sender, main_sel_op_set, main_sel_op_sha256, main_sel_op_shl, main_sel_op_shr, main_sel_op_sload, main_sel_op_sstore, main_sel_op_static_call, main_sel_op_sub, main_sel_op_timestamp, main_sel_op_transaction_fee, main_sel_op_version, main_sel_op_xor, main_sel_q_kernel_lookup, main_sel_q_kernel_output_lookup, main_sel_resolve_ind_addr_a, main_sel_resolve_ind_addr_b, main_sel_resolve_ind_addr_c, main_sel_resolve_ind_addr_d, main_sel_returndata, main_sel_rng_16, main_sel_rng_8, main_sel_slice_gadget, main_space_id, main_tag_err, main_w_in_tag, mem_addr, mem_clk, mem_diff, mem_glob_addr, mem_last, mem_lastAccess, mem_one_min_inv, mem_r_in_tag, mem_rw, mem_sel_mem, mem_sel_mov_ia_to_ic, mem_sel_mov_ib_to_ic, mem_sel_op_a, mem_sel_op_b, mem_sel_op_c, mem_sel_op_d, mem_sel_op_poseidon_read_a, mem_sel_op_poseidon_read_b, mem_sel_op_poseidon_read_c, mem_sel_op_poseidon_read_d, mem_sel_op_poseidon_write_a, mem_sel_op_poseidon_write_b, mem_sel_op_poseidon_write_c, mem_sel_op_poseidon_write_d, mem_sel_op_slice, mem_sel_resolve_ind_addr_a, mem_sel_resolve_ind_addr_b, mem_sel_resolve_ind_addr_c, mem_sel_resolve_ind_addr_d, mem_sel_rng_chk, mem_skip_check_tag, mem_space_id, mem_tag, mem_tag_err, mem_tsp, mem_u16_r0, mem_u16_r1, mem_u8_r0, mem_val, mem_w_in_tag, merkle_tree_clk, merkle_tree_expected_tree_root, merkle_tree_latch, merkle_tree_leaf_index, merkle_tree_leaf_index_is_even, merkle_tree_leaf_value, merkle_tree_left_hash, merkle_tree_output_hash, merkle_tree_path_len, merkle_tree_path_len_inv, merkle_tree_right_hash, merkle_tree_sel_merkle_tree, merkle_tree_sibling_value, poseidon2_B_10_0, poseidon2_B_10_1, poseidon2_B_10_2, poseidon2_B_10_3, poseidon2_B_11_0, poseidon2_B_11_1, poseidon2_B_11_2, poseidon2_B_11_3, poseidon2_B_12_0, poseidon2_B_12_1, poseidon2_B_12_2, poseidon2_B_12_3, poseidon2_B_13_0, poseidon2_B_13_1, poseidon2_B_13_2, poseidon2_B_13_3, poseidon2_B_14_0, poseidon2_B_14_1, poseidon2_B_14_2, poseidon2_B_14_3, poseidon2_B_15_0, poseidon2_B_15_1, poseidon2_B_15_2, poseidon2_B_15_3, poseidon2_B_16_0, poseidon2_B_16_1, poseidon2_B_16_2, poseidon2_B_16_3, poseidon2_B_17_0, poseidon2_B_17_1, poseidon2_B_17_2, poseidon2_B_17_3, poseidon2_B_18_0, poseidon2_B_18_1, poseidon2_B_18_2, poseidon2_B_18_3, 
poseidon2_B_19_0, poseidon2_B_19_1, poseidon2_B_19_2, poseidon2_B_19_3, poseidon2_B_20_0, poseidon2_B_20_1, poseidon2_B_20_2, poseidon2_B_20_3, poseidon2_B_21_0, poseidon2_B_21_1, poseidon2_B_21_2, poseidon2_B_21_3, poseidon2_B_22_0, poseidon2_B_22_1, poseidon2_B_22_2, poseidon2_B_22_3, poseidon2_B_23_0, poseidon2_B_23_1, poseidon2_B_23_2, poseidon2_B_23_3, poseidon2_B_24_0, poseidon2_B_24_1, poseidon2_B_24_2, poseidon2_B_24_3, poseidon2_B_25_0, poseidon2_B_25_1, poseidon2_B_25_2, poseidon2_B_25_3, poseidon2_B_26_0, poseidon2_B_26_1, poseidon2_B_26_2, poseidon2_B_26_3, poseidon2_B_27_0, poseidon2_B_27_1, poseidon2_B_27_2, poseidon2_B_27_3, poseidon2_B_28_0, poseidon2_B_28_1, poseidon2_B_28_2, poseidon2_B_28_3, poseidon2_B_29_0, poseidon2_B_29_1, poseidon2_B_29_2, poseidon2_B_29_3, poseidon2_B_30_0, poseidon2_B_30_1, poseidon2_B_30_2, poseidon2_B_30_3, poseidon2_B_31_0, poseidon2_B_31_1, poseidon2_B_31_2, poseidon2_B_31_3, poseidon2_B_32_0, poseidon2_B_32_1, poseidon2_B_32_2, poseidon2_B_32_3, poseidon2_B_33_0, poseidon2_B_33_1, poseidon2_B_33_2, poseidon2_B_33_3, poseidon2_B_34_0, poseidon2_B_34_1, poseidon2_B_34_2, poseidon2_B_34_3, poseidon2_B_35_0, poseidon2_B_35_1, poseidon2_B_35_2, poseidon2_B_35_3, poseidon2_B_36_0, poseidon2_B_36_1, poseidon2_B_36_2, poseidon2_B_36_3, poseidon2_B_37_0, poseidon2_B_37_1, poseidon2_B_37_2, poseidon2_B_37_3, poseidon2_B_38_0, poseidon2_B_38_1, poseidon2_B_38_2, poseidon2_B_38_3, poseidon2_B_39_0, poseidon2_B_39_1, poseidon2_B_39_2, poseidon2_B_39_3, poseidon2_B_40_0, poseidon2_B_40_1, poseidon2_B_40_2, poseidon2_B_40_3, poseidon2_B_41_0, poseidon2_B_41_1, poseidon2_B_41_2, poseidon2_B_41_3, poseidon2_B_42_0, poseidon2_B_42_1, poseidon2_B_42_2, poseidon2_B_42_3, poseidon2_B_43_0, poseidon2_B_43_1, poseidon2_B_43_2, poseidon2_B_43_3, poseidon2_B_44_0, poseidon2_B_44_1, poseidon2_B_44_2, poseidon2_B_44_3, poseidon2_B_45_0, poseidon2_B_45_1, poseidon2_B_45_2, poseidon2_B_45_3, poseidon2_B_46_0, poseidon2_B_46_1, poseidon2_B_46_2, poseidon2_B_46_3, poseidon2_B_47_0, poseidon2_B_47_1, poseidon2_B_47_2, poseidon2_B_47_3, poseidon2_B_48_0, poseidon2_B_48_1, poseidon2_B_48_2, poseidon2_B_48_3, poseidon2_B_49_0, poseidon2_B_49_1, poseidon2_B_49_2, poseidon2_B_49_3, poseidon2_B_4_0, poseidon2_B_4_1, poseidon2_B_4_2, poseidon2_B_4_3, poseidon2_B_50_0, poseidon2_B_50_1, poseidon2_B_50_2, poseidon2_B_50_3, poseidon2_B_51_0, poseidon2_B_51_1, poseidon2_B_51_2, poseidon2_B_51_3, poseidon2_B_52_0, poseidon2_B_52_1, poseidon2_B_52_2, poseidon2_B_52_3, poseidon2_B_53_0, poseidon2_B_53_1, poseidon2_B_53_2, poseidon2_B_53_3, poseidon2_B_54_0, poseidon2_B_54_1, poseidon2_B_54_2, poseidon2_B_54_3, poseidon2_B_55_0, poseidon2_B_55_1, poseidon2_B_55_2, poseidon2_B_55_3, poseidon2_B_56_0, poseidon2_B_56_1, poseidon2_B_56_2, poseidon2_B_56_3, poseidon2_B_57_0, poseidon2_B_57_1, poseidon2_B_57_2, poseidon2_B_57_3, poseidon2_B_58_0, poseidon2_B_58_1, poseidon2_B_58_2, poseidon2_B_58_3, poseidon2_B_59_0, poseidon2_B_59_1, poseidon2_B_59_2, poseidon2_B_59_3, poseidon2_B_5_0, poseidon2_B_5_1, poseidon2_B_5_2, poseidon2_B_5_3, poseidon2_B_6_0, poseidon2_B_6_1, poseidon2_B_6_2, poseidon2_B_6_3, poseidon2_B_7_0, poseidon2_B_7_1, poseidon2_B_7_2, poseidon2_B_7_3, poseidon2_B_8_0, poseidon2_B_8_1, poseidon2_B_8_2, poseidon2_B_8_3, poseidon2_B_9_0, poseidon2_B_9_1, poseidon2_B_9_2, poseidon2_B_9_3, poseidon2_EXT_LAYER_4, poseidon2_EXT_LAYER_5, poseidon2_EXT_LAYER_6, poseidon2_EXT_LAYER_7, poseidon2_T_0_4, poseidon2_T_0_5, poseidon2_T_0_6, poseidon2_T_0_7, poseidon2_T_1_4, poseidon2_T_1_5, 
poseidon2_T_1_6, poseidon2_T_1_7, poseidon2_T_2_4, poseidon2_T_2_5, poseidon2_T_2_6, poseidon2_T_2_7, poseidon2_T_3_4, poseidon2_T_3_5, poseidon2_T_3_6, poseidon2_T_3_7, poseidon2_T_60_4, poseidon2_T_60_5, poseidon2_T_60_6, poseidon2_T_60_7, poseidon2_T_61_4, poseidon2_T_61_5, poseidon2_T_61_6, poseidon2_T_61_7, poseidon2_T_62_4, poseidon2_T_62_5, poseidon2_T_62_6, poseidon2_T_62_7, poseidon2_T_63_4, poseidon2_T_63_5, poseidon2_T_63_6, poseidon2_T_63_7, poseidon2_a_0, poseidon2_a_1, poseidon2_a_2, poseidon2_a_3, poseidon2_b_0, poseidon2_b_1, poseidon2_b_2, poseidon2_b_3, poseidon2_clk, poseidon2_full_a_0, poseidon2_full_a_1, poseidon2_full_a_2, poseidon2_full_a_3, poseidon2_full_b_0, poseidon2_full_b_1, poseidon2_full_b_2, poseidon2_full_b_3, poseidon2_full_clk, poseidon2_full_end_poseidon, poseidon2_full_execute_poseidon_perm, poseidon2_full_input_0, poseidon2_full_input_1, poseidon2_full_input_2, poseidon2_full_input_len, poseidon2_full_num_perm_rounds_rem, poseidon2_full_num_perm_rounds_rem_inv, poseidon2_full_output, poseidon2_full_padding, poseidon2_full_sel_merkle_tree, poseidon2_full_sel_poseidon, poseidon2_full_start_poseidon, poseidon2_input_addr, poseidon2_mem_addr_read_a, poseidon2_mem_addr_read_b, poseidon2_mem_addr_read_c, poseidon2_mem_addr_read_d, poseidon2_mem_addr_write_a, poseidon2_mem_addr_write_b, poseidon2_mem_addr_write_c, poseidon2_mem_addr_write_d, poseidon2_output_addr, poseidon2_sel_poseidon_perm, poseidon2_sel_poseidon_perm_immediate, poseidon2_sel_poseidon_perm_mem_op, poseidon2_space_id, range_check_alu_rng_chk, range_check_clk, range_check_cmp_hi_bits_rng_chk, range_check_cmp_lo_bits_rng_chk, range_check_cmp_non_ff_rng_chk, range_check_dyn_diff, range_check_dyn_rng_chk_bits, range_check_dyn_rng_chk_pow_2, range_check_gas_da_rng_chk, range_check_gas_l2_rng_chk, range_check_is_lte_u112, range_check_is_lte_u128, range_check_is_lte_u16, range_check_is_lte_u32, range_check_is_lte_u48, range_check_is_lte_u64, range_check_is_lte_u80, range_check_is_lte_u96, range_check_rng_chk_bits, range_check_sel_lookup_0, range_check_sel_lookup_1, range_check_sel_lookup_2, range_check_sel_lookup_3, range_check_sel_lookup_4, range_check_sel_lookup_5, range_check_sel_lookup_6, range_check_sel_rng_chk, range_check_u16_r0, range_check_u16_r1, range_check_u16_r2, range_check_u16_r3, range_check_u16_r4, range_check_u16_r5, range_check_u16_r6, range_check_u16_r7, range_check_value, sha256_clk, sha256_input, sha256_output, sha256_sel_sha256_compression, sha256_state, slice_addr, slice_clk, slice_cnt, slice_col_offset, slice_one_min_inv, slice_sel_cd_cpy, slice_sel_mem_active, slice_sel_return, slice_sel_start, slice_space_id, slice_val, lookup_rng_chk_pow_2_counts, lookup_rng_chk_diff_counts, lookup_rng_chk_0_counts, lookup_rng_chk_1_counts, lookup_rng_chk_2_counts, lookup_rng_chk_3_counts, lookup_rng_chk_4_counts, lookup_rng_chk_5_counts, lookup_rng_chk_6_counts, lookup_rng_chk_7_counts, lookup_mem_rng_chk_0_counts, lookup_mem_rng_chk_1_counts, lookup_mem_rng_chk_2_counts, lookup_pow_2_0_counts, lookup_pow_2_1_counts, lookup_byte_lengths_counts, lookup_byte_operations_counts, lookup_opcode_gas_counts, lookup_l2_gas_rng_chk_0_counts, lookup_l2_gas_rng_chk_1_counts, lookup_da_gas_rng_chk_0_counts, lookup_da_gas_rng_chk_1_counts, lookup_cd_value_counts, lookup_ret_value_counts, incl_main_tag_err_counts, incl_mem_tag_err_counts +#define WIRE_ENTITIES main_kernel_inputs, main_kernel_value_out, main_kernel_side_effect_out, main_kernel_metadata_out, main_calldata, main_returndata, alu_a_hi, 
alu_a_lo, alu_b_hi, alu_b_lo, alu_b_pow, alu_c_hi, alu_c_lo, alu_cf, alu_clk, alu_cmp_gadget_gt, alu_cmp_gadget_input_a, alu_cmp_gadget_input_b, alu_cmp_gadget_non_ff_gt, alu_cmp_gadget_result, alu_cmp_gadget_sel, alu_ff_tag, alu_ia, alu_ib, alu_ic, alu_in_tag, alu_max_bits_sub_b_bits, alu_max_bits_sub_b_pow, alu_op_add, alu_op_cast, alu_op_div, alu_op_eq, alu_op_lt, alu_op_lte, alu_op_mul, alu_op_not, alu_op_shl, alu_op_shr, alu_op_sub, alu_partial_prod_hi, alu_partial_prod_lo, alu_range_check_input_value, alu_range_check_num_bits, alu_range_check_sel, alu_remainder, alu_sel_alu, alu_sel_cmp, alu_sel_shift_which, alu_u128_tag, alu_u16_tag, alu_u1_tag, alu_u32_tag, alu_u64_tag, alu_u8_tag, alu_zero_shift, binary_acc_ia, binary_acc_ib, binary_acc_ic, binary_clk, binary_ia_bytes, binary_ib_bytes, binary_ic_bytes, binary_in_tag, binary_mem_tag_ctr, binary_mem_tag_ctr_inv, binary_op_id, binary_sel_bin, binary_start, bytecode_arifact_hash, bytecode_as_fields, bytecode_bytes, bytecode_bytes_pc, bytecode_class_id, bytecode_contract_address, bytecode_decomposed, bytecode_deployer_addr, bytecode_end_latch, bytecode_incoming_viewing_key_x, bytecode_incoming_viewing_key_y, bytecode_initialization_hash, bytecode_length_remaining, bytecode_nullifier_key_x, bytecode_nullifier_key_y, bytecode_outgoing_viewing_key_x, bytecode_outgoing_viewing_key_y, bytecode_private_fn_root, bytecode_public_key_hash, bytecode_running_hash, bytecode_salt, bytecode_tagging_key_x, bytecode_tagging_key_y, cmp_a_hi, cmp_a_lo, cmp_b_hi, cmp_b_lo, cmp_borrow, cmp_clk, cmp_cmp_rng_ctr, cmp_diff, cmp_input_a, cmp_input_b, cmp_op_eq, cmp_op_eq_diff_inv, cmp_op_gt, cmp_op_non_ff_gt, cmp_p_a_borrow, cmp_p_b_borrow, cmp_p_sub_a_hi, cmp_p_sub_a_lo, cmp_p_sub_b_hi, cmp_p_sub_b_lo, cmp_range_chk_clk, cmp_res_hi, cmp_res_lo, cmp_result, cmp_sel_cmp, cmp_sel_rng_chk, cmp_shift_sel, conversion_clk, conversion_input, conversion_num_limbs, conversion_output_bits, conversion_radix, conversion_sel_to_radix_be, keccakf1600_clk, keccakf1600_input, keccakf1600_output, keccakf1600_sel_keccakf1600, main_abs_da_rem_gas, main_abs_l2_rem_gas, main_alu_in_tag, main_base_da_gas_op_cost, main_base_l2_gas_op_cost, main_bin_op_id, main_call_ptr, main_da_gas_remaining, main_da_gas_u16_r0, main_da_gas_u16_r1, main_da_out_of_gas, main_dyn_da_gas_op_cost, main_dyn_gas_multiplier, main_dyn_l2_gas_op_cost, main_ia, main_ib, main_ic, main_id, main_id_zero, main_ind_addr_a, main_ind_addr_b, main_ind_addr_c, main_ind_addr_d, main_internal_return_ptr, main_inv, main_is_fake_row, main_is_gas_accounted, main_l2_gas_remaining, main_l2_gas_u16_r0, main_l2_gas_u16_r1, main_l2_out_of_gas, main_mem_addr_a, main_mem_addr_b, main_mem_addr_c, main_mem_addr_d, main_op_err, main_opcode_val, main_pc, main_r_in_tag, main_rwa, main_rwb, main_rwc, main_rwd, main_sel_alu, main_sel_bin, main_sel_calldata, main_sel_execution_end, main_sel_execution_row, main_sel_mem_op_a, main_sel_mem_op_b, main_sel_mem_op_c, main_sel_mem_op_d, main_sel_mov_ia_to_ic, main_sel_mov_ib_to_ic, main_sel_op_add, main_sel_op_address, main_sel_op_and, main_sel_op_block_number, main_sel_op_calldata_copy, main_sel_op_cast, main_sel_op_chain_id, main_sel_op_dagasleft, main_sel_op_debug_log, main_sel_op_div, main_sel_op_ecadd, main_sel_op_emit_l2_to_l1_msg, main_sel_op_emit_note_hash, main_sel_op_emit_nullifier, main_sel_op_emit_unencrypted_log, main_sel_op_eq, main_sel_op_external_call, main_sel_op_external_return, main_sel_op_external_revert, main_sel_op_fdiv, main_sel_op_fee_per_da_gas, 
main_sel_op_fee_per_l2_gas, main_sel_op_get_contract_instance, main_sel_op_internal_call, main_sel_op_internal_return, main_sel_op_is_static_call, main_sel_op_jump, main_sel_op_jumpi, main_sel_op_keccak, main_sel_op_l1_to_l2_msg_exists, main_sel_op_l2gasleft, main_sel_op_lt, main_sel_op_lte, main_sel_op_mov, main_sel_op_msm, main_sel_op_mul, main_sel_op_not, main_sel_op_note_hash_exists, main_sel_op_nullifier_exists, main_sel_op_or, main_sel_op_poseidon2, main_sel_op_radix_be, main_sel_op_returndata_copy, main_sel_op_returndata_size, main_sel_op_sender, main_sel_op_set, main_sel_op_sha256, main_sel_op_shl, main_sel_op_shr, main_sel_op_sload, main_sel_op_sstore, main_sel_op_static_call, main_sel_op_sub, main_sel_op_timestamp, main_sel_op_transaction_fee, main_sel_op_version, main_sel_op_xor, main_sel_q_kernel_lookup, main_sel_q_kernel_output_lookup, main_sel_resolve_ind_addr_a, main_sel_resolve_ind_addr_b, main_sel_resolve_ind_addr_c, main_sel_resolve_ind_addr_d, main_sel_returndata, main_sel_rng_16, main_sel_rng_8, main_sel_slice_gadget, main_space_id, main_tag_err, main_w_in_tag, mem_addr, mem_clk, mem_diff, mem_glob_addr, mem_last, mem_lastAccess, mem_one_min_inv, mem_r_in_tag, mem_rw, mem_sel_mem, mem_sel_mov_ia_to_ic, mem_sel_mov_ib_to_ic, mem_sel_op_a, mem_sel_op_b, mem_sel_op_c, mem_sel_op_d, mem_sel_op_poseidon_read_a, mem_sel_op_poseidon_read_b, mem_sel_op_poseidon_read_c, mem_sel_op_poseidon_read_d, mem_sel_op_poseidon_write_a, mem_sel_op_poseidon_write_b, mem_sel_op_poseidon_write_c, mem_sel_op_poseidon_write_d, mem_sel_op_slice, mem_sel_resolve_ind_addr_a, mem_sel_resolve_ind_addr_b, mem_sel_resolve_ind_addr_c, mem_sel_resolve_ind_addr_d, mem_sel_rng_chk, mem_skip_check_tag, mem_space_id, mem_tag, mem_tag_err, mem_tsp, mem_u16_r0, mem_u16_r1, mem_u8_r0, mem_val, mem_w_in_tag, merkle_tree_clk, merkle_tree_expected_tree_root, merkle_tree_latch, merkle_tree_leaf_index, merkle_tree_leaf_index_is_even, merkle_tree_leaf_value, merkle_tree_left_hash, merkle_tree_output_hash, merkle_tree_path_len, merkle_tree_path_len_inv, merkle_tree_right_hash, merkle_tree_sel_merkle_tree, merkle_tree_sibling_value, poseidon2_B_10_0, poseidon2_B_10_1, poseidon2_B_10_2, poseidon2_B_10_3, poseidon2_B_11_0, poseidon2_B_11_1, poseidon2_B_11_2, poseidon2_B_11_3, poseidon2_B_12_0, poseidon2_B_12_1, poseidon2_B_12_2, poseidon2_B_12_3, poseidon2_B_13_0, poseidon2_B_13_1, poseidon2_B_13_2, poseidon2_B_13_3, poseidon2_B_14_0, poseidon2_B_14_1, poseidon2_B_14_2, poseidon2_B_14_3, poseidon2_B_15_0, poseidon2_B_15_1, poseidon2_B_15_2, poseidon2_B_15_3, poseidon2_B_16_0, poseidon2_B_16_1, poseidon2_B_16_2, poseidon2_B_16_3, poseidon2_B_17_0, poseidon2_B_17_1, poseidon2_B_17_2, poseidon2_B_17_3, poseidon2_B_18_0, poseidon2_B_18_1, poseidon2_B_18_2, poseidon2_B_18_3, poseidon2_B_19_0, poseidon2_B_19_1, poseidon2_B_19_2, poseidon2_B_19_3, poseidon2_B_20_0, poseidon2_B_20_1, poseidon2_B_20_2, poseidon2_B_20_3, poseidon2_B_21_0, poseidon2_B_21_1, poseidon2_B_21_2, poseidon2_B_21_3, poseidon2_B_22_0, poseidon2_B_22_1, poseidon2_B_22_2, poseidon2_B_22_3, poseidon2_B_23_0, poseidon2_B_23_1, poseidon2_B_23_2, poseidon2_B_23_3, poseidon2_B_24_0, poseidon2_B_24_1, poseidon2_B_24_2, poseidon2_B_24_3, poseidon2_B_25_0, poseidon2_B_25_1, poseidon2_B_25_2, poseidon2_B_25_3, poseidon2_B_26_0, poseidon2_B_26_1, poseidon2_B_26_2, poseidon2_B_26_3, poseidon2_B_27_0, poseidon2_B_27_1, poseidon2_B_27_2, poseidon2_B_27_3, poseidon2_B_28_0, poseidon2_B_28_1, poseidon2_B_28_2, poseidon2_B_28_3, poseidon2_B_29_0, poseidon2_B_29_1, 
poseidon2_B_29_2, poseidon2_B_29_3, poseidon2_B_30_0, poseidon2_B_30_1, poseidon2_B_30_2, poseidon2_B_30_3, poseidon2_B_31_0, poseidon2_B_31_1, poseidon2_B_31_2, poseidon2_B_31_3, poseidon2_B_32_0, poseidon2_B_32_1, poseidon2_B_32_2, poseidon2_B_32_3, poseidon2_B_33_0, poseidon2_B_33_1, poseidon2_B_33_2, poseidon2_B_33_3, poseidon2_B_34_0, poseidon2_B_34_1, poseidon2_B_34_2, poseidon2_B_34_3, poseidon2_B_35_0, poseidon2_B_35_1, poseidon2_B_35_2, poseidon2_B_35_3, poseidon2_B_36_0, poseidon2_B_36_1, poseidon2_B_36_2, poseidon2_B_36_3, poseidon2_B_37_0, poseidon2_B_37_1, poseidon2_B_37_2, poseidon2_B_37_3, poseidon2_B_38_0, poseidon2_B_38_1, poseidon2_B_38_2, poseidon2_B_38_3, poseidon2_B_39_0, poseidon2_B_39_1, poseidon2_B_39_2, poseidon2_B_39_3, poseidon2_B_40_0, poseidon2_B_40_1, poseidon2_B_40_2, poseidon2_B_40_3, poseidon2_B_41_0, poseidon2_B_41_1, poseidon2_B_41_2, poseidon2_B_41_3, poseidon2_B_42_0, poseidon2_B_42_1, poseidon2_B_42_2, poseidon2_B_42_3, poseidon2_B_43_0, poseidon2_B_43_1, poseidon2_B_43_2, poseidon2_B_43_3, poseidon2_B_44_0, poseidon2_B_44_1, poseidon2_B_44_2, poseidon2_B_44_3, poseidon2_B_45_0, poseidon2_B_45_1, poseidon2_B_45_2, poseidon2_B_45_3, poseidon2_B_46_0, poseidon2_B_46_1, poseidon2_B_46_2, poseidon2_B_46_3, poseidon2_B_47_0, poseidon2_B_47_1, poseidon2_B_47_2, poseidon2_B_47_3, poseidon2_B_48_0, poseidon2_B_48_1, poseidon2_B_48_2, poseidon2_B_48_3, poseidon2_B_49_0, poseidon2_B_49_1, poseidon2_B_49_2, poseidon2_B_49_3, poseidon2_B_4_0, poseidon2_B_4_1, poseidon2_B_4_2, poseidon2_B_4_3, poseidon2_B_50_0, poseidon2_B_50_1, poseidon2_B_50_2, poseidon2_B_50_3, poseidon2_B_51_0, poseidon2_B_51_1, poseidon2_B_51_2, poseidon2_B_51_3, poseidon2_B_52_0, poseidon2_B_52_1, poseidon2_B_52_2, poseidon2_B_52_3, poseidon2_B_53_0, poseidon2_B_53_1, poseidon2_B_53_2, poseidon2_B_53_3, poseidon2_B_54_0, poseidon2_B_54_1, poseidon2_B_54_2, poseidon2_B_54_3, poseidon2_B_55_0, poseidon2_B_55_1, poseidon2_B_55_2, poseidon2_B_55_3, poseidon2_B_56_0, poseidon2_B_56_1, poseidon2_B_56_2, poseidon2_B_56_3, poseidon2_B_57_0, poseidon2_B_57_1, poseidon2_B_57_2, poseidon2_B_57_3, poseidon2_B_58_0, poseidon2_B_58_1, poseidon2_B_58_2, poseidon2_B_58_3, poseidon2_B_59_0, poseidon2_B_59_1, poseidon2_B_59_2, poseidon2_B_59_3, poseidon2_B_5_0, poseidon2_B_5_1, poseidon2_B_5_2, poseidon2_B_5_3, poseidon2_B_6_0, poseidon2_B_6_1, poseidon2_B_6_2, poseidon2_B_6_3, poseidon2_B_7_0, poseidon2_B_7_1, poseidon2_B_7_2, poseidon2_B_7_3, poseidon2_B_8_0, poseidon2_B_8_1, poseidon2_B_8_2, poseidon2_B_8_3, poseidon2_B_9_0, poseidon2_B_9_1, poseidon2_B_9_2, poseidon2_B_9_3, poseidon2_EXT_LAYER_4, poseidon2_EXT_LAYER_5, poseidon2_EXT_LAYER_6, poseidon2_EXT_LAYER_7, poseidon2_T_0_4, poseidon2_T_0_5, poseidon2_T_0_6, poseidon2_T_0_7, poseidon2_T_1_4, poseidon2_T_1_5, poseidon2_T_1_6, poseidon2_T_1_7, poseidon2_T_2_4, poseidon2_T_2_5, poseidon2_T_2_6, poseidon2_T_2_7, poseidon2_T_3_4, poseidon2_T_3_5, poseidon2_T_3_6, poseidon2_T_3_7, poseidon2_T_60_4, poseidon2_T_60_5, poseidon2_T_60_6, poseidon2_T_60_7, poseidon2_T_61_4, poseidon2_T_61_5, poseidon2_T_61_6, poseidon2_T_61_7, poseidon2_T_62_4, poseidon2_T_62_5, poseidon2_T_62_6, poseidon2_T_62_7, poseidon2_T_63_4, poseidon2_T_63_5, poseidon2_T_63_6, poseidon2_T_63_7, poseidon2_a_0, poseidon2_a_1, poseidon2_a_2, poseidon2_a_3, poseidon2_b_0, poseidon2_b_1, poseidon2_b_2, poseidon2_b_3, poseidon2_clk, poseidon2_full_a_0, poseidon2_full_a_1, poseidon2_full_a_2, poseidon2_full_a_3, poseidon2_full_b_0, poseidon2_full_b_1, poseidon2_full_b_2, poseidon2_full_b_3, 
poseidon2_full_clk, poseidon2_full_end_poseidon, poseidon2_full_execute_poseidon_perm, poseidon2_full_input_0, poseidon2_full_input_1, poseidon2_full_input_2, poseidon2_full_input_len, poseidon2_full_num_perm_rounds_rem, poseidon2_full_num_perm_rounds_rem_inv, poseidon2_full_output, poseidon2_full_padding, poseidon2_full_sel_merkle_tree, poseidon2_full_sel_poseidon, poseidon2_full_start_poseidon, poseidon2_input_addr, poseidon2_mem_addr_read_a, poseidon2_mem_addr_read_b, poseidon2_mem_addr_read_c, poseidon2_mem_addr_read_d, poseidon2_mem_addr_write_a, poseidon2_mem_addr_write_b, poseidon2_mem_addr_write_c, poseidon2_mem_addr_write_d, poseidon2_output_addr, poseidon2_sel_poseidon_perm, poseidon2_sel_poseidon_perm_immediate, poseidon2_sel_poseidon_perm_mem_op, poseidon2_space_id, range_check_alu_rng_chk, range_check_clk, range_check_cmp_hi_bits_rng_chk, range_check_cmp_lo_bits_rng_chk, range_check_cmp_non_ff_rng_chk, range_check_dyn_diff, range_check_dyn_rng_chk_bits, range_check_dyn_rng_chk_pow_2, range_check_gas_da_rng_chk, range_check_gas_l2_rng_chk, range_check_is_lte_u112, range_check_is_lte_u128, range_check_is_lte_u16, range_check_is_lte_u32, range_check_is_lte_u48, range_check_is_lte_u64, range_check_is_lte_u80, range_check_is_lte_u96, range_check_rng_chk_bits, range_check_sel_lookup_0, range_check_sel_lookup_1, range_check_sel_lookup_2, range_check_sel_lookup_3, range_check_sel_lookup_4, range_check_sel_lookup_5, range_check_sel_lookup_6, range_check_sel_rng_chk, range_check_u16_r0, range_check_u16_r1, range_check_u16_r2, range_check_u16_r3, range_check_u16_r4, range_check_u16_r5, range_check_u16_r6, range_check_u16_r7, range_check_value, sha256_clk, sha256_input, sha256_output, sha256_sel_sha256_compression, sha256_state, slice_addr, slice_clk, slice_cnt, slice_col_offset, slice_one_min_inv, slice_sel_cd_cpy, slice_sel_mem_active, slice_sel_return, slice_sel_start, slice_space_id, slice_val, lookup_rng_chk_pow_2_counts, lookup_rng_chk_diff_counts, lookup_rng_chk_0_counts, lookup_rng_chk_1_counts, lookup_rng_chk_2_counts, lookup_rng_chk_3_counts, lookup_rng_chk_4_counts, lookup_rng_chk_5_counts, lookup_rng_chk_6_counts, lookup_rng_chk_7_counts, lookup_mem_rng_chk_0_counts, lookup_mem_rng_chk_1_counts, lookup_mem_rng_chk_2_counts, lookup_pow_2_0_counts, lookup_pow_2_1_counts, lookup_byte_lengths_counts, lookup_byte_operations_counts, lookup_opcode_gas_counts, lookup_l2_gas_rng_chk_0_counts, lookup_l2_gas_rng_chk_1_counts, lookup_da_gas_rng_chk_0_counts, lookup_da_gas_rng_chk_1_counts, lookup_cd_value_counts, lookup_ret_value_counts, incl_main_tag_err_counts, incl_mem_tag_err_counts #define DERIVED_WITNESS_ENTITIES perm_rng_non_ff_cmp_inv, perm_rng_cmp_lo_inv, perm_rng_cmp_hi_inv, perm_rng_alu_inv, perm_cmp_alu_inv, perm_pos_mem_read_a_inv, perm_pos_mem_read_b_inv, perm_pos_mem_read_c_inv, perm_pos_mem_read_d_inv, perm_pos_mem_write_a_inv, perm_pos_mem_write_b_inv, perm_pos_mem_write_c_inv, perm_pos_mem_write_d_inv, perm_pos2_fixed_pos2_perm_inv, perm_slice_mem_inv, perm_merkle_poseidon2_inv, perm_main_alu_inv, perm_main_bin_inv, perm_main_conv_inv, perm_main_sha256_inv, perm_main_pos2_perm_inv, perm_main_slice_inv, perm_main_mem_a_inv, perm_main_mem_b_inv, perm_main_mem_c_inv, perm_main_mem_d_inv, perm_main_mem_ind_addr_a_inv, perm_main_mem_ind_addr_b_inv, perm_main_mem_ind_addr_c_inv, perm_main_mem_ind_addr_d_inv, lookup_rng_chk_pow_2_inv, lookup_rng_chk_diff_inv, lookup_rng_chk_0_inv, lookup_rng_chk_1_inv, lookup_rng_chk_2_inv, lookup_rng_chk_3_inv, lookup_rng_chk_4_inv, 
lookup_rng_chk_5_inv, lookup_rng_chk_6_inv, lookup_rng_chk_7_inv, lookup_mem_rng_chk_0_inv, lookup_mem_rng_chk_1_inv, lookup_mem_rng_chk_2_inv, lookup_pow_2_0_inv, lookup_pow_2_1_inv, lookup_byte_lengths_inv, lookup_byte_operations_inv, lookup_opcode_gas_inv, lookup_l2_gas_rng_chk_0_inv, lookup_l2_gas_rng_chk_1_inv, lookup_da_gas_rng_chk_0_inv, lookup_da_gas_rng_chk_1_inv, lookup_cd_value_inv, lookup_ret_value_inv, incl_main_tag_err_inv, incl_mem_tag_err_inv #define SHIFTED_ENTITIES binary_acc_ia_shift, binary_acc_ib_shift, binary_acc_ic_shift, binary_mem_tag_ctr_shift, binary_op_id_shift, cmp_a_hi_shift, cmp_a_lo_shift, cmp_b_hi_shift, cmp_b_lo_shift, cmp_cmp_rng_ctr_shift, cmp_op_gt_shift, cmp_p_sub_a_hi_shift, cmp_p_sub_a_lo_shift, cmp_p_sub_b_hi_shift, cmp_p_sub_b_lo_shift, cmp_sel_rng_chk_shift, main_da_gas_remaining_shift, main_internal_return_ptr_shift, main_l2_gas_remaining_shift, main_pc_shift, main_sel_execution_end_shift, main_sel_execution_row_shift, mem_glob_addr_shift, mem_rw_shift, mem_sel_mem_shift, mem_tag_shift, mem_tsp_shift, mem_val_shift, merkle_tree_leaf_index_shift, merkle_tree_leaf_value_shift, merkle_tree_path_len_shift, poseidon2_full_a_0_shift, poseidon2_full_a_1_shift, poseidon2_full_a_2_shift, poseidon2_full_a_3_shift, poseidon2_full_execute_poseidon_perm_shift, poseidon2_full_input_0_shift, poseidon2_full_input_1_shift, poseidon2_full_input_2_shift, poseidon2_full_num_perm_rounds_rem_shift, poseidon2_full_sel_poseidon_shift, poseidon2_full_start_poseidon_shift, slice_addr_shift, slice_clk_shift, slice_cnt_shift, slice_col_offset_shift, slice_sel_cd_cpy_shift, slice_sel_mem_active_shift, slice_sel_return_shift, slice_sel_start_shift, slice_space_id_shift #define TO_BE_SHIFTED(e) e.binary_acc_ia, e.binary_acc_ib, e.binary_acc_ic, e.binary_mem_tag_ctr, e.binary_op_id, e.cmp_a_hi, e.cmp_a_lo, e.cmp_b_hi, e.cmp_b_lo, e.cmp_cmp_rng_ctr, e.cmp_op_gt, e.cmp_p_sub_a_hi, e.cmp_p_sub_a_lo, e.cmp_p_sub_b_hi, e.cmp_p_sub_b_lo, e.cmp_sel_rng_chk, e.main_da_gas_remaining, e.main_internal_return_ptr, e.main_l2_gas_remaining, e.main_pc, e.main_sel_execution_end, e.main_sel_execution_row, e.mem_glob_addr, e.mem_rw, e.mem_sel_mem, e.mem_tag, e.mem_tsp, e.mem_val, e.merkle_tree_leaf_index, e.merkle_tree_leaf_value, e.merkle_tree_path_len, e.poseidon2_full_a_0, e.poseidon2_full_a_1, e.poseidon2_full_a_2, e.poseidon2_full_a_3, e.poseidon2_full_execute_poseidon_perm, e.poseidon2_full_input_0, e.poseidon2_full_input_1, e.poseidon2_full_input_2, e.poseidon2_full_num_perm_rounds_rem, e.poseidon2_full_sel_poseidon, e.poseidon2_full_start_poseidon, e.slice_addr, e.slice_clk, e.slice_cnt, e.slice_col_offset, e.slice_sel_cd_cpy, e.slice_sel_mem_active, e.slice_sel_return, e.slice_sel_start, e.slice_space_id @@ -125,12 +125,12 @@ class AvmFlavor { static constexpr bool HasZK = false; static constexpr size_t NUM_PRECOMPUTED_ENTITIES = 21; - static constexpr size_t NUM_WITNESS_ENTITIES = 745; + static constexpr size_t NUM_WITNESS_ENTITIES = 744; static constexpr size_t NUM_SHIFTED_ENTITIES = 51; static constexpr size_t NUM_WIRES = NUM_WITNESS_ENTITIES + NUM_PRECOMPUTED_ENTITIES; // We have two copies of the witness entities, so we subtract the number of fixed ones (they have no shift), one for // the unshifted and one for the shifted - static constexpr size_t NUM_ALL_ENTITIES = 817; + static constexpr size_t NUM_ALL_ENTITIES = 816; // The total number of witnesses including shifts and derived entities. 
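// Sketch, not part of the generated source: the updated constants in this hunk stay mutually
// consistent after dropping the single main_sel_op_function_selector column, since
// NUM_WITNESS_ENTITIES + NUM_PRECOMPUTED_ENTITIES + NUM_SHIFTED_ENTITIES = 744 + 21 + 51 = 816
// matches the new NUM_ALL_ENTITIES (previously 745 + 21 + 51 = 817). A hypothetical
// compile-time guard for this invariant, using only the constants defined above:
// static_assert(NUM_WITNESS_ENTITIES + NUM_PRECOMPUTED_ENTITIES + NUM_SHIFTED_ENTITIES == NUM_ALL_ENTITIES,
//               "AVM entity counts out of sync");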
static constexpr size_t NUM_ALL_WITNESS_ENTITIES = NUM_WITNESS_ENTITIES + NUM_SHIFTED_ENTITIES; diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/generated/full_row.cpp b/barretenberg/cpp/src/barretenberg/vm/avm/generated/full_row.cpp index cff3284cea75..496d30cc203d 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/generated/full_row.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/generated/full_row.cpp @@ -244,7 +244,6 @@ template std::vector AvmFullRow::names() "main_sel_op_fdiv", "main_sel_op_fee_per_da_gas", "main_sel_op_fee_per_l2_gas", - "main_sel_op_function_selector", "main_sel_op_get_contract_instance", "main_sel_op_internal_call", "main_sel_op_internal_return", @@ -1015,7 +1014,6 @@ template RefVector AvmFullRow::as_vector() const main_sel_op_fdiv, main_sel_op_fee_per_da_gas, main_sel_op_fee_per_l2_gas, - main_sel_op_function_selector, main_sel_op_get_contract_instance, main_sel_op_internal_call, main_sel_op_internal_return, diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/generated/full_row.hpp b/barretenberg/cpp/src/barretenberg/vm/avm/generated/full_row.hpp index ebfbce7f71b5..fab4b8342635 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/generated/full_row.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/generated/full_row.hpp @@ -235,7 +235,6 @@ template struct AvmFullRow { FF main_sel_op_fdiv{}; FF main_sel_op_fee_per_da_gas{}; FF main_sel_op_fee_per_l2_gas{}; - FF main_sel_op_function_selector{}; FF main_sel_op_get_contract_instance{}; FF main_sel_op_internal_call{}; FF main_sel_op_internal_return{}; @@ -780,7 +779,7 @@ template struct AvmFullRow { RefVector as_vector() const; static std::vector names(); - static constexpr size_t SIZE = 766; + static constexpr size_t SIZE = 765; }; template std::ostream& operator<<(std::ostream& os, AvmFullRow const& row); diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/main.hpp b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/main.hpp index 3ebb15e52931..8f3bd984d59d 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/main.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/generated/relations/main.hpp @@ -10,11 +10,11 @@ template class mainImpl { public: using FF = FF_; - static constexpr std::array SUBRELATION_PARTIAL_LENGTHS = { - 2, 3, 4, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, - 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, - 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 5, 4, 4, 3, 3, 3, 3, 4, 3, 3, - 3, 3, 3, 3, 3, 3, 3, 3, 3, 5, 3, 3, 4, 4, 3, 3, 3, 3, 3, 3, 3, 3, 3, 4, 2, 3 + static constexpr std::array SUBRELATION_PARTIAL_LENGTHS = { + 2, 3, 4, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 5, 4, 4, 3, 3, 3, 3, 4, + 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 5, 3, 3, 4, 4, 3, 3, 3, 3, 3, 3, 3, 3, 3, 4, 2, 3 }; template @@ -53,8 +53,7 @@ template class mainImpl { new_term.main_sel_op_ecadd) + new_term.main_sel_op_msm); const auto main_SEL_ALL_MEMORY = (new_term.main_sel_op_mov + new_term.main_sel_op_set); - const auto main_KERNEL_INPUT_SELECTORS = ((((((((((new_term.main_sel_op_address + new_term.main_sel_op_sender) + - new_term.main_sel_op_function_selector) + + const auto main_KERNEL_INPUT_SELECTORS = (((((((((new_term.main_sel_op_address + new_term.main_sel_op_sender) + 
new_term.main_sel_op_transaction_fee) + new_term.main_sel_op_chain_id) + new_term.main_sel_op_version) + @@ -123,671 +122,665 @@ template class mainImpl { } { using Accumulator = typename std::tuple_element_t<5, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_function_selector * (FF(1) - new_term.main_sel_op_function_selector)); + auto tmp = (new_term.main_sel_op_transaction_fee * (FF(1) - new_term.main_sel_op_transaction_fee)); tmp *= scaling_factor; std::get<5>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<6, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_transaction_fee * (FF(1) - new_term.main_sel_op_transaction_fee)); + auto tmp = (new_term.main_sel_op_chain_id * (FF(1) - new_term.main_sel_op_chain_id)); tmp *= scaling_factor; std::get<6>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<7, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_chain_id * (FF(1) - new_term.main_sel_op_chain_id)); + auto tmp = (new_term.main_sel_op_version * (FF(1) - new_term.main_sel_op_version)); tmp *= scaling_factor; std::get<7>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<8, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_version * (FF(1) - new_term.main_sel_op_version)); + auto tmp = (new_term.main_sel_op_block_number * (FF(1) - new_term.main_sel_op_block_number)); tmp *= scaling_factor; std::get<8>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<9, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_block_number * (FF(1) - new_term.main_sel_op_block_number)); + auto tmp = (new_term.main_sel_op_timestamp * (FF(1) - new_term.main_sel_op_timestamp)); tmp *= scaling_factor; std::get<9>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<10, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_timestamp * (FF(1) - new_term.main_sel_op_timestamp)); + auto tmp = (new_term.main_sel_op_fee_per_l2_gas * (FF(1) - new_term.main_sel_op_fee_per_l2_gas)); tmp *= scaling_factor; std::get<10>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<11, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_fee_per_l2_gas * (FF(1) - new_term.main_sel_op_fee_per_l2_gas)); + auto tmp = (new_term.main_sel_op_fee_per_da_gas * (FF(1) - new_term.main_sel_op_fee_per_da_gas)); tmp *= scaling_factor; std::get<11>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<12, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_fee_per_da_gas * (FF(1) - new_term.main_sel_op_fee_per_da_gas)); + auto tmp = (new_term.main_sel_op_is_static_call * (FF(1) - new_term.main_sel_op_is_static_call)); tmp *= scaling_factor; std::get<12>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<13, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_is_static_call * (FF(1) - new_term.main_sel_op_is_static_call)); + auto tmp = (new_term.main_sel_op_l2gasleft * (FF(1) - new_term.main_sel_op_l2gasleft)); tmp *= scaling_factor; std::get<13>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<14, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_l2gasleft * (FF(1) - new_term.main_sel_op_l2gasleft)); + auto tmp = 
(new_term.main_sel_op_dagasleft * (FF(1) - new_term.main_sel_op_dagasleft)); tmp *= scaling_factor; std::get<14>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<15, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_dagasleft * (FF(1) - new_term.main_sel_op_dagasleft)); + auto tmp = (new_term.main_sel_op_note_hash_exists * (FF(1) - new_term.main_sel_op_note_hash_exists)); tmp *= scaling_factor; std::get<15>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<16, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_note_hash_exists * (FF(1) - new_term.main_sel_op_note_hash_exists)); + auto tmp = (new_term.main_sel_op_emit_note_hash * (FF(1) - new_term.main_sel_op_emit_note_hash)); tmp *= scaling_factor; std::get<16>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<17, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_emit_note_hash * (FF(1) - new_term.main_sel_op_emit_note_hash)); + auto tmp = (new_term.main_sel_op_nullifier_exists * (FF(1) - new_term.main_sel_op_nullifier_exists)); tmp *= scaling_factor; std::get<17>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<18, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_nullifier_exists * (FF(1) - new_term.main_sel_op_nullifier_exists)); + auto tmp = (new_term.main_sel_op_emit_nullifier * (FF(1) - new_term.main_sel_op_emit_nullifier)); tmp *= scaling_factor; std::get<18>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<19, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_emit_nullifier * (FF(1) - new_term.main_sel_op_emit_nullifier)); + auto tmp = (new_term.main_sel_op_l1_to_l2_msg_exists * (FF(1) - new_term.main_sel_op_l1_to_l2_msg_exists)); tmp *= scaling_factor; std::get<19>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<20, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_l1_to_l2_msg_exists * (FF(1) - new_term.main_sel_op_l1_to_l2_msg_exists)); + auto tmp = + (new_term.main_sel_op_emit_unencrypted_log * (FF(1) - new_term.main_sel_op_emit_unencrypted_log)); tmp *= scaling_factor; std::get<20>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<21, ContainerOverSubrelations>; - auto tmp = - (new_term.main_sel_op_emit_unencrypted_log * (FF(1) - new_term.main_sel_op_emit_unencrypted_log)); + auto tmp = (new_term.main_sel_op_emit_l2_to_l1_msg * (FF(1) - new_term.main_sel_op_emit_l2_to_l1_msg)); tmp *= scaling_factor; std::get<21>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<22, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_emit_l2_to_l1_msg * (FF(1) - new_term.main_sel_op_emit_l2_to_l1_msg)); + auto tmp = + (new_term.main_sel_op_get_contract_instance * (FF(1) - new_term.main_sel_op_get_contract_instance)); tmp *= scaling_factor; std::get<22>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<23, ContainerOverSubrelations>; - auto tmp = - (new_term.main_sel_op_get_contract_instance * (FF(1) - new_term.main_sel_op_get_contract_instance)); + auto tmp = (new_term.main_sel_op_sload * (FF(1) - new_term.main_sel_op_sload)); tmp *= scaling_factor; std::get<23>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename 
std::tuple_element_t<24, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_sload * (FF(1) - new_term.main_sel_op_sload)); + auto tmp = (new_term.main_sel_op_sstore * (FF(1) - new_term.main_sel_op_sstore)); tmp *= scaling_factor; std::get<24>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<25, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_sstore * (FF(1) - new_term.main_sel_op_sstore)); + auto tmp = (new_term.main_sel_op_debug_log * (FF(1) - new_term.main_sel_op_debug_log)); tmp *= scaling_factor; std::get<25>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<26, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_debug_log * (FF(1) - new_term.main_sel_op_debug_log)); + auto tmp = (new_term.main_sel_op_radix_be * (FF(1) - new_term.main_sel_op_radix_be)); tmp *= scaling_factor; std::get<26>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<27, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_radix_be * (FF(1) - new_term.main_sel_op_radix_be)); + auto tmp = (new_term.main_sel_op_sha256 * (FF(1) - new_term.main_sel_op_sha256)); tmp *= scaling_factor; std::get<27>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<28, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_sha256 * (FF(1) - new_term.main_sel_op_sha256)); + auto tmp = (new_term.main_sel_op_poseidon2 * (FF(1) - new_term.main_sel_op_poseidon2)); tmp *= scaling_factor; std::get<28>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<29, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_poseidon2 * (FF(1) - new_term.main_sel_op_poseidon2)); + auto tmp = (new_term.main_sel_op_keccak * (FF(1) - new_term.main_sel_op_keccak)); tmp *= scaling_factor; std::get<29>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<30, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_keccak * (FF(1) - new_term.main_sel_op_keccak)); + auto tmp = (new_term.main_sel_op_ecadd * (FF(1) - new_term.main_sel_op_ecadd)); tmp *= scaling_factor; std::get<30>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<31, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_ecadd * (FF(1) - new_term.main_sel_op_ecadd)); + auto tmp = (new_term.main_sel_op_msm * (FF(1) - new_term.main_sel_op_msm)); tmp *= scaling_factor; std::get<31>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<32, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_msm * (FF(1) - new_term.main_sel_op_msm)); + auto tmp = (new_term.main_sel_op_add * (FF(1) - new_term.main_sel_op_add)); tmp *= scaling_factor; std::get<32>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<33, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_add * (FF(1) - new_term.main_sel_op_add)); + auto tmp = (new_term.main_sel_op_sub * (FF(1) - new_term.main_sel_op_sub)); tmp *= scaling_factor; std::get<33>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<34, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_sub * (FF(1) - new_term.main_sel_op_sub)); + auto tmp = (new_term.main_sel_op_mul * (FF(1) - new_term.main_sel_op_mul)); tmp *= 
scaling_factor; std::get<34>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<35, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_mul * (FF(1) - new_term.main_sel_op_mul)); + auto tmp = (new_term.main_sel_op_div * (FF(1) - new_term.main_sel_op_div)); tmp *= scaling_factor; std::get<35>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<36, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_div * (FF(1) - new_term.main_sel_op_div)); + auto tmp = (new_term.main_sel_op_fdiv * (FF(1) - new_term.main_sel_op_fdiv)); tmp *= scaling_factor; std::get<36>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<37, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_fdiv * (FF(1) - new_term.main_sel_op_fdiv)); + auto tmp = (new_term.main_sel_op_not * (FF(1) - new_term.main_sel_op_not)); tmp *= scaling_factor; std::get<37>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<38, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_not * (FF(1) - new_term.main_sel_op_not)); + auto tmp = (new_term.main_sel_op_eq * (FF(1) - new_term.main_sel_op_eq)); tmp *= scaling_factor; std::get<38>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<39, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_eq * (FF(1) - new_term.main_sel_op_eq)); + auto tmp = (new_term.main_sel_op_and * (FF(1) - new_term.main_sel_op_and)); tmp *= scaling_factor; std::get<39>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<40, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_and * (FF(1) - new_term.main_sel_op_and)); + auto tmp = (new_term.main_sel_op_or * (FF(1) - new_term.main_sel_op_or)); tmp *= scaling_factor; std::get<40>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<41, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_or * (FF(1) - new_term.main_sel_op_or)); + auto tmp = (new_term.main_sel_op_xor * (FF(1) - new_term.main_sel_op_xor)); tmp *= scaling_factor; std::get<41>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<42, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_xor * (FF(1) - new_term.main_sel_op_xor)); + auto tmp = (new_term.main_sel_op_cast * (FF(1) - new_term.main_sel_op_cast)); tmp *= scaling_factor; std::get<42>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<43, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_cast * (FF(1) - new_term.main_sel_op_cast)); + auto tmp = (new_term.main_sel_op_lt * (FF(1) - new_term.main_sel_op_lt)); tmp *= scaling_factor; std::get<43>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<44, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_lt * (FF(1) - new_term.main_sel_op_lt)); + auto tmp = (new_term.main_sel_op_lte * (FF(1) - new_term.main_sel_op_lte)); tmp *= scaling_factor; std::get<44>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<45, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_lte * (FF(1) - new_term.main_sel_op_lte)); + auto tmp = (new_term.main_sel_op_shl * (FF(1) - new_term.main_sel_op_shl)); tmp *= scaling_factor; 
std::get<45>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<46, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_shl * (FF(1) - new_term.main_sel_op_shl)); + auto tmp = (new_term.main_sel_op_shr * (FF(1) - new_term.main_sel_op_shr)); tmp *= scaling_factor; std::get<46>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<47, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_shr * (FF(1) - new_term.main_sel_op_shr)); + auto tmp = (new_term.main_sel_op_internal_call * (FF(1) - new_term.main_sel_op_internal_call)); tmp *= scaling_factor; std::get<47>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<48, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_internal_call * (FF(1) - new_term.main_sel_op_internal_call)); + auto tmp = (new_term.main_sel_op_internal_return * (FF(1) - new_term.main_sel_op_internal_return)); tmp *= scaling_factor; std::get<48>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<49, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_internal_return * (FF(1) - new_term.main_sel_op_internal_return)); + auto tmp = (new_term.main_sel_op_jump * (FF(1) - new_term.main_sel_op_jump)); tmp *= scaling_factor; std::get<49>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<50, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_jump * (FF(1) - new_term.main_sel_op_jump)); + auto tmp = (new_term.main_sel_op_jumpi * (FF(1) - new_term.main_sel_op_jumpi)); tmp *= scaling_factor; std::get<50>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<51, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_jumpi * (FF(1) - new_term.main_sel_op_jumpi)); + auto tmp = (new_term.main_sel_op_external_call * (FF(1) - new_term.main_sel_op_external_call)); tmp *= scaling_factor; std::get<51>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<52, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_external_call * (FF(1) - new_term.main_sel_op_external_call)); + auto tmp = (new_term.main_sel_op_static_call * (FF(1) - new_term.main_sel_op_static_call)); tmp *= scaling_factor; std::get<52>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<53, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_static_call * (FF(1) - new_term.main_sel_op_static_call)); + auto tmp = (new_term.main_sel_op_calldata_copy * (FF(1) - new_term.main_sel_op_calldata_copy)); tmp *= scaling_factor; std::get<53>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<54, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_calldata_copy * (FF(1) - new_term.main_sel_op_calldata_copy)); + auto tmp = (new_term.main_sel_op_returndata_size * (FF(1) - new_term.main_sel_op_returndata_size)); tmp *= scaling_factor; std::get<54>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<55, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_returndata_size * (FF(1) - new_term.main_sel_op_returndata_size)); + auto tmp = (new_term.main_sel_op_returndata_copy * (FF(1) - new_term.main_sel_op_returndata_copy)); tmp *= scaling_factor; std::get<55>(evals) += typename Accumulator::View(tmp); } { 
using Accumulator = typename std::tuple_element_t<56, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_returndata_copy * (FF(1) - new_term.main_sel_op_returndata_copy)); + auto tmp = (new_term.main_sel_op_external_return * (FF(1) - new_term.main_sel_op_external_return)); tmp *= scaling_factor; std::get<56>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<57, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_external_return * (FF(1) - new_term.main_sel_op_external_return)); + auto tmp = (new_term.main_sel_op_external_revert * (FF(1) - new_term.main_sel_op_external_revert)); tmp *= scaling_factor; std::get<57>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<58, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_external_revert * (FF(1) - new_term.main_sel_op_external_revert)); + auto tmp = (new_term.main_sel_op_set * (FF(1) - new_term.main_sel_op_set)); tmp *= scaling_factor; std::get<58>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<59, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_set * (FF(1) - new_term.main_sel_op_set)); + auto tmp = (new_term.main_sel_op_mov * (FF(1) - new_term.main_sel_op_mov)); tmp *= scaling_factor; std::get<59>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<60, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_mov * (FF(1) - new_term.main_sel_op_mov)); + auto tmp = (new_term.main_op_err * (FF(1) - new_term.main_op_err)); tmp *= scaling_factor; std::get<60>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<61, ContainerOverSubrelations>; - auto tmp = (new_term.main_op_err * (FF(1) - new_term.main_op_err)); + auto tmp = (new_term.main_tag_err * (FF(1) - new_term.main_tag_err)); tmp *= scaling_factor; std::get<61>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<62, ContainerOverSubrelations>; - auto tmp = (new_term.main_tag_err * (FF(1) - new_term.main_tag_err)); + auto tmp = (new_term.main_id_zero * (FF(1) - new_term.main_id_zero)); tmp *= scaling_factor; std::get<62>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<63, ContainerOverSubrelations>; - auto tmp = (new_term.main_id_zero * (FF(1) - new_term.main_id_zero)); + auto tmp = (new_term.main_sel_mem_op_a * (FF(1) - new_term.main_sel_mem_op_a)); tmp *= scaling_factor; std::get<63>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<64, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_mem_op_a * (FF(1) - new_term.main_sel_mem_op_a)); + auto tmp = (new_term.main_sel_mem_op_b * (FF(1) - new_term.main_sel_mem_op_b)); tmp *= scaling_factor; std::get<64>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<65, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_mem_op_b * (FF(1) - new_term.main_sel_mem_op_b)); + auto tmp = (new_term.main_sel_mem_op_c * (FF(1) - new_term.main_sel_mem_op_c)); tmp *= scaling_factor; std::get<65>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<66, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_mem_op_c * (FF(1) - new_term.main_sel_mem_op_c)); + auto tmp = (new_term.main_sel_mem_op_d * (FF(1) - 
new_term.main_sel_mem_op_d)); tmp *= scaling_factor; std::get<66>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<67, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_mem_op_d * (FF(1) - new_term.main_sel_mem_op_d)); + auto tmp = (new_term.main_rwa * (FF(1) - new_term.main_rwa)); tmp *= scaling_factor; std::get<67>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<68, ContainerOverSubrelations>; - auto tmp = (new_term.main_rwa * (FF(1) - new_term.main_rwa)); + auto tmp = (new_term.main_rwb * (FF(1) - new_term.main_rwb)); tmp *= scaling_factor; std::get<68>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<69, ContainerOverSubrelations>; - auto tmp = (new_term.main_rwb * (FF(1) - new_term.main_rwb)); + auto tmp = (new_term.main_rwc * (FF(1) - new_term.main_rwc)); tmp *= scaling_factor; std::get<69>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<70, ContainerOverSubrelations>; - auto tmp = (new_term.main_rwc * (FF(1) - new_term.main_rwc)); + auto tmp = (new_term.main_rwd * (FF(1) - new_term.main_rwd)); tmp *= scaling_factor; std::get<70>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<71, ContainerOverSubrelations>; - auto tmp = (new_term.main_rwd * (FF(1) - new_term.main_rwd)); + auto tmp = (new_term.main_sel_resolve_ind_addr_a * (FF(1) - new_term.main_sel_resolve_ind_addr_a)); tmp *= scaling_factor; std::get<71>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<72, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_resolve_ind_addr_a * (FF(1) - new_term.main_sel_resolve_ind_addr_a)); + auto tmp = (new_term.main_sel_resolve_ind_addr_b * (FF(1) - new_term.main_sel_resolve_ind_addr_b)); tmp *= scaling_factor; std::get<72>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<73, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_resolve_ind_addr_b * (FF(1) - new_term.main_sel_resolve_ind_addr_b)); + auto tmp = (new_term.main_sel_resolve_ind_addr_c * (FF(1) - new_term.main_sel_resolve_ind_addr_c)); tmp *= scaling_factor; std::get<73>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<74, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_resolve_ind_addr_c * (FF(1) - new_term.main_sel_resolve_ind_addr_c)); + auto tmp = (new_term.main_sel_resolve_ind_addr_d * (FF(1) - new_term.main_sel_resolve_ind_addr_d)); tmp *= scaling_factor; std::get<74>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<75, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_resolve_ind_addr_d * (FF(1) - new_term.main_sel_resolve_ind_addr_d)); + auto tmp = (((new_term.main_sel_op_eq + new_term.main_sel_op_lte) + new_term.main_sel_op_lt) * + (new_term.main_w_in_tag - constants_MEM_TAG_U1)); tmp *= scaling_factor; std::get<75>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<76, ContainerOverSubrelations>; - auto tmp = (((new_term.main_sel_op_eq + new_term.main_sel_op_lte) + new_term.main_sel_op_lt) * - (new_term.main_w_in_tag - constants_MEM_TAG_U1)); + auto tmp = ((new_term.main_sel_op_fdiv * (FF(1) - new_term.main_op_err)) * + ((new_term.main_ic * new_term.main_ib) - new_term.main_ia)); tmp *= scaling_factor; 
std::get<76>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<77, ContainerOverSubrelations>; - auto tmp = ((new_term.main_sel_op_fdiv * (FF(1) - new_term.main_op_err)) * - ((new_term.main_ic * new_term.main_ib) - new_term.main_ia)); + auto tmp = ((new_term.main_sel_op_fdiv + new_term.main_sel_op_div) * + (((new_term.main_ib * new_term.main_inv) - FF(1)) + new_term.main_op_err)); tmp *= scaling_factor; std::get<77>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<78, ContainerOverSubrelations>; - auto tmp = ((new_term.main_sel_op_fdiv + new_term.main_sel_op_div) * - (((new_term.main_ib * new_term.main_inv) - FF(1)) + new_term.main_op_err)); + auto tmp = (((new_term.main_sel_op_fdiv + new_term.main_sel_op_div) * new_term.main_op_err) * + (FF(1) - new_term.main_inv)); tmp *= scaling_factor; std::get<78>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<79, ContainerOverSubrelations>; - auto tmp = (((new_term.main_sel_op_fdiv + new_term.main_sel_op_div) * new_term.main_op_err) * - (FF(1) - new_term.main_inv)); + auto tmp = (new_term.main_sel_op_fdiv * (new_term.main_r_in_tag - constants_MEM_TAG_FF)); tmp *= scaling_factor; std::get<79>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<80, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_fdiv * (new_term.main_r_in_tag - constants_MEM_TAG_FF)); + auto tmp = (new_term.main_sel_op_fdiv * (new_term.main_w_in_tag - constants_MEM_TAG_FF)); tmp *= scaling_factor; std::get<80>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<81, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_fdiv * (new_term.main_w_in_tag - constants_MEM_TAG_FF)); + auto tmp = (new_term.main_tag_err * (FF(1) - new_term.main_op_err)); tmp *= scaling_factor; std::get<81>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<82, ContainerOverSubrelations>; - auto tmp = (new_term.main_tag_err * (FF(1) - new_term.main_op_err)); + auto tmp = (new_term.main_sel_op_jump * (new_term.main_pc_shift - new_term.main_ia)); tmp *= scaling_factor; std::get<82>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<83, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_jump * (new_term.main_pc_shift - new_term.main_ia)); + auto tmp = (new_term.main_sel_op_jumpi * + (((FF(1) - new_term.main_id_zero) * (new_term.main_pc_shift - new_term.main_ia)) + + (new_term.main_id_zero * ((new_term.main_pc_shift - new_term.main_pc) - FF(8))))); tmp *= scaling_factor; std::get<83>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<84, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_jumpi * - (((FF(1) - new_term.main_id_zero) * (new_term.main_pc_shift - new_term.main_ia)) + - (new_term.main_id_zero * ((new_term.main_pc_shift - new_term.main_pc) - FF(8))))); + auto tmp = (new_term.main_sel_op_internal_call * + (new_term.main_internal_return_ptr_shift - (new_term.main_internal_return_ptr + FF(1)))); tmp *= scaling_factor; std::get<84>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<85, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_internal_call * - (new_term.main_internal_return_ptr_shift - (new_term.main_internal_return_ptr + FF(1)))); 
+ auto tmp = + (new_term.main_sel_op_internal_call * (new_term.main_internal_return_ptr - new_term.main_mem_addr_b)); tmp *= scaling_factor; std::get<85>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<86, ContainerOverSubrelations>; - auto tmp = - (new_term.main_sel_op_internal_call * (new_term.main_internal_return_ptr - new_term.main_mem_addr_b)); + auto tmp = (new_term.main_sel_op_internal_call * (new_term.main_pc_shift - new_term.main_ia)); tmp *= scaling_factor; std::get<86>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<87, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_internal_call * (new_term.main_pc_shift - new_term.main_ia)); + auto tmp = (new_term.main_sel_op_internal_call * ((new_term.main_pc + FF(5)) - new_term.main_ib)); tmp *= scaling_factor; std::get<87>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<88, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_internal_call * ((new_term.main_pc + FF(5)) - new_term.main_ib)); + auto tmp = (new_term.main_sel_op_internal_call * (new_term.main_rwb - FF(1))); tmp *= scaling_factor; std::get<88>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<89, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_internal_call * (new_term.main_rwb - FF(1))); + auto tmp = (new_term.main_sel_op_internal_call * (new_term.main_sel_mem_op_b - FF(1))); tmp *= scaling_factor; std::get<89>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<90, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_internal_call * (new_term.main_sel_mem_op_b - FF(1))); + auto tmp = (new_term.main_sel_op_internal_return * + (new_term.main_internal_return_ptr_shift - (new_term.main_internal_return_ptr - FF(1)))); tmp *= scaling_factor; std::get<90>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<91, ContainerOverSubrelations>; auto tmp = (new_term.main_sel_op_internal_return * - (new_term.main_internal_return_ptr_shift - (new_term.main_internal_return_ptr - FF(1)))); + ((new_term.main_internal_return_ptr - FF(1)) - new_term.main_mem_addr_a)); tmp *= scaling_factor; std::get<91>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<92, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_internal_return * - ((new_term.main_internal_return_ptr - FF(1)) - new_term.main_mem_addr_a)); + auto tmp = (new_term.main_sel_op_internal_return * (new_term.main_pc_shift - new_term.main_ia)); tmp *= scaling_factor; std::get<92>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<93, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_internal_return * (new_term.main_pc_shift - new_term.main_ia)); + auto tmp = (new_term.main_sel_op_internal_return * new_term.main_rwa); tmp *= scaling_factor; std::get<93>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<94, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_internal_return * new_term.main_rwa); + auto tmp = (new_term.main_sel_op_internal_return * (new_term.main_sel_mem_op_a - FF(1))); tmp *= scaling_factor; std::get<94>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<95, ContainerOverSubrelations>; 
- auto tmp = (new_term.main_sel_op_internal_return * (new_term.main_sel_mem_op_a - FF(1))); + auto tmp = ((main_CUR_AND_NEXT_ARE_MAIN * (FF(1) - main_SEL_ALL_CTRL_FLOW)) * + (new_term.main_internal_return_ptr_shift - new_term.main_internal_return_ptr)); tmp *= scaling_factor; std::get<95>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<96, ContainerOverSubrelations>; - auto tmp = ((main_CUR_AND_NEXT_ARE_MAIN * (FF(1) - main_SEL_ALL_CTRL_FLOW)) * - (new_term.main_internal_return_ptr_shift - new_term.main_internal_return_ptr)); + auto tmp = ((new_term.main_sel_op_internal_call + new_term.main_sel_op_internal_return) * + (new_term.main_space_id - constants_misc_INTERNAL_CALL_SPACE_ID)); tmp *= scaling_factor; std::get<96>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<97, ContainerOverSubrelations>; - auto tmp = ((new_term.main_sel_op_internal_call + new_term.main_sel_op_internal_return) * - (new_term.main_space_id - constants_misc_INTERNAL_CALL_SPACE_ID)); + auto tmp = (((FF(1) - new_term.main_sel_op_internal_call) - new_term.main_sel_op_internal_return) * + (new_term.main_call_ptr - new_term.main_space_id)); tmp *= scaling_factor; std::get<97>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<98, ContainerOverSubrelations>; - auto tmp = (((FF(1) - new_term.main_sel_op_internal_call) - new_term.main_sel_op_internal_return) * - (new_term.main_call_ptr - new_term.main_space_id)); + auto tmp = (new_term.main_sel_op_jumpi * + (((new_term.main_id * new_term.main_inv) - FF(1)) + new_term.main_id_zero)); tmp *= scaling_factor; std::get<98>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<99, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_jumpi * - (((new_term.main_id * new_term.main_inv) - FF(1)) + new_term.main_id_zero)); + auto tmp = ((new_term.main_sel_op_jumpi * new_term.main_id_zero) * (FF(1) - new_term.main_inv)); tmp *= scaling_factor; std::get<99>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<100, ContainerOverSubrelations>; - auto tmp = ((new_term.main_sel_op_jumpi * new_term.main_id_zero) * (FF(1) - new_term.main_inv)); + auto tmp = (new_term.main_sel_mov_ia_to_ic - (new_term.main_sel_op_mov * (FF(1) - new_term.main_id_zero))); tmp *= scaling_factor; std::get<100>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<101, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_mov_ia_to_ic - (new_term.main_sel_op_mov * (FF(1) - new_term.main_id_zero))); + auto tmp = (new_term.main_sel_mov_ia_to_ic * (new_term.main_ia - new_term.main_ic)); tmp *= scaling_factor; std::get<101>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<102, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_mov_ia_to_ic * (new_term.main_ia - new_term.main_ic)); + auto tmp = (new_term.main_sel_mov_ib_to_ic * (new_term.main_ib - new_term.main_ic)); tmp *= scaling_factor; std::get<102>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<103, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_mov_ib_to_ic * (new_term.main_ib - new_term.main_ic)); + auto tmp = (new_term.main_sel_op_mov * (new_term.main_r_in_tag - new_term.main_w_in_tag)); tmp *= scaling_factor; std::get<103>(evals) += typename 
Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<104, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_mov * (new_term.main_r_in_tag - new_term.main_w_in_tag)); + auto tmp = (new_term.main_sel_alu - (main_SEL_ALL_ALU * (FF(1) - new_term.main_op_err))); tmp *= scaling_factor; std::get<104>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<105, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_alu - (main_SEL_ALL_ALU * (FF(1) - new_term.main_op_err))); + auto tmp = (main_SEL_ALU_R_TAG * (new_term.main_alu_in_tag - new_term.main_r_in_tag)); tmp *= scaling_factor; std::get<105>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<106, ContainerOverSubrelations>; - auto tmp = (main_SEL_ALU_R_TAG * (new_term.main_alu_in_tag - new_term.main_r_in_tag)); + auto tmp = (main_SEL_ALU_W_TAG * (new_term.main_alu_in_tag - new_term.main_w_in_tag)); tmp *= scaling_factor; std::get<106>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<107, ContainerOverSubrelations>; - auto tmp = (main_SEL_ALU_W_TAG * (new_term.main_alu_in_tag - new_term.main_w_in_tag)); + auto tmp = (new_term.main_sel_op_l2gasleft * (new_term.main_ia - new_term.main_l2_gas_remaining_shift)); tmp *= scaling_factor; std::get<107>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<108, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_l2gasleft * (new_term.main_ia - new_term.main_l2_gas_remaining_shift)); + auto tmp = (new_term.main_sel_op_dagasleft * (new_term.main_ia - new_term.main_da_gas_remaining_shift)); tmp *= scaling_factor; std::get<108>(evals) += typename Accumulator::View(tmp); } { using Accumulator = typename std::tuple_element_t<109, ContainerOverSubrelations>; - auto tmp = (new_term.main_sel_op_dagasleft * (new_term.main_ia - new_term.main_da_gas_remaining_shift)); - tmp *= scaling_factor; - std::get<109>(evals) += typename Accumulator::View(tmp); - } - { - using Accumulator = typename std::tuple_element_t<110, ContainerOverSubrelations>; auto tmp = ((new_term.main_ib * (FF(1) - new_term.main_op_err)) * ((new_term.main_sel_op_calldata_copy + new_term.main_sel_op_external_return) - new_term.main_sel_slice_gadget)); tmp *= scaling_factor; - std::get<110>(evals) += typename Accumulator::View(tmp); + std::get<109>(evals) += typename Accumulator::View(tmp); } { - using Accumulator = typename std::tuple_element_t<111, ContainerOverSubrelations>; + using Accumulator = typename std::tuple_element_t<110, ContainerOverSubrelations>; auto tmp = (new_term.main_bin_op_id - (new_term.main_sel_op_or + (FF(2) * new_term.main_sel_op_xor))); tmp *= scaling_factor; - std::get<111>(evals) += typename Accumulator::View(tmp); + std::get<110>(evals) += typename Accumulator::View(tmp); } { - using Accumulator = typename std::tuple_element_t<112, ContainerOverSubrelations>; + using Accumulator = typename std::tuple_element_t<111, ContainerOverSubrelations>; auto tmp = (new_term.main_sel_bin - (((new_term.main_sel_op_and + new_term.main_sel_op_or) + new_term.main_sel_op_xor) * (FF(1) - new_term.main_op_err))); tmp *= scaling_factor; - std::get<112>(evals) += typename Accumulator::View(tmp); + std::get<111>(evals) += typename Accumulator::View(tmp); } } }; @@ -801,51 +794,51 @@ template class main : public Relation> { switch (index) { case 0: return "OPCODE_SELECTORS"; - case 76: + case 75: return 
"OUTPUT_U1"; - case 77: + case 76: return "SUBOP_FDIV"; - case 78: + case 77: return "SUBOP_FDIV_ZERO_ERR1"; - case 79: + case 78: return "SUBOP_FDIV_ZERO_ERR2"; - case 80: + case 79: return "SUBOP_FDIV_R_IN_TAG_FF"; - case 81: + case 80: return "SUBOP_FDIV_W_IN_TAG_FF"; - case 82: + case 81: return "TAG_ERR_IMPLIES_OP_ERR"; - case 83: + case 82: return "PC_JUMP"; - case 84: + case 83: return "PC_JUMPI"; - case 85: + case 84: return "RETURN_POINTER_INCREMENT"; - case 91: + case 90: return "RETURN_POINTER_DECREMENT"; - case 96: + case 95: return "INTERNAL_RETURN_POINTER_CONSISTENCY"; - case 97: + case 96: return "SPACE_ID_INTERNAL"; - case 98: + case 97: return "SPACE_ID_STANDARD_OPCODES"; - case 99: + case 98: return "JMP_CONDITION_RES_1"; - case 100: + case 99: return "JMP_CONDITION_RES_2"; - case 102: + case 101: return "MOV_SAME_VALUE_A"; - case 103: + case 102: return "MOV_SAME_VALUE_B"; - case 104: + case 103: return "MOV_MAIN_SAME_TAG"; - case 108: + case 107: return "L2GASLEFT"; - case 109: + case 108: return "DAGASLEFT"; - case 111: + case 110: return "BIN_SEL_1"; - case 112: + case 111: return "BIN_SEL_2"; } return std::to_string(index); diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/tests/execution.test.cpp b/barretenberg/cpp/src/barretenberg/vm/avm/tests/execution.test.cpp index a5d6a718246c..260a961d3fe8 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/tests/execution.test.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/tests/execution.test.cpp @@ -74,7 +74,7 @@ class AvmExecutionTests : public ::testing::Test { * @param bytecode * @return The trace as a vector of Row. */ - std::vector gen_trace_from_bytecode(const std::vector& bytecode) + std::vector gen_trace_from_bytecode(const std::vector& bytecode) const { std::vector calldata{}; std::vector returndata{}; @@ -89,9 +89,9 @@ class AvmExecutionTests : public ::testing::Test { static std::vector gen_trace(const std::vector& bytecode, const std::vector& calldata, - AvmPublicInputs& public_inputs, + AvmPublicInputs public_inputs, std::vector& returndata, - ExecutionHints& execution_hints) + ExecutionHints execution_hints) { auto [contract_class_id, contract_instance] = gen_test_contract_hint(bytecode); execution_hints.with_avm_contract_bytecode( @@ -99,7 +99,11 @@ class AvmExecutionTests : public ::testing::Test { // These are magic values because of how some tests work! 
Don't change them public_inputs.public_app_logic_call_requests[0].contract_address = contract_instance.address; - return Execution::gen_trace(calldata, public_inputs, returndata, execution_hints); + execution_hints.enqueued_call_hints.push_back({ + .contract_address = contract_instance.address, + .calldata = calldata, + }); + return Execution::gen_trace(public_inputs, returndata, execution_hints); } static std::tuple gen_test_contract_hint( @@ -115,7 +119,8 @@ class AvmExecutionTests : public ::testing::Test { PublicKeysHint public_keys{ nullifier_key, incoming_viewing_key, outgoing_viewing_key, tagging_key }; ContractInstanceHint contract_instance = { FF::one() /* temp address */, true /* exists */, FF(2) /* salt */, FF(3) /* deployer_addr */, class_id, - FF(8) /* initialisation_hash */, public_keys + FF(8) /* initialisation_hash */, public_keys, + /*membership_hint=*/ { .low_leaf_preimage = { .nullifier = 0, .next_nullifier = 0, .next_index = 0, }, .low_leaf_index = 0, .low_leaf_sibling_path = {} }, }; FF address = AvmBytecodeTraceBuilder::compute_address_from_instance(contract_instance); contract_instance.address = address; @@ -1369,65 +1374,61 @@ TEST_F(AvmExecutionTests, msmOpCode) TEST_F(AvmExecutionTests, getEnvOpcode) { std::string bytecode_hex = - to_hex(OpCode::GETENVVAR_16) + // opcode GETENVVAR_16 - "00" // Indirect flag - "0001" // dst_offset - + to_hex(static_cast(EnvironmentVariable::ADDRESS)) // envvar ADDRESS - + to_hex(OpCode::GETENVVAR_16) + // opcode GETENVVAR_16 - "00" // Indirect flag - "0002" // dst_offset - + to_hex(static_cast(EnvironmentVariable::SENDER)) // envvar SENDER - + to_hex(OpCode::GETENVVAR_16) + // opcode GETENVVAR_16 - "00" // Indirect flag - "0003" // dst_offset - + to_hex(static_cast(EnvironmentVariable::FUNCTIONSELECTOR)) // envvar FUNCTIONSELECTOR - + to_hex(OpCode::GETENVVAR_16) + // opcode GETENVVAR_16 - "00" // Indirect flag - "0004" // dst_offset - + to_hex(static_cast(EnvironmentVariable::TRANSACTIONFEE)) // envvar TRANSACTIONFEE - + to_hex(OpCode::GETENVVAR_16) + // opcode GETENVVAR_16 - "00" // Indirect flag - "0005" // dst_offset - + to_hex(static_cast(EnvironmentVariable::CHAINID)) // envvar CHAINID - + to_hex(OpCode::GETENVVAR_16) + // opcode GETENVVAR_16 - "00" // Indirect flag - "0006" // dst_offset - + to_hex(static_cast(EnvironmentVariable::VERSION)) // envvar VERSION - + to_hex(OpCode::GETENVVAR_16) + // opcode GETENVVAR_16 - "00" // Indirect flag - "0007" // dst_offset - + to_hex(static_cast(EnvironmentVariable::BLOCKNUMBER)) // envvar BLOCKNUMBER - + to_hex(OpCode::GETENVVAR_16) + // opcode GETENVVAR_16 - "00" // Indirect flag - "0008" // dst_offset - + to_hex(static_cast(EnvironmentVariable::TIMESTAMP)) // envvar TIMESTAMP - + to_hex(OpCode::GETENVVAR_16) + // opcode GETENVVAR_16 - "00" // Indirect flag - "0009" // dst_offset - + to_hex(static_cast(EnvironmentVariable::FEEPERL2GAS)) // envvar FEEPERL2GAS - + to_hex(OpCode::GETENVVAR_16) + // opcode GETENVVAR_16 - "00" // Indirect flag - "000A" // dst_offset - + to_hex(static_cast(EnvironmentVariable::FEEPERDAGAS)) // envvar FEEPERDAGAS - + to_hex(OpCode::GETENVVAR_16) + // opcode GETENVVAR_16 - "00" // Indirect flag - "000B" // dst_offset - + to_hex(static_cast(EnvironmentVariable::ISSTATICCALL)) // envvar ISSTATICCALL - + to_hex(OpCode::SET_16) + // opcode SET (for return size) - "00" // Indirect flag - "0200" // dst_offset=512 - + to_hex(AvmMemoryTag::U32) + // tag U32 - "000B" // val: 12 - + to_hex(OpCode::RETURN) + // opcode RETURN - "00" // Indirect flag - "0001" // ret 
offset 1 - "0200"; // ret size offset 512 + to_hex(OpCode::GETENVVAR_16) + // opcode GETENVVAR_16 + "00" // Indirect flag + "0001" // dst_offset + + to_hex(static_cast(EnvironmentVariable::ADDRESS)) // envvar ADDRESS + + to_hex(OpCode::GETENVVAR_16) + // opcode GETENVVAR_16 + "00" // Indirect flag + "0002" // dst_offset + + to_hex(static_cast(EnvironmentVariable::SENDER)) // envvar SENDER + + to_hex(OpCode::GETENVVAR_16) + // opcode GETENVVAR_16 + "00" // Indirect flag + "0003" // dst_offset + + to_hex(static_cast(EnvironmentVariable::TRANSACTIONFEE)) // envvar TRANSACTIONFEE + + to_hex(OpCode::GETENVVAR_16) + // opcode GETENVVAR_16 + "00" // Indirect flag + "0004" // dst_offset + + to_hex(static_cast(EnvironmentVariable::CHAINID)) // envvar CHAINID + + to_hex(OpCode::GETENVVAR_16) + // opcode GETENVVAR_16 + "00" // Indirect flag + "0005" // dst_offset + + to_hex(static_cast(EnvironmentVariable::VERSION)) // envvar VERSION + + to_hex(OpCode::GETENVVAR_16) + // opcode GETENVVAR_16 + "00" // Indirect flag + "0006" // dst_offset + + to_hex(static_cast(EnvironmentVariable::BLOCKNUMBER)) // envvar BLOCKNUMBER + + to_hex(OpCode::GETENVVAR_16) + // opcode GETENVVAR_16 + "00" // Indirect flag + "0007" // dst_offset + + to_hex(static_cast(EnvironmentVariable::TIMESTAMP)) // envvar TIMESTAMP + + to_hex(OpCode::GETENVVAR_16) + // opcode GETENVVAR_16 + "00" // Indirect flag + "0008" // dst_offset + + to_hex(static_cast(EnvironmentVariable::FEEPERL2GAS)) // envvar FEEPERL2GAS + + to_hex(OpCode::GETENVVAR_16) + // opcode GETENVVAR_16 + "00" // Indirect flag + "0009" // dst_offset + + to_hex(static_cast(EnvironmentVariable::FEEPERDAGAS)) // envvar FEEPERDAGAS + + to_hex(OpCode::GETENVVAR_16) + // opcode GETENVVAR_16 + "00" // Indirect flag + "000A" // dst_offset + + to_hex(static_cast(EnvironmentVariable::ISSTATICCALL)) // envvar ISSTATICCALL + + to_hex(OpCode::SET_16) + // opcode SET (for return size) + "00" // Indirect flag + "0200" // dst_offset=512 + + to_hex(AvmMemoryTag::U32) + // tag U32 + "000A" // val: 10 + + to_hex(OpCode::RETURN) + // opcode RETURN + "00" // Indirect flag + "0001" // ret offset 1 + "0200"; // ret size offset 512 auto bytecode = hex_to_bytes(bytecode_hex); auto [instructions, error] = Deserialization::parse_bytecode_statically(bytecode); ASSERT_TRUE(is_ok(error)); - ASSERT_THAT(instructions, SizeIs(13)); + ASSERT_THAT(instructions, SizeIs(12)); // ADDRESS EXPECT_THAT(instructions.at(0), @@ -1445,82 +1446,73 @@ TEST_F(AvmExecutionTests, getEnvOpcode) VariantWith(2), VariantWith(static_cast(EnvironmentVariable::SENDER)))))); - // FUNCTIONSELECTOR + // TRANSACTIONFEE EXPECT_THAT( instructions.at(2), AllOf(Field(&Instruction::op_code, OpCode::GETENVVAR_16), Field(&Instruction::operands, ElementsAre(VariantWith(0), VariantWith(3), - VariantWith(static_cast(EnvironmentVariable::FUNCTIONSELECTOR)))))); - - // TRANSACTIONFEE - EXPECT_THAT( - instructions.at(3), - AllOf(Field(&Instruction::op_code, OpCode::GETENVVAR_16), - Field(&Instruction::operands, - ElementsAre(VariantWith(0), - VariantWith(4), VariantWith(static_cast(EnvironmentVariable::TRANSACTIONFEE)))))); // CHAINID - EXPECT_THAT(instructions.at(4), + EXPECT_THAT(instructions.at(3), AllOf(Field(&Instruction::op_code, OpCode::GETENVVAR_16), Field(&Instruction::operands, ElementsAre(VariantWith(0), - VariantWith(5), + VariantWith(4), VariantWith(static_cast(EnvironmentVariable::CHAINID)))))); // VERSION - EXPECT_THAT(instructions.at(5), + EXPECT_THAT(instructions.at(4), AllOf(Field(&Instruction::op_code, OpCode::GETENVVAR_16), 
Field(&Instruction::operands, ElementsAre(VariantWith(0), - VariantWith(6), + VariantWith(5), VariantWith(static_cast(EnvironmentVariable::VERSION)))))); // BLOCKNUMBER EXPECT_THAT( - instructions.at(6), + instructions.at(5), AllOf(Field(&Instruction::op_code, OpCode::GETENVVAR_16), Field(&Instruction::operands, ElementsAre(VariantWith(0), - VariantWith(7), + VariantWith(6), VariantWith(static_cast(EnvironmentVariable::BLOCKNUMBER)))))); // TIMESTAMP - EXPECT_THAT(instructions.at(7), + EXPECT_THAT(instructions.at(6), AllOf(Field(&Instruction::op_code, OpCode::GETENVVAR_16), Field(&Instruction::operands, ElementsAre(VariantWith(0), - VariantWith(8), + VariantWith(7), VariantWith(static_cast(EnvironmentVariable::TIMESTAMP)))))); // FEEPERL2GAS EXPECT_THAT( - instructions.at(8), + instructions.at(7), AllOf(Field(&Instruction::op_code, OpCode::GETENVVAR_16), Field(&Instruction::operands, ElementsAre(VariantWith(0), - VariantWith(9), + VariantWith(8), VariantWith(static_cast(EnvironmentVariable::FEEPERL2GAS)))))); // FEEPERDAGAS EXPECT_THAT( - instructions.at(9), + instructions.at(8), AllOf(Field(&Instruction::op_code, OpCode::GETENVVAR_16), Field(&Instruction::operands, ElementsAre(VariantWith(0), - VariantWith(10), + VariantWith(9), VariantWith(static_cast(EnvironmentVariable::FEEPERDAGAS)))))); // ISSTATICCALL EXPECT_THAT( - instructions.at(10), + instructions.at(9), AllOf(Field(&Instruction::op_code, OpCode::GETENVVAR_16), Field(&Instruction::operands, ElementsAre(VariantWith(0), - VariantWith(11), + VariantWith(10), VariantWith(static_cast(EnvironmentVariable::ISSTATICCALL)))))); // Public inputs for the circuit @@ -1529,7 +1521,6 @@ TEST_F(AvmExecutionTests, getEnvOpcode) FF sender = 1; FF address = contract_instance.address; - FF function_selector = 3; FF transaction_fee = 5; FF chainid = 6; FF version = 7; @@ -1542,8 +1533,8 @@ TEST_F(AvmExecutionTests, getEnvOpcode) // The return data for this test should be the opcodes in sequence, as the opcodes dst address lines up with // this array. The returndata call above will then return this array std::vector const expected_returndata = { - address, sender, function_selector, transaction_fee, chainid, version, - blocknumber, timestamp, feeperl2gas, feeperdagas, is_static_call, + address, sender, transaction_fee, chainid, version, + blocknumber, timestamp, feeperl2gas, feeperdagas, is_static_call, }; // Set up public inputs to contain the above values @@ -1552,7 +1543,6 @@ TEST_F(AvmExecutionTests, getEnvOpcode) // public_inputs.public_app_logic_call_requests[0].contract_address = address; public_inputs.public_app_logic_call_requests[0].msg_sender = sender; - public_inputs.public_app_logic_call_requests[0].function_selector = static_cast(function_selector); public_inputs.transaction_fee = transaction_fee; public_inputs.public_app_logic_call_requests[0].is_static_call = is_static_call > FF::zero(); @@ -1583,11 +1573,6 @@ TEST_F(AvmExecutionTests, getEnvOpcode) auto sender_row = std::ranges::find_if(trace.begin(), trace.end(), [](Row r) { return r.main_sel_op_sender == 1; }); EXPECT_EQ(sender_row->main_ia, sender); - // Check function selector - auto function_selector_row = - std::ranges::find_if(trace.begin(), trace.end(), [](Row r) { return r.main_sel_op_function_selector == 1; }); - EXPECT_EQ(function_selector_row->main_ia, function_selector); - // Check transactionfee auto transaction_fee_row = std::ranges::find_if(trace.begin(), trace.end(), [](Row r) { return r.main_sel_op_transaction_fee == 1; }); @@ -2364,6 +2349,8 @@ 
TEST_F(AvmExecutionTests, opCallOpcodes) TEST_F(AvmExecutionTests, opGetContractInstanceOpcode) { + // FIXME: Skip until we have an easy way to mock contract instance nullifier membership + GTEST_SKIP(); const uint8_t address_byte = 0x42; const FF address(address_byte); @@ -2385,6 +2372,7 @@ TEST_F(AvmExecutionTests, opGetContractInstanceOpcode) .contract_class_id = 66, .initialisation_hash = 99, .public_keys = public_keys_hints, + .membership_hint = { .low_leaf_preimage = { .nullifier = 0, .next_nullifier = 0, .next_index = 0, }, .low_leaf_index = 0, .low_leaf_sibling_path = {} }, }; auto execution_hints = ExecutionHints().with_contract_instance_hints({ { address, instance } }); diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/tests/kernel.test.cpp b/barretenberg/cpp/src/barretenberg/vm/avm/tests/kernel.test.cpp index da8117135739..c2f7fc1f1efd 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/tests/kernel.test.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/tests/kernel.test.cpp @@ -257,42 +257,6 @@ // test_kernel_lookup(true, indirect_apply_opcodes, checks); // } // -// TEST_F(AvmKernelPositiveTests, kernelFunctionSelector) -// { -// // Direct -// uint32_t dst_offset = 42; -// uint32_t indirect_dst_offset = 69; -// // We test that the function selector opcode is included at index 0 in the public inputs -// auto direct_apply_opcodes = [=](AvmTraceBuilder& trace_builder) { -// trace_builder.op_function_selector(/*indirect*/ 0, dst_offset); -// }; -// auto indirect_apply_opcodes = [=](AvmTraceBuilder& trace_builder) { -// trace_builder.op_set( -// /*indirect*/ 0, -// /*value*/ dst_offset, -// /*dst_offset*/ indirect_dst_offset, -// AvmMemoryTag::U32); -// trace_builder.op_function_selector(/*indirect*/ 1, indirect_dst_offset); -// }; -// -// auto checks = [=](bool indirect, const std::vector& trace) { -// auto row = std::ranges::find_if( -// trace.begin(), trace.end(), [](Row r) { return r.main_sel_op_function_selector == FF(1); }); -// EXPECT_TRUE(row != trace.end()); -// -// expect_row(row, -// /*kernel_in_offset=*/FUNCTION_SELECTOR_KERNEL_INPUTS_COL_OFFSET, -// /*ia=*/FUNCTION_SELECTOR_KERNEL_INPUTS_COL_OFFSET + -// 1, // Note the value generated above for public inputs is the same as the index read + 1 -// /*ind_a*/ indirect ? 
indirect_dst_offset : 0, -// /*mem_addr_a=*/dst_offset, -// /*w_in_tag=*/AvmMemoryTag::U32); -// }; -// -// test_kernel_lookup(false, direct_apply_opcodes, checks); -// test_kernel_lookup(true, indirect_apply_opcodes, checks); -// } -// // TEST_F(AvmKernelPositiveTests, kernelFeePerDa) // { // uint32_t dst_offset = 42; @@ -652,33 +616,6 @@ // negative_test_incorrect_ia_kernel_lookup(apply_opcodes, checks, incorrect_ia, BAD_LOOKUP); // } // -// TEST_F(AvmKernelNegativeTests, incorrectIaFunctionSelector) -// { -// uint32_t dst_offset = 42; -// FF incorrect_ia = FF(69); -// -// // We test that the sender opcode is inlcuded at index x in the public inputs -// auto apply_opcodes = [=](AvmTraceBuilder& trace_builder) { -// trace_builder.op_function_selector(/*indirect*/ 0, dst_offset); -// }; -// auto checks = [=](bool indirect, const std::vector& trace) { -// auto row = std::ranges::find_if( -// trace.begin(), trace.end(), [](Row r) { return r.main_sel_op_function_selector == FF(1); }); -// EXPECT_TRUE(row != trace.end()); -// -// expect_row( -// row, -// /*kernel_in_offset=*/FUNCTION_SELECTOR_KERNEL_INPUTS_COL_OFFSET, -// /*ia=*/incorrect_ia, // Note the value generated above for public inputs is the same as the index read + -// 1 -// /*ind_a*/ indirect, -// /*mem_addr_a=*/dst_offset, -// /*w_in_tag=*/AvmMemoryTag::U32); -// }; -// -// negative_test_incorrect_ia_kernel_lookup(apply_opcodes, checks, incorrect_ia, BAD_LOOKUP); -// } -// // TEST_F(AvmKernelNegativeTests, incorrectIaDaGas) // { // uint32_t dst_offset = 42; diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/trace/errors.hpp b/barretenberg/cpp/src/barretenberg/vm/avm/trace/errors.hpp index e31d486e502b..ca121ebefa29 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/trace/errors.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/trace/errors.hpp @@ -6,6 +6,7 @@ namespace bb::avm_trace { enum class AvmError : uint32_t { NO_ERROR, + REVERT_OPCODE, INVALID_PROGRAM_COUNTER, INVALID_OPCODE, INVALID_TAG_VALUE, @@ -18,6 +19,7 @@ enum class AvmError : uint32_t { CONTRACT_INST_MEM_UNKNOWN, RADIX_OUT_OF_BOUNDS, DUPLICATE_NULLIFIER, + SIDE_EFFECT_LIMIT_REACHED, }; } // namespace bb::avm_trace diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/trace/execution.cpp b/barretenberg/cpp/src/barretenberg/vm/avm/trace/execution.cpp index cab6b49ddb59..639db23a31bd 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/trace/execution.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/trace/execution.cpp @@ -38,8 +38,39 @@ using namespace bb; std::filesystem::path avm_dump_trace_path; namespace bb::avm_trace { + +std::string to_name(TxExecutionPhase phase) +{ + switch (phase) { + case TxExecutionPhase::SETUP: + return "SETUP"; + case TxExecutionPhase::APP_LOGIC: + return "APP_LOGIC"; + case TxExecutionPhase::TEARDOWN: + return "TEARDOWN"; + default: + throw std::runtime_error("Invalid tx phase"); + break; + } +} + +/************************************************************************************************** + * HELPERS IN ANONYMOUS NAMESPACE + **************************************************************************************************/ namespace { +template +std::vector non_empty_call_requests(std::array call_requests_array) +{ + std::vector call_requests_vec; + for (const auto& call_request : call_requests_array) { + if (!call_request.is_empty()) { + call_requests_vec.push_back(call_request); + } + } + return call_requests_vec; +} + // The SRS needs to be able to accommodate the circuit subgroup size. 
// Note: The *2 is due to how init_bn254_crs works, look there. static_assert(Execution::SRS_SIZE >= AvmCircuitBuilder::CIRCUIT_SUBGROUP_SIZE * 2); @@ -147,13 +178,16 @@ void show_trace_info(const auto& trace) } // namespace +/************************************************************************************************** + * Execution + **************************************************************************************************/ + // Needed for dependency injection in tests. Execution::TraceBuilderConstructor Execution::trace_builder_constructor = [](AvmPublicInputs public_inputs, ExecutionHints execution_hints, uint32_t side_effect_counter, std::vector calldata) { - return AvmTraceBuilder( - std::move(public_inputs), std::move(execution_hints), side_effect_counter, std::move(calldata)); + return AvmTraceBuilder(public_inputs, std::move(execution_hints), side_effect_counter, std::move(calldata)); }; /** @@ -172,18 +206,19 @@ std::vector Execution::getDefaultPublicInputs() * @brief Run the bytecode, generate the corresponding execution trace and prove the correctness * of the execution of the supplied bytecode. * - * @param bytecode A vector of bytes representing the bytecode to execute. - * @param calldata expressed as a vector of finite field elements. * @throws runtime_error exception when the bytecode is invalid. * @return The verifier key and zk proof of the execution. */ -std::tuple Execution::prove(std::vector const& calldata, - AvmPublicInputs const& public_inputs, +std::tuple Execution::prove(AvmPublicInputs const& public_inputs, ExecutionHints const& execution_hints) { std::vector returndata; - std::vector trace = - AVM_TRACK_TIME_V("prove/gen_trace", gen_trace(calldata, public_inputs, returndata, execution_hints)); + std::vector calldata; + for (const auto& enqueued_call_hints : execution_hints.enqueued_call_hints) { + calldata.insert(calldata.end(), enqueued_call_hints.calldata.begin(), enqueued_call_hints.calldata.end()); + } + std::vector trace = AVM_TRACK_TIME_V( + "prove/gen_trace", gen_trace(public_inputs, returndata, execution_hints, /*apply_e2e_assertions=*/true)); if (!avm_dump_trace_path.empty()) { info("Dumping trace as CSV to: " + avm_dump_trace_path.string()); dump_trace_as_csv(trace, avm_dump_trace_path); @@ -258,585 +293,641 @@ bool Execution::verify(AvmFlavor::VerificationKey vk, HonkProof const& proof) /** * @brief Generate the execution trace pertaining to the supplied instructions returns the return data. * - * @param instructions A vector of the instructions to be executed. - * @param calldata expressed as a vector of finite field elements. - * @param public_inputs expressed as a vector of finite field elements. + * @param public_inputs - to constrain execution inputs & results against + * @param returndata - to add to for each enqueued call + * @param execution_hints - to inform execution + * @param apply_e2e_assertions - should we apply assertions on public inputs (like end gas) and bytecode membership? * @return The trace as a vector of Row. 
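+ * @note Enqueued calls are run in three phases (SETUP, APP_LOGIC, TEARDOWN); a revert in one call skips the remaining calls of that phase. 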
*/ -std::vector Execution::gen_trace(std::vector const& calldata, - AvmPublicInputs const& public_inputs, +std::vector Execution::gen_trace(AvmPublicInputs const& public_inputs, std::vector& returndata, - ExecutionHints const& execution_hints) + ExecutionHints const& execution_hints, + bool apply_e2e_assertions) { vinfo("------- GENERATING TRACE -------"); // TODO(https://github.com/AztecProtocol/aztec-packages/issues/6718): construction of the public input columns // should be done in the kernel - this is stubbed and underconstrained // VmPublicInputs public_inputs = avm_trace::convert_public_inputs(public_inputs_vec); - uint32_t start_side_effect_counter = - 0; // What to do here??? - // !public_inputs_vec.empty() ? - // static_cast(public_inputs_vec[START_SIDE_EFFECT_COUNTER_PCPI_OFFSET]) - // : 0; - // + uint32_t start_side_effect_counter = 0; + // Temporary until we get proper nested call handling + std::vector calldata; + for (const auto& enqueued_call_hints : execution_hints.enqueued_call_hints) { + calldata.insert(calldata.end(), enqueued_call_hints.calldata.begin(), enqueued_call_hints.calldata.end()); + } AvmTraceBuilder trace_builder = Execution::trace_builder_constructor(public_inputs, execution_hints, start_side_effect_counter, calldata); - std::vector public_call_requests; - for (const auto& setup_requests : public_inputs.public_setup_call_requests) { - if (setup_requests.contract_address != 0) { - public_call_requests.push_back(setup_requests); - } - } - for (const auto& app_requests : public_inputs.public_app_logic_call_requests) { - if (app_requests.contract_address != 0) { - public_call_requests.push_back(app_requests); - } + const auto setup_call_requests = non_empty_call_requests(public_inputs.public_setup_call_requests); + const auto app_logic_call_requests = non_empty_call_requests(public_inputs.public_app_logic_call_requests); + std::vector teardown_call_requests; + if (!public_inputs.public_teardown_call_request.is_empty()) { + // teardown is always one call request + teardown_call_requests.push_back(public_inputs.public_teardown_call_request); } - // We should not need to guard teardown, but while we are testing with handcrafted txs we do - if (public_inputs.public_teardown_call_request.contract_address != 0) { - public_call_requests.push_back(public_inputs.public_teardown_call_request); - } - - // We should use the public input address, but for now we just take the first element in the list - // const std::vector& bytecode = execution_hints.all_contract_bytecode.at(0).bytecode; // Loop over all the public call requests uint8_t call_ctx = 0; - for (const auto& public_call_request : public_call_requests) { - trace_builder.set_public_call_request(public_call_request); - trace_builder.set_call_ptr(call_ctx++); - - // Find the bytecode based on contract address of the public call request - const std::vector& bytecode = - std::ranges::find_if(execution_hints.all_contract_bytecode, [public_call_request](const auto& contract) { - return contract.contract_instance.address == public_call_request.contract_address; - })->bytecode; - info("Found bytecode for contract address: ", public_call_request.contract_address); - - // Set this also on nested call - - // Copied version of pc maintained in trace builder. The value of pc is evolving based - // on opcode logic and therefore is not maintained here. However, the next opcode in the execution - // is determined by this value which require read access to the code below. 
- uint32_t pc = 0; - uint32_t counter = 0; - AvmError error = AvmError::NO_ERROR; - while (is_ok(error) && (pc = trace_builder.get_pc()) < bytecode.size()) { - auto [inst, parse_error] = Deserialization::parse(bytecode, pc); - error = parse_error; - - if (!is_ok(error)) { + const auto phases = { TxExecutionPhase::SETUP, TxExecutionPhase::APP_LOGIC, TxExecutionPhase::TEARDOWN }; + for (auto phase : phases) { + const auto public_call_requests = phase == TxExecutionPhase::SETUP ? setup_call_requests + : phase == TxExecutionPhase::APP_LOGIC ? app_logic_call_requests + : teardown_call_requests; + + // Reaching SETUP means private execution's non-revertible side effects are final; insert them now and checkpoint the trees + if (phase == TxExecutionPhase::SETUP) { + vinfo("Inserting non-revertible side effects from private before SETUP phase. Checkpointing trees."); + // Temporary spot for private non-revertible insertion + std::vector siloed_nullifiers; + siloed_nullifiers.insert( + siloed_nullifiers.end(), + public_inputs.previous_non_revertible_accumulated_data.nullifiers.begin(), + public_inputs.previous_non_revertible_accumulated_data.nullifiers.begin() + + public_inputs.previous_non_revertible_accumulated_data_array_lengths.nullifiers); + trace_builder.insert_private_state(siloed_nullifiers, {}); + trace_builder.checkpoint_non_revertible_state(); + } else if (phase == TxExecutionPhase::APP_LOGIC) { + vinfo("Inserting revertible side effects from private before APP_LOGIC phase"); + // Temporary spot for private revertible insertion + std::vector siloed_nullifiers; + siloed_nullifiers.insert(siloed_nullifiers.end(), + public_inputs.previous_revertible_accumulated_data.nullifiers.begin(), + public_inputs.previous_revertible_accumulated_data.nullifiers.begin() + + public_inputs.previous_revertible_accumulated_data_array_lengths.nullifiers); + trace_builder.insert_private_state(siloed_nullifiers, {}); + } + + vinfo("Beginning execution of phase ", to_name(phase), " (", public_call_requests.size(), " enqueued calls)."); + AvmError phase_error = AvmError::NO_ERROR; + for (auto public_call_request : public_call_requests) { + trace_builder.set_public_call_request(public_call_request); + trace_builder.set_call_ptr(call_ctx++); + // Execute!
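(The phase banners logged above use to_name(TxExecutionPhase), which is declared in execution.hpp later in this diff but whose definition is not shown here. A minimal sketch of the obvious implementation, for reference only:)

#include <string>

std::string to_name(TxExecutionPhase phase)
{
    switch (phase) {
    case TxExecutionPhase::SETUP:
        return "SETUP";
    case TxExecutionPhase::APP_LOGIC:
        return "APP_LOGIC";
    case TxExecutionPhase::TEARDOWN:
        return "TEARDOWN";
    }
    return "UNKNOWN"; // unreachable for the three valid phases
}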
+ phase_error = + Execution::execute_enqueued_call(trace_builder, public_call_request, returndata, apply_e2e_assertions); + + if (!is_ok(phase_error)) { + info("Phase ", to_name(phase), " reverted."); + // otherwise, reverting in a revertible phase rolls back state + vinfo("Rolling back tree roots to non-revertible checkpoint"); + trace_builder.rollback_to_non_revertible_checkpoint(); break; } + } - debug("[PC:" + std::to_string(pc) + "] [IC:" + std::to_string(counter++) + "] " + inst.to_string() + - " (gasLeft l2=" + std::to_string(trace_builder.get_l2_gas_left()) + ")"); - - switch (inst.op_code) { - // Compute - // Compute - Arithmetic - case OpCode::ADD_8: - error = trace_builder.op_add(std::get(inst.operands.at(0)), - std::get(inst.operands.at(1)), - std::get(inst.operands.at(2)), - std::get(inst.operands.at(3)), - OpCode::ADD_8); - break; - case OpCode::ADD_16: - error = trace_builder.op_add(std::get(inst.operands.at(0)), - std::get(inst.operands.at(1)), - std::get(inst.operands.at(2)), - std::get(inst.operands.at(3)), - OpCode::ADD_16); - break; - case OpCode::SUB_8: - error = trace_builder.op_sub(std::get(inst.operands.at(0)), - std::get(inst.operands.at(1)), - std::get(inst.operands.at(2)), - std::get(inst.operands.at(3)), - OpCode::SUB_8); - break; - case OpCode::SUB_16: - error = trace_builder.op_sub(std::get(inst.operands.at(0)), - std::get(inst.operands.at(1)), - std::get(inst.operands.at(2)), - std::get(inst.operands.at(3)), - OpCode::SUB_16); - break; - case OpCode::MUL_8: - error = trace_builder.op_mul(std::get(inst.operands.at(0)), - std::get(inst.operands.at(1)), - std::get(inst.operands.at(2)), - std::get(inst.operands.at(3)), - OpCode::MUL_8); - break; - case OpCode::MUL_16: - error = trace_builder.op_mul(std::get(inst.operands.at(0)), - std::get(inst.operands.at(1)), - std::get(inst.operands.at(2)), - std::get(inst.operands.at(3)), - OpCode::MUL_16); - break; - case OpCode::DIV_8: - error = trace_builder.op_div(std::get(inst.operands.at(0)), - std::get(inst.operands.at(1)), - std::get(inst.operands.at(2)), - std::get(inst.operands.at(3)), - OpCode::DIV_8); - break; - case OpCode::DIV_16: - error = trace_builder.op_div(std::get(inst.operands.at(0)), - std::get(inst.operands.at(1)), - std::get(inst.operands.at(2)), - std::get(inst.operands.at(3)), - OpCode::DIV_16); - break; - case OpCode::FDIV_8: - error = trace_builder.op_fdiv(std::get(inst.operands.at(0)), - std::get(inst.operands.at(1)), - std::get(inst.operands.at(2)), - std::get(inst.operands.at(3)), - OpCode::FDIV_8); - break; - case OpCode::FDIV_16: - error = trace_builder.op_fdiv(std::get(inst.operands.at(0)), - std::get(inst.operands.at(1)), - std::get(inst.operands.at(2)), - std::get(inst.operands.at(3)), - OpCode::FDIV_16); - break; - case OpCode::EQ_8: - error = trace_builder.op_eq(std::get(inst.operands.at(0)), - std::get(inst.operands.at(1)), - std::get(inst.operands.at(2)), - std::get(inst.operands.at(3)), - OpCode::EQ_8); - break; - case OpCode::EQ_16: - error = trace_builder.op_eq(std::get(inst.operands.at(0)), - std::get(inst.operands.at(1)), - std::get(inst.operands.at(2)), - std::get(inst.operands.at(3)), - OpCode::EQ_16); - break; - case OpCode::LT_8: - error = trace_builder.op_lt(std::get(inst.operands.at(0)), - std::get(inst.operands.at(1)), - std::get(inst.operands.at(2)), - std::get(inst.operands.at(3)), - OpCode::LT_8); - break; - case OpCode::LT_16: - error = trace_builder.op_lt(std::get(inst.operands.at(0)), - std::get(inst.operands.at(1)), - std::get(inst.operands.at(2)), - 
std::get(inst.operands.at(3)), - OpCode::LT_16); - break; - case OpCode::LTE_8: - error = trace_builder.op_lte(std::get(inst.operands.at(0)), - std::get(inst.operands.at(1)), - std::get(inst.operands.at(2)), - std::get(inst.operands.at(3)), - OpCode::LTE_8); - break; - case OpCode::LTE_16: - error = trace_builder.op_lte(std::get(inst.operands.at(0)), - std::get(inst.operands.at(1)), - std::get(inst.operands.at(2)), - std::get(inst.operands.at(3)), - OpCode::LTE_16); - break; - case OpCode::AND_8: - error = trace_builder.op_and(std::get(inst.operands.at(0)), - std::get(inst.operands.at(1)), - std::get(inst.operands.at(2)), - std::get(inst.operands.at(3)), - OpCode::AND_8); - break; - case OpCode::AND_16: - error = trace_builder.op_and(std::get(inst.operands.at(0)), - std::get(inst.operands.at(1)), - std::get(inst.operands.at(2)), - std::get(inst.operands.at(3)), - OpCode::AND_16); - break; - case OpCode::OR_8: - error = trace_builder.op_or(std::get(inst.operands.at(0)), - std::get(inst.operands.at(1)), - std::get(inst.operands.at(2)), - std::get(inst.operands.at(3)), - OpCode::OR_8); - break; - case OpCode::OR_16: - error = trace_builder.op_or(std::get(inst.operands.at(0)), - std::get(inst.operands.at(1)), - std::get(inst.operands.at(2)), - std::get(inst.operands.at(3)), - OpCode::OR_16); - break; - case OpCode::XOR_8: - error = trace_builder.op_xor(std::get(inst.operands.at(0)), - std::get(inst.operands.at(1)), - std::get(inst.operands.at(2)), - std::get(inst.operands.at(3)), - OpCode::XOR_8); - break; - case OpCode::XOR_16: - error = trace_builder.op_xor(std::get(inst.operands.at(0)), - std::get(inst.operands.at(1)), - std::get(inst.operands.at(2)), - std::get(inst.operands.at(3)), - OpCode::XOR_16); - break; - case OpCode::NOT_8: - error = trace_builder.op_not(std::get(inst.operands.at(0)), - std::get(inst.operands.at(1)), - std::get(inst.operands.at(2)), - OpCode::NOT_8); - break; - case OpCode::NOT_16: - error = trace_builder.op_not(std::get(inst.operands.at(0)), - std::get(inst.operands.at(1)), - std::get(inst.operands.at(2)), - OpCode::NOT_16); - break; - case OpCode::SHL_8: - error = trace_builder.op_shl(std::get(inst.operands.at(0)), - std::get(inst.operands.at(1)), - std::get(inst.operands.at(2)), - std::get(inst.operands.at(3)), - OpCode::SHL_8); - break; - case OpCode::SHL_16: - error = trace_builder.op_shl(std::get(inst.operands.at(0)), - std::get(inst.operands.at(1)), - std::get(inst.operands.at(2)), - std::get(inst.operands.at(3)), - OpCode::SHL_16); - break; - case OpCode::SHR_8: - error = trace_builder.op_shr(std::get(inst.operands.at(0)), - std::get(inst.operands.at(1)), - std::get(inst.operands.at(2)), - std::get(inst.operands.at(3)), - OpCode::SHR_8); - break; - case OpCode::SHR_16: - error = trace_builder.op_shr(std::get(inst.operands.at(0)), - std::get(inst.operands.at(1)), - std::get(inst.operands.at(2)), - std::get(inst.operands.at(3)), - OpCode::SHR_16); - break; + if (!is_ok(phase_error) && phase == TxExecutionPhase::SETUP) { + // Stop processing phases. Halt TX. 
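+ // (SETUP is the non-revertible phase: unlike an APP_LOGIC or TEARDOWN revert,
+ // a SETUP failure cannot be absorbed by the rollback above, so no later phase runs.)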
+ info("A revert during SETUP phase halts the entire TX"); + break; + } + } + auto trace = trace_builder.finalize(apply_e2e_assertions); - // Compute - Type Conversions - case OpCode::CAST_8: - error = trace_builder.op_cast(std::get(inst.operands.at(0)), - std::get(inst.operands.at(1)), - std::get(inst.operands.at(2)), - std::get(inst.operands.at(3)), - OpCode::CAST_8); - break; - case OpCode::CAST_16: - error = trace_builder.op_cast(std::get(inst.operands.at(0)), - std::get(inst.operands.at(1)), - std::get(inst.operands.at(2)), - std::get(inst.operands.at(3)), - OpCode::CAST_16); - break; + show_trace_info(trace); + return trace; +} - // Execution Environment - // TODO(https://github.com/AztecProtocol/aztec-packages/issues/6284): support indirect for below - case OpCode::GETENVVAR_16: - error = trace_builder.op_get_env_var(std::get(inst.operands.at(0)), - std::get(inst.operands.at(1)), - std::get(inst.operands.at(2))); - break; +/** + * @brief Execute one enqueued call, adding its results to the trace. + * + * @param trace_builder - the trace builder to add rows to + * @param public_call_request - the enqueued call to execute + * @param returndata - to add to for each enqueued call + * @returns the error/result of the enqueued call + * + */ +AvmError Execution::execute_enqueued_call(AvmTraceBuilder& trace_builder, + PublicCallRequest& public_call_request, + std::vector& returndata, + bool check_bytecode_membership) +{ + AvmError error = AvmError::NO_ERROR; + // Find the bytecode based on contract address of the public call request + // TODO(dbanks12): accept check_membership flag as arg + std::vector bytecode = + trace_builder.get_bytecode(public_call_request.contract_address, check_bytecode_membership); + + // Set this also on nested call + + // Copied version of pc maintained in trace builder. The value of pc is evolving based + // on opcode logic and therefore is not maintained here. However, the next opcode in the execution + // is determined by this value which require read access to the code below. 
+ uint32_t pc = 0; + uint32_t counter = 0; + while (is_ok(error) && (pc = trace_builder.get_pc()) < bytecode.size()) { + auto [inst, parse_error] = Deserialization::parse(bytecode, pc); + error = parse_error; - // Execution Environment - Calldata - case OpCode::CALLDATACOPY: - error = trace_builder.op_calldata_copy(std::get(inst.operands.at(0)), - std::get(inst.operands.at(1)), - std::get(inst.operands.at(2)), - std::get(inst.operands.at(3))); - break; + if (!is_ok(error)) { + break; + } - case OpCode::RETURNDATASIZE: - error = trace_builder.op_returndata_size(std::get(inst.operands.at(0)), - std::get(inst.operands.at(1))); - break; + debug("[PC:" + std::to_string(pc) + "] [IC:" + std::to_string(counter++) + "] " + inst.to_string() + + " (gasLeft l2=" + std::to_string(trace_builder.get_l2_gas_left()) + ")"); + + switch (inst.op_code) { + // Compute + // Compute - Arithmetic + case OpCode::ADD_8: + error = trace_builder.op_add(std::get(inst.operands.at(0)), + std::get(inst.operands.at(1)), + std::get(inst.operands.at(2)), + std::get(inst.operands.at(3)), + OpCode::ADD_8); + break; + case OpCode::ADD_16: + error = trace_builder.op_add(std::get(inst.operands.at(0)), + std::get(inst.operands.at(1)), + std::get(inst.operands.at(2)), + std::get(inst.operands.at(3)), + OpCode::ADD_16); + break; + case OpCode::SUB_8: + error = trace_builder.op_sub(std::get(inst.operands.at(0)), + std::get(inst.operands.at(1)), + std::get(inst.operands.at(2)), + std::get(inst.operands.at(3)), + OpCode::SUB_8); + break; + case OpCode::SUB_16: + error = trace_builder.op_sub(std::get(inst.operands.at(0)), + std::get(inst.operands.at(1)), + std::get(inst.operands.at(2)), + std::get(inst.operands.at(3)), + OpCode::SUB_16); + break; + case OpCode::MUL_8: + error = trace_builder.op_mul(std::get(inst.operands.at(0)), + std::get(inst.operands.at(1)), + std::get(inst.operands.at(2)), + std::get(inst.operands.at(3)), + OpCode::MUL_8); + break; + case OpCode::MUL_16: + error = trace_builder.op_mul(std::get(inst.operands.at(0)), + std::get(inst.operands.at(1)), + std::get(inst.operands.at(2)), + std::get(inst.operands.at(3)), + OpCode::MUL_16); + break; + case OpCode::DIV_8: + error = trace_builder.op_div(std::get(inst.operands.at(0)), + std::get(inst.operands.at(1)), + std::get(inst.operands.at(2)), + std::get(inst.operands.at(3)), + OpCode::DIV_8); + break; + case OpCode::DIV_16: + error = trace_builder.op_div(std::get(inst.operands.at(0)), + std::get(inst.operands.at(1)), + std::get(inst.operands.at(2)), + std::get(inst.operands.at(3)), + OpCode::DIV_16); + break; + case OpCode::FDIV_8: + error = trace_builder.op_fdiv(std::get(inst.operands.at(0)), + std::get(inst.operands.at(1)), + std::get(inst.operands.at(2)), + std::get(inst.operands.at(3)), + OpCode::FDIV_8); + break; + case OpCode::FDIV_16: + error = trace_builder.op_fdiv(std::get(inst.operands.at(0)), + std::get(inst.operands.at(1)), + std::get(inst.operands.at(2)), + std::get(inst.operands.at(3)), + OpCode::FDIV_16); + break; + case OpCode::EQ_8: + error = trace_builder.op_eq(std::get(inst.operands.at(0)), + std::get(inst.operands.at(1)), + std::get(inst.operands.at(2)), + std::get(inst.operands.at(3)), + OpCode::EQ_8); + break; + case OpCode::EQ_16: + error = trace_builder.op_eq(std::get(inst.operands.at(0)), + std::get(inst.operands.at(1)), + std::get(inst.operands.at(2)), + std::get(inst.operands.at(3)), + OpCode::EQ_16); + break; + case OpCode::LT_8: + error = trace_builder.op_lt(std::get(inst.operands.at(0)), + std::get(inst.operands.at(1)), + 
std::get(inst.operands.at(2)), + std::get(inst.operands.at(3)), + OpCode::LT_8); + break; + case OpCode::LT_16: + error = trace_builder.op_lt(std::get(inst.operands.at(0)), + std::get(inst.operands.at(1)), + std::get(inst.operands.at(2)), + std::get(inst.operands.at(3)), + OpCode::LT_16); + break; + case OpCode::LTE_8: + error = trace_builder.op_lte(std::get(inst.operands.at(0)), + std::get(inst.operands.at(1)), + std::get(inst.operands.at(2)), + std::get(inst.operands.at(3)), + OpCode::LTE_8); + break; + case OpCode::LTE_16: + error = trace_builder.op_lte(std::get(inst.operands.at(0)), + std::get(inst.operands.at(1)), + std::get(inst.operands.at(2)), + std::get(inst.operands.at(3)), + OpCode::LTE_16); + break; + case OpCode::AND_8: + error = trace_builder.op_and(std::get(inst.operands.at(0)), + std::get(inst.operands.at(1)), + std::get(inst.operands.at(2)), + std::get(inst.operands.at(3)), + OpCode::AND_8); + break; + case OpCode::AND_16: + error = trace_builder.op_and(std::get(inst.operands.at(0)), + std::get(inst.operands.at(1)), + std::get(inst.operands.at(2)), + std::get(inst.operands.at(3)), + OpCode::AND_16); + break; + case OpCode::OR_8: + error = trace_builder.op_or(std::get(inst.operands.at(0)), + std::get(inst.operands.at(1)), + std::get(inst.operands.at(2)), + std::get(inst.operands.at(3)), + OpCode::OR_8); + break; + case OpCode::OR_16: + error = trace_builder.op_or(std::get(inst.operands.at(0)), + std::get(inst.operands.at(1)), + std::get(inst.operands.at(2)), + std::get(inst.operands.at(3)), + OpCode::OR_16); + break; + case OpCode::XOR_8: + error = trace_builder.op_xor(std::get(inst.operands.at(0)), + std::get(inst.operands.at(1)), + std::get(inst.operands.at(2)), + std::get(inst.operands.at(3)), + OpCode::XOR_8); + break; + case OpCode::XOR_16: + error = trace_builder.op_xor(std::get(inst.operands.at(0)), + std::get(inst.operands.at(1)), + std::get(inst.operands.at(2)), + std::get(inst.operands.at(3)), + OpCode::XOR_16); + break; + case OpCode::NOT_8: + error = trace_builder.op_not(std::get(inst.operands.at(0)), + std::get(inst.operands.at(1)), + std::get(inst.operands.at(2)), + OpCode::NOT_8); + break; + case OpCode::NOT_16: + error = trace_builder.op_not(std::get(inst.operands.at(0)), + std::get(inst.operands.at(1)), + std::get(inst.operands.at(2)), + OpCode::NOT_16); + break; + case OpCode::SHL_8: + error = trace_builder.op_shl(std::get(inst.operands.at(0)), + std::get(inst.operands.at(1)), + std::get(inst.operands.at(2)), + std::get(inst.operands.at(3)), + OpCode::SHL_8); + break; + case OpCode::SHL_16: + error = trace_builder.op_shl(std::get(inst.operands.at(0)), + std::get(inst.operands.at(1)), + std::get(inst.operands.at(2)), + std::get(inst.operands.at(3)), + OpCode::SHL_16); + break; + case OpCode::SHR_8: + error = trace_builder.op_shr(std::get(inst.operands.at(0)), + std::get(inst.operands.at(1)), + std::get(inst.operands.at(2)), + std::get(inst.operands.at(3)), + OpCode::SHR_8); + break; + case OpCode::SHR_16: + error = trace_builder.op_shr(std::get(inst.operands.at(0)), + std::get(inst.operands.at(1)), + std::get(inst.operands.at(2)), + std::get(inst.operands.at(3)), + OpCode::SHR_16); + break; + + // Compute - Type Conversions + case OpCode::CAST_8: + error = trace_builder.op_cast(std::get(inst.operands.at(0)), + std::get(inst.operands.at(1)), + std::get(inst.operands.at(2)), + std::get(inst.operands.at(3)), + OpCode::CAST_8); + break; + case OpCode::CAST_16: + error = trace_builder.op_cast(std::get(inst.operands.at(0)), + std::get(inst.operands.at(1)), + 
std::get(inst.operands.at(2)), + std::get(inst.operands.at(3)), + OpCode::CAST_16); + break; + + // Execution Environment + // TODO(https://github.com/AztecProtocol/aztec-packages/issues/6284): support indirect for below + case OpCode::GETENVVAR_16: + error = trace_builder.op_get_env_var(std::get(inst.operands.at(0)), + std::get(inst.operands.at(1)), + std::get(inst.operands.at(2))); + break; + + // Execution Environment - Calldata + case OpCode::CALLDATACOPY: + error = trace_builder.op_calldata_copy(std::get(inst.operands.at(0)), + std::get(inst.operands.at(1)), + std::get(inst.operands.at(2)), + std::get(inst.operands.at(3))); + break; + + case OpCode::RETURNDATASIZE: + error = trace_builder.op_returndata_size(std::get(inst.operands.at(0)), + std::get(inst.operands.at(1))); + break; - case OpCode::RETURNDATACOPY: - error = trace_builder.op_returndata_copy(std::get(inst.operands.at(0)), + case OpCode::RETURNDATACOPY: + error = trace_builder.op_returndata_copy(std::get(inst.operands.at(0)), + std::get(inst.operands.at(1)), + std::get(inst.operands.at(2)), + std::get(inst.operands.at(3))); + break; + + // Machine State - Internal Control Flow + case OpCode::JUMP_32: + error = trace_builder.op_jump(std::get(inst.operands.at(0))); + break; + case OpCode::JUMPI_32: + error = trace_builder.op_jumpi(std::get(inst.operands.at(0)), + std::get(inst.operands.at(1)), + std::get(inst.operands.at(2))); + break; + case OpCode::INTERNALCALL: + error = trace_builder.op_internal_call(std::get(inst.operands.at(0))); + break; + case OpCode::INTERNALRETURN: + error = trace_builder.op_internal_return(); + break; + + // Machine State - Memory + case OpCode::SET_8: { + error = trace_builder.op_set(std::get(inst.operands.at(0)), + std::get(inst.operands.at(3)), + std::get(inst.operands.at(1)), + std::get(inst.operands.at(2)), + OpCode::SET_8); + break; + } + case OpCode::SET_16: { + error = trace_builder.op_set(std::get(inst.operands.at(0)), + std::get(inst.operands.at(3)), + std::get(inst.operands.at(1)), + std::get(inst.operands.at(2)), + OpCode::SET_16); + break; + } + case OpCode::SET_32: { + error = trace_builder.op_set(std::get(inst.operands.at(0)), + std::get(inst.operands.at(3)), + std::get(inst.operands.at(1)), + std::get(inst.operands.at(2)), + OpCode::SET_32); + break; + } + case OpCode::SET_64: { + error = trace_builder.op_set(std::get(inst.operands.at(0)), + std::get(inst.operands.at(3)), + std::get(inst.operands.at(1)), + std::get(inst.operands.at(2)), + OpCode::SET_64); + break; + } + case OpCode::SET_128: { + error = trace_builder.op_set(std::get(inst.operands.at(0)), + uint256_t::from_uint128(std::get(inst.operands.at(3))), + std::get(inst.operands.at(1)), + std::get(inst.operands.at(2)), + OpCode::SET_128); + break; + } + case OpCode::SET_FF: { + error = trace_builder.op_set(std::get(inst.operands.at(0)), + std::get(inst.operands.at(3)), + std::get(inst.operands.at(1)), + std::get(inst.operands.at(2)), + OpCode::SET_FF); + break; + } + case OpCode::MOV_8: + error = trace_builder.op_mov(std::get(inst.operands.at(0)), + std::get(inst.operands.at(1)), + std::get(inst.operands.at(2)), + OpCode::MOV_8); + break; + case OpCode::MOV_16: + error = trace_builder.op_mov(std::get(inst.operands.at(0)), + std::get(inst.operands.at(1)), + std::get(inst.operands.at(2)), + OpCode::MOV_16); + break; + + // World State + case OpCode::SLOAD: + error = trace_builder.op_sload(std::get(inst.operands.at(0)), + std::get(inst.operands.at(1)), + std::get(inst.operands.at(2))); + break; + case OpCode::SSTORE: + 
error = trace_builder.op_sstore(std::get(inst.operands.at(0)), + std::get(inst.operands.at(1)), + std::get(inst.operands.at(2))); + break; + case OpCode::NOTEHASHEXISTS: + error = trace_builder.op_note_hash_exists(std::get(inst.operands.at(0)), + std::get(inst.operands.at(1)), + std::get(inst.operands.at(2)), + std::get(inst.operands.at(3))); + break; + case OpCode::EMITNOTEHASH: + error = trace_builder.op_emit_note_hash(std::get(inst.operands.at(0)), + std::get(inst.operands.at(1))); + break; + case OpCode::NULLIFIEREXISTS: + error = trace_builder.op_nullifier_exists(std::get(inst.operands.at(0)), + std::get(inst.operands.at(1)), + std::get(inst.operands.at(2)), + std::get(inst.operands.at(3))); + break; + case OpCode::EMITNULLIFIER: + error = trace_builder.op_emit_nullifier(std::get(inst.operands.at(0)), + std::get(inst.operands.at(1))); + break; + + case OpCode::L1TOL2MSGEXISTS: + error = trace_builder.op_l1_to_l2_msg_exists(std::get(inst.operands.at(0)), std::get(inst.operands.at(1)), std::get(inst.operands.at(2)), std::get(inst.operands.at(3))); - break; - - // Machine State - Internal Control Flow - case OpCode::JUMP_32: - error = trace_builder.op_jump(std::get(inst.operands.at(0))); - break; - case OpCode::JUMPI_32: - error = trace_builder.op_jumpi(std::get(inst.operands.at(0)), + break; + case OpCode::GETCONTRACTINSTANCE: + error = trace_builder.op_get_contract_instance(std::get(inst.operands.at(0)), + std::get(inst.operands.at(1)), + std::get(inst.operands.at(2)), + std::get(inst.operands.at(3)), + std::get(inst.operands.at(4))); + break; + + // Accrued Substate + case OpCode::EMITUNENCRYPTEDLOG: + error = trace_builder.op_emit_unencrypted_log(std::get(inst.operands.at(0)), + std::get(inst.operands.at(1)), + std::get(inst.operands.at(2))); + break; + case OpCode::SENDL2TOL1MSG: + error = trace_builder.op_emit_l2_to_l1_msg(std::get(inst.operands.at(0)), + std::get(inst.operands.at(1)), + std::get(inst.operands.at(2))); + break; + + // Control Flow - Contract Calls + case OpCode::CALL: + error = trace_builder.op_call(std::get(inst.operands.at(0)), + std::get(inst.operands.at(1)), + std::get(inst.operands.at(2)), + std::get(inst.operands.at(3)), + std::get(inst.operands.at(4)), + std::get(inst.operands.at(5))); + break; + case OpCode::STATICCALL: + error = trace_builder.op_static_call(std::get(inst.operands.at(0)), + std::get(inst.operands.at(1)), + std::get(inst.operands.at(2)), + std::get(inst.operands.at(3)), + std::get(inst.operands.at(4)), + std::get(inst.operands.at(5))); + break; + case OpCode::RETURN: { + auto ret = trace_builder.op_return(std::get(inst.operands.at(0)), std::get(inst.operands.at(1)), - std::get(inst.operands.at(2))); - break; - case OpCode::INTERNALCALL: - error = trace_builder.op_internal_call(std::get(inst.operands.at(0))); - break; - case OpCode::INTERNALRETURN: - error = trace_builder.op_internal_return(); - break; - - // Machine State - Memory - case OpCode::SET_8: { - error = trace_builder.op_set(std::get(inst.operands.at(0)), - std::get(inst.operands.at(3)), - std::get(inst.operands.at(1)), - std::get(inst.operands.at(2)), - OpCode::SET_8); - break; - } - case OpCode::SET_16: { - error = trace_builder.op_set(std::get(inst.operands.at(0)), - std::get(inst.operands.at(3)), - std::get(inst.operands.at(1)), - std::get(inst.operands.at(2)), - OpCode::SET_16); - break; - } - case OpCode::SET_32: { - error = trace_builder.op_set(std::get(inst.operands.at(0)), - std::get(inst.operands.at(3)), - std::get(inst.operands.at(1)), - std::get(inst.operands.at(2)), - 
OpCode::SET_32); - break; - } - case OpCode::SET_64: { - error = trace_builder.op_set(std::get(inst.operands.at(0)), - std::get(inst.operands.at(3)), - std::get(inst.operands.at(1)), - std::get(inst.operands.at(2)), - OpCode::SET_64); - break; - } - case OpCode::SET_128: { - error = trace_builder.op_set(std::get(inst.operands.at(0)), - uint256_t::from_uint128(std::get(inst.operands.at(3))), - std::get(inst.operands.at(1)), - std::get(inst.operands.at(2)), - OpCode::SET_128); - break; - } - case OpCode::SET_FF: { - error = trace_builder.op_set(std::get(inst.operands.at(0)), - std::get(inst.operands.at(3)), - std::get(inst.operands.at(1)), - std::get(inst.operands.at(2)), - OpCode::SET_FF); - break; - } - case OpCode::MOV_8: - error = trace_builder.op_mov(std::get(inst.operands.at(0)), - std::get(inst.operands.at(1)), - std::get(inst.operands.at(2)), - OpCode::MOV_8); - break; - case OpCode::MOV_16: - error = trace_builder.op_mov(std::get(inst.operands.at(0)), - std::get(inst.operands.at(1)), - std::get(inst.operands.at(2)), - OpCode::MOV_16); - break; + std::get(inst.operands.at(2))); + error = ret.error; + returndata.insert(returndata.end(), ret.return_data.begin(), ret.return_data.end()); - // World State - case OpCode::SLOAD: - error = trace_builder.op_sload(std::get(inst.operands.at(0)), + break; + } + case OpCode::REVERT_8: { + info("HIT REVERT_8 ", "[PC=" + std::to_string(pc) + "] " + inst.to_string()); + auto ret = trace_builder.op_revert(std::get(inst.operands.at(0)), + std::get(inst.operands.at(1)), + std::get(inst.operands.at(2))); + error = ret.error; + returndata.insert(returndata.end(), ret.return_data.begin(), ret.return_data.end()); + + break; + } + case OpCode::REVERT_16: { + info("HIT REVERT_16 ", "[PC=" + std::to_string(pc) + "] " + inst.to_string()); + auto ret = trace_builder.op_revert(std::get(inst.operands.at(0)), std::get(inst.operands.at(1)), std::get(inst.operands.at(2))); - break; - case OpCode::SSTORE: - error = trace_builder.op_sstore(std::get(inst.operands.at(0)), - std::get(inst.operands.at(1)), - std::get(inst.operands.at(2))); - break; - case OpCode::NOTEHASHEXISTS: - error = trace_builder.op_note_hash_exists(std::get(inst.operands.at(0)), - std::get(inst.operands.at(1)), - std::get(inst.operands.at(2)), - std::get(inst.operands.at(3))); - break; - case OpCode::EMITNOTEHASH: - error = trace_builder.op_emit_note_hash(std::get(inst.operands.at(0)), - std::get(inst.operands.at(1))); - break; - case OpCode::NULLIFIEREXISTS: - error = trace_builder.op_nullifier_exists(std::get(inst.operands.at(0)), - std::get(inst.operands.at(1)), - std::get(inst.operands.at(2)), - std::get(inst.operands.at(3))); - break; - case OpCode::EMITNULLIFIER: - error = trace_builder.op_emit_nullifier(std::get(inst.operands.at(0)), - std::get(inst.operands.at(1))); - break; + error = ret.error; + returndata.insert(returndata.end(), ret.return_data.begin(), ret.return_data.end()); - case OpCode::L1TOL2MSGEXISTS: - error = trace_builder.op_l1_to_l2_msg_exists(std::get(inst.operands.at(0)), - std::get(inst.operands.at(1)), - std::get(inst.operands.at(2)), - std::get(inst.operands.at(3))); - break; - case OpCode::GETCONTRACTINSTANCE: - error = trace_builder.op_get_contract_instance(std::get(inst.operands.at(0)), - std::get(inst.operands.at(1)), - std::get(inst.operands.at(2)), - std::get(inst.operands.at(3)), - std::get(inst.operands.at(4))); - break; + break; + } - // Accrued Substate - case OpCode::EMITUNENCRYPTEDLOG: - error = 
trace_builder.op_emit_unencrypted_log(std::get(inst.operands.at(0)), - std::get(inst.operands.at(1)), - std::get(inst.operands.at(2))); - break; - case OpCode::SENDL2TOL1MSG: - error = trace_builder.op_emit_l2_to_l1_msg(std::get(inst.operands.at(0)), + // Misc + case OpCode::DEBUGLOG: + error = trace_builder.op_debug_log(std::get(inst.operands.at(0)), + std::get(inst.operands.at(1)), + std::get(inst.operands.at(2)), + std::get(inst.operands.at(3)), + std::get(inst.operands.at(4))); + break; + + // Gadgets + case OpCode::POSEIDON2PERM: + error = trace_builder.op_poseidon2_permutation(std::get(inst.operands.at(0)), std::get(inst.operands.at(1)), std::get(inst.operands.at(2))); - break; - // Control Flow - Contract Calls - case OpCode::CALL: - error = trace_builder.op_call(std::get(inst.operands.at(0)), - std::get(inst.operands.at(1)), - std::get(inst.operands.at(2)), - std::get(inst.operands.at(3)), - std::get(inst.operands.at(4)), - std::get(inst.operands.at(5))); - break; - case OpCode::STATICCALL: - error = trace_builder.op_static_call(std::get(inst.operands.at(0)), - std::get(inst.operands.at(1)), - std::get(inst.operands.at(2)), - std::get(inst.operands.at(3)), - std::get(inst.operands.at(4)), - std::get(inst.operands.at(5))); - break; - case OpCode::RETURN: { - auto ret = trace_builder.op_return(std::get(inst.operands.at(0)), - std::get(inst.operands.at(1)), - std::get(inst.operands.at(2))); - error = ret.error; - returndata.insert(returndata.end(), ret.return_data.begin(), ret.return_data.end()); + break; - break; - } - case OpCode::REVERT_8: { - info("HIT REVERT_8 ", "[PC=" + std::to_string(pc) + "] " + inst.to_string()); - auto ret = trace_builder.op_revert(std::get(inst.operands.at(0)), - std::get(inst.operands.at(1)), - std::get(inst.operands.at(2))); - error = ret.error; - returndata.insert(returndata.end(), ret.return_data.begin(), ret.return_data.end()); + case OpCode::SHA256COMPRESSION: + error = trace_builder.op_sha256_compression(std::get(inst.operands.at(0)), + std::get(inst.operands.at(1)), + std::get(inst.operands.at(2)), + std::get(inst.operands.at(3))); + break; - break; - } - case OpCode::REVERT_16: { - info("HIT REVERT_16 ", "[PC=" + std::to_string(pc) + "] " + inst.to_string()); - auto ret = trace_builder.op_revert(std::get(inst.operands.at(0)), - std::get(inst.operands.at(1)), - std::get(inst.operands.at(2))); - error = ret.error; - returndata.insert(returndata.end(), ret.return_data.begin(), ret.return_data.end()); + case OpCode::KECCAKF1600: + error = trace_builder.op_keccakf1600(std::get(inst.operands.at(0)), + std::get(inst.operands.at(1)), + std::get(inst.operands.at(2))); - break; - } + break; - // Misc - case OpCode::DEBUGLOG: - error = trace_builder.op_debug_log(std::get(inst.operands.at(0)), - std::get(inst.operands.at(1)), - std::get(inst.operands.at(2)), - std::get(inst.operands.at(3)), - std::get(inst.operands.at(4))); - break; - - // Gadgets - case OpCode::POSEIDON2PERM: - error = trace_builder.op_poseidon2_permutation(std::get(inst.operands.at(0)), - std::get(inst.operands.at(1)), - std::get(inst.operands.at(2))); - - break; - - case OpCode::SHA256COMPRESSION: - error = trace_builder.op_sha256_compression(std::get(inst.operands.at(0)), - std::get(inst.operands.at(1)), - std::get(inst.operands.at(2)), - std::get(inst.operands.at(3))); - break; - - case OpCode::KECCAKF1600: - error = trace_builder.op_keccakf1600(std::get(inst.operands.at(0)), - std::get(inst.operands.at(1)), - std::get(inst.operands.at(2))); - - break; - - case OpCode::ECADD: - error = 
trace_builder.op_ec_add(std::get(inst.operands.at(0)), - std::get(inst.operands.at(1)), - std::get(inst.operands.at(2)), - std::get(inst.operands.at(3)), - std::get(inst.operands.at(4)), - std::get(inst.operands.at(5)), - std::get(inst.operands.at(6)), - std::get(inst.operands.at(7))); - break; - case OpCode::MSM: - error = trace_builder.op_variable_msm(std::get(inst.operands.at(0)), - std::get(inst.operands.at(1)), - std::get(inst.operands.at(2)), - std::get(inst.operands.at(3)), - std::get(inst.operands.at(4))); - break; - - // Conversions - case OpCode::TORADIXBE: - error = trace_builder.op_to_radix_be(std::get(inst.operands.at(0)), - std::get(inst.operands.at(1)), - std::get(inst.operands.at(2)), - std::get(inst.operands.at(3)), - std::get(inst.operands.at(4)), - std::get(inst.operands.at(5))); - break; - - default: - throw_or_abort("Don't know how to execute opcode " + to_hex(inst.op_code) + " at pc " + - std::to_string(pc) + "."); - break; - } - } - - if (!is_ok(error)) { - info("AVM stopped due to exceptional halting condition. Error: ", - to_name(error), - " at PC: ", - pc, - " IC: ", - counter - 1); // Need adjustement as counter increment occurs in loop body + case OpCode::ECADD: + error = trace_builder.op_ec_add(std::get(inst.operands.at(0)), + std::get(inst.operands.at(1)), + std::get(inst.operands.at(2)), + std::get(inst.operands.at(3)), + std::get(inst.operands.at(4)), + std::get(inst.operands.at(5)), + std::get(inst.operands.at(6)), + std::get(inst.operands.at(7))); + break; + case OpCode::MSM: + error = trace_builder.op_variable_msm(std::get(inst.operands.at(0)), + std::get(inst.operands.at(1)), + std::get(inst.operands.at(2)), + std::get(inst.operands.at(3)), + std::get(inst.operands.at(4))); + break; + + // Conversions + case OpCode::TORADIXBE: + error = trace_builder.op_to_radix_be(std::get(inst.operands.at(0)), + std::get(inst.operands.at(1)), + std::get(inst.operands.at(2)), + std::get(inst.operands.at(3)), + std::get(inst.operands.at(4)), + std::get(inst.operands.at(5))); + break; + + default: + throw_or_abort("Don't know how to execute opcode " + to_hex(inst.op_code) + " at pc " + std::to_string(pc) + + "."); + break; + } } - auto trace = trace_builder.finalize(); - - show_trace_info(trace); - return trace; + if (!is_ok(error)) { + auto const error_ic = counter - 1; // Need adjustment as counter increment occurs in loop body + std::string reason_prefix = exceptionally_halted(error) ? "exceptional halt" : "REVERT opcode"; + info("AVM enqueued call halted due to ", + reason_prefix, + ". 
Error: ", + to_name(error), + " at PC: ", + pc, + " IC: ", + error_ic); + } + return error; } } // namespace bb::avm_trace diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/trace/execution.hpp b/barretenberg/cpp/src/barretenberg/vm/avm/trace/execution.hpp index a9f3ad6d6955..df048bd8622c 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/trace/execution.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/trace/execution.hpp @@ -13,6 +13,14 @@ namespace bb::avm_trace { +enum class TxExecutionPhase : uint32_t { + SETUP, + APP_LOGIC, + TEARDOWN, +}; + +std::string to_name(TxExecutionPhase phase); + class Execution { public: static constexpr size_t SRS_SIZE = 1 << 22; @@ -29,10 +37,15 @@ class Execution { // Bytecode is currently the bytecode of the top-level function call // Eventually this will be the bytecode of the dispatch function of top-level contract - static std::vector gen_trace(std::vector const& calldata, - AvmPublicInputs const& new_public_inputs, + static std::vector gen_trace(AvmPublicInputs const& public_inputs, std::vector& returndata, - ExecutionHints const& execution_hints); + ExecutionHints const& execution_hints, + bool apply_e2e_assertions = false); + + static AvmError execute_enqueued_call(AvmTraceBuilder& trace_builder, + PublicCallRequest& public_call_request, + std::vector& returndata, + bool check_bytecode_membership); // For testing purposes only. static void set_trace_builder_constructor(TraceBuilderConstructor constructor) @@ -41,9 +54,7 @@ class Execution { } static std::tuple prove( - std::vector const& calldata = {}, - AvmPublicInputs const& public_inputs = AvmPublicInputs(), - ExecutionHints const& execution_hints = {}); + AvmPublicInputs const& public_inputs = AvmPublicInputs(), ExecutionHints const& execution_hints = {}); static bool verify(AvmFlavor::VerificationKey vk, HonkProof const& proof); private: diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/trace/execution_hints.hpp b/barretenberg/cpp/src/barretenberg/vm/avm/trace/execution_hints.hpp index c29057911193..3ef6969eb942 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/trace/execution_hints.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/trace/execution_hints.hpp @@ -166,6 +166,7 @@ struct ContractInstanceHint { FF contract_class_id{}; FF initialisation_hash{}; PublicKeysHint public_keys; + NullifierReadTreeHint membership_hint; }; inline void read(uint8_t const*& it, PublicKeysHint& hint) @@ -189,6 +190,7 @@ inline void read(uint8_t const*& it, ContractInstanceHint& hint) read(it, hint.contract_class_id); read(it, hint.initialisation_hash); read(it, hint.public_keys); + read(it, hint.membership_hint); } struct AvmContractBytecode { @@ -201,7 +203,7 @@ struct AvmContractBytecode { ContractInstanceHint contract_instance, ContractClassIdHint contract_class_id_preimage) : bytecode(std::move(bytecode)) - , contract_instance(contract_instance) + , contract_instance(std::move(contract_instance)) , contract_class_id_preimage(contract_class_id_preimage) {} AvmContractBytecode(std::vector bytecode) @@ -217,7 +219,20 @@ inline void read(uint8_t const*& it, AvmContractBytecode& bytecode) read(it, bytecode.contract_class_id_preimage); } +struct AvmEnqueuedCallHint { + FF contract_address; + std::vector calldata; +}; + +inline void read(uint8_t const*& it, AvmEnqueuedCallHint& hint) +{ + using serialize::read; + read(it, hint.contract_address); + read(it, hint.calldata); +} + struct ExecutionHints { + std::vector enqueued_call_hints; std::vector> storage_value_hints; std::vector> 
note_hash_exists_hints; std::vector> nullifier_exists_hints; @@ -309,6 +324,9 @@ struct ExecutionHints { using serialize::read; const auto* it = data.data(); + std::vector enqueued_call_hints; + read(it, enqueued_call_hints); + read(it, storage_value_hints); read(it, note_hash_exists_hints); read(it, nullifier_exists_hints); @@ -353,19 +371,28 @@ struct ExecutionHints { " bytes out of " + std::to_string(data.size()) + " bytes"); } - return { std::move(storage_value_hints), std::move(note_hash_exists_hints), - std::move(nullifier_exists_hints), std::move(l1_to_l2_message_exists_hints), - std::move(externalcall_hints), std::move(contract_instance_hints), - std::move(all_contract_bytecode), std::move(storage_read_hints), - std::move(storage_write_hints), std::move(nullifier_read_hints), - std::move(nullifier_write_hints), std::move(note_hash_read_hints), - std::move(note_hash_write_hints), std::move(l1_to_l2_message_read_hints) + return { std::move(enqueued_call_hints), + std::move(storage_value_hints), + std::move(note_hash_exists_hints), + std::move(nullifier_exists_hints), + std::move(l1_to_l2_message_exists_hints), + std::move(externalcall_hints), + std::move(contract_instance_hints), + std::move(all_contract_bytecode), + std::move(storage_read_hints), + std::move(storage_write_hints), + std::move(nullifier_read_hints), + std::move(nullifier_write_hints), + std::move(note_hash_read_hints), + std::move(note_hash_write_hints), + std::move(l1_to_l2_message_read_hints) }; } private: - ExecutionHints(std::vector> storage_value_hints, + ExecutionHints(std::vector enqueued_call_hints, + std::vector> storage_value_hints, std::vector> note_hash_exists_hints, std::vector> nullifier_exists_hints, std::vector> l1_to_l2_message_exists_hints, @@ -380,7 +407,8 @@ struct ExecutionHints { std::vector note_hash_write_hints, std::vector l1_to_l2_message_read_hints) - : storage_value_hints(std::move(storage_value_hints)) + : enqueued_call_hints(std::move(enqueued_call_hints)) + , storage_value_hints(std::move(storage_value_hints)) , note_hash_exists_hints(std::move(note_hash_exists_hints)) , nullifier_exists_hints(std::move(nullifier_exists_hints)) , l1_to_l2_message_exists_hints(std::move(l1_to_l2_message_exists_hints)) diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/trace/gadgets/merkle_tree.cpp b/barretenberg/cpp/src/barretenberg/vm/avm/trace/gadgets/merkle_tree.cpp index 32a1e7eec2b7..87e46bf71289 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/trace/gadgets/merkle_tree.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/trace/gadgets/merkle_tree.cpp @@ -10,6 +10,15 @@ using Poseidon2 = crypto::Poseidon2; * UNCONSTRAINED TREE OPERATIONS **************************************************************************************************/ +void AvmMerkleTreeTraceBuilder::checkpoint_non_revertible_state() +{ + non_revertible_tree_snapshots = tree_snapshots.copy(); +} +void AvmMerkleTreeTraceBuilder::rollback_to_non_revertible_checkpoint() +{ + tree_snapshots = non_revertible_tree_snapshots; +} + FF AvmMerkleTreeTraceBuilder::unconstrained_hash_nullifier_preimage(const NullifierLeafPreimage& preimage) { return Poseidon2::hash({ preimage.nullifier, preimage.next_nullifier, preimage.next_index }); @@ -75,14 +84,13 @@ FF AvmMerkleTreeTraceBuilder::unconstrained_update_leaf_index(const FF& leaf_val bool AvmMerkleTreeTraceBuilder::perform_storage_read([[maybe_unused]] uint32_t clk, const PublicDataTreeLeafPreimage& preimage, const FF& leaf_index, - const std::vector& path, - const FF& root) + const 
std::vector& path) const { // Hash the preimage FF preimage_hash = unconstrained_hash_public_data_preimage(preimage); auto index = static_cast(leaf_index); // Check if the leaf is a member of the tree - return unconstrained_check_membership(preimage_hash, index, path, root); + return unconstrained_check_membership(preimage_hash, index, path, tree_snapshots.public_data_tree.root); } FF AvmMerkleTreeTraceBuilder::perform_storage_write([[maybe_unused]] uint32_t clk, @@ -91,19 +99,19 @@ FF AvmMerkleTreeTraceBuilder::perform_storage_write([[maybe_unused]] uint32_t cl const std::vector& low_path, const FF& slot, const FF& value, - const FF& insertion_index, - const std::vector& insertion_path, - const FF& initial_root) + const std::vector& insertion_path) { // Check membership of the low leaf - bool low_leaf_member = perform_storage_read(clk, low_preimage, low_index, low_path, initial_root); + bool low_leaf_member = perform_storage_read(clk, low_preimage, low_index, low_path); ASSERT(low_leaf_member); if (slot == low_preimage.slot) { // We update the low value low_preimage.value = value; FF low_preimage_hash = unconstrained_hash_public_data_preimage(low_preimage); // Update the low leaf - return unconstrained_update_leaf_index(low_preimage_hash, static_cast(low_index), low_path); + tree_snapshots.public_data_tree.root = + unconstrained_update_leaf_index(low_preimage_hash, static_cast(low_index), low_path); + return tree_snapshots.public_data_tree.root; } // The new leaf for an insertion is PublicDataTreeLeafPreimage new_preimage{ @@ -111,32 +119,34 @@ FF AvmMerkleTreeTraceBuilder::perform_storage_write([[maybe_unused]] uint32_t cl }; // Update the low preimage with the new leaf preimage low_preimage.next_slot = slot; - low_preimage.next_index = insertion_index; + low_preimage.next_index = tree_snapshots.public_data_tree.size; // Hash the low preimage FF low_preimage_hash = unconstrained_hash_public_data_preimage(low_preimage); // Compute the new root FF new_root = unconstrained_update_leaf_index(low_preimage_hash, static_cast(low_index), low_path); // Check membership of the zero leaf at the insertion index against the new root - auto index = static_cast(insertion_index); + auto index = static_cast(tree_snapshots.public_data_tree.size); bool zero_leaf_member = unconstrained_check_membership(FF::zero(), index, insertion_path, new_root); ASSERT(zero_leaf_member); // Hash the new preimage FF leaf_preimage_hash = unconstrained_hash_public_data_preimage(new_preimage); // Insert the new leaf into the tree - return unconstrained_update_leaf_index(leaf_preimage_hash, index, insertion_path); + tree_snapshots.public_data_tree.root = unconstrained_update_leaf_index(leaf_preimage_hash, index, insertion_path); + tree_snapshots.public_data_tree.size++; + return tree_snapshots.public_data_tree.root; } bool AvmMerkleTreeTraceBuilder::perform_nullifier_read([[maybe_unused]] uint32_t clk, const NullifierLeafPreimage& preimage, const FF& leaf_index, - const std::vector& path, - const FF& root) + const std::vector& path) const + { // Hash the preimage FF preimage_hash = unconstrained_hash_nullifier_preimage(preimage); auto index = static_cast(leaf_index); // Check if the leaf is a member of the tree - return unconstrained_check_membership(preimage_hash, index, path, root); + return unconstrained_check_membership(preimage_hash, index, path, tree_snapshots.nullifier_tree.root); } FF AvmMerkleTreeTraceBuilder::perform_nullifier_append([[maybe_unused]] uint32_t clk, @@ -144,22 +154,20 @@ FF 
AvmMerkleTreeTraceBuilder::perform_nullifier_append([[maybe_unused]] uint32_t const FF& low_index, const std::vector& low_path, const FF& nullifier, - const FF& insertion_index, - const std::vector& insertion_path, - const FF& root) + const std::vector& insertion_path) { bool is_update = low_preimage.nullifier == nullifier; FF low_preimage_hash = unconstrained_hash_nullifier_preimage(low_preimage); if (is_update) { // We need to raise an error here, since updates arent allowed in the nullifier tree - bool is_member = - unconstrained_check_membership(low_preimage_hash, static_cast(low_index), low_path, root); + bool is_member = unconstrained_check_membership( + low_preimage_hash, static_cast(low_index), low_path, tree_snapshots.nullifier_tree.root); ASSERT(is_member); - return root; + return tree_snapshots.nullifier_tree.root; } // Check membership of the low leaf - bool low_leaf_member = - unconstrained_check_membership(low_preimage_hash, static_cast(low_index), low_path, root); + bool low_leaf_member = unconstrained_check_membership( + low_preimage_hash, static_cast(low_index), low_path, tree_snapshots.nullifier_tree.root); ASSERT(low_leaf_member); // The new leaf for an insertion is NullifierLeafPreimage new_preimage{ .nullifier = nullifier, @@ -167,19 +175,52 @@ FF AvmMerkleTreeTraceBuilder::perform_nullifier_append([[maybe_unused]] uint32_t .next_index = low_preimage.next_index }; // Update the low preimage low_preimage.next_nullifier = nullifier; - low_preimage.next_index = insertion_index; + low_preimage.next_index = tree_snapshots.nullifier_tree.size; // Update hash of the low preimage low_preimage_hash = unconstrained_hash_nullifier_preimage(low_preimage); // Update the root with new low preimage FF updated_root = unconstrained_update_leaf_index(low_preimage_hash, static_cast(low_index), low_path); // Check membership of the zero leaf at the insertion index against the new root - auto index = static_cast(insertion_index); + auto index = static_cast(tree_snapshots.nullifier_tree.size); bool zero_leaf_member = unconstrained_check_membership(FF::zero(), index, insertion_path, updated_root); ASSERT(zero_leaf_member); // Hash the new preimage FF leaf_preimage_hash = unconstrained_hash_nullifier_preimage(new_preimage); // Insert the new leaf into the tree - return unconstrained_update_leaf_index(leaf_preimage_hash, index, insertion_path); + tree_snapshots.nullifier_tree.root = unconstrained_update_leaf_index(leaf_preimage_hash, index, insertion_path); + tree_snapshots.nullifier_tree.size++; + return tree_snapshots.nullifier_tree.root; +} + +bool AvmMerkleTreeTraceBuilder::perform_note_hash_read([[maybe_unused]] uint32_t clk, + const FF& note_hash, + const FF& leaf_index, + const std::vector& path) const +{ + auto index = static_cast(leaf_index); + return unconstrained_check_membership(note_hash, index, path, tree_snapshots.note_hash_tree.root); +} + +FF AvmMerkleTreeTraceBuilder::perform_note_hash_append([[maybe_unused]] uint32_t clk, + const FF& note_hash, + const std::vector& insertion_path) +{ + auto index = static_cast(tree_snapshots.note_hash_tree.size); + bool zero_leaf_member = + unconstrained_check_membership(FF::zero(), index, insertion_path, tree_snapshots.note_hash_tree.root); + ASSERT(zero_leaf_member); + tree_snapshots.note_hash_tree.root = unconstrained_update_leaf_index(note_hash, index, insertion_path); + tree_snapshots.note_hash_tree.size++; + return tree_snapshots.note_hash_tree.root; +} + +bool AvmMerkleTreeTraceBuilder::perform_l1_to_l2_message_read([[maybe_unused]] 
uint32_t clk, + const FF& leaf_value, + const FF leaf_index, + const std::vector& path) const +{ + auto index = static_cast(leaf_index); + return unconstrained_check_membership(leaf_value, index, path, tree_snapshots.l1_to_l2_message_tree.root); } /************************************************************************************************** diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/trace/gadgets/merkle_tree.hpp b/barretenberg/cpp/src/barretenberg/vm/avm/trace/gadgets/merkle_tree.hpp index 382c49942faa..7b67db893138 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/trace/gadgets/merkle_tree.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/trace/gadgets/merkle_tree.hpp @@ -22,8 +22,14 @@ class AvmMerkleTreeTraceBuilder { }; AvmMerkleTreeTraceBuilder() = default; + AvmMerkleTreeTraceBuilder(TreeSnapshots& tree_snapshots) + : tree_snapshots(tree_snapshots){}; + void reset(); + void checkpoint_non_revertible_state(); + void rollback_to_non_revertible_checkpoint(); + bool check_membership( uint32_t clk, const FF& leaf_value, const uint64_t leaf_index, const std::vector& path, const FF& root); @@ -35,12 +41,13 @@ class AvmMerkleTreeTraceBuilder { FF compute_public_tree_leaf_slot(uint32_t clk, FF contract_address, FF leaf_index); - // These can be static, but not yet in-case we want to store the tree snapshots in this gadget + TreeSnapshots& get_tree_snapshots() { return tree_snapshots; } + + // Public Data Tree bool perform_storage_read(uint32_t clk, const PublicDataTreeLeafPreimage& preimage, const FF& leaf_index, - const std::vector& path, - const FF& root); + const std::vector& path) const; FF perform_storage_write(uint32_t clk, PublicDataTreeLeafPreimage& low_preimage, @@ -48,24 +55,34 @@ class AvmMerkleTreeTraceBuilder { const std::vector& low_path, const FF& slot, const FF& value, - const FF& insertion_index, - const std::vector& insertion_path, - const FF& initial_root); + const std::vector& insertion_path); + // Nullifier Tree bool perform_nullifier_read(uint32_t clk, const NullifierLeafPreimage& preimage, const FF& leaf_index, - const std::vector& path, - const FF& root); + const std::vector& path) const; FF perform_nullifier_append(uint32_t clk, NullifierLeafPreimage& low_preimage, const FF& low_index, const std::vector& low_path, const FF& nullifier, - const FF& insertion_index, - const std::vector& insertion_path, - const FF& root); + const std::vector& insertion_path); + + // Note Hash Tree + bool perform_note_hash_read(uint32_t clk, + const FF& note_hash, + const FF& leaf_index, + const std::vector& path) const; + + FF perform_note_hash_append(uint32_t clk, const FF& note_hash, const std::vector& insertion_path); + + // L1 to L2 Message Tree + bool perform_l1_to_l2_message_read(uint32_t clk, + const FF& leaf_value, + const FF leaf_index, + const std::vector& path) const; // Unconstrained variants while circuit stuff is being worked out static bool unconstrained_check_membership(const FF& leaf_value, @@ -86,11 +103,14 @@ class AvmMerkleTreeTraceBuilder { static FF unconstrained_compute_public_tree_leaf_slot(FF contract_address, FF leaf_index); void finalize(std::vector>& main_trace); + // We need access to the poseidon2 gadget AvmPoseidon2TraceBuilder poseidon2_builder; private: std::vector merkle_check_trace; + TreeSnapshots non_revertible_tree_snapshots; + TreeSnapshots tree_snapshots; MerkleEntry compute_root_from_path(uint32_t clk, const FF& leaf_value, const uint64_t leaf_index, diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/trace/helper.cpp 
b/barretenberg/cpp/src/barretenberg/vm/avm/trace/helper.cpp index e40a90129d54..28a540f69aaa 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/trace/helper.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/trace/helper.cpp @@ -105,6 +105,8 @@ std::string to_name(AvmError error) switch (error) { case AvmError::NO_ERROR: return "NO ERROR"; + case AvmError::REVERT_OPCODE: + return "REVERT OPCODE"; case AvmError::INVALID_PROGRAM_COUNTER: return "INVALID PROGRAM COUNTER"; case AvmError::INVALID_OPCODE: @@ -127,6 +129,10 @@ std::string to_name(AvmError error) return "CONTRACT INSTANCE MEMBER UNKNOWN"; case AvmError::RADIX_OUT_OF_BOUNDS: return "RADIX OUT OF BOUNDS"; + case AvmError::DUPLICATE_NULLIFIER: + return "DUPLICATE NULLIFIER"; + case AvmError::SIDE_EFFECT_LIMIT_REACHED: + return "SIDE EFFECT LIMIT REACHED"; default: throw std::runtime_error("Invalid error type"); break; @@ -138,6 +144,11 @@ bool is_ok(AvmError error) return error == AvmError::NO_ERROR; } +bool exceptionally_halted(AvmError error) +{ + return error != AvmError::NO_ERROR && error != AvmError::REVERT_OPCODE; +} + /** * * ONLY FOR TESTS - Required by dsl module and therefore cannot be moved to test/helpers.test.cpp diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/trace/helper.hpp b/barretenberg/cpp/src/barretenberg/vm/avm/trace/helper.hpp index 1f3b845c8e40..0c622cfe024c 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/trace/helper.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/trace/helper.hpp @@ -59,7 +59,6 @@ template VmPublicInputs_ convert_public_inputs(std::vector& trace); diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/trace/kernel_trace.cpp b/barretenberg/cpp/src/barretenberg/vm/avm/trace/kernel_trace.cpp index 275d2a950ee8..ecacb7d41c23 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/trace/kernel_trace.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/trace/kernel_trace.cpp @@ -65,16 +65,6 @@ FF AvmKernelTraceBuilder::op_sender(uint32_t clk) return perform_kernel_input_lookup(SENDER_KERNEL_INPUTS_COL_OFFSET); } -FF AvmKernelTraceBuilder::op_function_selector(uint32_t clk) -{ - KernelTraceEntry entry = { - .clk = clk, - .operation = KernelTraceOpType::FUNCTION_SELECTOR, - }; - kernel_trace.push_back(entry); - return perform_kernel_input_lookup(FUNCTION_SELECTOR_KERNEL_INPUTS_COL_OFFSET); -} - FF AvmKernelTraceBuilder::op_transaction_fee(uint32_t clk) { KernelTraceEntry entry = { @@ -363,10 +353,6 @@ void AvmKernelTraceBuilder::op_sstore(uint32_t clk, uint32_t side_effect_counter // dest.main_kernel_in_offset = SENDER_KERNEL_INPUTS_COL_OFFSET; // dest.main_sel_q_kernel_lookup = 1; // break; -// case KernelTraceOpType::FUNCTION_SELECTOR: -// dest.main_kernel_in_offset = FUNCTION_SELECTOR_KERNEL_INPUTS_COL_OFFSET; -// dest.main_sel_q_kernel_lookup = 1; -// break; // case KernelTraceOpType::TRANSACTION_FEE: // dest.main_kernel_in_offset = TRANSACTION_FEE_KERNEL_INPUTS_COL_OFFSET; // dest.main_sel_q_kernel_lookup = 1; diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/trace/kernel_trace.hpp b/barretenberg/cpp/src/barretenberg/vm/avm/trace/kernel_trace.hpp index 094441fe83b3..6ecc60763245 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/trace/kernel_trace.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/trace/kernel_trace.hpp @@ -18,7 +18,6 @@ class AvmKernelTraceBuilder { // IN ADDRESS, SENDER, - FUNCTION_SELECTOR, TRANSACTION_FEE, CHAIN_ID, VERSION, @@ -70,7 +69,6 @@ class AvmKernelTraceBuilder { // Context FF op_address(uint32_t clk); FF op_sender(uint32_t clk); - FF 
op_function_selector(uint32_t clk); FF op_transaction_fee(uint32_t clk); FF op_is_static_call(uint32_t clk); diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/trace/opcode.hpp b/barretenberg/cpp/src/barretenberg/vm/avm/trace/opcode.hpp index d2ad53e4a268..380c8c9eeb3f 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/trace/opcode.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/trace/opcode.hpp @@ -115,7 +115,6 @@ enum class OpCode : uint8_t { enum class EnvironmentVariable { ADDRESS, SENDER, - FUNCTIONSELECTOR, TRANSACTIONFEE, CHAINID, VERSION, diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/trace/public_inputs.hpp b/barretenberg/cpp/src/barretenberg/vm/avm/trace/public_inputs.hpp index fb2f236aa6ab..c68d5e363fd3 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/trace/public_inputs.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/trace/public_inputs.hpp @@ -18,8 +18,8 @@ struct Gas { inline void read(uint8_t const*& it, Gas& gas) { using serialize::read; - read(it, gas.l2_gas); read(it, gas.da_gas); + read(it, gas.l2_gas); } struct GasFees { @@ -87,6 +87,7 @@ inline void read(uint8_t const*& it, GlobalVariables& global_variables) struct AppendOnlyTreeSnapshot { FF root{}; uint32_t size = 0; + inline bool operator==(const AppendOnlyTreeSnapshot& rhs) const { return root == rhs.root && size == rhs.size; } }; inline void read(uint8_t const*& it, AppendOnlyTreeSnapshot& tree_snapshot) @@ -101,6 +102,32 @@ struct TreeSnapshots { AppendOnlyTreeSnapshot note_hash_tree; AppendOnlyTreeSnapshot nullifier_tree; AppendOnlyTreeSnapshot public_data_tree; + inline bool operator==(const TreeSnapshots& rhs) const + { + return l1_to_l2_message_tree == rhs.l1_to_l2_message_tree && note_hash_tree == rhs.note_hash_tree && + nullifier_tree == rhs.nullifier_tree && public_data_tree == rhs.public_data_tree; + } + inline TreeSnapshots copy() + { + return { + .l1_to_l2_message_tree = { + .root = l1_to_l2_message_tree.root, + .size = l1_to_l2_message_tree.size, + }, + .note_hash_tree = { + .root = note_hash_tree.root, + .size = note_hash_tree.size, + }, + .nullifier_tree = { + .root = nullifier_tree.root, + .size = nullifier_tree.size, + }, + .public_data_tree = { + .root = public_data_tree.root, + .size = public_data_tree.size, + }, + }; + } }; inline void read(uint8_t const*& it, TreeSnapshots& tree_snapshots) @@ -130,6 +157,10 @@ struct PublicCallRequest { */ bool is_static_call = false; FF args_hash{}; + inline bool is_empty() const + { + return msg_sender == 0 && contract_address == 0 && function_selector == 0 && !is_static_call && args_hash == 0; + } }; inline void read(uint8_t const*& it, PublicCallRequest& public_call_request) @@ -155,9 +186,24 @@ inline void read(uint8_t const*& it, PrivateToAvmAccumulatedDataArrayLengths& le read(it, lengths.nullifiers); read(it, lengths.l2_to_l1_msgs); } +struct L2ToL1Message { + FF recipient{}; // This is an eth address so it's actually only 20 bytes + FF content{}; + uint32_t counter = 0; +}; + +inline void read(uint8_t const*& it, L2ToL1Message& l2_to_l1_message) +{ + using serialize::read; + std::array recipient; + read(it, recipient); + l2_to_l1_message.recipient = FF::serialize_from_buffer(recipient.data()); + read(it, l2_to_l1_message.content); + read(it, l2_to_l1_message.counter); +} struct ScopedL2ToL1Message { - FF l2_to_l1_message{}; + L2ToL1Message l2_to_l1_message{}; FF contract_address{}; }; @@ -170,27 +216,21 @@ inline void read(uint8_t const*& it, ScopedL2ToL1Message& l2_to_l1_message) struct PrivateToAvmAccumulatedData { std::array 
note_hashes{}; - std::array nullifiers{}; - std::array l2_to_l1_msgs; + std::array nullifiers{}; + std::array l2_to_l1_msgs; }; inline void read(uint8_t const*& it, PrivateToAvmAccumulatedData& accumulated_data) { using serialize::read; - for (size_t i = 0; i < MAX_NOTE_HASHES_PER_TX; i++) { - read(it, accumulated_data.note_hashes[i]); - } - for (size_t i = 0; i < MAX_NULLIFIERS_PER_CALL; i++) { - read(it, accumulated_data.nullifiers[i]); - } - for (size_t i = 0; i < MAX_L2_TO_L1_MSGS_PER_CALL; i++) { - read(it, accumulated_data.l2_to_l1_msgs[i]); - } + read(it, accumulated_data.note_hashes); + read(it, accumulated_data.nullifiers); + read(it, accumulated_data.l2_to_l1_msgs); } struct LogHash { FF value{}; - FF counter{}; + uint32_t counter = 0; FF length{}; }; @@ -234,39 +274,30 @@ struct AvmAccumulatedData { /** * The nullifiers from private combining with those made in the AVM execution. */ - std::array nullifiers{}; + std::array nullifiers{}; /** * The L2 to L1 messages from private combining with those made in the AVM execution. */ - std::array l2_to_l1_msgs; + std::array l2_to_l1_msgs{}; /** * The unencrypted logs emitted from the AVM execution. */ - std::array unencrypted_logs_hashes; + std::array unencrypted_logs_hashes{}; /** * The public data writes made in the AVM execution. */ - std::array public_data_writes; + std::array public_data_writes{}; }; inline void read(uint8_t const*& it, AvmAccumulatedData& accumulated_data) { using serialize::read; - for (size_t i = 0; i < MAX_NOTE_HASHES_PER_TX; i++) { - read(it, accumulated_data.note_hashes[i]); - } - for (size_t i = 0; i < MAX_NULLIFIERS_PER_CALL; i++) { - read(it, accumulated_data.nullifiers[i]); - } - for (size_t i = 0; i < MAX_L2_TO_L1_MSGS_PER_CALL; i++) { - read(it, accumulated_data.l2_to_l1_msgs[i]); - } - for (size_t i = 0; i < MAX_UNENCRYPTED_LOGS_PER_CALL; i++) { - read(it, accumulated_data.unencrypted_logs_hashes[i]); - } - for (size_t i = 0; i < MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX; i++) { - read(it, accumulated_data.public_data_writes[i]); - } + + read(it, accumulated_data.note_hashes); + read(it, accumulated_data.nullifiers); + read(it, accumulated_data.l2_to_l1_msgs); + read(it, accumulated_data.unencrypted_logs_hashes); + read(it, accumulated_data.public_data_writes); }; class AvmPublicInputs { diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/trace/trace.cpp b/barretenberg/cpp/src/barretenberg/vm/avm/trace/trace.cpp index ecd740ca9bae..4247240f0d3b 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/trace/trace.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/trace/trace.cpp @@ -35,7 +35,9 @@ #include "barretenberg/vm/avm/trace/gadgets/slice_trace.hpp" #include "barretenberg/vm/avm/trace/helper.hpp" #include "barretenberg/vm/avm/trace/opcode.hpp" +#include "barretenberg/vm/avm/trace/public_inputs.hpp" #include "barretenberg/vm/avm/trace/trace.hpp" +#include "barretenberg/vm/aztec_constants.hpp" #include "barretenberg/vm/stats.hpp" namespace bb::avm_trace { @@ -128,12 +130,94 @@ bool check_tag_integral(AvmMemoryTag tag) } } +bool isCanonical(FF contract_address) +{ + // TODO: constrain this! 
+ return contract_address == CANONICAL_AUTH_REGISTRY_ADDRESS || contract_address == DEPLOYER_CONTRACT_ADDRESS || + contract_address == REGISTERER_CONTRACT_ADDRESS || contract_address == MULTI_CALL_ENTRYPOINT_ADDRESS || + contract_address == FEE_JUICE_ADDRESS || contract_address == ROUTER_ADDRESS; +} + } // anonymous namespace /************************************************************************************************** * HELPERS **************************************************************************************************/ +void AvmTraceBuilder::checkpoint_non_revertible_state() +{ + merkle_tree_trace_builder.checkpoint_non_revertible_state(); +} +void AvmTraceBuilder::rollback_to_non_revertible_checkpoint() +{ + merkle_tree_trace_builder.rollback_to_non_revertible_checkpoint(); +} + +std::vector AvmTraceBuilder::get_bytecode(const FF contract_address, bool check_membership) +{ + auto clk = static_cast(main_trace.size()) + 1; + + // Find the bytecode based on contract address of the public call request + const AvmContractBytecode bytecode_hint = + *std::ranges::find_if(execution_hints.all_contract_bytecode, [contract_address](const auto& contract) { + return contract.contract_instance.address == contract_address; + }); + + bool exists = true; + if (check_membership && !isCanonical(contract_address)) { + const auto contract_address_nullifier = AvmMerkleTreeTraceBuilder::unconstrained_silo_nullifier( + DEPLOYER_CONTRACT_ADDRESS, /*nullifier=*/contract_address); + // nullifier read hint for the contract address + NullifierReadTreeHint nullifier_read_hint = bytecode_hint.contract_instance.membership_hint; + + // If the hinted preimage matches the contract address nullifier, the membership check will prove its existence, + // otherwise the membership check will prove that a low-leaf exists that skips the contract address nullifier. + exists = nullifier_read_hint.low_leaf_preimage.nullifier == contract_address_nullifier; + // perform the membership or non-membership check + bool is_member = merkle_tree_trace_builder.perform_nullifier_read(clk, + nullifier_read_hint.low_leaf_preimage, + nullifier_read_hint.low_leaf_index, + nullifier_read_hint.low_leaf_sibling_path); + // membership check must always pass + ASSERT(is_member); + + if (exists) { + // This was a membership proof! + // Assert that the hint's exists flag matches. The flag isn't really necessary... + ASSERT(bytecode_hint.contract_instance.exists); + } else { + // This was a non-membership proof! + // Enforce that the tree access membership checked a low-leaf that skips the contract address nullifier. 
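+ // Illustrative aside (made-up numbers; the usual indexed-Merkle-tree convention):
+ // leaves are kept sorted and each stores a pointer to its successor, so a single
+ // low-leaf membership proof also pins down a gap in the set. For example, a low
+ // leaf covering the gap between 10 and 20 proves that 13 was never inserted.
+ // The ASSERT below applies exactly that idea to the contract address nullifier.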
+ // Show that the contract address nullifier meets the non membership conditions (sandwich or max) + ASSERT(contract_address_nullifier < nullifier_read_hint.low_leaf_preimage.nullifier && + (nullifier_read_hint.low_leaf_preimage.next_nullifier == FF::zero() || + contract_address_nullifier > nullifier_read_hint.low_leaf_preimage.next_nullifier)); + } + } + + if (exists) { + vinfo("Found bytecode for contract address: ", contract_address); + return bytecode_hint.bytecode; + } + // TODO(dbanks12): handle non-existent bytecode + vinfo("Bytecode not found for contract address: ", contract_address); + throw std::runtime_error("Bytecode not found"); +} + +void AvmTraceBuilder::insert_private_state(const std::vector& siloed_nullifiers, + [[maybe_unused]] const std::vector& siloed_note_hashes) +{ + for (const auto& siloed_nullifier : siloed_nullifiers) { + auto hint = execution_hints.nullifier_write_hints[nullifier_write_counter++]; + merkle_tree_trace_builder.perform_nullifier_append(0, + hint.low_leaf_membership.low_leaf_preimage, + hint.low_leaf_membership.low_leaf_index, + hint.low_leaf_membership.low_leaf_sibling_path, + siloed_nullifier, + hint.insertion_path); + } +} + /** * @brief Loads a value from memory into a given intermediate register at a specified clock cycle. * Handles both direct and indirect memory access. @@ -306,17 +390,16 @@ AvmTraceBuilder::AvmTraceBuilder(AvmPublicInputs public_inputs, std::vector calldata) // NOTE: we initialise the environment builder here as it requires public inputs : calldata(std::move(calldata)) - , new_public_inputs(public_inputs) + , public_inputs(public_inputs) , side_effect_counter(side_effect_counter) , execution_hints(std::move(execution_hints_)) - , intermediate_tree_snapshots(public_inputs.start_tree_snapshots) , bytecode_trace_builder(execution_hints.all_contract_bytecode) + , merkle_tree_trace_builder(public_inputs.start_tree_snapshots) { // TODO: think about cast - gas_trace_builder.set_initial_gas(static_cast(new_public_inputs.gas_settings.gas_limits.l2_gas - - new_public_inputs.start_gas_used.l2_gas), - static_cast(new_public_inputs.gas_settings.gas_limits.da_gas - - new_public_inputs.start_gas_used.da_gas)); + gas_trace_builder.set_initial_gas( + static_cast(public_inputs.gas_settings.gas_limits.l2_gas - public_inputs.start_gas_used.l2_gas), + static_cast(public_inputs.gas_settings.gas_limits.da_gas - public_inputs.start_gas_used.da_gas)); } /************************************************************************************************** @@ -1542,9 +1625,6 @@ AvmError AvmTraceBuilder::op_get_env_var(uint8_t indirect, uint32_t dst_offset, case EnvironmentVariable::SENDER: error = op_sender(indirect, dst_offset); break; - case EnvironmentVariable::FUNCTIONSELECTOR: - error = op_function_selector(indirect, dst_offset); - break; case EnvironmentVariable::TRANSACTIONFEE: error = op_transaction_fee(indirect, dst_offset); break; @@ -1611,22 +1691,9 @@ AvmError AvmTraceBuilder::op_sender(uint8_t indirect, uint32_t dst_offset) return error; } -AvmError AvmTraceBuilder::op_function_selector(uint8_t indirect, uint32_t dst_offset) -{ - FF ia_value = this->current_public_call_request.function_selector; - auto [row, error] = create_kernel_lookup_opcode(indirect, dst_offset, ia_value, AvmMemoryTag::U32); - row.main_sel_op_function_selector = FF(1); - - // Constrain gas cost - gas_trace_builder.constrain_gas(static_cast(row.main_clk), OpCode::GETENVVAR_16); - - main_trace.push_back(row); - return error; -} - AvmError 
AvmTraceBuilder::op_transaction_fee(uint8_t indirect, uint32_t dst_offset) { - FF ia_value = new_public_inputs.transaction_fee; + FF ia_value = public_inputs.transaction_fee; auto [row, error] = create_kernel_lookup_opcode(indirect, dst_offset, ia_value, AvmMemoryTag::FF); row.main_sel_op_transaction_fee = FF(1); @@ -1656,7 +1723,7 @@ AvmError AvmTraceBuilder::op_is_static_call(uint8_t indirect, uint32_t dst_offse AvmError AvmTraceBuilder::op_chain_id(uint8_t indirect, uint32_t dst_offset) { - FF ia_value = new_public_inputs.global_variables.chain_id; + FF ia_value = public_inputs.global_variables.chain_id; auto [row, error] = create_kernel_lookup_opcode(indirect, dst_offset, ia_value, AvmMemoryTag::FF); row.main_sel_op_chain_id = FF(1); @@ -1669,7 +1736,7 @@ AvmError AvmTraceBuilder::op_chain_id(uint8_t indirect, uint32_t dst_offset) AvmError AvmTraceBuilder::op_version(uint8_t indirect, uint32_t dst_offset) { - FF ia_value = new_public_inputs.global_variables.version; + FF ia_value = public_inputs.global_variables.version; auto [row, error] = create_kernel_lookup_opcode(indirect, dst_offset, ia_value, AvmMemoryTag::FF); row.main_sel_op_version = FF(1); @@ -1682,7 +1749,7 @@ AvmError AvmTraceBuilder::op_version(uint8_t indirect, uint32_t dst_offset) AvmError AvmTraceBuilder::op_block_number(uint8_t indirect, uint32_t dst_offset) { - FF ia_value = new_public_inputs.global_variables.block_number; + FF ia_value = public_inputs.global_variables.block_number; auto [row, error] = create_kernel_lookup_opcode(indirect, dst_offset, ia_value, AvmMemoryTag::FF); row.main_sel_op_block_number = FF(1); @@ -1695,7 +1762,7 @@ AvmError AvmTraceBuilder::op_block_number(uint8_t indirect, uint32_t dst_offset) AvmError AvmTraceBuilder::op_timestamp(uint8_t indirect, uint32_t dst_offset) { - FF ia_value = new_public_inputs.global_variables.timestamp; + FF ia_value = public_inputs.global_variables.timestamp; auto [row, error] = create_kernel_lookup_opcode(indirect, dst_offset, ia_value, AvmMemoryTag::U64); row.main_sel_op_timestamp = FF(1); @@ -1708,7 +1775,7 @@ AvmError AvmTraceBuilder::op_timestamp(uint8_t indirect, uint32_t dst_offset) AvmError AvmTraceBuilder::op_fee_per_l2_gas(uint8_t indirect, uint32_t dst_offset) { - FF ia_value = new_public_inputs.global_variables.gas_fees.fee_per_l2_gas; + FF ia_value = public_inputs.global_variables.gas_fees.fee_per_l2_gas; auto [row, error] = create_kernel_lookup_opcode(indirect, dst_offset, ia_value, AvmMemoryTag::FF); row.main_sel_op_fee_per_l2_gas = FF(1); @@ -1721,7 +1788,7 @@ AvmError AvmTraceBuilder::op_fee_per_l2_gas(uint8_t indirect, uint32_t dst_offse AvmError AvmTraceBuilder::op_fee_per_da_gas(uint8_t indirect, uint32_t dst_offset) { - FF ia_value = new_public_inputs.global_variables.gas_fees.fee_per_da_gas; + FF ia_value = public_inputs.global_variables.gas_fees.fee_per_da_gas; auto [row, error] = create_kernel_lookup_opcode(indirect, dst_offset, ia_value, AvmMemoryTag::FF); row.main_sel_op_fee_per_da_gas = FF(1); @@ -2564,10 +2631,9 @@ AvmError AvmTraceBuilder::op_sload(uint8_t indirect, uint32_t slot_offset, uint3 // Sanity check that the computed slot using the value read from slot_offset should match the read hint ASSERT(computed_tree_slot == read_hint.leaf_preimage.slot); - FF public_data_tree_root = intermediate_tree_snapshots.public_data_tree.root; // Check that the leaf is a member of the public data tree bool is_member = merkle_tree_trace_builder.perform_storage_read( - clk, read_hint.leaf_preimage, read_hint.leaf_index, read_hint.sibling_path, 
public_data_tree_root); + clk, read_hint.leaf_preimage, read_hint.leaf_index, read_hint.sibling_path); ASSERT(is_member); FF value = read_hint.leaf_preimage.value; @@ -2613,10 +2679,24 @@ AvmError AvmTraceBuilder::op_sload(uint8_t indirect, uint32_t slot_offset, uint3 AvmError AvmTraceBuilder::op_sstore(uint8_t indirect, uint32_t src_offset, uint32_t slot_offset) { // We keep the first encountered error + AvmError error = AvmError::NO_ERROR; auto clk = static_cast(main_trace.size()) + 1; - // We keep the first encountered error - AvmError error = AvmError::NO_ERROR; + if (storage_write_counter >= MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX) { + error = AvmError::SIDE_EFFECT_LIMIT_REACHED; + auto row = Row{ + .main_clk = clk, + .main_internal_return_ptr = internal_return_ptr, + .main_op_err = FF(static_cast(!is_ok(error))), + .main_pc = pc, + .main_sel_op_sstore = FF(1), + }; + gas_trace_builder.constrain_gas(clk, OpCode::SSTORE); + main_trace.push_back(row); + pc += Deserialization::get_pc_increment(OpCode::SSTORE); + return error; + } + auto [resolved_addrs, res_error] = Addressing<2>::fromWire(indirect, call_ptr).resolve({ src_offset, slot_offset }, mem_trace_builder); auto [resolved_src, resolved_slot] = resolved_addrs; @@ -2642,17 +2722,13 @@ AvmError AvmTraceBuilder::op_sstore(uint8_t indirect, uint32_t src_offset, uint3 // (e) We create a new preimage for the new write // (f) We compute the new root by updating at the leaf index with the hash of the new preimage PublicDataWriteTreeHint write_hint = execution_hints.storage_write_hints.at(storage_write_counter++); - FF root = merkle_tree_trace_builder.perform_storage_write(clk, - write_hint.low_leaf_membership.leaf_preimage, - write_hint.low_leaf_membership.leaf_index, - write_hint.low_leaf_membership.sibling_path, - write_hint.new_leaf_preimage.slot, - write_hint.new_leaf_preimage.value, - intermediate_tree_snapshots.public_data_tree.size, - write_hint.insertion_path, - intermediate_tree_snapshots.public_data_tree.root); - intermediate_tree_snapshots.public_data_tree.root = root; - intermediate_tree_snapshots.public_data_tree.size++; + merkle_tree_trace_builder.perform_storage_write(clk, + write_hint.low_leaf_membership.leaf_preimage, + write_hint.low_leaf_membership.leaf_index, + write_hint.low_leaf_membership.sibling_path, + write_hint.new_leaf_preimage.slot, + write_hint.new_leaf_preimage.value, + write_hint.insertion_path); // TODO(8945): remove fake rows Row row = Row{ @@ -2662,6 +2738,7 @@ AvmError AvmTraceBuilder::op_sstore(uint8_t indirect, uint32_t src_offset, uint3 .main_ind_addr_a = read_a.indirect_address, .main_internal_return_ptr = internal_return_ptr, .main_mem_addr_a = read_a.direct_address, // direct address incremented at end of the loop + .main_op_err = FF(static_cast(!is_ok(error))), .main_pc = pc, .main_r_in_tag = static_cast(AvmMemoryTag::FF), .main_sel_mem_op_a = 1, @@ -2712,11 +2789,9 @@ AvmError AvmTraceBuilder::op_note_hash_exists(uint8_t indirect, bool exists = note_hash_value == note_hash_read_hint.leaf_value; // Check membership of the leaf index in the note hash tree const auto leaf_index = unconstrained_read_from_memory(resolved_leaf_index); - bool is_member = - AvmMerkleTreeTraceBuilder::unconstrained_check_membership(note_hash_read_hint.leaf_value, - static_cast(leaf_index), - note_hash_read_hint.sibling_path, - intermediate_tree_snapshots.note_hash_tree.root); + bool is_member = merkle_tree_trace_builder.perform_note_hash_read( + clk, note_hash_read_hint.leaf_value, leaf_index, 
note_hash_read_hint.sibling_path); + ASSERT(is_member); // This already does memory reads @@ -2782,23 +2857,36 @@ AvmError AvmTraceBuilder::op_emit_note_hash(uint8_t indirect, uint32_t note_hash { auto const clk = static_cast(main_trace.size()) + 1; + if (note_hash_write_counter >= MAX_NOTE_HASHES_PER_TX) { + AvmError error = AvmError::SIDE_EFFECT_LIMIT_REACHED; + auto row = Row{ + .main_clk = clk, + .main_internal_return_ptr = internal_return_ptr, + .main_op_err = FF(static_cast(!is_ok(error))), + .main_pc = pc, + .main_sel_op_emit_note_hash = FF(1), + }; + gas_trace_builder.constrain_gas(clk, OpCode::EMITNOTEHASH); + main_trace.push_back(row); + pc += Deserialization::get_pc_increment(OpCode::EMITNOTEHASH); + return error; + } + + auto [row, error] = create_kernel_output_opcode(indirect, clk, note_hash_offset); + row.main_sel_op_emit_note_hash = FF(1); + row.main_op_err = FF(static_cast(!is_ok(error))); + AppendTreeHint note_hash_write_hint = execution_hints.note_hash_write_hints.at(note_hash_write_counter++); + auto siloed_note_hash = AvmMerkleTreeTraceBuilder::unconstrained_silo_note_hash( + current_public_call_request.contract_address, row.main_ia); + ASSERT(row.main_ia == note_hash_write_hint.leaf_value); // We first check that the index is currently empty - auto insertion_index = static_cast(intermediate_tree_snapshots.note_hash_tree.size); - bool insert_index_is_empty = - AvmMerkleTreeTraceBuilder::unconstrained_check_membership(FF::zero(), - insertion_index, - note_hash_write_hint.sibling_path, - intermediate_tree_snapshots.note_hash_tree.root); + bool insert_index_is_empty = merkle_tree_trace_builder.perform_note_hash_read( + clk, FF::zero(), note_hash_write_hint.leaf_index, note_hash_write_hint.sibling_path); ASSERT(insert_index_is_empty); - // Update the root with the new leaf that is appended - FF new_root = AvmMerkleTreeTraceBuilder::unconstrained_update_leaf_index( - note_hash_write_hint.leaf_value, insertion_index, note_hash_write_hint.sibling_path); - intermediate_tree_snapshots.note_hash_tree.root = new_root; - intermediate_tree_snapshots.note_hash_tree.size++; - auto [row, error] = create_kernel_output_opcode(indirect, clk, note_hash_offset); - row.main_sel_op_emit_note_hash = FF(1); + // Update the root with the new leaf that is appended + merkle_tree_trace_builder.perform_note_hash_append(clk, siloed_note_hash, note_hash_write_hint.sibling_path); // Constrain gas cost gas_trace_builder.constrain_gas(clk, OpCode::EMITNOTEHASH); @@ -2840,12 +2928,10 @@ AvmError AvmTraceBuilder::op_nullifier_exists(uint8_t indirect, FF nullifier_value = unconstrained_read_from_memory(resolved_nullifier_offset); FF address_value = unconstrained_read_from_memory(resolved_address); FF siloed_nullifier = AvmMerkleTreeTraceBuilder::unconstrained_silo_nullifier(address_value, nullifier_value); - bool is_member = - merkle_tree_trace_builder.perform_nullifier_read(clk, - nullifier_read_hint.low_leaf_preimage, - nullifier_read_hint.low_leaf_index, - nullifier_read_hint.low_leaf_sibling_path, - intermediate_tree_snapshots.nullifier_tree.root); + bool is_member = merkle_tree_trace_builder.perform_nullifier_read(clk, + nullifier_read_hint.low_leaf_preimage, + nullifier_read_hint.low_leaf_index, + nullifier_read_hint.low_leaf_sibling_path); ASSERT(is_member); if (siloed_nullifier == nullifier_read_hint.low_leaf_preimage.nullifier) { @@ -2921,48 +3007,67 @@ AvmError AvmTraceBuilder::op_nullifier_exists(uint8_t indirect, AvmError AvmTraceBuilder::op_emit_nullifier(uint8_t indirect, uint32_t 
nullifier_offset) { + // We keep the first encountered error + AvmError error = AvmError::NO_ERROR; auto const clk = static_cast(main_trace.size()) + 1; - auto [row, error] = create_kernel_output_opcode(indirect, clk, nullifier_offset); + if (nullifier_write_counter >= MAX_NULLIFIERS_PER_TX) { + error = AvmError::SIDE_EFFECT_LIMIT_REACHED; + auto row = Row{ + .main_clk = clk, + .main_internal_return_ptr = internal_return_ptr, + .main_op_err = FF(static_cast(!is_ok(error))), + .main_pc = pc, + .main_sel_op_emit_nullifier = FF(1), + }; + gas_trace_builder.constrain_gas(clk, OpCode::EMITNULLIFIER); + main_trace.push_back(row); + pc += Deserialization::get_pc_increment(OpCode::EMITNULLIFIER); + return error; + } + + auto [row, output_error] = create_kernel_output_opcode(indirect, clk, nullifier_offset); row.main_sel_op_emit_nullifier = FF(1); + if (is_ok(error)) { + error = output_error; + } // Do merkle check FF nullifier_value = row.main_ia; FF siloed_nullifier = AvmMerkleTreeTraceBuilder::unconstrained_silo_nullifier( current_public_call_request.contract_address, nullifier_value); - // This is a little bit fragile - but we use the fact that if we traced a nullifier that already exists (which is - // invalid), we would have stored it under a read hint. - NullifierReadTreeHint nullifier_read_hint = execution_hints.nullifier_read_hints.at(nullifier_read_counter); - bool is_update = merkle_tree_trace_builder.perform_nullifier_read(clk, - nullifier_read_hint.low_leaf_preimage, - nullifier_read_hint.low_leaf_index, - nullifier_read_hint.low_leaf_sibling_path, - intermediate_tree_snapshots.nullifier_tree.root); + NullifierWriteTreeHint nullifier_write_hint = execution_hints.nullifier_write_hints.at(nullifier_write_counter++); + bool is_update = siloed_nullifier == nullifier_write_hint.low_leaf_membership.low_leaf_preimage.next_nullifier; if (is_update) { - // If we are in this branch, then the nullifier already exists in the tree - // WE NEED TO RAISE AN ERROR FLAG HERE - for now we do nothing, except increment the counter - + // hinted low-leaf points to the target nullifier, so it already exists + // prove membership of that low-leaf, which also proves membership of the target nullifier + bool exists = merkle_tree_trace_builder.perform_nullifier_read( + clk, + nullifier_write_hint.low_leaf_membership.low_leaf_preimage, + nullifier_write_hint.low_leaf_membership.low_leaf_index, + nullifier_write_hint.low_leaf_membership.low_leaf_sibling_path); + // if the hinted low-leaf fails its membership check, the hint was bad! + ASSERT(exists); nullifier_read_counter++; - error = AvmError::DUPLICATE_NULLIFIER; + // Cannot update an existing nullifier, and cannot emit a duplicate. Error!
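+ // Illustrative aside (made-up numbers): with a hinted low leaf of
+ // { nullifier: 42, next_nullifier: 57 }, emitting a siloed nullifier of 57 means
+ // the low leaf's next pointer lands exactly on the target, i.e. 57 is already a
+ // leaf of its own, so this emit is a duplicate rather than a fresh append.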
+ if (is_ok(error)) { + error = AvmError::DUPLICATE_NULLIFIER; + } } else { - // This is a non-membership proof which means our insertion is valid - NullifierWriteTreeHint nullifier_write_hint = - execution_hints.nullifier_write_hints.at(nullifier_write_counter++); - FF new_root = merkle_tree_trace_builder.perform_nullifier_append( + // hinted low-leaf SKIPS the target nullifier, so it does NOT exist + // prove membership of the low leaf which also proves non-membership of the target nullifier + merkle_tree_trace_builder.perform_nullifier_append( clk, nullifier_write_hint.low_leaf_membership.low_leaf_preimage, nullifier_write_hint.low_leaf_membership.low_leaf_index, nullifier_write_hint.low_leaf_membership.low_leaf_sibling_path, siloed_nullifier, - intermediate_tree_snapshots.nullifier_tree.size, - nullifier_write_hint.insertion_path, - intermediate_tree_snapshots.nullifier_tree.root); - - intermediate_tree_snapshots.nullifier_tree.root = new_root; - intermediate_tree_snapshots.nullifier_tree.size++; + nullifier_write_hint.insertion_path); } + row.main_op_err = FF(static_cast(!is_ok(error))); + // Constrain gas cost gas_trace_builder.constrain_gas(clk, OpCode::EMITNULLIFIER); @@ -3006,11 +3111,8 @@ AvmError AvmTraceBuilder::op_l1_to_l2_msg_exists(uint8_t indirect, bool exists = l1_to_l2_msg_value == l1_to_l2_msg_read_hint.leaf_value; // Check membership of the leaf index in the l1_to_l2_msg tree - bool is_member = AvmMerkleTreeTraceBuilder::unconstrained_check_membership( - l1_to_l2_msg_read_hint.leaf_value, - static_cast(l1_to_l2_msg_read_hint.leaf_index), - l1_to_l2_msg_read_hint.sibling_path, - intermediate_tree_snapshots.l1_to_l2_message_tree.root); + bool is_member = merkle_tree_trace_builder.perform_l1_to_l2_message_read( + clk, l1_to_l2_msg_read_hint.leaf_value, leaf_index, l1_to_l2_msg_read_hint.sibling_path); ASSERT(is_member); auto read_a = constrained_read_from_memory( @@ -3111,23 +3213,66 @@ AvmError AvmTraceBuilder::op_get_contract_instance( error = AvmError::CHECK_TAG_ERROR; } - // Read the contract instance - ContractInstanceHint instance = execution_hints.contract_instance_hints.at(read_address.val); - - FF member_value; - switch (chosen_member) { - case ContractInstanceMember::DEPLOYER: - member_value = instance.deployer_addr; - break; - case ContractInstanceMember::CLASS_ID: - member_value = instance.contract_class_id; - break; - case ContractInstanceMember::INIT_HASH: - member_value = instance.initialisation_hash; - break; - default: - member_value = 0; - break; + FF member_value = 0; + bool exists = false; + + if (is_ok(error)) { + const auto contract_address = read_address.val; + const auto contract_address_nullifier = AvmMerkleTreeTraceBuilder::unconstrained_silo_nullifier( + DEPLOYER_CONTRACT_ADDRESS, /*nullifier=*/contract_address); + // Read the contract instance hint + ContractInstanceHint instance = execution_hints.contract_instance_hints.at(contract_address); + + if (isCanonical(contract_address)) { + // skip membership check for canonical contracts + exists = true; + } else { + // nullifier read hint for the contract address + NullifierReadTreeHint nullifier_read_hint = instance.membership_hint; + + // If the hinted preimage matches the contract address nullifier, the membership check will prove its + // existence, otherwise the membership check will prove that a low-leaf exists that skips the contract + // address nullifier. 
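+ // Background: deploying a contract records its address as a nullifier siloed
+ // under DEPLOYER_CONTRACT_ADDRESS, so "does this instance exist?" reduces to a
+ // nullifier (non-)membership query, reusing the same low-leaf hint machinery
+ // as get_bytecode above.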
+ exists = nullifier_read_hint.low_leaf_preimage.nullifier == contract_address_nullifier; + + bool is_member = + merkle_tree_trace_builder.perform_nullifier_read(clk, + nullifier_read_hint.low_leaf_preimage, + nullifier_read_hint.low_leaf_index, + nullifier_read_hint.low_leaf_sibling_path); + // membership check must always pass + ASSERT(is_member); + + if (exists) { + // This was a membership proof! + // Assert that the hint's exists flag matches. The flag isn't really necessary... + ASSERT(instance.exists); + } else { + // This was a non-membership proof! + // Enforce that the tree access membership checked a low-leaf that skips the contract address nullifier. + // Show that the contract address nullifier meets the non membership conditions (sandwich or max) + ASSERT(contract_address_nullifier < nullifier_read_hint.low_leaf_preimage.nullifier && + (nullifier_read_hint.low_leaf_preimage.next_nullifier == FF::zero() || + contract_address_nullifier > nullifier_read_hint.low_leaf_preimage.next_nullifier)); + } + } + + if (exists) { + switch (chosen_member) { + case ContractInstanceMember::DEPLOYER: + member_value = instance.deployer_addr; + break; + case ContractInstanceMember::CLASS_ID: + member_value = instance.contract_class_id; + break; + case ContractInstanceMember::INIT_HASH: + member_value = instance.initialisation_hash; + break; + default: + member_value = 0; + break; + } + } } // TODO(8603): once instructions can have multiple different tags for writes, write dst as FF and exists as @@ -3171,7 +3316,7 @@ AvmError AvmTraceBuilder::op_get_contract_instance( // TODO(8603): once instructions can have multiple different tags for writes, remove this and do a // constrained writes write_to_memory(resolved_dst_offset, member_value, AvmMemoryTag::FF); - write_to_memory(resolved_exists_offset, FF(static_cast(instance.exists)), AvmMemoryTag::U1); + write_to_memory(resolved_exists_offset, FF(static_cast(exists)), AvmMemoryTag::U1); // TODO(dbanks12): compute contract address nullifier from instance preimage and perform membership check @@ -3235,6 +3380,26 @@ AvmError AvmTraceBuilder::op_emit_unencrypted_log(uint8_t indirect, uint32_t log }; } + // Can't return earlier as we do elsewhere for side-effect-limit because we need + // to at least retrieve log_size first to charge proper gas. + // This means a tag error can be reported before the side-effect-limit error.
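+ // Illustrative (assumed pricing): if EMITUNENCRYPTEDLOG were charged as
+ // base + per_field * log_size, a 5-field log must still pay base + 5 * per_field
+ // on the limit-exceeded path below, which is why log_size is read before bailing.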
+ if (is_ok(error) && unencrypted_log_write_counter >= MAX_UNENCRYPTED_LOGS_PER_TX) { + error = AvmError::SIDE_EFFECT_LIMIT_REACHED; + auto row = Row{ + .main_clk = clk, + .main_internal_return_ptr = internal_return_ptr, + .main_op_err = FF(static_cast(!is_ok(error))), + .main_pc = pc, + .main_sel_op_emit_unencrypted_log = FF(1), + }; + // Constrain gas cost + gas_trace_builder.constrain_gas(clk, OpCode::EMITUNENCRYPTEDLOG, static_cast(log_size)); + main_trace.push_back(row); + pc += Deserialization::get_pc_increment(OpCode::EMITUNENCRYPTEDLOG); + return error; + } + unencrypted_log_write_counter++; + if (is_ok(error)) { // We need to read the rest of the log_size number of elements for (uint32_t i = 0; i < log_size; i++) { @@ -3289,14 +3454,38 @@ AvmError AvmTraceBuilder::op_emit_unencrypted_log(uint8_t indirect, uint32_t log AvmError AvmTraceBuilder::op_emit_l2_to_l1_msg(uint8_t indirect, uint32_t recipient_offset, uint32_t content_offset) { + // We keep the first encountered error + AvmError error = AvmError::NO_ERROR; auto const clk = static_cast(main_trace.size()) + 1; + if (l2_to_l1_msg_write_counter >= MAX_L2_TO_L1_MSGS_PER_TX) { + error = AvmError::SIDE_EFFECT_LIMIT_REACHED; + auto row = Row{ + .main_clk = clk, + .main_internal_return_ptr = internal_return_ptr, + .main_op_err = FF(static_cast(!is_ok(error))), + .main_pc = pc, + .main_sel_op_emit_l2_to_l1_msg = FF(1), + }; + gas_trace_builder.constrain_gas(clk, OpCode::SENDL2TOL1MSG); + main_trace.push_back(row); + pc += Deserialization::get_pc_increment(OpCode::SENDL2TOL1MSG); + return error; + } + l2_to_l1_msg_write_counter++; + // Note: unorthodox order - as seen in L2ToL1Message struct in TS - auto [row, error] = create_kernel_output_opcode_with_metadata( + auto [row, output_error] = create_kernel_output_opcode_with_metadata( indirect, clk, content_offset, AvmMemoryTag::FF, recipient_offset, AvmMemoryTag::FF); + + if (is_ok(error)) { + error = output_error; + } + // Write to output // kernel_trace_builder.op_emit_l2_to_l1_msg(clk, side_effect_counter, row.main_ia, row.main_ib); row.main_sel_op_emit_l2_to_l1_msg = FF(1); + row.main_op_err = FF(static_cast(!is_ok(error))); // Constrain gas cost gas_trace_builder.constrain_gas(clk, OpCode::SENDL2TOL1MSG); @@ -3617,6 +3806,10 @@ ReturnDataError AvmTraceBuilder::op_revert(uint8_t indirect, uint32_t ret_offset pc = UINT32_MAX; // This ensures that no subsequent opcode will be executed. + if (is_ok(error)) { + error = AvmError::REVERT_OPCODE; + } + // op_valid == true otherwise, ret_size == 0 and we would have returned above.
return ReturnDataError{ .return_data = returndata, @@ -4334,8 +4527,13 @@ AvmError AvmTraceBuilder::op_to_radix_be(uint8_t indirect, * * @return The main trace */ -std::vector AvmTraceBuilder::finalize() +std::vector AvmTraceBuilder::finalize(bool apply_end_gas_assertions) { + // Some sanity checks + // Check that the final merkle tree lines up with the public inputs + TreeSnapshots tree_snapshots = merkle_tree_trace_builder.get_tree_snapshots(); + ASSERT(tree_snapshots == public_inputs.end_tree_snapshots); + vinfo("range_check_required: ", range_check_required); vinfo("full_precomputed_tables: ", full_precomputed_tables); @@ -4356,7 +4554,6 @@ std::vector AvmTraceBuilder::finalize() size_t bin_trace_size = bin_trace_builder.size(); size_t gas_trace_size = gas_trace_builder.size(); size_t slice_trace_size = slice_trace.size(); - // size_t kernel_trace_size = kernel_trace_builder.size(); // Range check size is 1 less than it needs to be since we insert a "first row" at the top of the trace at the // end, with clk 0 (this doubles as our range check) @@ -4599,6 +4796,16 @@ std::vector AvmTraceBuilder::finalize() gas_trace_builder.finalize(main_trace); + if (apply_end_gas_assertions) { + // Sanity check that the amount of gas consumed matches what we expect from the public inputs + auto last_l2_gas_remaining = main_trace.back().main_l2_gas_remaining; + auto expected_end_gas_l2 = public_inputs.gas_settings.gas_limits.l2_gas - public_inputs.end_gas_used.l2_gas; + ASSERT(last_l2_gas_remaining == expected_end_gas_l2); + auto last_da_gas_remaining = main_trace.back().main_da_gas_remaining; + auto expected_end_gas_da = public_inputs.gas_settings.gas_limits.da_gas - public_inputs.end_gas_used.da_gas; + ASSERT(last_da_gas_remaining == expected_end_gas_da); + } + /********************************************************************************************** * KERNEL TRACE INCLUSION **********************************************************************************************/ diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/trace/trace.hpp b/barretenberg/cpp/src/barretenberg/vm/avm/trace/trace.hpp index aed311ee4436..aee0c4d0e61a 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/trace/trace.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/trace/trace.hpp @@ -15,7 +15,6 @@ #include "barretenberg/vm/avm/trace/gadgets/sha256.hpp" #include "barretenberg/vm/avm/trace/gadgets/slice_trace.hpp" #include "barretenberg/vm/avm/trace/gas_trace.hpp" -// #include "barretenberg/vm/avm/trace/kernel_trace.hpp" #include "barretenberg/vm/avm/trace/mem_trace.hpp" #include "barretenberg/vm/avm/trace/opcode.hpp" #include "barretenberg/vm/avm/trace/public_inputs.hpp" @@ -42,7 +41,7 @@ struct RowWithError { class AvmTraceBuilder { public: - AvmTraceBuilder(AvmPublicInputs new_public_inputs = {}, + AvmTraceBuilder(AvmPublicInputs public_inputs, ExecutionHints execution_hints = {}, uint32_t side_effect_counter = 0, std::vector calldata = {}); @@ -101,7 +100,6 @@ class AvmTraceBuilder { AvmError op_get_env_var(uint8_t indirect, uint32_t dst_offset, uint8_t env_var); AvmError op_address(uint8_t indirect, uint32_t dst_offset); AvmError op_sender(uint8_t indirect, uint32_t dst_offset); - AvmError op_function_selector(uint8_t indirect, uint32_t dst_offset); AvmError op_transaction_fee(uint8_t indirect, uint32_t dst_offset); AvmError op_is_static_call(uint8_t indirect, uint32_t dst_offset); @@ -222,9 +220,14 @@ class AvmTraceBuilder { uint32_t num_limbs, uint8_t output_bits); - std::vector finalize(); + std::vector 
finalize(bool apply_end_gas_assertions = false); void reset(); + void checkpoint_non_revertible_state(); + void rollback_to_non_revertible_checkpoint(); + std::vector get_bytecode(const FF contract_address, bool check_membership = false); + void insert_private_state(const std::vector& siloed_nullifiers, const std::vector& siloed_note_hashes); + // These are used for testing only. AvmTraceBuilder& set_range_check_required(bool required) { @@ -250,7 +253,7 @@ class AvmTraceBuilder { std::vector main_trace; std::vector calldata; - AvmPublicInputs new_public_inputs; + AvmPublicInputs public_inputs; PublicCallRequest current_public_call_request; std::vector returndata; @@ -261,16 +264,16 @@ class AvmTraceBuilder { uint32_t side_effect_counter = 0; uint32_t external_call_counter = 0; // Incremented both by OpCode::CALL and OpCode::STATICCALL ExecutionHints execution_hints; - // These are the tracked roots for intermediate steps - TreeSnapshots intermediate_tree_snapshots; // These are some counters for the tree access hints that we probably don't need in the future uint32_t note_hash_read_counter = 0; uint32_t note_hash_write_counter = 0; uint32_t nullifier_read_counter = 0; uint32_t nullifier_write_counter = 0; uint32_t l1_to_l2_msg_read_counter = 0; + uint32_t l2_to_l1_msg_write_counter = 0; uint32_t storage_read_counter = 0; uint32_t storage_write_counter = 0; + uint32_t unencrypted_log_write_counter = 0; // These exist due to testing only. bool range_check_required = true; diff --git a/barretenberg/cpp/src/barretenberg/vm/aztec_constants.hpp b/barretenberg/cpp/src/barretenberg/vm/aztec_constants.hpp index d2fff918e9ae..793e47251e6f 100644 --- a/barretenberg/cpp/src/barretenberg/vm/aztec_constants.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/aztec_constants.hpp @@ -20,6 +20,12 @@ #define MAX_UNENCRYPTED_LOGS_PER_TX 8 #define MAX_PACKED_PUBLIC_BYTECODE_SIZE_IN_FIELDS 3000 #define MAX_L2_GAS_PER_ENQUEUED_CALL 12000000 +#define CANONICAL_AUTH_REGISTRY_ADDRESS 1 +#define DEPLOYER_CONTRACT_ADDRESS 2 +#define REGISTERER_CONTRACT_ADDRESS 3 +#define MULTI_CALL_ENTRYPOINT_ADDRESS 4 +#define FEE_JUICE_ADDRESS 5 +#define ROUTER_ADDRESS 6 #define AZTEC_ADDRESS_LENGTH 1 #define GAS_FEES_LENGTH 2 #define GAS_LENGTH 2 @@ -38,12 +44,11 @@ #define PUBLIC_INNER_CALL_REQUEST_LENGTH 13 #define STATE_REFERENCE_LENGTH 8 #define TOTAL_FEES_LENGTH 1 -#define HEADER_LENGTH 25 #define PUBLIC_CIRCUIT_PUBLIC_INPUTS_LENGTH 867 #define AVM_ACCUMULATED_DATA_LENGTH 318 #define AVM_CIRCUIT_PUBLIC_INPUTS_LENGTH 1006 #define AVM_VERIFICATION_KEY_LENGTH_IN_FIELDS 86 -#define AVM_PROOF_LENGTH_IN_FIELDS 4166 +#define AVM_PROOF_LENGTH_IN_FIELDS 4161 #define AVM_PUBLIC_COLUMN_MAX_SIZE 1024 #define AVM_PUBLIC_INPUTS_FLATTENED_SIZE 2915 #define MEM_TAG_FF 0 @@ -55,7 +60,6 @@ #define MEM_TAG_U128 6 #define SENDER_KERNEL_INPUTS_COL_OFFSET 0 #define ADDRESS_KERNEL_INPUTS_COL_OFFSET 1 -#define FUNCTION_SELECTOR_KERNEL_INPUTS_COL_OFFSET 2 #define IS_STATIC_CALL_KERNEL_INPUTS_COL_OFFSET 3 #define CHAIN_ID_KERNEL_INPUTS_COL_OFFSET 4 #define VERSION_KERNEL_INPUTS_COL_OFFSET 5 diff --git a/barretenberg/cpp/src/barretenberg/vm/constants.hpp b/barretenberg/cpp/src/barretenberg/vm/constants.hpp index fe1c0a835609..5efbbfd82dd6 100644 --- a/barretenberg/cpp/src/barretenberg/vm/constants.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/constants.hpp @@ -27,7 +27,6 @@ static_assert(KERNEL_OUTPUTS_LENGTH < AVM_PUBLIC_COLUMN_MAX_SIZE, //
https://github.com/AztecProtocol/aztec-packages/blob/master/yarn-project/circuits.js/src/structs/public_circuit_public_inputs.ts inline const uint32_t SENDER_PCPI_OFFSET = 0; inline const uint32_t ADDRESS_PCPI_OFFSET = 1; -inline const uint32_t FUNCTION_SELECTOR_PCPI_OFFSET = 2; inline const uint32_t IS_STATIC_CALL_PCPI_OFFSET = 3; inline const uint32_t PCPI_GLOBALS_START = PUBLIC_CIRCUIT_PUBLIC_INPUTS_LENGTH - 7 - GLOBAL_VARIABLES_LENGTH; diff --git a/barretenberg/ts/CHANGELOG.md b/barretenberg/ts/CHANGELOG.md index 33e9682d19c3..781197873f92 100644 --- a/barretenberg/ts/CHANGELOG.md +++ b/barretenberg/ts/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.66.0](https://github.com/AztecProtocol/aztec-packages/compare/barretenberg.js-v0.65.2...barretenberg.js-v0.66.0) (2024-12-06) + + +### Miscellaneous + +* **barretenberg.js:** Synchronize aztec-packages versions + ## [0.65.2](https://github.com/AztecProtocol/aztec-packages/compare/barretenberg.js-v0.65.1...barretenberg.js-v0.65.2) (2024-11-28) diff --git a/barretenberg/ts/package.json b/barretenberg/ts/package.json index 42a97212870a..d6995a1d921a 100644 --- a/barretenberg/ts/package.json +++ b/barretenberg/ts/package.json @@ -1,7 +1,7 @@ { "name": "@aztec/bb.js", "packageManager": "yarn@1.22.22", - "version": "0.65.2", + "version": "0.66.0", "homepage": "https://github.com/AztecProtocol/aztec-packages/tree/master/barretenberg/ts", "license": "MIT", "type": "module", diff --git a/boxes/boxes/vite/.gitignore b/boxes/boxes/vite/.gitignore new file mode 100644 index 000000000000..4137dee7a54f --- /dev/null +++ b/boxes/boxes/vite/.gitignore @@ -0,0 +1,27 @@ +# Logs +logs +*.log +npm-debug.log* +yarn-debug.log* +yarn-error.log* +pnpm-debug.log* +lerna-debug.log* + +node_modules +dist +dist-ssr +*.local + +# Editor directories and files +.vscode/* +!.vscode/extensions.json +.idea +.DS_Store +*.suo +*.ntvs* +*.njsproj +*.sln +*.sw? + +artifacts/* +codegenCache.json diff --git a/boxes/boxes/vite/README.md b/boxes/boxes/vite/README.md new file mode 100644 index 000000000000..74872fd4af60 --- /dev/null +++ b/boxes/boxes/vite/README.md @@ -0,0 +1,50 @@ +# React + TypeScript + Vite + +This template provides a minimal setup to get React working in Vite with HMR and some ESLint rules. + +Currently, two official plugins are available: + +- [@vitejs/plugin-react](https://github.com/vitejs/vite-plugin-react/blob/main/packages/plugin-react/README.md) uses [Babel](https://babeljs.io/) for Fast Refresh +- [@vitejs/plugin-react-swc](https://github.com/vitejs/vite-plugin-react-swc) uses [SWC](https://swc.rs/) for Fast Refresh + +## Expanding the ESLint configuration + +If you are developing a production application, we recommend updating the configuration to enable type aware lint rules: + +- Configure the top-level `parserOptions` property like this: + +```js +export default tseslint.config({ + languageOptions: { + // other options... 
+ parserOptions: { + project: ['./tsconfig.node.json', './tsconfig.app.json'], + tsconfigRootDir: import.meta.dirname, + }, + }, +}) +``` + +- Replace `tseslint.configs.recommended` to `tseslint.configs.recommendedTypeChecked` or `tseslint.configs.strictTypeChecked` +- Optionally add `...tseslint.configs.stylisticTypeChecked` +- Install [eslint-plugin-react](https://github.com/jsx-eslint/eslint-plugin-react) and update the config: + +```js +// eslint.config.js +import react from 'eslint-plugin-react' + +export default tseslint.config({ + // Set the react version + settings: { react: { version: '18.3' } }, + plugins: { + // Add the react plugin + react, + }, + rules: { + // other rules... + // Enable its recommended rules + ...react.configs.recommended.rules, + ...react.configs['jsx-runtime'].rules, + }, +}) +``` diff --git a/boxes/boxes/vite/eslint.config.js b/boxes/boxes/vite/eslint.config.js new file mode 100644 index 000000000000..092408a9f09e --- /dev/null +++ b/boxes/boxes/vite/eslint.config.js @@ -0,0 +1,28 @@ +import js from '@eslint/js' +import globals from 'globals' +import reactHooks from 'eslint-plugin-react-hooks' +import reactRefresh from 'eslint-plugin-react-refresh' +import tseslint from 'typescript-eslint' + +export default tseslint.config( + { ignores: ['dist'] }, + { + extends: [js.configs.recommended, ...tseslint.configs.recommended], + files: ['**/*.{ts,tsx}'], + languageOptions: { + ecmaVersion: 2020, + globals: globals.browser, + }, + plugins: { + 'react-hooks': reactHooks, + 'react-refresh': reactRefresh, + }, + rules: { + ...reactHooks.configs.recommended.rules, + 'react-refresh/only-export-components': [ + 'warn', + { allowConstantExport: true }, + ], + }, + }, +) diff --git a/boxes/boxes/vite/index.html b/boxes/boxes/vite/index.html new file mode 100644 index 000000000000..f6f53a86ef90 --- /dev/null +++ b/boxes/boxes/vite/index.html @@ -0,0 +1,25 @@ + + + + + + Private Token Noir Smart Contract + + + +
+ + + diff --git a/boxes/boxes/vite/package.json b/boxes/boxes/vite/package.json new file mode 100644 index 000000000000..cbc2f139190b --- /dev/null +++ b/boxes/boxes/vite/package.json @@ -0,0 +1,48 @@ +{ + "name": "vite", + "private": true, + "version": "0.0.0", + "type": "module", + "scripts": { + "compile": "cd src/contracts && ${AZTEC_NARGO:-aztec-nargo} compile --silence-warnings", + "codegen": "${AZTEC_BUILDER:-aztec} codegen src/contracts/target -o artifacts", + "clean": "rm -rf ./dist .tsbuildinfo ./artifacts ./src/contracts/target", + "prep": "yarn clean && yarn compile && yarn codegen", + "dev": "vite", + "build": "tsc -b && vite build", + "lint": "eslint .", + "preview": "vite preview" + }, + "dependencies": { + "@aztec/accounts": "portal:../../../yarn-project/accounts", + "@aztec/aztec.js": "portal:../../../yarn-project/aztec.js", + "@aztec/circuit-types": "portal:../../../yarn-project/circuit-types", + "@aztec/key-store": "link:../../../yarn-project/key-store", + "@aztec/kv-store": "portal:../../../yarn-project/kv-store", + "@aztec/pxe": "link:../../../yarn-project/pxe", + "@noir-lang/acvm_js": "link:../../../noir/packages/acvm_js", + "@noir-lang/noirc_abi": "link:../../../noir/packages/noirc_abi", + "buffer": "^6.0.3", + "react": "^18.3.1", + "react-dom": "^18.3.1", + "react-toastify": "^10.0.6" + }, + "devDependencies": { + "@eslint/js": "^9.13.0", + "@types/react": "^18.3.12", + "@types/react-dom": "^18.3.1", + "@vitejs/plugin-react-swc": "^3.5.0", + "eslint": "^9.13.0", + "eslint-plugin-react-hooks": "^5.0.0", + "eslint-plugin-react-refresh": "^0.4.14", + "globals": "^15.11.0", + "memfs": "^4.14.0", + "node-stdlib-browser": "^1.3.0", + "typescript": "~5.6.2", + "typescript-eslint": "^8.11.0", + "vite": "^5.4.10", + "vite-plugin-externalize-deps": "^0.8.0", + "vite-plugin-node-polyfills": "^0.22.0", + "vite-plugin-top-level-await": "^1.4.4" + } +} diff --git a/boxes/boxes/vite/src/App.css b/boxes/boxes/vite/src/App.css new file mode 100644 index 000000000000..b9d355df2a59 --- /dev/null +++ b/boxes/boxes/vite/src/App.css @@ -0,0 +1,42 @@ +#root { + max-width: 1280px; + margin: 0 auto; + padding: 2rem; + text-align: center; +} + +.logo { + height: 6em; + padding: 1.5em; + will-change: filter; + transition: filter 300ms; +} +.logo:hover { + filter: drop-shadow(0 0 2em #646cffaa); +} +.logo.react:hover { + filter: drop-shadow(0 0 2em #61dafbaa); +} + +@keyframes logo-spin { + from { + transform: rotate(0deg); + } + to { + transform: rotate(360deg); + } +} + +@media (prefers-reduced-motion: no-preference) { + a:nth-of-type(2) .logo { + animation: logo-spin infinite 20s linear; + } +} + +.card { + padding: 2em; +} + +.read-the-docs { + color: #888; +} diff --git a/boxes/boxes/vite/src/App.tsx b/boxes/boxes/vite/src/App.tsx new file mode 100644 index 000000000000..daf6f6d9d8d0 --- /dev/null +++ b/boxes/boxes/vite/src/App.tsx @@ -0,0 +1,35 @@ +import { ToastContainer } from "react-toastify"; +import "react-toastify/dist/ReactToastify.css"; +import "./App.css"; +import { Home } from "./pages/home"; +import { useEffect, useState } from "react"; +import initACVM from "@noir-lang/acvm_js/web/acvm_js"; +import initABI from "@noir-lang/noirc_abi/web/noirc_abi_wasm"; +import acvmURL from "@noir-lang/acvm_js/web/acvm_js_bg.wasm?url"; +import abiURL from "@noir-lang/noirc_abi/web/noirc_abi_wasm_bg.wasm?url"; + +const InitWasm = ({ children }: any) => { + const [init, setInit] = useState(false); + useEffect(() => { + (async () => { + await Promise.all([ + initACVM(new URL(acvmURL, 
import.meta.url).toString()), + initABI(new URL(abiURL, import.meta.url).toString()), + ]); + setInit(true); + })(); + }, []); + + return
{init && children}
; +}; + +function App() { + return ( + + + + + ); +} + +export default App; diff --git a/boxes/boxes/vite/src/config.ts b/boxes/boxes/vite/src/config.ts new file mode 100644 index 000000000000..0052072e235e --- /dev/null +++ b/boxes/boxes/vite/src/config.ts @@ -0,0 +1,101 @@ +import { + AztecNode, + Fr, + createDebugLogger, + deriveMasterIncomingViewingSecretKey, +} from "@aztec/aztec.js"; +import { BoxReactContractArtifact } from "../artifacts/BoxReact"; +import { AccountManager } from "@aztec/aztec.js/account"; +import { SingleKeyAccountContract } from "@aztec/accounts/single_key"; +import { createAztecNodeClient } from "@aztec/aztec.js"; +import { PXEService } from "@aztec/pxe/service"; +import { PXEServiceConfig, getPXEServiceConfig } from "@aztec/pxe/config"; +import { KVPxeDatabase } from "@aztec/pxe/database"; +import { TestPrivateKernelProver } from "@aztec/pxe/kernel_prover"; +import { KeyStore } from "@aztec/key-store"; +import { PrivateKernelProver } from "@aztec/circuit-types"; +import { L2TipsStore } from "@aztec/kv-store/stores"; +import { createStore } from "@aztec/kv-store/indexeddb"; + +const SECRET_KEY = Fr.random(); + +export class PrivateEnv { + pxe; + accountContract; + account: AccountManager; + + constructor( + private secretKey: Fr, + private nodeURL: string, + ) {} + + async init() { + const config = getPXEServiceConfig(); + config.dataDirectory = "pxe"; + const aztecNode = await createAztecNodeClient(this.nodeURL); + const proofCreator = new TestPrivateKernelProver(); + this.pxe = await this.createPXEService(aztecNode, config, proofCreator); + const encryptionPrivateKey = deriveMasterIncomingViewingSecretKey( + this.secretKey, + ); + this.accountContract = new SingleKeyAccountContract(encryptionPrivateKey); + this.account = new AccountManager( + this.pxe, + this.secretKey, + this.accountContract, + ); + } + + async createPXEService( + aztecNode: AztecNode, + config: PXEServiceConfig, + proofCreator?: PrivateKernelProver, + ) { + const l1Contracts = await aztecNode.getL1ContractAddresses(); + const configWithContracts = { + ...config, + l1Contracts, + } as PXEServiceConfig; + + const store = await createStore( + "pxe_data", + configWithContracts, + createDebugLogger("aztec:pxe:data:indexeddb"), + ); + + const keyStore = new KeyStore(store); + + const db = await KVPxeDatabase.create(store); + const tips = new L2TipsStore(store, "pxe"); + + const server = new PXEService( + keyStore, + aztecNode, + db, + tips, + proofCreator, + config, + ); + await server.start(); + return server; + } + + async getWallet() { + // taking advantage that register is no-op if already registered + return await this.account.register(); + } +} + +export const deployerEnv = new PrivateEnv( + SECRET_KEY, + process.env.PXE_URL || "http://localhost:8080", +); + +const IGNORE_FUNCTIONS = [ + "constructor", + "compute_note_hash_and_optionally_a_nullifier", + "sync_notes", +]; +export const filteredInterface = BoxReactContractArtifact.functions.filter( + (f) => !IGNORE_FUNCTIONS.includes(f.name), +); diff --git a/boxes/boxes/vite/src/contracts/Nargo.toml b/boxes/boxes/vite/src/contracts/Nargo.toml new file mode 100644 index 000000000000..9058cbde9a3b --- /dev/null +++ b/boxes/boxes/vite/src/contracts/Nargo.toml @@ -0,0 +1,9 @@ +[package] +name = "boxreact" +authors = [""] +compiler_version = ">=0.18.0" +type = "contract" + +[dependencies] +aztec = { path = "../../../../../noir-projects/aztec-nr/aztec" } +value_note = { path = "../../../../../noir-projects/aztec-nr/value-note" } diff --git 
a/boxes/boxes/vite/src/contracts/src/main.nr b/boxes/boxes/vite/src/contracts/src/main.nr new file mode 100644 index 000000000000..f4924981e8e4 --- /dev/null +++ b/boxes/boxes/vite/src/contracts/src/main.nr @@ -0,0 +1,48 @@ +use dep::aztec::macros::aztec; + +#[aztec] +contract BoxReact { + use dep::aztec::{ + protocol_types::public_keys::OvpkM, + keys::getters::get_public_keys, + prelude::{AztecAddress, PrivateMutable, Map, NoteInterface, NoteHeader, Point}, + encrypted_logs::encrypted_note_emission::encode_and_encrypt_note, + macros::{storage::storage, functions::{private, public, initializer}} + }; + use dep::value_note::value_note::ValueNote; + + #[storage] + struct Storage { + numbers: Map, Context>, + } + + #[private] + #[initializer] + fn constructor( + number: Field, + owner: AztecAddress + ) { + let numbers = storage.numbers; + let mut new_number = ValueNote::new(number, owner); + + let owner_ovpk_m = get_public_keys(owner).ovpk_m; + numbers.at(owner).initialize(&mut new_number).emit(encode_and_encrypt_note(&mut context, owner_ovpk_m, owner, context.msg_sender())); + } + + #[private] + fn setNumber( + number: Field, + owner: AztecAddress + ) { + let numbers = storage.numbers; + let mut new_number = ValueNote::new(number, owner); + + let owner_ovpk_m = get_public_keys(owner).ovpk_m; + numbers.at(owner).replace(&mut new_number).emit(encode_and_encrypt_note(&mut context, owner_ovpk_m, owner, context.msg_sender())); + } + + unconstrained fn getNumber(owner: AztecAddress) -> pub ValueNote { + let numbers = storage.numbers; + numbers.at(owner).view_note() + } +} diff --git a/boxes/boxes/vite/src/hooks/useContract.tsx b/boxes/boxes/vite/src/hooks/useContract.tsx new file mode 100644 index 000000000000..526081ab8501 --- /dev/null +++ b/boxes/boxes/vite/src/hooks/useContract.tsx @@ -0,0 +1,40 @@ +import { useState } from "react"; +import { deployerEnv } from "../config"; + +import { Contract, Fr } from "@aztec/aztec.js"; +import { BoxReactContract } from "../../artifacts/BoxReact"; +import { toast } from "react-toastify"; + +export function useContract() { + const [wait, setWait] = useState(false); + const [contract, setContract] = useState(); + + const deploy = async (e: React.FormEvent) => { + e.preventDefault(); + + setWait(true); + await deployerEnv.init(); + const wallet = await deployerEnv.getWallet(); + const salt = Fr.random(); + + const tx = await BoxReactContract.deploy( + wallet, + Fr.random(), + wallet.getCompleteAddress().address, + ).send({ + contractAddressSalt: salt, + }); + const contract = await toast.promise(tx.deployed(), { + pending: "Deploying contract...", + success: { + render: ({ data }) => `Address: ${data.address}`, + }, + error: "Error deploying contract", + }); + + setContract(contract); + setWait(false); + }; + + return { deploy, contract, wait }; +} diff --git a/boxes/boxes/vite/src/hooks/useNumber.tsx b/boxes/boxes/vite/src/hooks/useNumber.tsx new file mode 100644 index 000000000000..c9f7bce59dec --- /dev/null +++ b/boxes/boxes/vite/src/hooks/useNumber.tsx @@ -0,0 +1,48 @@ +import { useState } from 'react'; +import { Contract } from '@aztec/aztec.js'; +import { toast } from 'react-toastify'; +import { deployerEnv } from '../config'; + +export function useNumber({ contract }: { contract: Contract }) { + const [wait, setWait] = useState(false); + + const getNumber = async (e: React.FormEvent) => { + e.preventDefault(); + + setWait(true); + const deployerWallet = await deployerEnv.getWallet(); + const viewTxReceipt = await 
contract!.methods.getNumber(deployerWallet.getCompleteAddress().address).simulate(); + toast(`Number is: ${viewTxReceipt.value}`); + setWait(false); + }; + + const setNumber = async (e: React.FormEvent) => { + e.preventDefault(); + + const el = e.currentTarget.elements.namedItem('numberToSet') as HTMLInputElement; + if (el) { + setWait(true); + + const value = BigInt(el.value); + const deployerWallet = await deployerEnv.getWallet(); + + await toast.promise( + contract!.methods + .setNumber( + value, + deployerWallet.getCompleteAddress().address, + ) + .send() + .wait(), + { + pending: 'Setting number...', + success: `Number set to: ${value}`, + error: 'Error setting number', + }, + ); + setWait(false); + } + }; + + return { getNumber, setNumber, wait }; +} diff --git a/boxes/boxes/vite/src/index.css b/boxes/boxes/vite/src/index.css new file mode 100644 index 000000000000..6119ad9a8faa --- /dev/null +++ b/boxes/boxes/vite/src/index.css @@ -0,0 +1,68 @@ +:root { + font-family: Inter, system-ui, Avenir, Helvetica, Arial, sans-serif; + line-height: 1.5; + font-weight: 400; + + color-scheme: light dark; + color: rgba(255, 255, 255, 0.87); + background-color: #242424; + + font-synthesis: none; + text-rendering: optimizeLegibility; + -webkit-font-smoothing: antialiased; + -moz-osx-font-smoothing: grayscale; +} + +a { + font-weight: 500; + color: #646cff; + text-decoration: inherit; +} +a:hover { + color: #535bf2; +} + +body { + margin: 0; + display: flex; + place-items: center; + min-width: 320px; + min-height: 100vh; +} + +h1 { + font-size: 3.2em; + line-height: 1.1; +} + +button { + border-radius: 8px; + border: 1px solid transparent; + padding: 0.6em 1.2em; + font-size: 1em; + font-weight: 500; + font-family: inherit; + background-color: #1a1a1a; + cursor: pointer; + transition: border-color 0.25s; +} +button:hover { + border-color: #646cff; +} +button:focus, +button:focus-visible { + outline: 4px auto -webkit-focus-ring-color; +} + +@media (prefers-color-scheme: light) { + :root { + color: #213547; + background-color: #ffffff; + } + a:hover { + color: #747bff; + } + button { + background-color: #f9f9f9; + } +} diff --git a/boxes/boxes/vite/src/main.tsx b/boxes/boxes/vite/src/main.tsx new file mode 100644 index 000000000000..eff7ccc67760 --- /dev/null +++ b/boxes/boxes/vite/src/main.tsx @@ -0,0 +1,10 @@ +import { StrictMode } from "react"; +import { createRoot } from "react-dom/client"; +import "./index.css"; +import App from "./App.tsx"; + +createRoot(document.getElementById("root")!).render( + + + , +); diff --git a/boxes/boxes/vite/src/pages/contract.tsx b/boxes/boxes/vite/src/pages/contract.tsx new file mode 100644 index 000000000000..93d398ef1f4a --- /dev/null +++ b/boxes/boxes/vite/src/pages/contract.tsx @@ -0,0 +1,49 @@ +import { useState } from 'react'; +import { Contract } from '@aztec/aztec.js'; +import { useNumber } from '../hooks/useNumber'; +import { filteredInterface } from '../config'; + +export function ContractComponent({ contract }: { contract: Contract }) { + const [showInput, setShowInput] = useState(true); + const { wait, getNumber, setNumber } = useNumber({ contract }); + + return ( +
+      <h1>Your Contract</h1>
+
+      <form onSubmit={getNumber}>
+        <button type="submit" disabled={wait}>
+          Get Number
+        </button>
+      </form>
+
+      <form onSubmit={setNumber}>
+        <input type="number" name="numberToSet" disabled={wait} />
+        <button type="submit" disabled={wait}>
+          Set Number
+        </button>
+      </form>
+    </div>
+  );
+}
diff --git a/boxes/boxes/vite/src/pages/home.tsx b/boxes/boxes/vite/src/pages/home.tsx
new file mode 100644
index 000000000000..ba41f15cebf2
--- /dev/null
+++ b/boxes/boxes/vite/src/pages/home.tsx
@@ -0,0 +1,18 @@
+import { ContractComponent } from './contract';
+import { useContract } from '../hooks/useContract';
+
+export function Home() {
+  const { contract, deploy, wait } = useContract();
+
+  if (!contract) {
+    return (
+      <div>
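+        {/* No contract deployed yet: the button below deploys BoxReact via useContract */}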
+        <button onClick={deploy} disabled={wait}>
+          Deploy
+        </button>
+      </div>
+    );
+  }
+
+  return <ContractComponent contract={contract} />;
+}
diff --git a/boxes/boxes/vite/src/vite-env.d.ts b/boxes/boxes/vite/src/vite-env.d.ts
new file mode 100644
index 000000000000..11f02fe2a006
--- /dev/null
+++ b/boxes/boxes/vite/src/vite-env.d.ts
@@ -0,0 +1 @@
+/// <reference types="vite/client" />
diff --git a/boxes/boxes/vite/tsconfig.json b/boxes/boxes/vite/tsconfig.json
new file mode 100644
index 000000000000..e00f93d65911
--- /dev/null
+++ b/boxes/boxes/vite/tsconfig.json
@@ -0,0 +1,25 @@
+{
+  "compilerOptions": {
+    "tsBuildInfoFile": "./node_modules/.tmp/tsconfig.app.tsbuildinfo",
+    "target": "ES2020",
+    "useDefineForClassFields": true,
+    "lib": ["ES2020", "DOM", "DOM.Iterable"],
+    "module": "ESNext",
+    "skipLibCheck": true,
+    "composite": true,
+
+    /* Bundler mode */
+    "moduleResolution": "Bundler",
+    "allowImportingTsExtensions": true,
+    "isolatedModules": true,
+    "moduleDetection": "force",
+    "noEmit": true,
+    "jsx": "react-jsx",
+  },
+  "include": [
+    "src/**/*.ts*",
+    "tests/**/*.ts",
+    "src/contracts/target/*.json",
+    "artifacts/**/*.ts"
+  ]
+}
diff --git a/boxes/boxes/vite/vite.config.ts b/boxes/boxes/vite/vite.config.ts
new file mode 100644
index 000000000000..eb101d8e821e
--- /dev/null
+++ b/boxes/boxes/vite/vite.config.ts
@@ -0,0 +1,39 @@
+import { defineConfig, Plugin } from "vite";
+import react from "@vitejs/plugin-react-swc";
+import { PolyfillOptions, nodePolyfills } from "vite-plugin-node-polyfills";
+import topLevelAwait from "vite-plugin-top-level-await";
+
+// Unfortunate, but needed due to https://github.com/davidmyersdev/vite-plugin-node-polyfills/issues/81
+// Suspected to be because of the yarn workspace setup, but not sure
+const nodePolyfillsFix = (options?: PolyfillOptions | undefined): Plugin => {
+  return {
+    ...nodePolyfills(options),
+    /* @ts-ignore */
+    resolveId(source: string) {
+      const m =
+        /^vite-plugin-node-polyfills\/shims\/(buffer|global|process)$/.exec(
+          source,
+        );
+      if (m) {
+        return `../../node_modules/vite-plugin-node-polyfills/shims/${m[1]}/dist/index.cjs`;
+      }
+    },
+  };
+};
+
+// https://vite.dev/config/
+export default defineConfig({
+  plugins: [
+    react(),
+    nodePolyfillsFix({
+      overrides: {
+        fs: "memfs",
+        buffer: "buffer/",
+      },
+    }),
+    topLevelAwait(),
+  ],
+  optimizeDeps: {
+    exclude: ["@noir-lang/acvm_js", "@noir-lang/noirc_abi"],
+  },
+});
diff --git a/boxes/yarn.lock b/boxes/yarn.lock
index f3726279f967..5be003018981 100644
--- a/boxes/yarn.lock
+++ b/boxes/yarn.lock
@@ -5,7 +5,7 @@ __metadata:
   version: 8
   cacheKey: 10c0
 
-"@ampproject/remapping@npm:^2.2.0, @ampproject/remapping@npm:^2.3.0":
+"@ampproject/remapping@npm:^2.2.0":
   version: 2.3.0
   resolution: "@ampproject/remapping@npm:2.3.0"
   dependencies:
@@ -28,24 +28,60 @@ __metadata:
   linkType: soft
 
 "@aztec/builder@npm:latest":
-  version: 0.52.0
-  resolution: "@aztec/builder@npm:0.52.0"
+  version: 0.63.1
+  resolution: "@aztec/builder@npm:0.63.1"
   dependencies:
-    "@aztec/foundation": "npm:0.52.0"
-    "@aztec/types": "npm:0.52.0"
+    "@aztec/foundation": "npm:0.63.1"
+    "@aztec/types": "npm:0.63.1"
     commander: "npm:^12.1.0"
   bin:
     aztec-builder: dest/bin/cli.js
-  checksum: 2207259255fc3e2ffbbd08829f2a4adc9070befaf09e0541213beaf378632a501c29104e447f310aebbf65a21e3cb77b99259a4122e9253640ee232ce4413675
+  checksum: c373e3c44cd08a460773458c5f19f8e7b884b5fc83252dcbe6f751a3319e04a7a600c219d7ab7af6bfaef9ccea6bf32bce11cbde861fe839ed0742c4c8f2c95a
   languageName: node
   linkType: hard
 
+"@aztec/circuit-types@link:../yarn-project/circuit-types::locator=aztec-app%40workspace%3A.":
+  version: 0.0.0-use.local
+  resolution: 
"@aztec/circuit-types@link:../yarn-project/circuit-types::locator=aztec-app%40workspace%3A." + languageName: node + linkType: soft + +"@aztec/ethereum@link:../yarn-project/ethereum::locator=aztec-app%40workspace%3A.": + version: 0.0.0-use.local + resolution: "@aztec/ethereum@link:../yarn-project/ethereum::locator=aztec-app%40workspace%3A." + languageName: node + linkType: soft + "@aztec/foundation@link:../yarn-project/foundation::locator=aztec-app%40workspace%3A.": version: 0.0.0-use.local resolution: "@aztec/foundation@link:../yarn-project/foundation::locator=aztec-app%40workspace%3A." languageName: node linkType: soft +"@aztec/key-store@link:../../../yarn-project/key-store::locator=vite%40workspace%3Aboxes%2Fvite": + version: 0.0.0-use.local + resolution: "@aztec/key-store@link:../../../yarn-project/key-store::locator=vite%40workspace%3Aboxes%2Fvite" + languageName: node + linkType: soft + +"@aztec/kv-store@portal:../../../yarn-project/kv-store::locator=vite%40workspace%3Aboxes%2Fvite": + version: 0.0.0-use.local + resolution: "@aztec/kv-store@portal:../../../yarn-project/kv-store::locator=vite%40workspace%3Aboxes%2Fvite" + dependencies: + "@aztec/circuit-types": "workspace:^" + "@aztec/ethereum": "workspace:^" + "@aztec/foundation": "workspace:^" + idb: "npm:^8.0.0" + lmdb: "npm:^3.0.6" + languageName: node + linkType: soft + +"@aztec/pxe@link:../../../yarn-project/pxe::locator=vite%40workspace%3Aboxes%2Fvite": + version: 0.0.0-use.local + resolution: "@aztec/pxe@link:../../../yarn-project/pxe::locator=vite%40workspace%3Aboxes%2Fvite" + languageName: node + linkType: soft + "@aztec/react@workspace:boxes/react": version: 0.0.0-use.local resolution: "@aztec/react@workspace:boxes/react" @@ -125,163 +161,142 @@ __metadata: languageName: unknown linkType: soft -"@babel/code-frame@npm:^7.0.0, @babel/code-frame@npm:^7.12.13, @babel/code-frame@npm:^7.24.7": - version: 7.24.7 - resolution: "@babel/code-frame@npm:7.24.7" +"@babel/code-frame@npm:^7.0.0, @babel/code-frame@npm:^7.12.13, @babel/code-frame@npm:^7.25.9, @babel/code-frame@npm:^7.26.0": + version: 7.26.2 + resolution: "@babel/code-frame@npm:7.26.2" dependencies: - "@babel/highlight": "npm:^7.24.7" + "@babel/helper-validator-identifier": "npm:^7.25.9" + js-tokens: "npm:^4.0.0" picocolors: "npm:^1.0.0" - checksum: ab0af539473a9f5aeaac7047e377cb4f4edd255a81d84a76058595f8540784cc3fbe8acf73f1e073981104562490aabfb23008cd66dc677a456a4ed5390fdde6 + checksum: 7d79621a6849183c415486af99b1a20b84737e8c11cd55b6544f688c51ce1fd710e6d869c3dd21232023da272a79b91efb3e83b5bc2dc65c1187c5fcd1b72ea8 languageName: node linkType: hard -"@babel/compat-data@npm:^7.25.2": - version: 7.25.4 - resolution: "@babel/compat-data@npm:7.25.4" - checksum: 50d79734d584a28c69d6f5b99adfaa064d0f41609a378aef04eb06accc5b44f8520e68549eba3a082478180957b7d5783f1bfb1672e4ae8574e797ce8bae79fa +"@babel/compat-data@npm:^7.25.9": + version: 7.26.2 + resolution: "@babel/compat-data@npm:7.26.2" + checksum: c9b5f3724828d17f728a778f9d66c19b55c018d0d76de6d731178cca64f182c22b71400a73bf2b65dcc4fcfe52b630088a94d5902911b54206aa90e3ffe07d12 languageName: node linkType: hard "@babel/core@npm:^7.11.6, @babel/core@npm:^7.12.3, @babel/core@npm:^7.23.9": - version: 7.25.2 - resolution: "@babel/core@npm:7.25.2" + version: 7.26.0 + resolution: "@babel/core@npm:7.26.0" dependencies: "@ampproject/remapping": "npm:^2.2.0" - "@babel/code-frame": "npm:^7.24.7" - "@babel/generator": "npm:^7.25.0" - "@babel/helper-compilation-targets": "npm:^7.25.2" - "@babel/helper-module-transforms": "npm:^7.25.2" - 
"@babel/helpers": "npm:^7.25.0" - "@babel/parser": "npm:^7.25.0" - "@babel/template": "npm:^7.25.0" - "@babel/traverse": "npm:^7.25.2" - "@babel/types": "npm:^7.25.2" + "@babel/code-frame": "npm:^7.26.0" + "@babel/generator": "npm:^7.26.0" + "@babel/helper-compilation-targets": "npm:^7.25.9" + "@babel/helper-module-transforms": "npm:^7.26.0" + "@babel/helpers": "npm:^7.26.0" + "@babel/parser": "npm:^7.26.0" + "@babel/template": "npm:^7.25.9" + "@babel/traverse": "npm:^7.25.9" + "@babel/types": "npm:^7.26.0" convert-source-map: "npm:^2.0.0" debug: "npm:^4.1.0" gensync: "npm:^1.0.0-beta.2" json5: "npm:^2.2.3" semver: "npm:^6.3.1" - checksum: a425fa40e73cb72b6464063a57c478bc2de9dbcc19c280f1b55a3d88b35d572e87e8594e7d7b4880331addb6faef641bbeb701b91b41b8806cd4deae5d74f401 + checksum: 91de73a7ff5c4049fbc747930aa039300e4d2670c2a91f5aa622f1b4868600fc89b01b6278385fbcd46f9574186fa3d9b376a9e7538e50f8d118ec13cfbcb63e languageName: node linkType: hard -"@babel/generator@npm:^7.25.0, @babel/generator@npm:^7.25.6, @babel/generator@npm:^7.7.2": - version: 7.25.6 - resolution: "@babel/generator@npm:7.25.6" +"@babel/generator@npm:^7.25.9, @babel/generator@npm:^7.26.0, @babel/generator@npm:^7.7.2": + version: 7.26.2 + resolution: "@babel/generator@npm:7.26.2" dependencies: - "@babel/types": "npm:^7.25.6" + "@babel/parser": "npm:^7.26.2" + "@babel/types": "npm:^7.26.0" "@jridgewell/gen-mapping": "npm:^0.3.5" "@jridgewell/trace-mapping": "npm:^0.3.25" - jsesc: "npm:^2.5.1" - checksum: f89282cce4ddc63654470b98086994d219407d025497f483eb03ba102086e11e2b685b27122f6ff2e1d93b5b5fa0c3a6b7e974fbf2e4a75b685041a746a4291e + jsesc: "npm:^3.0.2" + checksum: 167ebce8977142f5012fad6bd91da51ac52bcd752f2261a54b7ab605d928aebe57e21636cdd2a9c7757e552652c68d9fcb5d40b06fcb66e02d9ee7526e118a5c languageName: node linkType: hard -"@babel/helper-compilation-targets@npm:^7.25.2": - version: 7.25.2 - resolution: "@babel/helper-compilation-targets@npm:7.25.2" +"@babel/helper-compilation-targets@npm:^7.25.9": + version: 7.25.9 + resolution: "@babel/helper-compilation-targets@npm:7.25.9" dependencies: - "@babel/compat-data": "npm:^7.25.2" - "@babel/helper-validator-option": "npm:^7.24.8" - browserslist: "npm:^4.23.1" + "@babel/compat-data": "npm:^7.25.9" + "@babel/helper-validator-option": "npm:^7.25.9" + browserslist: "npm:^4.24.0" lru-cache: "npm:^5.1.1" semver: "npm:^6.3.1" - checksum: de10e986b5322c9f807350467dc845ec59df9e596a5926a3b5edbb4710d8e3b8009d4396690e70b88c3844fe8ec4042d61436dd4b92d1f5f75655cf43ab07e99 + checksum: a6b26a1e4222e69ef8e62ee19374308f060b007828bc11c65025ecc9e814aba21ff2175d6d3f8bf53c863edd728ee8f94ba7870f8f90a37d39552ad9933a8aaa languageName: node linkType: hard -"@babel/helper-module-imports@npm:^7.24.7": - version: 7.24.7 - resolution: "@babel/helper-module-imports@npm:7.24.7" +"@babel/helper-module-imports@npm:^7.25.9": + version: 7.25.9 + resolution: "@babel/helper-module-imports@npm:7.25.9" dependencies: - "@babel/traverse": "npm:^7.24.7" - "@babel/types": "npm:^7.24.7" - checksum: 97c57db6c3eeaea31564286e328a9fb52b0313c5cfcc7eee4bc226aebcf0418ea5b6fe78673c0e4a774512ec6c86e309d0f326e99d2b37bfc16a25a032498af0 + "@babel/traverse": "npm:^7.25.9" + "@babel/types": "npm:^7.25.9" + checksum: 078d3c2b45d1f97ffe6bb47f61961be4785d2342a4156d8b42c92ee4e1b7b9e365655dd6cb25329e8fe1a675c91eeac7e3d04f0c518b67e417e29d6e27b6aa70 languageName: node linkType: hard -"@babel/helper-module-transforms@npm:^7.25.2": - version: 7.25.2 - resolution: "@babel/helper-module-transforms@npm:7.25.2" 
+"@babel/helper-module-transforms@npm:^7.26.0": + version: 7.26.0 + resolution: "@babel/helper-module-transforms@npm:7.26.0" dependencies: - "@babel/helper-module-imports": "npm:^7.24.7" - "@babel/helper-simple-access": "npm:^7.24.7" - "@babel/helper-validator-identifier": "npm:^7.24.7" - "@babel/traverse": "npm:^7.25.2" + "@babel/helper-module-imports": "npm:^7.25.9" + "@babel/helper-validator-identifier": "npm:^7.25.9" + "@babel/traverse": "npm:^7.25.9" peerDependencies: "@babel/core": ^7.0.0 - checksum: adaa15970ace0aee5934b5a633789b5795b6229c6a9cf3e09a7e80aa33e478675eee807006a862aa9aa517935d81f88a6db8a9f5936e3a2a40ec75f8062bc329 - languageName: node - linkType: hard - -"@babel/helper-plugin-utils@npm:^7.0.0, @babel/helper-plugin-utils@npm:^7.10.4, @babel/helper-plugin-utils@npm:^7.12.13, @babel/helper-plugin-utils@npm:^7.14.5, @babel/helper-plugin-utils@npm:^7.24.7, @babel/helper-plugin-utils@npm:^7.24.8, @babel/helper-plugin-utils@npm:^7.8.0": - version: 7.24.8 - resolution: "@babel/helper-plugin-utils@npm:7.24.8" - checksum: 0376037f94a3bfe6b820a39f81220ac04f243eaee7193774b983e956c1750883ff236b30785795abbcda43fac3ece74750566830c2daa4d6e3870bb0dff34c2d - languageName: node - linkType: hard - -"@babel/helper-simple-access@npm:^7.24.7": - version: 7.24.7 - resolution: "@babel/helper-simple-access@npm:7.24.7" - dependencies: - "@babel/traverse": "npm:^7.24.7" - "@babel/types": "npm:^7.24.7" - checksum: 7230e419d59a85f93153415100a5faff23c133d7442c19e0cd070da1784d13cd29096ee6c5a5761065c44e8164f9f80e3a518c41a0256df39e38f7ad6744fed7 + checksum: ee111b68a5933481d76633dad9cdab30c41df4479f0e5e1cc4756dc9447c1afd2c9473b5ba006362e35b17f4ebddd5fca090233bef8dfc84dca9d9127e56ec3a languageName: node linkType: hard -"@babel/helper-string-parser@npm:^7.24.8": - version: 7.24.8 - resolution: "@babel/helper-string-parser@npm:7.24.8" - checksum: 6361f72076c17fabf305e252bf6d580106429014b3ab3c1f5c4eb3e6d465536ea6b670cc0e9a637a77a9ad40454d3e41361a2909e70e305116a23d68ce094c08 +"@babel/helper-plugin-utils@npm:^7.0.0, @babel/helper-plugin-utils@npm:^7.10.4, @babel/helper-plugin-utils@npm:^7.12.13, @babel/helper-plugin-utils@npm:^7.14.5, @babel/helper-plugin-utils@npm:^7.25.9, @babel/helper-plugin-utils@npm:^7.8.0": + version: 7.25.9 + resolution: "@babel/helper-plugin-utils@npm:7.25.9" + checksum: 483066a1ba36ff16c0116cd24f93de05de746a603a777cd695ac7a1b034928a65a4ecb35f255761ca56626435d7abdb73219eba196f9aa83b6c3c3169325599d languageName: node linkType: hard -"@babel/helper-validator-identifier@npm:^7.24.7": - version: 7.24.7 - resolution: "@babel/helper-validator-identifier@npm:7.24.7" - checksum: 87ad608694c9477814093ed5b5c080c2e06d44cb1924ae8320474a74415241223cc2a725eea2640dd783ff1e3390e5f95eede978bc540e870053152e58f1d651 +"@babel/helper-string-parser@npm:^7.25.9": + version: 7.25.9 + resolution: "@babel/helper-string-parser@npm:7.25.9" + checksum: 7244b45d8e65f6b4338a6a68a8556f2cb161b782343e97281a5f2b9b93e420cad0d9f5773a59d79f61d0c448913d06f6a2358a87f2e203cf112e3c5b53522ee6 languageName: node linkType: hard -"@babel/helper-validator-option@npm:^7.24.8": - version: 7.24.8 - resolution: "@babel/helper-validator-option@npm:7.24.8" - checksum: 73db93a34ae89201351288bee7623eed81a54000779462a986105b54ffe82069e764afd15171a428b82e7c7a9b5fec10b5d5603b216317a414062edf5c67a21f +"@babel/helper-validator-identifier@npm:^7.25.9": + version: 7.25.9 + resolution: "@babel/helper-validator-identifier@npm:7.25.9" + checksum: 
4fc6f830177b7b7e887ad3277ddb3b91d81e6c4a24151540d9d1023e8dc6b1c0505f0f0628ae653601eb4388a8db45c1c14b2c07a9173837aef7e4116456259d languageName: node linkType: hard -"@babel/helpers@npm:^7.25.0": - version: 7.25.6 - resolution: "@babel/helpers@npm:7.25.6" - dependencies: - "@babel/template": "npm:^7.25.0" - "@babel/types": "npm:^7.25.6" - checksum: 448c1cdabccca42fd97a252f73f1e4bcd93776dbf24044f3b4f49b756bf2ece73ee6df05177473bb74ea7456dddd18d6f481e4d96d2cc7839d078900d48c696c +"@babel/helper-validator-option@npm:^7.25.9": + version: 7.25.9 + resolution: "@babel/helper-validator-option@npm:7.25.9" + checksum: 27fb195d14c7dcb07f14e58fe77c44eea19a6a40a74472ec05c441478fa0bb49fa1c32b2d64be7a38870ee48ef6601bdebe98d512f0253aea0b39756c4014f3e languageName: node linkType: hard -"@babel/highlight@npm:^7.24.7": - version: 7.24.7 - resolution: "@babel/highlight@npm:7.24.7" +"@babel/helpers@npm:^7.26.0": + version: 7.26.0 + resolution: "@babel/helpers@npm:7.26.0" dependencies: - "@babel/helper-validator-identifier": "npm:^7.24.7" - chalk: "npm:^2.4.2" - js-tokens: "npm:^4.0.0" - picocolors: "npm:^1.0.0" - checksum: 674334c571d2bb9d1c89bdd87566383f59231e16bcdcf5bb7835babdf03c9ae585ca0887a7b25bdf78f303984af028df52831c7989fecebb5101cc132da9393a + "@babel/template": "npm:^7.25.9" + "@babel/types": "npm:^7.26.0" + checksum: 343333cced6946fe46617690a1d0789346960910225ce359021a88a60a65bc0d791f0c5d240c0ed46cf8cc63b5fd7df52734ff14e43b9c32feae2b61b1647097 languageName: node linkType: hard -"@babel/parser@npm:^7.1.0, @babel/parser@npm:^7.14.7, @babel/parser@npm:^7.20.7, @babel/parser@npm:^7.23.9, @babel/parser@npm:^7.25.0, @babel/parser@npm:^7.25.6": - version: 7.25.6 - resolution: "@babel/parser@npm:7.25.6" +"@babel/parser@npm:^7.1.0, @babel/parser@npm:^7.14.7, @babel/parser@npm:^7.20.7, @babel/parser@npm:^7.23.9, @babel/parser@npm:^7.25.9, @babel/parser@npm:^7.26.0, @babel/parser@npm:^7.26.2": + version: 7.26.2 + resolution: "@babel/parser@npm:7.26.2" dependencies: - "@babel/types": "npm:^7.25.6" + "@babel/types": "npm:^7.26.0" bin: parser: ./bin/babel-parser.js - checksum: f88a0e895dbb096fd37c4527ea97d12b5fc013720602580a941ac3a339698872f0c911e318c292b184c36b5fbe23b612f05aff9d24071bc847c7b1c21552c41d + checksum: 751a743087b3a9172a7599f1421830d44c38f065ef781588d2bfb1c98f9b461719a226feb13c868d7a284783eee120c88ea522593118f2668f46ebfb1105c4d7 languageName: node linkType: hard @@ -330,13 +345,13 @@ __metadata: linkType: hard "@babel/plugin-syntax-import-attributes@npm:^7.24.7": - version: 7.25.6 - resolution: "@babel/plugin-syntax-import-attributes@npm:7.25.6" + version: 7.26.0 + resolution: "@babel/plugin-syntax-import-attributes@npm:7.26.0" dependencies: - "@babel/helper-plugin-utils": "npm:^7.24.8" + "@babel/helper-plugin-utils": "npm:^7.25.9" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 0e9359cf2d117476310961dfcfd7204ed692e933707da10d6194153d3996cd2ea5b7635fc90d720dce3612083af89966bb862561064a509c350320dc98644751 + checksum: e594c185b12bfe0bbe7ca78dfeebe870e6d569a12128cac86f3164a075fe0ff70e25ddbd97fd0782906b91f65560c9dc6957716b7b4a68aba2516c9b7455e352 languageName: node linkType: hard @@ -363,13 +378,13 @@ __metadata: linkType: hard "@babel/plugin-syntax-jsx@npm:^7.7.2": - version: 7.24.7 - resolution: "@babel/plugin-syntax-jsx@npm:7.24.7" + version: 7.25.9 + resolution: "@babel/plugin-syntax-jsx@npm:7.25.9" dependencies: - "@babel/helper-plugin-utils": "npm:^7.24.7" + "@babel/helper-plugin-utils": "npm:^7.25.9" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 
f44d927a9ae8d5ef016ff5b450e1671e56629ddc12e56b938e41fd46e141170d9dfc9a53d6cb2b9a20a7dd266a938885e6a3981c60c052a2e1daed602ac80e51 + checksum: d56597aff4df39d3decda50193b6dfbe596ca53f437ff2934622ce19a743bf7f43492d3fb3308b0289f5cee2b825d99ceb56526a2b9e7b68bf04901546c5618c languageName: node linkType: hard @@ -462,50 +477,49 @@ __metadata: linkType: hard "@babel/plugin-syntax-typescript@npm:^7.7.2": - version: 7.25.4 - resolution: "@babel/plugin-syntax-typescript@npm:7.25.4" + version: 7.25.9 + resolution: "@babel/plugin-syntax-typescript@npm:7.25.9" dependencies: - "@babel/helper-plugin-utils": "npm:^7.24.8" + "@babel/helper-plugin-utils": "npm:^7.25.9" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 199919d44c73e5edee9ffd311cf638f88d26a810189e32d338c46c7600441fd5c4a2e431f9be377707cbf318410895304e90b83bf8d9011d205150fa7f260e63 + checksum: 5192ebe11bd46aea68b7a60fd9555465c59af7e279e71126788e59121b86e00b505816685ab4782abe159232b0f73854e804b54449820b0d950b397ee158caa2 languageName: node linkType: hard -"@babel/template@npm:^7.25.0, @babel/template@npm:^7.3.3": - version: 7.25.0 - resolution: "@babel/template@npm:7.25.0" +"@babel/template@npm:^7.25.9, @babel/template@npm:^7.3.3": + version: 7.25.9 + resolution: "@babel/template@npm:7.25.9" dependencies: - "@babel/code-frame": "npm:^7.24.7" - "@babel/parser": "npm:^7.25.0" - "@babel/types": "npm:^7.25.0" - checksum: 4e31afd873215744c016e02b04f43b9fa23205d6d0766fb2e93eb4091c60c1b88897936adb895fb04e3c23de98dfdcbe31bc98daaa1a4e0133f78bb948e1209b + "@babel/code-frame": "npm:^7.25.9" + "@babel/parser": "npm:^7.25.9" + "@babel/types": "npm:^7.25.9" + checksum: ebe677273f96a36c92cc15b7aa7b11cc8bc8a3bb7a01d55b2125baca8f19cae94ff3ce15f1b1880fb8437f3a690d9f89d4e91f16fc1dc4d3eb66226d128983ab languageName: node linkType: hard -"@babel/traverse@npm:^7.24.7, @babel/traverse@npm:^7.25.2": - version: 7.25.6 - resolution: "@babel/traverse@npm:7.25.6" +"@babel/traverse@npm:^7.25.9": + version: 7.25.9 + resolution: "@babel/traverse@npm:7.25.9" dependencies: - "@babel/code-frame": "npm:^7.24.7" - "@babel/generator": "npm:^7.25.6" - "@babel/parser": "npm:^7.25.6" - "@babel/template": "npm:^7.25.0" - "@babel/types": "npm:^7.25.6" + "@babel/code-frame": "npm:^7.25.9" + "@babel/generator": "npm:^7.25.9" + "@babel/parser": "npm:^7.25.9" + "@babel/template": "npm:^7.25.9" + "@babel/types": "npm:^7.25.9" debug: "npm:^4.3.1" globals: "npm:^11.1.0" - checksum: 964304c6fa46bd705428ba380bf73177eeb481c3f26d82ea3d0661242b59e0dd4329d23886035e9ca9a4ceb565c03a76fd615109830687a27bcd350059d6377e + checksum: e90be586a714da4adb80e6cb6a3c5cfcaa9b28148abdafb065e34cc109676fc3db22cf98cd2b2fff66ffb9b50c0ef882cab0f466b6844be0f6c637b82719bba1 languageName: node linkType: hard -"@babel/types@npm:^7.0.0, @babel/types@npm:^7.20.7, @babel/types@npm:^7.24.7, @babel/types@npm:^7.25.0, @babel/types@npm:^7.25.2, @babel/types@npm:^7.25.6, @babel/types@npm:^7.3.3": - version: 7.25.6 - resolution: "@babel/types@npm:7.25.6" +"@babel/types@npm:^7.0.0, @babel/types@npm:^7.20.7, @babel/types@npm:^7.25.9, @babel/types@npm:^7.26.0, @babel/types@npm:^7.3.3": + version: 7.26.0 + resolution: "@babel/types@npm:7.26.0" dependencies: - "@babel/helper-string-parser": "npm:^7.24.8" - "@babel/helper-validator-identifier": "npm:^7.24.7" - to-fast-properties: "npm:^2.0.0" - checksum: 89d45fbee24e27a05dca2d08300a26b905bd384a480448823f6723c72d3a30327c517476389b7280ce8cb9a2c48ef8f47da7f9f6d326faf6f53fd6b68237bdc4 + "@babel/helper-string-parser": "npm:^7.25.9" + "@babel/helper-validator-identifier": "npm:^7.25.9" + 
checksum: b694f41ad1597127e16024d766c33a641508aad037abd08d0d1f73af753e1119fa03b4a107d04b5f92cc19c095a594660547ae9bead1db2299212d644b0a5cb8 languageName: node linkType: hard @@ -694,20 +708,38 @@ __metadata: linkType: hard "@eslint-community/eslint-utils@npm:^4.2.0, @eslint-community/eslint-utils@npm:^4.4.0": - version: 4.4.0 - resolution: "@eslint-community/eslint-utils@npm:4.4.0" + version: 4.4.1 + resolution: "@eslint-community/eslint-utils@npm:4.4.1" dependencies: - eslint-visitor-keys: "npm:^3.3.0" + eslint-visitor-keys: "npm:^3.4.3" peerDependencies: eslint: ^6.0.0 || ^7.0.0 || >=8.0.0 - checksum: 7e559c4ce59cd3a06b1b5a517b593912e680a7f981ae7affab0d01d709e99cd5647019be8fafa38c350305bc32f1f7d42c7073edde2ab536c745e365f37b607e + checksum: 2aa0ac2fc50ff3f234408b10900ed4f1a0b19352f21346ad4cc3d83a1271481bdda11097baa45d484dd564c895e0762a27a8240be7a256b3ad47129e96528252 + languageName: node + linkType: hard + +"@eslint-community/regexpp@npm:^4.10.0, @eslint-community/regexpp@npm:^4.12.1, @eslint-community/regexpp@npm:^4.5.1, @eslint-community/regexpp@npm:^4.6.1": + version: 4.12.1 + resolution: "@eslint-community/regexpp@npm:4.12.1" + checksum: a03d98c246bcb9109aec2c08e4d10c8d010256538dcb3f56610191607214523d4fb1b00aa81df830b6dffb74c5fa0be03642513a289c567949d3e550ca11cdf6 + languageName: node + linkType: hard + +"@eslint/config-array@npm:^0.19.0": + version: 0.19.0 + resolution: "@eslint/config-array@npm:0.19.0" + dependencies: + "@eslint/object-schema": "npm:^2.1.4" + debug: "npm:^4.3.1" + minimatch: "npm:^3.1.2" + checksum: def23c6c67a8f98dc88f1b87e17a5668e5028f5ab9459661aabfe08e08f2acd557474bbaf9ba227be0921ae4db232c62773dbb7739815f8415678eb8f592dbf5 languageName: node linkType: hard -"@eslint-community/regexpp@npm:^4.5.1, @eslint-community/regexpp@npm:^4.6.1": - version: 4.11.0 - resolution: "@eslint-community/regexpp@npm:4.11.0" - checksum: 0f6328869b2741e2794da4ad80beac55cba7de2d3b44f796a60955b0586212ec75e6b0253291fd4aad2100ad471d1480d8895f2b54f1605439ba4c875e05e523 +"@eslint/core@npm:^0.9.0": + version: 0.9.0 + resolution: "@eslint/core@npm:0.9.0" + checksum: 6d8e8e0991cef12314c49425d8d2d9394f5fb1a36753ff82df7c03185a4646cb7c8736cf26638a4a714782cedf4b23cfc17667d282d3e5965b3920a0e7ce20d4 languageName: node linkType: hard @@ -728,21 +760,78 @@ __metadata: languageName: node linkType: hard -"@eslint/js@npm:8.57.0": - version: 8.57.0 - resolution: "@eslint/js@npm:8.57.0" - checksum: 9a518bb8625ba3350613903a6d8c622352ab0c6557a59fe6ff6178bf882bf57123f9d92aa826ee8ac3ee74b9c6203fe630e9ee00efb03d753962dcf65ee4bd94 +"@eslint/eslintrc@npm:^3.2.0": + version: 3.2.0 + resolution: "@eslint/eslintrc@npm:3.2.0" + dependencies: + ajv: "npm:^6.12.4" + debug: "npm:^4.3.2" + espree: "npm:^10.0.1" + globals: "npm:^14.0.0" + ignore: "npm:^5.2.0" + import-fresh: "npm:^3.2.1" + js-yaml: "npm:^4.1.0" + minimatch: "npm:^3.1.2" + strip-json-comments: "npm:^3.1.1" + checksum: 43867a07ff9884d895d9855edba41acf325ef7664a8df41d957135a81a477ff4df4196f5f74dc3382627e5cc8b7ad6b815c2cea1b58f04a75aced7c43414ab8b + languageName: node + linkType: hard + +"@eslint/js@npm:8.57.1": + version: 8.57.1 + resolution: "@eslint/js@npm:8.57.1" + checksum: b489c474a3b5b54381c62e82b3f7f65f4b8a5eaaed126546520bf2fede5532a8ed53212919fed1e9048dcf7f37167c8561d58d0ba4492a4244004e7793805223 + languageName: node + linkType: hard + +"@eslint/js@npm:9.15.0, @eslint/js@npm:^9.13.0": + version: 9.15.0 + resolution: "@eslint/js@npm:9.15.0" + checksum: 
56552966ab1aa95332f70d0e006db5746b511c5f8b5e0c6a9b2d6764ff6d964e0b2622731877cbc4e3f0e74c5b39191290d5f48147be19175292575130d499ab + languageName: node + linkType: hard + +"@eslint/object-schema@npm:^2.1.4": + version: 2.1.4 + resolution: "@eslint/object-schema@npm:2.1.4" + checksum: e9885532ea70e483fb007bf1275968b05bb15ebaa506d98560c41a41220d33d342e19023d5f2939fed6eb59676c1bda5c847c284b4b55fce521d282004da4dda + languageName: node + linkType: hard + +"@eslint/plugin-kit@npm:^0.2.3": + version: 0.2.3 + resolution: "@eslint/plugin-kit@npm:0.2.3" + dependencies: + levn: "npm:^0.4.1" + checksum: 89a8035976bb1780e3fa8ffe682df013bd25f7d102d991cecd3b7c297f4ce8c1a1b6805e76dd16465b5353455b670b545eff2b4ec3133e0eab81a5f9e99bd90f + languageName: node + linkType: hard + +"@humanfs/core@npm:^0.19.1": + version: 0.19.1 + resolution: "@humanfs/core@npm:0.19.1" + checksum: aa4e0152171c07879b458d0e8a704b8c3a89a8c0541726c6b65b81e84fd8b7564b5d6c633feadc6598307d34564bd53294b533491424e8e313d7ab6c7bc5dc67 + languageName: node + linkType: hard + +"@humanfs/node@npm:^0.16.6": + version: 0.16.6 + resolution: "@humanfs/node@npm:0.16.6" + dependencies: + "@humanfs/core": "npm:^0.19.1" + "@humanwhocodes/retry": "npm:^0.3.0" + checksum: 8356359c9f60108ec204cbd249ecd0356667359b2524886b357617c4a7c3b6aace0fd5a369f63747b926a762a88f8a25bc066fa1778508d110195ce7686243e1 languageName: node linkType: hard -"@humanwhocodes/config-array@npm:^0.11.14": - version: 0.11.14 - resolution: "@humanwhocodes/config-array@npm:0.11.14" +"@humanwhocodes/config-array@npm:^0.13.0": + version: 0.13.0 + resolution: "@humanwhocodes/config-array@npm:0.13.0" dependencies: - "@humanwhocodes/object-schema": "npm:^2.0.2" + "@humanwhocodes/object-schema": "npm:^2.0.3" debug: "npm:^4.3.1" minimatch: "npm:^3.0.5" - checksum: 66f725b4ee5fdd8322c737cb5013e19fac72d4d69c8bf4b7feb192fcb83442b035b92186f8e9497c220e58b2d51a080f28a73f7899bc1ab288c3be172c467541 + checksum: 205c99e756b759f92e1f44a3dc6292b37db199beacba8f26c2165d4051fe73a4ae52fdcfd08ffa93e7e5cb63da7c88648f0e84e197d154bbbbe137b2e0dd332e languageName: node linkType: hard @@ -753,13 +842,27 @@ __metadata: languageName: node linkType: hard -"@humanwhocodes/object-schema@npm:^2.0.2": +"@humanwhocodes/object-schema@npm:^2.0.3": version: 2.0.3 resolution: "@humanwhocodes/object-schema@npm:2.0.3" checksum: 80520eabbfc2d32fe195a93557cef50dfe8c8905de447f022675aaf66abc33ae54098f5ea78548d925aa671cd4ab7c7daa5ad704fe42358c9b5e7db60f80696c languageName: node linkType: hard +"@humanwhocodes/retry@npm:^0.3.0": + version: 0.3.1 + resolution: "@humanwhocodes/retry@npm:0.3.1" + checksum: f0da1282dfb45e8120480b9e2e275e2ac9bbe1cf016d046fdad8e27cc1285c45bb9e711681237944445157b430093412b4446c1ab3fc4bb037861b5904101d3b + languageName: node + linkType: hard + +"@humanwhocodes/retry@npm:^0.4.1": + version: 0.4.1 + resolution: "@humanwhocodes/retry@npm:0.4.1" + checksum: be7bb6841c4c01d0b767d9bb1ec1c9359ee61421ce8ba66c249d035c5acdfd080f32d55a5c9e859cdd7868788b8935774f65b2caf24ec0b7bd7bf333791f063b + languageName: node + linkType: hard + "@iarna/toml@npm:^2.2.5": version: 2.2.5 resolution: "@iarna/toml@npm:2.2.5" @@ -778,30 +881,29 @@ __metadata: linkType: hard "@inquirer/core@npm:^9.1.0": - version: 9.1.0 - resolution: "@inquirer/core@npm:9.1.0" + version: 9.2.1 + resolution: "@inquirer/core@npm:9.2.1" dependencies: - "@inquirer/figures": "npm:^1.0.5" - "@inquirer/type": "npm:^1.5.3" + "@inquirer/figures": "npm:^1.0.6" + "@inquirer/type": "npm:^2.0.0" "@types/mute-stream": "npm:^0.0.4" - "@types/node": "npm:^22.5.2" + 
"@types/node": "npm:^22.5.5" "@types/wrap-ansi": "npm:^3.0.0" ansi-escapes: "npm:^4.3.2" - cli-spinners: "npm:^2.9.2" cli-width: "npm:^4.1.0" mute-stream: "npm:^1.0.0" signal-exit: "npm:^4.1.0" strip-ansi: "npm:^6.0.1" wrap-ansi: "npm:^6.2.0" yoctocolors-cjs: "npm:^2.1.2" - checksum: c86cbd1980788dee4151002ed717b5664a79eec1d925e1b38896bbad079647af5c423eaaa39a2291ba4fdf78a33c541ea3f69cbbf030f03815eb523fa05230f8 + checksum: 11c14be77a9fa85831de799a585721b0a49ab2f3b7d8fd1780c48ea2b29229c6bdc94e7892419086d0f7734136c2ba87b6a32e0782571eae5bbd655b1afad453 languageName: node linkType: hard -"@inquirer/figures@npm:^1.0.5": - version: 1.0.5 - resolution: "@inquirer/figures@npm:1.0.5" - checksum: ec9ba23db42cb33fa18eb919abf2a18e750e739e64c1883ce4a98345cd5711c60cac12d1faf56a859f52d387deb221c8d3dfe60344ee07955a9a262f8b821fe3 +"@inquirer/figures@npm:^1.0.5, @inquirer/figures@npm:^1.0.6": + version: 1.0.8 + resolution: "@inquirer/figures@npm:1.0.8" + checksum: 34d287ff1fd16476c58bbd5b169db315f8319b5ffb09f81a1bb9aabd4165114e7406b1f418d021fd9cd48923008446e3eec274bb818f378ea132a0450bbc91d4 languageName: node linkType: hard @@ -829,11 +931,20 @@ __metadata: linkType: hard "@inquirer/type@npm:^1.5.3": - version: 1.5.3 - resolution: "@inquirer/type@npm:1.5.3" + version: 1.5.5 + resolution: "@inquirer/type@npm:1.5.5" + dependencies: + mute-stream: "npm:^1.0.0" + checksum: 4c41736c09ba9426b5a9e44993bdd54e8f532e791518802e33866f233a2a6126a25c1c82c19d1abbf1df627e57b1b957dd3f8318ea96073d8bfc32193943bcb3 + languageName: node + linkType: hard + +"@inquirer/type@npm:^2.0.0": + version: 2.0.0 + resolution: "@inquirer/type@npm:2.0.0" dependencies: mute-stream: "npm:^1.0.0" - checksum: da92a7410efcb20cf12422558fb8e00136e2ff1746ae1d17ea05511e77139bf2044527d37a70e77f188f158099f7751ed808ca3f82769cbe99c1052509481e95 + checksum: 8c663d52beb2b89a896d3c3d5cc3d6d024fa149e565555bcb42fa640cbe23fba7ff2c51445342cef1fe6e46305e2d16c1590fa1d11ad0ddf93a67b655ef41f0a languageName: node linkType: hard @@ -1163,6 +1274,38 @@ __metadata: languageName: node linkType: hard +"@jsonjoy.com/base64@npm:^1.1.1": + version: 1.1.2 + resolution: "@jsonjoy.com/base64@npm:1.1.2" + peerDependencies: + tslib: 2 + checksum: 88717945f66dc89bf58ce75624c99fe6a5c9a0c8614e26d03e406447b28abff80c69fb37dabe5aafef1862cf315071ae66e5c85f6018b437d95f8d13d235e6eb + languageName: node + linkType: hard + +"@jsonjoy.com/json-pack@npm:^1.0.3": + version: 1.1.0 + resolution: "@jsonjoy.com/json-pack@npm:1.1.0" + dependencies: + "@jsonjoy.com/base64": "npm:^1.1.1" + "@jsonjoy.com/util": "npm:^1.1.2" + hyperdyperid: "npm:^1.2.0" + thingies: "npm:^1.20.0" + peerDependencies: + tslib: 2 + checksum: cdf5cb567a7f2e703d4966a3e3a5f7f7b54ee40a2102aa0ede5c79bcf2060c8465d82f39de8583db4cf1d8415bec8e57dfb1156ef663567b846cdea45813d9d1 + languageName: node + linkType: hard + +"@jsonjoy.com/util@npm:^1.1.2, @jsonjoy.com/util@npm:^1.3.0": + version: 1.5.0 + resolution: "@jsonjoy.com/util@npm:1.5.0" + peerDependencies: + tslib: 2 + checksum: 0065ae12c4108d8aede01a479c8d2b5a39bce99e9a449d235befc753f57e8385d9c1115720529f26597840b7398d512898155423d9859fd638319fb0c827365d + languageName: node + linkType: hard + "@leichtgewicht/ip-codec@npm:^2.0.1": version: 2.0.5 resolution: "@leichtgewicht/ip-codec@npm:2.0.5" @@ -1170,6 +1313,90 @@ __metadata: languageName: node linkType: hard +"@lmdb/lmdb-darwin-arm64@npm:3.1.6": + version: 3.1.6 + resolution: "@lmdb/lmdb-darwin-arm64@npm:3.1.6" + conditions: os=darwin & cpu=arm64 + languageName: node + linkType: hard + +"@lmdb/lmdb-darwin-x64@npm:3.1.6": + 
version: 3.1.6 + resolution: "@lmdb/lmdb-darwin-x64@npm:3.1.6" + conditions: os=darwin & cpu=x64 + languageName: node + linkType: hard + +"@lmdb/lmdb-linux-arm64@npm:3.1.6": + version: 3.1.6 + resolution: "@lmdb/lmdb-linux-arm64@npm:3.1.6" + conditions: os=linux & cpu=arm64 + languageName: node + linkType: hard + +"@lmdb/lmdb-linux-arm@npm:3.1.6": + version: 3.1.6 + resolution: "@lmdb/lmdb-linux-arm@npm:3.1.6" + conditions: os=linux & cpu=arm + languageName: node + linkType: hard + +"@lmdb/lmdb-linux-x64@npm:3.1.6": + version: 3.1.6 + resolution: "@lmdb/lmdb-linux-x64@npm:3.1.6" + conditions: os=linux & cpu=x64 + languageName: node + linkType: hard + +"@lmdb/lmdb-win32-x64@npm:3.1.6": + version: 3.1.6 + resolution: "@lmdb/lmdb-win32-x64@npm:3.1.6" + conditions: os=win32 & cpu=x64 + languageName: node + linkType: hard + +"@msgpackr-extract/msgpackr-extract-darwin-arm64@npm:3.0.3": + version: 3.0.3 + resolution: "@msgpackr-extract/msgpackr-extract-darwin-arm64@npm:3.0.3" + conditions: os=darwin & cpu=arm64 + languageName: node + linkType: hard + +"@msgpackr-extract/msgpackr-extract-darwin-x64@npm:3.0.3": + version: 3.0.3 + resolution: "@msgpackr-extract/msgpackr-extract-darwin-x64@npm:3.0.3" + conditions: os=darwin & cpu=x64 + languageName: node + linkType: hard + +"@msgpackr-extract/msgpackr-extract-linux-arm64@npm:3.0.3": + version: 3.0.3 + resolution: "@msgpackr-extract/msgpackr-extract-linux-arm64@npm:3.0.3" + conditions: os=linux & cpu=arm64 + languageName: node + linkType: hard + +"@msgpackr-extract/msgpackr-extract-linux-arm@npm:3.0.3": + version: 3.0.3 + resolution: "@msgpackr-extract/msgpackr-extract-linux-arm@npm:3.0.3" + conditions: os=linux & cpu=arm + languageName: node + linkType: hard + +"@msgpackr-extract/msgpackr-extract-linux-x64@npm:3.0.3": + version: 3.0.3 + resolution: "@msgpackr-extract/msgpackr-extract-linux-x64@npm:3.0.3" + conditions: os=linux & cpu=x64 + languageName: node + linkType: hard + +"@msgpackr-extract/msgpackr-extract-win32-x64@npm:3.0.3": + version: 3.0.3 + resolution: "@msgpackr-extract/msgpackr-extract-win32-x64@npm:3.0.3" + conditions: os=win32 & cpu=x64 + languageName: node + linkType: hard + "@nodelib/fs.scandir@npm:2.1.5": version: 2.1.5 resolution: "@nodelib/fs.scandir@npm:2.1.5" @@ -1197,6 +1424,18 @@ __metadata: languageName: node linkType: hard +"@noir-lang/acvm_js@link:../../../noir/packages/acvm_js::locator=vite%40workspace%3Aboxes%2Fvite": + version: 0.0.0-use.local + resolution: "@noir-lang/acvm_js@link:../../../noir/packages/acvm_js::locator=vite%40workspace%3Aboxes%2Fvite" + languageName: node + linkType: soft + +"@noir-lang/noirc_abi@link:../../../noir/packages/noirc_abi::locator=vite%40workspace%3Aboxes%2Fvite": + version: 0.0.0-use.local + resolution: "@noir-lang/noirc_abi@link:../../../noir/packages/noirc_abi::locator=vite%40workspace%3Aboxes%2Fvite" + languageName: node + linkType: soft + "@nolyfill/is-core-module@npm:1.0.39": version: 1.0.39 resolution: "@nolyfill/is-core-module@npm:1.0.39" @@ -1251,114 +1490,172 @@ __metadata: languageName: node linkType: hard -"@rollup/rollup-android-arm-eabi@npm:4.21.2": - version: 4.21.2 - resolution: "@rollup/rollup-android-arm-eabi@npm:4.21.2" +"@rollup/plugin-inject@npm:^5.0.5": + version: 5.0.5 + resolution: "@rollup/plugin-inject@npm:5.0.5" + dependencies: + "@rollup/pluginutils": "npm:^5.0.1" + estree-walker: "npm:^2.0.2" + magic-string: "npm:^0.30.3" + peerDependencies: + rollup: ^1.20.0||^2.0.0||^3.0.0||^4.0.0 + peerDependenciesMeta: + rollup: + optional: true + checksum: 
22d10cf44fa56a6683d5ac4df24a9003379b3dcaae9897f5c30c844afc2ebca83cfaa5557f13a1399b1c8a0d312c3217bcacd508b7ebc4b2cbee401bd1ec8be2 + languageName: node + linkType: hard + +"@rollup/plugin-virtual@npm:^3.0.2": + version: 3.0.2 + resolution: "@rollup/plugin-virtual@npm:3.0.2" + peerDependencies: + rollup: ^1.20.0||^2.0.0||^3.0.0||^4.0.0 + peerDependenciesMeta: + rollup: + optional: true + checksum: 7115edb7989096d1ce334939fcf6e1ba365586b487bf61b2dd4f915386197f350db70904030342c0720fe58f5a52828975c645c4d415c1d432d9b1b6760a22ef + languageName: node + linkType: hard + +"@rollup/pluginutils@npm:^5.0.1": + version: 5.1.3 + resolution: "@rollup/pluginutils@npm:5.1.3" + dependencies: + "@types/estree": "npm:^1.0.0" + estree-walker: "npm:^2.0.2" + picomatch: "npm:^4.0.2" + peerDependencies: + rollup: ^1.20.0||^2.0.0||^3.0.0||^4.0.0 + peerDependenciesMeta: + rollup: + optional: true + checksum: ba46ad588733fb01d184ee3bc7a127d626158bc840b5874a94c129ff62689d12f16f537530709c54da6f3b71f67d705c4e09235b1dc9542e9d47ee8f2d0b8b9e + languageName: node + linkType: hard + +"@rollup/rollup-android-arm-eabi@npm:4.27.3": + version: 4.27.3 + resolution: "@rollup/rollup-android-arm-eabi@npm:4.27.3" conditions: os=android & cpu=arm languageName: node linkType: hard -"@rollup/rollup-android-arm64@npm:4.21.2": - version: 4.21.2 - resolution: "@rollup/rollup-android-arm64@npm:4.21.2" +"@rollup/rollup-android-arm64@npm:4.27.3": + version: 4.27.3 + resolution: "@rollup/rollup-android-arm64@npm:4.27.3" conditions: os=android & cpu=arm64 languageName: node linkType: hard -"@rollup/rollup-darwin-arm64@npm:4.21.2": - version: 4.21.2 - resolution: "@rollup/rollup-darwin-arm64@npm:4.21.2" +"@rollup/rollup-darwin-arm64@npm:4.27.3": + version: 4.27.3 + resolution: "@rollup/rollup-darwin-arm64@npm:4.27.3" conditions: os=darwin & cpu=arm64 languageName: node linkType: hard -"@rollup/rollup-darwin-x64@npm:4.21.2": - version: 4.21.2 - resolution: "@rollup/rollup-darwin-x64@npm:4.21.2" +"@rollup/rollup-darwin-x64@npm:4.27.3": + version: 4.27.3 + resolution: "@rollup/rollup-darwin-x64@npm:4.27.3" conditions: os=darwin & cpu=x64 languageName: node linkType: hard -"@rollup/rollup-linux-arm-gnueabihf@npm:4.21.2": - version: 4.21.2 - resolution: "@rollup/rollup-linux-arm-gnueabihf@npm:4.21.2" +"@rollup/rollup-freebsd-arm64@npm:4.27.3": + version: 4.27.3 + resolution: "@rollup/rollup-freebsd-arm64@npm:4.27.3" + conditions: os=freebsd & cpu=arm64 + languageName: node + linkType: hard + +"@rollup/rollup-freebsd-x64@npm:4.27.3": + version: 4.27.3 + resolution: "@rollup/rollup-freebsd-x64@npm:4.27.3" + conditions: os=freebsd & cpu=x64 + languageName: node + linkType: hard + +"@rollup/rollup-linux-arm-gnueabihf@npm:4.27.3": + version: 4.27.3 + resolution: "@rollup/rollup-linux-arm-gnueabihf@npm:4.27.3" conditions: os=linux & cpu=arm & libc=glibc languageName: node linkType: hard -"@rollup/rollup-linux-arm-musleabihf@npm:4.21.2": - version: 4.21.2 - resolution: "@rollup/rollup-linux-arm-musleabihf@npm:4.21.2" +"@rollup/rollup-linux-arm-musleabihf@npm:4.27.3": + version: 4.27.3 + resolution: "@rollup/rollup-linux-arm-musleabihf@npm:4.27.3" conditions: os=linux & cpu=arm & libc=musl languageName: node linkType: hard -"@rollup/rollup-linux-arm64-gnu@npm:4.21.2": - version: 4.21.2 - resolution: "@rollup/rollup-linux-arm64-gnu@npm:4.21.2" +"@rollup/rollup-linux-arm64-gnu@npm:4.27.3": + version: 4.27.3 + resolution: "@rollup/rollup-linux-arm64-gnu@npm:4.27.3" conditions: os=linux & cpu=arm64 & libc=glibc languageName: node linkType: hard 
-"@rollup/rollup-linux-arm64-musl@npm:4.21.2": - version: 4.21.2 - resolution: "@rollup/rollup-linux-arm64-musl@npm:4.21.2" +"@rollup/rollup-linux-arm64-musl@npm:4.27.3": + version: 4.27.3 + resolution: "@rollup/rollup-linux-arm64-musl@npm:4.27.3" conditions: os=linux & cpu=arm64 & libc=musl languageName: node linkType: hard -"@rollup/rollup-linux-powerpc64le-gnu@npm:4.21.2": - version: 4.21.2 - resolution: "@rollup/rollup-linux-powerpc64le-gnu@npm:4.21.2" +"@rollup/rollup-linux-powerpc64le-gnu@npm:4.27.3": + version: 4.27.3 + resolution: "@rollup/rollup-linux-powerpc64le-gnu@npm:4.27.3" conditions: os=linux & cpu=ppc64 & libc=glibc languageName: node linkType: hard -"@rollup/rollup-linux-riscv64-gnu@npm:4.21.2": - version: 4.21.2 - resolution: "@rollup/rollup-linux-riscv64-gnu@npm:4.21.2" +"@rollup/rollup-linux-riscv64-gnu@npm:4.27.3": + version: 4.27.3 + resolution: "@rollup/rollup-linux-riscv64-gnu@npm:4.27.3" conditions: os=linux & cpu=riscv64 & libc=glibc languageName: node linkType: hard -"@rollup/rollup-linux-s390x-gnu@npm:4.21.2": - version: 4.21.2 - resolution: "@rollup/rollup-linux-s390x-gnu@npm:4.21.2" +"@rollup/rollup-linux-s390x-gnu@npm:4.27.3": + version: 4.27.3 + resolution: "@rollup/rollup-linux-s390x-gnu@npm:4.27.3" conditions: os=linux & cpu=s390x & libc=glibc languageName: node linkType: hard -"@rollup/rollup-linux-x64-gnu@npm:4.21.2": - version: 4.21.2 - resolution: "@rollup/rollup-linux-x64-gnu@npm:4.21.2" +"@rollup/rollup-linux-x64-gnu@npm:4.27.3": + version: 4.27.3 + resolution: "@rollup/rollup-linux-x64-gnu@npm:4.27.3" conditions: os=linux & cpu=x64 & libc=glibc languageName: node linkType: hard -"@rollup/rollup-linux-x64-musl@npm:4.21.2": - version: 4.21.2 - resolution: "@rollup/rollup-linux-x64-musl@npm:4.21.2" +"@rollup/rollup-linux-x64-musl@npm:4.27.3": + version: 4.27.3 + resolution: "@rollup/rollup-linux-x64-musl@npm:4.27.3" conditions: os=linux & cpu=x64 & libc=musl languageName: node linkType: hard -"@rollup/rollup-win32-arm64-msvc@npm:4.21.2": - version: 4.21.2 - resolution: "@rollup/rollup-win32-arm64-msvc@npm:4.21.2" +"@rollup/rollup-win32-arm64-msvc@npm:4.27.3": + version: 4.27.3 + resolution: "@rollup/rollup-win32-arm64-msvc@npm:4.27.3" conditions: os=win32 & cpu=arm64 languageName: node linkType: hard -"@rollup/rollup-win32-ia32-msvc@npm:4.21.2": - version: 4.21.2 - resolution: "@rollup/rollup-win32-ia32-msvc@npm:4.21.2" +"@rollup/rollup-win32-ia32-msvc@npm:4.27.3": + version: 4.27.3 + resolution: "@rollup/rollup-win32-ia32-msvc@npm:4.27.3" conditions: os=win32 & cpu=ia32 languageName: node linkType: hard -"@rollup/rollup-win32-x64-msvc@npm:4.21.2": - version: 4.21.2 - resolution: "@rollup/rollup-win32-x64-msvc@npm:4.21.2" +"@rollup/rollup-win32-x64-msvc@npm:4.27.3": + version: 4.27.3 + resolution: "@rollup/rollup-win32-x64-msvc@npm:4.27.3" conditions: os=win32 & cpu=x64 languageName: node linkType: hard @@ -1395,6 +1692,138 @@ __metadata: languageName: node linkType: hard +"@swc/core-darwin-arm64@npm:1.9.3": + version: 1.9.3 + resolution: "@swc/core-darwin-arm64@npm:1.9.3" + conditions: os=darwin & cpu=arm64 + languageName: node + linkType: hard + +"@swc/core-darwin-x64@npm:1.9.3": + version: 1.9.3 + resolution: "@swc/core-darwin-x64@npm:1.9.3" + conditions: os=darwin & cpu=x64 + languageName: node + linkType: hard + +"@swc/core-linux-arm-gnueabihf@npm:1.9.3": + version: 1.9.3 + resolution: "@swc/core-linux-arm-gnueabihf@npm:1.9.3" + conditions: os=linux & cpu=arm + languageName: node + linkType: hard + +"@swc/core-linux-arm64-gnu@npm:1.9.3": + 
version: 1.9.3 + resolution: "@swc/core-linux-arm64-gnu@npm:1.9.3" + conditions: os=linux & cpu=arm64 & libc=glibc + languageName: node + linkType: hard + +"@swc/core-linux-arm64-musl@npm:1.9.3": + version: 1.9.3 + resolution: "@swc/core-linux-arm64-musl@npm:1.9.3" + conditions: os=linux & cpu=arm64 & libc=musl + languageName: node + linkType: hard + +"@swc/core-linux-x64-gnu@npm:1.9.3": + version: 1.9.3 + resolution: "@swc/core-linux-x64-gnu@npm:1.9.3" + conditions: os=linux & cpu=x64 & libc=glibc + languageName: node + linkType: hard + +"@swc/core-linux-x64-musl@npm:1.9.3": + version: 1.9.3 + resolution: "@swc/core-linux-x64-musl@npm:1.9.3" + conditions: os=linux & cpu=x64 & libc=musl + languageName: node + linkType: hard + +"@swc/core-win32-arm64-msvc@npm:1.9.3": + version: 1.9.3 + resolution: "@swc/core-win32-arm64-msvc@npm:1.9.3" + conditions: os=win32 & cpu=arm64 + languageName: node + linkType: hard + +"@swc/core-win32-ia32-msvc@npm:1.9.3": + version: 1.9.3 + resolution: "@swc/core-win32-ia32-msvc@npm:1.9.3" + conditions: os=win32 & cpu=ia32 + languageName: node + linkType: hard + +"@swc/core-win32-x64-msvc@npm:1.9.3": + version: 1.9.3 + resolution: "@swc/core-win32-x64-msvc@npm:1.9.3" + conditions: os=win32 & cpu=x64 + languageName: node + linkType: hard + +"@swc/core@npm:^1.7.0, @swc/core@npm:^1.7.26": + version: 1.9.3 + resolution: "@swc/core@npm:1.9.3" + dependencies: + "@swc/core-darwin-arm64": "npm:1.9.3" + "@swc/core-darwin-x64": "npm:1.9.3" + "@swc/core-linux-arm-gnueabihf": "npm:1.9.3" + "@swc/core-linux-arm64-gnu": "npm:1.9.3" + "@swc/core-linux-arm64-musl": "npm:1.9.3" + "@swc/core-linux-x64-gnu": "npm:1.9.3" + "@swc/core-linux-x64-musl": "npm:1.9.3" + "@swc/core-win32-arm64-msvc": "npm:1.9.3" + "@swc/core-win32-ia32-msvc": "npm:1.9.3" + "@swc/core-win32-x64-msvc": "npm:1.9.3" + "@swc/counter": "npm:^0.1.3" + "@swc/types": "npm:^0.1.17" + peerDependencies: + "@swc/helpers": "*" + dependenciesMeta: + "@swc/core-darwin-arm64": + optional: true + "@swc/core-darwin-x64": + optional: true + "@swc/core-linux-arm-gnueabihf": + optional: true + "@swc/core-linux-arm64-gnu": + optional: true + "@swc/core-linux-arm64-musl": + optional: true + "@swc/core-linux-x64-gnu": + optional: true + "@swc/core-linux-x64-musl": + optional: true + "@swc/core-win32-arm64-msvc": + optional: true + "@swc/core-win32-ia32-msvc": + optional: true + "@swc/core-win32-x64-msvc": + optional: true + peerDependenciesMeta: + "@swc/helpers": + optional: true + checksum: a9507a5be580518d51cf7f41821a89e1044be6f72930efbdf3877366c27e9ff1dbca3e1a7f18698679f8c345b6698f43cd80d7dfa24ba30dcab493de9b7a336e + languageName: node + linkType: hard + +"@swc/counter@npm:^0.1.3": + version: 0.1.3 + resolution: "@swc/counter@npm:0.1.3" + checksum: 8424f60f6bf8694cfd2a9bca45845bce29f26105cda8cf19cdb9fd3e78dc6338699e4db77a89ae449260bafa1cc6bec307e81e7fb96dbf7dcfce0eea55151356 + languageName: node + linkType: hard + +"@swc/types@npm:^0.1.17": + version: 0.1.17 + resolution: "@swc/types@npm:0.1.17" + dependencies: + "@swc/counter": "npm:^0.1.3" + checksum: 29f5c8933a16042956f1adb7383e836ed7646cbf679826e78b53fdd0c08e8572cb42152e527b6b530a9bd1052d33d0972f90f589761ccd252c12652c9b7a72fc + languageName: node + linkType: hard + "@tsconfig/node10@npm:^1.0.7": version: 1.0.11 resolution: "@tsconfig/node10@npm:1.0.11" @@ -1502,26 +1931,70 @@ __metadata: languageName: node linkType: hard -"@types/estree@npm:1.0.5, @types/estree@npm:^1.0.0, @types/estree@npm:^1.0.5": - version: 1.0.5 - resolution: "@types/estree@npm:1.0.5" - checksum: 
b3b0e334288ddb407c7b3357ca67dbee75ee22db242ca7c56fe27db4e1a31989cb8af48a84dd401deb787fe10cc6b2ab1ee82dc4783be87ededbe3d53c79c70d +"@types/eslint-scope@npm:^3.7.7": + version: 3.7.7 + resolution: "@types/eslint-scope@npm:3.7.7" + dependencies: + "@types/eslint": "npm:*" + "@types/estree": "npm:*" + checksum: a0ecbdf2f03912679440550817ff77ef39a30fa8bfdacaf6372b88b1f931828aec392f52283240f0d648cf3055c5ddc564544a626bcf245f3d09fcb099ebe3cc languageName: node linkType: hard -"@types/express-serve-static-core@npm:*, @types/express-serve-static-core@npm:^4.17.33": - version: 4.19.5 - resolution: "@types/express-serve-static-core@npm:4.19.5" +"@types/eslint@npm:*": + version: 9.6.1 + resolution: "@types/eslint@npm:9.6.1" + dependencies: + "@types/estree": "npm:*" + "@types/json-schema": "npm:*" + checksum: 69ba24fee600d1e4c5abe0df086c1a4d798abf13792d8cfab912d76817fe1a894359a1518557d21237fbaf6eda93c5ab9309143dee4c59ef54336d1b3570420e + languageName: node + linkType: hard + +"@types/estree@npm:*, @types/estree@npm:1.0.6, @types/estree@npm:^1.0.0, @types/estree@npm:^1.0.6": + version: 1.0.6 + resolution: "@types/estree@npm:1.0.6" + checksum: cdfd751f6f9065442cd40957c07fd80361c962869aa853c1c2fd03e101af8b9389d8ff4955a43a6fcfa223dd387a089937f95be0f3eec21ca527039fd2d9859a + languageName: node + linkType: hard + +"@types/express-serve-static-core@npm:*, @types/express-serve-static-core@npm:^5.0.0": + version: 5.0.1 + resolution: "@types/express-serve-static-core@npm:5.0.1" + dependencies: + "@types/node": "npm:*" + "@types/qs": "npm:*" + "@types/range-parser": "npm:*" + "@types/send": "npm:*" + checksum: 42919f9de55e9fd1524dc72c2f06a3f3e7fbd21f42ccc6e71ea2d530c8942cc0004d468f09e8557bf51c585d9673efd455b9668c2cd2416f5d61e70dc1bc49ac + languageName: node + linkType: hard + +"@types/express-serve-static-core@npm:^4.17.33": + version: 4.19.6 + resolution: "@types/express-serve-static-core@npm:4.19.6" dependencies: "@types/node": "npm:*" "@types/qs": "npm:*" "@types/range-parser": "npm:*" "@types/send": "npm:*" - checksum: ba8d8d976ab797b2602c60e728802ff0c98a00f13d420d82770f3661b67fa36ea9d3be0b94f2ddd632afe1fbc6e41620008b01db7e4fabdd71a2beb5539b0725 + checksum: 4281f4ead71723f376b3ddf64868ae26244d434d9906c101cf8d436d4b5c779d01bd046e4ea0ed1a394d3e402216fabfa22b1fa4dba501061cd7c81c54045983 languageName: node linkType: hard -"@types/express@npm:*, @types/express@npm:^4.17.13": +"@types/express@npm:*": + version: 5.0.0 + resolution: "@types/express@npm:5.0.0" + dependencies: + "@types/body-parser": "npm:*" + "@types/express-serve-static-core": "npm:^5.0.0" + "@types/qs": "npm:*" + "@types/serve-static": "npm:*" + checksum: 0d74b53aefa69c3b3817ee9b5145fd50d7dbac52a8986afc2d7500085c446656d0b6dc13158c04e2d9f18f4324d4d93b0452337c5ff73dd086dca3e4ff11f47b + languageName: node + linkType: hard + +"@types/express@npm:^4.17.13": version: 4.17.21 resolution: "@types/express@npm:4.17.21" dependencies: @@ -1601,16 +2074,16 @@ __metadata: linkType: hard "@types/jest@npm:^29.5.0": - version: 29.5.12 - resolution: "@types/jest@npm:29.5.12" + version: 29.5.14 + resolution: "@types/jest@npm:29.5.14" dependencies: expect: "npm:^29.0.0" pretty-format: "npm:^29.0.0" - checksum: 25fc8e4c611fa6c4421e631432e9f0a6865a8cb07c9815ec9ac90d630271cad773b2ee5fe08066f7b95bebd18bb967f8ce05d018ee9ab0430f9dfd1d84665b6f + checksum: 18e0712d818890db8a8dab3d91e9ea9f7f19e3f83c2e50b312f557017dc81466207a71f3ed79cf4428e813ba939954fa26ffa0a9a7f153181ba174581b1c2aed languageName: node linkType: hard -"@types/json-schema@npm:^7.0.12, 
@types/json-schema@npm:^7.0.8, @types/json-schema@npm:^7.0.9": +"@types/json-schema@npm:*, @types/json-schema@npm:^7.0.12, @types/json-schema@npm:^7.0.15, @types/json-schema@npm:^7.0.8, @types/json-schema@npm:^7.0.9": version: 7.0.15 resolution: "@types/json-schema@npm:7.0.15" checksum: a996a745e6c5d60292f36731dd41341339d4eeed8180bb09226e5c8d23759067692b1d88e5d91d72ee83dfc00d3aca8e7bd43ea120516c17922cbcb7c3e252db @@ -1649,35 +2122,35 @@ __metadata: languageName: node linkType: hard -"@types/node@npm:*, @types/node@npm:^22.5.2": - version: 22.5.2 - resolution: "@types/node@npm:22.5.2" +"@types/node@npm:*, @types/node@npm:^22.5.5": + version: 22.9.1 + resolution: "@types/node@npm:22.9.1" dependencies: - undici-types: "npm:~6.19.2" - checksum: 624a7fd76229eacc6c158eb3b9afd55b811d7f01976c5f92c630d5b9d47047cc218928c343988484a165ac400e5eb6fe70ea300fc7242deeb0e920c7724290f6 + undici-types: "npm:~6.19.8" + checksum: ea489ae603aa8874e4e88980aab6f2dad09c755da779c88dd142983bfe9609803c89415ca7781f723072934066f63daf2b3339ef084a8ad1a8079cf3958be243 languageName: node linkType: hard "@types/node@npm:^20.11.17, @types/node@npm:^20.5.9": - version: 20.16.3 - resolution: "@types/node@npm:20.16.3" + version: 20.17.6 + resolution: "@types/node@npm:20.17.6" dependencies: undici-types: "npm:~6.19.2" - checksum: 907c01d58ae36695fbed0b101e7a14cc2e0c5b9b2ba7904ef21cef093e4aac0649ac2a7a283fc94e19311dd0551d778445dd45fcf2d8bd45c494c9ecd802de69 + checksum: 5918c7ff8368bbe6d06d5e739c8ae41a9db41628f28760c60cda797be7d233406f07c4d0e6fdd960a0a342ec4173c2217eb6624e06bece21c1f1dd1b92805c15 languageName: node linkType: hard "@types/prop-types@npm:*": - version: 15.7.12 - resolution: "@types/prop-types@npm:15.7.12" - checksum: 1babcc7db6a1177779f8fde0ccc78d64d459906e6ef69a4ed4dd6339c920c2e05b074ee5a92120fe4e9d9f1a01c952f843ebd550bee2332fc2ef81d1706878f8 + version: 15.7.13 + resolution: "@types/prop-types@npm:15.7.13" + checksum: 1b20fc67281902c6743379960247bc161f3f0406ffc0df8e7058745a85ea1538612109db0406290512947f9632fe9e10e7337bf0ce6338a91d6c948df16a7c61 languageName: node linkType: hard "@types/qs@npm:*": - version: 6.9.15 - resolution: "@types/qs@npm:6.9.15" - checksum: 49c5ff75ca3adb18a1939310042d273c9fc55920861bd8e5100c8a923b3cda90d759e1a95e18334092da1c8f7b820084687770c83a1ccef04fb2c6908117c823 + version: 6.9.17 + resolution: "@types/qs@npm:6.9.17" + checksum: a183fa0b3464267f8f421e2d66d960815080e8aab12b9aadab60479ba84183b1cdba8f4eff3c06f76675a8e42fe6a3b1313ea76c74f2885c3e25d32499c17d1b languageName: node linkType: hard @@ -1688,22 +2161,22 @@ __metadata: languageName: node linkType: hard -"@types/react-dom@npm:^18.2.7": - version: 18.3.0 - resolution: "@types/react-dom@npm:18.3.0" +"@types/react-dom@npm:^18.2.7, @types/react-dom@npm:^18.3.1": + version: 18.3.1 + resolution: "@types/react-dom@npm:18.3.1" dependencies: "@types/react": "npm:*" - checksum: 6c90d2ed72c5a0e440d2c75d99287e4b5df3e7b011838cdc03ae5cd518ab52164d86990e73246b9d812eaf02ec351d74e3b4f5bd325bf341e13bf980392fd53b + checksum: 8b416551c60bb6bd8ec10e198c957910cfb271bc3922463040b0d57cf4739cdcd24b13224f8d68f10318926e1ec3cd69af0af79f0291b599a992f8c80d47f1eb languageName: node linkType: hard -"@types/react@npm:*, @types/react@npm:^18.2.15": - version: 18.3.5 - resolution: "@types/react@npm:18.3.5" +"@types/react@npm:*, @types/react@npm:^18.2.15, @types/react@npm:^18.3.12": + version: 18.3.12 + resolution: "@types/react@npm:18.3.12" dependencies: "@types/prop-types": "npm:*" csstype: "npm:^3.0.2" - checksum: 
548b1d3d7c2f0242fbfdbbd658731b4ce69a134be072fa83e6ab516f2840402a3f20e3e7f72e95133b23d4880ef24a6d864050dc8e1f7c68f39fa87ca8445917 + checksum: 8bae8d9a41619804561574792e29112b413044eb0d53746dde2b9720c1f9a59f71c895bbd7987cd8ce9500b00786e53bc032dced38cddf42910458e145675290 languageName: node linkType: hard @@ -1775,11 +2248,11 @@ __metadata: linkType: hard "@types/ws@npm:^8.5.5": - version: 8.5.12 - resolution: "@types/ws@npm:8.5.12" + version: 8.5.13 + resolution: "@types/ws@npm:8.5.13" dependencies: "@types/node": "npm:*" - checksum: 3fd77c9e4e05c24ce42bfc7647f7506b08c40a40fe2aea236ef6d4e96fc7cb4006a81ed1b28ec9c457e177a74a72924f4768b7b4652680b42dfd52bc380e15f9 + checksum: a5430aa479bde588e69cb9175518d72f9338b6999e3b2ae16fc03d3bdcff8347e486dc031e4ed14601260463c07e1f9a0d7511dfc653712b047c439c680b0b34 languageName: node linkType: hard @@ -1799,6 +2272,29 @@ __metadata: languageName: node linkType: hard +"@typescript-eslint/eslint-plugin@npm:8.15.0": + version: 8.15.0 + resolution: "@typescript-eslint/eslint-plugin@npm:8.15.0" + dependencies: + "@eslint-community/regexpp": "npm:^4.10.0" + "@typescript-eslint/scope-manager": "npm:8.15.0" + "@typescript-eslint/type-utils": "npm:8.15.0" + "@typescript-eslint/utils": "npm:8.15.0" + "@typescript-eslint/visitor-keys": "npm:8.15.0" + graphemer: "npm:^1.4.0" + ignore: "npm:^5.3.1" + natural-compare: "npm:^1.4.0" + ts-api-utils: "npm:^1.3.0" + peerDependencies: + "@typescript-eslint/parser": ^8.0.0 || ^8.0.0-alpha.0 + eslint: ^8.57.0 || ^9.0.0 + peerDependenciesMeta: + typescript: + optional: true + checksum: 90ef10cc7d37a81abec4f4a3ffdfc3a0da8e99d949e03c75437e96e8ab2e896e34b85ab64718690180a7712581031b8611c5d8e7666d6ed4d60b9ace834d58e3 + languageName: node + linkType: hard + "@typescript-eslint/eslint-plugin@npm:^6.0.0": version: 6.21.0 resolution: "@typescript-eslint/eslint-plugin@npm:6.21.0" @@ -1824,6 +2320,24 @@ __metadata: languageName: node linkType: hard +"@typescript-eslint/parser@npm:8.15.0": + version: 8.15.0 + resolution: "@typescript-eslint/parser@npm:8.15.0" + dependencies: + "@typescript-eslint/scope-manager": "npm:8.15.0" + "@typescript-eslint/types": "npm:8.15.0" + "@typescript-eslint/typescript-estree": "npm:8.15.0" + "@typescript-eslint/visitor-keys": "npm:8.15.0" + debug: "npm:^4.3.4" + peerDependencies: + eslint: ^8.57.0 || ^9.0.0 + peerDependenciesMeta: + typescript: + optional: true + checksum: 19c25aea0dc51faa758701a5319a89950fd30494d9d645db8ced84fb60714c5e7d4b51fc4ee8ccb07ddefec88c51ee307ee7e49addd6330ee8f3e7ee9ba329fc + languageName: node + linkType: hard + "@typescript-eslint/parser@npm:^6.0.0": version: 6.21.0 resolution: "@typescript-eslint/parser@npm:6.21.0" @@ -1852,6 +2366,16 @@ __metadata: languageName: node linkType: hard +"@typescript-eslint/scope-manager@npm:8.15.0": + version: 8.15.0 + resolution: "@typescript-eslint/scope-manager@npm:8.15.0" + dependencies: + "@typescript-eslint/types": "npm:8.15.0" + "@typescript-eslint/visitor-keys": "npm:8.15.0" + checksum: c27dfdcea4100cc2d6fa967f857067cbc93155b55e648f9f10887a1b9372bb76cf864f7c804f3fa48d7868d9461cdef10bcea3dab7637d5337e8aa8042dc08b9 + languageName: node + linkType: hard + "@typescript-eslint/type-utils@npm:6.21.0": version: 6.21.0 resolution: "@typescript-eslint/type-utils@npm:6.21.0" @@ -1869,6 +2393,23 @@ __metadata: languageName: node linkType: hard +"@typescript-eslint/type-utils@npm:8.15.0": + version: 8.15.0 + resolution: "@typescript-eslint/type-utils@npm:8.15.0" + dependencies: + "@typescript-eslint/typescript-estree": "npm:8.15.0" + 
"@typescript-eslint/utils": "npm:8.15.0" + debug: "npm:^4.3.4" + ts-api-utils: "npm:^1.3.0" + peerDependencies: + eslint: ^8.57.0 || ^9.0.0 + peerDependenciesMeta: + typescript: + optional: true + checksum: 20f09c79c83b38a962cf7eff10d47a2c01bcc0bab7bf6d762594221cd89023ef8c7aec26751c47b524f53f5c8d38bba55a282529b3df82d5f5ab4350496316f9 + languageName: node + linkType: hard + "@typescript-eslint/types@npm:6.21.0": version: 6.21.0 resolution: "@typescript-eslint/types@npm:6.21.0" @@ -1876,6 +2417,13 @@ __metadata: languageName: node linkType: hard +"@typescript-eslint/types@npm:8.15.0": + version: 8.15.0 + resolution: "@typescript-eslint/types@npm:8.15.0" + checksum: 84abc6fd954aff13822a76ac49efdcb90a55c0025c20eee5d8cebcfb68faff33b79bbc711ea524e0209cecd90c5ee3a5f92babc7083c081d3a383a0710264a41 + languageName: node + linkType: hard + "@typescript-eslint/typescript-estree@npm:6.21.0": version: 6.21.0 resolution: "@typescript-eslint/typescript-estree@npm:6.21.0" @@ -1895,6 +2443,25 @@ __metadata: languageName: node linkType: hard +"@typescript-eslint/typescript-estree@npm:8.15.0": + version: 8.15.0 + resolution: "@typescript-eslint/typescript-estree@npm:8.15.0" + dependencies: + "@typescript-eslint/types": "npm:8.15.0" + "@typescript-eslint/visitor-keys": "npm:8.15.0" + debug: "npm:^4.3.4" + fast-glob: "npm:^3.3.2" + is-glob: "npm:^4.0.3" + minimatch: "npm:^9.0.4" + semver: "npm:^7.6.0" + ts-api-utils: "npm:^1.3.0" + peerDependenciesMeta: + typescript: + optional: true + checksum: 3af5c129532db3575349571bbf64d32aeccc4f4df924ac447f5d8f6af8b387148df51965eb2c9b99991951d3dadef4f2509d7ce69bf34a2885d013c040762412 + languageName: node + linkType: hard + "@typescript-eslint/utils@npm:6.21.0": version: 6.21.0 resolution: "@typescript-eslint/utils@npm:6.21.0" @@ -1912,6 +2479,23 @@ __metadata: languageName: node linkType: hard +"@typescript-eslint/utils@npm:8.15.0": + version: 8.15.0 + resolution: "@typescript-eslint/utils@npm:8.15.0" + dependencies: + "@eslint-community/eslint-utils": "npm:^4.4.0" + "@typescript-eslint/scope-manager": "npm:8.15.0" + "@typescript-eslint/types": "npm:8.15.0" + "@typescript-eslint/typescript-estree": "npm:8.15.0" + peerDependencies: + eslint: ^8.57.0 || ^9.0.0 + peerDependenciesMeta: + typescript: + optional: true + checksum: 65743f51845a1f6fd2d21f66ca56182ba33e966716bdca73d30b7a67c294e47889c322de7d7b90ab0818296cd33c628e5eeeb03cec7ef2f76c47de7a453eeda2 + languageName: node + linkType: hard + "@typescript-eslint/visitor-keys@npm:6.21.0": version: 6.21.0 resolution: "@typescript-eslint/visitor-keys@npm:6.21.0" @@ -1922,6 +2506,16 @@ __metadata: languageName: node linkType: hard +"@typescript-eslint/visitor-keys@npm:8.15.0": + version: 8.15.0 + resolution: "@typescript-eslint/visitor-keys@npm:8.15.0" + dependencies: + "@typescript-eslint/types": "npm:8.15.0" + eslint-visitor-keys: "npm:^4.2.0" + checksum: 02a954c3752c4328482a884eb1da06ca8fb72ae78ef28f1d854b18f3779406ed47263af22321cf3f65a637ec7584e5f483e34a263b5c8cec60ec85aebc263574 + languageName: node + linkType: hard + "@ungap/structured-clone@npm:^1.2.0": version: 1.2.0 resolution: "@ungap/structured-clone@npm:1.2.0" @@ -1929,217 +2523,246 @@ __metadata: languageName: node linkType: hard -"@vitest/expect@npm:2.0.5": - version: 2.0.5 - resolution: "@vitest/expect@npm:2.0.5" +"@vitejs/plugin-react-swc@npm:^3.5.0": + version: 3.7.1 + resolution: "@vitejs/plugin-react-swc@npm:3.7.1" dependencies: - "@vitest/spy": "npm:2.0.5" - "@vitest/utils": "npm:2.0.5" - chai: "npm:^5.1.1" + "@swc/core": "npm:^1.7.26" + peerDependencies: + 
vite: ^4 || ^5 + checksum: 2d613e69c0d0b809c94df80ca2b0caf39c50f0b98aa1f8599fd086bc37dac1449898eb6572000e1c133313137cac93440c4cb0861e05820c78bd2c07a52e64a8 + languageName: node + linkType: hard + +"@vitest/expect@npm:2.1.5": + version: 2.1.5 + resolution: "@vitest/expect@npm:2.1.5" + dependencies: + "@vitest/spy": "npm:2.1.5" + "@vitest/utils": "npm:2.1.5" + chai: "npm:^5.1.2" tinyrainbow: "npm:^1.2.0" - checksum: 08cb1b0f106d16a5b60db733e3d436fa5eefc68571488eb570dfe4f599f214ab52e4342273b03dbe12331cc6c0cdc325ac6c94f651ad254cd62f3aa0e3d185aa + checksum: 68f7011e7883dea1d1974fa05d30d7a1eff72f08741312e84f1b138f474e75e9db7ff7ced23a50fc16605baa123a2f10ef9a834b418e03dbeed23d1e0043fc90 + languageName: node + linkType: hard + +"@vitest/mocker@npm:2.1.5": + version: 2.1.5 + resolution: "@vitest/mocker@npm:2.1.5" + dependencies: + "@vitest/spy": "npm:2.1.5" + estree-walker: "npm:^3.0.3" + magic-string: "npm:^0.30.12" + peerDependencies: + msw: ^2.4.9 + vite: ^5.0.0 + peerDependenciesMeta: + msw: + optional: true + vite: + optional: true + checksum: 57034aa3476768133042c6b4193d71dbd4ace98c39241ae2c1fa21c33d5afd6d469de86511cdc59a0d7dd5585c05ac605406c60b0ae3cfbf3f650326642d4aca languageName: node linkType: hard -"@vitest/pretty-format@npm:2.0.5, @vitest/pretty-format@npm:^2.0.5": - version: 2.0.5 - resolution: "@vitest/pretty-format@npm:2.0.5" +"@vitest/pretty-format@npm:2.1.5, @vitest/pretty-format@npm:^2.1.5": + version: 2.1.5 + resolution: "@vitest/pretty-format@npm:2.1.5" dependencies: tinyrainbow: "npm:^1.2.0" - checksum: 236c0798c5170a0b5ad5d4bd06118533738e820b4dd30079d8fbcb15baee949d41c60f42a9f769906c4a5ce366d7ef11279546070646c0efc03128c220c31f37 + checksum: d6667f1e5d272f557f8cca440af65645346b5aa74a04041466859087f14a78a296e3f1928caa05de0cc558880cc8a49ce14696fef7b8f5dbc3eb856d672b0abf languageName: node linkType: hard -"@vitest/runner@npm:2.0.5": - version: 2.0.5 - resolution: "@vitest/runner@npm:2.0.5" +"@vitest/runner@npm:2.1.5": + version: 2.1.5 + resolution: "@vitest/runner@npm:2.1.5" dependencies: - "@vitest/utils": "npm:2.0.5" + "@vitest/utils": "npm:2.1.5" pathe: "npm:^1.1.2" - checksum: d0ed3302a7e015bf44b7c0df9d8f7da163659e082d86f9406944b5a31a61ab9ddc1de530e06176d1f4ef0bde994b44bff4c7dab62aacdc235c8fc04b98e4a72a + checksum: d39ea4c6f8805aa3e52130ac0a3d325506a4d4bb97d0d7ac80734beb21d9a496ee50586de9801f4b66f2dc8ff38f27a75065a258fd3633bc1cfe68bd9c1dd73e languageName: node linkType: hard -"@vitest/snapshot@npm:2.0.5": - version: 2.0.5 - resolution: "@vitest/snapshot@npm:2.0.5" +"@vitest/snapshot@npm:2.1.5": + version: 2.1.5 + resolution: "@vitest/snapshot@npm:2.1.5" dependencies: - "@vitest/pretty-format": "npm:2.0.5" - magic-string: "npm:^0.30.10" + "@vitest/pretty-format": "npm:2.1.5" + magic-string: "npm:^0.30.12" pathe: "npm:^1.1.2" - checksum: 7bf38474248f5ae0aac6afad511785d2b7a023ac5158803c2868fd172b5b9c1a569fb1dd64a09a49e43fd342cab71ea485ada89b7f08d37b1622a5a0ac00271d + checksum: 3dc44b5a043acbbd15e08c3c0519ef5a344d06ade10ee9522b4e4305f4826f2be8353b58d0b6e11aa272078ba42ff0d2ffa62368b6e0cf996ad0d7977df9f22f languageName: node linkType: hard -"@vitest/spy@npm:2.0.5": - version: 2.0.5 - resolution: "@vitest/spy@npm:2.0.5" +"@vitest/spy@npm:2.1.5": + version: 2.1.5 + resolution: "@vitest/spy@npm:2.1.5" dependencies: - tinyspy: "npm:^3.0.0" - checksum: 70634c21921eb271b54d2986c21d7ab6896a31c0f4f1d266940c9bafb8ac36237846d6736638cbf18b958bd98e5261b158a6944352742accfde50b7818ff655e + tinyspy: "npm:^3.0.2" + checksum: 
c5222cc7074db5705573e5da674b8488f9e46d61a2bd64e992f5f5819feff35f015e8d0236c7e07d1870bddf5d36dc0622f674c071ab4ca8fa4f4f5d02172315 languageName: node linkType: hard -"@vitest/utils@npm:2.0.5": - version: 2.0.5 - resolution: "@vitest/utils@npm:2.0.5" +"@vitest/utils@npm:2.1.5": + version: 2.1.5 + resolution: "@vitest/utils@npm:2.1.5" dependencies: - "@vitest/pretty-format": "npm:2.0.5" - estree-walker: "npm:^3.0.3" - loupe: "npm:^3.1.1" + "@vitest/pretty-format": "npm:2.1.5" + loupe: "npm:^3.1.2" tinyrainbow: "npm:^1.2.0" - checksum: 0d1de748298f07a50281e1ba058b05dcd58da3280c14e6f016265e950bd79adab6b97822de8f0ea82d3070f585654801a9b1bcf26db4372e51cf7746bf86d73b + checksum: 3d1e65025e418948b215b8856548a91856522660d898b872485a91acf397e085e90968ee9c3f521589b5274717da32e954ef8a549aa60cc1c3338224fdfb4c5e languageName: node linkType: hard -"@webassemblyjs/ast@npm:1.12.1, @webassemblyjs/ast@npm:^1.12.1": - version: 1.12.1 - resolution: "@webassemblyjs/ast@npm:1.12.1" +"@webassemblyjs/ast@npm:1.14.1, @webassemblyjs/ast@npm:^1.12.1": + version: 1.14.1 + resolution: "@webassemblyjs/ast@npm:1.14.1" dependencies: - "@webassemblyjs/helper-numbers": "npm:1.11.6" - "@webassemblyjs/helper-wasm-bytecode": "npm:1.11.6" - checksum: ba7f2b96c6e67e249df6156d02c69eb5f1bd18d5005303cdc42accb053bebbbde673826e54db0437c9748e97abd218366a1d13fa46859b23cde611b6b409998c + "@webassemblyjs/helper-numbers": "npm:1.13.2" + "@webassemblyjs/helper-wasm-bytecode": "npm:1.13.2" + checksum: 67a59be8ed50ddd33fbb2e09daa5193ac215bf7f40a9371be9a0d9797a114d0d1196316d2f3943efdb923a3d809175e1563a3cb80c814fb8edccd1e77494972b languageName: node linkType: hard -"@webassemblyjs/floating-point-hex-parser@npm:1.11.6": - version: 1.11.6 - resolution: "@webassemblyjs/floating-point-hex-parser@npm:1.11.6" - checksum: 37fe26f89e18e4ca0e7d89cfe3b9f17cfa327d7daf906ae01400416dbb2e33c8a125b4dc55ad7ff405e5fcfb6cf0d764074c9bc532b9a31a71e762be57d2ea0a +"@webassemblyjs/floating-point-hex-parser@npm:1.13.2": + version: 1.13.2 + resolution: "@webassemblyjs/floating-point-hex-parser@npm:1.13.2" + checksum: 0e88bdb8b50507d9938be64df0867f00396b55eba9df7d3546eb5dc0ca64d62e06f8d881ec4a6153f2127d0f4c11d102b6e7d17aec2f26bb5ff95a5e60652412 languageName: node linkType: hard -"@webassemblyjs/helper-api-error@npm:1.11.6": - version: 1.11.6 - resolution: "@webassemblyjs/helper-api-error@npm:1.11.6" - checksum: a681ed51863e4ff18cf38d223429f414894e5f7496856854d9a886eeddcee32d7c9f66290f2919c9bb6d2fc2b2fae3f989b6a1e02a81e829359738ea0c4d371a +"@webassemblyjs/helper-api-error@npm:1.13.2": + version: 1.13.2 + resolution: "@webassemblyjs/helper-api-error@npm:1.13.2" + checksum: 31be497f996ed30aae4c08cac3cce50c8dcd5b29660383c0155fce1753804fc55d47fcba74e10141c7dd2899033164e117b3bcfcda23a6b043e4ded4f1003dfb languageName: node linkType: hard -"@webassemblyjs/helper-buffer@npm:1.12.1": - version: 1.12.1 - resolution: "@webassemblyjs/helper-buffer@npm:1.12.1" - checksum: 0270724afb4601237410f7fd845ab58ccda1d5456a8783aadfb16eaaf3f2c9610c28e4a5bcb6ad880cde5183c82f7f116d5ccfc2310502439d33f14b6888b48a +"@webassemblyjs/helper-buffer@npm:1.14.1": + version: 1.14.1 + resolution: "@webassemblyjs/helper-buffer@npm:1.14.1" + checksum: 0d54105dc373c0fe6287f1091e41e3a02e36cdc05e8cf8533cdc16c59ff05a646355415893449d3768cda588af451c274f13263300a251dc11a575bc4c9bd210 languageName: node linkType: hard -"@webassemblyjs/helper-numbers@npm:1.11.6": - version: 1.11.6 - resolution: "@webassemblyjs/helper-numbers@npm:1.11.6" +"@webassemblyjs/helper-numbers@npm:1.13.2": + version: 1.13.2 + resolution: 
"@webassemblyjs/helper-numbers@npm:1.13.2" dependencies: - "@webassemblyjs/floating-point-hex-parser": "npm:1.11.6" - "@webassemblyjs/helper-api-error": "npm:1.11.6" + "@webassemblyjs/floating-point-hex-parser": "npm:1.13.2" + "@webassemblyjs/helper-api-error": "npm:1.13.2" "@xtuc/long": "npm:4.2.2" - checksum: c7d5afc0ff3bd748339b466d8d2f27b908208bf3ff26b2e8e72c39814479d486e0dca6f3d4d776fd9027c1efe05b5c0716c57a23041eb34473892b2731c33af3 + checksum: 9c46852f31b234a8fb5a5a9d3f027bc542392a0d4de32f1a9c0075d5e8684aa073cb5929b56df565500b3f9cc0a2ab983b650314295b9bf208d1a1651bfc825a languageName: node linkType: hard -"@webassemblyjs/helper-wasm-bytecode@npm:1.11.6": - version: 1.11.6 - resolution: "@webassemblyjs/helper-wasm-bytecode@npm:1.11.6" - checksum: 79d2bebdd11383d142745efa32781249745213af8e022651847382685ca76709f83e1d97adc5f0d3c2b8546bf02864f8b43a531fdf5ca0748cb9e4e0ef2acaa5 +"@webassemblyjs/helper-wasm-bytecode@npm:1.13.2": + version: 1.13.2 + resolution: "@webassemblyjs/helper-wasm-bytecode@npm:1.13.2" + checksum: c4355d14f369b30cf3cbdd3acfafc7d0488e086be6d578e3c9780bd1b512932352246be96e034e2a7fcfba4f540ec813352f312bfcbbfe5bcfbf694f82ccc682 languageName: node linkType: hard -"@webassemblyjs/helper-wasm-section@npm:1.12.1": - version: 1.12.1 - resolution: "@webassemblyjs/helper-wasm-section@npm:1.12.1" +"@webassemblyjs/helper-wasm-section@npm:1.14.1": + version: 1.14.1 + resolution: "@webassemblyjs/helper-wasm-section@npm:1.14.1" dependencies: - "@webassemblyjs/ast": "npm:1.12.1" - "@webassemblyjs/helper-buffer": "npm:1.12.1" - "@webassemblyjs/helper-wasm-bytecode": "npm:1.11.6" - "@webassemblyjs/wasm-gen": "npm:1.12.1" - checksum: 0546350724d285ae3c26e6fc444be4c3b5fb824f3be0ec8ceb474179dc3f4430336dd2e36a44b3e3a1a6815960e5eec98cd9b3a8ec66dc53d86daedd3296a6a2 + "@webassemblyjs/ast": "npm:1.14.1" + "@webassemblyjs/helper-buffer": "npm:1.14.1" + "@webassemblyjs/helper-wasm-bytecode": "npm:1.13.2" + "@webassemblyjs/wasm-gen": "npm:1.14.1" + checksum: 1f9b33731c3c6dbac3a9c483269562fa00d1b6a4e7133217f40e83e975e636fd0f8736e53abd9a47b06b66082ecc976c7384391ab0a68e12d509ea4e4b948d64 languageName: node linkType: hard -"@webassemblyjs/ieee754@npm:1.11.6": - version: 1.11.6 - resolution: "@webassemblyjs/ieee754@npm:1.11.6" +"@webassemblyjs/ieee754@npm:1.13.2": + version: 1.13.2 + resolution: "@webassemblyjs/ieee754@npm:1.13.2" dependencies: "@xtuc/ieee754": "npm:^1.2.0" - checksum: 59de0365da450322c958deadade5ec2d300c70f75e17ae55de3c9ce564deff5b429e757d107c7ec69bd0ba169c6b6cc2ff66293ab7264a7053c829b50ffa732f + checksum: 2e732ca78c6fbae3c9b112f4915d85caecdab285c0b337954b180460290ccd0fb00d2b1dc4bb69df3504abead5191e0d28d0d17dfd6c9d2f30acac8c4961c8a7 languageName: node linkType: hard -"@webassemblyjs/leb128@npm:1.11.6": - version: 1.11.6 - resolution: "@webassemblyjs/leb128@npm:1.11.6" +"@webassemblyjs/leb128@npm:1.13.2": + version: 1.13.2 + resolution: "@webassemblyjs/leb128@npm:1.13.2" dependencies: "@xtuc/long": "npm:4.2.2" - checksum: cb344fc04f1968209804de4da018679c5d4708a03b472a33e0fa75657bb024978f570d3ccf9263b7f341f77ecaa75d0e051b9cd4b7bb17a339032cfd1c37f96e + checksum: dad5ef9e383c8ab523ce432dfd80098384bf01c45f70eb179d594f85ce5db2f80fa8c9cba03adafd85684e6d6310f0d3969a882538975989919329ac4c984659 languageName: node linkType: hard -"@webassemblyjs/utf8@npm:1.11.6": - version: 1.11.6 - resolution: "@webassemblyjs/utf8@npm:1.11.6" - checksum: 14d6c24751a89ad9d801180b0d770f30a853c39f035a15fbc96266d6ac46355227abd27a3fd2eeaa97b4294ced2440a6b012750ae17bafe1a7633029a87b6bee 
+"@webassemblyjs/utf8@npm:1.13.2": + version: 1.13.2 + resolution: "@webassemblyjs/utf8@npm:1.13.2" + checksum: d3fac9130b0e3e5a1a7f2886124a278e9323827c87a2b971e6d0da22a2ba1278ac9f66a4f2e363ecd9fac8da42e6941b22df061a119e5c0335f81006de9ee799 languageName: node linkType: hard "@webassemblyjs/wasm-edit@npm:^1.12.1": - version: 1.12.1 - resolution: "@webassemblyjs/wasm-edit@npm:1.12.1" + version: 1.14.1 + resolution: "@webassemblyjs/wasm-edit@npm:1.14.1" dependencies: - "@webassemblyjs/ast": "npm:1.12.1" - "@webassemblyjs/helper-buffer": "npm:1.12.1" - "@webassemblyjs/helper-wasm-bytecode": "npm:1.11.6" - "@webassemblyjs/helper-wasm-section": "npm:1.12.1" - "@webassemblyjs/wasm-gen": "npm:1.12.1" - "@webassemblyjs/wasm-opt": "npm:1.12.1" - "@webassemblyjs/wasm-parser": "npm:1.12.1" - "@webassemblyjs/wast-printer": "npm:1.12.1" - checksum: 972f5e6c522890743999e0ed45260aae728098801c6128856b310dd21f1ee63435fc7b518e30e0ba1cdafd0d1e38275829c1e4451c3536a1d9e726e07a5bba0b + "@webassemblyjs/ast": "npm:1.14.1" + "@webassemblyjs/helper-buffer": "npm:1.14.1" + "@webassemblyjs/helper-wasm-bytecode": "npm:1.13.2" + "@webassemblyjs/helper-wasm-section": "npm:1.14.1" + "@webassemblyjs/wasm-gen": "npm:1.14.1" + "@webassemblyjs/wasm-opt": "npm:1.14.1" + "@webassemblyjs/wasm-parser": "npm:1.14.1" + "@webassemblyjs/wast-printer": "npm:1.14.1" + checksum: 5ac4781086a2ca4b320bdbfd965a209655fe8a208ca38d89197148f8597e587c9a2c94fb6bd6f1a7dbd4527c49c6844fcdc2af981f8d793a97bf63a016aa86d2 languageName: node linkType: hard -"@webassemblyjs/wasm-gen@npm:1.12.1": - version: 1.12.1 - resolution: "@webassemblyjs/wasm-gen@npm:1.12.1" +"@webassemblyjs/wasm-gen@npm:1.14.1": + version: 1.14.1 + resolution: "@webassemblyjs/wasm-gen@npm:1.14.1" dependencies: - "@webassemblyjs/ast": "npm:1.12.1" - "@webassemblyjs/helper-wasm-bytecode": "npm:1.11.6" - "@webassemblyjs/ieee754": "npm:1.11.6" - "@webassemblyjs/leb128": "npm:1.11.6" - "@webassemblyjs/utf8": "npm:1.11.6" - checksum: 1e257288177af9fa34c69cab94f4d9036ebed611f77f3897c988874e75182eeeec759c79b89a7a49dd24624fc2d3d48d5580b62b67c4a1c9bfbdcd266b281c16 + "@webassemblyjs/ast": "npm:1.14.1" + "@webassemblyjs/helper-wasm-bytecode": "npm:1.13.2" + "@webassemblyjs/ieee754": "npm:1.13.2" + "@webassemblyjs/leb128": "npm:1.13.2" + "@webassemblyjs/utf8": "npm:1.13.2" + checksum: d678810d7f3f8fecb2e2bdadfb9afad2ec1d2bc79f59e4711ab49c81cec578371e22732d4966f59067abe5fba8e9c54923b57060a729d28d408e608beef67b10 languageName: node linkType: hard -"@webassemblyjs/wasm-opt@npm:1.12.1": - version: 1.12.1 - resolution: "@webassemblyjs/wasm-opt@npm:1.12.1" +"@webassemblyjs/wasm-opt@npm:1.14.1": + version: 1.14.1 + resolution: "@webassemblyjs/wasm-opt@npm:1.14.1" dependencies: - "@webassemblyjs/ast": "npm:1.12.1" - "@webassemblyjs/helper-buffer": "npm:1.12.1" - "@webassemblyjs/wasm-gen": "npm:1.12.1" - "@webassemblyjs/wasm-parser": "npm:1.12.1" - checksum: 992a45e1f1871033c36987459436ab4e6430642ca49328e6e32a13de9106fe69ae6c0ac27d7050efd76851e502d11cd1ac0e06b55655dfa889ad82f11a2712fb + "@webassemblyjs/ast": "npm:1.14.1" + "@webassemblyjs/helper-buffer": "npm:1.14.1" + "@webassemblyjs/wasm-gen": "npm:1.14.1" + "@webassemblyjs/wasm-parser": "npm:1.14.1" + checksum: 515bfb15277ee99ba6b11d2232ddbf22aed32aad6d0956fe8a0a0a004a1b5a3a277a71d9a3a38365d0538ac40d1b7b7243b1a244ad6cd6dece1c1bb2eb5de7ee languageName: node linkType: hard -"@webassemblyjs/wasm-parser@npm:1.12.1, @webassemblyjs/wasm-parser@npm:^1.12.1": - version: 1.12.1 - resolution: "@webassemblyjs/wasm-parser@npm:1.12.1" 
+"@webassemblyjs/wasm-parser@npm:1.14.1, @webassemblyjs/wasm-parser@npm:^1.12.1": + version: 1.14.1 + resolution: "@webassemblyjs/wasm-parser@npm:1.14.1" dependencies: - "@webassemblyjs/ast": "npm:1.12.1" - "@webassemblyjs/helper-api-error": "npm:1.11.6" - "@webassemblyjs/helper-wasm-bytecode": "npm:1.11.6" - "@webassemblyjs/ieee754": "npm:1.11.6" - "@webassemblyjs/leb128": "npm:1.11.6" - "@webassemblyjs/utf8": "npm:1.11.6" - checksum: e85cec1acad07e5eb65b92d37c8e6ca09c6ca50d7ca58803a1532b452c7321050a0328c49810c337cc2dfd100c5326a54d5ebd1aa5c339ebe6ef10c250323a0e + "@webassemblyjs/ast": "npm:1.14.1" + "@webassemblyjs/helper-api-error": "npm:1.13.2" + "@webassemblyjs/helper-wasm-bytecode": "npm:1.13.2" + "@webassemblyjs/ieee754": "npm:1.13.2" + "@webassemblyjs/leb128": "npm:1.13.2" + "@webassemblyjs/utf8": "npm:1.13.2" + checksum: 95427b9e5addbd0f647939bd28e3e06b8deefdbdadcf892385b5edc70091bf9b92fa5faac3fce8333554437c5d85835afef8c8a7d9d27ab6ba01ffab954db8c6 languageName: node linkType: hard -"@webassemblyjs/wast-printer@npm:1.12.1": - version: 1.12.1 - resolution: "@webassemblyjs/wast-printer@npm:1.12.1" +"@webassemblyjs/wast-printer@npm:1.14.1": + version: 1.14.1 + resolution: "@webassemblyjs/wast-printer@npm:1.14.1" dependencies: - "@webassemblyjs/ast": "npm:1.12.1" + "@webassemblyjs/ast": "npm:1.14.1" "@xtuc/long": "npm:4.2.2" - checksum: 39bf746eb7a79aa69953f194943bbc43bebae98bd7cadd4d8bc8c0df470ca6bf9d2b789effaa180e900fab4e2691983c1f7d41571458bd2a26267f2f0c73705a + checksum: 8d7768608996a052545251e896eac079c98e0401842af8dd4de78fba8d90bd505efb6c537e909cd6dae96e09db3fa2e765a6f26492553a675da56e2db51f9d24 languageName: node linkType: hard @@ -2223,15 +2846,6 @@ __metadata: languageName: node linkType: hard -"acorn-import-attributes@npm:^1.9.5": - version: 1.9.5 - resolution: "acorn-import-attributes@npm:1.9.5" - peerDependencies: - acorn: ^8 - checksum: 5926eaaead2326d5a86f322ff1b617b0f698aa61dc719a5baa0e9d955c9885cc71febac3fb5bacff71bbf2c4f9c12db2056883c68c53eb962c048b952e1e013d - languageName: node - linkType: hard - "acorn-jsx@npm:^5.3.2": version: 5.3.2 resolution: "acorn-jsx@npm:5.3.2" @@ -2242,20 +2856,20 @@ __metadata: linkType: hard "acorn-walk@npm:^8.1.1": - version: 8.3.3 - resolution: "acorn-walk@npm:8.3.3" + version: 8.3.4 + resolution: "acorn-walk@npm:8.3.4" dependencies: acorn: "npm:^8.11.0" - checksum: 4a9e24313e6a0a7b389e712ba69b66b455b4cb25988903506a8d247e7b126f02060b05a8a5b738a9284214e4ca95f383dd93443a4ba84f1af9b528305c7f243b + checksum: 76537ac5fb2c37a64560feaf3342023dadc086c46da57da363e64c6148dc21b57d49ace26f949e225063acb6fb441eabffd89f7a3066de5ad37ab3e328927c62 languageName: node linkType: hard -"acorn@npm:^8.11.0, acorn@npm:^8.4.1, acorn@npm:^8.7.1, acorn@npm:^8.8.2, acorn@npm:^8.9.0": - version: 8.12.1 - resolution: "acorn@npm:8.12.1" +"acorn@npm:^8.11.0, acorn@npm:^8.14.0, acorn@npm:^8.4.1, acorn@npm:^8.8.2, acorn@npm:^8.9.0": + version: 8.14.0 + resolution: "acorn@npm:8.14.0" bin: acorn: bin/acorn - checksum: 51fb26cd678f914e13287e886da2d7021f8c2bc0ccc95e03d3e0447ee278dd3b40b9c57dc222acd5881adcf26f3edc40901a4953403232129e3876793cd17386 + checksum: 6d4ee461a7734b2f48836ee0fbb752903606e576cc100eb49340295129ca0b452f3ba91ddd4424a1d4406a98adfb2ebb6bd0ff4c49d7a0930c10e462719bbfd7 languageName: node linkType: hard @@ -2399,18 +3013,9 @@ __metadata: linkType: hard "ansi-regex@npm:^6.0.1": - version: 6.0.1 - resolution: "ansi-regex@npm:6.0.1" - checksum: 
cbe16dbd2c6b2735d1df7976a7070dd277326434f0212f43abf6d87674095d247968209babdaad31bb00882fa68807256ba9be340eec2f1004de14ca75f52a08 - languageName: node - linkType: hard - -"ansi-styles@npm:^3.2.1": - version: 3.2.1 - resolution: "ansi-styles@npm:3.2.1" - dependencies: - color-convert: "npm:^1.9.0" - checksum: ece5a8ef069fcc5298f67e3f4771a663129abd174ea2dfa87923a2be2abf6cd367ef72ac87942da00ce85bd1d651d4cd8595aebdb1b385889b89b205860e977b + version: 6.1.0 + resolution: "ansi-regex@npm:6.1.0" + checksum: a91daeddd54746338478eef88af3439a7edf30f8e23196e2d6ed182da9add559c601266dbef01c2efa46a958ad6f1f8b176799657616c702b5b02e799e7fd8dc languageName: node linkType: hard @@ -2576,7 +3181,18 @@ __metadata: languageName: node linkType: hard -"assert@npm:^2.1.0": +"asn1.js@npm:^4.10.1": + version: 4.10.1 + resolution: "asn1.js@npm:4.10.1" + dependencies: + bn.js: "npm:^4.0.0" + inherits: "npm:^2.0.1" + minimalistic-assert: "npm:^1.0.0" + checksum: afa7f3ab9e31566c80175a75b182e5dba50589dcc738aa485be42bdd787e2a07246a4b034d481861123cbe646a7656f318f4f1cad2e9e5e808a210d5d6feaa88 + languageName: node + linkType: hard + +"assert@npm:^2.0.0, assert@npm:^2.1.0": version: 2.1.0 resolution: "assert@npm:2.1.0" dependencies: @@ -2783,9 +3399,23 @@ __metadata: languageName: node linkType: hard -"body-parser@npm:1.20.2": - version: 1.20.2 - resolution: "body-parser@npm:1.20.2" +"bn.js@npm:^4.0.0, bn.js@npm:^4.1.0, bn.js@npm:^4.11.9": + version: 4.12.1 + resolution: "bn.js@npm:4.12.1" + checksum: b7f37a0cd5e4b79142b6f4292d518b416be34ae55d6dd6b0f66f96550c8083a50ffbbf8bda8d0ab471158cb81aa74ea4ee58fe33c7802e4a30b13810e98df116 + languageName: node + linkType: hard + +"bn.js@npm:^5.2.1": + version: 5.2.1 + resolution: "bn.js@npm:5.2.1" + checksum: bed3d8bd34ec89dbcf9f20f88bd7d4a49c160fda3b561c7bb227501f974d3e435a48fb9b61bc3de304acab9215a3bda0803f7017ffb4d0016a0c3a740a283caa + languageName: node + linkType: hard + +"body-parser@npm:1.20.3": + version: 1.20.3 + resolution: "body-parser@npm:1.20.3" dependencies: bytes: "npm:3.1.2" content-type: "npm:~1.0.5" @@ -2795,21 +3425,21 @@ __metadata: http-errors: "npm:2.0.0" iconv-lite: "npm:0.4.24" on-finished: "npm:2.4.1" - qs: "npm:6.11.0" + qs: "npm:6.13.0" raw-body: "npm:2.5.2" type-is: "npm:~1.6.18" unpipe: "npm:1.0.0" - checksum: 06f1438fff388a2e2354c96aa3ea8147b79bfcb1262dfcc2aae68ec13723d01d5781680657b74e9f83c808266d5baf52804032fbde2b7382b89bd8cdb273ace9 + checksum: 0a9a93b7518f222885498dcecaad528cf010dd109b071bf471c93def4bfe30958b83e03496eb9c1ad4896db543d999bb62be1a3087294162a88cfa1b42c16310 languageName: node linkType: hard "bonjour-service@npm:^1.0.11": - version: 1.2.1 - resolution: "bonjour-service@npm:1.2.1" + version: 1.3.0 + resolution: "bonjour-service@npm:1.3.0" dependencies: fast-deep-equal: "npm:^3.1.3" multicast-dns: "npm:^7.2.5" - checksum: 953cbfc27fc9e36e6f988012993ab2244817d82426603e0390d4715639031396c932b6657b1aa4ec30dbb5fa903d6b2c7f1be3af7a8ba24165c93e987c849730 + checksum: 5721fd9f9bb968e9cc16c1e8116d770863dd2329cb1f753231de1515870648c225142b7eefa71f14a5c22bc7b37ddd7fdeb018700f28a8c936d50d4162d433c7 languageName: node linkType: hard @@ -2864,17 +3494,108 @@ __metadata: languageName: node linkType: hard -"browserslist@npm:^4.21.10, browserslist@npm:^4.23.1, browserslist@npm:^4.23.3": - version: 4.23.3 - resolution: "browserslist@npm:4.23.3" +"brorand@npm:^1.0.1, brorand@npm:^1.1.0": + version: 1.1.0 + resolution: "brorand@npm:1.1.0" + checksum: 
6f366d7c4990f82c366e3878492ba9a372a73163c09871e80d82fb4ae0d23f9f8924cb8a662330308206e6b3b76ba1d528b4601c9ef73c2166b440b2ea3b7571 + languageName: node + linkType: hard + +"browser-resolve@npm:^2.0.0": + version: 2.0.0 + resolution: "browser-resolve@npm:2.0.0" dependencies: - caniuse-lite: "npm:^1.0.30001646" - electron-to-chromium: "npm:^1.5.4" + resolve: "npm:^1.17.0" + checksum: 06c43adf3cb1939825ab9a4ac355b23272820ee421a20d04f62e0dabd9ea305e497b97f3ac027f87d53c366483aafe8673bbe1aaa5e41cd69eeafa65ac5fda6e + languageName: node + linkType: hard + +"browserify-aes@npm:^1.0.4, browserify-aes@npm:^1.2.0": + version: 1.2.0 + resolution: "browserify-aes@npm:1.2.0" + dependencies: + buffer-xor: "npm:^1.0.3" + cipher-base: "npm:^1.0.0" + create-hash: "npm:^1.1.0" + evp_bytestokey: "npm:^1.0.3" + inherits: "npm:^2.0.1" + safe-buffer: "npm:^5.0.1" + checksum: 967f2ae60d610b7b252a4cbb55a7a3331c78293c94b4dd9c264d384ca93354c089b3af9c0dd023534efdc74ffbc82510f7ad4399cf82bc37bc07052eea485f18 + languageName: node + linkType: hard + +"browserify-cipher@npm:^1.0.1": + version: 1.0.1 + resolution: "browserify-cipher@npm:1.0.1" + dependencies: + browserify-aes: "npm:^1.0.4" + browserify-des: "npm:^1.0.0" + evp_bytestokey: "npm:^1.0.0" + checksum: aa256dcb42bc53a67168bbc94ab85d243b0a3b56109dee3b51230b7d010d9b78985ffc1fb36e145c6e4db151f888076c1cfc207baf1525d3e375cbe8187fe27d + languageName: node + linkType: hard + +"browserify-des@npm:^1.0.0": + version: 1.0.2 + resolution: "browserify-des@npm:1.0.2" + dependencies: + cipher-base: "npm:^1.0.1" + des.js: "npm:^1.0.0" + inherits: "npm:^2.0.1" + safe-buffer: "npm:^5.1.2" + checksum: 943eb5d4045eff80a6cde5be4e5fbb1f2d5002126b5a4789c3c1aae3cdddb1eb92b00fb92277f512288e5c6af330730b1dbabcf7ce0923e749e151fcee5a074d + languageName: node + linkType: hard + +"browserify-rsa@npm:^4.0.0, browserify-rsa@npm:^4.1.0": + version: 4.1.1 + resolution: "browserify-rsa@npm:4.1.1" + dependencies: + bn.js: "npm:^5.2.1" + randombytes: "npm:^2.1.0" + safe-buffer: "npm:^5.2.1" + checksum: b650ee1192e3d7f3d779edc06dd96ed8720362e72ac310c367b9d7fe35f7e8dbb983c1829142b2b3215458be8bf17c38adc7224920843024ed8cf39e19c513c0 + languageName: node + linkType: hard + +"browserify-sign@npm:^4.2.3": + version: 4.2.3 + resolution: "browserify-sign@npm:4.2.3" + dependencies: + bn.js: "npm:^5.2.1" + browserify-rsa: "npm:^4.1.0" + create-hash: "npm:^1.2.0" + create-hmac: "npm:^1.1.7" + elliptic: "npm:^6.5.5" + hash-base: "npm:~3.0" + inherits: "npm:^2.0.4" + parse-asn1: "npm:^5.1.7" + readable-stream: "npm:^2.3.8" + safe-buffer: "npm:^5.2.1" + checksum: 30c0eba3f5970a20866a4d3fbba2c5bd1928cd24f47faf995f913f1499214c6f3be14bb4d6ec1ab5c6cafb1eca9cb76ba1c2e1c04ed018370634d4e659c77216 + languageName: node + linkType: hard + +"browserify-zlib@npm:^0.2.0": + version: 0.2.0 + resolution: "browserify-zlib@npm:0.2.0" + dependencies: + pako: "npm:~1.0.5" + checksum: 9ab10b6dc732c6c5ec8ebcbe5cb7fe1467f97402c9b2140113f47b5f187b9438f93a8e065d8baf8b929323c18324fbf1105af479ee86d9d36cab7d7ef3424ad9 + languageName: node + linkType: hard + +"browserslist@npm:^4.23.3, browserslist@npm:^4.24.0": + version: 4.24.2 + resolution: "browserslist@npm:4.24.2" + dependencies: + caniuse-lite: "npm:^1.0.30001669" + electron-to-chromium: "npm:^1.5.41" node-releases: "npm:^2.0.18" - update-browserslist-db: "npm:^1.1.0" + update-browserslist-db: "npm:^1.1.1" bin: browserslist: cli.js - checksum: 3063bfdf812815346447f4796c8f04601bf5d62003374305fd323c2a463e42776475bcc5309264e39bcf9a8605851e53560695991a623be988138b3ff8c66642 + checksum: 
d747c9fb65ed7b4f1abcae4959405707ed9a7b835639f8a9ba0da2911995a6ab9b0648fd05baf2a4d4e3cf7f9fdbad56d3753f91881e365992c1d49c8d88ff7a languageName: node linkType: hard @@ -2903,6 +3624,23 @@ __metadata: languageName: node linkType: hard +"buffer-xor@npm:^1.0.3": + version: 1.0.3 + resolution: "buffer-xor@npm:1.0.3" + checksum: fd269d0e0bf71ecac3146187cfc79edc9dbb054e2ee69b4d97dfb857c6d997c33de391696d04bdd669272751fa48e7872a22f3a6c7b07d6c0bc31dbe02a4075c + languageName: node + linkType: hard + +"buffer@npm:^5.7.1": + version: 5.7.1 + resolution: "buffer@npm:5.7.1" + dependencies: + base64-js: "npm:^1.3.1" + ieee754: "npm:^1.1.13" + checksum: 27cac81cff434ed2876058d72e7c4789d11ff1120ef32c9de48f59eab58179b66710c488987d295ae89a228f835fc66d088652dffeb8e3ba8659f80eb091d55e + languageName: node + linkType: hard + "buffer@npm:^6.0.3": version: 6.0.3 resolution: "buffer@npm:6.0.3" @@ -2913,6 +3651,13 @@ __metadata: languageName: node linkType: hard +"builtin-status-codes@npm:^3.0.0": + version: 3.0.0 + resolution: "builtin-status-codes@npm:3.0.0" + checksum: c37bbba11a34c4431e56bd681b175512e99147defbe2358318d8152b3a01df7bf25e0305873947e5b350073d5ef41a364a22b37e48f1fb6d2fe6d5286a0f348c + languageName: node + linkType: hard + "bytes@npm:3.0.0": version: 3.0.0 resolution: "bytes@npm:3.0.0" @@ -3005,23 +3750,23 @@ __metadata: languageName: node linkType: hard -"caniuse-lite@npm:^1.0.30001646": - version: 1.0.30001655 - resolution: "caniuse-lite@npm:1.0.30001655" - checksum: fff0c0c3ffcba89828bfa6b99f118e82c064f46f15bb8655b9f2a352a3f552ccac0b87a9fe9532f8c5a29e284aae5579791e196480ec717d11ef1d1a1c2e3ff9 +"caniuse-lite@npm:^1.0.30001646, caniuse-lite@npm:^1.0.30001669": + version: 1.0.30001683 + resolution: "caniuse-lite@npm:1.0.30001683" + checksum: 0ca7f4f2fbd0c71fde5a14ca2e29bb1dcb84c095d7a3d88e47371e062f276f1dc31da3f10931ec134ef7fb096259c0d67c9ffb843a9ec4a040a85eb2fea0bdec languageName: node linkType: hard -"chai@npm:^5.1.1": - version: 5.1.1 - resolution: "chai@npm:5.1.1" +"chai@npm:^5.1.2": + version: 5.1.2 + resolution: "chai@npm:5.1.2" dependencies: assertion-error: "npm:^2.0.1" check-error: "npm:^2.1.1" deep-eql: "npm:^5.0.1" loupe: "npm:^3.1.0" pathval: "npm:^2.0.0" - checksum: e7f00e5881e3d5224f08fe63966ed6566bd9fdde175863c7c16dd5240416de9b34c4a0dd925f4fd64ad56256ca6507d32cf6131c49e1db65c62578eb31d4566c + checksum: 6c04ff8495b6e535df9c1b062b6b094828454e9a3c9493393e55b2f4dbff7aa2a29a4645133cad160fb00a16196c4dc03dc9bb37e1f4ba9df3b5f50d7533a736 languageName: node linkType: hard @@ -3041,17 +3786,6 @@ __metadata: languageName: node linkType: hard -"chalk@npm:^2.4.2": - version: 2.4.2 - resolution: "chalk@npm:2.4.2" - dependencies: - ansi-styles: "npm:^3.2.1" - escape-string-regexp: "npm:^1.0.5" - supports-color: "npm:^5.3.0" - checksum: e6543f02ec877732e3a2d1c3c3323ddb4d39fbab687c23f526e25bd4c6a9bf3b83a696e8c769d078e04e5754921648f7821b2a2acfd16c550435fd630026e073 - languageName: node - linkType: hard - "chalk@npm:^4.0.0, chalk@npm:^4.0.2, chalk@npm:^4.1.0, chalk@npm:^4.1.2": version: 4.1.2 resolution: "chalk@npm:4.1.2" @@ -3123,10 +3857,20 @@ __metadata: languageName: node linkType: hard +"cipher-base@npm:^1.0.0, cipher-base@npm:^1.0.1, cipher-base@npm:^1.0.3": + version: 1.0.5 + resolution: "cipher-base@npm:1.0.5" + dependencies: + inherits: "npm:^2.0.4" + safe-buffer: "npm:^5.2.1" + checksum: 064a7f9323ba5416c8f4ab98bd0fca7234f05b39b0784b8131429e84ac5c735e7fc9f87e2bd39b278a0121d833ca20fa9f5b4dd11fbe289191e7d29471bb3f5b + languageName: node + linkType: hard + "cjs-module-lexer@npm:^1.0.0": - version: 
1.4.0 - resolution: "cjs-module-lexer@npm:1.4.0" - checksum: b5ef03e10297c24f0db56b13d7d2f92e377499c83d7bf9352ec73df544b5310e024dfb1779a6b810e7a06eb18caa6a0e2da5f11df8116af73306f362e67fb61a + version: 1.4.1 + resolution: "cjs-module-lexer@npm:1.4.1" + checksum: 5a7d8279629c9ba8ccf38078c2fed75b7737973ced22b9b5a54180efa57fb2fe2bb7bec6aec55e3b8f3f5044f5d7b240347ad9bd285e7c3d0ee5b0a1d0504dfc languageName: node linkType: hard @@ -3237,15 +3981,6 @@ __metadata: languageName: node linkType: hard -"color-convert@npm:^1.9.0": - version: 1.9.3 - resolution: "color-convert@npm:1.9.3" - dependencies: - color-name: "npm:1.1.3" - checksum: 5ad3c534949a8c68fca8fbc6f09068f435f0ad290ab8b2f76841b9e6af7e0bb57b98cb05b0e19fe33f5d91e5a8611ad457e5f69e0a484caad1f7487fd0e8253c - languageName: node - linkType: hard - "color-convert@npm:^2.0.1": version: 2.0.1 resolution: "color-convert@npm:2.0.1" @@ -3255,13 +3990,6 @@ __metadata: languageName: node linkType: hard -"color-name@npm:1.1.3": - version: 1.1.3 - resolution: "color-name@npm:1.1.3" - checksum: 566a3d42cca25b9b3cd5528cd7754b8e89c0eb646b7f214e8e2eaddb69994ac5f0557d9c175eb5d8f0ad73531140d9c47525085ee752a91a2ab15ab459caf6d6 - languageName: node - linkType: hard - "color-name@npm:~1.1.4": version: 1.1.4 resolution: "color-name@npm:1.1.4" @@ -3320,7 +4048,7 @@ __metadata: languageName: node linkType: hard -"compressible@npm:~2.0.16": +"compressible@npm:~2.0.16, compressible@npm:~2.0.18": version: 2.0.18 resolution: "compressible@npm:2.0.18" dependencies: @@ -3329,7 +4057,7 @@ __metadata: languageName: node linkType: hard -"compression@npm:1.7.4, compression@npm:^1.7.4": +"compression@npm:1.7.4": version: 1.7.4 resolution: "compression@npm:1.7.4" dependencies: @@ -3344,6 +4072,21 @@ __metadata: languageName: node linkType: hard +"compression@npm:^1.7.4": + version: 1.7.5 + resolution: "compression@npm:1.7.5" + dependencies: + bytes: "npm:3.1.2" + compressible: "npm:~2.0.18" + debug: "npm:2.6.9" + negotiator: "npm:~0.6.4" + on-headers: "npm:~1.0.2" + safe-buffer: "npm:5.2.1" + vary: "npm:~1.1.2" + checksum: 35c9d2d57c86d8107eab5e637f2146fcefec8475a2ff3e162f5eb0982ff856d385fb5d8c9823c3d50e075f2d9304bc622dac3df27bfef0355309c0a5307861c5 + languageName: node + linkType: hard + "concat-map@npm:0.0.1": version: 0.0.1 resolution: "concat-map@npm:0.0.1" @@ -3358,6 +4101,20 @@ __metadata: languageName: node linkType: hard +"console-browserify@npm:^1.1.0": + version: 1.2.0 + resolution: "console-browserify@npm:1.2.0" + checksum: 89b99a53b7d6cee54e1e64fa6b1f7ac24b844b4019c5d39db298637e55c1f4ffa5c165457ad984864de1379df2c8e1886cbbdac85d9dbb6876a9f26c3106f226 + languageName: node + linkType: hard + +"constants-browserify@npm:^1.0.0": + version: 1.0.0 + resolution: "constants-browserify@npm:1.0.0" + checksum: ab49b1d59a433ed77c964d90d19e08b2f77213fb823da4729c0baead55e3c597f8f97ebccfdfc47bd896d43854a117d114c849a6f659d9986420e97da0f83ac5 + languageName: node + linkType: hard + "content-disposition@npm:0.5.2": version: 0.5.2 resolution: "content-disposition@npm:0.5.2" @@ -3395,10 +4152,10 @@ __metadata: languageName: node linkType: hard -"cookie@npm:0.6.0": - version: 0.6.0 - resolution: "cookie@npm:0.6.0" - checksum: f2318b31af7a31b4ddb4a678d024514df5e705f9be5909a192d7f116cfb6d45cbacf96a473fa733faa95050e7cff26e7832bb3ef94751592f1387b71c8956686 +"cookie@npm:0.7.1": + version: 0.7.1 + resolution: "cookie@npm:0.7.1" + checksum: 5de60c67a410e7c8dc8a46a4b72eb0fe925871d057c9a5d2c0e8145c4270a4f81076de83410c4d397179744b478e33cd80ccbcc457abf40a9409ad27dcd21dde languageName: node linkType: 
hard @@ -3442,6 +4199,43 @@ __metadata: languageName: node linkType: hard +"create-ecdh@npm:^4.0.4": + version: 4.0.4 + resolution: "create-ecdh@npm:4.0.4" + dependencies: + bn.js: "npm:^4.1.0" + elliptic: "npm:^6.5.3" + checksum: 77b11a51360fec9c3bce7a76288fc0deba4b9c838d5fb354b3e40c59194d23d66efe6355fd4b81df7580da0661e1334a235a2a5c040b7569ba97db428d466e7f + languageName: node + linkType: hard + +"create-hash@npm:^1.1.0, create-hash@npm:^1.1.2, create-hash@npm:^1.2.0": + version: 1.2.0 + resolution: "create-hash@npm:1.2.0" + dependencies: + cipher-base: "npm:^1.0.1" + inherits: "npm:^2.0.1" + md5.js: "npm:^1.3.4" + ripemd160: "npm:^2.0.1" + sha.js: "npm:^2.4.0" + checksum: d402e60e65e70e5083cb57af96d89567954d0669e90550d7cec58b56d49c4b193d35c43cec8338bc72358198b8cbf2f0cac14775b651e99238e1cf411490f915 + languageName: node + linkType: hard + +"create-hmac@npm:^1.1.4, create-hmac@npm:^1.1.7": + version: 1.1.7 + resolution: "create-hmac@npm:1.1.7" + dependencies: + cipher-base: "npm:^1.0.3" + create-hash: "npm:^1.1.0" + inherits: "npm:^2.0.1" + ripemd160: "npm:^2.0.0" + safe-buffer: "npm:^5.0.1" + sha.js: "npm:^2.4.8" + checksum: 24332bab51011652a9a0a6d160eed1e8caa091b802335324ae056b0dcb5acbc9fcf173cf10d128eba8548c3ce98dfa4eadaa01bd02f44a34414baee26b651835 + languageName: node + linkType: hard + "create-jest@npm:^29.7.0": version: 29.7.0 resolution: "create-jest@npm:29.7.0" @@ -3459,21 +4253,41 @@ __metadata: languageName: node linkType: hard -"create-require@npm:^1.1.0": +"create-require@npm:^1.1.0, create-require@npm:^1.1.1": version: 1.1.1 resolution: "create-require@npm:1.1.1" checksum: 157cbc59b2430ae9a90034a5f3a1b398b6738bf510f713edc4d4e45e169bc514d3d99dd34d8d01ca7ae7830b5b8b537e46ae8f3c8f932371b0875c0151d7ec91 languageName: node linkType: hard -"cross-spawn@npm:^7.0.0, cross-spawn@npm:^7.0.2, cross-spawn@npm:^7.0.3": - version: 7.0.3 - resolution: "cross-spawn@npm:7.0.3" +"cross-spawn@npm:^7.0.0, cross-spawn@npm:^7.0.2, cross-spawn@npm:^7.0.3, cross-spawn@npm:^7.0.5": + version: 7.0.6 + resolution: "cross-spawn@npm:7.0.6" dependencies: path-key: "npm:^3.1.0" shebang-command: "npm:^2.0.0" which: "npm:^2.0.1" - checksum: 5738c312387081c98d69c98e105b6327b069197f864a60593245d64c8089c8a0a744e16349281210d56835bb9274130d825a78b2ad6853ca13cfbeffc0c31750 + checksum: 053ea8b2135caff68a9e81470e845613e374e7309a47731e81639de3eaeb90c3d01af0e0b44d2ab9d50b43467223b88567dfeb3262db942dc063b9976718ffc1 + languageName: node + linkType: hard + +"crypto-browserify@npm:^3.11.0": + version: 3.12.1 + resolution: "crypto-browserify@npm:3.12.1" + dependencies: + browserify-cipher: "npm:^1.0.1" + browserify-sign: "npm:^4.2.3" + create-ecdh: "npm:^4.0.4" + create-hash: "npm:^1.2.0" + create-hmac: "npm:^1.1.7" + diffie-hellman: "npm:^5.0.3" + hash-base: "npm:~3.0.4" + inherits: "npm:^2.0.4" + pbkdf2: "npm:^3.1.2" + public-encrypt: "npm:^4.0.3" + randombytes: "npm:^2.1.0" + randomfill: "npm:^1.0.4" + checksum: 184a2def7b16628e79841243232ab5497f18d8e158ac21b7ce90ab172427d0a892a561280adc08f9d4d517bce8db2a5b335dc21abb970f787f8e874bd7b9db7d languageName: node linkType: hard @@ -3586,15 +4400,15 @@ __metadata: languageName: node linkType: hard -"debug@npm:4, debug@npm:^4.1.0, debug@npm:^4.1.1, debug@npm:^4.3.1, debug@npm:^4.3.2, debug@npm:^4.3.4, debug@npm:^4.3.5": - version: 4.3.6 - resolution: "debug@npm:4.3.6" +"debug@npm:4, debug@npm:^4.1.0, debug@npm:^4.1.1, debug@npm:^4.3.1, debug@npm:^4.3.2, debug@npm:^4.3.4, debug@npm:^4.3.5, debug@npm:^4.3.7": + version: 4.3.7 + resolution: "debug@npm:4.3.7" dependencies: - ms: 
"npm:2.1.2" + ms: "npm:^2.1.3" peerDependenciesMeta: supports-color: optional: true - checksum: 3293416bff072389c101697d4611c402a6bacd1900ac20c0492f61a9cdd6b3b29750fc7f5e299f8058469ef60ff8fb79b86395a30374fbd2490113c1c7112285 + checksum: 1471db19c3b06d485a622d62f65947a19a23fbd0dd73f7fd3eafb697eec5360cde447fb075919987899b1a2096e85d35d4eb5a4de09a57600ac9cf7e6c8e768b languageName: node linkType: hard @@ -3713,6 +4527,16 @@ __metadata: languageName: node linkType: hard +"des.js@npm:^1.0.0": + version: 1.1.0 + resolution: "des.js@npm:1.1.0" + dependencies: + inherits: "npm:^2.0.1" + minimalistic-assert: "npm:^1.0.0" + checksum: 671354943ad67493e49eb4c555480ab153edd7cee3a51c658082fcde539d2690ed2a4a0b5d1f401f9cde822edf3939a6afb2585f32c091f2d3a1b1665cd45236 + languageName: node + linkType: hard + "destroy@npm:1.2.0": version: 1.2.0 resolution: "destroy@npm:1.2.0" @@ -3720,6 +4544,13 @@ __metadata: languageName: node linkType: hard +"detect-libc@npm:^2.0.1": + version: 2.0.3 + resolution: "detect-libc@npm:2.0.3" + checksum: 88095bda8f90220c95f162bf92cad70bd0e424913e655c20578600e35b91edc261af27531cf160a331e185c0ced93944bc7e09939143225f56312d7fd800fdb7 + languageName: node + linkType: hard + "detect-newline@npm:^3.0.0": version: 3.1.0 resolution: "detect-newline@npm:3.1.0" @@ -3748,6 +4579,17 @@ __metadata: languageName: node linkType: hard +"diffie-hellman@npm:^5.0.3": + version: 5.0.3 + resolution: "diffie-hellman@npm:5.0.3" + dependencies: + bn.js: "npm:^4.1.0" + miller-rabin: "npm:^4.0.0" + randombytes: "npm:^2.0.0" + checksum: ce53ccafa9ca544b7fc29b08a626e23a9b6562efc2a98559a0c97b4718937cebaa9b5d7d0a05032cc9c1435e9b3c1532b9e9bf2e0ede868525922807ad6e1ecf + languageName: node + linkType: hard + "dir-glob@npm:^3.0.1": version: 3.0.1 resolution: "dir-glob@npm:3.0.1" @@ -3804,6 +4646,13 @@ __metadata: languageName: node linkType: hard +"domain-browser@npm:4.22.0": + version: 4.22.0 + resolution: "domain-browser@npm:4.22.0" + checksum: 2ef7eda6d2161038fda0c9aa4c9e18cc7a0baa89ea6be975d449527c2eefd4b608425db88508e2859acc472f46f402079274b24bd75e3fb506f28c5dba203129 + languageName: node + linkType: hard + "domelementtype@npm:^2.0.1, domelementtype@npm:^2.2.0": version: 2.3.0 resolution: "domelementtype@npm:2.3.0" @@ -3866,10 +4715,25 @@ __metadata: languageName: node linkType: hard -"electron-to-chromium@npm:^1.5.4": - version: 1.5.13 - resolution: "electron-to-chromium@npm:1.5.13" - checksum: 1d88ac39447e1d718c4296f92fe89836df4688daf2d362d6c49108136795f05a56dd9c950f1c6715e0395fa037c3b5f5ea686c543fdc90e6d74a005877c45022 +"electron-to-chromium@npm:^1.5.41": + version: 1.5.64 + resolution: "electron-to-chromium@npm:1.5.64" + checksum: 331c2160cc37ef85317b44f2078af8ff16f068fc95d4af2210fe943b567f20b1445a7faa40c05d290bc229102ef1b662371464ba2725d10ff6c8543af6d40adf + languageName: node + linkType: hard + +"elliptic@npm:^6.5.3, elliptic@npm:^6.5.5": + version: 6.6.1 + resolution: "elliptic@npm:6.6.1" + dependencies: + bn.js: "npm:^4.11.9" + brorand: "npm:^1.1.0" + hash.js: "npm:^1.0.0" + hmac-drbg: "npm:^1.0.1" + inherits: "npm:^2.0.4" + minimalistic-assert: "npm:^1.0.1" + minimalistic-crypto-utils: "npm:^1.0.1" + checksum: 8b24ef782eec8b472053793ea1e91ae6bee41afffdfcb78a81c0a53b191e715cbe1292aa07165958a9bbe675bd0955142560b1a007ffce7d6c765bcaf951a867 languageName: node linkType: hard @@ -3908,6 +4772,13 @@ __metadata: languageName: node linkType: hard +"encodeurl@npm:~2.0.0": + version: 2.0.0 + resolution: "encodeurl@npm:2.0.0" + checksum: 
5d317306acb13e6590e28e27924c754163946a2480de11865c991a3a7eed4315cd3fba378b543ca145829569eefe9b899f3d84bb09870f675ae60bc924b01ceb + languageName: node + linkType: hard + "encoding@npm:^0.1.13": version: 0.1.13 resolution: "encoding@npm:0.1.13" @@ -3960,11 +4831,11 @@ __metadata: linkType: hard "envinfo@npm:^7.7.3": - version: 7.13.0 - resolution: "envinfo@npm:7.13.0" + version: 7.14.0 + resolution: "envinfo@npm:7.14.0" bin: envinfo: dist/cli.js - checksum: 9c279213cbbb353b3171e8e333fd2ed564054abade08ab3d735fe136e10a0e14e0588e1ce77e6f01285f2462eaca945d64f0778be5ae3d9e82804943e36a4411 + checksum: 059a031eee101e056bd9cc5cbfe25c2fab433fe1780e86cf0a82d24a000c6931e327da6a8ffb3dce528a24f83f256e7efc0b36813113eff8fdc6839018efe327 languageName: node linkType: hard @@ -3984,9 +4855,9 @@ __metadata: languageName: node linkType: hard -"es-abstract@npm:^1.22.1, es-abstract@npm:^1.22.3, es-abstract@npm:^1.23.0, es-abstract@npm:^1.23.2": - version: 1.23.3 - resolution: "es-abstract@npm:1.23.3" +"es-abstract@npm:^1.22.1, es-abstract@npm:^1.22.3, es-abstract@npm:^1.23.0, es-abstract@npm:^1.23.1, es-abstract@npm:^1.23.2": + version: 1.23.5 + resolution: "es-abstract@npm:1.23.5" dependencies: array-buffer-byte-length: "npm:^1.0.1" arraybuffer.prototype.slice: "npm:^1.0.3" @@ -4003,7 +4874,7 @@ __metadata: function.prototype.name: "npm:^1.1.6" get-intrinsic: "npm:^1.2.4" get-symbol-description: "npm:^1.0.2" - globalthis: "npm:^1.0.3" + globalthis: "npm:^1.0.4" gopd: "npm:^1.0.1" has-property-descriptors: "npm:^1.0.2" has-proto: "npm:^1.0.3" @@ -4019,10 +4890,10 @@ __metadata: is-string: "npm:^1.0.7" is-typed-array: "npm:^1.1.13" is-weakref: "npm:^1.0.2" - object-inspect: "npm:^1.13.1" + object-inspect: "npm:^1.13.3" object-keys: "npm:^1.1.1" object.assign: "npm:^4.1.5" - regexp.prototype.flags: "npm:^1.5.2" + regexp.prototype.flags: "npm:^1.5.3" safe-array-concat: "npm:^1.1.2" safe-regex-test: "npm:^1.0.3" string.prototype.trim: "npm:^1.2.9" @@ -4034,7 +4905,7 @@ __metadata: typed-array-length: "npm:^1.0.6" unbox-primitive: "npm:^1.0.2" which-typed-array: "npm:^1.1.15" - checksum: d27e9afafb225c6924bee9971a7f25f20c314f2d6cb93a63cada4ac11dcf42040896a6c22e5fb8f2a10767055ed4ddf400be3b1eb12297d281726de470b75666 + checksum: 1f6f91da9cf7ee2c81652d57d3046621d598654d1d1b05c1578bafe5c4c2d3d69513901679bdca2de589f620666ec21de337e4935cec108a4ed0871d5ef04a5d languageName: node linkType: hard @@ -4054,7 +4925,7 @@ __metadata: languageName: node linkType: hard -"es-module-lexer@npm:^1.2.1": +"es-module-lexer@npm:^1.2.1, es-module-lexer@npm:^1.5.4": version: 1.5.4 resolution: "es-module-lexer@npm:1.5.4" checksum: 300a469488c2f22081df1e4c8398c78db92358496e639b0df7f89ac6455462aaf5d8893939087c1a1cbcbf20eed4610c70e0bcb8f3e4b0d80a5d2611c539408c @@ -4181,7 +5052,7 @@ __metadata: languageName: node linkType: hard -"escalade@npm:^3.1.1, escalade@npm:^3.1.2": +"escalade@npm:^3.1.1, escalade@npm:^3.2.0": version: 3.2.0 resolution: "escalade@npm:3.2.0" checksum: ced4dd3a78e15897ed3be74e635110bbf3b08877b0a41be50dcb325ee0e0b5f65fc2d50e9845194d7c4633f327e2e1c6cce00a71b617c5673df0374201d67f65 @@ -4195,13 +5066,6 @@ __metadata: languageName: node linkType: hard -"escape-string-regexp@npm:^1.0.5": - version: 1.0.5 - resolution: "escape-string-regexp@npm:1.0.5" - checksum: a968ad453dd0c2724e14a4f20e177aaf32bb384ab41b674a8454afe9a41c5e6fe8903323e0a1052f56289d04bd600f81278edf140b0fcc02f5cac98d0f5b5371 - languageName: node - linkType: hard - "escape-string-regexp@npm:^2.0.0": version: 2.0.0 resolution: "escape-string-regexp@npm:2.0.0" @@ -4263,21 
+5127,21 @@ __metadata: languageName: node linkType: hard -"eslint-module-utils@npm:^2.8.1, eslint-module-utils@npm:^2.9.0": - version: 2.9.0 - resolution: "eslint-module-utils@npm:2.9.0" +"eslint-module-utils@npm:^2.12.0, eslint-module-utils@npm:^2.8.1": + version: 2.12.0 + resolution: "eslint-module-utils@npm:2.12.0" dependencies: debug: "npm:^3.2.7" peerDependenciesMeta: eslint: optional: true - checksum: 7c45c5b54402a969e99315890c10e9bf8c8bee16c7890573343af05dfa04566d61546585678c413e5228af0550e39461be47e35a8ff0d1863e113bdbb28d1d29 + checksum: 4d8b46dcd525d71276f9be9ffac1d2be61c9d54cc53c992e6333cf957840dee09381842b1acbbb15fc6b255ebab99cd481c5007ab438e5455a14abe1a0468558 languageName: node linkType: hard "eslint-plugin-import@npm:^2.27.5": - version: 2.30.0 - resolution: "eslint-plugin-import@npm:2.30.0" + version: 2.31.0 + resolution: "eslint-plugin-import@npm:2.31.0" dependencies: "@rtsao/scc": "npm:^1.1.0" array-includes: "npm:^3.1.8" @@ -4287,7 +5151,7 @@ __metadata: debug: "npm:^3.2.7" doctrine: "npm:^2.1.0" eslint-import-resolver-node: "npm:^0.3.9" - eslint-module-utils: "npm:^2.9.0" + eslint-module-utils: "npm:^2.12.0" hasown: "npm:^2.0.2" is-core-module: "npm:^2.15.1" is-glob: "npm:^4.0.3" @@ -4296,10 +5160,11 @@ __metadata: object.groupby: "npm:^1.0.3" object.values: "npm:^1.2.0" semver: "npm:^6.3.1" + string.prototype.trimend: "npm:^1.0.8" tsconfig-paths: "npm:^3.15.0" peerDependencies: - eslint: ^2 || ^3 || ^4 || ^5 || ^6 || ^7.2.0 || ^8 - checksum: 4c9dcb1f27505c4d5dd891d2b551f56c70786d136aa3992a77e785bdc67c9f60200a2c7fb0ce55b7647fe550b12bc433d5dfa59e2c00ab44227791c5ab86badf + eslint: ^2 || ^3 || ^4 || ^5 || ^6 || ^7.2.0 || ^8 || ^9 + checksum: e21d116ddd1900e091ad120b3eb68c5dd5437fe2c930f1211781cd38b246f090a6b74d5f3800b8255a0ed29782591521ad44eb21c5534960a8f1fb4040fd913a languageName: node linkType: hard @@ -4332,12 +5197,21 @@ __metadata: languageName: node linkType: hard -"eslint-plugin-react-refresh@npm:^0.4.3": - version: 0.4.11 - resolution: "eslint-plugin-react-refresh@npm:0.4.11" +"eslint-plugin-react-hooks@npm:^5.0.0": + version: 5.0.0 + resolution: "eslint-plugin-react-hooks@npm:5.0.0" + peerDependencies: + eslint: ^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0-0 || ^9.0.0 + checksum: bcb74b421f32e4203a7100405b57aab85526be4461e5a1da01bc537969a30012d2ee209a2c2a6cac543833a27188ce1e6ad71e4628d0bb4a2e5365cad86c5002 + languageName: node + linkType: hard + +"eslint-plugin-react-refresh@npm:^0.4.14, eslint-plugin-react-refresh@npm:^0.4.3": + version: 0.4.14 + resolution: "eslint-plugin-react-refresh@npm:0.4.14" peerDependencies: eslint: ">=7" - checksum: 0c7d4ce30a70fbd6460ea9ca45b029b1cc806fd922d308ad332315d0e1725a37a578283809913bf7a7c84c613e3313e891dde7692a8e6ef2979dbff7edf45901 + checksum: 427108008ffcc2e0be36897398e61a2fae54c5bf092af0171bc4cf1927080d40619bb07be02ecd7c515372210228cf849023997cfa0252d37115f9b0c0debcd2 languageName: node linkType: hard @@ -4361,58 +5235,135 @@ __metadata: languageName: node linkType: hard -"eslint-visitor-keys@npm:^3.3.0, eslint-visitor-keys@npm:^3.4.1, eslint-visitor-keys@npm:^3.4.3": +"eslint-scope@npm:^8.2.0": + version: 8.2.0 + resolution: "eslint-scope@npm:8.2.0" + dependencies: + esrecurse: "npm:^4.3.0" + estraverse: "npm:^5.2.0" + checksum: 8d2d58e2136d548ac7e0099b1a90d9fab56f990d86eb518de1247a7066d38c908be2f3df477a79cf60d70b30ba18735d6c6e70e9914dca2ee515a729975d70d6 + languageName: node + linkType: hard + +"eslint-visitor-keys@npm:^3.4.1, eslint-visitor-keys@npm:^3.4.3": version: 3.4.3 resolution: 
"eslint-visitor-keys@npm:3.4.3" checksum: 92708e882c0a5ffd88c23c0b404ac1628cf20104a108c745f240a13c332a11aac54f49a22d5762efbffc18ecbc9a580d1b7ad034bf5f3cc3307e5cbff2ec9820 languageName: node linkType: hard -"eslint@npm:^8.21.0": - version: 8.57.0 - resolution: "eslint@npm:8.57.0" +"eslint-visitor-keys@npm:^4.2.0": + version: 4.2.0 + resolution: "eslint-visitor-keys@npm:4.2.0" + checksum: 2ed81c663b147ca6f578312919483eb040295bbab759e5a371953456c636c5b49a559883e2677112453728d66293c0a4c90ab11cab3428cf02a0236d2e738269 + languageName: node + linkType: hard + +"eslint@npm:^8.21.0": + version: 8.57.1 + resolution: "eslint@npm:8.57.1" + dependencies: + "@eslint-community/eslint-utils": "npm:^4.2.0" + "@eslint-community/regexpp": "npm:^4.6.1" + "@eslint/eslintrc": "npm:^2.1.4" + "@eslint/js": "npm:8.57.1" + "@humanwhocodes/config-array": "npm:^0.13.0" + "@humanwhocodes/module-importer": "npm:^1.0.1" + "@nodelib/fs.walk": "npm:^1.2.8" + "@ungap/structured-clone": "npm:^1.2.0" + ajv: "npm:^6.12.4" + chalk: "npm:^4.0.0" + cross-spawn: "npm:^7.0.2" + debug: "npm:^4.3.2" + doctrine: "npm:^3.0.0" + escape-string-regexp: "npm:^4.0.0" + eslint-scope: "npm:^7.2.2" + eslint-visitor-keys: "npm:^3.4.3" + espree: "npm:^9.6.1" + esquery: "npm:^1.4.2" + esutils: "npm:^2.0.2" + fast-deep-equal: "npm:^3.1.3" + file-entry-cache: "npm:^6.0.1" + find-up: "npm:^5.0.0" + glob-parent: "npm:^6.0.2" + globals: "npm:^13.19.0" + graphemer: "npm:^1.4.0" + ignore: "npm:^5.2.0" + imurmurhash: "npm:^0.1.4" + is-glob: "npm:^4.0.0" + is-path-inside: "npm:^3.0.3" + js-yaml: "npm:^4.1.0" + json-stable-stringify-without-jsonify: "npm:^1.0.1" + levn: "npm:^0.4.1" + lodash.merge: "npm:^4.6.2" + minimatch: "npm:^3.1.2" + natural-compare: "npm:^1.4.0" + optionator: "npm:^0.9.3" + strip-ansi: "npm:^6.0.1" + text-table: "npm:^0.2.0" + bin: + eslint: bin/eslint.js + checksum: 1fd31533086c1b72f86770a4d9d7058ee8b4643fd1cfd10c7aac1ecb8725698e88352a87805cf4b2ce890aa35947df4b4da9655fb7fdfa60dbb448a43f6ebcf1 + languageName: node + linkType: hard + +"eslint@npm:^9.13.0": + version: 9.15.0 + resolution: "eslint@npm:9.15.0" dependencies: "@eslint-community/eslint-utils": "npm:^4.2.0" - "@eslint-community/regexpp": "npm:^4.6.1" - "@eslint/eslintrc": "npm:^2.1.4" - "@eslint/js": "npm:8.57.0" - "@humanwhocodes/config-array": "npm:^0.11.14" + "@eslint-community/regexpp": "npm:^4.12.1" + "@eslint/config-array": "npm:^0.19.0" + "@eslint/core": "npm:^0.9.0" + "@eslint/eslintrc": "npm:^3.2.0" + "@eslint/js": "npm:9.15.0" + "@eslint/plugin-kit": "npm:^0.2.3" + "@humanfs/node": "npm:^0.16.6" "@humanwhocodes/module-importer": "npm:^1.0.1" - "@nodelib/fs.walk": "npm:^1.2.8" - "@ungap/structured-clone": "npm:^1.2.0" + "@humanwhocodes/retry": "npm:^0.4.1" + "@types/estree": "npm:^1.0.6" + "@types/json-schema": "npm:^7.0.15" ajv: "npm:^6.12.4" chalk: "npm:^4.0.0" - cross-spawn: "npm:^7.0.2" + cross-spawn: "npm:^7.0.5" debug: "npm:^4.3.2" - doctrine: "npm:^3.0.0" escape-string-regexp: "npm:^4.0.0" - eslint-scope: "npm:^7.2.2" - eslint-visitor-keys: "npm:^3.4.3" - espree: "npm:^9.6.1" - esquery: "npm:^1.4.2" + eslint-scope: "npm:^8.2.0" + eslint-visitor-keys: "npm:^4.2.0" + espree: "npm:^10.3.0" + esquery: "npm:^1.5.0" esutils: "npm:^2.0.2" fast-deep-equal: "npm:^3.1.3" - file-entry-cache: "npm:^6.0.1" + file-entry-cache: "npm:^8.0.0" find-up: "npm:^5.0.0" glob-parent: "npm:^6.0.2" - globals: "npm:^13.19.0" - graphemer: "npm:^1.4.0" ignore: "npm:^5.2.0" imurmurhash: "npm:^0.1.4" is-glob: "npm:^4.0.0" - is-path-inside: "npm:^3.0.3" - js-yaml: "npm:^4.1.0" 
json-stable-stringify-without-jsonify: "npm:^1.0.1" - levn: "npm:^0.4.1" lodash.merge: "npm:^4.6.2" minimatch: "npm:^3.1.2" natural-compare: "npm:^1.4.0" optionator: "npm:^0.9.3" - strip-ansi: "npm:^6.0.1" - text-table: "npm:^0.2.0" + peerDependencies: + jiti: "*" + peerDependenciesMeta: + jiti: + optional: true bin: eslint: bin/eslint.js - checksum: 00bb96fd2471039a312435a6776fe1fd557c056755eaa2b96093ef3a8508c92c8775d5f754768be6b1dddd09fdd3379ddb231eeb9b6c579ee17ea7d68000a529 + checksum: d0d7606f36bfcccb1c3703d0a24df32067b207a616f17efe5fb1765a91d13f085afffc4fc97ecde4ab9c9f4edd64d9b4ce750e13ff7937a25074b24bee15b20f + languageName: node + linkType: hard + +"espree@npm:^10.0.1, espree@npm:^10.3.0": + version: 10.3.0 + resolution: "espree@npm:10.3.0" + dependencies: + acorn: "npm:^8.14.0" + acorn-jsx: "npm:^5.3.2" + eslint-visitor-keys: "npm:^4.2.0" + checksum: 272beeaca70d0a1a047d61baff64db04664a33d7cfb5d144f84bc8a5c6194c6c8ebe9cc594093ca53add88baa23e59b01e69e8a0160ab32eac570482e165c462 languageName: node linkType: hard @@ -4437,7 +5388,7 @@ __metadata: languageName: node linkType: hard -"esquery@npm:^1.4.2": +"esquery@npm:^1.4.2, esquery@npm:^1.5.0": version: 1.6.0 resolution: "esquery@npm:1.6.0" dependencies: @@ -4469,6 +5420,13 @@ __metadata: languageName: node linkType: hard +"estree-walker@npm:^2.0.2": + version: 2.0.2 + resolution: "estree-walker@npm:2.0.2" + checksum: 53a6c54e2019b8c914dc395890153ffdc2322781acf4bd7d1a32d7aedc1710807bdcd866ac133903d5629ec601fbb50abe8c2e5553c7f5a0afdd9b6af6c945af + languageName: node + linkType: hard + "estree-walker@npm:^3.0.3": version: 3.0.3 resolution: "estree-walker@npm:3.0.3" @@ -4506,13 +5464,24 @@ __metadata: languageName: node linkType: hard -"events@npm:^3.2.0, events@npm:^3.3.0": +"events@npm:^3.0.0, events@npm:^3.2.0, events@npm:^3.3.0": version: 3.3.0 resolution: "events@npm:3.3.0" checksum: d6b6f2adbccbcda74ddbab52ed07db727ef52e31a61ed26db9feb7dc62af7fc8e060defa65e5f8af9449b86b52cc1a1f6a79f2eafcf4e62add2b7a1fa4a432f6 languageName: node linkType: hard +"evp_bytestokey@npm:^1.0.0, evp_bytestokey@npm:^1.0.3": + version: 1.0.3 + resolution: "evp_bytestokey@npm:1.0.3" + dependencies: + md5.js: "npm:^1.3.4" + node-gyp: "npm:latest" + safe-buffer: "npm:^5.1.1" + checksum: 77fbe2d94a902a80e9b8f5a73dcd695d9c14899c5e82967a61b1fc6cbbb28c46552d9b127cff47c45fcf684748bdbcfa0a50410349109de87ceb4b199ef6ee99 + languageName: node + linkType: hard + "execa@npm:^5.0.0, execa@npm:^5.1.1": version: 5.1.1 resolution: "execa@npm:5.1.1" @@ -4530,23 +5499,6 @@ __metadata: languageName: node linkType: hard -"execa@npm:^8.0.1": - version: 8.0.1 - resolution: "execa@npm:8.0.1" - dependencies: - cross-spawn: "npm:^7.0.3" - get-stream: "npm:^8.0.1" - human-signals: "npm:^5.0.0" - is-stream: "npm:^3.0.0" - merge-stream: "npm:^2.0.0" - npm-run-path: "npm:^5.1.0" - onetime: "npm:^6.0.0" - signal-exit: "npm:^4.1.0" - strip-final-newline: "npm:^3.0.0" - checksum: 2c52d8775f5bf103ce8eec9c7ab3059909ba350a5164744e9947ed14a53f51687c040a250bda833f906d1283aa8803975b84e6c8f7a7c42f99dc8ef80250d1af - languageName: node - linkType: hard - "exit@npm:^0.1.2": version: 0.1.2 resolution: "exit@npm:0.1.2" @@ -4554,6 +5506,13 @@ __metadata: languageName: node linkType: hard +"expect-type@npm:^1.1.0": + version: 1.1.0 + resolution: "expect-type@npm:1.1.0" + checksum: 5af0febbe8fe18da05a6d51e3677adafd75213512285408156b368ca471252565d5ca6e59e4bddab25121f3cfcbbebc6a5489f8cc9db131cc29e69dcdcc7ae15 + languageName: node + linkType: hard + "expect@npm:^29.0.0, expect@npm:^29.7.0": version: 29.7.0 
resolution: "expect@npm:29.7.0" @@ -4575,41 +5534,41 @@ __metadata: linkType: hard "express@npm:^4.17.3": - version: 4.19.2 - resolution: "express@npm:4.19.2" + version: 4.21.1 + resolution: "express@npm:4.21.1" dependencies: accepts: "npm:~1.3.8" array-flatten: "npm:1.1.1" - body-parser: "npm:1.20.2" + body-parser: "npm:1.20.3" content-disposition: "npm:0.5.4" content-type: "npm:~1.0.4" - cookie: "npm:0.6.0" + cookie: "npm:0.7.1" cookie-signature: "npm:1.0.6" debug: "npm:2.6.9" depd: "npm:2.0.0" - encodeurl: "npm:~1.0.2" + encodeurl: "npm:~2.0.0" escape-html: "npm:~1.0.3" etag: "npm:~1.8.1" - finalhandler: "npm:1.2.0" + finalhandler: "npm:1.3.1" fresh: "npm:0.5.2" http-errors: "npm:2.0.0" - merge-descriptors: "npm:1.0.1" + merge-descriptors: "npm:1.0.3" methods: "npm:~1.1.2" on-finished: "npm:2.4.1" parseurl: "npm:~1.3.3" - path-to-regexp: "npm:0.1.7" + path-to-regexp: "npm:0.1.10" proxy-addr: "npm:~2.0.7" - qs: "npm:6.11.0" + qs: "npm:6.13.0" range-parser: "npm:~1.2.1" safe-buffer: "npm:5.2.1" - send: "npm:0.18.0" - serve-static: "npm:1.15.0" + send: "npm:0.19.0" + serve-static: "npm:1.16.2" setprototypeof: "npm:1.2.0" statuses: "npm:2.0.1" type-is: "npm:~1.6.18" utils-merge: "npm:1.0.1" vary: "npm:~1.1.2" - checksum: e82e2662ea9971c1407aea9fc3c16d6b963e55e3830cd0ef5e00b533feda8b770af4e3be630488ef8a752d7c75c4fcefb15892868eeaafe7353cb9e3e269fdcb + checksum: 0c287867e5f6129d3def1edd9b63103a53c40d4dc8628839d4b6827e35eb8f0de5a4656f9d85f4457eba584f9871ebb2ad26c750b36bd75d9bbb8bcebdc4892c languageName: node linkType: hard @@ -4676,18 +5635,9 @@ __metadata: linkType: hard "fast-uri@npm:^3.0.1": - version: 3.0.1 - resolution: "fast-uri@npm:3.0.1" - checksum: 3cd46d6006083b14ca61ffe9a05b8eef75ef87e9574b6f68f2e17ecf4daa7aaadeff44e3f0f7a0ef4e0f7e7c20fc07beec49ff14dc72d0b500f00386592f2d10 - languageName: node - linkType: hard - -"fast-url-parser@npm:1.1.3": - version: 1.1.3 - resolution: "fast-url-parser@npm:1.1.3" - dependencies: - punycode: "npm:^1.3.2" - checksum: d85c5c409cf0215417380f98a2d29c23a95004d93ff0d8bdf1af5f1a9d1fc608ac89ac6ffe863783d2c73efb3850dd35390feb1de3296f49877bfee0392eb5d3 + version: 3.0.3 + resolution: "fast-uri@npm:3.0.3" + checksum: 4b2c5ce681a062425eae4f15cdc8fc151fd310b2f69b1f96680677820a8b49c3cd6e80661a406e19d50f0c40a3f8bffdd458791baf66f4a879d80be28e10a320 languageName: node linkType: hard @@ -4734,6 +5684,15 @@ __metadata: languageName: node linkType: hard +"file-entry-cache@npm:^8.0.0": + version: 8.0.0 + resolution: "file-entry-cache@npm:8.0.0" + dependencies: + flat-cache: "npm:^4.0.0" + checksum: 9e2b5938b1cd9b6d7e3612bdc533afd4ac17b2fc646569e9a8abbf2eb48e5eb8e316bc38815a3ef6a1b456f4107f0d0f055a614ca613e75db6bf9ff4d72c1638 + languageName: node + linkType: hard + "filelist@npm:^1.0.4": version: 1.0.4 resolution: "filelist@npm:1.0.4" @@ -4752,18 +5711,18 @@ __metadata: languageName: node linkType: hard -"finalhandler@npm:1.2.0": - version: 1.2.0 - resolution: "finalhandler@npm:1.2.0" +"finalhandler@npm:1.3.1": + version: 1.3.1 + resolution: "finalhandler@npm:1.3.1" dependencies: debug: "npm:2.6.9" - encodeurl: "npm:~1.0.2" + encodeurl: "npm:~2.0.0" escape-html: "npm:~1.0.3" on-finished: "npm:2.4.1" parseurl: "npm:~1.3.3" statuses: "npm:2.0.1" unpipe: "npm:~1.0.0" - checksum: 64b7e5ff2ad1fcb14931cd012651631b721ce657da24aedb5650ddde9378bf8e95daa451da43398123f5de161a81e79ff5affe4f9f2a6d2df4a813d6d3e254b7 + checksum: d38035831865a49b5610206a3a9a9aae4e8523cbbcd01175d0480ffbf1278c47f11d89be3ca7f617ae6d94f29cf797546a4619cd84dd109009ef33f12f69019f languageName: node linkType: 
hard @@ -4798,6 +5757,16 @@ __metadata: languageName: node linkType: hard +"flat-cache@npm:^4.0.0": + version: 4.0.1 + resolution: "flat-cache@npm:4.0.1" + dependencies: + flatted: "npm:^3.2.9" + keyv: "npm:^4.5.4" + checksum: 2c59d93e9faa2523e4fda6b4ada749bed432cfa28c8e251f33b25795e426a1c6dbada777afb1f74fcfff33934fdbdea921ee738fcc33e71adc9d6eca984a1cfc + languageName: node + linkType: hard + "flat@npm:^5.0.2": version: 5.0.2 resolution: "flat@npm:5.0.2" @@ -4808,19 +5777,19 @@ __metadata: linkType: hard "flatted@npm:^3.2.9": - version: 3.3.1 - resolution: "flatted@npm:3.3.1" - checksum: 324166b125ee07d4ca9bcf3a5f98d915d5db4f39d711fba640a3178b959919aae1f7cfd8aabcfef5826ed8aa8a2aa14cc85b2d7d18ff638ddf4ae3df39573eaf + version: 3.3.2 + resolution: "flatted@npm:3.3.2" + checksum: 24cc735e74d593b6c767fe04f2ef369abe15b62f6906158079b9874bdb3ee5ae7110bb75042e70cd3f99d409d766f357caf78d5ecee9780206f5fdc5edbad334 languageName: node linkType: hard "follow-redirects@npm:^1.0.0, follow-redirects@npm:^1.15.6": - version: 1.15.6 - resolution: "follow-redirects@npm:1.15.6" + version: 1.15.9 + resolution: "follow-redirects@npm:1.15.9" peerDependenciesMeta: debug: optional: true - checksum: 9ff767f0d7be6aa6870c82ac79cf0368cd73e01bbc00e9eb1c2a16fbb198ec105e3c9b6628bb98e9f3ac66fe29a957b9645bcb9a490bb7aa0d35f908b6b85071 + checksum: 5829165bd112c3c0e82be6c15b1a58fa9dcfaede3b3c54697a82fe4a62dd5ae5e8222956b448d2f98e331525f05d00404aba7d696de9e761ef6e42fdc780244f languageName: node linkType: hard @@ -4844,13 +5813,13 @@ __metadata: linkType: hard "form-data@npm:^4.0.0": - version: 4.0.0 - resolution: "form-data@npm:4.0.0" + version: 4.0.1 + resolution: "form-data@npm:4.0.1" dependencies: asynckit: "npm:^0.4.0" combined-stream: "npm:^1.0.8" mime-types: "npm:^2.1.12" - checksum: cb6f3ac49180be03ff07ba3ff125f9eba2ff0b277fb33c7fc47569fc5e616882c5b1c69b9904c4c4187e97dd0419dd03b134174756f296dec62041e6527e2c6e + checksum: bb102d570be8592c23f4ea72d7df9daa50c7792eb0cf1c5d7e506c1706e7426a4e4ae48a35b109e91c85f1c0ec63774a21ae252b66f4eb981cb8efef7d0463c8 languageName: node linkType: hard @@ -5022,16 +5991,9 @@ __metadata: linkType: hard "get-east-asian-width@npm:^1.0.0": - version: 1.2.0 - resolution: "get-east-asian-width@npm:1.2.0" - checksum: 914b1e217cf38436c24b4c60b4c45289e39a45bf9e65ef9fd343c2815a1a02b8a0215aeec8bf9c07c516089004b6e3826332481f40a09529fcadbf6e579f286b - languageName: node - linkType: hard - -"get-func-name@npm:^2.0.1": - version: 2.0.2 - resolution: "get-func-name@npm:2.0.2" - checksum: 89830fd07623fa73429a711b9daecdb304386d237c71268007f788f113505ef1d4cc2d0b9680e072c5082490aec9df5d7758bf5ac6f1c37062855e8e3dc0b9df + version: 1.3.0 + resolution: "get-east-asian-width@npm:1.3.0" + checksum: 1a049ba697e0f9a4d5514c4623781c5246982bdb61082da6b5ae6c33d838e52ce6726407df285cdbb27ec1908b333cf2820989bd3e986e37bb20979437fdf34b languageName: node linkType: hard @@ -5062,13 +6024,6 @@ __metadata: languageName: node linkType: hard -"get-stream@npm:^8.0.1": - version: 8.0.1 - resolution: "get-stream@npm:8.0.1" - checksum: 5c2181e98202b9dae0bb4a849979291043e5892eb40312b47f0c22b9414fc9b28a3b6063d2375705eb24abc41ecf97894d9a51f64ff021511b504477b27b4290 - languageName: node - linkType: hard - "get-symbol-description@npm:^1.0.2": version: 1.0.2 resolution: "get-symbol-description@npm:1.0.2" @@ -5081,11 +6036,11 @@ __metadata: linkType: hard "get-tsconfig@npm:^4.7.5": - version: 4.8.0 - resolution: "get-tsconfig@npm:4.8.0" + version: 4.8.1 + resolution: "get-tsconfig@npm:4.8.1" dependencies: resolve-pkg-maps: "npm:^1.0.0" - 
checksum: 943721c996d9a77351aa7c07956de77baece97f997bd30f3247f46907e4b743f7b9da02c7b3692a36f0884d3724271faeb88ed1c3aca3aba2afe3f27d6c4aeb3 + checksum: 536ee85d202f604f4b5fb6be81bcd6e6d9a96846811e83e9acc6de4a04fb49506edea0e1b8cf1d5ee7af33e469916ec2809d4c5445ab8ae015a7a51fbd1572f9 languageName: node linkType: hard @@ -5160,7 +6115,21 @@ __metadata: languageName: node linkType: hard -"globalthis@npm:^1.0.3": +"globals@npm:^14.0.0": + version: 14.0.0 + resolution: "globals@npm:14.0.0" + checksum: b96ff42620c9231ad468d4c58ff42afee7777ee1c963013ff8aabe095a451d0ceeb8dcd8ef4cbd64d2538cef45f787a78ba3a9574f4a634438963e334471302d + languageName: node + linkType: hard + +"globals@npm:^15.11.0": + version: 15.12.0 + resolution: "globals@npm:15.12.0" + checksum: f34e0a1845b694f45188331742af9f488b07ba7440a06e9d2039fce0386fbbfc24afdbb9846ebdccd4092d03644e43081c49eb27b30f4b88e43af156e1c1dc34 + languageName: node + linkType: hard + +"globalthis@npm:^1.0.3, globalthis@npm:^1.0.4": version: 1.0.4 resolution: "globalthis@npm:1.0.4" dependencies: @@ -5248,13 +6217,6 @@ __metadata: languageName: node linkType: hard -"has-flag@npm:^3.0.0": - version: 3.0.0 - resolution: "has-flag@npm:3.0.0" - checksum: 1c6c83b14b8b1b3c25b0727b8ba3e3b647f99e9e6e13eb7322107261de07a4c1be56fc0d45678fc376e09772a3a1642ccdaf8fc69bdf123b6c086598397ce473 - languageName: node - linkType: hard - "has-flag@npm:^4.0.0": version: 4.0.0 resolution: "has-flag@npm:4.0.0" @@ -5294,6 +6256,37 @@ __metadata: languageName: node linkType: hard +"hash-base@npm:^3.0.0": + version: 3.1.0 + resolution: "hash-base@npm:3.1.0" + dependencies: + inherits: "npm:^2.0.4" + readable-stream: "npm:^3.6.0" + safe-buffer: "npm:^5.2.0" + checksum: 663eabcf4173326fbb65a1918a509045590a26cc7e0964b754eef248d281305c6ec9f6b31cb508d02ffca383ab50028180ce5aefe013e942b44a903ac8dc80d0 + languageName: node + linkType: hard + +"hash-base@npm:~3.0, hash-base@npm:~3.0.4": + version: 3.0.4 + resolution: "hash-base@npm:3.0.4" + dependencies: + inherits: "npm:^2.0.1" + safe-buffer: "npm:^5.0.1" + checksum: a13357dccb3827f0bb0b56bf928da85c428dc8670f6e4a1c7265e4f1653ce02d69030b40fd01b0f1d218a995a066eea279cded9cec72d207b593bcdfe309c2f0 + languageName: node + linkType: hard + +"hash.js@npm:^1.0.0, hash.js@npm:^1.0.3": + version: 1.1.7 + resolution: "hash.js@npm:1.1.7" + dependencies: + inherits: "npm:^2.0.3" + minimalistic-assert: "npm:^1.0.1" + checksum: 41ada59494eac5332cfc1ce6b7ebdd7b88a3864a6d6b08a3ea8ef261332ed60f37f10877e0c825aaa4bddebf164fbffa618286aeeec5296675e2671cbfa746c4 + languageName: node + linkType: hard + "hasown@npm:^2.0.0, hasown@npm:^2.0.1, hasown@npm:^2.0.2": version: 2.0.2 resolution: "hasown@npm:2.0.2" @@ -5319,6 +6312,17 @@ __metadata: languageName: node linkType: hard +"hmac-drbg@npm:^1.0.1": + version: 1.0.1 + resolution: "hmac-drbg@npm:1.0.1" + dependencies: + hash.js: "npm:^1.0.3" + minimalistic-assert: "npm:^1.0.0" + minimalistic-crypto-utils: "npm:^1.0.1" + checksum: f3d9ba31b40257a573f162176ac5930109816036c59a09f901eb2ffd7e5e705c6832bedfff507957125f2086a0ab8f853c0df225642a88bf1fcaea945f20600d + languageName: node + linkType: hard + "hoist-non-react-statics@npm:^3.3.0": version: 3.3.2 resolution: "hoist-non-react-statics@npm:3.3.2" @@ -5372,8 +6376,8 @@ __metadata: linkType: hard "html-webpack-plugin@npm:^5.6.0": - version: 5.6.0 - resolution: "html-webpack-plugin@npm:5.6.0" + version: 5.6.3 + resolution: "html-webpack-plugin@npm:5.6.3" dependencies: "@types/html-minifier-terser": "npm:^6.0.0" html-minifier-terser: "npm:^6.0.2" @@ -5388,7 +6392,7 @@ __metadata: 
optional: true webpack: optional: true - checksum: 50d1a0f90d512463ea8d798985d91a7ccc9d5e461713dedb240125b2ff0671f58135dd9355f7969af341ff4725e73b2defbc0984cfdce930887a48506d970002 + checksum: 25a21f83a8823d3711396dd8050bc0080c0ae55537352d432903eff58a7d9838fc811e3c26462419036190720357e67c7977efd106fb9a252770632824f0cc25 languageName: node linkType: hard @@ -5461,8 +6465,8 @@ __metadata: linkType: hard "http-proxy-middleware@npm:^2.0.3": - version: 2.0.6 - resolution: "http-proxy-middleware@npm:2.0.6" + version: 2.0.7 + resolution: "http-proxy-middleware@npm:2.0.7" dependencies: "@types/http-proxy": "npm:^1.17.8" http-proxy: "npm:^1.18.1" @@ -5474,7 +6478,7 @@ __metadata: peerDependenciesMeta: "@types/express": optional: true - checksum: 25a0e550dd1900ee5048a692e0e9b2b6339d06d487a705d90c47e359e9c6561d648cd7862d001d090e651c9efffa1b6e5160fcf1f299b5fa4935f76e9754eb11 + checksum: 8d00a61eb215b83826460b07489d8bb095368ec16e02a9d63e228dcf7524e7c20d61561e5476de1391aecd4ec32ea093279cdc972115b311f8e0a95a24c9e47e languageName: node linkType: hard @@ -5489,6 +6493,13 @@ __metadata: languageName: node linkType: hard +"https-browserify@npm:^1.0.0": + version: 1.0.0 + resolution: "https-browserify@npm:1.0.0" + checksum: e17b6943bc24ea9b9a7da5714645d808670af75a425f29baffc3284962626efdc1eb3aa9bbffaa6e64028a6ad98af5b09fabcb454a8f918fb686abfdc9e9b8ae + languageName: node + linkType: hard + "https-proxy-agent@npm:5.0.0": version: 5.0.0 resolution: "https-proxy-agent@npm:5.0.0" @@ -5516,10 +6527,10 @@ __metadata: languageName: node linkType: hard -"human-signals@npm:^5.0.0": - version: 5.0.0 - resolution: "human-signals@npm:5.0.0" - checksum: 5a9359073fe17a8b58e5a085e9a39a950366d9f00217c4ff5878bd312e09d80f460536ea6a3f260b5943a01fe55c158d1cea3fc7bee3d0520aeef04f6d915c82 +"hyperdyperid@npm:^1.2.0": + version: 1.2.0 + resolution: "hyperdyperid@npm:1.2.0" + checksum: 885ba3177c7181d315a856ee9c0005ff8eb5dcb1ce9e9d61be70987895d934d84686c37c981cceeb53216d4c9c15c1cc25f1804e84cc6a74a16993c5d7fd0893 languageName: node linkType: hard @@ -5550,14 +6561,21 @@ __metadata: languageName: node linkType: hard -"ieee754@npm:^1.2.1": +"idb@npm:^8.0.0": + version: 8.0.0 + resolution: "idb@npm:8.0.0" + checksum: d27547e03939d3d269cea38c3d4528569621ec134c717ebfc1ff816dce18e4f77372dba1d930384a9949ac56dc600e3790f98f1812a4164004e71fec302ee491 + languageName: node + linkType: hard + +"ieee754@npm:^1.1.13, ieee754@npm:^1.2.1": version: 1.2.1 resolution: "ieee754@npm:1.2.1" checksum: b0782ef5e0935b9f12883a2e2aa37baa75da6e66ce6515c168697b42160807d9330de9a32ec1ed73149aea02e0d822e572bca6f1e22bdcbd2149e13b050b17bb languageName: node linkType: hard -"ignore@npm:^5.2.0, ignore@npm:^5.2.4": +"ignore@npm:^5.2.0, ignore@npm:^5.2.4, ignore@npm:^5.3.1": version: 5.3.2 resolution: "ignore@npm:5.3.2" checksum: f9f652c957983634ded1e7f02da3b559a0d4cc210fca3792cb67f1b153623c9c42efdc1c4121af171e295444459fc4a9201101fb041b1104a3c000bccb188337 @@ -5610,7 +6628,7 @@ __metadata: languageName: node linkType: hard -"inherits@npm:2, inherits@npm:2.0.4, inherits@npm:^2.0.1, inherits@npm:^2.0.3, inherits@npm:~2.0.3, inherits@npm:~2.0.4": +"inherits@npm:2, inherits@npm:2.0.4, inherits@npm:^2.0.1, inherits@npm:^2.0.3, inherits@npm:^2.0.4, inherits@npm:~2.0.3, inherits@npm:~2.0.4": version: 2.0.4 resolution: "inherits@npm:2.0.4" checksum: 4e531f648b29039fb7426fb94075e6545faa1eb9fe83c29f0b6d9e7263aceb4289d2d4557db0d428188eeb449cc7c5e77b0a0b2c4e248ff2a65933a0dee49ef2 @@ -5700,6 +6718,15 @@ __metadata: languageName: node linkType: hard +"is-async-function@npm:^2.0.0": + 
version: 2.0.0 + resolution: "is-async-function@npm:2.0.0" + dependencies: + has-tostringtag: "npm:^1.0.0" + checksum: 787bc931576aad525d751fc5ce211960fe91e49ac84a5c22d6ae0bc9541945fbc3f686dc590c3175722ce4f6d7b798a93f6f8ff4847fdb2199aea6f4baf5d668 + languageName: node + linkType: hard + "is-bigint@npm:^1.0.1": version: 1.0.4 resolution: "is-bigint@npm:1.0.4" @@ -5729,11 +6756,11 @@ __metadata: linkType: hard "is-bun-module@npm:^1.0.2": - version: 1.1.0 - resolution: "is-bun-module@npm:1.1.0" + version: 1.2.1 + resolution: "is-bun-module@npm:1.2.1" dependencies: semver: "npm:^7.6.3" - checksum: 17cae968c3fe08e2bd66f8477e4d5a166d6299b5e7ce5c7558355551c50267f77dd386297fada6b68e4a32f01ce8920b0423e4d258242ea463b45901ec474beb + checksum: 819e63cd4468265a3e89cdc241554e37aeb85e40375a56dd559c022f4395491273267a0f843274fda6cad1eac3b0f8dc6d9e06cc349e33e2bf45098761184736 languageName: node linkType: hard @@ -5762,7 +6789,7 @@ __metadata: languageName: node linkType: hard -"is-date-object@npm:^1.0.1": +"is-date-object@npm:^1.0.1, is-date-object@npm:^1.0.5": version: 1.0.5 resolution: "is-date-object@npm:1.0.5" dependencies: @@ -5787,6 +6814,15 @@ __metadata: languageName: node linkType: hard +"is-finalizationregistry@npm:^1.0.2": + version: 1.0.2 + resolution: "is-finalizationregistry@npm:1.0.2" + dependencies: + call-bind: "npm:^1.0.2" + checksum: 81caecc984d27b1a35c68741156fc651fb1fa5e3e6710d21410abc527eb226d400c0943a167922b2e920f6b3e58b0dede9aa795882b038b85f50b3a4b877db86 + languageName: node + linkType: hard + "is-fullwidth-code-point@npm:^3.0.0": version: 3.0.0 resolution: "is-fullwidth-code-point@npm:3.0.0" @@ -5801,7 +6837,7 @@ __metadata: languageName: node linkType: hard -"is-generator-function@npm:^1.0.7": +"is-generator-function@npm:^1.0.10, is-generator-function@npm:^1.0.7": version: 1.0.10 resolution: "is-generator-function@npm:1.0.10" dependencies: @@ -5833,6 +6869,13 @@ __metadata: languageName: node linkType: hard +"is-map@npm:^2.0.3": + version: 2.0.3 + resolution: "is-map@npm:2.0.3" + checksum: 2c4d431b74e00fdda7162cd8e4b763d6f6f217edf97d4f8538b94b8702b150610e2c64961340015fe8df5b1fcee33ccd2e9b62619c4a8a3a155f8de6d6d355fc + languageName: node + linkType: hard + "is-nan@npm:^1.3.2": version: 1.3.2 resolution: "is-nan@npm:1.3.2" @@ -5906,6 +6949,13 @@ __metadata: languageName: node linkType: hard +"is-set@npm:^2.0.3": + version: 2.0.3 + resolution: "is-set@npm:2.0.3" + checksum: f73732e13f099b2dc879c2a12341cfc22ccaca8dd504e6edae26484bd5707a35d503fba5b4daad530a9b088ced1ae6c9d8200fd92e09b428fe14ea79ce8080b7 + languageName: node + linkType: hard + "is-shared-array-buffer@npm:^1.0.2, is-shared-array-buffer@npm:^1.0.3": version: 1.0.3 resolution: "is-shared-array-buffer@npm:1.0.3" @@ -5922,13 +6972,6 @@ __metadata: languageName: node linkType: hard -"is-stream@npm:^3.0.0": - version: 3.0.0 - resolution: "is-stream@npm:3.0.0" - checksum: eb2f7127af02ee9aa2a0237b730e47ac2de0d4e76a4a905a50a11557f2339df5765eaea4ceb8029f1efa978586abe776908720bfcb1900c20c6ec5145f6f29d8 - languageName: node - linkType: hard - "is-string@npm:^1.0.5, is-string@npm:^1.0.7": version: 1.0.7 resolution: "is-string@npm:1.0.7" @@ -5964,9 +7007,16 @@ __metadata: linkType: hard "is-unicode-supported@npm:^2.0.0": - version: 2.0.0 - resolution: "is-unicode-supported@npm:2.0.0" - checksum: 3013dfb8265fe9f9a0d1e9433fc4e766595631a8d85d60876c457b4bedc066768dab1477c553d02e2f626d88a4e019162706e04263c94d74994ef636a33b5f94 + version: 2.1.0 + resolution: "is-unicode-supported@npm:2.1.0" + checksum: 
a0f53e9a7c1fdbcf2d2ef6e40d4736fdffff1c9f8944c75e15425118ff3610172c87bf7bc6c34d3903b04be59790bb2212ddbe21ee65b5a97030fc50370545a5 + languageName: node + linkType: hard + +"is-weakmap@npm:^2.0.2": + version: 2.0.2 + resolution: "is-weakmap@npm:2.0.2" + checksum: 443c35bb86d5e6cc5929cd9c75a4024bb0fff9586ed50b092f94e700b89c43a33b186b76dbc6d54f3d3d09ece689ab38dcdc1af6a482cbe79c0f2da0a17f1299 languageName: node linkType: hard @@ -5979,6 +7029,16 @@ __metadata: languageName: node linkType: hard +"is-weakset@npm:^2.0.3": + version: 2.0.3 + resolution: "is-weakset@npm:2.0.3" + dependencies: + call-bind: "npm:^1.0.7" + get-intrinsic: "npm:^1.2.4" + checksum: 8ad6141b6a400e7ce7c7442a13928c676d07b1f315ab77d9912920bf5f4170622f43126f111615788f26c3b1871158a6797c862233124507db0bcc33a9537d1a + languageName: node + linkType: hard + "is-wsl@npm:^2.2.0": version: 2.2.0 resolution: "is-wsl@npm:2.2.0" @@ -6023,6 +7083,13 @@ __metadata: languageName: node linkType: hard +"isomorphic-timers-promises@npm:^1.0.1": + version: 1.0.1 + resolution: "isomorphic-timers-promises@npm:1.0.1" + checksum: 3b4761d0012ebe6b6382246079fc667f3513f36fe4042638f2bfb7db1557e4f1acd33a9c9907706c04270890ec6434120f132f3f300161a42a7dd8628926c8a4 + languageName: node + linkType: hard + "istanbul-lib-coverage@npm:^3.0.0, istanbul-lib-coverage@npm:^3.2.0": version: 3.2.2 resolution: "istanbul-lib-coverage@npm:3.2.2" @@ -6618,12 +7685,12 @@ __metadata: languageName: node linkType: hard -"jsesc@npm:^2.5.1": - version: 2.5.2 - resolution: "jsesc@npm:2.5.2" +"jsesc@npm:^3.0.2": + version: 3.0.2 + resolution: "jsesc@npm:3.0.2" bin: jsesc: bin/jsesc - checksum: dbf59312e0ebf2b4405ef413ec2b25abb5f8f4d9bc5fb8d9f90381622ebca5f2af6a6aa9a8578f65903f9e33990a6dc798edd0ce5586894bf0e9e31803a1de88 + checksum: ef22148f9e793180b14d8a145ee6f9f60f301abf443288117b4b6c53d0ecd58354898dc506ccbb553a5f7827965cd38bc5fb726575aae93c5e8915e2de8290e1 languageName: node linkType: hard @@ -6695,7 +7762,7 @@ __metadata: languageName: node linkType: hard -"keyv@npm:^4.5.3": +"keyv@npm:^4.5.3, keyv@npm:^4.5.4": version: 4.5.4 resolution: "keyv@npm:4.5.4" dependencies: @@ -6719,12 +7786,12 @@ __metadata: linkType: hard "launch-editor@npm:^2.6.0": - version: 2.8.2 - resolution: "launch-editor@npm:2.8.2" + version: 2.9.1 + resolution: "launch-editor@npm:2.9.1" dependencies: picocolors: "npm:^1.0.0" shell-quote: "npm:^1.8.1" - checksum: 61f5465b2f4bc9fe88f29248a38e5e1c1e17eb7c85a8a2fa12f661fed5aabb9dfab2c55e4203a94c7768c2c67b5061e116cdadf54f56a9f2a3ae5beca42ea997 + checksum: 891f1d136ed8e4ea12e16c196a0d2e07f23c7b983e3ab532b2be1775fb244909581507cce97c50f9d5ca92680b53e4a75c72ddcf20184aa6c4da6ebbe87703f5 languageName: node linkType: hard @@ -6752,6 +7819,41 @@ __metadata: languageName: node linkType: hard +"lmdb@npm:^3.0.6": + version: 3.1.6 + resolution: "lmdb@npm:3.1.6" + dependencies: + "@lmdb/lmdb-darwin-arm64": "npm:3.1.6" + "@lmdb/lmdb-darwin-x64": "npm:3.1.6" + "@lmdb/lmdb-linux-arm": "npm:3.1.6" + "@lmdb/lmdb-linux-arm64": "npm:3.1.6" + "@lmdb/lmdb-linux-x64": "npm:3.1.6" + "@lmdb/lmdb-win32-x64": "npm:3.1.6" + msgpackr: "npm:^1.11.2" + node-addon-api: "npm:^6.1.0" + node-gyp: "npm:latest" + node-gyp-build-optional-packages: "npm:5.2.2" + ordered-binary: "npm:^1.5.3" + weak-lru-cache: "npm:^1.2.2" + dependenciesMeta: + "@lmdb/lmdb-darwin-arm64": + optional: true + "@lmdb/lmdb-darwin-x64": + optional: true + "@lmdb/lmdb-linux-arm": + optional: true + "@lmdb/lmdb-linux-arm64": + optional: true + "@lmdb/lmdb-linux-x64": + optional: true + "@lmdb/lmdb-win32-x64": + optional: true 
+ bin: + download-lmdb-prebuilds: bin/download-prebuilds.js + checksum: 081804f72aab6eb0f712654e3bbb2d454dd455bbfe09f223e10728971f201cfc166d4d6dd6a3099aabf79e4fd62e9c2a5eb9117bd5f2153ec5a419333f69a338 + languageName: node + linkType: hard + "loader-runner@npm:^4.2.0": version: 4.3.0 resolution: "loader-runner@npm:4.3.0" @@ -6826,12 +7928,10 @@ __metadata: languageName: node linkType: hard -"loupe@npm:^3.1.0, loupe@npm:^3.1.1": - version: 3.1.1 - resolution: "loupe@npm:3.1.1" - dependencies: - get-func-name: "npm:^2.0.1" - checksum: 99f88badc47e894016df0c403de846fedfea61154aadabbf776c8428dd59e8d8378007135d385d737de32ae47980af07d22ba7bec5ef7beebd721de9baa0a0af +"loupe@npm:^3.1.0, loupe@npm:^3.1.2": + version: 3.1.2 + resolution: "loupe@npm:3.1.2" + checksum: b13c02e3ddd6a9d5f8bf84133b3242de556512d824dddeea71cce2dbd6579c8f4d672381c4e742d45cf4423d0701765b4a6e5fbc24701def16bc2b40f8daa96a languageName: node linkType: hard @@ -6860,12 +7960,12 @@ __metadata: languageName: node linkType: hard -"magic-string@npm:^0.30.10": - version: 0.30.11 - resolution: "magic-string@npm:0.30.11" +"magic-string@npm:^0.30.12, magic-string@npm:^0.30.3": + version: 0.30.13 + resolution: "magic-string@npm:0.30.13" dependencies: "@jridgewell/sourcemap-codec": "npm:^1.5.0" - checksum: b9eb370773d0bd90ca11a848753409d8e5309b1ad56d2a1aa49d6649da710a6d2fe7237ad1a643c5a5d3800de2b9946ed9690acdfc00e6cc1aeafff3ab1752c4 + checksum: a275faeca1564c545019b4742c38a42ca80226c8c9e0805c32d1a1cc58b0e6ff7bbd914ed885fd10043858a7da0f732cb8f49c8975c3ecebde9cad4b57db5115 languageName: node linkType: hard @@ -6914,6 +8014,17 @@ __metadata: languageName: node linkType: hard +"md5.js@npm:^1.3.4": + version: 1.3.5 + resolution: "md5.js@npm:1.3.5" + dependencies: + hash-base: "npm:^3.0.0" + inherits: "npm:^2.0.1" + safe-buffer: "npm:^5.1.2" + checksum: b7bd75077f419c8e013fc4d4dada48be71882e37d69a44af65a2f2804b91e253441eb43a0614423a1c91bb830b8140b0dc906bc797245e2e275759584f4efcc5 + languageName: node + linkType: hard + "media-typer@npm:0.3.0": version: 0.3.0 resolution: "media-typer@npm:0.3.0" @@ -6930,10 +8041,22 @@ __metadata: languageName: node linkType: hard -"merge-descriptors@npm:1.0.1": - version: 1.0.1 - resolution: "merge-descriptors@npm:1.0.1" - checksum: b67d07bd44cfc45cebdec349bb6e1f7b077ee2fd5beb15d1f7af073849208cb6f144fe403e29a36571baf3f4e86469ac39acf13c318381e958e186b2766f54ec +"memfs@npm:^4.14.0": + version: 4.14.0 + resolution: "memfs@npm:4.14.0" + dependencies: + "@jsonjoy.com/json-pack": "npm:^1.0.3" + "@jsonjoy.com/util": "npm:^1.3.0" + tree-dump: "npm:^1.0.1" + tslib: "npm:^2.0.0" + checksum: d1de2e4b3c269f5b5f27b63f60bb8ea9ae5800843776e0bed4548f2957dcd55237ac5eab3a5ffe0d561a6be53e42c055a7bc79efc1613563b14e14c287ef3b0a + languageName: node + linkType: hard + +"merge-descriptors@npm:1.0.3": + version: 1.0.3 + resolution: "merge-descriptors@npm:1.0.3" + checksum: 866b7094afd9293b5ea5dcd82d71f80e51514bed33b4c4e9f516795dc366612a4cbb4dc94356e943a8a6914889a914530badff27f397191b9b75cda20b6bae93 languageName: node linkType: hard @@ -6968,6 +8091,18 @@ __metadata: languageName: node linkType: hard +"miller-rabin@npm:^4.0.0": + version: 4.0.1 + resolution: "miller-rabin@npm:4.0.1" + dependencies: + bn.js: "npm:^4.0.0" + brorand: "npm:^1.0.1" + bin: + miller-rabin: bin/miller-rabin + checksum: 26b2b96f6e49dbcff7faebb78708ed2f5f9ae27ac8cbbf1d7c08f83cf39bed3d418c0c11034dce997da70d135cc0ff6f3a4c15dc452f8e114c11986388a64346 + languageName: node + linkType: hard + "mime-db@npm:1.52.0": version: 1.52.0 resolution: "mime-db@npm:1.52.0" @@ 
-7023,13 +8158,6 @@ __metadata: languageName: node linkType: hard -"mimic-fn@npm:^4.0.0": - version: 4.0.0 - resolution: "mimic-fn@npm:4.0.0" - checksum: de9cc32be9996fd941e512248338e43407f63f6d497abe8441fa33447d922e927de54d4cc3c1a3c6d652857acd770389d5a3823f311a744132760ce2be15ccbf - languageName: node - linkType: hard - "mimic-function@npm:^5.0.0": version: 5.0.1 resolution: "mimic-function@npm:5.0.1" @@ -7037,13 +8165,20 @@ __metadata: languageName: node linkType: hard -"minimalistic-assert@npm:^1.0.0": +"minimalistic-assert@npm:^1.0.0, minimalistic-assert@npm:^1.0.1": version: 1.0.1 resolution: "minimalistic-assert@npm:1.0.1" checksum: 96730e5601cd31457f81a296f521eb56036e6f69133c0b18c13fe941109d53ad23a4204d946a0d638d7f3099482a0cec8c9bb6d642604612ce43ee536be3dddd languageName: node linkType: hard +"minimalistic-crypto-utils@npm:^1.0.1": + version: 1.0.1 + resolution: "minimalistic-crypto-utils@npm:1.0.1" + checksum: 790ecec8c5c73973a4fbf2c663d911033e8494d5fb0960a4500634766ab05d6107d20af896ca2132e7031741f19888154d44b2408ada0852446705441383e9f8 + languageName: node + linkType: hard + "minimatch@npm:3.1.2, minimatch@npm:^3.0.4, minimatch@npm:^3.0.5, minimatch@npm:^3.1.1, minimatch@npm:^3.1.2": version: 3.1.2 resolution: "minimatch@npm:3.1.2" @@ -7194,20 +8329,56 @@ __metadata: languageName: node linkType: hard -"ms@npm:2.1.2": - version: 2.1.2 - resolution: "ms@npm:2.1.2" - checksum: a437714e2f90dbf881b5191d35a6db792efbca5badf112f87b9e1c712aace4b4b9b742dd6537f3edf90fd6f684de897cec230abde57e87883766712ddda297cc - languageName: node - linkType: hard - -"ms@npm:2.1.3, ms@npm:^2.1.1": +"ms@npm:2.1.3, ms@npm:^2.1.1, ms@npm:^2.1.3": version: 2.1.3 resolution: "ms@npm:2.1.3" checksum: d924b57e7312b3b63ad21fc5b3dc0af5e78d61a1fc7cfb5457edaf26326bf62be5307cc87ffb6862ef1c2b33b0233cdb5d4f01c4c958cc0d660948b65a287a48 languageName: node linkType: hard +"msgpackr-extract@npm:^3.0.2": + version: 3.0.3 + resolution: "msgpackr-extract@npm:3.0.3" + dependencies: + "@msgpackr-extract/msgpackr-extract-darwin-arm64": "npm:3.0.3" + "@msgpackr-extract/msgpackr-extract-darwin-x64": "npm:3.0.3" + "@msgpackr-extract/msgpackr-extract-linux-arm": "npm:3.0.3" + "@msgpackr-extract/msgpackr-extract-linux-arm64": "npm:3.0.3" + "@msgpackr-extract/msgpackr-extract-linux-x64": "npm:3.0.3" + "@msgpackr-extract/msgpackr-extract-win32-x64": "npm:3.0.3" + node-gyp: "npm:latest" + node-gyp-build-optional-packages: "npm:5.2.2" + dependenciesMeta: + "@msgpackr-extract/msgpackr-extract-darwin-arm64": + optional: true + "@msgpackr-extract/msgpackr-extract-darwin-x64": + optional: true + "@msgpackr-extract/msgpackr-extract-linux-arm": + optional: true + "@msgpackr-extract/msgpackr-extract-linux-arm64": + optional: true + "@msgpackr-extract/msgpackr-extract-linux-x64": + optional: true + "@msgpackr-extract/msgpackr-extract-win32-x64": + optional: true + bin: + download-msgpackr-prebuilds: bin/download-prebuilds.js + checksum: e504fd8bf86a29d7527c83776530ee6dc92dcb0273bb3679fd4a85173efead7f0ee32fb82c8410a13c33ef32828c45f81118ffc0fbed5d6842e72299894623b4 + languageName: node + linkType: hard + +"msgpackr@npm:^1.11.2": + version: 1.11.2 + resolution: "msgpackr@npm:1.11.2" + dependencies: + msgpackr-extract: "npm:^3.0.2" + dependenciesMeta: + msgpackr-extract: + optional: true + checksum: 7d2e81ca82c397b2352d470d6bc8f4a967fe4fe14f8fc1fc9906b23009fdfb543999b1ad29c700b8861581e0b6bf903d6f0fefb69a09375cbca6d4d802e6c906 + languageName: node + linkType: hard + "multicast-dns@npm:^7.2.5": version: 7.2.5 resolution: "multicast-dns@npm:7.2.5" @@ 
-7243,13 +8414,20 @@ __metadata: languageName: node linkType: hard -"negotiator@npm:0.6.3, negotiator@npm:^0.6.3": +"negotiator@npm:0.6.3": version: 0.6.3 resolution: "negotiator@npm:0.6.3" checksum: 3ec9fd413e7bf071c937ae60d572bc67155262068ed522cf4b3be5edbe6ddf67d095ec03a3a14ebf8fc8e95f8e1d61be4869db0dbb0de696f6b837358bd43fc2 languageName: node linkType: hard +"negotiator@npm:^0.6.3, negotiator@npm:~0.6.4": + version: 0.6.4 + resolution: "negotiator@npm:0.6.4" + checksum: 3e677139c7fb7628a6f36335bf11a885a62c21d5390204590a1a214a5631fcbe5ea74ef6a610b60afe84b4d975cbe0566a23f20ee17c77c73e74b80032108dea + languageName: node + linkType: hard + "neo-async@npm:^2.6.2": version: 2.6.2 resolution: "neo-async@npm:2.6.2" @@ -7267,6 +8445,15 @@ __metadata: languageName: node linkType: hard +"node-addon-api@npm:^6.1.0": + version: 6.1.0 + resolution: "node-addon-api@npm:6.1.0" + dependencies: + node-gyp: "npm:latest" + checksum: d2699c4ad15740fd31482a3b6fca789af7723ab9d393adc6ac45250faaee72edad8f0b10b2b9d087df0de93f1bdc16d97afdd179b26b9ebc9ed68b569faa4bac + languageName: node + linkType: hard + "node-forge@npm:^1": version: 1.3.1 resolution: "node-forge@npm:1.3.1" @@ -7274,6 +8461,19 @@ __metadata: languageName: node linkType: hard +"node-gyp-build-optional-packages@npm:5.2.2": + version: 5.2.2 + resolution: "node-gyp-build-optional-packages@npm:5.2.2" + dependencies: + detect-libc: "npm:^2.0.1" + bin: + node-gyp-build-optional-packages: bin.js + node-gyp-build-optional-packages-optional: optional.js + node-gyp-build-optional-packages-test: build-test.js + checksum: c81128c6f91873381be178c5eddcbdf66a148a6a89a427ce2bcd457593ce69baf2a8662b6d22cac092d24aa9c43c230dec4e69b3a0da604503f4777cd77e282b + languageName: node + linkType: hard + "node-gyp@npm:latest": version: 10.2.0 resolution: "node-gyp@npm:10.2.0" @@ -7308,6 +8508,41 @@ __metadata: languageName: node linkType: hard +"node-stdlib-browser@npm:^1.2.0, node-stdlib-browser@npm:^1.3.0": + version: 1.3.0 + resolution: "node-stdlib-browser@npm:1.3.0" + dependencies: + assert: "npm:^2.0.0" + browser-resolve: "npm:^2.0.0" + browserify-zlib: "npm:^0.2.0" + buffer: "npm:^5.7.1" + console-browserify: "npm:^1.1.0" + constants-browserify: "npm:^1.0.0" + create-require: "npm:^1.1.1" + crypto-browserify: "npm:^3.11.0" + domain-browser: "npm:4.22.0" + events: "npm:^3.0.0" + https-browserify: "npm:^1.0.0" + isomorphic-timers-promises: "npm:^1.0.1" + os-browserify: "npm:^0.3.0" + path-browserify: "npm:^1.0.1" + pkg-dir: "npm:^5.0.0" + process: "npm:^0.11.10" + punycode: "npm:^1.4.1" + querystring-es3: "npm:^0.2.1" + readable-stream: "npm:^3.6.0" + stream-browserify: "npm:^3.0.0" + stream-http: "npm:^3.2.0" + string_decoder: "npm:^1.0.0" + timers-browserify: "npm:^2.0.4" + tty-browserify: "npm:0.0.1" + url: "npm:^0.11.4" + util: "npm:^0.12.4" + vm-browserify: "npm:^1.0.1" + checksum: e617f92f6af5a031fb9e670a04e1cf5d74e09ac46e182c784c5d5fff44c36d47f208ac01f267ec75d83c125a30e2c006090f676cd71d35e99a4c8a196a90cfff + languageName: node + linkType: hard + "nopt@npm:^7.0.0": version: 7.2.1 resolution: "nopt@npm:7.2.1" @@ -7342,15 +8577,6 @@ __metadata: languageName: node linkType: hard -"npm-run-path@npm:^5.1.0": - version: 5.3.0 - resolution: "npm-run-path@npm:5.3.0" - dependencies: - path-key: "npm:^4.0.0" - checksum: 124df74820c40c2eb9a8612a254ea1d557ddfab1581c3e751f825e3e366d9f00b0d76a3c94ecd8398e7f3eee193018622677e95816e8491f0797b21e30b2deba - languageName: node - linkType: hard - "nth-check@npm:^2.0.1": version: 2.1.1 resolution: "nth-check@npm:2.1.1" @@ -7360,10 
+8586,10 @@ __metadata: languageName: node linkType: hard -"object-inspect@npm:^1.13.1": - version: 1.13.2 - resolution: "object-inspect@npm:1.13.2" - checksum: b97835b4c91ec37b5fd71add84f21c3f1047d1d155d00c0fcd6699516c256d4fcc6ff17a1aced873197fe447f91a3964178fd2a67a1ee2120cdaf60e81a050b4 +"object-inspect@npm:^1.13.1, object-inspect@npm:^1.13.3": + version: 1.13.3 + resolution: "object-inspect@npm:1.13.3" + checksum: cc3f15213406be89ffdc54b525e115156086796a515410a8d390215915db9f23c8eab485a06f1297402f440a33715fe8f71a528c1dcbad6e1a3bcaf5a46921d4 languageName: node linkType: hard @@ -7478,15 +8704,6 @@ __metadata: languageName: node linkType: hard -"onetime@npm:^6.0.0": - version: 6.0.0 - resolution: "onetime@npm:6.0.0" - dependencies: - mimic-fn: "npm:^4.0.0" - checksum: 4eef7c6abfef697dd4479345a4100c382d73c149d2d56170a54a07418c50816937ad09500e1ed1e79d235989d073a9bade8557122aee24f0576ecde0f392bb6c - languageName: node - linkType: hard - "onetime@npm:^7.0.0": version: 7.0.0 resolution: "onetime@npm:7.0.0" @@ -7522,8 +8739,8 @@ __metadata: linkType: hard "ora@npm:^8.0.1": - version: 8.1.0 - resolution: "ora@npm:8.1.0" + version: 8.1.1 + resolution: "ora@npm:8.1.1" dependencies: chalk: "npm:^5.3.0" cli-cursor: "npm:^5.0.0" @@ -7534,7 +8751,21 @@ __metadata: stdin-discarder: "npm:^0.2.2" string-width: "npm:^7.2.0" strip-ansi: "npm:^7.1.0" - checksum: 4ac9a6dd7fe915a354680f33ced21ee96d13d3c5ab0dc00b3c3ba9e3695ed141b1d045222990f5a71a9a91f801042a0b0d32e58dfc5509ff9b81efdd3fcf6339 + checksum: 996a81a9e997481339de3a7996c56131ea292c0a0e9e42d1cd454e2390f1ce7015ec925dcdd29e3d74dc5d037a4aa1877e575b491555507bcd9f219df760a63f + languageName: node + linkType: hard + +"ordered-binary@npm:^1.5.3": + version: 1.5.3 + resolution: "ordered-binary@npm:1.5.3" + checksum: 2b67c90c79071f54344762fcecac256c3c6fe02a3ce1d349c7cab38a55a6137320b13022d6dd26faac462d887f48a32e04693a3ae30592185f290c793b92de03 + languageName: node + linkType: hard + +"os-browserify@npm:^0.3.0": + version: 0.3.0 + resolution: "os-browserify@npm:0.3.0" + checksum: 6ff32cb1efe2bc6930ad0fd4c50e30c38010aee909eba8d65be60af55efd6cbb48f0287e3649b4e3f3a63dce5a667b23c187c4293a75e557f0d5489d735bcf52 languageName: node linkType: hard @@ -7601,9 +8832,16 @@ __metadata: linkType: hard "package-json-from-dist@npm:^1.0.0": - version: 1.0.0 - resolution: "package-json-from-dist@npm:1.0.0" - checksum: e3ffaf6ac1040ab6082a658230c041ad14e72fabe99076a2081bb1d5d41210f11872403fc09082daf4387fc0baa6577f96c9c0e94c90c394fd57794b66aa4033 + version: 1.0.1 + resolution: "package-json-from-dist@npm:1.0.1" + checksum: 62ba2785eb655fec084a257af34dbe24292ab74516d6aecef97ef72d4897310bc6898f6c85b5cd22770eaa1ce60d55a0230e150fb6a966e3ecd6c511e23d164b + languageName: node + linkType: hard + +"pako@npm:~1.0.5": + version: 1.0.11 + resolution: "pako@npm:1.0.11" + checksum: 86dd99d8b34c3930345b8bbeb5e1cd8a05f608eeb40967b293f72fe469d0e9c88b783a8777e4cc7dc7c91ce54c5e93d88ff4b4f060e6ff18408fd21030d9ffbe languageName: node linkType: hard @@ -7626,6 +8864,20 @@ __metadata: languageName: node linkType: hard +"parse-asn1@npm:^5.0.0, parse-asn1@npm:^5.1.7": + version: 5.1.7 + resolution: "parse-asn1@npm:5.1.7" + dependencies: + asn1.js: "npm:^4.10.1" + browserify-aes: "npm:^1.2.0" + evp_bytestokey: "npm:^1.0.3" + hash-base: "npm:~3.0" + pbkdf2: "npm:^3.1.2" + safe-buffer: "npm:^5.2.1" + checksum: 05eb5937405c904eb5a7f3633bab1acc11f4ae3478a07ef5c6d81ce88c3c0e505ff51f9c7b935ebc1265c868343793698fc91025755a895d0276f620f95e8a82 + languageName: node + linkType: hard + "parse-json@npm:^5.2.0": 
version: 5.2.0 resolution: "parse-json@npm:5.2.0" @@ -7655,6 +8907,13 @@ __metadata: languageName: node linkType: hard +"path-browserify@npm:^1.0.1": + version: 1.0.1 + resolution: "path-browserify@npm:1.0.1" + checksum: 8b8c3fd5c66bd340272180590ae4ff139769e9ab79522e2eb82e3d571a89b8117c04147f65ad066dccfb42fcad902e5b7d794b3d35e0fd840491a8ddbedf8c66 + languageName: node + linkType: hard + "path-exists@npm:^4.0.0": version: 4.0.0 resolution: "path-exists@npm:4.0.0" @@ -7683,13 +8942,6 @@ __metadata: languageName: node linkType: hard -"path-key@npm:^4.0.0": - version: 4.0.0 - resolution: "path-key@npm:4.0.0" - checksum: 794efeef32863a65ac312f3c0b0a99f921f3e827ff63afa5cb09a377e202c262b671f7b3832a4e64731003fa94af0263713962d317b9887bd1e0c48a342efba3 - languageName: node - linkType: hard - "path-parse@npm:^1.0.7": version: 1.0.7 resolution: "path-parse@npm:1.0.7" @@ -7707,17 +8959,17 @@ __metadata: languageName: node linkType: hard -"path-to-regexp@npm:0.1.7": - version: 0.1.7 - resolution: "path-to-regexp@npm:0.1.7" - checksum: 50a1ddb1af41a9e68bd67ca8e331a705899d16fb720a1ea3a41e310480948387daf603abb14d7b0826c58f10146d49050a1291ba6a82b78a382d1c02c0b8f905 +"path-to-regexp@npm:0.1.10": + version: 0.1.10 + resolution: "path-to-regexp@npm:0.1.10" + checksum: 34196775b9113ca6df88e94c8d83ba82c0e1a2063dd33bfe2803a980da8d49b91db8104f49d5191b44ea780d46b8670ce2b7f4a5e349b0c48c6779b653f1afe4 languageName: node linkType: hard -"path-to-regexp@npm:2.2.1": - version: 2.2.1 - resolution: "path-to-regexp@npm:2.2.1" - checksum: f4b51090a73dad5ce0720f13ce8528ac77914bc927d72cc4ba05ab32770ad3a8d2e431962734b688b9ed863d4098d858da6ff4746037e4e24259cbd3b2c32b79 +"path-to-regexp@npm:3.3.0": + version: 3.3.0 + resolution: "path-to-regexp@npm:3.3.0" + checksum: ffa0ebe7088d38d435a8d08b0fe6e8c93ceb2a81a65d4dd1d9a538f52e09d5e3474ed5f553cb3b180d894b0caa10698a68737ab599fd1e56b4663d1a64c9f77b languageName: node linkType: hard @@ -7742,10 +8994,23 @@ __metadata: languageName: node linkType: hard -"picocolors@npm:^1.0.0, picocolors@npm:^1.0.1": - version: 1.1.0 - resolution: "picocolors@npm:1.1.0" - checksum: 86946f6032148801ef09c051c6fb13b5cf942eaf147e30ea79edb91dd32d700934edebe782a1078ff859fb2b816792e97ef4dab03d7f0b804f6b01a0df35e023 +"pbkdf2@npm:^3.1.2": + version: 3.1.2 + resolution: "pbkdf2@npm:3.1.2" + dependencies: + create-hash: "npm:^1.1.2" + create-hmac: "npm:^1.1.4" + ripemd160: "npm:^2.0.1" + safe-buffer: "npm:^5.0.1" + sha.js: "npm:^2.4.8" + checksum: 5a30374e87d33fa080a92734d778cf172542cc7e41b96198c4c88763997b62d7850de3fbda5c3111ddf79805ee7c1da7046881c90ac4920b5e324204518b05fd + languageName: node + linkType: hard + +"picocolors@npm:^1.0.0, picocolors@npm:^1.0.1, picocolors@npm:^1.1.0, picocolors@npm:^1.1.1": + version: 1.1.1 + resolution: "picocolors@npm:1.1.1" + checksum: e2e3e8170ab9d7c7421969adaa7e1b31434f789afb9b3f115f6b96d91945041ac3ceb02e9ec6fe6510ff036bcc0bf91e69a1772edc0b707e12b19c0f2d6bcf58 languageName: node linkType: hard @@ -7756,6 +9021,13 @@ __metadata: languageName: node linkType: hard +"picomatch@npm:^4.0.2": + version: 4.0.2 + resolution: "picomatch@npm:4.0.2" + checksum: 7c51f3ad2bb42c776f49ebf964c644958158be30d0a510efd5a395e8d49cb5acfed5b82c0c5b365523ce18e6ab85013c9ebe574f60305892ec3fa8eee8304ccc + languageName: node + linkType: hard + "pino-abstract-transport@npm:^1.0.0, pino-abstract-transport@npm:^1.2.0": version: 1.2.0 resolution: "pino-abstract-transport@npm:1.2.0" @@ -7834,6 +9106,15 @@ __metadata: languageName: node linkType: hard +"pkg-dir@npm:^5.0.0": + version: 5.0.0 + resolution: 
"pkg-dir@npm:5.0.0" + dependencies: + find-up: "npm:^5.0.0" + checksum: 793a496d685dc55bbbdbbb22d884535c3b29241e48e3e8d37e448113a71b9e42f5481a61fdc672d7322de12fbb2c584dd3a68bf89b18fffce5c48a390f911bc5 + languageName: node + linkType: hard + "playwright-core@npm:1.46.1": version: 1.46.1 resolution: "playwright-core@npm:1.46.1" @@ -7889,26 +9170,26 @@ __metadata: linkType: hard "postcss-modules-local-by-default@npm:^4.0.5": - version: 4.0.5 - resolution: "postcss-modules-local-by-default@npm:4.0.5" + version: 4.1.0 + resolution: "postcss-modules-local-by-default@npm:4.1.0" dependencies: icss-utils: "npm:^5.0.0" - postcss-selector-parser: "npm:^6.0.2" + postcss-selector-parser: "npm:^7.0.0" postcss-value-parser: "npm:^4.1.0" peerDependencies: postcss: ^8.1.0 - checksum: f4ad35abeb685ecb25f80c93d9fe23c8b89ee45ac4185f3560e701b4d7372f9b798577e79c5ed03b6d9c80bc923b001210c127c04ced781f43cda9e32b202a5b + checksum: d6e47d2488c6fcde2c91696d15ef094e6b1cdd8d5dcdf20c6ac72567fcc4778f5f80b8381839232b37242f200b4d83e98a947bf3b3315b0bf673ea42528a3caf languageName: node linkType: hard "postcss-modules-scope@npm:^3.2.0": - version: 3.2.0 - resolution: "postcss-modules-scope@npm:3.2.0" + version: 3.2.1 + resolution: "postcss-modules-scope@npm:3.2.1" dependencies: - postcss-selector-parser: "npm:^6.0.4" + postcss-selector-parser: "npm:^7.0.0" peerDependencies: postcss: ^8.1.0 - checksum: a2f5ffe372169b3feb8628cd785eb748bf12e344cfa57bce9e5cdc4fa5adcdb40d36daa86bb35dad53427703b185772aad08825b5783f745fcb1b6039454a84b + checksum: bd2d81f79e3da0ef6365b8e2c78cc91469d05b58046b4601592cdeef6c4050ed8fe1478ae000a1608042fc7e692cb51fecbd2d9bce3f4eace4d32e883ffca10b languageName: node linkType: hard @@ -7923,13 +9204,13 @@ __metadata: languageName: node linkType: hard -"postcss-selector-parser@npm:^6.0.2, postcss-selector-parser@npm:^6.0.4": - version: 6.1.2 - resolution: "postcss-selector-parser@npm:6.1.2" +"postcss-selector-parser@npm:^7.0.0": + version: 7.0.0 + resolution: "postcss-selector-parser@npm:7.0.0" dependencies: cssesc: "npm:^3.0.0" util-deprecate: "npm:^1.0.2" - checksum: 523196a6bd8cf660bdf537ad95abd79e546d54180f9afb165a4ab3e651ac705d0f8b8ce6b3164fb9e3279ce482c5f751a69eb2d3a1e8eb0fd5e82294fb3ef13e + checksum: e96e096afcce70bf5c97789f5ea09d7415ae5eb701d82b05b5e8532885d31363b484fcb1ca9488c9a331f30508d9e5bb6c3109eb2eb5067ef3d3919f9928cd9d languageName: node linkType: hard @@ -7940,14 +9221,14 @@ __metadata: languageName: node linkType: hard -"postcss@npm:^8.4.29, postcss@npm:^8.4.33, postcss@npm:^8.4.41": - version: 8.4.44 - resolution: "postcss@npm:8.4.44" +"postcss@npm:^8.4.29, postcss@npm:^8.4.33, postcss@npm:^8.4.43": + version: 8.4.49 + resolution: "postcss@npm:8.4.49" dependencies: nanoid: "npm:^3.3.7" - picocolors: "npm:^1.0.1" - source-map-js: "npm:^1.2.0" - checksum: 53c33338261a3d4f4198f8893e9dfe8b828d8d9186142ee85f02d228f04245c5bbe31239411a357a556ad20ed96f28db24d0921d63edc428fdc9133289371a1d + picocolors: "npm:^1.1.1" + source-map-js: "npm:^1.2.1" + checksum: f1b3f17aaf36d136f59ec373459f18129908235e65dbdc3aee5eef8eba0756106f52de5ec4682e29a2eab53eb25170e7e871b3e4b52a8f1de3d344a514306be3 languageName: node linkType: hard @@ -8069,17 +9350,31 @@ __metadata: languageName: node linkType: hard +"public-encrypt@npm:^4.0.3": + version: 4.0.3 + resolution: "public-encrypt@npm:4.0.3" + dependencies: + bn.js: "npm:^4.1.0" + browserify-rsa: "npm:^4.0.0" + create-hash: "npm:^1.1.0" + parse-asn1: "npm:^5.0.0" + randombytes: "npm:^2.0.1" + safe-buffer: "npm:^5.1.2" + checksum: 
6c2cc19fbb554449e47f2175065d6b32f828f9b3badbee4c76585ac28ae8641aafb9bb107afc430c33c5edd6b05dbe318df4f7d6d7712b1093407b11c4280700 + languageName: node + linkType: hard + "pump@npm:^3.0.0": - version: 3.0.0 - resolution: "pump@npm:3.0.0" + version: 3.0.2 + resolution: "pump@npm:3.0.2" dependencies: end-of-stream: "npm:^1.1.0" once: "npm:^1.3.1" - checksum: bbdeda4f747cdf47db97428f3a135728669e56a0ae5f354a9ac5b74556556f5446a46f720a8f14ca2ece5be9b4d5d23c346db02b555f46739934cc6c093a5478 + checksum: 5ad655cb2a7738b4bcf6406b24ad0970d680649d996b55ad20d1be8e0c02394034e4c45ff7cd105d87f1e9b96a0e3d06fd28e11fae8875da26e7f7a8e2c9726f languageName: node linkType: hard -"punycode@npm:^1.3.2": +"punycode@npm:^1.4.1": version: 1.4.1 resolution: "punycode@npm:1.4.1" checksum: 354b743320518aef36f77013be6e15da4db24c2b4f62c5f1eb0529a6ed02fbaf1cb52925785f6ab85a962f2b590d9cd5ad730b70da72b5f180e2556b8bd3ca08 @@ -8100,12 +9395,28 @@ __metadata: languageName: node linkType: hard -"qs@npm:6.11.0": - version: 6.11.0 - resolution: "qs@npm:6.11.0" +"qs@npm:6.13.0": + version: 6.13.0 + resolution: "qs@npm:6.13.0" dependencies: - side-channel: "npm:^1.0.4" - checksum: 4e4875e4d7c7c31c233d07a448e7e4650f456178b9dd3766b7cfa13158fdb24ecb8c4f059fa91e820dc6ab9f2d243721d071c9c0378892dcdad86e9e9a27c68f + side-channel: "npm:^1.0.6" + checksum: 62372cdeec24dc83a9fb240b7533c0fdcf0c5f7e0b83343edd7310f0ab4c8205a5e7c56406531f2e47e1b4878a3821d652be4192c841de5b032ca83619d8f860 + languageName: node + linkType: hard + +"qs@npm:^6.12.3": + version: 6.13.1 + resolution: "qs@npm:6.13.1" + dependencies: + side-channel: "npm:^1.0.6" + checksum: 5ef527c0d62ffca5501322f0832d800ddc78eeb00da3b906f1b260ca0492721f8cdc13ee4b8fd8ac314a6ec37b948798c7b603ccc167e954088df392092f160c + languageName: node + linkType: hard + +"querystring-es3@npm:^0.2.1": + version: 0.2.1 + resolution: "querystring-es3@npm:0.2.1" + checksum: 476938c1adb45c141f024fccd2ffd919a3746e79ed444d00e670aad68532977b793889648980e7ca7ff5ffc7bfece623118d0fbadcaf217495eeb7059ae51580 languageName: node linkType: hard @@ -8123,7 +9434,7 @@ __metadata: languageName: node linkType: hard -"randombytes@npm:^2.1.0": +"randombytes@npm:^2.0.0, randombytes@npm:^2.0.1, randombytes@npm:^2.0.5, randombytes@npm:^2.1.0": version: 2.1.0 resolution: "randombytes@npm:2.1.0" dependencies: @@ -8132,6 +9443,16 @@ __metadata: languageName: node linkType: hard +"randomfill@npm:^1.0.4": + version: 1.0.4 + resolution: "randomfill@npm:1.0.4" + dependencies: + randombytes: "npm:^2.0.5" + safe-buffer: "npm:^5.1.0" + checksum: 11aeed35515872e8f8a2edec306734e6b74c39c46653607f03c68385ab8030e2adcc4215f76b5e4598e028c4750d820afd5c65202527d831d2a5f207fe2bc87c + languageName: node + linkType: hard + "range-parser@npm:1.2.0": version: 1.2.0 resolution: "range-parser@npm:1.2.0" @@ -8172,7 +9493,7 @@ __metadata: languageName: node linkType: hard -"react-dom@npm:^18.2.0": +"react-dom@npm:^18.2.0, react-dom@npm:^18.3.1": version: 18.3.1 resolution: "react-dom@npm:18.3.1" dependencies: @@ -8205,19 +9526,19 @@ __metadata: languageName: node linkType: hard -"react-toastify@npm:^10.0.4": - version: 10.0.5 - resolution: "react-toastify@npm:10.0.5" +"react-toastify@npm:^10.0.4, react-toastify@npm:^10.0.6": + version: 10.0.6 + resolution: "react-toastify@npm:10.0.6" dependencies: clsx: "npm:^2.1.0" peerDependencies: react: ">=18" react-dom: ">=18" - checksum: 66c68ec3d6c017d9f32652d73bb925224921c6a80b629b9d481430d5b4fd504abb7a99995a64b9aef0fc31326c74f3cbe088b3287b978dd0c355079c4bbf4158 + checksum: 
4042b716d008295d0feab32488d1e88ec655a1b7a9176fa7d253c70387578a8a0c04aca0ff86d20e1722f3b4baadae8970f50f462940d67a90453c307dd350a9 languageName: node linkType: hard -"react@npm:^18.2.0": +"react@npm:^18.2.0, react@npm:^18.3.1": version: 18.3.1 resolution: "react@npm:18.3.1" dependencies: @@ -8226,7 +9547,7 @@ __metadata: languageName: node linkType: hard -"readable-stream@npm:^2.0.1": +"readable-stream@npm:^2.0.1, readable-stream@npm:^2.3.8": version: 2.3.8 resolution: "readable-stream@npm:2.3.8" dependencies: @@ -8241,7 +9562,7 @@ __metadata: languageName: node linkType: hard -"readable-stream@npm:^3.0.6, readable-stream@npm:^3.5.0": +"readable-stream@npm:^3.0.6, readable-stream@npm:^3.5.0, readable-stream@npm:^3.6.0": version: 3.6.2 resolution: "readable-stream@npm:3.6.2" dependencies: @@ -8290,15 +9611,30 @@ __metadata: languageName: node linkType: hard -"regexp.prototype.flags@npm:^1.5.2": - version: 1.5.2 - resolution: "regexp.prototype.flags@npm:1.5.2" +"reflect.getprototypeof@npm:^1.0.6": + version: 1.0.6 + resolution: "reflect.getprototypeof@npm:1.0.6" dependencies: - call-bind: "npm:^1.0.6" + call-bind: "npm:^1.0.7" + define-properties: "npm:^1.2.1" + es-abstract: "npm:^1.23.1" + es-errors: "npm:^1.3.0" + get-intrinsic: "npm:^1.2.4" + globalthis: "npm:^1.0.3" + which-builtin-type: "npm:^1.1.3" + checksum: baf4ef8ee6ff341600f4720b251cf5a6cb552d6a6ab0fdc036988c451bf16f920e5feb0d46bd4f530a5cce568f1f7aca2d77447ca798920749cfc52783c39b55 + languageName: node + linkType: hard + +"regexp.prototype.flags@npm:^1.5.3": + version: 1.5.3 + resolution: "regexp.prototype.flags@npm:1.5.3" + dependencies: + call-bind: "npm:^1.0.7" define-properties: "npm:^1.2.1" es-errors: "npm:^1.3.0" - set-function-name: "npm:^2.0.1" - checksum: 0f3fc4f580d9c349f8b560b012725eb9c002f36daa0041b3fbf6f4238cb05932191a4d7d5db3b5e2caa336d5150ad0402ed2be81f711f9308fe7e1a9bf9bd552 + set-function-name: "npm:^2.0.2" + checksum: e1a7c7dc42cc91abf73e47a269c4b3a8f225321b7f617baa25821f6a123a91d23a73b5152f21872c566e699207e1135d075d2251cd3e84cc96d82a910adf6020 languageName: node linkType: hard @@ -8410,7 +9746,7 @@ __metadata: languageName: node linkType: hard -"resolve@npm:^1.20.0, resolve@npm:^1.22.4": +"resolve@npm:^1.17.0, resolve@npm:^1.20.0, resolve@npm:^1.22.4": version: 1.22.8 resolution: "resolve@npm:1.22.8" dependencies: @@ -8423,7 +9759,7 @@ __metadata: languageName: node linkType: hard -"resolve@patch:resolve@npm%3A^1.20.0#optional!builtin, resolve@patch:resolve@npm%3A^1.22.4#optional!builtin": +"resolve@patch:resolve@npm%3A^1.17.0#optional!builtin, resolve@patch:resolve@npm%3A^1.20.0#optional!builtin, resolve@patch:resolve@npm%3A^1.22.4#optional!builtin": version: 1.22.8 resolution: "resolve@patch:resolve@npm%3A1.22.8#optional!builtin::version=1.22.8&hash=c3c19d" dependencies: @@ -8478,27 +9814,39 @@ __metadata: languageName: node linkType: hard +"ripemd160@npm:^2.0.0, ripemd160@npm:^2.0.1": + version: 2.0.2 + resolution: "ripemd160@npm:2.0.2" + dependencies: + hash-base: "npm:^3.0.0" + inherits: "npm:^2.0.1" + checksum: f6f0df78817e78287c766687aed4d5accbebc308a8e7e673fb085b9977473c1f139f0c5335d353f172a915bb288098430755d2ad3c4f30612f4dd0c901cd2c3a + languageName: node + linkType: hard + "rollup@npm:^4.20.0": - version: 4.21.2 - resolution: "rollup@npm:4.21.2" - dependencies: - "@rollup/rollup-android-arm-eabi": "npm:4.21.2" - "@rollup/rollup-android-arm64": "npm:4.21.2" - "@rollup/rollup-darwin-arm64": "npm:4.21.2" - "@rollup/rollup-darwin-x64": "npm:4.21.2" - "@rollup/rollup-linux-arm-gnueabihf": "npm:4.21.2" - 
"@rollup/rollup-linux-arm-musleabihf": "npm:4.21.2" - "@rollup/rollup-linux-arm64-gnu": "npm:4.21.2" - "@rollup/rollup-linux-arm64-musl": "npm:4.21.2" - "@rollup/rollup-linux-powerpc64le-gnu": "npm:4.21.2" - "@rollup/rollup-linux-riscv64-gnu": "npm:4.21.2" - "@rollup/rollup-linux-s390x-gnu": "npm:4.21.2" - "@rollup/rollup-linux-x64-gnu": "npm:4.21.2" - "@rollup/rollup-linux-x64-musl": "npm:4.21.2" - "@rollup/rollup-win32-arm64-msvc": "npm:4.21.2" - "@rollup/rollup-win32-ia32-msvc": "npm:4.21.2" - "@rollup/rollup-win32-x64-msvc": "npm:4.21.2" - "@types/estree": "npm:1.0.5" + version: 4.27.3 + resolution: "rollup@npm:4.27.3" + dependencies: + "@rollup/rollup-android-arm-eabi": "npm:4.27.3" + "@rollup/rollup-android-arm64": "npm:4.27.3" + "@rollup/rollup-darwin-arm64": "npm:4.27.3" + "@rollup/rollup-darwin-x64": "npm:4.27.3" + "@rollup/rollup-freebsd-arm64": "npm:4.27.3" + "@rollup/rollup-freebsd-x64": "npm:4.27.3" + "@rollup/rollup-linux-arm-gnueabihf": "npm:4.27.3" + "@rollup/rollup-linux-arm-musleabihf": "npm:4.27.3" + "@rollup/rollup-linux-arm64-gnu": "npm:4.27.3" + "@rollup/rollup-linux-arm64-musl": "npm:4.27.3" + "@rollup/rollup-linux-powerpc64le-gnu": "npm:4.27.3" + "@rollup/rollup-linux-riscv64-gnu": "npm:4.27.3" + "@rollup/rollup-linux-s390x-gnu": "npm:4.27.3" + "@rollup/rollup-linux-x64-gnu": "npm:4.27.3" + "@rollup/rollup-linux-x64-musl": "npm:4.27.3" + "@rollup/rollup-win32-arm64-msvc": "npm:4.27.3" + "@rollup/rollup-win32-ia32-msvc": "npm:4.27.3" + "@rollup/rollup-win32-x64-msvc": "npm:4.27.3" + "@types/estree": "npm:1.0.6" fsevents: "npm:~2.3.2" dependenciesMeta: "@rollup/rollup-android-arm-eabi": @@ -8509,6 +9857,10 @@ __metadata: optional: true "@rollup/rollup-darwin-x64": optional: true + "@rollup/rollup-freebsd-arm64": + optional: true + "@rollup/rollup-freebsd-x64": + optional: true "@rollup/rollup-linux-arm-gnueabihf": optional: true "@rollup/rollup-linux-arm-musleabihf": @@ -8537,7 +9889,7 @@ __metadata: optional: true bin: rollup: dist/bin/rollup - checksum: c9d97f7a21cde110371b2e890a31a996fee09b81e639e79372b962a9638ae653d2d24186b94632fc5dfab8a0582e1d0639dfe34b8b75051facd86915a9585a5f + checksum: 789885d3f852ed7ca45bed14194a2ac7a2cf16b6b62b54f691c79e27d5557d31a2d612d3680c26c527a1957e0bd6811806ddd765e0dae589404cf24544ff2838 languageName: node linkType: hard @@ -8569,7 +9921,7 @@ __metadata: languageName: node linkType: hard -"safe-buffer@npm:5.2.1, safe-buffer@npm:>=5.1.0, safe-buffer@npm:^5.0.1, safe-buffer@npm:^5.1.0, safe-buffer@npm:~5.2.0": +"safe-buffer@npm:5.2.1, safe-buffer@npm:>=5.1.0, safe-buffer@npm:^5.0.1, safe-buffer@npm:^5.1.0, safe-buffer@npm:^5.1.1, safe-buffer@npm:^5.1.2, safe-buffer@npm:^5.2.0, safe-buffer@npm:^5.2.1, safe-buffer@npm:~5.2.0": version: 5.2.1 resolution: "safe-buffer@npm:5.2.1" checksum: 6501914237c0a86e9675d4e51d89ca3c21ffd6a31642efeba25ad65720bce6921c9e7e974e5be91a786b25aa058b5303285d3c15dbabf983a919f5f630d349f3 @@ -8666,7 +10018,7 @@ __metadata: languageName: node linkType: hard -"semver@npm:^7.3.4, semver@npm:^7.3.5, semver@npm:^7.5.3, semver@npm:^7.5.4, semver@npm:^7.6.3": +"semver@npm:^7.3.4, semver@npm:^7.3.5, semver@npm:^7.5.3, semver@npm:^7.5.4, semver@npm:^7.6.0, semver@npm:^7.6.3": version: 7.6.3 resolution: "semver@npm:7.6.3" bin: @@ -8675,9 +10027,9 @@ __metadata: languageName: node linkType: hard -"send@npm:0.18.0": - version: 0.18.0 - resolution: "send@npm:0.18.0" +"send@npm:0.19.0": + version: 0.19.0 + resolution: "send@npm:0.19.0" dependencies: debug: "npm:2.6.9" depd: "npm:2.0.0" @@ -8692,7 +10044,7 @@ __metadata: 
on-finished: "npm:2.4.1" range-parser: "npm:~1.2.1" statuses: "npm:2.0.1" - checksum: 0eb134d6a51fc13bbcb976a1f4214ea1e33f242fae046efc311e80aff66c7a43603e26a79d9d06670283a13000e51be6e0a2cb80ff0942eaf9f1cd30b7ae736a + checksum: ea3f8a67a8f0be3d6bf9080f0baed6d2c51d11d4f7b4470de96a5029c598a7011c497511ccc28968b70ef05508675cebff27da9151dd2ceadd60be4e6cf845e3 languageName: node linkType: hard @@ -8705,19 +10057,18 @@ __metadata: languageName: node linkType: hard -"serve-handler@npm:6.1.5": - version: 6.1.5 - resolution: "serve-handler@npm:6.1.5" +"serve-handler@npm:6.1.6": + version: 6.1.6 + resolution: "serve-handler@npm:6.1.6" dependencies: bytes: "npm:3.0.0" content-disposition: "npm:0.5.2" - fast-url-parser: "npm:1.1.3" mime-types: "npm:2.1.18" minimatch: "npm:3.1.2" path-is-inside: "npm:1.0.2" - path-to-regexp: "npm:2.2.1" + path-to-regexp: "npm:3.3.0" range-parser: "npm:1.2.0" - checksum: 6fd393ae37a0305107e634ca545322b00605322189fe70d8f1a4a90a101c4e354768c610efe5a7ef1af3820cec5c33d97467c88151f35a3cb41d8ff2075ef802 + checksum: 1e1cb6bbc51ee32bc1505f2e0605bdc2e96605c522277c977b67f83be9d66bd1eec8604388714a4d728e036d86b629bc9aec02120ea030d3d2c3899d44696503 languageName: node linkType: hard @@ -8736,21 +10087,21 @@ __metadata: languageName: node linkType: hard -"serve-static@npm:1.15.0": - version: 1.15.0 - resolution: "serve-static@npm:1.15.0" +"serve-static@npm:1.16.2": + version: 1.16.2 + resolution: "serve-static@npm:1.16.2" dependencies: - encodeurl: "npm:~1.0.2" + encodeurl: "npm:~2.0.0" escape-html: "npm:~1.0.3" parseurl: "npm:~1.3.3" - send: "npm:0.18.0" - checksum: fa9f0e21a540a28f301258dfe1e57bb4f81cd460d28f0e973860477dd4acef946a1f41748b5bd41c73b621bea2029569c935faa38578fd34cd42a9b4947088ba + send: "npm:0.19.0" + checksum: 528fff6f5e12d0c5a391229ad893910709bc51b5705962b09404a1d813857578149b8815f35d3ee5752f44cd378d0f31669d4b1d7e2d11f41e08283d5134bd1f languageName: node linkType: hard "serve@npm:^14.2.1": - version: 14.2.3 - resolution: "serve@npm:14.2.3" + version: 14.2.4 + resolution: "serve@npm:14.2.4" dependencies: "@zeit/schemas": "npm:2.36.0" ajv: "npm:8.12.0" @@ -8761,11 +10112,11 @@ __metadata: clipboardy: "npm:3.0.0" compression: "npm:1.7.4" is-port-reachable: "npm:4.0.0" - serve-handler: "npm:6.1.5" + serve-handler: "npm:6.1.6" update-check: "npm:1.5.4" bin: serve: build/main.js - checksum: b03c683f02ad99852b5ec9ce88b26a1dee6276b26d89c7b85c23a487a75cee7ddd63fec38c2b7d7563803b223d3bd8a2cd1996c286188f9e4b1276876bc0faad + checksum: 93abecd6214228d529065040f7c0cbe541c1cc321c6a94b8a968f45a519bd9c46a9fd5e45a9b24a1f5736c5b547b8fa60d5414ebc78f870e29431b64165c1d06 languageName: node linkType: hard @@ -8783,7 +10134,7 @@ __metadata: languageName: node linkType: hard -"set-function-name@npm:^2.0.1": +"set-function-name@npm:^2.0.2": version: 2.0.2 resolution: "set-function-name@npm:2.0.2" dependencies: @@ -8795,6 +10146,13 @@ __metadata: languageName: node linkType: hard +"setimmediate@npm:^1.0.4": + version: 1.0.5 + resolution: "setimmediate@npm:1.0.5" + checksum: 5bae81bfdbfbd0ce992893286d49c9693c82b1bcc00dcaaf3a09c8f428fdeacf4190c013598b81875dfac2b08a572422db7df779a99332d0fce186d15a3e4d49 + languageName: node + linkType: hard + "setprototypeof@npm:1.1.0": version: 1.1.0 resolution: "setprototypeof@npm:1.1.0" @@ -8809,6 +10167,18 @@ __metadata: languageName: node linkType: hard +"sha.js@npm:^2.4.0, sha.js@npm:^2.4.8": + version: 2.4.11 + resolution: "sha.js@npm:2.4.11" + dependencies: + inherits: "npm:^2.0.1" + safe-buffer: "npm:^5.0.1" + bin: + sha.js: ./bin.js + checksum: 
b7a371bca8821c9cc98a0aeff67444a03d48d745cb103f17228b96793f455f0eb0a691941b89ea1e60f6359207e36081d9be193252b0f128e0daf9cfea2815a5 + languageName: node + linkType: hard + "shallow-clone@npm:^3.0.0": version: 3.0.1 resolution: "shallow-clone@npm:3.0.1" @@ -8841,7 +10211,7 @@ __metadata: languageName: node linkType: hard -"side-channel@npm:^1.0.4": +"side-channel@npm:^1.0.4, side-channel@npm:^1.0.6": version: 1.0.6 resolution: "side-channel@npm:1.0.6" dependencies: @@ -8943,10 +10313,10 @@ __metadata: languageName: node linkType: hard -"source-map-js@npm:^1.2.0": - version: 1.2.0 - resolution: "source-map-js@npm:1.2.0" - checksum: 7e5f896ac10a3a50fe2898e5009c58ff0dc102dcb056ed27a354623a0ece8954d4b2649e1a1b2b52ef2e161d26f8859c7710350930751640e71e374fe2d321a4 +"source-map-js@npm:^1.2.1": + version: 1.2.1 + resolution: "source-map-js@npm:1.2.1" + checksum: 7bda1fc4c197e3c6ff17de1b8b2c20e60af81b63a52cb32ec5a5d67a20a7d42651e2cb34ebe93833c5a2a084377e17455854fee3e21e7925c64a51b6a52b0faf languageName: node linkType: hard @@ -9071,10 +10441,10 @@ __metadata: languageName: node linkType: hard -"std-env@npm:^3.7.0": - version: 3.7.0 - resolution: "std-env@npm:3.7.0" - checksum: 60edf2d130a4feb7002974af3d5a5f3343558d1ccf8d9b9934d225c638606884db4a20d2fe6440a09605bca282af6b042ae8070a10490c0800d69e82e478f41e +"std-env@npm:^3.8.0": + version: 3.8.0 + resolution: "std-env@npm:3.8.0" + checksum: f560a2902fd0fa3d648d7d0acecbd19d664006f7372c1fba197ed4c216b4c9e48db6e2769b5fe1616d42a9333c9f066c5011935035e85c59f45dc4f796272040 languageName: node linkType: hard @@ -9095,6 +10465,18 @@ __metadata: languageName: node linkType: hard +"stream-http@npm:^3.2.0": + version: 3.2.0 + resolution: "stream-http@npm:3.2.0" + dependencies: + builtin-status-codes: "npm:^3.0.0" + inherits: "npm:^2.0.4" + readable-stream: "npm:^3.6.0" + xtend: "npm:^4.0.2" + checksum: f128fb8076d60cd548f229554b6a1a70c08a04b7b2afd4dbe7811d20f27f7d4112562eb8bce86d72a8691df3b50573228afcf1271e55e81f981536c67498bc41 + languageName: node + linkType: hard + "string-length@npm:^4.0.1": version: 4.0.2 resolution: "string-length@npm:4.0.2" @@ -9172,7 +10554,7 @@ __metadata: languageName: node linkType: hard -"string_decoder@npm:^1.1.1, string_decoder@npm:^1.3.0": +"string_decoder@npm:^1.0.0, string_decoder@npm:^1.1.1, string_decoder@npm:^1.3.0": version: 1.3.0 resolution: "string_decoder@npm:1.3.0" dependencies: @@ -9229,13 +10611,6 @@ __metadata: languageName: node linkType: hard -"strip-final-newline@npm:^3.0.0": - version: 3.0.0 - resolution: "strip-final-newline@npm:3.0.0" - checksum: a771a17901427bac6293fd416db7577e2bc1c34a19d38351e9d5478c3c415f523f391003b42ed475f27e33a78233035df183525395f731d3bfb8cdcbd4da08ce - languageName: node - linkType: hard - "strip-json-comments@npm:^3.1.1": version: 3.1.1 resolution: "strip-json-comments@npm:3.1.1" @@ -9259,15 +10634,6 @@ __metadata: languageName: node linkType: hard -"supports-color@npm:^5.3.0": - version: 5.5.0 - resolution: "supports-color@npm:5.5.0" - dependencies: - has-flag: "npm:^3.0.0" - checksum: 6ae5ff319bfbb021f8a86da8ea1f8db52fac8bd4d499492e30ec17095b58af11f0c55f8577390a749b1c4dde691b6a0315dab78f5f54c9b3d83f8fb5905c1c05 - languageName: node - linkType: hard - "supports-color@npm:^7.1.0": version: 7.2.0 resolution: "supports-color@npm:7.2.0" @@ -9294,12 +10660,12 @@ __metadata: linkType: hard "synckit@npm:^0.9.1": - version: 0.9.1 - resolution: "synckit@npm:0.9.1" + version: 0.9.2 + resolution: "synckit@npm:0.9.2" dependencies: "@pkgr/core": "npm:^0.1.0" tslib: "npm:^2.6.2" - checksum: 
d8b89e1bf30ba3ffb469d8418c836ad9c0c062bf47028406b4d06548bc66af97155ea2303b96c93bf5c7c0f0d66153a6fbd6924c76521b434e6a9898982abc2e + checksum: e0c262817444e5b872708adb6f5ad37951ba33f6b2d1d4477d45db1f57573a784618ceed5e6614e0225db330632b1f6b95bb74d21e4d013e45ad4bde03d0cb59 languageName: node linkType: hard @@ -9347,8 +10713,8 @@ __metadata: linkType: hard "terser@npm:^5.10.0, terser@npm:^5.26.0": - version: 5.31.6 - resolution: "terser@npm:5.31.6" + version: 5.36.0 + resolution: "terser@npm:5.36.0" dependencies: "@jridgewell/source-map": "npm:^0.3.3" acorn: "npm:^8.8.2" @@ -9356,7 +10722,7 @@ __metadata: source-map-support: "npm:~0.5.20" bin: terser: bin/terser - checksum: b17d02b65a52a5041430572b3c514475820f5e7590fa93773c0f5b4be601ccf3f6d745bf5a79f3ee58187cf85edf61c24ddf4345783839fccb44c9c8fa9b427e + checksum: f4ed2bead19f64789ddcfb85b7cef78f3942f967b8890c54f57d1e35bc7d547d551c6a4c32210bce6ba45b1c738314bbfac6acbc6c762a45cd171777d0c120d9 languageName: node linkType: hard @@ -9378,6 +10744,15 @@ __metadata: languageName: node linkType: hard +"thingies@npm:^1.20.0": + version: 1.21.0 + resolution: "thingies@npm:1.21.0" + peerDependencies: + tslib: ^2 + checksum: 7570ee855aecb73185a672ecf3eb1c287a6512bf5476449388433b2d4debcf78100bc8bfd439b0edd38d2bc3bfb8341de5ce85b8557dec66d0f27b962c9a8bc1 + languageName: node + linkType: hard + "thread-stream@npm:^2.6.0": version: 2.7.0 resolution: "thread-stream@npm:2.7.0" @@ -9414,6 +10789,15 @@ __metadata: languageName: node linkType: hard +"timers-browserify@npm:^2.0.4": + version: 2.0.12 + resolution: "timers-browserify@npm:2.0.12" + dependencies: + setimmediate: "npm:^1.0.4" + checksum: 98e84db1a685bc8827c117a8bc62aac811ad56a995d07938fc7ed8cdc5bf3777bfe2d4e5da868847194e771aac3749a20f6cdd22091300fe889a76fe214a4641 + languageName: node + linkType: hard + "tiny-case@npm:^1.0.3": version: 1.0.3 resolution: "tiny-case@npm:1.0.3" @@ -9438,17 +10822,24 @@ __metadata: languageName: node linkType: hard -"tinybench@npm:^2.8.0": +"tinybench@npm:^2.9.0": version: 2.9.0 resolution: "tinybench@npm:2.9.0" checksum: c3500b0f60d2eb8db65250afe750b66d51623057ee88720b7f064894a6cb7eb93360ca824a60a31ab16dab30c7b1f06efe0795b352e37914a9d4bad86386a20c languageName: node linkType: hard -"tinypool@npm:^1.0.0": - version: 1.0.1 - resolution: "tinypool@npm:1.0.1" - checksum: 90939d6a03f1519c61007bf416632dc1f0b9c1a9dd673c179ccd9e36a408437384f984fc86555a5d040d45b595abc299c3bb39d354439e98a090766b5952e73d +"tinyexec@npm:^0.3.1": + version: 0.3.1 + resolution: "tinyexec@npm:0.3.1" + checksum: 11e7a7c5d8b3bddf8b5cbe82a9290d70a6fad84d528421d5d18297f165723cb53d2e737d8f58dcce5ca56f2e4aa2d060f02510b1f8971784f97eb3e9aec28f09 + languageName: node + linkType: hard + +"tinypool@npm:^1.0.1": + version: 1.0.2 + resolution: "tinypool@npm:1.0.2" + checksum: 31ac184c0ff1cf9a074741254fe9ea6de95026749eb2b8ec6fd2b9d8ca94abdccda731f8e102e7f32e72ed3b36d32c6975fd5f5523df3f1b6de6c3d8dfd95e63 languageName: node linkType: hard @@ -9459,10 +10850,10 @@ __metadata: languageName: node linkType: hard -"tinyspy@npm:^3.0.0": - version: 3.0.0 - resolution: "tinyspy@npm:3.0.0" - checksum: eb0dec264aa5370efd3d29743825eb115ed7f1ef8a72a431e9a75d5c9e7d67e99d04b0d61d86b8cd70c79ec27863f241ad0317bc453f78762e0cbd76d2c332d0 +"tinyspy@npm:^3.0.2": + version: 3.0.2 + resolution: "tinyspy@npm:3.0.2" + checksum: 55ffad24e346622b59292e097c2ee30a63919d5acb7ceca87fc0d1c223090089890587b426e20054733f97a58f20af2c349fb7cc193697203868ab7ba00bcea0 languageName: node linkType: hard @@ -9473,13 +10864,6 @@ __metadata: languageName: node 
linkType: hard -"to-fast-properties@npm:^2.0.0": - version: 2.0.0 - resolution: "to-fast-properties@npm:2.0.0" - checksum: b214d21dbfb4bce3452b6244b336806ffea9c05297148d32ebb428d5c43ce7545bdfc65a1ceb58c9ef4376a65c0cb2854d645f33961658b3e3b4f84910ddcdd7 - languageName: node - linkType: hard - "to-regex-range@npm:^5.0.1": version: 5.0.1 resolution: "to-regex-range@npm:5.0.1" @@ -9503,12 +10887,21 @@ __metadata: languageName: node linkType: hard -"ts-api-utils@npm:^1.0.1": - version: 1.3.0 - resolution: "ts-api-utils@npm:1.3.0" +"tree-dump@npm:^1.0.1": + version: 1.0.2 + resolution: "tree-dump@npm:1.0.2" + peerDependencies: + tslib: 2 + checksum: d1d180764e9c691b28332dbd74226c6b6af361dfb1e134bb11e60e17cb11c215894adee50ffc578da5dcf546006693947be8b6665eb1269b56e2f534926f1c1f + languageName: node + linkType: hard + +"ts-api-utils@npm:^1.0.1, ts-api-utils@npm:^1.3.0": + version: 1.4.0 + resolution: "ts-api-utils@npm:1.4.0" peerDependencies: typescript: ">=4.2.0" - checksum: f54a0ba9ed56ce66baea90a3fa087a484002e807f28a8ccb2d070c75e76bde64bd0f6dce98b3802834156306050871b67eec325cb4e918015a360a3f0868c77c + checksum: 1b2bfa50ea52771d564bb143bb69010d25cda03ed573095fbac9b86f717012426443af6647e00e3db70fca60360482a30c1be7cf73c3521c321f6bf5e3594ea0 languageName: node linkType: hard @@ -9623,9 +11016,16 @@ __metadata: linkType: hard "tslib@npm:^2.0.0, tslib@npm:^2.0.3, tslib@npm:^2.6.2": - version: 2.7.0 - resolution: "tslib@npm:2.7.0" - checksum: 469e1d5bf1af585742128827000711efa61010b699cb040ab1800bcd3ccdd37f63ec30642c9e07c4439c1db6e46345582614275daca3e0f4abae29b0083f04a6 + version: 2.8.1 + resolution: "tslib@npm:2.8.1" + checksum: 9c4759110a19c53f992d9aae23aac5ced636e99887b51b9e61def52611732872ff7668757d4e4c61f19691e36f4da981cd9485e869b4a7408d689f6bf1f14e62 + languageName: node + linkType: hard + +"tty-browserify@npm:0.0.1": + version: 0.0.1 + resolution: "tty-browserify@npm:0.0.1" + checksum: 5e34883388eb5f556234dae75b08e069b9e62de12bd6d87687f7817f5569430a6dfef550b51dbc961715ae0cd0eb5a059e6e3fc34dc127ea164aa0f9b5bb033d languageName: node linkType: hard @@ -9701,8 +11101,8 @@ __metadata: linkType: hard "typed-array-byte-offset@npm:^1.0.2": - version: 1.0.2 - resolution: "typed-array-byte-offset@npm:1.0.2" + version: 1.0.3 + resolution: "typed-array-byte-offset@npm:1.0.3" dependencies: available-typed-arrays: "npm:^1.0.7" call-bind: "npm:^1.0.7" @@ -9710,7 +11110,8 @@ __metadata: gopd: "npm:^1.0.1" has-proto: "npm:^1.0.3" is-typed-array: "npm:^1.1.13" - checksum: d2628bc739732072e39269389a758025f75339de2ed40c4f91357023c5512d237f255b633e3106c461ced41907c1bf9a533c7e8578066b0163690ca8bc61b22f + reflect.getprototypeof: "npm:^1.0.6" + checksum: 5da29585f96671c0521475226d3227000b3e01d1e99208b66bb05b75c7c8f4d0e9cc2e79920f3bfbc792a00102df1daa2608a2753e3f291b671d5a80245bde5b languageName: node linkType: hard @@ -9728,23 +11129,39 @@ __metadata: languageName: node linkType: hard -"typescript@npm:^5.0.4": - version: 5.5.4 - resolution: "typescript@npm:5.5.4" +"typescript-eslint@npm:^8.11.0": + version: 8.15.0 + resolution: "typescript-eslint@npm:8.15.0" + dependencies: + "@typescript-eslint/eslint-plugin": "npm:8.15.0" + "@typescript-eslint/parser": "npm:8.15.0" + "@typescript-eslint/utils": "npm:8.15.0" + peerDependencies: + eslint: ^8.57.0 || ^9.0.0 + peerDependenciesMeta: + typescript: + optional: true + checksum: 589aebf0d0b9b79db1cd0b7c2ea08c6b5727c1db095d39077d070c332066c7d549a0eb2ef60b0d41619720c317c1955236c5c8ee6320bc7c6ae475add7223b55 + languageName: node + linkType: hard + +"typescript@npm:^5.0.4, 
typescript@npm:~5.6.2": + version: 5.6.3 + resolution: "typescript@npm:5.6.3" bin: tsc: bin/tsc tsserver: bin/tsserver - checksum: 422be60f89e661eab29ac488c974b6cc0a660fb2228003b297c3d10c32c90f3bcffc1009b43876a082515a3c376b1eefcce823d6e78982e6878408b9a923199c + checksum: 44f61d3fb15c35359bc60399cb8127c30bae554cd555b8e2b46d68fa79d680354b83320ad419ff1b81a0bdf324197b29affe6cc28988cd6a74d4ac60c94f9799 languageName: node linkType: hard -"typescript@patch:typescript@npm%3A^5.0.4#optional!builtin": - version: 5.5.4 - resolution: "typescript@patch:typescript@npm%3A5.5.4#optional!builtin::version=5.5.4&hash=e012d7" +"typescript@patch:typescript@npm%3A^5.0.4#optional!builtin, typescript@patch:typescript@npm%3A~5.6.2#optional!builtin": + version: 5.6.3 + resolution: "typescript@patch:typescript@npm%3A5.6.3#optional!builtin::version=5.6.3&hash=e012d7" bin: tsc: bin/tsc tsserver: bin/tsserver - checksum: 10dd9881baba22763de859e8050d6cb6e2db854197495c6f1929b08d1eb2b2b00d0b5d9b0bcee8472f1c3f4a7ef6a5d7ebe0cfd703f853aa5ae465b8404bc1ba + checksum: ac8307bb06bbfd08ae7137da740769b7d8c3ee5943188743bb622c621f8ad61d244767480f90fbd840277fbf152d8932aa20c33f867dea1bb5e79b187ca1a92f languageName: node linkType: hard @@ -9760,7 +11177,7 @@ __metadata: languageName: node linkType: hard -"undici-types@npm:~6.19.2": +"undici-types@npm:~6.19.2, undici-types@npm:~6.19.8": version: 6.19.8 resolution: "undici-types@npm:6.19.8" checksum: 078afa5990fba110f6824823ace86073b4638f1d5112ee26e790155f481f2a868cc3e0615505b6f4282bdf74a3d8caad715fd809e870c2bb0704e3ea6082f344 @@ -9799,17 +11216,17 @@ __metadata: languageName: node linkType: hard -"update-browserslist-db@npm:^1.1.0": - version: 1.1.0 - resolution: "update-browserslist-db@npm:1.1.0" +"update-browserslist-db@npm:^1.1.1": + version: 1.1.1 + resolution: "update-browserslist-db@npm:1.1.1" dependencies: - escalade: "npm:^3.1.2" - picocolors: "npm:^1.0.1" + escalade: "npm:^3.2.0" + picocolors: "npm:^1.1.0" peerDependencies: browserslist: ">= 4.21.0" bin: update-browserslist-db: cli.js - checksum: a7452de47785842736fb71547651c5bbe5b4dc1e3722ccf48a704b7b34e4dcf633991eaa8e4a6a517ffb738b3252eede3773bef673ef9021baa26b056d63a5b9 + checksum: 536a2979adda2b4be81b07e311bd2f3ad5e978690987956bc5f514130ad50cac87cd22c710b686d79731e00fbee8ef43efe5fcd72baa241045209195d43dcc80 languageName: node linkType: hard @@ -9832,6 +11249,16 @@ __metadata: languageName: node linkType: hard +"url@npm:^0.11.4": + version: 0.11.4 + resolution: "url@npm:0.11.4" + dependencies: + punycode: "npm:^1.4.1" + qs: "npm:^6.12.3" + checksum: cc93405ae4a9b97a2aa60ca67f1cb1481c0221cb4725a7341d149be5e2f9cfda26fd432d64dbbec693d16593b68b8a46aad8e5eab21f814932134c9d8620c662 + languageName: node + linkType: hard + "util-deprecate@npm:^1.0.1, util-deprecate@npm:^1.0.2, util-deprecate@npm:~1.0.1": version: 1.0.2 resolution: "util-deprecate@npm:1.0.2" @@ -9839,7 +11266,7 @@ __metadata: languageName: node linkType: hard -"util@npm:^0.12.5": +"util@npm:^0.12.4, util@npm:^0.12.5": version: 0.12.5 resolution: "util@npm:0.12.5" dependencies: @@ -9866,6 +11293,15 @@ __metadata: languageName: node linkType: hard +"uuid@npm:^10.0.0": + version: 10.0.0 + resolution: "uuid@npm:10.0.0" + bin: + uuid: dist/bin/uuid + checksum: eab18c27fe4ab9fb9709a5d5f40119b45f2ec8314f8d4cf12ce27e4c6f4ffa4a6321dc7db6c515068fa373c075b49691ba969f0010bf37f44c37ca40cd6bf7fe + languageName: node + linkType: hard + "uuid@npm:^8.3.2": version: 8.3.2 resolution: "uuid@npm:8.3.2" @@ -9900,28 +11336,62 @@ __metadata: languageName: node linkType: hard 
-"vite-node@npm:2.0.5": - version: 2.0.5 - resolution: "vite-node@npm:2.0.5" +"vite-node@npm:2.1.5": + version: 2.1.5 + resolution: "vite-node@npm:2.1.5" dependencies: cac: "npm:^6.7.14" - debug: "npm:^4.3.5" + debug: "npm:^4.3.7" + es-module-lexer: "npm:^1.5.4" pathe: "npm:^1.1.2" - tinyrainbow: "npm:^1.2.0" vite: "npm:^5.0.0" bin: vite-node: vite-node.mjs - checksum: affcc58ae8d45bce3e8bc3b5767acd57c24441634e2cd967cf97f4e5ed2bcead1714b60150cdf7ee153ebad47659c5cd419883207e1a95b69790331e3243749f + checksum: 4ebe6bdf52f5ed65cb6f18af087faa87d8dca8e1a87413d1dbb8ead141d6e5d359ae006bd6c5e8f8c89cd5d90499bbf1d3f9e9a161dcc4bc86ec526862c01360 + languageName: node + linkType: hard + +"vite-plugin-externalize-deps@npm:^0.8.0": + version: 0.8.0 + resolution: "vite-plugin-externalize-deps@npm:0.8.0" + peerDependencies: + vite: ^2.0.0 || ^3.0.0 || ^4.0.0 || ^5.0.0 + checksum: 0ed0d2a85f96e6470b700c1a598de9e3b189b186f7a3d05e4e945c0381bb2717dce5a40de5d6b485ec8e30f31ff33b17d56d487f618395d7b55ad7ef74d35ca3 languageName: node linkType: hard -"vite@npm:^5.0.0": - version: 5.4.2 - resolution: "vite@npm:5.4.2" +"vite-plugin-node-polyfills@npm:^0.22.0": + version: 0.22.0 + resolution: "vite-plugin-node-polyfills@npm:0.22.0" + dependencies: + "@rollup/plugin-inject": "npm:^5.0.5" + node-stdlib-browser: "npm:^1.2.0" + peerDependencies: + vite: ^2.0.0 || ^3.0.0 || ^4.0.0 || ^5.0.0 + checksum: f8ddc452eb6fba280977d037f8a6406aa522e69590641ce72ce5bb31c3498856a9f63ab3671bc6a822dcd1ff9ba5cac02cacef4a0e170fd8500cdeeb38c81675 + languageName: node + linkType: hard + +"vite-plugin-top-level-await@npm:^1.4.4": + version: 1.4.4 + resolution: "vite-plugin-top-level-await@npm:1.4.4" + dependencies: + "@rollup/plugin-virtual": "npm:^3.0.2" + "@swc/core": "npm:^1.7.0" + uuid: "npm:^10.0.0" + peerDependencies: + vite: ">=2.8" + checksum: 013e7b2e28632d93d04c4061187198e699064fc208a1657c100354b32da30921fa835879fc17779d5e0b074855237408da2fadd720fa0f4571137427a1efd5e3 + languageName: node + linkType: hard + +"vite@npm:^5.0.0, vite@npm:^5.4.10": + version: 5.4.11 + resolution: "vite@npm:5.4.11" dependencies: esbuild: "npm:^0.21.3" fsevents: "npm:~2.3.3" - postcss: "npm:^8.4.41" + postcss: "npm:^8.4.43" rollup: "npm:^4.20.0" peerDependencies: "@types/node": ^18.0.0 || >=20.0.0 @@ -9954,38 +11424,74 @@ __metadata: optional: true bin: vite: bin/vite.js - checksum: 23e347ca8aa6f0a774227e4eb7abae228f12c6806a727b046aa75e7ee37ffc2d68cff74360e12a42c347f79adc294e2363bc723b957bf4b382b5a8fb39e4df9d + checksum: d536bb7af57dd0eca2a808f95f5ff1d7b7ffb8d86e17c6893087680a0448bd0d15e07475270c8a6de65cb5115592d037130a1dd979dc76bcef8c1dda202a1874 languageName: node linkType: hard +"vite@workspace:boxes/vite": + version: 0.0.0-use.local + resolution: "vite@workspace:boxes/vite" + dependencies: + "@aztec/accounts": "portal:../../../yarn-project/accounts" + "@aztec/aztec.js": "portal:../../../yarn-project/aztec.js" + "@aztec/circuit-types": "portal:../../../yarn-project/circuit-types" + "@aztec/key-store": "link:../../../yarn-project/key-store" + "@aztec/kv-store": "portal:../../../yarn-project/kv-store" + "@aztec/pxe": "link:../../../yarn-project/pxe" + "@eslint/js": "npm:^9.13.0" + "@noir-lang/acvm_js": "link:../../../noir/packages/acvm_js" + "@noir-lang/noirc_abi": "link:../../../noir/packages/noirc_abi" + "@types/react": "npm:^18.3.12" + "@types/react-dom": "npm:^18.3.1" + "@vitejs/plugin-react-swc": "npm:^3.5.0" + buffer: "npm:^6.0.3" + eslint: "npm:^9.13.0" + eslint-plugin-react-hooks: "npm:^5.0.0" + eslint-plugin-react-refresh: "npm:^0.4.14" + globals: 
"npm:^15.11.0" + memfs: "npm:^4.14.0" + node-stdlib-browser: "npm:^1.3.0" + react: "npm:^18.3.1" + react-dom: "npm:^18.3.1" + react-toastify: "npm:^10.0.6" + typescript: "npm:~5.6.2" + typescript-eslint: "npm:^8.11.0" + vite: "npm:^5.4.10" + vite-plugin-externalize-deps: "npm:^0.8.0" + vite-plugin-node-polyfills: "npm:^0.22.0" + vite-plugin-top-level-await: "npm:^1.4.4" + languageName: unknown + linkType: soft + "vitest@npm:^2.0.5": - version: 2.0.5 - resolution: "vitest@npm:2.0.5" - dependencies: - "@ampproject/remapping": "npm:^2.3.0" - "@vitest/expect": "npm:2.0.5" - "@vitest/pretty-format": "npm:^2.0.5" - "@vitest/runner": "npm:2.0.5" - "@vitest/snapshot": "npm:2.0.5" - "@vitest/spy": "npm:2.0.5" - "@vitest/utils": "npm:2.0.5" - chai: "npm:^5.1.1" - debug: "npm:^4.3.5" - execa: "npm:^8.0.1" - magic-string: "npm:^0.30.10" + version: 2.1.5 + resolution: "vitest@npm:2.1.5" + dependencies: + "@vitest/expect": "npm:2.1.5" + "@vitest/mocker": "npm:2.1.5" + "@vitest/pretty-format": "npm:^2.1.5" + "@vitest/runner": "npm:2.1.5" + "@vitest/snapshot": "npm:2.1.5" + "@vitest/spy": "npm:2.1.5" + "@vitest/utils": "npm:2.1.5" + chai: "npm:^5.1.2" + debug: "npm:^4.3.7" + expect-type: "npm:^1.1.0" + magic-string: "npm:^0.30.12" pathe: "npm:^1.1.2" - std-env: "npm:^3.7.0" - tinybench: "npm:^2.8.0" - tinypool: "npm:^1.0.0" + std-env: "npm:^3.8.0" + tinybench: "npm:^2.9.0" + tinyexec: "npm:^0.3.1" + tinypool: "npm:^1.0.1" tinyrainbow: "npm:^1.2.0" vite: "npm:^5.0.0" - vite-node: "npm:2.0.5" + vite-node: "npm:2.1.5" why-is-node-running: "npm:^2.3.0" peerDependencies: "@edge-runtime/vm": "*" "@types/node": ^18.0.0 || >=20.0.0 - "@vitest/browser": 2.0.5 - "@vitest/ui": 2.0.5 + "@vitest/browser": 2.1.5 + "@vitest/ui": 2.1.5 happy-dom: "*" jsdom: "*" peerDependenciesMeta: @@ -10003,7 +11509,14 @@ __metadata: optional: true bin: vitest: vitest.mjs - checksum: b4e6cca00816bf967a8589111ded72faa12f92f94ccdd0dcd0698ffcfdfc52ec662753f66b387549c600ac699b993fd952efbd99dc57fcf4d1c69a2f1022b259 + checksum: 1befb842da0826eed8761fe6cbd6ecae6b38d1ae83ac6619b994544d07e47905feaff2b254210315aa8e9b86645174c71a63b5d809799a289679a0063381c9a4 + languageName: node + linkType: hard + +"vm-browserify@npm:^1.0.1": + version: 1.1.2 + resolution: "vm-browserify@npm:1.1.2" + checksum: 0cc1af6e0d880deb58bc974921320c187f9e0a94f25570fca6b1bd64e798ce454ab87dfd797551b1b0cc1849307421aae0193cedf5f06bdb5680476780ee344b languageName: node linkType: hard @@ -10035,6 +11548,13 @@ __metadata: languageName: node linkType: hard +"weak-lru-cache@npm:^1.2.2": + version: 1.2.2 + resolution: "weak-lru-cache@npm:1.2.2" + checksum: 744847bd5b96ca86db1cb40d0aea7e92c02bbdb05f501181bf9c581e82fa2afbda32a327ffbe75749302b8492ab449f1c657ca02410d725f5d412d1e6c607d72 + languageName: node + linkType: hard + "webpack-cli@npm:^5.1.4": version: 5.1.4 resolution: "webpack-cli@npm:5.1.4" @@ -10148,16 +11668,16 @@ __metadata: linkType: hard "webpack@npm:^5.88.2, webpack@npm:^5.90.1": - version: 5.94.0 - resolution: "webpack@npm:5.94.0" + version: 5.96.1 + resolution: "webpack@npm:5.96.1" dependencies: - "@types/estree": "npm:^1.0.5" + "@types/eslint-scope": "npm:^3.7.7" + "@types/estree": "npm:^1.0.6" "@webassemblyjs/ast": "npm:^1.12.1" "@webassemblyjs/wasm-edit": "npm:^1.12.1" "@webassemblyjs/wasm-parser": "npm:^1.12.1" - acorn: "npm:^8.7.1" - acorn-import-attributes: "npm:^1.9.5" - browserslist: "npm:^4.21.10" + acorn: "npm:^8.14.0" + browserslist: "npm:^4.24.0" chrome-trace-event: "npm:^1.0.2" enhanced-resolve: "npm:^5.17.1" es-module-lexer: "npm:^1.2.1" @@ -10179,7 
+11699,7 @@ __metadata: optional: true bin: webpack: bin/webpack.js - checksum: b4d1b751f634079bd177a89eef84d80fa5bb8d6fc15d72ab40fc2b9ca5167a79b56585e1a849e9e27e259803ee5c4365cb719e54af70a43c06358ec268ff4ebf + checksum: ae6052fde9a546f79f14987b65823ba4024c6642a8489339ecfee7a351dff93325842aad453295bbdc6b65fb1690e4ef07529db63aa84ece55c7869e991a0039 languageName: node linkType: hard @@ -10214,6 +11734,38 @@ __metadata: languageName: node linkType: hard +"which-builtin-type@npm:^1.1.3": + version: 1.1.4 + resolution: "which-builtin-type@npm:1.1.4" + dependencies: + function.prototype.name: "npm:^1.1.6" + has-tostringtag: "npm:^1.0.2" + is-async-function: "npm:^2.0.0" + is-date-object: "npm:^1.0.5" + is-finalizationregistry: "npm:^1.0.2" + is-generator-function: "npm:^1.0.10" + is-regex: "npm:^1.1.4" + is-weakref: "npm:^1.0.2" + isarray: "npm:^2.0.5" + which-boxed-primitive: "npm:^1.0.2" + which-collection: "npm:^1.0.2" + which-typed-array: "npm:^1.1.15" + checksum: a4a76d20d869a81b1dbb4adea31edc7e6c1a4466d3ab7c2cd757c9219d48d3723b04076c85583257b0f0f8e3ebe5af337248b8ceed57b9051cb97bce5bd881d1 + languageName: node + linkType: hard + +"which-collection@npm:^1.0.2": + version: 1.0.2 + resolution: "which-collection@npm:1.0.2" + dependencies: + is-map: "npm:^2.0.3" + is-set: "npm:^2.0.3" + is-weakmap: "npm:^2.0.2" + is-weakset: "npm:^2.0.3" + checksum: 3345fde20964525a04cdf7c4a96821f85f0cc198f1b2ecb4576e08096746d129eb133571998fe121c77782ac8f21cbd67745a3d35ce100d26d4e684c142ea1f2 + languageName: node + linkType: hard + "which-typed-array@npm:^1.1.14, which-typed-array@npm:^1.1.15, which-typed-array@npm:^1.1.2": version: 1.1.15 resolution: "which-typed-array@npm:1.1.15" @@ -10349,6 +11901,13 @@ __metadata: languageName: node linkType: hard +"xtend@npm:^4.0.2": + version: 4.0.2 + resolution: "xtend@npm:4.0.2" + checksum: 366ae4783eec6100f8a02dff02ac907bf29f9a00b82ac0264b4d8b832ead18306797e283cf19de776538babfdcb2101375ec5646b59f08c52128ac4ab812ed0e + languageName: node + linkType: hard + "y18n@npm:^5.0.5": version: 5.0.8 resolution: "y18n@npm:5.0.8" diff --git a/build-system/scripts/build b/build-system/scripts/build index 090878dd135f..a67c9fdb6b21 100755 --- a/build-system/scripts/build +++ b/build-system/scripts/build @@ -68,7 +68,7 @@ if [ -d $ROOT_PATH/$PROJECT_DIR/terraform ]; then popd fi -# For each dependency, substitute references to the dependency in dockerfile, with the relevent built image uri. +# For each dependency, substitute references to the dependency in dockerfile, with the relevant built image uri. # This is necessary vs pulling and retagging the image, as that doesn't work with buildx. # We have to perform a bit of probing to determine which actual image we want to use. # When we used buildx to create a multiarch image, there will be no images with "-$ARCH" suffixes (normalise this?). @@ -91,7 +91,7 @@ for PARENT_REPO in $(query_manifest dependencies $REPOSITORY); do fi fi - # Substitute references to parent repo, with the relevent built image uri. + # Substitute references to parent repo, with the relevant built image uri. 
DEPLOY_URI=aztecprotocol/$PARENT_REPO PARENT_IMAGE_URI=$ECR_URL/$PARENT_REPO:$PARENT_IMAGE_TAG sed -i "s#^FROM \\(.*\\)${DEPLOY_URI}\\( \|$\\)#FROM \\1${PARENT_IMAGE_URI}\\2#" $DOCKERFILE diff --git a/docker-compose.yml b/docker-compose.yml index 12d365c8e039..c0928538e49f 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -6,8 +6,6 @@ services: platform: linux/amd64 environment: LOG_LEVEL: ${LOG_LEVEL:-info} - DEBUG: ${DEBUG:-aztec:*,-json-rpc:*,-aztec:circuits:artifact_hash,-aztec:randomness_singleton} - DEBUG_COLORS: 1 L1_CHAIN_ID: 31337 VERSION: 1 PXE_PROVER_ENABLED: ${PXE_PROVER_ENABLED:-1} @@ -38,8 +36,6 @@ services: platform: linux/amd64 environment: LOG_LEVEL: ${LOG_LEVEL:-info} - DEBUG: ${DEBUG:-aztec:*,-json-rpc:*,-aztec:circuits:artifact_hash,-aztec:randomness_singleton,-aztec:avm_simulator:*} - DEBUG_COLORS: 1 L1_CHAIN_ID: 31337 VERSION: 1 NODE_NO_WARNINGS: 1 diff --git a/docs/docs/aztec/concepts/accounts/authwit.md b/docs/docs/aztec/concepts/accounts/authwit.md index 79cb828ccb3f..307c07a0edba 100644 --- a/docs/docs/aztec/concepts/accounts/authwit.md +++ b/docs/docs/aztec/concepts/accounts/authwit.md @@ -5,6 +5,8 @@ importance: 1 keywords: [authwit, authentication witness, accounts] --- +import Image from "@theme/IdealImage"; + Authentication Witness is a scheme for authenticating actions on Aztec, so users can allow third-parties (eg protocols or other users) to execute an action on their behalf. ## Background @@ -13,15 +15,7 @@ When building DeFi or other smart contracts, it is often desired to interact wit In the EVM world, this is often accomplished by having the user `approve` the protocol to transfer funds from their account, and then calling a `deposit` function on it afterwards. -```mermaid -sequenceDiagram - actor Alice - Alice->>Token: approve(Defi, 1000); - Alice->>Defi: deposit(Token, 1000); - activate Defi - Defi->>Token: transferFrom(Alice, Defi, 1000); - deactivate Defi -``` + This flow makes it rather simple for the application developer to implement the deposit function, but does not come without its downsides. @@ -36,16 +30,7 @@ This can lead to a series of issues though, eg: To avoid this, many protocols implement the `permit` flow, which uses a meta-transaction to let the user sign the approval off-chain, and pass it as an input to the `deposit` function, that way the user only has to send one transaction to make the deposit. -```mermaid -sequenceDiagram - actor Alice - Alice->>Alice: sign permit(Defi, 1000); - Alice->>Defi: deposit(Token, 1000, signature); - activate Defi - Defi->>Token: permit(Alice, Defi, 1000, signature); - Defi->>Token: transferFrom(Alice, Defi, 1000); - deactivate Defi -``` + This is a great improvement to infinite approvals, but still has its own sets of issues. For example, if the user is using a smart-contract wallet (such as Argent or Gnosis Safe), they will not be able to sign the permit message since the usual signature validation does not work well with contracts. [EIP-1271](https://eips.ethereum.org/EIPS/eip-1271) was proposed to give contracts a way to emulate this, but it is not widely adopted. @@ -57,7 +42,7 @@ All of these issues have been discussed in the community for a while, and there Adopting ERC20 for Aztec is not as simple as it might seem because of private state. -If you recall from the [Hybrid State model](../state_model/index.md), private state is generally only known by its owner and those they have shared it with. 
Because it relies on secrets, private state might be "owned" by a contract, but it needs someone with knowledge of these secrets to actually spend it. You might see where this is going. +If you recall from the [Hybrid State model](../storage/state_model/index.md), private state is generally only known by its owner and those they have shared it with. Because it relies on secrets, private state might be "owned" by a contract, but it needs someone with knowledge of these secrets to actually spend it. You might see where this is going. If we were to implement the `approve` with an allowance in private, you might know the allowance, but unless you also know about the individual notes that make up the user's balances, it would be of no use to you! It is private after all. To spend the user's funds you would need to know the decryption key, see [keys for more](./keys.md). @@ -105,32 +90,7 @@ This can be read as "defi is allowed to call token transfer function with the ar With this out of the way, let's look at how this would work in the graph below. The exact contents of the witness will differ between implementations as mentioned before, but for the sake of simplicity you can think of it as a signature, which the account contract can then use to validate if it really should allow the action. -```mermaid -sequenceDiagram - actor Alice - participant AC as Alice Account - participant Token - Alice->>AC: Defi.deposit(Token, 1000); - activate AC - AC->>Defi: deposit(Token, 1000); - activate Defi - Defi->>Token: transfer(Alice, Defi, 1000); - activate Token - Token->>AC: Check if Defi may call transfer(Alice, Defi, 1000); - AC-->>Alice: Please give me AuthWit for DeFi
calling transfer(Alice, Defi, 1000); - activate Alice - Alice-->>Alice: Produces Authentication witness - Alice-->>AC: AuthWit for transfer(Alice, Defi, 1000); - AC->>Token: AuthWit validity - deactivate Alice - Token->>Token: throw if invalid AuthWit - Token->>Token: transfer(Alice, Defi, 1000); - Token->>Defi: success - deactivate Token - Defi->>Defi: deposit(Token, 1000); - deactivate Defi - deactivate AC -``` + :::info Static call for AuthWit checks The call to the account contract for checking authentication should be a static call, meaning that it cannot change state or make calls that change state. If this call is not static, it could be used to re-enter the flow and change the state of the contract. @@ -144,36 +104,7 @@ The above flow could be re-entered at token transfer. It is mainly for show to i As noted earlier, we could use the ERC20 standard for public. But this seems like a waste when we have the ability to try righting some wrongs. Instead, we can expand our AuthWit scheme to also work in public. This is actually quite simple, instead of asking an oracle (which we can't do as easily because not private execution) we can just store the AuthWit in a shared registry, and look it up when we need it. While this needs the storage to be updated ahead of time (can be same tx), we can quite easily do so by batching the AuthWit updates with the interaction - a benefit of Account Contracts. A shared registry is used such that execution from the sequencers point of view will be more straight forward and predictable. Furthermore, since we have the authorization data directly in public state, if they are both set and unset (authorized and then used) in the same transaction, there will be no state effect after the transaction for the authorization which saves gas ⛽. -```mermaid -sequenceDiagram - actor Alice - participant AC as Alice Account - participant AR as Auth Registry - participant Token - participant Defi - rect rgb(191, 223, 255) - note right of Alice: Alice sends a batch - Alice->>AC: Authorize Defi to call transfer(Alice, Defi, 1000); - activate AC - Alice->>AC: Defi.deposit(Token, 1000); - end - AC->>AR: Authorize Defi to call transfer(Alice, Defi, 1000); - AR->>AR: add authorize to true - AC->>Defi: deposit(Token, 1000); - activate Defi - Defi->>Token: transfer(Alice, Defi, 1000); - activate Token - Token->>AR: Check if Defi may call transfer(Alice, Defi, 1000); - AR->>AR: set authorize to false - AR->>Token: AuthWit validity - Token->>Token: throw if invalid AuthWit - Token->>Token: transfer(Alice, Defi, 1000); - Token->>Defi: success - deactivate Token - Defi->>Defi: deposit(Token, 1000); - deactivate Defi - deactivate AC -``` + ### Replays diff --git a/docs/docs/aztec/concepts/pxe/index.md b/docs/docs/aztec/concepts/pxe/index.md index 6e54b1ed354b..5f7dedf1b280 100644 --- a/docs/docs/aztec/concepts/pxe/index.md +++ b/docs/docs/aztec/concepts/pxe/index.md @@ -6,32 +6,13 @@ keywords: [pxe, private execution environment] importance: 1 --- +import Image from "@theme/IdealImage"; + The Private Execution Environment (or PXE, pronounced 'pixie') is a client-side library for the execution of private operations. It is a TypeScript library and can be run within Node, such as when you run the sandbox. In the future it could be run inside wallet software or a browser. The PXE generates proofs of private function execution, and sends these proofs along with public function requests to the sequencer. Private inputs never leave the client-side PXE. 
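To make the PXE's role concrete, here is a minimal aztec.js sketch of connecting to a locally running PXE. The helper names (`createPXEClient`, `waitForPXE`) and the sandbox URL are assumptions based on the aztec.js of this release, not part of this diff:

```typescript
import { createPXEClient, waitForPXE } from '@aztec/aztec.js';

async function main() {
  // Connect to the PXE exposed by a local sandbox (port assumed).
  const pxe = createPXEClient('http://localhost:8080');
  await waitForPXE(pxe); // poll until the PXE answers

  // Keys, notes and private inputs stay inside the PXE; callers only
  // ever see results, proofs and public function requests.
  const nodeInfo = await pxe.getNodeInfo();
  console.log(`Connected to PXE (L1 chain id ${nodeInfo.l1ChainId})`);
}

main().catch(console.error);
```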
-```mermaid -graph TD; - - subgraph client[Client] - subgraph pxe [PXE] - acirSim[ACIR Simulator] - db[Database] - keyStore[KeyStore] - end - end - - subgraph server[Application Server] - subgraph pxeService [PXE Service] - acctMgmt[Account Management] - contractTxInteract[Contract & Transaction Interactions] - noteMgmt[Note Management] - end - end - - pxe -->|interfaces| server - -``` + ## PXE Service diff --git a/docs/docs/aztec/concepts/state_model/public_vm.md b/docs/docs/aztec/concepts/state_model/public_vm.md deleted file mode 100644 index d5c26f1d33d9..000000000000 --- a/docs/docs/aztec/concepts/state_model/public_vm.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -title: Public VM ---- - -Refer to the [protocol specs section](../../../protocol-specs/public-vm/index.md) for the latest information about the Aztec Public VM. diff --git a/docs/docs/aztec/concepts/storage/index.md b/docs/docs/aztec/concepts/storage/index.md index 9b3987f901f9..3e115ed8b44f 100644 --- a/docs/docs/aztec/concepts/storage/index.md +++ b/docs/docs/aztec/concepts/storage/index.md @@ -10,56 +10,3 @@ In Aztec, private data and public data are stored in two trees; a public data tr These trees have in common that they store state for _all_ accounts on the Aztec network directly as leaves. This is different from Ethereum, where a state trie contains smaller tries that hold the individual accounts' storage. It also means that we need to be careful about how we allocate storage to ensure that they don't collide! We say that storage should be _siloed_ to its contract. The exact way of siloing differs a little for public and private storage. Which we will see in the following sections. - -## Public State Slots - -As mentioned in [State Model](../state_model/index.md), Aztec public state behaves similarly to public state on Ethereum from the point of view of the developer. Behind the scenes however, the storage is managed differently. As mentioned, public state has just one large sparse tree in Aztec - so we silo slots of public data by hashing it together with its contract address. - -The mental model is that we have a key-value store, where the siloed slot is the key, and the value is the data stored in that slot. You can think of the `real_storage_slot` identifying its position in the tree, and the `logical_storage_slot` identifying the position in the contract storage. - -```rust -real_storage_slot = H(contract_address, logical_storage_slot) -``` - -The siloing is performed by the [Kernel circuits](../circuits/index.md). - -For structs and arrays, we are logically using a similar storage slot computation to ethereum, e.g., as a struct with 3 fields would be stored in 3 consecutive slots. However, because the "actual" storage slot is computed as a hash of the contract address and the logical storage slot, the actual storage slot is not consecutive. - -## Private State Slots - Slots aren't real - -Private storage is a different beast. As you might remember from [Hybrid State Model](../state_model/index.md), private state is stored in encrypted logs and the corresponding private state commitments in append-only tree where each leaf is a commitment. Being append-only, means that leaves are never updated or deleted; instead a nullifier is emitted to signify that some note is no longer valid. A major reason we used this tree, is that lookups at a specific storage slot would leak information in the context of private state. 
If you could look up a specific address balance just by looking at the storage slot, even if encrypted you would be able to see it changing! That is not good privacy. - -Following this, the storage slot as we know it doesn't really exist. The leaves of the note hashes tree are just commitments to content (think of it as a hash of its content). - -Nevertheless, the concept of a storage slot is very useful when writing applications, since it allows us to reason about distinct and disjoint pieces of data. For example we can say that the balance of an account is stored in a specific slot and that the balance of another account is stored in another slot with the total supply stored in some third slot. By making sure that these slots are disjoint, we can be sure that the balances are not mixed up and that someone cannot use the total supply as their balance. - -### But how? - -If we include the storage slot, as part of the note whose commitment is stored in the note hashes tree, we can _logically link_ all the notes that make up the storage slot. For the case of a balance, we can say that the balance is the sum of all the notes that have the same storage slot - in the same way that your physical \$ balance might be the sum of all the notes in your wallet. - -Similarly to how we siloed the public storage slots, we can silo our private storage by hashing the logical storage slot together with the note content. - -```rust -note_hash = H(logical_storage_slot, note_content_hash); -``` - -This siloing (there will be more) is done in the application circuit, since it is not necessary for security of the network (but only the application). -:::info -The private variable wrappers `PrivateSet` and `PrivateMutable` in Aztec.nr include the `logical_storage_slot` in the commitments they compute, to make it easier for developers to write contracts without having to think about how to correctly handle storage slots. -::: - -When reading the values for these notes, the application circuit can then constrain the values to only read notes with a specific logical storage slot. - -To ensure that one contract cannot insert storage that other contracts would believe is theirs, we do a second siloing by hashing the `commitment` with the contract address. - -```rust -siloed_note_hash = H(contract_address, note_hash); -``` - -By doing this address-siloing at the kernel circuit we _force_ the inserted commitments to include and not lie about the `contract_address`. - -:::info -To ensure that nullifiers don't collide across contracts we also force this contract siloing at the kernel level. -::: - -For an example of this see [developer documentation on storage](../../../reference/developer_references/smart_contract_reference/storage/index.md). diff --git a/docs/docs/aztec/concepts/state_model/index.md b/docs/docs/aztec/concepts/storage/state_model/index.md similarity index 98% rename from docs/docs/aztec/concepts/state_model/index.md rename to docs/docs/aztec/concepts/storage/state_model/index.md index 37a0455456ff..9aa78d92d3f8 100644 --- a/docs/docs/aztec/concepts/state_model/index.md +++ b/docs/docs/aztec/concepts/storage/state_model/index.md @@ -35,4 +35,4 @@ This is achieved with two main features: ## Further reading -Read more about how to leverage the Aztec state model in Aztec contracts [here](../storage/index.md). +Read more about how to leverage the Aztec state model in Aztec contracts [here](../../storage/index.md). 
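The double siloing described in these storage sections (first by logical slot in the application circuit, then by contract address in the kernel) can be summarised in a short sketch. This is illustrative only: `hash` is a stand-in for the protocol's circuit hash function, and `Field` is simplified to `bigint`:

```typescript
// `Field` and `hash` are stand-ins for the protocol's field type and
// circuit hash function; this is not a real aztec.js API.
type Field = bigint;
declare function hash(inputs: Field[]): Field;

// Application circuit: bind a note to its logical storage slot so reads
// can be constrained to notes of a given slot.
function noteHash(logicalStorageSlot: Field, noteContentHash: Field): Field {
  return hash([logicalStorageSlot, noteContentHash]);
}

// Kernel circuit: silo by contract address so one contract cannot insert
// commitments that another contract would accept as its own storage.
function siloedNoteHash(contractAddress: Field, innerNoteHash: Field): Field {
  return hash([contractAddress, innerNoteHash]);
}
```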
diff --git a/docs/docs/aztec/concepts/storage/state_model/public_vm.md b/docs/docs/aztec/concepts/storage/state_model/public_vm.md
new file mode 100644
index 000000000000..528ee5f8d46c
--- /dev/null
+++ b/docs/docs/aztec/concepts/storage/state_model/public_vm.md
@@ -0,0 +1,5 @@
+---
+title: Public VM
+---
+
+Refer to the [protocol specs section](../../../../protocol-specs/public-vm/index.md) for the latest information about the Aztec Public VM.
diff --git a/docs/docs/aztec/concepts/storage/storage_slots.md b/docs/docs/aztec/concepts/storage/storage_slots.md
new file mode 100644
index 000000000000..d3121c645d94
--- /dev/null
+++ b/docs/docs/aztec/concepts/storage/storage_slots.md
@@ -0,0 +1,55 @@
+
+# Storage Slots
+
+## Public State Slots
+
+As mentioned in [State Model](../storage/state_model/index.md), Aztec public state behaves similarly to public state on Ethereum from the point of view of the developer. Behind the scenes, however, the storage is managed differently. As mentioned, public state has just one large sparse tree in Aztec - so we silo slots of public data by hashing each slot together with its contract address.
+
+The mental model is that we have a key-value store, where the siloed slot is the key, and the value is the data stored in that slot. You can think of the `real_storage_slot` identifying its position in the tree, and the `logical_storage_slot` identifying the position in the contract storage.
+
+```rust
+real_storage_slot = H(contract_address, logical_storage_slot)
+```
+
+The siloing is performed by the [Kernel circuits](../circuits/index.md).
+
+For structs and arrays, we are logically using a similar storage slot computation to Ethereum, e.g., a struct with 3 fields would be stored in 3 consecutive logical slots. However, because the "actual" storage slot is computed as a hash of the contract address and the logical storage slot, the actual storage slots are not consecutive.
+
+## Private State Slots
+
+Private storage is a different beast. As you might remember from [Hybrid State Model](../storage/state_model/index.md), private state is stored in encrypted logs, and the corresponding private state commitments live in an append-only tree called the note hash tree, where each leaf is a commitment. Append-only means that leaves are never updated or deleted; instead a nullifier is emitted to signify that some note is no longer valid. A major reason we use this tree is that updates at a specific storage slot would leak information in the context of private state, even if the value is encrypted. That is not good privacy.
+
+Following this, the storage slot as we know it doesn't really exist. The leaves of the note hashes tree are just commitments to content (think of it as a hash of its content).
+
+Nevertheless, the concept of a storage slot is very useful when writing applications, since it allows us to reason about distinct and disjoint pieces of data. For example, we can say that the balance of an account is stored in a specific slot and that the balance of another account is stored in another slot, with the total supply stored in some third slot. By making sure that these slots are disjoint, we can be sure that the balances are not mixed up and that someone cannot use the total supply as their balance.
+
+### Implementation
+
+If we include the storage slot as part of the note whose commitment is stored in the note hashes tree, we can _logically link_ all the notes that make up the storage slot.
+For the case of a balance, we can say that the balance is the sum of all the notes that have the same storage slot - in the same way that your physical wallet balance is the sum of all the physical notes in your wallet.
+
+Similarly to how we siloed the public storage slots, we can silo our private storage by hashing the logical storage slot together with the note content.
+
+```rust
+note_hash = H(logical_storage_slot, note_content_hash);
+```
+
+Note hash siloing is done in the application circuit, since it is not necessary for the security of the network (only for the application).
+:::info
+The private variable wrappers `PrivateSet` and `PrivateMutable` in Aztec.nr include the `logical_storage_slot` in the commitments they compute, to make it easier for developers to write contracts without having to think about how to correctly handle storage slots.
+:::
+
+When reading the values for these notes, the application circuit can then constrain the values to only read notes with a specific logical storage slot.
+
+To ensure that contracts can only modify their own logical storage, we do a second siloing by hashing the `commitment` with the contract address.
+
+```rust
+siloed_note_hash = H(contract_address, note_hash);
+```
+
+By doing this address-siloing at the kernel circuit we _force_ the inserted commitments to include and not lie about the `contract_address`.
+
+:::info
+To ensure that nullifiers don't collide across contracts we also force this contract siloing at the kernel level.
+:::
+
+For an example of this see [developer documentation on storage](../../../reference/developer_references/smart_contract_reference/storage/index.md).
diff --git a/docs/docs/aztec/concepts_overview.md b/docs/docs/aztec/concepts_overview.md
new file mode 100644
index 000000000000..878cd2c3796a
--- /dev/null
+++ b/docs/docs/aztec/concepts_overview.md
@@ -0,0 +1,152 @@
+---
+title: Concepts Overview
+sidebar_position: 0
+tags: [protocol]
+---
+
+import Image from "@theme/IdealImage";
+
+This page outlines Aztec's fundamental technical concepts. It is recommended to read this before diving into building on Aztec.
+
+## What is Aztec?
+
+Aztec is a privacy-first Layer 2 on Ethereum. It supports smart contracts with both private & public state and private & public execution.
+
+## High level view
+
+
+
+1. A user interacts with Aztec through Aztec.js (like web3js or ethersjs)
+2. Private functions are executed in the PXE, which is client-side
+3. Proofs and tree updates are sent to the Public VM (running on an Aztec node)
+4. Public functions are executed in the Public VM
+5. The Public VM rolls up the transactions that include private and public state updates into blocks
+6. The block data and proof of a correct state transition are submitted to Ethereum for verification
+
+## Private and public execution
+
+Private functions are executed client side, on user devices, to maintain maximum privacy. Public functions are executed by a remote network of nodes, similar to other blockchains. These distinct execution environments create a directional execution flow for a single transaction: a transaction begins in the private context on the user's device, then moves to the public network. This means that private functions executed by a transaction can enqueue public functions to be executed later in the transaction life cycle, but public functions cannot call private functions.
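From a client's point of view this directional flow is transparent: one call covers both halves. A hedged aztec.js sketch, where the contract and its `transfer_to_public` method are hypothetical and only the `Contract.at(...).methods...send().wait()` pattern comes from the library:

```typescript
import { Contract, type AztecAddress, type Wallet } from '@aztec/aztec.js';

// `transfer_to_public` is a hypothetical private function that enqueues a
// public call; the contract artifact and wallet are assumed to exist.
async function privateThenPublic(wallet: Wallet, address: AztecAddress, artifact: any) {
  const contract = await Contract.at(address, artifact, wallet);

  // The private half is simulated and proven client-side in the PXE; the
  // enqueued public half runs later in the AVM when the tx is sequenced.
  const receipt = await contract.methods
    .transfer_to_public(wallet.getAddress(), 100n)
    .send()
    .wait();

  console.log(`Mined in block ${receipt.blockNumber}`);
}
```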
+
+### Private Execution Environment (PXE)
+
+Private functions are executed first on the user's device in the Private Execution Environment (PXE, pronounced 'pixie'). It is a client-side library for the execution of private operations. It holds keys and notes, and generates proofs. It is included in aztec.js, a TypeScript library, and can be run within Node or the browser.
+
+### Aztec VM
+
+Public functions are executed by the Aztec Virtual Machine (AVM), which is conceptually similar to the Ethereum Virtual Machine (EVM). To learn more about how it works and its instruction set, go to the [protocol specs](../protocol-specs/public-vm/intro.md).
+
+The PXE is unaware of the Public VM, and the Public VM is unaware of the PXE. They are completely separate execution environments. This means:
+
+- The PXE and the Public VM cannot directly communicate with each other
+- Private transactions in the PXE are executed first, followed by public transactions
+
+## Private and public state
+
+Private state works with UTXOs, which are chunks of data that we call notes. To keep things private, notes are stored in an [append-only UTXO tree](./concepts/storage/trees/index.md), and a nullifier is created when notes are invalidated (aka deleted). Nullifiers are stored in their own [nullifier tree](./concepts/storage/trees/index.md).
+
+Public state works similarly to other chains like Ethereum, behaving like a public ledger. Public data is stored in a [public data tree](./concepts/storage/trees/index.md#public-state-tree).
+
+![Public vs private state](../../static/img/public-and-private-state-diagram.png)
+
+Aztec [smart contract](./smart_contracts_overview.md) developers should keep in mind that different data types are used when manipulating private or public state. Working with private state means creating commitments and nullifiers to state, whereas working with public state means directly updating state.
+
+## Accounts and keys
+
+### Account abstraction
+
+Every account in Aztec is a smart contract (account abstraction). This allows implementing different schemes for authorizing transactions, nonce management, and fee payments.
+
+Developers can write their own account contract to define the rules by which user transactions are authorized and paid for, as well as how user keys are managed.
+
+Learn more about account contracts [here](./concepts/accounts/index.md).
+
+### Key pairs
+
+Each account in Aztec is backed by 3 key pairs:
+
+- A **nullifier key pair** used for note nullifier computation
+- An **incoming viewing key pair** used to encrypt a note for the recipient
+- An **outgoing viewing key pair** used to encrypt a note for the sender
+
+As Aztec has native account abstraction, accounts do not automatically have a signing key pair to authenticate transactions. This is up to the account contract developer to implement.
+
+## Noir
+
+Noir is a zero-knowledge domain-specific language used for writing smart contracts for the Aztec network. It is also possible to write circuits with Noir that can be verified on or offchain. For more in-depth docs on the features of Noir, go to the [Noir documentation](https://noir-lang.org/).
+
+## What's next?
+
+### Start coding
+
+- **Developer Getting Started Guide**: Follow the getting started guide to start developing with the Aztec Sandbox
+
+### Dive deeper into how Aztec works
+
+Explore the Concepts for a deeper understanding of the components that make up Aztec:
+
+- **Accounts**: Learn about Aztec's native account abstraction - every account in Aztec is a smart contract which defines the rules for whether a transaction is or is not valid
+- **Protocol Circuits**: Central to Aztec's operations are circuits in the core protocol and the developer-written Aztec.nr contracts
+- **PXE (pronounced 'pixie')**: The Private Execution Environment (or PXE) is a client-side library for the execution of private operations
+- **State model**: Aztec has a hybrid public/private state model
+- **Storage**: In Aztec, private data and public data are stored in two trees: a public data tree and a note hashes tree
+- **Wallets**: Wallets expose to dapps an interface that allows them to act on behalf of the user, such as querying private state or sending transactions
diff --git a/docs/docs/aztec/overview.md b/docs/docs/aztec/overview.md deleted file mode 100644 index 32a353fa80ee..000000000000 --- a/docs/docs/aztec/overview.md +++ /dev/null @@ -1,141 +0,0 @@ ---- -title: What is Aztec? -sidebar_position: 0 -id: overview -tags: [protocol] ---- - -import Image from "@theme/IdealImage"; - -This page outlines Aztec's fundamental technical concepts. - -## Aztec Overview - - - -1. A user interacts with Aztec through Aztec.js (like web3js or ethersjs) -2. Private functions are executed in the PXE, which is client-side -3. They are rolled up and sent to the Public VM (running on an Aztec node) -4. Public functions are executed in the Public VM -5. The Public VM rolls up the private & public transaction rollups -6. These rollups are submitted to Ethereum - -The PXE is unaware of the Public VM. And the Public VM is unaware of the PXE. They are completely separate execution environments. This means: - -- The PXE and the Public VM cannot directly communicate with each other -- Private transactions in the PXE are executed first, followed by public transactions - -### Private and public state - -Private state works with UTXOs, or what we call notes. To keep things private, everything is stored in an [append-only UTXO tree](./concepts/storage/trees/index.md), and a nullifier is created when notes are invalidated. Nullifiers are then stored in their own [nullifier tree](./concepts/storage/trees/index.md). - -Public state works similarly to other chains like Ethereum, behaving like a public ledger. Public data is stored in a [public data tree](./concepts/storage/trees/index.md#public-state-tree). - -Aztec [smart contract](./smart_contracts_overview.md) developers should keep in mind that different types are used when manipulating private or public state. Working with private state is creating commitments and nullifiers to state, whereas working with public state is directly updating state. - -## Accounts - -Every account in Aztec is a smart contract (account abstraction). This allows implementing different schemes for transaction signing, nonce management, and fee payments. - -Developers can write their own account contract to define the rules by which user transactions are authorized and paid for, as well as how user keys are managed. - -Learn more about account contracts [here](./concepts/accounts/index.md). - -## Smart contracts - -Developers can write [smart contracts](./smart_contracts_overview.md) that manipulate both public and private state. They are written in a framework on top of Noir, the zero-knowledge domain-specific language developed specifically for Aztec. Outside of Aztec, Noir is used for writing circuits that can be verified on EVM chains. - -Noir has its own doc site that you can find [here](https://noir-lang.org). - -## Communication with Ethereum - -Aztec allows private communications with Ethereum - ie no-one knows where the transaction is coming from, just that it is coming from somewhere on Aztec. - -This is achieved through portals - these are smart contracts deployed on an EVM that are related to the Ethereum smart contract you want to interact with. - -Learn more about portals [here](../protocol-specs/l1-smart-contracts/index.md). 
- -## Circuits - -Aztec operates on three types of circuits: - -- [Private kernel circuits](../aztec/concepts/circuits/kernels/private_kernel.md), which are executed by the user on their own device and prove correct execution of a function -- [Public kernel circuits](../aztec/concepts/circuits/kernels/public_kernel.md), which are executed by the [sequencer](./network/sequencer/index.md) and ensure the stack trace of transactions adheres to function execution rules -- [Rollup circuits](../aztec/concepts/circuits/index.md), which bundle all of the Aztec transactions into a proof that can be efficiently verified on Ethereum - -## What's next? - -### Dive deeper into how Aztec works - -Explore the Concepts for a deeper understanding into the components that make up Aztec: - -
-- **Accounts**: Learn about Aztec's native account abstraction - every account in Aztec is a smart contract which defines the rules for whether a transaction is or is not valid
-- **Circuits**: Central to Aztec's operations are circuits in the core protocol and the developer-written Aztec.nr contracts
-- **PXE (pronounced 'pixie')**: The Private Execution Environment (or PXE) is a client-side library for the execution of private operations
-- **State model**: Aztec has a hybrid public/private state model
-- **Storage**: In Aztec, private data and public data are stored in two trees: a public data tree and a note hashes tree
-- **Wallets**: Wallets expose to dapps an interface that allows them to act on behalf of the user, such as querying private state or sending transactions
-
-### Start coding
-
-- **Developer Getting Started Guide**: Follow the getting started guide to start developing with the Aztec Sandbox
\ No newline at end of file diff --git a/docs/docs/aztec/smart_contracts/contract_creation.md b/docs/docs/aztec/smart_contracts/contract_creation.md index b648178e14e9..02858dca0701 100644 --- a/docs/docs/aztec/smart_contracts/contract_creation.md +++ b/docs/docs/aztec/smart_contracts/contract_creation.md @@ -7,4 +7,4 @@ The latest information about contract deployment has moved to the protocol speci ## Further reading -To see how to deploy a contract in practice, check out the [dapp development tutorial](../../tutorials/codealong/simple_dapp/index.md). +To see how to deploy a contract in practice, check out the [dapp development tutorial](../../tutorials/codealong/js_tutorials/simple_dapp/index.md). diff --git a/docs/docs/aztec/smart_contracts/functions/context.md b/docs/docs/aztec/smart_contracts/functions/context.md index 6929153a1f7d..f8b0eae3c16c 100644 --- a/docs/docs/aztec/smart_contracts/functions/context.md +++ b/docs/docs/aztec/smart_contracts/functions/context.md @@ -66,13 +66,11 @@ The call context contains information about the current call being made: - is_delegate_call: Denotes whether the current call is a delegate call. If true, then the storage contract address will be the address of the sender. - is_static_call: This will be set if and only if the current call is a static call. In a static call, state changing altering operations are not allowed. -### Header +### Block Header -Another structure that is contained within the context is the Header object. -In the private context this is a header of a block which used to generate proofs against. -In the public context this header is set by sequencer (sequencer executes public calls) and it is set to 1 block before the block in which the transaction is included. +Another structure that is contained within the context is the `BlockHeader` object, which is the header of the block used to generate proofs against. -#include_code header /noir-projects/noir-protocol-circuits/crates/types/src/header.nr rust +#include_code block-header /noir-projects/noir-protocol-circuits/crates/types/src/block_header.nr rust ### Transaction Context diff --git a/docs/docs/guides/developer_guides/js_apps/test.md b/docs/docs/guides/developer_guides/js_apps/test.md index a290623a51c6..383358fa8c2c 100644 --- a/docs/docs/guides/developer_guides/js_apps/test.md +++ b/docs/docs/guides/developer_guides/js_apps/test.md @@ -8,7 +8,7 @@ In this guide we will cover how to interact with your Aztec.nr smart contracts i ## Prerequisites - A compiled contract with TS interface (read [how to compile](../smart_contracts/how_to_compile_contract.md)) -- Your sandbox running (read [getting started](../getting_started.md)) +- Your sandbox running (read [getting started](../../getting_started.md)) ## Create TS file and install libraries @@ -107,7 +107,7 @@ To query storage directly, you'll need to know the slot you want to access. This #### Querying private state -Private state in the Aztec is represented via sets of [private notes](../../../aztec/concepts/state_model/index.md#private-state). We can query the Private Execution Environment (PXE) for all notes encrypted for a given user in a contract slot. For example, this gets all notes encrypted for the `owner` user that are stored on the token contract address and on the slot that was calculated earlier. To calculate the actual balance, it extracts the `value` of each note, which is the first element, and sums them up. 
+Private state in Aztec is represented via sets of [private notes](../../../aztec/concepts/storage/state_model/index.md#private-state). We can query the Private Execution Environment (PXE) for all notes encrypted for a given user in a contract slot. For example, this gets all notes encrypted for the `owner` user that are stored on the token contract address and on the slot that was calculated earlier. To calculate the actual balance, it extracts the `value` of each note, which is the first element, and sums them up.

 #include_code private-storage /yarn-project/end-to-end/src/guides/dapp_testing.test.ts typescript
diff --git a/docs/docs/guides/developer_guides/local_env/creating_schnorr_accounts.md b/docs/docs/guides/developer_guides/local_env/creating_schnorr_accounts.md
index e8668294c952..b45b653b7f9b 100644
--- a/docs/docs/guides/developer_guides/local_env/creating_schnorr_accounts.md
+++ b/docs/docs/guides/developer_guides/local_env/creating_schnorr_accounts.md
@@ -18,7 +18,7 @@ An in-depth explainer about accounts on aztec can be found [here](../../../aztec

 ## Pre-requisites

-Have a running Sandbox and a repository that interacts with it as explained [in the quickstart](../getting_started.md).
+Have a running Sandbox and a repository that interacts with it as explained [in the quickstart](../../getting_started.md).

 Let's assume you have a file `src/index.ts` from the example used in the Sandbox page.
diff --git a/docs/docs/guides/developer_guides/local_env/run_more_than_one_pxe_sandbox.md b/docs/docs/guides/developer_guides/local_env/run_more_than_one_pxe_sandbox.md
index ad575e6baf6c..d937f6f5f139 100644
--- a/docs/docs/guides/developer_guides/local_env/run_more_than_one_pxe_sandbox.md
+++ b/docs/docs/guides/developer_guides/local_env/run_more_than_one_pxe_sandbox.md
@@ -23,7 +23,7 @@ This removes any other arguments, allowing you to ensure an isolated environment
 In another terminal, run:

 ```bash
-aztec start --port 8081 --pxe nodeUrl=http://host.docker.internal:8080/
+aztec start --port 8081 --pxe --pxe.nodeUrl=http://host.docker.internal:8080/
 ```

 This command uses the default ports, so they might need to be changed depending on your configuration. It will run the PXE on port `8081`.
diff --git a/docs/docs/guides/developer_guides/smart_contracts/how_to_compile_contract.md b/docs/docs/guides/developer_guides/smart_contracts/how_to_compile_contract.md
index 1c68a23fda7c..056290ad93bc 100644
--- a/docs/docs/guides/developer_guides/smart_contracts/how_to_compile_contract.md
+++ b/docs/docs/guides/developer_guides/smart_contracts/how_to_compile_contract.md
@@ -444,7 +444,7 @@ export class TokenContract extends ContractBase {
   }
 }
 ```

-Read more about interacting with contracts using `aztec.js` [by following this tutorial](../../../tutorials/codealong/aztecjs-getting-started).
+Read more about interacting with contracts using `aztec.js` [by following this tutorial](../../../tutorials/codealong/js_tutorials/aztecjs-getting-started.md).
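As a usage note for the hunk above: once the typed interface is generated, it can be used in tests in place of the generic `Contract` class. A sketch under the assumption that a `TokenContract` binding was generated as shown; the `transfer` method and its signature are illustrative:

```typescript
// Hypothetical: `TokenContract` generated from the compiled artifact as in
// the snippet above; method names and arguments are illustrative.
import { TokenContract } from './Token.js';
import type { AztecAddress, Wallet } from '@aztec/aztec.js';

async function useToken(wallet: Wallet, tokenAddress: AztecAddress) {
  // Generated classes expose a typed static `at` and typed `methods`.
  const token = await TokenContract.at(tokenAddress, wallet);
  const receipt = await token.methods
    .transfer(wallet.getAddress(), 100n)
    .send()
    .wait();
  console.log(`Transfer mined in block ${receipt.blockNumber}`);
}
```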
### Aztec.nr interfaces diff --git a/docs/docs/guides/developer_guides/smart_contracts/how_to_deploy_contract.md b/docs/docs/guides/developer_guides/smart_contracts/how_to_deploy_contract.md index 7df9f76b1a8b..634be0fa19d9 100644 --- a/docs/docs/guides/developer_guides/smart_contracts/how_to_deploy_contract.md +++ b/docs/docs/guides/developer_guides/smart_contracts/how_to_deploy_contract.md @@ -12,7 +12,7 @@ Once you have [compiled](./how_to_compile_contract.md) your contracts you can pr - `aztec-nargo` installed (go to [Sandbox section](../../../reference/developer_references/sandbox_reference/sandbox-reference.md) for installation instructions) - contract artifacts ready (go to [How to Compile Contract](./how_to_compile_contract.md) for instructions on how to compile contracts) -- Aztec Sandbox running (go to [Getting Started](../getting_started.md) for instructions on how to install and run the sandbox) +- Aztec Sandbox running (go to [Getting Started](../../getting_started.md) for instructions on how to install and run the sandbox) ## Deploy diff --git a/docs/docs/guides/developer_guides/smart_contracts/testing_contracts/testing.md b/docs/docs/guides/developer_guides/smart_contracts/testing_contracts/testing.md index 86d470de6644..42950468940a 100644 --- a/docs/docs/guides/developer_guides/smart_contracts/testing_contracts/testing.md +++ b/docs/docs/guides/developer_guides/smart_contracts/testing_contracts/testing.md @@ -38,7 +38,7 @@ So to summarize: ### Running TXE -If you have [the sandbox](../../getting_started.md) installed, you can run TXE tests using: +If you have [the sandbox](../../../getting_started.md) installed, you can run TXE tests using: `aztec test` @@ -218,10 +218,11 @@ You can use `aztec.nr`'s oracles as usual for debug logging, as explained [here] :::warning Remember to set the following environment variables to activate debug logging: + ```bash -export DEBUG="aztec:*" export LOG_LEVEL="debug" ``` + ::: ### All Cheatcodes diff --git a/docs/docs/guides/developer_guides/smart_contracts/writing_contracts/call_functions.md b/docs/docs/guides/developer_guides/smart_contracts/writing_contracts/call_functions.md index fcca446ddadc..2df88f48025e 100644 --- a/docs/docs/guides/developer_guides/smart_contracts/writing_contracts/call_functions.md +++ b/docs/docs/guides/developer_guides/smart_contracts/writing_contracts/call_functions.md @@ -5,15 +5,13 @@ tags: [functions, contracts] --- - - A contract is a collection of persistent state variables and functions which may manipulate these variables. Functions and state variables within a contract's scope are said to belong to that contract. A contract can only access and modify its own state. If a contract wishes to access or modify another contract's state, it must make a call to an external function of the other contract. For anything to happen on the Aztec network, an external function of a contract needs to be called. -### Contract +### Defining a contract A contract may be declared and given a name using the `contract` keyword (see snippet below). By convention, contracts are named in `PascalCase`. @@ -32,4 +30,53 @@ contract MyContract { There is no [`main()` (GitHub link)](https://noir-lang.org/docs/getting_started/project_breakdown/#mainnr) function within a Noir `contract` scope. More than one function can be an entrypoint. ::: -To understand how to call a function from another contract, follow the [crowdfunding tutorial](../../../../tutorials/codealong/contract_tutorials/crowdfunding_contract.md). 
+### Add as a dependency in Nargo.toml + +Import the contract that you want to call into your `Nargo.toml` under `dependencies` like this: + +``` +token = { git="https://github.com/AztecProtocol/aztec-packages/", tag="#include_aztec_version", directory="noir-projects/noir-contracts/contracts/token_contract" } +``` + +### Import into your contract + +At the top of your contract, import the contract you want to call like this: + +``` +use token::Token; +``` + +### Call the function + +To call the function, you need to + +- Specify the address of the contract with `Contract::at(contract_address)` +- Call the function name with `.function_name()` +- Pass the parameters into the function call, like `.function_name(param1,param2)` +- Specify the type of call you want to make and pass a mut reference to the context, like `.call(&mut context)` + +#### Private calls + +To call a private function, you can just use `call()` like this: + +#include_code call_function noir-projects/noir-contracts/contracts/escrow_contract/src/main.nr rust + +#### Public -> Public calls + +To call a public function from a public function, it is the same as above. You can just use `call()` like this: + +#include_code public_to_public_call noir-projects/noir-contracts/contracts/lending_contract/src/main.nr rust + +#### Private -> Public calls + +To call a public function from private, you will need to enqueue it like this: + +#include_code enqueue_public /noir-projects/noir-contracts/contracts/lending_contract/src/main.nr rust + +Public functions are always executed after private execution. To learn why, read the [concepts overview](../../../../aztec/concepts_overview.md). + +#### Other call types + +There are other call types, for example to ensure no state changes are made. You can learn more about them in the [call types glossary](../../../../aztec/glossary/call_types.md). + + diff --git a/docs/docs/guides/developer_guides/smart_contracts/writing_contracts/common_patterns/index.md b/docs/docs/guides/developer_guides/smart_contracts/writing_contracts/common_patterns/index.md index b002d3dd13f2..aacd44df9d8e 100644 --- a/docs/docs/guides/developer_guides/smart_contracts/writing_contracts/common_patterns/index.md +++ b/docs/docs/guides/developer_guides/smart_contracts/writing_contracts/common_patterns/index.md @@ -108,7 +108,7 @@ Hence, it's necessary to add a "randomness" field to your note to prevent such a ### L1 -- L2 interactions -Refer to [Token Portal codealong tutorial on bridging tokens between L1 and L2](../../../../../tutorials/codealong/contract_tutorials/advanced/token_bridge/index.md) and/or [Uniswap smart contract example that shows how to swap on L1 using funds on L2](../../../../../tutorials/examples/uniswap/index.md). Both examples show how to: +Refer to [Token Portal codealong tutorial on bridging tokens between L1 and L2](../../../../../tutorials/codealong/contract_tutorials/token_bridge/index.md) and/or [Uniswap smart contract example that shows how to swap on L1 using funds on L2](../../../../../tutorials/examples/uniswap/index.md). Both examples show how to: 1. L1 -> L2 message flow 2. 
diff --git a/docs/docs/guides/developer_guides/smart_contracts/writing_contracts/notes/index.md b/docs/docs/guides/developer_guides/smart_contracts/writing_contracts/notes/index.md
index b40f4be495e5..ec9f829577b9 100644
--- a/docs/docs/guides/developer_guides/smart_contracts/writing_contracts/notes/index.md
+++ b/docs/docs/guides/developer_guides/smart_contracts/writing_contracts/notes/index.md
@@ -4,4 +4,4 @@ sidebar_position: 6
tags: [contracts, notes]
---
-Notes are the fundamental data structure in Aztec when working with private state. In this section there are guides about how to work with `AddressNote`, `ValueNote`, and custom notes in Aztec.nr. You can learn more about notes in the [concepts section](../../../../../aztec/concepts/state_model/index.md#private-state). \ No newline at end of file
+Notes are the fundamental data structure in Aztec when working with private state. In this section there are guides about how to work with `AddressNote`, `ValueNote`, and custom notes in Aztec.nr. You can learn more about notes in the [concepts section](../../../../../aztec/concepts/storage/state_model/index.md#private-state). \ No newline at end of file
diff --git a/docs/docs/guides/developer_guides/smart_contracts/writing_contracts/portals/communicate_with_portal.md b/docs/docs/guides/developer_guides/smart_contracts/writing_contracts/portals/communicate_with_portal.md
index d08078b0aec1..439bf5f93772 100644
--- a/docs/docs/guides/developer_guides/smart_contracts/writing_contracts/portals/communicate_with_portal.md
+++ b/docs/docs/guides/developer_guides/smart_contracts/writing_contracts/portals/communicate_with_portal.md
@@ -5,7 +5,7 @@ tags: [contracts, portals]
Is this your first time hearing the word `Portal`? You might want to check out the [protocol specs](../../../../../protocol-specs/l1-smart-contracts/index.md).
-Follow the [token bridge tutorial](../../../../../tutorials/codealong/contract_tutorials/advanced/token_bridge/index.md) for hands-on experience writing and deploying a Portal contract.
+Follow the [token bridge tutorial](../../../../../tutorials/codealong/contract_tutorials/token_bridge/index.md) for hands-on experience writing and deploying a Portal contract.
## Passing data to the rollup
@@ -43,7 +43,7 @@ Note that while the `secret` and the `content` are both hashed, they are actuall
### Token bridge example
-Computing the `content` must currently be done manually, as we are still adding a number of bytes utilities. A good example exists within the [Token bridge example (codealong tutorial)](../../../../../tutorials/codealong/contract_tutorials/advanced/token_bridge/index.md).
+Computing the `content` must currently be done manually, as we are still adding a number of bytes utilities. A good example exists within the [Token bridge example (codealong tutorial)](../../../../../tutorials/codealong/contract_tutorials/token_bridge/index.md).
#include_code claim_public /noir-projects/noir-contracts/contracts/token_bridge_contract/src/main.nr rust
diff --git a/docs/docs/guides/developer_guides/smart_contracts/writing_contracts/storage/notes.md b/docs/docs/guides/developer_guides/smart_contracts/writing_contracts/storage/notes.md
index bea6aa7461bf..b4f737c78571 100644
--- a/docs/docs/guides/developer_guides/smart_contracts/writing_contracts/storage/notes.md
+++ b/docs/docs/guides/developer_guides/smart_contracts/writing_contracts/storage/notes.md
@@ -83,9 +83,9 @@ To update a value, its previous note hash(es) are nullified.
The new note value
Some optional background resources on notes can be found here:
-- [High level network architecture](../../../../../aztec/overview.md), specifically the Private Execution Environment
+- [High level network architecture](../../../../../aztec/concepts_overview.md), specifically the Private Execution Environment
- [Transaction lifecycle (simple diagram)](../../../../../aztec/concepts/transactions.md#simple-example-of-the-private-transaction-lifecycle)
-- [Public and Private state](../../../../../aztec/concepts/state_model/index.md)
+- [Public and Private state](../../../../../aztec/concepts/storage/state_model/index.md)
Notes touch several core components of the protocol, but we will focus on the essentials first.
diff --git a/docs/docs/guides/developer_guides/getting_started.md b/docs/docs/guides/getting_started.md
similarity index 98%
rename from docs/docs/guides/developer_guides/getting_started.md
rename to docs/docs/guides/getting_started.md
index cceabca3bfd0..1e1eafa8750f 100644
--- a/docs/docs/guides/developer_guides/getting_started.md
+++ b/docs/docs/guides/getting_started.md
@@ -225,8 +225,6 @@ Simulation result: 25n
Now you have a development network running, so you're ready to start coding your first app with Aztec.nr and Aztec.js!
-If you want to start coding, head over to the Tutorials & Examples section and write & deploy your first smart contract.
-
@@ -237,3 +235,5 @@ If you want to start coding, head over to the Tutorials & Examples section and w
+ +If you'd rather clone a repo, check out the [Aztec Starter](https://github.com/AztecProtocol/aztec-starter). diff --git a/docs/docs/guides/index.md b/docs/docs/guides/index.md index 174895e4a377..cb047909a870 100644 --- a/docs/docs/guides/index.md +++ b/docs/docs/guides/index.md @@ -1,17 +1,20 @@ --- id: index sidebar_position: 0 -title: Guides +title: Guides and Tutorials --- -# Popular Guides +# Guides and Tutorials -Guides are step-by-step how-tos to achieve a specific goal. On this page you can find the most popular ones. You can also explore them all by checking out the sidebar. +In this section you will find: + +- A list of tutorials in order of increasing complexity, allowing you to write contracts and build applications on Aztec +- How-to guides for accomplishing quick, specific goals ## Getting Started
- +
Getting Started
@@ -21,94 +24,24 @@ Guides are step-by-step how-tos to achieve a specific goal. On this page you can
-## Building smart contracts +## Building applications
-Compile a contract
-Learn how to compile a smart contract and generate TypeScript bindings
-Deploy a contract
-Deploy a contract to a local Aztec sandbox
-Testing Contracts
+Contract Tutorials
-Write tests for your contracts and run them in the TXE
+Go from zero to hero by following these tutorials in order, starting with a counter contract
-Communicate with L1
+Full stack app on Aztec
-How to use portals to communicate with L1 from your contract
-## JavaScript
-Send a transaction
-Use Aztec.JS to send a transaction by calling a function on a smart contract
-Testing
-Write end-to-end tests in Javascript using Aztec.JS
-## Local environment
-Update your environment
-Update all aspects of your Aztec environment, including the sandbox, aztec-nargo, Aztec.nr packages, and Aztec.js packages
-Run more than one PXE
-Test that your contracts can work with multiple interactions by running a second PXE
+Learn how everything works together by building an app in JavaScript that connects to a contract
@@ -132,8 +65,4 @@ Guides are step-by-step how-tos to achieve a specific goal. On this page you can Participate in the Aztec protocol as a prover node, proving the rollup integrity that is pivotal to the protocol. Runs on hardware fit for data centers. - - - \ No newline at end of file diff --git a/docs/docs/index.mdx b/docs/docs/index.mdx index b38e104d38e7..938bf165d38d 100644 --- a/docs/docs/index.mdx +++ b/docs/docs/index.mdx @@ -7,29 +7,43 @@ sidebar_position: 0 # Aztec Documentation -## Aztec is a Privacy-First L2 on Ethereum +## What is Aztec? + +### Aztec is a Privacy-First L2 on Ethereum On Ethereum today, everything is publicly visible, by everyone. In the real world, people enjoy privacy. Aztec brings privacy to Ethereum. -## Get started +- private functions, executed and proved on a user's device +- public functions, executed in the Aztec Virtual Machine +- private state, stored as UTXOs that only the owner can decrypt +- public state, stored in a public merkle tree +- composability between private/public execution and private/public state +- public and private messaging with Ethereum + +To learn more about how Aztec achieves these things, check out the [Aztec concepts overview](/aztec/concepts_overview). -
-Tutorials
+Developer Getting Started Guide
-Start writing Aztec contracts with our tutorials.
+Follow the getting started guide to start developing with the Aztec Sandbox
+## Learn how Aztec works
-References
+Aztec Overview
-Review reference materials for building on Aztec.
+Learn the core concepts that make up the Aztec Protocol
\ No newline at end of file diff --git a/docs/docs/migration_notes.md b/docs/docs/migration_notes.md index bb841a9952ed..03c70c7934b1 100644 --- a/docs/docs/migration_notes.md +++ b/docs/docs/migration_notes.md @@ -6,6 +6,46 @@ keywords: [sandbox, aztec, notes, migration, updating, upgrading] Aztec is in full-speed development. Literally every version breaks compatibility with the previous ones. This page attempts to target errors and difficulties you might encounter when upgrading, and how to resolve them. +## TBD + +### [aztec.nr] Renamed `Header` and associated helpers + +The `Header` struct has been renamed to `BlockHeader`, and the `get_header()` family of functions have been similarly renamed to `get_block_header()`. + +```diff +- let header = context.get_header_at(block_number); ++ let header = context.get_block_header_at(block_number); +``` + +## 0.66 + +### DEBUG env var is removed + +The `DEBUG` variable is no longer used. Use `LOG_LEVEL` with one of `silent`, `fatal`, `error`, `warn`, `info`, `verbose`, `debug`, or `trace`. To tweak log levels per module, add a list of module prefixes with their overridden level. For example, LOG_LEVEL="info; verbose: aztec:sequencer, aztec:archiver; debug: aztec:kv-store" sets `info` as the default log level, `verbose` for the sequencer and archiver, and `debug` for the kv-store. Module name match is done by prefix. + +### `tty` resolve fallback required for browser bundling + +When bundling `aztec.js` for web, the `tty` package now needs to be specified as an empty fallback: + +```diff +resolve: { + plugins: [new ResolveTypeScriptPlugin()], + alias: { './node/index.js': false }, + fallback: { + crypto: false, + os: false, + fs: false, + path: false, + url: false, ++ tty: false, + worker_threads: false, + buffer: require.resolve('buffer/'), + util: require.resolve('util/'), + stream: require.resolve('stream-browserify'), + }, +}, +``` + ## 0.65 ### [aztec.nr] Removed SharedImmutable diff --git a/docs/docs/protocol-specs/public-vm/gen/_instruction-set.mdx b/docs/docs/protocol-specs/public-vm/gen/_instruction-set.mdx index 4d081801c4dc..d5409d96aeff 100644 --- a/docs/docs/protocol-specs/public-vm/gen/_instruction-set.mdx +++ b/docs/docs/protocol-specs/public-vm/gen/_instruction-set.mdx @@ -1,375 +1,636 @@ -[comment]: # (THIS IS A GENERATED FILE! DO NOT EDIT!) -[comment]: # (Generated via `yarn preprocess`) - -[comment]: # (Generated by genMarkdown.js, InstructionSet.js, InstructionSize.js) - -import Markdown from 'react-markdown' -import CodeBlock from '@theme/CodeBlock' +[comment]: # "THIS IS A GENERATED FILE! DO NOT EDIT!" +[comment]: # "Generated via `yarn preprocess`" +[comment]: # "Generated by genMarkdown.js, InstructionSet.js, InstructionSize.js" +import Markdown from "react-markdown"; +import CodeBlock from "@theme/CodeBlock"; ## Instructions Table Click on an instruction name to jump to its section. 
Opcode | Name | Summary | Expression
0x00 | \[\`ADD\`\](#isa-section-add) | Addition (a + b) | `M[dstOffset] = M[aOffset] + M[bOffset] mod 2^k`
0x01 | \[\`SUB\`\](#isa-section-sub) | Subtraction (a - b) | `M[dstOffset] = M[aOffset] - M[bOffset] mod 2^k`
0x02 | \[\`MUL\`\](#isa-section-mul) | Multiplication (a * b) | `M[dstOffset] = M[aOffset] * M[bOffset] mod 2^k`
0x03 | \[\`DIV\`\](#isa-section-div) | Unsigned integer division (a / b) | `M[dstOffset] = M[aOffset] / M[bOffset]`
0x04 | \[\`FDIV\`\](#isa-section-fdiv) | Field division (a / b) | `M[dstOffset] = M[aOffset] / M[bOffset]`
0x05 | \[\`EQ\`\](#isa-section-eq) | Equality check (a == b) | `M[dstOffset] = M[aOffset] == M[bOffset] ? 1 : 0`
0x06 | \[\`LT\`\](#isa-section-lt) | Less-than check (a < b) | `M[dstOffset] = M[aOffset] < M[bOffset] ? 1 : 0`
0x07 | \[\`LTE\`\](#isa-section-lte) | Less-than-or-equals check (a <= b) | `M[dstOffset] = M[aOffset] <= M[bOffset] ? 1 : 0`
0x08 | \[\`AND\`\](#isa-section-and) | Bitwise AND (a & b) | `M[dstOffset] = M[aOffset] AND M[bOffset]`
0x09 | \[\`OR\`\](#isa-section-or) | Bitwise OR (a | b) | `M[dstOffset] = M[aOffset] OR M[bOffset]`
0x0a | \[\`XOR\`\](#isa-section-xor) | Bitwise XOR (a ^ b) | `M[dstOffset] = M[aOffset] XOR M[bOffset]`
0x0b | \[\`NOT\`\](#isa-section-not) | Bitwise NOT (inversion) | `M[dstOffset] = NOT M[aOffset]`
0x0c | \[\`SHL\`\](#isa-section-shl) | Bitwise leftward shift (a << b) | `M[dstOffset] = M[aOffset] << M[bOffset]`
0x0d | \[\`SHR\`\](#isa-section-shr) | Bitwise rightward shift (a >> b) | `M[dstOffset] = M[aOffset] >> M[bOffset]`
0x0e | \[\`CAST\`\](#isa-section-cast) | Type cast | `M[dstOffset] = cast(M[aOffset])`
0x0f | \[\`ADDRESS\`\](#isa-section-address) | Get the address of the currently executing l2 contract | `M[dstOffset] = context.environment.address`
0x10 | \[\`SENDER\`\](#isa-section-sender) | Get the address of the sender (caller of the current context) | `M[dstOffset] = context.environment.sender`
0x11 | \[\`FUNCTIONSELECTOR\`\](#isa-section-functionselector) | Get the function selector of the contract function being executed | `M[dstOffset] = context.environment.functionSelector`
0x12 | \[\`TRANSACTIONFEE\`\](#isa-section-transactionfee) | Get the computed transaction fee during teardown phase, zero otherwise | `M[dstOffset] = context.environment.transactionFee`
0x13 | \[\`CHAINID\`\](#isa-section-chainid) | Get this rollup's L1 chain ID | `M[dstOffset] = context.environment.globals.chainId`
0x14 | \[\`VERSION\`\](#isa-section-version) | Get this rollup's L2 version ID | `M[dstOffset] = context.environment.globals.version`
0x15 | \[\`BLOCKNUMBER\`\](#isa-section-blocknumber) | Get this L2 block's number | `M[dstOffset] = context.environment.globals.blocknumber`
0x16 | \[\`TIMESTAMP\`\](#isa-section-timestamp) | Get this L2 block's timestamp | `M[dstOffset] = context.environment.globals.timestamp`
0x17 | \[\`FEEPERL2GAS\`\](#isa-section-feeperl2gas) | Get the fee to be paid per "L2 gas" - constant for entire transaction | `M[dstOffset] = context.environment.globals.feePerL2Gas`
0x18 | \[\`FEEPERDAGAS\`\](#isa-section-feeperdagas) | Get the fee to be paid per "DA gas" - constant for entire transaction | `M[dstOffset] = context.environment.globals.feePerDaGas`
0x19 | \[\`CALLDATACOPY\`\](#isa-section-calldatacopy) | Copy calldata into memory | `M[dstOffset:dstOffset+copySize] = context.environment.calldata[cdOffset:cdOffset+copySize]`
0x1a | \[\`L2GASLEFT\`\](#isa-section-l2gasleft) | Remaining "L2 gas" for this call (after this instruction) | `M[dstOffset] = context.MachineState.l2GasLeft`
0x1b | \[\`DAGASLEFT\`\](#isa-section-dagasleft) | Remaining "DA gas" for this call (after this instruction) | `M[dstOffset] = context.machineState.daGasLeft`
0x1c | \[\`JUMP\`\](#isa-section-jump) | Jump to a location in the bytecode | `context.machineState.pc = loc`
0x1d | \[\`JUMPI\`\](#isa-section-jumpi) | Conditionally jump to a location in the bytecode | `context.machineState.pc = M[condOffset] > 0 ? loc : context.machineState.pc`
0x1e | \[\`INTERNALCALL\`\](#isa-section-internalcall) | Make an internal call. Push the current PC to the internal call stack and jump to the target location. | `context.machineState.internalCallStack.push(context.machineState.pc); context.machineState.pc = loc`
Opcode | Name | Summary | Expression
0x00 + + \[\`ADD\`\](#isa-section-add) + + Addition (a + b) + + {`M[dstOffset] = M[aOffset] + M[bOffset] mod 2^k`} +
0x01 + + \[\`SUB\`\](#isa-section-sub) + + Subtraction (a - b) + + {`M[dstOffset] = M[aOffset] - M[bOffset] mod 2^k`} +
0x02 + + \[\`MUL\`\](#isa-section-mul) + + Multiplication (a * b) + + {`M[dstOffset] = M[aOffset] * M[bOffset] mod 2^k`} +
0x03 + + \[\`DIV\`\](#isa-section-div) + + Unsigned integer division (a / b) + + {`M[dstOffset] = M[aOffset] / M[bOffset]`} +
0x04 + + \[\`FDIV\`\](#isa-section-fdiv) + + Field division (a / b) + + {`M[dstOffset] = M[aOffset] / M[bOffset]`} +
0x05 + + \[\`EQ\`\](#isa-section-eq) + + Equality check (a == b) + + {`M[dstOffset] = M[aOffset] == M[bOffset] ? 1 : 0`} +
0x06 + + \[\`LT\`\](#isa-section-lt) + + Less-than check (a < b) + + {`M[dstOffset] = M[aOffset] < M[bOffset] ? 1 : 0`} +
0x07 + + \[\`LTE\`\](#isa-section-lte) + + Less-than-or-equals check (a <= b) + + {`M[dstOffset] = M[aOffset] <= M[bOffset] ? 1 : 0`} +
0x08 + + \[\`AND\`\](#isa-section-and) + + Bitwise AND (a & b) + + {`M[dstOffset] = M[aOffset] AND M[bOffset]`} +
0x09 + + \[\`OR\`\](#isa-section-or) + + Bitwise OR (a | b) + + {`M[dstOffset] = M[aOffset] OR M[bOffset]`} +
0x0a + + \[\`XOR\`\](#isa-section-xor) + + Bitwise XOR (a ^ b) + + {`M[dstOffset] = M[aOffset] XOR M[bOffset]`} +
0x0b + + \[\`NOT\`\](#isa-section-not) + + Bitwise NOT (inversion) + + {`M[dstOffset] = NOT M[aOffset]`} +
0x0c + + \[\`SHL\`\](#isa-section-shl) + + Bitwise leftward shift (a << b) + + {`M[dstOffset] = M[aOffset] << M[bOffset]`} +
0x0d + + \[\`SHR\`\](#isa-section-shr) + + Bitwise rightward shift (a >> b) + + {`M[dstOffset] = M[aOffset] >> M[bOffset]`} +
0x0e + + \[\`CAST\`\](#isa-section-cast) + + Type cast + + {`M[dstOffset] = cast(M[aOffset])`} +
0x0f + + \[\`ADDRESS\`\](#isa-section-address) + + + Get the address of the currently executing l2 contract + + + {`M[dstOffset] = context.environment.address`} +
0x10 + + \[\`SENDER\`\](#isa-section-sender) + + + Get the address of the sender (caller of the current context) + + + {`M[dstOffset] = context.environment.sender`} +
0x12 + + \[\`TRANSACTIONFEE\`\](#isa-section-transactionfee) + + + Get the computed transaction fee during teardown phase, zero otherwise + + + {`M[dstOffset] = context.environment.transactionFee`} +
0x13 + + \[\`CHAINID\`\](#isa-section-chainid) + + Get this rollup's L1 chain ID + + {`M[dstOffset] = context.environment.globals.chainId`} +
0x14 + + \[\`VERSION\`\](#isa-section-version) + + Get this rollup's L2 version ID + + {`M[dstOffset] = context.environment.globals.version`} +
0x15 + + \[\`BLOCKNUMBER\`\](#isa-section-blocknumber) + + Get this L2 block's number + + {`M[dstOffset] = context.environment.globals.blocknumber`} +
0x16 + + \[\`TIMESTAMP\`\](#isa-section-timestamp) + + Get this L2 block's timestamp + + {`M[dstOffset] = context.environment.globals.timestamp`} +
0x17 + + \[\`FEEPERL2GAS\`\](#isa-section-feeperl2gas) + + + Get the fee to be paid per "L2 gas" - constant for entire transaction + + + {`M[dstOffset] = context.environment.globals.feePerL2Gas`} +
0x18 + + \[\`FEEPERDAGAS\`\](#isa-section-feeperdagas) + + + Get the fee to be paid per "DA gas" - constant for entire transaction + + + {`M[dstOffset] = context.environment.globals.feePerDaGas`} +
0x19 + + \[\`CALLDATACOPY\`\](#isa-section-calldatacopy) + + Copy calldata into memory + + {`M[dstOffset:dstOffset+copySize] = context.environment.calldata[cdOffset:cdOffset+copySize]`} +
0x1a + + \[\`L2GASLEFT\`\](#isa-section-l2gasleft) + + + Remaining "L2 gas" for this call (after this instruction) + + + {`M[dstOffset] = context.MachineState.l2GasLeft`} +
0x1b + + \[\`DAGASLEFT\`\](#isa-section-dagasleft) + + + Remaining "DA gas" for this call (after this instruction) + + + {`M[dstOffset] = context.machineState.daGasLeft`} +
0x1c + + \[\`JUMP\`\](#isa-section-jump) + + Jump to a location in the bytecode + + {`context.machineState.pc = loc`} +
0x1d + + \[\`JUMPI\`\](#isa-section-jumpi) + + Conditionally jump to a location in the bytecode + + {`context.machineState.pc = M[condOffset] > 0 ? loc : context.machineState.pc`} +
0x1e + + \[\`INTERNALCALL\`\](#isa-section-internalcall) + + + Make an internal call. Push the current PC to the internal call stack + and jump to the target location. + + + + {`context.machineState.internalCallStack.push(context.machineState.pc) context.machineState.pc = loc`} -
0x1f | \[\`INTERNALRETURN\`\](#isa-section-internalreturn) | Return from an internal call. Pop from the internal call stack and jump to the popped location. | `context.machineState.pc = context.machineState.internalCallStack.pop()`
0x20 | \[\`SET\`\](#isa-section-set) | Set a memory word from a constant in the bytecode | `M[dstOffset] = const`
0x21 | \[\`MOV\`\](#isa-section-mov) | Move a word from source memory location to destination | `M[dstOffset] = M[srcOffset]`
0x22 | \[\`CMOV\`\](#isa-section-cmov) | Move a word (conditionally chosen) from one memory location to another (`d = cond > 0 ? a : b`) | `M[dstOffset] = M[condOffset] > 0 ? M[aOffset] : M[bOffset]`
0x23 | \[\`SLOAD\`\](#isa-section-sload) | Load a word from this contract's persistent public storage. Zero is loaded for unwritten slots. | `M[dstOffset] = S[M[slotOffset]]`
0x24 | \[\`SSTORE\`\](#isa-section-sstore) | Write a word to this contract's persistent public storage | `S[M[slotOffset]] = M[srcOffset]`
0x25 | \[\`NOTEHASHEXISTS\`\](#isa-section-notehashexists) | Check whether a note hash exists in the note hash tree (as of the start of the current block) | `exists = context.worldState.noteHashes.has({ leafIndex: M[leafIndexOffset], leaf: hash(context.environment.address, M[noteHashOffset]) }); M[existsOffset] = exists`
0x1f + + \[\`INTERNALRETURN\`\](#isa-section-internalreturn) + + + Return from an internal call. Pop from the internal call stack and jump + to the popped location. + + + {`context.machineState.pc = context.machineState.internalCallStack.pop()`} +
0x20 + + \[\`SET\`\](#isa-section-set) + + Set a memory word from a constant in the bytecode + + {`M[dstOffset] = const`} +
0x21 + + \[\`MOV\`\](#isa-section-mov) + + + Move a word from source memory location to destination + + + {`M[dstOffset] = M[srcOffset]`} +
0x22 + + \[\`CMOV\`\](#isa-section-cmov) + + + Move a word (conditionally chosen) from one memory location to another + (`d = cond > 0 ? a : b`) + + + {`M[dstOffset] = M[condOffset] > 0 ? M[aOffset] : M[bOffset]`} +
0x23 + + \[\`SLOAD\`\](#isa-section-sload) + + + Load a word from this contract's persistent public storage. Zero is + loaded for unwritten slots. + + + {`M[dstOffset] = S[M[slotOffset]]`} +
0x24 + + \[\`SSTORE\`\](#isa-section-sstore) + + + Write a word to this contract's persistent public storage + + + {`S[M[slotOffset]] = M[srcOffset]`} +
0x25 + + \[\`NOTEHASHEXISTS\`\](#isa-section-notehashexists) + + + Check whether a note hash exists in the note hash tree (as of the start + of the current block) + + + + {`exists = context.worldState.noteHashes.has({ leafIndex: M[leafIndexOffset] leaf: hash(context.environment.address, M[noteHashOffset]), }) M[existsOffset] = exists`} -
0x26 | \[\`EMITNOTEHASH\`\](#isa-section-emitnotehash) | Emit a new note hash to be inserted into the note hash tree | `context.worldState.noteHashes.append(hash(context.environment.address, M[noteHashOffset]))`
0x26 + + \[\`EMITNOTEHASH\`\](#isa-section-emitnotehash) + + + Emit a new note hash to be inserted into the note hash tree + + + + {`context.worldState.noteHashes.append( hash(context.environment.address, M[noteHashOffset]) )`} -
0x27 | \[\`NULLIFIEREXISTS\`\](#isa-section-nullifierexists) | Check whether a nullifier exists in the nullifier tree (including nullifiers from earlier in the current transaction or from earlier in the current block) | `exists = pendingNullifiers.has(M[addressOffset], M[nullifierOffset]) || context.worldState.nullifiers.has(hash(M[addressOffset], M[nullifierOffset])); M[existsOffset] = exists`
0x27 + + \[\`NULLIFIEREXISTS\`\](#isa-section-nullifierexists) + + + Check whether a nullifier exists in the nullifier tree (including + nullifiers from earlier in the current transaction or from earlier in + the current block) + + + + {`exists = pendingNullifiers.has(M[addressOffset], M[nullifierOffset]) || context.worldState.nullifiers.has( hash(M[addressOffset], M[nullifierOffset]) ) M[existsOffset] = exists`} -
0x28 | \[\`EMITNULLIFIER\`\](#isa-section-emitnullifier) | Emit a new nullifier to be inserted into the nullifier tree | `context.worldState.nullifiers.append(hash(context.environment.address, M[nullifierOffset]))`
0x28 + + \[\`EMITNULLIFIER\`\](#isa-section-emitnullifier) + + + Emit a new nullifier to be inserted into the nullifier tree + + + + {`context.worldState.nullifiers.append( hash(context.environment.address, M[nullifierOffset]) )`} -
0x29 | \[\`L1TOL2MSGEXISTS\`\](#isa-section-l1tol2msgexists) | Check if a message exists in the L1-to-L2 message tree | `exists = context.worldState.l1ToL2Messages.has({ leafIndex: M[msgLeafIndexOffset], leaf: M[msgHashOffset] }); M[existsOffset] = exists`
0x29 + + \[\`L1TOL2MSGEXISTS\`\](#isa-section-l1tol2msgexists) + + + Check if a message exists in the L1-to-L2 message tree + + + + {`exists = context.worldState.l1ToL2Messages.has({ leafIndex: M[msgLeafIndexOffset], leaf: M[msgHashOffset] }) M[existsOffset] = exists`} -
0x2a\[\`GETCONTRACTINSTANCE\`\](#isa-section-getcontractinstance)Copies contract instance data to memory -{`M[dstOffset:dstOffset+CONTRACT_INSTANCE_SIZE+1] = [ + +
0x2a + + + \[\`GETCONTRACTINSTANCE\`\](#isa-section-getcontractinstance) + + + Copies contract instance data to memory + + + {`M[dstOffset:dstOffset+CONTRACT_INSTANCE_SIZE+1] = [ instance_found_in_address, instance.salt ?? 0, instance.deployer ?? 0, @@ -378,41 +639,64 @@ M[existsOffset] = exists`} instance.portalContractAddress ?? 0, instance.publicKeysHash ?? 0, ]`} -
0x2b | \[\`EMITUNENCRYPTEDLOG\`\](#isa-section-emitunencryptedlog) | Emit an unencrypted log | `context.accruedSubstate.unencryptedLogs.append(UnencryptedLog { address: context.environment.address, log: M[logOffset:logOffset+M[logSizeOffset]] })`
0x2b + + + \[\`EMITUNENCRYPTEDLOG\`\](#isa-section-emitunencryptedlog) + + + Emit an unencrypted log + + + {`context.accruedSubstate.unencryptedLogs.append( UnencryptedLog { address: context.environment.address, log: M[logOffset:logOffset+M[logSizeOffset]], } )`} -
0x2c | \[\`SENDL2TOL1MSG\`\](#isa-section-sendl2tol1msg) | Send an L2-to-L1 message | `context.accruedSubstate.sentL2ToL1Messages.append(SentL2ToL1Message { address: context.environment.address, recipient: M[recipientOffset], message: M[contentOffset] })`
0x2c + + \[\`SENDL2TOL1MSG\`\](#isa-section-sendl2tol1msg) + + Send an L2-to-L1 message + + + {`context.accruedSubstate.sentL2ToL1Messages.append( SentL2ToL1Message { address: context.environment.address, recipient: M[recipientOffset], message: M[contentOffset] } )`} -
0x2d\[\`CALL\`\](#isa-section-call)Call into another contract -{`// instr.args are { gasOffset, addrOffset, argsOffset, retOffset, retSize } + +
0x2d + + \[\`CALL\`\](#isa-section-call) + + Call into another contract + + + {`// instr.args are { gasOffset, addrOffset, argsOffset, retOffset, retSize } chargeGas(context, l2GasCost=M[instr.args.gasOffset], daGasCost=M[instr.args.gasOffset+1]) @@ -420,14 +704,24 @@ traceNestedCall(context, instr.args.addrOffset) nestedContext = deriveContext(context, instr.args, isStaticCall=false) execute(nestedContext) updateContextAfterNestedCall(context, instr.args, nestedContext)`} -
0x2e\[\`STATICCALL\`\](#isa-section-staticcall)Call into another contract, disallowing World State and Accrued Substate modifications -{`// instr.args are { gasOffset, addrOffset, argsOffset, retOffset, retSize } + +
0x2e + + \[\`STATICCALL\`\](#isa-section-staticcall) + + + Call into another contract, disallowing World State and Accrued Substate + modifications + + + + {`// instr.args are { gasOffset, addrOffset, argsOffset, retOffset, retSize } chargeGas(context, l2GasCost=M[instr.args.gasOffset], daGasCost=M[instr.args.gasOffset+1]) @@ -435,52 +729,84 @@ traceNestedCall(context, instr.args.addrOffset) nestedContext = deriveContext(context, instr.args, isStaticCall=true execute(nestedContext) updateContextAfterNestedCall(context, instr.args, nestedContext)`} -
0x2f | \[\`RETURN\`\](#isa-section-return) | Halt execution within this context (without revert), optionally returning some data | `context.contractCallResults.output = M[retOffset:retOffset+retSize]; halt`
0x2f + + \[\`RETURN\`\](#isa-section-return) + + + Halt execution within this context (without revert), optionally + returning some data + + + + {`context.contractCallResults.output = M[retOffset:retOffset+retSize] halt`} -
0x30 | \[\`REVERT\`\](#isa-section-revert) | Halt execution within this context as `reverted`, optionally returning some data | `context.contractCallResults.output = M[retOffset:retOffset+retSize]; context.contractCallResults.reverted = true; halt`
0x30 + + \[\`REVERT\`\](#isa-section-revert) + + + Halt execution within this context as `reverted`, optionally returning + some data + + + + {`context.contractCallResults.output = M[retOffset:retOffset+retSize] context.contractCallResults.reverted = true halt`} -
0x31 | \[\`TORADIXLE\`\](#isa-section-to_radix_le) | Convert a word to an array of limbs in little-endian radix form | TBD: Storage of limbs and if T[dstOffset] is constrained to U8
0x31 + + \[\`TORADIXLE\`\](#isa-section-to_radix_le) + + + Convert a word to an array of limbs in little-endian radix form + + + + TBD: Storage of limbs and if T[dstOffset] is constrained to U8 + +
- ## Instructions ### `ADD` + Addition (a + b) [See in table.](#isa-table-add) - **Opcode**: 0x00 - **Category**: Compute - Arithmetic -- **Flags**: - - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. - - **inTag**: The [tag/size](./memory-model#tags-and-tagged-memory) to check inputs against and tag the destination with. -- **Args**: - - **aOffset**: memory offset of the operation's left input - - **bOffset**: memory offset of the operation's right input - - **dstOffset**: memory offset specifying where to store operation's result +- **Flags**: + - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. + - **inTag**: The [tag/size](./memory-model#tags-and-tagged-memory) to check inputs against and tag the destination with. +- **Args**: + - **aOffset**: memory offset of the operation's left input + - **bOffset**: memory offset of the operation's right input + - **dstOffset**: memory offset specifying where to store operation's result - **Expression**: `M[dstOffset] = M[aOffset] + M[bOffset] mod 2^k` - **Details**: Wraps on overflow - **Tag checks**: `T[aOffset] == T[bOffset] == inTag` @@ -490,19 +816,20 @@ Addition (a + b) [![](/img/protocol-specs/public-vm/bit-formats/ADD.png)](/img/protocol-specs/public-vm/bit-formats/ADD.png) ### `SUB` + Subtraction (a - b) [See in table.](#isa-table-sub) - **Opcode**: 0x01 - **Category**: Compute - Arithmetic -- **Flags**: - - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. - - **inTag**: The [tag/size](./memory-model#tags-and-tagged-memory) to check inputs against and tag the destination with. -- **Args**: - - **aOffset**: memory offset of the operation's left input - - **bOffset**: memory offset of the operation's right input - - **dstOffset**: memory offset specifying where to store operation's result +- **Flags**: + - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. + - **inTag**: The [tag/size](./memory-model#tags-and-tagged-memory) to check inputs against and tag the destination with. +- **Args**: + - **aOffset**: memory offset of the operation's left input + - **bOffset**: memory offset of the operation's right input + - **dstOffset**: memory offset specifying where to store operation's result - **Expression**: `M[dstOffset] = M[aOffset] - M[bOffset] mod 2^k` - **Details**: Wraps on undeflow - **Tag checks**: `T[aOffset] == T[bOffset] == inTag` @@ -512,19 +839,20 @@ Subtraction (a - b) [![](/img/protocol-specs/public-vm/bit-formats/SUB.png)](/img/protocol-specs/public-vm/bit-formats/SUB.png) ### `MUL` -Multiplication (a * b) + +Multiplication (a \* b) [See in table.](#isa-table-mul) - **Opcode**: 0x02 - **Category**: Compute - Arithmetic -- **Flags**: - - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. 
Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. - - **inTag**: The [tag/size](./memory-model#tags-and-tagged-memory) to check inputs against and tag the destination with. -- **Args**: - - **aOffset**: memory offset of the operation's left input - - **bOffset**: memory offset of the operation's right input - - **dstOffset**: memory offset specifying where to store operation's result +- **Flags**: + - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. + - **inTag**: The [tag/size](./memory-model#tags-and-tagged-memory) to check inputs against and tag the destination with. +- **Args**: + - **aOffset**: memory offset of the operation's left input + - **bOffset**: memory offset of the operation's right input + - **dstOffset**: memory offset specifying where to store operation's result - **Expression**: `M[dstOffset] = M[aOffset] * M[bOffset] mod 2^k` - **Details**: Wraps on overflow - **Tag checks**: `T[aOffset] == T[bOffset] == inTag` @@ -534,19 +862,20 @@ Multiplication (a * b) [![](/img/protocol-specs/public-vm/bit-formats/MUL.png)](/img/protocol-specs/public-vm/bit-formats/MUL.png) ### `DIV` + Unsigned integer division (a / b) [See in table.](#isa-table-div) - **Opcode**: 0x03 - **Category**: Compute - Arithmetic -- **Flags**: - - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. - - **inTag**: The [tag/size](./memory-model#tags-and-tagged-memory) to check inputs against and tag the destination with. -- **Args**: - - **aOffset**: memory offset of the operation's left input - - **bOffset**: memory offset of the operation's right input - - **dstOffset**: memory offset specifying where to store operation's result +- **Flags**: + - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. + - **inTag**: The [tag/size](./memory-model#tags-and-tagged-memory) to check inputs against and tag the destination with. +- **Args**: + - **aOffset**: memory offset of the operation's left input + - **bOffset**: memory offset of the operation's right input + - **dstOffset**: memory offset specifying where to store operation's result - **Expression**: `M[dstOffset] = M[aOffset] / M[bOffset]` - **Details**: If the input is a field, it will be interpreted as an integer - **Tag checks**: `T[aOffset] == T[bOffset] == inTag` @@ -556,38 +885,39 @@ Unsigned integer division (a / b) [![](/img/protocol-specs/public-vm/bit-formats/DIV.png)](/img/protocol-specs/public-vm/bit-formats/DIV.png) ### `FDIV` + Field division (a / b) [See in table.](#isa-table-fdiv) - **Opcode**: 0x04 - **Category**: Compute - Arithmetic -- **Flags**: - - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. 
-- **Args**: - - **aOffset**: memory offset of the operation's left input - - **bOffset**: memory offset of the operation's right input - - **dstOffset**: memory offset specifying where to store operation's result +- **Flags**: + - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. +- **Args**: + - **aOffset**: memory offset of the operation's left input + - **bOffset**: memory offset of the operation's right input + - **dstOffset**: memory offset specifying where to store operation's result - **Expression**: `M[dstOffset] = M[aOffset] / M[bOffset]` - **Tag checks**: `T[aOffset] == T[bOffset] == field` - **Tag updates**: `T[dstOffset] = field` - **Bit-size**: 120 - ### `EQ` + Equality check (a \=\= b) [See in table.](#isa-table-eq) - **Opcode**: 0x05 - **Category**: Compute - Comparators -- **Flags**: - - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. - - **inTag**: The [tag/size](./memory-model#tags-and-tagged-memory) to check inputs against and tag the destination with. -- **Args**: - - **aOffset**: memory offset of the operation's left input - - **bOffset**: memory offset of the operation's right input - - **dstOffset**: memory offset specifying where to store operation's result +- **Flags**: + - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. + - **inTag**: The [tag/size](./memory-model#tags-and-tagged-memory) to check inputs against and tag the destination with. +- **Args**: + - **aOffset**: memory offset of the operation's left input + - **bOffset**: memory offset of the operation's right input + - **dstOffset**: memory offset specifying where to store operation's result - **Expression**: `M[dstOffset] = M[aOffset] == M[bOffset] ? 1 : 0` - **Tag checks**: `T[aOffset] == T[bOffset] == inTag` - **Tag updates**: `T[dstOffset] = u8` @@ -596,19 +926,20 @@ Equality check (a \=\= b) [![](/img/protocol-specs/public-vm/bit-formats/EQ.png)](/img/protocol-specs/public-vm/bit-formats/EQ.png) ### `LT` + Less-than check (a \< b) [See in table.](#isa-table-lt) - **Opcode**: 0x06 - **Category**: Compute - Comparators -- **Flags**: - - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. - - **inTag**: The [tag/size](./memory-model#tags-and-tagged-memory) to check inputs against and tag the destination with. -- **Args**: - - **aOffset**: memory offset of the operation's left input - - **bOffset**: memory offset of the operation's right input - - **dstOffset**: memory offset specifying where to store operation's result +- **Flags**: + - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. 
+ - **inTag**: The [tag/size](./memory-model#tags-and-tagged-memory) to check inputs against and tag the destination with. +- **Args**: + - **aOffset**: memory offset of the operation's left input + - **bOffset**: memory offset of the operation's right input + - **dstOffset**: memory offset specifying where to store operation's result - **Expression**: `M[dstOffset] = M[aOffset] < M[bOffset] ? 1 : 0` - **Tag checks**: `T[aOffset] == T[bOffset] == inTag` - **Tag updates**: `T[dstOffset] = u8` @@ -617,19 +948,20 @@ Less-than check (a \< b) [![](/img/protocol-specs/public-vm/bit-formats/LT.png)](/img/protocol-specs/public-vm/bit-formats/LT.png) ### `LTE` + Less-than-or-equals check (a \<\= b) [See in table.](#isa-table-lte) - **Opcode**: 0x07 - **Category**: Compute - Comparators -- **Flags**: - - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. - - **inTag**: The [tag/size](./memory-model#tags-and-tagged-memory) to check inputs against and tag the destination with. -- **Args**: - - **aOffset**: memory offset of the operation's left input - - **bOffset**: memory offset of the operation's right input - - **dstOffset**: memory offset specifying where to store operation's result +- **Flags**: + - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. + - **inTag**: The [tag/size](./memory-model#tags-and-tagged-memory) to check inputs against and tag the destination with. +- **Args**: + - **aOffset**: memory offset of the operation's left input + - **bOffset**: memory offset of the operation's right input + - **dstOffset**: memory offset specifying where to store operation's result - **Expression**: `M[dstOffset] = M[aOffset] <= M[bOffset] ? 1 : 0` - **Tag checks**: `T[aOffset] == T[bOffset] == inTag` - **Tag updates**: `T[dstOffset] = u8` @@ -638,19 +970,20 @@ Less-than-or-equals check (a \<\= b) [![](/img/protocol-specs/public-vm/bit-formats/LTE.png)](/img/protocol-specs/public-vm/bit-formats/LTE.png) ### `AND` + Bitwise AND (a & b) [See in table.](#isa-table-and) - **Opcode**: 0x08 - **Category**: Compute - Bitwise -- **Flags**: - - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. - - **inTag**: The [tag/size](./memory-model#tags-and-tagged-memory) to check inputs against and tag the destination with. `field` type is NOT supported for this instruction. -- **Args**: - - **aOffset**: memory offset of the operation's left input - - **bOffset**: memory offset of the operation's right input - - **dstOffset**: memory offset specifying where to store operation's result +- **Flags**: + - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. + - **inTag**: The [tag/size](./memory-model#tags-and-tagged-memory) to check inputs against and tag the destination with. `field` type is NOT supported for this instruction. 
+- **Args**: + - **aOffset**: memory offset of the operation's left input + - **bOffset**: memory offset of the operation's right input + - **dstOffset**: memory offset specifying where to store operation's result - **Expression**: `M[dstOffset] = M[aOffset] AND M[bOffset]` - **Tag checks**: `T[aOffset] == T[bOffset] == inTag` - **Tag updates**: `T[dstOffset] = inTag` @@ -659,19 +992,20 @@ Bitwise AND (a & b) [![](/img/protocol-specs/public-vm/bit-formats/AND.png)](/img/protocol-specs/public-vm/bit-formats/AND.png) ### `OR` + Bitwise OR (a | b) [See in table.](#isa-table-or) - **Opcode**: 0x09 - **Category**: Compute - Bitwise -- **Flags**: - - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. - - **inTag**: The [tag/size](./memory-model#tags-and-tagged-memory) to check inputs against and tag the destination with. `field` type is NOT supported for this instruction. -- **Args**: - - **aOffset**: memory offset of the operation's left input - - **bOffset**: memory offset of the operation's right input - - **dstOffset**: memory offset specifying where to store operation's result +- **Flags**: + - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. + - **inTag**: The [tag/size](./memory-model#tags-and-tagged-memory) to check inputs against and tag the destination with. `field` type is NOT supported for this instruction. +- **Args**: + - **aOffset**: memory offset of the operation's left input + - **bOffset**: memory offset of the operation's right input + - **dstOffset**: memory offset specifying where to store operation's result - **Expression**: `M[dstOffset] = M[aOffset] OR M[bOffset]` - **Tag checks**: `T[aOffset] == T[bOffset] == inTag` - **Tag updates**: `T[dstOffset] = inTag` @@ -680,19 +1014,20 @@ Bitwise OR (a | b) [![](/img/protocol-specs/public-vm/bit-formats/OR.png)](/img/protocol-specs/public-vm/bit-formats/OR.png) ### `XOR` + Bitwise XOR (a ^ b) [See in table.](#isa-table-xor) - **Opcode**: 0x0a - **Category**: Compute - Bitwise -- **Flags**: - - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. - - **inTag**: The [tag/size](./memory-model#tags-and-tagged-memory) to check inputs against and tag the destination with. `field` type is NOT supported for this instruction. -- **Args**: - - **aOffset**: memory offset of the operation's left input - - **bOffset**: memory offset of the operation's right input - - **dstOffset**: memory offset specifying where to store operation's result +- **Flags**: + - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. + - **inTag**: The [tag/size](./memory-model#tags-and-tagged-memory) to check inputs against and tag the destination with. `field` type is NOT supported for this instruction. 
+- **Args**: + - **aOffset**: memory offset of the operation's left input + - **bOffset**: memory offset of the operation's right input + - **dstOffset**: memory offset specifying where to store operation's result - **Expression**: `M[dstOffset] = M[aOffset] XOR M[bOffset]` - **Tag checks**: `T[aOffset] == T[bOffset] == inTag` - **Tag updates**: `T[dstOffset] = inTag` @@ -701,18 +1036,19 @@ Bitwise XOR (a ^ b) [![](/img/protocol-specs/public-vm/bit-formats/XOR.png)](/img/protocol-specs/public-vm/bit-formats/XOR.png) ### `NOT` + Bitwise NOT (inversion) [See in table.](#isa-table-not) - **Opcode**: 0x0b - **Category**: Compute - Bitwise -- **Flags**: - - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. - - **inTag**: The [tag/size](./memory-model#tags-and-tagged-memory) to check inputs against and tag the destination with. `field` type is NOT supported for this instruction. -- **Args**: - - **aOffset**: memory offset of the operation's input - - **dstOffset**: memory offset specifying where to store operation's result +- **Flags**: + - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. + - **inTag**: The [tag/size](./memory-model#tags-and-tagged-memory) to check inputs against and tag the destination with. `field` type is NOT supported for this instruction. +- **Args**: + - **aOffset**: memory offset of the operation's input + - **dstOffset**: memory offset specifying where to store operation's result - **Expression**: `M[dstOffset] = NOT M[aOffset]` - **Tag checks**: `T[aOffset] == inTag` - **Tag updates**: `T[dstOffset] = inTag` @@ -721,19 +1057,20 @@ Bitwise NOT (inversion) [![](/img/protocol-specs/public-vm/bit-formats/NOT.png)](/img/protocol-specs/public-vm/bit-formats/NOT.png) ### `SHL` + Bitwise leftward shift (a \<\< b) [See in table.](#isa-table-shl) - **Opcode**: 0x0c - **Category**: Compute - Bitwise -- **Flags**: - - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. - - **inTag**: The [tag/size](./memory-model#tags-and-tagged-memory) to check inputs against and tag the destination with. `field` type is NOT supported for this instruction. -- **Args**: - - **aOffset**: memory offset of the operation's left input - - **bOffset**: memory offset of the operation's right input - - **dstOffset**: memory offset specifying where to store operation's result +- **Flags**: + - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. + - **inTag**: The [tag/size](./memory-model#tags-and-tagged-memory) to check inputs against and tag the destination with. `field` type is NOT supported for this instruction. 
+- **Args**: + - **aOffset**: memory offset of the operation's left input + - **bOffset**: memory offset of the operation's right input + - **dstOffset**: memory offset specifying where to store operation's result - **Expression**: `M[dstOffset] = M[aOffset] << M[bOffset]` - **Tag checks**: `T[aOffset] == inTag`, `T[bOffset] == u8` - **Tag updates**: `T[dstOffset] = inTag` @@ -742,19 +1079,20 @@ Bitwise leftward shift (a \<\< b) [![](/img/protocol-specs/public-vm/bit-formats/SHL.png)](/img/protocol-specs/public-vm/bit-formats/SHL.png) ### `SHR` + Bitwise rightward shift (a \>\> b) [See in table.](#isa-table-shr) - **Opcode**: 0x0d - **Category**: Compute - Bitwise -- **Flags**: - - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. - - **inTag**: The [tag/size](./memory-model#tags-and-tagged-memory) to check inputs against and tag the destination with. `field` type is NOT supported for this instruction. -- **Args**: - - **aOffset**: memory offset of the operation's left input - - **bOffset**: memory offset of the operation's right input - - **dstOffset**: memory offset specifying where to store operation's result +- **Flags**: + - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. + - **inTag**: The [tag/size](./memory-model#tags-and-tagged-memory) to check inputs against and tag the destination with. `field` type is NOT supported for this instruction. +- **Args**: + - **aOffset**: memory offset of the operation's left input + - **bOffset**: memory offset of the operation's right input + - **dstOffset**: memory offset specifying where to store operation's result - **Expression**: `M[dstOffset] = M[aOffset] >> M[bOffset]` - **Tag checks**: `T[aOffset] == inTag`, `T[bOffset] == u8` - **Tag updates**: `T[dstOffset] = inTag` @@ -763,18 +1101,19 @@ Bitwise rightward shift (a \>\> b) [![](/img/protocol-specs/public-vm/bit-formats/SHR.png)](/img/protocol-specs/public-vm/bit-formats/SHR.png) ### `CAST` + Type cast [See in table.](#isa-table-cast) - **Opcode**: 0x0e - **Category**: Type Conversions -- **Flags**: - - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. - - **dstTag**: The [tag/size](./memory-model#tags-and-tagged-memory) to tag the destination with but not to check inputs against. -- **Args**: - - **aOffset**: memory offset of word to cast - - **dstOffset**: memory offset specifying where to store operation's result +- **Flags**: + - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. + - **dstTag**: The [tag/size](./memory-model#tags-and-tagged-memory) to tag the destination with but not to check inputs against. 
+- **Args**: + - **aOffset**: memory offset of word to cast + - **dstOffset**: memory offset specifying where to store operation's result - **Expression**: `M[dstOffset] = cast(M[aOffset])` - **Details**: Cast a word in memory based on the `dstTag` specified in the bytecode. Truncates (`M[dstOffset] = M[aOffset] mod 2^dstsize`) when casting to a smaller type, left-zero-pads when casting to a larger type. See [here](./memory-model#cast-and-tag-conversions) for more details. - **Tag updates**: `T[dstOffset] = dstTag` @@ -783,16 +1122,17 @@ Type cast [![](/img/protocol-specs/public-vm/bit-formats/CAST.png)](/img/protocol-specs/public-vm/bit-formats/CAST.png) ### `ADDRESS` + Get the address of the currently executing l2 contract [See in table.](#isa-table-address) - **Opcode**: 0x0f - **Category**: Execution Environment -- **Flags**: - - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. -- **Args**: - - **dstOffset**: memory offset specifying where to store operation's result +- **Flags**: + - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. +- **Args**: + - **dstOffset**: memory offset specifying where to store operation's result - **Expression**: `M[dstOffset] = context.environment.address` - **Tag updates**: `T[dstOffset] = field` - **Bit-size**: 56 @@ -800,65 +1140,51 @@ Get the address of the currently executing l2 contract [![](/img/protocol-specs/public-vm/bit-formats/ADDRESS.png)](/img/protocol-specs/public-vm/bit-formats/ADDRESS.png) ### `SENDER` + Get the address of the sender (caller of the current context) [See in table.](#isa-table-sender) - **Opcode**: 0x10 - **Category**: Execution Environment -- **Flags**: - - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. -- **Args**: - - **dstOffset**: memory offset specifying where to store operation's result +- **Flags**: + - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. +- **Args**: + - **dstOffset**: memory offset specifying where to store operation's result - **Expression**: `M[dstOffset] = context.environment.sender` - **Tag updates**: `T[dstOffset] = field` - **Bit-size**: 56 [![](/img/protocol-specs/public-vm/bit-formats/SENDER.png)](/img/protocol-specs/public-vm/bit-formats/SENDER.png) -### `FUNCTIONSELECTOR` -Get the function selector of the contract function being executed - -[See in table.](#isa-table-functionselector) - -- **Opcode**: 0x11 -- **Category**: Execution Environment -- **Flags**: - - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. 
### `ADDRESS` + Get the address of the currently executing L2 contract [See in table.](#isa-table-address) - **Opcode**: 0x0f - **Category**: Execution Environment -- **Flags**: - - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. -- **Args**: - - **dstOffset**: memory offset specifying where to store operation's result +- **Flags**: + - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. +- **Args**: + - **dstOffset**: memory offset specifying where to store operation's result - **Expression**: `M[dstOffset] = context.environment.address` - **Tag updates**: `T[dstOffset] = field` - **Bit-size**: 56 @@ -800,65 +1140,51 @@ Get the address of the currently executing l2 contract [![](/img/protocol-specs/public-vm/bit-formats/ADDRESS.png)](/img/protocol-specs/public-vm/bit-formats/ADDRESS.png) ### `SENDER` + Get the address of the sender (caller of the current context) [See in table.](#isa-table-sender) - **Opcode**: 0x10 - **Category**: Execution Environment -- **Flags**: - - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. -- **Args**: - - **dstOffset**: memory offset specifying where to store operation's result +- **Flags**: + - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. +- **Args**: + - **dstOffset**: memory offset specifying where to store operation's result - **Expression**: `M[dstOffset] = context.environment.sender` - **Tag updates**: `T[dstOffset] = field` - **Bit-size**: 56 [![](/img/protocol-specs/public-vm/bit-formats/SENDER.png)](/img/protocol-specs/public-vm/bit-formats/SENDER.png) -### `FUNCTIONSELECTOR` -Get the function selector of the contract function being executed - -[See in table.](#isa-table-functionselector) - -- **Opcode**: 0x11 -- **Category**: Execution Environment -- **Flags**: - - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. -- **Args**: - - **dstOffset**: memory offset specifying where to store operation's result -- **Expression**: `M[dstOffset] = context.environment.functionSelector` -- **Tag updates**: `T[dstOffset] = u32` -- **Bit-size**: 56 - - ### `TRANSACTIONFEE` + Get the computed transaction fee during teardown phase, zero otherwise [See in table.](#isa-table-transactionfee) - **Opcode**: 0x12 - **Category**: Execution Environment -- **Flags**: - - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. -- **Args**: - - **dstOffset**: memory offset specifying where to store operation's result +- **Flags**: + - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. +- **Args**: + - **dstOffset**: memory offset specifying where to store operation's result - **Expression**: `M[dstOffset] = context.environment.transactionFee` - **Tag updates**: `T[dstOffset] = field` - **Bit-size**: 56 - ### `CHAINID` + Get this rollup's L1 chain ID [See in table.](#isa-table-chainid) - **Opcode**: 0x13 - **Category**: Execution Environment - Globals -- **Flags**: - - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. -- **Args**: - - **dstOffset**: memory offset specifying where to store operation's result +- **Flags**: + - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. +- **Args**: + - **dstOffset**: memory offset specifying where to store operation's result - **Expression**: `M[dstOffset] = context.environment.globals.chainId` - **Tag updates**: `T[dstOffset] = field` - **Bit-size**: 56 @@ -866,16 +1192,17 @@ Get this rollup's L1 chain ID [![](/img/protocol-specs/public-vm/bit-formats/CHAINID.png)](/img/protocol-specs/public-vm/bit-formats/CHAINID.png) ### `VERSION` + Get this rollup's L2 version ID [See in table.](#isa-table-version) - **Opcode**: 0x14 - **Category**: Execution Environment - Globals -- **Flags**: - - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. -- **Args**: - - **dstOffset**: memory offset specifying where to store operation's result +- **Flags**: + - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`.
+- **Args**: + - **dstOffset**: memory offset specifying where to store operation's result - **Expression**: `M[dstOffset] = context.environment.globals.version` - **Tag updates**: `T[dstOffset] = field` - **Bit-size**: 56 @@ -883,16 +1210,17 @@ Get this rollup's L2 version ID [![](/img/protocol-specs/public-vm/bit-formats/VERSION.png)](/img/protocol-specs/public-vm/bit-formats/VERSION.png) ### `BLOCKNUMBER` + Get this L2 block's number [See in table.](#isa-table-blocknumber) - **Opcode**: 0x15 - **Category**: Execution Environment - Globals -- **Flags**: - - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. -- **Args**: - - **dstOffset**: memory offset specifying where to store operation's result +- **Flags**: + - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. +- **Args**: + - **dstOffset**: memory offset specifying where to store operation's result - **Expression**: `M[dstOffset] = context.environment.globals.blocknumber` - **Tag updates**: `T[dstOffset] = field` - **Bit-size**: 56 @@ -900,16 +1228,17 @@ Get this L2 block's number [![](/img/protocol-specs/public-vm/bit-formats/BLOCKNUMBER.png)](/img/protocol-specs/public-vm/bit-formats/BLOCKNUMBER.png) ### `TIMESTAMP` + Get this L2 block's timestamp [See in table.](#isa-table-timestamp) - **Opcode**: 0x16 - **Category**: Execution Environment - Globals -- **Flags**: - - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. -- **Args**: - - **dstOffset**: memory offset specifying where to store operation's result +- **Flags**: + - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. +- **Args**: + - **dstOffset**: memory offset specifying where to store operation's result - **Expression**: `M[dstOffset] = context.environment.globals.timestamp` - **Tag updates**: `T[dstOffset] = u64` - **Bit-size**: 56 @@ -917,16 +1246,17 @@ Get this L2 block's timestamp [![](/img/protocol-specs/public-vm/bit-formats/TIMESTAMP.png)](/img/protocol-specs/public-vm/bit-formats/TIMESTAMP.png) ### `FEEPERL2GAS` + Get the fee to be paid per "L2 gas" - constant for entire transaction [See in table.](#isa-table-feeperl2gas) - **Opcode**: 0x17 - **Category**: Execution Environment - Globals - Gas -- **Flags**: - - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. -- **Args**: - - **dstOffset**: memory offset specifying where to store operation's result +- **Flags**: + - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. 
+- **Args**: + - **dstOffset**: memory offset specifying where to store operation's result - **Expression**: `M[dstOffset] = context.environment.globals.feePerL2Gas` - **Tag updates**: `T[dstOffset] = field` - **Bit-size**: 56 @@ -934,16 +1264,17 @@ Get the fee to be paid per "L2 gas" - constant for entire transaction [![](/img/protocol-specs/public-vm/bit-formats/FEEPERL2GAS.png)](/img/protocol-specs/public-vm/bit-formats/FEEPERL2GAS.png) ### `FEEPERDAGAS` + Get the fee to be paid per "DA gas" - constant for entire transaction [See in table.](#isa-table-feeperdagas) - **Opcode**: 0x18 - **Category**: Execution Environment - Globals - Gas -- **Flags**: - - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. -- **Args**: - - **dstOffset**: memory offset specifying where to store operation's result +- **Flags**: + - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. +- **Args**: + - **dstOffset**: memory offset specifying where to store operation's result - **Expression**: `M[dstOffset] = context.environment.globals.feePerDaGas` - **Tag updates**: `T[dstOffset] = field` - **Bit-size**: 56 @@ -951,18 +1282,19 @@ Get the fee to be paid per "DA gas" - constant for entire transaction [![](/img/protocol-specs/public-vm/bit-formats/FEEPERDAGAS.png)](/img/protocol-specs/public-vm/bit-formats/FEEPERDAGAS.png) ### `CALLDATACOPY` + Copy calldata into memory [See in table.](#isa-table-calldatacopy) - **Opcode**: 0x19 - **Category**: Execution Environment - Calldata -- **Flags**: - - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. -- **Args**: - - **cdOffset**: offset into calldata to copy from - - **copySize**: number of words to copy - - **dstOffset**: memory offset specifying where to copy the first word to +- **Flags**: + - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. +- **Args**: + - **cdOffset**: offset into calldata to copy from + - **copySize**: number of words to copy + - **dstOffset**: memory offset specifying where to copy the first word to - **Expression**: `M[dstOffset:dstOffset+copySize] = context.environment.calldata[cdOffset:cdOffset+copySize]` - **Details**: Calldata is read-only and cannot be directly operated on by other instructions. This instruction moves words from calldata into memory so they can be operated on normally. - **Tag updates**: `T[dstOffset:dstOffset+copySize] = field` @@ -971,16 +1303,17 @@ Copy calldata into memory [![](/img/protocol-specs/public-vm/bit-formats/CALLDATACOPY.png)](/img/protocol-specs/public-vm/bit-formats/CALLDATACOPY.png)
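As a rough illustration of the slice semantics and the `field` tagging above, here is a TypeScript sketch — not the canonical implementation; the `Memory` shape is a hypothetical stand-in for the AVM's tagged memory:

```typescript
// Illustrative sketch of CALLDATACOPY (not canonical AVM code).
type Tag = "field" | "u8" | "u16" | "u32" | "u64" | "u128";
interface Memory { words: bigint[]; tags: Tag[] }

function callDataCopy(calldata: bigint[], mem: Memory, cdOffset: number, copySize: number, dstOffset: number): void {
  for (let i = 0; i < copySize; i++) {
    // M[dstOffset:dstOffset+copySize] = calldata[cdOffset:cdOffset+copySize]
    mem.words[dstOffset + i] = calldata[cdOffset + i];
    mem.tags[dstOffset + i] = "field"; // T[dstOffset:dstOffset+copySize] = field
  }
}
```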
### `L2GASLEFT` + Remaining "L2 gas" for this call (after this instruction) [See in table.](#isa-table-l2gasleft) - **Opcode**: 0x1a - **Category**: Machine State - Gas -- **Flags**: - - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. -- **Args**: - - **dstOffset**: memory offset specifying where to store operation's result +- **Flags**: + - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. +- **Args**: + - **dstOffset**: memory offset specifying where to store operation's result - **Expression**: `M[dstOffset] = context.machineState.l2GasLeft` - **Tag updates**: `T[dstOffset] = u32` - **Bit-size**: 56 @@ -988,16 +1321,17 @@ Remaining "L2 gas" for this call (after this instruction) [![](/img/protocol-specs/public-vm/bit-formats/L2GASLEFT.png)](/img/protocol-specs/public-vm/bit-formats/L2GASLEFT.png) ### `DAGASLEFT` + Remaining "DA gas" for this call (after this instruction) [See in table.](#isa-table-dagasleft) - **Opcode**: 0x1b - **Category**: Machine State - Gas -- **Flags**: - - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. -- **Args**: - - **dstOffset**: memory offset specifying where to store operation's result +- **Flags**: + - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. +- **Args**: + - **dstOffset**: memory offset specifying where to store operation's result - **Expression**: `M[dstOffset] = context.machineState.daGasLeft` - **Tag updates**: `T[dstOffset] = u32` - **Bit-size**: 56 @@ -1005,14 +1339,15 @@ Remaining "DA gas" for this call (after this instruction) [![](/img/protocol-specs/public-vm/bit-formats/DAGASLEFT.png)](/img/protocol-specs/public-vm/bit-formats/DAGASLEFT.png) ### `JUMP` + Jump to a location in the bytecode [See in table.](#isa-table-jump) - **Opcode**: 0x1c - **Category**: Machine State - Control Flow -- **Args**: - - **loc**: target location to jump to +- **Args**: + - **loc**: target location to jump to - **Expression**: `context.machineState.pc = loc` - **Details**: Target location is an immediate value (a constant in the bytecode). - **Bit-size**: 48 @@ -1020,17 +1355,18 @@ Jump to a location in the bytecode [![](/img/protocol-specs/public-vm/bit-formats/JUMP.png)](/img/protocol-specs/public-vm/bit-formats/JUMP.png) ### `JUMPI` + Conditionally jump to a location in the bytecode [See in table.](#isa-table-jumpi) - **Opcode**: 0x1d - **Category**: Machine State - Control Flow -- **Flags**: - - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. -- **Args**: - - **loc**: target location conditionally jump to - - **condOffset**: memory offset of the operations 'conditional' input +- **Flags**: + - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`.
+- **Args**: + - **loc**: target location to conditionally jump to + - **condOffset**: memory offset of the operation's 'conditional' input - **Expression**: `context.machineState.pc = M[condOffset] > 0 ? loc : context.machineState.pc` - **Details**: Target location is an immediate value (a constant in the bytecode). `T[condOffset]` is not checked because the greater-than-zero suboperation is the same regardless of type. - **Bit-size**: 88 @@ -1038,24 +1374,30 @@ Conditionally jump to a location in the bytecode [![](/img/protocol-specs/public-vm/bit-formats/JUMPI.png)](/img/protocol-specs/public-vm/bit-formats/JUMPI.png) ### `INTERNALCALL` + Make an internal call. Push the current PC to the internal call stack and jump to the target location. [See in table.](#isa-table-internalcall) - **Opcode**: 0x1e - **Category**: Machine State - Control Flow -- **Args**: - - **loc**: target location to jump/call to -- **Expression**: - -{`context.machineState.internalCallStack.push(context.machineState.pc) +- **Args**: + - **loc**: target location to jump/call to +- **Expression**: + {" "} + {" "} + {" "} + + {`context.machineState.internalCallStack.push(context.machineState.pc) context.machineState.pc = loc`} - + - **Details**: Target location is an immediate value (a constant in the bytecode). - **Bit-size**: 48 +[![](/img/protocol-specs/public-vm/bit-formats/INTERNALCALL.png)](/img/protocol-specs/public-vm/bit-formats/INTERNALCALL.png) ### `INTERNALRETURN` + Return from an internal call. Pop from the internal call stack and jump to the popped location. [See in table.](#isa-table-internalreturn) @@ -1068,18 +1410,19 @@ Return from an internal call. Pop from the internal call stack and jump to the p [![](/img/protocol-specs/public-vm/bit-formats/INTERNALRETURN.png)](/img/protocol-specs/public-vm/bit-formats/INTERNALRETURN.png)
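The two instructions form a push/pop pair over the internal call stack. A minimal TypeScript sketch of that pairing, mirroring the pseudocode above (illustrative only; `MachineState` is a hypothetical stand-in for the AVM's machine state):

```typescript
// Illustrative sketch of INTERNALCALL / INTERNALRETURN (not canonical AVM code).
interface MachineState { pc: number; internalCallStack: number[] }

function internalCall(state: MachineState, loc: number): void {
  state.internalCallStack.push(state.pc); // push the current PC, per the pseudocode above
  state.pc = loc;                         // jump to the immediate target
}

function internalReturn(state: MachineState): void {
  const loc = state.internalCallStack.pop();
  if (loc === undefined) throw new Error("internal call stack underflow");
  state.pc = loc; // jump to the popped location
}
```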
### `SET` + Set a memory word from a constant in the bytecode [See in table.](#isa-table-set) - **Opcode**: 0x20 - **Category**: Machine State - Memory -- **Flags**: - - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. - - **inTag**: The [type/size](./memory-model#tags-and-tagged-memory) to check inputs against and tag the destination with. `field` type is NOT supported for SET. -- **Args**: - - **const**: an N-bit constant value from the bytecode to store in memory (any type except `field`) - - **dstOffset**: memory offset specifying where to store the constant +- **Flags**: + - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. + - **inTag**: The [type/size](./memory-model#tags-and-tagged-memory) to check inputs against and tag the destination with. `field` type is NOT supported for SET. +- **Args**: + - **const**: an N-bit constant value from the bytecode to store in memory (any type except `field`) + - **dstOffset**: memory offset specifying where to store the constant - **Expression**: `M[dstOffset] = const` - **Details**: Set memory word at `dstOffset` to `const`'s immediate value. `const`'s bit-size (N) can be 8, 16, 32, 64, or 128 based on `inTag`. It _cannot be 254 (`field` type)_! - **Tag updates**: `T[dstOffset] = inTag` @@ -1088,17 +1431,18 @@ Set a memory word from a constant in the bytecode [![](/img/protocol-specs/public-vm/bit-formats/SET.png)](/img/protocol-specs/public-vm/bit-formats/SET.png) ### `MOV` + Move a word from source memory location to destination [See in table.](#isa-table-mov) - **Opcode**: 0x21 - **Category**: Machine State - Memory -- **Flags**: - - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. -- **Args**: - - **srcOffset**: memory offset of word to move - - **dstOffset**: memory offset specifying where to store that word +- **Flags**: + - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. +- **Args**: + - **srcOffset**: memory offset of word to move + - **dstOffset**: memory offset specifying where to store that word - **Expression**: `M[dstOffset] = M[srcOffset]` - **Tag updates**: `T[dstOffset] = T[srcOffset]` - **Bit-size**: 88 @@ -1106,19 +1450,20 @@ Move a word from source memory location to destination [![](/img/protocol-specs/public-vm/bit-formats/MOV.png)](/img/protocol-specs/public-vm/bit-formats/MOV.png) ### `CMOV` + Move a word (conditionally chosen) from one memory location to another (`d \= cond \> 0 ? a : b`) [See in table.](#isa-table-cmov) - **Opcode**: 0x22 - **Category**: Machine State - Memory -- **Flags**: - - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. -- **Args**: - - **aOffset**: memory offset of word 'a' to conditionally move - - **bOffset**: memory offset of word 'b' to conditionally move - - **condOffset**: memory offset of the operations 'conditional' input - - **dstOffset**: memory offset specifying where to store operation's result +- **Flags**: + - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. +- **Args**: + - **aOffset**: memory offset of word 'a' to conditionally move + - **bOffset**: memory offset of word 'b' to conditionally move + - **condOffset**: memory offset of the operation's 'conditional' input + - **dstOffset**: memory offset specifying where to store operation's result - **Expression**: `M[dstOffset] = M[condOffset] > 0 ? M[aOffset] : M[bOffset]` - **Details**: One of two source memory locations is chosen based on the condition. `T[condOffset]` is not checked because the greater-than-zero suboperation is the same regardless of type. - **Tag updates**: `T[dstOffset] = M[condOffset] > 0 ? T[aOffset] : T[bOffset]` @@ -1127,24 +1472,26 @@ Move a word (conditionally chosen) from one memory location to another (`d \= co [![](/img/protocol-specs/public-vm/bit-formats/CMOV.png)](/img/protocol-specs/public-vm/bit-formats/CMOV.png)
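Because the destination inherits the tag of whichever source word is chosen, CMOV is a pure memory operation with no tag check on the condition. A small TypeScript sketch (illustrative only; `Cell` is a hypothetical stand-in for a tagged memory word):

```typescript
// Illustrative sketch of CMOV over tagged memory (not canonical AVM code).
type Tag = "field" | "u8" | "u16" | "u32" | "u64" | "u128";
interface Cell { value: bigint; tag: Tag }

function cmov(mem: Cell[], aOffset: number, bOffset: number, condOffset: number, dstOffset: number): void {
  // T[condOffset] is deliberately unchecked: "> 0" means the same thing for every tag.
  const src = mem[condOffset].value > 0n ? mem[aOffset] : mem[bOffset];
  mem[dstOffset] = { ...src }; // destination takes the chosen source's value *and* tag
}
```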
### `SLOAD` + Load a word from this contract's persistent public storage. Zero is loaded for unwritten slots. [See in table.](#isa-table-sload) - **Opcode**: 0x23 - **Category**: World State - Public Storage -- **Flags**: - - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. -- **Args**: - - **slotOffset**: memory offset of the storage slot to load from - - **dstOffset**: memory offset specifying where to store operation's result -- **Expression**: - -{`M[dstOffset] = S[M[slotOffset]]`} - -- **Details**: - -{`// Expression is shorthand for +- **Flags**: + - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. +- **Args**: + - **slotOffset**: memory offset of the storage slot to load from + - **dstOffset**: memory offset specifying where to store operation's result +- **Expression**: + {`M[dstOffset] = S[M[slotOffset]]`} +- **Details**: + {" "} + {" "} + {" "} + + {`// Expression is shorthand for leafIndex = hash(context.environment.address, M[slotOffset]) exists = context.worldState.publicStorage.has(leafIndex) // exists == previously-written if exists: @@ -1152,10 +1499,13 @@ if exists: else: value = 0 M[dstOffset] = value`} - -- **World State access tracing**: - -{`context.worldStateAccessTrace.publicStorageReads.append( + +- **World State access tracing**: + {" "} + {" "} + {" "} + + {`context.worldStateAccessTrace.publicStorageReads.append( TracedStorageRead { callPointer: context.environment.callPointer, slot: M[slotOffset], @@ -1164,7 +1514,7 @@ M[dstOffset] = value`} counter: ++context.worldStateAccessTrace.accessCounter, } )`} - + - **Triggers downstream circuit operations**: Storage slot siloing (hash with contract address), public data tree membership check - **Tag updates**: `T[dstOffset] = field` - **Bit-size**: 88 @@ -1172,32 +1522,37 @@ M[dstOffset] = value`} [![](/img/protocol-specs/public-vm/bit-formats/SLOAD.png)](/img/protocol-specs/public-vm/bit-formats/SLOAD.png) ### `SSTORE` + Write a word to this contract's persistent public storage [See in table.](#isa-table-sstore) - **Opcode**: 0x24 - **Category**: World State - Public Storage -- **Flags**: - - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. -- **Args**: - - **srcOffset**: memory offset of the word to store - - **slotOffset**: memory offset containing the storage slot to store to -- **Expression**: - -{`S[M[slotOffset]] = M[srcOffset]`} - -- **Details**: - -{`// Expression is shorthand for +- **Flags**: + - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`.
+- **Args**: + - **srcOffset**: memory offset of the word to store + - **slotOffset**: memory offset containing the storage slot to store to +- **Expression**: + {`S[M[slotOffset]] = M[srcOffset]`} +- **Details**: + {" "} + {" "} + {" "} + + {`// Expression is shorthand for context.worldState.publicStorage.set({ leafIndex: hash(context.environment.address, M[slotOffset]), leaf: M[srcOffset], })`} - -- **World State access tracing**: - -{`context.worldStateAccessTrace.publicStorageWrites.append( + +- **World State access tracing**: + {" "} + {" "} + {" "} + + {`context.worldStateAccessTrace.publicStorageWrites.append( TracedStorageWrite { callPointer: context.environment.callPointer, slot: M[slotOffset], @@ -1205,36 +1560,43 @@ context.worldState.publicStorage.set({ counter: ++context.worldStateAccessTrace.accessCounter, } )`} - + - **Triggers downstream circuit operations**: Storage slot siloing (hash with contract address), public data tree update - **Bit-size**: 88 [![](/img/protocol-specs/public-vm/bit-formats/SSTORE.png)](/img/protocol-specs/public-vm/bit-formats/SSTORE.png)
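The shorthand `S[M[slotOffset]]` hides the slot-siloing step that both SLOAD and SSTORE perform. The sketch below makes it explicit in TypeScript — illustrative only; `siloHash` is a toy stand-in for the protocol's siloing hash, and the `Map` stands in for the public data tree:

```typescript
// Illustrative sketch of the SLOAD/SSTORE shorthand (not canonical AVM code).
// Toy stand-in for the protocol's siloing hash -- NOT the real hash function.
const siloHash = (address: bigint, slot: bigint): bigint => (address << 256n) ^ slot;

const publicStorage = new Map<bigint, bigint>(); // leafIndex -> leaf

function sload(address: bigint, slot: bigint): bigint {
  const leafIndex = siloHash(address, slot); // storage slot siloing
  return publicStorage.get(leafIndex) ?? 0n; // unwritten slots read as zero
}

function sstore(address: bigint, slot: bigint, value: bigint): void {
  publicStorage.set(siloHash(address, slot), value); // public data tree update
}
```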
### `NOTEHASHEXISTS` + Check whether a note hash exists in the note hash tree (as of the start of the current block) [See in table.](#isa-table-notehashexists) - **Opcode**: 0x25 - **Category**: World State - Notes & Nullifiers -- **Flags**: - - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. -- **Args**: - - **noteHashOffset**: memory offset of the note hash - - **leafIndexOffset**: memory offset of the leaf index - - **existsOffset**: memory offset specifying where to store operation's result (whether the note hash leaf exists) -- **Expression**: - -{`exists = context.worldState.noteHashes.has({ +- **Flags**: + - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. +- **Args**: + - **noteHashOffset**: memory offset of the note hash + - **leafIndexOffset**: memory offset of the leaf index + - **existsOffset**: memory offset specifying where to store operation's result (whether the note hash leaf exists) +- **Expression**: + {" "} + {" "} + {" "} + + {`exists = context.worldState.noteHashes.has({ leafIndex: M[leafIndexOffset] leaf: hash(context.environment.address, M[noteHashOffset]), }) M[existsOffset] = exists`} - -- **World State access tracing**: - -{`context.worldStateAccessTrace.noteHashChecks.append( + +- **World State access tracing**: + {" "} + {" "} + {" "} + + {`context.worldStateAccessTrace.noteHashChecks.append( TracedNoteHashCheck { callPointer: context.environment.callPointer, leafIndex: M[leafIndexOffset] @@ -1243,67 +1605,80 @@ M[existsOffset] = exists`} counter: ++context.worldStateAccessTrace.accessCounter, } )`} - + - **Triggers downstream circuit operations**: Note hash siloing (hash with storage contract address), note hash tree membership check - **Tag updates**: `T[existsOffset] = u8` - **Bit-size**: 120 - ### `EMITNOTEHASH` + Emit a new note hash to be inserted into the note hash tree [See in table.](#isa-table-emitnotehash) - **Opcode**: 0x26 - **Category**: World State - Notes & Nullifiers -- **Flags**: - - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. -- **Args**: - - **noteHashOffset**: memory offset of the note hash -- **Expression**: - -{`context.worldState.noteHashes.append( +- **Flags**: + - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. +- **Args**: + - **noteHashOffset**: memory offset of the note hash +- **Expression**: + {" "} + {" "} + {" "} + + {`context.worldState.noteHashes.append( hash(context.environment.address, M[noteHashOffset]) )`} - -- **World State access tracing**: - -{`context.worldStateAccessTrace.noteHashes.append( + +- **World State access tracing**: + {" "} + {" "} + {" "} + + {`context.worldStateAccessTrace.noteHashes.append( TracedNoteHash { callPointer: context.environment.callPointer, noteHash: M[noteHashOffset], // unsiloed note hash counter: ++context.worldStateAccessTrace.accessCounter, } )`} - + - **Triggers downstream circuit operations**: Note hash siloing (hash with contract address), note hash tree insertion. - **Bit-size**: 56 [![](/img/protocol-specs/public-vm/bit-formats/EMITNOTEHASH.png)](/img/protocol-specs/public-vm/bit-formats/EMITNOTEHASH.png) ### `NULLIFIEREXISTS` + Check whether a nullifier exists in the nullifier tree (including nullifiers from earlier in the current transaction or from earlier in the current block) [See in table.](#isa-table-nullifierexists) - **Opcode**: 0x27 - **Category**: World State - Notes & Nullifiers -- **Flags**: - - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. -- **Args**: - - **nullifierOffset**: memory offset of the unsiloed nullifier - - **addressOffset**: memory offset of the storage address - - **existsOffset**: memory offset specifying where to store operation's result (whether the nullifier exists) -- **Expression**: - -{`exists = pendingNullifiers.has(M[addressOffset], M[nullifierOffset]) || context.worldState.nullifiers.has( +- **Flags**: + - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`.
+- **Args**: + - **nullifierOffset**: memory offset of the unsiloed nullifier + - **addressOffset**: memory offset of the storage address + - **existsOffset**: memory offset specifying where to store operation's result (whether the nullifier exists) +- **Expression**: + {" "} + {" "} + {" "} + + {`exists = pendingNullifiers.has(M[addressOffset], M[nullifierOffset]) || context.worldState.nullifiers.has( hash(M[addressOffset], M[nullifierOffset]) ) M[existsOffset] = exists`} - -- **World State access tracing**: - -{`context.worldStateAccessTrace.nullifierChecks.append( + +- **World State access tracing**: + {" "} + {" "} + {" "} + + {`context.worldStateAccessTrace.nullifierChecks.append( TracedNullifierCheck { callPointer: context.environment.callPointer, nullifier: M[nullifierOffset], @@ -1312,67 +1687,80 @@ M[existsOffset] = exists`} counter: ++context.worldStateAccessTrace.accessCounter, } )`} - + - **Triggers downstream circuit operations**: Nullifier siloing (hash with storage contract address), nullifier tree membership check - **Tag updates**: `T[existsOffset] = u8` - **Bit-size**: 120 - ### `EMITNULLIFIER` + Emit a new nullifier to be inserted into the nullifier tree [See in table.](#isa-table-emitnullifier) - **Opcode**: 0x28 - **Category**: World State - Notes & Nullifiers -- **Flags**: - - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. -- **Args**: - - **nullifierOffset**: memory offset of nullifier -- **Expression**: - -{`context.worldState.nullifiers.append( +- **Flags**: + - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. +- **Args**: + - **nullifierOffset**: memory offset of nullifier +- **Expression**: + {" "} + {" "} + {" "} + + {`context.worldState.nullifiers.append( hash(context.environment.address, M[nullifierOffset]) )`} - -- **World State access tracing**: - -{`context.worldStateAccessTrace.nullifiers.append( + +- **World State access tracing**: + {" "} + {" "} + {" "} + + {`context.worldStateAccessTrace.nullifiers.append( TracedNullifier { callPointer: context.environment.callPointer, nullifier: M[nullifierOffset], // unsiloed nullifier counter: ++context.worldStateAccessTrace.accessCounter, } )`} - + - **Triggers downstream circuit operations**: Nullifier siloing (hash with contract address), nullifier tree non-membership-check and insertion. - **Bit-size**: 56 [![](/img/protocol-specs/public-vm/bit-formats/EMITNULLIFIER.png)](/img/protocol-specs/public-vm/bit-formats/EMITNULLIFIER.png) ### `L1TOL2MSGEXISTS` + Check if a message exists in the L1-to-L2 message tree [See in table.](#isa-table-l1tol2msgexists) - **Opcode**: 0x29 - **Category**: World State - Messaging -- **Flags**: - - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. 
-- **Args**: - - **msgHashOffset**: memory offset of the message hash - - **msgLeafIndexOffset**: memory offset of the message's leaf index in the L1-to-L2 message tree - - **existsOffset**: memory offset specifying where to store operation's result (whether the message exists in the L1-to-L2 message tree) -- **Expression**: - -{`exists = context.worldState.l1ToL2Messages.has({ +- **Flags**: + - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. +- **Args**: + - **msgHashOffset**: memory offset of the message hash + - **msgLeafIndexOffset**: memory offset of the message's leaf index in the L1-to-L2 message tree + - **existsOffset**: memory offset specifying where to store operation's result (whether the message exists in the L1-to-L2 message tree) +- **Expression**: + {" "} + {" "} + {" "} + + {`exists = context.worldState.l1ToL2Messages.has({ leafIndex: M[msgLeafIndexOffset], leaf: M[msgHashOffset] }) M[existsOffset] = exists`} - -- **World State access tracing**: - -{`context.worldStateAccessTrace.l1ToL2MessagesChecks.append( + +- **World State access tracing**: + {" "} + {" "} + {" "} + + {`context.worldStateAccessTrace.l1ToL2MessagesChecks.append( L1ToL2Message { callPointer: context.environment.callPointer, leafIndex: M[msgLeafIndexOffset], @@ -1380,30 +1768,31 @@ M[existsOffset] = exists`} exists: exists, // defined above } )`} - + - **Triggers downstream circuit operations**: L1-to-L2 message tree membership check -- **Tag updates**: - -{`T[existsOffset] = u8,`} - +- **Tag updates**: + {`T[existsOffset] = u8,`} - **Bit-size**: 120 - ### `GETCONTRACTINSTANCE` + Copies contract instance data to memory [See in table.](#isa-table-getcontractinstance) - **Opcode**: 0x2a - **Category**: Other -- **Flags**: - - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. -- **Args**: - - **addressOffset**: memory offset of the contract instance address - - **dstOffset**: location to write the contract instance information to -- **Expression**: - -{`M[dstOffset:dstOffset+CONTRACT_INSTANCE_SIZE+1] = [ +- **Flags**: + - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. +- **Args**: + - **addressOffset**: memory offset of the contract instance address + - **dstOffset**: location to write the contract instance information to +- **Expression**: + {" "} + {" "} + {" "} + + {`M[dstOffset:dstOffset+CONTRACT_INSTANCE_SIZE+1] = [ instance_found_in_address, instance.salt ?? 0, instance.deployer ?? 0, @@ -1412,84 +1801,95 @@ Copies contract instance data to memory instance.portalContractAddress ?? 0, instance.publicKeysHash ?? 
0, ]`} - + - **Additional AVM circuit checks**: TO-DO - **Triggers downstream circuit operations**: TO-DO - **Tag updates**: T[dstOffset:dstOffset+CONTRACT_INSTANCE_SIZE+1] = field - **Bit-size**: 88 - ### `EMITUNENCRYPTEDLOG` + Emit an unencrypted log [See in table.](#isa-table-emitunencryptedlog) - **Opcode**: 0x2b - **Category**: Accrued Substate - Logging -- **Flags**: - - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. -- **Args**: - - **logOffset**: memory offset of the data to log - - **logSizeOffset**: memory offset to number of words to log -- **Expression**: - -{`context.accruedSubstate.unencryptedLogs.append( +- **Flags**: + - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. +- **Args**: + - **logOffset**: memory offset of the data to log + - **logSizeOffset**: memory offset to number of words to log +- **Expression**: + {" "} + {" "} + {" "} + + {`context.accruedSubstate.unencryptedLogs.append( UnencryptedLog { address: context.environment.address, log: M[logOffset:logOffset+M[logSizeOffset]], } )`} - + - **Bit-size**: 88 [![](/img/protocol-specs/public-vm/bit-formats/EMITUNENCRYPTEDLOG.png)](/img/protocol-specs/public-vm/bit-formats/EMITUNENCRYPTEDLOG.png) ### `SENDL2TOL1MSG` + Send an L2-to-L1 message [See in table.](#isa-table-sendl2tol1msg) - **Opcode**: 0x2c - **Category**: Accrued Substate - Messaging -- **Flags**: - - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. -- **Args**: - - **recipientOffset**: memory offset of the message recipient - - **contentOffset**: memory offset of the message content -- **Expression**: - -{`context.accruedSubstate.sentL2ToL1Messages.append( +- **Flags**: + - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. +- **Args**: + - **recipientOffset**: memory offset of the message recipient + - **contentOffset**: memory offset of the message content +- **Expression**: + {" "} + {" "} + {" "} + + {`context.accruedSubstate.sentL2ToL1Messages.append( SentL2ToL1Message { address: context.environment.address, recipient: M[recipientOffset], message: M[contentOffset] } )`} - + - **Bit-size**: 88 [![](/img/protocol-specs/public-vm/bit-formats/SENDL2TOL1MSG.png)](/img/protocol-specs/public-vm/bit-formats/SENDL2TOL1MSG.png) ### `CALL` + Call into another contract [See in table.](#isa-table-call) - **Opcode**: 0x2d - **Category**: Control Flow - Contract Calls -- **Flags**: - - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. 
-- **Args**: - - **gasOffset**: offset to two words containing `{l2GasLeft, daGasLeft}`: amount of gas to provide to the callee - - **addrOffset**: address of the contract to call - - **argsOffset**: memory offset to args (will become the callee's calldata) - - **argsSizeOffset**: memory offset for the number of words to pass via callee's calldata - - **retOffset**: destination memory offset specifying where to store the data returned from the callee - - **retSize**: number of words to copy from data returned by callee - - **successOffset**: destination memory offset specifying where to store the call's success (0: failure, 1: success) -- **Expression**: - -{`// instr.args are { gasOffset, addrOffset, argsOffset, retOffset, retSize } +- **Flags**: + - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. +- **Args**: + - **gasOffset**: offset to two words containing `{l2GasLeft, daGasLeft}`: amount of gas to provide to the callee + - **addrOffset**: address of the contract to call + - **argsOffset**: memory offset to args (will become the callee's calldata) + - **argsSizeOffset**: memory offset for the number of words to pass via callee's calldata + - **retOffset**: destination memory offset specifying where to store the data returned from the callee + - **retSize**: number of words to copy from data returned by callee + - **successOffset**: destination memory offset specifying where to store the call's success (0: failure, 1: success) +- **Expression**: + {" "} + {" "} + {" "} + + {`// instr.args are { gasOffset, addrOffset, argsOffset, retOffset, retSize } chargeGas(context, l2GasCost=M[instr.args.gasOffset], daGasCost=M[instr.args.gasOffset+1]) @@ -1497,46 +1897,53 @@ traceNestedCall(context, instr.args.addrOffset) nestedContext = deriveContext(context, instr.args, isStaticCall=false) execute(nestedContext) updateContextAfterNestedCall(context, instr.args, nestedContext)`} - + - **Details**: Creates a new (nested) execution context and triggers execution within that context. - Execution proceeds in the nested context until it reaches a halt at which point - execution resumes in the current/calling context. - A non-existent contract or one with no code will return success. - ["Nested contract calls"](./nested-calls) provides a full explanation of this - instruction along with the shorthand used in the expression above. - The explanation includes details on charging gas for nested calls, - nested context derivation, world state tracing, and updating the parent context - after the nested call halts. + Execution proceeds in the nested context until it reaches a halt at which point + execution resumes in the current/calling context. + A non-existent contract or one with no code will return success. + ["Nested contract calls"](./nested-calls) provides a full explanation of this + instruction along with the shorthand used in the expression above. + The explanation includes details on charging gas for nested calls, + nested context derivation, world state tracing, and updating the parent context + after the nested call halts. 
- **Tag checks**: `T[gasOffset] == T[gasOffset+1] == T[gasOffset+2] == u32` -- **Tag updates**: - -{`T[successOffset] = u8 +- **Tag updates**: + {" "} + {" "} + {" "} + + {`T[successOffset] = u8 T[retOffset:retOffset+retSize] = field`} - + - **Bit-size**: 248 [![](/img/protocol-specs/public-vm/bit-formats/CALL.png)](/img/protocol-specs/public-vm/bit-formats/CALL.png)
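To make the operand layout concrete: the two gas words live at consecutive offsets, and the callee's results land in tagged memory. Below is a TypeScript sketch of just that caller-side bookkeeping — illustrative only; `Cell` and both helper names are hypothetical, and the full semantics (gas charging, context derivation, tracing) are specified in ["Nested contract calls"](./nested-calls):

```typescript
// Illustrative sketch of CALL's operand handling (not canonical AVM code).
type Tag = "field" | "u8" | "u32";
interface Cell { value: bigint; tag: Tag }

// Two consecutive u32 words at gasOffset: {l2GasLeft, daGasLeft} for the callee.
function readCallGas(mem: Cell[], gasOffset: number): { l2Gas: bigint; daGas: bigint } {
  return { l2Gas: mem[gasOffset].value, daGas: mem[gasOffset + 1].value };
}

// After the nested call halts, write back the success flag and returned data.
function writeCallResults(mem: Cell[], successOffset: number, retOffset: number, success: boolean, output: bigint[]): void {
  mem[successOffset] = { value: success ? 1n : 0n, tag: "u8" }; // T[successOffset] = u8
  output.forEach((w, i) => (mem[retOffset + i] = { value: w, tag: "field" })); // T[retOffset:retOffset+retSize] = field
}
```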
+ ["Nested contract calls"](./nested-calls) provides a full explanation of this + instruction along with the shorthand used in the expression above. + The explanation includes details on charging gas for nested calls, + nested context derivation, world state tracing, and updating the parent context + after the nested call halts. - **Tag checks**: `T[gasOffset] == T[gasOffset+1] == T[gasOffset+2] == u32` -- **Tag updates**: - -{`T[successOffset] = u8 +- **Tag updates**: + {" "} + {" "} + {" "} + + {`T[successOffset] = u8 T[retOffset:retOffset+retSize] = field`} - + - **Bit-size**: 248 [![](/img/protocol-specs/public-vm/bit-formats/STATICCALL.png)](/img/protocol-specs/public-vm/bit-formats/STATICCALL.png) ### `RETURN` + Halt execution within this context (without revert), optionally returning some data [See in table.](#isa-table-return) - **Opcode**: 0x2f - **Category**: Control Flow - Contract Calls -- **Flags**: - - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. -- **Args**: - - **retOffset**: memory offset of first word to return - - **retSize**: number of words to return -- **Expression**: - -{`context.contractCallResults.output = M[retOffset:retOffset+retSize] +- **Flags**: + - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. +- **Args**: + - **retOffset**: memory offset of first word to return + - **retSize**: number of words to return +- **Expression**: + {" "} + {" "} + {" "} + + {`context.contractCallResults.output = M[retOffset:retOffset+retSize] halt`} - + - **Details**: Return control flow to the calling context/contract. Caller will accept World State and Accrued Substate modifications. See ["Halting"](./execution#halting) to learn more. See ["Nested contract calls"](./nested-calls) to see how the caller updates its context after the nested call halts. - **Bit-size**: 88 [![](/img/protocol-specs/public-vm/bit-formats/RETURN.png)](/img/protocol-specs/public-vm/bit-formats/RETURN.png) ### `REVERT` + Halt execution within this context as `reverted`, optionally returning some data [See in table.](#isa-table-revert) - **Opcode**: 0x30 - **Category**: Control Flow - Contract Calls -- **Flags**: - - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. -- **Args**: - - **retOffset**: memory offset of first word to return - - **retSize**: number of words to return -- **Expression**: - -{`context.contractCallResults.output = M[retOffset:retOffset+retSize] +- **Flags**: + - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. 
+- **Args**: + - **retOffset**: memory offset of first word to return + - **retSize**: number of words to return +- **Expression**: + {" "} + {" "} + {" "} + + {`context.contractCallResults.output = M[retOffset:retOffset+retSize] context.contractCallResults.reverted = true halt`} - + - **Details**: Return control flow to the calling context/contract. Caller will reject World State and Accrued Substate modifications. See ["Halting"](./execution#halting) to learn more. See ["Nested contract calls"](./nested-calls) to see how the caller updates its context after the nested call halts. - **Bit-size**: 88 [![](/img/protocol-specs/public-vm/bit-formats/REVERT.png)](/img/protocol-specs/public-vm/bit-formats/REVERT.png) ### `TORADIXLE` + Convert a word to an array of limbs in little-endian radix form [See in table.](#isa-table-to_radix_le) - **Opcode**: 0x31 - **Category**: Conversions -- **Flags**: - - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. -- **Args**: - - **srcOffset**: memory offset of word to convert. - - **dstOffset**: memory offset specifying where the first limb of the radix-conversion result is stored. - - **radix**: the maximum bit-size of each limb. - - **numLimbs**: the number of limbs the word will be converted into. +- **Flags**: + - **indirect**: Toggles whether each memory-offset argument is an indirect offset. Rightmost bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. +- **Args**: + - **srcOffset**: memory offset of word to convert. + - **dstOffset**: memory offset specifying where the first limb of the radix-conversion result is stored. + - **radix**: the maximum bit-size of each limb. + - **numLimbs**: the number of limbs the word will be converted into. - **Expression**: TBD: Storage of limbs and if T[dstOffset] is constrained to U8 - **Details**: The limbs will be stored in a contiguous memory block starting at `dstOffset`. - **Tag checks**: `T[srcOffset] == field` - **Bit-size**: 152 - diff --git a/docs/docs/reference/developer_references/debugging.md b/docs/docs/reference/developer_references/debugging.md index 01b8d6d06aaa..edb5c05ae10c 100644 --- a/docs/docs/reference/developer_references/debugging.md +++ b/docs/docs/reference/developer_references/debugging.md @@ -53,7 +53,7 @@ debug_log_array(my_array); ### Start Sandbox in debug mode -Update the `DEBUG` environment variable in docker-compose.sandbox.yml to the following: +Set `LOG_LEVEL` to `verbose` or `debug`: ```yml # ~/.aztec/docker-compose.sandbox.yml @@ -65,8 +65,7 @@ aztec: ports: - "${PXE_PORT:-8080}:${PXE_PORT:-8080}" environment: - DEBUG: aztec:simulator:client_execution_context, aztec:sandbox, aztec:avm_simulator:debug_log - LOG_LEVEL: verbose # optionally add this for more logs + LOG_LEVEL: verbose # ... 
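+      # Other accepted values: 'fatal', 'error', 'warn', 'info', 'debug', 'trace' (see the sandbox reference below)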
``` diff --git a/docs/docs/reference/developer_references/sandbox_reference/sandbox-reference.md b/docs/docs/reference/developer_references/sandbox_reference/sandbox-reference.md index 82435874756f..ce0f32341b58 100644 --- a/docs/docs/reference/developer_references/sandbox_reference/sandbox-reference.md +++ b/docs/docs/reference/developer_references/sandbox_reference/sandbox-reference.md @@ -6,7 +6,7 @@ sidebar_position: 0 :::tip -For a quick start, follow the [guide](../../../guides/developer_guides/getting_started) to install the sandbox. +For a quick start, follow the [guide](../../../guides/getting_started) to install the sandbox. ::: @@ -19,8 +19,7 @@ To change them, you can open `~/.aztec/docker-compose.sandbox.yml` and edit them **Sandbox** ```sh -DEBUG=aztec:* # The level of debugging logs to be displayed. using "aztec:*" will log everything. -LOG_LEVEL=debug # Setting to 'debug' will print the debug logs +LOG_LEVEL=debug # Options are 'fatal', 'error', 'warn', 'info', 'verbose', 'debug', 'trace' HOST_WORKDIR='${PWD}' # The location to store log outputs. Will use ~/.aztec where the docker-compose.yml file is stored by default. ETHEREUM_HOST=http://ethereum:8545 # The Ethereum JSON RPC URL. We use an anvil instance that runs in parallel to the sandbox on docker by default. L1_CHAIN_ID=31337 # The Chain ID that the Ethereum host is using. diff --git a/docs/docs/reference/developer_references/smart_contract_reference/storage/index.md b/docs/docs/reference/developer_references/smart_contract_reference/storage/index.md index f85fd6f3a820..d5c3bccd90ea 100644 --- a/docs/docs/reference/developer_references/smart_contract_reference/storage/index.md +++ b/docs/docs/reference/developer_references/smart_contract_reference/storage/index.md @@ -101,6 +101,6 @@ require(minters[msg.sender], "caller is not minter"); ## Concepts mentioned -- [State Model](../../../../aztec/concepts/state_model/index.md) +- [State Model](../../../../aztec/concepts/storage/state_model/index.md) - [Public-private execution](../../../../aztec/smart_contracts/functions/public_private_calls.md) - [Function Contexts](../../../../aztec/smart_contracts/functions/context.md) diff --git a/docs/docs/reference/developer_references/smart_contract_reference/storage/private_state.md b/docs/docs/reference/developer_references/smart_contract_reference/storage/private_state.md index a2f84a70456e..ec24ad687b81 100644 --- a/docs/docs/reference/developer_references/smart_contract_reference/storage/private_state.md +++ b/docs/docs/reference/developer_references/smart_contract_reference/storage/private_state.md @@ -4,7 +4,7 @@ title: Private State On this page we will look at how to manage private state in Aztec contracts. We will look at how to declare private state, how to read and write to it, and how to use it in your contracts. -For a higher level overview of the state model in Aztec, see the [hybrid state model](../../../../aztec/concepts/state_model/index.md) page. +For a higher level overview of the state model in Aztec, see the [hybrid state model](../../../../aztec/concepts/storage/state_model/index.md) page. 
## Overview diff --git a/docs/docs/reference/developer_references/smart_contract_reference/storage/public_state.md b/docs/docs/reference/developer_references/smart_contract_reference/storage/public_state.md index 1c2c46cded45..d49aebd81e22 100644 --- a/docs/docs/reference/developer_references/smart_contract_reference/storage/public_state.md +++ b/docs/docs/reference/developer_references/smart_contract_reference/storage/public_state.md @@ -4,7 +4,7 @@ title: Public State On this page we will look at how to manage public state in Aztec contracts. We will look at how to declare public state, how to read and write to it, and how to use it in your contracts. -For a higher level overview of the state model in Aztec, see the [state model](../../../../aztec/concepts/state_model/index.md) concepts page. +For a higher level overview of the state model in Aztec, see the [state model](../../../../aztec/concepts/storage/state_model/index.md) concepts page. ## `PublicMutable` diff --git a/docs/docs/tutorials/codealong/contract_tutorials/advanced/_category_.json b/docs/docs/tutorials/codealong/contract_tutorials/advanced/_category_.json deleted file mode 100644 index 5fe169c13f3d..000000000000 --- a/docs/docs/tutorials/codealong/contract_tutorials/advanced/_category_.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "label": "Advanced", - "position": 6, - "collapsible": true, - "collapsed": true -} diff --git a/docs/docs/tutorials/codealong/contract_tutorials/counter_contract.md b/docs/docs/tutorials/codealong/contract_tutorials/counter_contract.md index 22dc4e4e5b39..0ab8bc479b55 100644 --- a/docs/docs/tutorials/codealong/contract_tutorials/counter_contract.md +++ b/docs/docs/tutorials/codealong/contract_tutorials/counter_contract.md @@ -7,7 +7,7 @@ In this guide, we will create our first Aztec.nr smart contract. We will build a ## Prerequisites -- You have followed the [quickstart](../../../guides/developer_guides/getting_started.md) +- You have followed the [quickstart](../../../guides/getting_started.md) - Running Aztec Sandbox - Installed [Noir LSP](../../../guides/developer_guides/local_env/installing_noir_lsp.md) (optional) diff --git a/docs/docs/tutorials/codealong/contract_tutorials/crowdfunding_contract.md b/docs/docs/tutorials/codealong/contract_tutorials/crowdfunding_contract.md index 397ac5848397..1f11c0735e0f 100644 --- a/docs/docs/tutorials/codealong/contract_tutorials/crowdfunding_contract.md +++ b/docs/docs/tutorials/codealong/contract_tutorials/crowdfunding_contract.md @@ -26,7 +26,7 @@ Along the way you will: ### Install tools -Please ensure that you already have [Installed the Sandbox](../../../guides/developer_guides/getting_started) +Please ensure that you already have [Installed the Sandbox](../../../guides/getting_started) ### Create an Aztec project diff --git a/docs/docs/tutorials/codealong/contract_tutorials/private_voting_contract.md b/docs/docs/tutorials/codealong/contract_tutorials/private_voting_contract.md index 7f3aba4aa766..2c7f0d81f86f 100644 --- a/docs/docs/tutorials/codealong/contract_tutorials/private_voting_contract.md +++ b/docs/docs/tutorials/codealong/contract_tutorials/private_voting_contract.md @@ -21,7 +21,7 @@ To keep things simple, we won't create ballots or allow for delegate voting. ## Prerequisites -- You have followed the [quickstart](../../../guides/developer_guides/getting_started) to install `aztec-nargo` and `aztec`. +- You have followed the [quickstart](../../../guides/getting_started) to install `aztec-nargo` and `aztec`. 
- Running Aztec Sandbox ## Set up a project diff --git a/docs/docs/tutorials/codealong/contract_tutorials/advanced/token_bridge/0_setup.md b/docs/docs/tutorials/codealong/contract_tutorials/token_bridge/0_setup.md similarity index 98% rename from docs/docs/tutorials/codealong/contract_tutorials/advanced/token_bridge/0_setup.md rename to docs/docs/tutorials/codealong/contract_tutorials/token_bridge/0_setup.md index 200c4347b481..4ec44d065e70 100644 --- a/docs/docs/tutorials/codealong/contract_tutorials/advanced/token_bridge/0_setup.md +++ b/docs/docs/tutorials/codealong/contract_tutorials/token_bridge/0_setup.md @@ -17,7 +17,7 @@ We recommend going through this setup to fully understand where things live. - [node v18+ (GitHub link)](https://github.com/tj/n) - [docker](https://docs.docker.com/) -- [Aztec sandbox](../../../../../guides/developer_guides/getting_started) - you should have this running before starting the tutorial +- [Aztec sandbox](../../../../guides/getting_started) - you should have this running before starting the tutorial Start the sandbox diff --git a/docs/docs/tutorials/codealong/contract_tutorials/advanced/token_bridge/1_depositing_to_aztec.md b/docs/docs/tutorials/codealong/contract_tutorials/token_bridge/1_depositing_to_aztec.md similarity index 100% rename from docs/docs/tutorials/codealong/contract_tutorials/advanced/token_bridge/1_depositing_to_aztec.md rename to docs/docs/tutorials/codealong/contract_tutorials/token_bridge/1_depositing_to_aztec.md diff --git a/docs/docs/tutorials/codealong/contract_tutorials/advanced/token_bridge/2_minting_on_aztec.md b/docs/docs/tutorials/codealong/contract_tutorials/token_bridge/2_minting_on_aztec.md similarity index 100% rename from docs/docs/tutorials/codealong/contract_tutorials/advanced/token_bridge/2_minting_on_aztec.md rename to docs/docs/tutorials/codealong/contract_tutorials/token_bridge/2_minting_on_aztec.md diff --git a/docs/docs/tutorials/codealong/contract_tutorials/advanced/token_bridge/3_withdrawing_to_l1.md b/docs/docs/tutorials/codealong/contract_tutorials/token_bridge/3_withdrawing_to_l1.md similarity index 98% rename from docs/docs/tutorials/codealong/contract_tutorials/advanced/token_bridge/3_withdrawing_to_l1.md rename to docs/docs/tutorials/codealong/contract_tutorials/token_bridge/3_withdrawing_to_l1.md index a0c1b508212a..ae28246bde32 100644 --- a/docs/docs/tutorials/codealong/contract_tutorials/advanced/token_bridge/3_withdrawing_to_l1.md +++ b/docs/docs/tutorials/codealong/contract_tutorials/token_bridge/3_withdrawing_to_l1.md @@ -19,7 +19,7 @@ The `exit_to_l1_public` function enables anyone to withdraw their L2 tokens back 1. Like with our deposit function, we need to create the L2 to L1 message. The content is the _amount_ to burn, the recipient address, and who can execute the withdraw on the L1 portal on behalf of the user. It can be `0x0` for anyone, or a specified address. 2. `context.message_portal()` passes this content to the kernel circuit which creates the proof for the transaction. The kernel circuit then adds the sender (the L2 address of the bridge + version of aztec) and the recipient (the portal to the L2 address + the chain ID of L1) under the hood, to create the message which gets added as part of the transaction data published by the sequencer and is stored in the outbox for consumption. 3. The `context.message_portal()` takes the recipient and content as input, and will insert a message into the outbox.
We set the recipient to be the portal address read from storage of the contract. -4. Finally, you also burn the tokens on L2! Note that it burning is done at the end to follow the check effects interaction pattern. Note that the caller has to first approve the bridge contract to burn tokens on its behalf. Refer to [burn_public function on the token contract](../../token_contract.md#authorizing-token-spends). +4. Finally, you also burn the tokens on L2! Note that the burning is done at the end to follow the checks-effects-interactions pattern. Note that the caller has to first approve the bridge contract to burn tokens on its behalf. Refer to [burn_public function on the token contract](../token_contract.md#burn_public). - We burn the tokens from the `msg_sender()`. Otherwise, a malicious user could burn someone else’s tokens and mint tokens on L1 to themselves. One could add another approval flow on the bridge but that might make it complex for other applications to call the bridge. ## Withdrawing Privately diff --git a/docs/docs/tutorials/codealong/contract_tutorials/advanced/token_bridge/4_typescript_glue_code.md b/docs/docs/tutorials/codealong/contract_tutorials/token_bridge/4_typescript_glue_code.md similarity index 95% rename from docs/docs/tutorials/codealong/contract_tutorials/advanced/token_bridge/4_typescript_glue_code.md rename to docs/docs/tutorials/codealong/contract_tutorials/token_bridge/4_typescript_glue_code.md index 0600dbf1059d..5754ce1f0f8d 100644 --- a/docs/docs/tutorials/codealong/contract_tutorials/advanced/token_bridge/4_typescript_glue_code.md +++ b/docs/docs/tutorials/codealong/contract_tutorials/token_bridge/4_typescript_glue_code.md @@ -167,9 +167,9 @@ Note - you might have a jest error at the end of each test saying "expected 1-2 ### Follow a more detailed Aztec.js tutorial -Follow the tutorial [here](../../../aztecjs-getting-started). +Follow the tutorial [here](../../js_tutorials/aztecjs-getting-started.md). ### Optional: Learn more about concepts mentioned here -- [Portals (protocol specs)](../../../../../protocol-specs/l1-smart-contracts/index.md) -- [Functions under the hood (concepts)](../../../../../aztec/smart_contracts/functions/function_transforms.md) +- [Portals (protocol specs)](../../../../protocol-specs/l1-smart-contracts/index.md#portals) +- [Functions under the hood (concepts)](../../../../aztec/smart_contracts/functions/function_transforms.md) diff --git a/docs/docs/tutorials/codealong/contract_tutorials/advanced/token_bridge/index.md b/docs/docs/tutorials/codealong/contract_tutorials/token_bridge/index.md similarity index 97% rename from docs/docs/tutorials/codealong/contract_tutorials/advanced/token_bridge/index.md rename to docs/docs/tutorials/codealong/contract_tutorials/token_bridge/index.md index 8b3873c5a919..72a0c7061df0 100644 --- a/docs/docs/tutorials/codealong/contract_tutorials/advanced/token_bridge/index.md +++ b/docs/docs/tutorials/codealong/contract_tutorials/token_bridge/index.md @@ -1,11 +1,11 @@ --- title: Token Bridge -sidebar_position: 2 +sidebar_position: 6 --- import Image from "@theme/IdealImage"; -In this tutorial, we will learn how to build the entire flow of a cross-chain token using portals. If this is your first time hearing the word portal, you’ll want to read [this page in the protocol specs](../../../../../protocol-specs/l1-smart-contracts/index.md). +In this tutorial, we will learn how to build the entire flow of a cross-chain token using portals.
If this is your first time hearing the word portal, you’ll want to read [this page in the protocol specs](../../../../protocol-specs/l1-smart-contracts/index.md). ## A refresher on Portals @@ -42,7 +42,7 @@ The goal for this tutorial is to create functionality such that a token can be b This is just a reference implementation for educational purposes only. It has not been through an in-depth security audit. -Let’s assume a token exists on Ethereum and Aztec (see a [the token tutorial](../../token_contract.md)). +Let’s assume a token exists on Ethereum and Aztec (see [the token tutorial](../token_contract.md)). We will build: diff --git a/docs/docs/tutorials/codealong/contract_tutorials/token_contract.md b/docs/docs/tutorials/codealong/contract_tutorials/token_contract.md index e650379aee45..9b93a6e0e72a 100644 --- a/docs/docs/tutorials/codealong/contract_tutorials/token_contract.md +++ b/docs/docs/tutorials/codealong/contract_tutorials/token_contract.md @@ -422,7 +422,7 @@ aztec codegen target -o src/artifacts ### Token Bridge Contract -The [token bridge tutorial](./advanced/token_bridge/index.md) is a great follow up to this one. +The [token bridge tutorial](./token_bridge/index.md) is a great follow-up to this one. It builds on the Token contract described here and goes into more detail about Aztec contract composability and Ethereum (L1) and Aztec (L2) cross-chain messaging. diff --git a/docs/docs/tutorials/codealong/js_tutorials/_category_.json b/docs/docs/tutorials/codealong/js_tutorials/_category_.json new file mode 100644 index 000000000000..79b35ed85280 --- /dev/null +++ b/docs/docs/tutorials/codealong/js_tutorials/_category_.json @@ -0,0 +1,6 @@ +{ + "label": "Dapp Tutorials", + "position": 1, + "collapsible": true, + "collapsed": true +} \ No newline at end of file diff --git a/docs/docs/tutorials/codealong/aztecjs-getting-started.md b/docs/docs/tutorials/codealong/js_tutorials/aztecjs-getting-started.md similarity index 93% rename from docs/docs/tutorials/codealong/aztecjs-getting-started.md rename to docs/docs/tutorials/codealong/js_tutorials/aztecjs-getting-started.md index d4eeae0b9769..6dd6b117e5c0 100644 --- a/docs/docs/tutorials/codealong/aztecjs-getting-started.md +++ b/docs/docs/tutorials/codealong/js_tutorials/aztecjs-getting-started.md @@ -1,13 +1,13 @@ --- title: Transferring Tokens with Aztec.js -sidebar_position: 1 +sidebar_position: 0 --- import Image from "@theme/IdealImage"; In this guide, we will retrieving the Sandbox and deploy a pre-written contract to it using Aztec.js. -This guide assumes you have followed the [quickstart](../../guides/developer_guides/getting_started). +This guide assumes you have followed the [quickstart](../../../guides/getting_started.md). ## Prerequisites @@ -75,7 +75,7 @@ yarn add @aztec/aztec.js @aztec/accounts @aztec/noir-contracts.js typescript @ty "build": "yarn clean && tsc -b", "build:dev": "tsc -b --watch", "clean": "rm -rf ./dest tsconfig.tsbuildinfo", - "start": "yarn build && DEBUG='token' node ./dest/index.js" + "start": "yarn build && LOG_LEVEL='info: token' node ./dest/index.js" }, ``` @@ -134,7 +134,7 @@ The sandbox is preloaded with multiple accounts so you don't have to sit and cre #include_code load_accounts /yarn-project/end-to-end/src/composed/e2e_sandbox_example.test.ts typescript -An explanation on accounts on Aztec can be found [here](../../aztec/concepts/accounts/index.md). +An explanation on accounts on Aztec can be found [here](../../../aztec/concepts/accounts/index.md).
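To make the preloaded accounts concrete, here is a hedged TypeScript sketch of fetching the sandbox's pre-funded wallets. It assumes the `getInitialTestAccountsWallets` helper from `@aztec/accounts/testing`; the helper name and import path may differ between releases.

```typescript
import { createPXEClient } from '@aztec/aztec.js';
import { getInitialTestAccountsWallets } from '@aztec/accounts/testing';

async function main() {
  const pxe = createPXEClient(process.env.PXE_URL ?? 'http://localhost:8080');
  // The sandbox ships with a few pre-registered accounts; grab wallets for them.
  const wallets = await getInitialTestAccountsWallets(pxe);
  for (const wallet of wallets) {
    console.log(`Account address: ${wallet.getAddress().toString()}`);
  }
}

main().catch(console.error);
```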
## Deploy a contract @@ -295,7 +295,7 @@ This function takes: 2. A recipient 3. An amount of tokens to mint -This function starts as private to set up the creation of a [partial note](../../aztec/concepts/storage/partial_notes.md). The private function calls a public function that checks that the minter is authorized to mint new tokens an increments the public total supply. The recipient of the tokens remains private, but the minter and the amount of tokens minted are public. +This function starts as private to set up the creation of a [partial note](../../../aztec/concepts/storage/partial_notes.md). The private function calls a public function that checks that the minter is authorized to mint new tokens and increments the public total supply. The recipient of the tokens remains private, but the minter and the amount of tokens minted are public. Let's now use these functions to mint some tokens to Bob's account using Typescript, add this to `index.ts`: @@ -339,7 +339,7 @@ Our complete output should now be something like: token Bob's balance 10543 +43ms ``` -That's it! We have successfully deployed a token contract to an instance of the Aztec network and mined private state-transitioning transactions. We have also queried the resulting state all via the interfaces provided by the contract. To see exactly what has happened here, you can learn about the transaction flow [on the Concepts page here](../../aztec/concepts/transactions.md). +That's it! We have successfully deployed a token contract to an instance of the Aztec network and mined private state-transitioning transactions. We have also queried the resulting state all via the interfaces provided by the contract. To see exactly what has happened here, you can learn about the transaction flow [on the Concepts page here](../../../aztec/concepts/transactions.md). ## Next Steps @@ -349,5 +349,5 @@ Follow the [dapp tutorial](./simple_dapp/index.md). ### Optional: Learn more about concepts mentioned here -- [Authentication witness](../../aztec/concepts/accounts/authwit.md) -- [Functions under the hood](../../aztec/smart_contracts/functions/function_transforms.md) +- [Authentication witness](../../../aztec/concepts/accounts/authwit.md) +- [Functions under the hood](../../../aztec/smart_contracts/functions/function_transforms.md) diff --git a/docs/docs/tutorials/codealong/simple_dapp/0_project_setup.md b/docs/docs/tutorials/codealong/js_tutorials/simple_dapp/0_project_setup.md similarity index 100% rename from docs/docs/tutorials/codealong/simple_dapp/0_project_setup.md rename to docs/docs/tutorials/codealong/js_tutorials/simple_dapp/0_project_setup.md diff --git a/docs/docs/tutorials/codealong/simple_dapp/1_pxe_service.md b/docs/docs/tutorials/codealong/js_tutorials/simple_dapp/1_pxe_service.md similarity index 86% rename from docs/docs/tutorials/codealong/simple_dapp/1_pxe_service.md rename to docs/docs/tutorials/codealong/js_tutorials/simple_dapp/1_pxe_service.md index ba30343f5778..727b94d079b6 100644 --- a/docs/docs/tutorials/codealong/simple_dapp/1_pxe_service.md +++ b/docs/docs/tutorials/codealong/js_tutorials/simple_dapp/1_pxe_service.md @@ -4,7 +4,8 @@ PXE is a component of the Aztec Protocol that provides a private execution envir As an app developer, the PXE interface provides you with access to the user's accounts and their private state, as well as a connection to the network for accessing public global state.
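For illustration, a sketch of the deploy-and-mint flow described in the tutorial hunk above, assuming the pre-built `TokenContract` bindings from `@aztec/noir-contracts.js` and a `mint_to_private`-style method; the exact mint method name varies between releases, so treat it as a placeholder rather than the tutorial's canonical code.

```typescript
import { createPXEClient } from '@aztec/aztec.js';
import { getInitialTestAccountsWallets } from '@aztec/accounts/testing';
import { TokenContract } from '@aztec/noir-contracts.js';

async function main() {
  const pxe = createPXEClient(process.env.PXE_URL ?? 'http://localhost:8080');
  const [admin, bob] = await getInitialTestAccountsWallets(pxe);

  // Deploy the token, setting the first sandbox account as the contract admin.
  const token = await TokenContract.deploy(admin, admin.getAddress(), 'TestToken', 'TST', 18)
    .send()
    .deployed();
  console.log(`Token deployed at ${token.address.toString()}`);

  // Placeholder method name: the private-entrypoint mint that sets up a partial note.
  await token.methods.mint_to_private(admin.getAddress(), bob.getAddress(), 100n).send().wait();
}

main().catch(console.error);
```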
-The [Aztec Sandbox](../../../reference/developer_references/sandbox_reference/index.md) runs a local PXE and an Aztec Node, both connected to a local Ethereum development node like Anvil. +The [Aztec Sandbox](../../../../reference/developer_references/sandbox_reference/sandbox-reference.md) runs a local PXE and an Aztec Node, both connected to a local Ethereum development node like Anvil. + The Sandbox also includes a set of pre-initialized accounts that you can use from your app. In this section, we'll connect to the Sandbox from our project. @@ -20,7 +21,7 @@ Let's create our first file `src/index.mjs` with the following contents: #include_code all yarn-project/end-to-end/src/sample-dapp/connect.mjs javascript -Make sure the [Sandbox is running](../../../guides/developer_guides/getting_started.md) and run the example +Make sure the [Sandbox is running](../../../../guides/getting_started.md) and run the example ```bash node src/index.mjs diff --git a/docs/docs/tutorials/codealong/simple_dapp/2_contract_deployment.md b/docs/docs/tutorials/codealong/js_tutorials/simple_dapp/2_contract_deployment.md similarity index 93% rename from docs/docs/tutorials/codealong/simple_dapp/2_contract_deployment.md rename to docs/docs/tutorials/codealong/js_tutorials/simple_dapp/2_contract_deployment.md index 392aacce3912..324e3d59d0cf 100644 --- a/docs/docs/tutorials/codealong/simple_dapp/2_contract_deployment.md +++ b/docs/docs/tutorials/codealong/js_tutorials/simple_dapp/2_contract_deployment.md @@ -3,7 +3,7 @@ To add contracts to your application, we'll start by creating a new `aztec-nargo` project. We'll then compile the contracts, and write a simple script to deploy them to our Sandbox. :::info -Follow the instructions [here](../../../reference/developer_references/sandbox_reference/index.md) to install `aztec-nargo` if you haven't done so already. +Follow the instructions [here](../../../../guides/getting_started.md) to install `aztec-nargo` if you haven't done so already. ::: ## Initialize Aztec project @@ -73,7 +73,7 @@ Here, we are using the `Contract` class with the compiled artifact to send a new Note that the token's `constructor()` method expects an `owner` address to set as the contract `admin`. We are using the first account from the Sandbox for this. :::info -If you are using the generated typescript classes, you can drop the generic `ContractDeployer` in favor of using the `deploy` method of the generated class, which will automatically load the artifact for you and type-check the constructor arguments. See the [How to deploy a contract](../../../guides/developer_guides/smart_contracts/how_to_deploy_contract.md) page for more info. +If you are using the generated typescript classes, you can drop the generic `ContractDeployer` in favor of using the `deploy` method of the generated class, which will automatically load the artifact for you and type-check the constructor arguments. See the [How to deploy a contract](../../../../guides/developer_guides/smart_contracts/how_to_deploy_contract.md) page for more info.
::: Run the snippet above as `node src/deploy.mjs`, and you should see the following output, along with a new `addresses.json` file in your project root: diff --git a/docs/docs/tutorials/codealong/simple_dapp/3_contract_interaction.md b/docs/docs/tutorials/codealong/js_tutorials/simple_dapp/3_contract_interaction.md similarity index 91% rename from docs/docs/tutorials/codealong/simple_dapp/3_contract_interaction.md rename to docs/docs/tutorials/codealong/js_tutorials/simple_dapp/3_contract_interaction.md index 769e53d6e00b..72583b361af3 100644 --- a/docs/docs/tutorials/codealong/simple_dapp/3_contract_interaction.md +++ b/docs/docs/tutorials/codealong/js_tutorials/simple_dapp/3_contract_interaction.md @@ -11,7 +11,7 @@ Let's start by showing our user's private balance for the token across their acc #include_code balance_of_private noir-projects/noir-contracts/contracts/token_contract/src/main.nr rust :::info -Note that this function will only return a valid response for accounts registered in the Private eXecution Environment (PXE), since it requires access to the [user's private state](../../../aztec/concepts/wallets/index.md#private-state). In other words, you cannot query the private balance of another user for the token contract. +Note that this function will only return a valid response for accounts registered in the Private eXecution Environment (PXE), since it requires access to the [user's private state](../../../../aztec/concepts/wallets/index.md#private-state). In other words, you cannot query the private balance of another user for the token contract. ::: To do this, let's first initialize a new `Contract` instance using `aztec.js` that represents our deployed token contracts. Create a new `src/contracts.mjs` file with the imports for our artifacts and other dependencies: @@ -99,12 +99,12 @@ At the time of this writing, there are no events emitted when new private notes ## Working with public state -While [private and public state](../../../aztec/concepts/state_model/index.md) are fundamentally different, the API for working with private and public functions and state from `aztec.js` is equivalent. To query the balance in public tokens for our user accounts, we can just call the `balance_of_public` view function in the contract: +While [private and public state](../../../../aztec/concepts/storage/state_model/index.md) are fundamentally different, the API for working with private and public functions and state from `aztec.js` is equivalent. To query the balance in public tokens for our user accounts, we can just call the `balance_of_public` view function in the contract: #include_code showPublicBalances yarn-project/end-to-end/src/sample-dapp/index.mjs javascript :::info -Since this we are working with public balances, we can now query the balance for any address, not just those registered in our local PXE. We can also send funds to addresses for which we don't know their [public encryption key](../../../aztec/concepts/accounts/keys.md#encryption-keys). +Since we are working with public balances, we can now query the balance for any address, not just those registered in our local PXE. We can also send funds to addresses for which we don't know their [public encryption key](../../../../aztec/concepts/accounts/keys.md#encryption-keys). ::: Here, since the token contract does not mint any initial funds upon deployment, the balances for all of our user's accounts will be zero.
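A short sketch of the public balance query described in the hunk above, reusing the token bindings from the earlier sketches. The `balance_of_public` name comes from the tutorial's contract; `simulate()` performs a read without sending a transaction, which is why any address can be queried. The `tokenAddress` parameter is a hypothetical input, e.g. read from the `addresses.json` produced by the deploy script.

```typescript
import { AztecAddress, createPXEClient } from '@aztec/aztec.js';
import { getInitialTestAccountsWallets } from '@aztec/accounts/testing';
import { TokenContract } from '@aztec/noir-contracts.js';

async function showPublicBalances(tokenAddress: string) {
  const pxe = createPXEClient(process.env.PXE_URL ?? 'http://localhost:8080');
  const wallets = await getInitialTestAccountsWallets(pxe);
  const token = await TokenContract.at(AztecAddress.fromString(tokenAddress), wallets[0]);

  for (const wallet of wallets) {
    // A view call: no transaction is sent, so we can query any address's public balance.
    const balance = await token.methods.balance_of_public(wallet.getAddress()).simulate();
    console.log(`${wallet.getAddress().toString()}: ${balance}`);
  }
}
```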
diff --git a/docs/docs/tutorials/codealong/simple_dapp/4_testing.md b/docs/docs/tutorials/codealong/js_tutorials/simple_dapp/4_testing.md similarity index 87% rename from docs/docs/tutorials/codealong/simple_dapp/4_testing.md rename to docs/docs/tutorials/codealong/js_tutorials/simple_dapp/4_testing.md index f7b03eda1c9b..d162345d4486 100644 --- a/docs/docs/tutorials/codealong/simple_dapp/4_testing.md +++ b/docs/docs/tutorials/codealong/js_tutorials/simple_dapp/4_testing.md @@ -14,7 +14,7 @@ Start by installing our test runner, in this case jest: yarn add -D jest ``` -We'll need to [install and run the Sandbox](../../../reference/developer_references/sandbox_reference/sandbox-reference.md). +We'll need to [install and run the Sandbox](../../../../guides/getting_started.md). ## Test setup @@ -69,4 +69,6 @@ yarn node --experimental-vm-modules $(yarn bin jest) --testRegex '.*\.test\.mjs$ ## Next steps -Now that you have finished the tutorial, you can learn more about [writing contracts with Noir](../../../aztec/smart_contracts_overview.md) or read about the [fundamental concepts behind Aztec Network](../../../aztec/overview.md). +Have you written a contract from scratch? If not, follow a tutorial for [writing contracts with Noir](../../contract_tutorials/counter_contract.md). + +Or read about the [fundamental concepts behind Aztec Network](../../../../aztec/concepts_overview.md) and dive deeper into how things work. diff --git a/docs/docs/tutorials/codealong/simple_dapp/index.md b/docs/docs/tutorials/codealong/js_tutorials/simple_dapp/index.md similarity index 88% rename from docs/docs/tutorials/codealong/simple_dapp/index.md rename to docs/docs/tutorials/codealong/js_tutorials/simple_dapp/index.md index 26b04daebdc9..5457d22e4c5f 100644 --- a/docs/docs/tutorials/codealong/simple_dapp/index.md +++ b/docs/docs/tutorials/codealong/js_tutorials/simple_dapp/index.md @@ -1,10 +1,10 @@ --- -title: Dapp Tutorial +title: Node.js app that interacts with contracts --- In this tutorial we'll go through the steps for building a simple application that interacts with the Aztec Sandbox. We'll be building a console application using Javascript and NodeJS, but you may reuse the same concepts here for a web-based app. All Aztec libraries are written in Typescript and fully typed, so you can use Typescript instead of Javascript to make the most out of its type checker. -This tutorial will focus on environment setup, including creating accounts and deployments, as well as interacting with your contracts. It will not cover [how to write contracts in Noir](../../../aztec/smart_contracts_overview.md). +This tutorial will focus on environment setup, including creating accounts and deployments, as well as interacting with your contracts. It will not cover [how to write contracts in Noir](../../../../aztec/smart_contracts_overview.md). The full code for this tutorial is [available on the `aztec-packages` repository](https://github.com/AztecProtocol/aztec-packages/blob/master/yarn-project/end-to-end/src/sample-dapp).
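To sketch the test setup referenced in the 4_testing.md hunk above: a minimal jest test in ESM flavor, matching the `--experimental-vm-modules` invocation, that connects to the sandbox before the tests run. The helper names are the same assumptions as in the earlier sketches, not the tutorial's exact code.

```typescript
// token.test.mjs - run with: yarn node --experimental-vm-modules $(yarn bin jest)
import { createPXEClient, waitForPXE } from '@aztec/aztec.js';
import { getInitialTestAccountsWallets } from '@aztec/accounts/testing';

describe('sandbox connection', () => {
  let pxe;
  let wallets;

  beforeAll(async () => {
    pxe = createPXEClient(process.env.PXE_URL ?? 'http://localhost:8080');
    await waitForPXE(pxe); // fail fast if the sandbox is not running
    wallets = await getInitialTestAccountsWallets(pxe);
  });

  it('has pre-funded accounts registered', async () => {
    const accounts = await pxe.getRegisteredAccounts();
    expect(accounts.length).toBeGreaterThan(0);
    expect(wallets.length).toBeGreaterThan(0);
  });
});
```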
@@ -12,7 +12,7 @@ The full code for this tutorial is [available on the `aztec-packages` repository - Linux or OSX environment - [NodeJS](https://nodejs.org/) 18 or higher -- [Aztec Sandbox](../../../guides/developer_guides/getting_started) +- [Aztec Sandbox](../../../../guides/getting_started) ## Prerequisites diff --git a/docs/docs/tutorials/examples/uniswap/index.md b/docs/docs/tutorials/examples/uniswap/index.md index c4fc55b539fa..d31c03b0418e 100644 --- a/docs/docs/tutorials/examples/uniswap/index.md +++ b/docs/docs/tutorials/examples/uniswap/index.md @@ -15,7 +15,7 @@ The flow will be: 2. We create an L2 → L1 message to swap on L1 3. On L1, the user gets their input tokens, consumes the swap message, and executes the swap 4. The user deposits the “output” tokens to the output token portal so it can be deposited into L2 -5. We will assume that token portals and token bridges for the input and output tokens must exist. These are what we built in the [token bridge tutorial](../../codealong/contract_tutorials/advanced/token_bridge/index.md). +5. We will assume that token portals and token bridges for the input and output tokens already exist. These are what we built in the [token bridge tutorial](../../codealong/contract_tutorials/token_bridge/index.md). The execution of swap on L1 should be designed such that any 3rd party can execute the swap on behalf of the user. This helps maintain user privacy by not requiring links between L1 and L2 activity. @@ -27,5 +27,5 @@ This reference will cover: This diagram describes the private flow. -This code works alongside a token portal that you can learn to build [in this codealong tutorial](../../codealong/contract_tutorials/advanced/token_bridge/index.md). +This code works alongside a token portal that you can learn to build [in this codealong tutorial](../../codealong/contract_tutorials/token_bridge/index.md). diff --git a/docs/docs/tutorials/examples/uniswap/l1_contract.md b/docs/docs/tutorials/examples/uniswap/l1_contract.md index 6eb246a7c186..c9b16928ed14 100644 --- a/docs/docs/tutorials/examples/uniswap/l1_contract.md +++ b/docs/docs/tutorials/examples/uniswap/l1_contract.md @@ -3,7 +3,7 @@ title: L1 contracts (EVM) sidebar_position: 2 --- -This page goes over the code in the L1 contract for Uniswap, which works alongside a [token portal (codealong tutorial)](../../codealong/contract_tutorials/advanced/token_bridge/index.md). +This page goes over the code in the L1 contract for Uniswap, which works alongside a [token portal (codealong tutorial)](../../codealong/contract_tutorials/token_bridge/index.md). ## Setup diff --git a/docs/docs/tutorials/examples/uniswap/l2_contract.md b/docs/docs/tutorials/examples/uniswap/l2_contract.md index 9d428e6332b0..a3ce0e6de53e 100644 --- a/docs/docs/tutorials/examples/uniswap/l2_contract.md +++ b/docs/docs/tutorials/examples/uniswap/l2_contract.md @@ -3,7 +3,7 @@ title: L2 Contracts (Aztec) sidebar_position: 1 --- -This page goes over the code in the L2 contract for Uniswap, which works alongside a [token bridge (codealong tutorial)](../../codealong/contract_tutorials/advanced/token_bridge/index.md). +This page goes over the code in the L2 contract for Uniswap, which works alongside a [token bridge (codealong tutorial)](../../codealong/contract_tutorials/token_bridge/index.md). ## Main.nr @@ -20,7 +20,7 @@ We just need to store the portal address for the token that we want to swap. 2. We fetch the underlying aztec token that needs to be swapped. 3. We transfer the user’s funds to the Uniswap contract.
Like with Ethereum, the user must have provided approval to the Uniswap contract to do so. The user must provide the nonce they used in the approval for transfer, so that Uniswap can send it to the token contract, to prove it has appropriate approval. 4. Funds are added to the Uniswap contract. -5. Uniswap must exit the input tokens to L1. For this it has to approve the bridge to burn its tokens on its behalf and then actually exit the funds. We call the [`exit_to_l1_public()` method on the token bridge](../../codealong/contract_tutorials/advanced/token_bridge/index.md). We use the public flow for exiting since we are operating on public state. +5. Uniswap must exit the input tokens to L1. For this it has to approve the bridge to burn its tokens on its behalf and then actually exit the funds. We call the [`exit_to_l1_public()` method on the token bridge](../../codealong/contract_tutorials/token_bridge/index.md). We use the public flow for exiting since we are operating on public state. 6. It is not enough for us to simply emit a message to withdraw the funds. We also need to emit a message to display our swap intention. If we do not do this, there is nothing stopping a third party from calling the Uniswap portal with their own parameters and consuming our message. So the Uniswap portal (on L1) needs to know: diff --git a/docs/docs/tutorials/index.md b/docs/docs/tutorials/index.md deleted file mode 100644 index c6e9362c344c..000000000000 --- a/docs/docs/tutorials/index.md +++ /dev/null @@ -1,112 +0,0 @@ ---- -id: index -sidebar_position: 0 -title: Tutorials and Examples ---- - -# Code-Along Tutorials and Examples - -In this section, you will find two things: code-along tutorials and code examples of Aztec applications. - -Tutorials will teach you how to build a full application or smart contract locally. Examples are not intended for you to replicate locally as they have more complex setups, but can be useful for exploring what you can do with Aztec. - -This page includes the most popular tutorials in order of increasing complexity. Explore the sidebar for more! - -## Code-Along Tutorials - -### Beginner: Write your first smart contract - -
-**Simple counter contract**: Follow this tutorial to build, compile and deploy your first Aztec smart contract - a simple private counter
-
-### Intermediate: Write increasingly more complex contracts
-
-It is recommended to follow these in order.
-
-**Simple private voting contract**: Build a contract with hybrid state and calling public functions from private
-
-**Crowdfunding contract**: A more complex contract that interacts with other contracts
-
-**Token contract with hybrid state**: A very complex contract for a token that can move across public & private state and be transferred to others
-
-**Accounts contract**: A simple accounts contract that will teach you about account abstraction in Aztec
-
-## Examples
-
-**Interacting with L1 Uniswap from L2 Aztec**: An example app inspired by Aztec Connect that allows users to swap publicly & privately on L1 Uniswap from Aztec
-
-**Card game contract ↗️**: A set of contracts that allow players to take turns playing cards
-
diff --git a/docs/docusaurus.config.js b/docs/docusaurus.config.js index 678e9cce2300..3c480ffaba6c 100644 --- a/docs/docusaurus.config.js +++ b/docs/docusaurus.config.js @@ -223,9 +223,9 @@ const config = { items: [ { type: "doc", - docId: "aztec/overview", + docId: "index", position: "left", - label: "Concepts", + label: "Learn", }, { type: "docSidebar", @@ -233,17 +233,11 @@ const config = { position: "left", label: "Guides", }, - { - type: "docSidebar", - sidebarId: "tutorialsSidebar", - position: "left", - label: "Examples", - }, { type: "docSidebar", sidebarId: "referenceSidebar", position: "left", - label: "References", + label: "Reference", }, { type: "dropdown", @@ -255,6 +249,13 @@ const config = { value: 'GitHub', className: "dropdown-subtitle", }, + { + to: "https://github.com/AztecProtocol/aztec-starter", + label: "Aztec Starter repo", + target: "_blank", + rel: "noopener noreferrer", + className: "github-item", + }, { to: "https://github.com/AztecProtocol/aztec-packages", label: "Aztec Monorepo", @@ -343,7 +344,7 @@ const config = { }, { label: "Developer Getting Started Guide", - to: "/guides/developer_guides/getting_started", + to: "/guides/getting_started", }, { label: "Aztec.nr", diff --git a/docs/netlify.toml b/docs/netlify.toml index bf5475fff6b3..3a0097de71b6 100644 --- a/docs/netlify.toml +++ b/docs/netlify.toml @@ -8,7 +8,7 @@ [[redirects]] from = "/getting_started" - to = "/guides/developer_guides/getting_started" + to = "/guides/getting_started" [[redirects]] from = "/tutorials/simple_dapp/*" to = "/tutorials/codealong/simple_dapp" @@ -19,28 +19,28 @@ to = "/tutorials/codealong/simple_dapp" [[redirects]] - from = "/tutorials/contract_tutorials/advanced/token_bridge/typescript_glue_code" - to = "/tutorials/codealong/contract_tutorials/advanced/token_bridge/typescript_glue_code" + from = "/tutorials/contract_tutorials/advanced/token_bridge/typescript_glue_code" + to = "/tutorials/codealong/contract_tutorials/token_bridge/typescript_glue_code" [[redirects]] - from = "/tutorials/contract_tutorials/advanced/token_bridge/withdrawing_to_l1" - to = "/tutorials/codealong/contract_tutorials/advanced/token_bridge/withdrawing_to_l1" + from = "/tutorials/contract_tutorials/advanced/token_bridge/withdrawing_to_l1" + to = "/tutorials/codealong/contract_tutorials/token_bridge/withdrawing_to_l1" [[redirects]] - from = "/tutorials/contract_tutorials/advanced/token_bridge/minting_on_aztec" - to = "/tutorials/codealong/contract_tutorials/advanced/token_bridge/minting_on_aztec" + from = "/tutorials/contract_tutorials/advanced/token_bridge/minting_on_aztec" + to = "/tutorials/codealong/contract_tutorials/token_bridge/minting_on_aztec" [[redirects]] - from = "/tutorials/contract_tutorials/advanced/token_bridge/depositing_to_aztec" - to = "/tutorials/codealong/contract_tutorials/advanced/token_bridge/depositing_to_aztec" + from = "/tutorials/contract_tutorials/advanced/token_bridge/depositing_to_aztec" + to = "/tutorials/codealong/contract_tutorials/token_bridge/depositing_to_aztec" [[redirects]] - from = "/tutorials/contract_tutorials/advanced/token_bridge" - to = "/tutorials/codealong/contract_tutorials/advanced/token_bridge" + from = "/tutorials/contract_tutorials/advanced/token_bridge" + to = "/tutorials/codealong/contract_tutorials/token_bridge" [[redirects]] - from = "/tutorials/contract_tutorials/advanced/token_bridge/setup" - to = "/tutorials/codealong/contract_tutorials/advanced/token_bridge/setup" + from = "/tutorials/contract_tutorials/advanced/token_bridge/setup" + to = "/tutorials/codealong/contract_tutorials/token_bridge/setup" [[redirects]]
from = "/tutorials/contract_tutorials/crowdfunding_contract" @@ -64,7 +64,7 @@ [[redirects]] from = "/developers/sandbox/*" - to = "/guides/developer_guides/getting_started" + to = "/guides/getting_started" [[redirects]] from = "/developers/contracts/*" @@ -72,7 +72,7 @@ [[redirects]] from = "/dev_docs/*" - to = "/guides/developer_guides/getting_started" + to = "/guides/getting_started" [[redirects]] from = "/aztec/cryptography/cryptography-roadmap" @@ -136,12 +136,16 @@ [[redirects]] from = "/reference/sandbox_reference/sandbox-reference" - to = "/guides/developer_guides/getting_started" + to = "/guides/getting_started" [[redirects]] from = "/reference/sandbox_reference" - to = "/guides/developer_guides/getting_started" + to = "/guides/getting_started" [[redirects]] - from = "/guides/developer_guides/getting_started/quickstart" - to = "/guides/developer_guides/getting_started" \ No newline at end of file + from = "/guides/getting_started/quickstart" + to = "/guides/getting_started" + +[[redirects]] +from = "/guides/developer_guides/getting_started/quickstart" +to = "/guides/developer_guides/getting_started" \ No newline at end of file diff --git a/docs/sidebars.js b/docs/sidebars.js index f8a9c10e42b2..673ca9602fad 100644 --- a/docs/sidebars.js +++ b/docs/sidebars.js @@ -8,13 +8,13 @@ export default { sidebar: [ { type: "html", - value: 'Aztec Protocol', + value: 'Concepts', className: "sidebar-title", }, { type: "doc", - id: "aztec/overview", - label: "What is Aztec?", + id: "aztec/concepts_overview", + label: "Concepts Overview", }, { type: "category", @@ -68,54 +68,60 @@ export default { guidesSidebar: [ { - type: "doc", - label: "Popular Guides", - id: "guides/index", + type: "doc", + id: "guides/index", + label: "Guides and Tutorials", }, { type: "html", - value: 'Developer Guides', + value: 'Getting Started', className: "sidebar-title", }, { - type: "autogenerated", - dirName: "guides/developer_guides", + type: "doc", + label: "Quickstart", + id: "guides/getting_started" }, { type: "html", value: '', }, { - type: "doc", - label: "Privacy Considerations", - id: "guides/privacy_considerations", + type: "html", + value: 'Code-along Tutorials', + className: "sidebar-title", }, - ], - tutorialsSidebar: [ { - type: "doc", - label: "Tutorials and Examples", - id: "tutorials/index", + type: "autogenerated", + dirName: "tutorials/codealong", }, { type: "html", - value: 'Code-along Tutorials', + value: 'Advanced Examples', className: "sidebar-title", }, { type: "autogenerated", - dirName: "tutorials/codealong", + dirName: "tutorials/examples", + }, + { + type: "html", + value: '', }, { type: "html", - value: 'Examples', + value: 'How-to Guides', className: "sidebar-title", }, { type: "autogenerated", - dirName: "tutorials/examples", + dirName: "guides/developer_guides" }, - ], + { + type: "html", + value: '', + }, + ], referenceSidebar: [ { @@ -132,10 +138,20 @@ export default { type: "autogenerated", dirName: "reference/developer_references", }, + { + type: "html", + value: 'Considerations', + className: "sidebar-title", + }, { type: "doc", id: "migration_notes", }, + { + type: "doc", + label: "Privacy Considerations", + id: "guides/privacy_considerations" + }, { type: "html", value: '', @@ -144,7 +160,7 @@ export default { type: "doc", id: "aztec_connect_sunset", }, - ], + ], roadmapSidebar: [ { diff --git a/docs/src/components/TutorialCard/CardHeader/index.js b/docs/src/components/TutorialCard/CardHeader/index.js index e8a81543e0eb..6ff779bafe58 100644 --- 
a/docs/src/components/TutorialCard/CardHeader/index.js +++ b/docs/src/components/TutorialCard/CardHeader/index.js @@ -1,7 +1,7 @@ import React, { CSSProperties } from 'react'; // CSSProperties allows inline styling with better type checking. import clsx from 'clsx'; // clsx helps manage conditional className names in a clean and concise manner. const CardHeader = ({ - className, // classNamees for the container card + className, // classNames for the container card style, // Custom styles for the container card children, // Content to be included within the card textAlign, @@ -41,4 +41,4 @@ const CardHeader = ( ); } -export default CardHeader; \ No newline at end of file +export default CardHeader; diff --git a/docs/src/preprocess/InstructionSet/InstructionSet.js b/docs/src/preprocess/InstructionSet/InstructionSet.js index cec5a392c7c7..f7c04fe07d3a 100644 --- a/docs/src/preprocess/InstructionSet/InstructionSet.js +++ b/docs/src/preprocess/InstructionSet/InstructionSet.js @@ -531,25 +531,6 @@ const INSTRUCTION_SET_RAW = [ "Tag checks": "", "Tag updates": "`T[dstOffset] = field`", }, - { - id: "functionselector", - Name: "`FUNCTIONSELECTOR`", - Category: "Execution Environment", - Flags: [{ name: "indirect", description: INDIRECT_FLAG_DESCRIPTION }], - Args: [ - { - name: "dstOffset", - description: - "memory offset specifying where to store operation's result", - }, - ], - Expression: "`M[dstOffset] = context.environment.functionSelector`", - Summary: - "Get the function selector of the contract function being executed", - Details: "", - "Tag checks": "", - "Tag updates": "`T[dstOffset] = u32`", - }, { id: "transactionfee", Name: "`TRANSACTIONFEE`", diff --git a/docs/static/img/authwit.png b/docs/static/img/authwit.png new file mode 100644 index 000000000000..bf941e6b4e01 Binary files /dev/null and b/docs/static/img/authwit.png differ diff --git a/docs/static/img/authwit2.png b/docs/static/img/authwit2.png new file mode 100644 index 000000000000..80cbf1ad8ee6 Binary files /dev/null and b/docs/static/img/authwit2.png differ diff --git a/docs/static/img/authwit3.png b/docs/static/img/authwit3.png new file mode 100644 index 000000000000..d6b375565c00 Binary files /dev/null and b/docs/static/img/authwit3.png differ diff --git a/docs/static/img/authwit4.png b/docs/static/img/authwit4.png new file mode 100644 index 000000000000..2cb6f4d14a0d Binary files /dev/null and b/docs/static/img/authwit4.png differ diff --git a/docs/static/img/aztec-high-level.png b/docs/static/img/aztec-high-level.png new file mode 100644 index 000000000000..cb4813f3257b Binary files /dev/null and b/docs/static/img/aztec-high-level.png differ diff --git a/docs/static/img/aztec_high_level_network_architecture.png b/docs/static/img/aztec_high_level_network_architecture.png deleted file mode 100644 index db7401112ff1..000000000000 Binary files a/docs/static/img/aztec_high_level_network_architecture.png and /dev/null differ diff --git a/docs/static/img/aztec_high_level_network_architecture.svg b/docs/static/img/aztec_high_level_network_architecture.svg deleted file mode 100644 index 12998f66e102..000000000000 --- a/docs/static/img/aztec_high_level_network_architecture.svg +++ /dev/null @@ -1,21 +0,0 @@ -[SVG markup elided; deleted diagram labels: Aztec Network, Private Execution Environment (PXE), User, Aztec.js, ACIR Simulator, Aztec Node, Sequencer, Ethereum Rollup Contract, Prover Network, Provers, Database, Private Kernel Circuit, Private Execution (Client-side), Public Execution (Network-side)] \ No newline at end of file diff --git 
a/docs/static/img/context/sender_context_change.png b/docs/static/img/context/sender_context_change.png index 5f36f91c4883..e0ec94fa1eef 100644 Binary files a/docs/static/img/context/sender_context_change.png and b/docs/static/img/context/sender_context_change.png differ diff --git a/docs/static/img/how-does-aztec-work.webp b/docs/static/img/how-does-aztec-work.webp deleted file mode 100644 index 011378fee2db..000000000000 Binary files a/docs/static/img/how-does-aztec-work.webp and /dev/null differ diff --git a/docs/static/img/public-and-private-state-diagram.png b/docs/static/img/public-and-private-state-diagram.png new file mode 100644 index 000000000000..cb1f6ef53495 Binary files /dev/null and b/docs/static/img/public-and-private-state-diagram.png differ diff --git a/docs/static/img/pxe.png b/docs/static/img/pxe.png new file mode 100644 index 000000000000..2d34ebae78ff Binary files /dev/null and b/docs/static/img/pxe.png differ diff --git a/docs/static/img/transaction-lifecycle.png b/docs/static/img/transaction-lifecycle.png index c92950d71d2e..96852449fcc3 100644 Binary files a/docs/static/img/transaction-lifecycle.png and b/docs/static/img/transaction-lifecycle.png differ diff --git a/l1-contracts/src/core/Leonidas.sol b/l1-contracts/src/core/Leonidas.sol index 01899cd30032..77244bec6284 100644 --- a/l1-contracts/src/core/Leonidas.sol +++ b/l1-contracts/src/core/Leonidas.sol @@ -5,11 +5,13 @@ pragma solidity >=0.8.27; import {ILeonidas, EpochData, LeonidasStorage} from "@aztec/core/interfaces/ILeonidas.sol"; import {Signature} from "@aztec/core/libraries/crypto/SignatureLib.sol"; import {DataStructures} from "@aztec/core/libraries/DataStructures.sol"; +import {Errors} from "@aztec/core/libraries/Errors.sol"; import {LeonidasLib} from "@aztec/core/libraries/LeonidasLib/LeonidasLib.sol"; import { Timestamp, Slot, Epoch, SlotLib, EpochLib, TimeFns } from "@aztec/core/libraries/TimeMath.sol"; -import {Ownable} from "@oz/access/Ownable.sol"; +import {Staking} from "@aztec/core/staking/Staking.sol"; +import {IERC20} from "@oz/token/ERC20/IERC20.sol"; import {EnumerableSet} from "@oz/utils/structs/EnumerableSet.sol"; /** @@ -19,16 +21,13 @@ import {EnumerableSet} from "@oz/utils/structs/EnumerableSet.sol"; * He define the structure needed for committee and leader selection and provides logic for validating that * the block and its "evidence" follows his rules. * - * @dev Leonidas is depending on Ares to add/remove warriors to/from his army competently. - * * @dev Leonidas have one thing in mind, he provide a reference of the LOGIC going on for the spartan selection. * He is not concerned about gas costs, he is a king, he just throw gas in the air like no-one cares. * It will be the duty of his successor (Pleistarchus) to optimize the costs with same functionality. 
* */ -contract Leonidas is Ownable, TimeFns, ILeonidas { +contract Leonidas is Staking, TimeFns, ILeonidas { using EnumerableSet for EnumerableSet.AddressSet; - using LeonidasLib for LeonidasStorage; using SlotLib for Slot; using EpochLib for Epoch; @@ -40,14 +39,16 @@ contract Leonidas is Ownable, TimeFns, ILeonidas { // The time that the contract was deployed Timestamp public immutable GENESIS_TIME; - LeonidasStorage private store; + LeonidasStorage private leonidasStore; constructor( address _ares, + IERC20 _stakingAsset, + uint256 _minimumStake, uint256 _slotDuration, uint256 _epochDuration, uint256 _targetCommitteeSize - ) Ownable(_ares) TimeFns(_slotDuration, _epochDuration) { + ) Staking(_ares, _stakingAsset, _minimumStake) TimeFns(_slotDuration, _epochDuration) { GENESIS_TIME = Timestamp.wrap(block.timestamp); SLOT_DURATION = _slotDuration; EPOCH_DURATION = _epochDuration; @@ -55,70 +56,92 @@ contract Leonidas is Ownable, TimeFns, ILeonidas { } /** - * @notice Adds a validator to the validator set + * @notice Get the validator set for a given epoch * - * @dev Only ARES can add validators + * @dev Consider removing this to replace with a `size` and individual getter. * - * @dev Will setup the epoch if needed BEFORE adding the validator. - * This means that the validator will effectively be added to the NEXT epoch. + * @param _epoch The epoch number to get the validator set for * - * @param _validator - The validator to add + * @return The validator set for the given epoch */ - function addValidator(address _validator) external override(ILeonidas) onlyOwner { - setupEpoch(); - _addValidator(_validator); + function getEpochCommittee(Epoch _epoch) + external + view + override(ILeonidas) + returns (address[] memory) + { + return leonidasStore.epochs[_epoch].committee; } /** - * @notice Removes a validator from the validator set - * - * @dev Only ARES can add validators - * - * @dev Will setup the epoch if needed BEFORE removing the validator. - * This means that the validator will effectively be removed from the NEXT epoch. - * - * @param _validator - The validator to remove + * @notice Get the validator set for the current epoch + * @return The validator set for the current epoch */ - function removeValidator(address _validator) external override(ILeonidas) onlyOwner { - setupEpoch(); - store.validatorSet.remove(_validator); + function getCurrentEpochCommittee() external view override(ILeonidas) returns (address[] memory) { + return LeonidasLib.getCommitteeAt( + leonidasStore, stakingStore, getCurrentEpoch(), TARGET_COMMITTEE_SIZE + ); } /** - * @notice Get the validator set for a given epoch - * - * @dev Consider removing this to replace with a `size` and individual getter. 
+ * @notice Get the committee for a given timestamp * - * @param _epoch The epoch number to get the validator set for + * @param _ts - The timestamp to get the committee for * - * @return The validator set for the given epoch + * @return The committee for the given timestamp */ - function getEpochCommittee(Epoch _epoch) + function getCommitteeAt(Timestamp _ts) external view override(ILeonidas) returns (address[] memory) { - return store.epochs[_epoch].committee; + return LeonidasLib.getCommitteeAt( + leonidasStore, stakingStore, getEpochAt(_ts), TARGET_COMMITTEE_SIZE + ); } /** - * @notice Get the validator set for the current epoch - * @return The validator set for the current epoch + * @notice Get the sample seed for a given timestamp + * + * @param _ts - The timestamp to get the sample seed for + * + * @return The sample seed for the given timestamp */ - function getCurrentEpochCommittee() external view override(ILeonidas) returns (address[] memory) { - return store.getCommitteeAt(getCurrentEpoch(), TARGET_COMMITTEE_SIZE); + function getSampleSeedAt(Timestamp _ts) external view override(ILeonidas) returns (uint256) { + return LeonidasLib.getSampleSeed(leonidasStore, getEpochAt(_ts)); } /** - * @notice Get the validator set + * @notice Get the sample seed for the current epoch * - * @dev Consider removing this to replace with a `size` and individual getter. - * - * @return The validator set + * @return The sample seed for the current epoch */ - function getValidators() external view override(ILeonidas) returns (address[] memory) { - return store.validatorSet.values(); + function getCurrentSampleSeed() external view override(ILeonidas) returns (uint256) { + return LeonidasLib.getSampleSeed(leonidasStore, getCurrentEpoch()); + } + + function initiateWithdraw(address _attester, address _recipient) + public + override(Staking) + returns (bool) + { + // @note The attester might be chosen for the epoch, so the delay must be long enough + // to allow for that. + setupEpoch(); + return super.initiateWithdraw(_attester, _recipient); + } + + function deposit(address _attester, address _proposer, address _withdrawer, uint256 _amount) + public + override(Staking) + { + setupEpoch(); + require( + _attester != address(0) && _proposer != address(0), + Errors.Leonidas__InvalidDeposit(_attester, _proposer) + ); + super.deposit(_attester, _proposer, _withdrawer, _amount); } /** @@ -127,49 +150,31 @@ contract Leonidas is Ownable, TimeFns, ILeonidas { * - Set the seed for the epoch * - Update the last seed * - * @dev Since this is a reference optimising for simplicity, we store the actual validator set in the epoch structure. + * @dev Since this is a reference optimising for simplicity, we store the actual validator set in the epoch structure. 
* This is very heavy on gas, so start crying because the gas here will melt the poles * https://i.giphy.com/U1aN4HTfJ2SmgB2BBK.webp */ function setupEpoch() public override(ILeonidas) { Epoch epochNumber = getCurrentEpoch(); - EpochData storage epoch = store.epochs[epochNumber]; + EpochData storage epoch = leonidasStore.epochs[epochNumber]; if (epoch.sampleSeed == 0) { - epoch.sampleSeed = store.getSampleSeed(epochNumber); - epoch.nextSeed = store.lastSeed = _computeNextSeed(epochNumber); - - epoch.committee = store.sampleValidators(epoch.sampleSeed, TARGET_COMMITTEE_SIZE); + epoch.sampleSeed = LeonidasLib.getSampleSeed(leonidasStore, epochNumber); + epoch.nextSeed = leonidasStore.lastSeed = _computeNextSeed(epochNumber); + epoch.committee = + LeonidasLib.sampleValidators(stakingStore, epoch.sampleSeed, TARGET_COMMITTEE_SIZE); } } /** - * @notice Get the number of validators in the validator set + * @notice Get the attester set * - * @return The number of validators in the validator set - */ - function getValidatorCount() public view override(ILeonidas) returns (uint256) { - return store.validatorSet.length(); - } - - /** - * @notice Get the number of validators in the validator set - * - * @return The number of validators in the validator set - */ - function getValidatorAt(uint256 _index) public view override(ILeonidas) returns (address) { - return store.validatorSet.at(_index); - } - - /** - * @notice Checks if an address is in the validator set - * - * @param _validator - The address to check + * @dev Consider removing this to replace with a `size` and individual getter. * - * @return True if the address is in the validator set, false otherwise + * @return The attester set */ - function isValidator(address _validator) public view override(ILeonidas) returns (bool) { - return store.validatorSet.contains(_validator); + function getAttesters() public view override(ILeonidas) returns (address[] memory) { + return stakingStore.attesters.values(); } /** @@ -241,7 +246,9 @@ function getProposerAt(Timestamp _ts) public view override(ILeonidas) returns (address) { Slot slot = getSlotAt(_ts); Epoch epochNumber = getEpochAtSlot(slot); - return store.getProposerAt(slot, epochNumber, TARGET_COMMITTEE_SIZE); + return LeonidasLib.getProposerAt( + leonidasStore, stakingStore, slot, epochNumber, TARGET_COMMITTEE_SIZE + ); } /** @@ -277,12 +284,19 @@ return Epoch.wrap(_slotNumber.unwrap() / EPOCH_DURATION); } - /** - * @notice Adds a validator to the set WITHOUT setting up the epoch - * @param _validator - The validator to add - */ - function _addValidator(address _validator) internal { - store.validatorSet.add(_validator); + // Can be used to add validators without setting up the epoch, useful for the initial set. 
+ function _cheat__Deposit( + address _attester, + address _proposer, + address _withdrawer, + uint256 _amount + ) internal { + require( + _attester != address(0) && _proposer != address(0), + Errors.Leonidas__InvalidDeposit(_attester, _proposer) + ); + + super.deposit(_attester, _proposer, _withdrawer, _amount); } /** @@ -308,7 +322,16 @@ contract Leonidas is Ownable, TimeFns, ILeonidas { DataStructures.ExecutionFlags memory _flags ) internal view { Epoch epochNumber = getEpochAtSlot(_slot); - store.validateLeonidas(_slot, epochNumber, _signatures, _digest, _flags, TARGET_COMMITTEE_SIZE); + LeonidasLib.validateLeonidas( + leonidasStore, + stakingStore, + _slot, + epochNumber, + _signatures, + _digest, + _flags, + TARGET_COMMITTEE_SIZE + ); } /** diff --git a/l1-contracts/src/core/Rollup.sol b/l1-contracts/src/core/Rollup.sol index f706f40d5235..11ee424ba4dd 100644 --- a/l1-contracts/src/core/Rollup.sol +++ b/l1-contracts/src/core/Rollup.sol @@ -7,6 +7,7 @@ import {IProofCommitmentEscrow} from "@aztec/core/interfaces/IProofCommitmentEsc import { IRollup, ITestRollup, + CheatDepositArgs, FeeHeader, ManaBaseFeeComponents, BlockLog, @@ -40,6 +41,7 @@ import {Outbox} from "@aztec/core/messagebridge/Outbox.sol"; import {ProofCommitmentEscrow} from "@aztec/core/ProofCommitmentEscrow.sol"; import {IRewardDistributor} from "@aztec/governance/interfaces/IRewardDistributor.sol"; import {MockVerifier} from "@aztec/mock/MockVerifier.sol"; +import {Ownable} from "@oz/access/Ownable.sol"; import {IERC20} from "@oz/token/ERC20/IERC20.sol"; import {EIP712} from "@oz/utils/cryptography/EIP712.sol"; import {Vm} from "forge-std/Vm.sol"; @@ -49,6 +51,7 @@ struct Config { uint256 aztecEpochDuration; uint256 targetCommitteeSize; uint256 aztecEpochProofClaimWindowInL2Slots; + uint256 minimumStake; } /** @@ -57,7 +60,7 @@ struct Config { * @notice Rollup contract that is concerned about readability and velocity of development * not giving a damn about gas costs. 
*/ -contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { +contract Rollup is EIP712("Aztec Rollup", "1"), Ownable, Leonidas, IRollup, ITestRollup { using SlotLib for Slot; using EpochLib for Epoch; using ProposeLib for ProposeArgs; @@ -97,14 +100,17 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { constructor( IFeeJuicePortal _fpcJuicePortal, IRewardDistributor _rewardDistributor, + IERC20 _stakingAsset, bytes32 _vkTreeRoot, bytes32 _protocolContractTreeRoot, address _ares, - address[] memory _validators, Config memory _config ) + Ownable(_ares) Leonidas( _ares, + _stakingAsset, + _config.minimumStake, _config.aztecSlotDuration, _config.aztecEpochDuration, _config.targetCommitteeSize @@ -145,8 +151,15 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { post: L1FeeData({baseFee: block.basefee, blobFee: _getBlobBaseFee()}), slotOfChange: LIFETIME }); - for (uint256 i = 0; i < _validators.length; i++) { - _addValidator(_validators[i]); + } + + function cheat__InitialiseValidatorSet(CheatDepositArgs[] memory _args) + external + override(ITestRollup) + onlyOwner + { + for (uint256 i = 0; i < _args.length; i++) { + _cheat__Deposit(_args[i].attester, _args[i].proposer, _args[i].withdrawer, _args[i].amount); } setupEpoch(); } diff --git a/l1-contracts/src/core/interfaces/ILeonidas.sol b/l1-contracts/src/core/interfaces/ILeonidas.sol index 256abed990e2..7c9aeef404b5 100644 --- a/l1-contracts/src/core/interfaces/ILeonidas.sol +++ b/l1-contracts/src/core/interfaces/ILeonidas.sol @@ -3,12 +3,11 @@ pragma solidity >=0.8.27; import {Timestamp, Slot, Epoch} from "@aztec/core/libraries/TimeMath.sol"; -import {EnumerableSet} from "@oz/utils/structs/EnumerableSet.sol"; /** * @notice The data structure for an epoch - * @param committee - The validator set for the epoch - * @param sampleSeed - The seed used to sample the validator set of the epoch + * @param committee - The attesters for the epoch + * @param sampleSeed - The seed used to sample the attesters of the epoch * @param nextSeed - The seed used to influence the NEXT epoch */ struct EpochData { @@ -18,7 +17,6 @@ struct EpochData { } struct LeonidasStorage { - EnumerableSet.AddressSet validatorSet; // A mapping to snapshots of the validator set mapping(Epoch => EpochData) epochs; // The last stored randao value, same value as `seed` in the last inserted epoch @@ -26,10 +24,6 @@ struct LeonidasStorage { } interface ILeonidas { - // Changing depending on sybil mechanism and slashing enforcement - function addValidator(address _validator) external; - function removeValidator(address _validator) external; - // Likely changing to optimize in Pleistarchus function setupEpoch() external; function getCurrentProposer() external view returns (address); @@ -38,9 +32,6 @@ interface ILeonidas { // Stable function getCurrentEpoch() external view returns (Epoch); function getCurrentSlot() external view returns (Slot); - function isValidator(address _validator) external view returns (bool); - function getValidatorCount() external view returns (uint256); - function getValidatorAt(uint256 _index) external view returns (address); // Consider removing below this point function getTimestampForSlot(Slot _slotNumber) external view returns (Timestamp); @@ -48,8 +39,12 @@ interface ILeonidas { // Likely removal of these to replace with a size and indiviual getter // Get the current epoch committee function getCurrentEpochCommittee() external view returns (address[] memory); + 
function getCommitteeAt(Timestamp _ts) external view returns (address[] memory); function getEpochCommittee(Epoch _epoch) external view returns (address[] memory); - function getValidators() external view returns (address[] memory); + function getAttesters() external view returns (address[] memory); + + function getSampleSeedAt(Timestamp _ts) external view returns (uint256); + function getCurrentSampleSeed() external view returns (uint256); function getEpochAt(Timestamp _ts) external view returns (Epoch); function getSlotAt(Timestamp _ts) external view returns (Slot); diff --git a/l1-contracts/src/core/interfaces/IRollup.sol b/l1-contracts/src/core/interfaces/IRollup.sol index 387fe7061751..fb22590932e9 100644 --- a/l1-contracts/src/core/interfaces/IRollup.sol +++ b/l1-contracts/src/core/interfaces/IRollup.sol @@ -53,11 +53,19 @@ struct RollupStore { IVerifier epochProofVerifier; } +struct CheatDepositArgs { + address attester; + address proposer; + address withdrawer; + uint256 amount; +} + interface ITestRollup { function setEpochVerifier(address _verifier) external; function setVkTreeRoot(bytes32 _vkTreeRoot) external; function setProtocolContractTreeRoot(bytes32 _protocolContractTreeRoot) external; function setAssumeProvenThroughBlockNumber(uint256 _blockNumber) external; + function cheat__InitialiseValidatorSet(CheatDepositArgs[] memory _args) external; function getManaBaseFeeComponentsAt(Timestamp _timestamp, bool _inFeeAsset) external view diff --git a/l1-contracts/src/core/interfaces/IStaking.sol b/l1-contracts/src/core/interfaces/IStaking.sol index 12d1cce4ab98..e2d469dd8a37 100644 --- a/l1-contracts/src/core/interfaces/IStaking.sol +++ b/l1-contracts/src/core/interfaces/IStaking.sol @@ -3,6 +3,7 @@ pragma solidity >=0.8.27; import {Timestamp} from "@aztec/core/libraries/TimeMath.sol"; +import {EnumerableSet} from "@oz/utils/structs/EnumerableSet.sol"; // None -> Does not exist in our setup // Validating -> Participating as validator @@ -33,6 +34,12 @@ struct Exit { address recipient; } +struct StakingStorage { + EnumerableSet.AddressSet attesters; + mapping(address attester => ValidatorInfo) info; + mapping(address attester => Exit) exits; +} + interface IStaking { event Deposit( address indexed attester, address indexed proposer, address indexed withdrawer, uint256 amount diff --git a/l1-contracts/src/core/libraries/ConstantsGen.sol b/l1-contracts/src/core/libraries/ConstantsGen.sol index f76deaaf78cc..79803ccb63b6 100644 --- a/l1-contracts/src/core/libraries/ConstantsGen.sol +++ b/l1-contracts/src/core/libraries/ConstantsGen.sol @@ -199,7 +199,7 @@ library Constants { uint256 internal constant TX_REQUEST_LENGTH = 12; uint256 internal constant TOTAL_FEES_LENGTH = 1; uint256 internal constant TOTAL_MANA_USED_LENGTH = 1; - uint256 internal constant HEADER_LENGTH = 25; + uint256 internal constant BLOCK_HEADER_LENGTH = 25; uint256 internal constant PRIVATE_CIRCUIT_PUBLIC_INPUTS_LENGTH = 739; uint256 internal constant PUBLIC_CIRCUIT_PUBLIC_INPUTS_LENGTH = 867; uint256 internal constant PRIVATE_CONTEXT_INPUTS_LENGTH = 38; @@ -248,7 +248,6 @@ library Constants { uint256 internal constant MEM_TAG_U128 = 6; uint256 internal constant SENDER_KERNEL_INPUTS_COL_OFFSET = 0; uint256 internal constant ADDRESS_KERNEL_INPUTS_COL_OFFSET = 1; - uint256 internal constant FUNCTION_SELECTOR_KERNEL_INPUTS_COL_OFFSET = 2; uint256 internal constant IS_STATIC_CALL_KERNEL_INPUTS_COL_OFFSET = 3; uint256 internal constant CHAIN_ID_KERNEL_INPUTS_COL_OFFSET = 4; uint256 internal constant 
VERSION_KERNEL_INPUTS_COL_OFFSET = 5; diff --git a/l1-contracts/src/core/libraries/Errors.sol b/l1-contracts/src/core/libraries/Errors.sol index 32d6e3a65ba2..3b97bb534b5d 100644 --- a/l1-contracts/src/core/libraries/Errors.sol +++ b/l1-contracts/src/core/libraries/Errors.sol @@ -98,6 +98,7 @@ library Errors { // Sequencer Selection (Leonidas) error Leonidas__EpochNotSetup(); // 0xcf4e597e error Leonidas__InvalidProposer(address expected, address actual); // 0xd02d278e + error Leonidas__InvalidDeposit(address attester, address proposer); // 0x1ef9a54b error Leonidas__InsufficientAttestations(uint256 minimumNeeded, uint256 provided); // 0xbf1ca4cb error Leonidas__InsufficientAttestationsProvided(uint256 minimumNeeded, uint256 provided); // 0xb3a697c2 diff --git a/l1-contracts/src/core/libraries/LeonidasLib/LeonidasLib.sol b/l1-contracts/src/core/libraries/LeonidasLib/LeonidasLib.sol index 28bc684fd845..56b465bbbc6b 100644 --- a/l1-contracts/src/core/libraries/LeonidasLib/LeonidasLib.sol +++ b/l1-contracts/src/core/libraries/LeonidasLib/LeonidasLib.sol @@ -3,6 +3,7 @@ pragma solidity >=0.8.27; import {EpochData, LeonidasStorage} from "@aztec/core/interfaces/ILeonidas.sol"; +import {StakingStorage} from "@aztec/core/interfaces/IStaking.sol"; import {SampleLib} from "@aztec/core/libraries/crypto/SampleLib.sol"; import {SignatureLib, Signature} from "@aztec/core/libraries/crypto/SignatureLib.sol"; import {DataStructures} from "@aztec/core/libraries/DataStructures.sol"; @@ -25,28 +26,30 @@ library LeonidasLib { * @return The validators for the given epoch */ function sampleValidators( - LeonidasStorage storage _store, + StakingStorage storage _stakingStore, uint256 _seed, uint256 _targetCommitteeSize ) external view returns (address[] memory) { - return _sampleValidators(_store, _seed, _targetCommitteeSize); + return _sampleValidators(_stakingStore, _seed, _targetCommitteeSize); } function getProposerAt( - LeonidasStorage storage _store, + LeonidasStorage storage _leonidasStore, + StakingStorage storage _stakingStore, Slot _slot, Epoch _epochNumber, uint256 _targetCommitteeSize ) external view returns (address) { - return _getProposerAt(_store, _slot, _epochNumber, _targetCommitteeSize); + return _getProposerAt(_leonidasStore, _stakingStore, _slot, _epochNumber, _targetCommitteeSize); } function getCommitteeAt( - LeonidasStorage storage _store, + LeonidasStorage storage _leonidasStore, + StakingStorage storage _stakingStore, Epoch _epochNumber, uint256 _targetCommitteeSize ) external view returns (address[] memory) { - return _getCommitteeAt(_store, _epochNumber, _targetCommitteeSize); + return _getCommitteeAt(_leonidasStore, _stakingStore, _epochNumber, _targetCommitteeSize); } /** @@ -66,7 +69,8 @@ library LeonidasLib { * @param _digest - The digest of the block */ function validateLeonidas( - LeonidasStorage storage _store, + LeonidasStorage storage _leonidasStore, + StakingStorage storage _stakingStore, Slot _slot, Epoch _epochNumber, Signature[] memory _signatures, @@ -74,7 +78,16 @@ library LeonidasLib { DataStructures.ExecutionFlags memory _flags, uint256 _targetCommitteeSize ) external view { - address proposer = _getProposerAt(_store, _slot, _epochNumber, _targetCommitteeSize); + // Same logic as in getProposerAt. + // Done to avoid computing the committee twice. + address[] memory committee = + _getCommitteeAt(_leonidasStore, _stakingStore, _epochNumber, _targetCommitteeSize); + address attester = committee.length == 0 + ?
address(0) + : committee[computeProposerIndex( + _epochNumber, _slot, getSampleSeed(_leonidasStore, _epochNumber), committee.length + )]; + address proposer = _stakingStore.info[attester].proposer; // @todo Consider getting rid of this option. // If the proposer is open, we allow anyone to propose without needing any signatures @@ -85,16 +98,10 @@ library LeonidasLib { // @todo We should allow to provide a signature instead of needing the proposer to broadcast. require(proposer == msg.sender, Errors.Leonidas__InvalidProposer(proposer, msg.sender)); - // @note This is NOT the efficient way to do it, but it is a very convenient way for us to do it - // that allows us to reduce the number of code paths. Also when changed with optimistic for - // pleistarchus, this will be changed, so we can live with it. - if (_flags.ignoreSignatures) { return; } - address[] memory committee = _getCommitteeAt(_store, _epochNumber, _targetCommitteeSize); - uint256 needed = committee.length * 2 / 3 + 1; require( _signatures.length >= needed, @@ -137,7 +144,7 @@ library LeonidasLib { * * @return The sample seed for the epoch */ - function getSampleSeed(LeonidasStorage storage _store, Epoch _epoch) + function getSampleSeed(LeonidasStorage storage _leonidasStore, Epoch _epoch) internal view returns (uint256) @@ -145,17 +152,17 @@ library LeonidasLib { if (Epoch.unwrap(_epoch) == 0) { return type(uint256).max; } - uint256 sampleSeed = _store.epochs[_epoch].sampleSeed; + uint256 sampleSeed = _leonidasStore.epochs[_epoch].sampleSeed; if (sampleSeed != 0) { return sampleSeed; } - sampleSeed = _store.epochs[_epoch - Epoch.wrap(1)].nextSeed; + sampleSeed = _leonidasStore.epochs[_epoch - Epoch.wrap(1)].nextSeed; if (sampleSeed != 0) { return sampleSeed; } - return _store.lastSeed; + return _leonidasStore.lastSeed; } /** @@ -167,32 +174,33 @@ library LeonidasLib { * @return The validators for the given epoch */ function _sampleValidators( - LeonidasStorage storage _store, + StakingStorage storage _stakingStore, uint256 _seed, uint256 _targetCommitteeSize ) private view returns (address[] memory) { - uint256 validatorSetSize = _store.validatorSet.length(); + uint256 validatorSetSize = _stakingStore.attesters.length(); if (validatorSetSize == 0) { return new address[](0); } // If we have less validators than the target committee size, we just return the full set if (validatorSetSize <= _targetCommitteeSize) { - return _store.validatorSet.values(); + return _stakingStore.attesters.values(); } - uint256[] memory indicies = + uint256[] memory indices = SampleLib.computeCommitteeClever(_targetCommitteeSize, validatorSetSize, _seed); address[] memory committee = new address[](_targetCommitteeSize); for (uint256 i = 0; i < _targetCommitteeSize; i++) { - committee[i] = _store.validatorSet.at(indicies[i]); + committee[i] = _stakingStore.attesters.at(indices[i]); } return committee; } function _getProposerAt( - LeonidasStorage storage _store, + LeonidasStorage storage _leonidasStore, + StakingStorage storage _stakingStore, Slot _slot, Epoch _epochNumber, uint256 _targetCommitteeSize @@ -201,21 +209,26 @@ library LeonidasLib { // it does not need to actually return the full committee and then draw from it // it can just return the proposer directly, but then we duplicate the code // which we just don't have room for right now... 
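Aside: the `getSampleSeed` fallback above is worth spelling out. Here is an illustrative, storage-free restatement of the same order of precedence (not part of the PR; the three storage reads are lifted into parameters):

```solidity
// SPDX-License-Identifier: Apache-2.0
pragma solidity >=0.8.27;

// Mirrors LeonidasLib.getSampleSeed's precedence for choosing an epoch's seed.
function sampleSeedFor(uint256 epoch, uint256 ownSeed, uint256 prevNextSeed, uint256 lastSeed)
  pure
  returns (uint256)
{
  // Epoch 0 has no prior epoch to inherit a seed from.
  if (epoch == 0) return type(uint256).max;
  // 1) A seed already stored for this epoch wins.
  if (ownSeed != 0) return ownSeed;
  // 2) Otherwise use the `nextSeed` committed by the previous epoch.
  if (prevNextSeed != 0) return prevNextSeed;
  // 3) Final fallback: the last recorded randao value.
  return lastSeed;
}
```
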
- address[] memory committee = _getCommitteeAt(_store, _epochNumber, _targetCommitteeSize); + address[] memory committee = + _getCommitteeAt(_leonidasStore, _stakingStore, _epochNumber, _targetCommitteeSize); if (committee.length == 0) { return address(0); } - return committee[computeProposerIndex( - _epochNumber, _slot, getSampleSeed(_store, _epochNumber), committee.length + + address attester = committee[computeProposerIndex( + _epochNumber, _slot, getSampleSeed(_leonidasStore, _epochNumber), committee.length )]; + + return _stakingStore.info[attester].proposer; } function _getCommitteeAt( - LeonidasStorage storage _store, + LeonidasStorage storage _leonidasStore, + StakingStorage storage _stakingStore, Epoch _epochNumber, uint256 _targetCommitteeSize ) private view returns (address[] memory) { - EpochData storage epoch = _store.epochs[_epochNumber]; + EpochData storage epoch = _leonidasStore.epochs[_epochNumber]; if (epoch.sampleSeed != 0) { uint256 committeeSize = epoch.committee.length; @@ -226,13 +239,13 @@ library LeonidasLib { } // Allow anyone if there is no validator set - if (_store.validatorSet.length() == 0) { + if (_stakingStore.attesters.length() == 0) { return new address[](0); } // Emulate a sampling of the validators - uint256 sampleSeed = getSampleSeed(_store, _epochNumber); - return _sampleValidators(_store, sampleSeed, _targetCommitteeSize); + uint256 sampleSeed = getSampleSeed(_leonidasStore, _epochNumber); + return _sampleValidators(_stakingStore, sampleSeed, _targetCommitteeSize); } /** diff --git a/l1-contracts/src/core/libraries/RollupLibs/HeaderLib.sol b/l1-contracts/src/core/libraries/RollupLibs/HeaderLib.sol index 15d26b46e748..29021febbc36 100644 --- a/l1-contracts/src/core/libraries/RollupLibs/HeaderLib.sol +++ b/l1-contracts/src/core/libraries/RollupLibs/HeaderLib.sol @@ -203,8 +203,8 @@ library HeaderLib { fields[24] = bytes32(_header.totalManaUsed); // fail if the header structure has changed without updating this function require( - fields.length == Constants.HEADER_LENGTH, - Errors.HeaderLib__InvalidHeaderSize(Constants.HEADER_LENGTH, fields.length) + fields.length == Constants.BLOCK_HEADER_LENGTH, + Errors.HeaderLib__InvalidHeaderSize(Constants.BLOCK_HEADER_LENGTH, fields.length) ); return fields; @@ -234,7 +234,7 @@ library HeaderLib { // When we verify root proofs, this method can be removed => no need for separate named error require( fields.length == Constants.GLOBAL_VARIABLES_LENGTH, - Errors.HeaderLib__InvalidHeaderSize(Constants.HEADER_LENGTH, fields.length) + Errors.HeaderLib__InvalidHeaderSize(Constants.BLOCK_HEADER_LENGTH, fields.length) ); return fields; diff --git a/l1-contracts/src/core/staking/Staking.sol b/l1-contracts/src/core/staking/Staking.sol index 7f0a0c3b4465..0d75e74e1c1a 100644 --- a/l1-contracts/src/core/staking/Staking.sol +++ b/l1-contracts/src/core/staking/Staking.sol @@ -3,7 +3,12 @@ pragma solidity >=0.8.27; import { - IStaking, ValidatorInfo, Exit, Status, OperatorInfo + IStaking, + ValidatorInfo, + Exit, + Status, + OperatorInfo, + StakingStorage } from "@aztec/core/interfaces/IStaking.sol"; import {Errors} from "@aztec/core/libraries/Errors.sol"; import {Timestamp} from "@aztec/core/libraries/TimeMath.sol"; @@ -22,11 +27,7 @@ contract Staking is IStaking { IERC20 public immutable STAKING_ASSET; uint256 public immutable MINIMUM_STAKE; - // address <=> index - EnumerableSet.AddressSet internal attesters; - - mapping(address attester => ValidatorInfo) internal info; - mapping(address attester => Exit) internal exits; + 
StakingStorage internal stakingStore; constructor(address _slasher, IERC20 _stakingAsset, uint256 _minimumStake) { SLASHER = _slasher; @@ -35,10 +36,10 @@ contract Staking is IStaking { } function finaliseWithdraw(address _attester) external override(IStaking) { - ValidatorInfo storage validator = info[_attester]; + ValidatorInfo storage validator = stakingStore.info[_attester]; require(validator.status == Status.EXITING, Errors.Staking__NotExiting(_attester)); - Exit storage exit = exits[_attester]; + Exit storage exit = stakingStore.exits[_attester]; require( exit.exitableAt <= Timestamp.wrap(block.timestamp), Errors.Staking__WithdrawalNotUnlockedYet(Timestamp.wrap(block.timestamp), exit.exitableAt) @@ -47,8 +48,8 @@ contract Staking is IStaking { uint256 amount = validator.stake; address recipient = exit.recipient; - delete exits[_attester]; - delete info[_attester]; + delete stakingStore.exits[_attester]; + delete stakingStore.info[_attester]; STAKING_ASSET.transfer(recipient, amount); @@ -58,14 +59,14 @@ contract Staking is IStaking { function slash(address _attester, uint256 _amount) external override(IStaking) { require(msg.sender == SLASHER, Errors.Staking__NotSlasher(SLASHER, msg.sender)); - ValidatorInfo storage validator = info[_attester]; + ValidatorInfo storage validator = stakingStore.info[_attester]; require(validator.status != Status.NONE, Errors.Staking__NoOneToSlash(_attester)); // There is a special, case, if exiting and past the limit, it is untouchable! require( !( validator.status == Status.EXITING - && exits[_attester].exitableAt <= Timestamp.wrap(block.timestamp) + && stakingStore.exits[_attester].exitableAt <= Timestamp.wrap(block.timestamp) ), Errors.Staking__CannotSlashExitedStake(_attester) ); @@ -75,7 +76,7 @@ contract Staking is IStaking { // When LIVING, he can only start exiting, we don't "really" exit him, because that cost // gas and cost edge cases around recipient, so lets just avoid that. 
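Aside (the hunk resumes below): the exit path this file reworks is two-step: starting a withdrawal records an `Exit` that unlocks only after the delay, and `finaliseWithdraw` then pays out the full stake. A hedged Foundry sketch, not part of the PR, assuming the `StakingBase` harness from `test/staking/base.t.sol` and that the exit is started via IStaking's `initiateWithdraw(attester, recipient)` entry point (the function body appears in this hunk but its name is not visible here):

```solidity
function test_exitPaysOutFullStake() external {
  address attester = address(0xa77e);
  address recipient = address(0xbeef);

  stakingAsset.mint(address(this), MINIMUM_STAKE);
  stakingAsset.approve(address(staking), MINIMUM_STAKE);
  staking.deposit(attester, attester, address(this), MINIMUM_STAKE);

  // We registered ourselves as the withdrawer, so we may start the exit.
  staking.initiateWithdraw(attester, recipient);

  // Jump past the unlock time; 365 days is assumed to exceed EXIT_DELAY.
  vm.warp(block.timestamp + 365 days);
  staking.finaliseWithdraw(attester);

  // Exits are always for the full stake.
  assertEq(stakingAsset.balanceOf(recipient), MINIMUM_STAKE, "full stake returned");
}
```
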
if (validator.status == Status.VALIDATING && validator.stake < MINIMUM_STAKE) { - require(attesters.remove(_attester), Errors.Staking__FailedToRemove(_attester)); + require(stakingStore.attesters.remove(_attester), Errors.Staking__FailedToRemove(_attester)); validator.status = Status.LIVING; } @@ -88,28 +89,11 @@ contract Staking is IStaking { override(IStaking) returns (ValidatorInfo memory) { - return info[_attester]; - } - - function getProposerForAttester(address _attester) - external - view - override(IStaking) - returns (address) - { - return info[_attester].proposer; + return stakingStore.info[_attester]; } function getExit(address _attester) external view override(IStaking) returns (Exit memory) { - return exits[_attester]; - } - - function getAttesterAtIndex(uint256 _index) external view override(IStaking) returns (address) { - return attesters.at(_index); - } - - function getProposerAtIndex(uint256 _index) external view override(IStaking) returns (address) { - return info[attesters.at(_index)].proposer; + return stakingStore.exits[_attester]; } function getOperatorAtIndex(uint256 _index) @@ -118,8 +102,8 @@ contract Staking is IStaking { override(IStaking) returns (OperatorInfo memory) { - address attester = attesters.at(_index); - return OperatorInfo({proposer: info[attester].proposer, attester: attester}); + address attester = stakingStore.attesters.at(_index); + return OperatorInfo({proposer: stakingStore.info[attester].proposer, attester: attester}); } function deposit(address _attester, address _proposer, address _withdrawer, uint256 _amount) @@ -129,12 +113,15 @@ contract Staking is IStaking { { require(_amount >= MINIMUM_STAKE, Errors.Staking__InsufficientStake(_amount, MINIMUM_STAKE)); STAKING_ASSET.transferFrom(msg.sender, address(this), _amount); - require(info[_attester].status == Status.NONE, Errors.Staking__AlreadyRegistered(_attester)); - require(attesters.add(_attester), Errors.Staking__AlreadyActive(_attester)); + require( + stakingStore.info[_attester].status == Status.NONE, + Errors.Staking__AlreadyRegistered(_attester) + ); + require(stakingStore.attesters.add(_attester), Errors.Staking__AlreadyActive(_attester)); // If BLS, need to check possession of private key to avoid attacks. - info[_attester] = ValidatorInfo({ + stakingStore.info[_attester] = ValidatorInfo({ stake: _amount, withdrawer: _withdrawer, proposer: _proposer, @@ -150,7 +137,7 @@ contract Staking is IStaking { override(IStaking) returns (bool) { - ValidatorInfo storage validator = info[_attester]; + ValidatorInfo storage validator = stakingStore.info[_attester]; require( msg.sender == validator.withdrawer, @@ -161,12 +148,12 @@ contract Staking is IStaking { Errors.Staking__NothingToExit(_attester) ); if (validator.status == Status.VALIDATING) { - require(attesters.remove(_attester), Errors.Staking__FailedToRemove(_attester)); + require(stakingStore.attesters.remove(_attester), Errors.Staking__FailedToRemove(_attester)); } // Note that the "amount" is not stored here, but reusing the `validators` // We always exit fully. 
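Aside: the demotion rule just above, slashing a VALIDATING attester below MINIMUM_STAKE ejects it from the active attester set and marks it LIVING, is easy to pin down in a test. A hedged Foundry sketch, not part of the PR, assuming the same `StakingBase` harness as before:

```solidity
function test_slashBelowMinimumEjectsAttester() external {
  address attester = address(0xa77e);

  stakingAsset.mint(address(this), MINIMUM_STAKE);
  stakingAsset.approve(address(staking), MINIMUM_STAKE);
  staking.deposit(attester, attester, address(this), MINIMUM_STAKE);
  assertEq(staking.getActiveAttesterCount(), 1);

  // Only the registered SLASHER may slash.
  vm.prank(SLASHER);
  staking.slash(attester, 1); // stake is now MINIMUM_STAKE - 1

  assertEq(staking.getActiveAttesterCount(), 0, "attester leaves the active set");
}
```
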
- exits[_attester] = + stakingStore.exits[_attester] = Exit({exitableAt: Timestamp.wrap(block.timestamp) + EXIT_DELAY, recipient: _recipient}); validator.status = Status.EXITING; @@ -176,6 +163,23 @@ contract Staking is IStaking { } function getActiveAttesterCount() public view override(IStaking) returns (uint256) { - return attesters.length(); + return stakingStore.attesters.length(); + } + + function getProposerForAttester(address _attester) + public + view + override(IStaking) + returns (address) + { + return stakingStore.info[_attester].proposer; + } + + function getAttesterAtIndex(uint256 _index) public view override(IStaking) returns (address) { + return stakingStore.attesters.at(_index); + } + + function getProposerAtIndex(uint256 _index) public view override(IStaking) returns (address) { + return stakingStore.info[stakingStore.attesters.at(_index)].proposer; } } diff --git a/l1-contracts/src/mock/MockFeeJuicePortal.sol b/l1-contracts/src/mock/MockFeeJuicePortal.sol index a7d56cae0e8d..0d180556754a 100644 --- a/l1-contracts/src/mock/MockFeeJuicePortal.sol +++ b/l1-contracts/src/mock/MockFeeJuicePortal.sol @@ -13,7 +13,7 @@ contract MockFeeJuicePortal is IFeeJuicePortal { IRegistry public constant REGISTRY = IRegistry(address(0)); constructor() { - UNDERLYING = new TestERC20(); + UNDERLYING = new TestERC20("test", "TEST", msg.sender); } function initialize() external override(IFeeJuicePortal) {} diff --git a/l1-contracts/src/mock/TestERC20.sol b/l1-contracts/src/mock/TestERC20.sol index 6236f94d7588..3883b407347d 100644 --- a/l1-contracts/src/mock/TestERC20.sol +++ b/l1-contracts/src/mock/TestERC20.sol @@ -2,13 +2,31 @@ // docs:start:contract pragma solidity >=0.8.27; +import {Ownable} from "@oz/access/Ownable.sol"; import {ERC20} from "@oz/token/ERC20/ERC20.sol"; -import {IMintableERC20} from "../governance/interfaces/IMintableERC20.sol"; +import {IMintableERC20} from "./../governance/interfaces/IMintableERC20.sol"; -contract TestERC20 is ERC20, IMintableERC20 { - constructor() ERC20("Portal", "PORTAL") {} +contract TestERC20 is ERC20, IMintableERC20, Ownable { + bool public freeForAll = false; - function mint(address _to, uint256 _amount) external override(IMintableERC20) { + modifier ownerOrFreeForAll() { + if (msg.sender != owner() && !freeForAll) { + revert("Not owner or free for all"); + } + _; + } + + constructor(string memory _name, string memory _symbol, address _owner) + ERC20(_name, _symbol) + Ownable(_owner) + {} + + // solhint-disable-next-line comprehensive-interface + function setFreeForAll(bool _freeForAll) external onlyOwner { + freeForAll = _freeForAll; + } + + function mint(address _to, uint256 _amount) external override(IMintableERC20) ownerOrFreeForAll { _mint(_to, _amount); } } diff --git a/l1-contracts/terraform/main.tf b/l1-contracts/terraform/main.tf index 5a720d5c204a..d619a827877f 100644 --- a/l1-contracts/terraform/main.tf +++ b/l1-contracts/terraform/main.tf @@ -57,6 +57,15 @@ output "fee_juice_contract_address" { value = var.FEE_JUICE_CONTRACT_ADDRESS } +variable "STAKING_ASSET_CONTRACT_ADDRESS" { + type = string + default = "" +} + +output "staking_asset_contract_address" { + value = var.STAKING_ASSET_CONTRACT_ADDRESS +} + variable "FEE_JUICE_PORTAL_CONTRACT_ADDRESS" { type = string default = "" diff --git a/l1-contracts/test/Rollup.t.sol b/l1-contracts/test/Rollup.t.sol index 80d85eb31150..504db52ae570 100644 --- a/l1-contracts/test/Rollup.t.sol +++ b/l1-contracts/test/Rollup.t.sol @@ -74,8 +74,12 @@ contract RollupTest is DecoderBase, TimeFns { */ 
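Aside: the test updates that follow all share one new bootstrap pattern. Validators are no longer passed to the Rollup constructor; instead the owner mints and approves the staking asset, then seeds the set via `cheat__InitialiseValidatorSet` (the deposits pull funds from the caller, which is why the approval targets the rollup). A hedged helper sketch, not part of the PR, assuming a Foundry test with `rollup` (owned by the test contract) and `testERC20` in scope, plus imports of `CheatDepositArgs` and `TestConstants`:

```solidity
function _bootstrapValidators(uint256 _count) internal {
  CheatDepositArgs[] memory args = new CheatDepositArgs[](_count);
  for (uint256 i = 0; i < _count; i++) {
    args[i] = CheatDepositArgs({
      attester: vm.addr(uint256(keccak256(abi.encode("attester", i + 1)))),
      proposer: vm.addr(uint256(keccak256(abi.encode("proposer", i + 1)))),
      withdrawer: address(this),
      amount: TestConstants.AZTEC_MINIMUM_STAKE
    });
  }
  // Each deposit pulls the staking asset from the caller: mint and approve first.
  testERC20.mint(address(this), TestConstants.AZTEC_MINIMUM_STAKE * _count);
  testERC20.approve(address(rollup), TestConstants.AZTEC_MINIMUM_STAKE * _count);
  rollup.cheat__InitialiseValidatorSet(args);
}
```
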
modifier setUpFor(string memory _name) { { + testERC20 = new TestERC20("test", "TEST", address(this)); + leo = new Leonidas( address(1), + testERC20, + TestConstants.AZTEC_MINIMUM_STAKE, TestConstants.AZTEC_SLOT_DURATION, TestConstants.AZTEC_EPOCH_DURATION, TestConstants.AZTEC_TARGET_COMMITTEE_SIZE @@ -88,7 +92,6 @@ contract RollupTest is DecoderBase, TimeFns { } registry = new Registry(address(this)); - testERC20 = new TestERC20(); feeJuicePortal = new FeeJuicePortal( address(registry), address(testERC20), bytes32(Constants.FEE_JUICE_ADDRESS) ); @@ -98,7 +101,7 @@ contract RollupTest is DecoderBase, TimeFns { testERC20.mint(address(rewardDistributor), 1e6 ether); rollup = new Rollup( - feeJuicePortal, rewardDistributor, bytes32(0), bytes32(0), address(this), new address[](0) + feeJuicePortal, rewardDistributor, testERC20, bytes32(0), bytes32(0), address(this) ); inbox = Inbox(address(rollup.INBOX())); outbox = Outbox(address(rollup.OUTBOX())); diff --git a/l1-contracts/test/TestERC20.t.sol b/l1-contracts/test/TestERC20.t.sol index 3b7abc4cfa70..b95dcd9d49d5 100644 --- a/l1-contracts/test/TestERC20.t.sol +++ b/l1-contracts/test/TestERC20.t.sol @@ -7,11 +7,18 @@ contract TestERC20Test is Test { TestERC20 testERC20; function setUp() public { - testERC20 = new TestERC20(); + testERC20 = new TestERC20("test", "TEST", address(this)); } function test_mint() public { testERC20.mint(address(this), 100); assertEq(testERC20.balanceOf(address(this)), 100); } + + function test_mint_only_owner(address _caller) public { + vm.assume(_caller != address(this)); + vm.expectRevert(); + vm.prank(_caller); + testERC20.mint(address(this), 100); + } } diff --git a/l1-contracts/test/fee_portal/depositToAztecPublic.t.sol b/l1-contracts/test/fee_portal/depositToAztecPublic.t.sol index fc68df1a4441..8eb7bcd301c7 100644 --- a/l1-contracts/test/fee_portal/depositToAztecPublic.t.sol +++ b/l1-contracts/test/fee_portal/depositToAztecPublic.t.sol @@ -27,16 +27,15 @@ contract DepositToAztecPublic is Test { function setUp() public { registry = new Registry(OWNER); - token = new TestERC20(); + token = new TestERC20("test", "TEST", address(this)); feeJuicePortal = new FeeJuicePortal(address(registry), address(token), bytes32(Constants.FEE_JUICE_ADDRESS)); token.mint(address(feeJuicePortal), Constants.FEE_JUICE_INITIAL_MINT); feeJuicePortal.initialize(); rewardDistributor = new RewardDistributor(token, registry, address(this)); - rollup = new Rollup( - feeJuicePortal, rewardDistributor, bytes32(0), bytes32(0), address(this), new address[](0) - ); + rollup = + new Rollup(feeJuicePortal, rewardDistributor, token, bytes32(0), bytes32(0), address(this)); vm.prank(OWNER); registry.upgrade(address(rollup)); @@ -67,9 +66,8 @@ contract DepositToAztecPublic is Test { uint256 numberOfRollups = bound(_numberOfRollups, 1, 5); for (uint256 i = 0; i < numberOfRollups; i++) { - Rollup freshRollup = new Rollup( - feeJuicePortal, rewardDistributor, bytes32(0), bytes32(0), address(this), new address[](0) - ); + Rollup freshRollup = + new Rollup(feeJuicePortal, rewardDistributor, token, bytes32(0), bytes32(0), address(this)); vm.prank(OWNER); registry.upgrade(address(freshRollup)); } diff --git a/l1-contracts/test/fee_portal/distributeFees.t.sol b/l1-contracts/test/fee_portal/distributeFees.t.sol index bfb366e21c73..0308b2d9433a 100644 --- a/l1-contracts/test/fee_portal/distributeFees.t.sol +++ b/l1-contracts/test/fee_portal/distributeFees.t.sol @@ -26,16 +26,15 @@ contract DistributeFees is Test { function setUp() public { registry = new 
Registry(OWNER); - token = new TestERC20(); + token = new TestERC20("test", "TEST", address(this)); feeJuicePortal = new FeeJuicePortal(address(registry), address(token), bytes32(Constants.FEE_JUICE_ADDRESS)); token.mint(address(feeJuicePortal), Constants.FEE_JUICE_INITIAL_MINT); feeJuicePortal.initialize(); rewardDistributor = new RewardDistributor(token, registry, address(this)); - rollup = new Rollup( - feeJuicePortal, rewardDistributor, bytes32(0), bytes32(0), address(this), new address[](0) - ); + rollup = + new Rollup(feeJuicePortal, rewardDistributor, token, bytes32(0), bytes32(0), address(this)); vm.prank(OWNER); registry.upgrade(address(rollup)); @@ -74,9 +73,8 @@ contract DistributeFees is Test { uint256 numberOfRollups = bound(_numberOfRollups, 1, 5); for (uint256 i = 0; i < numberOfRollups; i++) { - Rollup freshRollup = new Rollup( - feeJuicePortal, rewardDistributor, bytes32(0), bytes32(0), address(this), new address[](0) - ); + Rollup freshRollup = + new Rollup(feeJuicePortal, rewardDistributor, token, bytes32(0), bytes32(0), address(this)); vm.prank(OWNER); registry.upgrade(address(freshRollup)); } diff --git a/l1-contracts/test/fees/FeeRollup.t.sol b/l1-contracts/test/fees/FeeRollup.t.sol index ba655ed2e748..8331d66d7eed 100644 --- a/l1-contracts/test/fees/FeeRollup.t.sol +++ b/l1-contracts/test/fees/FeeRollup.t.sol @@ -114,21 +114,24 @@ contract FeeRollupTest is FeeModelTestPoints, DecoderBase { vm.fee(l1Metadata[0].base_fee); vm.blobBaseFee(l1Metadata[0].blob_fee); - asset = new TestERC20(); + asset = new TestERC20("test", "TEST", address(this)); fakeCanonical = new FakeCanonical(IERC20(address(asset))); + asset.transferOwnership(address(fakeCanonical)); + rollup = new Rollup( IFeeJuicePortal(address(fakeCanonical)), IRewardDistributor(address(fakeCanonical)), + asset, bytes32(0), bytes32(0), address(this), - new address[](0), Config({ aztecSlotDuration: SLOT_DURATION, aztecEpochDuration: EPOCH_DURATION, targetCommitteeSize: 48, - aztecEpochProofClaimWindowInL2Slots: 16 + aztecEpochProofClaimWindowInL2Slots: 16, + minimumStake: 100 ether }) ); fakeCanonical.setCanonicalRollup(address(rollup)); diff --git a/l1-contracts/test/governance/coin-issuer/Base.t.sol b/l1-contracts/test/governance/coin-issuer/Base.t.sol index b2812e4c7e91..ada73d4d4acd 100644 --- a/l1-contracts/test/governance/coin-issuer/Base.t.sol +++ b/l1-contracts/test/governance/coin-issuer/Base.t.sol @@ -14,7 +14,9 @@ contract CoinIssuerBase is Test { CoinIssuer internal nom; function _deploy(uint256 _rate) internal { - token = IMintableERC20(address(new TestERC20())); + TestERC20 testERC20 = new TestERC20("test", "TEST", address(this)); + token = IMintableERC20(address(testERC20)); nom = new CoinIssuer(token, _rate, address(this)); + testERC20.transferOwnership(address(nom)); } } diff --git a/l1-contracts/test/governance/governance/base.t.sol b/l1-contracts/test/governance/governance/base.t.sol index 28e51b0e934b..cc5a9878a068 100644 --- a/l1-contracts/test/governance/governance/base.t.sol +++ b/l1-contracts/test/governance/governance/base.t.sol @@ -35,7 +35,7 @@ contract GovernanceBase is TestBase { uint256 proposalId; function setUp() public virtual { - token = IMintableERC20(address(new TestERC20())); + token = IMintableERC20(address(new TestERC20("test", "TEST", address(this)))); registry = new Registry(address(this)); governanceProposer = new GovernanceProposer(registry, 677, 1000); diff --git a/l1-contracts/test/governance/reward-distributor/Base.t.sol 
b/l1-contracts/test/governance/reward-distributor/Base.t.sol index 8b3c6c511b1c..4c6014d5a2ce 100644 --- a/l1-contracts/test/governance/reward-distributor/Base.t.sol +++ b/l1-contracts/test/governance/reward-distributor/Base.t.sol @@ -16,7 +16,7 @@ contract RewardDistributorBase is Test { RewardDistributor internal rewardDistributor; function setUp() public { - token = IMintableERC20(address(new TestERC20())); + token = IMintableERC20(address(new TestERC20("test", "TEST", address(this)))); registry = new Registry(address(this)); rewardDistributor = new RewardDistributor(token, registry, address(this)); } diff --git a/l1-contracts/test/governance/scenario/UpgradeGovernanceProposerTest.t.sol b/l1-contracts/test/governance/scenario/UpgradeGovernanceProposerTest.t.sol index aea558c9f564..8504653da175 100644 --- a/l1-contracts/test/governance/scenario/UpgradeGovernanceProposerTest.t.sol +++ b/l1-contracts/test/governance/scenario/UpgradeGovernanceProposerTest.t.sol @@ -18,6 +18,8 @@ import {ProposalLib} from "@aztec/governance/libraries/ProposalLib.sol"; import {Errors} from "@aztec/governance/libraries/Errors.sol"; import {NewGovernanceProposerPayload} from "./NewGovernanceProposerPayload.sol"; import {RewardDistributor} from "@aztec/governance/RewardDistributor.sol"; +import {CheatDepositArgs} from "@aztec/core/interfaces/IRollup.sol"; +import {TestConstants} from "../../harnesses/TestConstants.sol"; /** * @title UpgradeGovernanceProposerTest @@ -44,32 +46,36 @@ contract UpgradeGovernanceProposerTest is TestBase { address internal constant EMPEROR = address(uint160(bytes20("EMPEROR"))); function setUp() external { - token = IMintableERC20(address(new TestERC20())); + token = IMintableERC20(address(new TestERC20("test", "TEST", address(this)))); registry = new Registry(address(this)); governanceProposer = new GovernanceProposer(registry, 7, 10); governance = new Governance(token, address(governanceProposer)); - address[] memory initialValidators = new address[](VALIDATOR_COUNT); + CheatDepositArgs[] memory initialValidators = new CheatDepositArgs[](VALIDATOR_COUNT); for (uint256 i = 1; i <= VALIDATOR_COUNT; i++) { uint256 privateKey = uint256(keccak256(abi.encode("validator", i))); address validator = vm.addr(privateKey); privateKeys[validator] = privateKey; validators[i - 1] = validator; - initialValidators[i - 1] = validator; + initialValidators[i - 1] = CheatDepositArgs({ + attester: validator, + proposer: validator, + withdrawer: validator, + amount: TestConstants.AZTEC_MINIMUM_STAKE + }); } RewardDistributor rewardDistributor = new RewardDistributor(token, registry, address(this)); rollup = new Rollup( - new MockFeeJuicePortal(), - rewardDistributor, - bytes32(0), - bytes32(0), - address(this), - initialValidators + new MockFeeJuicePortal(), rewardDistributor, token, bytes32(0), bytes32(0), address(this) ); + token.mint(address(this), TestConstants.AZTEC_MINIMUM_STAKE * VALIDATOR_COUNT); + token.approve(address(rollup), TestConstants.AZTEC_MINIMUM_STAKE * VALIDATOR_COUNT); + rollup.cheat__InitialiseValidatorSet(initialValidators); + registry.upgrade(address(rollup)); registry.transferOwnership(address(governance)); diff --git a/l1-contracts/test/harnesses/Leonidas.sol b/l1-contracts/test/harnesses/Leonidas.sol index f19deae25508..a7c78f304b10 100644 --- a/l1-contracts/test/harnesses/Leonidas.sol +++ b/l1-contracts/test/harnesses/Leonidas.sol @@ -4,11 +4,14 @@ pragma solidity >=0.8.27; import {Leonidas as RealLeonidas} from "@aztec/core/Leonidas.sol"; import {TestConstants} from 
"./TestConstants.sol"; +import {TestERC20} from "@aztec/mock/TestERC20.sol"; contract Leonidas is RealLeonidas { constructor(address _ares) RealLeonidas( _ares, + new TestERC20("test", "TEST", address(this)), + 100e18, TestConstants.AZTEC_SLOT_DURATION, TestConstants.AZTEC_EPOCH_DURATION, TestConstants.AZTEC_TARGET_COMMITTEE_SIZE diff --git a/l1-contracts/test/harnesses/Rollup.sol b/l1-contracts/test/harnesses/Rollup.sol index 27f78d3864dd..41d72b20de9f 100644 --- a/l1-contracts/test/harnesses/Rollup.sol +++ b/l1-contracts/test/harnesses/Rollup.sol @@ -6,28 +6,30 @@ import {IFeeJuicePortal} from "@aztec/core/interfaces/IFeeJuicePortal.sol"; import {IRewardDistributor} from "@aztec/governance/interfaces/IRewardDistributor.sol"; import {Rollup as RealRollup, Config} from "@aztec/core/Rollup.sol"; import {TestConstants} from "./TestConstants.sol"; +import {IERC20} from "@oz/token/ERC20/IERC20.sol"; contract Rollup is RealRollup { constructor( IFeeJuicePortal _fpcJuicePortal, IRewardDistributor _rewardDistributor, + IERC20 _stakingAsset, bytes32 _vkTreeRoot, bytes32 _protocolContractTreeRoot, - address _ares, - address[] memory _validators + address _ares ) RealRollup( _fpcJuicePortal, _rewardDistributor, + _stakingAsset, _vkTreeRoot, _protocolContractTreeRoot, _ares, - _validators, Config({ aztecSlotDuration: TestConstants.AZTEC_SLOT_DURATION, aztecEpochDuration: TestConstants.AZTEC_EPOCH_DURATION, targetCommitteeSize: TestConstants.AZTEC_TARGET_COMMITTEE_SIZE, - aztecEpochProofClaimWindowInL2Slots: TestConstants.AZTEC_EPOCH_PROOF_CLAIM_WINDOW_IN_L2_SLOTS + aztecEpochProofClaimWindowInL2Slots: TestConstants.AZTEC_EPOCH_PROOF_CLAIM_WINDOW_IN_L2_SLOTS, + minimumStake: TestConstants.AZTEC_MINIMUM_STAKE }) ) {} diff --git a/l1-contracts/test/harnesses/TestConstants.sol b/l1-contracts/test/harnesses/TestConstants.sol index 4a79b3c97e7d..371a2d8f594e 100644 --- a/l1-contracts/test/harnesses/TestConstants.sol +++ b/l1-contracts/test/harnesses/TestConstants.sol @@ -9,4 +9,5 @@ library TestConstants { uint256 internal constant AZTEC_EPOCH_DURATION = 16; uint256 internal constant AZTEC_TARGET_COMMITTEE_SIZE = 48; uint256 internal constant AZTEC_EPOCH_PROOF_CLAIM_WINDOW_IN_L2_SLOTS = 13; + uint256 internal constant AZTEC_MINIMUM_STAKE = 100e18; } diff --git a/l1-contracts/test/portals/TokenPortal.t.sol b/l1-contracts/test/portals/TokenPortal.t.sol index 990f3ec6512b..c043d69d0cda 100644 --- a/l1-contracts/test/portals/TokenPortal.t.sol +++ b/l1-contracts/test/portals/TokenPortal.t.sol @@ -60,15 +60,10 @@ contract TokenPortalTest is Test { function setUp() public { registry = new Registry(address(this)); - testERC20 = new TestERC20(); + testERC20 = new TestERC20("test", "TEST", address(this)); rewardDistributor = new RewardDistributor(testERC20, registry, address(this)); rollup = new Rollup( - new MockFeeJuicePortal(), - rewardDistributor, - bytes32(0), - bytes32(0), - address(this), - new address[](0) + new MockFeeJuicePortal(), rewardDistributor, testERC20, bytes32(0), bytes32(0), address(this) ); inbox = rollup.INBOX(); outbox = rollup.OUTBOX(); diff --git a/l1-contracts/test/portals/UniswapPortal.t.sol b/l1-contracts/test/portals/UniswapPortal.t.sol index ac646e17bac7..fc91ef5d1588 100644 --- a/l1-contracts/test/portals/UniswapPortal.t.sol +++ b/l1-contracts/test/portals/UniswapPortal.t.sol @@ -55,12 +55,7 @@ contract UniswapPortalTest is Test { registry = new Registry(address(this)); RewardDistributor rewardDistributor = new RewardDistributor(DAI, registry, address(this)); rollup = new Rollup( - new 
MockFeeJuicePortal(), - rewardDistributor, - bytes32(0), - bytes32(0), - address(this), - new address[](0) + new MockFeeJuicePortal(), rewardDistributor, DAI, bytes32(0), bytes32(0), address(this) ); registry.upgrade(address(rollup)); diff --git a/l1-contracts/test/prover-coordination/ProofCommitmentEscrow.t.sol b/l1-contracts/test/prover-coordination/ProofCommitmentEscrow.t.sol index 45178dc9b9ea..18c07487dfea 100644 --- a/l1-contracts/test/prover-coordination/ProofCommitmentEscrow.t.sol +++ b/l1-contracts/test/prover-coordination/ProofCommitmentEscrow.t.sol @@ -31,7 +31,7 @@ contract TestProofCommitmentEscrow is Test { } function setUp() public { - TOKEN = new TestERC20(); + TOKEN = new TestERC20("test", "TEST", address(this)); ESCROW = new ProofCommitmentEscrow( TOKEN, address(this), TestConstants.AZTEC_SLOT_DURATION, TestConstants.AZTEC_EPOCH_DURATION ); diff --git a/l1-contracts/test/sparta/Sparta.t.sol b/l1-contracts/test/sparta/Sparta.t.sol index ea0c94f7d49e..dc5340a39aea 100644 --- a/l1-contracts/test/sparta/Sparta.t.sol +++ b/l1-contracts/test/sparta/Sparta.t.sol @@ -13,7 +13,7 @@ import {Outbox} from "@aztec/core/messagebridge/Outbox.sol"; import {Errors} from "@aztec/core/libraries/Errors.sol"; import {Registry} from "@aztec/governance/Registry.sol"; import {Rollup} from "../harnesses/Rollup.sol"; -import {Leonidas} from "../harnesses/Leonidas.sol"; +import {Leonidas} from "@aztec/core/Leonidas.sol"; import {NaiveMerkle} from "../merkle/Naive.sol"; import {MerkleTestUtil} from "../merkle/TestUtil.sol"; import {TestERC20} from "@aztec/mock/TestERC20.sol"; @@ -23,6 +23,8 @@ import {MockFeeJuicePortal} from "@aztec/mock/MockFeeJuicePortal.sol"; import { ProposeArgs, OracleInput, ProposeLib } from "@aztec/core/libraries/RollupLibs/ProposeLib.sol"; +import {TestConstants} from "../harnesses/TestConstants.sol"; +import {CheatDepositArgs} from "@aztec/core/interfaces/IRollup.sol"; import {Slot, Epoch, SlotLib, EpochLib} from "@aztec/core/libraries/TimeMath.sol"; import {RewardDistributor} from "@aztec/governance/RewardDistributor.sol"; @@ -51,7 +53,9 @@ contract SpartaTest is DecoderBase { TestERC20 internal testERC20; RewardDistributor internal rewardDistributor; Signature internal emptySignature; - mapping(address validator => uint256 privateKey) internal privateKeys; + mapping(address attester => uint256 privateKey) internal attesterPrivateKeys; + mapping(address proposer => uint256 privateKey) internal proposerPrivateKeys; + mapping(address proposer => address attester) internal proposerToAttester; mapping(address => bool) internal _seenValidators; mapping(address => bool) internal _seenCommittee; @@ -61,7 +65,15 @@ contract SpartaTest is DecoderBase { modifier setup(uint256 _validatorCount) { string memory _name = "mixed_block_1"; { - Leonidas leonidas = new Leonidas(address(1)); + Leonidas leonidas = new Leonidas( + address(1), + testERC20, + TestConstants.AZTEC_MINIMUM_STAKE, + TestConstants.AZTEC_SLOT_DURATION, + TestConstants.AZTEC_EPOCH_DURATION, + TestConstants.AZTEC_TARGET_COMMITTEE_SIZE + ); + DecoderBase.Full memory full = load(_name); uint256 slotNumber = full.block.decodedHeader.globalVariables.slotNumber; uint256 initialTime = @@ -69,25 +81,37 @@ contract SpartaTest is DecoderBase { vm.warp(initialTime); } - address[] memory initialValidators = new address[](_validatorCount); + CheatDepositArgs[] memory initialValidators = new CheatDepositArgs[](_validatorCount); + for (uint256 i = 1; i < _validatorCount + 1; i++) { - uint256 privateKey = 
uint256(keccak256(abi.encode("validator", i))); - address validator = vm.addr(privateKey); - privateKeys[validator] = privateKey; - initialValidators[i - 1] = validator; + uint256 attesterPrivateKey = uint256(keccak256(abi.encode("attester", i))); + address attester = vm.addr(attesterPrivateKey); + attesterPrivateKeys[attester] = attesterPrivateKey; + uint256 proposerPrivateKey = uint256(keccak256(abi.encode("proposer", i))); + address proposer = vm.addr(proposerPrivateKey); + proposerPrivateKeys[proposer] = proposerPrivateKey; + + proposerToAttester[proposer] = attester; + + initialValidators[i - 1] = CheatDepositArgs({ + attester: attester, + proposer: proposer, + withdrawer: address(this), + amount: TestConstants.AZTEC_MINIMUM_STAKE + }); } - testERC20 = new TestERC20(); + testERC20 = new TestERC20("test", "TEST", address(this)); Registry registry = new Registry(address(this)); rewardDistributor = new RewardDistributor(testERC20, registry, address(this)); rollup = new Rollup( - new MockFeeJuicePortal(), - rewardDistributor, - bytes32(0), - bytes32(0), - address(this), - initialValidators + new MockFeeJuicePortal(), rewardDistributor, testERC20, bytes32(0), bytes32(0), address(this) ); + + testERC20.mint(address(this), TestConstants.AZTEC_MINIMUM_STAKE * _validatorCount); + testERC20.approve(address(rollup), TestConstants.AZTEC_MINIMUM_STAKE * _validatorCount); + rollup.cheat__InitialiseValidatorSet(initialValidators); + inbox = Inbox(address(rollup.INBOX())); outbox = Outbox(address(rollup.OUTBOX())); @@ -97,15 +121,15 @@ contract SpartaTest is DecoderBase { _; } - function testInitialCommitteMatch() public setup(4) { - address[] memory validators = rollup.getValidators(); + function testInitialCommitteeMatch() public setup(4) { + address[] memory attesters = rollup.getAttesters(); address[] memory committee = rollup.getCurrentEpochCommittee(); assertEq(rollup.getCurrentEpoch(), 0); - assertEq(validators.length, 4, "Invalid validator set size"); + assertEq(attesters.length, 4, "Invalid validator set size"); assertEq(committee.length, 4, "invalid committee set size"); - for (uint256 i = 0; i < validators.length; i++) { - _seenValidators[validators[i]] = true; + for (uint256 i = 0; i < attesters.length; i++) { + _seenValidators[attesters[i]] = true; } for (uint256 i = 0; i < committee.length; i++) { @@ -114,8 +138,10 @@ contract SpartaTest is DecoderBase { _seenCommittee[committee[i]] = true; } + // The proposer is not necessarily an attester, we have to map it back. We can do this here + // because we created a 1:1 link. 
In practice, there could be multiple attesters for the same proposer address proposer = rollup.getCurrentProposer(); - assertTrue(_seenCommittee[proposer]); + assertTrue(_seenCommittee[proposerToAttester[proposer]]); } function testProposerForNonSetupEpoch(uint8 _epochsToJump) public setup(4) { @@ -129,14 +155,18 @@ contract SpartaTest is DecoderBase { address expectedProposer = rollup.getCurrentProposer(); // Add a validator which will also setup the epoch - rollup.addValidator(address(0xdead)); + testERC20.mint(address(this), TestConstants.AZTEC_MINIMUM_STAKE); + testERC20.approve(address(rollup), TestConstants.AZTEC_MINIMUM_STAKE); + rollup.deposit( + address(0xdead), address(0xdead), address(0xdead), TestConstants.AZTEC_MINIMUM_STAKE + ); address actualProposer = rollup.getCurrentProposer(); assertEq(expectedProposer, actualProposer, "Invalid proposer"); } function testValidatorSetLargerThanCommittee(bool _insufficientSigs) public setup(100) { - assertGt(rollup.getValidators().length, rollup.TARGET_COMMITTEE_SIZE(), "Not enough validators"); + assertGt(rollup.getAttesters().length, rollup.TARGET_COMMITTEE_SIZE(), "Not enough validators"); uint256 committeeSize = rollup.TARGET_COMMITTEE_SIZE() * 2 / 3 + (_insufficientSigs ? 0 : 1); _testBlock("mixed_block_1", _insufficientSigs, committeeSize, false); @@ -302,7 +332,7 @@ contract SpartaTest is DecoderBase { view returns (Signature memory) { - uint256 privateKey = privateKeys[_signer]; + uint256 privateKey = attesterPrivateKeys[_signer]; bytes32 digest = _digest.toEthSignedMessageHash(); (uint8 v, bytes32 r, bytes32 s) = vm.sign(privateKey, digest); diff --git a/l1-contracts/test/staking/StakingCheater.sol b/l1-contracts/test/staking/StakingCheater.sol index 224c732c6c98..ba89e1e07ab5 100644 --- a/l1-contracts/test/staking/StakingCheater.sol +++ b/l1-contracts/test/staking/StakingCheater.sol @@ -14,14 +14,14 @@ contract StakingCheater is Staking { {} function cheat__SetStatus(address _attester, Status _status) external { - info[_attester].status = _status; + stakingStore.info[_attester].status = _status; } function cheat__AddAttester(address _attester) external { - attesters.add(_attester); + stakingStore.attesters.add(_attester); } function cheat__RemoveAttester(address _attester) external { - attesters.remove(_attester); + stakingStore.attesters.remove(_attester); } } diff --git a/l1-contracts/test/staking/base.t.sol b/l1-contracts/test/staking/base.t.sol index e47b6e8d24ae..6aa8eaa8ca4b 100644 --- a/l1-contracts/test/staking/base.t.sol +++ b/l1-contracts/test/staking/base.t.sol @@ -19,7 +19,7 @@ contract StakingBase is TestBase { address internal constant SLASHER = address(bytes20("SLASHER")); function setUp() public virtual { - stakingAsset = new TestERC20(); + stakingAsset = new TestERC20("test", "TEST", address(this)); staking = new StakingCheater(SLASHER, stakingAsset, MINIMUM_STAKE); } } diff --git a/noir-projects/Earthfile b/noir-projects/Earthfile index 4627c7477039..0a876ac39f7f 100644 --- a/noir-projects/Earthfile +++ b/noir-projects/Earthfile @@ -20,6 +20,7 @@ source: RUN yarn COPY mega_honk_circuits.json . + COPY ivc_integration_circuits.json . COPY --dir aztec-nr noir-contracts noir-protocol-circuits mock-protocol-circuits scripts . 
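Aside, circling back to the Sparta test changes above: the essence of the attester/proposer split is that committees are made of attesters, while `getCurrentProposer()` returns the proposer address registered for the sampled attester, so membership checks must map the proposer back. A minimal sketch of that check, reusing the test's own `proposerToAttester` and `_seenCommittee` mappings:

```solidity
address proposer = rollup.getCurrentProposer();
address attester = proposerToAttester[proposer]; // 1:1 link set up in the test
assertTrue(_seenCommittee[attester], "proposer's attester must sit in the committee");
```
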
build-contracts: diff --git a/noir-projects/aztec-nr/.gitrepo b/noir-projects/aztec-nr/.gitrepo index a617827c0f52..3e85aca9c9d3 100644 --- a/noir-projects/aztec-nr/.gitrepo +++ b/noir-projects/aztec-nr/.gitrepo @@ -6,7 +6,7 @@ [subrepo] remote = https://github.com/AztecProtocol/aztec-nr branch = master - commit = 86b4e7ef6f4263e8f60f8a57b6d6e371ca3b0146 + commit = bd578e319f2ea55ad6d9dd0f834dbf7af694ee32 method = merge cmdver = 0.4.6 - parent = 54b261ed46490b61ea7d0d01360dd07392c3c200 + parent = 44aa83aa1fed3afb839e19111d9a30b05c0e1aad diff --git a/noir-projects/aztec-nr/aztec/src/context/inputs/private_context_inputs.nr b/noir-projects/aztec-nr/aztec/src/context/inputs/private_context_inputs.nr index 232972417cf0..afbd1970bc05 100644 --- a/noir-projects/aztec-nr/aztec/src/context/inputs/private_context_inputs.nr +++ b/noir-projects/aztec-nr/aztec/src/context/inputs/private_context_inputs.nr @@ -1,5 +1,5 @@ use dep::protocol_types::{ - abis::call_context::CallContext, header::Header, traits::Empty, + abis::call_context::CallContext, block_header::BlockHeader, traits::Empty, transaction::tx_context::TxContext, }; @@ -7,7 +7,7 @@ use dep::protocol_types::{ // docs:start:private-context-inputs pub struct PrivateContextInputs { pub call_context: CallContext, - pub historical_header: Header, + pub historical_header: BlockHeader, pub tx_context: TxContext, pub start_side_effect_counter: u32, } @@ -17,7 +17,7 @@ impl Empty for PrivateContextInputs { fn empty() -> Self { PrivateContextInputs { call_context: CallContext::empty(), - historical_header: Header::empty(), + historical_header: BlockHeader::empty(), tx_context: TxContext::empty(), start_side_effect_counter: 0 as u32, } diff --git a/noir-projects/aztec-nr/aztec/src/context/private_context.nr b/noir-projects/aztec-nr/aztec/src/context/private_context.nr index 8284ebc74dfc..b2c94e614274 100644 --- a/noir-projects/aztec-nr/aztec/src/context/private_context.nr +++ b/noir-projects/aztec-nr/aztec/src/context/private_context.nr @@ -5,12 +5,12 @@ use crate::{ messaging::process_l1_to_l2_message, oracle::{ arguments, + block_header::get_block_header_at, call_private_function::call_private_function_internal, enqueue_public_function_call::{ enqueue_public_function_call_internal, notify_set_min_revertible_side_effect_counter, set_public_teardown_function_call_internal, }, - header::get_header_at, key_validation_request::get_key_validation_request, returns::pack_returns, }, @@ -33,6 +33,7 @@ use dep::protocol_types::{ validation_requests::{KeyValidationRequest, KeyValidationRequestAndGenerator}, }, address::{AztecAddress, EthAddress}, + block_header::BlockHeader, constants::{ MAX_CONTRACT_CLASS_LOGS_PER_CALL, MAX_ENQUEUED_CALLS_PER_CALL, MAX_KEY_VALIDATION_REQUESTS_PER_CALL, MAX_L2_TO_L1_MSGS_PER_CALL, @@ -41,7 +42,6 @@ use dep::protocol_types::{ MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL, MAX_PRIVATE_LOGS_PER_CALL, PRIVATE_LOG_SIZE_IN_FIELDS, PUBLIC_DISPATCH_SELECTOR, }, - header::Header, messaging::l2_to_l1_message::L2ToL1Message, traits::Empty, }; @@ -74,7 +74,7 @@ pub struct PrivateContext { // docs:end:private-context // Header of a block whose state is used during private execution (not the block the transaction is included in). - pub historical_header: Header, + pub historical_header: BlockHeader, pub private_logs: BoundedVec, pub contract_class_logs_hashes: BoundedVec, @@ -157,14 +157,14 @@ impl PrivateContext { // Returns the header of a block whose state is used during private execution (not the block the transaction is // included in). 
- pub fn get_header(self) -> Header { + pub fn get_block_header(self) -> BlockHeader { self.historical_header } // Returns the header of an arbitrary block whose block number is less than or equal to the block number // of historical header. - pub fn get_header_at(self, block_number: u32) -> Header { - get_header_at(block_number, self) + pub fn get_block_header_at(self, block_number: u32) -> BlockHeader { + get_block_header_at(block_number, self) } pub fn set_return_hash(&mut self, returns_hasher: ArgsHasher) { @@ -585,7 +585,7 @@ impl Empty for PrivateContext { public_call_requests: BoundedVec::new(), public_teardown_call_request: PublicCallRequest::empty(), l2_to_l1_msgs: BoundedVec::new(), - historical_header: Header::empty(), + historical_header: BlockHeader::empty(), private_logs: BoundedVec::new(), contract_class_logs_hashes: BoundedVec::new(), last_key_validation_requests: [Option::none(); NUM_KEY_TYPES], diff --git a/noir-projects/aztec-nr/aztec/src/context/public_context.nr b/noir-projects/aztec-nr/aztec/src/context/public_context.nr index 2d0320b4da6c..d8095be42dd3 100644 --- a/noir-projects/aztec-nr/aztec/src/context/public_context.nr +++ b/noir-projects/aztec-nr/aztec/src/context/public_context.nr @@ -253,14 +253,6 @@ unconstrained fn address() -> AztecAddress { unconstrained fn sender() -> AztecAddress { sender_opcode() } -// TODO(9396): Remove. -unconstrained fn portal() -> EthAddress { - portal_opcode() -} -// TODO(9396): Remove. -//unconstrained fn function_selector() -> u32 { -// function_selector_opcode() -//} unconstrained fn transaction_fee() -> Field { transaction_fee_opcode() } @@ -364,13 +356,6 @@ unconstrained fn address_opcode() -> AztecAddress {} #[oracle(avmOpcodeSender)] unconstrained fn sender_opcode() -> AztecAddress {} -#[oracle(avmOpcodePortal)] -unconstrained fn portal_opcode() -> EthAddress {} - -// TODO(9396): Remove. 
-//#[oracle(avmOpcodeFunctionSelector)] -//unconstrained fn function_selector_opcode() -> u32 {} - #[oracle(avmOpcodeTransactionFee)] unconstrained fn transaction_fee_opcode() -> Field {} diff --git a/noir-projects/aztec-nr/aztec/src/history/contract_inclusion.nr b/noir-projects/aztec-nr/aztec/src/history/contract_inclusion.nr index 342be3c0522c..0fcd5ec920eb 100644 --- a/noir-projects/aztec-nr/aztec/src/history/contract_inclusion.nr +++ b/noir-projects/aztec-nr/aztec/src/history/contract_inclusion.nr @@ -1,13 +1,13 @@ use dep::protocol_types::{ - address::AztecAddress, constants::DEPLOYER_CONTRACT_ADDRESS, hash::compute_siloed_nullifier, - header::Header, + address::AztecAddress, block_header::BlockHeader, constants::DEPLOYER_CONTRACT_ADDRESS, + hash::compute_siloed_nullifier, }; trait ProveContractDeployment { - fn prove_contract_deployment(header: Header, contract_address: AztecAddress); + fn prove_contract_deployment(header: BlockHeader, contract_address: AztecAddress); } -impl ProveContractDeployment for Header { +impl ProveContractDeployment for BlockHeader { fn prove_contract_deployment(self, contract_address: AztecAddress) { // Compute deployment nullifier let nullifier = @@ -18,10 +18,10 @@ impl ProveContractDeployment for Header { } trait ProveContractNonDeployment { - fn prove_contract_non_deployment(header: Header, contract_address: AztecAddress); + fn prove_contract_non_deployment(header: BlockHeader, contract_address: AztecAddress); } -impl ProveContractNonDeployment for Header { +impl ProveContractNonDeployment for BlockHeader { fn prove_contract_non_deployment(self, contract_address: AztecAddress) { // Compute deployment nullifier let nullifier = @@ -34,10 +34,10 @@ impl ProveContractNonDeployment for Header { } trait ProveContractInitialization { - fn prove_contract_initialization(header: Header, contract_address: AztecAddress); + fn prove_contract_initialization(header: BlockHeader, contract_address: AztecAddress); } -impl ProveContractInitialization for Header { +impl ProveContractInitialization for BlockHeader { fn prove_contract_initialization(self, contract_address: AztecAddress) { // Compute initialization nullifier let nullifier = compute_siloed_nullifier(contract_address, contract_address.to_field()); @@ -47,10 +47,10 @@ impl ProveContractInitialization for Header { } trait ProveContractNonInitialization { - fn prove_contract_non_initialization(header: Header, contract_address: AztecAddress); + fn prove_contract_non_initialization(header: BlockHeader, contract_address: AztecAddress); } -impl ProveContractNonInitialization for Header { +impl ProveContractNonInitialization for BlockHeader { fn prove_contract_non_initialization(self, contract_address: AztecAddress) { // Compute initialization nullifier let nullifier = compute_siloed_nullifier(contract_address, contract_address.to_field()); diff --git a/noir-projects/aztec-nr/aztec/src/history/note_inclusion.nr b/noir-projects/aztec-nr/aztec/src/history/note_inclusion.nr index 0876b824a07b..886801dea6f8 100644 --- a/noir-projects/aztec-nr/aztec/src/history/note_inclusion.nr +++ b/noir-projects/aztec-nr/aztec/src/history/note_inclusion.nr @@ -1,4 +1,4 @@ -use dep::protocol_types::header::Header; +use dep::protocol_types::block_header::BlockHeader; use dep::protocol_types::merkle_tree::root::root_from_sibling_path; use crate::{ @@ -7,12 +7,12 @@ use crate::{ }; trait ProveNoteInclusion { - fn prove_note_inclusion(header: Header, note: Note) + fn prove_note_inclusion(header: BlockHeader, note: Note) where Note: 
NoteInterface + NullifiableNote; } -impl ProveNoteInclusion for Header { +impl ProveNoteInclusion for BlockHeader { fn prove_note_inclusion(self, note: Note) where Note: NoteInterface + NullifiableNote, diff --git a/noir-projects/aztec-nr/aztec/src/history/note_validity.nr b/noir-projects/aztec-nr/aztec/src/history/note_validity.nr index 3efac1fc1aac..1eb5981024bc 100644 --- a/noir-projects/aztec-nr/aztec/src/history/note_validity.nr +++ b/noir-projects/aztec-nr/aztec/src/history/note_validity.nr @@ -1,10 +1,10 @@ use crate::{context::PrivateContext, note::note_interface::{NoteInterface, NullifiableNote}}; -use dep::protocol_types::header::Header; +use dep::protocol_types::block_header::BlockHeader; trait ProveNoteValidity { fn prove_note_validity( - header: Header, + header: BlockHeader, note: Note, context: &mut PrivateContext, ) @@ -12,7 +12,7 @@ trait ProveNoteValidity { Note: NoteInterface + NullifiableNote; } -impl ProveNoteValidity for Header { +impl ProveNoteValidity for BlockHeader { fn prove_note_validity(self, note: Note, context: &mut PrivateContext) where Note: NoteInterface + NullifiableNote, diff --git a/noir-projects/aztec-nr/aztec/src/history/nullifier_inclusion.nr b/noir-projects/aztec-nr/aztec/src/history/nullifier_inclusion.nr index bd0f508d485a..f1278658b89c 100644 --- a/noir-projects/aztec-nr/aztec/src/history/nullifier_inclusion.nr +++ b/noir-projects/aztec-nr/aztec/src/history/nullifier_inclusion.nr @@ -1,4 +1,4 @@ -use dep::protocol_types::header::Header; +use dep::protocol_types::block_header::BlockHeader; use dep::protocol_types::merkle_tree::root::root_from_sibling_path; use crate::{ @@ -8,10 +8,10 @@ use crate::{ }; trait ProveNullifierInclusion { - fn prove_nullifier_inclusion(header: Header, nullifier: Field); + fn prove_nullifier_inclusion(header: BlockHeader, nullifier: Field); } -impl ProveNullifierInclusion for Header { +impl ProveNullifierInclusion for BlockHeader { fn prove_nullifier_inclusion(self, nullifier: Field) { // 1) Get the membership witness of the nullifier let witness = unsafe { @@ -39,7 +39,7 @@ impl ProveNullifierInclusion for Header { trait ProveNoteIsNullified { fn prove_note_is_nullified( - header: Header, + header: BlockHeader, note: Note, context: &mut PrivateContext, ) @@ -47,7 +47,7 @@ trait ProveNoteIsNullified { Note: NoteInterface + NullifiableNote; } -impl ProveNoteIsNullified for Header { +impl ProveNoteIsNullified for BlockHeader { // docs:start:prove_note_is_nullified fn prove_note_is_nullified(self, note: Note, context: &mut PrivateContext) where diff --git a/noir-projects/aztec-nr/aztec/src/history/nullifier_non_inclusion.nr b/noir-projects/aztec-nr/aztec/src/history/nullifier_non_inclusion.nr index 95a43086b2bb..e40c199ea7dc 100644 --- a/noir-projects/aztec-nr/aztec/src/history/nullifier_non_inclusion.nr +++ b/noir-projects/aztec-nr/aztec/src/history/nullifier_non_inclusion.nr @@ -4,16 +4,16 @@ use crate::{ oracle::get_nullifier_membership_witness::get_low_nullifier_membership_witness, }; use dep::protocol_types::{ - header::Header, + block_header::BlockHeader, utils::field::{full_field_greater_than, full_field_less_than}, }; use dep::protocol_types::merkle_tree::root::root_from_sibling_path; trait ProveNullifierNonInclusion { - fn prove_nullifier_non_inclusion(header: Header, nullifier: Field); + fn prove_nullifier_non_inclusion(header: BlockHeader, nullifier: Field); } -impl ProveNullifierNonInclusion for Header { +impl ProveNullifierNonInclusion for BlockHeader { fn prove_nullifier_non_inclusion(self, nullifier: Field) 
{ // 1) Get the membership witness of a low nullifier of the nullifier let witness = unsafe { @@ -52,7 +52,7 @@ impl ProveNullifierNonInclusion for Header { trait ProveNoteNotNullified { fn prove_note_not_nullified( - header: Header, + header: BlockHeader, note: Note, context: &mut PrivateContext, ) @@ -60,7 +60,7 @@ trait ProveNoteNotNullified { Note: NoteInterface + NullifiableNote; } -impl ProveNoteNotNullified for Header { +impl ProveNoteNotNullified for BlockHeader { // docs:start:prove_note_not_nullified fn prove_note_not_nullified(self, note: Note, context: &mut PrivateContext) where diff --git a/noir-projects/aztec-nr/aztec/src/history/public_storage.nr b/noir-projects/aztec-nr/aztec/src/history/public_storage.nr index d26397d1314f..37cf287cde4b 100644 --- a/noir-projects/aztec-nr/aztec/src/history/public_storage.nr +++ b/noir-projects/aztec-nr/aztec/src/history/public_storage.nr @@ -1,6 +1,6 @@ use dep::protocol_types::{ - address::AztecAddress, constants::GENERATOR_INDEX__PUBLIC_LEAF_INDEX, - hash::poseidon2_hash_with_separator, header::Header, utils::field::full_field_less_than, + address::AztecAddress, block_header::BlockHeader, constants::GENERATOR_INDEX__PUBLIC_LEAF_INDEX, + hash::poseidon2_hash_with_separator, utils::field::full_field_less_than, }; use dep::protocol_types::merkle_tree::root::root_from_sibling_path; @@ -8,13 +8,13 @@ use crate::oracle::get_public_data_witness::get_public_data_witness; trait PublicStorageHistoricalRead { fn public_storage_historical_read( - header: Header, + header: BlockHeader, storage_slot: Field, contract_address: AztecAddress, ) -> Field; } -impl PublicStorageHistoricalRead for Header { +impl PublicStorageHistoricalRead for BlockHeader { fn public_storage_historical_read( self, storage_slot: Field, diff --git a/noir-projects/aztec-nr/aztec/src/oracle/header.nr b/noir-projects/aztec-nr/aztec/src/oracle/block_header.nr similarity index 54% rename from noir-projects/aztec-nr/aztec/src/oracle/header.nr rename to noir-projects/aztec-nr/aztec/src/oracle/block_header.nr index 9ce477aac1a0..3139b6d85a68 100644 --- a/noir-projects/aztec-nr/aztec/src/oracle/header.nr +++ b/noir-projects/aztec-nr/aztec/src/oracle/block_header.nr @@ -1,32 +1,30 @@ -use dep::protocol_types::{constants::HEADER_LENGTH, header::Header}; +use dep::protocol_types::{block_header::BlockHeader, constants::BLOCK_HEADER_LENGTH}; use dep::protocol_types::merkle_tree::root::root_from_sibling_path; use crate::{ context::PrivateContext, oracle::get_membership_witness::get_archive_membership_witness, }; -use crate::test::helpers::test_environment::TestEnvironment; +#[oracle(getBlockHeader)] +unconstrained fn get_block_header_at_oracle(_block_number: u32) -> [Field; BLOCK_HEADER_LENGTH] {} -#[oracle(getHeader)] -unconstrained fn get_header_at_oracle(_block_number: u32) -> [Field; HEADER_LENGTH] {} - -pub unconstrained fn get_header_at_internal(block_number: u32) -> Header { - let header = get_header_at_oracle(block_number); - Header::deserialize(header) +unconstrained fn get_block_header_at_internal(block_number: u32) -> BlockHeader { + let header = get_block_header_at_oracle(block_number); + BlockHeader::deserialize(header) } -pub fn get_header_at(block_number: u32, context: PrivateContext) -> Header { - let header = context.historical_header; - let current_block_number = header.global_variables.block_number as u32; +pub fn get_block_header_at(block_number: u32, context: PrivateContext) -> BlockHeader { + let historical_header = context.historical_header; + let 
historical_block_number = historical_header.global_variables.block_number as u32; - if (block_number == current_block_number) { + if (block_number == historical_block_number) { // If the block number we want to prove against is the same as the block number in the historical header we // skip the inclusion proofs and just return the historical header from context. - header + historical_header } else { // 1) Get block number corresponding to the last_archive root in the header // Note: We subtract 1 because the last_archive root is the root of the archive after applying the previous block - let last_archive_block_number = current_block_number - 1; + let last_archive_block_number = historical_block_number - 1; // 2) Check that the last archive block number is more than or equal to the block number we want to prove against // We could not perform the proof otherwise because the last archive root from the header would not "contain" @@ -37,24 +35,24 @@ pub fn get_header_at(block_number: u32, context: PrivateContext) -> Header { ); // 3) Get the header hint of a given block from an oracle - let historical = unsafe { get_header_at_internal(block_number) }; + let header = unsafe { get_block_header_at_internal(block_number) }; // 4) We make sure that the header hint we received from the oracle exists in the state tree and is the actual header // at the desired block number - constrain_get_header_at_internal( - historical, + constrain_get_block_header_at_internal( + header, block_number, last_archive_block_number, - header.last_archive.root, + historical_header.last_archive.root, ); // 5) Return the block header - historical + header } } -fn constrain_get_header_at_internal( - header_hint: Header, +fn constrain_get_block_header_at_internal( + header_hint: BlockHeader, block_number: u32, last_archive_block_number: u32, last_archive_root: Field, @@ -78,23 +76,28 @@ fn constrain_get_header_at_internal( ); } -#[test(should_fail_with = "Block number provided is not the same as the block number from the header hint")] -unconstrained fn fetching_a_valid_but_different_header_should_fail() { - let mut env = TestEnvironment::new(); +mod test { + use crate::test::helpers::test_environment::TestEnvironment; + use super::{constrain_get_block_header_at_internal, get_block_header_at_internal}; - env.advance_block_to(3); + #[test(should_fail_with = "Block number provided is not the same as the block number from the header hint")] + unconstrained fn fetching_a_valid_but_different_header_should_fail() { + let mut env = TestEnvironment::new(); - // We get our current header for the last archive values. - let current_header = env.private().historical_header; + env.advance_block_to(3); - let target_block_number = 2; - let bad_header = get_header_at_internal(target_block_number - 1); + // We get our current header for the last archive values. 
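The renamed helper keeps the hint-and-constrain oracle pattern: the header for the requested block is fetched unconstrained, then constrained against the last_archive root carried by the in-context historical header before it is returned. A minimal sketch of the consuming side, with a hypothetical private function and ValueNote standing in for any note type (the inclusion-proof contract changes further down use exactly these calls):

```noir
// Sketch only, not part of the diff: proving a note existed at a given
// historical block using the renamed APIs.
#[private]
fn assert_note_existed_at(block_number: u32, note: ValueNote) {
    // Returns the transaction's historical header directly when the block
    // numbers match; otherwise constrains an oracle-provided header hint
    // against the archive root, as implemented above.
    let header = context.get_block_header_at(block_number);
    header.prove_note_inclusion(note);
}
```
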
+ let current_header = env.private().historical_header; - // We pass in a different block number than the header received - constrain_get_header_at_internal( - bad_header, - 2, - current_header.global_variables.block_number as u32 - 1, - current_header.last_archive.root, - ); + let target_block_number = 2; + let bad_header = get_block_header_at_internal(target_block_number - 1); + + // We pass in a different block number than the header received + constrain_get_block_header_at_internal( + bad_header, + 2, + current_header.global_variables.block_number as u32 - 1, + current_header.last_archive.root, + ); + } } diff --git a/noir-projects/aztec-nr/aztec/src/oracle/mod.nr b/noir-projects/aztec-nr/aztec/src/oracle/mod.nr index b6d74a57c698..7fbf9f64b8b0 100644 --- a/noir-projects/aztec-nr/aztec/src/oracle/mod.nr +++ b/noir-projects/aztec-nr/aztec/src/oracle/mod.nr @@ -15,7 +15,7 @@ pub mod key_validation_request; pub mod get_sibling_path; pub mod random; pub mod enqueue_public_function_call; -pub mod header; +pub mod block_header; pub mod notes; pub mod storage; pub mod logs; diff --git a/noir-projects/aztec-nr/aztec/src/state_vars/public_immutable.nr b/noir-projects/aztec-nr/aztec/src/state_vars/public_immutable.nr index a3e5df8b9c87..4ed4f2bbb4d2 100644 --- a/noir-projects/aztec-nr/aztec/src/state_vars/public_immutable.nr +++ b/noir-projects/aztec-nr/aztec/src/state_vars/public_immutable.nr @@ -73,7 +73,7 @@ where T: Serialize + Deserialize, { pub fn read(self) -> T { - let header = self.context.get_header(); + let header = self.context.get_block_header(); let mut fields = [0; T_SERIALIZED_LEN]; for i in 0..fields.len() { diff --git a/noir-projects/aztec-nr/aztec/src/state_vars/shared_mutable.nr b/noir-projects/aztec-nr/aztec/src/state_vars/shared_mutable.nr index a56f06c7f661..3d940d3d4924 100644 --- a/noir-projects/aztec-nr/aztec/src/state_vars/shared_mutable.nr +++ b/noir-projects/aztec-nr/aztec/src/state_vars/shared_mutable.nr @@ -191,7 +191,7 @@ where fn historical_read_from_public_storage( self, ) -> (ScheduledValueChange, ScheduledDelayChange, u32) { - let header = self.context.get_header(); + let header = self.context.get_block_header(); let address = self.context.this_address(); let historical_block_number = header.global_variables.block_number as u32; diff --git a/noir-projects/ivc_integration_circuits.json b/noir-projects/ivc_integration_circuits.json new file mode 100644 index 000000000000..01971872d1ff --- /dev/null +++ b/noir-projects/ivc_integration_circuits.json @@ -0,0 +1,8 @@ +[ + "mock_private_kernel_init", + "mock_private_kernel_inner", + "mock_private_kernel_reset.*", + "mock_private_kernel_tail.*", + "app_creator", + "app_reader" +] \ No newline at end of file diff --git a/noir-projects/mega_honk_circuits.json b/noir-projects/mega_honk_circuits.json index 37b25115596a..1db696b11e6e 100644 --- a/noir-projects/mega_honk_circuits.json +++ b/noir-projects/mega_honk_circuits.json @@ -1,6 +1,6 @@ [ - "private_kernel_init", - "private_kernel_inner", - "private_kernel_reset.*", - "private_kernel_tail.*" + "^private_kernel_init", + "^private_kernel_inner", + "^private_kernel_reset.*", + "^private_kernel_tail.*" ] \ No newline at end of file diff --git a/noir-projects/mock-protocol-circuits/crates/mock-private-kernel-init/src/main.nr b/noir-projects/mock-protocol-circuits/crates/mock-private-kernel-init/src/main.nr index 9a7b6ca0d63b..f3ce82e3a989 100644 --- a/noir-projects/mock-protocol-circuits/crates/mock-private-kernel-init/src/main.nr +++ 
b/noir-projects/mock-protocol-circuits/crates/mock-private-kernel-init/src/main.nr @@ -1,11 +1,15 @@ use dep::mock_types::{ - AppPublicInputs, PrivateKernelPublicInputs, PrivateKernelPublicInputsBuilder, TxRequest, + AppPublicInputs, CLIENT_IVC_VERIFICATION_KEY_LENGTH_IN_FIELDS, PrivateKernelPublicInputs, + PrivateKernelPublicInputsBuilder, PROOF_TYPE_OINK, TxRequest, }; fn main( tx: TxRequest, app_inputs: call_data(1) AppPublicInputs, + app_vk: [Field; CLIENT_IVC_VERIFICATION_KEY_LENGTH_IN_FIELDS], ) -> return_data PrivateKernelPublicInputs { + std::verify_proof_with_type(app_vk, [], [], 0, PROOF_TYPE_OINK); + let mut private_kernel_inputs = PrivateKernelPublicInputsBuilder::from_tx(tx); private_kernel_inputs.ingest_app_inputs(app_inputs); private_kernel_inputs.finish() diff --git a/noir-projects/mock-protocol-circuits/crates/mock-private-kernel-inner/src/main.nr b/noir-projects/mock-protocol-circuits/crates/mock-private-kernel-inner/src/main.nr index 4dee3d46e750..707dab8d84f4 100644 --- a/noir-projects/mock-protocol-circuits/crates/mock-private-kernel-inner/src/main.nr +++ b/noir-projects/mock-protocol-circuits/crates/mock-private-kernel-inner/src/main.nr @@ -1,9 +1,17 @@ -use dep::mock_types::{AppPublicInputs, PrivateKernelPublicInputs, PrivateKernelPublicInputsBuilder}; +use dep::mock_types::{ + AppPublicInputs, CLIENT_IVC_VERIFICATION_KEY_LENGTH_IN_FIELDS, PrivateKernelPublicInputs, + PrivateKernelPublicInputsBuilder, PROOF_TYPE_PG, +}; fn main( prev_kernel_public_inputs: call_data(0) PrivateKernelPublicInputs, + kernel_vk: [Field; CLIENT_IVC_VERIFICATION_KEY_LENGTH_IN_FIELDS], app_inputs: call_data(1) AppPublicInputs, + app_vk: [Field; CLIENT_IVC_VERIFICATION_KEY_LENGTH_IN_FIELDS], ) -> return_data PrivateKernelPublicInputs { + std::verify_proof_with_type(kernel_vk, [], [], 0, PROOF_TYPE_PG); + std::verify_proof_with_type(app_vk, [], [], 0, PROOF_TYPE_PG); + let mut private_kernel_inputs = PrivateKernelPublicInputsBuilder::from_previous_kernel(prev_kernel_public_inputs); private_kernel_inputs.ingest_app_inputs(app_inputs); diff --git a/noir-projects/mock-protocol-circuits/crates/mock-private-kernel-reset/src/main.nr b/noir-projects/mock-protocol-circuits/crates/mock-private-kernel-reset/src/main.nr index 6c27e0652045..1444732a0e64 100644 --- a/noir-projects/mock-protocol-circuits/crates/mock-private-kernel-reset/src/main.nr +++ b/noir-projects/mock-protocol-circuits/crates/mock-private-kernel-reset/src/main.nr @@ -1,13 +1,17 @@ use dep::mock_types::{ - MAX_COMMITMENT_READ_REQUESTS_PER_TX, MAX_COMMITMENTS_PER_TX, PrivateKernelPublicInputs, + CLIENT_IVC_VERIFICATION_KEY_LENGTH_IN_FIELDS, MAX_COMMITMENT_READ_REQUESTS_PER_TX, + MAX_COMMITMENTS_PER_TX, PrivateKernelPublicInputs, PROOF_TYPE_PG, }; // Mock reset kernel that resets read requests. // It needs hints to locate the commitments that match the read requests.
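Each mock kernel now receives the verification key of every claim it folds and binds it with `std::verify_proof_with_type` before ingesting inputs: the init kernel above verifies the app claim as an Oink proof, while the inner kernel, and the reset and tail kernels below, verify their predecessor (and any further app claims) as Protogalaxy (PG) claims. The proof and public-input arguments are passed empty, which suggests the ClientIVC backend supplies the actual proof data at accumulation time, so the circuit only pins the key and the proof type. A condensed sketch of the shared pattern, assuming the constants re-exported from `mock_types` exactly as this diff sets up:

```noir
use dep::mock_types::{
    CLIENT_IVC_VERIFICATION_KEY_LENGTH_IN_FIELDS, PROOF_TYPE_OINK, PROOF_TYPE_PG,
};

// Hypothetical helper condensing the verification calls added across the
// mock kernels; not part of the diff itself.
fn bind_ivc_claims(
    app_vk: [Field; CLIENT_IVC_VERIFICATION_KEY_LENGTH_IN_FIELDS],
    kernel_vk: [Field; CLIENT_IVC_VERIFICATION_KEY_LENGTH_IN_FIELDS],
) {
    // The first claim in the chain (the app circuit, in the init kernel) is
    // bound as an Oink proof...
    std::verify_proof_with_type(app_vk, [], [], 0, PROOF_TYPE_OINK);
    // ...and every later fold (previous kernel, further apps) as a PG proof.
    std::verify_proof_with_type(kernel_vk, [], [], 0, PROOF_TYPE_PG);
}
```
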
fn main( mut prev_kernel_public_inputs: call_data(0) PrivateKernelPublicInputs, + kernel_vk: [Field; CLIENT_IVC_VERIFICATION_KEY_LENGTH_IN_FIELDS], commitment_read_hints: [u32; MAX_COMMITMENT_READ_REQUESTS_PER_TX], ) -> return_data PrivateKernelPublicInputs { + std::verify_proof_with_type(kernel_vk, [], [], 0, PROOF_TYPE_PG); + for i in 0..MAX_COMMITMENT_READ_REQUESTS_PER_TX { if commitment_read_hints[i] != MAX_COMMITMENTS_PER_TX { assert_eq( diff --git a/noir-projects/mock-protocol-circuits/crates/mock-private-kernel-tail/src/main.nr b/noir-projects/mock-protocol-circuits/crates/mock-private-kernel-tail/src/main.nr index acac4a955431..082dd428a22f 100644 --- a/noir-projects/mock-protocol-circuits/crates/mock-private-kernel-tail/src/main.nr +++ b/noir-projects/mock-protocol-circuits/crates/mock-private-kernel-tail/src/main.nr @@ -1,11 +1,15 @@ use dep::mock_types::{ - KernelPublicInputs, MAX_COMMITMENT_READ_REQUESTS_PER_TX, PrivateKernelPublicInputs, + CLIENT_IVC_VERIFICATION_KEY_LENGTH_IN_FIELDS, KernelPublicInputs, + MAX_COMMITMENT_READ_REQUESTS_PER_TX, PrivateKernelPublicInputs, PROOF_TYPE_PG, }; // The tail kernel finishes the client IVC chain exposing the final public inputs with no remaining calls or unfulfilled read requests. fn main( prev_kernel_public_inputs: call_data(0) PrivateKernelPublicInputs, + kernel_vk: [Field; CLIENT_IVC_VERIFICATION_KEY_LENGTH_IN_FIELDS], ) -> pub KernelPublicInputs { + std::verify_proof_with_type(kernel_vk, [], [], 0, PROOF_TYPE_PG); + assert_eq(prev_kernel_public_inputs.remaining_calls, 0); for i in 0..MAX_COMMITMENT_READ_REQUESTS_PER_TX { assert_eq(prev_kernel_public_inputs.read_requests[i], 0); diff --git a/noir-projects/mock-protocol-circuits/crates/mock-types/Nargo.toml b/noir-projects/mock-protocol-circuits/crates/mock-types/Nargo.toml index d5f57873b0ca..e9b0542224ab 100644 --- a/noir-projects/mock-protocol-circuits/crates/mock-types/Nargo.toml +++ b/noir-projects/mock-protocol-circuits/crates/mock-types/Nargo.toml @@ -4,4 +4,5 @@ type = "lib" authors = [""] compiler_version = ">=0.32.0" -[dependencies] \ No newline at end of file +[dependencies] +protocol_types = { path = "../../../noir-protocol-circuits/crates/types" } diff --git a/noir-projects/mock-protocol-circuits/crates/mock-types/src/lib.nr b/noir-projects/mock-protocol-circuits/crates/mock-types/src/lib.nr index 933e812174a5..c5f69e887a02 100644 --- a/noir-projects/mock-protocol-circuits/crates/mock-types/src/lib.nr +++ b/noir-projects/mock-protocol-circuits/crates/mock-types/src/lib.nr @@ -3,6 +3,10 @@ global MAX_COMMITMENTS_PER_TX: u32 = 4; global MAX_COMMITMENT_READ_REQUESTS_PER_CALL: u32 = 2; global MAX_COMMITMENT_READ_REQUESTS_PER_TX: u32 = 4; +pub use protocol_types::constants::{ + CLIENT_IVC_VERIFICATION_KEY_LENGTH_IN_FIELDS, PROOF_TYPE_OINK, PROOF_TYPE_PG, +}; + struct TxRequest { number_of_calls: u32, } diff --git a/noir-projects/noir-contracts/contracts/avm_test_contract/src/main.nr b/noir-projects/noir-contracts/contracts/avm_test_contract/src/main.nr index 93c07de02a27..fb7abdab91a6 100644 --- a/noir-projects/noir-contracts/contracts/avm_test_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/avm_test_contract/src/main.nr @@ -37,16 +37,14 @@ contract AvmTest { get_contract_instance_initialization_hash_avm, }; use dep::aztec::prelude::Map; - use dep::aztec::protocol_types::{ - abis::function_selector::FunctionSelector, contract_class_id::ContractClassId, - storage::map::derive_storage_slot_in_map, - }; use dep::aztec::protocol_types::{ address::{AztecAddress, 
EthAddress}, point::Point, scalar::Scalar, }; - use dep::aztec::protocol_types::constants::CONTRACT_INSTANCE_LENGTH; + use dep::aztec::protocol_types::{ + contract_class_id::ContractClassId, storage::map::derive_storage_slot_in_map, + }; use dep::aztec::state_vars::PublicMutable; use dep::compressed_string::CompressedString; use std::embedded_curve_ops::{EmbeddedCurvePoint, multi_scalar_mul}; @@ -378,11 +376,6 @@ contract AvmTest { context.msg_sender() } - #[public] - fn get_function_selector() -> FunctionSelector { - context.selector() - } - #[public] fn get_transaction_fee() -> Field { context.transaction_fee() @@ -434,14 +427,6 @@ contract AvmTest { assert(timestamp == expected_timestamp, "timestamp does not match"); } - #[public] - fn check_selector() { - assert( - context.selector() == comptime { FunctionSelector::from_signature("check_selector()") }, - "Unexpected selector!", - ); - } - #[public] fn get_args_hash(_a: u8, _fields: [Field; 3]) -> Field { context.get_args_hash() @@ -473,6 +458,41 @@ contract AvmTest { context.push_nullifier(nullifier); } + #[public] + fn n_storage_writes(num: u32) { + for i in 0..num { + storage.map.at(AztecAddress::from_field(i as Field)).write(i); + } + } + + #[public] + fn n_new_note_hashes(num: u32) { + for i in 0..num { + context.push_note_hash(i as Field); + } + } + + #[public] + fn n_new_nullifiers(num: u32) { + for i in 0..num { + context.push_nullifier(i as Field); + } + } + + #[public] + fn n_new_l2_to_l1_msgs(num: u32) { + for i in 0..num { + context.message_portal(EthAddress::from_field(i as Field), i as Field) + } + } + + #[public] + fn n_new_unencrypted_logs(num: u32) { + for i in 0..num { + context.emit_unencrypted_log(/*message=*/ [i as Field]); + } + } + // Use the standard context interface to check for a nullifier #[public] fn nullifier_exists(nullifier: Field) -> bool { @@ -619,13 +639,11 @@ contract AvmTest { dep::aztec::oracle::debug_log::debug_log("pedersen_hash_with_index"); let _ = pedersen_hash_with_index(args_field); dep::aztec::oracle::debug_log::debug_log("test_get_contract_instance"); - test_get_contract_instance(AztecAddress::from_field(args_field[0])); + test_get_contract_instance(AztecAddress::from_field(0x4444)); dep::aztec::oracle::debug_log::debug_log("get_address"); let _ = get_address(); dep::aztec::oracle::debug_log::debug_log("get_sender"); let _ = get_sender(); - dep::aztec::oracle::debug_log::debug_log("get_function_selector"); - let _ = get_function_selector(); dep::aztec::oracle::debug_log::debug_log("get_transaction_fee"); let _ = get_transaction_fee(); dep::aztec::oracle::debug_log::debug_log("get_chain_id"); diff --git a/noir-projects/noir-contracts/contracts/claim_contract/src/main.nr b/noir-projects/noir-contracts/contracts/claim_contract/src/main.nr index 1318e17f1921..e2a9e4877561 100644 --- a/noir-projects/noir-contracts/contracts/claim_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/claim_contract/src/main.nr @@ -36,7 +36,7 @@ contract Claim { ); // 2) Prove that the note hash exists in the note hash tree - let header = context.get_header(); + let header = context.get_block_header(); header.prove_note_inclusion(proof_note); // 3) Compute and emit a nullifier which is unique to the note and this contract to ensure the reward can be diff --git a/noir-projects/noir-contracts/contracts/escrow_contract/src/main.nr b/noir-projects/noir-contracts/contracts/escrow_contract/src/main.nr index 4aa3759433a8..219e8c8dd55f 100644 --- 
a/noir-projects/noir-contracts/contracts/escrow_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/escrow_contract/src/main.nr @@ -43,7 +43,8 @@ contract Escrow { let note = storage.owner.get_note(); assert(note.address == sender); - + // docs:start:call_function Token::at(token).transfer(recipient, amount).call(&mut context); + // docs:end:call_function } } diff --git a/noir-projects/noir-contracts/contracts/inclusion_proofs_contract/src/main.nr b/noir-projects/noir-contracts/contracts/inclusion_proofs_contract/src/main.nr index 8be4ea123432..65dafc9a93f3 100644 --- a/noir-projects/noir-contracts/contracts/inclusion_proofs_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/inclusion_proofs_contract/src/main.nr @@ -65,9 +65,9 @@ contract InclusionProofs { // docs:start:prove_note_inclusion // 2) Prove the note inclusion let header = if (use_block_number) { - context.get_header_at(block_number) + context.get_block_header_at(block_number) } else { - context.get_header() + context.get_block_header() }; header.prove_note_inclusion(note); @@ -80,13 +80,13 @@ contract InclusionProofs { use_block_number: bool, block_number: u32, // The block at which we'll prove that the note exists ) { - let header = context.get_header(); + let header = context.get_block_header(); let mut note = ValueNote::new(1, owner); let header = if (use_block_number) { - context.get_header_at(block_number) + context.get_block_header_at(block_number) } else { - context.get_header() + context.get_block_header() }; header.prove_note_inclusion(note); @@ -113,9 +113,9 @@ contract InclusionProofs { let note = private_values.get_notes(options).get(0); let header = if (use_block_number) { - context.get_header_at(block_number) + context.get_block_header_at(block_number) } else { - context.get_header() + context.get_block_header() }; // docs:start:prove_note_not_nullified header.prove_note_not_nullified(note, &mut context); @@ -140,9 +140,9 @@ contract InclusionProofs { // 2) Prove the note validity let header = if (use_block_number) { - context.get_header_at(block_number) + context.get_block_header_at(block_number) } else { - context.get_header() + context.get_block_header() }; // docs:start:prove_note_validity header.prove_note_validity(note, &mut context); @@ -170,9 +170,9 @@ contract InclusionProofs { block_number: u32, // The block at which we'll prove that the nullifier exists in the nullifier tree ) { let header = if (use_block_number) { - context.get_header_at(block_number) + context.get_block_header_at(block_number) } else { - context.get_header() + context.get_block_header() }; // docs:start:prove_nullifier_inclusion header.prove_nullifier_inclusion(nullifier); @@ -194,7 +194,7 @@ contract InclusionProofs { fn test_storage_historical_read_unset_slot( block_number: u32, // The block at which we'll read the public storage value ) { - let header = context.get_header_at(block_number); + let header = context.get_block_header_at(block_number); // docs:start:public_storage_historical_read assert_eq( header.public_storage_historical_read( @@ -213,9 +213,9 @@ contract InclusionProofs { block_number: u32, // The block at which we'll read the public storage value ) { let header = if (use_block_number) { - context.get_header_at(block_number) + context.get_block_header_at(block_number) } else { - context.get_header() + context.get_block_header() }; let actual = header.public_storage_historical_read( @@ -234,7 +234,7 @@ contract InclusionProofs { test_deployment: bool, test_initialization: bool, ) { - let 
header = context.get_header_at(block_number); + let header = context.get_block_header_at(block_number); if test_deployment { // docs:start:prove_contract_deployment @@ -256,7 +256,7 @@ contract InclusionProofs { test_deployment: bool, test_initialization: bool, ) { - let header = context.get_header_at(block_number); + let header = context.get_block_header_at(block_number); if test_deployment { // docs:start:prove_contract_non_deployment diff --git a/noir-projects/noir-contracts/contracts/lending_contract/src/main.nr b/noir-projects/noir-contracts/contracts/lending_contract/src/main.nr index b5781af3b207..d9d965cea052 100644 --- a/noir-projects/noir-contracts/contracts/lending_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/lending_contract/src/main.nr @@ -128,9 +128,11 @@ contract Lending { on_behalf_of: Field, collateral_asset: AztecAddress, ) { + // docs:start:public_to_public_call let _ = Token::at(collateral_asset) .transfer_in_public(context.msg_sender(), context.this_address(), amount, nonce) .call(&mut context); + // docs:end:public_to_public_call let _ = Lending::at(context.this_address()) ._deposit(AztecAddress::from_field(on_behalf_of), amount, collateral_asset) .call(&mut context); diff --git a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_empty.nr b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_empty.nr index 76ac505ad729..fa8a054d0e62 100644 --- a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_empty.nr +++ b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_empty.nr @@ -1,5 +1,5 @@ use dep::types::{ - header::Header, + block_header::BlockHeader, KernelCircuitPublicInputs, proof::{ recursive_proof::RecursiveProof, @@ -28,7 +28,7 @@ impl Empty for EmptyNestedCircuitPublicInputs { pub struct PrivateKernelEmptyPrivateInputs { empty_nested: EmptyNestedCircuitPublicInputs, - historical_header: Header, + historical_header: BlockHeader, chain_id: Field, version: Field, vk_tree_root: Field, @@ -54,7 +54,7 @@ impl Empty for PrivateKernelEmptyPrivateInputs { fn empty() -> Self { PrivateKernelEmptyPrivateInputs { empty_nested: EmptyNestedCircuitPublicInputs::empty(), - historical_header: Header::empty(), + historical_header: BlockHeader::empty(), chain_id: 0, version: 0, vk_tree_root: 0, @@ -67,13 +67,13 @@ mod tests { use crate::private_kernel_empty::{ EmptyNestedCircuitPublicInputs, PrivateKernelEmptyPrivateInputs, }; - use dep::types::header::Header; + use dep::types::block_header::BlockHeader; #[test] unconstrained fn works() { let private_inputs = PrivateKernelEmptyPrivateInputs { empty_nested: EmptyNestedCircuitPublicInputs::empty(), - historical_header: Header::empty(), + historical_header: BlockHeader::empty(), chain_id: 1, version: 2, vk_tree_root: 3, diff --git a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/base/components/archive.nr b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/base/components/archive.nr index e97541be1587..075676c48462 100644 --- a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/base/components/archive.nr +++ b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/base/components/archive.nr @@ -1,6 +1,6 @@ use types::{ + block_header::BlockHeader, constants::ARCHIVE_HEIGHT, - header::Header, merkle_tree::membership::{assert_check_membership, MembershipWitness}, }; @@ -9,7 +9,7 @@ use types::{ pub(crate) fn perform_archive_membership_check( archive_root: Field, 
previous_block_hash_witness: MembershipWitness, - header: Header, + header: BlockHeader, ) { // Rebuild the block hash let previous_block_hash = header.hash(); diff --git a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/block_root/block_root_rollup_inputs.nr b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/block_root/block_root_rollup_inputs.nr index 751dc299204d..ea137243a543 100644 --- a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/block_root/block_root_rollup_inputs.nr +++ b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/block_root/block_root_rollup_inputs.nr @@ -8,6 +8,7 @@ use crate::{ use parity_lib::root::root_rollup_parity_input::RootRollupParityInput; use types::{ abis::append_only_tree_snapshot::AppendOnlyTreeSnapshot, + block_header::BlockHeader, constants::{ ARCHIVE_HEIGHT, AZTEC_MAX_EPOCH_DURATION, L1_TO_L2_MSG_SUBTREE_HEIGHT, L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH, MERGE_ROLLUP_INDEX, @@ -15,7 +16,6 @@ use types::{ PUBLIC_BASE_ROLLUP_VK_INDEX, }, content_commitment::ContentCommitment, - header::Header, merkle_tree::{append_only_tree, calculate_empty_tree_root}, state_reference::StateReference, traits::Empty, @@ -116,7 +116,7 @@ impl BlockRootRollupInputs { // debug_log_format("header.total_fees={0}", [total_fees]); // debug_log_format("header.total_mana_used={0}", [total_mana_used]); // } - let header = Header { + let header = BlockHeader { last_archive: left.constants.last_archive, content_commitment, state, diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/combined_constant_data.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/combined_constant_data.nr index 03e9863016b6..48d0f7be170b 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/combined_constant_data.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/combined_constant_data.nr @@ -1,12 +1,12 @@ use crate::abis::{global_variables::GlobalVariables, tx_constant_data::TxConstantData}; +use crate::block_header::BlockHeader; use crate::constants::COMBINED_CONSTANT_DATA_LENGTH; -use crate::header::Header; use crate::traits::{Deserialize, Empty, Serialize}; use crate::transaction::tx_context::TxContext; use crate::utils::reader::Reader; pub struct CombinedConstantData { - pub historical_header: Header, + pub historical_header: BlockHeader, // Note: `chain_id` and `version` in tx_context are not redundant to the values in // self.historical_header.global_variables because they can be different in case of a protocol upgrade. 
In such // a situation we could be using a header from a block before the upgrade took place but be using the updated @@ -37,7 +37,7 @@ impl CombinedConstantData { impl Empty for CombinedConstantData { fn empty() -> Self { CombinedConstantData { - historical_header: Header::empty(), + historical_header: BlockHeader::empty(), tx_context: TxContext::empty(), vk_tree_root: 0, protocol_contract_tree_root: 0, @@ -67,7 +67,7 @@ impl Deserialize for CombinedConstantData { let mut reader = Reader::new(fields); let item = CombinedConstantData { - historical_header: reader.read_struct(Header::deserialize), + historical_header: reader.read_struct(BlockHeader::deserialize), tx_context: reader.read_struct(TxContext::deserialize), vk_tree_root: reader.read(), protocol_contract_tree_root: reader.read(), diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/private_circuit_public_inputs.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/private_circuit_public_inputs.nr index c4e9a8502573..217d97ddffce 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/private_circuit_public_inputs.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/private_circuit_public_inputs.nr @@ -6,6 +6,7 @@ use crate::{ read_request::ReadRequest, side_effect::Counted, validation_requests::KeyValidationRequestAndGenerator, }, + block_header::BlockHeader, constants::{ MAX_CONTRACT_CLASS_LOGS_PER_CALL, MAX_ENQUEUED_CALLS_PER_CALL, MAX_KEY_VALIDATION_REQUESTS_PER_CALL, MAX_L2_TO_L1_MSGS_PER_CALL, @@ -14,7 +15,6 @@ use crate::{ MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL, MAX_PRIVATE_LOGS_PER_CALL, PRIVATE_CIRCUIT_PUBLIC_INPUTS_LENGTH, }, - header::Header, messaging::l2_to_l1_message::L2ToL1Message, traits::{Deserialize, Empty, Serialize}, transaction::tx_context::TxContext, @@ -82,7 +82,7 @@ pub struct PrivateCircuitPublicInputs { pub end_side_effect_counter: u32, // Header of a block whose state is used during private execution (not the block the transaction is included in). - pub historical_header: Header, + pub historical_header: BlockHeader, // Note: The chain_id and version here are not redundant to the values in self.historical_header.global_variables because // they can be different in case of a protocol upgrade.
In such a situation we could be using a header from a block @@ -226,7 +226,7 @@ impl Deserialize for PrivateCircuitPublicInputs ), start_side_effect_counter: reader.read() as u32, end_side_effect_counter: reader.read() as u32, - historical_header: reader.read_struct(Header::deserialize), + historical_header: reader.read_struct(BlockHeader::deserialize), tx_context: reader.read_struct(TxContext::deserialize), }; @@ -261,7 +261,7 @@ impl Empty for PrivateCircuitPublicInputs { contract_class_logs_hashes: [LogHash::empty(); MAX_CONTRACT_CLASS_LOGS_PER_CALL], start_side_effect_counter: 0 as u32, end_side_effect_counter: 0 as u32, - historical_header: Header::empty(), + historical_header: BlockHeader::empty(), tx_context: TxContext::empty(), } } diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/tx_constant_data.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/tx_constant_data.nr index be7c8151d704..e23d05cca9dc 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/tx_constant_data.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/tx_constant_data.nr @@ -1,6 +1,6 @@ use crate::{ + block_header::BlockHeader, constants::TX_CONSTANT_DATA_LENGTH, - header::Header, traits::{Deserialize, Empty, Serialize}, transaction::tx_context::TxContext, utils::reader::Reader, @@ -8,7 +8,7 @@ use crate::{ // Constants used throughout the execution of both private and public functions. pub struct TxConstantData { - pub historical_header: Header, + pub historical_header: BlockHeader, // Note: `chain_id` and `version` in tx_context are not redundant to the values in // self.historical_header.global_variables because they can be different in case of a protocol upgrade. In such // a situation we could be using a header from a block before the upgrade took place but be using the updated @@ -21,7 +21,7 @@ impl Empty for TxConstantData { fn empty() -> Self { TxConstantData { - historical_header: Header::empty(), + historical_header: BlockHeader::empty(), tx_context: TxContext::empty(), vk_tree_root: 0, protocol_contract_tree_root: 0, @@ -58,7 +58,7 @@ impl Deserialize for TxConstantData { let mut reader = Reader::new(fields); let item = TxConstantData { - historical_header: reader.read_struct(Header::deserialize), + historical_header: reader.read_struct(BlockHeader::deserialize), tx_context: reader.read_struct(TxContext::deserialize), vk_tree_root: reader.read(), protocol_contract_tree_root: reader.read(), diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/header.nr b/noir-projects/noir-protocol-circuits/crates/types/src/block_header.nr similarity index 81% rename from noir-projects/noir-protocol-circuits/crates/types/src/header.nr rename to noir-projects/noir-protocol-circuits/crates/types/src/block_header.nr index 5817cdb7e188..842a7eaa3082 --- a/noir-projects/noir-protocol-circuits/crates/types/src/header.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/block_header.nr @@ -4,8 +4,8 @@ use crate::{ global_variables::GlobalVariables, }, constants::{ - CONTENT_COMMITMENT_LENGTH, GENERATOR_INDEX__BLOCK_HASH, GLOBAL_VARIABLES_LENGTH, - HEADER_LENGTH, STATE_REFERENCE_LENGTH, + BLOCK_HEADER_LENGTH, CONTENT_COMMITMENT_LENGTH, GENERATOR_INDEX__BLOCK_HASH, + GLOBAL_VARIABLES_LENGTH, STATE_REFERENCE_LENGTH, }, content_commitment::ContentCommitment, hash::poseidon2_hash_with_separator, @@ -14,8 +14,8 @@ use crate::{ utils::arr_copy_slice, }; -// docs:start:header -pub struct Header { +// docs:start:block-header
+pub struct BlockHeader { pub last_archive: AppendOnlyTreeSnapshot, pub content_commitment: ContentCommitment, pub state: StateReference, @@ -23,9 +23,9 @@ pub struct Header { pub total_fees: Field, pub total_mana_used: Field, } -// docs:end:header +// docs:end:block-header -impl Eq for Header { +impl Eq for BlockHeader { fn eq(self, other: Self) -> bool { self.last_archive.eq(other.last_archive) & self.content_commitment.eq(other.content_commitment) @@ -36,9 +36,9 @@ impl Eq for Header { } } -impl Serialize<HEADER_LENGTH> for Header { - fn serialize(self) -> [Field; HEADER_LENGTH] { - let mut fields: BoundedVec<Field, HEADER_LENGTH> = BoundedVec::new(); +impl Serialize<BLOCK_HEADER_LENGTH> for BlockHeader { + fn serialize(self) -> [Field; BLOCK_HEADER_LENGTH] { + let mut fields: BoundedVec<Field, BLOCK_HEADER_LENGTH> = BoundedVec::new(); fields.extend_from_array(self.last_archive.serialize()); fields.extend_from_array(self.content_commitment.serialize()); @@ -50,8 +50,8 @@ } } -impl Deserialize<HEADER_LENGTH> for Header { - fn deserialize(serialized: [Field; HEADER_LENGTH]) -> Self { +impl Deserialize<BLOCK_HEADER_LENGTH> for BlockHeader { + fn deserialize(serialized: [Field; BLOCK_HEADER_LENGTH]) -> Self { let mut offset = 0; let last_archive_fields = @@ -74,7 +74,7 @@ impl Deserialize for Header { let total_mana_used = serialized[offset]; - Header { + BlockHeader { last_archive: AppendOnlyTreeSnapshot::deserialize(last_archive_fields), content_commitment: ContentCommitment::deserialize(content_commitment_fields), state: StateReference::deserialize(state_fields), @@ -85,7 +85,7 @@ } } -impl Empty for Header { +impl Empty for BlockHeader { fn empty() -> Self { Self { last_archive: AppendOnlyTreeSnapshot::zero(), @@ -98,7 +98,7 @@ } } -impl Hash for Header { +impl Hash for BlockHeader { fn hash(self) -> Field { poseidon2_hash_with_separator(self.serialize(), GENERATOR_INDEX__BLOCK_HASH) } @@ -106,21 +106,21 @@ #[test] fn serialization_of_empty() { - let header = Header::empty(); + let header = BlockHeader::empty(); let serialized = header.serialize(); - let deserialized = Header::deserialize(serialized); + let deserialized = BlockHeader::deserialize(serialized); assert(header.eq(deserialized)); } #[test] fn hash_smoke() { - let header = Header::empty(); + let header = BlockHeader::empty(); let _hashed = header.hash(); } #[test] fn empty_hash_is_zero() { - let header = Header::empty(); + let header = BlockHeader::empty(); let hash = header.hash(); // Value from new_contract_data.test.ts "computes empty hash" test diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr b/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr index c49164566fcb..9311559e90b7 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr @@ -307,7 +307,7 @@ pub global TX_CONTEXT_LENGTH: u32 = 2 + GAS_SETTINGS_LENGTH; pub global TX_REQUEST_LENGTH: u32 = 2 + TX_CONTEXT_LENGTH + FUNCTION_DATA_LENGTH; pub global TOTAL_FEES_LENGTH: u32 = 1; pub global TOTAL_MANA_USED_LENGTH: u32 = 1; -pub global HEADER_LENGTH: u32 = APPEND_ONLY_TREE_SNAPSHOT_LENGTH +pub global BLOCK_HEADER_LENGTH: u32 = APPEND_ONLY_TREE_SNAPSHOT_LENGTH + CONTENT_COMMITMENT_LENGTH + STATE_REFERENCE_LENGTH + GLOBAL_VARIABLES_LENGTH @@ -328,7 +328,7 @@ pub global PRIVATE_CIRCUIT_PUBLIC_INPUTS_LENGTH: u32 = CALL_CONTEXT_LENGTH + 2 + (PRIVATE_LOG_DATA_LENGTH * MAX_PRIVATE_LOGS_PER_CALL) + (LOG_HASH_LENGTH * MAX_CONTRACT_CLASS_LOGS_PER_CALL) - + 
HEADER_LENGTH + + BLOCK_HEADER_LENGTH + TX_CONTEXT_LENGTH; pub global PUBLIC_CIRCUIT_PUBLIC_INPUTS_LENGTH: u32 = CALL_CONTEXT_LENGTH + /*argsHash + returnsHash*/ 2 @@ -344,14 +344,14 @@ pub global PUBLIC_CIRCUIT_PUBLIC_INPUTS_LENGTH: u32 = CALL_CONTEXT_LENGTH + (L2_TO_L1_MESSAGE_LENGTH * MAX_L2_TO_L1_MSGS_PER_CALL) + 2 + (LOG_HASH_LENGTH * MAX_UNENCRYPTED_LOGS_PER_CALL) - + HEADER_LENGTH + + BLOCK_HEADER_LENGTH + GLOBAL_VARIABLES_LENGTH + AZTEC_ADDRESS_LENGTH + /* revert_code */ 1 + 2 * GAS_LENGTH + /* transaction_fee */ 1; pub global PRIVATE_CONTEXT_INPUTS_LENGTH: u32 = - CALL_CONTEXT_LENGTH + HEADER_LENGTH + TX_CONTEXT_LENGTH + 1; + CALL_CONTEXT_LENGTH + BLOCK_HEADER_LENGTH + TX_CONTEXT_LENGTH + 1; pub global FEE_RECIPIENT_LENGTH: u32 = 2; pub global AGGREGATION_OBJECT_LENGTH: u32 = 16; @@ -373,7 +373,7 @@ pub global COMBINED_ACCUMULATED_DATA_LENGTH: u32 = MAX_NOTE_HASHES_PER_TX + (SCOPED_LOG_HASH_LENGTH * MAX_CONTRACT_CLASS_LOGS_PER_TX) + 1 /* contract_class_log_preimages_length */ + (MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX * PUBLIC_DATA_WRITE_LENGTH); -pub global TX_CONSTANT_DATA_LENGTH: u32 = HEADER_LENGTH +pub global TX_CONSTANT_DATA_LENGTH: u32 = BLOCK_HEADER_LENGTH + TX_CONTEXT_LENGTH + 1 /* vk_tree_root */ + 1 /* protocol_contract_tree_root */; @@ -502,7 +502,7 @@ pub global AVM_VERIFICATION_KEY_LENGTH_IN_FIELDS: u32 = 2 + 21 * 4; // `AVM_PROOF_LENGTH_IN_FIELDS` must be updated when AVM circuit changes. // To determine latest value, hover `COMPUTED_AVM_PROOF_LENGTH_IN_FIELDS` // in barretenberg/cpp/src/barretenberg/vm/avm/generated/flavor.hpp -pub global AVM_PROOF_LENGTH_IN_FIELDS: u32 = 4166; +pub global AVM_PROOF_LENGTH_IN_FIELDS: u32 = 4161; pub global AVM_PUBLIC_COLUMN_MAX_SIZE: u32 = 1024; pub global AVM_PUBLIC_INPUTS_FLATTENED_SIZE: u32 = 2 * AVM_PUBLIC_COLUMN_MAX_SIZE + PUBLIC_CIRCUIT_PUBLIC_INPUTS_LENGTH; @@ -587,7 +587,6 @@ pub global MEM_TAG_U128: Field = 6; // Keep the number of offsets aligned with KERNEL_INPUTS_LENGTH defined in constants.hpp pub global SENDER_KERNEL_INPUTS_COL_OFFSET: u32 = 0; pub global ADDRESS_KERNEL_INPUTS_COL_OFFSET: u32 = 1; -pub global FUNCTION_SELECTOR_KERNEL_INPUTS_COL_OFFSET: u32 = 2; pub global IS_STATIC_CALL_KERNEL_INPUTS_COL_OFFSET: u32 = 3; // pub global Variables pub global CHAIN_ID_KERNEL_INPUTS_COL_OFFSET: u32 = 4; diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/lib.nr b/noir-projects/noir-protocol-circuits/crates/types/src/lib.nr index d5ead85a57f2..e4e638b113dd 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/lib.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/lib.nr @@ -17,7 +17,7 @@ pub mod traits; pub mod type_serialization; pub mod content_commitment; -pub mod header; +pub mod block_header; mod tests; diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/tests/fixture_builder.nr b/noir-projects/noir-protocol-circuits/crates/types/src/tests/fixture_builder.nr index 57330b0004cd..604dbc050d05 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/tests/fixture_builder.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/tests/fixture_builder.nr @@ -37,6 +37,7 @@ use crate::{ }, }, address::{AztecAddress, EthAddress, SaltedInitializationHash}, + block_header::BlockHeader, constants::{ CLIENT_IVC_VERIFICATION_KEY_LENGTH_IN_FIELDS, FUNCTION_TREE_HEIGHT, MAX_CONTRACT_CLASS_LOGS_PER_TX, MAX_ENQUEUED_CALLS_PER_TX, MAX_FIELD_VALUE, @@ -52,7 +53,6 @@ use crate::{ compute_l2_to_l1_hash, compute_siloed_nullifier, compute_siloed_private_log_field, 
silo_note_hash, }, - header::Header, merkle_tree::{membership::MembershipWitness, MerkleTree}, messaging::l2_to_l1_message::{L2ToL1Message, ScopedL2ToL1Message}, partial_state_reference::PartialStateReference, @@ -101,7 +101,7 @@ pub struct FixtureBuilder { pub end_gas_left: Gas, // Constant data. - pub historical_header: Header, + pub historical_header: BlockHeader, pub tx_context: TxContext, pub global_variables: GlobalVariables, @@ -1056,7 +1056,7 @@ impl Empty for FixtureBuilder { is_fee_payer: false, fee_payer: AztecAddress::zero(), public_teardown_call_request: PublicCallRequest::empty(), - historical_header: Header::empty(), + historical_header: BlockHeader::empty(), tx_context: TxContext::empty(), global_variables: GlobalVariables::empty(), note_hashes: BoundedVec::new(), diff --git a/noir-projects/scripts/generate_vk_json.js b/noir-projects/scripts/generate_vk_json.js index c891d1f7ca48..a2942e69cded 100644 --- a/noir-projects/scripts/generate_vk_json.js +++ b/noir-projects/scripts/generate_vk_json.js @@ -4,6 +4,7 @@ const child_process = require("child_process"); const crypto = require("crypto"); const megaHonkPatterns = require("../mega_honk_circuits.json"); +const ivcIntegrationPatterns = require("../ivc_integration_circuits.json"); const { readVKFromS3, writeVKToS3, @@ -32,13 +33,19 @@ async function getBytecodeHash(artifactPath) { return crypto.createHash("md5").update(bytecode).digest("hex"); } -async function getArtifactHash(artifactPath, isMegaHonk, isRecursive) { +async function getArtifactHash( + artifactPath, + isMegaHonk, + isIvcIntegration, + isRecursive +) { const bytecodeHash = await getBytecodeHash(artifactPath); const barretenbergHash = await getBarretenbergHash(); return generateArtifactHash( barretenbergHash, bytecodeHash, isMegaHonk, + isIvcIntegration, isRecursive ); } @@ -66,14 +73,21 @@ function isMegaHonkCircuit(artifactName) { artifactName.match(new RegExp(pattern)) ); } +function isIvcIntegrationCircuit(artifactName) { + return ivcIntegrationPatterns.some((pattern) => + artifactName.match(new RegExp(pattern)) + ); +} async function processArtifact(artifactPath, artifactName, outputFolder) { const isMegaHonk = isMegaHonkCircuit(artifactName); + const isIvcIntegration = isIvcIntegrationCircuit(artifactName); const isRecursive = true; const artifactHash = await getArtifactHash( artifactPath, isMegaHonk, + isIvcIntegration, isRecursive ); @@ -93,6 +107,7 @@ async function processArtifact(artifactPath, artifactName, outputFolder) { artifactPath, artifactHash, isMegaHonk, + isIvcIntegration, isRecursive ); await writeVKToS3(artifactName, artifactHash, JSON.stringify(vkData)); @@ -109,10 +124,13 @@ async function generateVKData( artifactPath, artifactHash, isMegaHonk, + isIvcIntegration, isRecursive ) { if (isMegaHonk) { console.log("Generating new mega honk vk for", artifactName); + } else if (isIvcIntegration) { + console.log("Generating new IVC vk for", artifactName); } else { console.log("Generating new vk for", artifactName); } @@ -123,16 +141,22 @@ async function generateVKData( ); const jsonVkPath = vkJsonFileNameForArtifactName(outputFolder, artifactName); - const writeVkCommand = `${BB_BIN_PATH} ${ - isMegaHonk ? 
"write_vk_mega_honk" : "write_vk_ultra_honk" - } -h -b "${artifactPath}" -o "${binaryVkPath}" ${ + function getVkCommand() { + if (isMegaHonk) return "write_vk_mega_honk"; + if (isIvcIntegration) return "write_vk_for_ivc"; + return "write_vk_ultra_honk"; + } + + const writeVkCommand = `${BB_BIN_PATH} ${getVkCommand()} -h -b "${artifactPath}" -o "${binaryVkPath}" ${ isRecursive ? "--recursive" : "" }`; console.log("WRITE VK CMD: ", writeVkCommand); const vkAsFieldsCommand = `${BB_BIN_PATH} ${ - isMegaHonk ? "vk_as_fields_mega_honk" : "vk_as_fields_ultra_honk" + isMegaHonk || isIvcIntegration + ? "vk_as_fields_mega_honk" + : "vk_as_fields_ultra_honk" } -k "${binaryVkPath}" -o "${jsonVkPath}"`; await new Promise((resolve, reject) => { diff --git a/noir/noir-repo/.github/workflows/formatting.yml b/noir/noir-repo/.github/workflows/formatting.yml index 08c02af519f6..ab92d452c79c 100644 --- a/noir/noir-repo/.github/workflows/formatting.yml +++ b/noir/noir-repo/.github/workflows/formatting.yml @@ -15,18 +15,11 @@ concurrency: jobs: clippy: name: cargo clippy - runs-on: ${{ matrix.runner }} + runs-on: ubuntu-latest timeout-minutes: 30 env: RUSTFLAGS: -Dwarnings - strategy: - fail-fast: false - matrix: - include: - - runner: ubuntu-latest - target: x86_64-unknown-linux-gnu - steps: - name: Checkout uses: actions/checkout@v4 @@ -34,18 +27,41 @@ jobs: - name: Setup toolchain uses: dtolnay/rust-toolchain@1.74.1 with: - targets: ${{ matrix.target }} + targets: x86_64-unknown-linux-gnu components: clippy, rustfmt - uses: Swatinem/rust-cache@v2 with: - key: ${{ matrix.target }} + key: x86_64-unknown-linux-gnu cache-on-failure: true save-if: ${{ github.event_name != 'merge_group' }} - name: Run `cargo clippy` run: cargo clippy --all-targets --workspace --locked --release + rustfmt: + name: cargo fmt + runs-on: ubuntu-latest + timeout-minutes: 30 + env: + RUSTFLAGS: -Dwarnings + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Setup toolchain + uses: dtolnay/rust-toolchain@1.74.1 + with: + targets: x86_64-unknown-linux-gnu + components: clippy, rustfmt + + - uses: Swatinem/rust-cache@v2 + with: + key: x86_64-unknown-linux-gnu + cache-on-failure: true + save-if: ${{ github.event_name != 'merge_group' }} + - name: Run `cargo fmt` run: cargo fmt --all --check @@ -88,7 +104,6 @@ jobs: run: | mkdir dist cp ./target/release/nargo ./dist/nargo - 7z a -ttar -so -an ./dist/* | 7z a -si ./nargo-x86_64-unknown-linux-gnu.tar.gz - name: Upload artifact uses: actions/upload-artifact@v4 diff --git a/noir/noir-repo/.github/workflows/gates_report.yml b/noir/noir-repo/.github/workflows/gates_report.yml deleted file mode 100644 index 0b0a527b69eb..000000000000 --- a/noir/noir-repo/.github/workflows/gates_report.yml +++ /dev/null @@ -1,94 +0,0 @@ -name: Report gates diff - -on: - push: - branches: - - master - pull_request: - -jobs: - build-nargo: - runs-on: ubuntu-latest - strategy: - matrix: - target: [x86_64-unknown-linux-gnu] - - steps: - - name: Checkout Noir repo - uses: actions/checkout@v4 - - - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.74.1 - - - uses: Swatinem/rust-cache@v2 - with: - key: ${{ matrix.target }} - cache-on-failure: true - save-if: ${{ github.event_name != 'merge_group' }} - - - name: Build Nargo - run: cargo build --package nargo_cli --release - - - name: Package artifacts - run: | - mkdir dist - cp ./target/release/nargo ./dist/nargo - 7z a -ttar -so -an ./dist/* | 7z a -si ./nargo-x86_64-unknown-linux-gnu.tar.gz - - - name: Upload artifact - uses: 
actions/upload-artifact@v4 - with: - name: nargo - path: ./dist/* - retention-days: 3 - - - compare_gates_reports: - needs: [build-nargo] - runs-on: ubuntu-latest - permissions: - pull-requests: write - - steps: - - uses: actions/checkout@v4 - - - name: Install `bb` - run: | - ./scripts/install_bb.sh - echo "$HOME/.bb/" >> $GITHUB_PATH - - - name: Download nargo binary - uses: actions/download-artifact@v4 - with: - name: nargo - path: ./nargo - - - name: Set nargo on PATH - run: | - nargo_binary="${{ github.workspace }}/nargo/nargo" - chmod +x $nargo_binary - echo "$(dirname $nargo_binary)" >> $GITHUB_PATH - export PATH="$PATH:$(dirname $nargo_binary)" - nargo -V - - - name: Generate gates report - working-directory: ./test_programs - run: | - ./rebuild.sh - ./gates_report.sh - mv gates_report.json ../gates_report.json - - - name: Compare gates reports - id: gates_diff - uses: noir-lang/noir-gates-diff@1931aaaa848a1a009363d6115293f7b7fc72bb87 - with: - report: gates_report.json - summaryQuantile: 0.9 # only display the 10% most significant circuit size diffs in the summary (defaults to 20%) - - - name: Add gates diff to sticky comment - if: github.event_name == 'pull_request' || github.event_name == 'pull_request_target' - uses: marocchino/sticky-pull-request-comment@v2 - with: - # delete the comment in case changes no longer impact circuit sizes - delete: ${{ !steps.gates_diff.outputs.markdown }} - message: ${{ steps.gates_diff.outputs.markdown }} diff --git a/noir/noir-repo/.github/workflows/gates_report_brillig.yml b/noir/noir-repo/.github/workflows/gates_report_brillig.yml deleted file mode 100644 index e7ec30923f02..000000000000 --- a/noir/noir-repo/.github/workflows/gates_report_brillig.yml +++ /dev/null @@ -1,92 +0,0 @@ -name: Report Brillig bytecode size diff - -on: - push: - branches: - - master - pull_request: - -jobs: - build-nargo: - runs-on: ubuntu-latest - strategy: - matrix: - target: [x86_64-unknown-linux-gnu] - - steps: - - name: Checkout Noir repo - uses: actions/checkout@v4 - - - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.74.1 - - - uses: Swatinem/rust-cache@v2 - with: - key: ${{ matrix.target }} - cache-on-failure: true - save-if: ${{ github.event_name != 'merge_group' }} - - - name: Build Nargo - run: cargo build --package nargo_cli --release - - - name: Package artifacts - run: | - mkdir dist - cp ./target/release/nargo ./dist/nargo - 7z a -ttar -so -an ./dist/* | 7z a -si ./nargo-x86_64-unknown-linux-gnu.tar.gz - - - name: Upload artifact - uses: actions/upload-artifact@v4 - with: - name: nargo - path: ./dist/* - retention-days: 3 - - compare_brillig_bytecode_size_reports: - needs: [build-nargo] - runs-on: ubuntu-latest - permissions: - pull-requests: write - - steps: - - uses: actions/checkout@v4 - - - name: Download nargo binary - uses: actions/download-artifact@v4 - with: - name: nargo - path: ./nargo - - - name: Set nargo on PATH - run: | - nargo_binary="${{ github.workspace }}/nargo/nargo" - chmod +x $nargo_binary - echo "$(dirname $nargo_binary)" >> $GITHUB_PATH - export PATH="$PATH:$(dirname $nargo_binary)" - nargo -V - - - name: Generate Brillig bytecode size report - working-directory: ./test_programs - run: | - chmod +x gates_report_brillig.sh - ./gates_report_brillig.sh - mv gates_report_brillig.json ../gates_report_brillig.json - - - name: Compare Brillig bytecode size reports - id: brillig_bytecode_diff - uses: noir-lang/noir-gates-diff@d88f7523b013b9edd3f31c5cfddaef87a3fe1b48 - with: - report: gates_report_brillig.json - header: | - # 
Changes to Brillig bytecode sizes - brillig_report: true - summaryQuantile: 0.9 # only display the 10% most significant bytecode size diffs in the summary (defaults to 20%) - - - name: Add bytecode size diff to sticky comment - if: github.event_name == 'pull_request' || github.event_name == 'pull_request_target' - uses: marocchino/sticky-pull-request-comment@v2 - with: - header: brillig - # delete the comment in case changes no longer impact brillig bytecode sizes - delete: ${{ !steps.brillig_bytecode_diff.outputs.markdown }} - message: ${{ steps.brillig_bytecode_diff.outputs.markdown }} \ No newline at end of file diff --git a/noir/noir-repo/.github/workflows/gates_report_brillig_execution.yml b/noir/noir-repo/.github/workflows/gates_report_brillig_execution.yml deleted file mode 100644 index 0ef98f5045b4..000000000000 --- a/noir/noir-repo/.github/workflows/gates_report_brillig_execution.yml +++ /dev/null @@ -1,92 +0,0 @@ -name: Report Brillig opcodes executed diff - -on: - push: - branches: - - master - pull_request: - -jobs: - build-nargo: - runs-on: ubuntu-latest - strategy: - matrix: - target: [x86_64-unknown-linux-gnu] - - steps: - - name: Checkout Noir repo - uses: actions/checkout@v4 - - - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.74.1 - - - uses: Swatinem/rust-cache@v2 - with: - key: ${{ matrix.target }} - cache-on-failure: true - save-if: ${{ github.event_name != 'merge_group' }} - - - name: Build Nargo - run: cargo build --package nargo_cli --release - - - name: Package artifacts - run: | - mkdir dist - cp ./target/release/nargo ./dist/nargo - 7z a -ttar -so -an ./dist/* | 7z a -si ./nargo-x86_64-unknown-linux-gnu.tar.gz - - - name: Upload artifact - uses: actions/upload-artifact@v4 - with: - name: nargo - path: ./dist/* - retention-days: 3 - - compare_brillig_execution_reports: - needs: [build-nargo] - runs-on: ubuntu-latest - permissions: - pull-requests: write - - steps: - - uses: actions/checkout@v4 - - - name: Download nargo binary - uses: actions/download-artifact@v4 - with: - name: nargo - path: ./nargo - - - name: Set nargo on PATH - run: | - nargo_binary="${{ github.workspace }}/nargo/nargo" - chmod +x $nargo_binary - echo "$(dirname $nargo_binary)" >> $GITHUB_PATH - export PATH="$PATH:$(dirname $nargo_binary)" - nargo -V - - - name: Generate Brillig execution report - working-directory: ./test_programs - run: | - chmod +x gates_report_brillig_execution.sh - ./gates_report_brillig_execution.sh - mv gates_report_brillig_execution.json ../gates_report_brillig_execution.json - - - name: Compare Brillig execution reports - id: brillig_execution_diff - uses: noir-lang/noir-gates-diff@d88f7523b013b9edd3f31c5cfddaef87a3fe1b48 - with: - report: gates_report_brillig_execution.json - header: | - # Changes to number of Brillig opcodes executed - brillig_report: true - summaryQuantile: 0.9 # only display the 10% most significant bytecode size diffs in the summary (defaults to 20%) - - - name: Add bytecode size diff to sticky comment - if: github.event_name == 'pull_request' || github.event_name == 'pull_request_target' - uses: marocchino/sticky-pull-request-comment@v2 - with: - header: brillig_execution - # delete the comment in case changes no longer impact brillig bytecode sizes - delete: ${{ !steps.brillig_execution_diff.outputs.markdown }} - message: ${{ steps.brillig_execution_diff.outputs.markdown }} \ No newline at end of file diff --git a/noir/noir-repo/.github/workflows/lockfile.yml b/noir/noir-repo/.github/workflows/lockfile.yml deleted file mode 100644 index 
190e01745af3..000000000000 --- a/noir/noir-repo/.github/workflows/lockfile.yml +++ /dev/null @@ -1,23 +0,0 @@ -name: Lockfile check - -on: - pull_request: - -# This will cancel previous runs when a branch or PR is updated -concurrency: - group: ${{ github.workflow }}-${{ github.head_ref || github.ref || github.run_id }} - cancel-in-progress: true - -jobs: - yarn-lock: - runs-on: ubuntu-latest - timeout-minutes: 30 - - steps: - - name: Checkout - uses: actions/checkout@v4 - - # Errors if installation would result in modifications to yarn.lock - - name: Install - run: yarn --immutable - shell: bash diff --git a/noir/noir-repo/.github/workflows/release.yml b/noir/noir-repo/.github/workflows/release.yml index 7e0909224e54..59c3d9a14152 100644 --- a/noir/noir-repo/.github/workflows/release.yml +++ b/noir/noir-repo/.github/workflows/release.yml @@ -15,7 +15,7 @@ jobs: steps: - name: Run release-please id: release - uses: google-github-actions/release-please-action@v4 + uses: googleapis/release-please-action@v4 with: token: ${{ secrets.NOIR_RELEASES_TOKEN }} diff --git a/noir/noir-repo/.github/workflows/reports.yml b/noir/noir-repo/.github/workflows/reports.yml new file mode 100644 index 000000000000..8f8aeabb65ed --- /dev/null +++ b/noir/noir-repo/.github/workflows/reports.yml @@ -0,0 +1,235 @@ +name: Reports + +on: + push: + branches: + - master + pull_request: + +jobs: + build-nargo: + runs-on: ubuntu-latest + + steps: + - name: Checkout Noir repo + uses: actions/checkout@v4 + + - name: Setup toolchain + uses: dtolnay/rust-toolchain@1.74.1 + + - uses: Swatinem/rust-cache@v2 + with: + key: x86_64-unknown-linux-gnu + cache-on-failure: true + save-if: ${{ github.event_name != 'merge_group' }} + + - name: Build Nargo + run: cargo build --package nargo_cli --release + + - name: Package artifacts + run: | + mkdir dist + cp ./target/release/nargo ./dist/nargo + + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + name: nargo + path: ./dist/* + retention-days: 3 + + + compare_gates_reports: + name: Circuit sizes + needs: [build-nargo] + runs-on: ubuntu-latest + permissions: + pull-requests: write + + steps: + - uses: actions/checkout@v4 + + - name: Install `bb` + run: | + ./scripts/install_bb.sh + echo "$HOME/.bb/" >> $GITHUB_PATH + + - name: Download nargo binary + uses: actions/download-artifact@v4 + with: + name: nargo + path: ./nargo + + - name: Set nargo on PATH + run: | + nargo_binary="${{ github.workspace }}/nargo/nargo" + chmod +x $nargo_binary + echo "$(dirname $nargo_binary)" >> $GITHUB_PATH + export PATH="$PATH:$(dirname $nargo_binary)" + nargo -V + + - name: Generate gates report + working-directory: ./test_programs + run: | + ./rebuild.sh + ./gates_report.sh + mv gates_report.json ../gates_report.json + + - name: Compare gates reports + id: gates_diff + uses: noir-lang/noir-gates-diff@1931aaaa848a1a009363d6115293f7b7fc72bb87 + with: + report: gates_report.json + summaryQuantile: 0.9 # only display the 10% most significant circuit size diffs in the summary (defaults to 20%) + + - name: Add gates diff to sticky comment + if: github.event_name == 'pull_request' || github.event_name == 'pull_request_target' + uses: marocchino/sticky-pull-request-comment@v2 + with: + # delete the comment in case changes no longer impact circuit sizes + delete: ${{ !steps.gates_diff.outputs.markdown }} + message: ${{ steps.gates_diff.outputs.markdown }} + + compare_brillig_bytecode_size_reports: + name: Brillig bytecode sizes + needs: [build-nargo] + runs-on: ubuntu-latest + permissions: + 
pull-requests: write + + steps: + - uses: actions/checkout@v4 + + - name: Download nargo binary + uses: actions/download-artifact@v4 + with: + name: nargo + path: ./nargo + + - name: Set nargo on PATH + run: | + nargo_binary="${{ github.workspace }}/nargo/nargo" + chmod +x $nargo_binary + echo "$(dirname $nargo_binary)" >> $GITHUB_PATH + export PATH="$PATH:$(dirname $nargo_binary)" + nargo -V + + - name: Generate Brillig bytecode size report + working-directory: ./test_programs + run: | + ./gates_report_brillig.sh + mv gates_report_brillig.json ../gates_report_brillig.json + + - name: Compare Brillig bytecode size reports + id: brillig_bytecode_diff + uses: noir-lang/noir-gates-diff@d88f7523b013b9edd3f31c5cfddaef87a3fe1b48 + with: + report: gates_report_brillig.json + header: | + # Changes to Brillig bytecode sizes + brillig_report: true + summaryQuantile: 0.9 # only display the 10% most significant bytecode size diffs in the summary (defaults to 20%) + + - name: Add bytecode size diff to sticky comment + if: github.event_name == 'pull_request' || github.event_name == 'pull_request_target' + uses: marocchino/sticky-pull-request-comment@v2 + with: + header: brillig + # delete the comment in case changes no longer impact brillig bytecode sizes + delete: ${{ !steps.brillig_bytecode_diff.outputs.markdown }} + message: ${{ steps.brillig_bytecode_diff.outputs.markdown }} + + compare_brillig_execution_reports: + name: Brillig execution trace sizes + needs: [build-nargo] + runs-on: ubuntu-latest + permissions: + pull-requests: write + + steps: + - uses: actions/checkout@v4 + + - name: Download nargo binary + uses: actions/download-artifact@v4 + with: + name: nargo + path: ./nargo + + - name: Set nargo on PATH + run: | + nargo_binary="${{ github.workspace }}/nargo/nargo" + chmod +x $nargo_binary + echo "$(dirname $nargo_binary)" >> $GITHUB_PATH + export PATH="$PATH:$(dirname $nargo_binary)" + nargo -V + + - name: Generate Brillig execution report + working-directory: ./test_programs + run: | + ./gates_report_brillig_execution.sh + mv gates_report_brillig_execution.json ../gates_report_brillig_execution.json + + - name: Compare Brillig execution reports + id: brillig_execution_diff + uses: noir-lang/noir-gates-diff@d88f7523b013b9edd3f31c5cfddaef87a3fe1b48 + with: + report: gates_report_brillig_execution.json + header: | + # Changes to number of Brillig opcodes executed + brillig_report: true + summaryQuantile: 0.9 # only display the 10% most significant bytecode size diffs in the summary (defaults to 20%) + + - name: Add bytecode size diff to sticky comment + if: github.event_name == 'pull_request' || github.event_name == 'pull_request_target' + uses: marocchino/sticky-pull-request-comment@v2 + with: + header: brillig_execution + # delete the comment in case changes no longer impact brillig bytecode sizes + delete: ${{ !steps.brillig_execution_diff.outputs.markdown }} + message: ${{ steps.brillig_execution_diff.outputs.markdown }} + + generate_memory_report: + name: Peak memory usage + needs: [build-nargo] + runs-on: ubuntu-latest + permissions: + pull-requests: write + + steps: + - uses: actions/checkout@v4 + + - name: Download nargo binary + uses: actions/download-artifact@v4 + with: + name: nargo + path: ./nargo + + - name: Set nargo on PATH + run: | + nargo_binary="${{ github.workspace }}/nargo/nargo" + chmod +x $nargo_binary + echo "$(dirname $nargo_binary)" >> $GITHUB_PATH + export PATH="$PATH:$(dirname $nargo_binary)" + nargo -V + + - name: Generate Memory report + working-directory: 
./test_programs + run: | + ./memory_report.sh + mv memory_report.json ../memory_report.json + + - name: Parse memory report + id: memory_report + uses: noir-lang/noir-bench-report@ccb0d806a91d3bd86dba0ba3d580a814eed5673c + with: + report: memory_report.json + header: | + # Memory Report + memory_report: true + + - name: Add memory report to sticky comment + if: github.event_name == 'pull_request' || github.event_name == 'pull_request_target' + uses: marocchino/sticky-pull-request-comment@v2 + with: + header: memory + message: ${{ steps.memory_report.outputs.markdown }} diff --git a/noir/noir-repo/.github/workflows/test-js-packages.yml b/noir/noir-repo/.github/workflows/test-js-packages.yml index 4a5d0b8179bb..36ece11b1bfe 100644 --- a/noir/noir-repo/.github/workflows/test-js-packages.yml +++ b/noir/noir-repo/.github/workflows/test-js-packages.yml @@ -13,6 +13,19 @@ concurrency: cancel-in-progress: true jobs: + yarn-lock: + runs-on: ubuntu-latest + timeout-minutes: 30 + + steps: + - name: Checkout + uses: actions/checkout@v4 + + # Errors if installation would result in modifications to yarn.lock + - name: Install + run: yarn --immutable + shell: bash + build-nargo: runs-on: ubuntu-22.04 timeout-minutes: 30 @@ -78,7 +91,6 @@ jobs: ./tooling/noirc_abi_wasm/web retention-days: 10 - build-noir-wasm: runs-on: ubuntu-latest timeout-minutes: 30 @@ -509,8 +521,27 @@ jobs: working-directory: ./examples/codegen_verifier run: ./test.sh + critical-library-list: + name: Load critical library list + runs-on: ubuntu-latest + outputs: + libraries: ${{ steps.get_critical_libraries.outputs.libraries }} + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Build list of libraries + id: get_critical_libraries + run: | + LIBRARIES=$(grep -Po "^https://github.com/\K.+" ./CRITICAL_NOIR_LIBRARIES | jq -R -s -c 'split("\n") | map(select(. 
!= "")) | map({ repo: ., path: "./"})') + echo "libraries=$LIBRARIES" + echo "libraries=$LIBRARIES" >> $GITHUB_OUTPUT + env: + GH_TOKEN: ${{ github.token }} + external-repo-checks: - needs: [build-nargo] + needs: [build-nargo, critical-library-list] runs-on: ubuntu-latest # Only run when 'run-external-checks' label is present if: contains(github.event.pull_request.labels.*.name, 'run-external-checks') @@ -518,13 +549,16 @@ jobs: strategy: fail-fast: false matrix: - project: - # Disabled as these are currently failing with many visibility errors - - { repo: AztecProtocol/aztec-nr, path: ./ } - - { repo: AztecProtocol/aztec-packages, path: ./noir-projects/noir-contracts } - # Disabled as aztec-packages requires a setup-step in order to generate a `Nargo.toml` - #- { repo: AztecProtocol/aztec-packages, path: ./noir-projects/noir-protocol-circuits } - - { repo: noir-lang/noir-edwards, path: ./, ref: 3188ea74fe3b059219a2ea87899589c266256d74 } + project: ${{ fromJson( needs.critical-library-list.outputs.libraries )}} + include: + - project: { repo: AztecProtocol/aztec-packages, path: ./noir-projects/aztec-nr } + - project: { repo: AztecProtocol/aztec-packages, path: ./noir-projects/noir-contracts } + - project: { repo: AztecProtocol/aztec-packages, path: ./noir-projects/noir-protocol-circuits/crates/parity-lib } + - project: { repo: AztecProtocol/aztec-packages, path: ./noir-projects/noir-protocol-circuits/crates/private-kernel-lib } + - project: { repo: AztecProtocol/aztec-packages, path: ./noir-projects/noir-protocol-circuits/crates/reset-kernel-lib } + - project: { repo: AztecProtocol/aztec-packages, path: ./noir-projects/noir-protocol-circuits/crates/rollup-lib } + - project: { repo: AztecProtocol/aztec-packages, path: ./noir-projects/noir-protocol-circuits/crates/types } + name: Check external repo - ${{ matrix.project.repo }} steps: - name: Checkout @@ -554,9 +588,12 @@ jobs: # Github actions seems to not expand "**" in globs by default. shopt -s globstar sed -i '/^compiler_version/d' ./**/Nargo.toml - - name: Run nargo check + + - name: Run nargo test working-directory: ./test-repo/${{ matrix.project.path }} - run: nargo check + run: nargo test --silence-warnings + env: + NARGO_IGNORE_TEST_FAILURES_FROM_FOREIGN_CALLS: true # This is a job which depends on all test jobs and reports the overall status. # This allows us to add/remove test jobs without having to update the required workflows. @@ -566,6 +603,7 @@ jobs: # We want this job to always run (even if the dependant jobs fail) as we want this job to fail rather than skipping. 
if: ${{ always() }} needs: + - yarn-lock - test-acvm_js-node - test-acvm_js-browser - test-noirc-abi diff --git a/noir/noir-repo/CRITICAL_NOIR_LIBRARIES b/noir/noir-repo/CRITICAL_NOIR_LIBRARIES new file mode 100644 index 000000000000..c753b76a4fcc --- /dev/null +++ b/noir/noir-repo/CRITICAL_NOIR_LIBRARIES @@ -0,0 +1,13 @@ +https://github.com/noir-lang/ec +https://github.com/noir-lang/eddsa +https://github.com/noir-lang/mimc +https://github.com/noir-lang/schnorr +https://github.com/noir-lang/noir_sort +https://github.com/noir-lang/noir-edwards +https://github.com/noir-lang/noir-bignum +https://github.com/noir-lang/noir_bigcurve +https://github.com/noir-lang/noir_base64 +https://github.com/noir-lang/noir_string_search +https://github.com/noir-lang/sparse_array +https://github.com/noir-lang/noir_rsa +https://github.com/noir-lang/noir_json_parser diff --git a/noir/noir-repo/Cargo.lock b/noir/noir-repo/Cargo.lock index 94a84b89d055..e8226d5fc580 100644 --- a/noir/noir-repo/Cargo.lock +++ b/noir/noir-repo/Cargo.lock @@ -13,7 +13,7 @@ dependencies = [ "criterion", "flate2", "fxhash", - "pprof 0.13.0", + "pprof", "serde", "serde-big-array", "serde-generate", @@ -158,6 +158,15 @@ dependencies = [ "memchr", ] +[[package]] +name = "aligned-vec" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e0966165eaf052580bd70eb1b32cb3d6245774c0104d1b2793e9650bf83b52a" +dependencies = [ + "equator", +] + [[package]] name = "android-tzdata" version = "0.1.1" @@ -604,7 +613,7 @@ dependencies = [ "lazy_static", "noir_grumpkin", "num-bigint", - "pprof 0.12.1", + "pprof", ] [[package]] @@ -1417,6 +1426,26 @@ dependencies = [ "log", ] +[[package]] +name = "equator" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c35da53b5a021d2484a7cc49b2ac7f2d840f8236a286f84202369bd338d761ea" +dependencies = [ + "equator-macro", +] + +[[package]] +name = "equator-macro" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3bf679796c0322556351f287a51b49e48f7c4986e727b5dd78c972d30e2e16cc" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.87", +] + [[package]] name = "equivalent" version = "1.0.1" @@ -2789,6 +2818,7 @@ dependencies = [ "dirs", "file-lock", "fm", + "fxhash", "iai", "iter-extended", "lazy_static", @@ -2806,7 +2836,7 @@ dependencies = [ "notify", "notify-debouncer-full", "paste", - "pprof 0.13.0", + "pprof", "predicates 2.1.5", "prettytable-rs", "proptest", @@ -2852,6 +2882,7 @@ dependencies = [ "noirc_frontend", "semver", "serde", + "test-case", "thiserror", "toml 0.7.8", "url 2.5.3", @@ -3151,6 +3182,7 @@ dependencies = [ "serde_json", "serde_with", "similar-asserts", + "test-case", "thiserror", "tracing", ] @@ -3175,7 +3207,6 @@ dependencies = [ "proptest", "proptest-derive 0.5.0", "rangemap", - "regex", "rustc-hash", "serde", "serde_json", @@ -3194,7 +3225,6 @@ dependencies = [ "acvm", "iter-extended", "jsonrpc", - "regex", "serde", "serde_json", "thiserror", @@ -3577,32 +3607,11 @@ checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" [[package]] name = "pprof" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "978385d59daf9269189d052ca8a84c1acfd0715c0599a5d5188d4acc078ca46a" -dependencies = [ - "backtrace", - "cfg-if 1.0.0", - "criterion", - "findshlibs", - 
"inferno", - "libc", - "log", - "nix 0.26.4", - "once_cell", - "parking_lot 0.12.3", - "smallvec", - "symbolic-demangle", - "tempfile", - "thiserror", -] - -[[package]] -name = "pprof" -version = "0.13.0" +version = "0.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef5c97c51bd34c7e742402e216abdeb44d415fbe6ae41d56b114723e953711cb" +checksum = "ebbe2f8898beba44815fdc9e5a4ae9c929e21c5dc29b0c774a15555f7f58d6d0" dependencies = [ + "aligned-vec", "backtrace", "cfg-if 1.0.0", "criterion", diff --git a/noir/noir-repo/Cargo.toml b/noir/noir-repo/Cargo.toml index 94ebe54fde17..4ce0ddd999f0 100644 --- a/noir/noir-repo/Cargo.toml +++ b/noir/noir-repo/Cargo.toml @@ -126,7 +126,7 @@ codespan-reporting = "0.11.1" criterion = "0.5.0" # Note that using the "frame-pointer" feature breaks framegraphs on linux # https://github.com/tikv/pprof-rs/pull/172 -pprof = { version = "0.13", features = ["flamegraph", "criterion"] } +pprof = { version = "0.14", features = ["flamegraph", "criterion"] } cfg-if = "1.0.0" dirs = "4" diff --git a/noir/noir-repo/acvm-repo/acir/src/circuit/brillig.rs b/noir/noir-repo/acvm-repo/acir/src/circuit/brillig.rs index a9714ce29b25..ef75d088f8c4 100644 --- a/noir/noir-repo/acvm-repo/acir/src/circuit/brillig.rs +++ b/noir/noir-repo/acvm-repo/acir/src/circuit/brillig.rs @@ -5,7 +5,7 @@ use serde::{Deserialize, Serialize}; /// Inputs for the Brillig VM. These are the initial inputs /// that the Brillig VM will use to start. -#[derive(Clone, PartialEq, Eq, Serialize, Deserialize, Debug)] +#[derive(Clone, PartialEq, Eq, Serialize, Deserialize, Debug, Hash)] pub enum BrilligInputs { Single(Expression), Array(Vec>), @@ -14,7 +14,7 @@ pub enum BrilligInputs { /// Outputs for the Brillig VM. Once the VM has completed /// execution, this will be the object that is returned. -#[derive(Clone, PartialEq, Eq, Serialize, Deserialize, Debug)] +#[derive(Clone, PartialEq, Eq, Serialize, Deserialize, Debug, Hash)] pub enum BrilligOutputs { Simple(Witness), Array(Vec), @@ -23,7 +23,7 @@ pub enum BrilligOutputs { /// This is purely a wrapper struct around a list of Brillig opcode's which represents /// a full Brillig function to be executed by the Brillig VM. /// This is stored separately on a program and accessed through a [BrilligPointer]. -#[derive(Clone, PartialEq, Eq, Serialize, Deserialize, Default, Debug)] +#[derive(Clone, PartialEq, Eq, Serialize, Deserialize, Default, Debug, Hash)] pub struct BrilligBytecode { pub bytecode: Vec>, } diff --git a/noir/noir-repo/acvm-repo/acir/src/circuit/mod.rs b/noir/noir-repo/acvm-repo/acir/src/circuit/mod.rs index 6282a33af6b4..4ff581bf17ab 100644 --- a/noir/noir-repo/acvm-repo/acir/src/circuit/mod.rs +++ b/noir/noir-repo/acvm-repo/acir/src/circuit/mod.rs @@ -25,7 +25,7 @@ use self::{brillig::BrilligBytecode, opcodes::BlockId}; /// Bounded Expressions are useful if you are eventually going to pass the ACIR /// into a proving system which supports PLONK, where arithmetic expressions have a /// finite fan-in. -#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Default)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Default, Hash)] pub enum ExpressionWidth { #[default] Unbounded, @@ -36,13 +36,13 @@ pub enum ExpressionWidth { /// A program represented by multiple ACIR circuits. The execution trace of these /// circuits is dictated by construction of the [crate::native_types::WitnessStack]. 
-#[derive(Clone, PartialEq, Eq, Serialize, Deserialize, Default)] +#[derive(Clone, PartialEq, Eq, Serialize, Deserialize, Default, Hash)] pub struct Program { pub functions: Vec>, pub unconstrained_functions: Vec>, } -#[derive(Clone, PartialEq, Eq, Serialize, Deserialize, Default)] +#[derive(Clone, PartialEq, Eq, Serialize, Deserialize, Default, Hash)] pub struct Circuit { // current_witness_index is the highest witness index in the circuit. The next witness to be added to this circuit // will take on this value. (The value is cached here as an optimization.) @@ -69,13 +69,13 @@ pub struct Circuit { pub assert_messages: Vec<(OpcodeLocation, AssertionPayload)>, } -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Hash)] pub enum ExpressionOrMemory { Expression(Expression), Memory(BlockId), } -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Hash)] pub struct AssertionPayload { pub error_selector: u64, pub payload: Vec>, @@ -355,7 +355,7 @@ impl std::fmt::Debug for Program { } } -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, Default)] +#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, Default, Hash)] pub struct PublicInputs(pub BTreeSet); impl PublicInputs { diff --git a/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes.rs b/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes.rs index 06effd3c5b64..f47c40b0dd79 100644 --- a/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes.rs +++ b/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes.rs @@ -15,7 +15,7 @@ pub use black_box_function_call::{ }; pub use memory_operation::{BlockId, MemOp}; -#[derive(Clone, PartialEq, Eq, Serialize, Deserialize)] +#[derive(Clone, PartialEq, Eq, Serialize, Deserialize, Hash)] pub enum BlockType { Memory, CallData(u32), @@ -29,7 +29,7 @@ impl BlockType { } #[allow(clippy::large_enum_variant)] -#[derive(Clone, PartialEq, Eq, Serialize, Deserialize)] +#[derive(Clone, PartialEq, Eq, Serialize, Deserialize, Hash)] pub enum Opcode { /// An `AssertZero` opcode adds the constraint that `P(w) = 0`, where /// `w=(w_1,..w_n)` is a tuple of `n` witnesses, and `P` is a multi-variate diff --git a/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs b/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs index dfdf96163062..9cf31e94eb49 100644 --- a/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs +++ b/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs @@ -9,13 +9,13 @@ use thiserror::Error; // Note: Some functions will not use all of the witness // So we need to supply how many bits of the witness is needed -#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)] +#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize, Hash)] pub enum ConstantOrWitnessEnum { Constant(F), Witness(Witness), } -#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)] +#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize, Hash)] pub struct FunctionInput { input: ConstantOrWitnessEnum, num_bits: u32, @@ -79,7 +79,7 @@ impl std::fmt::Display for FunctionInput { } } -#[derive(Clone, PartialEq, Eq, Serialize, Deserialize)] +#[derive(Clone, PartialEq, Eq, Serialize, Deserialize, Hash)] pub enum BlackBoxFuncCall { AES128Encrypt { inputs: Vec>, diff --git a/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes/memory_operation.rs 
b/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes/memory_operation.rs index 90e3ee0563ad..c9a789832047 100644 --- a/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes/memory_operation.rs +++ b/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes/memory_operation.rs @@ -7,7 +7,7 @@ pub struct BlockId(pub u32); /// Operation on a block of memory /// We can either write or read at an index in memory -#[derive(Clone, PartialEq, Eq, Serialize, Deserialize, Debug)] +#[derive(Clone, PartialEq, Eq, Serialize, Deserialize, Debug, Hash)] pub struct MemOp { /// A constant expression that can be 0 (read) or 1 (write) pub operation: Expression, diff --git a/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/merge_expressions.rs b/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/merge_expressions.rs index 0a55e4ca17c8..f49cd61e813c 100644 --- a/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/merge_expressions.rs +++ b/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/merge_expressions.rs @@ -12,26 +12,36 @@ use acir::{ use crate::compiler::CircuitSimulator; -pub(crate) struct MergeExpressionsOptimizer { +pub(crate) struct MergeExpressionsOptimizer { resolved_blocks: HashMap>, + modified_gates: HashMap>, + deleted_gates: BTreeSet, } -impl MergeExpressionsOptimizer { +impl MergeExpressionsOptimizer { pub(crate) fn new() -> Self { - MergeExpressionsOptimizer { resolved_blocks: HashMap::new() } + MergeExpressionsOptimizer { + resolved_blocks: HashMap::new(), + modified_gates: HashMap::new(), + deleted_gates: BTreeSet::new(), + } } /// This pass analyzes the circuit and identifies intermediate variables that are /// only used in two gates. It then merges the gate that produces the /// intermediate variable into the second one that uses it /// Note: This pass is only relevant for backends that can handle unlimited width - pub(crate) fn eliminate_intermediate_variable( + pub(crate) fn eliminate_intermediate_variable( &mut self, circuit: &Circuit, acir_opcode_positions: Vec, ) -> (Vec>, Vec) { + // Initialization + self.modified_gates.clear(); + self.deleted_gates.clear(); + self.resolved_blocks.clear(); + // Keep track, for each witness, of the gates that use it let circuit_inputs = circuit.circuit_arguments(); - self.resolved_blocks = HashMap::new(); let mut used_witness: BTreeMap> = BTreeMap::new(); for (i, opcode) in circuit.opcodes.iter().enumerate() { let witnesses = self.witness_inputs(opcode); @@ -46,80 +56,89 @@ impl MergeExpressionsOptimizer { } } - let mut modified_gates: HashMap> = HashMap::new(); - let mut new_circuit = Vec::new(); - let mut new_acir_opcode_positions = Vec::new(); // For each opcode, try to get a target opcode to merge with - for (i, (opcode, opcode_position)) in - circuit.opcodes.iter().zip(acir_opcode_positions).enumerate() - { + for (i, opcode) in circuit.opcodes.iter().enumerate() { if !matches!(opcode, Opcode::AssertZero(_)) { - new_circuit.push(opcode.clone()); - new_acir_opcode_positions.push(opcode_position); continue; } - let opcode = modified_gates.get(&i).unwrap_or(opcode).clone(); - let mut to_keep = true; - let input_witnesses = self.witness_inputs(&opcode); - for w in input_witnesses { - let Some(gates_using_w) = used_witness.get(&w) else { - continue; - }; - // We only consider witness which are used in exactly two arithmetic gates - if gates_using_w.len() == 2 { - let first = *gates_using_w.first().expect("gates_using_w.len == 2"); - let second = *gates_using_w.last().expect("gates_using_w.len == 2"); - let b = if second == i { - first - } else { - 
// sanity check - assert!(i == first); - second + if let Some(opcode) = self.get_opcode(i, circuit) { + let input_witnesses = self.witness_inputs(&opcode); + for w in input_witnesses { + let Some(gates_using_w) = used_witness.get(&w) else { + continue; }; - - let second_gate = modified_gates.get(&b).unwrap_or(&circuit.opcodes[b]); - if let (Opcode::AssertZero(expr_define), Opcode::AssertZero(expr_use)) = - (&opcode, second_gate) - { - // We cannot merge an expression into an earlier opcode, because this - // would break the 'execution ordering' of the opcodes - // This case can happen because a previous merge would change an opcode - // and eliminate a witness from it, giving new opportunities for this - // witness to be used in only two expressions - // TODO: the missed optimization for the i>b case can be handled by - // - doing this pass again until there is no change, or - // - merging 'b' into 'i' instead - if i < b { - if let Some(expr) = Self::merge(expr_use, expr_define, w) { - modified_gates.insert(b, Opcode::AssertZero(expr)); - to_keep = false; - // Update the 'used_witness' map to account for the merge. - for w2 in CircuitSimulator::expr_wit(expr_define) { - if !circuit_inputs.contains(&w2) { - let v = used_witness.entry(w2).or_default(); - v.insert(b); - v.remove(&i); + // We only consider witness which are used in exactly two arithmetic gates + if gates_using_w.len() == 2 { + let first = *gates_using_w.first().expect("gates_using_w.len == 2"); + let second = *gates_using_w.last().expect("gates_using_w.len == 2"); + let b = if second == i { + first + } else { + // sanity check + assert!(i == first); + second + }; + // Merge the opcode with smaller index into the other one + // by updating modified_gates/deleted_gates/used_witness + // returns false if it could not merge them + let mut merge_opcodes = |op1, op2| -> bool { + if op1 == op2 { + return false; + } + let (source, target) = if op1 < op2 { (op1, op2) } else { (op2, op1) }; + let source_opcode = self.get_opcode(source, circuit); + let target_opcode = self.get_opcode(target, circuit); + if let ( + Some(Opcode::AssertZero(expr_use)), + Some(Opcode::AssertZero(expr_define)), + ) = (target_opcode, source_opcode) + { + if let Some(expr) = + Self::merge_expression(&expr_use, &expr_define, w) + { + self.modified_gates.insert(target, Opcode::AssertZero(expr)); + self.deleted_gates.insert(source); + // Update the 'used_witness' map to account for the merge. + let mut witness_list = CircuitSimulator::expr_wit(&expr_use); + witness_list.extend(CircuitSimulator::expr_wit(&expr_define)); + for w2 in witness_list { + if !circuit_inputs.contains(&w2) { + used_witness.entry(w2).and_modify(|v| { + v.insert(target); + v.remove(&source); + }); + } } + return true; } - // We need to stop here and continue with the next opcode - // because the merge invalidates the current opcode. - break; } + false + }; + + if merge_opcodes(b, i) { + // We need to stop here and continue with the next opcode + // because the merge invalidates the current opcode. 
+ break; } } } } + } + + // Construct the new circuit from modified/deleted gates + let mut new_circuit = Vec::new(); + let mut new_acir_opcode_positions = Vec::new(); - if to_keep { - let opcode = modified_gates.get(&i).cloned().unwrap_or(opcode); - new_circuit.push(opcode); - new_acir_opcode_positions.push(opcode_position); + for (i, opcode_position) in acir_opcode_positions.iter().enumerate() { + if let Some(op) = self.get_opcode(i, circuit) { + new_circuit.push(op); + new_acir_opcode_positions.push(*opcode_position); } } (new_circuit, new_acir_opcode_positions) } - fn brillig_input_wit(&self, input: &BrilligInputs) -> BTreeSet { + fn brillig_input_wit(&self, input: &BrilligInputs) -> BTreeSet { let mut result = BTreeSet::new(); match input { BrilligInputs::Single(expr) => { @@ -152,7 +171,7 @@ impl MergeExpressionsOptimizer { } // Returns the input witnesses used by the opcode - fn witness_inputs(&self, opcode: &Opcode) -> BTreeSet { + fn witness_inputs(&self, opcode: &Opcode) -> BTreeSet { match opcode { Opcode::AssertZero(expr) => CircuitSimulator::expr_wit(expr), Opcode::BlackBoxFuncCall(bb_func) => { @@ -198,7 +217,7 @@ impl MergeExpressionsOptimizer { // Merge 'expr' into 'target' via Gaussian elimination on 'w' // Returns None if the expressions cannot be merged - fn merge( + fn merge_expression( target: &Expression, expr: &Expression, w: Witness, @@ -226,6 +245,13 @@ impl MergeExpressionsOptimizer { } None } + + fn get_opcode(&self, g: usize, circuit: &Circuit) -> Option> { + if self.deleted_gates.contains(&g) { + return None; + } + self.modified_gates.get(&g).or(circuit.opcodes.get(g)).cloned() + } } #[cfg(test)] diff --git a/noir/noir-repo/acvm-repo/bn254_blackbox_solver/Cargo.toml b/noir/noir-repo/acvm-repo/bn254_blackbox_solver/Cargo.toml index 8829692b9b43..825a0ef04816 100644 --- a/noir/noir-repo/acvm-repo/bn254_blackbox_solver/Cargo.toml +++ b/noir/noir-repo/acvm-repo/bn254_blackbox_solver/Cargo.toml @@ -30,7 +30,7 @@ num-bigint.workspace = true [dev-dependencies] ark-std.workspace = true criterion = "0.5.0" -pprof = { version = "0.12", features = [ +pprof = { version = "0.14", features = [ "flamegraph", "frame-pointer", "criterion", diff --git a/noir/noir-repo/acvm-repo/brillig/src/black_box.rs b/noir/noir-repo/acvm-repo/brillig/src/black_box.rs index 9cc5349e45ba..f185b36e6c89 100644 --- a/noir/noir-repo/acvm-repo/brillig/src/black_box.rs +++ b/noir/noir-repo/acvm-repo/brillig/src/black_box.rs @@ -3,7 +3,7 @@ use serde::{Deserialize, Serialize}; /// These opcodes provide an equivalent of ACIR blackbox functions. /// They are implemented as native functions in the VM. -#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Hash)] pub enum BlackBoxOp { /// Encrypts a message using AES128. 
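(The MergeExpressionsOptimizer rework above centres on one operation: when a witness w appears in exactly two AssertZero gates, the gate defining w is merged into the gate using it by Gaussian elimination. Below is a minimal, self-contained Rust sketch of that merge; `LinExpr` and its f64 coefficients are toy stand-ins for the compiler's field-element `Expression`, and the bookkeeping the real pass does through `modified_gates`/`deleted_gates`/`get_opcode` is omitted.)

use std::collections::BTreeMap;

/// A toy linear constraint: sum(coeff * witness) + constant = 0.
#[derive(Clone, Debug, PartialEq)]
struct LinExpr {
    terms: BTreeMap<u32, f64>, // witness index -> coefficient
    constant: f64,
}

/// Eliminate witness `w` by merging the gate that defines it (`define`)
/// into the gate that uses it (`use_`): compute use_ - (b/a) * define,
/// where a and b are the coefficients of w in each gate.
/// Returns None when `w` does not occur in both gates, mirroring the
/// optimizer's `merge_expression` returning None when it cannot merge.
fn merge(use_: &LinExpr, define: &LinExpr, w: u32) -> Option<LinExpr> {
    let a = *define.terms.get(&w)?;
    let b = *use_.terms.get(&w)?;
    let scale = b / a;
    let mut out = use_.clone();
    out.terms.remove(&w);
    for (wit, c) in &define.terms {
        if *wit != w {
            *out.terms.entry(*wit).or_insert(0.0) -= scale * c;
        }
    }
    out.constant -= scale * define.constant;
    Some(out)
}

fn main() {
    // define: w2 - w0 - w1 = 0   (w2 is the intermediate variable)
    let define = LinExpr {
        terms: BTreeMap::from([(0, -1.0), (1, -1.0), (2, 1.0)]),
        constant: 0.0,
    };
    // use: w3 - 2*w2 = 0
    let use_ = LinExpr { terms: BTreeMap::from([(2, -2.0), (3, 1.0)]), constant: 0.0 };
    // merged: w3 - 2*w0 - 2*w1 = 0, and w2 has disappeared from the circuit
    println!("{:?}", merge(&use_, &define, 2));
}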
AES128Encrypt { diff --git a/noir/noir-repo/acvm-repo/brillig/src/opcodes.rs b/noir/noir-repo/acvm-repo/brillig/src/opcodes.rs index 8b72b5a9b41b..1cb31ca3d0ae 100644 --- a/noir/noir-repo/acvm-repo/brillig/src/opcodes.rs +++ b/noir/noir-repo/acvm-repo/brillig/src/opcodes.rs @@ -56,7 +56,7 @@ impl MemoryAddress { } /// Describes the memory layout for an array/vector element -#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)] +#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize, Hash)] pub enum HeapValueType { // A single field element is enough to represent the value with a given bit size Simple(BitSize), @@ -81,7 +81,7 @@ impl HeapValueType { } /// A fixed-sized array starting from a Brillig memory location. -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Copy)] +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Copy, Hash)] pub struct HeapArray { pub pointer: MemoryAddress, pub size: usize, @@ -94,13 +94,13 @@ impl Default for HeapArray { } /// A memory-sized vector passed starting from a Brillig memory location and with a memory-held size -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Copy)] +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Copy, Hash)] pub struct HeapVector { pub pointer: MemoryAddress, pub size: MemoryAddress, } -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Copy, PartialOrd, Ord)] +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Copy, PartialOrd, Ord, Hash)] pub enum IntegerBitSize { U1, U8, @@ -152,7 +152,7 @@ impl std::fmt::Display for IntegerBitSize { } } -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Copy, PartialOrd, Ord)] +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Copy, PartialOrd, Ord, Hash)] pub enum BitSize { Field, Integer(IntegerBitSize), @@ -181,7 +181,7 @@ impl BitSize { /// While we are usually agnostic to how memory is passed within Brillig, /// this needs to be encoded somehow when dealing with an external system. /// For simplicity, the extra type information is given right in the ForeignCall instructions. -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Copy)] +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Copy, Hash)] pub enum ValueOrArray { /// A single value passed to or from an external call /// It is an 'immediate' value - used without dereferencing. 
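(The `Hash` derives being threaded through the ACIR and Brillig types in the hunks above exist so that a whole compilation artifact can be fingerprinted; `compile_no_check` further below compares a cached artifact against a fresh compilation by hash alone. A minimal sketch of why one derive per nested type is enough, using toy stand-in types and the `fxhash` crate that the driver itself uses:)

use fxhash::hash64; // fxhash = "0.2"

// Toy stand-ins for the real acir types: `#[derive(Hash)]` composes, so
// deriving it on every nested type makes the outermost artifact hashable.
#[derive(Hash)]
enum Opcode {
    AssertZero(Vec<(u32, i64)>), // (witness, coefficient) pairs
    BrilligCall { id: u32 },
}

#[derive(Hash)]
struct Circuit {
    current_witness_index: u32,
    opcodes: Vec<Opcode>,
}

#[derive(Hash)]
struct Program {
    functions: Vec<Circuit>,
}

fn main() {
    let program = Program {
        functions: vec![Circuit {
            current_witness_index: 3,
            opcodes: vec![Opcode::AssertZero(vec![(0, 1), (1, -1)])],
        }],
    };
    // Structurally identical programs produce identical fingerprints,
    // which is what a hash-based artifact cache relies on.
    println!("fingerprint: {:016x}", hash64(&program));
}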
@@ -198,7 +198,7 @@ pub enum ValueOrArray { HeapVector(HeapVector), } -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Hash)] pub enum BrilligOpcode { /// Takes the fields in addresses `lhs` and `rhs` /// Performs the specified binary operation @@ -314,7 +314,7 @@ pub enum BrilligOpcode { } /// Binary fixed-length field expressions -#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Hash)] pub enum BinaryFieldOp { Add, Sub, @@ -332,7 +332,7 @@ pub enum BinaryFieldOp { } /// Binary fixed-length integer expressions -#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Hash)] pub enum BinaryIntOp { Add, Sub, diff --git a/noir/noir-repo/compiler/noirc_driver/src/debug.rs b/noir/noir-repo/compiler/noirc_driver/src/debug.rs index f5eaede89b2a..6044e6c0e654 100644 --- a/noir/noir-repo/compiler/noirc_driver/src/debug.rs +++ b/noir/noir-repo/compiler/noirc_driver/src/debug.rs @@ -8,7 +8,7 @@ use std::{ /// For a given file, we store the source code and the path to the file /// so consumers of the debug artifact can reconstruct the original source code structure. -#[derive(Clone, Debug, Serialize, Deserialize)] +#[derive(Clone, Debug, Serialize, Deserialize, Hash)] pub struct DebugFile { pub source: String, pub path: PathBuf, } diff --git a/noir/noir-repo/compiler/noirc_driver/src/lib.rs b/noir/noir-repo/compiler/noirc_driver/src/lib.rs index 72ea464805ff..5bedefaf5632 100644 --- a/noir/noir-repo/compiler/noirc_driver/src/lib.rs +++ b/noir/noir-repo/compiler/noirc_driver/src/lib.rs @@ -13,7 +13,7 @@ use noirc_abi::{AbiParameter, AbiType, AbiValue}; use noirc_errors::{CustomDiagnostic, FileDiagnostic}; use noirc_evaluator::create_program; use noirc_evaluator::errors::RuntimeError; -use noirc_evaluator::ssa::SsaProgramArtifact; +use noirc_evaluator::ssa::{SsaLogging, SsaProgramArtifact}; use noirc_frontend::debug::build_debug_crate_file; use noirc_frontend::hir::def_map::{Contract, CrateDefMap}; use noirc_frontend::hir::Context; @@ -70,6 +70,11 @@ pub struct CompileOptions { #[arg(long, hide = true)] pub show_ssa: bool, + /// Only show SSA passes whose name contains the provided string. + /// This setting takes precedence over `show_ssa` if it's not empty. + #[arg(long, hide = true)] + pub show_ssa_pass_name: Option, + /// Emit the unoptimized SSA IR to file. /// The IR will be dumped into the workspace target directory, /// under `[compiled-package].ssa.json`. @@ -126,11 +131,19 @@ pub struct CompileOptions { #[arg(long)] pub skip_underconstrained_check: bool, - /// Setting to decide on an inlining strategy for brillig functions. + /// Setting to decide on an inlining strategy for Brillig functions. /// A more aggressive inliner should generate larger but more optimized programs. /// A less aggressive inliner should generate smaller programs. #[arg(long, hide = true, allow_hyphen_values = true, default_value_t = i64::MAX)] pub inliner_aggressiveness: i64, + + /// Setting the maximum acceptable increase in Brillig bytecode size due to + /// unrolling small loops. When left empty, any change is accepted as long + /// as it requires fewer SSA instructions. + /// A higher value results in fewer jumps but a larger program. + /// A lower value keeps the original program if it was smaller, even if it has more jumps. 
+ #[arg(long, hide = true, allow_hyphen_values = true)] + pub max_bytecode_increase_percent: Option, } pub fn parse_expression_width(input: &str) -> Result { @@ -321,6 +334,8 @@ pub fn compute_function_abi( /// /// On success this returns the compiled program alongside any warnings that were found. /// On error this returns the non-empty list of warnings and errors. +/// +/// See [compile_no_check] for further information about the use of `cached_program`. pub fn compile_main( context: &mut Context, crate_id: CrateId, @@ -542,6 +557,15 @@ pub const DEFAULT_EXPRESSION_WIDTH: ExpressionWidth = ExpressionWidth::Bounded { /// Compile the current crate using `main_function` as the entrypoint. /// /// This function assumes [`check_crate`] is called beforehand. +/// +/// If the program is not returned from cache, it is backend-agnostic and must go through a transformation +/// pass before usage in proof generation; if it's returned from cache, these transformations might have +/// already been applied. +/// +/// The transformations are _not_ covered by the check that decides whether we can use the cached artifact. +/// That comparison is based on [CompiledProgram::hash], which is a persisted version of the hash of the input +/// [`ast::Program`][noirc_frontend::monomorphization::ast::Program], whereas the output [`circuit::Program`][acir::circuit::Program] +/// contains the final optimized ACIR opcodes, including the transformations done after this compilation. #[tracing::instrument(level = "trace", skip_all, fields(function_name = context.function_name(&main_function)))] pub fn compile_no_check( context: &mut Context, @@ -556,8 +580,6 @@ pub fn compile_no_check( monomorphize(main_function, &mut context.def_interner)? }; - let hash = fxhash::hash64(&program); - let hashes_match = cached_program.as_ref().map_or(false, |program| program.hash == hash); if options.show_monomorphized { println!("{program}"); } @@ -571,13 +593,28 @@ pub fn compile_no_check( || options.show_ssa || options.emit_ssa; - if !force_compile && hashes_match { - info!("Program matches existing artifact, returning early"); - return Ok(cached_program.expect("cache must exist for hashes to match")); + // Hash the AST program, which is going to be used to fingerprint the compilation artifact. + let hash = fxhash::hash64(&program); + + if let Some(cached_program) = cached_program { + if !force_compile && cached_program.hash == hash { + info!("Program matches existing artifact, returning early"); + return Ok(cached_program); + } } + let return_visibility = program.return_visibility; let ssa_evaluator_options = noirc_evaluator::ssa::SsaEvaluatorOptions { - enable_ssa_logging: options.show_ssa, + ssa_logging: match &options.show_ssa_pass_name { + Some(string) => SsaLogging::Contains(string.clone()), + None => { + if options.show_ssa { + SsaLogging::All + } else { + SsaLogging::None + } + } + }, enable_brillig_logging: options.show_brillig, force_brillig_output: options.force_brillig, print_codegen_timings: options.benchmark_codegen, @@ -589,6 +626,7 @@ pub fn compile_no_check( emit_ssa: if options.emit_ssa { Some(context.package_build_path.clone()) } else { None }, skip_underconstrained_check: options.skip_underconstrained_check, inliner_aggressiveness: options.inliner_aggressiveness, + max_bytecode_increase_percent: options.max_bytecode_increase_percent, }; let SsaProgramArtifact { program, debug, warnings, names, brillig_names, error_types, .. 
} = diff --git a/noir/noir-repo/compiler/noirc_driver/src/program.rs b/noir/noir-repo/compiler/noirc_driver/src/program.rs index 884604829285..4b4d6662e8e7 100644 --- a/noir/noir-repo/compiler/noirc_driver/src/program.rs +++ b/noir/noir-repo/compiler/noirc_driver/src/program.rs @@ -9,7 +9,7 @@ use serde::{Deserialize, Serialize}; use super::debug::DebugFile; -#[derive(Debug, Serialize, Deserialize, Clone)] +#[derive(Debug, Serialize, Deserialize, Clone, Hash)] pub struct CompiledProgram { pub noir_version: String, /// Hash of the [`Program`][noirc_frontend::monomorphization::ast::Program] from which this [`CompiledProgram`] diff --git a/noir/noir-repo/compiler/noirc_errors/src/debug_info.rs b/noir/noir-repo/compiler/noirc_errors/src/debug_info.rs index 77028f739bdd..a5e12b37712d 100644 --- a/noir/noir-repo/compiler/noirc_errors/src/debug_info.rs +++ b/noir/noir-repo/compiler/noirc_errors/src/debug_info.rs @@ -94,7 +94,7 @@ impl ProgramDebugInfo { } #[serde_as] -#[derive(Default, Debug, Clone, Deserialize, Serialize)] +#[derive(Default, Debug, Clone, Deserialize, Serialize, Hash)] pub struct DebugInfo { /// Map opcode index of an ACIR circuit into the source code location /// Serde does not support mapping keys being enums for json, so we indicate diff --git a/noir/noir-repo/compiler/noirc_evaluator/Cargo.toml b/noir/noir-repo/compiler/noirc_evaluator/Cargo.toml index e25b5bf855ae..bb8c62cfd953 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/Cargo.toml +++ b/noir/noir-repo/compiler/noirc_evaluator/Cargo.toml @@ -33,6 +33,7 @@ cfg-if.workspace = true proptest.workspace = true similar-asserts.workspace = true num-traits.workspace = true +test-case.workspace = true [features] bn254 = ["noirc_frontend/bn254"] diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/acir/acir_variable.rs b/noir/noir-repo/compiler/noirc_evaluator/src/acir/acir_variable.rs index a42426e6c04d..9f2c649ee3e9 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/acir/acir_variable.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/acir/acir_variable.rs @@ -92,7 +92,7 @@ impl<'a> From<&'a SsaType> for AcirType { SsaType::Numeric(numeric_type) => AcirType::NumericType(*numeric_type), SsaType::Array(elements, size) => { let elements = elements.iter().map(|e| e.into()).collect(); - AcirType::Array(elements, *size) + AcirType::Array(elements, *size as usize) } _ => unreachable!("The type {value} cannot be represented in ACIR"), } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/acir/mod.rs b/noir/noir-repo/compiler/noirc_evaluator/src/acir/mod.rs index 69679495b926..76f0dea95bb5 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/acir/mod.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/acir/mod.rs @@ -571,7 +571,7 @@ impl<'a> Context<'a> { AcirValue::Array(_) => { let block_id = self.block_id(param_id); let len = if matches!(typ, Type::Array(_, _)) { - typ.flattened_size() + typ.flattened_size() as usize } else { return Err(InternalError::Unexpected { expected: "Block params should be an array".to_owned(), @@ -816,7 +816,9 @@ impl<'a> Context<'a> { let inputs = vecmap(arguments, |arg| self.convert_value(*arg, dfg)); let output_count = result_ids .iter() - .map(|result_id| dfg.type_of_value(*result_id).flattened_size()) + .map(|result_id| { + dfg.type_of_value(*result_id).flattened_size() as usize + }) .sum(); let Some(acir_function_id) = @@ -948,7 +950,7 @@ impl<'a> Context<'a> { let block_id = self.block_id(&array_id); let array_typ = dfg.type_of_value(array_id); let len = if 
matches!(array_typ, Type::Array(_, _)) { - array_typ.flattened_size() + array_typ.flattened_size() as usize } else { Self::flattened_value_size(&output) }; @@ -1444,7 +1446,7 @@ impl<'a> Context<'a> { // a separate SSA value and restrictions on slice indices should be generated elsewhere in the SSA. let array_typ = dfg.type_of_value(array); let array_len = if !array_typ.contains_slice_element() { - array_typ.flattened_size() + array_typ.flattened_size() as usize } else { self.flattened_slice_size(array, dfg) }; @@ -1539,7 +1541,7 @@ impl<'a> Context<'a> { let value = self.convert_value(array, dfg); let array_typ = dfg.type_of_value(array); let len = if !array_typ.contains_slice_element() { - array_typ.flattened_size() + array_typ.flattened_size() as usize } else { self.flattened_slice_size(array, dfg) }; @@ -1810,7 +1812,7 @@ impl<'a> Context<'a> { return_values .iter() - .fold(0, |acc, value_id| acc + dfg.type_of_value(*value_id).flattened_size()) + .fold(0, |acc, value_id| acc + dfg.type_of_value(*value_id).flattened_size() as usize) } /// Converts an SSA terminator's return values into their ACIR representations @@ -2156,7 +2158,7 @@ impl<'a> Context<'a> { let inputs = vecmap(&arguments_no_slice_len, |arg| self.convert_value(*arg, dfg)); let output_count = result_ids.iter().fold(0usize, |sum, result_id| { - sum + dfg.try_get_array_length(*result_id).unwrap_or(1) + sum + dfg.try_get_array_length(*result_id).unwrap_or(1) as usize }); let vars = self.acir_context.black_box_function(black_box, inputs, output_count)?; @@ -2180,7 +2182,7 @@ impl<'a> Context<'a> { endian, field, radix, - array_length as u32, + array_length, result_type[0].clone().into(), ) .map(|array| vec![array]) @@ -2194,12 +2196,7 @@ impl<'a> Context<'a> { }; self.acir_context - .bit_decompose( - endian, - field, - array_length as u32, - result_type[0].clone().into(), - ) + .bit_decompose(endian, field, array_length, result_type[0].clone().into()) .map(|array| vec![array]) } Intrinsic::ArrayLen => { @@ -2220,7 +2217,7 @@ impl<'a> Context<'a> { let acir_value = self.convert_value(slice_contents, dfg); let array_len = if !slice_typ.contains_slice_element() { - slice_typ.flattened_size() + slice_typ.flattened_size() as usize } else { self.flattened_slice_size(slice_contents, dfg) }; diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs index 1fa4985295a9..9c88c559b59f 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs @@ -1823,7 +1823,7 @@ impl<'block> BrilligBlock<'block> { Type::Array(_, nested_size) => { let inner_array = BrilligArray { pointer: self.brillig_context.allocate_register(), - size: *nested_size, + size: *nested_size as usize, }; self.allocate_foreign_call_result_array(element_type, inner_array); diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block_variables.rs b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block_variables.rs index 393d4c967c2b..bf0a1bc73474 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block_variables.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block_variables.rs @@ -142,7 +142,7 @@ pub(crate) fn allocate_value( } Type::Array(item_typ, elem_count) => BrilligVariable::BrilligArray(BrilligArray { pointer: 
brillig_context.allocate_register(), - size: compute_array_length(&item_typ, elem_count), + size: compute_array_length(&item_typ, elem_count as usize), }), Type::Slice(_) => BrilligVariable::BrilligVector(BrilligVector { pointer: brillig_context.allocate_register(), diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs index 2779be103cd3..3dea7b3e7f50 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs @@ -59,7 +59,7 @@ impl FunctionContext { vecmap(item_type.iter(), |item_typ| { FunctionContext::ssa_type_to_parameter(item_typ) }), - *size, + *size as usize, ), Type::Slice(_) => { panic!("ICE: Slice parameters cannot be derived from type information") diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/brillig_variable.rs b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/brillig_variable.rs index 81d61e05cc44..0bb18448670a 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/brillig_variable.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/brillig_variable.rs @@ -88,7 +88,7 @@ pub(crate) fn type_to_heap_value_type(typ: &Type) -> HeapValueType { ), Type::Array(elem_type, size) => HeapValueType::Array { value_types: elem_type.as_ref().iter().map(type_to_heap_value_type).collect(), - size: typ.element_size() * size, + size: typ.element_size() * *size as usize, }, Type::Slice(elem_type) => HeapValueType::Vector { value_types: elem_type.as_ref().iter().map(type_to_heap_value_type).collect(), diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/procedures/array_copy.rs b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/procedures/array_copy.rs index 67f7cf2dc34d..0a6e8824223e 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/procedures/array_copy.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/procedures/array_copy.rs @@ -69,6 +69,8 @@ pub(super) fn compile_array_copy_procedure( BRILLIG_MEMORY_ADDRESSING_BIT_SIZE, 1_usize.into(), ); + // Decrease the original ref count now that this copy is no longer pointing to it + ctx.codegen_usize_op(rc.address, rc.address, BrilligBinaryOp::Sub, 1); } }); } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/mod.rs b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/mod.rs index 1b61ae1a864d..cb8c35cd8e0e 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/mod.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/mod.rs @@ -12,7 +12,7 @@ use self::{ }, }; use crate::ssa::{ - ir::function::{Function, FunctionId, RuntimeType}, + ir::function::{Function, FunctionId}, ssa_gen::Ssa, }; use fxhash::FxHashMap as HashMap; @@ -59,7 +59,7 @@ impl std::ops::Index for Brillig { } impl Ssa { - /// Compile to brillig brillig functions and ACIR functions reachable from them + /// Compile Brillig functions and ACIR functions reachable from them #[tracing::instrument(level = "trace", skip_all)] pub(crate) fn to_brillig(&self, enable_debug_trace: bool) -> Brillig { // Collect all the function ids that are reachable from brillig @@ -67,9 +67,7 @@ impl Ssa { let brillig_reachable_function_ids = self .functions .iter() - .filter_map(|(id, func)| { - matches!(func.runtime(), RuntimeType::Brillig(_)).then_some(*id) - }) + .filter_map(|(id, 
func)| func.runtime().is_brillig().then_some(*id)) .collect::>(); let mut brillig = Brillig::default(); diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/errors.rs b/noir/noir-repo/compiler/noirc_evaluator/src/errors.rs index 994e97eabb8e..75a3ceb3a721 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/errors.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/errors.rs @@ -63,7 +63,7 @@ pub enum RuntimeError { UnknownReference { call_stack: CallStack }, } -#[derive(Debug, Clone, Serialize, Deserialize)] +#[derive(Debug, Clone, Serialize, Deserialize, Hash)] pub enum SsaReport { Warning(InternalWarning), Bug(InternalBug), @@ -107,7 +107,7 @@ impl From for FileDiagnostic { } } -#[derive(Debug, PartialEq, Eq, Clone, Error, Serialize, Deserialize)] +#[derive(Debug, PartialEq, Eq, Clone, Error, Serialize, Deserialize, Hash)] pub enum InternalWarning { #[error("Return variable contains a constant value")] ReturnConstant { call_stack: CallStack }, @@ -115,7 +115,7 @@ pub enum InternalWarning { VerifyProof { call_stack: CallStack }, } -#[derive(Debug, PartialEq, Eq, Clone, Error, Serialize, Deserialize)] +#[derive(Debug, PartialEq, Eq, Clone, Error, Serialize, Deserialize, Hash)] pub enum InternalBug { #[error("Input to brillig function is in a separate subgraph to output")] IndependentSubgraph { call_stack: CallStack }, diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/lib.rs b/noir/noir-repo/compiler/noirc_evaluator/src/lib.rs index 8127e3d03eff..75ea557d3de0 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/lib.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/lib.rs @@ -12,8 +12,7 @@ pub mod ssa; pub use ssa::create_program; pub use ssa::ir::instruction::ErrorType; -/// Trims leading whitespace from each line of the input string, according to -/// how much leading whitespace there is on the first non-empty line. +/// Trims leading whitespace from each line of the input string #[cfg(test)] pub(crate) fn trim_leading_whitespace_from_lines(src: &str) -> String { let mut lines = src.trim_end().lines(); @@ -21,11 +20,10 @@ pub(crate) fn trim_leading_whitespace_from_lines(src: &str) -> String { while first_line.is_empty() { first_line = lines.next().unwrap(); } - let indent = first_line.len() - first_line.trim_start().len(); let mut result = first_line.trim_start().to_string(); for line in lines { result.push('\n'); - result.push_str(&line[indent..]); + result.push_str(line.trim_start()); } result } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa.rs index 97c1760d87c1..8f31023f790c 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa.rs @@ -44,9 +44,16 @@ mod opt; pub(crate) mod parser; pub mod ssa_gen; +#[derive(Debug, Clone)] +pub enum SsaLogging { + None, + All, + Contains(String), +} + pub struct SsaEvaluatorOptions { /// Emit debug information for the intermediate SSA IR - pub enable_ssa_logging: bool, + pub ssa_logging: SsaLogging, pub enable_brillig_logging: bool, @@ -67,6 +74,11 @@ pub struct SsaEvaluatorOptions { /// The higher the value, the more inlined brillig functions will be. pub inliner_aggressiveness: i64, + + /// Maximum accepted percentage increase in the Brillig bytecode size after unrolling loops. + /// When `None` the size increase check is skipped altogether and any decrease in the SSA + /// instruction count is accepted. 
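(A sketch of the acceptance rule the `max_bytecode_increase_percent` docs above describe, assuming plain integer sizes and an `i32` percentage — the concrete numeric types are elided in this diff, so both are assumptions:)

/// With `None`, accept the unrolled version whenever the SSA instruction
/// count went down; with `Some(p)`, additionally require that the Brillig
/// bytecode grew by at most p percent. Assumes `old_bytecode > 0`.
fn accept_unrolled(
    max_increase_percent: Option<i32>,
    old_bytecode: usize,
    new_bytecode: usize,
    old_ssa_instructions: usize,
    new_ssa_instructions: usize,
) -> bool {
    let fewer_ssa = new_ssa_instructions < old_ssa_instructions;
    match max_increase_percent {
        None => fewer_ssa,
        Some(p) => {
            // signed percentage change of the bytecode size
            let change =
                (new_bytecode as i64 - old_bytecode as i64) * 100 / old_bytecode as i64;
            fewer_ssa && change <= i64::from(p)
        }
    }
}

fn main() {
    // A 5% larger program that needs fewer SSA instructions fits a 10% budget...
    assert!(accept_unrolled(Some(10), 100, 105, 50, 40));
    // ...a 20% larger one does not,
    assert!(!accept_unrolled(Some(10), 100, 120, 50, 40));
    // and with no budget set, only the SSA instruction count matters.
    assert!(accept_unrolled(None, 100, 180, 50, 40));
    println!("ok");
}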
+ pub max_bytecode_increase_percent: Option, } pub(crate) struct ArtifactsAndWarnings(Artifacts, Vec); @@ -85,46 +97,49 @@ pub(crate) fn optimize_into_acir( let mut ssa = SsaBuilder::new( program, - options.enable_ssa_logging, + options.ssa_logging.clone(), options.force_brillig_output, options.print_codegen_timings, &options.emit_ssa, )? - .run_pass(Ssa::defunctionalize, "After Defunctionalization:") - .run_pass(Ssa::remove_paired_rc, "After Removing Paired rc_inc & rc_decs:") - .run_pass(Ssa::separate_runtime, "After Runtime Separation:") - .run_pass(Ssa::resolve_is_unconstrained, "After Resolving IsUnconstrained:") - .run_pass(|ssa| ssa.inline_functions(options.inliner_aggressiveness), "After Inlining (1st):") + .run_pass(Ssa::defunctionalize, "Defunctionalization") + .run_pass(Ssa::remove_paired_rc, "Removing Paired rc_inc & rc_decs") + .run_pass(Ssa::separate_runtime, "Runtime Separation") + .run_pass(Ssa::resolve_is_unconstrained, "Resolving IsUnconstrained") + .run_pass(|ssa| ssa.inline_functions(options.inliner_aggressiveness), "Inlining (1st)") // Run mem2reg with the CFG separated into blocks - .run_pass(Ssa::mem2reg, "After Mem2Reg (1st):") - .run_pass(Ssa::simplify_cfg, "After Simplifying (1st):") - .run_pass(Ssa::as_slice_optimization, "After `as_slice` optimization") + .run_pass(Ssa::mem2reg, "Mem2Reg (1st)") + .run_pass(Ssa::simplify_cfg, "Simplifying (1st)") + .run_pass(Ssa::as_slice_optimization, "`as_slice` optimization") .try_run_pass( Ssa::evaluate_static_assert_and_assert_constant, - "After `static_assert` and `assert_constant`:", + "`static_assert` and `assert_constant`", + )? + .run_pass(Ssa::loop_invariant_code_motion, "Loop Invariant Code Motion") + .try_run_pass( + |ssa| ssa.unroll_loops_iteratively(options.max_bytecode_increase_percent), + "Unrolling", )? - .run_pass(Ssa::loop_invariant_code_motion, "After Loop Invariant Code Motion:") - .try_run_pass(Ssa::unroll_loops_iteratively, "After Unrolling:")? - .run_pass(Ssa::simplify_cfg, "After Simplifying (2nd):") - .run_pass(Ssa::flatten_cfg, "After Flattening:") - .run_pass(Ssa::remove_bit_shifts, "After Removing Bit Shifts:") + .run_pass(Ssa::simplify_cfg, "Simplifying (2nd)") + .run_pass(Ssa::flatten_cfg, "Flattening") + .run_pass(Ssa::remove_bit_shifts, "Removing Bit Shifts") // Run mem2reg once more with the flattened CFG to catch any remaining loads/stores - .run_pass(Ssa::mem2reg, "After Mem2Reg (2nd):") + .run_pass(Ssa::mem2reg, "Mem2Reg (2nd)") // Run the inlining pass again to handle functions with `InlineType::NoPredicates`. // Before flattening is run, we treat functions marked with the `InlineType::NoPredicates` as an entry point. // This pass must come immediately following `mem2reg` as the succeeding passes // may create an SSA which inlining fails to handle. 
.run_pass( |ssa| ssa.inline_functions_with_no_predicates(options.inliner_aggressiveness), - "After Inlining (2nd):", + "Inlining (2nd)", ) - .run_pass(Ssa::remove_if_else, "After Remove IfElse:") - .run_pass(Ssa::fold_constants, "After Constant Folding:") - .run_pass(Ssa::remove_enable_side_effects, "After EnableSideEffectsIf removal:") - .run_pass(Ssa::fold_constants_using_constraints, "After Constraint Folding:") - .run_pass(Ssa::dead_instruction_elimination, "After Dead Instruction Elimination:") - .run_pass(Ssa::simplify_cfg, "After Simplifying:") - .run_pass(Ssa::array_set_optimization, "After Array Set Optimizations:") + .run_pass(Ssa::remove_if_else, "Remove IfElse") + .run_pass(Ssa::fold_constants, "Constant Folding") + .run_pass(Ssa::remove_enable_side_effects, "EnableSideEffectsIf removal") + .run_pass(Ssa::fold_constants_using_constraints, "Constraint Folding") + .run_pass(Ssa::dead_instruction_elimination, "Dead Instruction Elimination (1st)") + .run_pass(Ssa::simplify_cfg, "Simplifying (3rd)") + .run_pass(Ssa::array_set_optimization, "Array Set Optimizations") .finish(); let ssa_level_warnings = if options.skip_underconstrained_check { @@ -146,14 +161,11 @@ pub(crate) fn optimize_into_acir( let ssa = SsaBuilder { ssa, - print_ssa_passes: options.enable_ssa_logging, + ssa_logging: options.ssa_logging.clone(), print_codegen_timings: options.print_codegen_timings, } - .run_pass( - |ssa| ssa.fold_constants_with_brillig(&brillig), - "After Constant Folding with Brillig:", - ) - .run_pass(Ssa::dead_instruction_elimination, "After Dead Instruction Elimination:") + .run_pass(|ssa| ssa.fold_constants_with_brillig(&brillig), "Inlining Brillig Calls") + .run_pass(Ssa::dead_instruction_elimination, "Dead Instruction Elimination (2nd)") .finish(); drop(ssa_gen_span_guard); @@ -226,7 +238,7 @@ impl SsaProgramArtifact { } } -/// Compiles the [`Program`] into [`ACIR``][acvm::acir::circuit::Program]. +/// Compiles the [`Program`] into [`ACIR`][acvm::acir::circuit::Program]. /// /// The output ACIR is backend-agnostic and so must go through a transformation pass before usage in proof generation. #[tracing::instrument(level = "trace", skip_all)] @@ -411,14 +423,14 @@ fn split_public_and_private_inputs( // This is just a convenience object to bundle the ssa with `ssa_logging` for debug printing. 
struct SsaBuilder { ssa: Ssa, - print_ssa_passes: bool, + ssa_logging: SsaLogging, print_codegen_timings: bool, } impl SsaBuilder { fn new( program: Program, - print_ssa_passes: bool, + ssa_logging: SsaLogging, force_brillig_runtime: bool, print_codegen_timings: bool, emit_ssa: &Option, @@ -433,7 +445,7 @@ impl SsaBuilder { let ssa_path = emit_ssa.with_extension("ssa.json"); write_to_file(&serde_json::to_vec(&ssa).unwrap(), &ssa_path); } - Ok(SsaBuilder { print_ssa_passes, print_codegen_timings, ssa }.print("Initial SSA:")) + Ok(SsaBuilder { ssa_logging, print_codegen_timings, ssa }.print("Initial SSA:")) } fn finish(self) -> Ssa { @@ -450,19 +462,28 @@ impl SsaBuilder { } /// The same as `run_pass` but for passes that may fail - fn try_run_pass( - mut self, - pass: fn(Ssa) -> Result, - msg: &str, - ) -> Result { + fn try_run_pass(mut self, pass: F, msg: &str) -> Result + where + F: FnOnce(Ssa) -> Result, + { self.ssa = time(msg, self.print_codegen_timings, || pass(self.ssa))?; Ok(self.print(msg)) } fn print(mut self, msg: &str) -> Self { - if self.print_ssa_passes { + let print_ssa_pass = match &self.ssa_logging { + SsaLogging::None => false, + SsaLogging::All => true, + SsaLogging::Contains(string) => { + let string = string.to_lowercase(); + let string = string.strip_prefix("after ").unwrap_or(&string); + let string = string.strip_suffix(':').unwrap_or(string); + msg.to_lowercase().contains(string) + } + }; + if print_ssa_pass { self.ssa.normalize_ids(); - println!("{msg}\n{}", self.ssa); + println!("After {msg}:\n{}", self.ssa); } self } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/function_builder/data_bus.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/function_builder/data_bus.rs index e4a2eeb8c22a..bd2585a3bfa7 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/function_builder/data_bus.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/function_builder/data_bus.rs @@ -160,7 +160,7 @@ impl FunctionBuilder { for value in values { self.add_to_data_bus(*value, &mut databus); } - let len = databus.values.len(); + let len = databus.values.len() as u32; let array = (len > 0 && matches!(self.current_function.runtime(), RuntimeType::Acir(_))) .then(|| { @@ -223,9 +223,11 @@ impl FunctionBuilder { ssa_params: &[ValueId], mut flattened_params_databus_visibility: Vec, ) -> Vec { - let ssa_param_sizes: Vec<_> = ssa_params + let ssa_param_sizes: Vec = ssa_params .iter() - .map(|ssa_param| self.current_function.dfg[*ssa_param].get_type().flattened_size()) + .map(|ssa_param| { + self.current_function.dfg[*ssa_param].get_type().flattened_size() as usize + }) .collect(); let mut is_ssa_params_databus = Vec::with_capacity(ssa_params.len()); diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs index 0479f8da0b72..0ae614044421 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs @@ -441,29 +441,38 @@ impl FunctionBuilder { /// Insert instructions to increment the reference count of any array(s) stored /// within the given value. If the given value is not an array and does not contain /// any arrays, this does nothing. - pub(crate) fn increment_array_reference_count(&mut self, value: ValueId) { - self.update_array_reference_count(value, true); + /// + /// Returns whether a reference count instruction was issued. 
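(Stepping back to the `SsaBuilder::print` logic shown above: it is what makes `show_ssa_pass_name` filters tolerant of the old pass labels, by lowercasing the filter, stripping a leading "after " and a trailing ':', then substring-matching against the pass message. A standalone, runnable sketch of just that matching:)

#[derive(Debug, Clone)]
enum SsaLogging {
    None,
    All,
    Contains(String),
}

/// Mirrors the matching in `SsaBuilder::print`.
fn should_print(logging: &SsaLogging, msg: &str) -> bool {
    match logging {
        SsaLogging::None => false,
        SsaLogging::All => true,
        SsaLogging::Contains(filter) => {
            let filter = filter.to_lowercase();
            let filter = filter.strip_prefix("after ").unwrap_or(&filter);
            let filter = filter.strip_suffix(':').unwrap_or(filter);
            msg.to_lowercase().contains(filter)
        }
    }
}

fn main() {
    // A filter written against the old "After Unrolling:" label still
    // matches the renamed "Unrolling" pass.
    let logging = SsaLogging::Contains("After Unrolling:".into());
    assert!(should_print(&logging, "Unrolling"));
    assert!(!should_print(&logging, "Mem2Reg (1st)"));
    println!("ok");
}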
diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs
index 0479f8da0b72..0ae614044421 100644
--- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs
+++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs
@@ -441,29 +441,38 @@ impl FunctionBuilder {
     /// Insert instructions to increment the reference count of any array(s) stored
     /// within the given value. If the given value is not an array and does not contain
     /// any arrays, this does nothing.
-    pub(crate) fn increment_array_reference_count(&mut self, value: ValueId) {
-        self.update_array_reference_count(value, true);
+    ///
+    /// Returns whether a reference count instruction was issued.
+    pub(crate) fn increment_array_reference_count(&mut self, value: ValueId) -> bool {
+        self.update_array_reference_count(value, true)
    }

    /// Insert instructions to decrement the reference count of any array(s) stored
    /// within the given value. If the given value is not an array and does not contain
    /// any arrays, this does nothing.
-    pub(crate) fn decrement_array_reference_count(&mut self, value: ValueId) {
-        self.update_array_reference_count(value, false);
+    ///
+    /// Returns whether a reference count instruction was issued.
+    pub(crate) fn decrement_array_reference_count(&mut self, value: ValueId) -> bool {
+        self.update_array_reference_count(value, false)
    }

    /// Increment or decrement the given value's reference count if it is an array.
    /// If it is not an array, this does nothing. Note that inc_rc and dec_rc instructions
    /// are ignored outside of unconstrained code.
-    fn update_array_reference_count(&mut self, value: ValueId, increment: bool) {
+    ///
+    /// Returns whether a reference count instruction was issued.
+    fn update_array_reference_count(&mut self, value: ValueId, increment: bool) -> bool {
        match self.type_of_value(value) {
-            Type::Numeric(_) => (),
-            Type::Function => (),
+            Type::Numeric(_) => false,
+            Type::Function => false,
            Type::Reference(element) => {
                if element.contains_an_array() {
                    let reference = value;
                    let value = self.insert_load(reference, element.as_ref().clone());
                    self.update_array_reference_count(value, increment);
+                    true
+                } else {
+                    false
                }
            }
            Type::Array(..) | Type::Slice(..) => {
@@ -474,6 +483,7 @@ impl FunctionBuilder {
                } else {
                    self.insert_dec_rc(value);
                }
+                true
            }
        }
    }
diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/dfg.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/dfg.rs
index e3f3f33682bd..827944e22d1a 100644
--- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/dfg.rs
+++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/dfg.rs
@@ -307,13 +307,13 @@ impl DataFlowGraph {
        instruction_id: InstructionId,
        ctrl_typevars: Option<Vec<Type>>,
    ) {
-        self.results.insert(instruction_id, Default::default());
+        let result_types = self.instruction_result_types(instruction_id, ctrl_typevars);
+        let results = vecmap(result_types.into_iter().enumerate(), |(position, typ)| {
+            let instruction = instruction_id;
+            self.values.insert(Value::Instruction { typ, position, instruction })
+        });

-        // Get all of the types that this instruction produces
-        // and append them as results.
-        for typ in self.instruction_result_types(instruction_id, ctrl_typevars) {
-            self.append_result(instruction_id, typ);
-        }
+        self.results.insert(instruction_id, results);
    }

    /// Return the result types of this instruction.
@@ -370,22 +370,6 @@ impl DataFlowGraph {
        matches!(self.values[value].get_type(), Type::Reference(_))
    }

-    /// Appends a result type to the instruction.
-    pub(crate) fn append_result(&mut self, instruction_id: InstructionId, typ: Type) -> ValueId {
-        let results = self.results.get_mut(&instruction_id).unwrap();
-        let expected_res_position = results.len();
-
-        let value_id = self.values.insert(Value::Instruction {
-            typ,
-            position: expected_res_position,
-            instruction: instruction_id,
-        });
-
-        // Add value to the list of results for this instruction
-        results.push(value_id);
-        value_id
-    }
-
    /// Replaces an instruction result with a fresh id.
    pub(crate) fn replace_result(
        &mut self,
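On the boolean now returned by the reference-count helpers earlier in this file: it lets callers pair an increment with a decrement only when something was actually issued. A self-contained analog of that contract (toy types, not the builder API):

    // Toy stand-in for the inc/dec pairing: `increment` reports whether a
    // count was issued (by analogy, only for array-bearing values).
    struct RcCounter {
        count: i64,
    }

    impl RcCounter {
        fn increment(&mut self, contains_array: bool) -> bool {
            if contains_array {
                self.count += 1;
            }
            contains_array
        }

        fn decrement(&mut self) {
            self.count -= 1;
        }
    }

    fn main() {
        let mut rc = RcCounter { count: 0 };
        let issued = rc.increment(true);
        if issued {
            rc.decrement(); // balanced only when the increment was issued
        }
        assert_eq!(rc.count, 0);
    }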
@@ -463,7 +447,7 @@ impl DataFlowGraph {
     /// If this value is an array, return the length of the array as indicated by its type.
     /// Otherwise, return None.
-    pub(crate) fn try_get_array_length(&self, value: ValueId) -> Option<usize> {
+    pub(crate) fn try_get_array_length(&self, value: ValueId) -> Option<u32> {
         match self.type_of_value(value) {
             Type::Array(_, length) => Some(length),
             _ => None,
diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/function.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/function.rs
index b1233e3063e1..6413107c04a3 100644
--- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/function.rs
+++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/function.rs
@@ -197,6 +197,12 @@ impl Function {
     }
 }

+impl Clone for Function {
+    fn clone(&self) -> Self {
+        Function::clone_with_id(self.id(), self)
+    }
+}
+
 impl std::fmt::Display for RuntimeType {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         match self {
diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/function_inserter.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/function_inserter.rs
index a0c23ad70aaf..6ebd2aa11055 100644
--- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/function_inserter.rs
+++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/function_inserter.rs
@@ -129,7 +129,7 @@ impl<'f> FunctionInserter<'f> {
         // another MakeArray instruction. Note that this assumes the function inserter is inserting
         // in control-flow order. Otherwise we could refer to ValueIds defined later in the program.
         let make_array = if let Instruction::MakeArray { elements, typ } = &instruction {
-            if self.array_is_constant(elements) {
+            if self.array_is_constant(elements) && self.function.runtime().is_acir() {
                 if let Some(fetched_value) = self.get_cached_array(elements, typ) {
                     assert_eq!(results.len(), 1);
                     self.values.insert(results[0], fetched_value);
diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction.rs
index f606fffbf91e..76409f6a20a7 100644
--- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction.rs
+++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction.rs
@@ -11,7 +11,7 @@ use fxhash::FxHasher64;
 use iter_extended::vecmap;
 use noirc_frontend::hir_def::types::Type as HirType;

-use crate::ssa::opt::flatten_cfg::value_merger::ValueMerger;
+use crate::ssa::{ir::function::RuntimeType, opt::flatten_cfg::value_merger::ValueMerger};

 use super::{
     basic_block::BasicBlockId,
@@ -315,7 +315,12 @@ pub(crate) enum Instruction {
     ///     else_value
     /// }
     /// ```
-    IfElse { then_condition: ValueId, then_value: ValueId, else_value: ValueId },
+    IfElse {
+        then_condition: ValueId,
+        then_value: ValueId,
+        else_condition: ValueId,
+        else_value: ValueId,
+    },

     /// Creates a new array or slice.
     ///
@@ -389,9 +394,22 @@ impl Instruction {
             // This should never be side-effectful
             MakeArray { .. } => false,

+            // Some binary math can overflow or underflow, and division or modulo
+            // can fail on a zero divisor.
+            Binary(binary) => match binary.operator {
+                BinaryOp::Add | BinaryOp::Sub | BinaryOp::Mul | BinaryOp::Div | BinaryOp::Mod => {
+                    true
+                }
+                BinaryOp::Eq
+                | BinaryOp::Lt
+                | BinaryOp::And
+                | BinaryOp::Or
+                | BinaryOp::Xor
+                | BinaryOp::Shl
+                | BinaryOp::Shr => false,
+            },
+
             // These can have different behavior depending on the EnableSideEffectsIf context.
-            Binary(_)
-            | Cast(_, _)
+            Cast(_, _)
             | Not(_)
             | Truncate { .. }
             | IfElse { .. }
@@ -411,7 +429,7 @@ impl Instruction {
     /// conditional on whether the caller wants the predicate to be taken into account or not.
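+    ///
+    /// As a hedged illustration (example SSA, not from this codebase's tests):
+    /// deduplicating a side-effectful `add` across different predicates would
+    /// change failure behavior.
+    ///
+    /// ```text
+    /// enable_side_effects v0
+    /// v3 = add v1, v2    // may only overflow when v0 is active
+    /// enable_side_effects u1 1
+    /// v4 = add v1, v2    // reusing v3 here would drop the predicate
+    /// ```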
pub(crate) fn can_be_deduplicated( &self, - dfg: &DataFlowGraph, + function: &Function, deduplicate_with_predicate: bool, ) -> bool { use Instruction::*; @@ -425,7 +443,7 @@ impl Instruction { | IncrementRc { .. } | DecrementRc { .. } => false, - Call { func, .. } => match dfg[*func] { + Call { func, .. } => match function.dfg[*func] { Value::Intrinsic(intrinsic) => { intrinsic.can_be_deduplicated(deduplicate_with_predicate) } @@ -435,8 +453,11 @@ impl Instruction { // We can deduplicate these instructions if we know the predicate is also the same. Constrain(..) | RangeCheck { .. } => deduplicate_with_predicate, - // This should never be side-effectful - MakeArray { .. } => true, + // Arrays can be mutated in unconstrained code so code that handles this case must + // take care to track whether the array was possibly mutated or not before + // deduplicating. Since we don't know if the containing pass checks for this, we + // can only assume these are safe to deduplicate in constrained code. + MakeArray { .. } => function.runtime().is_acir(), // These can have different behavior depending on the EnableSideEffectsIf context. // Replacing them with a similar instruction potentially enables replacing an instruction @@ -449,7 +470,7 @@ impl Instruction { | IfElse { .. } | ArrayGet { .. } | ArraySet { .. } => { - deduplicate_with_predicate || !self.requires_acir_gen_predicate(dfg) + deduplicate_with_predicate || !self.requires_acir_gen_predicate(&function.dfg) } } } @@ -478,8 +499,19 @@ impl Instruction { | ArraySet { .. } | MakeArray { .. } => true, + // Store instructions must be removed by DIE in acir code, any load + // instructions should already be unused by that point. + // + // Note that this check assumes that it is being performed after the flattening + // pass and after the last mem2reg pass. This is currently the case for the DIE + // pass where this check is done, but does mean that we cannot perform mem2reg + // after the DIE pass. + Store { .. } => { + matches!(function.runtime(), RuntimeType::Acir(_)) + && function.reachable_blocks().len() == 1 + } + Constrain(..) - | Store { .. } | EnableSideEffectsIf { .. } | IncrementRc { .. } | DecrementRc { .. } @@ -608,11 +640,14 @@ impl Instruction { assert_message: assert_message.clone(), } } - Instruction::IfElse { then_condition, then_value, else_value } => Instruction::IfElse { - then_condition: f(*then_condition), - then_value: f(*then_value), - else_value: f(*else_value), - }, + Instruction::IfElse { then_condition, then_value, else_condition, else_value } => { + Instruction::IfElse { + then_condition: f(*then_condition), + then_value: f(*then_value), + else_condition: f(*else_condition), + else_value: f(*else_value), + } + } Instruction::MakeArray { elements, typ } => Instruction::MakeArray { elements: elements.iter().copied().map(f).collect(), typ: typ.clone(), @@ -671,9 +706,10 @@ impl Instruction { | Instruction::RangeCheck { value, .. 
} => { f(*value); } - Instruction::IfElse { then_condition, then_value, else_value } => { + Instruction::IfElse { then_condition, then_value, else_condition, else_value } => { f(*then_condition); f(*then_value); + f(*else_condition); f(*else_value); } Instruction::MakeArray { elements, typ: _ } => { @@ -836,7 +872,7 @@ impl Instruction { None } } - Instruction::IfElse { then_condition, then_value, else_value } => { + Instruction::IfElse { then_condition, then_value, else_condition, else_value } => { let typ = dfg.type_of_value(*then_value); if let Some(constant) = dfg.get_numeric_constant(*then_condition) { @@ -855,11 +891,13 @@ impl Instruction { if matches!(&typ, Type::Numeric(_)) { let then_condition = *then_condition; + let else_condition = *else_condition; let result = ValueMerger::merge_numeric_values( dfg, block, then_condition, + else_condition, then_value, else_value, ); diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs index 6ebe80128c0d..a8db5e2ff94e 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs @@ -56,11 +56,13 @@ pub(super) fn simplify_call( if let (Some(constant_args), Some(return_type)) = (constant_args, return_type.clone()) { let field = constant_args[0]; let limb_count = if let Type::Array(_, array_len) = return_type { - array_len as u32 + array_len } else { unreachable!("ICE: Intrinsic::ToRadix return type must be array") }; - constant_to_radix(endian, field, 2, limb_count, dfg, block, call_stack) + constant_to_radix(endian, field, 2, limb_count, |values| { + make_constant_array(dfg, values.into_iter(), Type::bool(), block, call_stack) + }) } else { SimplifyResult::None } @@ -71,11 +73,19 @@ pub(super) fn simplify_call( let field = constant_args[0]; let radix = constant_args[1].to_u128() as u32; let limb_count = if let Type::Array(_, array_len) = return_type { - array_len as u32 + array_len } else { unreachable!("ICE: Intrinsic::ToRadix return type must be array") }; - constant_to_radix(endian, field, radix, limb_count, dfg, block, call_stack) + constant_to_radix(endian, field, radix, limb_count, |values| { + make_constant_array( + dfg, + values.into_iter(), + Type::unsigned(8), + block, + call_stack, + ) + }) } else { SimplifyResult::None } @@ -351,7 +361,7 @@ pub(super) fn simplify_call( Intrinsic::IsUnconstrained => SimplifyResult::None, Intrinsic::DerivePedersenGenerators => { if let Some(Type::Array(_, len)) = return_type.clone() { - simplify_derive_generators(dfg, arguments, len as u32, block, call_stack) + simplify_derive_generators(dfg, arguments, len, block, call_stack) } else { unreachable!("Derive Pedersen Generators must return an array"); } @@ -432,8 +442,8 @@ fn simplify_slice_push_back( for elem in &arguments[2..] 
{ slice.push_back(*elem); } - let slice_size = slice.len(); - let element_size = element_type.element_size(); + let slice_size = slice.len() as u32; + let element_size = element_type.element_size() as u32; let new_slice = make_array(dfg, slice, element_type, block, &call_stack); let set_last_slice_value_instr = Instruction::ArraySet { @@ -455,8 +465,12 @@ fn simplify_slice_push_back( let mut value_merger = ValueMerger::new(dfg, block, &mut slice_sizes, unknown, None, call_stack); - let new_slice = - value_merger.merge_values(len_not_equals_capacity, set_last_slice_value, new_slice); + let new_slice = value_merger.merge_values( + len_not_equals_capacity, + len_equals_capacity, + set_last_slice_value, + new_slice, + ); SimplifyResult::SimplifiedToMultiple(vec![new_slice_length, new_slice]) } @@ -621,7 +635,7 @@ fn make_constant_array( let result_constants: im::Vector<_> = results.map(|element| dfg.make_constant(element, typ.clone())).collect(); - let typ = Type::Array(Arc::new(vec![typ]), result_constants.len()); + let typ = Type::Array(Arc::new(vec![typ]), result_constants.len() as u32); make_array(dfg, result_constants, typ, block, call_stack) } @@ -660,9 +674,7 @@ fn constant_to_radix( field: FieldElement, radix: u32, limb_count: u32, - dfg: &mut DataFlowGraph, - block: BasicBlockId, - call_stack: &CallStack, + mut make_array: impl FnMut(Vec) -> ValueId, ) -> SimplifyResult { let bit_size = u32::BITS - (radix - 1).leading_zeros(); let radix_big = BigUint::from(radix); @@ -683,13 +695,7 @@ fn constant_to_radix( if endian == Endian::Big { limbs.reverse(); } - let result_array = make_constant_array( - dfg, - limbs.into_iter(), - Type::unsigned(bit_size), - block, - call_stack, - ); + let result_array = make_array(limbs); SimplifyResult::SimplifiedTo(result_array) } } @@ -816,7 +822,7 @@ fn simplify_derive_generators( results.push(dfg.make_constant(y, Type::field())); results.push(is_infinite); } - let len = results.len(); + let len = results.len() as u32; let typ = Type::Array(vec![Type::field(), Type::field(), Type::unsigned(1)].into(), len / 3); let result = make_array(dfg, results.into(), typ, block, call_stack); @@ -835,27 +841,27 @@ mod tests { #[test] fn simplify_derive_generators_has_correct_type() { - let src = " + let src = r#" brillig(inline) fn main f0 { b0(): - v0 = make_array [u8 68, u8 69, u8 70, u8 65, u8 85, u8 76, u8 84, u8 95, u8 68, u8 79, u8 77, u8 65, u8 73, u8 78, u8 95, u8 83, u8 69, u8 80, u8 65, u8 82, u8 65, u8 84, u8 79, u8 82] : [u8; 24] + v0 = make_array b"DEFAULT_DOMAIN_SEPARATOR" // This call was previously incorrectly simplified to something that returned `[Field; 3]` v2 = call derive_pedersen_generators(v0, u32 0) -> [(Field, Field, u1); 1] return v2 } - "; + "#; let ssa = Ssa::from_str(src).unwrap(); - let expected = " + let expected = r#" brillig(inline) fn main f0 { b0(): - v15 = make_array [u8 68, u8 69, u8 70, u8 65, u8 85, u8 76, u8 84, u8 95, u8 68, u8 79, u8 77, u8 65, u8 73, u8 78, u8 95, u8 83, u8 69, u8 80, u8 65, u8 82, u8 65, u8 84, u8 79, u8 82] : [u8; 24] + v15 = make_array b"DEFAULT_DOMAIN_SEPARATOR" v19 = make_array [Field 3728882899078719075161482178784387565366481897740339799480980287259621149274, Field -9903063709032878667290627648209915537972247634463802596148419711785767431332, u1 0] : [(Field, Field, u1); 1] return v19 } - "; + "#; assert_normalized_ssa_equals(ssa, expected); } } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/call/blackbox.rs 
b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/call/blackbox.rs index b9faf1c46ecb..016d7ffa25bc 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/call/blackbox.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/call/blackbox.rs @@ -2,10 +2,11 @@ use std::sync::Arc; use acvm::{acir::AcirField, BlackBoxFunctionSolver, BlackBoxResolutionError, FieldElement}; +use crate::ssa::ir::instruction::BlackBoxFunc; use crate::ssa::ir::{ basic_block::BasicBlockId, dfg::{CallStack, DataFlowGraph}, - instruction::{Instruction, SimplifyResult}, + instruction::{Instruction, Intrinsic, SimplifyResult}, types::Type, value::ValueId, }; @@ -70,52 +71,125 @@ pub(super) fn simplify_msm( block: BasicBlockId, call_stack: &CallStack, ) -> SimplifyResult { - // TODO: Handle MSMs where a subset of the terms are constant. + let mut is_constant; + match (dfg.get_array_constant(arguments[0]), dfg.get_array_constant(arguments[1])) { (Some((points, _)), Some((scalars, _))) => { - let Some(points) = points - .into_iter() - .map(|id| dfg.get_numeric_constant(id)) - .collect::>>() - else { - return SimplifyResult::None; - }; - - let Some(scalars) = scalars - .into_iter() - .map(|id| dfg.get_numeric_constant(id)) - .collect::>>() - else { - return SimplifyResult::None; - }; + // We decompose points and scalars into constant and non-constant parts in order to simplify MSMs where a subset of the terms are constant. + let mut constant_points = vec![]; + let mut constant_scalars_lo = vec![]; + let mut constant_scalars_hi = vec![]; + let mut var_points = vec![]; + let mut var_scalars = vec![]; + let len = scalars.len() / 2; + for i in 0..len { + match ( + dfg.get_numeric_constant(scalars[2 * i]), + dfg.get_numeric_constant(scalars[2 * i + 1]), + dfg.get_numeric_constant(points[3 * i]), + dfg.get_numeric_constant(points[3 * i + 1]), + dfg.get_numeric_constant(points[3 * i + 2]), + ) { + (Some(lo), Some(hi), _, _, _) if lo.is_zero() && hi.is_zero() => { + is_constant = true; + constant_scalars_lo.push(lo); + constant_scalars_hi.push(hi); + constant_points.push(FieldElement::zero()); + constant_points.push(FieldElement::zero()); + constant_points.push(FieldElement::one()); + } + (_, _, _, _, Some(infinity)) if infinity.is_one() => { + is_constant = true; + constant_scalars_lo.push(FieldElement::zero()); + constant_scalars_hi.push(FieldElement::zero()); + constant_points.push(FieldElement::zero()); + constant_points.push(FieldElement::zero()); + constant_points.push(FieldElement::one()); + } + (Some(lo), Some(hi), Some(x), Some(y), Some(infinity)) => { + is_constant = true; + constant_scalars_lo.push(lo); + constant_scalars_hi.push(hi); + constant_points.push(x); + constant_points.push(y); + constant_points.push(infinity); + } + _ => { + is_constant = false; + } + } - let mut scalars_lo = Vec::new(); - let mut scalars_hi = Vec::new(); - for (i, scalar) in scalars.into_iter().enumerate() { - if i % 2 == 0 { - scalars_lo.push(scalar); - } else { - scalars_hi.push(scalar); + if !is_constant { + var_points.push(points[3 * i]); + var_points.push(points[3 * i + 1]); + var_points.push(points[3 * i + 2]); + var_scalars.push(scalars[2 * i]); + var_scalars.push(scalars[2 * i + 1]); } } - let Ok((result_x, result_y, result_is_infinity)) = - solver.multi_scalar_mul(&points, &scalars_lo, &scalars_hi) - else { + // If there are no constant terms, we can't simplify + if constant_scalars_lo.is_empty() { + return SimplifyResult::None; + } + let Ok((result_x, result_y, 
result_is_infinity)) = solver.multi_scalar_mul( + &constant_points, + &constant_scalars_lo, + &constant_scalars_hi, + ) else { return SimplifyResult::None; }; - let result_x = dfg.make_constant(result_x, Type::field()); - let result_y = dfg.make_constant(result_y, Type::field()); - let result_is_infinity = dfg.make_constant(result_is_infinity, Type::field()); + // If there are no variable term, we can directly return the constant result + if var_scalars.is_empty() { + let result_x = dfg.make_constant(result_x, Type::field()); + let result_y = dfg.make_constant(result_y, Type::field()); + let result_is_infinity = dfg.make_constant(result_is_infinity, Type::field()); - let elements = im::vector![result_x, result_y, result_is_infinity]; - let typ = Type::Array(Arc::new(vec![Type::field()]), 3); - let instruction = Instruction::MakeArray { elements, typ }; - let result_array = - dfg.insert_instruction_and_results(instruction, block, None, call_stack.clone()); + let elements = im::vector![result_x, result_y, result_is_infinity]; + let typ = Type::Array(Arc::new(vec![Type::field()]), 3); + let instruction = Instruction::MakeArray { elements, typ }; + let result_array = dfg.insert_instruction_and_results( + instruction, + block, + None, + call_stack.clone(), + ); - SimplifyResult::SimplifiedTo(result_array.first()) + return SimplifyResult::SimplifiedTo(result_array.first()); + } + // If there is only one non-null constant term, we cannot simplify + if constant_scalars_lo.len() == 1 && result_is_infinity != FieldElement::one() { + return SimplifyResult::None; + } + // Add the constant part back to the non-constant part, if it is not null + let one = dfg.make_constant(FieldElement::one(), Type::field()); + let zero = dfg.make_constant(FieldElement::zero(), Type::field()); + if result_is_infinity.is_zero() { + var_scalars.push(one); + var_scalars.push(zero); + let result_x = dfg.make_constant(result_x, Type::field()); + let result_y = dfg.make_constant(result_y, Type::field()); + let result_is_infinity = dfg.make_constant(result_is_infinity, Type::bool()); + var_points.push(result_x); + var_points.push(result_y); + var_points.push(result_is_infinity); + } + // Construct the simplified MSM expression + let typ = Type::Array(Arc::new(vec![Type::field()]), var_scalars.len() as u32); + let scalars = Instruction::MakeArray { elements: var_scalars.into(), typ }; + let scalars = dfg + .insert_instruction_and_results(scalars, block, None, call_stack.clone()) + .first(); + let typ = Type::Array(Arc::new(vec![Type::field()]), var_points.len() as u32); + let points = Instruction::MakeArray { elements: var_points.into(), typ }; + let points = + dfg.insert_instruction_and_results(points, block, None, call_stack.clone()).first(); + let msm = dfg.import_intrinsic(Intrinsic::BlackBox(BlackBoxFunc::MultiScalarMul)); + SimplifyResult::SimplifiedToInstruction(Instruction::Call { + func: msm, + arguments: vec![points, scalars], + }) } _ => SimplifyResult::None, } @@ -228,3 +302,93 @@ pub(super) fn simplify_signature( _ => SimplifyResult::None, } } + +#[cfg(feature = "bn254")] +#[cfg(test)] +mod test { + use crate::ssa::opt::assert_normalized_ssa_equals; + use crate::ssa::Ssa; + + #[cfg(feature = "bn254")] + #[test] + fn full_constant_folding() { + let src = r#" + acir(inline) fn main f0 { + b0(): + v0 = make_array [Field 2, Field 3, Field 5, Field 5] : [Field; 4] + v1 = make_array [Field 1, Field 17631683881184975370165255887551781615748388533673675138860, Field 0, Field 1, Field 
17631683881184975370165255887551781615748388533673675138860, Field 0] : [Field; 6] + v2 = call multi_scalar_mul (v1, v0) -> [Field; 3] + return v2 + }"#; + let ssa = Ssa::from_str(src).unwrap(); + + let expected_src = r#" + acir(inline) fn main f0 { + b0(): + v3 = make_array [Field 2, Field 3, Field 5, Field 5] : [Field; 4] + v7 = make_array [Field 1, Field 17631683881184975370165255887551781615748388533673675138860, Field 0, Field 1, Field 17631683881184975370165255887551781615748388533673675138860, Field 0] : [Field; 6] + v10 = make_array [Field 1478523918288173385110236399861791147958001875200066088686689589556927843200, Field 700144278551281040379388961242974992655630750193306467120985766322057145630, Field 0] : [Field; 3] + return v10 + } + "#; + assert_normalized_ssa_equals(ssa, expected_src); + } + + #[cfg(feature = "bn254")] + #[test] + fn simplify_zero() { + let src = r#" + acir(inline) fn main f0 { + b0(v0: Field, v1: Field): + v2 = make_array [v0, Field 0, Field 0, Field 0, v0, Field 0] : [Field; 6] + v3 = make_array [ + Field 0, Field 0, Field 1, v0, v1, Field 0, Field 1, v0, Field 0] : [Field; 9] + v4 = call multi_scalar_mul (v3, v2) -> [Field; 3] + + return v4 + + }"#; + let ssa = Ssa::from_str(src).unwrap(); + //First point is zero, second scalar is zero, so we should be left with the scalar mul of the last point. + let expected_src = r#" + acir(inline) fn main f0 { + b0(v0: Field, v1: Field): + v3 = make_array [v0, Field 0, Field 0, Field 0, v0, Field 0] : [Field; 6] + v5 = make_array [Field 0, Field 0, Field 1, v0, v1, Field 0, Field 1, v0, Field 0] : [Field; 9] + v6 = make_array [v0, Field 0] : [Field; 2] + v7 = make_array [Field 1, v0, Field 0] : [Field; 3] + v9 = call multi_scalar_mul(v7, v6) -> [Field; 3] + return v9 + } + "#; + assert_normalized_ssa_equals(ssa, expected_src); + } + + #[cfg(feature = "bn254")] + #[test] + fn partial_constant_folding() { + let src = r#" + acir(inline) fn main f0 { + b0(v0: Field, v1: Field): + v2 = make_array [Field 1, Field 0, v0, Field 0, Field 2, Field 0] : [Field; 6] + v3 = make_array [ + Field 1, Field 17631683881184975370165255887551781615748388533673675138860, Field 0, v0, v1, Field 0, Field 1, Field 17631683881184975370165255887551781615748388533673675138860, Field 0] : [Field; 9] + v4 = call multi_scalar_mul (v3, v2) -> [Field; 3] + return v4 + }"#; + let ssa = Ssa::from_str(src).unwrap(); + //First and last scalar/point are constant, so we should be left with the msm of the middle point and the folded constant point + let expected_src = r#" + acir(inline) fn main f0 { + b0(v0: Field, v1: Field): + v5 = make_array [Field 1, Field 0, v0, Field 0, Field 2, Field 0] : [Field; 6] + v7 = make_array [Field 1, Field 17631683881184975370165255887551781615748388533673675138860, Field 0, v0, v1, Field 0, Field 1, Field 17631683881184975370165255887551781615748388533673675138860, Field 0] : [Field; 9] + v8 = make_array [v0, Field 0, Field 1, Field 0] : [Field; 4] + v12 = make_array [v0, v1, Field 0, Field -3227352362257037263902424173275354266044964400219754872043023745437788450996, Field 8902249110305491597038405103722863701255802573786510474664632793109847672620, u1 0] : [Field; 6] + v14 = call multi_scalar_mul(v12, v8) -> [Field; 3] + return v14 + } + "#; + assert_normalized_ssa_equals(ssa, expected_src); + } +} diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/printer.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/printer.rs index 6bebd21fe61e..29e797283030 100644 --- 
a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/printer.rs
+++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/printer.rs
@@ -1,12 +1,12 @@
 //! This file is for pretty-printing the SSA IR in a human-readable form for debugging.
-use std::{
-    collections::HashSet,
-    fmt::{Formatter, Result},
-};
+use std::fmt::{Formatter, Result};

 use acvm::acir::AcirField;
+use im::Vector;
 use iter_extended::vecmap;

+use crate::ssa::ir::types::{NumericType, Type};
+
 use super::{
     basic_block::BasicBlockId,
     dfg::DataFlowGraph,
@@ -18,28 +18,10 @@ use super::{
 /// Helper function for Function's Display impl to pretty-print the function with the given formatter.
 pub(crate) fn display_function(function: &Function, f: &mut Formatter) -> Result {
     writeln!(f, "{} fn {} {} {{", function.runtime(), function.name(), function.id())?;
-    display_block_with_successors(function, function.entry_block(), &mut HashSet::new(), f)?;
-    write!(f, "}}")
-}
-
-/// Displays a block followed by all of its successors recursively.
-/// This uses a HashSet to keep track of the visited blocks. Otherwise
-/// there would be infinite recursion for any loops in the IR.
-pub(crate) fn display_block_with_successors(
-    function: &Function,
-    block_id: BasicBlockId,
-    visited: &mut HashSet<BasicBlockId>,
-    f: &mut Formatter,
-) -> Result {
-    display_block(function, block_id, f)?;
-    visited.insert(block_id);
-
-    for successor in function.dfg[block_id].successors() {
-        if !visited.contains(&successor) {
-            display_block_with_successors(function, successor, visited, f)?;
-        }
+    for block_id in function.reachable_blocks() {
+        display_block(function, block_id, f)?;
    }
-    Ok(())
+    write!(f, "}}")
}

/// Display a single block. This will not display the block's successors.
@@ -209,13 +191,39 @@ fn display_instruction_inner(
        Instruction::RangeCheck { value, max_bit_size, .. } => {
            writeln!(f, "range_check {} to {} bits", show(*value), *max_bit_size,)
        }
-        Instruction::IfElse { then_condition, then_value, else_value } => {
+        Instruction::IfElse { then_condition, then_value, else_condition, else_value } => {
            let then_condition = show(*then_condition);
            let then_value = show(*then_value);
+            let else_condition = show(*else_condition);
            let else_value = show(*else_value);
-            writeln!(f, "if {then_condition} then {then_value} else {else_value}")
+            writeln!(
+                f,
+                "if {then_condition} then {then_value} else (if {else_condition}) {else_value}"
+            )
        }
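+        // Illustrative note on the new form above (value ids are made up):
+        //   v5 = if v0 then v2 else (if v1) v3
+        // where v1 is the flattened else-branch predicate carried by the new
+        // `else_condition` field.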
        Instruction::MakeArray { elements, typ } => {
+            // If the array is a byte array, we check if all the bytes are printable ascii characters
+            // and, if so, we print the array as a string literal (easier to understand).
+            // It could happen that the byte array is a random byte sequence that happens to be printable
+            // (it didn't come from a string literal) but this still reduces the noise in the output
+            // and actually represents the same value.
+            let (element_types, is_slice) = match typ {
+                Type::Array(types, _) => (types, false),
+                Type::Slice(types) => (types, true),
+                _ => panic!("Expected array or slice type for MakeArray"),
+            };
+            if element_types.len() == 1
+                && element_types[0] == Type::Numeric(NumericType::Unsigned { bit_size: 8 })
+            {
+                if let Some(string) = try_byte_array_to_string(elements, function) {
+                    if is_slice {
+                        return writeln!(f, "make_array &b{:?}", string);
+                    } else {
+                        return writeln!(f, "make_array b{:?}", string);
+                    }
+                }
+            }
+
            write!(f, "make_array [")?;
            for (i, element) in elements.iter().enumerate() {
@@ -230,6 +238,25 @@ fn display_instruction_inner(
    }
}

+fn try_byte_array_to_string(elements: &Vector<ValueId>, function: &Function) -> Option<String> {
+    let mut string = String::new();
+    for element in elements {
+        let element = function.dfg.get_numeric_constant(*element)?;
+        let element = element.try_to_u32()?;
+        if element > 0xFF {
+            return None;
+        }
+        let byte = element as u8;
+        if byte.is_ascii_alphanumeric() || byte.is_ascii_punctuation() || byte.is_ascii_whitespace()
+        {
+            string.push(byte as char);
+        } else {
+            return None;
+        }
+    }
+    Some(string)
+}
+
 fn result_types(function: &Function, results: &[ValueId]) -> String {
     let types = vecmap(results, |result| function.dfg.type_of_value(*result).to_string());
     if types.is_empty() {
diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/types.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/types.rs
index 130f1d59e46f..4e4f7e8aa62d 100644
--- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/types.rs
+++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/types.rs
@@ -75,7 +75,7 @@ pub(crate) enum Type {
     Reference(Arc<Type>),

     /// An immutable array value with the given element type and length
-    Array(Arc<CompositeType>, usize),
+    Array(Arc<CompositeType>, u32),

     /// An immutable slice value with a given element type
     Slice(Arc<CompositeType>),
@@ -111,7 +111,7 @@ impl Type {
     }

     /// Creates the str<N> type, of the given length N
-    pub(crate) fn str(length: usize) -> Type {
+    pub(crate) fn str(length: u32) -> Type {
         Type::Array(Arc::new(vec![Type::char()]), length)
     }

@@ -161,7 +161,7 @@ impl Type {
     }

     /// Returns the flattened size of a Type
-    pub(crate) fn flattened_size(&self) -> usize {
+    pub(crate) fn flattened_size(&self) -> u32 {
         match self {
             Type::Array(elements, len) => {
                 elements.iter().fold(0, |sum, elem| sum + (elem.flattened_size() * len))
@@ -190,6 +190,15 @@ impl Type {
         }
     }

+    /// Retrieves the array or slice type within this type, or panics if there is none.
+    pub(crate) fn get_contained_array(&self) -> &Type {
+        match self {
+            Type::Numeric(_) | Type::Function => panic!("Expected an array type"),
+            Type::Array(_, _) | Type::Slice(_) => self,
+            Type::Reference(element) => element.get_contained_array(),
+        }
+    }
+
    pub(crate) fn element_types(self) -> Arc<Vec<Type>> {
        match self {
            Type::Array(element_types, _) | Type::Slice(element_types) => element_types,
diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/array_set.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/array_set.rs
index 96de22600a45..09339cf07973 100644
--- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/array_set.rs
+++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/array_set.rs
@@ -209,6 +209,8 @@ mod tests {
            b1(v0: u32):
              v8 = lt v0, u32 5
              jmpif v8 then: b3, else: b2
+            b2():
+              return
            b3():
              v9 = eq v0, u32 5
              jmpif v9 then: b4, else: b5
@@ -224,8 +226,6 @@ mod tests {
              store v15 at v4
              v17 = add v0, u32 1
              jmp b1(v17)
-            b2():
-              return
        }
        ";
        let ssa = Ssa::from_str(src).unwrap();
diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/as_slice_length.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/as_slice_length.rs
index 76705dcc9db6..75cdea349b7c 100644
--- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/as_slice_length.rs
+++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/as_slice_length.rs
@@ -33,7 +33,7 @@ impl Function {
    }
}

-fn known_slice_lengths(func: &Function) -> HashMap<InstructionId, usize> {
+fn known_slice_lengths(func: &Function) -> HashMap<InstructionId, u32> {
    let mut known_slice_lengths = HashMap::default();
    for block_id in func.reachable_blocks() {
        let block = &func.dfg[block_id];
@@ -61,7 +61,7 @@ fn known_slice_lengths(func: &Function) -> HashMap<InstructionId, u32> {

 fn replace_known_slice_lengths(
    func: &mut Function,
-    known_slice_lengths: HashMap<InstructionId, usize>,
+    known_slice_lengths: HashMap<InstructionId, u32>,
) {
    known_slice_lengths.into_iter().for_each(|(instruction_id, known_length)| {
        let call_returns = func.dfg.instruction_results(instruction_id);
diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs
index ceda0c6272f6..e039b8f0f9eb 100644
--- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs
+++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs
@@ -149,7 +149,8 @@ impl Function {
        use_constraint_info: bool,
        brillig_info: Option<BrilligInfo>,
    ) {
-        let mut context = Context::new(self, use_constraint_info, brillig_info);
+        let mut context = Context::new(use_constraint_info, brillig_info);
+        let mut dom = DominatorTree::with_function(self);
        context.block_queue.push_back(self.entry_block());

        while let Some(block) = context.block_queue.pop_front() {
@@ -158,7 +159,7 @@ impl Function {
            }
            context.visited_blocks.insert(block);

-            context.fold_constants_in_block(self, block);
+            context.fold_constants_in_block(self, &mut dom, block);
        }
    }
}
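The driver above now builds the `DominatorTree` once, outside `Context` (the old `Context::new` captured the function, which forced the dominator tree to live inside the context). The traversal itself is a plain worklist; a self-contained restatement with integer block ids, under the assumption that successors are enqueued as blocks are folded:

    use std::collections::{HashSet, VecDeque};

    // `fold_block` stands in for fold_constants_in_block, `successors` for CFG edges.
    fn fold_all_blocks(
        entry: u32,
        successors: impl Fn(u32) -> Vec<u32>,
        mut fold_block: impl FnMut(u32),
    ) {
        let mut queue = VecDeque::from([entry]);
        let mut visited: HashSet<u32> = HashSet::new();
        while let Some(block) = queue.pop_front() {
            if !visited.insert(block) {
                continue; // already folded
            }
            fold_block(block);
            queue.extend(successors(block));
        }
    }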
@@ -172,22 +173,15 @@ struct Context<'a> {

     /// Contains sets of values which are constrained to be equivalent to each other.
     ///
-    /// The mapping's structure is `side_effects_enabled_var => (constrained_value => [(block, simplified_value)])`.
+    /// The mapping's structure is `side_effects_enabled_var => (constrained_value => simplified_value)`.
     ///
     /// We partition the maps of constrained values according to the side-effects flag at the point
     /// at which the values are constrained. This prevents constraints which are only sometimes enforced
     /// being used to modify the rest of the program.
-    ///
-    /// We also keep track of how a value was simplified to other values per block. That is,
-    /// a same ValueId could have been simplified to one value in one block and to another value
-    /// in another block.
-    constraint_simplification_mappings:
-        HashMap<ValueId, HashMap<ValueId, Vec<(BasicBlockId, ValueId)>>>,
+    constraint_simplification_mappings: ConstraintSimplificationCache,

    // Cache of instructions without any side-effects along with their outputs.
    cached_instruction_results: InstructionResultCache,
-
-    dom: DominatorTree,
}

 #[derive(Copy, Clone)]
@@ -196,9 +190,56 @@ pub(crate) struct BrilligInfo<'a> {
     brillig_functions: &'a BTreeMap<FunctionId, Function>,
 }

+/// Records a simplified equivalent of an [`Instruction`] in the blocks
+/// where the constraint that advised the simplification has been encountered.
+///
+/// For more information see [`ConstraintSimplificationCache`].
+#[derive(Default)]
+struct SimplificationCache {
+    /// Simplified expressions where we found them.
+    ///
+    /// It will always have at least one value because `add` is called
+    /// after the default is constructed.
+    simplifications: HashMap<BasicBlockId, ValueId>,
+}
+
+impl SimplificationCache {
+    /// Called with a newly encountered simplification.
+    fn add(&mut self, dfg: &DataFlowGraph, simple: ValueId, block: BasicBlockId) {
+        self.simplifications
+            .entry(block)
+            .and_modify(|existing| {
+                // `SimplificationCache` may already hold a simplification in this block
+                // so we check whether `simple` is a better simplification than the current one.
+                if let Some((_, simpler)) = simplify(dfg, *existing, simple) {
+                    *existing = simpler;
+                };
+            })
+            .or_insert(simple);
+    }
+
+    /// Try to find a simplification in a visible block.
+    fn get(&self, block: BasicBlockId, dom: &DominatorTree) -> Option<ValueId> {
+        // Deterministically walk up the dominator chain until we encounter a block that contains a simplification.
+        dom.find_map_dominator(block, |b| self.simplifications.get(&b).cloned())
+    }
+}
+
+/// HashMap from `(side_effects_enabled_var, Instruction)` to a simplified expression that it can
+/// be replaced with based on constraints that testify to their equivalence, stored together
+/// with the set of blocks at which this constraint has been observed.
+///
+/// Only blocks dominated by one in the cache should have access to this information, otherwise
+/// we create a sort of time paradox where we replace an instruction with a constant we believe
+/// it _should_ be equal to, without ever actually producing and asserting the value.
+type ConstraintSimplificationCache = HashMap<ValueId, HashMap<ValueId, SimplificationCache>>;

 /// HashMap from `(Instruction, side_effects_enabled_var)` to the results of the instruction.
 /// Stored as a two-level map to avoid cloning Instructions during the `.get` call.
 ///
+/// The `side_effects_enabled_var` is optional because we only use it when `Instruction::requires_acir_gen_predicate`
+/// is true _and_ the constraint information is also taken into account.
+///
 /// In addition to each result, the original BasicBlockId is stored as well. This allows us
 /// to deduplicate instructions across blocks as long as the new block dominates the original.
 type InstructionResultCache = HashMap<Instruction, HashMap<Option<ValueId>, ResultCache>>;
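`find_map_dominator` is introduced elsewhere in this PR, so its exact signature is an assumption here; the contract the cache lookup above relies on can be restated self-containedly as a walk up immediate dominators that returns the first hit:

    // Generic restatement: starting from `block`, try `f` on each block up the
    // dominator chain; `idom` yields the immediate dominator (None at the entry).
    fn find_map_dominator_sketch<B: Copy, T>(
        mut block: B,
        idom: impl Fn(B) -> Option<B>,
        f: impl Fn(B) -> Option<T>,
    ) -> Option<T> {
        loop {
            if let Some(found) = f(block) {
                return Some(found);
            }
            block = idom(block)?;
        }
    }

This is what keeps lookups deterministic: only simplifications recorded in blocks that dominate the current block are ever visible.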
@@ -208,15 +249,11 @@ type InstructionResultCache = HashMap<Instruction, HashMap<Option<ValueId>, ResultCache>>;

 /// For more information see [`InstructionResultCache`].
 #[derive(Default)]
 struct ResultCache {
-    results: Vec<(BasicBlockId, Vec<ValueId>)>,
+    result: Option<(BasicBlockId, Vec<ValueId>)>,
 }

 impl<'brillig> Context<'brillig> {
-    fn new(
-        function: &Function,
-        use_constraint_info: bool,
-        brillig_info: Option<BrilligInfo<'brillig>>,
-    ) -> Self {
+    fn new(use_constraint_info: bool, brillig_info: Option<BrilligInfo<'brillig>>) -> Self {
        Self {
            use_constraint_info,
            brillig_info,
@@ -224,19 +261,25 @@ impl<'brillig> Context<'brillig> {
            block_queue: Default::default(),
            constraint_simplification_mappings: Default::default(),
            cached_instruction_results: Default::default(),
-            dom: DominatorTree::with_function(function),
        }
    }

-    fn fold_constants_in_block(&mut self, function: &mut Function, block: BasicBlockId) {
+    fn fold_constants_in_block(
+        &mut self,
+        function: &mut Function,
+        dom: &mut DominatorTree,
+        block: BasicBlockId,
+    ) {
        let instructions = function.dfg[block].take_instructions();

+        // Default side effect condition variable with an enabled state.
        let mut side_effects_enabled_var =
            function.dfg.make_constant(FieldElement::one(), Type::bool());

        for instruction_id in instructions {
            self.fold_constants_into_instruction(
-                &mut function.dfg,
+                function,
+                dom,
                block,
                instruction_id,
                &mut side_effects_enabled_var,
@@ -247,48 +290,59 @@ impl<'brillig> Context<'brillig> {

    fn fold_constants_into_instruction(
        &mut self,
-        dfg: &mut DataFlowGraph,
-        block: BasicBlockId,
+        function: &mut Function,
+        dom: &mut DominatorTree,
+        mut block: BasicBlockId,
        id: InstructionId,
        side_effects_enabled_var: &mut ValueId,
    ) {
-        let constraint_simplification_mapping =
-            self.constraint_simplification_mappings.get(side_effects_enabled_var);
-        let instruction = Self::resolve_instruction(
-            id,
-            block,
-            dfg,
-            &mut self.dom,
-            constraint_simplification_mapping,
-        );
+        let constraint_simplification_mapping = self.get_constraint_map(*side_effects_enabled_var);
+        let dfg = &mut function.dfg;
+
+        let instruction =
+            Self::resolve_instruction(id, block, dfg, dom, constraint_simplification_mapping);
+
        let old_results = dfg.instruction_results(id).to_vec();

        // If a copy of this instruction exists earlier in the block, then reuse the previous results.
-        if let Some(cached_results) =
-            self.get_cached(dfg, &instruction, *side_effects_enabled_var, block)
+        if let Some(cache_result) =
+            self.get_cached(dfg, dom, &instruction, *side_effects_enabled_var, block)
        {
-            Self::replace_result_ids(dfg, &old_results, cached_results);
-            return;
-        }
+            match cache_result {
+                CacheResult::Cached(cached) => {
+                    // We track whether we may mutate MakeArray instructions before we deduplicate
+                    // them but we still need to issue an extra inc_rc in case they're mutated afterward.
+                    if matches!(instruction, Instruction::MakeArray { .. }) {
+                        let value = *cached.last().unwrap();
+                        let inc_rc = Instruction::IncrementRc { value };
+                        let call_stack = dfg.get_call_stack(id);
+                        dfg.insert_instruction_and_results(inc_rc, block, None, call_stack);
+                    }
+
+                    Self::replace_result_ids(dfg, &old_results, cached);
+                    return;
+                }
+                CacheResult::NeedToHoistToCommonBlock(dominator) => {
+                    // Just change the block to insert in the common dominator instead.
+                    // This will only move the current instance of the instruction right now.
+                    // When constant folding is run a second time later on, it'll catch
+                    // that the previous instance can be deduplicated to this instance.
+                    block = dominator;
+                }
+            }
+        };

-        let new_results = // First try to inline a call to a brillig function with all constant arguments.
- Self::try_inline_brillig_call_with_all_constants( + let new_results = Self::try_inline_brillig_call_with_all_constants( &instruction, &old_results, block, dfg, self.brillig_info, ) + // Otherwise, try inserting the instruction again to apply any optimizations using the newly resolved inputs. .unwrap_or_else(|| { - // Otherwise, try inserting the instruction again to apply any optimizations using the newly resolved inputs. - Self::push_instruction( - id, - instruction.clone(), - &old_results, - block, - dfg, - ) + Self::push_instruction(id, instruction.clone(), &old_results, block, dfg) }); Self::replace_result_ids(dfg, &old_results, &new_results); @@ -296,7 +350,7 @@ impl<'brillig> Context<'brillig> { self.cache_instruction( instruction.clone(), new_results, - dfg, + function, *side_effects_enabled_var, block, ); @@ -314,7 +368,7 @@ impl<'brillig> Context<'brillig> { block: BasicBlockId, dfg: &DataFlowGraph, dom: &mut DominatorTree, - constraint_simplification_mapping: Option<&HashMap>>, + constraint_simplification_mapping: &HashMap, ) -> Instruction { let instruction = dfg[instruction_id].clone(); @@ -324,30 +378,28 @@ impl<'brillig> Context<'brillig> { // This allows us to reach a stable final `ValueId` for each instruction input as we add more // constraints to the cache. fn resolve_cache( + block: BasicBlockId, dfg: &DataFlowGraph, dom: &mut DominatorTree, - cache: Option<&HashMap>>, + cache: &HashMap, value_id: ValueId, - block: BasicBlockId, ) -> ValueId { let resolved_id = dfg.resolve(value_id); - let Some(cached_values) = cache.and_then(|cache| cache.get(&resolved_id)) else { - return resolved_id; - }; - - for (cached_block, cached_value) in cached_values { - // We can only use the simplified value if it was simplified in a block that dominates the current one - if dom.dominates(*cached_block, block) { - return resolve_cache(dfg, dom, cache, *cached_value, block); + match cache.get(&resolved_id) { + Some(simplification_cache) => { + if let Some(simplified) = simplification_cache.get(block, dom) { + resolve_cache(block, dfg, dom, cache, simplified) + } else { + resolved_id + } } + None => resolved_id, } - - resolved_id } // Resolve any inputs to ensure that we're comparing like-for-like instructions. instruction.map_values(|value_id| { - resolve_cache(dfg, dom, constraint_simplification_mapping, value_id, block) + resolve_cache(block, dfg, dom, constraint_simplification_mapping, value_id) }) } @@ -385,7 +437,7 @@ impl<'brillig> Context<'brillig> { &mut self, instruction: Instruction, instruction_results: Vec, - dfg: &DataFlowGraph, + function: &Function, side_effects_enabled_var: ValueId, block: BasicBlockId, ) { @@ -394,21 +446,53 @@ impl<'brillig> Context<'brillig> { // to map from the more complex to the simpler value. if let Instruction::Constrain(lhs, rhs, _) = instruction { // These `ValueId`s should be fully resolved now. - if let Some((complex, simple)) = simplify(dfg, lhs, rhs) { + if let Some((complex, simple)) = simplify(&function.dfg, lhs, rhs) { self.get_constraint_map(side_effects_enabled_var) .entry(complex) .or_default() - .push((block, simple)); + .add(&function.dfg, simple, block); } } } + // If we have an array get whose value is from an array set on the same array at the same index, + // we can simplify that array get to the value of the previous array set. + // + // For example: + // v3 = array_set v0, index v1, value v2 + // v4 = array_get v3, index v1 -> Field + // + // We know that `v4` can be simplified to `v2`. 
+ // Thus, even if the index is dynamic (meaning the array get would have side effects), + // we can simplify the operation when we take into account the predicate. + if let Instruction::ArraySet { index, value, .. } = &instruction { + let use_predicate = + self.use_constraint_info && instruction.requires_acir_gen_predicate(&function.dfg); + let predicate = use_predicate.then_some(side_effects_enabled_var); + + let array_get = Instruction::ArrayGet { array: instruction_results[0], index: *index }; + + self.cached_instruction_results + .entry(array_get) + .or_default() + .entry(predicate) + .or_default() + .cache(block, vec![*value]); + } + + self.remove_possibly_mutated_cached_make_arrays(&instruction, function); + // If the instruction doesn't have side-effects and if it won't interact with enable_side_effects during acir_gen, // we cache the results so we can reuse them if the same instruction appears again later in the block. // Others have side effects representing failure, which are implicit in the ACIR code and can also be deduplicated. - if instruction.can_be_deduplicated(dfg, self.use_constraint_info) { + let can_be_deduplicated = + instruction.can_be_deduplicated(function, self.use_constraint_info); + + // We also allow deduplicating MakeArray instructions that we have tracked which haven't + // been mutated. + if can_be_deduplicated || matches!(instruction, Instruction::MakeArray { .. }) { let use_predicate = - self.use_constraint_info && instruction.requires_acir_gen_predicate(dfg); + self.use_constraint_info && instruction.requires_acir_gen_predicate(&function.dfg); let predicate = use_predicate.then_some(side_effects_enabled_var); self.cached_instruction_results @@ -420,10 +504,12 @@ impl<'brillig> Context<'brillig> { } } + /// Get the simplification mapping from complex to simpler instructions, + /// which all depend on the same side effect condition variable. fn get_constraint_map( &mut self, side_effects_enabled_var: ValueId, - ) -> &mut HashMap> { + ) -> &mut HashMap { self.constraint_simplification_mappings.entry(side_effects_enabled_var).or_default() } @@ -438,19 +524,20 @@ impl<'brillig> Context<'brillig> { } } - fn get_cached<'a>( - &'a mut self, + /// Get a cached result if it can be used in this context. + fn get_cached( + &self, dfg: &DataFlowGraph, + dom: &mut DominatorTree, instruction: &Instruction, side_effects_enabled_var: ValueId, block: BasicBlockId, - ) -> Option<&'a [ValueId]> { + ) -> Option { let results_for_instruction = self.cached_instruction_results.get(instruction)?; - let predicate = self.use_constraint_info && instruction.requires_acir_gen_predicate(dfg); let predicate = predicate.then_some(side_effects_enabled_var); - results_for_instruction.get(&predicate)?.get(block, &mut self.dom) + results_for_instruction.get(&predicate)?.get(block, dom, instruction.has_side_effects(dfg)) } /// Checks if the given instruction is a call to a brillig function with all constant arguments. @@ -612,12 +699,34 @@ impl<'brillig> Context<'brillig> { } } } + + fn remove_possibly_mutated_cached_make_arrays( + &mut self, + instruction: &Instruction, + function: &Function, + ) { + use Instruction::{ArraySet, Store}; + + // Should we consider calls to slice_push_back and similar to be mutating operations as well? + if let Store { value: array, .. } | ArraySet { array, .. } = instruction { + let instruction = match &function.dfg[*array] { + Value::Instruction { instruction, .. 
} => &function.dfg[*instruction], + _ => return, + }; + + if matches!(instruction, Instruction::MakeArray { .. }) { + self.cached_instruction_results.remove(instruction); + } + } + } } impl ResultCache { /// Records that an `Instruction` in block `block` produced the result values `results`. fn cache(&mut self, block: BasicBlockId, results: Vec) { - self.results.push((block, results)); + if self.result.is_none() { + self.result = Some((block, results)); + } } /// Returns a set of [`ValueId`]s produced from a copy of this [`Instruction`] which sits @@ -626,13 +735,23 @@ impl ResultCache { /// We require that the cached instruction's block dominates `block` in order to avoid /// cycles causing issues (e.g. two instructions being replaced with the results of each other /// such that neither instruction exists anymore.) - fn get(&self, block: BasicBlockId, dom: &mut DominatorTree) -> Option<&[ValueId]> { - for (origin_block, results) in &self.results { + fn get( + &self, + block: BasicBlockId, + dom: &mut DominatorTree, + has_side_effects: bool, + ) -> Option { + self.result.as_ref().and_then(|(origin_block, results)| { if dom.dominates(*origin_block, block) { - return Some(results); + Some(CacheResult::Cached(results)) + } else if !has_side_effects { + // Insert a copy of this instruction in the common dominator + let dominator = dom.common_dominator(*origin_block, block); + Some(CacheResult::NeedToHoistToCommonBlock(dominator)) + } else { + None } - } - None + }) } } @@ -663,7 +782,7 @@ pub(crate) fn type_to_brillig_parameter(typ: &Type) -> Option for item_typ in item_type.iter() { parameters.push(type_to_brillig_parameter(item_typ)?); } - Some(BrilligParameter::Array(parameters, *size)) + Some(BrilligParameter::Array(parameters, *size as usize)) } _ => None, } @@ -940,32 +1059,22 @@ mod test { // Regression for #4600 #[test] fn array_get_regression() { - // fn main f0 { - // b0(v0: u1, v1: u64): - // enable_side_effects_if v0 - // v2 = make_array [Field 0, Field 1] - // v3 = array_get v2, index v1 - // v4 = not v0 - // enable_side_effects_if v4 - // v5 = array_get v2, index v1 - // } - // // We want to make sure after constant folding both array_gets remain since they are // under different enable_side_effects_if contexts and thus one may be disabled while // the other is not. If one is removed, it is possible e.g. v4 is replaced with v2 which // is disabled (only gets from index 0) and thus returns the wrong result. let src = " - acir(inline) fn main f0 { - b0(v0: u1, v1: u64): - enable_side_effects v0 - v4 = make_array [Field 0, Field 1] : [Field; 2] - v5 = array_get v4, index v1 -> Field - v6 = not v0 - enable_side_effects v6 - v7 = array_get v4, index v1 -> Field - return - } - "; + acir(inline) fn main f0 { + b0(v0: u1, v1: u64): + enable_side_effects v0 + v4 = make_array [Field 0, Field 1] : [Field; 2] + v5 = array_get v4, index v1 -> Field + v6 = not v0 + enable_side_effects v6 + v7 = array_get v4, index v1 -> Field + return + } + "; let ssa = Ssa::from_str(src).unwrap(); // Expected output is unchanged @@ -1032,7 +1141,6 @@ mod test { // v5 = call keccakf1600(v1) // v6 = call keccakf1600(v2) // } - // // Here we're checking a situation where two identical arrays are being initialized twice and being assigned separate `ValueId`s. // This would result in otherwise identical instructions not being deduplicated. 
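+        // Both make_array instructions get distinct ValueIds for the same constant
+        // array, so the two keccakf1600 calls can only be recognized as duplicates
+        // once the arrays themselves are deduplicated down to a single id.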
let main_id = Id::test_new(0); @@ -1071,6 +1179,7 @@ mod test { // fn main f0 { // b0(v0: u64): // v1 = make_array [v0, u64 0, u64 0, u64 0, u64 0, u64 0, u64 0, u64 0, u64 0, u64 0, u64 0, u64 0, u64 0, u64 0, u64 0, u64 0, u64 0, u64 0, u64 0, u64 0, u64 0, u64 0, u64 0, u64 0, u64 0] + // inc_rc v1 // v5 = call keccakf1600(v1) // } let ssa = ssa.fold_constants(); @@ -1080,7 +1189,107 @@ mod test { let main = ssa.main(); let instructions = main.dfg[main.entry_block()].instructions(); let ending_instruction_count = instructions.len(); - assert_eq!(ending_instruction_count, 2); + assert_eq!(ending_instruction_count, 3); + } + + #[test] + fn deduplicate_across_blocks() { + // fn main f0 { + // b0(v0: u1): + // v1 = not v0 + // jmp b1() + // b1(): + // v2 = not v0 + // return v2 + // } + let main_id = Id::test_new(0); + + // Compiling main + let mut builder = FunctionBuilder::new("main".into(), main_id); + let b1 = builder.insert_block(); + + let v0 = builder.add_parameter(Type::bool()); + let _v1 = builder.insert_not(v0); + builder.terminate_with_jmp(b1, Vec::new()); + + builder.switch_to_block(b1); + let v2 = builder.insert_not(v0); + builder.terminate_with_return(vec![v2]); + + let ssa = builder.finish(); + let main = ssa.main(); + assert_eq!(main.dfg[main.entry_block()].instructions().len(), 1); + assert_eq!(main.dfg[b1].instructions().len(), 1); + + // Expected output: + // + // fn main f0 { + // b0(v0: u1): + // v1 = not v0 + // jmp b1() + // b1(): + // return v1 + // } + let ssa = ssa.fold_constants_using_constraints(); + let main = ssa.main(); + assert_eq!(main.dfg[main.entry_block()].instructions().len(), 1); + assert_eq!(main.dfg[b1].instructions().len(), 0); + } + + #[test] + fn deduplicate_across_non_dominated_blocks() { + let src = " + brillig(inline) fn main f0 { + b0(v0: u32): + v2 = lt u32 1000, v0 + jmpif v2 then: b1, else: b2 + b1(): + v4 = shl v0, u32 1 + v5 = lt v0, v4 + constrain v5 == u1 1 + jmp b2() + b2(): + v7 = lt u32 1000, v0 + jmpif v7 then: b3, else: b4 + b3(): + v8 = shl v0, u32 1 + v9 = lt v0, v8 + constrain v9 == u1 1 + jmp b4() + b4(): + return + } + "; + let ssa = Ssa::from_str(src).unwrap(); + + // v4 has been hoisted, although: + // - v5 has not yet been removed since it was encountered earlier in the program + // - v8 hasn't been recognized as a duplicate of v6 yet since they still reference v4 and + // v5 respectively + let expected = " + brillig(inline) fn main f0 { + b0(v0: u32): + v2 = lt u32 1000, v0 + v4 = shl v0, u32 1 + jmpif v2 then: b1, else: b2 + b1(): + v5 = shl v0, u32 1 + v6 = lt v0, v5 + constrain v6 == u1 1 + jmp b2() + b2(): + jmpif v2 then: b3, else: b4 + b3(): + v8 = lt v0, v4 + constrain v8 == u1 1 + jmp b4() + b4(): + return + } + "; + + let ssa = ssa.fold_constants_using_constraints(); + assert_normalized_ssa_equals(ssa, expected); } #[test] @@ -1260,46 +1469,158 @@ mod test { } #[test] - fn deduplicate_across_blocks() { - // fn main f0 { - // b0(v0: u1): - // v1 = not v0 - // jmp b1() - // b1(): - // v2 = not v0 - // return v2 - // } - let main_id = Id::test_new(0); + fn does_not_use_cached_constrain_in_block_that_is_not_dominated() { + let src = " + brillig(inline) fn main f0 { + b0(v0: Field, v1: Field): + v3 = eq v0, Field 0 + jmpif v3 then: b1, else: b2 + b1(): + v5 = eq v1, Field 1 + constrain v1 == Field 1 + jmp b2() + b2(): + v6 = eq v1, Field 0 + constrain v1 == Field 0 + return + } + "; + let ssa = Ssa::from_str(src).unwrap(); + let ssa = ssa.fold_constants_using_constraints(); + assert_normalized_ssa_equals(ssa, src); + } 
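A usage note on the hoisting behavior exercised by `deduplicate_across_non_dominated_blocks` above: hoisting and reuse intentionally happen in separate rounds (see the comment on `NeedToHoistToCommonBlock`), so full deduplication is obtained by running the pass twice. A sketch of the intended call pattern:

    // Round 1 hoists the duplicated pure instruction to the common dominator;
    // round 2 sees that copy dominating both former sites and deduplicates them.
    let ssa = ssa.fold_constants_using_constraints();
    let ssa = ssa.fold_constants_using_constraints();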
- // Compiling main - let mut builder = FunctionBuilder::new("main".into(), main_id); - let b1 = builder.insert_block(); + #[test] + fn does_not_hoist_constrain_to_common_ancestor() { + let src = " + brillig(inline) fn main f0 { + b0(v0: Field, v1: Field): + v3 = eq v0, Field 0 + jmpif v3 then: b1, else: b2 + b1(): + constrain v1 == Field 1 + jmp b2() + b2(): + jmpif v0 then: b3, else: b4 + b3(): + constrain v1 == Field 1 // This was incorrectly hoisted to b0 but this condition is not valid when going b0 -> b2 -> b4 + jmp b4() + b4(): + return + } + "; + let ssa = Ssa::from_str(src).unwrap(); + let ssa = ssa.fold_constants_using_constraints(); + assert_normalized_ssa_equals(ssa, src); + } - let v0 = builder.add_parameter(Type::bool()); - let _v1 = builder.insert_not(v0); - builder.terminate_with_jmp(b1, Vec::new()); + #[test] + fn does_not_hoist_sub_to_common_ancestor() { + let src = " + acir(inline) fn main f0 { + b0(v0: u32): + v2 = eq v0, u32 0 + jmpif v2 then: b4, else: b1 + b1(): + jmpif v0 then: b3, else: b2 + b2(): + jmp b5() + b3(): + v4 = sub v0, u32 1 // We can't hoist this because v0 is zero here and it will lead to an underflow + jmp b5() + b4(): + v5 = sub v0, u32 1 + jmp b5() + b5(): + return + } + "; + let ssa = Ssa::from_str(src).unwrap(); + let ssa = ssa.fold_constants_using_constraints(); + assert_normalized_ssa_equals(ssa, src); + } - builder.switch_to_block(b1); - let v2 = builder.insert_not(v0); - builder.terminate_with_return(vec![v2]); + #[test] + fn deduplicates_side_effecting_intrinsics() { + let src = " + // After EnableSideEffectsIf removal: + acir(inline) fn main f0 { + b0(v0: Field, v1: Field, v2: u1): + v4 = call is_unconstrained() -> u1 + v7 = call to_be_radix(v0, u32 256) -> [u8; 1] // `a.to_be_radix(256)`; + inc_rc v7 + v8 = call to_be_radix(v0, u32 256) -> [u8; 1] // duplicate load of `a` + inc_rc v8 + v9 = cast v2 as Field // `if c { a.to_be_radix(256) }` + v10 = mul v0, v9 // attaching `c` to `a` + v11 = call to_be_radix(v10, u32 256) -> [u8; 1] // calling `to_radix(c * a)` + inc_rc v11 + enable_side_effects v2 // side effect var for `c` shifted down by removal + return + } + "; + let ssa = Ssa::from_str(src).unwrap(); + let expected = " + acir(inline) fn main f0 { + b0(v0: Field, v1: Field, v2: u1): + v4 = call is_unconstrained() -> u1 + v7 = call to_be_radix(v0, u32 256) -> [u8; 1] + inc_rc v7 + inc_rc v7 + v8 = cast v2 as Field + v9 = mul v0, v8 + v10 = call to_be_radix(v9, u32 256) -> [u8; 1] + inc_rc v10 + enable_side_effects v2 + return + } + "; + let ssa = ssa.fold_constants_using_constraints(); + assert_normalized_ssa_equals(ssa, expected); + } - let ssa = builder.finish(); - let main = ssa.main(); - assert_eq!(main.dfg[main.entry_block()].instructions().len(), 1); - assert_eq!(main.dfg[b1].instructions().len(), 1); + #[test] + fn array_get_from_array_set_with_different_predicates() { + let src = " + acir(inline) fn main f0 { + b0(v0: [Field; 3], v1: u32, v2: Field): + enable_side_effects u1 0 + v4 = array_set v0, index v1, value v2 + enable_side_effects u1 1 + v6 = array_get v4, index v1 -> Field + return v6 + } + "; + + let ssa = Ssa::from_str(src).unwrap(); - // Expected output: - // - // fn main f0 { - // b0(v0: u1): - // v1 = not v0 - // jmp b1() - // b1(): - // return v1 - // } let ssa = ssa.fold_constants_using_constraints(); - let main = ssa.main(); - assert_eq!(main.dfg[main.entry_block()].instructions().len(), 1); - assert_eq!(main.dfg[b1].instructions().len(), 0); + // We expect the code to be unchanged + 
assert_normalized_ssa_equals(ssa, src); + } + + #[test] + fn array_get_from_array_set_same_predicates() { + let src = " + acir(inline) fn main f0 { + b0(v0: [Field; 3], v1: u32, v2: Field): + enable_side_effects u1 1 + v4 = array_set v0, index v1, value v2 + v6 = array_get v4, index v1 -> Field + return v6 + } + "; + let ssa = Ssa::from_str(src).unwrap(); + + let expected = " + acir(inline) fn main f0 { + b0(v0: [Field; 3], v1: u32, v2: Field): + enable_side_effects u1 1 + v4 = array_set v0, index v1, value v2 + return v2 + } + "; + let ssa = ssa.fold_constants_using_constraints(); + assert_normalized_ssa_equals(ssa, expected); } } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/die.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/die.rs index 8d3fa9cc6159..f7ac6f7b313c 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/die.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/die.rs @@ -18,8 +18,6 @@ use crate::ssa::{ ssa_gen::{Ssa, SSA_WORD_SIZE}, }; -use super::rc::{pop_rc_for, RcInstruction}; - impl Ssa { /// Performs Dead Instruction Elimination (DIE) to remove any instructions with /// unused results. @@ -108,8 +106,6 @@ impl Context { let instructions_len = block.instructions().len(); - let mut rc_tracker = RcTracker::default(); - // Indexes of instructions that might be out of bounds. // We'll remove those, but before that we'll insert bounds checks for them. let mut possible_index_out_of_bounds_indexes = Vec::new(); @@ -127,8 +123,9 @@ impl Context { .push(instructions_len - instruction_index - 1); } } else { - use Instruction::*; - if matches!(instruction, IncrementRc { .. } | DecrementRc { .. }) { + // We can't remove rc instructions if they're loaded from a reference + // since we'd have no way of knowing whether the reference is still used. + if Self::is_inc_dec_instruction_on_known_array(instruction, &function.dfg) { self.rc_instructions.push((*instruction_id, block_id)); } else { instruction.for_each_value(|value| { @@ -136,13 +133,8 @@ impl Context { }); } } - - rc_tracker.track_inc_rcs_to_remove(*instruction_id, function); } - self.instructions_to_remove.extend(rc_tracker.get_non_mutated_arrays()); - self.instructions_to_remove.extend(rc_tracker.rc_pairs_to_remove); - // If there are some instructions that might trigger an out of bounds error, // first add constrain checks. 
Then run the DIE pass again, which will remove those
        // but leave the constraints (and any values needed by those constraints)
@@ -201,19 +193,28 @@ impl Context {
     }

     fn remove_rc_instructions(self, dfg: &mut DataFlowGraph) {
-        for (rc, block) in self.rc_instructions {
-            let value = match &dfg[rc] {
-                Instruction::IncrementRc { value } => *value,
-                Instruction::DecrementRc { value } => *value,
-                other => {
-                    unreachable!("Expected IncrementRc or DecrementRc instruction, found {other:?}")
+        let unused_rc_values_by_block: HashMap<BasicBlockId, HashSet<InstructionId>> =
+            self.rc_instructions.into_iter().fold(HashMap::default(), |mut acc, (rc, block)| {
+                let value = match &dfg[rc] {
+                    Instruction::IncrementRc { value } => *value,
+                    Instruction::DecrementRc { value } => *value,
+                    other => {
+                        unreachable!(
+                            "Expected IncrementRc or DecrementRc instruction, found {other:?}"
+                        )
+                    }
+                };
+
+                if !self.used_values.contains(&value) {
+                    acc.entry(block).or_default().insert(rc);
                 }
-            };
+                acc
+            });

-            // This could be more efficient if we have to remove multiple instructions in a single block
-            if !self.used_values.contains(&value) {
-                dfg[block].instructions_mut().retain(|instruction| *instruction != rc);
-            }
+        for (block, instructions_to_remove) in unused_rc_values_by_block {
+            dfg[block]
+                .instructions_mut()
+                .retain(|instruction| !instructions_to_remove.contains(instruction));
         }
     }

@@ -337,6 +338,28 @@ impl Context {

         inserted_check
     }
+
+    /// True if this is an `Instruction::IncrementRc` or `Instruction::DecrementRc`
+    /// operating on an array directly from an `Instruction::MakeArray` or an
+    /// intrinsic known to return a fresh array.
+    fn is_inc_dec_instruction_on_known_array(
+        instruction: &Instruction,
+        dfg: &DataFlowGraph,
+    ) -> bool {
+        use Instruction::*;
+        if let IncrementRc { value } | DecrementRc { value } = instruction {
+            if let Value::Instruction { instruction, .. } = &dfg[*value] {
+                return match &dfg[*instruction] {
+                    MakeArray { .. } => true,
+                    Call { func, .. } => {
+                        matches!(&dfg[*func], Value::Intrinsic(_) | Value::ForeignFunction(_))
+                    }
+                    _ => false,
+                };
+            }
+        }
+        false
+    }
 }

 fn instruction_might_result_in_out_of_bounds(
@@ -499,103 +522,6 @@ fn apply_side_effects(
     (lhs, rhs)
 }

-#[derive(Default)]
-struct RcTracker {
-    // We can track IncrementRc instructions per block to determine whether they are useless.
-    // IncrementRc and DecrementRc instructions are normally side effectual instructions, but we remove
-    // them if their value is not used anywhere in the function. However, even when their value is used, their existence
-    // is pointless logic if there is no array set between the increment and the decrement of the reference counter.
-    // We track per block whether an IncrementRc instruction has a paired DecrementRc instruction
-    // with the same value but no array set in between.
-    // If we see an inc/dec RC pair within a block we can safely remove both instructions.
-    rcs_with_possible_pairs: HashMap<Type, Vec<RcInstruction>>,
-    rc_pairs_to_remove: HashSet<InstructionId>,
-    // We also separately track all IncrementRc instructions and all arrays which have been mutably borrowed.
-    // If an array has not been mutably borrowed we can then safely remove all IncrementRc instructions on that array.
-    inc_rcs: HashMap<ValueId, HashSet<InstructionId>>,
-    mut_borrowed_arrays: HashSet<ValueId>,
-    // The SSA often creates patterns where after simplifications we end up with repeat
-    // IncrementRc instructions on the same value. We track whether the previous instruction was an IncrementRc,
-    // and if the current instruction is also an IncrementRc on the same value we remove the current instruction.
-    // `None` if the previous instruction was anything other than an IncrementRc
-    previous_inc_rc: Option<ValueId>,
-}
-
-impl RcTracker {
-    fn track_inc_rcs_to_remove(&mut self, instruction_id: InstructionId, function: &Function) {
-        let instruction = &function.dfg[instruction_id];
-
-        if let Instruction::IncrementRc { value } = instruction {
-            if let Some(previous_value) = self.previous_inc_rc {
-                if previous_value == *value {
-                    self.rc_pairs_to_remove.insert(instruction_id);
-                }
-            }
-            self.previous_inc_rc = Some(*value);
-        } else {
-            self.previous_inc_rc = None;
-        }
-
-        // DIE loops over a block in reverse order, so we insert an RC instruction for possible removal
-        // when we see a DecrementRc and check whether it was possibly mutated when we see an IncrementRc.
-        match instruction {
-            Instruction::IncrementRc { value } => {
-                if let Some(inc_rc) =
-                    pop_rc_for(*value, function, &mut self.rcs_with_possible_pairs)
-                {
-                    if !inc_rc.possibly_mutated {
-                        self.rc_pairs_to_remove.insert(inc_rc.id);
-                        self.rc_pairs_to_remove.insert(instruction_id);
-                    }
-                }
-
-                self.inc_rcs.entry(*value).or_default().insert(instruction_id);
-            }
-            Instruction::DecrementRc { value } => {
-                let typ = function.dfg.type_of_value(*value);
-
-                // We assume arrays aren't mutated until we find an array_set
-                let dec_rc =
-                    RcInstruction { id: instruction_id, array: *value, possibly_mutated: false };
-                self.rcs_with_possible_pairs.entry(typ).or_default().push(dec_rc);
-            }
-            Instruction::ArraySet { array, .. } => {
-                let typ = function.dfg.type_of_value(*array);
-                if let Some(dec_rcs) = self.rcs_with_possible_pairs.get_mut(&typ) {
-                    for dec_rc in dec_rcs {
-                        dec_rc.possibly_mutated = true;
-                    }
-                }
-
-                self.mut_borrowed_arrays.insert(*array);
-            }
-            Instruction::Store { value, .. } => {
-                // We are very conservative and say that any store of an array value means it has the potential
-                // to be mutated. This is done due to the tracking of mutable borrows still being per block.
-                let typ = function.dfg.type_of_value(*value);
-                if matches!(&typ, Type::Array(..)
| Type::Slice(..)) { - self.mut_borrowed_arrays.insert(*value); - } - } - _ => {} - } - } - - fn get_non_mutated_arrays(&self) -> HashSet { - self.inc_rcs - .keys() - .filter_map(|value| { - if !self.mut_borrowed_arrays.contains(value) { - Some(&self.inc_rcs[value]) - } else { - None - } - }) - .flatten() - .copied() - .collect() - } -} #[cfg(test)] mod test { use std::sync::Arc; @@ -604,7 +530,7 @@ mod test { use crate::ssa::{ function_builder::FunctionBuilder, - ir::{instruction::Instruction, map::Id, types::Type}, + ir::{map::Id, types::Type}, opt::assert_normalized_ssa_equals, Ssa, }; @@ -676,30 +602,6 @@ mod test { assert_normalized_ssa_equals(ssa, expected); } - #[test] - fn remove_useless_paired_rcs_even_when_used() { - let src = " - acir(inline) fn main f0 { - b0(v0: [Field; 2]): - inc_rc v0 - v2 = array_get v0, index u32 0 -> Field - dec_rc v0 - return v2 - } - "; - let ssa = Ssa::from_str(src).unwrap(); - - let expected = " - acir(inline) fn main f0 { - b0(v0: [Field; 2]): - v2 = array_get v0, index u32 0 -> Field - return v2 - } - "; - let ssa = ssa.dead_instruction_elimination(); - assert_normalized_ssa_equals(ssa, expected); - } - #[test] fn keep_paired_rcs_with_array_set() { let src = " @@ -770,92 +672,23 @@ mod test { } #[test] - fn keep_inc_rc_on_borrowed_array_set() { - // acir(inline) fn main f0 { - // b0(v0: [u32; 2]): - // inc_rc v0 - // v3 = array_set v0, index u32 0, value u32 1 - // inc_rc v0 - // inc_rc v0 - // inc_rc v0 - // v4 = array_get v3, index u32 1 - // return v4 - // } - let main_id = Id::test_new(0); - - // Compiling main - let mut builder = FunctionBuilder::new("main".into(), main_id); - let array_type = Type::Array(Arc::new(vec![Type::unsigned(32)]), 2); - let v0 = builder.add_parameter(array_type.clone()); - builder.increment_array_reference_count(v0); - let zero = builder.numeric_constant(0u128, Type::unsigned(32)); - let one = builder.numeric_constant(1u128, Type::unsigned(32)); - let v3 = builder.insert_array_set(v0, zero, one); - builder.increment_array_reference_count(v0); - builder.increment_array_reference_count(v0); - builder.increment_array_reference_count(v0); - - let v4 = builder.insert_array_get(v3, one, Type::unsigned(32)); - - builder.terminate_with_return(vec![v4]); - - let ssa = builder.finish(); - let main = ssa.main(); - - // The instruction count never includes the terminator instruction - assert_eq!(main.dfg[main.entry_block()].instructions().len(), 6); - - // We expect the output to be unchanged - // Expected output: - // - // acir(inline) fn main f0 { - // b0(v0: [u32; 2]): - // inc_rc v0 - // v3 = array_set v0, index u32 0, value u32 1 - // inc_rc v0 - // v4 = array_get v3, index u32 1 - // return v4 - // } - let ssa = ssa.dead_instruction_elimination(); - let main = ssa.main(); - - let instructions = main.dfg[main.entry_block()].instructions(); - // We expect only the repeated inc_rc instructions to be collapsed into a single inc_rc. - assert_eq!(instructions.len(), 4); - - assert!(matches!(&main.dfg[instructions[0]], Instruction::IncrementRc { .. })); - assert!(matches!(&main.dfg[instructions[1]], Instruction::ArraySet { .. })); - assert!(matches!(&main.dfg[instructions[2]], Instruction::IncrementRc { .. })); - assert!(matches!(&main.dfg[instructions[3]], Instruction::ArrayGet { .. 
})); - } - - #[test] - fn remove_inc_rcs_that_are_never_mutably_borrowed() { + fn does_not_remove_inc_or_dec_rc_of_if_they_are_loaded_from_a_reference() { let src = " - acir(inline) fn main f0 { - b0(v0: [Field; 2]): - inc_rc v0 - inc_rc v0 - inc_rc v0 - v2 = array_get v0, index u32 0 -> Field - inc_rc v0 - return v2 + brillig(inline) fn borrow_mut f0 { + b0(v0: &mut [Field; 3]): + v1 = load v0 -> [Field; 3] + inc_rc v1 // this one shouldn't be removed + v2 = load v0 -> [Field; 3] + inc_rc v2 // this one shouldn't be removed + v3 = load v0 -> [Field; 3] + v6 = array_set v3, index u32 0, value Field 5 + store v6 at v0 + dec_rc v6 + return } "; let ssa = Ssa::from_str(src).unwrap(); - let main = ssa.main(); - - // The instruction count never includes the terminator instruction - assert_eq!(main.dfg[main.entry_block()].instructions().len(), 5); - - let expected = " - acir(inline) fn main f0 { - b0(v0: [Field; 2]): - v2 = array_get v0, index u32 0 -> Field - return v2 - } - "; let ssa = ssa.dead_instruction_elimination(); - assert_normalized_ssa_equals(ssa, expected); + assert_normalized_ssa_equals(ssa, src); } } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs index 5d114672a556..3fbccf93ec93 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs @@ -131,8 +131,7 @@ //! v11 = mul v4, Field 12 //! v12 = add v10, v11 //! store v12 at v5 (new store) -use fxhash::FxHashMap as HashMap; -use std::collections::{BTreeMap, HashSet}; +use fxhash::{FxHashMap as HashMap, FxHashSet as HashSet}; use acvm::{acir::AcirField, acir::BlackBoxFunc, FieldElement}; use iter_extended::vecmap; @@ -186,18 +185,6 @@ struct Context<'f> { /// Maps start of branch -> end of branch branch_ends: HashMap, - /// Maps an address to the old and new value of the element at that address - /// These only hold stores for one block at a time and is cleared - /// between inlining of branches. - store_values: HashMap, - - /// Stores all allocations local to the current branch. - /// Since these branches are local to the current branch (ie. only defined within one branch of - /// an if expression), they should not be merged with their previous value or stored value in - /// the other branch since there is no such value. The ValueId here is that which is returned - /// by the allocate instruction. - local_allocations: HashSet, - /// A stack of each jmpif condition that was taken to reach a particular point in the program. /// When two branches are merged back into one, this constitutes a join point, and is analogous /// to the rest of the program after an if statement. When such a join point / end block is @@ -214,13 +201,15 @@ struct Context<'f> { /// When processing a block, we pop this stack to get its arguments /// and at the end we push the arguments for his successor arguments_stack: Vec>, -} -#[derive(Clone)] -pub(crate) struct Store { - old_value: ValueId, - new_value: ValueId, - call_stack: CallStack, + /// Stores all allocations local to the current branch. + /// + /// Since these branches are local to the current branch (i.e. only defined within one branch of + /// an if expression), they should not be merged with their previous value or stored value in + /// the other branch since there is no such value. + /// + /// The `ValueId` here is that which is returned by the allocate instruction. 
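    /// As a hedged illustration (schematic SSA, not this file's exact output) of
    /// why no merge is needed for these:
    ///
    /// ```text
    /// enable_side_effects v_c        // inside the then-branch
    /// v10 = allocate -> &mut Field   // v10 is recorded in `local_allocations`
    /// store Field 5 at v10           // kept as a plain store: there is no
    ///                                // "previous" value of v10 to merge against
    /// ```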
+ local_allocations: HashSet, } #[derive(Clone)] @@ -231,8 +220,6 @@ struct ConditionalBranch { old_condition: ValueId, // The condition of the branch condition: ValueId, - // The store values accumulated when processing the branch - store_values: HashMap, // The allocations accumulated when processing the branch local_allocations: HashSet, } @@ -263,12 +250,11 @@ fn flatten_function_cfg(function: &mut Function, no_predicates: &HashMap Context<'f> { let old_condition = *condition; let then_condition = self.inserter.resolve(old_condition); - let old_stores = std::mem::take(&mut self.store_values); let old_allocations = std::mem::take(&mut self.local_allocations); let branch = ConditionalBranch { old_condition, condition: self.link_condition(then_condition), - store_values: old_stores, - local_allocations: old_allocations, last_block: *then_destination, + local_allocations: old_allocations, }; let cond_context = ConditionalContext { condition: then_condition, @@ -473,19 +457,12 @@ impl<'f> Context<'f> { ); let else_condition = self.link_condition(else_condition); - // Make sure the else branch sees the previous values of each store - // rather than any values created in the 'then' branch. - let old_stores = std::mem::take(&mut cond_context.then_branch.store_values); - cond_context.then_branch.store_values = std::mem::take(&mut self.store_values); - self.undo_stores_in_then_branch(&cond_context.then_branch.store_values); - let old_allocations = std::mem::take(&mut self.local_allocations); let else_branch = ConditionalBranch { old_condition: cond_context.then_branch.old_condition, condition: else_condition, - store_values: old_stores, - local_allocations: old_allocations, last_block: *block, + local_allocations: old_allocations, }; cond_context.then_branch.local_allocations.clear(); cond_context.else_branch = Some(else_branch); @@ -509,10 +486,8 @@ impl<'f> Context<'f> { } let mut else_branch = cond_context.else_branch.unwrap(); - let stores_in_branch = std::mem::replace(&mut self.store_values, else_branch.store_values); self.local_allocations = std::mem::take(&mut else_branch.local_allocations); else_branch.last_block = *block; - else_branch.store_values = stores_in_branch; cond_context.else_branch = Some(else_branch); // We must remember to reset whether side effects are enabled when both branches @@ -562,7 +537,11 @@ impl<'f> Context<'f> { let args = vecmap(then_args.iter().zip(else_args), |(then_arg, else_arg)| { (self.inserter.resolve(*then_arg), self.inserter.resolve(else_arg)) }); - + let else_condition = if let Some(branch) = cond_context.else_branch { + branch.condition + } else { + self.inserter.function.dfg.make_constant(FieldElement::zero(), Type::bool()) + }; let block = self.inserter.function.entry_block(); // Cannot include this in the previous vecmap since it requires exclusive access to self @@ -570,6 +549,7 @@ impl<'f> Context<'f> { let instruction = Instruction::IfElse { then_condition: cond_context.then_branch.condition, then_value: then_arg, + else_condition, else_value: else_arg, }; let call_stack = cond_context.call_stack.clone(); @@ -580,8 +560,6 @@ impl<'f> Context<'f> { .first() }); - let call_stack = cond_context.call_stack; - self.merge_stores(cond_context.then_branch, cond_context.else_branch, call_stack); self.arguments_stack.pop(); self.arguments_stack.pop(); self.arguments_stack.push(args); @@ -636,116 +614,29 @@ impl<'f> Context<'f> { self.insert_instruction_with_typevars(enable_side_effects, None, call_stack); } - /// Merge any store instructions found in each 
branch. - /// - /// This function relies on the 'then' branch being merged before the 'else' branch of a jmpif - /// instruction. If this ordering is changed, the ordering that store values are merged within - /// this function also needs to be changed to reflect that. - fn merge_stores( - &mut self, - then_branch: ConditionalBranch, - else_branch: Option, - call_stack: CallStack, - ) { - // Address -> (then_value, else_value, value_before_the_if) - let mut new_map = BTreeMap::new(); - - for (address, store) in then_branch.store_values { - new_map.insert(address, (store.new_value, store.old_value, store.old_value)); - } - - if else_branch.is_some() { - for (address, store) in else_branch.clone().unwrap().store_values { - if let Some(entry) = new_map.get_mut(&address) { - entry.1 = store.new_value; - } else { - new_map.insert(address, (store.old_value, store.new_value, store.old_value)); - } - } - } - - let then_condition = then_branch.condition; - let block = self.inserter.function.entry_block(); - - // Merging must occur in a separate loop as we cannot borrow `self` as mutable while `value_merger` does - let mut new_values = HashMap::default(); - for (address, (then_case, else_case, _)) in &new_map { - let instruction = Instruction::IfElse { - then_condition, - then_value: *then_case, - else_value: *else_case, - }; - let dfg = &mut self.inserter.function.dfg; - let value = dfg - .insert_instruction_and_results(instruction, block, None, call_stack.clone()) - .first(); - - new_values.insert(address, value); - } - - // Replace stores with new merged values - for (address, (_, _, old_value)) in &new_map { - let value = new_values[address]; - let address = *address; - self.insert_instruction_with_typevars( - Instruction::Store { address, value }, - None, - call_stack.clone(), - ); - - if let Some(store) = self.store_values.get_mut(&address) { - store.new_value = value; - } else { - self.store_values.insert( - address, - Store { - old_value: *old_value, - new_value: value, - call_stack: call_stack.clone(), - }, - ); - } - } - } - - fn remember_store(&mut self, address: ValueId, new_value: ValueId, call_stack: CallStack) { - if !self.local_allocations.contains(&address) { - if let Some(store_value) = self.store_values.get_mut(&address) { - store_value.new_value = new_value; - } else { - let load = Instruction::Load { address }; - - let load_type = Some(vec![self.inserter.function.dfg.type_of_value(new_value)]); - let old_value = self - .insert_instruction_with_typevars(load.clone(), load_type, call_stack.clone()) - .first(); - - self.store_values.insert(address, Store { old_value, new_value, call_stack }); - } - } - } - /// Push the given instruction to the end of the entry block of the current function. /// /// Note that each ValueId of the instruction will be mapped via self.inserter.resolve. /// As a result, the instruction that will be pushed will actually be a new instruction /// with a different InstructionId from the original. The results of the given instruction /// will also be mapped to the results of the new instruction. - fn push_instruction(&mut self, id: InstructionId) -> Vec { + /// + /// `previous_allocate_result` should only be set to the result of an allocate instruction + /// if that instruction was the instruction immediately previous to this one - if there are + /// any instructions in between it should be None. 
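    /// Note that stores pushed through here are rewritten under the current branch
    /// condition by `handle_instruction_side_effects`: `store v at a` becomes, in
    /// effect, `store (if c { v } else { load a }) at a`. Schematically (a sketch,
    /// not the printer's exact output):
    ///
    /// ```text
    /// v2 = load v0 -> Field          // previous value at the address
    /// v3 = not v_c
    /// v4 = if v_c then v1 else (if v3) v2
    /// store v4 at v0
    /// ```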
+ fn push_instruction(&mut self, id: InstructionId) { let (instruction, call_stack) = self.inserter.map_instruction(id); let instruction = self.handle_instruction_side_effects(instruction, call_stack.clone()); - let is_allocate = matches!(instruction, Instruction::Allocate); + let instruction_is_allocate = matches!(&instruction, Instruction::Allocate); let entry = self.inserter.function.entry_block(); let results = self.inserter.push_instruction_value(instruction, id, entry, call_stack); // Remember an allocate was created local to this branch so that we do not try to merge store // values across branches for it later. - if is_allocate { + if instruction_is_allocate { self.local_allocations.insert(results.first()); } - - results.results().into_owned() } /// If we are currently in a branch, we need to modify constrain instructions @@ -782,8 +673,35 @@ impl<'f> Context<'f> { Instruction::Constrain(lhs, rhs, message) } Instruction::Store { address, value } => { - self.remember_store(address, value, call_stack); - Instruction::Store { address, value } + // If this instruction immediately follows an allocate, and stores to that + // address there is no previous value to load and we don't need a merge anyway. + if self.local_allocations.contains(&address) { + Instruction::Store { address, value } + } else { + // Instead of storing `value`, store `if condition { value } else { previous_value }` + let typ = self.inserter.function.dfg.type_of_value(value); + let load = Instruction::Load { address }; + let previous_value = self + .insert_instruction_with_typevars( + load, + Some(vec![typ]), + call_stack.clone(), + ) + .first(); + + let else_condition = self + .insert_instruction(Instruction::Not(condition), call_stack.clone()); + + let instruction = Instruction::IfElse { + then_condition: condition, + then_value: value, + else_condition, + else_value: previous_value, + }; + + let updated_value = self.insert_instruction(instruction, call_stack); + Instruction::Store { address, value: updated_value } + } } Instruction::RangeCheck { value, max_bit_size, assert_message } => { // Replace value with `value * predicate` to zero out value when predicate is inactive. @@ -905,23 +823,11 @@ impl<'f> Context<'f> { call_stack, ) } - - fn undo_stores_in_then_branch(&mut self, store_values: &HashMap) { - for (address, store) in store_values { - let address = *address; - let value = store.old_value; - let instruction = Instruction::Store { address, value }; - // Considering the location of undoing a store to be the same as the original store. 
- self.insert_instruction_with_typevars(instruction, None, store.call_stack.clone()); - } - } } #[cfg(test)] mod test { - use std::sync::Arc; - - use acvm::{acir::AcirField, FieldElement}; + use acvm::acir::AcirField; use crate::ssa::{ function_builder::FunctionBuilder, @@ -961,9 +867,11 @@ mod test { v1 = not v0 enable_side_effects u1 1 v3 = cast v0 as Field - v5 = mul v3, Field -1 - v7 = add Field 4, v5 - return v7 + v4 = cast v1 as Field + v6 = mul v3, Field 3 + v8 = mul v4, Field 4 + v9 = add v6, v8 + return v9 } "; @@ -1023,15 +931,15 @@ mod test { b0(v0: u1, v1: &mut Field): enable_side_effects v0 v2 = load v1 -> Field - store Field 5 at v1 - v4 = not v0 - store v2 at v1 - enable_side_effects u1 1 - v6 = cast v0 as Field - v7 = sub Field 5, v2 - v8 = mul v6, v7 - v9 = add v2, v8 + v3 = not v0 + v4 = cast v0 as Field + v5 = cast v3 as Field + v7 = mul v4, Field 5 + v8 = mul v5, v2 + v9 = add v7, v8 store v9 at v1 + v10 = not v0 + enable_side_effects u1 1 return } "; @@ -1062,17 +970,23 @@ mod test { b0(v0: u1, v1: &mut Field): enable_side_effects v0 v2 = load v1 -> Field - store Field 5 at v1 - v4 = not v0 - store v2 at v1 - enable_side_effects v4 - v5 = load v1 -> Field - store Field 6 at v1 + v3 = not v0 + v4 = cast v0 as Field + v5 = cast v3 as Field + v7 = mul v4, Field 5 + v8 = mul v5, v2 + v9 = add v7, v8 + store v9 at v1 + v10 = not v0 + enable_side_effects v10 + v11 = load v1 -> Field + v12 = cast v10 as Field + v13 = cast v0 as Field + v15 = mul v12, Field 6 + v16 = mul v13, v11 + v17 = add v15, v16 + store v17 at v1 enable_side_effects u1 1 - v8 = cast v0 as Field - v10 = mul v8, Field -1 - v11 = add Field 6, v10 - store v11 at v1 return } "; @@ -1115,6 +1029,7 @@ mod test { // b7 b8 // ↘ ↙ // b9 + let src = " acir(inline) fn main f0 { b0(v0: u1, v1: u1): @@ -1165,33 +1080,49 @@ mod test { v20 = load v2 -> Field // call v1(Field 8, v20) jmp b9() - } - "; + }"; let ssa = Ssa::from_str(src).unwrap(); + let ssa = ssa.flatten_cfg().mem2reg(); - // Expected results after mem2reg removes the allocation and each load and store: let expected = " acir(inline) fn main f0 { b0(v0: u1, v1: u1): v2 = allocate -> &mut Field enable_side_effects v0 - v3 = mul v0, v1 - enable_side_effects v3 - v4 = not v1 - v5 = mul v0, v4 + v3 = not v0 + v4 = cast v0 as Field + v5 = cast v3 as Field + v7 = mul v4, Field 2 + v8 = add v7, v5 + v9 = mul v0, v1 + enable_side_effects v9 + v10 = not v9 + v11 = cast v9 as Field + v12 = cast v10 as Field + v14 = mul v11, Field 5 + v15 = mul v12, v8 + v16 = add v14, v15 + v17 = not v1 + v18 = mul v0, v17 + enable_side_effects v18 + v19 = not v18 + v20 = cast v18 as Field + v21 = cast v19 as Field + v23 = mul v20, Field 6 + v24 = mul v21, v16 + v25 = add v23, v24 enable_side_effects v0 - v6 = cast v3 as Field - v8 = mul v6, Field -1 - v10 = add Field 6, v8 - v11 = not v0 + v26 = not v0 + enable_side_effects v26 + v27 = cast v26 as Field + v28 = cast v0 as Field + v30 = mul v27, Field 3 + v31 = mul v28, v25 + v32 = add v30, v31 enable_side_effects u1 1 - v13 = cast v0 as Field - v15 = sub v10, Field 3 - v16 = mul v13, v15 - v17 = add Field 3, v16 - return v17 + return v32 }"; let main = ssa.main(); @@ -1201,10 +1132,7 @@ mod test { }; let merged_values = get_all_constants_reachable_from_instruction(&main.dfg, ret); - assert_eq!( - merged_values, - vec![FieldElement::from(3u128), FieldElement::from(6u128), -FieldElement::from(1u128)] - ); + assert_eq!(merged_values, vec![2, 3, 5, 6]); assert_normalized_ssa_equals(ssa, expected); } @@ -1287,7 +1215,7 @@ mod test { fn 
get_all_constants_reachable_from_instruction( dfg: &DataFlowGraph, value: ValueId, - ) -> Vec { + ) -> Vec { match dfg[value] { Value::Instruction { instruction, .. } => { let mut values = vec![]; @@ -1305,7 +1233,7 @@ mod test { values.dedup(); values } - Value::NumericConstant { constant, .. } => vec![constant], + Value::NumericConstant { constant, .. } => vec![constant.to_u128()], _ => Vec::new(), } } @@ -1344,63 +1272,71 @@ mod test { fn should_not_merge_incorrectly_to_false() { // Regression test for #1792 // Tests that it does not simplify a true constraint an always-false constraint - // acir(inline) fn main f1 { - // b0(v0: [u8; 2]): - // v5 = array_get v0, index u8 0 - // v6 = cast v5 as u32 - // v8 = truncate v6 to 1 bits, max_bit_size: 32 - // v9 = cast v8 as u1 - // v10 = allocate - // store u8 0 at v10 - // jmpif v9 then: b2, else: b3 - // b2(): - // v12 = cast v5 as Field - // v13 = add v12, Field 1 - // store v13 at v10 - // jmp b4() - // b4(): - // constrain v9 == u1 1 - // return - // b3(): - // store u8 0 at v10 - // jmp b4() - // } - let main_id = Id::test_new(1); - let mut builder = FunctionBuilder::new("main".into(), main_id); - builder.insert_block(); // b0 - let b1 = builder.insert_block(); - let b2 = builder.insert_block(); - let b3 = builder.insert_block(); - let element_type = Arc::new(vec![Type::unsigned(8)]); - let array_type = Type::Array(element_type.clone(), 2); - let array = builder.add_parameter(array_type); - let zero = builder.numeric_constant(0_u128, Type::unsigned(8)); - let v5 = builder.insert_array_get(array, zero, Type::unsigned(8)); - let v6 = builder.insert_cast(v5, Type::unsigned(32)); - let i_two = builder.numeric_constant(2_u128, Type::unsigned(32)); - let v8 = builder.insert_binary(v6, BinaryOp::Mod, i_two); - let v9 = builder.insert_cast(v8, Type::bool()); - let v10 = builder.insert_allocate(Type::field()); - builder.insert_store(v10, zero); - builder.terminate_with_jmpif(v9, b1, b2); - builder.switch_to_block(b1); - let one = builder.field_constant(1_u128); - let v5b = builder.insert_cast(v5, Type::field()); - let v13: Id = builder.insert_binary(v5b, BinaryOp::Add, one); - let v14 = builder.insert_cast(v13, Type::unsigned(8)); - builder.insert_store(v10, v14); - builder.terminate_with_jmp(b3, vec![]); - builder.switch_to_block(b2); - builder.insert_store(v10, zero); - builder.terminate_with_jmp(b3, vec![]); - builder.switch_to_block(b3); - let v_true = builder.numeric_constant(true, Type::bool()); - let v12 = builder.insert_binary(v9, BinaryOp::Eq, v_true); - builder.insert_constrain(v12, v_true, None); - builder.terminate_with_return(vec![]); - let ssa = builder.finish(); + + let src = " + acir(inline) fn main f0 { + b0(v0: [u8; 2]): + v2 = array_get v0, index u8 0 -> u8 + v3 = cast v2 as u32 + v4 = truncate v3 to 1 bits, max_bit_size: 32 + v5 = cast v4 as u1 + v6 = allocate -> &mut Field + store u8 0 at v6 + jmpif v5 then: b2, else: b1 + b2(): + v7 = cast v2 as Field + v9 = add v7, Field 1 + v10 = cast v9 as u8 + store v10 at v6 + jmp b3() + b3(): + constrain v5 == u1 1 + return + b1(): + store u8 0 at v6 + jmp b3() + } + "; + + let ssa = Ssa::from_str(src).unwrap(); + + let expected = " + acir(inline) fn main f0 { + b0(v0: [u8; 2]): + v2 = array_get v0, index u8 0 -> u8 + v3 = cast v2 as u32 + v4 = truncate v3 to 1 bits, max_bit_size: 32 + v5 = cast v4 as u1 + v6 = allocate -> &mut Field + store u8 0 at v6 + enable_side_effects v5 + v7 = cast v2 as Field + v9 = add v7, Field 1 + v10 = cast v9 as u8 + v11 = load v6 -> u8 + v12 = not v5 + 
v13 = cast v4 as u8 + v14 = cast v12 as u8 + v15 = mul v13, v10 + v16 = mul v14, v11 + v17 = add v15, v16 + store v17 at v6 + v18 = not v5 + enable_side_effects v18 + v19 = load v6 -> u8 + v20 = cast v18 as u8 + v21 = cast v4 as u8 + v22 = mul v21, v19 + store v22 at v6 + enable_side_effects u1 1 + constrain v5 == u1 1 + return + } + "; + let flattened_ssa = ssa.flatten_cfg(); let main = flattened_ssa.main(); + // Now assert that there is not an always-false constraint after flattening: let mut constrain_count = 0; for instruction in main.dfg[main.entry_block()].instructions() { @@ -1414,6 +1350,8 @@ mod test { } } assert_eq!(constrain_count, 1); + + assert_normalized_ssa_equals(flattened_ssa, expected); } #[test] @@ -1549,7 +1487,7 @@ mod test { b2(): return b1(): - jmp b2() + jmp b2() } "; let merged_ssa = Ssa::from_str(src).unwrap(); diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/capacity_tracker.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/capacity_tracker.rs index ddc8b0bfe6b8..a01be6917783 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/capacity_tracker.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/capacity_tracker.rs @@ -21,7 +21,7 @@ impl<'a> SliceCapacityTracker<'a> { pub(crate) fn collect_slice_information( &self, instruction: &Instruction, - slice_sizes: &mut HashMap, + slice_sizes: &mut HashMap, results: &[ValueId], ) { match instruction { @@ -106,13 +106,12 @@ impl<'a> SliceCapacityTracker<'a> { Intrinsic::ToBits(_) => { // Compiler sanity check assert!(matches!(self.dfg.type_of_value(result_slice), Type::Slice(_))); - slice_sizes.insert(result_slice, FieldElement::max_num_bits() as usize); + slice_sizes.insert(result_slice, FieldElement::max_num_bits()); } Intrinsic::ToRadix(_) => { // Compiler sanity check assert!(matches!(self.dfg.type_of_value(result_slice), Type::Slice(_))); - slice_sizes - .insert(result_slice, FieldElement::max_num_bytes() as usize); + slice_sizes.insert(result_slice, FieldElement::max_num_bytes()); } Intrinsic::AsSlice => { let array_size = self @@ -157,7 +156,7 @@ impl<'a> SliceCapacityTracker<'a> { pub(crate) fn compute_slice_capacity( &self, array_id: ValueId, - slice_sizes: &mut HashMap, + slice_sizes: &mut HashMap, ) { if let Some((array, typ)) = self.dfg.get_array_constant(array_id) { // Compiler sanity check @@ -165,7 +164,7 @@ impl<'a> SliceCapacityTracker<'a> { if let Type::Slice(_) = typ { let element_size = typ.element_size(); let len = array.len() / element_size; - slice_sizes.insert(array_id, len); + slice_sizes.insert(array_id, len as u32); } } } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs index 8ea26d4e96d4..6ea235b94147 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs @@ -17,7 +17,7 @@ pub(crate) struct ValueMerger<'a> { // Maps SSA array values with a slice type to their size. // This must be computed before merging values. 
-    slice_sizes: &'a mut HashMap<ValueId, usize>,
+    slice_sizes: &'a mut HashMap<ValueId, u32>,

     array_set_conditionals: &'a mut HashMap<ValueId, ValueId>,

@@ -28,7 +28,7 @@ impl<'a> ValueMerger<'a> {
     pub(crate) fn new(
         dfg: &'a mut DataFlowGraph,
         block: BasicBlockId,
-        slice_sizes: &'a mut HashMap<ValueId, usize>,
+        slice_sizes: &'a mut HashMap<ValueId, u32>,
         array_set_conditionals: &'a mut HashMap<ValueId, ValueId>,
         current_condition: Option<ValueId>,
         call_stack: CallStack,
@@ -54,6 +54,7 @@ impl<'a> ValueMerger<'a> {
     pub(crate) fn merge_values(
         &mut self,
         then_condition: ValueId,
+        else_condition: ValueId,
         then_value: ValueId,
         else_value: ValueId,
     ) -> ValueId {
@@ -69,14 +70,15 @@ impl<'a> ValueMerger<'a> {
                 self.dfg,
                 self.block,
                 then_condition,
+                else_condition,
                 then_value,
                 else_value,
             ),
             typ @ Type::Array(_, _) => {
-                self.merge_array_values(typ, then_condition, then_value, else_value)
+                self.merge_array_values(typ, then_condition, else_condition, then_value, else_value)
             }
             typ @ Type::Slice(_) => {
-                self.merge_slice_values(typ, then_condition, then_value, else_value)
+                self.merge_slice_values(typ, then_condition, else_condition, then_value, else_value)
             }
             Type::Reference(_) => panic!("Cannot return references from an if expression"),
             Type::Function => panic!("Cannot return functions from an if expression"),
         }
     }

     /// Merge two numeric values a and b from separate basic blocks to a single value. This
-    /// function would return the result of `if c { a } else { b }` as `c * (a-b) + b`.
+    /// function would return the result of `if c { a } else { b }` as `c*a + (!c)*b`.
     pub(crate) fn merge_numeric_values(
         dfg: &mut DataFlowGraph,
         block: BasicBlockId,
         then_condition: ValueId,
+        else_condition: ValueId,
         then_value: ValueId,
         else_value: ValueId,
     ) -> ValueId {
@@ -111,38 +114,31 @@ impl<'a> ValueMerger<'a> {

         // We must cast the bool conditions to the actual numeric type used by each value.
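        // The merge below computes `c*a + (!c)*b`. For a boolean `c` this agrees
        // with the previous `c*(a - b) + b` encoding, but it avoids round-tripping
        // both operands through `Field`:
        //
        //     c = 1:  1*a + 0*b == a
        //     c = 0:  0*a + 1*b == b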
let then_condition = dfg .insert_instruction_and_results( - Instruction::Cast(then_condition, Type::field()), + Instruction::Cast(then_condition, then_type), block, None, call_stack.clone(), ) .first(); - - let then_field = Instruction::Cast(then_value, Type::field()); - let then_field_value = - dfg.insert_instruction_and_results(then_field, block, None, call_stack.clone()).first(); - - let else_field = Instruction::Cast(else_value, Type::field()); - let else_field_value = - dfg.insert_instruction_and_results(else_field, block, None, call_stack.clone()).first(); - - let diff = Instruction::binary(BinaryOp::Sub, then_field_value, else_field_value); - let diff_value = - dfg.insert_instruction_and_results(diff, block, None, call_stack.clone()).first(); - - let conditional_diff = Instruction::binary(BinaryOp::Mul, then_condition, diff_value); - let conditional_diff_value = dfg - .insert_instruction_and_results(conditional_diff, block, None, call_stack.clone()) + let else_condition = dfg + .insert_instruction_and_results( + Instruction::Cast(else_condition, else_type), + block, + None, + call_stack.clone(), + ) .first(); - let merged_field = - Instruction::binary(BinaryOp::Add, else_field_value, conditional_diff_value); - let merged_field_value = dfg - .insert_instruction_and_results(merged_field, block, None, call_stack.clone()) - .first(); + let mul = Instruction::binary(BinaryOp::Mul, then_condition, then_value); + let then_value = + dfg.insert_instruction_and_results(mul, block, None, call_stack.clone()).first(); + + let mul = Instruction::binary(BinaryOp::Mul, else_condition, else_value); + let else_value = + dfg.insert_instruction_and_results(mul, block, None, call_stack.clone()).first(); - let merged = Instruction::Cast(merged_field_value, then_type); - dfg.insert_instruction_and_results(merged, block, None, call_stack).first() + let add = Instruction::binary(BinaryOp::Add, then_value, else_value); + dfg.insert_instruction_and_results(add, block, None, call_stack).first() } /// Given an if expression that returns an array: `if c { array1 } else { array2 }`, @@ -152,6 +148,7 @@ impl<'a> ValueMerger<'a> { &mut self, typ: Type, then_condition: ValueId, + else_condition: ValueId, then_value: ValueId, else_value: ValueId, ) -> ValueId { @@ -162,10 +159,11 @@ impl<'a> ValueMerger<'a> { _ => panic!("Expected array type"), }; - let actual_length = len * element_types.len(); + let actual_length = len * element_types.len() as u32; if let Some(result) = self.try_merge_only_changed_indices( then_condition, + else_condition, then_value, else_value, actual_length, @@ -175,7 +173,8 @@ impl<'a> ValueMerger<'a> { for i in 0..len { for (element_index, element_type) in element_types.iter().enumerate() { - let index = ((i * element_types.len() + element_index) as u128).into(); + let index = + ((i * element_types.len() as u32 + element_index as u32) as u128).into(); let index = self.dfg.make_constant(index, Type::field()); let typevars = Some(vec![element_type.clone()]); @@ -195,7 +194,12 @@ impl<'a> ValueMerger<'a> { let then_element = get_element(then_value, typevars.clone()); let else_element = get_element(else_value, typevars); - merged.push_back(self.merge_values(then_condition, then_element, else_element)); + merged.push_back(self.merge_values( + then_condition, + else_condition, + then_element, + else_element, + )); } } @@ -208,6 +212,7 @@ impl<'a> ValueMerger<'a> { &mut self, typ: Type, then_condition: ValueId, + else_condition: ValueId, then_value_id: ValueId, else_value_id: ValueId, ) -> ValueId { 
@@ -222,22 +227,22 @@ impl<'a> ValueMerger<'a> { let (slice, typ) = self.dfg.get_array_constant(then_value_id).unwrap_or_else(|| { panic!("ICE: Merging values during flattening encountered slice {then_value_id} without a preset size"); }); - slice.len() / typ.element_types().len() + (slice.len() / typ.element_types().len()) as u32 }); let else_len = self.slice_sizes.get(&else_value_id).copied().unwrap_or_else(|| { let (slice, typ) = self.dfg.get_array_constant(else_value_id).unwrap_or_else(|| { panic!("ICE: Merging values during flattening encountered slice {else_value_id} without a preset size"); }); - slice.len() / typ.element_types().len() + (slice.len() / typ.element_types().len()) as u32 }); let len = then_len.max(else_len); for i in 0..len { for (element_index, element_type) in element_types.iter().enumerate() { - let index_usize = i * element_types.len() + element_index; - let index_value = (index_usize as u128).into(); + let index_u32 = i * element_types.len() as u32 + element_index as u32; + let index_value = (index_u32 as u128).into(); let index = self.dfg.make_constant(index_value, Type::field()); let typevars = Some(vec![element_type.clone()]); @@ -245,7 +250,7 @@ impl<'a> ValueMerger<'a> { let mut get_element = |array, typevars, len| { // The smaller slice is filled with placeholder data. Codegen for slice accesses must // include checks against the dynamic slice length so that this placeholder data is not incorrectly accessed. - if len <= index_usize { + if len <= index_u32 { self.make_slice_dummy_data(element_type) } else { let get = Instruction::ArrayGet { array, index }; @@ -260,12 +265,20 @@ impl<'a> ValueMerger<'a> { } }; - let then_element = - get_element(then_value_id, typevars.clone(), then_len * element_types.len()); + let then_element = get_element( + then_value_id, + typevars.clone(), + then_len * element_types.len() as u32, + ); let else_element = - get_element(else_value_id, typevars, else_len * element_types.len()); - - merged.push_back(self.merge_values(then_condition, then_element, else_element)); + get_element(else_value_id, typevars, else_len * element_types.len() as u32); + + merged.push_back(self.merge_values( + then_condition, + else_condition, + then_element, + else_element, + )); } } @@ -314,9 +327,10 @@ impl<'a> ValueMerger<'a> { fn try_merge_only_changed_indices( &mut self, then_condition: ValueId, + else_condition: ValueId, then_value: ValueId, else_value: ValueId, - array_length: usize, + array_length: u32, ) -> Option { let mut found = false; let current_condition = self.current_condition?; @@ -370,7 +384,7 @@ impl<'a> ValueMerger<'a> { .chain(seen_else.into_iter().map(|(_, index, typ, condition)| (index, typ, condition))) .collect(); - if !found || changed_indices.len() >= array_length { + if !found || changed_indices.len() as u32 >= array_length { return None; } @@ -397,7 +411,8 @@ impl<'a> ValueMerger<'a> { let then_element = get_element(then_value, typevars.clone()); let else_element = get_element(else_value, typevars); - let value = self.merge_values(then_condition, then_element, else_element); + let value = + self.merge_values(then_condition, else_condition, then_element, else_element); array = self.insert_array_set(array, index, value, Some(condition)).first(); } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/inlining.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/inlining.rs index 6cf7070e65e0..f91487fd73ea 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/inlining.rs +++ 
b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/inlining.rs @@ -1089,7 +1089,6 @@ mod test { } #[test] - #[ignore] #[should_panic( expected = "Attempted to recur more than 1000 times during inlining function 'main': acir(inline) fn main f0 {" )] diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/loop_invariant.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/loop_invariant.rs index 14233ca73e58..87e7f8bcff31 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/loop_invariant.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/loop_invariant.rs @@ -7,14 +7,16 @@ //! - Already marked as loop invariants //! //! We also check that we are not hoisting instructions with side effects. -use fxhash::FxHashSet as HashSet; +use acvm::{acir::AcirField, FieldElement}; +use fxhash::{FxHashMap as HashMap, FxHashSet as HashSet}; use crate::ssa::{ ir::{ basic_block::BasicBlockId, function::{Function, RuntimeType}, function_inserter::FunctionInserter, - instruction::InstructionId, + instruction::{Instruction, InstructionId}, + types::Type, value::ValueId, }, Ssa, @@ -45,25 +47,51 @@ impl Function { } impl Loops { - fn hoist_loop_invariants(self, function: &mut Function) { + fn hoist_loop_invariants(mut self, function: &mut Function) { let mut context = LoopInvariantContext::new(function); - for loop_ in self.yet_to_unroll.iter() { + // The loops should be sorted by the number of blocks. + // We want to access outer nested loops first, which we do by popping + // from the top of the list. + while let Some(loop_) = self.yet_to_unroll.pop() { let Ok(pre_header) = loop_.get_pre_header(context.inserter.function, &self.cfg) else { // If the loop does not have a preheader we skip hoisting loop invariants for this loop continue; }; - context.hoist_loop_invariants(loop_, pre_header); + + context.hoist_loop_invariants(&loop_, pre_header); } context.map_dependent_instructions(); } } +impl Loop { + /// Find the value that controls whether to perform a loop iteration. + /// This is going to be the block parameter of the loop header. + /// + /// Consider the following example of a `for i in 0..4` loop: + /// ```text + /// brillig(inline) fn main f0 { + /// b0(v0: u32): + /// ... + /// jmp b1(u32 0) + /// b1(v1: u32): // Loop header + /// v5 = lt v1, u32 4 // Upper bound + /// jmpif v5 then: b3, else: b2 + /// ``` + /// In the example above, `v1` is the induction variable + fn get_induction_variable(&self, function: &Function) -> ValueId { + function.dfg.block_parameters(self.header)[0] + } +} + struct LoopInvariantContext<'f> { inserter: FunctionInserter<'f>, defined_in_loop: HashSet, loop_invariants: HashSet, + // Maps induction variable -> fixed upper loop bound + outer_induction_variables: HashMap, } impl<'f> LoopInvariantContext<'f> { @@ -72,6 +100,7 @@ impl<'f> LoopInvariantContext<'f> { inserter: FunctionInserter::new(function), defined_in_loop: HashSet::default(), loop_invariants: HashSet::default(), + outer_induction_variables: HashMap::default(), } } @@ -88,13 +117,29 @@ impl<'f> LoopInvariantContext<'f> { self.inserter.push_instruction(instruction_id, *block); } - self.update_values_defined_in_loop_and_invariants(instruction_id, hoist_invariant); + self.extend_values_defined_in_loop_and_invariants(instruction_id, hoist_invariant); } } + + // Keep track of a loop induction variable and respective upper bound. + // This will be used by later loops to determine whether they have operations + // reliant upon the maximum induction variable. 
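        // For example, after processing `for i in 0..4` this records `i -> 4`;
        // an `array_get arr, index i` in a later (inner) loop with `arr: [T; 4]`
        // can then be hoisted, since the recorded bound is exclusive (`i` is at
        // most 3 on every iteration).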
+ let upper_bound = loop_.get_const_upper_bound(self.inserter.function); + if let Some(upper_bound) = upper_bound { + let induction_variable = loop_.get_induction_variable(self.inserter.function); + let induction_variable = self.inserter.resolve(induction_variable); + self.outer_induction_variables.insert(induction_variable, upper_bound); + } } /// Gather the variables declared within the loop fn set_values_defined_in_loop(&mut self, loop_: &Loop) { + // Clear any values that may be defined in previous loops, as the context is per function. + self.defined_in_loop.clear(); + // These are safe to keep per function, but we want to be clear that these values + // are used per loop. + self.loop_invariants.clear(); + for block in loop_.blocks.iter() { let params = self.inserter.function.dfg.block_parameters(*block); self.defined_in_loop.extend(params); @@ -107,7 +152,7 @@ impl<'f> LoopInvariantContext<'f> { /// Update any values defined in the loop and loop invariants after a /// analyzing and re-inserting a loop's instruction. - fn update_values_defined_in_loop_and_invariants( + fn extend_values_defined_in_loop_and_invariants( &mut self, instruction_id: InstructionId, hoist_invariant: bool, @@ -143,9 +188,44 @@ impl<'f> LoopInvariantContext<'f> { is_loop_invariant &= !self.defined_in_loop.contains(&value) || self.loop_invariants.contains(&value); }); - is_loop_invariant && instruction.can_be_deduplicated(&self.inserter.function.dfg, false) + + let can_be_deduplicated = instruction.can_be_deduplicated(self.inserter.function, false) + || self.can_be_deduplicated_from_upper_bound(&instruction); + + is_loop_invariant && can_be_deduplicated + } + + /// Certain instructions can take advantage of that our induction variable has a fixed maximum. + /// + /// For example, an array access can usually only be safely deduplicated when we have a constant + /// index that is below the length of the array. + /// Checking an array get where the index is the loop's induction variable on its own + /// would determine that the instruction is not safe for hoisting. + /// However, if we know that the induction variable's upper bound will always be in bounds of the array + /// we can safely hoist the array access. + fn can_be_deduplicated_from_upper_bound(&self, instruction: &Instruction) -> bool { + match instruction { + Instruction::ArrayGet { array, index } => { + let array_typ = self.inserter.function.dfg.type_of_value(*array); + let upper_bound = self.outer_induction_variables.get(index); + if let (Type::Array(_, len), Some(upper_bound)) = (array_typ, upper_bound) { + upper_bound.to_u128() <= len.into() + } else { + false + } + } + _ => false, + } } + /// Loop invariant hoisting only operates over loop instructions. + /// The `FunctionInserter` is used for mapping old values to new values after + /// re-inserting loop invariant instructions. + /// However, there may be instructions which are not within loops that are + /// still reliant upon the instruction results altered during the pass. + /// This method re-inserts all instructions so that all instructions have + /// correct new value IDs based upon the `FunctionInserter` internal map. + /// Leaving out this mapping could lead to instructions with values that do not exist. 
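    /// A hedged illustration of the hazard this avoids:
    ///
    /// ```text
    /// constrain v6 == u32 6   // stale: v6 was hoisted and re-inserted as v3
    /// constrain v3 == u32 6   // after map_dependent_instructions
    /// ```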
fn map_dependent_instructions(&mut self) { let blocks = self.inserter.function.reachable_blocks(); for block in blocks { @@ -171,13 +251,13 @@ mod test { b1(v2: u32): v5 = lt v2, u32 4 jmpif v5 then: b3, else: b2 + b2(): + return b3(): v6 = mul v0, v1 constrain v6 == u32 6 v8 = add v2, u32 1 jmp b1(v8) - b2(): - return } "; @@ -196,12 +276,12 @@ mod test { b1(v2: u32): v6 = lt v2, u32 4 jmpif v6 then: b3, else: b2 + b2(): + return b3(): constrain v3 == u32 6 v9 = add v2, u32 1 jmp b1(v9) - b2(): - return } "; @@ -220,21 +300,21 @@ mod test { b1(v2: u32): v6 = lt v2, u32 4 jmpif v6 then: b3, else: b2 + b2(): + return b3(): jmp b4(u32 0) b4(v3: u32): v7 = lt v3, u32 4 jmpif v7 then: b6, else: b5 + b5(): + v9 = add v2, u32 1 + jmp b1(v9) b6(): v10 = mul v0, v1 constrain v10 == u32 6 v12 = add v3, u32 1 jmp b4(v12) - b5(): - v9 = add v2, u32 1 - jmp b1(v9) - b2(): - return } "; @@ -253,20 +333,20 @@ mod test { b1(v2: u32): v7 = lt v2, u32 4 jmpif v7 then: b3, else: b2 + b2(): + return b3(): jmp b4(u32 0) b4(v3: u32): v8 = lt v3, u32 4 jmpif v8 then: b6, else: b5 + b5(): + v10 = add v2, u32 1 + jmp b1(v10) b6(): constrain v4 == u32 6 v12 = add v3, u32 1 jmp b4(v12) - b5(): - v10 = add v2, u32 1 - jmp b1(v10) - b2(): - return } "; @@ -294,6 +374,8 @@ mod test { b1(v2: u32): v5 = lt v2, u32 4 jmpif v5 then: b3, else: b2 + b2(): + return b3(): v6 = mul v0, v1 v7 = mul v6, v0 @@ -301,8 +383,6 @@ mod test { constrain v7 == u32 12 v9 = add v2, u32 1 jmp b1(v9) - b2(): - return } "; @@ -322,12 +402,12 @@ mod test { b1(v2: u32): v9 = lt v2, u32 4 jmpif v9 then: b3, else: b2 + b2(): + return b3(): constrain v4 == u32 12 v11 = add v2, u32 1 jmp b1(v11) - b2(): - return } "; @@ -351,17 +431,17 @@ mod test { b1(v2: u32): v7 = lt v2, u32 4 jmpif v7 then: b3, else: b2 + b2(): + v8 = load v5 -> [u32; 5] + v10 = array_get v8, index u32 2 -> u32 + constrain v10 == u32 3 + return b3(): v12 = load v5 -> [u32; 5] v13 = array_set v12, index v0, value v1 store v13 at v5 v15 = add v2, u32 1 jmp b1(v15) - b2(): - v8 = load v5 -> [u32; 5] - v10 = array_get v8, index u32 2 -> u32 - constrain v10 == u32 3 - return } "; @@ -375,4 +455,108 @@ mod test { // The code should be unchanged assert_normalized_ssa_equals(ssa, src); } + + #[test] + fn hoist_array_gets_using_induction_variable_with_const_bound() { + // SSA for the following program: + // + // fn triple_loop(x: u32) { + // let arr = [2; 5]; + // for i in 0..4 { + // for j in 0..4 { + // for _ in 0..4 { + // assert_eq(arr[i], x); + // assert_eq(arr[j], x); + // } + // } + // } + // } + // + // `arr[i]` and `arr[j]` are safe to hoist as we know the maximum possible index + // to be used for both array accesses. + // We want to make sure `arr[i]` is hoisted to the outermost loop body and that + // `arr[j]` is hoisted to the second outermost loop body. 
+ let src = " + brillig(inline) fn main f0 { + b0(v0: u32, v1: u32): + v6 = make_array [u32 2, u32 2, u32 2, u32 2, u32 2] : [u32; 5] + inc_rc v6 + jmp b1(u32 0) + b1(v2: u32): + v9 = lt v2, u32 4 + jmpif v9 then: b3, else: b2 + b2(): + return + b3(): + jmp b4(u32 0) + b4(v3: u32): + v10 = lt v3, u32 4 + jmpif v10 then: b6, else: b5 + b5(): + v12 = add v2, u32 1 + jmp b1(v12) + b6(): + jmp b7(u32 0) + b7(v4: u32): + v13 = lt v4, u32 4 + jmpif v13 then: b9, else: b8 + b8(): + v14 = add v3, u32 1 + jmp b4(v14) + b9(): + v15 = array_get v6, index v2 -> u32 + v16 = eq v15, v0 + constrain v15 == v0 + v17 = array_get v6, index v3 -> u32 + v18 = eq v17, v0 + constrain v17 == v0 + v19 = add v4, u32 1 + jmp b7(v19) + } + "; + + let ssa = Ssa::from_str(src).unwrap(); + + let expected = " + brillig(inline) fn main f0 { + b0(v0: u32, v1: u32): + v6 = make_array [u32 2, u32 2, u32 2, u32 2, u32 2] : [u32; 5] + inc_rc v6 + jmp b1(u32 0) + b1(v2: u32): + v9 = lt v2, u32 4 + jmpif v9 then: b3, else: b2 + b2(): + return + b3(): + v10 = array_get v6, index v2 -> u32 + v11 = eq v10, v0 + jmp b4(u32 0) + b4(v3: u32): + v12 = lt v3, u32 4 + jmpif v12 then: b6, else: b5 + b5(): + v14 = add v2, u32 1 + jmp b1(v14) + b6(): + v15 = array_get v6, index v3 -> u32 + v16 = eq v15, v0 + jmp b7(u32 0) + b7(v4: u32): + v17 = lt v4, u32 4 + jmpif v17 then: b9, else: b8 + b8(): + v18 = add v3, u32 1 + jmp b4(v18) + b9(): + constrain v10 == v0 + constrain v15 == v0 + v19 = add v4, u32 1 + jmp b7(v19) + } + "; + + let ssa = ssa.loop_invariant_code_motion(); + assert_normalized_ssa_equals(ssa, expected); + } } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs index 0690dbbf2042..77ad53df9cf4 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs @@ -18,6 +18,7 @@ //! - A reference with 0 aliases means we were unable to find which reference this reference //! refers to. If such a reference is stored to, we must conservatively invalidate every //! reference in the current block. +//! - We also track the last load instruction to each address per block. //! //! From there, to figure out the value of each reference at the end of block, iterate each instruction: //! - On `Instruction::Allocate`: @@ -28,6 +29,13 @@ //! - Furthermore, if the result of the load is a reference, mark the result as an alias //! of the reference it dereferences to (if known). //! - If which reference it dereferences to is not known, this load result has no aliases. +//! - We also track the last instance of a load instruction to each address in a block. +//! If we see that the last load instruction was from the same address as the current load instruction, +//! we move to replace the result of the current load with the result of the previous load. +//! This removal requires a couple conditions: +//! - No store occurs to that address before the next load, +//! - The address is not used as an argument to a call +//! This optimization helps us remove repeated loads for which there are not known values. //! - On `Instruction::Store { address, value }`: //! - If the address of the store is known: //! - If the address has exactly 1 alias: @@ -40,11 +48,13 @@ //! - Conservatively mark every alias in the block to `Unknown`. //! - Additionally, if there were no Loads to any alias of the address between this Store and //! 
the previous Store to the same address, the previous store can be removed. +//! - Remove the instance of the last load instruction to the address and its aliases //! - On `Instruction::Call { arguments }`: //! - If any argument of the call is a reference, set the value of each alias of that //! reference to `Unknown` //! - Any builtin functions that may return aliases if their input also contains a //! reference should be tracked. Examples: `slice_push_back`, `slice_insert`, `slice_remove`, etc. +//! - Remove the instance of the last load instruction for any reference arguments and their aliases //! //! On a terminator instruction: //! - If the terminator is a `Jmp`: @@ -274,6 +284,9 @@ impl<'f> PerFunctionContext<'f> { if let Some(first_predecessor) = predecessors.next() { let mut first = self.blocks.get(&first_predecessor).cloned().unwrap_or_default(); first.last_stores.clear(); + // Last loads are tracked per block. During unification we are creating a new block from the current one, + // so we must clear the last loads of the current block before we return the new block. + first.last_loads.clear(); // Note that we have to start folding with the first block as the accumulator. // If we started with an empty block, an empty block union'd with any other block @@ -410,6 +423,28 @@ impl<'f> PerFunctionContext<'f> { self.last_loads.insert(address, (instruction, block_id)); } + + // Check whether the block has a repeat load from the same address (w/ no calls or stores in between the loads). + // If we do have a repeat load, we can remove the current load and map its result to the previous load's result. + if let Some(last_load) = references.last_loads.get(&address) { + let Instruction::Load { address: previous_address } = + &self.inserter.function.dfg[*last_load] + else { + panic!("Expected a Load instruction here"); + }; + let result = self.inserter.function.dfg.instruction_results(instruction)[0]; + let previous_result = + self.inserter.function.dfg.instruction_results(*last_load)[0]; + if *previous_address == address { + self.inserter.map_value(result, previous_result); + self.instructions_to_remove.insert(instruction); + } + } + // We want to set the load for every load even if the address has a known value + // and the previous load instruction was removed. + // We are safe to still remove a repeat load in this case as we are mapping from the current load's + // result to the previous load, which if it was removed should already have a mapping to the known value. + references.set_last_load(address, instruction); } Instruction::Store { address, value } => { let address = self.inserter.function.dfg.resolve(*address); @@ -435,6 +470,8 @@ impl<'f> PerFunctionContext<'f> { } references.set_known_value(address, value); + // If we see a store to an address, the last load to that address needs to remain. + references.keep_last_load_for(address, self.inserter.function); references.last_stores.insert(address, instruction); } Instruction::Allocate => { @@ -542,6 +579,9 @@ impl<'f> PerFunctionContext<'f> { let value = self.inserter.function.dfg.resolve(*value); references.set_unknown(value); references.mark_value_used(value, self.inserter.function); + + // If a reference is an argument to a call, the last load to that address and its aliases needs to remain. 
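            // For example, given `call f(v0)` with `v0: &mut Field`, `f` may
            // store through `v0`, so a `load v0` from before the call must not
            // be reused for loads after it.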
+ references.keep_last_load_for(value, self.inserter.function); } } } @@ -572,6 +612,12 @@ impl<'f> PerFunctionContext<'f> { let destination_parameters = self.inserter.function.dfg[*destination].parameters(); assert_eq!(destination_parameters.len(), arguments.len()); + // If we have multiple parameters that alias the same argument value, + // then those parameters also alias each other. + // We save parameters with repeat arguments to later mark those + // parameters as aliasing one another. + let mut arg_set: HashMap<ValueId, BTreeSet<ValueId>> = HashMap::default(); + // Add an alias for each reference parameter for (parameter, argument) in destination_parameters.iter().zip(arguments) { if self.inserter.function.dfg.value_is_reference(*parameter) { @@ -581,10 +627,27 @@ impl<'f> PerFunctionContext<'f> { if let Some(aliases) = references.aliases.get_mut(expression) { // The argument reference is possibly aliased by this block parameter aliases.insert(*parameter); + + // Check if we have seen the same argument + let seen_parameters = arg_set.entry(argument).or_default(); + // Add the current parameter to the parameters we have seen for this argument. + // The previous parameters and the current one alias one another. + seen_parameters.insert(*parameter); } } } } + + // Set the aliases of the parameters + for (_, aliased_params) in arg_set { + for param in aliased_params.iter() { + self.set_aliases( + references, + *param, + AliasSet::known_multiple(aliased_params.clone()), + ); + } + } } TerminatorInstruction::Return { return_values, .. } => { // Removing all `last_stores` for each returned reference is more important here @@ -612,6 +675,8 @@ mod tests { map::Id, types::Type, }, + opt::assert_normalized_ssa_equals, + Ssa, }; #[test] @@ -822,88 +887,53 @@ mod tests { // is later stored in a successor block #[test] fn load_aliases_in_predecessor_block() { - // fn main { - // b0(): - // v0 = allocate - // store Field 0 at v0 - // v2 = allocate - // store v0 at v2 - // v3 = load v2 - // v4 = load v2 - // jmp b1() - // b1(): - // store Field 1 at v3 - // store Field 2 at v4 - // v7 = load v3 - // v8 = eq v7, Field 2 - // return - // } - let main_id = Id::test_new(0); - let mut builder = FunctionBuilder::new("main".into(), main_id); - - let v0 = builder.insert_allocate(Type::field()); - - let zero = builder.field_constant(0u128); - builder.insert_store(v0, zero); - - let v2 = builder.insert_allocate(Type::Reference(Arc::new(Type::field()))); - builder.insert_store(v2, v0); - - let v3 = builder.insert_load(v2, Type::field()); - let v4 = builder.insert_load(v2, Type::field()); - let b1 = builder.insert_block(); - builder.terminate_with_jmp(b1, vec![]); - - builder.switch_to_block(b1); - - let one = builder.field_constant(1u128); - builder.insert_store(v3, one); - - let two = builder.field_constant(2u128); - builder.insert_store(v4, two); - - let v8 = builder.insert_load(v3, Type::field()); - let _ = builder.insert_binary(v8, BinaryOp::Eq, two); - - builder.terminate_with_return(vec![]); - - let ssa = builder.finish(); - assert_eq!(ssa.main().reachable_blocks().len(), 2); + let src = " + acir(inline) fn main f0 { + b0(): + v0 = allocate -> &mut Field + store Field 0 at v0 + v2 = allocate -> &mut &mut Field + store v0 at v2 + v3 = load v2 -> &mut Field + v4 = load v2 -> &mut Field + jmp b1() + b1(): + store Field 1 at v3 + store Field 2 at v4 + v7 = load v3 -> Field + v8 = eq v7, Field 2 + return + } + "; - // Expected result: - // acir fn main f0 { - // b0(): - // v9 = allocate - // store Field 0 at v9 - // v10 = allocate -
// jmp b1() - // b1(): - // return - // } - let ssa = ssa.mem2reg(); - println!("{}", ssa); + let mut ssa = Ssa::from_str(src).unwrap(); + let main = ssa.main_mut(); - let main = ssa.main(); - assert_eq!(main.reachable_blocks().len(), 2); + let instructions = main.dfg[main.entry_block()].instructions(); + assert_eq!(instructions.len(), 6); // The final return is not counted // All loads should be removed - assert_eq!(count_loads(main.entry_block(), &main.dfg), 0); - assert_eq!(count_loads(b1, &main.dfg), 0); // The first store is not removed as it is used as a nested reference in another store. - // We would need to track whether the store where `v9` is the store value gets removed to know whether + // We would need to track whether the store where `v0` is the store value gets removed to know whether // to remove it. - assert_eq!(count_stores(main.entry_block(), &main.dfg), 1); // The first store in b1 is removed since there is another store to the same reference // in the same block, and the store is not needed before the later store. // The rest of the stores are also removed as no loads are done within any blocks // to the stored values. - assert_eq!(count_stores(b1, &main.dfg), 0); - - let b1_instructions = main.dfg[b1].instructions(); + let expected = " + acir(inline) fn main f0 { + b0(): + v0 = allocate -> &mut Field + store Field 0 at v0 + v2 = allocate -> &mut &mut Field + jmp b1() + b1(): + return + } + "; - // We expect the last eq to be optimized out - assert_eq!(b1_instructions.len(), 0); + let ssa = ssa.mem2reg(); + assert_normalized_ssa_equals(ssa, expected); } #[test] @@ -933,7 +963,7 @@ mod tests { // v10 = eq v9, Field 2 // constrain v9 == Field 2 // v11 = load v2 - // v12 = load v10 + // v12 = load v11 // v13 = eq v12, Field 2 // constrain v11 == Field 2 // return @@ -992,7 +1022,7 @@ mod tests { let main = ssa.main(); assert_eq!(main.reachable_blocks().len(), 4); - // The store from the original SSA should remain + // The stores from the original SSA should remain assert_eq!(count_stores(main.entry_block(), &main.dfg), 2); assert_eq!(count_stores(b2, &main.dfg), 1); @@ -1039,4 +1069,160 @@ mod tests { let main = ssa.main(); assert_eq!(count_loads(main.entry_block(), &main.dfg), 1); } + + #[test] + fn remove_repeat_loads() { + // This test starts with two loads from the same unknown address. + // Specifically, you should look for `load v2` in `b3`. + // We should be able to remove the second repeated load. + let src = " + acir(inline) fn main f0 { + b0(): + v0 = allocate -> &mut Field + store Field 0 at v0 + v2 = allocate -> &mut &mut Field + store v0 at v2 + jmp b1(Field 0) + b1(v3: Field): + v4 = eq v3, Field 0 + jmpif v4 then: b2, else: b3 + b2(): + v5 = load v2 -> &mut Field + store Field 2 at v5 + v8 = add v3, Field 1 + jmp b1(v8) + b3(): + v9 = load v0 -> Field + v10 = eq v9, Field 2 + constrain v9 == Field 2 + v11 = load v2 -> &mut Field + v12 = load v2 -> &mut Field + v13 = load v12 -> Field + v14 = eq v13, Field 2 + constrain v13 == Field 2 + return + } + "; + + let ssa = Ssa::from_str(src).unwrap(); + + // The repeated load from v2 should be removed + // b3 should only have three loads now rather than four previously + // + // All stores are expected to remain.
+ let expected = " + acir(inline) fn main f0 { + b0(): + v1 = allocate -> &mut Field + store Field 0 at v1 + v3 = allocate -> &mut &mut Field + store v1 at v3 + jmp b1(Field 0) + b1(v0: Field): + v4 = eq v0, Field 0 + jmpif v4 then: b3, else: b2 + b2(): + v5 = load v1 -> Field + v7 = eq v5, Field 2 + constrain v5 == Field 2 + v8 = load v3 -> &mut Field + v9 = load v8 -> Field + v10 = eq v9, Field 2 + constrain v9 == Field 2 + return + b3(): + v11 = load v3 -> &mut Field + store Field 2 at v11 + v13 = add v0, Field 1 + jmp b1(v13) + } + "; + + let ssa = ssa.mem2reg(); + assert_normalized_ssa_equals(ssa, expected); + } + + #[test] + fn keep_repeat_loads_passed_to_a_call() { + // The test is exactly the same as `remove_repeat_loads` above, except with the call + // to `f1` between the repeated loads. + let src = " + acir(inline) fn main f0 { + b0(): + v1 = allocate -> &mut Field + store Field 0 at v1 + v3 = allocate -> &mut &mut Field + store v1 at v3 + jmp b1(Field 0) + b1(v0: Field): + v4 = eq v0, Field 0 + jmpif v4 then: b3, else: b2 + b2(): + v5 = load v1 -> Field + v7 = eq v5, Field 2 + constrain v5 == Field 2 + v8 = load v3 -> &mut Field + call f1(v3) + v10 = load v3 -> &mut Field + v11 = load v10 -> Field + v12 = eq v11, Field 2 + constrain v11 == Field 2 + return + b3(): + v13 = load v3 -> &mut Field + store Field 2 at v13 + v15 = add v0, Field 1 + jmp b1(v15) + } + acir(inline) fn foo f1 { + b0(v0: &mut Field): + return + } + "; + + let ssa = Ssa::from_str(src).unwrap(); + + let ssa = ssa.mem2reg(); + // We expect the program to be unchanged + assert_normalized_ssa_equals(ssa, src); + } + + #[test] + fn keep_repeat_loads_with_alias_store() { + // v1, v2, and v3 alias one another. We want to make sure that a repeated load from v1 with stores + // to its aliases in between the loads does not have those loads removed.
+ let src = " + acir(inline) fn main f0 { + b0(v0: u1): + jmpif v0 then: b2, else: b1 + b1(): + v4 = allocate -> &mut Field + store Field 1 at v4 + jmp b3(v4, v4, v4) + b2(): + v6 = allocate -> &mut Field + store Field 0 at v6 + jmp b3(v6, v6, v6) + b3(v1: &mut Field, v2: &mut Field, v3: &mut Field): + v8 = load v1 -> Field + store Field 2 at v2 + v10 = load v1 -> Field + store Field 1 at v3 + v11 = load v1 -> Field + store Field 3 at v3 + v13 = load v1 -> Field + constrain v8 == Field 0 + constrain v10 == Field 2 + constrain v11 == Field 1 + constrain v13 == Field 3 + return + } + "; + + let ssa = Ssa::from_str(src).unwrap(); + + let ssa = ssa.mem2reg(); + // We expect the program to be unchanged + assert_normalized_ssa_equals(ssa, src); + } } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_bit_shifts.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_bit_shifts.rs index cdbb10432324..ccf5bd9d9f85 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_bit_shifts.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_bit_shifts.rs @@ -191,7 +191,7 @@ impl Context<'_> { let typ = self.function.dfg.type_of_value(rhs); if let Type::Numeric(NumericType::Unsigned { bit_size }) = typ { let to_bits = self.function.dfg.import_intrinsic(Intrinsic::ToBits(Endian::Little)); - let result_types = vec![Type::Array(Arc::new(vec![Type::bool()]), bit_size as usize)]; + let result_types = vec![Type::Array(Arc::new(vec![Type::bool()]), bit_size)]; let rhs_bits = self.insert_call(to_bits, vec![rhs], result_types); let rhs_bits = rhs_bits[0]; diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_if_else.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_if_else.rs index 8e25c3f0a35a..02191801fcdc 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_if_else.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_if_else.rs @@ -48,7 +48,7 @@ impl Function { #[derive(Default)] struct Context { - slice_sizes: HashMap<ValueId, usize>, + slice_sizes: HashMap<ValueId, u32>, // Maps array_set result -> element that was overwritten by that instruction.
// Used to undo array_sets while merging values @@ -66,8 +66,9 @@ impl Context { for instruction in instructions { match &function.dfg[instruction] { - Instruction::IfElse { then_condition, then_value, else_value } => { + Instruction::IfElse { then_condition, then_value, else_condition, else_value } => { let then_condition = *then_condition; + let else_condition = *else_condition; let then_value = *then_value; let else_value = *else_value; @@ -84,7 +85,12 @@ impl Context { call_stack, ); - let value = value_merger.merge_values(then_condition, then_value, else_value); + let value = value_merger.merge_values( + then_condition, + else_condition, + then_value, + else_value, + ); let _typ = function.dfg.type_of_value(value); let results = function.dfg.instruction_results(instruction); @@ -142,13 +148,13 @@ impl Context { } } - fn get_or_find_capacity(&mut self, dfg: &DataFlowGraph, value: ValueId) -> usize { + fn get_or_find_capacity(&mut self, dfg: &DataFlowGraph, value: ValueId) -> u32 { match self.slice_sizes.entry(value) { Entry::Occupied(entry) => return *entry.get(), Entry::Vacant(entry) => { if let Some((array, typ)) = dfg.get_array_constant(value) { let length = array.len() / typ.element_types().len(); - return *entry.insert(length); + return *entry.insert(length as u32); } if let Type::Array(_, length) = dfg.type_of_value(value) { @@ -164,7 +170,7 @@ impl Context { enum SizeChange { None, - SetTo(ValueId, usize), + SetTo(ValueId, u32), // These two variants store the old and new slice ids // not their lengths which should be old_len = new_len +/- 1 diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/simplify_cfg.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/simplify_cfg.rs index c282e2df4510..e7f8d227d288 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/simplify_cfg.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/simplify_cfg.rs @@ -442,14 +442,14 @@ mod test { store Field 0 at v1 v3 = not v0 jmpif v0 then: b2, else: b1 + b1(): + store Field 2 at v1 + jmp b2() b2(): v5 = load v1 -> Field v6 = eq v5, Field 2 constrain v5 == Field 2 return - b1(): - store Field 2 at v1 - jmp b2() }"; assert_normalized_ssa_equals(ssa.simplify_cfg(), expected); } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/unrolling.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/unrolling.rs index 777c16dacd17..22daba1de45c 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/unrolling.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/unrolling.rs @@ -19,8 +19,10 @@ //! When unrolling ACIR code, we remove reference count instructions because they are //! only used by Brillig bytecode. use acvm::{acir::AcirField, FieldElement}; +use im::HashSet; use crate::{ + brillig::brillig_gen::convert_ssa_function, errors::RuntimeError, ssa::{ ir::{ @@ -37,38 +39,60 @@ use crate::{ ssa_gen::Ssa, }, }; -use fxhash::{FxHashMap as HashMap, FxHashSet as HashSet}; +use fxhash::FxHashMap as HashMap; impl Ssa { /// Loop unrolling can return errors, since ACIR functions need to be fully unrolled. /// This meta-pass will keep trying to unroll loops and simplifying the SSA until no more errors are found. 
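Before the reworked meta-pass below, a hypothetical call site illustrating the new size-limit knob (the wrapper function and the chosen value are purely illustrative; `Ssa` and `RuntimeError` are the crate's own types):

```rust
// Hypothetical driver, not part of this PR. The Option<i32> semantics
// of the new parameter (spelled out in the doc comment below) are:
//   None      -> never reject an unroll based on Brillig bytecode size
//   Some(150) -> accept up to 150% growth, i.e. at most 2.5x the original
//   Some(-50) -> demand the unrolled code shrink to at most half the size
fn unroll_with_size_guard(ssa: Ssa) -> Result<Ssa, RuntimeError> {
    ssa.unroll_loops_iteratively(Some(150))
}
```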
- #[tracing::instrument(level = "trace", skip(ssa))] - pub(crate) fn unroll_loops_iteratively(mut ssa: Ssa) -> Result<Ssa, RuntimeError> { - for (_, function) in ssa.functions.iter_mut() { + /// + /// The `max_bytecode_increase_percent`, when given, is used to limit the growth of the Brillig bytecode size + /// after unrolling small loops to some percentage of the original loop. For example, a value of 150 would + /// mean the new loop can be 150% (i.e. 2.5 times) larger than the original loop. It will still contain + /// fewer SSA instructions, but that can still result in more Brillig opcodes. + #[tracing::instrument(level = "trace", skip(self))] + pub(crate) fn unroll_loops_iteratively( + mut self: Ssa, + max_bytecode_increase_percent: Option<i32>, + ) -> Result<Ssa, RuntimeError> { + for (_, function) in self.functions.iter_mut() { + // Take a snapshot of the function to compare byte size increase, + // but only if the setting indicates we have to, otherwise skip it. + let orig_func_and_max_incr_pct = max_bytecode_increase_percent + .filter(|_| function.runtime().is_brillig()) + .map(|max_incr_pct| (function.clone(), max_incr_pct)); + // Try to unroll loops first: - let mut unroll_errors = function.try_unroll_loops(); + let (mut has_unrolled, mut unroll_errors) = function.try_unroll_loops(); // Keep unrolling until no more errors are found while !unroll_errors.is_empty() { let prev_unroll_err_count = unroll_errors.len(); // Simplify the SSA before retrying - - // Do a mem2reg after the last unroll to aid simplify_cfg - function.mem2reg(); - function.simplify_function(); - // Do another mem2reg after simplify_cfg to aid the next unroll - function.mem2reg(); + simplify_between_unrolls(function); // Unroll again - unroll_errors = function.try_unroll_loops(); + let (new_unrolled, new_errors) = function.try_unroll_loops(); + unroll_errors = new_errors; + has_unrolled |= new_unrolled; + // If we didn't manage to unroll any more loops, exit if unroll_errors.len() >= prev_unroll_err_count { return Err(unroll_errors.swap_remove(0)); } } + + if has_unrolled { + if let Some((orig_function, max_incr_pct)) = orig_func_and_max_incr_pct { + let new_size = brillig_bytecode_size(function); + let orig_size = brillig_bytecode_size(&orig_function); + if !is_new_size_ok(orig_size, new_size, max_incr_pct) { + *function = orig_function; + } + } + } } - Ok(ssa) + Ok(self) } } @@ -77,7 +101,7 @@ impl Function { // This can also be true for ACIR, but we have no alternative to unrolling in ACIR. // Brillig also generally prefers smaller code rather than faster code, // so we only attempt to unroll small loops, which we decide on a case-by-case basis. - fn try_unroll_loops(&mut self) -> Vec<RuntimeError> { + fn try_unroll_loops(&mut self) -> (bool, Vec<RuntimeError>) { Loops::find_all(self).unroll_each(self) } } @@ -85,7 +109,7 @@ impl Function { pub(super) struct Loop { /// The header block of a loop is the block which dominates all the /// other blocks in the loop. - header: BasicBlockId, + pub(super) header: BasicBlockId, /// The start of the back_edge n -> d is the block n at the end of /// the loop that jumps back to the header block d which restarts the loop. @@ -170,8 +194,10 @@ impl Loops { /// Unroll all loops within a given function. /// Any loops which fail to be unrolled (due to using non-constant indices) will be unmodified.
- fn unroll_each(mut self, function: &mut Function) -> Vec<RuntimeError> { + /// Returns whether any blocks have been modified + fn unroll_each(mut self, function: &mut Function) -> (bool, Vec<RuntimeError>) { let mut unroll_errors = vec![]; + let mut has_unrolled = false; while let Some(next_loop) = self.yet_to_unroll.pop() { if function.runtime().is_brillig() && !next_loop.is_small_loop(function, &self.cfg) { continue; } @@ -181,13 +207,17 @@ impl Loops { if next_loop.blocks.iter().any(|block| self.modified_blocks.contains(block)) { let mut new_loops = Self::find_all(function); new_loops.failed_to_unroll = self.failed_to_unroll; - return unroll_errors.into_iter().chain(new_loops.unroll_each(function)).collect(); + let (new_unrolled, new_errors) = new_loops.unroll_each(function); + return (has_unrolled || new_unrolled, [unroll_errors, new_errors].concat()); } // Don't try to unroll the loop again if it is known to fail if !self.failed_to_unroll.contains(&next_loop.header) { match next_loop.unroll(function, &self.cfg) { - Ok(_) => self.modified_blocks.extend(next_loop.blocks), + Ok(_) => { + has_unrolled = true; + self.modified_blocks.extend(next_loop.blocks); + } Err(call_stack) => { self.failed_to_unroll.insert(next_loop.header); unroll_errors.push(RuntimeError::UnknownLoopBound { call_stack }); @@ -195,7 +225,7 @@ impl Loops { } } } - unroll_errors + (has_unrolled, unroll_errors) } } @@ -269,7 +299,7 @@ impl Loop { /// v5 = lt v1, u32 4 // Upper bound /// jmpif v5 then: b3, else: b2 /// ``` - fn get_const_upper_bound(&self, function: &Function) -> Option<FieldElement> { + pub(super) fn get_const_upper_bound(&self, function: &Function) -> Option<FieldElement> { let block = &function.dfg[self.header]; let instructions = block.instructions(); assert_eq!( @@ -947,21 +977,59 @@ impl<'f> LoopIteration<'f> { } } +/// Unrolling leaves some duplicate instructions which can potentially be removed. +fn simplify_between_unrolls(function: &mut Function) { + // Do a mem2reg after the last unroll to aid simplify_cfg + function.mem2reg(); + function.simplify_function(); + // Do another mem2reg after simplify_cfg to aid the next unroll + function.mem2reg(); +} + +/// Convert the function to Brillig bytecode and return the resulting size. +fn brillig_bytecode_size(function: &Function) -> usize { + // We need to do some SSA passes in order for the conversion to be able to go ahead, + // otherwise we can hit `unreachable!()` instructions in `convert_ssa_instruction`. + // Creating a clone so as not to modify the originals. + let mut temp = function.clone(); + + // Might as well give it the best chance. + simplify_between_unrolls(&mut temp); + + // This is to try to prevent hitting an ICE. + temp.dead_instruction_elimination(false); + + convert_ssa_function(&temp, false).byte_code.len() +} + +/// Decide if the new bytecode size is acceptable, compared to the original. +/// +/// The maximum increase can be expressed as a negative value if we demand a decrease. +/// (Values -100 and under mean the new size should be 0).
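Spelling out the arithmetic of the helper that follows: the check reads as `new_size * 100 <= orig_size * max(0, 100 + max_incr_pct)`. A standalone copy with worked examples, mirroring the `test_is_new_size_ok` cases further down:

```rust
// Same body as the helper in the diff; reproduced here so the examples run standalone.
fn is_new_size_ok(orig_size: usize, new_size: usize, max_incr_pct: i32) -> bool {
    let max_size_pct = 100i32.saturating_add(max_incr_pct).max(0) as usize;
    let max_size = orig_size.saturating_mul(max_size_pct);
    new_size.saturating_mul(100) <= max_size
}

fn main() {
    assert!(is_new_size_ok(1000, 1500, 50)); // 150_000 <= 150_000: exactly at the +50% cap
    assert!(!is_new_size_ok(1000, 1501, 50)); // 150_100 > 150_000: one opcode over the cap
    assert!(!is_new_size_ok(1000, 700, -50)); // -50% demands <= 500, so 700 is rejected
    assert!(!is_new_size_ok(1000, 250, -1250)); // clamped to 0%: only a size of 0 would pass
}
```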
+fn is_new_size_ok(orig_size: usize, new_size: usize, max_incr_pct: i32) -> bool { + let max_size_pct = 100i32.saturating_add(max_incr_pct).max(0) as usize; + let max_size = orig_size.saturating_mul(max_size_pct); + new_size.saturating_mul(100) <= max_size +} + #[cfg(test)] mod tests { use acvm::FieldElement; + use test_case::test_case; use crate::errors::RuntimeError; use crate::ssa::{ir::value::ValueId, opt::assert_normalized_ssa_equals, Ssa}; - use super::{BoilerplateStats, Loops}; + use super::{is_new_size_ok, BoilerplateStats, Loops}; - /// Tries to unroll all loops in each SSA function. + /// Tries to unroll all loops in each SSA function once, calling the `Function` directly, + /// bypassing the iterative loop done by the SSA which does further optimisations. + /// /// If any loop cannot be unrolled, it is left as-is or in a partially unrolled state. fn try_unroll_loops(mut ssa: Ssa) -> (Ssa, Vec<RuntimeError>) { let mut errors = vec![]; for function in ssa.functions.values_mut() { - errors.extend(function.try_unroll_loops()); + errors.extend(function.try_unroll_loops().1); } (ssa, errors) } @@ -1221,9 +1289,26 @@ mod tests { let (ssa, errors) = try_unroll_loops(ssa); assert_eq!(errors.len(), 0, "Unroll should have no errors"); + // Check that it's still the original assert_normalized_ssa_equals(ssa, parse_ssa().to_string().as_str()); } + #[test] + fn test_brillig_unroll_iteratively_respects_max_increase() { + let ssa = brillig_unroll_test_case(); + let ssa = ssa.unroll_loops_iteratively(Some(-90)).unwrap(); + // Check that it's still the original + assert_normalized_ssa_equals(ssa, brillig_unroll_test_case().to_string().as_str()); + } + + #[test] + fn test_brillig_unroll_iteratively_with_large_max_increase() { + let ssa = brillig_unroll_test_case(); + let ssa = ssa.unroll_loops_iteratively(Some(50)).unwrap(); + // Check that it did the unroll + assert_eq!(ssa.main().reachable_blocks().len(), 2, "The loop should be unrolled"); + } + /// Test that `break` and `continue` stop unrolling without any panic.
#[test] fn test_brillig_unroll_break_and_continue() { @@ -1252,12 +1337,15 @@ b2(): v7 = eq v0, u32 2 jmpif v7 then: b7, else: b3 - b7(): - v18 = add v0, u32 1 - jmp b1(v18) b3(): v9 = eq v0, u32 5 jmpif v9 then: b5, else: b4 + b4(): + v10 = load v1 -> Field + v12 = add v10, Field 1 + store v12 at v1 + v14 = add v0, u32 1 + jmp b1(v14) b5(): jmp b6() b6(): @@ -1265,12 +1353,9 @@ v15 = load v1 -> Field v17 = eq v15, Field 4 constrain v15 == Field 4 return - b4(): - v10 = load v1 -> Field - v12 = add v10, Field 1 - store v12 at v1 - v14 = add v0, u32 1 - jmp b1(v14) + b7(): + v18 = add v0, u32 1 + jmp b1(v18) } "; let ssa = Ssa::from_str(src).unwrap(); @@ -1377,4 +1462,14 @@ mod tests { let loop0 = loops.yet_to_unroll.pop().expect("there should be a loop"); loop0.boilerplate_stats(function, &loops.cfg).expect("there should be stats") } + + #[test_case(1000, 700, 50, true; "size decreased")] + #[test_case(1000, 1500, 50, true; "size increased just by the max")] + #[test_case(1000, 1501, 50, false; "size increased over the max")] + #[test_case(1000, 700, -50, false; "size decreased but not enough")] + #[test_case(1000, 250, -50, true; "size decreased over expectations")] + #[test_case(1000, 250, -1250, false; "demanding more than minus 100 is handled")] + fn test_is_new_size_ok(old: usize, new: usize, max: i32, ok: bool) { + assert_eq!(is_new_size_ok(old, new, max), ok); + } } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/lexer.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/lexer.rs index d89bc1e9e28d..5b66810c6419 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/lexer.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/lexer.rs @@ -62,6 +62,7 @@ impl<'a> Lexer<'a> { Some('-') if self.peek_char() == Some('>') => self.double_char_token(Token::Arrow), Some('-') => self.single_char_token(Token::Dash), Some('"') => self.eat_string_literal(), + Some('b') if self.peek_char() == Some('"') => self.eat_byte_string_literal(), Some(ch) if ch.is_ascii_alphanumeric() || ch == '_' => self.eat_alpha_numeric(ch), Some(char) => Err(LexerError::UnexpectedCharacter { char, @@ -180,8 +181,23 @@ impl<'a> Lexer<'a> { fn eat_string_literal(&mut self) -> SpannedTokenResult { let start = self.position; - let mut string = String::new(); + let string = self.eat_string(start)?; + let str_literal_token = Token::Str(string); + let end = self.position; + Ok(str_literal_token.into_span(start, end)) + } + + fn eat_byte_string_literal(&mut self) -> SpannedTokenResult { + let start = self.position; + self.next_char(); // skip the b + let string = self.eat_string(start)?; + let str_literal_token = Token::ByteStr(string); + let end = self.position; + Ok(str_literal_token.into_span(start, end)) + } + fn eat_string(&mut self, start: u32) -> Result<String, LexerError> { + let mut string = String::new(); while let Some(next) = self.next_char() { let char = match next { '"' => break, @@ -206,11 +222,7 @@ impl<'a> Lexer<'a> { string.push(char); } - - let str_literal_token = Token::Str(string); - - let end = self.position; - Ok(str_literal_token.into_span(start, end)) + Ok(string) } fn eat_while<F: Fn(char) -> bool>( diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/mod.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/mod.rs index 3d8bd37deadd..24a5ff430710 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/mod.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/mod.rs @@ -4,7 +4,10 @@ use std::{ }; use super::{ - ir::{instruction::BinaryOp,
types::Type}, + ir::{ + instruction::BinaryOp, + types::{NumericType, Type}, + }, Ssa, }; @@ -448,12 +451,39 @@ impl<'a> Parser<'a> { } if self.eat_keyword(Keyword::MakeArray)? { - self.eat_or_error(Token::LeftBracket)?; - let elements = self.parse_comma_separated_values()?; - self.eat_or_error(Token::RightBracket)?; - self.eat_or_error(Token::Colon)?; - let typ = self.parse_type()?; - return Ok(ParsedInstruction::MakeArray { target, elements, typ }); + if self.eat(Token::Ampersand)? { + let Some(string) = self.eat_byte_str()? else { + return self.expected_byte_string(); + }; + let u8 = Type::Numeric(NumericType::Unsigned { bit_size: 8 }); + let typ = Type::Slice(Arc::new(vec![u8.clone()])); + let elements = string + .bytes() + .map(|byte| ParsedValue::NumericConstant { + constant: FieldElement::from(byte as u128), + typ: u8.clone(), + }) + .collect(); + return Ok(ParsedInstruction::MakeArray { target, elements, typ }); + } else if let Some(string) = self.eat_byte_str()? { + let u8 = Type::Numeric(NumericType::Unsigned { bit_size: 8 }); + let typ = Type::Array(Arc::new(vec![u8.clone()]), string.len() as u32); + let elements = string + .bytes() + .map(|byte| ParsedValue::NumericConstant { + constant: FieldElement::from(byte as u128), + typ: u8.clone(), + }) + .collect(); + return Ok(ParsedInstruction::MakeArray { target, elements, typ }); + } else { + self.eat_or_error(Token::LeftBracket)?; + let elements = self.parse_comma_separated_values()?; + self.eat_or_error(Token::RightBracket)?; + self.eat_or_error(Token::Colon)?; + let typ = self.parse_type()?; + return Ok(ParsedInstruction::MakeArray { target, elements, typ }); + } } if self.eat_keyword(Keyword::Not)? { @@ -656,7 +686,7 @@ impl<'a> Parser<'a> { if self.eat(Token::Semicolon)? { let length = self.eat_int_or_error()?; self.eat_or_error(Token::RightBracket)?; - return Ok(Type::Array(Arc::new(element_types), length.to_u128() as usize)); + return Ok(Type::Array(Arc::new(element_types), length.to_u128() as u32)); } else { self.eat_or_error(Token::RightBracket)?; return Ok(Type::Slice(Arc::new(element_types))); @@ -796,6 +826,18 @@ impl<'a> Parser<'a> { } } + fn eat_byte_str(&mut self) -> ParseResult<Option<String>> { + if matches!(self.token.token(), Token::ByteStr(..)) { + let token = self.bump()?; + match token.into_token() { + Token::ByteStr(string) => Ok(Some(string)), + _ => unreachable!(), + } + } else { + Ok(None) + } + } + fn eat(&mut self, token: Token) -> ParseResult<bool> { if self.token.token() == &token { self.bump()?; @@ -848,6 +890,13 @@ impl<'a> Parser<'a> { }) } + fn expected_byte_string<T>(&mut self) -> ParseResult<T> { + Err(ParserError::ExpectedByteString { + found: self.token.token().clone(), + span: self.token.to_span(), + }) + } + fn expected_identifier<T>(&mut self) -> ParseResult<T> { Err(ParserError::ExpectedIdentifier { found: self.token.token().clone(), @@ -911,6 +960,8 @@ pub(crate) enum ParserError { ExpectedInstructionOrTerminator { found: Token, span: Span }, #[error("Expected a string literal or 'data', found '{found}'")] ExpectedStringOrData { found: Token, span: Span }, + #[error("Expected a byte string literal, found '{found}'")] + ExpectedByteString { found: Token, span: Span }, #[error("Expected a value, found '{found}'")] ExpectedValue { found: Token, span: Span }, #[error("Multiple return values only allowed for call")] @@ -928,6 +979,7 @@ impl ParserError { | ParserError::ExpectedType { span, .. } | ParserError::ExpectedInstructionOrTerminator { span, .. } | ParserError::ExpectedStringOrData { span, ..
} + | ParserError::ExpectedByteString { span, .. } | ParserError::ExpectedValue { span, .. } => *span, ParserError::MultipleReturnValuesOnlyAllowedForCall { second_target, .. } => { second_target.span diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/tests.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/tests.rs index 593b66d0c98c..dab96dfa04f5 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/tests.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/tests.rs @@ -89,6 +89,30 @@ fn test_make_composite_array() { assert_ssa_roundtrip(src); } +#[test] +fn test_make_byte_array_with_string_literal() { + let src = " + acir(inline) fn main f0 { + b0(): + v9 = make_array b\"Hello world!\" + return v9 + } + "; + assert_ssa_roundtrip(src); +} + +#[test] +fn test_make_byte_slice_with_string_literal() { + let src = " + acir(inline) fn main f0 { + b0(): + v9 = make_array &b\"Hello world!\" + return v9 + } + "; + assert_ssa_roundtrip(src); +} + #[test] fn test_block_parameters() { let src = " @@ -119,10 +143,10 @@ fn test_jmpif() { acir(inline) fn main f0 { b0(v0: Field): jmpif v0 then: b2, else: b1 - b2(): - return b1(): return + b2(): + return } "; assert_ssa_roundtrip(src); @@ -228,14 +252,14 @@ fn test_constrain_with_static_message() { #[test] fn test_constrain_with_dynamic_message() { - let src = " + let src = r#" acir(inline) fn main f0 { b0(v0: Field, v1: Field): - v7 = make_array [u8 123, u8 120, u8 125, u8 32, u8 123, u8 121, u8 125] : [u8; 7] + v7 = make_array b"{x} {y}" constrain v0 == Field 1, data v7, u32 2, v0, v1 return } - "; + "#; assert_ssa_roundtrip(src); } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/token.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/token.rs index d8dd4ec011e7..83a2a1d1ed28 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/token.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/token.rs @@ -30,6 +30,7 @@ pub(crate) enum Token { Ident(String), Int(FieldElement), Str(String), + ByteStr(String), Keyword(Keyword), IntType(IntType), /// = @@ -79,6 +80,7 @@ impl Display for Token { Token::Ident(ident) => write!(f, "{}", ident), Token::Int(int) => write!(f, "{}", int), Token::Str(string) => write!(f, "{string:?}"), + Token::ByteStr(string) => write!(f, "{string:?}"), Token::Keyword(keyword) => write!(f, "{}", keyword), Token::IntType(int_type) => write!(f, "{}", int_type), Token::Assign => write!(f, "="), diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs index 0c6041029dab..116e0de4ecd5 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs @@ -20,7 +20,7 @@ use crate::ssa::ir::value::ValueId; use super::value::{Tree, Value, Values}; use super::SSA_WORD_SIZE; -use fxhash::FxHashMap as HashMap; +use fxhash::{FxHashMap as HashMap, FxHashSet as HashSet}; /// The FunctionContext is the main context object for translating a /// function into SSA form during the SSA-gen pass. 
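Before moving into the SSA-gen changes: a compact sketch of the byte-string sugar the parser and lexer diffs above introduce. `b"..."` desugars to a `[u8; N]` `make_array` and `&b"..."` to a `u8` slice; the test name and value numbering here are illustrative, and `Ssa::from_str` is the entry point already shown in the diff:

```rust
#[test]
fn parses_byte_string_sugar() {
    // b"Hi" is sugar for [u8 72, u8 105] : [u8; 2]
    let array_form = "
        acir(inline) fn main f0 {
          b0():
            v0 = make_array b\"Hi\"
            return v0
        }
        ";
    assert!(Ssa::from_str(array_form).is_ok());

    // &b"Hi" produces the same bytes typed as a u8 slice
    let slice_form = "
        acir(inline) fn main f0 {
          b0():
            v0 = make_array &b\"Hi\"
            return v0
        }
        ";
    assert!(Ssa::from_str(slice_form).is_ok());
}
```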
@@ -159,7 +159,8 @@ impl<'a> FunctionContext<'a> { let parameter_value = Self::map_type(parameter_type, |typ| { let value = self.builder.add_parameter(typ); if mutable { - self.new_mutable_variable(value) + // This will wrap any `mut var: T` in a reference and increase the rc of an array if needed + self.new_mutable_variable(value, true) } else { value.into() } @@ -170,8 +171,17 @@ impl<'a> FunctionContext<'a> { /// Allocate a single slot of memory and store into it the given initial value of the variable. /// Always returns a Value::Mutable wrapping the allocate instruction. - pub(super) fn new_mutable_variable(&mut self, value_to_store: ValueId) -> Value { + pub(super) fn new_mutable_variable( + &mut self, + value_to_store: ValueId, + increment_array_rc: bool, + ) -> Value { let element_type = self.builder.current_function.dfg.type_of_value(value_to_store); + + if increment_array_rc { + self.builder.increment_array_reference_count(value_to_store); + } + let alloc = self.builder.insert_allocate(element_type); self.builder.insert_store(alloc, value_to_store); let typ = self.builder.type_of_value(value_to_store); @@ -236,12 +246,12 @@ impl<'a> FunctionContext<'a> { ast::Type::Field => Type::field(), ast::Type::Array(len, element) => { let element_types = Self::convert_type(element).flatten(); - Type::Array(Arc::new(element_types), *len as usize) + Type::Array(Arc::new(element_types), *len) } ast::Type::Integer(Signedness::Signed, bits) => Type::signed((*bits).into()), ast::Type::Integer(Signedness::Unsigned, bits) => Type::unsigned((*bits).into()), ast::Type::Bool => Type::unsigned(1), - ast::Type::String(len) => Type::str(*len as usize), + ast::Type::String(len) => Type::str(*len), ast::Type::FmtString(_, _) => { panic!("convert_non_tuple_type called on a fmt string: {typ}") } @@ -732,10 +742,6 @@ impl<'a> FunctionContext<'a> { let element_types = Self::convert_type(element_type); values.map_both(element_types, |value, element_type| { let reference = value.eval_reference(); - // Reference counting in brillig relies on us incrementing reference - // counts when arrays/slices are constructed or indexed. - // Thus, if we dereference an lvalue which happens to be array/slice we should increment its reference counter. - self.builder.increment_array_reference_count(reference); self.builder.insert_load(reference, element_type).into() }) } @@ -907,33 +913,55 @@ impl<'a> FunctionContext<'a> { } } - /// Increments the reference count of all parameters. Returns the entry block of the function. + /// Increments the reference count of mutable reference array parameters. + /// Any mutable-value parameters (`mut a: [T; N]` versus `a: &mut [T; N]`) are already incremented + /// by `FunctionBuilder::add_parameter_to_scope`. + /// Returns each array id that was incremented. /// /// This is done on parameters rather than call arguments so that we can optimize out /// paired inc/dec instructions within brillig functions more easily. - pub(crate) fn increment_parameter_rcs(&mut self) -> BasicBlockId { + pub(crate) fn increment_parameter_rcs(&mut self) -> HashSet<ValueId> { let entry = self.builder.current_function.entry_block(); let parameters = self.builder.current_function.dfg.block_parameters(entry).to_vec(); + let mut incremented = HashSet::default(); + let mut seen_array_types = HashSet::default(); + for parameter in parameters { - self.builder.increment_array_reference_count(parameter); + // Avoid reference counts for immutable arrays that aren't behind references.
+ let typ = self.builder.current_function.dfg.type_of_value(parameter); + + if let Type::Reference(element) = typ { + if element.contains_an_array() { + // If we have already seen this array type, the value may be aliased + // by another parameter, so issue an inc_rc for it. + if !seen_array_types.insert(element.get_contained_array().clone()) + && self.builder.increment_array_reference_count(parameter) + { + incremented.insert(parameter); + } + } + } } - entry + incremented } /// Ends a local scope of a function. /// This will issue DecrementRc instructions for any arrays in the given starting scope /// block's parameters. Arrays that are also used in terminator instructions for the scope are /// ignored. - pub(crate) fn end_scope(&mut self, scope: BasicBlockId, terminator_args: &[ValueId]) { - let mut dropped_parameters = - self.builder.current_function.dfg.block_parameters(scope).to_vec(); - - dropped_parameters.retain(|parameter| !terminator_args.contains(parameter)); + pub(crate) fn end_scope( + &mut self, + mut incremented_params: HashSet<ValueId>, + terminator_args: &[ValueId], + ) { + incremented_params.retain(|parameter| !terminator_args.contains(parameter)); - for parameter in dropped_parameters { - self.builder.decrement_array_reference_count(parameter); + for parameter in incremented_params { + if self.builder.current_function.dfg.value_is_reference(parameter) { + self.builder.decrement_array_reference_count(parameter); + } } } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs index c50f0a7f45c5..91a49018f761 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs @@ -2,6 +2,7 @@ pub(crate) mod context; mod program; mod value; +use noirc_frontend::token::FmtStrFragment; pub(crate) use program::Ssa; use context::SharedContext; @@ -125,10 +126,10 @@ impl<'a> FunctionContext<'a> { /// Codegen a function's body and set its return value to that of its last parameter. /// For functions returning nothing, this will be an empty list.
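The retention rule above (only parameters that actually received an `inc_rc`, and that do not escape through the terminator, get the matching `dec_rc`) can be modeled in isolation. A minimal sketch, with plain integers standing in for `ValueId`:

```rust
use std::collections::HashSet;

type Id = u32; // stand-in for ValueId

// Mirrors the shape of `end_scope`: drop the dec_rc for anything returned.
fn params_to_decrement(mut incremented: HashSet<Id>, terminator_args: &[Id]) -> HashSet<Id> {
    incremented.retain(|param| !terminator_args.contains(param));
    incremented
}

fn main() {
    let incremented: HashSet<Id> = [1, 2, 3].into_iter().collect();
    // Parameter 2 is returned, so it keeps its reference count.
    let to_drop = params_to_decrement(incremented, &[2]);
    assert_eq!(to_drop, [1, 3].into_iter().collect::<HashSet<Id>>());
}
```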
fn codegen_function_body(&mut self, body: &Expression) -> Result<(), RuntimeError> { - let entry_block = self.increment_parameter_rcs(); + let incremented_params = self.increment_parameter_rcs(); let return_value = self.codegen_expression(body)?; let results = return_value.into_value_list(self); - self.end_scope(entry_block, &results); + self.end_scope(incremented_params, &results); self.builder.terminate_with_return(results); Ok(()) } @@ -195,8 +196,7 @@ impl<'a> FunctionContext<'a> { fn codegen_literal(&mut self, literal: &ast::Literal) -> Result<Values, RuntimeError> { match literal { ast::Literal::Array(array) => { - let elements = - try_vecmap(&array.contents, |element| self.codegen_expression(element))?; + let elements = self.codegen_array_elements(&array.contents)?; let typ = Self::convert_type(&array.typ).flatten(); Ok(match array.typ { @@ -207,8 +207,7 @@ }) } ast::Literal::Slice(array) => { - let elements = - try_vecmap(&array.contents, |element| self.codegen_expression(element))?; + let elements = self.codegen_array_elements(&array.contents)?; let typ = Self::convert_type(&array.typ).flatten(); Ok(match array.typ { @@ -232,10 +231,26 @@ Ok(self.builder.numeric_constant(*value as u128, Type::bool()).into()) } ast::Literal::Str(string) => Ok(self.codegen_string(string)), - ast::Literal::FmtStr(string, number_of_fields, fields) => { + ast::Literal::FmtStr(fragments, number_of_fields, fields) => { + let mut string = String::new(); + for fragment in fragments { + match fragment { + FmtStrFragment::String(value) => { + // Escape curly braces in non-interpolations + let value = value.replace('{', "{{").replace('}', "}}"); + string.push_str(&value); + } + FmtStrFragment::Interpolation(value, _span) => { + string.push('{'); + string.push_str(value); + string.push('}'); + } + } + } + // A caller needs multiple pieces of information to make use of a format string // The message string, the number of fields to be formatted, and the fields themselves - let string = self.codegen_string(string); + let string = self.codegen_string(&string); let field_count = self.builder.length_constant(*number_of_fields as u128); let fields = self.codegen_expression(fields)?; @@ -245,18 +260,33 @@ } } + fn codegen_array_elements( + &mut self, + elements: &[Expression], + ) -> Result<Vec<(Values, bool)>, RuntimeError> { + try_vecmap(elements, |element| { + let value = self.codegen_expression(element)?; + Ok((value, element.is_array_or_slice_literal())) + }) + } + fn codegen_string(&mut self, string: &str) -> Values { let elements = vecmap(string.as_bytes(), |byte| { - self.builder.numeric_constant(*byte as u128, Type::unsigned(8)).into() + let char = self.builder.numeric_constant(*byte as u128, Type::unsigned(8)); + (char.into(), false) }); let typ = Self::convert_non_tuple_type(&ast::Type::String(elements.len() as u32)); self.codegen_array(elements, typ) } // Codegen an array but make sure that we do not have a nested slice + /// + /// The bool aspect of each array element indicates whether the element is an array constant + /// or not. If it is, we avoid incrementing the reference count because we consider the + /// constant to be moved into this larger array constant. fn codegen_array_checked( &mut self, - elements: Vec<Values>, + elements: Vec<(Values, bool)>, typ: Type, ) -> Result<Values, RuntimeError> { if typ.is_nested_slice() { @@ -273,11 +303,15 @@ /// stored next to the other fields in memory.
So an array such as [(1, 2), (3, 4)] is /// stored the same as the array [1, 2, 3, 4]. /// + /// The bool aspect of each array element indicates whether the element is an array constant + /// or not. If it is, we avoid incrementing the reference count because we consider the + /// constant to be moved into this larger array constant. + /// /// The value returned from this function is always that of the allocate instruction. - fn codegen_array(&mut self, elements: Vec<Values>, typ: Type) -> Values { + fn codegen_array(&mut self, elements: Vec<(Values, bool)>, typ: Type) -> Values { let mut array = im::Vector::new(); - for element in elements { + for (element, is_array_constant) in elements { element.for_each(|element| { let element = element.eval(self); @@ -286,7 +320,10 @@ // pessimistic reference count (since some are likely moved rather than shared) // which is important for Brillig's copy on write optimization. This has no // effect in ACIR code. - self.builder.increment_array_reference_count(element); + if !is_array_constant { + self.builder.increment_array_reference_count(element); + } + array.push_back(element); }); } @@ -662,15 +699,22 @@ fn codegen_let(&mut self, let_expr: &ast::Let) -> Result<Values, RuntimeError> { let mut values = self.codegen_expression(&let_expr.expression)?; + // Don't mutate the reference count if we're assigning an array literal to a Let: + // `let mut foo = [1, 2, 3];` + // we consider the array to be moved, so we should have an initial rc of just 1. + let should_inc_rc = !let_expr.expression.is_array_or_slice_literal(); + values = values.map(|value| { let value = value.eval(self); - // Make sure to increment array reference counts on each let binding - self.builder.increment_array_reference_count(value); - Tree::Leaf(if let_expr.mutable { - self.new_mutable_variable(value) + self.new_mutable_variable(value, should_inc_rc) } else { + // `new_mutable_variable` increments rcs internally so we have to + // handle it separately for the immutable case + if should_inc_rc { + self.builder.increment_array_reference_count(value); + } value::Value::Normal(value) }) }); @@ -729,10 +773,14 @@ fn codegen_assign(&mut self, assign: &ast::Assign) -> Result<Values, RuntimeError> { let lhs = self.extract_current_value(&assign.lvalue)?; let rhs = self.codegen_expression(&assign.expression)?; + let should_inc_rc = !assign.expression.is_array_or_slice_literal(); rhs.clone().for_each(|value| { let value = value.eval(self); - self.builder.increment_array_reference_count(value); + + if should_inc_rc { + self.builder.increment_array_reference_count(value); + } }); self.assign_new_value(lhs, rhs); diff --git a/noir/noir-repo/compiler/noirc_frontend/Cargo.toml b/noir/noir-repo/compiler/noirc_frontend/Cargo.toml index 5d1520af54f7..5f8f02689c85 100644 --- a/noir/noir-repo/compiler/noirc_frontend/Cargo.toml +++ b/noir/noir-repo/compiler/noirc_frontend/Cargo.toml @@ -25,7 +25,6 @@ num-bigint.workspace = true num-traits.workspace = true rustc-hash = "1.1.0" small-ord-set = "0.1.3" -regex = "1.9.1" cfg-if.workspace = true tracing.workspace = true petgraph = "0.6" diff --git a/noir/noir-repo/compiler/noirc_frontend/src/ast/expression.rs b/noir/noir-repo/compiler/noirc_frontend/src/ast/expression.rs index 2c8a9b6508dd..ae622f46686b 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/ast/expression.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/ast/expression.rs @@ -10,7 +10,7 @@ use crate::ast::{ use crate::node_interner::{ ExprId,
InternedExpressionKind, InternedStatementKind, QuotedTypeId, StructId, }; -use crate::token::{Attributes, FunctionAttribute, Token, Tokens}; +use crate::token::{Attributes, FmtStrFragment, FunctionAttribute, Token, Tokens}; use crate::{Kind, Type}; use acvm::{acir::AcirField, FieldElement}; use iter_extended::vecmap; @@ -210,8 +210,8 @@ impl ExpressionKind { ExpressionKind::Literal(Literal::RawStr(contents, hashes)) } - pub fn format_string(contents: String) -> ExpressionKind { - ExpressionKind::Literal(Literal::FmtStr(contents)) + pub fn format_string(fragments: Vec<FmtStrFragment>, length: u32) -> ExpressionKind { + ExpressionKind::Literal(Literal::FmtStr(fragments, length)) } pub fn constructor( @@ -434,7 +434,7 @@ pub enum Literal { Integer(FieldElement, /*sign*/ bool), // false for positive integer and true for negative Str(String), RawStr(String, u8), - FmtStr(String), + FmtStr(Vec<FmtStrFragment>, u32 /* length */), Unit, } @@ -669,7 +669,13 @@ impl Display for Literal { std::iter::once('#').cycle().take(*num_hashes as usize).collect(); write!(f, "r{hashes}\"{string}\"{hashes}") } - Literal::FmtStr(string) => write!(f, "f\"{string}\""), + Literal::FmtStr(fragments, _length) => { + write!(f, "f\"")?; + for fragment in fragments { + fragment.fmt(f)?; + } + write!(f, "\"") + } Literal::Unit => write!(f, "()"), } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/ast/visitor.rs b/noir/noir-repo/compiler/noirc_frontend/src/ast/visitor.rs index f149c998eca1..2f60532980a0 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/ast/visitor.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/ast/visitor.rs @@ -16,7 +16,7 @@ use crate::{ InternedUnresolvedTypeData, QuotedTypeId, }, parser::{Item, ItemKind, ParsedSubModule}, - token::{MetaAttribute, SecondaryAttribute, Tokens}, + token::{FmtStrFragment, MetaAttribute, SecondaryAttribute, Tokens}, ParsedModule, QuotedType, }; @@ -172,7 +172,7 @@ pub trait Visitor { fn visit_literal_raw_str(&mut self, _: &str, _: u8) {} - fn visit_literal_fmt_str(&mut self, _: &str) {} + fn visit_literal_fmt_str(&mut self, _: &[FmtStrFragment], _length: u32) {} fn visit_literal_unit(&mut self) {} @@ -900,7 +900,7 @@ impl Literal { Literal::Integer(value, negative) => visitor.visit_literal_integer(*value, *negative), Literal::Str(str) => visitor.visit_literal_str(str), Literal::RawStr(str, length) => visitor.visit_literal_raw_str(str, *length), - Literal::FmtStr(str) => visitor.visit_literal_fmt_str(str), + Literal::FmtStr(fragments, length) => visitor.visit_literal_fmt_str(fragments, *length), Literal::Unit => visitor.visit_literal_unit(), } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/debug/mod.rs b/noir/noir-repo/compiler/noirc_frontend/src/debug/mod.rs index fed3149118b5..f05fc721581d 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/debug/mod.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/debug/mod.rs @@ -67,12 +67,16 @@ impl DebugInstrumenter { self.insert_state_set_oracle(module, 8); } - fn insert_var(&mut self, var_name: &str) -> SourceVarId { + fn insert_var(&mut self, var_name: &str) -> Option<SourceVarId> { + if var_name == "_" { + return None; + } + let var_id = SourceVarId(self.next_var_id); self.next_var_id += 1; self.variables.insert(var_id, var_name.to_string()); self.scope.last_mut().unwrap().insert(var_name.to_string(), var_id); - var_id + Some(var_id) } fn lookup_var(&self, var_name: &str) -> Option<SourceVarId> { @@ -107,9 +111,9 @@ impl DebugInstrumenter { .flat_map(|param| { pattern_vars(&param.pattern) .iter() - .map(|(id, _is_mut)| { - let var_id =
self.insert_var(&id.0.contents); - build_assign_var_stmt(var_id, id_expr(id)) + .filter_map(|(id, _is_mut)| { + let var_id = self.insert_var(&id.0.contents)?; + Some(build_assign_var_stmt(var_id, id_expr(id))) }) .collect::<Vec<ast::Statement>>() }) @@ -225,13 +229,28 @@ impl DebugInstrumenter { } }) .collect(); - let vars_exprs: Vec<ast::Expression> = vars.iter().map(|(id, _)| id_expr(id)).collect(); + let vars_exprs: Vec<ast::Expression> = vars + .iter() + .map(|(id, _)| { + // We don't want to generate an expression to read from "_". + // And since this expression is going to be assigned to "_" so it doesn't matter + // what it is, we can use `()` for it. + if id.0.contents == "_" { + ast::Expression { + kind: ast::ExpressionKind::Literal(ast::Literal::Unit), + span: id.span(), + } + } else { + id_expr(id) + } + }) + .collect(); let mut block_stmts = vec![ast::Statement { kind: ast::StatementKind::Let(let_stmt.clone()), span: *span }]; - block_stmts.extend(vars.iter().map(|(id, _)| { - let var_id = self.insert_var(&id.0.contents); - build_assign_var_stmt(var_id, id_expr(id)) + block_stmts.extend(vars.iter().filter_map(|(id, _)| { + let var_id = self.insert_var(&id.0.contents)?; + Some(build_assign_var_stmt(var_id, id_expr(id))) })); block_stmts.push(ast::Statement { kind: ast::StatementKind::Expression(ast::Expression { @@ -422,21 +441,31 @@ impl DebugInstrumenter { let var_name = &for_stmt.identifier.0.contents; let var_id = self.insert_var(var_name); - let set_stmt = build_assign_var_stmt(var_id, id_expr(&for_stmt.identifier)); - let drop_stmt = build_drop_var_stmt(var_id, Span::empty(for_stmt.span.end())); + let set_and_drop_stmt = var_id.map(|var_id| { + ( + build_assign_var_stmt(var_id, id_expr(&for_stmt.identifier)), + build_drop_var_stmt(var_id, Span::empty(for_stmt.span.end())), + ) + }); self.walk_expr(&mut for_stmt.block); + + let mut statements = Vec::new(); + let block_statement = ast::Statement { + kind: ast::StatementKind::Semi(for_stmt.block.clone()), + span: for_stmt.block.span, + }; + + if let Some((set_stmt, drop_stmt)) = set_and_drop_stmt { + statements.push(set_stmt); + statements.push(block_statement); + statements.push(drop_stmt); + } else { + statements.push(block_statement); + } + for_stmt.block = ast::Expression { - kind: ast::ExpressionKind::Block(ast::BlockExpression { - statements: vec![ - set_stmt, - ast::Statement { - kind: ast::StatementKind::Semi(for_stmt.block.clone()), - span: for_stmt.block.span, - }, - drop_stmt, - ], - }), + kind: ast::ExpressionKind::Block(ast::BlockExpression { statements }), span: for_stmt.span, }; } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/comptime.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/comptime.rs index a27e2bf0163c..962356d6dd95 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/comptime.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/comptime.rs @@ -329,8 +329,6 @@ impl<'context> Elaborator<'context> { push_arg(Value::TraitDefinition(trait_id)); } else { let (expr_id, expr_type) = interpreter.elaborator.elaborate_expression(arg); - push_arg(interpreter.evaluate(expr_id)?); - if let Err(UnificationError) = expr_type.unify(param_type) { return Err(InterpreterError::TypeMismatch { expected: param_type.clone(), @@ -338,6 +336,7 @@ location: arg_location, }); } + push_arg(interpreter.evaluate(expr_id)?); }; } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/expressions.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/expressions.rs index
f801c1817efc..b5fab6faf9b2 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/expressions.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/expressions.rs @@ -1,7 +1,6 @@ use acvm::{AcirField, FieldElement}; use iter_extended::vecmap; use noirc_errors::{Location, Span}; -use regex::Regex; use rustc_hash::FxHashSet as HashSet; use crate::{ @@ -29,7 +28,7 @@ use crate::{ traits::{ResolvedTraitBound, TraitConstraint}, }, node_interner::{DefinitionKind, ExprId, FuncId, InternedStatementKind, TraitMethodId}, - token::Tokens, + token::{FmtStrFragment, Tokens}, Kind, QuotedType, Shared, StructType, Type, }; @@ -167,7 +166,7 @@ impl<'context> Elaborator<'context> { let len = Type::Constant(str.len().into(), Kind::u32()); (Lit(HirLiteral::Str(str)), Type::String(Box::new(len))) } - Literal::FmtStr(str) => self.elaborate_fmt_string(str, span), + Literal::FmtStr(fragments, length) => self.elaborate_fmt_string(fragments, length), Literal::Array(array_literal) => { self.elaborate_array_literal(array_literal, span, true) } @@ -234,53 +233,50 @@ (HirExpression::Literal(constructor(expr)), typ) } - fn elaborate_fmt_string(&mut self, str: String, call_expr_span: Span) -> (HirExpression, Type) { - let re = Regex::new(r"\{([a-zA-Z0-9_]+)\}") - .expect("ICE: an invalid regex pattern was used for checking format strings"); - + fn elaborate_fmt_string( + &mut self, + fragments: Vec<FmtStrFragment>, + length: u32, + ) -> (HirExpression, Type) { let mut fmt_str_idents = Vec::new(); let mut capture_types = Vec::new(); - for field in re.find_iter(&str) { - let matched_str = field.as_str(); - let ident_name = &matched_str[1..(matched_str.len() - 1)]; - - let scope_tree = self.scopes.current_scope_tree(); - let variable = scope_tree.find(ident_name); - - let hir_ident = if let Some((old_value, _)) = variable { - old_value.num_times_used += 1; - old_value.ident.clone() - } else if let Ok((definition_id, _)) = - self.lookup_global(Path::from_single(ident_name.to_string(), call_expr_span)) - { - HirIdent::non_trait_method(definition_id, Location::new(call_expr_span, self.file)) - } else if ident_name.parse::<usize>().is_ok() { - self.push_err(ResolverError::NumericConstantInFormatString { - name: ident_name.to_owned(), - span: call_expr_span, - }); - continue; - } else { - self.push_err(ResolverError::VariableNotDeclared { - name: ident_name.to_owned(), - span: call_expr_span, - }); - continue; - }; + for fragment in &fragments { + if let FmtStrFragment::Interpolation(ident_name, string_span) = fragment { + let scope_tree = self.scopes.current_scope_tree(); + let variable = scope_tree.find(ident_name); + + let hir_ident = if let Some((old_value, _)) = variable { + old_value.num_times_used += 1; + old_value.ident.clone() + } else if let Ok((definition_id, _)) = + self.lookup_global(Path::from_single(ident_name.to_string(), *string_span)) + { + HirIdent::non_trait_method( + definition_id, + Location::new(*string_span, self.file), + ) + } else { + self.push_err(ResolverError::VariableNotDeclared { + name: ident_name.to_owned(), + span: *string_span, + }); + continue; + }; - let hir_expr =
HirExpression::Ident(hir_ident.clone(), None); + let expr_id = self.interner.push_expr(hir_expr); + self.interner.push_expr_location(expr_id, *string_span, self.file); + let typ = self.type_check_variable(hir_ident, expr_id, None); + self.interner.push_expr_type(expr_id, typ.clone()); + capture_types.push(typ); + fmt_str_idents.push(expr_id); + } } - let len = Type::Constant(str.len().into(), Kind::u32()); + let len = Type::Constant(length.into(), Kind::u32()); let typ = Type::FmtString(Box::new(len), Box::new(Type::Tuple(capture_types))); - (HirExpression::Literal(HirLiteral::FmtStr(str, fmt_str_idents)), typ) + (HirExpression::Literal(HirLiteral::FmtStr(fragments, fmt_str_idents, length)), typ) } fn elaborate_prefix(&mut self, prefix: PrefixExpression, span: Span) -> (ExprId, Type) { diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/mod.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/mod.rs index 20d27fbc9ac2..478504a79be9 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/mod.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/mod.rs @@ -440,6 +440,9 @@ impl<'context> Elaborator<'context> { // so we need to reintroduce the same IDs into scope here. for parameter in &func_meta.parameter_idents { let name = self.interner.definition_name(parameter.id).to_owned(); + if name == "_" { + continue; + } let warn_if_unused = !(func_meta.trait_impl.is_some() && name == "self"); self.add_existing_variable_to_scope(name, parameter.clone(), warn_if_unused); } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/patterns.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/patterns.rs index 3928362db11a..3fbdadbbee8b 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/patterns.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/patterns.rs @@ -331,16 +331,18 @@ impl<'context> Elaborator<'context> { let resolver_meta = ResolverMeta { num_times_used: 0, ident: ident.clone(), warn_if_unused }; - let scope = self.scopes.get_mut_scope(); - let old_value = scope.add_key_value(name.clone(), resolver_meta); - - if !allow_shadowing { - if let Some(old_value) = old_value { - self.push_err(ResolverError::DuplicateDefinition { - name, - first_span: old_value.ident.location.span, - second_span: location.span, - }); + if name != "_" { + let scope = self.scopes.get_mut_scope(); + let old_value = scope.add_key_value(name.clone(), resolver_meta); + + if !allow_shadowing { + if let Some(old_value) = old_value { + self.push_err(ResolverError::DuplicateDefinition { + name, + first_span: old_value.ident.location.span, + second_span: location.span, + }); + } } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/types.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/types.rs index 7e06964b563e..0404ae3c2c05 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/types.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/types.rs @@ -1321,11 +1321,23 @@ impl<'context> Elaborator<'context> { { Some(method_id) => Some(HirMethodReference::FuncId(method_id)), None => { - self.push_err(TypeCheckError::UnresolvedMethodCall { - method_name: method_name.to_string(), - object_type: object_type.clone(), - span, - }); + let has_field_with_function_type = + typ.borrow().get_fields_as_written().into_iter().any(|field| { + field.name.0.contents == method_name && field.typ.is_function() + }); + if has_field_with_function_type { + 
self.push_err(TypeCheckError::CannotInvokeStructFieldFunctionType { + method_name: method_name.to_string(), + object_type: object_type.clone(), + span, + }); + } else { + self.push_err(TypeCheckError::UnresolvedMethodCall { + method_name: method_name.to_string(), + object_type: object_type.clone(), + span, + }); + } None } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/display.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/display.rs index 560d11cfa2e0..29d1448f07e6 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/display.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/display.rs @@ -661,7 +661,7 @@ fn remove_interned_in_literal(interner: &NodeInterner, literal: Literal) -> Lite | Literal::Integer(_, _) | Literal::Str(_) | Literal::RawStr(_, _) - | Literal::FmtStr(_) + | Literal::FmtStr(_, _) | Literal::Unit => literal, } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/errors.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/errors.rs index 446c4dae2d33..3df20b39209e 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/errors.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/errors.rs @@ -240,6 +240,9 @@ pub enum InterpreterError { err: Box, location: Location, }, + CannotInterpretFormatStringWithErrors { + location: Location, + }, // These cases are not errors, they are just used to prevent us from running more code // until the loop can be resumed properly. These cases will never be displayed to users. @@ -315,7 +318,8 @@ impl InterpreterError { | InterpreterError::TypeAnnotationsNeededForMethodCall { location } | InterpreterError::CannotResolveExpression { location, .. } | InterpreterError::CannotSetFunctionBody { location, .. } - | InterpreterError::UnknownArrayLength { location, .. } => *location, + | InterpreterError::UnknownArrayLength { location, .. } + | InterpreterError::CannotInterpretFormatStringWithErrors { location } => *location, InterpreterError::FailedToParseMacro { error, file, .. } => { Location::new(error.span(), *file) @@ -664,6 +668,12 @@ impl<'a> From<&'a InterpreterError> for CustomDiagnostic { let secondary = format!("Evaluating the length failed with: `{err}`"); CustomDiagnostic::simple_error(msg, secondary, location.span) } + InterpreterError::CannotInterpretFormatStringWithErrors { location } => { + let msg = "Cannot interpret format string with errors".to_string(); + let secondary = + "Some of the variables to interpolate could not be evaluated".to_string(); + CustomDiagnostic::simple_error(msg, secondary, location.span) + } } } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/hir_to_display_ast.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/hir_to_display_ast.rs index 5540a199cec2..9338c0fc37f1 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/hir_to_display_ast.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/hir_to_display_ast.rs @@ -121,9 +121,9 @@ impl HirExpression { HirExpression::Literal(HirLiteral::Str(string)) => { ExpressionKind::Literal(Literal::Str(string.clone())) } - HirExpression::Literal(HirLiteral::FmtStr(string, _exprs)) => { + HirExpression::Literal(HirLiteral::FmtStr(fragments, _exprs, length)) => { // TODO: Is throwing away the exprs here valid? 
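// (Presumably yes for display purposes: each FmtStrFragment::Interpolation keeps its identifier name, so the displayed source reads the same even without the resolved expression ids.)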
- ExpressionKind::Literal(Literal::FmtStr(string.clone())) + ExpressionKind::Literal(Literal::FmtStr(fragments.clone(), *length)) } HirExpression::Literal(HirLiteral::Unit) => ExpressionKind::Literal(Literal::Unit), HirExpression::Block(expr) => ExpressionKind::Block(expr.to_display_ast(interner)), diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter.rs index 49fd86b73bbf..dfa55a9d79b6 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter.rs @@ -20,7 +20,7 @@ use crate::monomorphization::{ perform_impl_bindings, perform_instantiation_bindings, resolve_trait_method, undo_instantiation_bindings, }; -use crate::token::Tokens; +use crate::token::{FmtStrFragment, Tokens}; use crate::TypeVariable; use crate::{ hir_def::{ @@ -623,8 +623,8 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { self.evaluate_integer(value, is_negative, id) } HirLiteral::Str(string) => Ok(Value::String(Rc::new(string))), - HirLiteral::FmtStr(string, captures) => { - self.evaluate_format_string(string, captures, id) + HirLiteral::FmtStr(fragments, captures, _length) => { + self.evaluate_format_string(fragments, captures, id) } HirLiteral::Array(array) => self.evaluate_array(array, id), HirLiteral::Slice(array) => self.evaluate_slice(array, id), @@ -633,7 +633,7 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { fn evaluate_format_string( &mut self, - string: String, + fragments: Vec<FmtStrFragment>, captures: Vec<ExprId>, id: ExprId, ) -> IResult<Value> { @@ -644,13 +644,12 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { let mut values: VecDeque<_> = captures.into_iter().map(|capture| self.evaluate(capture)).collect::<Result<_, _>>()?; - for character in string.chars() { - match character { - '\\' => escaped = true, - '{' if !escaped => consuming = true, - '}' if !escaped && consuming => { - consuming = false; - + for fragment in fragments { + match fragment { + FmtStrFragment::String(string) => { + result.push_str(&string); + } + FmtStrFragment::Interpolation(_, span) => { if let Some(value) = values.pop_front() { // When interpolating a quoted value inside a format string, we don't include the // surrounding `quote {` ... `}` as if we are unquoting the quoted value inside the string. @@ -665,13 +664,15 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { } else { result.push_str(&value.display(self.elaborator.interner).to_string()); } + } else { + // If we can't find a value for this fragment it means the interpolated value was not + // found or it errored. In this case we error here as well.
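+ // (For example, f"{undeclared}" elaborates with a VariableNotDeclared error and pushes no capture, so values.pop_front() comes up empty for that interpolation.)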
+ let location = self.elaborator.interner.expr_location(&id); + return Err(InterpreterError::CannotInterpretFormatStringWithErrors { + location, + }); } } - other if !consuming => { - escaped = false; - result.push(other); - } - _ => (), } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/errors.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/errors.rs index 80bd5247ee6a..774836f89921 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/errors.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/errors.rs @@ -77,8 +77,6 @@ pub enum ResolverError { MutableReferenceToImmutableVariable { variable: String, span: Span }, #[error("Mutable references to array indices are unsupported")] MutableReferenceToArrayElement { span: Span }, - #[error("Numeric constants should be printed without formatting braces")] - NumericConstantInFormatString { name: String, span: Span }, #[error("Closure environment must be a tuple or unit type")] InvalidClosureEnvironment { typ: Type, span: Span }, #[error("Nested slices, i.e. slices within an array or slice, are not supported")] @@ -223,11 +221,21 @@ impl<'a> From<&'a ResolverError> for Diagnostic { *span, ) } - ResolverError::VariableNotDeclared { name, span } => Diagnostic::simple_error( - format!("cannot find `{name}` in this scope "), - "not found in this scope".to_string(), - *span, - ), + ResolverError::VariableNotDeclared { name, span } => { + if name == "_" { + Diagnostic::simple_error( + "in expressions, `_` can only be used on the left-hand side of an assignment".to_string(), + "`_` not allowed here".to_string(), + *span, + ) + } else { + Diagnostic::simple_error( + format!("cannot find `{name}` in this scope"), + "not found in this scope".to_string(), + *span, + ) + } + }, ResolverError::PathIsNotIdent { span } => Diagnostic::simple_error( "cannot use path as an identifier".to_string(), String::new(), @@ -368,11 +376,6 @@ impl<'a> From<&'a ResolverError> for Diagnostic { ResolverError::MutableReferenceToArrayElement { span } => { Diagnostic::simple_error("Mutable references to array elements are currently unsupported".into(), "Try storing the element in a fresh variable first".into(), *span) }, - ResolverError::NumericConstantInFormatString { name, span } => Diagnostic::simple_error( - format!("cannot find `{name}` in this scope "), - "Numeric constants should be printed without formatting braces".to_string(), - *span, - ), ResolverError::InvalidClosureEnvironment { span, typ } => Diagnostic::simple_error( format!("{typ} is not a valid closure environment type"), "Closure environment must be a tuple or unit type".to_string(), *span), diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/errors.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/errors.rs index a6b6120986e6..dfa431157e34 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/errors.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/errors.rs @@ -99,6 +99,8 @@ pub enum TypeCheckError { CannotMutateImmutableVariable { name: String, span: Span }, #[error("No method named '{method_name}' found for type '{object_type}'")] UnresolvedMethodCall { method_name: String, object_type: Type, span: Span }, + #[error("Cannot invoke function field '{method_name}' on type '{object_type}' as a method")] + CannotInvokeStructFieldFunctionType { method_name: String, object_type: Type, span: Span }, #[error("Integers must have the same signedness LHS is {sign_x:?}, RHS is 
{sign_y:?}")] IntegerSignedness { sign_x: Signedness, sign_y: Signedness, span: Span }, #[error("Integers must have the same bit width LHS is {bit_width_x}, RHS is {bit_width_y}")] @@ -511,6 +513,13 @@ impl<'a> From<&'a TypeCheckError> for Diagnostic { TypeCheckError::CyclicType { typ: _, span } => { Diagnostic::simple_error(error.to_string(), "Cyclic types have unlimited size and are prohibited in Noir".into(), *span) } + TypeCheckError::CannotInvokeStructFieldFunctionType { method_name, object_type, span } => { + Diagnostic::simple_error( + format!("Cannot invoke function field '{method_name}' on type '{object_type}' as a method"), + format!("to call the function stored in '{method_name}', surround the field access with parentheses: '(', ')'"), + *span, + ) + }, } } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir_def/expr.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir_def/expr.rs index 5d3fe632a745..e243fc88cff4 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir_def/expr.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir_def/expr.rs @@ -7,7 +7,7 @@ use crate::hir::type_check::generics::TraitGenerics; use crate::node_interner::{ DefinitionId, DefinitionKind, ExprId, FuncId, NodeInterner, StmtId, TraitMethodId, }; -use crate::token::Tokens; +use crate::token::{FmtStrFragment, Tokens}; use crate::Shared; use super::stmt::HirPattern; @@ -114,7 +114,7 @@ pub enum HirLiteral { Bool(bool), Integer(FieldElement, bool), //true for negative integer and false for positive Str(String), - FmtStr(String, Vec), + FmtStr(Vec, Vec, u32 /* length */), Unit, } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir_def/types.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir_def/types.rs index 659fafbbcbbb..2c9a44c079d0 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir_def/types.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir_def/types.rs @@ -1087,6 +1087,14 @@ impl Type { } } + pub fn is_function(&self) -> bool { + match self.follow_bindings_shallow().as_ref() { + Type::Function(..) => true, + Type::Alias(alias_type, _) => alias_type.borrow().typ.is_function(), + _ => false, + } + } + /// True if this type can be used as a parameter to `main` or a contract function. /// This is only false for unsized types like slices or slices that do not make sense /// as a program input such as named generics or mutable references. diff --git a/noir/noir-repo/compiler/noirc_frontend/src/lexer/errors.rs b/noir/noir-repo/compiler/noirc_frontend/src/lexer/errors.rs index 8d799ef35d1a..f95ccba061a6 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/lexer/errors.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/lexer/errors.rs @@ -30,6 +30,10 @@ pub enum LexerErrorKind { UnterminatedBlockComment { span: Span }, #[error("Unterminated string literal")] UnterminatedStringLiteral { span: Span }, + #[error("Invalid format string: expected '}}', found {found:?}")] + InvalidFormatString { found: char, span: Span }, + #[error("Invalid format string: expected letter or underscore, found '}}'")] + EmptyFormatStringInterpolation { span: Span }, #[error( "'\\{escaped}' is not a valid escape sequence. Use '\\' for a literal backslash character." )] @@ -68,6 +72,8 @@ impl LexerErrorKind { LexerErrorKind::LogicalAnd { span } => *span, LexerErrorKind::UnterminatedBlockComment { span } => *span, LexerErrorKind::UnterminatedStringLiteral { span } => *span, + LexerErrorKind::InvalidFormatString { span, .. } => *span, + LexerErrorKind::EmptyFormatStringInterpolation { span, .. 
} => *span, LexerErrorKind::InvalidEscape { span, .. } => *span, LexerErrorKind::InvalidQuoteDelimiter { delimiter } => delimiter.to_span(), LexerErrorKind::NonAsciiComment { span, .. } => *span, @@ -130,6 +136,32 @@ impl LexerErrorKind { LexerErrorKind::UnterminatedBlockComment { span } => ("Unterminated block comment".to_string(), "Unterminated block comment".to_string(), *span), LexerErrorKind::UnterminatedStringLiteral { span } => ("Unterminated string literal".to_string(), "Unterminated string literal".to_string(), *span), + LexerErrorKind::InvalidFormatString { found, span } => { + if found == &'}' { + ( + "Invalid format string: unmatched '}}' found".to_string(), + "If you intended to print '}', you can escape it using '}}'".to_string(), + *span, + ) + } else { + ( + format!("Invalid format string: expected '}}', found {found:?}"), + if found == &'.' { + "Field access isn't supported in format strings".to_string() + } else { + "If you intended to print '{', you can escape it using '{{'".to_string() + }, + *span, + ) + } + } + LexerErrorKind::EmptyFormatStringInterpolation { span } => { + ( + "Invalid format string: expected letter or underscore, found '}}'".to_string(), + "If you intended to print '{' or '}', you can escape them using '{{' and '}}' respectively".to_string(), + *span, + ) + } LexerErrorKind::InvalidEscape { escaped, span } => (format!("'\\{escaped}' is not a valid escape sequence. Use '\\' for a literal backslash character."), "Invalid escape sequence".to_string(), *span), LexerErrorKind::InvalidQuoteDelimiter { delimiter } => { diff --git a/noir/noir-repo/compiler/noirc_frontend/src/lexer/lexer.rs b/noir/noir-repo/compiler/noirc_frontend/src/lexer/lexer.rs index 68dc142ff10b..a5c4b2cd7721 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/lexer/lexer.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/lexer/lexer.rs @@ -2,7 +2,7 @@ use crate::token::DocStyle; use super::{ errors::LexerErrorKind, - token::{IntType, Keyword, SpannedToken, Token, Tokens}, + token::{FmtStrFragment, IntType, Keyword, SpannedToken, Token, Tokens}, }; use acvm::{AcirField, FieldElement}; use noirc_errors::{Position, Span}; @@ -411,51 +411,190 @@ impl<'a> Lexer<'a> { let start = self.position; let mut string = String::new(); - while let Some(next) = self.next_char() { - let char = match next { - '"' => break, - '\\' => match self.next_char() { - Some('r') => '\r', - Some('n') => '\n', - Some('t') => '\t', - Some('0') => '\0', - Some('"') => '"', - Some('\\') => '\\', - Some(escaped) => { - let span = Span::inclusive(start, self.position); - return Err(LexerErrorKind::InvalidEscape { escaped, span }); - } - None => { - let span = Span::inclusive(start, self.position); - return Err(LexerErrorKind::UnterminatedStringLiteral { span }); - } - }, - other => other, - }; + loop { + if let Some(next) = self.next_char() { + let char = match next { + '"' => break, + '\\' => match self.next_char() { + Some('r') => '\r', + Some('n') => '\n', + Some('t') => '\t', + Some('0') => '\0', + Some('"') => '"', + Some('\\') => '\\', + Some(escaped) => { + let span = Span::inclusive(start, self.position); + return Err(LexerErrorKind::InvalidEscape { escaped, span }); + } + None => { + let span = Span::inclusive(start, self.position); + return Err(LexerErrorKind::UnterminatedStringLiteral { span }); + } + }, + other => other, + }; - string.push(char); + string.push(char); + } else { + let span = Span::inclusive(start, self.position); + return Err(LexerErrorKind::UnterminatedStringLiteral { span }); + } } let 
str_literal_token = Token::Str(string); - let end = self.position; Ok(str_literal_token.into_span(start, end)) } - // This differs from `eat_string_literal` in that we want the leading `f` to be captured in the Span fn eat_fmt_string(&mut self) -> SpannedTokenResult { let start = self.position; - self.next_char(); - let str_literal = self.eat_while(None, |ch| ch != '"'); + let mut fragments = Vec::new(); + let mut length = 0; + + loop { + // String fragment until '{' or '"' + let mut string = String::new(); + let mut found_curly = false; + + loop { + if let Some(next) = self.next_char() { + let char = match next { + '"' => break, + '\\' => match self.next_char() { + Some('r') => '\r', + Some('n') => '\n', + Some('t') => '\t', + Some('0') => '\0', + Some('"') => '"', + Some('\\') => '\\', + Some(escaped) => { + let span = Span::inclusive(start, self.position); + return Err(LexerErrorKind::InvalidEscape { escaped, span }); + } + None => { + let span = Span::inclusive(start, self.position); + return Err(LexerErrorKind::UnterminatedStringLiteral { span }); + } + }, + '{' if self.peek_char_is('{') => { + self.next_char(); + '{' + } + '}' if self.peek_char_is('}') => { + self.next_char(); + '}' + } + '}' => { + let error_position = self.position; + + // Keep consuming chars until we find the closing double quote + self.skip_until_string_end(); + + let span = Span::inclusive(error_position, error_position); + return Err(LexerErrorKind::InvalidFormatString { found: '}', span }); + } + '{' => { + found_curly = true; + break; + } + other => other, + }; + + string.push(char); + length += 1; + + if char == '{' || char == '}' { + // This might look a bit strange, but if there's `{{` or `}}` in the format string + // then it will be `{` and `}` in the string fragment respectively, but on the codegen + // phase it will be translated back to `{{` and `}}` to avoid executing an interpolation, + // thus the actual length of the codegen'd string will be one more than what we get here. + // + // We could just make the fragment include the double curly braces, but then the interpreter + // would need to undo the curly braces, so it's simpler to add them during codegen. 
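+ // For example, f"{{x}}" lexes into the single String fragment "{x}" but is counted as length 5, the length of the codegen'd "{{x}}".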
+ length += 1; + } + } else { + let span = Span::inclusive(start, self.position); + return Err(LexerErrorKind::UnterminatedStringLiteral { span }); + } + } + + if !string.is_empty() { + fragments.push(FmtStrFragment::String(string)); + } + + if !found_curly { + break; + } + + length += 1; // for the curly brace + + // Interpolation fragment until '}' or '"' + let mut string = String::new(); + let interpolation_start = self.position + 1; // + 1 because we are at '{' + let mut first_char = true; + while let Some(next) = self.next_char() { + let char = match next { + '}' => { + if string.is_empty() { + let error_position = self.position; + + // Keep consuming chars until we find the closing double quote + self.skip_until_string_end(); + + let span = Span::inclusive(error_position, error_position); + return Err(LexerErrorKind::EmptyFormatStringInterpolation { span }); + } + + break; + } + other => { + let is_valid_char = if first_char { + other.is_ascii_alphabetic() || other == '_' + } else { + other.is_ascii_alphanumeric() || other == '_' + }; + if !is_valid_char { + let error_position = self.position; + + // Keep consuming chars until we find the closing double quote + // (unless we bumped into a double quote now, in which case we are done) + if other != '"' { + self.skip_until_string_end(); + } - let str_literal_token = Token::FmtStr(str_literal); + let span = Span::inclusive(error_position, error_position); + return Err(LexerErrorKind::InvalidFormatString { found: other, span }); + } + first_char = false; + other + } + }; + length += 1; + string.push(char); + } + + length += 1; // for the closing curly brace - self.next_char(); // Advance past the closing quote + let interpolation_span = Span::from(interpolation_start..self.position); + fragments.push(FmtStrFragment::Interpolation(string, interpolation_span)); + } + let token = Token::FmtStr(fragments, length); let end = self.position; - Ok(str_literal_token.into_span(start, end)) + Ok(token.into_span(start, end)) + } + + fn skip_until_string_end(&mut self) { + while let Some(next) = self.next_char() { + if next == '\'' && self.peek_char_is('"') { + self.next_char(); + } else if next == '"' { + break; + } + } } fn eat_format_string_or_alpha_numeric(&mut self) -> SpannedTokenResult { @@ -962,6 +1101,155 @@ mod tests { } } + #[test] + fn test_eat_string_literal_with_escapes() { + let input = "let _word = \"hello\\n\\t\""; + + let expected = vec![ + Token::Keyword(Keyword::Let), + Token::Ident("_word".to_string()), + Token::Assign, + Token::Str("hello\n\t".to_string()), + ]; + let mut lexer = Lexer::new(input); + + for token in expected.into_iter() { + let got = lexer.next_token().unwrap(); + assert_eq!(got, token); + } + } + + #[test] + fn test_eat_string_literal_missing_double_quote() { + let input = "\"hello"; + let mut lexer = Lexer::new(input); + assert!(matches!( + lexer.next_token(), + Err(LexerErrorKind::UnterminatedStringLiteral { .. 
}) + )); + } + + #[test] + fn test_eat_fmt_string_literal_without_interpolations() { + let input = "let _word = f\"hello\""; + + let expected = vec![ + Token::Keyword(Keyword::Let), + Token::Ident("_word".to_string()), + Token::Assign, + Token::FmtStr(vec![FmtStrFragment::String("hello".to_string())], 5), + ]; + let mut lexer = Lexer::new(input); + + for token in expected.into_iter() { + let got = lexer.next_token().unwrap(); + assert_eq!(got, token); + } + } + + #[test] + fn test_eat_fmt_string_literal_with_escapes_without_interpolations() { + let input = "let _word = f\"hello\\n\\t{{x}}\""; + + let expected = vec![ + Token::Keyword(Keyword::Let), + Token::Ident("_word".to_string()), + Token::Assign, + Token::FmtStr(vec![FmtStrFragment::String("hello\n\t{x}".to_string())], 12), + ]; + let mut lexer = Lexer::new(input); + + for token in expected.into_iter() { + let got = lexer.next_token().unwrap(); + assert_eq!(got, token); + } + } + + #[test] + fn test_eat_fmt_string_literal_with_interpolations() { + let input = "let _word = f\"hello {world} and {_another} {vAr_123}\""; + + let expected = vec![ + Token::Keyword(Keyword::Let), + Token::Ident("_word".to_string()), + Token::Assign, + Token::FmtStr( + vec![ + FmtStrFragment::String("hello ".to_string()), + FmtStrFragment::Interpolation("world".to_string(), Span::from(21..26)), + FmtStrFragment::String(" and ".to_string()), + FmtStrFragment::Interpolation("_another".to_string(), Span::from(33..41)), + FmtStrFragment::String(" ".to_string()), + FmtStrFragment::Interpolation("vAr_123".to_string(), Span::from(44..51)), + ], + 38, + ), + ]; + let mut lexer = Lexer::new(input); + + for token in expected.into_iter() { + let got = lexer.next_token().unwrap().into_token(); + assert_eq!(got, token); + } + } + + #[test] + fn test_eat_fmt_string_literal_missing_double_quote() { + let input = "f\"hello"; + let mut lexer = Lexer::new(input); + assert!(matches!( + lexer.next_token(), + Err(LexerErrorKind::UnterminatedStringLiteral { .. }) + )); + } + + #[test] + fn test_eat_fmt_string_literal_invalid_char_in_interpolation() { + let input = "f\"hello {foo.bar}\" true"; + let mut lexer = Lexer::new(input); + assert!(matches!(lexer.next_token(), Err(LexerErrorKind::InvalidFormatString { .. }))); + + // Make sure the lexer went past the ending double quote for better recovery + let token = lexer.next_token().unwrap().into_token(); + assert!(matches!(token, Token::Bool(true))); + } + + #[test] + fn test_eat_fmt_string_literal_double_quote_inside_interpolation() { + let input = "f\"hello {world\" true"; + let mut lexer = Lexer::new(input); + assert!(matches!(lexer.next_token(), Err(LexerErrorKind::InvalidFormatString { .. }))); + + // Make sure the lexer stopped parsing the string literal when it found \" inside the interpolation + let token = lexer.next_token().unwrap().into_token(); + assert!(matches!(token, Token::Bool(true))); + } + + #[test] + fn test_eat_fmt_string_literal_unmatched_closing_curly() { + let input = "f\"hello }\" true"; + let mut lexer = Lexer::new(input); + assert!(matches!(lexer.next_token(), Err(LexerErrorKind::InvalidFormatString { .. 
}))); + + // Make sure the lexer went past the ending double quote for better recovery + let token = lexer.next_token().unwrap().into_token(); + assert!(matches!(token, Token::Bool(true))); + } + + #[test] + fn test_eat_fmt_string_literal_empty_interpolation() { + let input = "f\"{}\" true"; + let mut lexer = Lexer::new(input); + assert!(matches!( + lexer.next_token(), + Err(LexerErrorKind::EmptyFormatStringInterpolation { .. }) + )); + + // Make sure the lexer went past the ending double quote for better recovery + let token = lexer.next_token().unwrap().into_token(); + assert!(matches!(token, Token::Bool(true))); + } + #[test] fn test_eat_integer_literals() { let test_cases: Vec<(&str, Token)> = vec![ @@ -1151,7 +1439,7 @@ mod tests { format!("let s = r#####\"{s}\"#####;"), ], ), - (Some(Token::FmtStr("".to_string())), vec![format!("assert(x == y, f\"{s}\");")]), + (Some(Token::FmtStr(vec![], 0)), vec![format!("assert(x == y, f\"{s}\");")]), // expected token not found // (Some(Token::LineComment("".to_string(), None)), vec![ (None, vec![format!("//{s}"), format!("// {s}")]), @@ -1196,11 +1484,16 @@ mod tests { Err(LexerErrorKind::InvalidIntegerLiteral { .. }) | Err(LexerErrorKind::UnexpectedCharacter { .. }) | Err(LexerErrorKind::NonAsciiComment { .. }) - | Err(LexerErrorKind::UnterminatedBlockComment { .. }) => { + | Err(LexerErrorKind::UnterminatedBlockComment { .. }) + | Err(LexerErrorKind::UnterminatedStringLiteral { .. }) + | Err(LexerErrorKind::InvalidFormatString { .. }) => { expected_token_found = true; } Err(err) => { - panic!("Unexpected lexer error found: {:?}", err) + panic!( + "Unexpected lexer error found {:?} for input string {:?}", + err, blns_program_str + ) } } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/lexer/token.rs b/noir/noir-repo/compiler/noirc_frontend/src/lexer/token.rs index 836161c7c9fb..f35515045dbe 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/lexer/token.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/lexer/token.rs @@ -25,7 +25,7 @@ pub enum BorrowedToken<'input> { Str(&'input str), /// the u8 is the number of hashes, i.e. r###.. RawStr(&'input str, u8), - FmtStr(&'input str), + FmtStr(&'input [FmtStrFragment], u32 /* length */), Keyword(Keyword), IntType(IntType), AttributeStart { @@ -136,7 +136,7 @@ pub enum Token { Str(String), /// the u8 is the number of hashes, i.e. r###.. 
RawStr(String, u8), - FmtStr(String), + FmtStr(Vec<FmtStrFragment>, u32 /* length */), Keyword(Keyword), IntType(IntType), AttributeStart { @@ -255,7 +255,7 @@ pub fn token_to_borrowed_token(token: &Token) -> BorrowedToken<'_> { Token::Int(n) => BorrowedToken::Int(*n), Token::Bool(b) => BorrowedToken::Bool(*b), Token::Str(ref b) => BorrowedToken::Str(b), - Token::FmtStr(ref b) => BorrowedToken::FmtStr(b), + Token::FmtStr(ref b, length) => BorrowedToken::FmtStr(b, *length), Token::RawStr(ref b, hashes) => BorrowedToken::RawStr(b, *hashes), Token::Keyword(k) => BorrowedToken::Keyword(*k), Token::AttributeStart { is_inner, is_tag } => { @@ -312,6 +312,35 @@ pub fn token_to_borrowed_token(token: &Token) -> BorrowedToken<'_> { } } +#[derive(Clone, PartialEq, Eq, Hash, Debug, PartialOrd, Ord)] +pub enum FmtStrFragment { + String(String), + Interpolation(String, Span), +} + +impl Display for FmtStrFragment { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + FmtStrFragment::String(string) => { + // Undo the escapes when displaying the fmt string + let string = string + .replace('{', "{{") + .replace('}', "}}") + .replace('\r', "\\r") + .replace('\n', "\\n") + .replace('\t', "\\t") + .replace('\0', "\\0") + .replace('\'', "\\'") + .replace('\"', "\\\""); + write!(f, "{}", string) + } + FmtStrFragment::Interpolation(string, _span) => { + write!(f, "{{{}}}", string) + } + } + } +} + #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, PartialOrd, Ord)] pub enum DocStyle { Outer, @@ -375,7 +404,7 @@ impl fmt::Display for Token { Token::Int(n) => write!(f, "{}", n), Token::Bool(b) => write!(f, "{b}"), Token::Str(ref b) => write!(f, "{b:?}"), - Token::FmtStr(ref b) => write!(f, "f{b:?}"), + Token::FmtStr(ref b, _length) => write!(f, "f{b:?}"), Token::RawStr(ref b, hashes) => { let h: String = std::iter::once('#').cycle().take(hashes as usize).collect(); write!(f, "r{h}{b:?}{h}") } @@ -515,7 +544,7 @@ impl Token { | Token::Bool(_) | Token::Str(_) | Token::RawStr(..) - | Token::FmtStr(_) => TokenKind::Literal, + | Token::FmtStr(_, _) => TokenKind::Literal, Token::Keyword(_) => TokenKind::Keyword, Token::UnquoteMarker(_) => TokenKind::UnquoteMarker, Token::Quote(_) => TokenKind::Quote, diff --git a/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/ast.rs b/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/ast.rs index 8f6817dc15d0..c9ae3438e424 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/ast.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/ast.rs @@ -7,11 +7,11 @@ use noirc_errors::{ Location, }; -use crate::hir_def::function::FunctionSignature; use crate::{ ast::{BinaryOpKind, IntegerBitSize, Signedness, Visibility}, token::{Attributes, FunctionAttribute}, }; +use crate::{hir_def::function::FunctionSignature, token::FmtStrFragment}; use serde::{Deserialize, Serialize}; use super::HirType; @@ -48,6 +48,12 @@ pub enum Expression { Continue, } +impl Expression { + pub fn is_array_or_slice_literal(&self) -> bool { + matches!(self, Expression::Literal(Literal::Array(_) | Literal::Slice(_))) + } +} + /// A definition is either a local (variable), function, or is a built-in /// function that will be generated or referenced by the compiler later.
#[derive(Debug, Clone, PartialEq, Eq, Hash)] @@ -106,7 +112,7 @@ pub enum Literal { Bool(bool), Unit, Str(String), - FmtStr(String, u64, Box<Expression>), + FmtStr(Vec<FmtStrFragment>, u64, Box<Expression>), } #[derive(Debug, Clone, Hash)] diff --git a/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/mod.rs b/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/mod.rs index 050f844146ac..b31a5744d09c 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/mod.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/mod.rs @@ -12,6 +12,7 @@ use crate::ast::{FunctionKind, IntegerBitSize, Signedness, UnaryOp, Visibility}; use crate::hir::comptime::InterpreterError; use crate::hir::type_check::{NoMatchingImplFoundError, TypeCheckError}; use crate::node_interner::{ExprId, ImplSearchErrorKind}; +use crate::token::FmtStrFragment; use crate::{ debug::DebugInstrumenter, hir_def::{ @@ -417,10 +418,10 @@ impl<'interner> Monomorphizer<'interner> { let expr = match self.interner.expression(&expr) { HirExpression::Ident(ident, generics) => self.ident(ident, expr, generics)?, HirExpression::Literal(HirLiteral::Str(contents)) => Literal(Str(contents)), - HirExpression::Literal(HirLiteral::FmtStr(contents, idents)) => { + HirExpression::Literal(HirLiteral::FmtStr(fragments, idents, _length)) => { let fields = try_vecmap(idents, |ident| self.expr(ident))?; Literal(FmtStr( - contents, + fragments, fields.len() as u64, Box::new(ast::Expression::Tuple(fields)), )) @@ -1846,7 +1847,7 @@ impl<'interner> Monomorphizer<'interner> { _ => unreachable!("ICE: format string fields should be structured in a tuple, but got a {zeroed_tuple}"), }; ast::Expression::Literal(ast::Literal::FmtStr( - "\0".repeat(*length as usize), + vec![FmtStrFragment::String("\0".repeat(*length as usize))], fields_len, Box::new(zeroed_tuple), )) diff --git a/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/printer.rs b/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/printer.rs index b6421b26a03d..9c1072a41179 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/printer.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/printer.rs @@ -105,9 +105,11 @@ impl AstPrinter { super::ast::Literal::Integer(x, _, _, _) => x.fmt(f), super::ast::Literal::Bool(x) => x.fmt(f), super::ast::Literal::Str(s) => s.fmt(f), - super::ast::Literal::FmtStr(s, _, _) => { + super::ast::Literal::FmtStr(fragments, _, _) => { write!(f, "f\"")?; - s.fmt(f)?; + for fragment in fragments { + fragment.fmt(f)?; + } write!(f, "\"") } super::ast::Literal::Unit => { diff --git a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser.rs b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser.rs index c2f7b7818738..fcc58c5d8331 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser.rs @@ -5,7 +5,7 @@ use noirc_errors::Span; use crate::{ ast::{Ident, ItemVisibility}, lexer::{Lexer, SpannedTokenResult}, - token::{IntType, Keyword, SpannedToken, Token, TokenKind, Tokens}, + token::{FmtStrFragment, IntType, Keyword, SpannedToken, Token, TokenKind, Tokens}, }; use super::{labels::ParsingRuleLabel, ParsedModule, ParserError, ParserErrorReason}; @@ -294,11 +294,11 @@ impl<'a> Parser<'a> { } } - fn eat_fmt_str(&mut self) -> Option<String> { + fn eat_fmt_str(&mut self) -> Option<(Vec<FmtStrFragment>, u32)> { if matches!(self.token.token(), Token::FmtStr(..)) { let token = self.bump(); match token.into_token() { - Token::FmtStr(string) => Some(string), +
Token::FmtStr(fragments, length) => Some((fragments, length)), _ => unreachable!(), } } else { diff --git a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/expression.rs b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/expression.rs index 06f51b168427..526a0c3dd6e9 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/expression.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/expression.rs @@ -428,8 +428,8 @@ impl<'a> Parser<'a> { Some(if self.eat_colon() { let expression = self.parse_expression_or_error(); (ident, expression) - } else if self.at(Token::Assign) { - // If we find '=' instead of ':', assume the user meant ':`, error and continue + } else if self.at(Token::DoubleColon) || self.at(Token::Assign) { + // If we find '=' or '::' instead of ':', assume the user meant ':`, error and continue self.expected_token(Token::Colon); self.bump(); let expression = self.parse_expression_or_error(); @@ -577,7 +577,7 @@ impl<'a> Parser<'a> { /// BlockExpression = Block fn parse_literal(&mut self) -> Option { if let Some(bool) = self.eat_bool() { - return Some(ExpressionKind::Literal(Literal::Bool(bool))); + return Some(ExpressionKind::boolean(bool)); } if let Some(int) = self.eat_int() { @@ -585,15 +585,15 @@ impl<'a> Parser<'a> { } if let Some(string) = self.eat_str() { - return Some(ExpressionKind::Literal(Literal::Str(string))); + return Some(ExpressionKind::string(string)); } if let Some((string, n)) = self.eat_raw_str() { - return Some(ExpressionKind::Literal(Literal::RawStr(string, n))); + return Some(ExpressionKind::raw_string(string, n)); } - if let Some(string) = self.eat_fmt_str() { - return Some(ExpressionKind::Literal(Literal::FmtStr(string))); + if let Some((fragments, length)) = self.eat_fmt_str() { + return Some(ExpressionKind::format_string(fragments, length)); } if let Some(tokens) = self.eat_quote() { @@ -865,10 +865,11 @@ mod tests { fn parses_fmt_str() { let src = "f\"hello\""; let expr = parse_expression_no_errors(src); - let ExpressionKind::Literal(Literal::FmtStr(string)) = expr.kind else { + let ExpressionKind::Literal(Literal::FmtStr(fragments, length)) = expr.kind else { panic!("Expected format string literal"); }; - assert_eq!(string, "hello"); + assert_eq!(fragments[0].to_string(), "hello"); + assert_eq!(length, 5); } #[test] @@ -1369,6 +1370,34 @@ mod tests { assert_eq!(expr.to_string(), "y"); } + #[test] + fn parses_constructor_recovers_if_double_colon_instead_of_colon() { + let src = " + Foo { x: 1, y:: z } + ^^ + "; + let (src, span) = get_source_with_error_span(src); + let mut parser = Parser::for_str(&src); + let expr = parser.parse_expression_or_error(); + + let error = get_single_error(&parser.errors, span); + assert_eq!(error.to_string(), "Expected a ':' but found '::'"); + + let ExpressionKind::Constructor(mut constructor) = expr.kind else { + panic!("Expected constructor"); + }; + assert_eq!(constructor.typ.to_string(), "Foo"); + assert_eq!(constructor.fields.len(), 2); + + let (name, expr) = constructor.fields.remove(0); + assert_eq!(name.to_string(), "x"); + assert_eq!(expr.to_string(), "1"); + + let (name, expr) = constructor.fields.remove(0); + assert_eq!(name.to_string(), "y"); + assert_eq!(expr.to_string(), "z"); + } + #[test] fn parses_parses_if_true() { let src = "if true { 1 }"; diff --git a/noir/noir-repo/compiler/noirc_frontend/src/tests.rs b/noir/noir-repo/compiler/noirc_frontend/src/tests.rs index 605236c8dda0..8ddf1b571e62 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/tests.rs +++ 
b/noir/noir-repo/compiler/noirc_frontend/src/tests.rs @@ -1209,8 +1209,6 @@ fn resolve_fmt_strings() { let string = f"this is i: {i}"; println(string); - println(f"I want to print {0}"); - let new_val = 10; println(f"random_string{new_val}{new_val}"); } @@ -1220,7 +1218,7 @@ fn resolve_fmt_strings() { "#; let errors = get_program_errors(src); - assert!(errors.len() == 5, "Expected 5 errors, got: {:?}", errors); + assert!(errors.len() == 3, "Expected 3 errors, got: {:?}", errors); for (err, _file_id) in errors { match &err { @@ -1229,21 +1227,13 @@ fn resolve_fmt_strings() { }) => { assert_eq!(name, "i"); } - CompilationError::ResolverError(ResolverError::NumericConstantInFormatString { - name, - .. - }) => { - assert_eq!(name, "0"); - } CompilationError::TypeError(TypeCheckError::UnusedResultError { expr_type: _, expr_span, }) => { let a = src.get(expr_span.start() as usize..expr_span.end() as usize).unwrap(); assert!( - a == "println(string)" - || a == "println(f\"I want to print {0}\")" - || a == "println(f\"random_string{new_val}{new_val}\")" + a == "println(string)" || a == "println(f\"random_string{new_val}{new_val}\")" ); } _ => unimplemented!(), @@ -3752,6 +3742,35 @@ fn allows_struct_with_generic_infix_type_as_main_input_3() { assert_no_errors(src); } +#[test] +fn errors_with_better_message_when_trying_to_invoke_struct_field_that_is_a_function() { + let src = r#" + pub struct Foo { + wrapped: fn(Field) -> bool, + } + + impl Foo { + fn call(self) -> bool { + self.wrapped(1) + } + } + + fn main() {} + "#; + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + + let CompilationError::TypeError(TypeCheckError::CannotInvokeStructFieldFunctionType { + method_name, + .. + }) = &errors[0].0 + else { + panic!("Expected a 'CannotInvokeStructFieldFunctionType' error, got {:?}", errors[0].0); + }; + + assert_eq!(method_name, "wrapped"); +} + fn test_disallows_attribute_on_impl_method( attr: &str, check_error: impl FnOnce(&CompilationError), @@ -3845,3 +3864,33 @@ fn disallows_export_attribute_on_trait_impl_method() { )); }); } + +#[test] +fn allows_multiple_underscore_parameters() { + let src = r#" + pub fn foo(_: i32, _: i64) {} + + fn main() {} + "#; + assert_no_errors(src); +} + +#[test] +fn disallows_underscore_on_right_hand_side() { + let src = r#" + fn main() { + let _ = 1; + let _x = _; + } + "#; + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + + let CompilationError::ResolverError(ResolverError::VariableNotDeclared { name, ..
}) = &errors[0].0 + else { + panic!("Expected a VariableNotDeclared error, got {:?}", errors[0].0); + }; + + assert_eq!(name, "_"); +} diff --git a/noir/noir-repo/compiler/noirc_frontend/src/tests/metaprogramming.rs b/noir/noir-repo/compiler/noirc_frontend/src/tests/metaprogramming.rs index 82c40203244f..89a049ebc9d1 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/tests/metaprogramming.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/tests/metaprogramming.rs @@ -141,3 +141,23 @@ fn errors_if_macros_inject_functions_with_name_collisions() { ) if contents == "foo" )); } + +#[test] +fn uses_correct_type_for_attribute_arguments() { + let src = r#" + #[foo(32)] + comptime fn foo(_f: FunctionDefinition, i: u32) { + let y: u32 = 1; + let _ = y == i; + } + + #[bar([0; 2])] + comptime fn bar(_f: FunctionDefinition, i: [u32; 2]) { + let y: u32 = 1; + let _ = y == i[0]; + } + + fn main() {} + "#; + assert_no_errors(src); +} diff --git a/noir/noir-repo/compiler/noirc_printable_type/Cargo.toml b/noir/noir-repo/compiler/noirc_printable_type/Cargo.toml index 8bb56703e8ae..8d0574aad64b 100644 --- a/noir/noir-repo/compiler/noirc_printable_type/Cargo.toml +++ b/noir/noir-repo/compiler/noirc_printable_type/Cargo.toml @@ -14,7 +14,6 @@ workspace = true [dependencies] acvm.workspace = true iter-extended.workspace = true -regex = "1.9.1" serde.workspace = true serde_json.workspace = true thiserror.workspace = true diff --git a/noir/noir-repo/compiler/noirc_printable_type/src/lib.rs b/noir/noir-repo/compiler/noirc_printable_type/src/lib.rs index 5ab04c6f5769..d46b37c4ea23 100644 --- a/noir/noir-repo/compiler/noirc_printable_type/src/lib.rs +++ b/noir/noir-repo/compiler/noirc_printable_type/src/lib.rs @@ -2,7 +2,7 @@ use std::{collections::BTreeMap, str}; use acvm::{acir::AcirField, brillig_vm::brillig::ForeignCallParam}; use iter_extended::vecmap; -use regex::{Captures, Regex}; + use serde::{Deserialize, Serialize}; use thiserror::Error; @@ -69,6 +69,9 @@ pub enum PrintableValueDisplay<F: AcirField> { #[derive(Debug, Error)] pub enum ForeignCallError { + #[error("No handler could be found for foreign call `{0}`")] + NoHandler(String), + #[error("Foreign call inputs needed for execution are missing")] MissingForeignCallInputs, @@ -250,24 +253,6 @@ fn to_string<F: AcirField>(value: &PrintableValue<F>, typ: &PrintableType) -> Op Some(output) } -// Taken from Regex docs directly -fn replace_all<E>( - re: &Regex, - haystack: &str, - mut replacement: impl FnMut(&Captures) -> Result<String, E>, -) -> Result<String, E> { - let mut new = String::with_capacity(haystack.len()); - let mut last_match = 0; - for caps in re.captures_iter(haystack) { - let m = caps.get(0).unwrap(); - new.push_str(&haystack[last_match..m.start()]); - new.push_str(&replacement(&caps)?); - last_match = m.end(); - } - new.push_str(&haystack[last_match..]); - Ok(new) -} - impl<F: AcirField> std::fmt::Display for PrintableValueDisplay<F> { fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { @@ -276,18 +261,56 @@ impl<F: AcirField> std::fmt::Display for PrintableValueDisplay<F> { write!(fmt, "{output_string}") } Self::FmtString(template, values) => { - let mut display_iter = values.iter(); - let re = Regex::new(r"\{([a-zA-Z0-9_]+)\}").map_err(|_| std::fmt::Error)?; + let mut values_iter = values.iter(); + write_template_replacing_interpolations(template, fmt, || { + values_iter.next().and_then(|(value, typ)| to_string(value, typ)) + }) + } + } + } +} + +fn write_template_replacing_interpolations( + template: &str, + fmt: &mut std::fmt::Formatter<'_>, + mut replacement: impl FnMut() -> Option<String>, )
-> std::fmt::Result { + let mut last_index = 0; // How far we've written from the template + let mut char_indices = template.char_indices().peekable(); + while let Some((char_index, char)) = char_indices.next() { + // Keep going forward until we find a '{' + if char != '{' { + continue; + } + + // We'll either have to write an interpolation or '{{' if it's an escape, + // so let's write what we've seen so far in the template. + write!(fmt, "{}", &template[last_index..char_index])?; + + // If it's '{{', write '{' and keep going + if char_indices.peek().map(|(_, char)| char) == Some(&'{') { + write!(fmt, "{{")?; + (last_index, _) = char_indices.next().unwrap(); + continue; + } - let formatted_str = replace_all(&re, template, |_: &Captures| { - let (value, typ) = display_iter.next().ok_or(std::fmt::Error)?; - to_string(value, typ).ok_or(std::fmt::Error) - })?; + // Write the interpolation + if let Some(string) = replacement() { + write!(fmt, "{}", string)?; + } else { + return Err(std::fmt::Error); + } - write!(fmt, "{formatted_str}") + // Whatever was inside '{...}' doesn't matter, so skip until we find '}' + while let Some((_, char)) = char_indices.next() { + if char == '}' { + last_index = char_indices.peek().map(|(index, _)| *index).unwrap_or(template.len()); + break; } } } + + write!(fmt, "{}", &template[last_index..]) } /// This trims any leading zeroes. @@ -387,3 +410,40 @@ pub fn decode_string_value<F: AcirField>(field_elements: &[F]) -> String { let final_string = str::from_utf8(&string_as_slice).unwrap(); final_string.to_owned() } + +#[cfg(test)] +mod tests { + use acvm::FieldElement; + + use crate::{PrintableType, PrintableValue, PrintableValueDisplay}; + + #[test] + fn printable_value_display_to_string_without_interpolations() { + let template = "hello"; + let display = + PrintableValueDisplay::<FieldElement>::FmtString(template.to_string(), vec![]); + assert_eq!(display.to_string(), template); + } + + #[test] + fn printable_value_display_to_string_with_curly_escapes() { + let template = "hello {{world}} {{{{double_escape}}}}"; + let display = + PrintableValueDisplay::<FieldElement>::FmtString(template.to_string(), vec![]); + assert_eq!(display.to_string(), template); + } + + #[test] + fn printable_value_display_to_string_with_interpolations() { + let template = "hello {one} {{no}} {two} {{not_again}} {three} world"; + let values = vec![ + (PrintableValue::String("ONE".to_string()), PrintableType::String { length: 3 }), + (PrintableValue::String("TWO".to_string()), PrintableType::String { length: 3 }), + (PrintableValue::String("THREE".to_string()), PrintableType::String { length: 5 }), + ]; + let expected = "hello ONE {{no}} TWO {{not_again}} THREE world"; + let display = + PrintableValueDisplay::<FieldElement>::FmtString(template.to_string(), values); + assert_eq!(display.to_string(), expected); + } +} diff --git a/noir/noir-repo/compiler/wasm/Cargo.toml b/noir/noir-repo/compiler/wasm/Cargo.toml index c8b8c3bb06e9..9951b23f6095 100644 --- a/noir/noir-repo/compiler/wasm/Cargo.toml +++ b/noir/noir-repo/compiler/wasm/Cargo.toml @@ -1,10 +1,12 @@ [package] name = "noir_wasm" +description = "A JS interface to the Noir compiler" version.workspace = true authors.workspace = true edition.workspace = true rust-version.workspace = true license.workspace = true +repository.workspace = true [lints] workspace = true @@ -42,4 +44,4 @@ getrandom = { workspace = true, features = ["js"] } rust-embed = { workspace = true, features = ["debug-embed"] } [build-dependencies] -build-data.workspace = true \ No newline at end of file +build-data.workspace =
true diff --git a/noir/noir-repo/compiler/wasm/LICENSE-APACHE b/noir/noir-repo/compiler/wasm/LICENSE-APACHE new file mode 100644 index 000000000000..261eeb9e9f8b --- /dev/null +++ b/noir/noir-repo/compiler/wasm/LICENSE-APACHE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
diff --git a/noir/noir-repo/compiler/wasm/LICENSE-MIT b/noir/noir-repo/compiler/wasm/LICENSE-MIT
new file mode 100644
index 000000000000..a93d7f55c8e7
--- /dev/null
+++ b/noir/noir-repo/compiler/wasm/LICENSE-MIT
@@ -0,0 +1,21 @@
+MIT License
+
+ Copyright (c) 2021-2023 noir-lang
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
diff --git a/noir/noir-repo/compiler/wasm/tsconfig.json b/noir/noir-repo/compiler/wasm/tsconfig.json
index d2ae58b8fc96..42c7396aa832 100644
--- a/noir/noir-repo/compiler/wasm/tsconfig.json
+++ b/noir/noir-repo/compiler/wasm/tsconfig.json
@@ -18,4 +18,4 @@
     "allowJs": true,
   },
   "exclude": ["node_modules"]
-}
\ No newline at end of file
+}
diff --git a/noir/noir-repo/cspell.json b/noir/noir-repo/cspell.json
index 36bba737cd7c..5c707e92e215 100644
--- a/noir/noir-repo/cspell.json
+++ b/noir/noir-repo/cspell.json
@@ -106,6 +106,7 @@
     "Guillaume",
     "gzipped",
     "hasher",
+    "heaptrack",
     "hexdigit",
     "higher-kinded",
     "Hindley-Milner",
@@ -154,6 +155,7 @@
     "nargo",
     "neovim",
     "newtype",
+    "nextest",
     "nightlies",
     "nixpkgs",
     "noirc",
diff --git a/noir/noir-repo/docs/docs/noir/concepts/data_types/integers.md b/noir/noir-repo/docs/docs/noir/concepts/data_types/integers.md
index f3badde62bed..41a823646ddd 100644
--- a/noir/noir-repo/docs/docs/noir/concepts/data_types/integers.md
+++ b/noir/noir-repo/docs/docs/noir/concepts/data_types/integers.md
@@ -79,7 +79,7 @@ fn main() {
 You can construct a U128 from its limbs:
 ```rust
 fn main(x: u64, y: u64) {
-    let x = U128::from_u64s_be(x,y);
+    let z = U128::from_u64s_be(x,y);
     assert(z.hi == x as Field);
     assert(z.lo == y as Field);
 }
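The docs change just above fixes more than a name: the old snippet re-bound the parameter `x` and never defined the `z` used by the assertions, so the example could not have compiled. A minimal sketch of the corrected pattern, matching the fixed docs:

```noir
fn main(x: u64, y: u64) {
    // `z` takes `x` as its high limb and `y` as its low limb (big-endian limbs).
    let z = U128::from_u64s_be(x, y);
    assert(z.hi == x as Field);
    assert(z.lo == y as Field);
}
```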
diff --git a/noir/noir-repo/docs/docs/noir/standard_library/containers/boundedvec.md b/noir/noir-repo/docs/docs/noir/standard_library/containers/boundedvec.md
index 98b7d584033e..4efb1e4ea0fe 100644
--- a/noir/noir-repo/docs/docs/noir/standard_library/containers/boundedvec.md
+++ b/noir/noir-repo/docs/docs/noir/standard_library/containers/boundedvec.md
@@ -246,6 +246,42 @@ Example:
 let bounded_vec: BoundedVec<Field, 3> = BoundedVec::from_array([1, 2, 3])
 ```
 
+### from_parts
+
+```rust
+pub fn from_parts(mut array: [T; MaxLen], len: u32) -> Self
+```
+
+Creates a new BoundedVec from the given array and length.
+The given length must be less than or equal to the length of the array.
+
+This function will zero out any elements at or past index `len` of `array`.
+This incurs an extra runtime cost of O(MaxLen). If you are sure your array is
+zeroed after that index, you can use `from_parts_unchecked` to remove the extra loop.
+
+Example:
+
+#include_code from-parts noir_stdlib/src/collections/bounded_vec.nr rust
+
+### from_parts_unchecked
+
+```rust
+pub fn from_parts_unchecked(array: [T; MaxLen], len: u32) -> Self
+```
+
+Creates a new BoundedVec from the given array and length.
+The given length must be less than or equal to the length of the array.
+
+This function is unsafe because it expects all elements past the `len` index
+of `array` to be zeroed, but does not check for this internally. Use `from_parts`
+for a safe version of this function which does zero out any indices past the
+given length. Invalidating this assumption can notably cause `BoundedVec::eq`
+to give incorrect results since it will check even elements past `len`.
+
+Example:
+
+#include_code from-parts-unchecked noir_stdlib/src/collections/bounded_vec.nr rust
+
 ### map
 
 ```rust
diff --git a/noir/noir-repo/docs/docs/noir/standard_library/meta/op.md b/noir/noir-repo/docs/docs/noir/standard_library/meta/op.md
index 55d2d244344d..90501e05fa4e 100644
--- a/noir/noir-repo/docs/docs/noir/standard_library/meta/op.md
+++ b/noir/noir-repo/docs/docs/noir/standard_library/meta/op.md
@@ -142,7 +142,7 @@ Represents a binary operator. One of `+`, `-`, `*`, `/`, `%`, `==`, `!=`, `<`, `<=`, `>`, `>=`, `&`, `|`, `^`, `<<`, or `>>`.
 
 #### is_shift_left
 
-#include_code is_shift_right noir_stdlib/src/meta/op.nr rust
+#include_code is_shift_left noir_stdlib/src/meta/op.nr rust
 
 `true` if this operator is `<<`
 
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.33.0/index.mdx b/noir/noir-repo/docs/versioned_docs/version-v0.33.0/index.mdx
index a6bd306f91da..9ed9662b0b9d 100644
--- a/noir/noir-repo/docs/versioned_docs/version-v0.33.0/index.mdx
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.33.0/index.mdx
@@ -51,7 +51,7 @@ Noir can be used both in complex cloud-based backends and in user's smartphones,
 Aztec Contracts leverage Noir to allow for the storage and execution of private information. Writing an Aztec Contract is as easy as writing Noir, and Aztec developers can easily interact with the network storage and execution through the [Aztec.nr](https://docs.aztec.network/developers/contracts/main) library.
 
-    Soliditry Verifier Example
+    Solidity Verifier Example
 Noir can auto-generate Solidity verifier contracts that verify Noir proofs. This allows for non-interactive verification of proofs containing private information in an immutable system. This feature powers a multitude of use-case scenarios, from P2P chess tournaments, to [Aztec Layer-2 Blockchain](https://docs.aztec.network/)
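The two constructors documented above differ only in who pays for the zeroing guarantee that `BoundedVec::eq` relies on. A minimal sketch contrasting them, assuming the `BoundedVec` API added in this diff (values and lengths are illustrative):

```noir
fn demo() {
    // Safe: from_parts zeroes storage at and past index 2, costing O(MaxLen).
    let a: BoundedVec<u32, 4> = BoundedVec::from_parts([7, 8, 9, 9], 2);

    // Unchecked: the caller promises elements past `len` are already zero,
    // so no zeroing loop is emitted.
    let b: BoundedVec<u32, 4> = BoundedVec::from_parts_unchecked([7, 8, 0, 0], 2);

    // Both hold [7, 8]; eq compares the zeroed tail storage as well, so this passes.
    assert_eq(a, b);
}
```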
diff --git a/noir/noir-repo/noir_stdlib/src/collections/bounded_vec.nr b/noir/noir-repo/noir_stdlib/src/collections/bounded_vec.nr
index f33890f197ee..0ad39c518c49 100644
--- a/noir/noir-repo/noir_stdlib/src/collections/bounded_vec.nr
+++ b/noir/noir-repo/noir_stdlib/src/collections/bounded_vec.nr
@@ -420,6 +420,58 @@ impl<T, let MaxLen: u32> BoundedVec<T, MaxLen> {
         }
         ret
     }
+
+    /// Creates a new BoundedVec from the given array and length.
+    /// The given length must be less than or equal to the length of the array.
+    ///
+    /// This function will zero out any elements at or past index `len` of `array`.
+    /// This incurs an extra runtime cost of O(MaxLen). If you are sure your array is
+    /// zeroed after that index, you can use `from_parts_unchecked` to remove the extra loop.
+    ///
+    /// Example:
+    ///
+    /// ```noir
+    /// let vec: BoundedVec<u32, 4> = BoundedVec::from_parts([1, 2, 3, 0], 3);
+    /// assert_eq(vec.len(), 3);
+    /// ```
+    pub fn from_parts(mut array: [T; MaxLen], len: u32) -> Self {
+        assert(len <= MaxLen);
+        let zeroed = crate::mem::zeroed();
+        for i in 0..MaxLen {
+            if i >= len {
+                array[i] = zeroed;
+            }
+        }
+        BoundedVec { storage: array, len }
+    }
+
+    /// Creates a new BoundedVec from the given array and length.
+    /// The given length must be less than or equal to the length of the array.
+    ///
+    /// This function is unsafe because it expects all elements past the `len` index
+    /// of `array` to be zeroed, but does not check for this internally. Use `from_parts`
+    /// for a safe version of this function which does zero out any indices past the
+    /// given length. Invalidating this assumption can notably cause `BoundedVec::eq`
+    /// to give incorrect results since it will check even elements past `len`.
+    ///
+    /// Example:
+    ///
+    /// ```noir
+    /// let vec: BoundedVec<u32, 4> = BoundedVec::from_parts_unchecked([1, 2, 3, 0], 3);
+    /// assert_eq(vec.len(), 3);
+    ///
+    /// // invalid use!
+    /// let vec1: BoundedVec<u32, 4> = BoundedVec::from_parts_unchecked([1, 2, 3, 1], 3);
+    /// let vec2: BoundedVec<u32, 4> = BoundedVec::from_parts_unchecked([1, 2, 3, 2], 3);
+    ///
+    /// // both vecs have length 3 so we'd expect them to be equal, but this
+    /// // fails because elements past the length are still checked in eq
+    /// assert_eq(vec1, vec2); // fails
+    /// ```
+    pub fn from_parts_unchecked(array: [T; MaxLen], len: u32) -> Self {
+        assert(len <= MaxLen);
+        BoundedVec { storage: array, len }
+    }
 }
 
 impl<T, let MaxLen: u32> Eq for BoundedVec<T, MaxLen>
@@ -431,7 +483,11 @@ where
     //
     // We make the assumption that the user has used the proper interface for working with `BoundedVec`s
     // rather than directly manipulating the internal fields as this can result in an inconsistent internal state.
-        (self.len == other.len) & (self.storage == other.storage)
+        if self.len == other.len {
+            self.storage == other.storage
+        } else {
+            false
+        }
     }
 }
 
@@ -598,4 +654,38 @@ mod bounded_vec_tests {
         assert(bounded_vec1 != bounded_vec2);
         }
     }
+
+    mod from_parts {
+        use crate::collections::bounded_vec::BoundedVec;
+
+        #[test]
+        fn from_parts() {
+            // docs:start:from-parts
+            let vec: BoundedVec<u32, 4> = BoundedVec::from_parts([1, 2, 3, 0], 3);
+            assert_eq(vec.len(), 3);
+
+            // Any elements past the given length are zeroed out, so these
+            // two BoundedVecs will be completely equal
+            let vec1: BoundedVec<u32, 4> = BoundedVec::from_parts([1, 2, 3, 1], 3);
+            let vec2: BoundedVec<u32, 4> = BoundedVec::from_parts([1, 2, 3, 2], 3);
+            assert_eq(vec1, vec2);
+            // docs:end:from-parts
+        }
+
+        #[test]
+        fn from_parts_unchecked() {
+            // docs:start:from-parts-unchecked
+            let vec: BoundedVec<u32, 4> = BoundedVec::from_parts_unchecked([1, 2, 3, 0], 3);
+            assert_eq(vec.len(), 3);
+
+            // invalid use!
+            let vec1: BoundedVec<u32, 4> = BoundedVec::from_parts_unchecked([1, 2, 3, 1], 3);
+            let vec2: BoundedVec<u32, 4> = BoundedVec::from_parts_unchecked([1, 2, 3, 2], 3);
+
+            // both vecs have length 3 so we'd expect them to be equal, but this
+            // fails because elements past the length are still checked in eq
+            assert(vec1 != vec2);
+            // docs:end:from-parts-unchecked
+        }
+    }
 }
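The `map.nr` and `umap.nr` hunks that follow all apply one mechanical change: values interpolated into an `f"..."` string are first bound to plain locals. A sketch of the pattern, assuming (as the extraction of `self_len` and friends suggests) that Noir format strings capture simple identifiers rather than field accesses or method calls; the `Store` type here is hypothetical:

```noir
struct Store {
    _len: u32,
}

impl Store {
    fn check(self, found: u32) {
        // Bind the interpolated pieces to locals before building the f-string.
        let self_len = self._len;
        let msg = f"Amount of valid elements should have been {self_len} times, but got {found}.";
        assert(found == self._len, msg);
    }
}
```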
diff --git a/noir/noir-repo/noir_stdlib/src/collections/map.nr b/noir/noir-repo/noir_stdlib/src/collections/map.nr
index bcce08faab4c..2b0da1b90ecd 100644
--- a/noir/noir-repo/noir_stdlib/src/collections/map.nr
+++ b/noir/noir-repo/noir_stdlib/src/collections/map.nr
@@ -201,7 +201,10 @@ impl<K, V, let N: u32, B, H> HashMap<K, V, N, B, H> {
             }
         }
 
-        let msg = f"Amount of valid elements should have been {self._len} times, but got {entries.len()}.";
+        let self_len = self._len;
+        let entries_len = entries.len();
+        let msg =
+            f"Amount of valid elements should have been {self_len} times, but got {entries_len}.";
         assert(entries.len() == self._len, msg);
 
         entries
@@ -236,8 +239,10 @@ impl<K, V, let N: u32, B, H> HashMap<K, V, N, B, H> {
             }
         }
 
+        let self_len = self._len;
+        let keys_len = keys.len();
         let msg =
-            f"Amount of valid elements should have been {self._len} times, but got {keys.len()}.";
+            f"Amount of valid elements should have been {self_len} times, but got {keys_len}.";
         assert(keys.len() == self._len, msg);
 
         keys
@@ -271,8 +276,10 @@
             }
         }
 
+        let self_len = self._len;
+        let values_len = values.len();
         let msg =
-            f"Amount of valid elements should have been {self._len} times, but got {values.len()}.";
+            f"Amount of valid elements should have been {self_len} times, but got {values_len}.";
         assert(values.len() == self._len, msg);
 
         values
diff --git a/noir/noir-repo/noir_stdlib/src/collections/umap.nr b/noir/noir-repo/noir_stdlib/src/collections/umap.nr
index 3e074551e9d5..7aebeb437cf8 100644
--- a/noir/noir-repo/noir_stdlib/src/collections/umap.nr
+++ b/noir/noir-repo/noir_stdlib/src/collections/umap.nr
@@ -138,7 +138,10 @@ impl<K, V, B, H> UHashMap<K, V, B, H> {
             }
         }
 
-        let msg = f"Amount of valid elements should have been {self._len} times, but got {entries.len()}.";
+        let self_len = self._len;
+        let entries_len = entries.len();
+        let msg =
+            f"Amount of valid elements should have been {self_len} times, but got {entries_len}.";
         assert(entries.len() == self._len, msg);
 
         entries
@@ -158,8 +161,10 @@ impl<K, V, B, H> UHashMap<K, V, B, H> {
             }
         }
 
+        let self_len = self._len;
+        let keys_len = keys.len();
         let msg =
-            f"Amount of valid elements should have been {self._len} times, but got {keys.len()}.";
+            f"Amount of valid elements should have been {self_len} times, but got {keys_len}.";
         assert(keys.len() == self._len, msg);
 
         keys
@@ -179,8 +184,10 @@
             }
         }
 
+        let self_len = self._len;
+        let values_len = values.len();
         let msg =
-            f"Amount of valid elements should have been {self._len} times, but got {values.len()}.";
+            f"Amount of valid elements should have been {self_len} times, but got {values_len}.";
         assert(values.len() == self._len, msg);
 
         values
diff --git a/noir/noir-repo/noir_stdlib/src/hash/poseidon2.nr b/noir/noir-repo/noir_stdlib/src/hash/poseidon2.nr
index f2167c43c2cf..419f07a2aca5 100644
--- a/noir/noir-repo/noir_stdlib/src/hash/poseidon2.nr
+++ b/noir/noir-repo/noir_stdlib/src/hash/poseidon2.nr
@@ -13,11 +13,7 @@ pub struct Poseidon2 {
 
 impl Poseidon2 {
     #[no_predicates]
     pub fn hash<let N: u32>(input: [Field; N], message_size: u32) -> Field {
-        if message_size == N {
-            Poseidon2::hash_internal(input, N, false)
-        } else {
-            Poseidon2::hash_internal(input, message_size, true)
-        }
+        Poseidon2::hash_internal(input, message_size, message_size != N)
     }
 
     pub(crate) fn new(iv: Field) -> Poseidon2 {
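The Poseidon2 hunk above collapses a two-armed `if` into a single call: both branches invoked `hash_internal`, the true branch only ever ran when `message_size == N` (so passing `N` and `message_size` was equivalent), and the remaining difference, the padding flag, is exactly the comparison itself. The same simplification in isolation, as a standalone sketch:

```noir
// Before: if message_size == n { f(input, n, false) } else { f(input, message_size, true) }
// After: the boolean argument is just the comparison.
fn padding_flag(message_size: u32, n: u32) -> bool {
    message_size != n
}
```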
diff --git a/noir/noir-repo/scripts/check-critical-libraries.sh b/noir/noir-repo/scripts/check-critical-libraries.sh
new file mode 100755
index 000000000000..b492cf1d4bc3
--- /dev/null
+++ b/noir/noir-repo/scripts/check-critical-libraries.sh
@@ -0,0 +1,37 @@
+#!/usr/bin/env bash
+set -e
+
+# Run relative to repo root
+cd $(dirname "$0")/../
+
+if [[ -z $1 ]]; then
+  echo "Must specify Noir release to test against" >&2
+  echo "usage: ./check-critical-libraries.sh <version>" >&2
+  exit 1
+fi
+noirup -v $1
+
+CRITICAL_LIBRARIES=$(grep -v "^#\|^$" ./CRITICAL_NOIR_LIBRARIES)
+readarray -t REPOS_TO_CHECK < <(echo "$CRITICAL_LIBRARIES")
+
+getLatestReleaseTagForRepo() {
+    REPO_NAME=$1
+    TAG=$(gh release list -R $REPO_NAME --json 'tagName,isLatest' -q '.[] | select(.isLatest == true).tagName')
+    if [[ -z $TAG ]]; then
+        echo "$REPO_NAME has no valid release" >&2
+        exit 1
+    fi
+    echo $TAG
+}
+
+for REPO in ${REPOS_TO_CHECK[@]}; do
+    echo $REPO
+    TMP_DIR=$(mktemp -d)
+
+    TAG=$(getLatestReleaseTagForRepo $REPO)
+    git clone $REPO -c advice.detachedHead=false --depth 1 --branch $TAG $TMP_DIR
+
+    nargo test --program-dir $TMP_DIR
+
+    rm -rf $TMP_DIR
+done
diff --git a/noir/noir-repo/test_programs/compile_success_empty/comptime_fmt_strings/src/main.nr b/noir/noir-repo/test_programs/compile_success_empty/comptime_fmt_strings/src/main.nr
index ca337c822d87..8cdd15aaa0ec 100644
--- a/noir/noir-repo/test_programs/compile_success_empty/comptime_fmt_strings/src/main.nr
+++ b/noir/noir-repo/test_programs/compile_success_empty/comptime_fmt_strings/src/main.nr
@@ -6,7 +6,7 @@ fn main() {
 
     // Can't print these at compile-time here since printing to stdout while
     // compiling breaks the test runner.
-    let s1 = f"x is {x}, fake interpolation: \{y}, y is {y}";
+    let s1 = f"x is {x}, fake interpolation: {{y}}, y is {y}";
     let s2 = std::mem::zeroed::<fmtstr<0, ()>>();
     (s1, s2)
 };
diff --git a/noir/noir-repo/test_programs/execution_success/array_dedup_regression/Nargo.toml b/noir/noir-repo/test_programs/execution_success/array_dedup_regression/Nargo.toml
new file mode 100644
index 000000000000..16a708743edd
--- /dev/null
+++ b/noir/noir-repo/test_programs/execution_success/array_dedup_regression/Nargo.toml
@@ -0,0 +1,6 @@
+[package]
+name = "array_dedup_regression"
+type = "bin"
+authors = [""]
+
+[dependencies]
diff --git a/noir/noir-repo/test_programs/execution_success/array_dedup_regression/Prover.toml b/noir/noir-repo/test_programs/execution_success/array_dedup_regression/Prover.toml
new file mode 100644
index 000000000000..3aea0c58ce5d
--- /dev/null
+++ b/noir/noir-repo/test_programs/execution_success/array_dedup_regression/Prover.toml
@@ -0,0 +1 @@
+x = 0
diff --git a/noir/noir-repo/test_programs/execution_success/array_dedup_regression/src/main.nr b/noir/noir-repo/test_programs/execution_success/array_dedup_regression/src/main.nr
new file mode 100644
index 000000000000..5506d55b9e7b
--- /dev/null
+++ b/noir/noir-repo/test_programs/execution_success/array_dedup_regression/src/main.nr
@@ -0,0 +1,21 @@
+unconstrained fn main(x: u32) {
+    let a1 = [1, 2, 3, 4, 5];
+
+    for i in 0..5 {
+        let mut a2 = [1, 2, 3, 4, 5];
+        a2[x + i] = 128;
+        println(a2);
+
+        if i != 0 {
+            assert(a2[x + i - 1] != 128);
+        }
+    }
+
+    // Can't use `== [1, 2, 3, 4, 5]` here, that make_array may get
+    // deduplicated to equal a1 in the bugged version
+    assert_eq(a1[0], 1);
+    assert_eq(a1[1], 2);
+    assert_eq(a1[2], 3);
+    assert_eq(a1[3], 4);
+    assert_eq(a1[4], 5);
+}
diff --git
a/noir/noir-repo/test_programs/execution_success/embedded_curve_ops/src/main.nr b/noir/noir-repo/test_programs/execution_success/embedded_curve_ops/src/main.nr index e69184b9c960..85cf60dc7967 100644 --- a/noir/noir-repo/test_programs/execution_success/embedded_curve_ops/src/main.nr +++ b/noir/noir-repo/test_programs/execution_success/embedded_curve_ops/src/main.nr @@ -20,4 +20,22 @@ fn main(priv_key: Field, pub_x: pub Field, pub_y: pub Field) { // The results should be double the g1 point because the scalars are 1 and we pass in g1 twice assert(double.x == res.x); + + // Tests for #6549 + let const_scalar1 = std::embedded_curve_ops::EmbeddedCurveScalar { lo: 23, hi: 0 }; + let const_scalar2 = std::embedded_curve_ops::EmbeddedCurveScalar { lo: 0, hi: 23 }; + let const_scalar3 = std::embedded_curve_ops::EmbeddedCurveScalar { lo: 13, hi: 4 }; + let partial_mul = std::embedded_curve_ops::multi_scalar_mul( + [g1, double, pub_point, g1, g1], + [scalar, const_scalar1, scalar, const_scalar2, const_scalar3], + ); + assert(partial_mul.x == 0x2024c4eebfbc8a20018f8c95c7aab77c6f34f10cf785a6f04e97452d8708fda7); + // Check simplification by zero + let zero_point = std::embedded_curve_ops::EmbeddedCurvePoint { x: 0, y: 0, is_infinite: true }; + let const_zero = std::embedded_curve_ops::EmbeddedCurveScalar { lo: 0, hi: 0 }; + let partial_mul = std::embedded_curve_ops::multi_scalar_mul( + [zero_point, double, g1], + [scalar, const_zero, scalar], + ); + assert(partial_mul == g1); } diff --git a/noir/noir-repo/test_programs/execution_success/hashmap/src/main.nr b/noir/noir-repo/test_programs/execution_success/hashmap/src/main.nr index cfd4e4a91364..aab531ea559e 100644 --- a/noir/noir-repo/test_programs/execution_success/hashmap/src/main.nr +++ b/noir/noir-repo/test_programs/execution_success/hashmap/src/main.nr @@ -104,10 +104,11 @@ fn test_insert_and_methods(input: [Entry; HASHMAP_LEN]) { hashmap.insert(entry.key, entry.value); } - assert(hashmap.len() == HASHMAP_LEN, "hashmap.len() does not match input lenght."); + assert(hashmap.len() == HASHMAP_LEN, "hashmap.len() does not match input length."); for entry in input { - assert(hashmap.contains_key(entry.key), f"Not found inserted key {entry.key}."); + let entry_key = entry.key; + assert(hashmap.contains_key(entry.key), f"Not found inserted key {entry_key}."); } hashmap.clear(); diff --git a/noir/noir-repo/test_programs/execution_success/inline_decompose_hint_brillig_call/Nargo.toml b/noir/noir-repo/test_programs/execution_success/inline_decompose_hint_brillig_call/Nargo.toml new file mode 100644 index 000000000000..ecac2dfb1977 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/inline_decompose_hint_brillig_call/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "inline_decompose_hint_brillig_call" +version = "0.1.0" +type = "bin" +authors = [""] + +[dependencies] diff --git a/noir/noir-repo/test_programs/execution_success/inline_decompose_hint_brillig_call/src/main.nr b/noir/noir-repo/test_programs/execution_success/inline_decompose_hint_brillig_call/src/main.nr new file mode 100644 index 000000000000..e500f0f976d9 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/inline_decompose_hint_brillig_call/src/main.nr @@ -0,0 +1,15 @@ +use std::embedded_curve_ops::{EmbeddedCurvePoint, EmbeddedCurveScalar, fixed_base_scalar_mul}; + +fn main() -> pub Field { + let pre_address = 0x23d95e303879a5d0bbef78ecbc335e559da37431f6dcd11da54ed375c2846813; + let (a, b) = std::field::bn254::decompose(pre_address); + let curve = EmbeddedCurveScalar { 
lo: a, hi: b }; + let key = fixed_base_scalar_mul(curve); + let point = EmbeddedCurvePoint { + x: 0x111223493147f6785514b1c195bb37a2589f22a6596d30bb2bb145fdc9ca8f1e, + y: 0x273bbffd678edce8fe30e0deafc4f66d58357c06fd4a820285294b9746c3be95, + is_infinite: false, + }; + let address_point = key.add(point); + address_point.x +} diff --git a/noir/noir-repo/test_programs/execution_success/loop_invariant_regression/src/main.nr b/noir/noir-repo/test_programs/execution_success/loop_invariant_regression/src/main.nr index 25f6e92f8680..c28ce063116a 100644 --- a/noir/noir-repo/test_programs/execution_success/loop_invariant_regression/src/main.nr +++ b/noir/noir-repo/test_programs/execution_success/loop_invariant_regression/src/main.nr @@ -2,6 +2,7 @@ // to be hoisted to the loop's pre-header block. fn main(x: u32, y: u32) { loop(4, x, y); + array_read_loop(4, x); } fn loop(upper_bound: u32, x: u32, y: u32) { @@ -11,3 +12,15 @@ fn loop(upper_bound: u32, x: u32, y: u32) { assert_eq(z, 12); } } + +fn array_read_loop(upper_bound: u32, x: u32) { + let arr = [2; 5]; + for i in 0..upper_bound { + for j in 0..upper_bound { + for _ in 0..upper_bound { + assert_eq(arr[i], x); + assert_eq(arr[j], x); + } + } + } +} diff --git a/noir/noir-repo/test_programs/execution_success/reference_counts/src/main.nr b/noir/noir-repo/test_programs/execution_success/reference_counts/src/main.nr index 7ab7de893fa5..8de4d0f2508e 100644 --- a/noir/noir-repo/test_programs/execution_success/reference_counts/src/main.nr +++ b/noir/noir-repo/test_programs/execution_success/reference_counts/src/main.nr @@ -1,10 +1,19 @@ +use std::mem::array_refcount; + fn main() { let mut array = [0, 1, 2]; assert_refcount(array, 1); - borrow(array, std::mem::array_refcount(array)); - borrow_mut(&mut array, std::mem::array_refcount(array)); - copy_mut(array, std::mem::array_refcount(array)); + borrow(array, array_refcount(array)); + borrow_mut(&mut array, array_refcount(array)); + copy_mut(array, array_refcount(array)); + + borrow_mut_two(&mut array, &mut array, array_refcount(array)); + + let mut u32_array = [0, 1, 2]; + let rc1 = array_refcount(array); + let rc2 = array_refcount(u32_array); + borrow_mut_two_separate(&mut array, &mut u32_array, rc1, rc2); } fn borrow(array: [Field; 3], rc_before_call: u32) { @@ -13,19 +22,48 @@ fn borrow(array: [Field; 3], rc_before_call: u32) { } fn borrow_mut(array: &mut [Field; 3], rc_before_call: u32) { - assert_refcount(*array, rc_before_call + 0); // Issue! This should be rc_before_call + 1 - array[0] = 5; + // Optimization: inc_rc isn't needed since there is only one array (`array`) + // of the same type that `array` can be modified through + assert_refcount(*array, rc_before_call + 0); + array[0] = 3; println(array[0]); } fn copy_mut(mut array: [Field; 3], rc_before_call: u32) { - assert_refcount(array, rc_before_call + 0); // Issue! This should be rc_before_call + 1 - array[0] = 6; + assert_refcount(array, rc_before_call + 1); + array[0] = 4; println(array[0]); } -fn assert_refcount(array: [Field; 3], expected: u32) { - let count = std::mem::array_refcount(array); +/// Borrow the same array mutably through both parameters, inc_rc is necessary here, although +/// only one is needed to bring the rc from 1 to 2. 
+fn borrow_mut_two(array1: &mut [Field; 3], array2: &mut [Field; 3], rc_before_call: u32) { + assert_refcount(*array1, rc_before_call + 1); + assert_refcount(*array2, rc_before_call + 1); + array1[0] = 5; + array2[0] = 6; + println(array1[0]); // array1 & 2 alias, so this should also print 6 + println(array2[0]); +} + +/// Borrow a different array: we should be able to reason that these types cannot be mutably +/// aliased since they're different types so we don't need any inc_rc instructions. +fn borrow_mut_two_separate( + array1: &mut [Field; 3], + array2: &mut [u32; 3], + rc_before_call1: u32, + rc_before_call2: u32, +) { + assert_refcount(*array1, rc_before_call1 + 0); + assert_refcount(*array2, rc_before_call2 + 0); + array1[0] = 7; + array2[0] = 8; + println(array1[0]); + println(array2[0]); +} + +fn assert_refcount(array: [T; 3], expected: u32) { + let count = array_refcount(array); // All refcounts are zero when running this as a constrained program if std::runtime::is_unconstrained() { diff --git a/noir/noir-repo/test_programs/execution_success/regression_6451/src/main.nr b/noir/noir-repo/test_programs/execution_success/regression_6451/src/main.nr index fbee6956dfab..b13b6c25a7ee 100644 --- a/noir/noir-repo/test_programs/execution_success/regression_6451/src/main.nr +++ b/noir/noir-repo/test_programs/execution_success/regression_6451/src/main.nr @@ -9,7 +9,7 @@ fn main(x: Field) { value += term2; value.assert_max_bit_size::<1>(); - // Regression test for Aztec Packages issue #6451 + // Regression test for #6447 (Aztec Packages issue #9488) let y = unsafe { empty(x + 1) }; let z = y + x + 1; let z1 = z + y; diff --git a/noir/noir-repo/test_programs/execution_success/regression_6674_1/Nargo.toml b/noir/noir-repo/test_programs/execution_success/regression_6674_1/Nargo.toml new file mode 100644 index 000000000000..ad87f9deb467 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/regression_6674_1/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "regression_6674_1" +type = "bin" +authors = [""] + +[dependencies] diff --git a/noir/noir-repo/test_programs/execution_success/regression_6674_1/src/main.nr b/noir/noir-repo/test_programs/execution_success/regression_6674_1/src/main.nr new file mode 100644 index 000000000000..70315c16b78e --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/regression_6674_1/src/main.nr @@ -0,0 +1,85 @@ +use std::mem::zeroed; + +pub struct BoundedVec4 { + storage: [Field; 4], + len: u32, +} + +impl BoundedVec4 { + pub fn new() -> Self { + BoundedVec4 { storage: [0; 4], len: 0 } + } + + pub fn push(&mut self, elem: Field) { + self.storage[self.len] = elem; + self.len += 1; + } +} + +pub struct PrivateKernelCircuitPublicInputs { + pub l2_to_l1_msgs: [Field; 4], + pub public_call_requests: [Field; 4], +} + +pub struct FixtureBuilder { + pub public_call_requests: BoundedVec4, + pub counter: Field, +} + +impl FixtureBuilder { + pub fn new() -> Self { + FixtureBuilder { public_call_requests: zeroed(), counter: 0 } + } + + pub fn append_public_call_requests_inner(&mut self) { + self.public_call_requests.push(self.next_counter()); + } + + pub fn append_public_call_requests(&mut self) { + for _ in 0..4 { + // Note that here we push via a method call + self.append_public_call_requests_inner(); + } + } + + fn next_counter(&mut self) -> Field { + let counter = self.counter; + self.counter += 1; + counter + } +} + +pub struct PrivateKernelCircuitPublicInputsComposer { + pub l2_to_l1_msgs: [Field; 4], + pub public_call_requests: [Field; 4], +} + 
+pub unconstrained fn sort_by(array: [Field; 4]) -> [Field; 4] { + let result = array; + get_sorting_index(array); + result +} + +unconstrained fn get_sorting_index(array: [Field; 4]) { + let _ = [0; 4]; + let mut a = array; + for i in 1..4 { + for j in 0..i { + a[i] = a[j]; + } + } +} + +unconstrained fn main() { + let mut previous_kernel = FixtureBuilder::new(); + previous_kernel.append_public_call_requests(); + + let mut output_composer = PrivateKernelCircuitPublicInputsComposer { + l2_to_l1_msgs: [0; 4], + public_call_requests: previous_kernel.public_call_requests.storage, + }; + output_composer.l2_to_l1_msgs = sort_by(output_composer.l2_to_l1_msgs); + output_composer.public_call_requests = sort_by(output_composer.public_call_requests); + + assert_eq(previous_kernel.public_call_requests.storage[1], 1, "equality"); +} diff --git a/noir/noir-repo/test_programs/execution_success/regression_6674_2/Nargo.toml b/noir/noir-repo/test_programs/execution_success/regression_6674_2/Nargo.toml new file mode 100644 index 000000000000..666765c81726 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/regression_6674_2/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "regression_6674_2" +type = "bin" +authors = [""] + +[dependencies] diff --git a/noir/noir-repo/test_programs/execution_success/regression_6674_2/src/main.nr b/noir/noir-repo/test_programs/execution_success/regression_6674_2/src/main.nr new file mode 100644 index 000000000000..42ad4fa40311 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/regression_6674_2/src/main.nr @@ -0,0 +1,87 @@ +use std::mem::zeroed; + +pub struct BoundedVec4 { + storage: [Field; 4], + len: u32, +} + +impl BoundedVec4 { + pub fn new() -> Self { + BoundedVec4 { storage: [0; 4], len: 0 } + } + + pub fn push(&mut self, elem: Field) { + self.storage[self.len] = elem; + self.len += 1; + } +} + +pub struct PrivateKernelCircuitPublicInputs { + pub l2_to_l1_msgs: [Field; 4], + pub public_call_requests: [Field; 4], +} + +pub struct FixtureBuilder { + pub public_call_requests: BoundedVec4, + pub counter: Field, +} + +impl FixtureBuilder { + pub fn new() -> Self { + FixtureBuilder { public_call_requests: zeroed(), counter: 0 } + } + + pub fn append_public_call_requests(&mut self) { + for _ in 0..4 { + // Note that here we push directly, not through a method call + self.public_call_requests.push(self.next_counter()); + } + } + + fn next_counter(&mut self) -> Field { + let counter = self.counter; + self.counter += 1; + counter + } +} + +pub struct PrivateKernelCircuitPublicInputsComposer { + pub l2_to_l1_msgs: [Field; 4], + pub public_call_requests: [Field; 4], +} + +impl PrivateKernelCircuitPublicInputsComposer { + pub unconstrained fn sort_ordered_values(&mut self) { + self.l2_to_l1_msgs = sort_by(self.l2_to_l1_msgs); + self.public_call_requests = sort_by(self.public_call_requests); + } +} + +pub unconstrained fn sort_by(array: [Field; 4]) -> [Field; 4] { + let result = array; + get_sorting_index(array); + result +} + +unconstrained fn get_sorting_index(array: [Field; 4]) { + let _ = [0; 4]; + let mut a = array; + for i in 1..4 { + for j in 0..i { + a[i] = a[j]; + } + } +} + +unconstrained fn main() { + let mut previous_kernel = FixtureBuilder::new(); + previous_kernel.append_public_call_requests(); + + let mut output_composer = PrivateKernelCircuitPublicInputsComposer { + l2_to_l1_msgs: [0; 4], + public_call_requests: previous_kernel.public_call_requests.storage, + }; + output_composer.sort_ordered_values(); + + 
assert_eq(previous_kernel.public_call_requests.storage[1], 1, "equality"); +} diff --git a/noir/noir-repo/test_programs/execution_success/regression_6674_3/Nargo.toml b/noir/noir-repo/test_programs/execution_success/regression_6674_3/Nargo.toml new file mode 100644 index 000000000000..7b396f636937 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/regression_6674_3/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "regression_6674_3" +type = "bin" +authors = [""] + +[dependencies] diff --git a/noir/noir-repo/test_programs/execution_success/regression_6674_3/src/main.nr b/noir/noir-repo/test_programs/execution_success/regression_6674_3/src/main.nr new file mode 100644 index 000000000000..2c87a4c679ce --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/regression_6674_3/src/main.nr @@ -0,0 +1,191 @@ +use std::mem::zeroed; + +pub struct PrivateAccumulatedData { + pub public_call_requests: [Counted; 4], +} + +pub struct PrivateAccumulatedDataBuilder { + pub l2_to_l1_msgs: BoundedVec, + pub public_call_requests: BoundedVec, 4>, + pub private_call_stack: BoundedVec, +} + +impl PrivateAccumulatedDataBuilder { + pub fn finish(self) -> PrivateAccumulatedData { + PrivateAccumulatedData { public_call_requests: self.public_call_requests.storage() } + } +} + +pub struct Counted { + pub inner: T, + pub counter: u32, +} + +impl Counted { + pub fn new(inner: T, counter: u32) -> Self { + Self { inner, counter } + } +} + +pub struct PrivateKernelCircuitPublicInputs { + pub end: PrivateAccumulatedData, +} + +pub struct PrivateKernelData { + pub public_inputs: PrivateKernelCircuitPublicInputs, +} + +pub struct FixtureBuilder2 { + pub public_teardown_call_request: Field, + pub private_call_requests: BoundedVec, + pub public_call_requests: BoundedVec, 4>, + pub counter: u32, +} + +impl FixtureBuilder2 { + pub fn new() -> Self { + let mut builder: FixtureBuilder2 = zeroed(); + builder.counter = 1; + builder + } + + pub fn to_private_accumulated_data_builder(self) -> PrivateAccumulatedDataBuilder { + PrivateAccumulatedDataBuilder { + l2_to_l1_msgs: zeroed(), + public_call_requests: self.public_call_requests, + private_call_stack: vec_reverse(self.private_call_requests), + } + } + + pub fn to_private_accumulated_data(self) -> PrivateAccumulatedData { + self.to_private_accumulated_data_builder().finish() + } + + pub fn to_private_kernel_circuit_public_inputs(self) -> PrivateKernelCircuitPublicInputs { + PrivateKernelCircuitPublicInputs { end: self.to_private_accumulated_data() } + } + + pub fn to_private_kernel_data(self) -> PrivateKernelData { + let public_inputs = + PrivateKernelCircuitPublicInputs { end: self.to_private_accumulated_data() }; + PrivateKernelData { public_inputs } + } + + pub fn add_public_call_request(&mut self) { + self.public_call_requests.push(Counted::new(zeroed(), self.next_counter())); + } + + pub fn append_public_call_requests(&mut self, num: u32) { + for _ in 0..num { + self.add_public_call_request(); + } + } + + pub fn set_public_teardown_call_request(&mut self) { + let mut fields = [0; 5]; + for i in 0..5 { + fields[i] = i as Field; + } + + self.public_teardown_call_request = zeroed(); + } + + fn next_counter(&mut self) -> u32 { + let counter = self.counter; + self.counter += 1; + counter + } +} + +struct PrivateKernelTailToPublicInputsBuilder { + previous_kernel: FixtureBuilder2, +} + +impl PrivateKernelTailToPublicInputsBuilder { + pub unconstrained fn execute(&mut self) { + let kernel = PrivateKernelTailToPublicCircuitPrivateInputs { + previous_kernel: 
self.previous_kernel.to_private_kernel_data(), + }; + let mut output_composer = PrivateKernelCircuitPublicInputsComposer::new_from_previous_kernel( + kernel.previous_kernel.public_inputs, + ); + output_composer.sort_ordered_values(); + } +} + +pub struct PrivateKernelTailToPublicCircuitPrivateInputs { + previous_kernel: PrivateKernelData, +} + +pub struct PrivateKernelCircuitPublicInputsComposer { + public_inputs: PrivateKernelCircuitPublicInputsBuilder, +} + +impl PrivateKernelCircuitPublicInputsComposer { + pub unconstrained fn sort_ordered_values(&mut self) { + // Note hashes, nullifiers, and private logs are sorted in the reset circuit. + self.public_inputs.end.l2_to_l1_msgs.storage = + sort_by_counter_desc(self.public_inputs.end.l2_to_l1_msgs.storage); + self.public_inputs.end.public_call_requests.storage = + sort_by_counter_desc(self.public_inputs.end.public_call_requests.storage); + } +} + +impl PrivateKernelCircuitPublicInputsComposer { + pub fn new_from_previous_kernel( + previous_kernel_public_inputs: PrivateKernelCircuitPublicInputs, + ) -> Self { + let mut public_inputs: PrivateKernelCircuitPublicInputsBuilder = zeroed(); + let start = previous_kernel_public_inputs.end; + public_inputs.end.public_call_requests = BoundedVec { + storage: start.public_call_requests, + len: start.public_call_requests.len(), + }; + PrivateKernelCircuitPublicInputsComposer { public_inputs } + } +} + +pub struct PrivateKernelCircuitPublicInputsBuilder { + end: PrivateAccumulatedDataBuilder, +} + +fn vec_reverse(vec: BoundedVec) -> BoundedVec { + let mut reversed = BoundedVec::new(); + let len = vec.len(); + for i in 0..N { + if i < len { + reversed.push(vec.get_unchecked(len - i - 1)); + } + } + reversed +} + +pub unconstrained fn sort_by_counter_desc(array: [T; N]) -> [T; N] { + sort_by(array) +} + +pub unconstrained fn sort_by(array: [T; N]) -> [T; N] { + let mut result = array; + unsafe { get_sorting_index(array) }; + result +} + +unconstrained fn get_sorting_index(array: [T; N]) { + let _ = [0; 4]; + let mut a = array; + for i in 1..4 { + for j in 0..i { + a[i] = a[j]; + } + } +} + +unconstrained fn main() { + let mut previous_kernel = FixtureBuilder2::new(); + let mut builder = PrivateKernelTailToPublicInputsBuilder { previous_kernel }; + builder.previous_kernel.append_public_call_requests(4); + assert_eq(builder.previous_kernel.public_call_requests.storage[3].counter, 4); + builder.previous_kernel.set_public_teardown_call_request(); + builder.execute(); + assert_eq(builder.previous_kernel.public_call_requests.storage[3].counter, 4); +} diff --git a/noir/noir-repo/test_programs/execution_success/uhashmap/src/main.nr b/noir/noir-repo/test_programs/execution_success/uhashmap/src/main.nr index b56a4fe1747c..689ba9d4a04a 100644 --- a/noir/noir-repo/test_programs/execution_success/uhashmap/src/main.nr +++ b/noir/noir-repo/test_programs/execution_success/uhashmap/src/main.nr @@ -104,7 +104,8 @@ unconstrained fn test_insert_and_methods(input: [Entry; HASHMAP_LEN]) { assert(hashmap.len() == HASHMAP_LEN, "hashmap.len() does not match input length."); for entry in input { - assert(hashmap.contains_key(entry.key), f"Not found inserted key {entry.key}."); + let entry_key = entry.key; + assert(hashmap.contains_key(entry.key), f"Not found inserted key {entry_key}."); } hashmap.clear(); diff --git a/noir/noir-repo/test_programs/gates_report_brillig.sh b/noir/noir-repo/test_programs/gates_report_brillig.sh old mode 100644 new mode 100755 diff --git a/noir/noir-repo/test_programs/gates_report_brillig_execution.sh 
b/noir/noir-repo/test_programs/gates_report_brillig_execution.sh old mode 100644 new mode 100755 diff --git a/noir/noir-repo/test_programs/memory_report.sh b/noir/noir-repo/test_programs/memory_report.sh new file mode 100755 index 000000000000..1b8274b76ccc --- /dev/null +++ b/noir/noir-repo/test_programs/memory_report.sh @@ -0,0 +1,48 @@ +#!/usr/bin/env bash +set -e + +sudo apt-get install heaptrack + +NARGO="nargo" + + +# Tests to be profiled for memory report +tests_to_profile=("keccak256" "workspace" "regression_4709" "ram_blowup_regression") + +current_dir=$(pwd) +execution_success_path="$current_dir/execution_success" +test_dirs=$(ls $execution_success_path) + +FIRST="1" + +echo "{\"memory_reports\": [ " > memory_report.json + + +for test_name in ${tests_to_profile[@]}; do + full_path=$execution_success_path"/"$test_name + cd $full_path + + if [ $FIRST = "1" ] + then + FIRST="0" + else + echo " ," >> $current_dir"/memory_report.json" + fi + heaptrack --output $current_dir/$test_name"_heap" $NARGO compile --force + if test -f $current_dir/$test_name"_heap.gz"; + then + heaptrack --analyze $current_dir/$test_name"_heap.gz" > $current_dir/$test_name"_heap_analysis.txt" + rm $current_dir/$test_name"_heap.gz" + else + heaptrack --analyze $current_dir/$test_name"_heap.zst" > $current_dir/$test_name"_heap_analysis.txt" + rm $current_dir/$test_name"_heap.zst" + fi + consumption="$(grep 'peak heap memory consumption' $current_dir/$test_name'_heap_analysis.txt')" + len=${#consumption}-30 + peak=${consumption:30:len} + rm $current_dir/$test_name"_heap_analysis.txt" + echo -e " {\n \"artifact_name\":\"$test_name\",\n \"peak_memory\":\"$peak\"\n }" >> $current_dir"/memory_report.json" +done + +echo "]}" >> $current_dir"/memory_report.json" + diff --git a/noir/noir-repo/tooling/acvm_cli/src/cli/execute_cmd.rs b/noir/noir-repo/tooling/acvm_cli/src/cli/execute_cmd.rs index c453936568ce..bf5969718e5a 100644 --- a/noir/noir-repo/tooling/acvm_cli/src/cli/execute_cmd.rs +++ b/noir/noir-repo/tooling/acvm_cli/src/cli/execute_cmd.rs @@ -8,7 +8,7 @@ use clap::Args; use crate::cli::fs::inputs::{read_bytecode_from_file, read_inputs_from_file}; use crate::errors::CliError; -use nargo::ops::{execute_program, DefaultForeignCallExecutor}; +use nargo::{foreign_calls::DefaultForeignCallExecutor, ops::execute_program}; use super::fs::witness::{create_output_witness_string, save_witness_to_dir}; diff --git a/noir/noir-repo/tooling/debugger/src/foreign_calls.rs b/noir/noir-repo/tooling/debugger/src/foreign_calls.rs index 6a773a4b3484..ecf27a22f29d 100644 --- a/noir/noir-repo/tooling/debugger/src/foreign_calls.rs +++ b/noir/noir-repo/tooling/debugger/src/foreign_calls.rs @@ -3,7 +3,7 @@ use acvm::{ pwg::ForeignCallWaitInfo, AcirField, FieldElement, }; -use nargo::ops::{DefaultForeignCallExecutor, ForeignCallExecutor}; +use nargo::foreign_calls::{DefaultForeignCallExecutor, ForeignCallExecutor}; use noirc_artifacts::debug::{DebugArtifact, DebugVars, StackFrame}; use noirc_errors::debug_info::{DebugFnId, DebugVarId}; use noirc_printable_type::ForeignCallError; diff --git a/noir/noir-repo/tooling/lsp/src/requests/completion/tests.rs b/noir/noir-repo/tooling/lsp/src/requests/completion/tests.rs index 9306e38a48ad..97c7ad86d5ac 100644 --- a/noir/noir-repo/tooling/lsp/src/requests/completion/tests.rs +++ b/noir/noir-repo/tooling/lsp/src/requests/completion/tests.rs @@ -1586,6 +1586,54 @@ fn main() { assert_eq!(changed, expected); } + #[test] + async fn test_auto_import_inserts_after_last_use_in_nested_module() { + let src = 
r#"mod foo { + pub mod bar { + pub fn hello_world() {} + } +} + +mod baz { + fn qux() {} +} + +mod other { + use baz::qux; + + fn main() { + hel>|< + } +}"#; + + let expected = r#"mod foo { + pub mod bar { + pub fn hello_world() {} + } +} + +mod baz { + fn qux() {} +} + +mod other { + use baz::qux; + use super::foo::bar::hello_world; + + fn main() { + hel + } +}"#; + let mut items = get_completions(src).await; + assert_eq!(items.len(), 1); + + let item = items.remove(0); + + let changed = + apply_text_edits(&src.replace(">|<", ""), &item.additional_text_edits.unwrap()); + assert_eq!(changed, expected); + } + #[test] async fn test_does_not_auto_import_test_functions() { let src = r#" diff --git a/noir/noir-repo/tooling/lsp/src/requests/test_run.rs b/noir/noir-repo/tooling/lsp/src/requests/test_run.rs index 50c699bb6a63..937fdcc0a5e5 100644 --- a/noir/noir-repo/tooling/lsp/src/requests/test_run.rs +++ b/noir/noir-repo/tooling/lsp/src/requests/test_run.rs @@ -101,6 +101,11 @@ fn on_test_run_request_inner( result: "fail".to_string(), message: Some(message), }, + TestStatus::Skipped => NargoTestRunResult { + id: params.id.clone(), + result: "skipped".to_string(), + message: None, + }, TestStatus::CompileError(diag) => NargoTestRunResult { id: params.id.clone(), result: "error".to_string(), diff --git a/noir/noir-repo/tooling/lsp/src/use_segment_positions.rs b/noir/noir-repo/tooling/lsp/src/use_segment_positions.rs index f9a3f4290294..246ff653245f 100644 --- a/noir/noir-repo/tooling/lsp/src/use_segment_positions.rs +++ b/noir/noir-repo/tooling/lsp/src/use_segment_positions.rs @@ -318,7 +318,7 @@ fn new_use_completion_item_additional_text_edits( request: UseCompletionItemAdditionTextEditsRequest, ) -> Vec { let line = request.auto_import_line as u32; - let character = (request.nesting * 4) as u32; + let character = 0; let indent = " ".repeat(request.nesting * 4); let mut newlines = "\n"; @@ -331,6 +331,6 @@ fn new_use_completion_item_additional_text_edits( vec![TextEdit { range: Range { start: Position { line, character }, end: Position { line, character } }, - new_text: format!("use {};{}{}", request.full_path, newlines, indent), + new_text: format!("{}use {};{}", indent, request.full_path, newlines), }] } diff --git a/noir/noir-repo/tooling/nargo/src/foreign_calls/mocker.rs b/noir/noir-repo/tooling/nargo/src/foreign_calls/mocker.rs new file mode 100644 index 000000000000..c93d16bbaf6e --- /dev/null +++ b/noir/noir-repo/tooling/nargo/src/foreign_calls/mocker.rs @@ -0,0 +1,176 @@ +use acvm::{ + acir::brillig::{ForeignCallParam, ForeignCallResult}, + pwg::ForeignCallWaitInfo, + AcirField, +}; +use noirc_printable_type::{decode_string_value, ForeignCallError}; +use serde::{Deserialize, Serialize}; + +use super::{ForeignCall, ForeignCallExecutor}; + +/// This struct represents an oracle mock. It can be used for testing programs that use oracles. 
+#[derive(Debug, PartialEq, Eq, Clone)] +struct MockedCall { + /// The id of the mock, used to update or remove it + id: usize, + /// The oracle it's mocking + name: String, + /// Optionally match the parameters + params: Option>>, + /// The parameters with which the mock was last called + last_called_params: Option>>, + /// The result to return when this mock is called + result: ForeignCallResult, + /// How many times should this mock be called before it is removed + times_left: Option, +} + +impl MockedCall { + fn new(id: usize, name: String) -> Self { + Self { + id, + name, + params: None, + last_called_params: None, + result: ForeignCallResult { values: vec![] }, + times_left: None, + } + } +} + +impl MockedCall { + fn matches(&self, name: &str, params: &[ForeignCallParam]) -> bool { + self.name == name && (self.params.is_none() || self.params.as_deref() == Some(params)) + } +} + +#[derive(Debug, Default)] +pub(crate) struct MockForeignCallExecutor { + /// Mocks have unique ids used to identify them in Noir, allowing to update or remove them. + last_mock_id: usize, + /// The registered mocks + mocked_responses: Vec>, +} + +impl MockForeignCallExecutor { + fn extract_mock_id( + foreign_call_inputs: &[ForeignCallParam], + ) -> Result<(usize, &[ForeignCallParam]), ForeignCallError> { + let (id, params) = + foreign_call_inputs.split_first().ok_or(ForeignCallError::MissingForeignCallInputs)?; + let id = + usize::try_from(id.unwrap_field().try_to_u64().expect("value does not fit into u64")) + .expect("value does not fit into usize"); + Ok((id, params)) + } + + fn find_mock_by_id(&self, id: usize) -> Option<&MockedCall> { + self.mocked_responses.iter().find(|response| response.id == id) + } + + fn find_mock_by_id_mut(&mut self, id: usize) -> Option<&mut MockedCall> { + self.mocked_responses.iter_mut().find(|response| response.id == id) + } + + fn parse_string(param: &ForeignCallParam) -> String { + let fields: Vec<_> = param.fields().to_vec(); + decode_string_value(&fields) + } +} + +impl Deserialize<'a>> ForeignCallExecutor + for MockForeignCallExecutor +{ + fn execute( + &mut self, + foreign_call: &ForeignCallWaitInfo, + ) -> Result, ForeignCallError> { + let foreign_call_name = foreign_call.function.as_str(); + match ForeignCall::lookup(foreign_call_name) { + Some(ForeignCall::CreateMock) => { + let mock_oracle_name = Self::parse_string(&foreign_call.inputs[0]); + assert!(ForeignCall::lookup(&mock_oracle_name).is_none()); + let id = self.last_mock_id; + self.mocked_responses.push(MockedCall::new(id, mock_oracle_name)); + self.last_mock_id += 1; + + Ok(F::from(id).into()) + } + Some(ForeignCall::SetMockParams) => { + let (id, params) = Self::extract_mock_id(&foreign_call.inputs)?; + self.find_mock_by_id_mut(id) + .unwrap_or_else(|| panic!("Unknown mock id {}", id)) + .params = Some(params.to_vec()); + + Ok(ForeignCallResult::default()) + } + Some(ForeignCall::GetMockLastParams) => { + let (id, _) = Self::extract_mock_id(&foreign_call.inputs)?; + let mock = + self.find_mock_by_id(id).unwrap_or_else(|| panic!("Unknown mock id {}", id)); + + let last_called_params = mock + .last_called_params + .clone() + .unwrap_or_else(|| panic!("Mock {} was never called", mock.name)); + + Ok(last_called_params.into()) + } + Some(ForeignCall::SetMockReturns) => { + let (id, params) = Self::extract_mock_id(&foreign_call.inputs)?; + self.find_mock_by_id_mut(id) + .unwrap_or_else(|| panic!("Unknown mock id {}", id)) + .result = ForeignCallResult { values: params.to_vec() }; + + Ok(ForeignCallResult::default()) 
+ } + Some(ForeignCall::SetMockTimes) => { + let (id, params) = Self::extract_mock_id(&foreign_call.inputs)?; + let times = + params[0].unwrap_field().try_to_u64().expect("Invalid bit size of times"); + + self.find_mock_by_id_mut(id) + .unwrap_or_else(|| panic!("Unknown mock id {}", id)) + .times_left = Some(times); + + Ok(ForeignCallResult::default()) + } + Some(ForeignCall::ClearMock) => { + let (id, _) = Self::extract_mock_id(&foreign_call.inputs)?; + self.mocked_responses.retain(|response| response.id != id); + Ok(ForeignCallResult::default()) + } + _ => { + let mock_response_position = self + .mocked_responses + .iter() + .position(|response| response.matches(foreign_call_name, &foreign_call.inputs)); + + if let Some(response_position) = mock_response_position { + // If the program has registered a mocked response to this oracle call then we prefer responding + // with that. + + let mock = self + .mocked_responses + .get_mut(response_position) + .expect("Invalid position of mocked response"); + + mock.last_called_params = Some(foreign_call.inputs.clone()); + + let result = mock.result.values.clone(); + + if let Some(times_left) = &mut mock.times_left { + *times_left -= 1; + if *times_left == 0 { + self.mocked_responses.remove(response_position); + } + } + + Ok(result.into()) + } else { + Err(ForeignCallError::NoHandler(foreign_call_name.to_string())) + } + } + } + } +} diff --git a/noir/noir-repo/tooling/nargo/src/foreign_calls/mod.rs b/noir/noir-repo/tooling/nargo/src/foreign_calls/mod.rs new file mode 100644 index 000000000000..16ed71e11e31 --- /dev/null +++ b/noir/noir-repo/tooling/nargo/src/foreign_calls/mod.rs @@ -0,0 +1,146 @@ +use std::path::PathBuf; + +use acvm::{acir::brillig::ForeignCallResult, pwg::ForeignCallWaitInfo, AcirField}; +use mocker::MockForeignCallExecutor; +use noirc_printable_type::ForeignCallError; +use print::PrintForeignCallExecutor; +use rand::Rng; +use rpc::RPCForeignCallExecutor; +use serde::{Deserialize, Serialize}; + +pub(crate) mod mocker; +pub(crate) mod print; +pub(crate) mod rpc; + +pub trait ForeignCallExecutor { + fn execute( + &mut self, + foreign_call: &ForeignCallWaitInfo, + ) -> Result, ForeignCallError>; +} + +/// This enumeration represents the Brillig foreign calls that are natively supported by nargo. 
+/// After resolution of a foreign call, nargo will restart execution of the ACVM +pub enum ForeignCall { + Print, + CreateMock, + SetMockParams, + GetMockLastParams, + SetMockReturns, + SetMockTimes, + ClearMock, +} + +impl std::fmt::Display for ForeignCall { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.name()) + } +} + +impl ForeignCall { + pub(crate) fn name(&self) -> &'static str { + match self { + ForeignCall::Print => "print", + ForeignCall::CreateMock => "create_mock", + ForeignCall::SetMockParams => "set_mock_params", + ForeignCall::GetMockLastParams => "get_mock_last_params", + ForeignCall::SetMockReturns => "set_mock_returns", + ForeignCall::SetMockTimes => "set_mock_times", + ForeignCall::ClearMock => "clear_mock", + } + } + + pub(crate) fn lookup(op_name: &str) -> Option { + match op_name { + "print" => Some(ForeignCall::Print), + "create_mock" => Some(ForeignCall::CreateMock), + "set_mock_params" => Some(ForeignCall::SetMockParams), + "get_mock_last_params" => Some(ForeignCall::GetMockLastParams), + "set_mock_returns" => Some(ForeignCall::SetMockReturns), + "set_mock_times" => Some(ForeignCall::SetMockTimes), + "clear_mock" => Some(ForeignCall::ClearMock), + _ => None, + } + } +} + +#[derive(Debug, Default)] +pub struct DefaultForeignCallExecutor { + /// The executor for any [`ForeignCall::Print`] calls. + printer: Option, + mocker: MockForeignCallExecutor, + external: Option, +} + +impl DefaultForeignCallExecutor { + pub fn new( + show_output: bool, + resolver_url: Option<&str>, + root_path: Option, + package_name: Option, + ) -> Self { + let id = rand::thread_rng().gen(); + let printer = if show_output { Some(PrintForeignCallExecutor) } else { None }; + let external_resolver = resolver_url.map(|resolver_url| { + RPCForeignCallExecutor::new(resolver_url, id, root_path, package_name) + }); + DefaultForeignCallExecutor { + printer, + mocker: MockForeignCallExecutor::default(), + external: external_resolver, + } + } +} + +impl Deserialize<'a>> ForeignCallExecutor + for DefaultForeignCallExecutor +{ + fn execute( + &mut self, + foreign_call: &ForeignCallWaitInfo, + ) -> Result, ForeignCallError> { + let foreign_call_name = foreign_call.function.as_str(); + match ForeignCall::lookup(foreign_call_name) { + Some(ForeignCall::Print) => { + if let Some(printer) = &mut self.printer { + printer.execute(foreign_call) + } else { + Ok(ForeignCallResult::default()) + } + } + Some( + ForeignCall::CreateMock + | ForeignCall::SetMockParams + | ForeignCall::GetMockLastParams + | ForeignCall::SetMockReturns + | ForeignCall::SetMockTimes + | ForeignCall::ClearMock, + ) => self.mocker.execute(foreign_call), + + None => { + // First check if there's any defined mock responses for this foreign call. + match self.mocker.execute(foreign_call) { + Err(ForeignCallError::NoHandler(_)) => (), + response_or_error => return response_or_error, + }; + + if let Some(external_resolver) = &mut self.external { + // If the user has registered an external resolver then we forward any remaining oracle calls there. + match external_resolver.execute(foreign_call) { + Err(ForeignCallError::NoHandler(_)) => (), + response_or_error => return response_or_error, + }; + } + + // If all executors have no handler for the given foreign call then we cannot + // return a correct response to the ACVM. 
The best we can do is to return an empty response, + // this allows us to ignore any foreign calls which exist solely to pass information from inside + // the circuit to the environment (e.g. custom logging) as the execution will still be able to progress. + // + // We optimistically return an empty response for all oracle calls as the ACVM will error + // should a response have been required. + Ok(ForeignCallResult::default()) + } + } + } +} diff --git a/noir/noir-repo/tooling/nargo/src/foreign_calls/print.rs b/noir/noir-repo/tooling/nargo/src/foreign_calls/print.rs new file mode 100644 index 000000000000..92fcd65ae285 --- /dev/null +++ b/noir/noir-repo/tooling/nargo/src/foreign_calls/print.rs @@ -0,0 +1,36 @@ +use acvm::{acir::brillig::ForeignCallResult, pwg::ForeignCallWaitInfo, AcirField}; +use noirc_printable_type::{ForeignCallError, PrintableValueDisplay}; + +use super::{ForeignCall, ForeignCallExecutor}; + +#[derive(Debug, Default)] +pub(crate) struct PrintForeignCallExecutor; + +impl ForeignCallExecutor for PrintForeignCallExecutor { + fn execute( + &mut self, + foreign_call: &ForeignCallWaitInfo, + ) -> Result, ForeignCallError> { + let foreign_call_name = foreign_call.function.as_str(); + match ForeignCall::lookup(foreign_call_name) { + Some(ForeignCall::Print) => { + let skip_newline = foreign_call.inputs[0].unwrap_field().is_zero(); + + let foreign_call_inputs = foreign_call + .inputs + .split_first() + .ok_or(ForeignCallError::MissingForeignCallInputs)? + .1; + + let display_values: PrintableValueDisplay = foreign_call_inputs.try_into()?; + let display_string = + format!("{display_values}{}", if skip_newline { "" } else { "\n" }); + + print!("{display_string}"); + + Ok(ForeignCallResult::default()) + } + _ => Err(ForeignCallError::NoHandler(foreign_call_name.to_string())), + } + } +} diff --git a/noir/noir-repo/tooling/nargo/src/foreign_calls/rpc.rs b/noir/noir-repo/tooling/nargo/src/foreign_calls/rpc.rs new file mode 100644 index 000000000000..0653eb1c7e3d --- /dev/null +++ b/noir/noir-repo/tooling/nargo/src/foreign_calls/rpc.rs @@ -0,0 +1,227 @@ +use std::path::PathBuf; + +use acvm::{acir::brillig::ForeignCallResult, pwg::ForeignCallWaitInfo, AcirField}; +use jsonrpc::{arg as build_json_rpc_arg, minreq_http::Builder, Client}; +use noirc_printable_type::ForeignCallError; +use serde::{Deserialize, Serialize}; + +use super::ForeignCallExecutor; + +#[derive(Debug)] +pub(crate) struct RPCForeignCallExecutor { + /// A randomly generated id for this `DefaultForeignCallExecutor`. + /// + /// This is used so that a single `external_resolver` can distinguish between requests from multiple + /// instantiations of `DefaultForeignCallExecutor`. + id: u64, + /// JSON RPC client to resolve foreign calls + external_resolver: Client, + /// Root path to the program or workspace in execution. + root_path: Option, + /// Name of the package in execution + package_name: Option, +} + +#[derive(Debug, Serialize, Deserialize)] +struct ResolveForeignCallRequest { + /// A session ID which allows the external RPC server to link this foreign call request to other foreign calls + /// for the same program execution. + /// + /// This is intended to allow a single RPC server to maintain state related to multiple program executions being + /// performed in parallel. + session_id: u64, + + #[serde(flatten)] + /// The foreign call which the external RPC server is to provide a response for. 
diff --git a/noir/noir-repo/tooling/nargo/src/foreign_calls/rpc.rs b/noir/noir-repo/tooling/nargo/src/foreign_calls/rpc.rs
new file mode 100644
index 000000000000..0653eb1c7e3d
--- /dev/null
+++ b/noir/noir-repo/tooling/nargo/src/foreign_calls/rpc.rs
@@ -0,0 +1,227 @@
+use std::path::PathBuf;
+
+use acvm::{acir::brillig::ForeignCallResult, pwg::ForeignCallWaitInfo, AcirField};
+use jsonrpc::{arg as build_json_rpc_arg, minreq_http::Builder, Client};
+use noirc_printable_type::ForeignCallError;
+use serde::{Deserialize, Serialize};
+
+use super::ForeignCallExecutor;
+
+#[derive(Debug)]
+pub(crate) struct RPCForeignCallExecutor {
+    /// A randomly generated id for this `DefaultForeignCallExecutor`.
+    ///
+    /// This is used so that a single `external_resolver` can distinguish between requests from multiple
+    /// instantiations of `DefaultForeignCallExecutor`.
+    id: u64,
+    /// JSON RPC client to resolve foreign calls
+    external_resolver: Client,
+    /// Root path to the program or workspace in execution.
+    root_path: Option<PathBuf>,
+    /// Name of the package in execution
+    package_name: Option<String>,
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+struct ResolveForeignCallRequest<F> {
+    /// A session ID which allows the external RPC server to link this foreign call request to other foreign calls
+    /// for the same program execution.
+    ///
+    /// This is intended to allow a single RPC server to maintain state related to multiple program executions being
+    /// performed in parallel.
+    session_id: u64,
+
+    #[serde(flatten)]
+    /// The foreign call which the external RPC server is to provide a response for.
+    function_call: ForeignCallWaitInfo<F>,
+
+    #[serde(skip_serializing_if = "Option::is_none")]
+    /// Root path to the program or workspace in execution.
+    root_path: Option<String>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    /// Name of the package in execution
+    package_name: Option<String>,
+}
+
+impl RPCForeignCallExecutor {
+    pub(crate) fn new(
+        resolver_url: &str,
+        id: u64,
+        root_path: Option<PathBuf>,
+        package_name: Option<String>,
+    ) -> Self {
+        let mut transport_builder =
+            Builder::new().url(resolver_url).expect("Invalid oracle resolver URL");
+
+        if let Some(Ok(timeout)) =
+            std::env::var("NARGO_FOREIGN_CALL_TIMEOUT").ok().map(|timeout| timeout.parse())
+        {
+            let timeout_duration = std::time::Duration::from_millis(timeout);
+            transport_builder = transport_builder.timeout(timeout_duration);
+        };
+        let oracle_resolver = Client::with_transport(transport_builder.build());
+
+        RPCForeignCallExecutor { external_resolver: oracle_resolver, id, root_path, package_name }
+    }
+}
+
+impl<F: AcirField + Serialize + for<'a> Deserialize<'a>> ForeignCallExecutor<F>
+    for RPCForeignCallExecutor
+{
+    fn execute(
+        &mut self,
+        foreign_call: &ForeignCallWaitInfo<F>,
+    ) -> Result<ForeignCallResult<F>, ForeignCallError> {
+        let encoded_params = vec![build_json_rpc_arg(ResolveForeignCallRequest {
+            session_id: self.id,
+            function_call: foreign_call.clone(),
+            root_path: self.root_path.clone().map(|path| path.to_str().unwrap().to_string()),
+            package_name: self.package_name.clone(),
+        })];
+
+        let req = self.external_resolver.build_request("resolve_foreign_call", &encoded_params);
+
+        let response = self.external_resolver.send_request(req)?;
+
+        let parsed_response: ForeignCallResult<F> = response.result()?;
+
+        Ok(parsed_response)
+    }
+}
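For reference, because of `#[serde(flatten)]` and the `skip_serializing_if` attributes, a request to the external resolver carries the session id and the foreign call's own fields side by side in a single JSON object. A sketch of that wire shape using simplified stand-in types (the real `ForeignCallParam` encoding is richer than plain strings, so treat this as illustrative only):

use serde::Serialize;

// Simplified stand-ins for ForeignCallWaitInfo / ResolveForeignCallRequest,
// just to show the JSON shape produced by #[serde(flatten)].
#[derive(Serialize)]
struct FunctionCall {
    function: String,
    inputs: Vec<Vec<String>>,
}

#[derive(Serialize)]
struct Request {
    session_id: u64,
    #[serde(flatten)]
    function_call: FunctionCall,
    #[serde(skip_serializing_if = "Option::is_none")]
    root_path: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    package_name: Option<String>,
}

fn main() {
    let req = Request {
        session_id: 1,
        function_call: FunctionCall { function: "echo".into(), inputs: vec![vec!["0x01".into()]] },
        root_path: None,
        package_name: None,
    };
    // Prints: {"session_id":1,"function":"echo","inputs":[["0x01"]]}
    // Note that root_path and package_name are omitted entirely when None.
    println!("{}", serde_json::to_string(&req).unwrap());
}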
+
+#[cfg(test)]
+mod tests {
+    use acvm::{
+        acir::brillig::ForeignCallParam, brillig_vm::brillig::ForeignCallResult,
+        pwg::ForeignCallWaitInfo, FieldElement,
+    };
+    use jsonrpc_core::Result as RpcResult;
+    use jsonrpc_derive::rpc;
+    use jsonrpc_http_server::{Server, ServerBuilder};
+
+    use super::{ForeignCallExecutor, RPCForeignCallExecutor, ResolveForeignCallRequest};
+
+    #[allow(unreachable_pub)]
+    #[rpc]
+    pub trait OracleResolver {
+        #[rpc(name = "resolve_foreign_call")]
+        fn resolve_foreign_call(
+            &self,
+            req: ResolveForeignCallRequest<FieldElement>,
+        ) -> RpcResult<ForeignCallResult<FieldElement>>;
+    }
+
+    struct OracleResolverImpl;
+
+    impl OracleResolverImpl {
+        fn echo(&self, param: ForeignCallParam<FieldElement>) -> ForeignCallResult<FieldElement> {
+            vec![param].into()
+        }
+
+        fn sum(&self, array: ForeignCallParam<FieldElement>) -> ForeignCallResult<FieldElement> {
+            let mut res: FieldElement = 0_usize.into();
+
+            for value in array.fields() {
+                res += value;
+            }
+
+            res.into()
+        }
+    }
+
+    impl OracleResolver for OracleResolverImpl {
+        fn resolve_foreign_call(
+            &self,
+            req: ResolveForeignCallRequest<FieldElement>,
+        ) -> RpcResult<ForeignCallResult<FieldElement>> {
+            let response = match req.function_call.function.as_str() {
+                "sum" => self.sum(req.function_call.inputs[0].clone()),
+                "echo" => self.echo(req.function_call.inputs[0].clone()),
+                "id" => FieldElement::from(req.session_id as u128).into(),
+
+                _ => panic!("unexpected foreign call"),
+            };
+            Ok(response)
+        }
+    }
+
+    fn build_oracle_server() -> (Server, String) {
+        let mut io = jsonrpc_core::IoHandler::new();
+        io.extend_with(OracleResolverImpl.to_delegate());
+
+        // Choosing port 0 results in a random port being assigned.
+        let server = ServerBuilder::new(io)
+            .start_http(&"127.0.0.1:0".parse().expect("Invalid address"))
+            .expect("Could not start server");
+
+        let url = format!("http://{}", server.address());
+        (server, url)
+    }
+
+    #[test]
+    fn test_oracle_resolver_echo() {
+        let (server, url) = build_oracle_server();
+
+        let mut executor = RPCForeignCallExecutor::new(&url, 1, None, None);
+
+        let foreign_call: ForeignCallWaitInfo<FieldElement> = ForeignCallWaitInfo {
+            function: "echo".to_string(),
+            inputs: vec![ForeignCallParam::Single(1_u128.into())],
+        };
+
+        let result = executor.execute(&foreign_call);
+        assert_eq!(result.unwrap(), ForeignCallResult { values: foreign_call.inputs });
+
+        server.close();
+    }
+
+    #[test]
+    fn test_oracle_resolver_sum() {
+        let (server, url) = build_oracle_server();
+
+        let mut executor = RPCForeignCallExecutor::new(&url, 2, None, None);
+
+        let foreign_call: ForeignCallWaitInfo<FieldElement> = ForeignCallWaitInfo {
+            function: "sum".to_string(),
+            inputs: vec![ForeignCallParam::Array(vec![1_usize.into(), 2_usize.into()])],
+        };
+
+        let result = executor.execute(&foreign_call);
+        assert_eq!(result.unwrap(), FieldElement::from(3_usize).into());
+
+        server.close();
+    }
+
+    #[test]
+    fn foreign_call_executor_id_is_persistent() {
+        let (server, url) = build_oracle_server();
+
+        let mut executor = RPCForeignCallExecutor::new(&url, 3, None, None);
+
+        let foreign_call: ForeignCallWaitInfo<FieldElement> =
+            ForeignCallWaitInfo { function: "id".to_string(), inputs: Vec::new() };
+
+        let result_1 = executor.execute(&foreign_call).unwrap();
+        let result_2 = executor.execute(&foreign_call).unwrap();
+        assert_eq!(result_1, result_2);
+
+        server.close();
+    }
+
+    #[test]
+    fn oracle_resolver_rpc_can_distinguish_executors() {
+        let (server, url) = build_oracle_server();
+
+        let mut executor_1 = RPCForeignCallExecutor::new(&url, 4, None, None);
+        let mut executor_2 = RPCForeignCallExecutor::new(&url, 5, None, None);
+
+        let foreign_call: ForeignCallWaitInfo<FieldElement> =
+            ForeignCallWaitInfo { function: "id".to_string(), inputs: Vec::new() };
+
+        let result_1 = executor_1.execute(&foreign_call).unwrap();
+        let result_2 = executor_2.execute(&foreign_call).unwrap();
+        assert_ne!(result_1, result_2);
+
+        server.close();
+    }
+}
diff --git a/noir/noir-repo/tooling/nargo/src/lib.rs b/noir/noir-repo/tooling/nargo/src/lib.rs
index 88f07e0c2925..74b7f54d860d 100644
--- a/noir/noir-repo/tooling/nargo/src/lib.rs
+++ b/noir/noir-repo/tooling/nargo/src/lib.rs
@@ -9,6 +9,7 @@
 pub mod constants;
 pub mod errors;
+pub mod foreign_calls;
 pub mod ops;
 pub mod package;
 pub mod workspace;
diff --git a/noir/noir-repo/tooling/nargo/src/ops/check.rs b/noir/noir-repo/tooling/nargo/src/ops/check.rs
index 14d629ab0f68..707353ccdadf 100644
--- a/noir/noir-repo/tooling/nargo/src/ops/check.rs
+++ b/noir/noir-repo/tooling/nargo/src/ops/check.rs
@@ -2,8 +2,8 @@
 use acvm::compiler::CircuitSimulator;
 use noirc_driver::{CompiledProgram, ErrorsAndWarnings};
 use noirc_errors::{CustomDiagnostic, FileDiagnostic};
 
+/// Run each function through a circuit simulator to check that they are solvable.
pub fn check_program(compiled_program: &CompiledProgram) -> Result<(), ErrorsAndWarnings> { - // Check if the program is solvable for (i, circuit) in compiled_program.program.functions.iter().enumerate() { let mut simulator = CircuitSimulator::default(); if !simulator.check_circuit(circuit) { diff --git a/noir/noir-repo/tooling/nargo/src/ops/execute.rs b/noir/noir-repo/tooling/nargo/src/ops/execute.rs index 09ef554d2aa5..57116ec2efd9 100644 --- a/noir/noir-repo/tooling/nargo/src/ops/execute.rs +++ b/noir/noir-repo/tooling/nargo/src/ops/execute.rs @@ -10,10 +10,9 @@ use acvm::{acir::circuit::Circuit, acir::native_types::WitnessMap}; use acvm::{AcirField, BlackBoxFunctionSolver}; use crate::errors::ExecutionError; +use crate::foreign_calls::ForeignCallExecutor; use crate::NargoError; -use super::foreign_calls::ForeignCallExecutor; - struct ProgramExecutor<'a, F, B: BlackBoxFunctionSolver, E: ForeignCallExecutor> { functions: &'a [Circuit], diff --git a/noir/noir-repo/tooling/nargo/src/ops/foreign_calls.rs b/noir/noir-repo/tooling/nargo/src/ops/foreign_calls.rs deleted file mode 100644 index 30785949a46e..000000000000 --- a/noir/noir-repo/tooling/nargo/src/ops/foreign_calls.rs +++ /dev/null @@ -1,494 +0,0 @@ -use std::path::PathBuf; - -use acvm::{ - acir::brillig::{ForeignCallParam, ForeignCallResult}, - pwg::ForeignCallWaitInfo, - AcirField, -}; -use jsonrpc::{arg as build_json_rpc_arg, minreq_http::Builder, Client}; -use noirc_printable_type::{decode_string_value, ForeignCallError, PrintableValueDisplay}; -use rand::Rng; -use serde::{Deserialize, Serialize}; - -pub trait ForeignCallExecutor { - fn execute( - &mut self, - foreign_call: &ForeignCallWaitInfo, - ) -> Result, ForeignCallError>; -} - -/// This enumeration represents the Brillig foreign calls that are natively supported by nargo. -/// After resolution of a foreign call, nargo will restart execution of the ACVM -pub enum ForeignCall { - Print, - CreateMock, - SetMockParams, - GetMockLastParams, - SetMockReturns, - SetMockTimes, - ClearMock, -} - -impl std::fmt::Display for ForeignCall { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "{}", self.name()) - } -} - -impl ForeignCall { - pub(crate) fn name(&self) -> &'static str { - match self { - ForeignCall::Print => "print", - ForeignCall::CreateMock => "create_mock", - ForeignCall::SetMockParams => "set_mock_params", - ForeignCall::GetMockLastParams => "get_mock_last_params", - ForeignCall::SetMockReturns => "set_mock_returns", - ForeignCall::SetMockTimes => "set_mock_times", - ForeignCall::ClearMock => "clear_mock", - } - } - - pub(crate) fn lookup(op_name: &str) -> Option { - match op_name { - "print" => Some(ForeignCall::Print), - "create_mock" => Some(ForeignCall::CreateMock), - "set_mock_params" => Some(ForeignCall::SetMockParams), - "get_mock_last_params" => Some(ForeignCall::GetMockLastParams), - "set_mock_returns" => Some(ForeignCall::SetMockReturns), - "set_mock_times" => Some(ForeignCall::SetMockTimes), - "clear_mock" => Some(ForeignCall::ClearMock), - _ => None, - } - } -} - -/// This struct represents an oracle mock. It can be used for testing programs that use oracles. 
-#[derive(Debug, PartialEq, Eq, Clone)] -struct MockedCall { - /// The id of the mock, used to update or remove it - id: usize, - /// The oracle it's mocking - name: String, - /// Optionally match the parameters - params: Option>>, - /// The parameters with which the mock was last called - last_called_params: Option>>, - /// The result to return when this mock is called - result: ForeignCallResult, - /// How many times should this mock be called before it is removed - times_left: Option, -} - -impl MockedCall { - fn new(id: usize, name: String) -> Self { - Self { - id, - name, - params: None, - last_called_params: None, - result: ForeignCallResult { values: vec![] }, - times_left: None, - } - } -} - -impl MockedCall { - fn matches(&self, name: &str, params: &[ForeignCallParam]) -> bool { - self.name == name && (self.params.is_none() || self.params.as_deref() == Some(params)) - } -} - -#[derive(Debug, Default)] -pub struct DefaultForeignCallExecutor { - /// A randomly generated id for this `DefaultForeignCallExecutor`. - /// - /// This is used so that a single `external_resolver` can distinguish between requests from multiple - /// instantiations of `DefaultForeignCallExecutor`. - id: u64, - - /// Mocks have unique ids used to identify them in Noir, allowing to update or remove them. - last_mock_id: usize, - /// The registered mocks - mocked_responses: Vec>, - /// Whether to print [`ForeignCall::Print`] output. - show_output: bool, - /// JSON RPC client to resolve foreign calls - external_resolver: Option, - /// Root path to the program or workspace in execution. - root_path: Option, - /// Name of the package in execution - package_name: Option, -} - -#[derive(Debug, Serialize, Deserialize)] -struct ResolveForeignCallRequest { - /// A session ID which allows the external RPC server to link this foreign call request to other foreign calls - /// for the same program execution. - /// - /// This is intended to allow a single RPC server to maintain state related to multiple program executions being - /// performed in parallel. - session_id: u64, - - #[serde(flatten)] - /// The foreign call which the external RPC server is to provide a response for. - function_call: ForeignCallWaitInfo, - - #[serde(skip_serializing_if = "Option::is_none")] - /// Root path to the program or workspace in execution. 
- root_path: Option, - #[serde(skip_serializing_if = "Option::is_none")] - /// Name of the package in execution - package_name: Option, -} - -impl DefaultForeignCallExecutor { - pub fn new( - show_output: bool, - resolver_url: Option<&str>, - root_path: Option, - package_name: Option, - ) -> Self { - let oracle_resolver = resolver_url.map(|resolver_url| { - let mut transport_builder = - Builder::new().url(resolver_url).expect("Invalid oracle resolver URL"); - - if let Some(Ok(timeout)) = - std::env::var("NARGO_FOREIGN_CALL_TIMEOUT").ok().map(|timeout| timeout.parse()) - { - let timeout_duration = std::time::Duration::from_millis(timeout); - transport_builder = transport_builder.timeout(timeout_duration); - }; - Client::with_transport(transport_builder.build()) - }); - DefaultForeignCallExecutor { - show_output, - external_resolver: oracle_resolver, - id: rand::thread_rng().gen(), - mocked_responses: Vec::new(), - last_mock_id: 0, - root_path, - package_name, - } - } -} - -impl DefaultForeignCallExecutor { - fn extract_mock_id( - foreign_call_inputs: &[ForeignCallParam], - ) -> Result<(usize, &[ForeignCallParam]), ForeignCallError> { - let (id, params) = - foreign_call_inputs.split_first().ok_or(ForeignCallError::MissingForeignCallInputs)?; - let id = - usize::try_from(id.unwrap_field().try_to_u64().expect("value does not fit into u64")) - .expect("value does not fit into usize"); - Ok((id, params)) - } - - fn find_mock_by_id(&self, id: usize) -> Option<&MockedCall> { - self.mocked_responses.iter().find(|response| response.id == id) - } - - fn find_mock_by_id_mut(&mut self, id: usize) -> Option<&mut MockedCall> { - self.mocked_responses.iter_mut().find(|response| response.id == id) - } - - fn parse_string(param: &ForeignCallParam) -> String { - let fields: Vec<_> = param.fields().to_vec(); - decode_string_value(&fields) - } - - fn execute_print(foreign_call_inputs: &[ForeignCallParam]) -> Result<(), ForeignCallError> { - let skip_newline = foreign_call_inputs[0].unwrap_field().is_zero(); - - let foreign_call_inputs = - foreign_call_inputs.split_first().ok_or(ForeignCallError::MissingForeignCallInputs)?.1; - let display_string = Self::format_printable_value(foreign_call_inputs, skip_newline)?; - - print!("{display_string}"); - - Ok(()) - } - - fn format_printable_value( - foreign_call_inputs: &[ForeignCallParam], - skip_newline: bool, - ) -> Result { - let display_values: PrintableValueDisplay = foreign_call_inputs.try_into()?; - - let result = format!("{display_values}{}", if skip_newline { "" } else { "\n" }); - - Ok(result) - } -} - -impl Deserialize<'a>> ForeignCallExecutor - for DefaultForeignCallExecutor -{ - fn execute( - &mut self, - foreign_call: &ForeignCallWaitInfo, - ) -> Result, ForeignCallError> { - let foreign_call_name = foreign_call.function.as_str(); - match ForeignCall::lookup(foreign_call_name) { - Some(ForeignCall::Print) => { - if self.show_output { - Self::execute_print(&foreign_call.inputs)?; - } - Ok(ForeignCallResult::default()) - } - Some(ForeignCall::CreateMock) => { - let mock_oracle_name = Self::parse_string(&foreign_call.inputs[0]); - assert!(ForeignCall::lookup(&mock_oracle_name).is_none()); - let id = self.last_mock_id; - self.mocked_responses.push(MockedCall::new(id, mock_oracle_name)); - self.last_mock_id += 1; - - Ok(F::from(id).into()) - } - Some(ForeignCall::SetMockParams) => { - let (id, params) = Self::extract_mock_id(&foreign_call.inputs)?; - self.find_mock_by_id_mut(id) - .unwrap_or_else(|| panic!("Unknown mock id {}", id)) - .params = 
Some(params.to_vec()); - - Ok(ForeignCallResult::default()) - } - Some(ForeignCall::GetMockLastParams) => { - let (id, _) = Self::extract_mock_id(&foreign_call.inputs)?; - let mock = - self.find_mock_by_id(id).unwrap_or_else(|| panic!("Unknown mock id {}", id)); - - let last_called_params = mock - .last_called_params - .clone() - .unwrap_or_else(|| panic!("Mock {} was never called", mock.name)); - - Ok(last_called_params.into()) - } - Some(ForeignCall::SetMockReturns) => { - let (id, params) = Self::extract_mock_id(&foreign_call.inputs)?; - self.find_mock_by_id_mut(id) - .unwrap_or_else(|| panic!("Unknown mock id {}", id)) - .result = ForeignCallResult { values: params.to_vec() }; - - Ok(ForeignCallResult::default()) - } - Some(ForeignCall::SetMockTimes) => { - let (id, params) = Self::extract_mock_id(&foreign_call.inputs)?; - let times = - params[0].unwrap_field().try_to_u64().expect("Invalid bit size of times"); - - self.find_mock_by_id_mut(id) - .unwrap_or_else(|| panic!("Unknown mock id {}", id)) - .times_left = Some(times); - - Ok(ForeignCallResult::default()) - } - Some(ForeignCall::ClearMock) => { - let (id, _) = Self::extract_mock_id(&foreign_call.inputs)?; - self.mocked_responses.retain(|response| response.id != id); - Ok(ForeignCallResult::default()) - } - None => { - let mock_response_position = self - .mocked_responses - .iter() - .position(|response| response.matches(foreign_call_name, &foreign_call.inputs)); - - if let Some(response_position) = mock_response_position { - // If the program has registered a mocked response to this oracle call then we prefer responding - // with that. - - let mock = self - .mocked_responses - .get_mut(response_position) - .expect("Invalid position of mocked response"); - - mock.last_called_params = Some(foreign_call.inputs.clone()); - - let result = mock.result.values.clone(); - - if let Some(times_left) = &mut mock.times_left { - *times_left -= 1; - if *times_left == 0 { - self.mocked_responses.remove(response_position); - } - } - - Ok(result.into()) - } else if let Some(external_resolver) = &self.external_resolver { - // If the user has registered an external resolver then we forward any remaining oracle calls there. - - let encoded_params = vec![build_json_rpc_arg(ResolveForeignCallRequest { - session_id: self.id, - function_call: foreign_call.clone(), - root_path: self - .root_path - .clone() - .map(|path| path.to_str().unwrap().to_string()), - package_name: self.package_name.clone(), - })]; - - let req = - external_resolver.build_request("resolve_foreign_call", &encoded_params); - - let response = external_resolver.send_request(req)?; - - let parsed_response: ForeignCallResult = response.result()?; - - Ok(parsed_response) - } else { - // If there's no registered mock oracle response and no registered resolver then we cannot - // return a correct response to the ACVM. The best we can do is to return an empty response, - // this allows us to ignore any foreign calls which exist solely to pass information from inside - // the circuit to the environment (e.g. custom logging) as the execution will still be able to progress. - // - // We optimistically return an empty response for all oracle calls as the ACVM will error - // should a response have been required. 
- Ok(ForeignCallResult::default()) - } - } - } - } -} - -#[cfg(test)] -mod tests { - use acvm::{ - acir::brillig::ForeignCallParam, brillig_vm::brillig::ForeignCallResult, - pwg::ForeignCallWaitInfo, FieldElement, - }; - use jsonrpc_core::Result as RpcResult; - use jsonrpc_derive::rpc; - use jsonrpc_http_server::{Server, ServerBuilder}; - - use crate::ops::{DefaultForeignCallExecutor, ForeignCallExecutor}; - - use super::ResolveForeignCallRequest; - - #[allow(unreachable_pub)] - #[rpc] - pub trait OracleResolver { - #[rpc(name = "resolve_foreign_call")] - fn resolve_foreign_call( - &self, - req: ResolveForeignCallRequest, - ) -> RpcResult>; - } - - struct OracleResolverImpl; - - impl OracleResolverImpl { - fn echo(&self, param: ForeignCallParam) -> ForeignCallResult { - vec![param].into() - } - - fn sum(&self, array: ForeignCallParam) -> ForeignCallResult { - let mut res: FieldElement = 0_usize.into(); - - for value in array.fields() { - res += value; - } - - res.into() - } - } - - impl OracleResolver for OracleResolverImpl { - fn resolve_foreign_call( - &self, - req: ResolveForeignCallRequest, - ) -> RpcResult> { - let response = match req.function_call.function.as_str() { - "sum" => self.sum(req.function_call.inputs[0].clone()), - "echo" => self.echo(req.function_call.inputs[0].clone()), - "id" => FieldElement::from(req.session_id as u128).into(), - - _ => panic!("unexpected foreign call"), - }; - Ok(response) - } - } - - fn build_oracle_server() -> (Server, String) { - let mut io = jsonrpc_core::IoHandler::new(); - io.extend_with(OracleResolverImpl.to_delegate()); - - // Choosing port 0 results in a random port being assigned. - let server = ServerBuilder::new(io) - .start_http(&"127.0.0.1:0".parse().expect("Invalid address")) - .expect("Could not start server"); - - let url = format!("http://{}", server.address()); - (server, url) - } - - #[test] - fn test_oracle_resolver_echo() { - let (server, url) = build_oracle_server(); - - let mut executor = - DefaultForeignCallExecutor::::new(false, Some(&url), None, None); - - let foreign_call = ForeignCallWaitInfo { - function: "echo".to_string(), - inputs: vec![ForeignCallParam::Single(1_u128.into())], - }; - - let result = executor.execute(&foreign_call); - assert_eq!(result.unwrap(), ForeignCallResult { values: foreign_call.inputs }); - - server.close(); - } - - #[test] - fn test_oracle_resolver_sum() { - let (server, url) = build_oracle_server(); - - let mut executor = DefaultForeignCallExecutor::new(false, Some(&url), None, None); - - let foreign_call = ForeignCallWaitInfo { - function: "sum".to_string(), - inputs: vec![ForeignCallParam::Array(vec![1_usize.into(), 2_usize.into()])], - }; - - let result = executor.execute(&foreign_call); - assert_eq!(result.unwrap(), FieldElement::from(3_usize).into()); - - server.close(); - } - - #[test] - fn foreign_call_executor_id_is_persistent() { - let (server, url) = build_oracle_server(); - - let mut executor = - DefaultForeignCallExecutor::::new(false, Some(&url), None, None); - - let foreign_call = ForeignCallWaitInfo { function: "id".to_string(), inputs: Vec::new() }; - - let result_1 = executor.execute(&foreign_call).unwrap(); - let result_2 = executor.execute(&foreign_call).unwrap(); - assert_eq!(result_1, result_2); - - server.close(); - } - - #[test] - fn oracle_resolver_rpc_can_distinguish_executors() { - let (server, url) = build_oracle_server(); - - let mut executor_1 = - DefaultForeignCallExecutor::::new(false, Some(&url), None, None); - let mut executor_2 = - 
DefaultForeignCallExecutor::::new(false, Some(&url), None, None); - - let foreign_call = ForeignCallWaitInfo { function: "id".to_string(), inputs: Vec::new() }; - - let result_1 = executor_1.execute(&foreign_call).unwrap(); - let result_2 = executor_2.execute(&foreign_call).unwrap(); - assert_ne!(result_1, result_2); - - server.close(); - } -} diff --git a/noir/noir-repo/tooling/nargo/src/ops/mod.rs b/noir/noir-repo/tooling/nargo/src/ops/mod.rs index f70577a14f12..04efeb5a9ec6 100644 --- a/noir/noir-repo/tooling/nargo/src/ops/mod.rs +++ b/noir/noir-repo/tooling/nargo/src/ops/mod.rs @@ -4,7 +4,6 @@ pub use self::compile::{ compile_workspace, report_errors, }; pub use self::execute::{execute_program, execute_program_with_profiling}; -pub use self::foreign_calls::{DefaultForeignCallExecutor, ForeignCall, ForeignCallExecutor}; pub use self::optimize::{optimize_contract, optimize_program}; pub use self::transform::{transform_contract, transform_program}; @@ -13,7 +12,6 @@ pub use self::test::{run_test, TestStatus}; mod check; mod compile; mod execute; -mod foreign_calls; mod optimize; mod test; mod transform; diff --git a/noir/noir-repo/tooling/nargo/src/ops/test.rs b/noir/noir-repo/tooling/nargo/src/ops/test.rs index 370a4235f61b..e258627b522e 100644 --- a/noir/noir-repo/tooling/nargo/src/ops/test.rs +++ b/noir/noir-repo/tooling/nargo/src/ops/test.rs @@ -1,27 +1,42 @@ use std::path::PathBuf; use acvm::{ - acir::native_types::{WitnessMap, WitnessStack}, - BlackBoxFunctionSolver, FieldElement, + acir::{ + brillig::ForeignCallResult, + native_types::{WitnessMap, WitnessStack}, + }, + pwg::ForeignCallWaitInfo, + AcirField, BlackBoxFunctionSolver, FieldElement, }; use noirc_abi::Abi; use noirc_driver::{compile_no_check, CompileError, CompileOptions}; use noirc_errors::{debug_info::DebugInfo, FileDiagnostic}; use noirc_frontend::hir::{def_map::TestFunction, Context}; +use noirc_printable_type::ForeignCallError; +use rand::Rng; +use serde::{Deserialize, Serialize}; -use crate::{errors::try_to_diagnose_runtime_error, NargoError}; +use crate::{ + errors::try_to_diagnose_runtime_error, + foreign_calls::{ + mocker::MockForeignCallExecutor, print::PrintForeignCallExecutor, + rpc::RPCForeignCallExecutor, ForeignCall, ForeignCallExecutor, + }, + NargoError, +}; -use super::{execute_program, DefaultForeignCallExecutor}; +use super::execute_program; pub enum TestStatus { Pass, Fail { message: String, error_diagnostic: Option }, + Skipped, CompileError(FileDiagnostic), } impl TestStatus { pub fn failed(&self) -> bool { - !matches!(self, TestStatus::Pass) + !matches!(self, TestStatus::Pass | TestStatus::Skipped) } } @@ -48,23 +63,42 @@ pub fn run_test>( if test_function_has_no_arguments { // Run the backend to ensure the PWG evaluates functions like std::hash::pedersen, // otherwise constraints involving these expressions will not error. 
+            let mut foreign_call_executor = TestForeignCallExecutor::new(
+                show_output,
+                foreign_call_resolver_url,
+                root_path,
+                package_name,
+            );
+
             let circuit_execution = execute_program(
                 &compiled_program.program,
                 WitnessMap::new(),
                 blackbox_solver,
-                &mut DefaultForeignCallExecutor::new(
-                    show_output,
-                    foreign_call_resolver_url,
-                    root_path,
-                    package_name,
-                ),
+                &mut foreign_call_executor,
             );
-            test_status_program_compile_pass(
+
+            let status = test_status_program_compile_pass(
                 test_function,
                 compiled_program.abi,
                 compiled_program.debug,
                 circuit_execution,
-            )
+            );
+
+            let ignore_foreign_call_failures =
+                std::env::var("NARGO_IGNORE_TEST_FAILURES_FROM_FOREIGN_CALLS")
+                    .is_ok_and(|var| &var == "true");
+
+            if let TestStatus::Fail { .. } = status {
+                if ignore_foreign_call_failures
+                    && foreign_call_executor.encountered_unknown_foreign_call
+                {
+                    TestStatus::Skipped
+                } else {
+                    status
+                }
+            } else {
+                status
+            }
         } else {
             #[cfg(target_arch = "wasm32")]
             {
@@ -90,7 +124,7 @@ pub fn run_test<B: BlackBoxFunctionSolver<FieldElement>>(
                 program,
                 initial_witness,
                 blackbox_solver,
-                &mut DefaultForeignCallExecutor::<FieldElement>::new(
+                &mut TestForeignCallExecutor::<FieldElement>::new(
                     false,
                     foreign_call_resolver_url,
                     root_path.clone(),
@@ -215,3 +249,93 @@ fn check_expected_failure_message(
         error_diagnostic,
     }
 }
+
+/// A specialized foreign call executor which tracks whether it has encountered any unknown foreign calls
+struct TestForeignCallExecutor<F> {
+    /// The executor for any [`ForeignCall::Print`] calls.
+    printer: Option<PrintForeignCallExecutor>,
+    mocker: MockForeignCallExecutor<F>,
+    external: Option<RPCForeignCallExecutor>,
+
+    encountered_unknown_foreign_call: bool,
+}
+
+impl<F: Default> TestForeignCallExecutor<F> {
+    fn new(
+        show_output: bool,
+        resolver_url: Option<&str>,
+        root_path: Option<PathBuf>,
+        package_name: Option<String>,
+    ) -> Self {
+        let id = rand::thread_rng().gen();
+        let printer = if show_output { Some(PrintForeignCallExecutor) } else { None };
+        let external_resolver = resolver_url.map(|resolver_url| {
+            RPCForeignCallExecutor::new(resolver_url, id, root_path, package_name)
+        });
+        TestForeignCallExecutor {
+            printer,
+            mocker: MockForeignCallExecutor::default(),
+            external: external_resolver,
+            encountered_unknown_foreign_call: false,
+        }
+    }
+}
+
+impl<F: AcirField + Serialize + for<'a> Deserialize<'a>> ForeignCallExecutor<F>
+    for TestForeignCallExecutor<F>
+{
+    fn execute(
+        &mut self,
+        foreign_call: &ForeignCallWaitInfo<F>,
+    ) -> Result<ForeignCallResult<F>, ForeignCallError> {
+        // If the circuit has reached a new foreign call opcode then it can't have failed from any previous unknown foreign calls.
+        self.encountered_unknown_foreign_call = false;
+
+        let foreign_call_name = foreign_call.function.as_str();
+        match ForeignCall::lookup(foreign_call_name) {
+            Some(ForeignCall::Print) => {
+                if let Some(printer) = &mut self.printer {
+                    printer.execute(foreign_call)
+                } else {
+                    Ok(ForeignCallResult::default())
+                }
+            }
+
+            Some(
+                ForeignCall::CreateMock
+                | ForeignCall::SetMockParams
+                | ForeignCall::GetMockLastParams
+                | ForeignCall::SetMockReturns
+                | ForeignCall::SetMockTimes
+                | ForeignCall::ClearMock,
+            ) => self.mocker.execute(foreign_call),
+
+            None => {
+                // First check if there's any defined mock responses for this foreign call.
+                match self.mocker.execute(foreign_call) {
+                    Err(ForeignCallError::NoHandler(_)) => (),
+                    response_or_error => return response_or_error,
+                };
+
+                if let Some(external_resolver) = &mut self.external {
+                    // If the user has registered an external resolver then we forward any remaining oracle calls there.
+                    match external_resolver.execute(foreign_call) {
+                        Err(ForeignCallError::NoHandler(_)) => (),
+                        response_or_error => return response_or_error,
+                    };
+                }
+
+                self.encountered_unknown_foreign_call = true;
+
+                // If all executors have no handler for the given foreign call then we cannot
+                // return a correct response to the ACVM. The best we can do is to return an empty response,
+                // this allows us to ignore any foreign calls which exist solely to pass information from inside
+                // the circuit to the environment (e.g. custom logging) as the execution will still be able to progress.
+                //
+                // We optimistically return an empty response for all oracle calls as the ACVM will error
+                // should a response have been required.
+                Ok(ForeignCallResult::default())
+            }
+        }
+    }
+}
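One consequence of the executor above is worth restating in isolation: a failing test is only downgraded to `Skipped` when the user has opted in via `NARGO_IGNORE_TEST_FAILURES_FROM_FOREIGN_CALLS` and the execution actually hit a foreign call that no handler could answer. A sketch of that gating (the free function is illustrative, not part of the patch):

use nargo::ops::TestStatus;

// Illustrative restatement of the skip decision in `run_test`: a failure is
// only downgraded when the opt-in env var is set *and* the executor saw a
// foreign call that neither the mocker nor the external resolver handled.
fn final_status(status: TestStatus, encountered_unknown_foreign_call: bool) -> TestStatus {
    let ignore = std::env::var("NARGO_IGNORE_TEST_FAILURES_FROM_FOREIGN_CALLS")
        .is_ok_and(|var| var == "true");
    match status {
        TestStatus::Fail { .. } if ignore && encountered_unknown_foreign_call => {
            TestStatus::Skipped
        }
        other => other,
    }
}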
diff --git a/noir/noir-repo/tooling/nargo/src/ops/transform.rs b/noir/noir-repo/tooling/nargo/src/ops/transform.rs
index 9255ac3e0ece..fdda368d150c 100644
--- a/noir/noir-repo/tooling/nargo/src/ops/transform.rs
+++ b/noir/noir-repo/tooling/nargo/src/ops/transform.rs
@@ -6,6 +6,7 @@ use iter_extended::vecmap;
 use noirc_driver::{CompiledContract, CompiledProgram};
 use noirc_errors::debug_info::DebugInfo;
 
+/// Apply ACVM optimizations on the circuit.
 pub fn transform_program(
     mut compiled_program: CompiledProgram,
     expression_width: ExpressionWidth,
@@ -18,6 +19,7 @@
     compiled_program
 }
 
+/// Apply the optimizing transformation on each function in the contract.
 pub fn transform_contract(
     contract: CompiledContract,
     expression_width: ExpressionWidth,
@@ -25,7 +27,6 @@
     let functions = vecmap(contract.functions, |mut func| {
         func.bytecode =
             transform_program_internal(func.bytecode, &mut func.debug, expression_width);
-
         func
     });
diff --git a/noir/noir-repo/tooling/nargo_cli/Cargo.toml b/noir/noir-repo/tooling/nargo_cli/Cargo.toml
index 02e669f5c688..5603b7f4fcaa 100644
--- a/noir/noir-repo/tooling/nargo_cli/Cargo.toml
+++ b/noir/noir-repo/tooling/nargo_cli/Cargo.toml
@@ -25,6 +25,7 @@ toml.workspace = true
 [dependencies]
 clap.workspace = true
 fm.workspace = true
+fxhash.workspace = true
 iter-extended.workspace = true
 nargo.workspace = true
 nargo_fmt.workspace = true
diff --git a/noir/noir-repo/tooling/nargo_cli/benches/criterion.rs b/noir/noir-repo/tooling/nargo_cli/benches/criterion.rs
index 488cbfcd243e..51de97df139b 100644
--- a/noir/noir-repo/tooling/nargo_cli/benches/criterion.rs
+++ b/noir/noir-repo/tooling/nargo_cli/benches/criterion.rs
@@ -115,7 +115,7 @@ fn criterion_test_execution(c: &mut Criterion, test_program_dir: &Path, force_br
     let artifacts = RefCell::new(None);
 
     let mut foreign_call_executor =
-        nargo::ops::DefaultForeignCallExecutor::new(false, None, None, None);
+        nargo::foreign_calls::DefaultForeignCallExecutor::new(false, None, None, None);
 
     c.bench_function(&benchmark_name, |b| {
         b.iter_batched(
diff --git a/noir/noir-repo/tooling/nargo_cli/build.rs b/noir/noir-repo/tooling/nargo_cli/build.rs
index 740e5ed20529..41b3c0c9cf72 100644
--- a/noir/noir-repo/tooling/nargo_cli/build.rs
+++ b/noir/noir-repo/tooling/nargo_cli/build.rs
@@ -60,13 +60,9 @@ const IGNORED_BRILLIG_TESTS: [&str; 11] = [
 ];
 
 /// Tests which aren't expected to work with the default inliner cases.
-const INLINER_MIN_OVERRIDES: [(&str, i64); 2] = [
+const INLINER_MIN_OVERRIDES: [(&str, i64); 1] = [
     // 0 works if PoseidonHasher::write is tagged as `inline_always`, otherwise 22.
     ("eddsa", 0),
-    // (#6583): The RcTracker in the DIE SSA pass is removing inc_rcs that are still needed.
- // This triggers differently depending on the optimization level (although all are wrong), - // so we arbitrarily only run with the inlined versions. - ("reference_counts", 0), ]; /// Some tests are expected to have warnings @@ -213,8 +209,13 @@ fn test_{test_name}(force_brillig: ForceBrillig, inliner_aggressiveness: Inliner nargo.arg("--program-dir").arg(test_program_dir); nargo.arg("{test_command}").arg("--force"); nargo.arg("--inliner-aggressiveness").arg(inliner_aggressiveness.0.to_string()); + if force_brillig.0 {{ nargo.arg("--force-brillig"); + + // Set the maximum increase so that part of the optimization is exercised (it might fail). + nargo.arg("--max-bytecode-increase-percent"); + nargo.arg("50"); }} {test_content} diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/compile_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/compile_cmd.rs index 304988ed5162..ff6009981c70 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/compile_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/compile_cmd.rs @@ -65,6 +65,7 @@ pub(crate) fn run(args: CompileCommand, config: NargoConfig) -> Result<(), CliEr Ok(()) } +/// Continuously recompile the workspace on any Noir file change event. fn watch_workspace(workspace: &Workspace, compile_options: &CompileOptions) -> notify::Result<()> { let (tx, rx) = std::sync::mpsc::channel(); @@ -108,6 +109,8 @@ fn watch_workspace(workspace: &Workspace, compile_options: &CompileOptions) -> n Ok(()) } +/// Parse and compile the entire workspace, then report errors. +/// This is the main entry point used by all other commands that need compilation. pub(super) fn compile_workspace_full( workspace: &Workspace, compile_options: &CompileOptions, @@ -129,6 +132,8 @@ pub(super) fn compile_workspace_full( Ok(()) } +/// Compile binary and contract packages. +/// Returns the merged warnings or errors. fn compile_workspace( file_manager: &FileManager, parsed_files: &ParsedFiles, @@ -144,6 +149,7 @@ fn compile_workspace( // Compile all of the packages in parallel. let program_warnings_or_errors: CompilationResult<()> = compile_programs(file_manager, parsed_files, workspace, &binary_packages, compile_options); + let contract_warnings_or_errors: CompilationResult<()> = compiled_contracts( file_manager, parsed_files, @@ -164,6 +170,7 @@ fn compile_workspace( } } +/// Compile the given binary packages in the workspace. fn compile_programs( file_manager: &FileManager, parsed_files: &ParsedFiles, @@ -171,6 +178,8 @@ fn compile_programs( binary_packages: &[Package], compile_options: &CompileOptions, ) -> CompilationResult<()> { + // Load any existing artifact for a given package, _iff_ it was compiled with the same nargo version. + // The loaded circuit includes backend specific transformations, which might be different from the current target. let load_cached_program = |package| { let program_artifact_path = workspace.package_build_path(package); read_program_from_file(program_artifact_path) @@ -180,19 +189,45 @@ fn compile_programs( }; let compile_package = |package| { + let cached_program = load_cached_program(package); + + // Hash over the entire compiled program, including any post-compile transformations. + // This is used to detect whether `cached_program` is returned by `compile_program`. + let cached_hash = cached_program.as_ref().map(fxhash::hash64); + + // Compile the program, or use the cached artifacts if it matches. 
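// Aside: a compact sketch of the caching decision implemented below. The
// `width_matches` flag stands in for the expression-width check a few lines
// further down; `fxhash::hash64` hashes any `Hash` value, which is presumably
// why this PR derives `Hash` for the `noirc_abi` types that make up a
// `CompiledProgram`.
fn cache_is_still_valid<T: std::hash::Hash>(
    cached_hash: Option<u64>,
    recompiled: &T,
    width_matches: bool,
) -> bool {
    // Equal hashes mean compilation returned the cached artifact unchanged,
    // so the backend-specific transformations (and the re-save) can be
    // skipped, as long as the target width also matches.
    cached_hash == Some(fxhash::hash64(recompiled)) && width_matches
}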
let (program, warnings) = compile_program( file_manager, parsed_files, workspace, package, compile_options, - load_cached_program(package), + cached_program, )?; + // Choose the target width for the final, backend specific transformation. let target_width = get_target_width(package.expression_width, compile_options.expression_width); + + // If the compiled program is the same as the cached one, we don't apply transformations again, unless the target width has changed. + // The transformations might not be idempotent, which would risk creating witnesses that don't work with earlier versions, + // based on which we might have generated a verifier already. + if cached_hash == Some(fxhash::hash64(&program)) { + let width_matches = program + .program + .functions + .iter() + .all(|circuit| circuit.expression_width == target_width); + + if width_matches { + return Ok(((), warnings)); + } + } + // Run ACVM optimizations and set the target width. let program = nargo::ops::transform_program(program, target_width); + // Check solvability. nargo::ops::check_program(&program)?; + // Overwrite the build artifacts with the final circuit, which includes the backend specific transformations. save_program_to_file(&program.into(), &package.name, workspace.target_directory_path()); Ok(((), warnings)) @@ -208,6 +243,7 @@ fn compile_programs( collect_errors(program_results).map(|(_, warnings)| ((), warnings)) } +/// Compile the given contracts in the workspace. fn compiled_contracts( file_manager: &FileManager, parsed_files: &ParsedFiles, diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/execute_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/execute_cmd.rs index 8dc71b1c7e5b..fa95d3123c6f 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/execute_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/execute_cmd.rs @@ -7,7 +7,7 @@ use clap::Args; use nargo::constants::PROVER_INPUT_FILE; use nargo::errors::try_to_diagnose_runtime_error; -use nargo::ops::DefaultForeignCallExecutor; +use nargo::foreign_calls::DefaultForeignCallExecutor; use nargo::package::{CrateName, Package}; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; use noirc_abi::input_parser::{Format, InputValue}; diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/info_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/info_cmd.rs index cf416b1fa5f6..769a1f79d814 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/info_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/info_cmd.rs @@ -4,7 +4,7 @@ use clap::Args; use iter_extended::vecmap; use nargo::{ constants::PROVER_INPUT_FILE, - ops::DefaultForeignCallExecutor, + foreign_calls::DefaultForeignCallExecutor, package::{CrateName, Package}, }; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/test_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/test_cmd.rs index 7b0201226ef9..aa0ee1bb94bb 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/test_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/test_cmd.rs @@ -255,6 +255,12 @@ fn display_test_report( ); } } + TestStatus::Skipped { .. 
} => { + writer + .set_color(ColorSpec::new().set_fg(Some(Color::Yellow))) + .expect("Failed to set color"); + writeln!(writer, "skipped").expect("Failed to write to stderr"); + } TestStatus::CompileError(err) => { noirc_errors::reporter::report_all( file_manager.as_file_map(), diff --git a/noir/noir-repo/tooling/nargo_cli/tests/stdlib-props.rs b/noir/noir-repo/tooling/nargo_cli/tests/stdlib-props.rs index 0013a90b4ff8..86c225831b90 100644 --- a/noir/noir-repo/tooling/nargo_cli/tests/stdlib-props.rs +++ b/noir/noir-repo/tooling/nargo_cli/tests/stdlib-props.rs @@ -2,10 +2,7 @@ use std::{cell::RefCell, collections::BTreeMap, path::Path}; use acvm::{acir::native_types::WitnessStack, AcirField, FieldElement}; use iter_extended::vecmap; -use nargo::{ - ops::{execute_program, DefaultForeignCallExecutor}, - parse_all, -}; +use nargo::{foreign_calls::DefaultForeignCallExecutor, ops::execute_program, parse_all}; use noirc_abi::input_parser::InputValue; use noirc_driver::{ compile_main, file_manager_with_stdlib, prepare_crate, CompilationResult, CompileOptions, @@ -64,6 +61,7 @@ fn prepare_and_compile_snippet( ) -> CompilationResult { let (mut context, root_crate_id) = prepare_snippet(source); let options = CompileOptions { force_brillig, ..Default::default() }; + // TODO: Run nargo::ops::transform_program? compile_main(&mut context, root_crate_id, &options, None) } diff --git a/noir/noir-repo/tooling/nargo_cli/tests/stdlib-tests.rs b/noir/noir-repo/tooling/nargo_cli/tests/stdlib-tests.rs index bdc92e625ab9..99f0c9a2e7fc 100644 --- a/noir/noir-repo/tooling/nargo_cli/tests/stdlib-tests.rs +++ b/noir/noir-repo/tooling/nargo_cli/tests/stdlib-tests.rs @@ -138,6 +138,12 @@ fn display_test_report( ); } } + TestStatus::Skipped { .. } => { + writer + .set_color(ColorSpec::new().set_fg(Some(Color::Yellow))) + .expect("Failed to set color"); + writeln!(writer, "skipped").expect("Failed to write to stderr"); + } TestStatus::CompileError(err) => { noirc_errors::reporter::report_all( file_manager.as_file_map(), diff --git a/noir/noir-repo/tooling/nargo_fmt/src/formatter/expression.rs b/noir/noir-repo/tooling/nargo_fmt/src/formatter/expression.rs index 0730d06ad724..ecc9fab18ced 100644 --- a/noir/noir-repo/tooling/nargo_fmt/src/formatter/expression.rs +++ b/noir/noir-repo/tooling/nargo_fmt/src/formatter/expression.rs @@ -104,11 +104,12 @@ impl<'a, 'b> ChunkFormatter<'a, 'b> { formatter.write_left_paren(); formatter.write_right_paren(); })), - Literal::Bool(_) | Literal::Str(_) | Literal::FmtStr(_) | Literal::RawStr(..) => group - .text(self.chunk(|formatter| { + Literal::Bool(_) | Literal::Str(_) | Literal::FmtStr(_, _) | Literal::RawStr(..) => { + group.text(self.chunk(|formatter| { formatter.write_current_token_as_in_source(); formatter.bump(); - })), + })); + } Literal::Integer(..) 
=> group.text(self.chunk(|formatter| { if formatter.is_at(Token::Minus) { formatter.write_token(Token::Minus); diff --git a/noir/noir-repo/tooling/nargo_toml/Cargo.toml b/noir/noir-repo/tooling/nargo_toml/Cargo.toml index e4766e448595..2bc241538367 100644 --- a/noir/noir-repo/tooling/nargo_toml/Cargo.toml +++ b/noir/noir-repo/tooling/nargo_toml/Cargo.toml @@ -25,3 +25,4 @@ noirc_driver.workspace = true semver = "1.0.20" [dev-dependencies] +test-case.workspace = true diff --git a/noir/noir-repo/tooling/nargo_toml/src/git.rs b/noir/noir-repo/tooling/nargo_toml/src/git.rs index 80e57247ae68..efaed4fabb9a 100644 --- a/noir/noir-repo/tooling/nargo_toml/src/git.rs +++ b/noir/noir-repo/tooling/nargo_toml/src/git.rs @@ -3,16 +3,20 @@ use std::path::PathBuf; /// Creates a unique folder name for a GitHub repo /// by using its URL and tag fn resolve_folder_name(base: &url::Url, tag: &str) -> String { - let mut folder_name = base.domain().unwrap().to_owned(); - folder_name.push_str(base.path()); - folder_name.push_str(tag); - folder_name + let mut folder = PathBuf::from(""); + for part in [base.domain().unwrap(), base.path(), tag] { + folder.push(part.trim_start_matches('/')); + } + folder.to_string_lossy().into_owned() } +/// Path to the `nargo` directory under `$HOME`. fn nargo_crates() -> PathBuf { dirs::home_dir().unwrap().join("nargo") } +/// Target directory to download dependencies into, e.g. +/// `$HOME/nargo/github.com/noir-lang/noir-bignum/v0.1.2` fn git_dep_location(base: &url::Url, tag: &str) -> PathBuf { let folder_name = resolve_folder_name(base, tag); @@ -53,3 +57,19 @@ pub(crate) fn clone_git_repo(url: &str, tag: &str) -> Result { Ok(loc) } + +#[cfg(test)] +mod tests { + use test_case::test_case; + use url::Url; + + use super::resolve_folder_name; + + #[test_case("https://github.com/noir-lang/noir-bignum/"; "with slash")] + #[test_case("https://github.com/noir-lang/noir-bignum"; "without slash")] + fn test_resolve_folder_name(url: &str) { + let tag = "v0.4.2"; + let dir = resolve_folder_name(&Url::parse(url).unwrap(), tag); + assert_eq!(dir, "github.com/noir-lang/noir-bignum/v0.4.2"); + } +} diff --git a/noir/noir-repo/tooling/noirc_abi/proptest-regressions/input_parser/json.txt b/noir/noir-repo/tooling/noirc_abi/proptest-regressions/input_parser/json.txt new file mode 100644 index 000000000000..19de8eeaf489 --- /dev/null +++ b/noir/noir-repo/tooling/noirc_abi/proptest-regressions/input_parser/json.txt @@ -0,0 +1,7 @@ +# Seeds for failure cases proptest has generated in the past. It is +# automatically read and these particular cases re-run before any +# novel cases are generated. +# +# It is recommended to check this file in to source control so that +# everyone who runs the test benefits from these saved cases. +cc b3f9ae88d54944ca274764f4d99a2023d4b0ac09beb89bc599cbba1e45dd3620 # shrinks to (typ, value) = (Integer { sign: Signed, width: 1 }, -1) diff --git a/noir/noir-repo/tooling/noirc_abi/proptest-regressions/input_parser/toml.txt b/noir/noir-repo/tooling/noirc_abi/proptest-regressions/input_parser/toml.txt new file mode 100644 index 000000000000..1448cb67ef14 --- /dev/null +++ b/noir/noir-repo/tooling/noirc_abi/proptest-regressions/input_parser/toml.txt @@ -0,0 +1,9 @@ +# Seeds for failure cases proptest has generated in the past. It is +# automatically read and these particular cases re-run before any +# novel cases are generated. 
+# +# It is recommended to check this file in to source control so that +# everyone who runs the test benefits from these saved cases. +cc 9d200afb8f5c01e3414d24eebe1436a7eef5377a46a9a9235aaa7f81e0b33656 # shrinks to (typ, value) = (Integer { sign: Signed, width: 8 }, -1) +cc 7fd29637e5566d819992185c1a95438e9949a555928a911b3918eed2e3f7a1fd # shrinks to (typ, value) = (Integer { sign: Signed, width: 64 }, -1) +cc 8ecbda39d887674b53ca23a861ac30fbb10c123bb70c57e69b336c86a3d9dea8 # shrinks to (abi, input_map) = (Abi { parameters: [AbiParameter { name: "¡", typ: Struct { path: "�)\u{1b}=�?Ⱥ\u{59424}?{\u{e4d5e}%Ѩ/Q\u{36a17}/*\";\u{b}&iC_\u{d313f}S\u{1b}\u{9dfec}\r/\u{10530d}", fields: [("?p*\"/\u{202e}\u{6f038}\u{537ca}.y@~𘛶?4\u{1b}*", Field), (".Ⱥ/$\u{7f}\u{103c06}%\\\u{202e}][0\u{88479}]\"*~\u{36fd5}\u{5}\u{feff}]{/", Tuple { fields: [String { length: 937 }] }), ("r\u{ac3a5}&:", Boolean), ("$d6🕴/:|�\u{37f8b}\r\u{a13b7}C$𲁹\\&\u{f8712}?\u{db61c}t%\u{57be1}\0", Field), ("/\u{6378b}\u{a426c}¥\u{7}/\u{fcb29}$\u{53c6b}\u{12d6f}\u{12bd3}.\u{f2f82}\u{8613e}*$\u{fd32f}\u{e29f7}\0𨺉'¬\"1", Struct { path: "\\\u{4a5ac}<\u{9e505}\u{4f3af}🕴&?<:^\u{7}\u{88}\u{3e1ff}(¥\u{531f3}K{:¥𦺀", fields: [("n\0Ѩ/\u{1b}𥐰\u{a4906}�¥`{\u{389d4}`1\u{7708a})\u{3dac4}8\u{93e5f}㒭\\\"\u{e6824}\u{b}Ѩ\u{88946}Ⱥ{", Integer { sign: Signed, width: 127 })] }), ("¥🕴\u{1b}¥🕴=sR\0\u{35f36}\u{867dc}>ä\u{202e}f:BȺ?:``*·¥\u{74ca5}\"", Tuple { fields: [Boolean, Field, String { length: 205 }, String { length: 575 }, Integer { sign: Signed, width: 124 }, String { length: 923 }, String { length: 294 }] })] }, visibility: Public }], return_type: None, error_types: {} }, {"¡": Struct({"$d6🕴/:|�\u{37f8b}\r\u{a13b7}C$𲁹\\&\u{f8712}?\u{db61c}t%\u{57be1}\0": Field(-8275115097504119425402713293372777967031130481426075481525511323101167533940), ".Ⱥ/$\u{7f}\u{103c06}%\\\u{202e}][0\u{88479}]\"*~\u{36fd5}\u{5}\u{feff}]{/": Vec([String("A \0A 0 aA0 a0aa00 A\000 0 \0\0aA\0\0a \0 \0a 0A\0A\0 Aa0aAA0A\0aa\00 0\0\0\0\0\00a Aa0 \0 a A0 \0AA0A Aa Aa\00aAaAaaA0A0 aA0 \0 Aa\00 \0000AAA a \0AAaaA\0\0a A0a0AA\0aA00 aA a0A\0AAa0a\0A0a\0\0A0A \00Aaaaa a A AO.*D\r.`bD4a\n*\u{15}\\B\"ace.8&A\t[AV8w<\u{18}\"\u{f}4`^Q\u{1b}U*$Z/\0\u{b}]qw${`\"=X&A\\\u{e}%`\\:\"$\u{1}.(6_C:\u{7}a`V=N**\u{1b})#Y\u{7f}#\u{b}$l\t}.Mns5!\t*$g\u{18}\rC\u{11}\"$=\u{7}.?&\u{1}yW\t.Y|<6\u{12}\u{e}/4JJ*&/V$`\"&`x#R\np\\%'*\n:P\0K\u{b}*`\r7Ym\t_\u{b}=$\u{16}`0v\u{7f}'NV^N4J<9=G*A:!b\u{1c}:'c{ST&z![\u{7f}/.={E*pmaWC\u{7f}7p{<\"']\u{8}?`\u{1b}\"\\\u{1}$\u{18}/!\u{16}-\t:E7CUs%_qw*xf.S\t\u{4}'=\"&%t'\u{1f}\u{7f}\u{b}$.=f\u{6}\"$A}xV_$\u{1a}nH\n\u{1b}?<&\n\u{15}U\\-b\u{1d}|\u{b}\u{2}t \rwA{L\u{11}\u{6}\u{10}\0\u{1b}G[x?&Yi?&7\u{b}?\r\u{1f}b\\$=\u{b}x& Q/\t\u{4}|X\"7\"{\0\0j'.\0\\e1zR.\u{c}\n<\u{b}Q*R+y8\u{19}(o\u{1f}@m\nt+\u{7f}Q\\+.Rn?\u{17}UZ\"$\u{b}/\0B=9=\t{\u{8}qZ&`!:D{\u{6}IO.H\u{7f}:?/3@\r\u{1b}oä\u{202e}f:BȺ?:``*·¥\u{74ca5}\"": Vec([Field(1), Field(8822392870083219098626030699076694602179106416928939583840848325203494062169), String("*TXn;{}\"_)_9\nk\\#ts\u{10}%\\c\n/2._::Oj*\u{7f}\0\r&PUMl\u{10}$/u?L}\u{7f}*P&<%=\u{7}S#%A\n \u{e}\\#v!\"\nepRp.{vH{&@\t\u{1f}\u{b}?=T\u{f}\"B\u{11}\n/{HY.\u{16}\n\nj<&\u{3}{f\n/9J*&x.$/,\r\0\u{1c}'\u{5}\u{13}\u{1b}`T\0`\n&/&\u{15}\u{b}w:{SK\u{7f}\\apR%/'0`0\n'd$$\u{7f}Vs\t<{\nDTT\\F\n\u{15}y.\\\t*-)&D$*u\u{b}\u{1b}?{\u{b}/\n\u{7f}0*.7\0\n:\u{b}.rSk<6~>{#"), 
String(".\"JA%q6i\ra/:F\u{16}?q<\t\rN\\13?H<;?{`\u{1d}p{.\"5?*@'N\"\u{1a}P,\u{1b}\u{7f}c+dt5':Y\u{1b}k/G>k/eM$XIX')\u{1b}'&\u{7f}\\\r\u{1b}`'P_.\n.?\0p`Y\u{c}`._\u{b}B\0\ng/*v$jfJ:\u{c}\u{1b}Pv}xn7ph@#{_<{.JD?r%'E\n7s9n/],u![;%*\u{2}{y`MgRdok8\"%<*>*{GyFJ}?\0W%#\0\u{1b}\u{7f}\u{16}G:\t=w\u{7f}:q\u{7f}:{k?\u{b}(:ca{$*1X/cw\u{1b}Z6I\rX\0\u{1b}(.^14\r\\=s\u{1b}w\u{3}F~\n\u{1e})/$0:=[\u{1},\\\\\tg\u{16}:],J`\0N\n\u{1b}\u{1b}\u{1b}{.xb\u{1a}\r'12#?e\\#/\tA\u{7f}\".\\Ke=\\?!v+P\u{17}\r\u{12}x.=A.`0<&?\niR/*WW\rnV)5vY.~\n _h\0&5f#\r\u{2}-S%\t s..\u{7f}!X}\"=\"?\u{5}y\u{4}`fr&R&d: 1Ht\"4`y_/S.71#{|%$%&ehy\u{16}J_\u{e}=:.%'\"N=J:\r:{&.\u{12}\u{b})&N\u{10}R_3;11\u{b}Qd<`<{?xF:~\"%<=<<\03:t??&\r;{\u{13}?__Y\u{6})\\k,vs?\n`G(*\n!\u{1b}[@z\0$?*yKLJh_\u{13}FkY'\\?T^\u{1f}$1n`'[\n\u{7f}\0+l\u{b}\u{1a}E\u{b}&(/\u{b}\rr\t:&\0+N'N:oC:*``IN\u{b}*.:\t$7+'*U:\t Result { let json_value = match (value, abi_type) { + (InputValue::Field(f), AbiType::Integer { sign: crate::Sign::Signed, width }) => { + JsonTypes::String(field_to_signed_hex(*f, *width)) + } (InputValue::Field(f), AbiType::Field | AbiType::Integer { .. }) => { JsonTypes::String(Self::format_field_string(*f)) } @@ -143,6 +146,9 @@ impl InputValue { ) -> Result { let input_value = match (value, param_type) { (JsonTypes::String(string), AbiType::String { .. }) => InputValue::String(string), + (JsonTypes::String(string), AbiType::Integer { sign: crate::Sign::Signed, width }) => { + InputValue::Field(parse_str_to_signed(&string, *width)?) + } ( JsonTypes::String(string), AbiType::Field | AbiType::Integer { .. } | AbiType::Boolean, @@ -192,3 +198,40 @@ impl InputValue { Ok(input_value) } } + +#[cfg(test)] +mod test { + use proptest::prelude::*; + + use crate::{ + arbitrary::arb_abi_and_input_map, + input_parser::{arbitrary::arb_signed_integer_type_and_value, json::JsonTypes, InputValue}, + }; + + use super::{parse_json, serialize_to_json}; + + proptest! 
{ + #[test] + fn serializing_and_parsing_returns_original_input((abi, input_map) in arb_abi_and_input_map()) { + let json = serialize_to_json(&input_map, &abi).expect("should be serializable"); + let parsed_input_map = parse_json(&json, &abi).expect("should be parsable"); + + prop_assert_eq!(parsed_input_map, input_map); + } + + #[test] + fn signed_integer_serialization_roundtrip((typ, value) in arb_signed_integer_type_and_value()) { + let string_input = JsonTypes::String(value.to_string()); + let input_value = InputValue::try_from_json(string_input, &typ, "foo").expect("should be parsable"); + let JsonTypes::String(output_string) = JsonTypes::try_from_input_value(&input_value, &typ).expect("should be serializable") else { + panic!("wrong type output"); + }; + let output_number = if let Some(output_string) = output_string.strip_prefix("-0x") { + -i64::from_str_radix(output_string, 16).unwrap() + } else { + i64::from_str_radix(output_string.strip_prefix("0x").unwrap(), 16).unwrap() + }; + prop_assert_eq!(output_number, value); + } + } +} diff --git a/noir/noir-repo/tooling/noirc_abi/src/input_parser/mod.rs b/noir/noir-repo/tooling/noirc_abi/src/input_parser/mod.rs index d7bbb0adfe3a..b7732235eb22 100644 --- a/noir/noir-repo/tooling/noirc_abi/src/input_parser/mod.rs +++ b/noir/noir-repo/tooling/noirc_abi/src/input_parser/mod.rs @@ -248,6 +248,11 @@ mod serialization_tests { typ: AbiType::Field, visibility: AbiVisibility::Private, }, + AbiParameter { + name: "signed_example".into(), + typ: AbiType::Integer { sign: Sign::Signed, width: 8 }, + visibility: AbiVisibility::Private, + }, AbiParameter { name: "bar".into(), typ: AbiType::Struct { @@ -272,6 +277,7 @@ mod serialization_tests { let input_map: BTreeMap = BTreeMap::from([ ("foo".into(), InputValue::Field(FieldElement::one())), + ("signed_example".into(), InputValue::Field(FieldElement::from(240u128))), ( "bar".into(), InputValue::Struct(BTreeMap::from([ @@ -317,7 +323,9 @@ fn parse_str_to_field(value: &str) -> Result { } fn parse_str_to_signed(value: &str, width: u32) -> Result { - let big_num = if let Some(hex) = value.strip_prefix("0x") { + let big_num = if let Some(hex) = value.strip_prefix("-0x") { + BigInt::from_str_radix(hex, 16).map(|value| -value) + } else if let Some(hex) = value.strip_prefix("0x") { BigInt::from_str_radix(hex, 16) } else { BigInt::from_str_radix(value, 10) @@ -357,12 +365,23 @@ fn field_from_big_int(bigint: BigInt) -> FieldElement { } } +fn field_to_signed_hex(f: FieldElement, bit_size: u32) -> String { + let f_u128 = f.to_u128(); + let max = 2_u128.pow(bit_size - 1) - 1; + if f_u128 > max { + let f = FieldElement::from(2_u128.pow(bit_size) - f_u128); + format!("-0x{}", f.to_hex()) + } else { + format!("0x{}", f.to_hex()) + } +} + #[cfg(test)] mod test { use acvm::{AcirField, FieldElement}; use num_bigint::BigUint; - use super::parse_str_to_field; + use super::{parse_str_to_field, parse_str_to_signed}; fn big_uint_from_field(field: FieldElement) -> BigUint { BigUint::from_bytes_be(&field.to_be_bytes()) @@ -400,4 +419,38 @@ mod test { let noncanonical_field = FieldElement::modulus().to_string(); assert!(parse_str_to_field(&noncanonical_field).is_err()); } + + #[test] + fn test_parse_str_to_signed() { + let value = parse_str_to_signed("1", 8).unwrap(); + assert_eq!(value, FieldElement::from(1_u128)); + + let value = parse_str_to_signed("-1", 8).unwrap(); + assert_eq!(value, FieldElement::from(255_u128)); + + let value = parse_str_to_signed("-1", 16).unwrap(); + assert_eq!(value, FieldElement::from(65535_u128)); + 
} +} + +#[cfg(test)] +mod arbitrary { + use proptest::prelude::*; + + use crate::{AbiType, Sign}; + + pub(super) fn arb_signed_integer_type_and_value() -> BoxedStrategy<(AbiType, i64)> { + (2u32..=64) + .prop_flat_map(|width| { + let typ = Just(AbiType::Integer { width, sign: Sign::Signed }); + let value = if width == 64 { + // Avoid overflow + i64::MIN..i64::MAX + } else { + -(2i64.pow(width - 1))..(2i64.pow(width - 1) - 1) + }; + (typ, value) + }) + .boxed() + } } diff --git a/noir/noir-repo/tooling/noirc_abi/src/input_parser/toml.rs b/noir/noir-repo/tooling/noirc_abi/src/input_parser/toml.rs index 321d3511b5d0..6f2be68a0c47 100644 --- a/noir/noir-repo/tooling/noirc_abi/src/input_parser/toml.rs +++ b/noir/noir-repo/tooling/noirc_abi/src/input_parser/toml.rs @@ -1,4 +1,4 @@ -use super::{parse_str_to_field, parse_str_to_signed, InputValue}; +use super::{field_to_signed_hex, parse_str_to_field, parse_str_to_signed, InputValue}; use crate::{errors::InputParserError, Abi, AbiType, MAIN_RETURN_NAME}; use acvm::{AcirField, FieldElement}; use iter_extended::{try_btree_map, try_vecmap}; @@ -60,7 +60,7 @@ pub(crate) fn serialize_to_toml( Ok(toml_string) } -#[derive(Debug, Deserialize, Serialize, Clone)] +#[derive(Debug, Deserialize, Serialize, Clone, PartialEq)] #[serde(untagged)] enum TomlTypes { // This is most likely going to be a hex string @@ -83,6 +83,9 @@ impl TomlTypes { abi_type: &AbiType, ) -> Result { let toml_value = match (value, abi_type) { + (InputValue::Field(f), AbiType::Integer { sign: crate::Sign::Signed, width }) => { + TomlTypes::String(field_to_signed_hex(*f, *width)) + } (InputValue::Field(f), AbiType::Field | AbiType::Integer { .. }) => { let f_str = format!("0x{}", f.to_hex()); TomlTypes::String(f_str) @@ -126,6 +129,7 @@ impl InputValue { ) -> Result { let input_value = match (value, param_type) { (TomlTypes::String(string), AbiType::String { .. }) => InputValue::String(string), + ( TomlTypes::String(string), AbiType::Field @@ -139,7 +143,7 @@ impl InputValue { TomlTypes::Integer(integer), AbiType::Field | AbiType::Integer { .. } | AbiType::Boolean, ) => { - let new_value = FieldElement::from(i128::from(integer)); + let new_value = FieldElement::from(u128::from(integer)); InputValue::Field(new_value) } @@ -179,3 +183,40 @@ impl InputValue { Ok(input_value) } } + +#[cfg(test)] +mod test { + use proptest::prelude::*; + + use crate::{ + arbitrary::arb_abi_and_input_map, + input_parser::{arbitrary::arb_signed_integer_type_and_value, toml::TomlTypes, InputValue}, + }; + + use super::{parse_toml, serialize_to_toml}; + + proptest! 
{ + #[test] + fn serializing_and_parsing_returns_original_input((abi, input_map) in arb_abi_and_input_map()) { + let toml = serialize_to_toml(&input_map, &abi).expect("should be serializable"); + let parsed_input_map = parse_toml(&toml, &abi).expect("should be parsable"); + + prop_assert_eq!(parsed_input_map, input_map); + } + + #[test] + fn signed_integer_serialization_roundtrip((typ, value) in arb_signed_integer_type_and_value()) { + let string_input = TomlTypes::String(value.to_string()); + let input_value = InputValue::try_from_toml(string_input.clone(), &typ, "foo").expect("should be parsable"); + let TomlTypes::String(output_string) = TomlTypes::try_from_input_value(&input_value, &typ).expect("should be serializable") else { + panic!("wrong type output"); + }; + let output_number = if let Some(output_string) = output_string.strip_prefix("-0x") { + -i64::from_str_radix(output_string, 16).unwrap() + } else { + i64::from_str_radix(output_string.strip_prefix("0x").unwrap(), 16).unwrap() + }; + prop_assert_eq!(output_number, value); + } + } +} diff --git a/noir/noir-repo/tooling/noirc_abi/src/lib.rs b/noir/noir-repo/tooling/noirc_abi/src/lib.rs index b1b199727c2a..bd5674d64f17 100644 --- a/noir/noir-repo/tooling/noirc_abi/src/lib.rs +++ b/noir/noir-repo/tooling/noirc_abi/src/lib.rs @@ -49,6 +49,7 @@ pub const MAIN_RETURN_NAME: &str = "return"; /// depends on the types of programs that users want to do. I don't envision string manipulation /// in programs, however it is possible to support, with many complications like encoding character set /// support. +#[derive(Hash)] pub enum AbiType { Field, Array { @@ -77,7 +78,7 @@ pub enum AbiType { }, } -#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)] +#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize, Hash)] #[cfg_attr(test, derive(arbitrary::Arbitrary))] #[serde(rename_all = "lowercase")] /// Represents whether the parameter is public or known only to the prover. @@ -89,7 +90,7 @@ pub enum AbiVisibility { DataBus, } -#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)] +#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize, Hash)] #[cfg_attr(test, derive(arbitrary::Arbitrary))] #[serde(rename_all = "lowercase")] pub enum Sign { @@ -146,7 +147,7 @@ impl From<&AbiType> for PrintableType { } } -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] +#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Hash)] #[cfg_attr(test, derive(arbitrary::Arbitrary))] /// An argument or return value of the circuit's `main` function. pub struct AbiParameter { @@ -163,7 +164,7 @@ impl AbiParameter { } } -#[derive(Clone, Debug, Serialize, Deserialize)] +#[derive(Clone, Debug, Serialize, Deserialize, Hash)] #[cfg_attr(test, derive(arbitrary::Arbitrary))] pub struct AbiReturnType { #[cfg_attr(test, proptest(strategy = "arbitrary::arb_abi_type()"))] @@ -171,7 +172,7 @@ pub struct AbiReturnType { pub visibility: AbiVisibility, } -#[derive(Clone, Debug, Default, Serialize, Deserialize)] +#[derive(Clone, Debug, Default, Serialize, Deserialize, Hash)] #[cfg_attr(test, derive(arbitrary::Arbitrary))] pub struct Abi { /// An ordered list of the arguments to the program's `main` function, specifying their types and visibility. 
@@ -459,7 +460,7 @@ pub enum AbiValue { }, } -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, Hash)] #[serde(tag = "error_kind", rename_all = "lowercase")] pub enum AbiErrorType { FmtString { length: u32, item_types: Vec }, diff --git a/noir/noir-repo/tooling/noirc_abi_wasm/Cargo.toml b/noir/noir-repo/tooling/noirc_abi_wasm/Cargo.toml index daa619ca01df..b00d580515ef 100644 --- a/noir/noir-repo/tooling/noirc_abi_wasm/Cargo.toml +++ b/noir/noir-repo/tooling/noirc_abi_wasm/Cargo.toml @@ -1,9 +1,11 @@ [package] name = "noirc_abi_wasm" +description = "An ABI encoder for the Noir language" version.workspace = true authors.workspace = true edition.workspace = true license.workspace = true +repository.workspace = true # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html diff --git a/noir/noir-repo/tooling/noirc_abi_wasm/test/browser/abi_encode.test.ts b/noir/noir-repo/tooling/noirc_abi_wasm/test/browser/abi_encode.test.ts index e1aaf0dc2c06..ac18495919c9 100644 --- a/noir/noir-repo/tooling/noirc_abi_wasm/test/browser/abi_encode.test.ts +++ b/noir/noir-repo/tooling/noirc_abi_wasm/test/browser/abi_encode.test.ts @@ -15,7 +15,8 @@ it('recovers original inputs when abi encoding and decoding', async () => { const foo: Field = inputs.foo as Field; const bar: Field[] = inputs.bar as Field[]; expect(BigInt(decoded_inputs.inputs.foo)).to.be.equal(BigInt(foo)); - expect(BigInt(decoded_inputs.inputs.bar[0])).to.be.equal(BigInt(bar[0])); - expect(BigInt(decoded_inputs.inputs.bar[1])).to.be.equal(BigInt(bar[1])); + expect(parseInt(decoded_inputs.inputs.bar[0])).to.be.equal(parseInt(bar[0].toString())); + expect(parseInt(decoded_inputs.inputs.bar[1])).to.be.equal(parseInt(bar[1].toString())); + expect(parseInt(decoded_inputs.inputs.bar[2])).to.be.equal(parseInt(bar[2].toString())); expect(decoded_inputs.return_value).to.be.null; }); diff --git a/noir/noir-repo/tooling/noirc_abi_wasm/test/node/abi_encode.test.ts b/noir/noir-repo/tooling/noirc_abi_wasm/test/node/abi_encode.test.ts index a49c10b6ea66..e87618d84da3 100644 --- a/noir/noir-repo/tooling/noirc_abi_wasm/test/node/abi_encode.test.ts +++ b/noir/noir-repo/tooling/noirc_abi_wasm/test/node/abi_encode.test.ts @@ -11,7 +11,8 @@ it('recovers original inputs when abi encoding and decoding', async () => { const foo: Field = inputs.foo as Field; const bar: Field[] = inputs.bar as Field[]; expect(BigInt(decoded_inputs.inputs.foo)).to.be.equal(BigInt(foo)); - expect(BigInt(decoded_inputs.inputs.bar[0])).to.be.equal(BigInt(bar[0])); - expect(BigInt(decoded_inputs.inputs.bar[1])).to.be.equal(BigInt(bar[1])); + expect(parseInt(decoded_inputs.inputs.bar[0])).to.be.equal(parseInt(bar[0].toString())); + expect(parseInt(decoded_inputs.inputs.bar[1])).to.be.equal(parseInt(bar[1].toString())); + expect(parseInt(decoded_inputs.inputs.bar[2])).to.be.equal(parseInt(bar[2].toString())); expect(decoded_inputs.return_value).to.be.null; }); diff --git a/noir/noir-repo/tooling/noirc_abi_wasm/test/shared/abi_encode.ts b/noir/noir-repo/tooling/noirc_abi_wasm/test/shared/abi_encode.ts index 62eb7658f436..b789bb05371b 100644 --- a/noir/noir-repo/tooling/noirc_abi_wasm/test/shared/abi_encode.ts +++ b/noir/noir-repo/tooling/noirc_abi_wasm/test/shared/abi_encode.ts @@ -5,7 +5,7 @@ export const abi: Abi = { { name: 'foo', type: { kind: 'field' }, visibility: 'private' }, { name: 'bar', - type: { kind: 'array', length: 2, type: { kind: 'field' } }, + type: { kind: 
'array', length: 3, type: { kind: 'integer', sign: 'signed', width: 32 } }, visibility: 'private', }, ], @@ -15,5 +15,5 @@ export const abi: Abi = { export const inputs: InputMap = { foo: '1', - bar: ['1', '2'], + bar: ['1', '2', '-1'], }; diff --git a/noir/noir-repo/tooling/profiler/src/cli/execution_flamegraph_cmd.rs b/noir/noir-repo/tooling/profiler/src/cli/execution_flamegraph_cmd.rs index 981d08a3eb12..6d6da89f660b 100644 --- a/noir/noir-repo/tooling/profiler/src/cli/execution_flamegraph_cmd.rs +++ b/noir/noir-repo/tooling/profiler/src/cli/execution_flamegraph_cmd.rs @@ -8,7 +8,7 @@ use crate::flamegraph::{BrilligExecutionSample, FlamegraphGenerator, InfernoFlam use crate::fs::{read_inputs_from_file, read_program_from_file}; use crate::opcode_formatter::format_brillig_opcode; use bn254_blackbox_solver::Bn254BlackBoxSolver; -use nargo::ops::DefaultForeignCallExecutor; +use nargo::foreign_calls::DefaultForeignCallExecutor; use noirc_abi::input_parser::Format; use noirc_artifacts::debug::DebugArtifact; diff --git a/scripts/run_interleaved.sh b/scripts/run_interleaved.sh index 85449570cb49..5d18e8cb4864 100755 --- a/scripts/run_interleaved.sh +++ b/scripts/run_interleaved.sh @@ -40,9 +40,11 @@ trap cleanup SIGINT SIGTERM EXIT # Function to run a command and prefix the output with color function run_command() { local cmd="$1" + # Take first 3 parts of command to display inline + local cmd_prefix=$(echo "$cmd" | awk '{print $1" "$2" "$3}') local color="$2" $cmd 2>&1 | while IFS= read -r line; do - echo -e "${color}[$cmd]\e[0m $line" + echo -e "${color}[$cmd_prefix]\e[0m $line" done } diff --git a/scripts/run_native_testnet.sh b/scripts/run_native_testnet.sh index 00b08f53b29f..4cf6d83900a1 100755 --- a/scripts/run_native_testnet.sh +++ b/scripts/run_native_testnet.sh @@ -32,6 +32,7 @@ PROVER_SCRIPT="\"./prover-node.sh 8078 false\"" NUM_VALIDATORS=3 INTERLEAVED=false METRICS=false +LOG_LEVEL="info" OTEL_COLLECTOR_ENDPOINT=${OTEL_COLLECTOR_ENDPOINT:-"http://localhost:4318"} # Function to display help message @@ -120,7 +121,7 @@ cd $(git rev-parse --show-toplevel) # Base command BASE_CMD="INTERLEAVED=$INTERLEAVED ./yarn-project/end-to-end/scripts/native_network_test.sh \ $TEST_SCRIPT \ - ./deploy-l1-contracts.sh \ + \"./deploy-l1-contracts.sh $NUM_VALIDATORS\" \ ./deploy-l2-contracts.sh \ ./boot-node.sh \ ./ethereum.sh \ diff --git a/spartan/aztec-network/files/config/config-prover-env.sh b/spartan/aztec-network/files/config/config-prover-env.sh index a3eccd01c1be..073547821d48 100644 --- a/spartan/aztec-network/files/config/config-prover-env.sh +++ b/spartan/aztec-network/files/config/config-prover-env.sh @@ -13,6 +13,7 @@ registry_address=$(echo "$output" | grep -oP 'Registry Address: \K0x[a-fA-F0-9]{ inbox_address=$(echo "$output" | grep -oP 'L1 -> L2 Inbox Address: \K0x[a-fA-F0-9]{40}') outbox_address=$(echo "$output" | grep -oP 'L2 -> L1 Outbox Address: \K0x[a-fA-F0-9]{40}') fee_juice_address=$(echo "$output" | grep -oP 'Fee Juice Address: \K0x[a-fA-F0-9]{40}') +staking_asset_address=$(echo "$output" | grep -oP 'Staking Asset Address: \K0x[a-fA-F0-9]{40}') fee_juice_portal_address=$(echo "$output" | grep -oP 'Fee Juice Portal Address: \K0x[a-fA-F0-9]{40}') coin_issuer_address=$(echo "$output" | grep -oP 'CoinIssuer Address: \K0x[a-fA-F0-9]{40}') reward_distributor_address=$(echo "$output" | grep -oP 'RewardDistributor Address: \K0x[a-fA-F0-9]{40}') @@ -27,6 +28,7 @@ export REGISTRY_CONTRACT_ADDRESS=$registry_address export INBOX_CONTRACT_ADDRESS=$inbox_address export 
OUTBOX_CONTRACT_ADDRESS=$outbox_address export FEE_JUICE_CONTRACT_ADDRESS=$fee_juice_address +export STAKING_ASSET_CONTRACT_ADDRESS=$staking_asset_address export FEE_JUICE_PORTAL_CONTRACT_ADDRESS=$fee_juice_portal_address export COIN_ISSUER_CONTRACT_ADDRESS=$coin_issuer_address export REWARD_DISTRIBUTOR_CONTRACT_ADDRESS=$reward_distributor_address diff --git a/spartan/aztec-network/files/config/config-validator-env.sh b/spartan/aztec-network/files/config/config-validator-env.sh index 6483168f16dd..b2848f8e069c 100644 --- a/spartan/aztec-network/files/config/config-validator-env.sh +++ b/spartan/aztec-network/files/config/config-validator-env.sh @@ -1,9 +1,9 @@ #!/bin/bash set -eu -# Pass the bootnode url as an argument -# Ask the bootnode for l1 contract addresses -output=$(node --no-warnings /usr/src/yarn-project/aztec/dest/bin/index.js get-node-info --node-url $1) +# Pass a PXE url as an argument +# Ask the PXE's node for l1 contract addresses +output=$(node --no-warnings /usr/src/yarn-project/aztec/dest/bin/index.js get-node-info -u $1 --node-url '') echo "$output" @@ -13,6 +13,7 @@ registry_address=$(echo "$output" | grep -oP 'Registry Address: \K0x[a-fA-F0-9]{ inbox_address=$(echo "$output" | grep -oP 'L1 -> L2 Inbox Address: \K0x[a-fA-F0-9]{40}') outbox_address=$(echo "$output" | grep -oP 'L2 -> L1 Outbox Address: \K0x[a-fA-F0-9]{40}') fee_juice_address=$(echo "$output" | grep -oP 'Fee Juice Address: \K0x[a-fA-F0-9]{40}') +staking_asset_address=$(echo "$output" | grep -oP 'Staking Asset Address: \K0x[a-fA-F0-9]{40}') fee_juice_portal_address=$(echo "$output" | grep -oP 'Fee Juice Portal Address: \K0x[a-fA-F0-9]{40}') coin_issuer_address=$(echo "$output" | grep -oP 'CoinIssuer Address: \K0x[a-fA-F0-9]{40}') reward_distributor_address=$(echo "$output" | grep -oP 'RewardDistributor Address: \K0x[a-fA-F0-9]{40}') @@ -32,6 +33,7 @@ export REGISTRY_CONTRACT_ADDRESS=$registry_address export INBOX_CONTRACT_ADDRESS=$inbox_address export OUTBOX_CONTRACT_ADDRESS=$outbox_address export FEE_JUICE_CONTRACT_ADDRESS=$fee_juice_address +export STAKING_ASSET_CONTRACT_ADDRESS=$staking_asset_address export FEE_JUICE_PORTAL_CONTRACT_ADDRESS=$fee_juice_portal_address export COIN_ISSUER_CONTRACT_ADDRESS=$coin_issuer_address export REWARD_DISTRIBUTOR_CONTRACT_ADDRESS=$reward_distributor_address diff --git a/spartan/aztec-network/files/config/deploy-l1-contracts.sh b/spartan/aztec-network/files/config/deploy-l1-contracts.sh index 4d976821f047..74f8e3c6bfc8 100644 --- a/spartan/aztec-network/files/config/deploy-l1-contracts.sh +++ b/spartan/aztec-network/files/config/deploy-l1-contracts.sh @@ -4,17 +4,21 @@ set -exu CHAIN_ID=$1 -# Use default account, it is funded on our dev machine -export PRIVATE_KEY="0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80" - # Run the deploy-l1-contracts command and capture the output output="" -# if INIT_VALIDATORS is true, then we need to pass the validators flag to the deploy-l1-contracts command -if [ "$INIT_VALIDATORS" = "true" ]; then - output=$(node --no-warnings /usr/src/yarn-project/aztec/dest/bin/index.js deploy-l1-contracts --validators $2 --l1-chain-id $CHAIN_ID) -else - output=$(node --no-warnings /usr/src/yarn-project/aztec/dest/bin/index.js deploy-l1-contracts --l1-chain-id $CHAIN_ID) -fi +MAX_RETRIES=5 +RETRY_DELAY=60 +for attempt in $(seq 1 $MAX_RETRIES); do + # if INIT_VALIDATORS is true, then we need to pass the validators flag to the deploy-l1-contracts command + if [ "${INIT_VALIDATORS:-false}" = "true" ]; then + output=$(node --no-warnings 
/usr/src/yarn-project/aztec/dest/bin/index.js deploy-l1-contracts --mnemonic "$MNEMONIC" --validators $2 --l1-chain-id $CHAIN_ID) && break + else + output=$(node --no-warnings /usr/src/yarn-project/aztec/dest/bin/index.js deploy-l1-contracts --mnemonic "$MNEMONIC" --l1-chain-id $CHAIN_ID) && break + fi + echo "Attempt $attempt failed. Retrying in $RETRY_DELAY seconds..." + sleep "$RETRY_DELAY" +done || { echo "All l1 contract deploy attempts failed."; exit 1; } + echo "$output" @@ -24,6 +28,7 @@ registry_address=$(echo "$output" | grep -oP 'Registry Address: \K0x[a-fA-F0-9]{ inbox_address=$(echo "$output" | grep -oP 'L1 -> L2 Inbox Address: \K0x[a-fA-F0-9]{40}') outbox_address=$(echo "$output" | grep -oP 'L2 -> L1 Outbox Address: \K0x[a-fA-F0-9]{40}') fee_juice_address=$(echo "$output" | grep -oP 'Fee Juice Address: \K0x[a-fA-F0-9]{40}') +staking_asset_address=$(echo "$output" | grep -oP 'Staking Asset Address: \K0x[a-fA-F0-9]{40}') fee_juice_portal_address=$(echo "$output" | grep -oP 'Fee Juice Portal Address: \K0x[a-fA-F0-9]{40}') coin_issuer_address=$(echo "$output" | grep -oP 'CoinIssuer Address: \K0x[a-fA-F0-9]{40}') reward_distributor_address=$(echo "$output" | grep -oP 'RewardDistributor Address: \K0x[a-fA-F0-9]{40}') @@ -37,6 +42,7 @@ export REGISTRY_CONTRACT_ADDRESS=$registry_address export INBOX_CONTRACT_ADDRESS=$inbox_address export OUTBOX_CONTRACT_ADDRESS=$outbox_address export FEE_JUICE_CONTRACT_ADDRESS=$fee_juice_address +export STAKING_ASSET_CONTRACT_ADDRESS=$staking_asset_address export FEE_JUICE_PORTAL_CONTRACT_ADDRESS=$fee_juice_portal_address export COIN_ISSUER_CONTRACT_ADDRESS=$coin_issuer_address export REWARD_DISTRIBUTOR_CONTRACT_ADDRESS=$reward_distributor_address diff --git a/spartan/aztec-network/files/config/setup-service-addresses.sh b/spartan/aztec-network/files/config/setup-service-addresses.sh index 5ca3bb5a248c..063c84a16e5d 100644 --- a/spartan/aztec-network/files/config/setup-service-addresses.sh +++ b/spartan/aztec-network/files/config/setup-service-addresses.sh @@ -81,17 +81,29 @@ fi if [ "${PROVER_BROKER_EXTERNAL_HOST}" != "" ]; then PROVER_BROKER_ADDR="${PROVER_BROKER_EXTERNAL_HOST}" -elif [ "${NETWORK_PUBLIC}" = "true" ]; then - PROVER_BROKER_ADDR=$(get_service_address "prover-broker" "${PROVER_BROKER_PORT}") else PROVER_BROKER_ADDR="http://${SERVICE_NAME}-prover-broker.${NAMESPACE}:${PROVER_BROKER_PORT}" fi +# Configure OTEL_COLLECTOR_ENDPOINT if not set in values file +if [ "${TELEMETRY:-false}" = "true" ] && [ "${OTEL_COLLECTOR_ENDPOINT}" = "" ]; then + OTEL_COLLECTOR_PORT=${OTEL_COLLECTOR_PORT:-4318} + OTEL_COLLECTOR_ENDPOINT="http://metrics-opentelemetry-collector.metrics:$OTEL_COLLECTOR_PORT" +fi # Write addresses to file for sourcing echo "export ETHEREUM_HOST=${ETHEREUM_ADDR}" >> /shared/config/service-addresses echo "export BOOT_NODE_HOST=${BOOT_NODE_ADDR}" >> /shared/config/service-addresses echo "export PROVER_NODE_HOST=${PROVER_NODE_ADDR}" >> /shared/config/service-addresses echo "export PROVER_BROKER_HOST=${PROVER_BROKER_ADDR}" >> /shared/config/service-addresses + +if [ "${OTEL_COLLECTOR_ENDPOINT}" != "" ]; then + echo "export OTEL_COLLECTOR_ENDPOINT=$OTEL_COLLECTOR_ENDPOINT" >> /shared/config/service-addresses + echo "export OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=$OTEL_COLLECTOR_ENDPOINT/v1/logs" >> /shared/config/service-addresses + echo "export OTEL_EXPORTER_OTLP_METRICS_ENDPOINT=$OTEL_COLLECTOR_ENDPOINT/v1/metrics" >> /shared/config/service-addresses + echo "export OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=$OTEL_COLLECTOR_ENDPOINT/v1/traces" >> 
/shared/config/service-addresses +fi + + echo "Addresses configured:" cat /shared/config/service-addresses diff --git a/spartan/aztec-network/templates/_helpers.tpl b/spartan/aztec-network/templates/_helpers.tpl index 3db484690a06..87322add2ac2 100644 --- a/spartan/aztec-network/templates/_helpers.tpl +++ b/spartan/aztec-network/templates/_helpers.tpl @@ -68,30 +68,6 @@ http://{{ include "aztec-network.fullname" . }}-validator.{{ .Release.Namespace http://{{ include "aztec-network.fullname" . }}-metrics.{{ .Release.Namespace }} {{- end -}} -{{- define "aztec-network.otelCollectorMetricsEndpoint" -}} -{{- if .Values.telemetry.enabled -}} -{{- if .Values.telemetry.otelCollectorEndpoint -}} -{{- .Values.telemetry.otelCollectorEndpoint -}}/v1/metrics -{{- end -}} -{{- end -}} -{{- end -}} - -{{- define "aztec-network.otelCollectorTracesEndpoint" -}} -{{- if .Values.telemetry.enabled -}} -{{- if .Values.telemetry.otelCollectorEndpoint -}} -{{- .Values.telemetry.otelCollectorEndpoint -}}/v1/traces -{{- end -}} -{{- end -}} -{{- end -}} - -{{- define "aztec-network.otelCollectorLogsEndpoint" -}} -{{- if .Values.telemetry.enabled -}} -{{- if .Values.telemetry.otelCollectorEndpoint -}} -{{- .Values.telemetry.otelCollectorEndpoint -}}/v1/logs -{{- end -}} -{{- end -}} -{{- end -}} - {{- define "helpers.flag" -}} {{- $name := index . 0 -}} {{- $value := index . 1 -}} @@ -153,6 +129,10 @@ Service Address Setup Container value: "{{ .Values.network.public }}" - name: NAMESPACE value: {{ .Release.Namespace }} + - name: TELEMETRY + value: "{{ .Values.telemetry.enabled }}" + - name: OTEL_COLLECTOR_ENDPOINT + value: "{{ .Values.telemetry.otelCollectorEndpoint }}" - name: EXTERNAL_ETHEREUM_HOST value: "{{ .Values.ethereum.externalHost }}" - name: ETHEREUM_PORT @@ -190,6 +170,8 @@ affinity: values: - validator - boot-node - - prover + - prover-node + - prover-broker topologyKey: "kubernetes.io/hostname" + namespaceSelector: {} {{- end -}} diff --git a/spartan/aztec-network/templates/boot-node.yaml b/spartan/aztec-network/templates/boot-node.yaml index 022d344ebe4b..3a5d2103f9a7 100644 --- a/spartan/aztec-network/templates/boot-node.yaml +++ b/spartan/aztec-network/templates/boot-node.yaml @@ -17,6 +17,7 @@ spec: {{- include "aztec-network.selectorLabels" . | nindent 8 }} app: boot-node spec: + dnsPolicy: ClusterFirstWithHostNet {{- if .Values.network.public }} hostNetwork: true {{- include "aztec-network.publicAntiAffinity" . | nindent 6 }} @@ -32,17 +33,18 @@ spec: - -c - | source /shared/config/service-addresses + cat /shared/config/service-addresses echo "Awaiting ethereum node at ${ETHEREUM_HOST}" until curl -s -X POST -H 'Content-Type: application/json' \ -d '{"jsonrpc":"2.0","method":"web3_clientVersion","params":[],"id":67}' \ ${ETHEREUM_HOST} | grep -q reth; do - echo "Waiting for Ethereum node..." + echo "Waiting for Ethereum node ${ETHEREUM_HOST}..." sleep 5 done echo "Ethereum node is ready!" {{- if .Values.telemetry.enabled }} - until curl --head --silent {{ include "aztec-network.otelCollectorMetricsEndpoint" . }} > /dev/null; do - echo "Waiting for OpenTelemetry collector..." + until curl --head --silent $OTEL_COLLECTOR_ENDPOINT > /dev/null; do + echo "Waiting for OpenTelemetry collector $OTEL_COLLECTOR_ENDPOINT..." sleep 5 done echo "OpenTelemetry collector is ready!" 
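Taken together, the telemetry changes in this chart replace three templated per-signal endpoints with a single OTEL_COLLECTOR_ENDPOINT resolved at pod start. A minimal sketch of the resolution order implemented by setup-service-addresses.sh above:

```bash
# Sketch only: mirrors the fallback in setup-service-addresses.sh.
# An explicit endpoint from the values file wins; otherwise, when telemetry
# is enabled, fall back to the in-cluster metrics collector on port 4318.
if [ "${TELEMETRY:-false}" = "true" ] && [ "${OTEL_COLLECTOR_ENDPOINT:-}" = "" ]; then
  OTEL_COLLECTOR_PORT=${OTEL_COLLECTOR_PORT:-4318}
  OTEL_COLLECTOR_ENDPOINT="http://metrics-opentelemetry-collector.metrics:$OTEL_COLLECTOR_PORT"
fi
# Consumers then derive the per-signal OTLP URLs from the one endpoint:
# $OTEL_COLLECTOR_ENDPOINT/v1/logs, /v1/metrics and /v1/traces.
```

This is why the per-network values files later in this diff can simply drop their hard-coded `otelCollectorEndpoint` entries.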
@@ -70,8 +72,12 @@ spec: - name: scripts mountPath: /scripts env: + - name: TELEMETRY + value: "{{ .Values.telemetry.enabled }}" - name: INIT_VALIDATORS value: "true" + - name: MNEMONIC + value: "{{ .Values.aztec.l1DeploymentMnemonic }}" - name: ETHEREUM_SLOT_DURATION value: "{{ .Values.ethereum.blockTime }}" - name: AZTEC_SLOT_DURATION @@ -150,12 +156,6 @@ spec: value: "0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80" - name: OTEL_RESOURCE_ATTRIBUTES value: service.name={{ .Release.Name }},service.namespace={{ .Release.Namespace }},service.version={{ .Chart.AppVersion }},environment={{ .Values.environment | default "production" }} - - name: OTEL_EXPORTER_OTLP_METRICS_ENDPOINT - value: {{ include "aztec-network.otelCollectorMetricsEndpoint" . | quote }} - - name: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT - value: {{ include "aztec-network.otelCollectorTracesEndpoint" . | quote }} - - name: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT - value: {{ include "aztec-network.otelCollectorLogsEndpoint" . | quote }} - name: PROVER_REAL_PROOFS value: "{{ .Values.aztec.realProofs }}" - name: PXE_PROVER_ENABLED @@ -208,6 +208,7 @@ data: export INBOX_CONTRACT_ADDRESS={{ .Values.bootNode.contracts.inboxAddress }} export OUTBOX_CONTRACT_ADDRESS={{ .Values.bootNode.contracts.outboxAddress }} export FEE_JUICE_CONTRACT_ADDRESS={{ .Values.bootNode.contracts.feeJuiceAddress }} + export STAKING_ASSET_CONTRACT_ADDRESS={{ .Values.bootNode.contracts.stakingAssetAddress }} export FEE_JUICE_PORTAL_CONTRACT_ADDRESS={{ .Values.bootNode.contracts.feeJuicePortalAddress }} {{- end }} {{if not .Values.network.public }} diff --git a/spartan/aztec-network/templates/deploy-l1-verifier.yaml b/spartan/aztec-network/templates/deploy-l1-verifier.yaml index 8866dd1ca09a..b69dcd58f65a 100644 --- a/spartan/aztec-network/templates/deploy-l1-verifier.yaml +++ b/spartan/aztec-network/templates/deploy-l1-verifier.yaml @@ -5,6 +5,11 @@ metadata: name: {{ include "aztec-network.fullname" . }}-deploy-l1-verifier labels: {{- include "aztec-network.labels" . | nindent 4 }} + annotations: + "helm.sh/hook": post-install + "helm.sh/hook-weight": "0" + "helm.sh/hook-delete-policy": before-hook-creation + spec: template: metadata: @@ -44,6 +49,7 @@ spec: chmod +x /tmp/setup-service-addresses.sh /tmp/setup-service-addresses.sh source /shared/config/service-addresses + cat /shared/config/service-addresses until curl -s -X GET "$BOOT_NODE_HOST/status"; do echo "Waiting for Aztec node $BOOT_NODE_HOST..." @@ -51,19 +57,25 @@ spec: done echo "Boot node is ready!" - export ROLLUP_CONTRACT_ADDRESS=$(curl -X POST -H 'Content-Type: application/json' \ + l1_contracts=$(curl -X POST -H 'Content-Type: application/json' \ -d '{"jsonrpc":"2.0","method":"node_getL1ContractAddresses","params":[],"id":1}' \ - "$BOOT_NODE_HOST" \ - | jq -r '.result.rollupAddress.value') - + "$BOOT_NODE_HOST") + echo "L1 Contracts" + echo $l1_contracts + export ROLLUP_CONTRACT_ADDRESS=$(echo $l1_contracts | jq -r '.result.rollupAddress') + [ -z "$ROLLUP_CONTRACT_ADDRESS" ] && echo "Could not retrieve rollup address!" && exit 1 echo "Rollup contract address: $ROLLUP_CONTRACT_ADDRESS" - node /usr/src/yarn-project/aztec/dest/bin/index.js deploy-l1-verifier --verifier real + MAX_RETRIES=5 + RETRY_DELAY=60 + for attempt in $(seq 1 $MAX_RETRIES); do + node /usr/src/yarn-project/aztec/dest/bin/index.js deploy-l1-verifier --verifier real && break + echo "Attempt $attempt failed. Retrying in $RETRY_DELAY seconds..." + sleep "$RETRY_DELAY" + done || { echo "All L1 verifier deploy attempts failed." 
>&2; exit 1; } echo "L1 verifier deployed" env: - name: NODE_NO_WARNINGS value: "1" - - name: DEBUG - value: "aztec:*" - name: LOG_LEVEL value: "debug" - name: L1_CHAIN_ID diff --git a/spartan/aztec-network/templates/prover-agent.yaml b/spartan/aztec-network/templates/prover-agent.yaml index a4517c7a5030..c27adb96eeb9 100644 --- a/spartan/aztec-network/templates/prover-agent.yaml +++ b/spartan/aztec-network/templates/prover-agent.yaml @@ -34,6 +34,7 @@ spec: serviceAccountName: {{ include "aztec-network.fullname" . }}-node {{- if .Values.network.public }} hostNetwork: true + dnsPolicy: ClusterFirstWithHostNet {{- end }} volumes: - name: config @@ -50,14 +51,15 @@ spec: - -c - | source /shared/config/service-addresses + cat /shared/config/service-addresses until curl -s -X POST ${PROVER_BROKER_HOST}/status; do echo "Waiting for broker ${PROVER_BROKER_HOST} ..." sleep 5 done echo "Broker is ready!" {{- if .Values.telemetry.enabled }} - until curl --head --silent {{ include "aztec-network.otelCollectorMetricsEndpoint" . }} > /dev/null; do - echo "Waiting for OpenTelemetry collector..." + until curl --head --silent $OTEL_COLLECTOR_ENDPOINT > /dev/null; do + echo "Waiting for OpenTelemetry collector $OTEL_COLLECTOR_ENDPOINT..." sleep 5 done echo "OpenTelemetry collector is ready!" @@ -85,8 +87,6 @@ spec: value: "{{ .Values.proverAgent.logLevel }}" - name: LOG_JSON value: "1" - - name: DEBUG - value: "{{ .Values.proverAgent.debug }}" - name: PROVER_REAL_PROOFS value: "{{ .Values.aztec.realProofs }}" - name: PROVER_AGENT_COUNT @@ -97,12 +97,6 @@ spec: value: {{ join "," .Values.proverAgent.proofTypes | quote }} - name: OTEL_RESOURCE_ATTRIBUTES value: service.name={{ .Release.Name }},service.namespace={{ .Release.Namespace }},service.version={{ .Chart.AppVersion }},environment={{ .Values.environment | default "production" }} - - name: OTEL_EXPORTER_OTLP_METRICS_ENDPOINT - value: {{ include "aztec-network.otelCollectorMetricsEndpoint" . | quote }} - - name: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT - value: {{ include "aztec-network.otelCollectorTracesEndpoint" . | quote }} - - name: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT - value: {{ include "aztec-network.otelCollectorLogsEndpoint" . | quote }} resources: {{- toYaml .Values.proverAgent.resources | nindent 12 }} {{- end }} diff --git a/spartan/aztec-network/templates/prover-broker.yaml b/spartan/aztec-network/templates/prover-broker.yaml index 214b6720fcef..5f6a571b167f 100644 --- a/spartan/aztec-network/templates/prover-broker.yaml +++ b/spartan/aztec-network/templates/prover-broker.yaml @@ -20,6 +20,8 @@ spec: serviceAccountName: {{ include "aztec-network.fullname" . }}-node {{- if .Values.network.public }} hostNetwork: true + dnsPolicy: ClusterFirstWithHostNet + {{- include "aztec-network.publicAntiAffinity" . | nindent 6 }} {{- end }} volumes: - name: config @@ -36,9 +38,10 @@ spec: - -c - | source /shared/config/service-addresses + cat /shared/config/service-addresses {{- if .Values.telemetry.enabled }} - until curl --head --silent {{ include "aztec-network.otelCollectorMetricsEndpoint" . }} > /dev/null; do - echo "Waiting for OpenTelemetry collector..." + until curl --head --silent $OTEL_COLLECTOR_ENDPOINT > /dev/null; do + echo "Waiting for OpenTelemetry collector $OTEL_COLLECTOR_ENDPOINT..." sleep 5 done echo "OpenTelemetry collector is ready!" 
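The retry loops added to the contract and verifier deploy jobs share one shape: attempt, break on success, sleep, and report failure after the last try. A hypothetical helper capturing that shape (not part of this diff; shown for clarity):

```bash
# Hypothetical retry helper; the deploy jobs above inline this logic.
retry() {
  local max=$1 delay=$2; shift 2
  local attempt
  for attempt in $(seq 1 "$max"); do
    "$@" && return 0  # success: stop retrying
    echo "Attempt $attempt failed. Retrying in $delay seconds..." >&2
    sleep "$delay"
  done
  echo "All $max attempts failed: $*" >&2
  return 1
}

# Illustrative usage:
# retry 5 60 node /usr/src/yarn-project/aztec/dest/bin/index.js deploy-l1-verifier --verifier real
```

Returning an explicit status also avoids a subtlety with the `for ...; done || handler` form: a for-loop's exit status is that of the last command it ran (here `sleep`), so a handler placed after `done` cannot reliably catch the all-attempts-failed case.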
@@ -66,8 +69,6 @@ spec: value: "{{ .Values.proverBroker.logLevel }}" - name: LOG_JSON value: "1" - - name: DEBUG - value: "{{ .Values.proverBroker.debug }}" - name: PROVER_BROKER_POLL_INTERVAL_MS value: "{{ .Values.proverBroker.pollIntervalMs }}" - name: PROVER_BROKER_JOB_TIMEOUT_MS @@ -78,15 +79,11 @@ spec: value: "{{ .Values.proverBroker.dataDirectory }}" - name: OTEL_RESOURCE_ATTRIBUTES value: service.name={{ .Release.Name }},service.namespace={{ .Release.Namespace }},service.version={{ .Chart.AppVersion }},environment={{ .Values.environment | default "production" }} - - name: OTEL_EXPORTER_OTLP_METRICS_ENDPOINT - value: {{ include "aztec-network.otelCollectorMetricsEndpoint" . | quote }} - - name: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT - value: {{ include "aztec-network.otelCollectorTracesEndpoint" . | quote }} - - name: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT - value: {{ include "aztec-network.otelCollectorLogsEndpoint" . | quote }} resources: {{- toYaml .Values.proverBroker.resources | nindent 12 }} +{{- end }} --- +# Headless service for StatefulSet DNS entries apiVersion: v1 kind: Service metadata: @@ -101,4 +98,3 @@ spec: ports: - port: {{ .Values.proverBroker.service.nodePort }} name: node -{{ end }} diff --git a/spartan/aztec-network/templates/prover-node.yaml b/spartan/aztec-network/templates/prover-node.yaml index bfe9447570cc..44984a2fb2a0 100644 --- a/spartan/aztec-network/templates/prover-node.yaml +++ b/spartan/aztec-network/templates/prover-node.yaml @@ -19,6 +19,7 @@ spec: spec: {{- if .Values.network.public }} hostNetwork: true + dnsPolicy: ClusterFirstWithHostNet {{- include "aztec-network.publicAntiAffinity" . | nindent 6 }} {{- end }} serviceAccountName: {{ include "aztec-network.fullname" . }}-node @@ -35,7 +36,7 @@ spec: until curl -s -X POST -H 'Content-Type: application/json' \ -d '{"jsonrpc":"2.0","method":"web3_clientVersion","params":[],"id":67}' \ ${ETHEREUM_HOST} | grep -q reth; do - echo "Waiting for Ethereum node..." + echo "Waiting for Ethereum node ${ETHEREUM_HOST}..." sleep 5 done echo "Ethereum node is ready!" @@ -51,8 +52,8 @@ spec: fi {{- if .Values.telemetry.enabled }} - until curl --head --silent {{ include "aztec-network.otelCollectorMetricsEndpoint" . }} > /dev/null; do - echo "Waiting for OpenTelemetry collector..." + until curl --head --silent $OTEL_COLLECTOR_ENDPOINT > /dev/null; do + echo "Waiting for OpenTelemetry collector $OTEL_COLLECTOR_ENDPOINT..." sleep 5 done echo "OpenTelemetry collector is ready!" @@ -142,12 +143,6 @@ spec: value: "0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80" - name: OTEL_RESOURCE_ATTRIBUTES value: service.name={{ .Release.Name }},service.namespace={{ .Release.Namespace }},service.version={{ .Chart.AppVersion }},environment={{ .Values.environment | default "production" }} - - name: OTEL_EXPORTER_OTLP_METRICS_ENDPOINT - value: {{ include "aztec-network.otelCollectorMetricsEndpoint" . | quote }} - - name: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT - value: {{ include "aztec-network.otelCollectorTracesEndpoint" . | quote }} - - name: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT - value: {{ include "aztec-network.otelCollectorLogsEndpoint" . 
| quote }} - name: L1_CHAIN_ID value: "{{ .Values.ethereum.chainId }}" - name: P2P_ENABLED diff --git a/spartan/aztec-network/templates/pxe.yaml b/spartan/aztec-network/templates/pxe.yaml index bb15f206c2df..d61df752190a 100644 --- a/spartan/aztec-network/templates/pxe.yaml +++ b/spartan/aztec-network/templates/pxe.yaml @@ -19,6 +19,7 @@ spec: serviceAccountName: {{ include "aztec-network.fullname" . }}-node {{- if .Values.network.public }} hostNetwork: true + dnsPolicy: ClusterFirstWithHostNet {{- end }} volumes: - name: config @@ -37,6 +38,7 @@ spec: - -c - | source /shared/config/service-addresses + cat /shared/config/service-addresses until curl --head --silent ${BOOT_NODE_HOST}/status; do echo "Waiting for boot node..." sleep 5 @@ -68,6 +70,7 @@ spec: - "-c" - | source /shared/config/service-addresses + cat /shared/config/service-addresses {{- if .Values.network.public }} # If the network is public, we need to use the boot node URL export AZTEC_NODE_URL=${BOOT_NODE_HOST} @@ -84,8 +87,6 @@ spec: value: "1" - name: LOG_LEVEL value: "{{ .Values.pxe.logLevel }}" - - name: DEBUG - value: "{{ .Values.pxe.debug }}" - name: PXE_PROVER_ENABLED value: "{{ .Values.aztec.realProofs }}" ports: diff --git a/spartan/aztec-network/templates/rbac.yaml b/spartan/aztec-network/templates/rbac.yaml index a0e8e68cd118..94f143f619e3 100644 --- a/spartan/aztec-network/templates/rbac.yaml +++ b/spartan/aztec-network/templates/rbac.yaml @@ -55,4 +55,4 @@ roleRef: subjects: - kind: ServiceAccount name: {{ include "aztec-network.fullname" . }}-node - namespace: {{ .Release.Namespace }} + namespace: {{ .Release.Namespace }} \ No newline at end of file diff --git a/spartan/aztec-network/templates/reth.yaml b/spartan/aztec-network/templates/reth.yaml index d6230ecf0ad5..8a3e28728bf8 100644 --- a/spartan/aztec-network/templates/reth.yaml +++ b/spartan/aztec-network/templates/reth.yaml @@ -19,6 +19,43 @@ spec: {{- if .Values.network.public }} hostNetwork: true {{- end }} + initContainers: + - name: prepare-genesis + image: node:18-alpine + command: ["/bin/sh", "-c"] + args: + - | + cd /tmp + npm init -y + npm install ethers@6 + cat > derive.js << 'EOF' + const { ethers } = require('ethers'); + const fs = require('fs'); + + async function main() { + const mnemonic = process.env.DEPLOYMENT_MNEMONIC; + const wallet = ethers.Wallet.fromPhrase(mnemonic); + + const genesis = JSON.parse(fs.readFileSync('/genesis-template/genesis.json', 'utf8')); + + genesis.alloc[wallet.address] = { + balance: '0x3635c9adc5dea00000' // 1000 ETH in wei + }; + + fs.writeFileSync('/genesis-output/genesis.json', JSON.stringify(genesis, null, 2)); + } + + main().catch(console.error); + EOF + node derive.js + env: + - name: DEPLOYMENT_MNEMONIC + value: {{ .Values.aztec.l1DeploymentMnemonic }} + volumeMounts: + - name: genesis-template + mountPath: /genesis-template + - name: genesis-output + mountPath: /genesis-output containers: - name: ethereum image: "{{ .Values.images.reth.image }}" @@ -40,7 +77,7 @@ spec: volumeMounts: - name: shared-volume mountPath: /data - - name: genesis + - name: genesis-output mountPath: /genesis resources: {{- toYaml .Values.ethereum.resources | nindent 12 }} @@ -48,9 +85,11 @@ spec: - name: shared-volume persistentVolumeClaim: claimName: {{ include "aztec-network.fullname" . }}-ethereum-pvc - - name: genesis + - name: genesis-template configMap: name: {{ include "aztec-network.fullname" . 
}}-reth-genesis + - name: genesis-output + emptyDir: {} {{if not .Values.network.public }} --- apiVersion: v1 diff --git a/spartan/aztec-network/templates/setup-l2-contracts.yaml b/spartan/aztec-network/templates/setup-l2-contracts.yaml index 56cf8fc57f2a..c3167e891098 100644 --- a/spartan/aztec-network/templates/setup-l2-contracts.yaml +++ b/spartan/aztec-network/templates/setup-l2-contracts.yaml @@ -5,6 +5,10 @@ metadata: name: {{ include "aztec-network.fullname" . }}-setup-l2-contracts labels: {{- include "aztec-network.labels" . | nindent 4 }} + annotations: + "helm.sh/hook": post-install + "helm.sh/hook-weight": "0" + "helm.sh/hook-delete-policy": before-hook-creation spec: template: metadata: @@ -48,12 +52,13 @@ spec: chmod +x /tmp/setup-service-addresses.sh /tmp/setup-service-addresses.sh source /shared/config/service-addresses + cat /shared/config/service-addresses export AZTEC_NODE_URL=$BOOT_NODE_HOST export PXE_URL=$BOOT_NODE_HOST until curl -s -X POST -H 'content-type: application/json' \ -d '{"jsonrpc":"2.0","method":"pxe_getNodeInfo","params":[],"id":67}' \ $PXE_URL | grep -q '"enr:-'; do - echo "Waiting for PXE service..." + echo "Waiting for PXE service at ${PXE_URL}..." sleep 5 done echo "PXE service is ready!" @@ -61,8 +66,8 @@ spec: node --no-warnings /usr/src/yarn-project/aztec/dest/bin/index.js setup-protocol-contracts --skipProofWait --l1-chain-id {{ .Values.ethereum.chainId }} echo "L2 contracts initialized" env: - - name: DEBUG - value: "aztec:*" + - name: TELEMETRY + value: "{{ .Values.telemetry.enabled }}" - name: LOG_LEVEL value: "debug" - name: NETWORK_PUBLIC diff --git a/spartan/aztec-network/templates/transaction-bot.yaml b/spartan/aztec-network/templates/transaction-bot.yaml index 762c6bd7c07e..06c6ce7048ba 100644 --- a/spartan/aztec-network/templates/transaction-bot.yaml +++ b/spartan/aztec-network/templates/transaction-bot.yaml @@ -38,6 +38,7 @@ spec: - -c - | source /shared/config/service-addresses + cat /shared/config/service-addresses {{- if .Values.bot.nodeUrl }} export AZTEC_NODE_URL={{ .Values.bot.nodeUrl }} {{- else if .Values.network.public }} @@ -63,6 +64,7 @@ spec: - "-c" - | source /shared/config/service-addresses + cat /shared/config/service-addresses {{- if .Values.bot.nodeUrl }} export AZTEC_NODE_URL={{ .Values.bot.nodeUrl }} {{- else if .Values.network.public }} @@ -79,8 +81,6 @@ spec: value: "1" - name: LOG_LEVEL value: "{{ .Values.bot.logLevel }}" - - name: DEBUG - value: "{{ .Values.bot.debug }}" - name: BOT_PRIVATE_KEY value: "{{ .Values.bot.botPrivateKey }}" - name: BOT_TX_INTERVAL_SECONDS diff --git a/spartan/aztec-network/templates/validator.yaml b/spartan/aztec-network/templates/validator.yaml index 6f8aba191b2f..1faa6823076b 100644 --- a/spartan/aztec-network/templates/validator.yaml +++ b/spartan/aztec-network/templates/validator.yaml @@ -20,6 +20,7 @@ spec: spec: {{- if .Values.network.public }} hostNetwork: true + dnsPolicy: ClusterFirstWithHostNet {{- include "aztec-network.publicAntiAffinity" . | nindent 6 }} {{- end }} serviceAccountName: {{ include "aztec-network.fullname" . }}-node @@ -33,18 +34,19 @@ spec: - -c - | source /shared/config/service-addresses + cat /shared/config/service-addresses # First check ethereum node until curl -s -X POST -H 'Content-Type: application/json' \ -d '{"jsonrpc":"2.0","method":"web3_clientVersion","params":[],"id":67}' \ $ETHEREUM_HOST | grep -q reth; do - echo "Waiting for Ethereum node..." + echo "Waiting for Ethereum node ${ETHEREUM_HOST}..." sleep 5 done echo "Ethereum node is ready!" 
{{- if .Values.telemetry.enabled }} - until curl --head --silent {{ include "aztec-network.otelCollectorMetricsEndpoint" . }} > /dev/null; do - echo "Waiting for OpenTelemetry collector..." + until curl --head --silent $OTEL_COLLECTOR_ENDPOINT > /dev/null; do + echo "Waiting for OpenTelemetry collector $OTEL_COLLECTOR_ENDPOINT..." sleep 5 done echo "OpenTelemetry collector is ready!" @@ -165,12 +167,6 @@ spec: value: "{{ .Values.ethereum.chainId }}" - name: OTEL_RESOURCE_ATTRIBUTES value: service.name={{ .Release.Name }},service.namespace={{ .Release.Namespace }},service.version={{ .Chart.AppVersion }},environment={{ .Values.environment | default "production" }} - - name: OTEL_EXPORTER_OTLP_METRICS_ENDPOINT - value: {{ include "aztec-network.otelCollectorMetricsEndpoint" . | quote }} - - name: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT - value: {{ include "aztec-network.otelCollectorTracesEndpoint" . | quote }} - - name: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT - value: {{ include "aztec-network.otelCollectorLogsEndpoint" . | quote }} - name: ETHEREUM_SLOT_DURATION value: "{{ .Values.ethereum.blockTime }}" - name: AZTEC_SLOT_DURATION diff --git a/spartan/aztec-network/values.yaml b/spartan/aztec-network/values.yaml index fbc957e802f3..a9596657a6f3 100644 --- a/spartan/aztec-network/values.yaml +++ b/spartan/aztec-network/values.yaml @@ -34,6 +34,7 @@ aztec: epochDuration: 16 # how many L2 slots in an epoch epochProofClaimWindow: 13 # in L2 slots realProofs: false + l1DeploymentMnemonic: "test test test test test test test test test test test junk" # the mnemonic used when deploying contracts bootNode: peerIdPrivateKey: "" @@ -44,7 +45,7 @@ bootNode: p2pUdpPort: 40400 nodePort: 8080 logLevel: "debug" - debug: "aztec:*,-aztec:avm_simulator*,-aztec:libp2p_service*,-aztec:circuits:artifact_hash,-json-rpc*,-aztec:world-state:database,-aztec:l2_block_stream*" + debug: "" coinbaseAddress: "0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" sequencer: maxSecondsBetweenBlocks: 0 @@ -87,7 +88,7 @@ validator: p2pUdpPort: 40400 nodePort: 8080 logLevel: "debug" - debug: "aztec:*,-aztec:avm_simulator*,-aztec:libp2p_service*,-aztec:circuits:artifact_hash,-json-rpc*,-aztec:world-state:database,-aztec:l2_block_stream*" + debug: "" sequencer: maxSecondsBetweenBlocks: 0 minTxsPerBlock: 1 @@ -117,7 +118,7 @@ proverNode: p2pUdpPort: 40400 nodePort: 8080 logLevel: "debug" - debug: "aztec:*,-aztec:avm_simulator*,-aztec:libp2p_service*,-aztec:circuits:artifact_hash,-json-rpc*,-aztec:world-state:database,-aztec:l2_block_stream*" + debug: "" proverAgent: count: 0 pollIntervalMs: 1000 @@ -136,7 +137,6 @@ proverNode: pxe: logLevel: "debug" - debug: "aztec:*,-aztec:avm_simulator*,-aztec:libp2p_service*,-aztec:circuits:artifact_hash,-json-rpc*,-aztec:world-state:database,-aztec:l2_block_stream*" replicas: 1 service: nodePort: 8081 @@ -155,7 +155,6 @@ bot: enabled: true nodeUrl: "" logLevel: "debug" - debug: "aztec:*,-aztec:avm_simulator*,-aztec:libp2p_service*,-aztec:circuits:artifact_hash,-json-rpc*,-aztec:l2_block_stream,-aztec:world-state:database" replicas: 1 botPrivateKey: "0xcafe" txIntervalSeconds: 24 @@ -216,7 +215,6 @@ proverAgent: gke: spotEnabled: false logLevel: "debug" - debug: "aztec:*,-aztec:avm_simulator*,-aztec:libp2p_service*,-aztec:circuits:artifact_hash,-json-rpc*,-aztec:world-state:database,-aztec:l2_block_stream*" bb: hardwareConcurrency: "" nodeSelector: {} @@ -232,7 +230,6 @@ proverBroker: jobMaxRetries: 3 dataDirectory: "" logLevel: "debug" - debug: 
"aztec:*,-aztec:avm_simulator*,-aztec:libp2p_service*,-aztec:circuits:artifact_hash,-json-rpc*,-aztec:world-state:database,-aztec:l2_block_stream*" nodeSelector: {} resources: {} diff --git a/spartan/aztec-network/values/1-validator-with-proving-and-metrics.yaml b/spartan/aztec-network/values/1-validator-with-proving-and-metrics.yaml index 95b7f0ac6380..43814e98963e 100644 --- a/spartan/aztec-network/values/1-validator-with-proving-and-metrics.yaml +++ b/spartan/aztec-network/values/1-validator-with-proving-and-metrics.yaml @@ -37,5 +37,4 @@ jobs: enable: true telemetry: - enabled: true - otelCollectorEndpoint: http://metrics-opentelemetry-collector.metrics:4318 + enabled: true \ No newline at end of file diff --git a/spartan/aztec-network/values/16-validators-with-metrics.yaml b/spartan/aztec-network/values/16-validators-with-metrics.yaml index 8bc8f2c115c1..454ec8c2839d 100644 --- a/spartan/aztec-network/values/16-validators-with-metrics.yaml +++ b/spartan/aztec-network/values/16-validators-with-metrics.yaml @@ -6,7 +6,6 @@ # (then `./forward.sh` if you want to see it) telemetry: enabled: true - otelCollectorEndpoint: http://metrics-opentelemetry-collector.metrics:4318 validator: replicas: 16 diff --git a/spartan/aztec-network/values/3-validators-with-metrics.yaml b/spartan/aztec-network/values/3-validators-with-metrics.yaml index b20b34b51945..c3a57e252289 100644 --- a/spartan/aztec-network/values/3-validators-with-metrics.yaml +++ b/spartan/aztec-network/values/3-validators-with-metrics.yaml @@ -6,7 +6,6 @@ # (then `./forward.sh` if you want to see it) telemetry: enabled: true - otelCollectorEndpoint: http://metrics-opentelemetry-collector.metrics:4318 validator: replicas: 3 diff --git a/spartan/aztec-network/values/4-validators-with-metrics.yaml b/spartan/aztec-network/values/4-validators-with-metrics.yaml index 47387cd89c15..6f59aa627084 100644 --- a/spartan/aztec-network/values/4-validators-with-metrics.yaml +++ b/spartan/aztec-network/values/4-validators-with-metrics.yaml @@ -6,7 +6,6 @@ # (then `./forward.sh` if you want to see it) telemetry: enabled: true - otelCollectorEndpoint: http://metrics-opentelemetry-collector.metrics:4318 validator: replicas: 4 diff --git a/spartan/aztec-network/values/48-validators.yaml b/spartan/aztec-network/values/48-validators.yaml index 31d480956813..4659655e4d25 100644 --- a/spartan/aztec-network/values/48-validators.yaml +++ b/spartan/aztec-network/values/48-validators.yaml @@ -6,7 +6,6 @@ # (then `./forward.sh` if you want to see it) telemetry: enabled: true - otelCollectorEndpoint: http://metrics-opentelemetry-collector.metrics:4318 validator: replicas: 48 diff --git a/spartan/aztec-network/values/exp-1.yaml b/spartan/aztec-network/values/exp-1.yaml new file mode 100644 index 000000000000..503102aa5934 --- /dev/null +++ b/spartan/aztec-network/values/exp-1.yaml @@ -0,0 +1,43 @@ +network: + public: false + +aztec: + slotDuration: 36 + epochDuration: 32 + realProofs: false + +telemetry: + enabled: true + +images: + aztec: + pullPolicy: Always + +validator: + replicas: 1 + validatorKeys: + - 0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80 + validatorAddresses: + - 0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266 + resources: + requests: + memory: "512Mi" + validator: + disabled: false + +bootNode: + peerIdPrivateKey: 080212200ba8451c6d62b03c4441f0a466c0bce7a3a595f2cf50a055ded3305c77aa3af0 + validator: + disabled: true + +proverAgent: + replicas: 4 + +bot: + followChain: "NONE" + enabled: true + txIntervalSeconds: 1 + +jobs: + 
deployL1Verifier: + enable: false diff --git a/spartan/aztec-network/values/gcp-proving-test.yaml b/spartan/aztec-network/values/gcp-proving-test.yaml index 765f1a2ade38..546ffc61f4c6 100644 --- a/spartan/aztec-network/values/gcp-proving-test.yaml +++ b/spartan/aztec-network/values/gcp-proving-test.yaml @@ -1,6 +1,5 @@ telemetry: enabled: true - otelCollectorEndpoint: http://metrics-opentelemetry-collector.metrics:4318 validator: replicas: 1 diff --git a/spartan/aztec-network/values/multicloud-demo.yaml b/spartan/aztec-network/values/multicloud-demo.yaml index 2c4ea379e6e5..f408059d69ee 100644 --- a/spartan/aztec-network/values/multicloud-demo.yaml +++ b/spartan/aztec-network/values/multicloud-demo.yaml @@ -2,7 +2,6 @@ telemetry: enabled: false - otelCollectorEndpoint: http://metrics-opentelemetry-collector.metrics:4318 validator: replicas: 1 diff --git a/spartan/aztec-network/values/prover-node-with-agents.yaml b/spartan/aztec-network/values/prover-node-with-agents.yaml index a1b981a5fc91..2f1e14543251 100644 --- a/spartan/aztec-network/values/prover-node-with-agents.yaml +++ b/spartan/aztec-network/values/prover-node-with-agents.yaml @@ -6,13 +6,12 @@ # (then `./forward.sh` if you want to see it) telemetry: enabled: true - otelCollectorEndpoint: http://metrics-opentelemetry-collector.metrics:4318 validator: external: true bootNode: - debug: "aztec:*,-aztec:avm_simulator:*,-aztec:world-state:database,discv5:*,-JsonProxy:*" + debug: "discv5:*" validator: disabled: true diff --git a/spartan/aztec-network/values/release.yaml b/spartan/aztec-network/values/rc-1.yaml similarity index 97% rename from spartan/aztec-network/values/release.yaml rename to spartan/aztec-network/values/rc-1.yaml index 2f18e02fc513..625c58fd2aa3 100644 --- a/spartan/aztec-network/values/release.yaml +++ b/spartan/aztec-network/values/rc-1.yaml @@ -12,7 +12,7 @@ images: telemetry: enabled: true - otelCollectorEndpoint: http://34.150.160.154:4318 + otelCollectorEndpoint: http://35.197.100.168:4318 validator: replicas: 48 @@ -127,18 +127,15 @@ bootNode: disabled: true proverAgent: - replicas: 4 + replicas: 8 bb: - hardwareConcurrency: 16 + hardwareConcurrency: 31 gke: spotEnabled: true resources: requests: - memory: "64Gi" - cpu: "16" - limits: - memory: "96Gi" - cpu: "16" + memory: "116Gi" + cpu: "31" bot: followChain: "PENDING" diff --git a/spartan/aztec-network/values/rc-2.yaml b/spartan/aztec-network/values/rc-2.yaml new file mode 100644 index 000000000000..e059fb4ce9af --- /dev/null +++ b/spartan/aztec-network/values/rc-2.yaml @@ -0,0 +1,158 @@ +network: + public: true + +aztec: + slotDuration: 36 + epochDuration: 32 + realProofs: true + +images: + aztec: + pullPolicy: Always + +telemetry: + enabled: true + otelCollectorEndpoint: http://35.197.100.168:4318 + +validator: + replicas: 48 + validatorKeys: + - 0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80 + - 0x59c6995e998f97a5a0044966f0945389dc9e86dae88c7a8412f4603b6b78690d + - 0x5de4111afa1a4b94908f83103eb1f1706367c2e68ca870fc3fb9a804cdab365a + - 0x7c852118294e51e653712a81e05800f419141751be58f605c371e15141b007a6 + - 0x47e179ec197488593b187f80a00eb0da91f1b9d0b13f8733639f19c30a34926a + - 0x8b3a350cf5c34c9194ca85829a2df0ec3153be0318b5e2d3348e872092edffba + - 0x92db14e403b83dfe3df233f83dfa3a0d7096f21ca9b0d6d6b8d88b2b4ec1564e + - 0x4bbbf85ce3377467afe5d46f804f221813b2bb87f24d81f60f1fcdbf7cbf4356 + - 0xdbda1821b80551c9d65939329250298aa3472ba22feea921c0cf5d620ea67b97 + - 0x2a871d0798f97d79848a013d4936a73bf4cc922c825d33c1cf7073dff6d409c6 + - 
0xf214f2b2cd398c806f84e317254e0f0b801d0643303237d97a22a48e01628897 + - 0x701b615bbdfb9de65240bc28bd21bbc0d996645a3dd57e7b12bc2bdf6f192c82 + - 0xa267530f49f8280200edf313ee7af6b827f2a8bce2897751d06a843f644967b1 + - 0x47c99abed3324a2707c28affff1267e45918ec8c3f20b8aa892e8b065d2942dd + - 0xc526ee95bf44d8fc405a158bb884d9d1238d99f0612e9f33d006bb0789009aaa + - 0x8166f546bab6da521a8369cab06c5d2b9e46670292d85c875ee9ec20e84ffb61 + - 0xea6c44ac03bff858b476bba40716402b03e41b8e97e276d1baec7c37d42484a0 + - 0x689af8efa8c651a91ad287602527f3af2fe9f6501a7ac4b061667b5a93e037fd + - 0xde9be858da4a475276426320d5e9262ecfc3ba460bfac56360bfa6c4c28b4ee0 + - 0xdf57089febbacf7ba0bc227dafbffa9fc08a93fdc68e1e42411a14efcf23656e + - 0xeaa861a9a01391ed3d587d8a5a84ca56ee277629a8b02c22093a419bf240e65d + - 0xc511b2aa70776d4ff1d376e8537903dae36896132c90b91d52c1dfbae267cd8b + - 0x224b7eb7449992aac96d631d9677f7bf5888245eef6d6eeda31e62d2f29a83e4 + - 0x4624e0802698b9769f5bdb260a3777fbd4941ad2901f5966b854f953497eec1b + - 0x375ad145df13ed97f8ca8e27bb21ebf2a3819e9e0a06509a812db377e533def7 + - 0x18743e59419b01d1d846d97ea070b5a3368a3e7f6f0242cf497e1baac6972427 + - 0xe383b226df7c8282489889170b0f68f66af6459261f4833a781acd0804fafe7a + - 0xf3a6b71b94f5cd909fb2dbb287da47badaa6d8bcdc45d595e2884835d8749001 + - 0x4e249d317253b9641e477aba8dd5d8f1f7cf5250a5acadd1229693e262720a19 + - 0x233c86e887ac435d7f7dc64979d7758d69320906a0d340d2b6518b0fd20aa998 + - 0x85a74ca11529e215137ccffd9c95b2c72c5fb0295c973eb21032e823329b3d2d + - 0xac8698a440d33b866b6ffe8775621ce1a4e6ebd04ab7980deb97b3d997fc64fb + - 0xf076539fbce50f0513c488f32bf81524d30ca7a29f400d68378cc5b1b17bc8f2 + - 0x5544b8b2010dbdbef382d254802d856629156aba578f453a76af01b81a80104e + - 0x47003709a0a9a4431899d4e014c1fd01c5aad19e873172538a02370a119bae11 + - 0x9644b39377553a920edc79a275f45fa5399cbcf030972f771d0bca8097f9aad3 + - 0xcaa7b4a2d30d1d565716199f068f69ba5df586cf32ce396744858924fdf827f0 + - 0xfc5a028670e1b6381ea876dd444d3faaee96cffae6db8d93ca6141130259247c + - 0x5b92c5fe82d4fabee0bc6d95b4b8a3f9680a0ed7801f631035528f32c9eb2ad5 + - 0xb68ac4aa2137dd31fd0732436d8e59e959bb62b4db2e6107b15f594caf0f405f + - 0xc95eaed402c8bd203ba04d81b35509f17d0719e3f71f40061a2ec2889bc4caa7 + - 0x55afe0ab59c1f7bbd00d5531ddb834c3c0d289a4ff8f318e498cb3f004db0b53 + - 0xc3f9b30f83d660231203f8395762fa4257fa7db32039f739630f87b8836552cc + - 0x3db34a7bcc6424e7eadb8e290ce6b3e1423c6e3ef482dd890a812cd3c12bbede + - 0xae2daaa1ce8a70e510243a77187d2bc8da63f0186074e4a4e3a7bfae7fa0d639 + - 0x5ea5c783b615eb12be1afd2bdd9d96fae56dda0efe894da77286501fd56bac64 + - 0xf702e0ff916a5a76aaf953de7583d128c013e7f13ecee5d701b49917361c5e90 + - 0x7ec49efc632757533404c2139a55b4d60d565105ca930a58709a1c52d86cf5d3 + validatorAddresses: + - 0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266 + - 0x70997970C51812dc3A010C7d01b50e0d17dc79C8 + - 0x3C44CdDdB6a900fa2b585dd299e03d12FA4293BC + - 0x90F79bf6EB2c4f870365E785982E1f101E93b906 + - 0x15d34AAf54267DB7D7c367839AAf71A00a2C6A65 + - 0x9965507D1a55bcC2695C58ba16FB37d819B0A4dc + - 0x976EA74026E726554dB657fA54763abd0C3a0aa9 + - 0x14dC79964da2C08b23698B3D3cc7Ca32193d9955 + - 0x23618e81E3f5cdF7f54C3d65f7FBc0aBf5B21E8f + - 0xa0Ee7A142d267C1f36714E4a8F75612F20a79720 + - 0xBcd4042DE499D14e55001CcbB24a551F3b954096 + - 0x71bE63f3384f5fb98995898A86B02Fb2426c5788 + - 0xFABB0ac9d68B0B445fB7357272Ff202C5651694a + - 0x1CBd3b2770909D4e10f157cABC84C7264073C9Ec + - 0xdF3e18d64BC6A983f673Ab319CCaE4f1a57C7097 + - 0xcd3B766CCDd6AE721141F452C550Ca635964ce71 + - 0x2546BcD3c84621e976D8185a91A922aE77ECEc30 + - 
0xbDA5747bFD65F08deb54cb465eB87D40e51B197E + - 0xdD2FD4581271e230360230F9337D5c0430Bf44C0 + - 0x8626f6940E2eb28930eFb4CeF49B2d1F2C9C1199 + - 0x09DB0a93B389bEF724429898f539AEB7ac2Dd55f + - 0x02484cb50AAC86Eae85610D6f4Bf026f30f6627D + - 0x08135Da0A343E492FA2d4282F2AE34c6c5CC1BbE + - 0x5E661B79FE2D3F6cE70F5AAC07d8Cd9abb2743F1 + - 0x61097BA76cD906d2ba4FD106E757f7Eb455fc295 + - 0xDf37F81dAAD2b0327A0A50003740e1C935C70913 + - 0x553BC17A05702530097c3677091C5BB47a3a7931 + - 0x87BdCE72c06C21cd96219BD8521bDF1F42C78b5e + - 0x40Fc963A729c542424cD800349a7E4Ecc4896624 + - 0x9DCCe783B6464611f38631e6C851bf441907c710 + - 0x1BcB8e569EedAb4668e55145Cfeaf190902d3CF2 + - 0x8263Fce86B1b78F95Ab4dae11907d8AF88f841e7 + - 0xcF2d5b3cBb4D7bF04e3F7bFa8e27081B52191f91 + - 0x86c53Eb85D0B7548fea5C4B4F82b4205C8f6Ac18 + - 0x1aac82773CB722166D7dA0d5b0FA35B0307dD99D + - 0x2f4f06d218E426344CFE1A83D53dAd806994D325 + - 0x1003ff39d25F2Ab16dBCc18EcE05a9B6154f65F4 + - 0x9eAF5590f2c84912A08de97FA28d0529361Deb9E + - 0x11e8F3eA3C6FcF12EcfF2722d75CEFC539c51a1C + - 0x7D86687F980A56b832e9378952B738b614A99dc6 + - 0x9eF6c02FB2ECc446146E05F1fF687a788a8BF76d + - 0x08A2DE6F3528319123b25935C92888B16db8913E + - 0xe141C82D99D85098e03E1a1cC1CdE676556fDdE0 + - 0x4b23D303D9e3719D6CDf8d172Ea030F80509ea15 + - 0xC004e69C5C04A223463Ff32042dd36DabF63A25a + - 0x5eb15C0992734B5e77c888D713b4FC67b3D679A2 + - 0x7Ebb637fd68c523613bE51aad27C35C4DB199B9c + - 0x3c3E2E178C69D4baD964568415a0f0c84fd6320A + + resources: + requests: + memory: "512Mi" + validator: + disabled: false + +bootNode: + peerIdPrivateKey: 080212200ba8451c6d62b03c4441f0a466c0bce7a3a595f2cf50a055ded3305c77aa3af0 + validator: + disabled: true + +proverAgent: + replicas: 64 + bb: + hardwareConcurrency: 31 + gke: + spotEnabled: true + resources: + requests: + memory: "116Gi" + cpu: "31" + +bot: + replicas: 4 + enabled: true + txIntervalSeconds: 1 + botPrivateKey: "" + privateTransfersPerTx: 1 + publicTransfersPerTx: 1 + followChain: "PENDING" + bb: + hardwareConcurrency: 7 + resources: + requests: + memory: "8Gi" + cpu: "7" + ephemeral-storage: "8Gi" + +jobs: + deployL1Verifier: + enable: true diff --git a/spartan/aztec-network/values/release-devnet.yaml b/spartan/aztec-network/values/release-devnet.yaml new file mode 100644 index 000000000000..485e6462aeb1 --- /dev/null +++ b/spartan/aztec-network/values/release-devnet.yaml @@ -0,0 +1,50 @@ +########## +# BEWARE # +########## +# You need to deploy the metrics helm chart before using this values file. 
+# head to spartan/metrics and run `./install.sh` +# (then `./forward.sh` if you want to see it) +telemetry: + enabled: true + +validator: + replicas: 1 + validatorKeys: + - 0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80 + validatorAddresses: + - 0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266 + validator: + disabled: false + +bootNode: + validator: + disabled: true + +# use small provers to produce fake proofs +proverAgent: + replicas: 1 + resources: + requests: + memory: "4Gi" + cpu: "1" + +bot: + followChain: "PENDING" + enabled: true + txIntervalSeconds: 200 + +network: + public: true + +images: + aztec: + pullPolicy: Always + +aztec: + slotDuration: 36 + epochDuration: 32 + realProofs: false # devnet does not use real proofs + +jobs: + deployL1Verifier: + enable: false diff --git a/spartan/releases/README.md b/spartan/releases/README.md index 527762ae1120..56801f2f314a 100644 --- a/spartan/releases/README.md +++ b/spartan/releases/README.md @@ -19,7 +19,7 @@ To configure a new node, create a new directory and run the install script: ```bash mkdir val1 && cd val1 -curl -L https://raw.githubusercontent.com/AztecProtocol/aztec-packages/refs/heads/master/spartan/releases/rough-rhino/create-spartan.sh | bash +curl -L https://raw.githubusercontent.com/AztecProtocol/aztec-packages/refs/heads/master/spartan/releases/create-spartan.sh | bash ``` This will install `aztec-spartan.sh` in the current directory. You can now run it: @@ -28,9 +28,13 @@ This will install `aztec-spartan.sh` in the current directory. You can now run i ./aztec-spartan.sh config ``` -If you don't have Docker installed, the script will do it for you. It will then prompt for any required environment variables and output a `docker-compose.yml` file. +If you don't have Docker installed, the script will do it for you. It will then prompt for any required environment variables and output both a `docker-compose.yml` and an `.env` file. -You can run the command without any command to see all available options, and pass them as flags, i.e. `npx aztec-spartan config -p 8080 -p2p 40400 -n nameme`. +You will also be prompted to choose whether to use a [named volume](https://docs.docker.com/engine/storage/volumes/) (default) or a local directory to store the node's data. + +Run `./aztec-spartan.sh` without any command to see all available options, and pass them as flags, e.g. `./aztec-spartan.sh config -p 8080 -p2p 40400`. + +If you want to use a different private key for the p2p peer id, pass it with `-pk <private-key>`. 
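For example, a fully non-interactive configuration that binds the data directory to a local path and pins the peer-id key (all flag values here are illustrative):

```bash
./aztec-spartan.sh config -p 8080 -p2p 40400 -ip 1.2.3.4 -k 0x00 -d ./aztec-data -pk 00000
```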
## Running diff --git a/spartan/releases/rough-rhino/Earthfile b/spartan/releases/rough-rhino/Earthfile index 53e1f6365a78..a81d24533021 100644 --- a/spartan/releases/rough-rhino/Earthfile +++ b/spartan/releases/rough-rhino/Earthfile @@ -43,14 +43,13 @@ test-install: -p 8080 \ -p2p 40400 \ -ip 1.2.3.4 \ - -k 0x00 \ - -n test-validator + -k 0x00 # Verify docker-compose.yml was created and contains correct values - RUN test -f docker-compose.yml && \ - grep -q "name: test-validator" docker-compose.yml && \ - grep -q "P2P_UDP_ANNOUNCE_ADDR=1.2.3.4:40400" docker-compose.yml && \ - grep -q "AZTEC_PORT=8080" docker-compose.yml && \ - grep -q "VALIDATOR_PRIVATE_KEY=0x00" docker-compose.yml && \ + RUN test -f .env && \ + test -f docker-compose.yml && \ + grep -q "P2P_UDP_ANNOUNCE_ADDR=1.2.3.4:40400" .env && \ + grep -q "AZTEC_PORT=8080" .env && \ + grep -q "VALIDATOR_PRIVATE_KEY=0x00" .env && \ echo "✅ Config test passed" || \ (echo "❌ Config test failed" && exit 1) @@ -70,12 +69,11 @@ test-docker-check: test-start-stop: FROM +test-setup # First install with test configuration - RUN echo -e "\n\n" | ./aztec-spartan.sh config \ + RUN echo -e "\n" | ./aztec-spartan.sh config \ -p 8080 \ -p2p 40400 \ -ip 1.2.3.4 \ - -k 0x00 \ - -n test-validator + -k 0x00 # Test start command RUN ./aztec-spartan.sh start 2>&1 | grep -q "Starting containers" && \ echo "✅ Start command test passed" || \ @@ -91,6 +89,37 @@ test-update: echo "✅ Update command test passed" || \ (echo "❌ Update command test failed" && exit 1) +test-data-dir: + FROM +test-setup + # Test installation with data directory argument + RUN echo -e "\n\n" | ./aztec-spartan.sh config \ + -p 8080 \ + -p2p 40400 \ + -ip 1.2.3.4 \ + -k 0x00 \ + -d ./aztec-data + # Verify docker-compose.yml uses bind mount instead of named volume + RUN grep -q "volumes:" docker-compose.yml && \ + grep -q "./aztec-data:/var/lib/aztec" docker-compose.yml && \ + ! 
grep -q "volumes:\n aztec_data:" docker-compose.yml && \ + echo "✅ Data directory test passed" || \ + (echo "❌ Data directory test failed" && exit 1) + +test-p2p-key: + FROM +test-setup + # Test installation with P2P private key argument + RUN echo -e "\n\n" | ./aztec-spartan.sh config \ + -p 8080 \ + -p2p 40400 \ + -ip 1.2.3.4 \ + -k 0x00 \ + -pk 00000 + # Verify the P2P private key was set in the .env file + RUN test -f .env && \ + grep -q "PEER_ID_PRIVATE_KEY=00000" .env && \ + echo "✅ P2P private key test passed" || \ + (echo "❌ P2P private key test failed" && exit 1) + test-all: BUILD +test-help BUILD +test-no-config @@ -98,4 +127,6 @@ test-all: BUILD +test-docker-check BUILD +test-start-stop BUILD +test-update + BUILD +test-data-dir + BUILD +test-p2p-key diff --git a/spartan/releases/rough-rhino/aztec-spartan.sh b/spartan/releases/rough-rhino/aztec-spartan.sh index 5198a7bf78c8..cb1823507a74 100755 --- a/spartan/releases/rough-rhino/aztec-spartan.sh +++ b/spartan/releases/rough-rhino/aztec-spartan.sh @@ -14,7 +14,7 @@ DEFAULT_PORT="8080" DEFAULT_KEY="0x0000000000000000000000000000000000000000000000000000000000000001" # Try to get default IP from ipify API, otherwise leave empty to require user input DEFAULT_IP=$(curl -s --connect-timeout 5 https://api.ipify.org?format=json | grep -o '"ip":"[^"]*' | cut -d'"' -f4 || echo "") -DEFAULT_NAME="validator-1" +DEFAULT_BIND_MOUNT_DIR="$HOME/aztec-data" # Parse command line arguments parse_args() { @@ -36,8 +36,12 @@ parse_args() { CLI_KEY="$2" shift 2 ;; - -n|--name) - CLI_NAME="$2" + -d|--data-dir) + BIND_MOUNT_DIR="$2" + shift 2 + ;; + -pk|--p2p-id-private-key) + PEER_ID_PRIVATE_KEY="$2" shift 2 ;; *) @@ -110,14 +114,6 @@ configure_environment() { echo -e "${BLUE}Configuring environment...${NC}" - # Use CLI arguments if provided, otherwise use defaults or prompt - if [ -n "$CLI_NAME" ]; then - NAME="$CLI_NAME" - else - read -p "Validator Name [$DEFAULT_NAME]: " NAME - NAME=${NAME:-$DEFAULT_NAME} - fi - if [ -n "$CLI_P2P_PORT" ]; then P2P_PORT="$CLI_P2P_PORT" else @@ -163,50 +159,89 @@ configure_environment() { fi fi + if [ -n "$BIND_MOUNT_DIR" ]; then + BIND_MOUNT_DIR="$BIND_MOUNT_DIR" + else + read -p "Use docker volume for data directory? 
[Y/n] " -n 1 -r + echo + if [[ $REPLY =~ ^[Nn]$ ]]; then + read -p "Relative path for data directory [${DEFAULT_BIND_MOUNT_DIR}]: " BIND_MOUNT_DIR + BIND_MOUNT_DIR=${BIND_MOUNT_DIR:-$DEFAULT_BIND_MOUNT_DIR} + fi + fi + + + # Generate .env file + cat > .env << EOF +P2P_UDP_ANNOUNCE_ADDR=${IP}:${P2P_PORT} +P2P_TCP_ANNOUNCE_ADDR=${IP}:${P2P_PORT} +COINBASE=0xbaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +VALIDATOR_DISABLED=false +VALIDATOR_PRIVATE_KEY=${KEY} +SEQ_PUBLISHER_PRIVATE_KEY=${KEY} +L1_PRIVATE_KEY=${KEY} +DEBUG=aztec:*,-aztec:avm_simulator*,-aztec:circuits:artifact_hash,-aztec:libp2p_service,-json-rpc*,-aztec:world-state:database,-aztec:l2_block_stream* +LOG_LEVEL=debug +AZTEC_PORT=${PORT} +P2P_ENABLED=true +L1_CHAIN_ID=1337 +PROVER_REAL_PROOFS=true +PXE_PROVER_ENABLED=true +ETHEREUM_SLOT_DURATION=12sec +AZTEC_SLOT_DURATION=36 +AZTEC_EPOCH_DURATION=32 +AZTEC_EPOCH_PROOF_CLAIM_WINDOW_IN_L2_SLOTS=13 +ETHEREUM_HOST=http://34.48.76.131:8545 +BOOTSTRAP_NODES=enr:-Jq4QO_3szmgtG2cbEdnFDIhpGAQkc1HwfNy4-M6sG9QmQbPTmp9PMOHR3xslfR23hORiU-GpA7uM9uXw49lFcnuuvYGjWF6dGVjX25ldHdvcmsBgmlkgnY0gmlwhCIwTIOJc2VjcDI1NmsxoQKQTN17XKCwjYSSwmTc-6YzCMhd3v6Ofl8TS-WunX6LCoN0Y3CCndCDdWRwgp3Q +REGISTRY_CONTRACT_ADDRESS=0x5fbdb2315678afecb367f032d93f642f64180aa3 +GOVERNANCE_PROPOSER_CONTRACT_ADDRESS=0x9fe46736679d2d9a65f0992f2272de9f3c7fa6e0 +FEE_JUICE_CONTRACT_ADDRESS=0xe7f1725e7734ce288f8367e1bb143e90bb3f0512 +ROLLUP_CONTRACT_ADDRESS=0x2279b7a0a67db372996a5fab50d91eaa73d2ebe6 +REWARD_DISTRIBUTOR_CONTRACT_ADDRESS=0x5fc8d32690cc91d4c39d9d3abcbd16989f875707 +GOVERNANCE_CONTRACT_ADDRESS=0xcf7ed3acca5a467e9e704c703e8d87f634fb0fc9 +COIN_ISSUER_CONTRACT_ADDRESS=0xdc64a140aa3e981100a9beca4e685f962f0cf6c9 +FEE_JUICE_PORTAL_CONTRACT_ADDRESS=0x0165878a594ca255338adfa4d48449f69242eb8f +INBOX_CONTRACT_ADDRESS=0xed179b78d5781f93eb169730d8ad1be7313123f4 +OUTBOX_CONTRACT_ADDRESS=0x1016b5aaa3270a65c315c664ecb238b6db270b64 +P2P_UDP_LISTEN_ADDR=0.0.0.0:${P2P_PORT} +P2P_TCP_LISTEN_ADDR=0.0.0.0:${P2P_PORT} +DATA_DIRECTORY=/var/lib/aztec +PEER_ID_PRIVATE_KEY=${PEER_ID_PRIVATE_KEY} +EOF + # Generate docker-compose.yml cat > docker-compose.yml << EOF -name: ${NAME} services: validator: network_mode: host restart: unless-stopped - environment: - - P2P_UDP_ANNOUNCE_ADDR=${IP}:${P2P_PORT} - - P2P_TCP_ANNOUNCE_ADDR=${IP}:${P2P_PORT} - - COINBASE=0xbaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa - - VALIDATOR_DISABLED=false - - VALIDATOR_PRIVATE_KEY=${KEY} - - SEQ_PUBLISHER_PRIVATE_KEY=${KEY} - - L1_PRIVATE_KEY=${KEY} - - DEBUG=aztec:*,-aztec:avm_simulator*,-aztec:circuits:artifact_hash,-aztec:libp2p_service,-json-rpc*,-aztec:world-state:database,-aztec:l2_block_stream* - - LOG_LEVEL=debug - - AZTEC_PORT=${PORT} - - P2P_ENABLED=true - - L1_CHAIN_ID=1337 - - PROVER_REAL_PROOFS=true - - PXE_PROVER_ENABLED=true - - ETHEREUM_SLOT_DURATION=12sec - - AZTEC_SLOT_DURATION=36 - - AZTEC_EPOCH_DURATION=32 - - AZTEC_EPOCH_PROOF_CLAIM_WINDOW_IN_L2_SLOTS=13 - - ETHEREUM_HOST=http://34.48.76.131:8545 - - BOOTSTRAP_NODES=enr:-Jq4QO_3szmgtG2cbEdnFDIhpGAQkc1HwfNy4-M6sG9QmQbPTmp9PMOHR3xslfR23hORiU-GpA7uM9uXw49lFcnuuvYGjWF6dGVjX25ldHdvcmsBgmlkgnY0gmlwhCIwTIOJc2VjcDI1NmsxoQKQTN17XKCwjYSSwmTc-6YzCMhd3v6Ofl8TS-WunX6LCoN0Y3CCndCDdWRwgp3Q - - REGISTRY_CONTRACT_ADDRESS=0x5fbdb2315678afecb367f032d93f642f64180aa3 - - GOVERNANCE_PROPOSER_CONTRACT_ADDRESS=0x9fe46736679d2d9a65f0992f2272de9f3c7fa6e0 - - FEE_JUICE_CONTRACT_ADDRESS=0xe7f1725e7734ce288f8367e1bb143e90bb3f0512 - - ROLLUP_CONTRACT_ADDRESS=0x2279b7a0a67db372996a5fab50d91eaa73d2ebe6 - - 
REWARD_DISTRIBUTOR_CONTRACT_ADDRESS=0x5fc8d32690cc91d4c39d9d3abcbd16989f875707 - - GOVERNANCE_CONTRACT_ADDRESS=0xcf7ed3acca5a467e9e704c703e8d87f634fb0fc9 - - COIN_ISSUER_CONTRACT_ADDRESS=0xdc64a140aa3e981100a9beca4e685f962f0cf6c9 - - FEE_JUICE_PORTAL_CONTRACT_ADDRESS=0x0165878a594ca255338adfa4d48449f69242eb8f - - INBOX_CONTRACT_ADDRESS=0xed179b78d5781f93eb169730d8ad1be7313123f4 - - OUTBOX_CONTRACT_ADDRESS=0x1016b5aaa3270a65c315c664ecb238b6db270b64 - - P2P_UDP_LISTEN_ADDR=0.0.0.0:${P2P_PORT} - - P2P_TCP_LISTEN_ADDR=0.0.0.0:${P2P_PORT} + env_file: .env image: aztecprotocol/aztec:698cd3d62680629a3f1bfc0f82604534cedbccf3-${ARCH} - command: start --node --archiver --sequencer + entrypoint: > + sh -c ' + + test -z "\$PEER_ID_PRIVATE_KEY" -a ! -f /var/lib/aztec/p2p-private-key && node /usr/src/yarn-project/aztec/dest/bin/index.js generate-p2p-private-key | head -1 | cut -d" " -f 3 | tee /var/lib/aztec/p2p-private-key || echo "Re-using existing P2P private key" + test -z "\$PEER_ID_PRIVATE_KEY" && export PEER_ID_PRIVATE_KEY=\$(cat /var/lib/aztec/p2p-private-key) + + node --no-warnings /usr/src/yarn-project/aztec/dest/bin/index.js start --node --archiver --sequencer' EOF + # Add volume configuration based on user choice + if [ -n "$BIND_MOUNT_DIR" ]; then + cat >> docker-compose.yml << EOF + volumes: + - ${BIND_MOUNT_DIR}:/var/lib/aztec +EOF + else + cat >> docker-compose.yml << EOF + volumes: + - aztec_data:/var/lib/aztec + +volumes: + aztec_data: +EOF + fi + echo -e "${GREEN}Configuration complete! Use './aztec-spartan.sh start' to launch your node.${NC}" } @@ -283,3 +318,4 @@ case "$1" in exit 1 ;; esac + diff --git a/spartan/scripts/deploy_spartan.sh b/spartan/scripts/deploy_spartan.sh index 96a8ef2c68d2..16bb8c76628d 100755 --- a/spartan/scripts/deploy_spartan.sh +++ b/spartan/scripts/deploy_spartan.sh @@ -8,7 +8,7 @@ NAMESPACE=${3:-spartan} PROD=${4:-true} PROD_ARGS="" if [ "$PROD" = "true" ] ; then - PROD_ARGS="--set network.public=true --set telemetry.enabled=true --set telemetry.otelCollectorEndpoint=http://metrics-opentelemetry-collector.metrics:4318" + PROD_ARGS="--set network.public=true" fi SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" @@ -69,14 +69,12 @@ function upgrade() { if ! upgrade | tee "$SCRIPT_DIR/logs/$NAMESPACE-helm.log" ; then if grep 'cannot patch "'$NAMESPACE'-aztec-network-setup-l2-contracts"' "$SCRIPT_DIR/logs/$NAMESPACE-helm.log" ; then kubectl delete job $NAMESPACE-aztec-network-setup-l2-contracts -n $NAMESPACE - upgrade fi -fi -if ! upgrade | tee "$SCRIPT_DIR/logs/$NAMESPACE-helm.log" ; then if grep 'cannot patch "'$NAMESPACE'-aztec-network-deploy-l1-verifier"' "$SCRIPT_DIR/logs/$NAMESPACE-helm.log" ; then kubectl delete job $NAMESPACE-aztec-network-deploy-l1-verifier -n $NAMESPACE - upgrade fi + + upgrade fi diff --git a/spartan/scripts/get_service_address b/spartan/scripts/get_service_address new file mode 100755 index 000000000000..3f3634faaef2 --- /dev/null +++ b/spartan/scripts/get_service_address @@ -0,0 +1,47 @@ +set -eu +SERVICE_LABEL=$1 +PORT=$2 +MAX_RETRIES=30 +RETRY_INTERVAL=2 +attempt=1 + +# Get pod name +while [ $attempt -le $MAX_RETRIES ]; do + POD_NAME=$(kubectl get pods -n ${NAMESPACE} -l app=${SERVICE_LABEL} -o jsonpath='{.items[0].metadata.name}') + if [ -n "$POD_NAME" ]; then + break + fi + echo "Attempt $attempt: Waiting for ${SERVICE_LABEL} pod to be available..." 
>&2 + sleep $RETRY_INTERVAL + attempt=$((attempt + 1)) +done + +if [ -z "$POD_NAME" ]; then + echo "Error: Failed to get ${SERVICE_LABEL} pod name after $MAX_RETRIES attempts" >&2 + return 1 +fi +echo "Pod name: [${POD_NAME}]" >&2 + +# Get node name +attempt=1 +NODE_NAME="" +while [ $attempt -le $MAX_RETRIES ]; do + NODE_NAME=$(kubectl get pod ${POD_NAME} -n ${NAMESPACE} -o jsonpath='{.spec.nodeName}') + if [ -n "$NODE_NAME" ]; then + break + fi + echo "Attempt $attempt: Waiting for node name to be available..." >&2 + sleep $RETRY_INTERVAL + attempt=$((attempt + 1)) +done + +if [ -z "$NODE_NAME" ]; then + echo "Error: Failed to get node name after $MAX_RETRIES attempts" >&2 + return 1 +fi +echo "Node name: ${NODE_NAME}" >&2 + +# Get the node's external IP +NODE_IP=$(kubectl get node ${NODE_NAME} -o jsonpath='{.status.addresses[?(@.type=="ExternalIP")].address}') +echo "Node IP: ${NODE_IP}" >&2 +echo "http://${NODE_IP}:${PORT}" \ No newline at end of file diff --git a/spartan/scripts/post_deploy_spartan.sh b/spartan/scripts/post_deploy_spartan.sh index bcf66bff49be..e268174f49c3 100755 --- a/spartan/scripts/post_deploy_spartan.sh +++ b/spartan/scripts/post_deploy_spartan.sh @@ -5,7 +5,7 @@ set -o pipefail echo "Bootstrapping network with test contracts" -NAMESPACE=${1:-spartan} +export NAMESPACE=${1:-spartan} TAG=${2:-latest} SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" @@ -22,10 +22,11 @@ function get_load_balancer_url() { kubectl get svc -n $namespace -o jsonpath="{.items[?(@.metadata.name=='$service_name')].status.loadBalancer.ingress[0].hostname}" } + # Fetch the service URLs based on the namespace for injection in the test-transfer.sh -export BOOTNODE_URL=http://$(get_load_balancer_url $NAMESPACE "$NAMESPACE-aztec-network-boot-node-lb-tcp"):8080 -export PXE_URL=http://$(get_load_balancer_url $NAMESPACE "$NAMESPACE-aztec-network-pxe-lb"):8080 -export ETHEREUM_HOST=http://$(get_load_balancer_url $NAMESPACE "$NAMESPACE-aztec-network-ethereum-lb"):8545 +export BOOTNODE_URL=$($(dirname $0)/get_service_address boot-node 8080) +export PXE_URL=$($(dirname $0)/get_service_address pxe 8080) +export ETHEREUM_HOST=$($(dirname $0)/get_service_address ethereum 8545) echo "BOOTNODE_URL: $BOOTNODE_URL" echo "PXE_URL: $PXE_URL" @@ -36,6 +37,6 @@ echo "Bootstrapping contracts for test network. 
NOTE: This took one hour last run" docker run aztecprotocol/aztec:$TAG bootstrap-network \ --rpc-url $BOOTNODE_URL \ --l1-rpc-url $ETHEREUM_HOST \ - --l1-chain-id 31337 \ + --l1-chain-id 1337 \ --l1-private-key 0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80 \ --json | tee ./basic_contracts.json diff --git a/spartan/terraform/deploy-release/deploy.sh b/spartan/terraform/deploy-release/deploy.sh deleted file mode 100755 index ac13466745f8..000000000000 --- a/spartan/terraform/deploy-release/deploy.sh +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/bash - -# Usage: ./deploy.sh -# Example: ./deploy.sh rough-rhino aztecprotocol/aztec:698cd3d62680629a3f1bfc0f82604534cedbccf3-x86_64 - -set -eu - -RELEASE_NAME=$1 -AZTEC_DOCKER_IMAGE=$2 - -terraform init -backend-config="key=deploy-network/${RELEASE_NAME}/terraform.tfstate" -terraform apply -var-file="release.tfvars" -var="RELEASE_NAME=${RELEASE_NAME}" -var="AZTEC_DOCKER_IMAGE=${AZTEC_DOCKER_IMAGE}" diff --git a/spartan/terraform/deploy-release/main.tf b/spartan/terraform/deploy-release/main.tf index 458f36795ef2..73eba9e5b379 100644 --- a/spartan/terraform/deploy-release/main.tf +++ b/spartan/terraform/deploy-release/main.tf @@ -6,7 +6,7 @@ terraform { required_providers { helm = { source = "hashicorp/helm" - version = "~> 2.12.1" + version = "~> 2.16.1" } kubernetes = { source = "hashicorp/kubernetes" @@ -37,6 +37,7 @@ resource "helm_release" "aztec-gke-cluster" { chart = "aztec-network" namespace = var.RELEASE_NAME create_namespace = true + upgrade_install = true # base values file values = [file("../../aztec-network/values/${var.VALUES_FILE}")] @@ -46,6 +47,11 @@ value = var.AZTEC_DOCKER_IMAGE } + set { + name = "aztec.l1DeploymentMnemonic" + value = var.L1_DEPLOYMENT_MNEMONIC + } + # Setting timeout and wait conditions timeout = 1200 # 20 minutes in seconds wait = true diff --git a/spartan/terraform/deploy-release/release.tfvars b/spartan/terraform/deploy-release/release.tfvars deleted file mode 100644 index 916a85db918c..000000000000 --- a/spartan/terraform/deploy-release/release.tfvars +++ /dev/null @@ -1 +0,0 @@ -VALUES_FILE = "release.yaml" diff --git a/spartan/terraform/deploy-release/variables.tf b/spartan/terraform/deploy-release/variables.tf index 03930fa3d65b..0dff0d4509b1 100644 --- a/spartan/terraform/deploy-release/variables.tf +++ b/spartan/terraform/deploy-release/variables.tf @@ -18,3 +18,9 @@ variable "AZTEC_DOCKER_IMAGE" { description = "Docker image to use for the aztec network" type = string } + +variable "L1_DEPLOYMENT_MNEMONIC" { + description = "Mnemonic to use for the L1 contract deployments" + type = string + sensitive = true +} diff --git a/spartan/terraform/gke-cluster-old/firewall.tf b/spartan/terraform/gke-cluster-old/firewall.tf new file mode 100644 index 000000000000..0dc4b406ce39 --- /dev/null +++ b/spartan/terraform/gke-cluster-old/firewall.tf @@ -0,0 +1,51 @@ +# Create ingress firewall rules for UDP +resource "google_compute_firewall" "udp_ingress" { + name = "allow-udp-ingress-custom" + network = "default" + allow { + protocol = "udp" + ports = ["40400-40499", "8080", "8545"] + } + direction = "INGRESS" + source_ranges = ["0.0.0.0/0"] + target_tags = ["gke-node", "aztec-gke-node"] +} + +# Create egress firewall rules for UDP +resource "google_compute_firewall" "udp_egress" { + name = "allow-udp-egress-custom" + network = "default" + allow { + protocol = "udp" + ports = ["40400-40499", "8080", "8545"] + } + direction = "EGRESS" + destination_ranges = 
["0.0.0.0/0"] + target_tags = ["gke-node", "aztec-gke-node"] +} + +# Create ingress firewall rules for TCP +resource "google_compute_firewall" "tcp_ingress" { + name = "allow-tcp-ingress-custom" + network = "default" + allow { + protocol = "tcp" + ports = ["40400-40499", "8080", "8545"] + } + direction = "INGRESS" + source_ranges = ["0.0.0.0/0"] + target_tags = ["gke-node", "aztec-gke-node"] +} + +# Create egress firewall rules for TCP +resource "google_compute_firewall" "tcp_egress" { + name = "allow-tcp-egress-custom" + network = "default" + allow { + protocol = "tcp" + ports = ["40400-40499", "8080", "8545"] + } + direction = "EGRESS" + destination_ranges = ["0.0.0.0/0"] + target_tags = ["gke-node", "aztec-gke-node"] +} diff --git a/spartan/terraform/gke-cluster-old/main.tf b/spartan/terraform/gke-cluster-old/main.tf new file mode 100644 index 000000000000..6055ca52cc30 --- /dev/null +++ b/spartan/terraform/gke-cluster-old/main.tf @@ -0,0 +1,193 @@ +terraform { + backend "s3" { + bucket = "aztec-terraform" + key = "spartan-gke-cluster/terraform.tfstate" + region = "eu-west-2" + } + required_providers { + google = { + source = "hashicorp/google" + version = "~> 5.0" + } + } +} + +# Configure the Google Cloud provider +provider "google" { + project = var.project + region = var.region +} + +# Create the service account +resource "google_service_account" "gke_sa" { + account_id = "gke-nodes-sa" + display_name = "GKE Nodes Service Account" + description = "Service account for GKE nodes" +} + +# Add IAM roles to the service account +resource "google_project_iam_member" "gke_sa_roles" { + for_each = toset([ + "roles/logging.logWriter", + "roles/monitoring.metricWriter", + "roles/monitoring.viewer", + "roles/artifactregistry.reader" + ]) + project = var.project + role = each.key + member = "serviceAccount:${google_service_account.gke_sa.email}" +} + +# Create a new service account for Helm +resource "google_service_account" "helm_sa" { + account_id = "helm-sa" + display_name = "Helm Service Account" + description = "Service account for Helm operations" +} + +# Add IAM roles to the Helm service account +resource "google_project_iam_member" "helm_sa_roles" { + for_each = toset([ + "roles/container.admin", + "roles/storage.admin", + "roles/secretmanager.admin" + ]) + project = var.project + role = each.key + member = "serviceAccount:${google_service_account.helm_sa.email}" +} + +# Create a GKE cluster +resource "google_container_cluster" "primary" { + name = "spartan-gke" + location = var.zone + + initial_node_count = 1 + # Remove default node pool after cluster creation + remove_default_node_pool = true + + # Kubernetes version + min_master_version = "latest" + + # Network configuration + network = "default" + subnetwork = "default" + + # Master auth configuration + master_auth { + client_certificate_config { + issue_client_certificate = false + } + } +} + +# Create primary node pool with autoscaling +resource "google_container_node_pool" "primary_nodes" { + name = "primary-node-pool" + location = var.zone + cluster = google_container_cluster.primary.name + + # Enable autoscaling + autoscaling { + min_node_count = 1 + max_node_count = 5 + } + + # Node configuration + node_config { + machine_type = "t2d-standard-32" + + service_account = google_service_account.gke_sa.email + oauth_scopes = [ + "https://www.googleapis.com/auth/cloud-platform" + ] + + labels = { + env = "production" + } + tags = ["gke-node"] + } + + # Management configuration + management { + auto_repair = true + auto_upgrade = true 
+ } +} + +# Create node pool for aztec nodes (validators, prover nodes, boot nodes) +resource "google_container_node_pool" "aztec_nodes" { + name = "aztec-node-pool" + location = var.zone + cluster = google_container_cluster.primary.name + + # Enable autoscaling + autoscaling { + min_node_count = 1 + max_node_count = 128 + } + + # Node configuration + node_config { + machine_type = "t2d-standard-8" + + service_account = google_service_account.gke_sa.email + oauth_scopes = [ + "https://www.googleapis.com/auth/cloud-platform" + ] + + labels = { + env = "production" + } + tags = ["gke-node", "aztec"] + } + + # Management configuration + management { + auto_repair = true + auto_upgrade = true + } +} + +# Create spot instance node pool with autoscaling +resource "google_container_node_pool" "spot_nodes" { + name = "spot-node-pool" + location = var.zone + cluster = google_container_cluster.primary.name + + # Enable autoscaling + autoscaling { + min_node_count = 0 + max_node_count = 10 + } + + # Node configuration + node_config { + machine_type = "t2d-standard-32" + spot = true + + service_account = google_service_account.gke_sa.email + oauth_scopes = [ + "https://www.googleapis.com/auth/cloud-platform" + ] + + labels = { + env = "production" + pool = "spot" + } + tags = ["gke-node", "spot"] + + # Spot instance termination handler + taint { + key = "cloud.google.com/gke-spot" + value = "true" + effect = "NO_SCHEDULE" + } + } + + # Management configuration + management { + auto_repair = true + auto_upgrade = true + } +} diff --git a/spartan/terraform/gke-cluster-old/outputs.tf b/spartan/terraform/gke-cluster-old/outputs.tf new file mode 100644 index 000000000000..befaa28092e9 --- /dev/null +++ b/spartan/terraform/gke-cluster-old/outputs.tf @@ -0,0 +1,17 @@ +output "cluster_endpoint" { + value = google_container_cluster.primary.endpoint +} + +output "service_account_email" { + value = google_service_account.gke_sa.email +} + +output "region" { + description = "Google cloud region" + value = var.region +} + +output "kubernetes_cluster_name" { + description = "GKE Cluster Name" + value = google_container_cluster.primary.name +} diff --git a/spartan/terraform/gke-cluster-old/variables.tf b/spartan/terraform/gke-cluster-old/variables.tf new file mode 100644 index 000000000000..555458daa5d0 --- /dev/null +++ b/spartan/terraform/gke-cluster-old/variables.tf @@ -0,0 +1,11 @@ +variable "project" { + default = "testnet-440309" +} + +variable "region" { + default = "us-east4" +} + +variable "zone" { + default = "us-east4-a" +} diff --git a/spartan/terraform/gke-cluster/firewall.tf b/spartan/terraform/gke-cluster/firewall.tf index 0c5741c85066..0dc4b406ce39 100644 --- a/spartan/terraform/gke-cluster/firewall.tf +++ b/spartan/terraform/gke-cluster/firewall.tf @@ -8,7 +8,7 @@ resource "google_compute_firewall" "udp_ingress" { } direction = "INGRESS" source_ranges = ["0.0.0.0/0"] - target_tags = ["gke-node"] + target_tags = ["gke-node", "aztec-gke-node"] } # Create egress firewall rules for UDP @@ -21,7 +21,7 @@ resource "google_compute_firewall" "udp_egress" { } direction = "EGRESS" destination_ranges = ["0.0.0.0/0"] - target_tags = ["gke-node"] + target_tags = ["gke-node", "aztec-gke-node"] } # Create ingress firewall rules for TCP @@ -34,7 +34,7 @@ resource "google_compute_firewall" "tcp_ingress" { } direction = "INGRESS" source_ranges = ["0.0.0.0/0"] - target_tags = ["gke-node"] + target_tags = ["gke-node", "aztec-gke-node"] } # Create egress firewall rules for TCP @@ -47,5 +47,5 @@ resource 
"google_compute_firewall" "tcp_egress" { } direction = "EGRESS" destination_ranges = ["0.0.0.0/0"] - target_tags = ["gke-node"] + target_tags = ["gke-node", "aztec-gke-node"] } diff --git a/spartan/terraform/gke-cluster/main.tf b/spartan/terraform/gke-cluster/main.tf index 971a4aacdbc3..2c1691444854 100644 --- a/spartan/terraform/gke-cluster/main.tf +++ b/spartan/terraform/gke-cluster/main.tf @@ -1,7 +1,7 @@ terraform { backend "s3" { bucket = "aztec-terraform" - key = "spartan-gke-cluster/terraform.tfstate" + key = "aztec-gke-cluster/terraform.tfstate" region = "eu-west-2" } required_providers { @@ -20,9 +20,9 @@ provider "google" { # Create the service account resource "google_service_account" "gke_sa" { - account_id = "gke-nodes-sa" - display_name = "GKE Nodes Service Account" - description = "Service account for GKE nodes" + account_id = "aztec-gke-nodes-sa" + display_name = "Aztec GKE Nodes Service Account" + description = "Service account for aztec GKE nodes" } # Add IAM roles to the service account @@ -49,7 +49,8 @@ resource "google_service_account" "helm_sa" { resource "google_project_iam_member" "helm_sa_roles" { for_each = toset([ "roles/container.admin", - "roles/storage.admin" + "roles/storage.admin", + "roles/secretmanager.admin" ]) project = var.project role = each.key @@ -58,7 +59,7 @@ resource "google_project_iam_member" "helm_sa_roles" { # Create a GKE cluster resource "google_container_cluster" "primary" { - name = "spartan-gke" + name = var.cluster_name location = var.zone initial_node_count = 1 @@ -89,7 +90,7 @@ resource "google_container_node_pool" "primary_nodes" { # Enable autoscaling autoscaling { min_node_count = 1 - max_node_count = 5 + max_node_count = 2 } # Node configuration @@ -104,7 +105,7 @@ resource "google_container_node_pool" "primary_nodes" { labels = { env = "production" } - tags = ["gke-node"] + tags = ["aztec-gke-node"] } # Management configuration @@ -128,7 +129,7 @@ resource "google_container_node_pool" "aztec_nodes" { # Node configuration node_config { - machine_type = "t2d-standard-8" + machine_type = "t2d-standard-4" service_account = google_service_account.gke_sa.email oauth_scopes = [ @@ -138,7 +139,7 @@ resource "google_container_node_pool" "aztec_nodes" { labels = { env = "production" } - tags = ["gke-node", "aztec"] + tags = ["aztec-gke-node", "aztec"] } # Management configuration @@ -150,7 +151,7 @@ resource "google_container_node_pool" "aztec_nodes" { # Create spot instance node pool with autoscaling resource "google_container_node_pool" "spot_nodes" { - name = "spot-node-pool" + name = "aztec-spot-node-pool" location = var.zone cluster = google_container_cluster.primary.name @@ -174,7 +175,7 @@ resource "google_container_node_pool" "spot_nodes" { env = "production" pool = "spot" } - tags = ["gke-node", "spot"] + tags = ["aztec-gke-node", "spot"] # Spot instance termination handler taint { diff --git a/spartan/terraform/gke-cluster/variables.tf b/spartan/terraform/gke-cluster/variables.tf index 555458daa5d0..83e1925cbd45 100644 --- a/spartan/terraform/gke-cluster/variables.tf +++ b/spartan/terraform/gke-cluster/variables.tf @@ -3,9 +3,13 @@ variable "project" { } variable "region" { - default = "us-east4" + default = "us-west1" } variable "zone" { - default = "us-east4-a" + default = "us-west1-a" +} + +variable "cluster_name" { + default = "aztec-gke" } diff --git a/spartan/testnet-runbook.md b/spartan/testnet-runbook.md index 30a224a33cf2..f9b7dbc45582 100644 --- a/spartan/testnet-runbook.md +++ b/spartan/testnet-runbook.md @@ -45,7 +45,6 
@@ Verbose logging on Aztec nodes should be enabled by default using the following - `LOG_JSON=1` - `LOG_LEVEL=debug` -- `DEBUG=discv5*,aztec:*,-aztec:avm_simulator*,-aztec:circuits:artifact_hash,-json-rpc*,-aztec:world-state:database,-aztec:l2_block_stream*` Deployments are initiated from CI by manually running the (_name pending_) workflow. diff --git a/yarn-project/Earthfile b/yarn-project/Earthfile index c25214fc97a6..0fed665d9da9 100644 --- a/yarn-project/Earthfile +++ b/yarn-project/Earthfile @@ -302,15 +302,16 @@ test: run-e2e: ARG test + ARG log_level="" ARG debug="" FROM +end-to-end - RUN DEBUG=$debug yarn test $test + RUN DEBUG=$debug LOG_LEVEL=$log_level yarn test $test prover-client-test: FROM +build ARG test - ARG debug="" - RUN cd prover-client && DEBUG=$debug yarn test $test + ARG log_level="" + RUN cd prover-client && LOG_LEVEL=$log_level yarn test $test # NOTE: This is not in the end-to-end Earthfile as that is entirely LOCALLY commands that will go away sometime. # Running this inside the main builder as the point is not to run this through dockerization. @@ -327,7 +328,7 @@ network-test: ENV LOG_LEVEL=verbose RUN INTERLEAVED=true end-to-end/scripts/native_network_test.sh \ "$test" \ - ./deploy-l1-contracts.sh \ + "./deploy-l1-contracts.sh $validators" \ ./deploy-l2-contracts.sh \ ./boot-node.sh \ ./ethereum.sh \ diff --git a/yarn-project/archiver/package.json b/yarn-project/archiver/package.json index 3366d4b3f1b4..22579ff402ec 100644 --- a/yarn-project/archiver/package.json +++ b/yarn-project/archiver/package.json @@ -22,8 +22,6 @@ "formatting": "run -T prettier --check ./src && run -T eslint ./src", "formatting:fix": "run -T eslint --fix ./src && run -T prettier -w ./src", "test": "NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --passWithNoTests", - "start": "node ./dest", - "start:dev": "tsc-watch -p tsconfig.json --onSuccess 'yarn start'", "test:integration": "concurrently -k -s first -c reset,dim -n test,anvil \"yarn test:integration:run\" \"anvil\"", "test:integration:run": "NODE_NO_WARNINGS=1 node --experimental-vm-modules $(yarn bin jest) --no-cache --config jest.integration.config.json" }, diff --git a/yarn-project/archiver/src/archiver/archiver.test.ts b/yarn-project/archiver/src/archiver/archiver.test.ts index 69d69ed0b2d1..87173270dc59 100644 --- a/yarn-project/archiver/src/archiver/archiver.test.ts +++ b/yarn-project/archiver/src/archiver/archiver.test.ts @@ -56,14 +56,32 @@ describe('Archiver', () => { let archiver: Archiver; let blocks: L2Block[]; + let l2BlockProposedLogs: Log[]; + let l2MessageSentLogs: Log[]; + const GENESIS_ROOT = new Fr(GENESIS_ARCHIVE_ROOT).toString(); beforeEach(() => { now = +new Date(); publicClient = mock>({ + // Return a block with a reasonable timestamp getBlock: ((args: any) => ({ timestamp: args.blockNumber * BigInt(DefaultL1ContractsConfig.ethereumSlotDuration) + BigInt(now), })) as any, + // Return the logs mocked whenever the public client is queried + getLogs: ((args: any) => { + let logs = undefined; + if (args!.event!.name === 'MessageSent') { + logs = l2MessageSentLogs; + } else if (args!.event!.name === 'L2BlockProposed') { + logs = l2BlockProposedLogs; + } else { + throw new Error(`Unknown event: ${args!.event!.name}`); + } + return Promise.resolve( + logs.filter(log => log.blockNumber >= args.fromBlock && log.blockNumber <= args.toBlock), + ); + }) as any, }); instrumentation = mock({ isEnabled: () => true }); @@ -71,12 +89,17 @@ describe('Archiver', () => { archiver = new Archiver( 
publicClient, - rollupAddress, - inboxAddress, - registryAddress, + { rollupAddress, inboxAddress, registryAddress }, archiverStore, - 1000, + { pollingIntervalMs: 1000, batchSize: 1000 }, instrumentation, + { + l1GenesisTime: BigInt(now), + l1StartBlock: 0n, + epochDuration: 4, + slotDuration: 24, + ethereumSlotDuration: 12, + }, ); blocks = blockNumbers.map(x => L2Block.random(x, txsPerBlock, x + 1, 2)); @@ -97,6 +120,9 @@ describe('Archiver', () => { inboxRead = mock(); ((archiver as any).inbox as any).read = inboxRead; + + l2MessageSentLogs = []; + l2BlockProposedLogs = []; }); afterEach(async () => { @@ -127,27 +153,16 @@ describe('Archiver', () => { inboxRead.totalMessagesInserted.mockResolvedValueOnce(2n).mockResolvedValueOnce(6n); - mockGetLogs({ - messageSent: [ - makeMessageSentEventWithIndexInL2BlockSubtree(98n, 1n, 0n), - makeMessageSentEventWithIndexInL2BlockSubtree(99n, 1n, 1n), - ], - L2BlockProposed: [makeL2BlockProposedEvent(101n, 1n, blocks[0].archive.root.toString())], - }); - - mockGetLogs({ - messageSent: [ - makeMessageSentEventWithIndexInL2BlockSubtree(2504n, 2n, 0n), - makeMessageSentEventWithIndexInL2BlockSubtree(2505n, 2n, 1n), - makeMessageSentEventWithIndexInL2BlockSubtree(2505n, 2n, 2n), - makeMessageSentEventWithIndexInL2BlockSubtree(2506n, 3n, 1n), - ], - L2BlockProposed: [ - makeL2BlockProposedEvent(2510n, 2n, blocks[1].archive.root.toString()), - makeL2BlockProposedEvent(2520n, 3n, blocks[2].archive.root.toString()), - ], - }); + makeMessageSentEvent(98n, 1n, 0n); + makeMessageSentEvent(99n, 1n, 1n); + makeL2BlockProposedEvent(101n, 1n, blocks[0].archive.root.toString()); + makeMessageSentEvent(2504n, 2n, 0n); + makeMessageSentEvent(2505n, 2n, 1n); + makeMessageSentEvent(2505n, 2n, 2n); + makeMessageSentEvent(2506n, 3n, 1n); + makeL2BlockProposedEvent(2510n, 2n, blocks[1].archive.root.toString()); + makeL2BlockProposedEvent(2520n, 3n, blocks[2].archive.root.toString()); publicClient.getTransaction.mockResolvedValueOnce(rollupTxs[0]); rollupTxs.slice(1).forEach(tx => publicClient.getTransaction.mockResolvedValueOnce(tx)); @@ -228,17 +243,11 @@ describe('Archiver', () => { inboxRead.totalMessagesInserted.mockResolvedValueOnce(2n).mockResolvedValueOnce(2n); - mockGetLogs({ - messageSent: [ - makeMessageSentEventWithIndexInL2BlockSubtree(66n, 1n, 0n), - makeMessageSentEventWithIndexInL2BlockSubtree(68n, 1n, 1n), - ], - L2BlockProposed: [ - makeL2BlockProposedEvent(70n, 1n, blocks[0].archive.root.toString()), - makeL2BlockProposedEvent(80n, 2n, blocks[1].archive.root.toString()), - makeL2BlockProposedEvent(90n, 3n, badArchive), - ], - }); + makeMessageSentEvent(66n, 1n, 0n); + makeMessageSentEvent(68n, 1n, 1n); + makeL2BlockProposedEvent(70n, 1n, blocks[0].archive.root.toString()); + makeL2BlockProposedEvent(80n, 2n, blocks[1].archive.root.toString()); + makeL2BlockProposedEvent(90n, 3n, badArchive); rollupTxs.forEach(tx => publicClient.getTransaction.mockResolvedValueOnce(tx)); @@ -250,12 +259,14 @@ describe('Archiver', () => { latestBlockNum = await archiver.getBlockNumber(); expect(latestBlockNum).toEqual(numL2BlocksInTest); - const errorMessage = `Archive mismatch matching, ignoring block ${3} with archive: ${badArchive}, expected ${blocks[2].archive.root.toString()}`; - expect(loggerSpy).toHaveBeenCalledWith(errorMessage); + expect(loggerSpy).toHaveBeenCalledWith(expect.stringMatching(/archive root mismatch/i), { + actual: badArchive, + expected: blocks[2].archive.root.toString(), + }); }, 10_000); it('skip event search if no changes found', async () => { - 
const loggerSpy = jest.spyOn((archiver as any).log, 'verbose'); + const loggerSpy = jest.spyOn((archiver as any).log, 'debug'); let latestBlockNum = await archiver.getBlockNumber(); expect(latestBlockNum).toEqual(0); @@ -271,16 +282,10 @@ describe('Archiver', () => { inboxRead.totalMessagesInserted.mockResolvedValueOnce(0n).mockResolvedValueOnce(2n); - mockGetLogs({ - messageSent: [ - makeMessageSentEventWithIndexInL2BlockSubtree(66n, 1n, 0n), - makeMessageSentEventWithIndexInL2BlockSubtree(68n, 1n, 1n), - ], - L2BlockProposed: [ - makeL2BlockProposedEvent(70n, 1n, blocks[0].archive.root.toString()), - makeL2BlockProposedEvent(80n, 2n, blocks[1].archive.root.toString()), - ], - }); + makeMessageSentEvent(66n, 1n, 0n); + makeMessageSentEvent(68n, 1n, 1n); + makeL2BlockProposedEvent(70n, 1n, blocks[0].archive.root.toString()); + makeL2BlockProposedEvent(80n, 2n, blocks[1].archive.root.toString()); rollupTxs.forEach(tx => publicClient.getTransaction.mockResolvedValueOnce(tx)); @@ -292,17 +297,11 @@ describe('Archiver', () => { latestBlockNum = await archiver.getBlockNumber(); expect(latestBlockNum).toEqual(numL2BlocksInTest); - - // For some reason, this is 1-indexed. - expect(loggerSpy).toHaveBeenNthCalledWith( - 1, - `Retrieved no new L1 -> L2 messages between L1 blocks ${1n} and ${50}.`, - ); - expect(loggerSpy).toHaveBeenNthCalledWith(2, `No blocks to retrieve from ${1n} to ${50n}`); + expect(loggerSpy).toHaveBeenCalledWith(`No blocks to retrieve from 1 to 50`); }, 10_000); it('handles L2 reorg', async () => { - const loggerSpy = jest.spyOn((archiver as any).log, 'verbose'); + const loggerSpy = jest.spyOn((archiver as any).log, 'debug'); let latestBlockNum = await archiver.getBlockNumber(); expect(latestBlockNum).toEqual(0); @@ -331,16 +330,10 @@ describe('Archiver', () => { .mockResolvedValueOnce(2n) .mockResolvedValueOnce(2n); - mockGetLogs({ - messageSent: [ - makeMessageSentEventWithIndexInL2BlockSubtree(66n, 1n, 0n), - makeMessageSentEventWithIndexInL2BlockSubtree(68n, 1n, 1n), - ], - L2BlockProposed: [ - makeL2BlockProposedEvent(70n, 1n, blocks[0].archive.root.toString()), - makeL2BlockProposedEvent(80n, 2n, blocks[1].archive.root.toString()), - ], - }); + makeMessageSentEvent(66n, 1n, 0n); + makeMessageSentEvent(68n, 1n, 1n); + makeL2BlockProposedEvent(70n, 1n, blocks[0].archive.root.toString()); + makeL2BlockProposedEvent(80n, 2n, blocks[1].archive.root.toString()); rollupTxs.forEach(tx => publicClient.getTransaction.mockResolvedValueOnce(tx)); @@ -353,18 +346,12 @@ describe('Archiver', () => { latestBlockNum = await archiver.getBlockNumber(); expect(latestBlockNum).toEqual(numL2BlocksInTest); - // For some reason, this is 1-indexed. - expect(loggerSpy).toHaveBeenNthCalledWith( - 1, - `Retrieved no new L1 -> L2 messages between L1 blocks ${1n} and ${50}.`, - ); - expect(loggerSpy).toHaveBeenNthCalledWith(2, `No blocks to retrieve from ${1n} to ${50n}`); + expect(loggerSpy).toHaveBeenCalledWith(`No blocks to retrieve from 1 to 50`); // Lets take a look to see if we can find re-org stuff! 
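The key change in these tests: instead of queueing `getLogs` responses in call order with `mockResolvedValueOnce`, fixture logs are registered once and the mocked `getLogs` filters them by the requested block range, so the tests keep passing no matter how the archiver batches its queries (see the `nextRange` batching further down). A minimal standalone sketch of that pattern, with hypothetical names:

```ts
// Minimal sketch (hypothetical names) of a range-aware getLogs fake:
// fixtures are registered once, and every query filters by its own
// [fromBlock, toBlock] window instead of relying on call order.
interface LogQuery { event: { name: string }; fromBlock: bigint; toBlock: bigint; }
interface FakeLog { blockNumber: bigint; }

const registered: Record<string, FakeLog[]> = { MessageSent: [], L2BlockProposed: [] };

function getLogs(q: LogQuery): Promise<FakeLog[]> {
  const logs = registered[q.event.name];
  if (!logs) throw new Error(`Unknown event: ${q.event.name}`);
  return Promise.resolve(logs.filter(l => l.blockNumber >= q.fromBlock && l.blockNumber <= q.toBlock));
}

// Register fixtures up front; any batching pattern in the code under test still works.
registered.L2BlockProposed.push({ blockNumber: 70n }, { blockNumber: 80n });
void getLogs({ event: { name: 'L2BlockProposed' }, fromBlock: 1n, toBlock: 75n })
  .then(logs => console.log(logs)); // -> only the block-70 log falls in range
```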
await sleep(1000); - expect(loggerSpy).toHaveBeenNthCalledWith(6, `L2 prune have occurred, unwind state`); - expect(loggerSpy).toHaveBeenNthCalledWith(7, `Unwinding 1 block from block 2`); + expect(loggerSpy).toHaveBeenCalledWith(`L2 prune has been detected.`); // Should also see the block number be reduced latestBlockNum = await archiver.getBlockNumber(); @@ -382,57 +369,40 @@ describe('Archiver', () => { // TODO(palla/reorg): Add a unit test for the archiver handleEpochPrune xit('handles an upcoming L2 prune', () => {}); - // logs should be created in order of how archiver syncs. - const mockGetLogs = (logs: { - messageSent?: ReturnType[]; - L2BlockProposed?: ReturnType[]; - }) => { - if (logs.messageSent) { - publicClient.getLogs.mockResolvedValueOnce(logs.messageSent); - } - if (logs.L2BlockProposed) { - publicClient.getLogs.mockResolvedValueOnce(logs.L2BlockProposed); - } + /** + * Makes a fake L2BlockProposed event for testing purposes and registers it to be returned by the public client. + * @param l1BlockNum - L1 block number. + * @param l2BlockNum - L2 Block number. + */ + const makeL2BlockProposedEvent = (l1BlockNum: bigint, l2BlockNum: bigint, archive: `0x${string}`) => { + const log = { + blockNumber: l1BlockNum, + args: { blockNumber: l2BlockNum, archive }, + transactionHash: `0x${l2BlockNum}`, + } as Log; + l2BlockProposedLogs.push(log); }; -}); - -/** - * Makes a fake L2BlockProposed event for testing purposes. - * @param l1BlockNum - L1 block number. - * @param l2BlockNum - L2 Block number. - * @returns An L2BlockProposed event log. - */ -function makeL2BlockProposedEvent(l1BlockNum: bigint, l2BlockNum: bigint, archive: `0x${string}`) { - return { - blockNumber: l1BlockNum, - args: { blockNumber: l2BlockNum, archive }, - transactionHash: `0x${l2BlockNum}`, - } as Log; -} -/** - * Makes fake L1ToL2 MessageSent events for testing purposes. - * @param l1BlockNum - L1 block number. - * @param l2BlockNumber - The L2 block number for which the message was included. - * @param indexInSubtree - the index in the l2Block's subtree in the L1 to L2 Messages Tree. - * @returns MessageSent event logs. - */ -function makeMessageSentEventWithIndexInL2BlockSubtree( - l1BlockNum: bigint, - l2BlockNumber: bigint, - indexInSubtree: bigint, -) { - const index = indexInSubtree + InboxLeaf.smallestIndexFromL2Block(l2BlockNumber); - return { - blockNumber: l1BlockNum, - args: { - l2BlockNumber, - index, - hash: Fr.random().toString(), - }, - transactionHash: `0x${l1BlockNum}`, - } as Log; -} + /** + * Makes fake L1ToL2 MessageSent events for testing purposes and registers it to be returned by the public client. + * @param l1BlockNum - L1 block number. + * @param l2BlockNumber - The L2 block number for which the message was included. + * @param indexInSubtree - the index in the l2Block's subtree in the L1 to L2 Messages Tree. + */ + const makeMessageSentEvent = (l1BlockNum: bigint, l2BlockNumber: bigint, indexInSubtree: bigint) => { + const index = indexInSubtree + InboxLeaf.smallestIndexFromL2Block(l2BlockNumber); + const log = { + blockNumber: l1BlockNum, + args: { + l2BlockNumber, + index, + hash: Fr.random().toString(), + }, + transactionHash: `0x${l1BlockNum}`, + } as Log; + l2MessageSentLogs.push(log); + }; +}); /** * Makes a fake rollup tx for testing purposes. 
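For reference, the `index` computed by `makeMessageSentEvent` above is the flat leaf position in the L1-to-L2 message tree: the start of the block's fixed-size subtree plus the position inside it. A sketch of that arithmetic, assuming 16 leaves per block subtree with block 1 starting at leaf 0 (illustrative constants; the real values live in `InboxLeaf`):

```ts
// Illustrative sketch of InboxLeaf.smallestIndexFromL2Block-style arithmetic.
// SUBTREE_SIZE = 16 and "block 1 starts at leaf 0" are assumptions for the
// example; the real constants come from the protocol.
const SUBTREE_SIZE = 16n;

const smallestIndexFromL2Block = (l2Block: bigint): bigint => (l2Block - 1n) * SUBTREE_SIZE;

const messageIndex = (l2Block: bigint, indexInSubtree: bigint): bigint =>
  smallestIndexFromL2Block(l2Block) + indexInSubtree;

console.log(messageIndex(1n, 0n)); // 0n: first message of block 1
console.log(messageIndex(3n, 1n)); // 33n: block 3's subtree spans leaves 32..47
```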
diff --git a/yarn-project/archiver/src/archiver/archiver.ts b/yarn-project/archiver/src/archiver/archiver.ts index 94c664d0f114..849b36df3aa3 100644 --- a/yarn-project/archiver/src/archiver/archiver.ts +++ b/yarn-project/archiver/src/archiver/archiver.ts @@ -2,6 +2,7 @@ import { type GetUnencryptedLogsResponse, type InBlock, type InboxLeaf, + type L1RollupConstants, type L1ToL2MessageSource, type L2Block, type L2BlockId, @@ -15,14 +16,18 @@ import { type TxReceipt, type TxScopedL2Log, type UnencryptedL2Log, + getEpochNumberAtTimestamp, + getSlotAtTimestamp, + getSlotRangeForEpoch, + getTimestampRangeForEpoch, } from '@aztec/circuit-types'; import { + type BlockHeader, type ContractClassPublic, type ContractDataSource, type ContractInstanceWithAddress, type ExecutablePrivateFunctionWithMembershipProof, type FunctionSelector, - type Header, type PrivateLog, type PublicFunction, type UnconstrainedFunctionWithMembershipProof, @@ -38,7 +43,7 @@ import { Fr } from '@aztec/foundation/fields'; import { type DebugLogger, createDebugLogger } from '@aztec/foundation/log'; import { RunningPromise } from '@aztec/foundation/running-promise'; import { count } from '@aztec/foundation/string'; -import { Timer } from '@aztec/foundation/timer'; +import { elapsed } from '@aztec/foundation/timer'; import { InboxAbi, RollupAbi } from '@aztec/l1-artifacts'; import { ContractClassRegisteredEvent, @@ -61,13 +66,7 @@ import { import { type ArchiverDataStore, type ArchiverL1SynchPoint } from './archiver_store.js'; import { type ArchiverConfig } from './config.js'; -import { retrieveBlockFromRollup, retrieveL1ToL2Messages } from './data_retrieval.js'; -import { - getEpochNumberAtTimestamp, - getSlotAtTimestamp, - getSlotRangeForEpoch, - getTimestampRangeForEpoch, -} from './epoch_helpers.js'; +import { retrieveBlocksFromRollup, retrieveL1ToL2Messages } from './data_retrieval.js'; import { ArchiverInstrumentation } from './instrumentation.js'; import { type DataRetrieval } from './structs/data_retrieval.js'; import { type L1Published } from './structs/published.js'; @@ -112,25 +111,23 @@ export class Archiver implements ArchiveSource { */ constructor( private readonly publicClient: PublicClient, - private readonly rollupAddress: EthAddress, - readonly inboxAddress: EthAddress, - private readonly registryAddress: EthAddress, + private readonly l1Addresses: { rollupAddress: EthAddress; inboxAddress: EthAddress; registryAddress: EthAddress }, readonly dataStore: ArchiverDataStore, - private readonly pollingIntervalMs: number, + private readonly config: { pollingIntervalMs: number; batchSize: number }, private readonly instrumentation: ArchiverInstrumentation, - private readonly l1constants: L1RollupConstants = EmptyL1RollupConstants, + private readonly l1constants: L1RollupConstants, private readonly log: DebugLogger = createDebugLogger('aztec:archiver'), ) { this.store = new ArchiverStoreHelper(dataStore); this.rollup = getContract({ - address: rollupAddress.toString(), + address: l1Addresses.rollupAddress.toString(), abi: RollupAbi, client: publicClient, }); this.inbox = getContract({ - address: inboxAddress.toString(), + address: l1Addresses.inboxAddress.toString(), abi: InboxAbi, client: publicClient, }); @@ -171,12 +168,13 @@ export class Archiver implements ArchiveSource { const archiver = new Archiver( publicClient, - config.l1Contracts.rollupAddress, - config.l1Contracts.inboxAddress, - config.l1Contracts.registryAddress, + config.l1Contracts, archiverStore, - config.archiverPollingIntervalMS ?? 
10_000, - new ArchiverInstrumentation(telemetry), + { + pollingIntervalMs: config.archiverPollingIntervalMS ?? 10_000, + batchSize: config.archiverBatchSize ?? 100, + }, + new ArchiverInstrumentation(telemetry, () => archiverStore.estimateSize()), { l1StartBlock, l1GenesisTime, epochDuration, slotDuration, ethereumSlotDuration }, ); await archiver.start(blockUntilSynced); @@ -193,11 +191,10 @@ export class Archiver implements ArchiveSource { } if (blockUntilSynced) { - this.log.info(`Performing initial chain sync to rollup contract ${this.rollupAddress.toString()}`); await this.sync(blockUntilSynced); } - this.runningPromise = new RunningPromise(() => this.safeSync(), this.pollingIntervalMs); + this.runningPromise = new RunningPromise(() => this.safeSync(), this.config.pollingIntervalMs); this.runningPromise.start(); } @@ -214,9 +211,8 @@ export class Archiver implements ArchiveSource { /** * Fetches logs from L1 contracts and processes them. - * @param blockUntilSynced - If true, blocks until the archiver has fully synced. */ - private async sync(blockUntilSynced: boolean) { + private async sync(initialRun: boolean) { /** * We keep track of three "pointers" to L1 blocks: * 1. the last L1 block that published an L2 block @@ -233,6 +229,15 @@ export class Archiver implements ArchiveSource { const { blocksSynchedTo = l1StartBlock, messagesSynchedTo = l1StartBlock } = await this.store.getSynchPoint(); const currentL1BlockNumber = await this.publicClient.getBlockNumber(); + if (initialRun) { + this.log.info( + `Starting archiver sync to rollup contract ${this.l1Addresses.rollupAddress.toString()} from L1 block ${Math.min( + Number(blocksSynchedTo), + Number(messagesSynchedTo), + )} to current L1 block ${currentL1BlockNumber}`, + ); + } + // ********** Ensuring Consistency of data pulled from L1 ********** /** @@ -253,7 +258,7 @@ export class Archiver implements ArchiveSource { */ // ********** Events that are processed per L1 block ********** - await this.handleL1ToL2Messages(blockUntilSynced, messagesSynchedTo, currentL1BlockNumber); + await this.handleL1ToL2Messages(messagesSynchedTo, currentL1BlockNumber); // Store latest l1 block number and timestamp seen. Used for epoch and slots calculations. if (!this.l1BlockNumber || this.l1BlockNumber < currentL1BlockNumber) { @@ -264,16 +269,17 @@ export class Archiver implements ArchiveSource { // ********** Events that are processed per L2 block ********** if (currentL1BlockNumber > blocksSynchedTo) { // First we retrieve new L2 blocks - const { provenBlockNumber } = await this.handleL2blocks(blockUntilSynced, blocksSynchedTo, currentL1BlockNumber); + const { provenBlockNumber } = await this.handleL2blocks(blocksSynchedTo, currentL1BlockNumber); // And then we prune the current epoch if it'd reorg on next submission. // Note that we don't do this before retrieving L2 blocks because we may need to retrieve // blocks from more than 2 epochs ago, so we want to make sure we have the latest view of // the chain locally before we start unwinding stuff. This can be optimized by figuring out // up to which point we're pruning, and then requesting L2 blocks up to that point only. 
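The timestamp bookkeeping above feeds the slot/epoch helpers this file now imports from `@aztec/circuit-types` (`getSlotAtTimestamp`, `getEpochNumberAtTimestamp`, and friends). A rough sketch of the arithmetic those helpers are assumed to perform, using the constants from the unit test earlier (24s L2 slots, 4 slots per epoch):

```ts
// Rough sketch, assuming the semantics the helper names suggest: a slot is
// seconds-since-L1-genesis divided by the slot duration, and an epoch is the
// slot divided by the epoch length (both floor divisions on bigints).
interface RollupTimeConstants {
  l1GenesisTime: bigint; // unix seconds
  slotDuration: number;  // seconds per L2 slot
  epochDuration: number; // slots per epoch
}

const getSlotAtTimestamp = (ts: bigint, c: RollupTimeConstants): bigint =>
  ts < c.l1GenesisTime ? 0n : (ts - c.l1GenesisTime) / BigInt(c.slotDuration);

const getEpochNumberAtTimestamp = (ts: bigint, c: RollupTimeConstants): bigint =>
  getSlotAtTimestamp(ts, c) / BigInt(c.epochDuration);

const c = { l1GenesisTime: 1_700_000_000n, slotDuration: 24, epochDuration: 4 };
console.log(getSlotAtTimestamp(c.l1GenesisTime + 100n, c));        // 4n
console.log(getEpochNumberAtTimestamp(c.l1GenesisTime + 100n, c)); // 1n
```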
await this.handleEpochPrune(provenBlockNumber, currentL1BlockNumber); + } - const storeSizes = this.store.estimateSize(); - this.instrumentation.recordDBMetrics(storeSizes); + if (initialRun) { + this.log.info(`Initial archiver sync to L1 block ${currentL1BlockNumber} complete.`); } } @@ -289,24 +295,31 @@ export class Archiver implements ArchiveSource { if (canPrune) { const blocksToUnwind = localPendingBlockNumber - provenBlockNumber; - this.log.verbose( - `L2 prune will occur on next submission. ` + - `Unwinding ${count(blocksToUnwind, 'block')} from block ${localPendingBlockNumber} ` + - `to the last proven block ${provenBlockNumber}.`, - ); + this.log.debug(`L2 prune will occur on next block submission.`); await this.store.unwindBlocks(Number(localPendingBlockNumber), Number(blocksToUnwind)); - this.log.verbose(`Unwound ${count(blocksToUnwind, 'block')}. New L2 block is ${await this.getBlockNumber()}.`); + this.log.warn( + `Unwound ${count(blocksToUnwind, 'block')} from L2 block ${localPendingBlockNumber} ` + + `to ${provenBlockNumber} due to predicted reorg at L1 block ${currentL1BlockNumber}. ` + + `Updated L2 latest block is ${await this.getBlockNumber()}.`, + ); // TODO(palla/reorg): Do we need to set the block synched L1 block number here? // Seems like the next iteration should handle this. // await this.store.setBlockSynchedL1BlockNumber(currentL1BlockNumber); } } - private async handleL1ToL2Messages( - blockUntilSynced: boolean, - messagesSynchedTo: bigint, - currentL1BlockNumber: bigint, - ) { + private nextRange(end: bigint, limit: bigint): [bigint, bigint] { + const batchSize = (this.config.batchSize * this.l1constants.slotDuration) / this.l1constants.ethereumSlotDuration; + const nextStart = end + 1n; + const nextEnd = nextStart + BigInt(batchSize); + if (nextEnd > limit) { + return [nextStart, limit]; + } + return [nextStart, nextEnd]; + } + + private async handleL1ToL2Messages(messagesSynchedTo: bigint, currentL1BlockNumber: bigint) { + this.log.trace(`Handling L1 to L2 messages from ${messagesSynchedTo} to ${currentL1BlockNumber}.`); if (currentL1BlockNumber <= messagesSynchedTo) { return; } @@ -316,30 +329,30 @@ export class Archiver implements ArchiveSource { if (localTotalMessageCount === destinationTotalMessageCount) { await this.store.setMessageSynchedL1BlockNumber(currentL1BlockNumber); - this.log.verbose( - `Retrieved no new L1 -> L2 messages between L1 blocks ${messagesSynchedTo + 1n} and ${currentL1BlockNumber}.`, + this.log.trace( + `Retrieved no new L1 to L2 messages between L1 blocks ${messagesSynchedTo + 1n} and ${currentL1BlockNumber}.`, ); return; } - const retrievedL1ToL2Messages = await retrieveL1ToL2Messages( - this.inbox, - blockUntilSynced, - messagesSynchedTo + 1n, - currentL1BlockNumber, - ); - - await this.store.addL1ToL2Messages(retrievedL1ToL2Messages); - - this.log.verbose( - `Retrieved ${retrievedL1ToL2Messages.retrievedData.length} new L1 -> L2 messages between L1 blocks ${ - messagesSynchedTo + 1n - } and ${currentL1BlockNumber}.`, - ); + // Retrieve messages in batches. 
Each batch is estimated to accommodate up to L2 'batchSize' blocks, + // computed using the L2 block time vs the L1 block time. + let searchStartBlock: bigint = messagesSynchedTo; + let searchEndBlock: bigint = messagesSynchedTo; + do { + [searchStartBlock, searchEndBlock] = this.nextRange(searchEndBlock, currentL1BlockNumber); + this.log.trace(`Retrieving L1 to L2 messages between L1 blocks ${searchStartBlock} and ${searchEndBlock}.`); + const retrievedL1ToL2Messages = await retrieveL1ToL2Messages(this.inbox, searchStartBlock, searchEndBlock); + this.log.verbose( + `Retrieved ${retrievedL1ToL2Messages.retrievedData.length} new L1 to L2 messages between L1 blocks ${searchStartBlock} and ${searchEndBlock}.`, + ); + await this.store.addL1ToL2Messages(retrievedL1ToL2Messages); + for (const msg of retrievedL1ToL2Messages.retrievedData) { + this.log.debug(`Downloaded L1 to L2 message`, { leaf: msg.leaf.toString(), index: msg.index }); + } + } while (searchEndBlock < currentL1BlockNumber); } private async handleL2blocks( - blockUntilSynced: boolean, blocksSynchedTo: bigint, currentL1BlockNumber: bigint, ): Promise<{ provenBlockNumber: bigint }> { @@ -359,10 +372,13 @@ localBlockForDestinationProvenBlockNumber && provenArchive === localBlockForDestinationProvenBlockNumber.archive.root.toString() ) { - this.log.verbose(`Updating the proven block number to ${provenBlockNumber} and epoch to ${provenEpochNumber}`); await this.store.setProvenL2BlockNumber(Number(provenBlockNumber)); // if we are here then we must have a valid proven epoch number await this.store.setProvenL2EpochNumber(Number(provenEpochNumber)); + this.log.info(`Updated proven chain to block ${provenBlockNumber} (epoch ${provenEpochNumber})`, { + provenBlockNumber, + provenEpochNumber, + }); } this.instrumentation.updateLastProvenBlock(Number(provenBlockNumber)); }; @@ -372,7 +388,7 @@ const noBlocks = localPendingBlockNumber === 0n && pendingBlockNumber === 0n; if (noBlocks) { await this.store.setBlockSynchedL1BlockNumber(currentL1BlockNumber); - this.log.verbose(`No blocks to retrieve from ${blocksSynchedTo + 1n} to ${currentL1BlockNumber}`); + this.log.debug(`No blocks to retrieve from ${blocksSynchedTo + 1n} to ${currentL1BlockNumber}`); return { provenBlockNumber }; } @@ -389,7 +405,7 @@ const noBlockSinceLast = localPendingBlock && pendingArchive === localPendingBlock.archive.root.toString(); if (noBlockSinceLast) { await this.store.setBlockSynchedL1BlockNumber(currentL1BlockNumber); - this.log.verbose(`No blocks to retrieve from ${blocksSynchedTo + 1n} to ${currentL1BlockNumber}`); + this.log.debug(`No blocks to retrieve from ${blocksSynchedTo + 1n} to ${currentL1BlockNumber}`); return { provenBlockNumber }; } @@ -399,7 +415,7 @@ // or the L1 have reorged. // In any case, we have to figure out how far into the past the action will take us. // For simplicity here, we will simply rewind until we end in a block that is also on the chain on L1. - this.log.verbose(`L2 prune have occurred, unwind state`); + this.log.debug(`L2 prune has been detected.`); let tipAfterUnwind = localPendingBlockNumber; while (true) { @@ -417,55 +433,70 @@ } const blocksToUnwind = localPendingBlockNumber - tipAfterUnwind; - this.log.verbose( - `Unwinding ${blocksToUnwind} block${blocksToUnwind > 1n ? 
's' : ''} from block ${localPendingBlockNumber}`, - ); - await this.store.unwindBlocks(Number(localPendingBlockNumber), Number(blocksToUnwind)); + + this.log.warn( + `Unwound ${count(blocksToUnwind, 'block')} from L2 block ${localPendingBlockNumber} ` + + `due to mismatched block hashes at L1 block ${currentL1BlockNumber}. ` + + `Updated L2 latest block is ${await this.getBlockNumber()}.`, + ); } } - this.log.debug(`Retrieving blocks from ${blocksSynchedTo + 1n} to ${currentL1BlockNumber}`); - const retrievedBlocks = await retrieveBlockFromRollup( - this.rollup, - this.publicClient, - blockUntilSynced, - blocksSynchedTo + 1n, // TODO(palla/reorg): If the L2 reorg was due to an L1 reorg, we need to start search earlier - currentL1BlockNumber, - this.log, - ); + // Retrieve L2 blocks in batches. Each batch is estimated to accommodate up to L2 'batchSize' blocks, + // computed using the L2 block time vs the L1 block time. + let searchStartBlock: bigint = blocksSynchedTo; + let searchEndBlock: bigint = blocksSynchedTo; + + do { + [searchStartBlock, searchEndBlock] = this.nextRange(searchEndBlock, currentL1BlockNumber); + + this.log.trace(`Retrieving L2 blocks from L1 block ${searchStartBlock} to ${searchEndBlock}`); + const retrievedBlocks = await retrieveBlocksFromRollup( + this.rollup, + this.publicClient, + searchStartBlock, // TODO(palla/reorg): If the L2 reorg was due to an L1 reorg, we need to start search earlier + searchEndBlock, + this.log, + ); - if (retrievedBlocks.length === 0) { - // We are not calling `setBlockSynchedL1BlockNumber` because it may cause sync issues if based off infura. - // See further details in earlier comments. 
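Both retrieval loops lean on `nextRange`, which converts the configured L2-block batch size into an L1-block window via the slot-duration ratio, clamped at the current head. A self-contained sketch of the walk with the unit test's timings (24s L2 slots over 12s L1 slots, so 100 L2 blocks map to roughly 200 L1 blocks per query):

```ts
// Standalone restatement of the nextRange walk, for illustration only.
const batchSize = 100;           // L2 blocks per batch (ARCHIVER_BATCH_SIZE default)
const slotDuration = 24;         // seconds per L2 slot (values from the unit test)
const ethereumSlotDuration = 12; // seconds per L1 slot

function nextRange(end: bigint, limit: bigint): [bigint, bigint] {
  // 100 L2 blocks * 24s / 12s = 200 L1 blocks per window.
  const l1Span = BigInt((batchSize * slotDuration) / ethereumSlotDuration);
  const nextStart = end + 1n;
  const nextEnd = nextStart + l1Span;
  return nextEnd > limit ? [nextStart, limit] : [nextStart, nextEnd];
}

// Walking from a sync point of 0 up to an L1 head of 450 issues three queries:
let start = 0n;
let end = 0n;
const head = 450n;
do {
  [start, end] = nextRange(end, head);
  console.log(`query L1 blocks ${start}..${end}`); // 1..201, 202..402, 403..450
} while (end < head);
```

Each iteration resumes at `end + 1`, so no L1 block is scanned twice and the final window shrinks to whatever remains below the head.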
+ this.log.trace(`Retrieved no new L2 blocks from L1 block ${searchStartBlock} to ${searchEndBlock}`); + continue; + } - this.log.debug( - `Retrieved ${retrievedBlocks.length} new L2 blocks between L1 blocks ${ - blocksSynchedTo + 1n - } and ${currentL1BlockNumber}.`, - ); + const lastProcessedL1BlockNumber = retrievedBlocks[retrievedBlocks.length - 1].l1.blockNumber; + this.log.debug( + `Retrieved ${retrievedBlocks.length} new L2 blocks between L1 blocks ${searchStartBlock} and ${searchEndBlock} with last processed L1 block ${lastProcessedL1BlockNumber}.`, + ); - const lastProcessedL1BlockNumber = retrievedBlocks[retrievedBlocks.length - 1].l1.blockNumber; + for (const block of retrievedBlocks) { + this.log.debug(`Ingesting new L2 block ${block.data.number} with ${block.data.body.txEffects.length} txs`, { + blockHash: block.data.hash(), + l1BlockNumber: block.l1.blockNumber, + ...block.data.header.globalVariables.toInspect(), + ...block.data.getStats(), + }); + } - this.log.debug(`last processed L1 block: [${lastProcessedL1BlockNumber}]`); - for (const block of retrievedBlocks) { - this.log.debug(`ingesting new L2 block`, block.data.header.globalVariables.toFriendlyJSON()); - } + const [processDuration] = await elapsed(() => this.store.addBlocks(retrievedBlocks)); + this.instrumentation.processNewBlocks( + processDuration / retrievedBlocks.length, + retrievedBlocks.map(b => b.data), + ); - const timer = new Timer(); - await this.store.addBlocks(retrievedBlocks); + for (const block of retrievedBlocks) { + this.log.info(`Downloaded L2 block ${block.data.number}`, { + blockHash: block.data.hash(), + blockNumber: block.data.number, + }); + } + } while (searchEndBlock < currentL1BlockNumber); // Important that we update AFTER inserting the blocks. await updateProvenBlock(); - this.instrumentation.processNewBlocks( - timer.ms() / retrievedBlocks.length, - retrievedBlocks.map(b => b.data), - ); - const lastL2BlockNumber = retrievedBlocks[retrievedBlocks.length - 1].data.number; - this.log.verbose(`Processed ${retrievedBlocks.length} new L2 blocks up to ${lastL2BlockNumber}`); return { provenBlockNumber }; } @@ -483,11 +514,11 @@ export class Archiver implements ArchiveSource { } public getRollupAddress(): Promise { - return Promise.resolve(this.rollupAddress); + return Promise.resolve(this.l1Addresses.rollupAddress); } public getRegistryAddress(): Promise { - return Promise.resolve(this.registryAddress); + return Promise.resolve(this.l1Addresses.registryAddress); } public getL1BlockNumber(): bigint { @@ -586,7 +617,7 @@ export class Archiver implements ArchiveSource { return blocks.length === 0 ? undefined : blocks[0].data; } - public async getBlockHeader(number: number | 'latest'): Promise
{ + public async getBlockHeader(number: number | 'latest'): Promise { if (number === 'latest') { number = await this.store.getSynchedL2BlockNumber(); } @@ -828,7 +859,7 @@ class ArchiverStoreHelper .map(log => ContractClassRegisteredEvent.fromLog(log.data)) .map(e => e.toContractClassPublic()); if (contractClasses.length > 0) { - contractClasses.forEach(c => this.#log.verbose(`Registering contract class ${c.id.toString()}`)); + contractClasses.forEach(c => this.#log.verbose(`${Operation[operation]} contract class ${c.id.toString()}`)); if (operation == Operation.Store) { // TODO: Will probably want to create some worker threads to compute these bytecode commitments as they are expensive return await this.store.addContractClasses( @@ -983,7 +1014,7 @@ class ArchiverStoreHelper getBlocks(from: number, limit: number): Promise[]> { return this.store.getBlocks(from, limit); } - getBlockHeaders(from: number, limit: number): Promise { + getBlockHeaders(from: number, limit: number): Promise { return this.store.getBlockHeaders(from, limit); } getTxEffect(txHash: TxHash): Promise | undefined> { @@ -1068,19 +1099,3 @@ class ArchiverStoreHelper return this.store.estimateSize(); } } - -type L1RollupConstants = { - l1StartBlock: bigint; - l1GenesisTime: bigint; - slotDuration: number; - epochDuration: number; - ethereumSlotDuration: number; -}; - -const EmptyL1RollupConstants: L1RollupConstants = { - l1StartBlock: 0n, - l1GenesisTime: 0n, - epochDuration: 0, - slotDuration: 0, - ethereumSlotDuration: 0, -}; diff --git a/yarn-project/archiver/src/archiver/archiver_store.ts b/yarn-project/archiver/src/archiver/archiver_store.ts index 281fb80c41dd..11d0abd7c35e 100644 --- a/yarn-project/archiver/src/archiver/archiver_store.ts +++ b/yarn-project/archiver/src/archiver/archiver_store.ts @@ -10,11 +10,11 @@ import { type TxScopedL2Log, } from '@aztec/circuit-types'; import { + type BlockHeader, type ContractClassPublic, type ContractInstanceWithAddress, type ExecutablePrivateFunctionWithMembershipProof, type Fr, - type Header, type PrivateLog, type UnconstrainedFunctionWithMembershipProof, } from '@aztec/circuits.js'; @@ -71,7 +71,7 @@ export interface ArchiverDataStore { * @param limit - The number of blocks to return. * @returns The requested L2 block headers. */ - getBlockHeaders(from: number, limit: number): Promise; + getBlockHeaders(from: number, limit: number): Promise; /** * Gets a tx effect. diff --git a/yarn-project/archiver/src/archiver/config.ts b/yarn-project/archiver/src/archiver/config.ts index 6aa953cd087d..d739314d468c 100644 --- a/yarn-project/archiver/src/archiver/config.ts +++ b/yarn-project/archiver/src/archiver/config.ts @@ -18,24 +18,19 @@ import { type ConfigMappingsType, getConfigFromMappings, numberConfigHelper } fr * The archiver configuration. */ export type ArchiverConfig = { - /** - * URL for an archiver service. If set, will return an archiver client as opposed to starting a new one. - */ + /** URL for an archiver service. If set, will return an archiver client as opposed to starting a new one. */ archiverUrl?: string; - /** - * The polling interval in ms for retrieving new L2 blocks and encrypted logs. - */ + /** The polling interval in ms for retrieving new L2 blocks and encrypted logs. */ archiverPollingIntervalMS?: number; - /** - * The polling interval viem uses in ms - */ + /** The number of L2 blocks the archiver will attempt to download at a time. 
diff --git a/yarn-project/archiver/src/archiver/data_retrieval.ts b/yarn-project/archiver/src/archiver/data_retrieval.ts index 3249a5fc541a..7cc84ab185cb 100644 --- a/yarn-project/archiver/src/archiver/data_retrieval.ts +++ b/yarn-project/archiver/src/archiver/data_retrieval.ts @@ -1,5 +1,6 @@ import { Body, InboxLeaf, L2Block } from '@aztec/circuit-types'; -import { AppendOnlyTreeSnapshot, Fr, Header, Proof } from '@aztec/circuits.js'; +import { AppendOnlyTreeSnapshot, BlockHeader, Fr, Proof } from '@aztec/circuits.js'; +import { asyncPool } from '@aztec/foundation/async-pool'; import { type EthAddress } from '@aztec/foundation/eth-address'; import { type ViemSignature } from '@aztec/foundation/eth-signature'; import { type DebugLogger, createDebugLogger } from '@aztec/foundation/log'; @@ -25,16 +26,14 @@ import { type L1Published, type L1PublishedData } from './structs/published.js'; * Fetches new L2 blocks. * @param publicClient - The viem public client to use for transaction retrieval. * @param rollupAddress - The address of the rollup contract. - * @param blockUntilSynced - If true, blocks until the archiver has fully synced. * @param searchStartBlock - The block number to use for starting the search. * @param searchEndBlock - The highest block number that we should search up to. * @param expectedNextL2BlockNum - The next L2 block number that we expect to find. * @returns An array of blocks, as well as the next eth block to search from. */ -export async function retrieveBlockFromRollup( +export async function retrieveBlocksFromRollup( rollup: GetContractReturnType<typeof RollupAbi, PublicClient<HttpTransport, Chain>>, publicClient: PublicClient, - blockUntilSynced: boolean, searchStartBlock: bigint, searchEndBlock: bigint, logger: DebugLogger = createDebugLogger('aztec:archiver'), @@ -58,13 +57,13 @@ const lastLog = l2BlockProposedLogs[l2BlockProposedLogs.length - 1]; logger.debug( - `Got L2 block processed logs for ${l2BlockProposedLogs[0].blockNumber}-${lastLog.blockNumber} between ${searchStartBlock}-${searchEndBlock} L1 blocks`, + `Got ${l2BlockProposedLogs.length} L2 block processed logs for L2 blocks ${l2BlockProposedLogs[0].args.blockNumber}-${lastLog.args.blockNumber} between L1 blocks ${searchStartBlock}-${searchEndBlock}`, ); const newBlocks = await processL2BlockProposedLogs(rollup, publicClient, l2BlockProposedLogs, logger); retrievedBlocks.push(...newBlocks); searchStartBlock = lastLog.blockNumber! + 1n; - } while (blockUntilSynced && searchStartBlock <= searchEndBlock); + } while (searchStartBlock <= searchEndBlock); return retrievedBlocks; } @@ -82,14 +81,13 @@ export async function processL2BlockProposedLogs( logger: DebugLogger, ): Promise<L1Published<L2Block>[]> { const retrievedBlocks: L1Published<L2Block>[] = []; - for (const log of logs) { + await asyncPool(10, logs, async log => { const l2BlockNumber = log.args.blockNumber!; const archive = log.args.archive!; const archiveFromChain = await rollup.read.archiveAt([l2BlockNumber]); // The value from the event and contract will match only if the block is in the chain. if (archive === archiveFromChain) { - // TODO: Fetch blocks from calldata in parallel const block = await getBlockFromRollupTx(publicClient, log.transactionHash!, l2BlockNumber); const l1: L1PublishedData = { @@ -100,11 +98,12 @@ retrievedBlocks.push({ data: block, l1 }); } else { - logger.warn( - `Archive mismatch matching, ignoring block ${l2BlockNumber} with archive: ${archive}, expected ${archiveFromChain}`, - ); + logger.warn(`Ignoring L2 block ${l2BlockNumber} due to archive root mismatch`, { + actual: archive, + expected: archiveFromChain, + }); } - } + }); return retrievedBlocks; } @@ -129,10 +128,7 @@ async function getBlockFromRollupTx( l2BlockNum: bigint, ): Promise<L2Block> { const { input: data } = await publicClient.getTransaction({ hash: txHash }); - const { functionName, args } = decodeFunctionData({ - abi: RollupAbi, - data, - }); + const { functionName, args } = decodeFunctionData({ abi: RollupAbi, data }); const allowedMethods = ['propose', 'proposeAndClaim']; @@ -154,7 +150,7 @@ Hex, ]; - const header = Header.fromBuffer(Buffer.from(hexToBytes(decodedArgs.header))); + const header = BlockHeader.fromBuffer(Buffer.from(hexToBytes(decodedArgs.header))); const blockBody = Body.fromBuffer(Buffer.from(hexToBytes(bodyHex))); const blockNumberFromHeader = header.globalVariables.blockNumber.toBigInt(); @@ -184,7 +180,6 @@ */ export async function retrieveL1ToL2Messages( inbox: GetContractReturnType<typeof InboxAbi, PublicClient<HttpTransport, Chain>>, - blockUntilSynced: boolean, searchStartBlock: bigint, searchEndBlock: bigint, ): Promise<DataRetrieval<InboxLeaf>> { @@ -213,7 +208,7 @@ // handles the case when there are no new messages: searchStartBlock = (messageSentLogs.findLast(msgLog => !!msgLog)?.blockNumber || searchStartBlock) + 1n; - } while (blockUntilSynced && searchStartBlock <= searchEndBlock); + } while (searchStartBlock <= searchEndBlock); return { lastProcessedL1BlockNumber: searchStartBlock - 1n, retrievedData: retrievedL1ToL2Messages }; }
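processL2BlockProposedLogs above swaps its serial for-loop for asyncPool from @aztec/foundation/async-pool, which applies an async worker to each input with bounded concurrency (10 in flight here). A small, self-contained usage sketch with illustrative inputs; the worker is a stand-in for the real per-log RPC fetch:

  import { asyncPool } from '@aztec/foundation/async-pool';

  // Process each id with at most 10 concurrent workers.
  async function fetchAll(ids: number[]): Promise<string[]> {
    const results: string[] = [];
    await asyncPool(10, ids, async id => {
      const payload = await Promise.resolve(`payload-${id}`); // stand-in for an RPC call
      results.push(payload); // note: completion order, not input order
    });
    return results;
  }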
diff --git a/yarn-project/archiver/src/archiver/instrumentation.ts b/yarn-project/archiver/src/archiver/instrumentation.ts index 1d6343b8f9dd..7c44a9a46186 100644 --- a/yarn-project/archiver/src/archiver/instrumentation.ts +++ b/yarn-project/archiver/src/archiver/instrumentation.ts @@ -5,6 +5,7 @@ import { type Gauge, type Histogram, LmdbMetrics, + type LmdbStatsCallback, Metrics, type TelemetryClient, type UpDownCounter, } from '@aztec/telemetry-client'; @@ -23,7 +24,7 @@ export class ArchiverInstrumentation { private log = createDebugLogger('aztec:archiver:instrumentation'); - constructor(private telemetry: TelemetryClient) { + constructor(private telemetry: TelemetryClient, lmdbStats?: LmdbStatsCallback) { const meter = telemetry.getMeter('Archiver'); this.blockHeight = meter.createGauge(Metrics.ARCHIVER_BLOCK_HEIGHT, { description: 'The height of the latest block processed by the archiver', @@ -72,13 +73,10 @@ export class ArchiverInstrumentation { name: Metrics.ARCHIVER_DB_NUM_ITEMS, description: 'Num items in the archiver database', }, + lmdbStats, ); } - public recordDBMetrics(metrics: { mappingSize: number; numItems: number; actualSize: number }) { - this.dbMetrics.recordDBMetrics(metrics); - } - public isEnabled(): boolean { return this.telemetry.isEnabled(); }
diff --git a/yarn-project/archiver/src/archiver/kv_archiver_store/block_store.ts b/yarn-project/archiver/src/archiver/kv_archiver_store/block_store.ts index 3d15de3fbbb8..458ab2317789 100644 --- a/yarn-project/archiver/src/archiver/kv_archiver_store/block_store.ts +++ b/yarn-project/archiver/src/archiver/kv_archiver_store/block_store.ts @@ -1,5 +1,5 @@ import { Body, type InBlock, L2Block, L2BlockHash, type TxEffect, type TxHash, TxReceipt } from '@aztec/circuit-types'; -import { AppendOnlyTreeSnapshot, type AztecAddress, Header, INITIAL_L2_BLOCK_NUM } from '@aztec/circuits.js'; +import { AppendOnlyTreeSnapshot, type AztecAddress, BlockHeader, INITIAL_L2_BLOCK_NUM } from '@aztec/circuits.js'; import { createDebugLogger } from '@aztec/foundation/log'; import { type AztecKVStore, type AztecMap, type AztecSingleton, type Range } from '@aztec/kv-store'; @@ -147,14 +147,14 @@ export class BlockStore { * @param limit - The number of blocks to return. * @returns The requested L2 block headers */ - *getBlockHeaders(start: number, limit: number): IterableIterator<Header> { + *getBlockHeaders(start: number, limit: number): IterableIterator<BlockHeader> { for (const blockStorage of this.#blocks.values(this.#computeBlockRange(start, limit))) { - yield Header.fromBuffer(blockStorage.header); + yield BlockHeader.fromBuffer(blockStorage.header); } } private getBlockFromBlockStorage(blockStorage: BlockStorage) { - const header = Header.fromBuffer(blockStorage.header); + const header = BlockHeader.fromBuffer(blockStorage.header); const archive = AppendOnlyTreeSnapshot.fromBuffer(blockStorage.archive); const blockHash = header.hash().toString(); const blockBodyBuffer = this.#blockBodies.get(blockHash);
diff --git a/yarn-project/archiver/src/archiver/kv_archiver_store/contract_artifacts_store.test.ts b/yarn-project/archiver/src/archiver/kv_archiver_store/contract_artifacts_store.test.ts index 0735866154a3..54fc9c7d69e8 100644 --- a/yarn-project/archiver/src/archiver/kv_archiver_store/contract_artifacts_store.test.ts +++ b/yarn-project/archiver/src/archiver/kv_archiver_store/contract_artifacts_store.test.ts @@ -1,5 +1,5 @@ import { AztecAddress } from '@aztec/circuits.js'; -import { openTmpStore } from '@aztec/kv-store/utils'; +import { openTmpStore } from '@aztec/kv-store/lmdb'; import { BenchmarkingContractArtifact } from '@aztec/noir-contracts.js/Benchmarking'; import { beforeEach } from '@jest/globals';
diff --git a/yarn-project/archiver/src/archiver/kv_archiver_store/kv_archiver_store.test.ts b/yarn-project/archiver/src/archiver/kv_archiver_store/kv_archiver_store.test.ts index 90b3693716c0..d361f91c1397 100644 --- a/yarn-project/archiver/src/archiver/kv_archiver_store/kv_archiver_store.test.ts +++ b/yarn-project/archiver/src/archiver/kv_archiver_store/kv_archiver_store.test.ts @@ -1,4 +1,4 @@ -import { openTmpStore } from '@aztec/kv-store/utils'; +import { openTmpStore } from '@aztec/kv-store/lmdb'; import { describeArchiverDataStore } from '../archiver_store_test_suite.js'; import { KVArchiverDataStore } from './kv_archiver_store.js';
diff --git a/yarn-project/archiver/src/archiver/kv_archiver_store/kv_archiver_store.ts b/yarn-project/archiver/src/archiver/kv_archiver_store/kv_archiver_store.ts index 618abf9cbfde..804f2caa03ef 100644 --- a/yarn-project/archiver/src/archiver/kv_archiver_store/kv_archiver_store.ts +++ b/yarn-project/archiver/src/archiver/kv_archiver_store/kv_archiver_store.ts @@ -9,11 +9,11 @@ import { type TxScopedL2Log, } from '@aztec/circuit-types'; import { + type BlockHeader, type ContractClassPublic, type ContractInstanceWithAddress, type ExecutablePrivateFunctionWithMembershipProof, type Fr, - type Header, type PrivateLog, type UnconstrainedFunctionWithMembershipProof, } from '@aztec/circuits.js'; @@ -171,7 +171,7 @@ export class KVArchiverDataStore implements ArchiverDataStore { * @param limit - The number of blocks to return.
 * @returns The requested L2 blocks */ - getBlockHeaders(start: number, limit: number): Promise<Header[]> { + getBlockHeaders(start: number, limit: number): Promise<BlockHeader[]> { try { return Promise.resolve(Array.from(this.#blockStore.getBlockHeaders(start, limit))); } catch (err) {
diff --git a/yarn-project/archiver/src/archiver/kv_archiver_store/log_store.ts b/yarn-project/archiver/src/archiver/kv_archiver_store/log_store.ts index efb4922d328c..da6f49388832 100644 --- a/yarn-project/archiver/src/archiver/kv_archiver_store/log_store.ts +++ b/yarn-project/archiver/src/archiver/kv_archiver_store/log_store.ts @@ -94,7 +94,7 @@ export class LogStore { } const tag = new Fr(correctedBuffer); - this.#log.verbose(`Found tagged unencrypted log with tag ${tag.toString()} in block ${block.number}`); + this.#log.debug(`Found tagged unencrypted log with tag ${tag.toString()} in block ${block.number}`); const currentLogs = taggedLogs.get(tag.toString()) ?? []; currentLogs.push( new TxScopedL2Log(txHash, dataStartIndexForTx, block.number, /* isFromPublic */ true, log.data).toBuffer(),
diff --git a/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts b/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts index 5a0c7085c61b..74480bc80070 100644 --- a/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts +++ b/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts @@ -16,12 +16,12 @@ import { wrapInBlock, } from '@aztec/circuit-types'; import { + type BlockHeader, type ContractClassPublic, type ContractClassPublicWithBlockNumber, type ContractInstanceWithAddress, type ExecutablePrivateFunctionWithMembershipProof, Fr, - type Header, INITIAL_L2_BLOCK_NUM, MAX_NOTE_HASHES_PER_TX, MAX_NULLIFIERS_PER_TX, @@ -427,7 +427,7 @@ export class MemoryArchiverStore implements ArchiverDataStore { return Promise.resolve(this.l2Blocks.slice(fromIndex, toIndex)); } - public async getBlockHeaders(from: number, limit: number): Promise<Header[]> { + public async getBlockHeaders(from: number, limit: number): Promise<BlockHeader[]> { const blocks = await this.getBlocks(from, limit); return blocks.map(block => block.data.header); }
diff --git a/yarn-project/archiver/src/factory.ts b/yarn-project/archiver/src/factory.ts index a2bd3c66ac7a..6e6949500654 100644 --- a/yarn-project/archiver/src/factory.ts +++ b/yarn-project/archiver/src/factory.ts @@ -7,7 +7,7 @@ import { import { createDebugLogger } from '@aztec/foundation/log'; import { type Maybe } from '@aztec/foundation/types'; import { type DataStoreConfig } from '@aztec/kv-store/config'; -import { createStore } from '@aztec/kv-store/utils'; +import { createStore } from '@aztec/kv-store/lmdb'; import { TokenBridgeContractArtifact, TokenContractArtifact } from '@aztec/noir-contracts.js'; import { getCanonicalProtocolContract, protocolContractNames } from '@aztec/protocol-contracts'; import { type TelemetryClient } from '@aztec/telemetry-client';
diff --git a/yarn-project/archiver/src/index.ts b/yarn-project/archiver/src/index.ts index 24112863fc1c..4aa32e6d591b 100644 --- a/yarn-project/archiver/src/index.ts +++ b/yarn-project/archiver/src/index.ts @@ -1,62 +1,8 @@ -import { jsonStringify } from '@aztec/foundation/json-rpc'; -import { createDebugLogger } from '@aztec/foundation/log'; -import { fileURLToPath } from '@aztec/foundation/url'; -import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; - -import { createPublicClient, http } from 'viem'; -import { localhost } from 'viem/chains'; - -import {
 Archiver, getArchiverConfigFromEnv } from './archiver/index.js'; -import { ArchiverInstrumentation } from './archiver/instrumentation.js'; -import { MemoryArchiverStore } from './archiver/memory_archiver_store/memory_archiver_store.js'; - export * from './archiver/index.js'; -export * from './rpc/index.js'; export * from './factory.js'; +export * from './rpc/index.js'; -export { retrieveL2ProofVerifiedEvents, retrieveBlockFromRollup } from './archiver/data_retrieval.js'; - -const log = createDebugLogger('aztec:archiver'); - -/** - * A function which instantiates and starts Archiver. - */ -// eslint-disable-next-line require-await -async function main() { - const config = getArchiverConfigFromEnv(); - const { l1RpcUrl: rpcUrl, l1Contracts } = config; - - log.info(`Starting archiver in main(): ${jsonStringify(config)}`); - const publicClient = createPublicClient({ - chain: localhost, - transport: http(rpcUrl), - }); - - const archiverStore = new MemoryArchiverStore(1000); - - const archiver = new Archiver( - publicClient, - l1Contracts.rollupAddress, - l1Contracts.inboxAddress, - l1Contracts.registryAddress, - archiverStore, - 1000, - new ArchiverInstrumentation(new NoopTelemetryClient()), - ); - - const shutdown = async () => { - await archiver.stop(); - process.exit(0); - }; - process.once('SIGINT', shutdown); - process.once('SIGTERM', shutdown); -} - -// See https://twitter.com/Rich_Harris/status/1355289863130673153 -if (process.argv[1] === fileURLToPath(import.meta.url).replace(/\/index\.js$/, '')) { - // eslint-disable-next-line @typescript-eslint/no-floating-promises - main().catch(err => { - log.error(err); - process.exit(1); - }); -} +export { + retrieveBlocksFromRollup as retrieveBlockFromRollup, + retrieveL2ProofVerifiedEvents, +} from './archiver/data_retrieval.js';
diff --git a/yarn-project/archiver/src/test/mock_l2_block_source.ts b/yarn-project/archiver/src/test/mock_l2_block_source.ts index cbd2e3363d33..6dd2c43a8b6f 100644 --- a/yarn-project/archiver/src/test/mock_l2_block_source.ts +++ b/yarn-project/archiver/src/test/mock_l2_block_source.ts @@ -7,12 +7,11 @@ import { TxReceipt, TxStatus, } from '@aztec/circuit-types'; -import { EthAddress, type Header } from '@aztec/circuits.js'; +import { getSlotRangeForEpoch } from '@aztec/circuit-types'; +import { type BlockHeader, EthAddress } from '@aztec/circuits.js'; import { DefaultL1ContractsConfig } from '@aztec/ethereum'; import { createDebugLogger } from '@aztec/foundation/log'; -import { getSlotRangeForEpoch } from '../archiver/epoch_helpers.js'; - /** * A mocked implementation of L2BlockSource to be used in tests. */ @@ -107,7 +106,7 @@ export class MockL2BlockSource implements L2BlockSource { ); } - getBlockHeader(number: number | 'latest'): Promise<Header | undefined> { + getBlockHeader(number: number | 'latest'): Promise<BlockHeader | undefined> { return Promise.resolve(this.l2Blocks.at(typeof number === 'number' ? number - 1 : -1)?.header); }
diff --git a/yarn-project/aztec-faucet/terraform/main.tf b/yarn-project/aztec-faucet/terraform/main.tf index 2326d9e3e4ef..d77dd205300e 100644 --- a/yarn-project/aztec-faucet/terraform/main.tf +++ b/yarn-project/aztec-faucet/terraform/main.tf @@ -107,8 +107,8 @@ resource "aws_ecs_task_definition" "aztec-faucet" { value = "80" }, { - name = "DEBUG", - value = "aztec:*" + name = "LOG_LEVEL", + value = "verbose" }, { name = "RPC_URL",
diff --git a/yarn-project/aztec-faucet/terraform/variables.tf b/yarn-project/aztec-faucet/terraform/variables.tf index f1d2fbf5c865..992719e667d0 100644 --- a/yarn-project/aztec-faucet/terraform/variables.tf +++ b/yarn-project/aztec-faucet/terraform/variables.tf @@ -35,6 +35,10 @@ variable "FEE_JUICE_CONTRACT_ADDRESS" { type = string } +variable "STAKING_ASSET_CONTRACT_ADDRESS" { + type = string +} + variable "DEV_COIN_CONTRACT_ADDRESS" { type = string }
diff --git a/yarn-project/aztec-node/src/aztec-node/server.ts b/yarn-project/aztec-node/src/aztec-node/server.ts index 57690bd78d97..4bbc079ec319 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.ts @@ -34,13 +34,13 @@ import { } from '@aztec/circuit-types'; import { type ARCHIVE_HEIGHT, + type BlockHeader, type ContractClassPublic, type ContractDataSource, type ContractInstanceWithAddress, EthAddress, Fr, type GasFees, - type Header, INITIAL_L2_BLOCK_NUM, type L1_TO_L2_MSG_TREE_HEIGHT, type NOTE_HASH_TREE_HEIGHT, @@ -60,7 +60,7 @@ import { padArrayEnd } from '@aztec/foundation/collection'; import { type DebugLogger, createDebugLogger } from '@aztec/foundation/log'; import { Timer } from '@aztec/foundation/timer'; import { type AztecKVStore } from '@aztec/kv-store'; -import { openTmpStore } from '@aztec/kv-store/utils'; +import { openTmpStore } from '@aztec/kv-store/lmdb'; import { SHA256Trunc, StandardTree, UnbalancedTree } from '@aztec/merkle-tree'; import { AggregateTxValidator, @@ -171,7 +171,7 @@ export class AztecNodeService implements AztecNode { // start both and wait for them to sync from the block source await Promise.all([p2pClient.start(), worldStateSynchronizer.start()]); - const validatorClient = createValidatorClient(config, p2pClient, telemetry); + const validatorClient = await createValidatorClient(config, config.l1Contracts.rollupAddress, p2pClient, telemetry); // now create the sequencer const sequencer = config.disableValidator @@ -767,7 +767,7 @@ export class AztecNodeService implements AztecNode { * Returns the currently committed block header, or the initial header if no blocks have been produced. * @returns The current committed block header. */ - public async getBlockHeader(blockNumber: L2BlockNumber = 'latest'): Promise<Header> { + public async getBlockHeader(blockNumber: L2BlockNumber = 'latest'): Promise<BlockHeader> { return ( (await this.getBlock(blockNumber === 'latest' ? -1 : blockNumber))?.header ?? this.worldStateSynchronizer.getCommitted().getInitialHeader()
diff --git a/yarn-project/aztec.js/src/contract/contract.test.ts b/yarn-project/aztec.js/src/contract/contract.test.ts index 4e856779ad4a..66a54e8cfb5b 100644 --- a/yarn-project/aztec.js/src/contract/contract.test.ts +++ b/yarn-project/aztec.js/src/contract/contract.test.ts @@ -41,6 +41,7 @@ describe('Contract Class', () => { inboxAddress: EthAddress.random(), outboxAddress: EthAddress.random(), feeJuiceAddress: EthAddress.random(), + stakingAssetAddress: EthAddress.random(), feeJuicePortalAddress: EthAddress.random(), governanceAddress: EthAddress.random(), coinIssuerAddress: EthAddress.random(),
diff --git a/yarn-project/aztec.js/src/index.ts b/yarn-project/aztec.js/src/index.ts index 3a67fd9a6fd3..b1b58e529983 100644 --- a/yarn-project/aztec.js/src/index.ts +++ b/yarn-project/aztec.js/src/index.ts @@ -32,9 +32,9 @@ export { type ContractNotes, type ContractStorageLayout, type DeployOptions, + type ProfileResult, type SendMethodOptions, type WaitOpts, - type ProfileResult, } from './contract/index.js'; export { ContractDeployer } from './deployment/index.js'; @@ -53,14 +53,15 @@ export { generatePublicKey, readFieldCompressedString, waitForPXE, + waitForNode, type AztecAddressLike, type EthAddressLike, type EventSelectorLike, type FieldLike, type FunctionSelectorLike, type L2AmountClaim, - type L2Claim, type L2AmountClaimWithRecipient, + type L2Claim, type WrappedFieldLike, } from './utils/index.js'; @@ -138,10 +139,12 @@ export { UnencryptedL2Log, UniqueNote, createAztecNodeClient, + getTimestampRangeForEpoch, merkleTreeIds, mockEpochProofQuote, mockTx, type AztecNode, + type EpochConstants, type LogFilter, type PXE, type PartialAddress, @@ -156,7 +159,7 @@ export { decodeFromAbi, encodeArguments, type AbiType } from '@aztec/foundation/ export { toBigIntBE } from '@aztec/foundation/bigint-buffer'; export { sha256 } from '@aztec/foundation/crypto'; export { makeFetch } from '@aztec/foundation/json-rpc/client'; -export { createDebugLogger, onLog, type DebugLogger } from '@aztec/foundation/log'; +export { createDebugLogger, type DebugLogger } from '@aztec/foundation/log'; export { retry, retryUntil } from '@aztec/foundation/retry'; export { to2Fields, toBigInt } from '@aztec/foundation/serialize'; export { sleep } from '@aztec/foundation/sleep'; @@ -164,7 +167,7 @@ export { elapsed } from '@aztec/foundation/timer'; export { type FieldsOf } from '@aztec/foundation/types'; export { fileURLToPath } from '@aztec/foundation/url'; -export { type DeployL1Contracts, EthCheatCodes, deployL1Contract, deployL1Contracts } from '@aztec/ethereum'; +export { EthCheatCodes, deployL1Contract, deployL1Contracts, type DeployL1Contracts } from '@aztec/ethereum'; // Start of section that exports public api via granular api. // Here you *can* do `export *` as the granular api defacto exports things explicitly.
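The waitForNode helper exported above (implemented in utils/node.ts below) gives callers a blocking readiness check against a node. A usage sketch, assuming a hypothetical local endpoint:

  import { createAztecNodeClient, createDebugLogger, waitForNode } from '@aztec/aztec.js';

  const node = createAztecNodeClient('http://localhost:8080'); // hypothetical endpoint
  const logger = createDebugLogger('example:wait-for-node');

  // Retries node.getNodeInfo() until it succeeds, then resolves.
  await waitForNode(node, logger);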
diff --git a/yarn-project/aztec.js/src/utils/index.ts b/yarn-project/aztec.js/src/utils/index.ts index 7a980b6ca681..68a1b4d12fad 100644 --- a/yarn-project/aztec.js/src/utils/index.ts +++ b/yarn-project/aztec.js/src/utils/index.ts @@ -4,6 +4,7 @@ export * from './abi_types.js'; export * from './cheat_codes.js'; export * from './authwit.js'; export * from './pxe.js'; +export * from './node.js'; export * from './anvil_test_watcher.js'; export * from './field_compressed_string.js'; export * from './portal_manager.js'; diff --git a/yarn-project/aztec.js/src/utils/node.ts b/yarn-project/aztec.js/src/utils/node.ts new file mode 100644 index 000000000000..51c815aa5fea --- /dev/null +++ b/yarn-project/aztec.js/src/utils/node.ts @@ -0,0 +1,17 @@ +import { type AztecNode } from '@aztec/circuit-types'; +import { type DebugLogger } from '@aztec/foundation/log'; +import { retryUntil } from '@aztec/foundation/retry'; + +export const waitForNode = async (node: AztecNode, logger?: DebugLogger) => { + await retryUntil(async () => { + try { + logger?.verbose('Attempting to contact Aztec node...'); + await node.getNodeInfo(); + logger?.verbose('Contacted Aztec node'); + return true; + } catch (error) { + logger?.verbose('Failed to contact Aztec Node'); + } + return undefined; + }, 'RPC Get Node Info'); +}; diff --git a/yarn-project/aztec.js/webpack.config.js b/yarn-project/aztec.js/webpack.config.js index d377a5fa0563..3ba9561af4e1 100644 --- a/yarn-project/aztec.js/webpack.config.js +++ b/yarn-project/aztec.js/webpack.config.js @@ -61,6 +61,7 @@ export default { fs: false, path: false, url: false, + tty: false, worker_threads: false, buffer: require.resolve('buffer/'), util: require.resolve('util/'), diff --git a/yarn-project/aztec/CHANGELOG.md b/yarn-project/aztec/CHANGELOG.md index 39144fef90bf..c7f9131d6299 100644 --- a/yarn-project/aztec/CHANGELOG.md +++ b/yarn-project/aztec/CHANGELOG.md @@ -1,5 +1,23 @@ # Changelog +## [0.66.0](https://github.com/AztecProtocol/aztec-packages/compare/aztec-package-v0.65.2...aztec-package-v0.66.0) (2024-12-06) + + +### ⚠ BREAKING CHANGES + +* Remove debug and winston in favor of pino ([#10355](https://github.com/AztecProtocol/aztec-packages/issues/10355)) + +### Features + +* Agent and broker expose OTEL metrics ([#10264](https://github.com/AztecProtocol/aztec-packages/issues/10264)) ([c2c8cc6](https://github.com/AztecProtocol/aztec-packages/commit/c2c8cc6f7336cf4b2fa14d9a7f1af1a30f1b8f79)) +* Epoch cache, do not attest if not in committee or from current proposer ([#10327](https://github.com/AztecProtocol/aztec-packages/issues/10327)) ([9ebaa65](https://github.com/AztecProtocol/aztec-packages/commit/9ebaa65ce290481e5dc00174e92137561360549a)) +* Staking integration ([#10403](https://github.com/AztecProtocol/aztec-packages/issues/10403)) ([ecd6c4f](https://github.com/AztecProtocol/aztec-packages/commit/ecd6c4ff914129236b23ab6f4924e4faa3e9d523)) + + +### Miscellaneous + +* Remove debug and winston in favor of pino ([#10355](https://github.com/AztecProtocol/aztec-packages/issues/10355)) ([c246aba](https://github.com/AztecProtocol/aztec-packages/commit/c246aba5dd51391e2b8a3bd8cdc67f0115b85a7a)) + ## [0.65.2](https://github.com/AztecProtocol/aztec-packages/compare/aztec-package-v0.65.1...aztec-package-v0.65.2) (2024-11-28) diff --git a/yarn-project/aztec/docker-compose.yml b/yarn-project/aztec/docker-compose.yml index f26a2e548286..3fe35cd42f82 100644 --- 
a/yarn-project/aztec/docker-compose.yml +++ b/yarn-project/aztec/docker-compose.yml @@ -21,7 +21,7 @@ services: ports: - '${SANDBOX_PXE_PORT:-8080}:8080' environment: - DEBUG: # Loaded from the user shell if explicitly set + LOG_LEVEL: # Loaded from the user shell if explicitly set HOST_WORKDIR: '${PWD}' # Loaded from the user shell to show log files absolute path in host ETHEREUM_HOST: http://ethereum:8545 L1_CHAIN_ID: 31337
diff --git a/yarn-project/aztec/package.json b/yarn-project/aztec/package.json index e9ff10afa77d..2ac1f232f67b 100644 --- a/yarn-project/aztec/package.json +++ b/yarn-project/aztec/package.json @@ -1,6 +1,6 @@ { "name": "@aztec/aztec", - "version": "0.65.2", + "version": "0.66.0", "type": "module", "exports": { ".": "./dest/index.js" }, @@ -22,7 +22,7 @@ "formatting:fix": "run -T eslint --fix ./src && run -T prettier -w ./src", "build:dev": "tsc -b --watch", "test": "NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --passWithNoTests", - "run:example:token": "DEBUG='aztec:*' node ./dest/examples/token.js" + "run:example:token": "LOG_LEVEL='verbose' node ./dest/examples/token.js" }, "inherits": [ "../package.common.json" ], @@ -55,16 +55,13 @@ "@aztec/telemetry-client": "workspace:^", "@aztec/txe": "workspace:^", "@aztec/types": "workspace:^", - "@opentelemetry/winston-transport": "^0.7.0", "@types/chalk": "^2.2.0", "abitype": "^0.8.11", "chalk": "^5.3.0", "commander": "^12.1.0", "koa": "^2.14.2", "koa-router": "^12.0.0", - "viem": "^2.7.15", - "winston": "^3.10.0", - "winston-daily-rotate-file": "^4.7.1" + "viem": "^2.7.15" }, "files": [ "dest",
diff --git a/yarn-project/aztec/src/cli/aztec_start_options.ts b/yarn-project/aztec/src/cli/aztec_start_options.ts index 90b0a970092c..6b0b5c904700 100644 --- a/yarn-project/aztec/src/cli/aztec_start_options.ts +++ b/yarn-project/aztec/src/cli/aztec_start_options.ts @@ -75,6 +75,12 @@ export const aztecStartOptions: { [key: string]: AztecStartOption[] } = { envVar: 'ENABLE_GAS', ...booleanConfigHelper(), }, + { + flag: '--sandbox.noPXE', + description: 'Do not expose PXE service on sandbox start', + envVar: 'NO_PXE', + ...booleanConfigHelper(), + }, ], API: [ { @@ -143,6 +149,12 @@ export const aztecStartOptions: { [key: string]: AztecStartOption[] } = { defaultValue: undefined, envVar: 'FEE_JUICE_CONTRACT_ADDRESS', }, + { + flag: '--staking-asset-address <value>', + description: 'The deployed L1 Staking Asset contract address', + defaultValue: undefined, + envVar: 'STAKING_ASSET_CONTRACT_ADDRESS', + }, { flag: '--fee-juice-portal-address <value>', description: 'The deployed L1 Fee Juice portal contract address',
diff --git a/yarn-project/aztec/src/cli/cli.ts b/yarn-project/aztec/src/cli/cli.ts index 91d803851e40..54a5a139478e 100644 --- a/yarn-project/aztec/src/cli/cli.ts +++ b/yarn-project/aztec/src/cli/cli.ts @@ -9,7 +9,6 @@ import { type DebugLogger, type LogFn } from '@aztec/foundation/log'; import { Command } from 'commander'; -import { setupConsoleJsonLog } from '../logging.js'; import { createSandbox } from '../sandbox.js'; import { github, splash } from '../splash.js'; import { aztecStartOptions } from './aztec_start_options.js'; @@ -39,11 +38,6 @@ export function injectAztecCommands(program: Command, userLog: LogFn, debugLogge startCmd.helpInformation = printAztecStartHelpText; startCmd.action(async options => { - // setup json logging - if (['1', 'true', 'TRUE'].includes(process.env.LOG_JSON ??
'')) { - setupConsoleJsonLog(); - } - // list of 'stop' functions to call when process ends const signalHandlers: Array<() => Promise<void>> = []; const services: NamespacedApiHandlers = {}; @@ -62,6 +56,8 @@ if (sandboxOptions.testAccounts) { if (aztecNodeConfig.p2pEnabled) { userLog(`Not setting up test accounts as we are connecting to a network`); + } else if (sandboxOptions.noPXE) { + userLog(`Not setting up test accounts as we are not exposing a PXE`); } else { userLog('Setting up test accounts...'); const accounts = await deployInitialTestAccounts(pxe); @@ -73,7 +69,11 @@ // Start Node and PXE JSON-RPC server signalHandlers.push(stop); services.node = [node, AztecNodeApiSchema]; - services.pxe = [pxe, PXESchema]; + if (!sandboxOptions.noPXE) { + services.pxe = [pxe, PXESchema]; + } else { + userLog(`Not exposing PXE API through JSON-RPC server`); + } } else { if (options.node) { const { startNode } = await import('./cmds/start_node.js');
diff --git a/yarn-project/aztec/src/cli/cmds/start_archiver.ts b/yarn-project/aztec/src/cli/cmds/start_archiver.ts index 541ec726db54..c9b953a4980b 100644 --- a/yarn-project/aztec/src/cli/cmds/start_archiver.ts +++ b/yarn-project/aztec/src/cli/cmds/start_archiver.ts @@ -3,7 +3,7 @@ import { createDebugLogger } from '@aztec/aztec.js'; import { ArchiverApiSchema } from '@aztec/circuit-types'; import { type NamespacedApiHandlers } from '@aztec/foundation/json-rpc/server'; import { type DataStoreConfig, dataConfigMappings } from '@aztec/kv-store/config'; -import { createStore } from '@aztec/kv-store/utils'; +import { createStore } from '@aztec/kv-store/lmdb'; import { createAndStartTelemetryClient, getConfigEnvVars as getTelemetryClientConfig,
diff --git a/yarn-project/aztec/src/cli/cmds/start_prover_agent.ts b/yarn-project/aztec/src/cli/cmds/start_prover_agent.ts index 3ae24df0ad9b..df382eb6251f 100644 --- a/yarn-project/aztec/src/cli/cmds/start_prover_agent.ts +++ b/yarn-project/aztec/src/cli/cmds/start_prover_agent.ts @@ -40,7 +40,18 @@ export async function startProverAgent( ); const prover = await buildServerCircuitProver(config, telemetry); const proofStore = new InlineProofStore(); - const agents = times(config.proverAgentCount, () => new ProvingAgent(broker, proofStore, prover)); + const agents = times( + config.proverAgentCount, + () => + new ProvingAgent( + broker, + proofStore, + prover, + telemetry, + config.proverAgentProofTypes, + config.proverAgentPollIntervalMs, + ), + ); await Promise.all(agents.map(agent => agent.start()));
diff --git a/yarn-project/aztec/src/cli/cmds/start_prover_broker.ts b/yarn-project/aztec/src/cli/cmds/start_prover_broker.ts index 197d48971c91..ce5ef637ff6b 100644 --- a/yarn-project/aztec/src/cli/cmds/start_prover_broker.ts +++ b/yarn-project/aztec/src/cli/cmds/start_prover_broker.ts @@ -3,6 +3,10 @@ import { type NamespacedApiHandlers } from '@aztec/foundation/json-rpc/server'; import { type LogFn } from '@aztec/foundation/log'; import { ProvingJobBrokerSchema, createAndStartProvingBroker } from '@aztec/prover-client/broker'; import { getProverNodeBrokerConfigFromEnv } from '@aztec/prover-node'; +import { + createAndStartTelemetryClient, + getConfigEnvVars as getTelemetryClientConfig, +} from '@aztec/telemetry-client/start'; import { extractRelevantOptions } from '../util.js'; @@ -22,7 +26,8 @@ export async function startProverBroker(
...extractRelevantOptions(options, proverBrokerConfigMappings, 'proverBroker'), // override with command line options }; - const broker = await createAndStartProvingBroker(config); + const client = await createAndStartTelemetryClient(getTelemetryClientConfig()); + const broker = await createAndStartProvingBroker(config, client); services.proverBroker = [broker, ProvingJobBrokerSchema]; signalHandlers.push(() => broker.stop()); diff --git a/yarn-project/aztec/src/logging.ts b/yarn-project/aztec/src/logging.ts deleted file mode 100644 index a7deed55ae50..000000000000 --- a/yarn-project/aztec/src/logging.ts +++ /dev/null @@ -1,69 +0,0 @@ -import { currentLevel, onLog, setLevel } from '@aztec/foundation/log'; - -import { OpenTelemetryTransportV3 } from '@opentelemetry/winston-transport'; -import * as path from 'path'; -import * as process from 'process'; -import * as winston from 'winston'; -import DailyRotateFile from 'winston-daily-rotate-file'; - -const { format } = winston; -const CURRENT_LOG_FILE_NAME = 'aztec.debug.log'; -const LOG_DIR = 'log'; - -/** Creates a winston logger that logs everything to a local rotating file */ -function createWinstonLocalFileLogger() { - // See https://www.npmjs.com/package/winston-daily-rotate-file#user-content-options - const transport: DailyRotateFile = new DailyRotateFile({ - filename: 'aztec-%DATE%.debug.log', - dirname: LOG_DIR, - datePattern: 'YYYY-MM-DD', - zippedArchive: true, - maxSize: '30m', - maxFiles: '5', - createSymlink: true, - symlinkName: CURRENT_LOG_FILE_NAME, - }); - - return winston.createLogger({ - level: 'debug', - transports: [transport], - format: format.combine(format.timestamp(), format.json()), - }); -} - -/** Creates a winston logger that logs everything to stdout in json format */ -function createWinstonJsonStdoutLogger() { - return winston.createLogger({ - level: currentLevel, - transports: [ - new winston.transports.Console({ - format: format.combine(format.timestamp(), format.json()), - }), - new OpenTelemetryTransportV3(), - ], - }); -} - -/** - * Hooks to all log statements and outputs them to a local rotating file. - * @returns Output log name. - */ -export function setupFileDebugLog() { - const logger = createWinstonLocalFileLogger(); - onLog((level, module, message, data) => { - logger.log({ ...data, level, module, message }); - }); - const workdir = process.env.HOST_WORKDIR ?? process.cwd(); - return path.join(workdir, LOG_DIR, CURRENT_LOG_FILE_NAME); -} - -/** - * Silences the foundation stdout logger and funnels all logs through a winston JSON logger. 
- */ -export function setupConsoleJsonLog() { - const logger = createWinstonJsonStdoutLogger(); - setLevel('silent'); - onLog((level, module, message, data) => { - logger.log({ ...data, level, module, message }); - }); -} diff --git a/yarn-project/aztec/terraform/node/main.tf b/yarn-project/aztec/terraform/node/main.tf index f419a3568a7e..ebb85c4461d7 100644 --- a/yarn-project/aztec/terraform/node/main.tf +++ b/yarn-project/aztec/terraform/node/main.tf @@ -252,10 +252,6 @@ resource "aws_ecs_task_definition" "aztec-node" { name = "AZTEC_PORT" value = "80" }, - { - name = "DEBUG" - value = "aztec:*,-json-rpc:json_proxy:*,-aztec:avm_simulator:*" - }, { name = "ETHEREUM_HOST" value = "${local.eth_host}" @@ -324,6 +320,10 @@ resource "aws_ecs_task_definition" "aztec-node" { name = "FEE_JUICE_CONTRACT_ADDRESS" value = data.terraform_remote_state.l1_contracts.outputs.fee_juice_contract_address }, + { + name = "STAKING_ASSET_CONTRACT_ADDRESS" + value = data.terraform_remote_state.l1_contracts.outputs.staking_asset_contract_address + }, { name = "FEE_JUICE_PORTAL_CONTRACT_ADDRESS" value = data.terraform_remote_state.l1_contracts.outputs.FEE_JUICE_PORTAL_CONTRACT_ADDRESS diff --git a/yarn-project/aztec/terraform/prover-node/main.tf b/yarn-project/aztec/terraform/prover-node/main.tf index 45bdfcb0be81..0577e0fa77fd 100644 --- a/yarn-project/aztec/terraform/prover-node/main.tf +++ b/yarn-project/aztec/terraform/prover-node/main.tf @@ -235,7 +235,6 @@ resource "aws_ecs_task_definition" "aztec-prover-node" { { name = "NODE_ENV", value = "production" }, { name = "LOG_LEVEL", value = "verbose" }, { name = "LOG_JSON", value = "1" }, - { name = "DEBUG", value = "aztec:*,-json-rpc:json_proxy:*,-aztec:avm_simulator:*" }, { name = "DEPLOY_TAG", value = var.DEPLOY_TAG }, { name = "NETWORK_NAME", value = "${var.DEPLOY_TAG}" }, { name = "ETHEREUM_HOST", value = "${local.eth_host}" }, @@ -275,6 +274,7 @@ resource "aws_ecs_task_definition" "aztec-prover-node" { { name = "OUTBOX_CONTRACT_ADDRESS", value = data.terraform_remote_state.l1_contracts.outputs.outbox_contract_address }, { name = "REGISTRY_CONTRACT_ADDRESS", value = data.terraform_remote_state.l1_contracts.outputs.registry_contract_address }, { name = "FEE_JUICE_CONTRACT_ADDRESS", value = data.terraform_remote_state.l1_contracts.outputs.fee_juice_contract_address }, + { name = "STAKING_ASSET_CONTRACT_ADDRESS", value = data.terraform_remote_state.l1_contracts.outputs.staking_asset_contract_address }, { name = "FEE_JUICE_PORTAL_CONTRACT_ADDRESS", value = data.terraform_remote_state.l1_contracts.outputs.FEE_JUICE_PORTAL_CONTRACT_ADDRESS }, // P2P (disabled) diff --git a/yarn-project/aztec/terraform/prover/main.tf b/yarn-project/aztec/terraform/prover/main.tf index 72b48ff520bf..97f62ee8995e 100644 --- a/yarn-project/aztec/terraform/prover/main.tf +++ b/yarn-project/aztec/terraform/prover/main.tf @@ -250,8 +250,8 @@ resource "aws_ecs_task_definition" "aztec-proving-agent" { "value": "production" }, { - "name": "DEBUG", - "value": "aztec:*" + "name": "LOG_LEVEL", + "value": "verbose" }, { "name": "DEPLOY_TAG", diff --git a/yarn-project/bb-prover/src/avm_proving.test.ts b/yarn-project/bb-prover/src/avm_proving.test.ts index 3e0ae84cf228..80c2f1172050 100644 --- a/yarn-project/bb-prover/src/avm_proving.test.ts +++ b/yarn-project/bb-prover/src/avm_proving.test.ts @@ -1,4 +1,11 @@ -import { VerificationKeyData } from '@aztec/circuits.js'; +import { + MAX_L2_TO_L1_MSGS_PER_TX, + MAX_NOTE_HASHES_PER_TX, + MAX_NULLIFIERS_PER_TX, + 
MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, + MAX_UNENCRYPTED_LOGS_PER_TX, + VerificationKeyData, +} from '@aztec/circuits.js'; import { Fr } from '@aztec/foundation/fields'; import { createDebugLogger } from '@aztec/foundation/log'; import { simulateAvmTestContractGenerateCircuitInputs } from '@aztec/simulator/public/fixtures'; @@ -10,17 +17,78 @@ import path from 'path'; import { type BBSuccess, BB_RESULT, generateAvmProof, verifyAvmProof } from './bb/execute.js'; import { extractAvmVkData } from './verification_key/verification_key_data.js'; +const TIMEOUT = 180_000; + describe('AVM WitGen, proof generation and verification', () => { - it('Should prove and verify bulk_testing', async () => { - await proveAndVerifyAvmTestContract( - 'bulk_testing', - [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10].map(x => new Fr(x)), - ); - }, 180_000); + it( + 'Should prove and verify bulk_testing', + async () => { + await proveAndVerifyAvmTestContract( + 'bulk_testing', + /*calldata=*/ [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10].map(x => new Fr(x)), + ); + }, + TIMEOUT, + ); + it( + 'Should prove and verify test that performs too many storage writes and reverts', + async () => { + await proveAndVerifyAvmTestContract( + 'n_storage_writes', + /*calldata=*/ [new Fr(MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX + 1)], + /*expectRevert=*/ true, + ); + }, + TIMEOUT, + ); + it( + 'Should prove and verify test that creates too many note hashes and reverts', + async () => { + await proveAndVerifyAvmTestContract( + 'n_new_note_hashes', + /*calldata=*/ [new Fr(MAX_NOTE_HASHES_PER_TX + 1)], + /*expectRevert=*/ true, + ); + }, + TIMEOUT, + ); + it( + 'Should prove and verify test that creates too many nullifiers and reverts', + async () => { + await proveAndVerifyAvmTestContract( + 'n_new_nullifiers', + /*calldata=*/ [new Fr(MAX_NULLIFIERS_PER_TX + 1)], + /*expectRevert=*/ true, + ); + }, + TIMEOUT, + ); + it( + 'Should prove and verify test that creates too many l2tol1 messages and reverts', + async () => { + await proveAndVerifyAvmTestContract( + 'n_new_l2_to_l1_msgs', + /*calldata=*/ [new Fr(MAX_L2_TO_L1_MSGS_PER_TX + 1)], + /*expectRevert=*/ true, + ); + }, + TIMEOUT, + ); + it( + 'Should prove and verify test that creates too many unencrypted logs and reverts', + async () => { + await proveAndVerifyAvmTestContract( + 'n_new_unencrypted_logs', + /*calldata=*/ [new Fr(MAX_UNENCRYPTED_LOGS_PER_TX + 1)], + /*expectRevert=*/ true, + ); + }, + TIMEOUT, + ); }); -async function proveAndVerifyAvmTestContract(functionName: string, calldata: Fr[] = []) { - const avmCircuitInputs = await simulateAvmTestContractGenerateCircuitInputs(functionName, calldata); +async function proveAndVerifyAvmTestContract(functionName: string, calldata: Fr[] = [], expectRevert = false) { + const avmCircuitInputs = await simulateAvmTestContractGenerateCircuitInputs(functionName, calldata, expectRevert); const internalLogger = createDebugLogger('aztec:avm-proving-test'); const logger = (msg: string, _data?: any) => internalLogger.verbose(msg); @@ -30,7 +98,7 @@ async function proveAndVerifyAvmTestContract(functionName: string, calldata: Fr[ const bbWorkingDirectory = await fs.mkdtemp(path.join(tmpdir(), 'bb-')); // Then we prove. 
- const proofRes = await generateAvmProof(bbPath, bbWorkingDirectory, avmCircuitInputs, logger); + const proofRes = await generateAvmProof(bbPath, bbWorkingDirectory, avmCircuitInputs, internalLogger); if (proofRes.status === BB_RESULT.FAILURE) { internalLogger.error(`Proof generation failed: ${proofRes.reason}`); }
diff --git a/yarn-project/bb-prover/src/bb/cli.ts b/yarn-project/bb-prover/src/bb/cli.ts index a0122082c96f..97a6724e2e8c 100644 --- a/yarn-project/bb-prover/src/bb/cli.ts +++ b/yarn-project/bb-prover/src/bb/cli.ts @@ -2,7 +2,7 @@ import { type LogFn } from '@aztec/foundation/log'; import { type ProtocolArtifact, ProtocolCircuitArtifacts } from '@aztec/noir-protocol-circuits-types'; import { Command } from 'commander'; -import * as fs from 'fs/promises'; +import { promises as fs } from 'fs'; import { generateContractForCircuit, generateKeyForNoirCircuit } from './execute.js';
diff --git a/yarn-project/bb-prover/src/bb/execute.ts b/yarn-project/bb-prover/src/bb/execute.ts index a14598863a9f..d1236b27a84b 100644 --- a/yarn-project/bb-prover/src/bb/execute.ts +++ b/yarn-project/bb-prover/src/bb/execute.ts @@ -1,11 +1,11 @@ import { type AvmCircuitInputs } from '@aztec/circuits.js'; import { sha256 } from '@aztec/foundation/crypto'; -import { type LogFn, currentLevel as currentLogLevel } from '@aztec/foundation/log'; +import { type DebugLogger, type LogFn } from '@aztec/foundation/log'; import { Timer } from '@aztec/foundation/timer'; import { type NoirCompiledCircuit } from '@aztec/types/noir'; import * as proc from 'child_process'; -import * as fs from 'fs/promises'; +import { promises as fs } from 'fs'; import { basename, dirname, join } from 'path'; import { type UltraHonkFlavor } from '../honk.js'; @@ -15,7 +15,6 @@ export const VK_FIELDS_FILENAME = 'vk_fields.json'; export const PROOF_FILENAME = 'proof'; export const PROOF_FIELDS_FILENAME = 'proof_fields.json'; export const AVM_BYTECODE_FILENAME = 'avm_bytecode.bin'; -export const AVM_CALLDATA_FILENAME = 'avm_calldata.bin'; export const AVM_PUBLIC_INPUTS_FILENAME = 'avm_public_inputs.bin'; export const AVM_HINTS_FILENAME = 'avm_hints.bin'; @@ -202,6 +201,7 @@ export async function executeBbClientIvcProof( bytecodeStackPath: string, witnessStackPath: string, log: LogFn, + noAutoVerify = false, ): Promise<BBFailure | BBSuccess> { // Check that the working directory exists try { @@ -238,6 +238,9 @@ export async function executeBbClientIvcProof( '--input_type', 'runtime_stack', ]; + if (noAutoVerify) { + args.push('--no_auto_verify'); + } const timer = new Timer(); const logFunction = (message: string) => { log(`bb - ${message}`); }; @@ -505,7 +508,7 @@ export async function generateAvmProof( pathToBB: string, workingDirectory: string, input: AvmCircuitInputs, - log: LogFn, + logger: DebugLogger, ): Promise<BBFailure | BBSuccess> { // Check that the working directory exists try { @@ -515,7 +518,6 @@ // Paths for the inputs - const calldataPath = join(workingDirectory, AVM_CALLDATA_FILENAME); const publicInputsPath = join(workingDirectory, AVM_PUBLIC_INPUTS_FILENAME); const avmHintsPath = join(workingDirectory, AVM_HINTS_FILENAME); @@ -535,13 +537,6 @@ try { // Write the inputs to the working directory.
- await fs.writeFile( - calldataPath, - input.calldata.map(fr => fr.toBuffer()), - ); - if (!filePresent(calldataPath)) { - return { status: BB_RESULT.FAILURE, reason: `Could not write calldata at ${calldataPath}` }; - } await fs.writeFile(publicInputsPath, input.output.toBuffer()); if (!filePresent(publicInputsPath)) { @@ -554,19 +549,17 @@ const args = [ - '--avm-calldata', - calldataPath, '--avm-public-inputs', publicInputsPath, '--avm-hints', avmHintsPath, '-o', outputPath, - currentLogLevel == 'debug' ? '-d' : currentLogLevel == 'verbose' ? '-v' : '', + logger.level === 'debug' || logger.level === 'trace' ? '-d' : logger.level === 'verbose' ? '-v' : '', ]; const timer = new Timer(); const logFunction = (message: string) => { - log(`AvmCircuit (prove) BB out - ${message}`); + logger.verbose(`AvmCircuit (prove) BB out - ${message}`); }; const result = await executeBB(pathToBB, 'avm_prove', args, logFunction); const duration = timer.ms();
diff --git a/yarn-project/bb-prover/src/prover/bb_private_kernel_prover.ts b/yarn-project/bb-prover/src/prover/bb_private_kernel_prover.ts index 7f16b0292fd9..5d1b735523a6 100644 --- a/yarn-project/bb-prover/src/prover/bb_private_kernel_prover.ts +++ b/yarn-project/bb-prover/src/prover/bb_private_kernel_prover.ts @@ -45,7 +45,7 @@ import { type NoirCompiledCircuit } from '@aztec/types/noir'; import { encode } from '@msgpack/msgpack'; import { serializeWitness } from '@noir-lang/noirc_abi'; import { type WitnessMap } from '@noir-lang/types'; -import * as fs from 'fs/promises'; +import { promises as fs } from 'fs'; import path from 'path'; import {
diff --git a/yarn-project/bb-prover/src/prover/bb_prover.ts b/yarn-project/bb-prover/src/prover/bb_prover.ts index f737b093a382..463a09eaad8f 100644 --- a/yarn-project/bb-prover/src/prover/bb_prover.ts +++ b/yarn-project/bb-prover/src/prover/bb_prover.ts @@ -76,7 +76,7 @@ import { Attributes, type TelemetryClient, trackSpan } from '@aztec/telemetry-cl import { abiEncode } from '@noir-lang/noirc_abi'; import { type Abi, type WitnessMap } from '@noir-lang/types'; import crypto from 'crypto'; -import * as fs from 'fs/promises'; +import { promises as fs } from 'fs'; import * as path from 'path'; import { @@ -535,7 +535,7 @@ private async generateAvmProofWithBB(input: AvmCircuitInputs, workingDirectory: string): Promise<BBSuccess> { logger.info(`Proving avm-circuit for ${input.functionName}...`); - const provingResult = await generateAvmProof(this.config.bbBinaryPath, workingDirectory, input, logger.verbose); + const provingResult = await generateAvmProof(this.config.bbBinaryPath, workingDirectory, input, logger); if (provingResult.status === BB_RESULT.FAILURE) { logger.error(`Failed to generate AVM proof for ${input.functionName}: ${provingResult.reason}`);
diff --git a/yarn-project/bb-prover/src/test/index.ts b/yarn-project/bb-prover/src/test/index.ts index 555536e8cb7d..3f84ad27da1a 100644 --- a/yarn-project/bb-prover/src/test/index.ts +++ b/yarn-project/bb-prover/src/test/index.ts @@ -1,3 +1,2 @@ export * from './test_circuit_prover.js'; export * from './test_verifier.js'; -export * from './test_avm.js';
diff --git a/yarn-project/bb-prover/src/test/test_avm.ts b/yarn-project/bb-prover/src/test/test_avm.ts index 4cbac8bb1c4b..7dd0954dfe6f 100644 --- a/yarn-project/bb-prover/src/test/test_avm.ts +++ b/yarn-project/bb-prover/src/test/test_avm.ts @@ -1,10 +1,10 @@ import { AztecAddress, + BlockHeader,
 ContractStorageRead, ContractStorageUpdateRequest, Gas, GlobalVariables, - Header, L2ToL1Message, LogHash, MAX_ENQUEUED_CALLS_PER_CALL, @@ -74,7 +74,7 @@ export function getPublicInputs(result: PublicFunctionCallResult): PublicCircuit ), publicCallRequests: padArrayEnd([], PublicInnerCallRequest.empty(), MAX_ENQUEUED_CALLS_PER_CALL), unencryptedLogsHashes: padArrayEnd(result.unencryptedLogsHashes, LogHash.empty(), MAX_UNENCRYPTED_LOGS_PER_CALL), - historicalHeader: Header.empty(), + historicalHeader: BlockHeader.empty(), globalVariables: GlobalVariables.empty(), startGasLeft: Gas.from(result.startGasLeft), endGasLeft: Gas.from(result.endGasLeft),
diff --git a/yarn-project/bb-prover/src/verification_key/verification_key_data.ts b/yarn-project/bb-prover/src/verification_key/verification_key_data.ts index 14f5eb28c59d..98dd1ebff4c1 100644 --- a/yarn-project/bb-prover/src/verification_key/verification_key_data.ts +++ b/yarn-project/bb-prover/src/verification_key/verification_key_data.ts @@ -7,7 +7,7 @@ import { import { hashVK } from '@aztec/circuits.js/hash'; import { strict as assert } from 'assert'; -import * as fs from 'fs/promises'; +import { promises as fs } from 'fs'; import * as path from 'path'; import { VK_FIELDS_FILENAME, VK_FILENAME } from '../bb/execute.js';
diff --git a/yarn-project/bb-prover/src/verifier/bb_verifier.ts b/yarn-project/bb-prover/src/verifier/bb_verifier.ts index af05a695ae05..d0e2ae156457 100644 --- a/yarn-project/bb-prover/src/verifier/bb_verifier.ts +++ b/yarn-project/bb-prover/src/verifier/bb_verifier.ts @@ -9,7 +9,7 @@ import { ProtocolCircuitArtifacts, } from '@aztec/noir-protocol-circuits-types'; -import * as fs from 'fs/promises'; +import { promises as fs } from 'fs'; import * as path from 'path'; import {
diff --git a/yarn-project/archiver/src/archiver/epoch_helpers.ts b/yarn-project/circuit-types/src/epoch-helpers/index.ts similarity index 84% rename from yarn-project/archiver/src/archiver/epoch_helpers.ts rename to yarn-project/circuit-types/src/epoch-helpers/index.ts index 55fe28e2f0ec..50c698e11600 100644 --- a/yarn-project/archiver/src/archiver/epoch_helpers.ts +++ b/yarn-project/circuit-types/src/epoch-helpers/index.ts @@ -1,4 +1,18 @@ -// REFACTOR: This file should go in a package lower in the dependency graph. +export type L1RollupConstants = { + l1StartBlock: bigint; + l1GenesisTime: bigint; + slotDuration: number; + epochDuration: number; + ethereumSlotDuration: number; +}; + +export const EmptyL1RollupConstants: L1RollupConstants = { + l1StartBlock: 0n, + l1GenesisTime: 0n, + epochDuration: 1, // Not 0 to prevent division by zero + slotDuration: 1, + ethereumSlotDuration: 1, +}; export type EpochConstants = { l1GenesisBlock: bigint;
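With L1RollupConstants now exported from circuit-types, slot and epoch arithmetic can be written against it. An illustrative sketch of that arithmetic only; the canonical helpers are getSlotRangeForEpoch and getTimestampRangeForEpoch from this package, which may treat edge cases differently:

  import { type L1RollupConstants } from '@aztec/circuit-types';

  // First and last L2 slot of an epoch, assuming epochDuration slots per epoch.
  function slotRange(epoch: bigint, c: L1RollupConstants): [bigint, bigint] {
    const start = epoch * BigInt(c.epochDuration);
    return [start, start + BigInt(c.epochDuration) - 1n];
  }

  // Unix timestamp at which an L2 slot begins, assuming slotDuration seconds per slot.
  function slotTimestamp(slot: bigint, c: L1RollupConstants): bigint {
    return c.l1GenesisTime + slot * BigInt(c.slotDuration);
  }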
{ - return Promise.resolve(Header.empty()); + getBlockHeader(_number: number | 'latest'): Promise { + return Promise.resolve(BlockHeader.empty()); } getBlocks(from: number, _limit: number, _proven?: boolean | undefined): Promise { return Promise.resolve([L2Block.random(from)]); diff --git a/yarn-project/circuit-types/src/interfaces/archiver.ts b/yarn-project/circuit-types/src/interfaces/archiver.ts index b032efc61742..bf8ace98b60c 100644 --- a/yarn-project/circuit-types/src/interfaces/archiver.ts +++ b/yarn-project/circuit-types/src/interfaces/archiver.ts @@ -1,8 +1,8 @@ import { + BlockHeader, ContractClassPublicSchema, type ContractDataSource, ContractInstanceWithAddressSchema, - Header, PrivateLog, PublicFunctionSchema, } from '@aztec/circuits.js'; @@ -38,7 +38,7 @@ export const ArchiverApiSchema: ApiSchemaFor = { getBlockHeader: z .function() .args(z.union([schemas.Integer, z.literal('latest')])) - .returns(Header.schema.optional()), + .returns(BlockHeader.schema.optional()), getBlocks: z .function() .args(schemas.Integer, schemas.Integer, optional(z.boolean())) diff --git a/yarn-project/circuit-types/src/interfaces/aztec-node.test.ts b/yarn-project/circuit-types/src/interfaces/aztec-node.test.ts index 30d729f47504..3bb4ee18185f 100644 --- a/yarn-project/circuit-types/src/interfaces/aztec-node.test.ts +++ b/yarn-project/circuit-types/src/interfaces/aztec-node.test.ts @@ -1,12 +1,12 @@ import { ARCHIVE_HEIGHT, AztecAddress, + BlockHeader, type ContractClassPublic, type ContractInstanceWithAddress, EthAddress, Fr, GasFees, - Header, L1_TO_L2_MSG_TREE_HEIGHT, NOTE_HASH_TREE_HEIGHT, NULLIFIER_TREE_HEIGHT, @@ -284,7 +284,7 @@ describe('AztecNodeApiSchema', () => { it('getBlockHeader', async () => { const response = await context.client.getBlockHeader(); - expect(response).toBeInstanceOf(Header); + expect(response).toBeInstanceOf(BlockHeader); }); it('simulatePublicCalls', async () => { @@ -553,8 +553,8 @@ class MockAztecNode implements AztecNode { expect(slot).toBeInstanceOf(Fr); return Promise.resolve(Fr.random()); } - getBlockHeader(_blockNumber?: number | 'latest' | undefined): Promise
{ - return Promise.resolve(Header.empty()); + getBlockHeader(_blockNumber?: number | 'latest' | undefined): Promise { + return Promise.resolve(BlockHeader.empty()); } simulatePublicCalls(tx: Tx): Promise { expect(tx).toBeInstanceOf(Tx); diff --git a/yarn-project/circuit-types/src/interfaces/aztec-node.ts b/yarn-project/circuit-types/src/interfaces/aztec-node.ts index 96ac1a1f3ed0..427ff8d88ace 100644 --- a/yarn-project/circuit-types/src/interfaces/aztec-node.ts +++ b/yarn-project/circuit-types/src/interfaces/aztec-node.ts @@ -1,11 +1,11 @@ import { ARCHIVE_HEIGHT, + BlockHeader, type ContractClassPublic, ContractClassPublicSchema, type ContractInstanceWithAddress, ContractInstanceWithAddressSchema, GasFees, - Header, L1_TO_L2_MSG_TREE_HEIGHT, NOTE_HASH_TREE_HEIGHT, NULLIFIER_TREE_HEIGHT, @@ -379,7 +379,7 @@ export interface AztecNode * Returns the currently committed block header. * @returns The current committed block header. */ - getBlockHeader(blockNumber?: L2BlockNumber): Promise
+ getBlockHeader(blockNumber?: L2BlockNumber): Promise<BlockHeader>; /** * Simulates the public part of a transaction with the current state. @@ -560,7 +560,7 @@ export const AztecNodeApiSchema: ApiSchemaFor<AztecNode> = { getPublicStorageAt: z.function().args(schemas.AztecAddress, schemas.Fr, L2BlockNumberSchema).returns(schemas.Fr), - getBlockHeader: z.function().args(optional(L2BlockNumberSchema)).returns(Header.schema), + getBlockHeader: z.function().args(optional(L2BlockNumberSchema)).returns(BlockHeader.schema), simulatePublicCalls: z.function().args(Tx.schema).returns(PublicSimulationOutput.schema),
diff --git a/yarn-project/circuit-types/src/interfaces/block-builder.ts b/yarn-project/circuit-types/src/interfaces/block-builder.ts index bad2bcda99b9..ecb0103ff1f4 100644 --- a/yarn-project/circuit-types/src/interfaces/block-builder.ts +++ b/yarn-project/circuit-types/src/interfaces/block-builder.ts @@ -1,4 +1,4 @@ -import { type Fr, type GlobalVariables, type Header } from '@aztec/circuits.js'; +import { type BlockHeader, type Fr, type GlobalVariables } from '@aztec/circuits.js'; import { type L2Block } from '../l2_block.js'; import { type ProcessedTx } from '../tx/processed_tx.js'; @@ -24,5 +24,5 @@ export interface BlockBuilder extends ProcessedTxHandler { * Pads the block with empty txs if it hasn't reached the declared number of txs. * Assembles the block and updates the archive tree. */ - setBlockCompleted(expectedBlockHeader?: Header): Promise<L2Block>; + setBlockCompleted(expectedBlockHeader?: BlockHeader): Promise<L2Block>; }
diff --git a/yarn-project/circuit-types/src/interfaces/epoch-prover.ts b/yarn-project/circuit-types/src/interfaces/epoch-prover.ts index 16641c23e67c..4774741ebf16 100644 --- a/yarn-project/circuit-types/src/interfaces/epoch-prover.ts +++ b/yarn-project/circuit-types/src/interfaces/epoch-prover.ts @@ -1,4 +1,4 @@ -import { type BlockHeader, type Fr, type Proof, type RootRollupPublicInputs } from '@aztec/circuits.js'; import { type L2Block } from '../l2_block.js'; import { type BlockBuilder } from './block-builder.js'; @@ -14,7 +14,7 @@ export interface EpochProver extends Omit<BlockBuilder, 'setBlockCompleted'> { startNewEpoch(epochNumber: number, firstBlockNumber: number, totalNumBlocks: number): void; /** Pads the block with empty txs if it hasn't reached the declared number of txs. */ - setBlockCompleted(blockNumber: number, expectedBlockHeader?: Header): Promise<L2Block>; + setBlockCompleted(blockNumber: number, expectedBlockHeader?: BlockHeader): Promise<L2Block>; /** Pads the epoch with empty block roots if needed and blocks until proven. Throws if proving has failed. */ finaliseEpoch(): Promise<{ publicInputs: RootRollupPublicInputs; proof: Proof }>;
diff --git a/yarn-project/circuit-types/src/interfaces/merkle_tree_operations.ts b/yarn-project/circuit-types/src/interfaces/merkle_tree_operations.ts index 9017c1a6a846..60f72e0e167c 100644 --- a/yarn-project/circuit-types/src/interfaces/merkle_tree_operations.ts +++ b/yarn-project/circuit-types/src/interfaces/merkle_tree_operations.ts @@ -1,6 +1,6 @@ import { + type BlockHeader, type Fr, - type Header, type NullifierLeaf, type PublicDataTreeLeaf, type StateReference, @@ -133,7 +133,7 @@ export interface MerkleTreeReadOperations { /** * Gets the initial header. */ - getInitialHeader(): Header; + getInitialHeader(): BlockHeader; /** * Gets sibling path for a leaf.
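
One detail worth noting across these hunks (a general TypeScript point, not something this diff changes): interface-only modules import the renamed class with `import { type BlockHeader }`, while schema and struct files import it as a value. A short illustration:

```typescript
// In a module that only mentions the type in signatures, a type-only import
// is erased at compile time and adds no runtime dependency:
import { type BlockHeader } from '@aztec/circuits.js';

export interface HeaderSource {
  getInitialHeader(): BlockHeader;
}

// A module that calls runtime members such as BlockHeader.empty(),
// BlockHeader.fromBuffer(...), or BlockHeader.schema needs the value import
// instead: import { BlockHeader } from '@aztec/circuits.js';
```
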
@@ -224,7 +224,7 @@ export interface MerkleTreeWriteOperations extends MerkleTreeReadOperations { * This includes all of the current roots of all of the data trees and the current blocks global vars. * @param header - The header to insert into the archive. */ - updateArchive(header: Header): Promise; + updateArchive(header: BlockHeader): Promise; /** * Batch insert multiple leaves into the tree. diff --git a/yarn-project/circuit-types/src/l2_block.ts b/yarn-project/circuit-types/src/l2_block.ts index 09d73fe4dd2e..c6d4f570595e 100644 --- a/yarn-project/circuit-types/src/l2_block.ts +++ b/yarn-project/circuit-types/src/l2_block.ts @@ -1,4 +1,4 @@ -import { AppendOnlyTreeSnapshot, Header } from '@aztec/circuits.js'; +import { AppendOnlyTreeSnapshot, BlockHeader } from '@aztec/circuits.js'; import { sha256, sha256ToField } from '@aztec/foundation/crypto'; import { Fr } from '@aztec/foundation/fields'; import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; @@ -17,7 +17,7 @@ export class L2Block { /** Snapshot of archive tree after the block is applied. */ public archive: AppendOnlyTreeSnapshot, /** L2 block header. */ - public header: Header, + public header: BlockHeader, /** L2 block body. */ public body: Body, ) {} @@ -26,7 +26,7 @@ export class L2Block { return z .object({ archive: AppendOnlyTreeSnapshot.schema, - header: Header.schema, + header: BlockHeader.schema, body: Body.schema, }) .transform(({ archive, header, body }) => new L2Block(archive, header, body)); @@ -38,7 +38,7 @@ export class L2Block { */ static fromBuffer(buf: Buffer | BufferReader) { const reader = BufferReader.asReader(buf); - const header = reader.readObject(Header); + const header = reader.readObject(BlockHeader); const archive = reader.readObject(AppendOnlyTreeSnapshot); const body = reader.readObject(Body); @@ -103,7 +103,7 @@ export class L2Block { * @returns The L2 block. 
*/ static empty(): L2Block { - return new L2Block(AppendOnlyTreeSnapshot.zero(), Header.empty(), Body.empty()); + return new L2Block(AppendOnlyTreeSnapshot.zero(), BlockHeader.empty(), Body.empty()); } get number(): number { diff --git a/yarn-project/circuit-types/src/l2_block_code_to_purge.ts b/yarn-project/circuit-types/src/l2_block_code_to_purge.ts index 64e2e06db9e1..70d2b3e83066 100644 --- a/yarn-project/circuit-types/src/l2_block_code_to_purge.ts +++ b/yarn-project/circuit-types/src/l2_block_code_to_purge.ts @@ -1,12 +1,12 @@ import { AppendOnlyTreeSnapshot, AztecAddress, + BlockHeader, ContentCommitment, EthAddress, Fr, GasFees, GlobalVariables, - Header, NUM_BYTES_PER_SHA256, PartialStateReference, StateReference, @@ -23,8 +23,8 @@ export function makeHeader( slotNumber: number | undefined = undefined, txsEffectsHash: Buffer | undefined = undefined, inHash: Buffer | undefined = undefined, -): Header { - return new Header( +): BlockHeader { + return new BlockHeader( makeAppendOnlyTreeSnapshot(seed + 0x100), makeContentCommitment(seed + 0x200, txsEffectsHash, inHash), makeStateReference(seed + 0x600), diff --git a/yarn-project/circuit-types/src/l2_block_downloader/l2_block_stream.test.ts b/yarn-project/circuit-types/src/l2_block_downloader/l2_block_stream.test.ts index bb8eaafc83a5..ae25c222d5bb 100644 --- a/yarn-project/circuit-types/src/l2_block_downloader/l2_block_stream.test.ts +++ b/yarn-project/circuit-types/src/l2_block_downloader/l2_block_stream.test.ts @@ -1,4 +1,4 @@ -import { Fr, type Header } from '@aztec/circuits.js'; +import { type BlockHeader, Fr } from '@aztec/circuits.js'; import { compactArray } from '@aztec/foundation/collection'; import { type MockProxy, mock } from 'jest-mock-extended'; @@ -42,7 +42,7 @@ describe('L2BlockStream', () => { const makeBlock = (number: number) => ({ number } as L2Block); - const makeHeader = (number: number) => mock
({ hash: () => new Fr(number) } as Header); + const makeHeader = (number: number) => mock({ hash: () => new Fr(number) } as BlockHeader); const setRemoteTips = (latest_: number, proven?: number, finalized?: number) => { proven = proven ?? 0; diff --git a/yarn-project/circuit-types/src/l2_block_downloader/l2_block_stream.ts b/yarn-project/circuit-types/src/l2_block_downloader/l2_block_stream.ts index 41eb4581346c..bfe7e8eba6ce 100644 --- a/yarn-project/circuit-types/src/l2_block_downloader/l2_block_stream.ts +++ b/yarn-project/circuit-types/src/l2_block_downloader/l2_block_stream.ts @@ -46,7 +46,7 @@ export class L2BlockStream { try { const sourceTips = await this.l2BlockSource.getL2Tips(); const localTips = await this.localData.getL2Tips(); - this.log.debug(`Running L2 block stream`, { + this.log.trace(`Running L2 block stream`, { sourceLatest: sourceTips.latest.number, localLatest: localTips.latest.number, sourceFinalized: sourceTips.finalized.number, @@ -80,7 +80,7 @@ export class L2BlockStream { while (latestBlockNumber < sourceTips.latest.number) { const from = latestBlockNumber + 1; const limit = Math.min(this.opts.batchSize ?? 20, sourceTips.latest.number - from + 1); - this.log.debug(`Requesting blocks from ${from} limit ${limit} proven=${this.opts.proven}`); + this.log.trace(`Requesting blocks from ${from} limit ${limit} proven=${this.opts.proven}`); const blocks = await this.l2BlockSource.getBlocks(from, limit, this.opts.proven); if (blocks.length === 0) { break; @@ -119,7 +119,7 @@ export class L2BlockStream { const sourceBlockHash = args.sourceCache.find(id => id.number === blockNumber && id.hash)?.hash ?? (await this.l2BlockSource.getBlockHeader(blockNumber).then(h => h?.hash().toString())); - this.log.debug(`Comparing block hashes for block ${blockNumber}`, { + this.log.trace(`Comparing block hashes for block ${blockNumber}`, { localBlockHash, sourceBlockHash, sourceCacheNumber: args.sourceCache[0]?.number, diff --git a/yarn-project/circuit-types/src/l2_block_source.ts b/yarn-project/circuit-types/src/l2_block_source.ts index 6f749b281892..5a62f159b386 100644 --- a/yarn-project/circuit-types/src/l2_block_source.ts +++ b/yarn-project/circuit-types/src/l2_block_source.ts @@ -1,4 +1,4 @@ -import { type EthAddress, type Header } from '@aztec/circuits.js'; +import { type BlockHeader, type EthAddress } from '@aztec/circuits.js'; import { z } from 'zod'; @@ -54,7 +54,7 @@ export interface L2BlockSource { * @param number - The block number to return or 'latest' for the most recent one. * @returns The requested L2 block header. */ - getBlockHeader(number: number | 'latest'): Promise
<Header | undefined>; + getBlockHeader(number: number | 'latest'): Promise<BlockHeader | undefined>; /** * Gets up to `limit` amount of L2 blocks starting from `from`.
diff --git a/yarn-project/circuit-types/src/p2p/block_proposal.ts b/yarn-project/circuit-types/src/p2p/block_proposal.ts index 8e64f4c1fd9f..207312ba4a1d 100644 --- a/yarn-project/circuit-types/src/p2p/block_proposal.ts +++ b/yarn-project/circuit-types/src/p2p/block_proposal.ts @@ -1,5 +1,5 @@ import { Buffer32 } from '@aztec/foundation/buffer'; -import { recoverAddress } from '@aztec/foundation/crypto'; +import { keccak256, recoverAddress } from '@aztec/foundation/crypto'; import { type EthAddress } from '@aztec/foundation/eth-address'; import { Signature } from '@aztec/foundation/eth-signature'; import { type Fr } from '@aztec/foundation/fields'; @@ -42,13 +42,17 @@ export class BlockProposal extends Gossipable { } override p2pMessageIdentifier(): Buffer32 { - return BlockProposalHash.fromField(this.payload.archive); + return new BlockProposalHash(keccak256(this.signature.toBuffer())); } get archive(): Fr { return this.payload.archive; } + get slotNumber(): Fr { + return this.payload.header.globalVariables.slotNumber; + } + static async createProposalFromSigner( payload: ConsensusPayload, payloadSigner: (payload: Buffer32) => Promise<Signature>,
diff --git a/yarn-project/circuit-types/src/p2p/consensus_payload.ts b/yarn-project/circuit-types/src/p2p/consensus_payload.ts index 3c4d5e946b0e..a043a8d20101 100644 --- a/yarn-project/circuit-types/src/p2p/consensus_payload.ts +++ b/yarn-project/circuit-types/src/p2p/consensus_payload.ts @@ -1,4 +1,4 @@ -import { Header } from '@aztec/circuits.js'; +import { BlockHeader } from '@aztec/circuits.js'; import { Fr } from '@aztec/foundation/fields'; import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; import { hexToBuffer } from '@aztec/foundation/string'; @@ -14,7 +14,7 @@ export class ConsensusPayload implements Signable { constructor( /** The block header the attestation is made over */ - public readonly header: Header, + public readonly header: BlockHeader, // TODO(https://github.com/AztecProtocol/aztec-packages/pull/7727#discussion_r1713670830): temporary public readonly archive: Fr, /** The sequence of transactions in the block */ @@ -51,7 +51,7 @@ export class ConsensusPayload implements Signable { static fromBuffer(buf: Buffer | BufferReader): ConsensusPayload { const reader = BufferReader.asReader(buf); return new ConsensusPayload( - reader.readObject(Header), + reader.readObject(BlockHeader), reader.readObject(Fr), reader.readArray(reader.readNumber(), TxHash), ); @@ -62,7 +62,7 @@ } static empty(): ConsensusPayload { - return new ConsensusPayload(Header.empty(), Fr.ZERO, []); + return new ConsensusPayload(BlockHeader.empty(), Fr.ZERO, []); } /**
diff --git a/yarn-project/circuit-types/src/p2p/interface.ts b/yarn-project/circuit-types/src/p2p/interface.ts index 1e252250399b..06a027946021 100644 --- a/yarn-project/circuit-types/src/p2p/interface.ts +++ b/yarn-project/circuit-types/src/p2p/interface.ts @@ -17,3 +17,15 @@ export const TopicTypeMap: Record<string, typeof Gossipable> = { [TopicType.block_attestation]: BlockAttestation as unknown as typeof Gossipable, [TopicType.epoch_proof_quote]: EpochProofQuote as unknown as typeof Gossipable, }; + +/** + * Map from topic to deserialiser + * + * Used in msgIdFn libp2p to get the p2pMessageIdentifier from a message + */ +export const TopicToDeserializer = { + [Tx.p2pTopic]: Tx.fromBuffer, + [BlockProposal.p2pTopic]: BlockProposal.fromBuffer, + [BlockAttestation.p2pTopic]: BlockAttestation.fromBuffer, + [EpochProofQuote.p2pTopic]: EpochProofQuote.fromBuffer, +};
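
Two p2p changes above deserve a gloss. First, a proposal's gossip message id is now derived from its signature rather than the archive root, so two proposals over the same archive (for example, from different proposers) no longer collapse to one id. Second, `TopicToDeserializer` exists so a libp2p `msgIdFn` can recover that id from raw bytes. A hedged sketch of the wiring; the actual p2p service code is not part of this diff, and the message shape and `Buffer32.toBuffer()` are assumptions:

```typescript
// Sketch: a libp2p-style msgIdFn built on TopicToDeserializer. Assumes the
// incoming message exposes its topic and raw payload bytes.
function msgIdFn(msg: { topic: string; data: Uint8Array }): Uint8Array {
  const fromBuffer = TopicToDeserializer[msg.topic as keyof typeof TopicToDeserializer];
  // Deserialize, then reuse the object's own identifier (e.g. keccak256 of
  // the signature for BlockProposal, per the override above).
  return fromBuffer(Buffer.from(msg.data)).p2pMessageIdentifier().toBuffer();
}
```
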
diff --git a/yarn-project/circuit-types/src/p2p/mocks.ts b/yarn-project/circuit-types/src/p2p/mocks.ts index 821d4fd3b4bf..1e4e99ac0420 100644 --- a/yarn-project/circuit-types/src/p2p/mocks.ts +++ b/yarn-project/circuit-types/src/p2p/mocks.ts @@ -1,4 +1,4 @@ -import { type Header } from '@aztec/circuits.js'; +import { type BlockHeader } from '@aztec/circuits.js'; import { makeHeader } from '@aztec/circuits.js/testing'; import { Secp256k1Signer } from '@aztec/foundation/crypto'; import { Fr } from '@aztec/foundation/fields'; @@ -11,7 +11,7 @@ import { SignatureDomainSeperator, getHashedSignaturePayloadEthSignedMessage } f export interface MakeConsensusPayloadOptions { signer?: Secp256k1Signer; - header?: Header; + header?: BlockHeader; archive?: Fr; txHashes?: TxHash[]; }
diff --git a/yarn-project/circuit-types/src/test/factories.ts b/yarn-project/circuit-types/src/test/factories.ts index 72e2c318edf1..6360135750ad 100644 --- a/yarn-project/circuit-types/src/test/factories.ts +++ b/yarn-project/circuit-types/src/test/factories.ts @@ -2,6 +2,7 @@ import { AvmCircuitInputs, AvmCircuitPublicInputs, AvmExecutionHints, + type BlockHeader, FIXED_DA_GAS, FIXED_L2_GAS, Fr, @@ -9,7 +10,6 @@ import { GasFees, GasSettings, GlobalVariables, - type Header, MAX_NULLIFIERS_PER_TX, MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, PublicCircuitPublicInputs, @@ -42,7 +42,7 @@ export function makeBloatedProcessedTx({ privateOnly = false, }: { seed?: number; - header?: Header; + header?: BlockHeader; db?: MerkleTreeReadOperations; chainId?: Fr; version?: Fr;
diff --git a/yarn-project/circuit-types/src/tx/processed_tx.ts b/yarn-project/circuit-types/src/tx/processed_tx.ts index b52006846b20..6ae2779be14f 100644 --- a/yarn-project/circuit-types/src/tx/processed_tx.ts +++ b/yarn-project/circuit-types/src/tx/processed_tx.ts @@ -1,10 +1,10 @@ import { + type BlockHeader, ClientIvcProof, CombinedConstantData, Fr, Gas, type GlobalVariables, - type Header, PrivateKernelTailCircuitPublicInputs, type PublicDataWrite, RevertCode, @@ -86,7 +86,7 @@ export type FailedTx = { * @returns A processed empty tx.
*/ export function makeEmptyProcessedTx( - header: Header, + header: BlockHeader, chainId: Fr, version: Fr, vkTreeRoot: Fr, diff --git a/yarn-project/circuits.js/src/constants.gen.ts b/yarn-project/circuits.js/src/constants.gen.ts index 3168b6099f64..a0ff9ef18b12 100644 --- a/yarn-project/circuits.js/src/constants.gen.ts +++ b/yarn-project/circuits.js/src/constants.gen.ts @@ -177,7 +177,7 @@ export const TX_CONTEXT_LENGTH = 8; export const TX_REQUEST_LENGTH = 12; export const TOTAL_FEES_LENGTH = 1; export const TOTAL_MANA_USED_LENGTH = 1; -export const HEADER_LENGTH = 25; +export const BLOCK_HEADER_LENGTH = 25; export const PRIVATE_CIRCUIT_PUBLIC_INPUTS_LENGTH = 739; export const PUBLIC_CIRCUIT_PUBLIC_INPUTS_LENGTH = 867; export const PRIVATE_CONTEXT_INPUTS_LENGTH = 38; @@ -220,7 +220,7 @@ export const TUBE_PROOF_LENGTH = 463; export const HONK_VERIFICATION_KEY_LENGTH_IN_FIELDS = 128; export const CLIENT_IVC_VERIFICATION_KEY_LENGTH_IN_FIELDS = 143; export const AVM_VERIFICATION_KEY_LENGTH_IN_FIELDS = 86; -export const AVM_PROOF_LENGTH_IN_FIELDS = 4166; +export const AVM_PROOF_LENGTH_IN_FIELDS = 4161; export const AVM_PUBLIC_COLUMN_MAX_SIZE = 1024; export const AVM_PUBLIC_INPUTS_FLATTENED_SIZE = 2915; export const MEM_TAG_FF = 0; @@ -232,7 +232,6 @@ export const MEM_TAG_U64 = 5; export const MEM_TAG_U128 = 6; export const SENDER_KERNEL_INPUTS_COL_OFFSET = 0; export const ADDRESS_KERNEL_INPUTS_COL_OFFSET = 1; -export const FUNCTION_SELECTOR_KERNEL_INPUTS_COL_OFFSET = 2; export const IS_STATIC_CALL_KERNEL_INPUTS_COL_OFFSET = 3; export const CHAIN_ID_KERNEL_INPUTS_COL_OFFSET = 4; export const VERSION_KERNEL_INPUTS_COL_OFFSET = 5; diff --git a/yarn-project/circuits.js/src/contract/artifact_hash.ts b/yarn-project/circuits.js/src/contract/artifact_hash.ts index a7bc52ae7adf..a170f49106db 100644 --- a/yarn-project/circuits.js/src/contract/artifact_hash.ts +++ b/yarn-project/circuits.js/src/contract/artifact_hash.ts @@ -47,7 +47,7 @@ export function computeArtifactHash( const preimage = computeArtifactHashPreimage(artifact); const artifactHash = computeArtifactHash(computeArtifactHashPreimage(artifact)); - getLogger().debug('Computed artifact hash', { artifactHash, ...preimage }); + getLogger().trace('Computed artifact hash', { artifactHash, ...preimage }); return artifactHash; } diff --git a/yarn-project/circuits.js/src/scripts/constants.in.ts b/yarn-project/circuits.js/src/scripts/constants.in.ts index 26a4857e2c5c..6ac65ef90ea0 100644 --- a/yarn-project/circuits.js/src/scripts/constants.in.ts +++ b/yarn-project/circuits.js/src/scripts/constants.in.ts @@ -63,7 +63,6 @@ const CPP_CONSTANTS = [ 'START_EMIT_UNENCRYPTED_LOG_WRITE_OFFSET', 'SENDER_KERNEL_INPUTS_COL_OFFSET', 'ADDRESS_KERNEL_INPUTS_COL_OFFSET', - 'FUNCTION_SELECTOR_KERNEL_INPUTS_COL_OFFSET', 'CHAIN_ID_KERNEL_INPUTS_COL_OFFSET', 'VERSION_KERNEL_INPUTS_COL_OFFSET', 'BLOCK_NUMBER_KERNEL_INPUTS_COL_OFFSET', @@ -85,6 +84,12 @@ const CPP_CONSTANTS = [ 'MEM_TAG_FF', 'MAX_L2_GAS_PER_ENQUEUED_CALL', 'MAX_PACKED_PUBLIC_BYTECODE_SIZE_IN_FIELDS', + 'CANONICAL_AUTH_REGISTRY_ADDRESS', + 'DEPLOYER_CONTRACT_ADDRESS', + 'REGISTERER_CONTRACT_ADDRESS', + 'MULTI_CALL_ENTRYPOINT_ADDRESS', + 'FEE_JUICE_ADDRESS', + 'ROUTER_ADDRESS', ]; const CPP_GENERATORS: string[] = [ @@ -123,7 +128,6 @@ const PIL_CONSTANTS = [ 'START_EMIT_UNENCRYPTED_LOG_WRITE_OFFSET', 'SENDER_KERNEL_INPUTS_COL_OFFSET', 'ADDRESS_KERNEL_INPUTS_COL_OFFSET', - 'FUNCTION_SELECTOR_KERNEL_INPUTS_COL_OFFSET', 'CHAIN_ID_KERNEL_INPUTS_COL_OFFSET', 'VERSION_KERNEL_INPUTS_COL_OFFSET', 
'BLOCK_NUMBER_KERNEL_INPUTS_COL_OFFSET', diff --git a/yarn-project/circuits.js/src/structs/__snapshots__/block_header.test.ts.snap b/yarn-project/circuits.js/src/structs/__snapshots__/block_header.test.ts.snap new file mode 100644 index 000000000000..fac31063f85c --- /dev/null +++ b/yarn-project/circuits.js/src/structs/__snapshots__/block_header.test.ts.snap @@ -0,0 +1,5 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`BlockHeader computes empty hash 1`] = `Fr<0x28e48e620bc00817609b5fc765bc74864561f25a3c941b33e5ee05266b752839>`; + +exports[`BlockHeader computes hash 1`] = `Fr<0x2352a779093c231d53586b8c09d3d63033327f5f80029f007fe9deedc67c4be3>`; diff --git a/yarn-project/circuits.js/src/structs/__snapshots__/header.test.ts.snap b/yarn-project/circuits.js/src/structs/__snapshots__/header.test.ts.snap deleted file mode 100644 index 175c31be1823..000000000000 --- a/yarn-project/circuits.js/src/structs/__snapshots__/header.test.ts.snap +++ /dev/null @@ -1,5 +0,0 @@ -// Jest Snapshot v1, https://goo.gl/fbAQLP - -exports[`Header computes empty hash 1`] = `Fr<0x28e48e620bc00817609b5fc765bc74864561f25a3c941b33e5ee05266b752839>`; - -exports[`Header computes hash 1`] = `Fr<0x2352a779093c231d53586b8c09d3d63033327f5f80029f007fe9deedc67c4be3>`; diff --git a/yarn-project/circuits.js/src/structs/avm/avm.ts b/yarn-project/circuits.js/src/structs/avm/avm.ts index 30c37a7b1329..6e7728b9bedf 100644 --- a/yarn-project/circuits.js/src/structs/avm/avm.ts +++ b/yarn-project/circuits.js/src/structs/avm/avm.ts @@ -259,6 +259,7 @@ export class AvmContractInstanceHint { public readonly contractClassId: Fr, public readonly initializationHash: Fr, public readonly publicKeys: PublicKeys, + public readonly membershipHint: AvmNullifierReadTreeHint = AvmNullifierReadTreeHint.empty(), ) {} /** * Serializes the inputs to a buffer. @@ -288,7 +289,8 @@ export class AvmContractInstanceHint { this.deployer.isZero() && this.contractClassId.isZero() && this.initializationHash.isZero() && - this.publicKeys.isEmpty() + this.publicKeys.isEmpty() && + this.membershipHint.isEmpty() ); } @@ -315,6 +317,7 @@ export class AvmContractInstanceHint { fields.contractClassId, fields.initializationHash, fields.publicKeys, + fields.membershipHint, ] as const; } @@ -333,6 +336,7 @@ export class AvmContractInstanceHint { Fr.fromBuffer(reader), Fr.fromBuffer(reader), PublicKeys.fromBuffer(reader), + AvmNullifierReadTreeHint.fromBuffer(reader), ); } @@ -592,7 +596,7 @@ export class AvmNullifierReadTreeHint { constructor( public readonly lowLeafPreimage: NullifierLeafPreimage, public readonly lowLeafIndex: Fr, - public readonly _lowLeafSiblingPath: Fr[], + public _lowLeafSiblingPath: Fr[], ) { this.lowLeafSiblingPath = new Vector(_lowLeafSiblingPath); } @@ -630,6 +634,10 @@ export class AvmNullifierReadTreeHint { return new AvmNullifierReadTreeHint(fields.lowLeafPreimage, fields.lowLeafIndex, fields.lowLeafSiblingPath.items); } + static empty(): AvmNullifierReadTreeHint { + return new AvmNullifierReadTreeHint(NullifierLeafPreimage.empty(), Fr.ZERO, []); + } + /** * Extracts fields from an instance. * @param fields - Fields to create the instance from. 
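
Note the pattern used for the new `membershipHint` above: the constructor parameter defaults to `AvmNullifierReadTreeHint.empty()`, so existing construction sites keep compiling, while `isEmpty()` and `fromBuffer` are extended in lockstep so serialization stays symmetric. A minimal sketch of the same idea, with hypothetical names:

```typescript
// Hypothetical sketch of the default-empty-member pattern seen above.
class Hint {
  constructor(public readonly value = 0) {}
  static empty() { return new Hint(); }
  isEmpty() { return this.value === 0; }
}

class InstanceHint {
  // New trailing member with an empty default: old `new InstanceHint(addr)`
  // call sites still compile, and isEmpty() must now also check the hint.
  constructor(public readonly address: string, public readonly hint: Hint = Hint.empty()) {}
  isEmpty() { return this.address === '' && this.hint.isEmpty(); }
}
```
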
@@ -851,13 +859,13 @@ export class AvmExecutionHints { public readonly contractInstances: Vector; public readonly contractBytecodeHints: Vector; - public readonly storageReadRequest: Vector; - public readonly storageUpdateRequest: Vector; - public readonly nullifierReadRequest: Vector; - public readonly nullifierWriteHints: Vector; - public readonly noteHashReadRequest: Vector; - public readonly noteHashWriteRequest: Vector; - public readonly l1ToL2MessageReadRequest: Vector; + public readonly publicDataReads: Vector; + public readonly publicDataWrites: Vector; + public readonly nullifierReads: Vector; + public readonly nullifierWrites: Vector; + public readonly noteHashReads: Vector; + public readonly noteHashWrites: Vector; + public readonly l1ToL2MessageReads: Vector; constructor( enqueuedCalls: AvmEnqueuedCallHint[], @@ -868,13 +876,13 @@ export class AvmExecutionHints { externalCalls: AvmExternalCallHint[], contractInstances: AvmContractInstanceHint[], contractBytecodeHints: AvmContractBytecodeHints[], - storageReadRequest: AvmPublicDataReadTreeHint[], - storageUpdateRequest: AvmPublicDataWriteTreeHint[], - nullifierReadRequest: AvmNullifierReadTreeHint[], - nullifierWriteHints: AvmNullifierWriteTreeHint[], - noteHashReadRequest: AvmAppendTreeHint[], - noteHashWriteRequest: AvmAppendTreeHint[], - l1ToL2MessageReadRequest: AvmAppendTreeHint[], + publicDataReads: AvmPublicDataReadTreeHint[], + publicDataWrites: AvmPublicDataWriteTreeHint[], + nullifierReads: AvmNullifierReadTreeHint[], + nullifierWrites: AvmNullifierWriteTreeHint[], + noteHashReads: AvmAppendTreeHint[], + noteHashWrites: AvmAppendTreeHint[], + l1ToL2MessageReads: AvmAppendTreeHint[], ) { this.enqueuedCalls = new Vector(enqueuedCalls); this.storageValues = new Vector(storageValues); @@ -884,14 +892,13 @@ export class AvmExecutionHints { this.externalCalls = new Vector(externalCalls); this.contractInstances = new Vector(contractInstances); this.contractBytecodeHints = new Vector(contractBytecodeHints); - this.storageReadRequest = new Vector(storageReadRequest); - this.storageUpdateRequest = new Vector(storageUpdateRequest); - this.noteHashReadRequest = new Vector(noteHashReadRequest); - this.nullifierReadRequest = new Vector(nullifierReadRequest); - this.nullifierWriteHints = new Vector(nullifierWriteHints); - this.noteHashReadRequest = new Vector(noteHashReadRequest); - this.noteHashWriteRequest = new Vector(noteHashWriteRequest); - this.l1ToL2MessageReadRequest = new Vector(l1ToL2MessageReadRequest); + this.publicDataReads = new Vector(publicDataReads); + this.publicDataWrites = new Vector(publicDataWrites); + this.nullifierReads = new Vector(nullifierReads); + this.nullifierWrites = new Vector(nullifierWrites); + this.noteHashReads = new Vector(noteHashReads); + this.noteHashWrites = new Vector(noteHashWrites); + this.l1ToL2MessageReads = new Vector(l1ToL2MessageReads); } /** @@ -932,13 +939,13 @@ export class AvmExecutionHints { this.externalCalls.items.length == 0 && this.contractInstances.items.length == 0 && this.contractBytecodeHints.items.length == 0 && - this.storageReadRequest.items.length == 0 && - this.storageUpdateRequest.items.length == 0 && - this.nullifierReadRequest.items.length == 0 && - this.nullifierWriteHints.items.length == 0 && - this.noteHashReadRequest.items.length == 0 && - this.noteHashWriteRequest.items.length == 0 && - this.l1ToL2MessageReadRequest.items.length == 0 + this.publicDataReads.items.length == 0 && + this.publicDataWrites.items.length == 0 && + this.nullifierReads.items.length == 0 
&& + this.nullifierWrites.items.length == 0 && + this.noteHashReads.items.length == 0 && + this.noteHashWrites.items.length == 0 && + this.l1ToL2MessageReads.items.length == 0 ); } @@ -949,8 +956,7 @@ */ static from(fields: FieldsOf<AvmExecutionHints>): AvmExecutionHints { return new AvmExecutionHints( - // omit enqueued call hints until they're implemented in C++ - new Array(), + fields.enqueuedCalls.items, fields.storageValues.items, fields.noteHashExists.items, fields.nullifierExists.items, @@ -958,13 +964,13 @@ fields.externalCalls.items, fields.contractInstances.items, fields.contractBytecodeHints.items, - fields.storageReadRequest.items, - fields.storageUpdateRequest.items, - fields.nullifierReadRequest.items, - fields.nullifierWriteHints.items, - fields.noteHashReadRequest.items, - fields.noteHashWriteRequest.items, - fields.l1ToL2MessageReadRequest.items, + fields.publicDataReads.items, + fields.publicDataWrites.items, + fields.nullifierReads.items, + fields.nullifierWrites.items, + fields.noteHashReads.items, + fields.noteHashWrites.items, + fields.l1ToL2MessageReads.items, ); } @@ -975,8 +981,7 @@ */ static getFields(fields: FieldsOf<AvmExecutionHints>) { return [ - // omit enqueued call hints until they're implemented in C++ - //fields.enqueuedCalls, + fields.enqueuedCalls, fields.storageValues, fields.noteHashExists, fields.nullifierExists, @@ -984,13 +989,13 @@ fields.externalCalls, fields.contractInstances, fields.contractBytecodeHints, - fields.storageReadRequest, - fields.storageUpdateRequest, - fields.nullifierReadRequest, - fields.nullifierWriteHints, - fields.noteHashReadRequest, - fields.noteHashWriteRequest, - fields.l1ToL2MessageReadRequest, + fields.publicDataReads, + fields.publicDataWrites, + fields.nullifierReads, + fields.nullifierWrites, + fields.noteHashReads, + fields.noteHashWrites, + fields.l1ToL2MessageReads, ] as const; } @@ -1002,8 +1007,7 @@ static fromBuffer(buff: Buffer | BufferReader): AvmExecutionHints { const reader = BufferReader.asReader(buff); return new AvmExecutionHints( - // omit enqueued call hints until they're implemented in C++ - new Array(), + reader.readVector(AvmEnqueuedCallHint), reader.readVector(AvmKeyValueHint), reader.readVector(AvmKeyValueHint), reader.readVector(AvmKeyValueHint),
diff --git a/yarn-project/circuits.js/src/structs/header.test.ts b/yarn-project/circuits.js/src/structs/block_header.test.ts similarity index 77% rename from yarn-project/circuits.js/src/structs/header.test.ts rename to yarn-project/circuits.js/src/structs/block_header.test.ts index 352acf4c6811..053bf0d26397 100644 --- a/yarn-project/circuits.js/src/structs/header.test.ts +++ b/yarn-project/circuits.js/src/structs/block_header.test.ts @@ -1,12 +1,12 @@ import { randomInt } from '@aztec/foundation/crypto'; import { setupCustomSnapshotSerializers, updateInlineTestData } from '@aztec/foundation/testing'; -import { HEADER_LENGTH } from '../constants.gen.js'; +import { BLOCK_HEADER_LENGTH } from '../constants.gen.js'; import { makeHeader } from '../tests/factories.js'; -import { Header } from './header.js'; +import { BlockHeader } from './block_header.js'; -describe('Header', () => { - let header: Header; +describe('BlockHeader', () => { + let header: BlockHeader; beforeAll(() => { setupCustomSnapshotSerializers(expect); @@ -15,13 +15,13 @@ it('serializes to buffer and deserializes it back', () => { const buffer = header.toBuffer(); - const res = Header.fromBuffer(buffer); + const res = BlockHeader.fromBuffer(buffer); expect(res).toEqual(header); }); it('serializes to field array and deserializes it back', () => { const fieldArray = header.toFields(); - const res = Header.fromFields(fieldArray); + const res = BlockHeader.fromFields(fieldArray); expect(res).toEqual(header); }); @@ -34,11 +34,11 @@ it('number of fields matches constant', () => { const fields = header.toFields(); - expect(fields.length).toBe(HEADER_LENGTH); + expect(fields.length).toBe(BLOCK_HEADER_LENGTH); }); it('computes empty hash', () => { - const header = Header.empty(); + const header = BlockHeader.empty(); const hash = header.hash(); expect(hash).toMatchSnapshot();
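
The renamed tests above pin down `BlockHeader`'s two serialization formats: a byte buffer and a fixed-length field array of `BLOCK_HEADER_LENGTH` elements. A hedged usage sketch built on the methods visible in this diff:

```typescript
// Round-tripping a header through both encodings, as the tests above do.
import { BlockHeader } from '@aztec/circuits.js';

const header = BlockHeader.empty();
const fromBytes = BlockHeader.fromBuffer(header.toBuffer());
const fromFields = BlockHeader.fromFields(header.toFields()); // exactly BLOCK_HEADER_LENGTH fields
// Both decodings should reproduce the same block hash.
console.assert(fromBytes.hash().toString() === header.hash().toString());
console.assert(fromFields.hash().toString() === header.hash().toString());
```
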
diff --git a/yarn-project/circuits.js/src/structs/header.ts b/yarn-project/circuits.js/src/structs/block_header.ts similarity index 83% rename from yarn-project/circuits.js/src/structs/header.ts rename to yarn-project/circuits.js/src/structs/block_header.ts index 21be70c07177..8d335e7a5dee 100644 --- a/yarn-project/circuits.js/src/structs/header.ts +++ b/yarn-project/circuits.js/src/structs/block_header.ts @@ -8,14 +8,14 @@ import { type FieldsOf } from '@aztec/foundation/types'; import { inspect } from 'util'; import { z } from 'zod'; -import { GeneratorIndex, HEADER_LENGTH } from '../constants.gen.js'; +import { BLOCK_HEADER_LENGTH, GeneratorIndex } from '../constants.gen.js'; import { ContentCommitment } from './content_commitment.js'; import { GlobalVariables } from './global_variables.js'; import { AppendOnlyTreeSnapshot } from './rollup/append_only_tree_snapshot.js'; import { StateReference } from './state_reference.js'; /** A header of an L2 block. */ -export class Header { +export class BlockHeader { constructor( /** Snapshot of archive before the block is applied. */ public lastArchive: AppendOnlyTreeSnapshot, @@ -41,10 +41,10 @@ export class Header { totalFees: schemas.Fr, totalManaUsed: schemas.Fr, }) - .transform(Header.from); + .transform(BlockHeader.from); } - static getFields(fields: FieldsOf<Header>
) { + static getFields(fields: FieldsOf) { // Note: The order here must match the order in the HeaderLib solidity library. return [ fields.lastArchive, @@ -56,8 +56,8 @@ export class Header { ] as const; } - static from(fields: FieldsOf
) { - return new Header(...Header.getFields(fields)); + static from(fields: FieldsOf) { + return new BlockHeader(...BlockHeader.getFields(fields)); } getSize() { @@ -72,25 +72,25 @@ export class Header { } toBuffer() { - return serializeToBuffer(...Header.getFields(this)); + return serializeToBuffer(...BlockHeader.getFields(this)); } toFields(): Fr[] { - const fields = serializeToFields(...Header.getFields(this)); - if (fields.length !== HEADER_LENGTH) { - throw new Error(`Invalid number of fields for Header. Expected ${HEADER_LENGTH}, got ${fields.length}`); + const fields = serializeToFields(...BlockHeader.getFields(this)); + if (fields.length !== BLOCK_HEADER_LENGTH) { + throw new Error(`Invalid number of fields for Header. Expected ${BLOCK_HEADER_LENGTH}, got ${fields.length}`); } return fields; } - clone(): Header { - return Header.fromBuffer(this.toBuffer()); + clone(): BlockHeader { + return BlockHeader.fromBuffer(this.toBuffer()); } - static fromBuffer(buffer: Buffer | BufferReader): Header { + static fromBuffer(buffer: Buffer | BufferReader): BlockHeader { const reader = BufferReader.asReader(buffer); - return new Header( + return new BlockHeader( reader.readObject(AppendOnlyTreeSnapshot), reader.readObject(ContentCommitment), reader.readObject(StateReference), @@ -100,10 +100,10 @@ export class Header { ); } - static fromFields(fields: Fr[] | FieldReader): Header { + static fromFields(fields: Fr[] | FieldReader): BlockHeader { const reader = FieldReader.asReader(fields); - return new Header( + return new BlockHeader( AppendOnlyTreeSnapshot.fromFields(reader), ContentCommitment.fromFields(reader), StateReference.fromFields(reader), @@ -113,8 +113,8 @@ export class Header { ); } - static empty(fields: Partial> = {}): Header { - return Header.from({ + static empty(fields: Partial> = {}): BlockHeader { + return BlockHeader.from({ lastArchive: AppendOnlyTreeSnapshot.zero(), contentCommitment: ContentCommitment.empty(), state: StateReference.empty(), @@ -144,8 +144,8 @@ export class Header { return bufferToHex(this.toBuffer()); } - static fromString(str: string): Header { - return Header.fromBuffer(hexToBuffer(str)); + static fromString(str: string): BlockHeader { + return BlockHeader.fromBuffer(hexToBuffer(str)); } hash(): Fr { diff --git a/yarn-project/circuits.js/src/structs/client_ivc_proof.ts b/yarn-project/circuits.js/src/structs/client_ivc_proof.ts index 9c47db7e8450..3ce4169c2f8e 100644 --- a/yarn-project/circuits.js/src/structs/client_ivc_proof.ts +++ b/yarn-project/circuits.js/src/structs/client_ivc_proof.ts @@ -1,7 +1,7 @@ import { bufferSchemaFor } from '@aztec/foundation/schemas'; import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; -import * as fs from 'fs/promises'; +import { promises as fs } from 'fs'; import path from 'path'; /** diff --git a/yarn-project/circuits.js/src/structs/context/private_context_inputs.ts b/yarn-project/circuits.js/src/structs/context/private_context_inputs.ts index f930eabf3ba7..4f52855805bb 100644 --- a/yarn-project/circuits.js/src/structs/context/private_context_inputs.ts +++ b/yarn-project/circuits.js/src/structs/context/private_context_inputs.ts @@ -1,19 +1,19 @@ import { serializeToFields } from '@aztec/foundation/serialize'; +import { BlockHeader } from '../block_header.js'; import { CallContext } from '../call_context.js'; -import { Header } from '../header.js'; import { TxContext } from '../tx_context.js'; export class PrivateContextInputs { constructor( public callContext: CallContext, - public 
historicalHeader: Header, + public historicalHeader: BlockHeader, public txContext: TxContext, public startSideEffectCounter: number, ) {} public static empty(): PrivateContextInputs { - return new PrivateContextInputs(CallContext.empty(), Header.empty(), TxContext.empty(), 0); + return new PrivateContextInputs(CallContext.empty(), BlockHeader.empty(), TxContext.empty(), 0); } public toFields() { diff --git a/yarn-project/circuits.js/src/structs/global_variables.ts b/yarn-project/circuits.js/src/structs/global_variables.ts index 914dc7b5fb61..240b4d9abedf 100644 --- a/yarn-project/circuits.js/src/structs/global_variables.ts +++ b/yarn-project/circuits.js/src/structs/global_variables.ts @@ -159,10 +159,22 @@ export class GlobalVariables { ); } + toInspect() { + return { + chainId: this.chainId.toNumber(), + version: this.version.toNumber(), + blockNumber: this.blockNumber.toNumber(), + slotNumber: this.slotNumber.toNumber(), + timestamp: this.timestamp.toNumber(), + coinbase: this.coinbase.toString(), + feeRecipient: this.feeRecipient.toString(), + feePerDaGas: this.gasFees.feePerDaGas.toNumber(), + feePerL2Gas: this.gasFees.feePerL2Gas.toNumber(), + }; + } + [inspect.custom]() { - return `GlobalVariables { chainId: ${this.chainId.toString()}, version: ${this.version.toString()}, blockNumber: ${this.blockNumber.toString()}, slotNumber: ${this.slotNumber.toString()}, timestamp: ${this.timestamp.toString()}, coinbase: ${this.coinbase.toString()}, feeRecipient: ${this.feeRecipient.toString()}, gasFees: ${inspect( - this.gasFees, - )} }`; + return `GlobalVariables ${inspect(this.toInspect())}`; } public equals(other: this): boolean { diff --git a/yarn-project/circuits.js/src/structs/index.ts b/yarn-project/circuits.js/src/structs/index.ts index 7f76ff4a96d8..4959a0fb048b 100644 --- a/yarn-project/circuits.js/src/structs/index.ts +++ b/yarn-project/circuits.js/src/structs/index.ts @@ -14,7 +14,7 @@ export * from './gas.js'; export * from './gas_fees.js'; export * from './gas_settings.js'; export * from './global_variables.js'; -export * from './header.js'; +export * from './block_header.js'; export * from './tagging_secret.js'; export * from './kernel/combined_accumulated_data.js'; export * from './kernel/combined_constant_data.js'; diff --git a/yarn-project/circuits.js/src/structs/kernel/combined_constant_data.ts b/yarn-project/circuits.js/src/structs/kernel/combined_constant_data.ts index 084873c7b9b1..e676c34b7a90 100644 --- a/yarn-project/circuits.js/src/structs/kernel/combined_constant_data.ts +++ b/yarn-project/circuits.js/src/structs/kernel/combined_constant_data.ts @@ -5,8 +5,8 @@ import { type FieldsOf } from '@aztec/foundation/types'; import { z } from 'zod'; +import { BlockHeader } from '../block_header.js'; import { GlobalVariables } from '../global_variables.js'; -import { Header } from '../header.js'; import { TxContext } from '../tx_context.js'; import { type TxConstantData } from './tx_constant_data.js'; @@ -16,7 +16,7 @@ import { type TxConstantData } from './tx_constant_data.js'; export class CombinedConstantData { constructor( /** Header of a block whose state is used during execution (not the block the transaction is included in). */ - public historicalHeader: Header, + public historicalHeader: BlockHeader, /** * Context of the transaction. 
* @@ -51,7 +51,7 @@ export class CombinedConstantData { static get schema() { return z .object({ - historicalHeader: Header.schema, + historicalHeader: BlockHeader.schema, txContext: TxContext.schema, vkTreeRoot: schemas.Fr, protocolContractTreeRoot: schemas.Fr, @@ -103,7 +103,7 @@ export class CombinedConstantData { static fromBuffer(buffer: Buffer | BufferReader): CombinedConstantData { const reader = BufferReader.asReader(buffer); return new CombinedConstantData( - reader.readObject(Header), + reader.readObject(BlockHeader), reader.readObject(TxContext), Fr.fromBuffer(reader), Fr.fromBuffer(reader), @@ -114,7 +114,7 @@ export class CombinedConstantData { static fromFields(fields: Fr[] | FieldReader): CombinedConstantData { const reader = FieldReader.asReader(fields); return new CombinedConstantData( - reader.readObject(Header), + reader.readObject(BlockHeader), reader.readObject(TxContext), reader.readField(), reader.readField(), @@ -123,6 +123,6 @@ export class CombinedConstantData { } static empty() { - return new CombinedConstantData(Header.empty(), TxContext.empty(), Fr.ZERO, Fr.ZERO, GlobalVariables.empty()); + return new CombinedConstantData(BlockHeader.empty(), TxContext.empty(), Fr.ZERO, Fr.ZERO, GlobalVariables.empty()); } } diff --git a/yarn-project/circuits.js/src/structs/kernel/private_kernel_empty_inputs.ts b/yarn-project/circuits.js/src/structs/kernel/private_kernel_empty_inputs.ts index 9fe7957baa03..ad8e71572595 100644 --- a/yarn-project/circuits.js/src/structs/kernel/private_kernel_empty_inputs.ts +++ b/yarn-project/circuits.js/src/structs/kernel/private_kernel_empty_inputs.ts @@ -5,13 +5,13 @@ import { bufferToHex, hexToBuffer } from '@aztec/foundation/string'; import { type FieldsOf } from '@aztec/foundation/types'; import { RECURSIVE_PROOF_LENGTH } from '../../constants.gen.js'; -import { Header } from '../header.js'; +import { BlockHeader } from '../block_header.js'; import { RecursiveProof } from '../recursive_proof.js'; import { VerificationKeyAsFields } from '../verification_key.js'; export class PrivateKernelEmptyInputData { constructor( - public readonly header: Header, + public readonly header: BlockHeader, public readonly chainId: Fr, public readonly version: Fr, public readonly vkTreeRoot: Fr, @@ -29,7 +29,7 @@ export class PrivateKernelEmptyInputData { static fromBuffer(buf: Buffer) { const reader = BufferReader.asReader(buf); return new PrivateKernelEmptyInputData( - reader.readObject(Header), + reader.readObject(BlockHeader), reader.readObject(Fr), reader.readObject(Fr), reader.readObject(Fr), @@ -65,7 +65,7 @@ export class PrivateKernelEmptyInputData { export class PrivateKernelEmptyInputs { constructor( public readonly emptyNested: EmptyNestedData, - public readonly header: Header, + public readonly header: BlockHeader, public readonly chainId: Fr, public readonly version: Fr, public readonly vkTreeRoot: Fr, @@ -98,7 +98,7 @@ export class PrivateKernelEmptyInputs { const reader = BufferReader.asReader(buf); return new PrivateKernelEmptyInputs( reader.readObject(EmptyNestedData), - reader.readObject(Header), + reader.readObject(BlockHeader), reader.readObject(Fr), reader.readObject(Fr), reader.readObject(Fr), diff --git a/yarn-project/circuits.js/src/structs/kernel/tx_constant_data.ts b/yarn-project/circuits.js/src/structs/kernel/tx_constant_data.ts index 84e852be92cf..67549544e4a2 100644 --- a/yarn-project/circuits.js/src/structs/kernel/tx_constant_data.ts +++ b/yarn-project/circuits.js/src/structs/kernel/tx_constant_data.ts @@ -2,7 +2,7 @@ import { Fr 
} from '@aztec/foundation/fields'; import { BufferReader, FieldReader, serializeToBuffer } from '@aztec/foundation/serialize'; import { type FieldsOf } from '@aztec/foundation/types'; -import { Header } from '../header.js'; +import { BlockHeader } from '../block_header.js'; import { TxContext } from '../tx_context.js'; /** @@ -11,7 +11,7 @@ import { TxContext } from '../tx_context.js'; export class TxConstantData { constructor( /** Header of a block whose state is used during execution (not the block the transaction is included in). */ - public historicalHeader: Header, + public historicalHeader: BlockHeader, /** * Context of the transaction. * @@ -42,7 +42,7 @@ export class TxConstantData { static fromFields(fields: Fr[] | FieldReader): TxConstantData { const reader = FieldReader.asReader(fields); return new TxConstantData( - reader.readObject(Header), + reader.readObject(BlockHeader), reader.readObject(TxContext), reader.readField(), reader.readField(), @@ -52,7 +52,7 @@ export class TxConstantData { static fromBuffer(buffer: Buffer | BufferReader): TxConstantData { const reader = BufferReader.asReader(buffer); return new TxConstantData( - reader.readObject(Header), + reader.readObject(BlockHeader), reader.readObject(TxContext), Fr.fromBuffer(reader), Fr.fromBuffer(reader), @@ -64,7 +64,7 @@ export class TxConstantData { } static empty() { - return new TxConstantData(Header.empty(), TxContext.empty(), Fr.ZERO, Fr.ZERO); + return new TxConstantData(BlockHeader.empty(), TxContext.empty(), Fr.ZERO, Fr.ZERO); } getSize() { diff --git a/yarn-project/circuits.js/src/structs/private_circuit_public_inputs.ts b/yarn-project/circuits.js/src/structs/private_circuit_public_inputs.ts index c6de783a93a9..94c0041a9252 100644 --- a/yarn-project/circuits.js/src/structs/private_circuit_public_inputs.ts +++ b/yarn-project/circuits.js/src/structs/private_circuit_public_inputs.ts @@ -23,7 +23,7 @@ import { MAX_PRIVATE_LOGS_PER_CALL, PRIVATE_CIRCUIT_PUBLIC_INPUTS_LENGTH, } from '../constants.gen.js'; -import { Header } from '../structs/header.js'; +import { BlockHeader } from '../structs/block_header.js'; import { isEmptyArray } from '../utils/index.js'; import { CallContext } from './call_context.js'; import { KeyValidationRequestAndGenerator } from './key_validation_request_and_generator.js'; @@ -126,7 +126,7 @@ export class PrivateCircuitPublicInputs { /** * Header of a block whose state is used during private execution (not the block the transaction is included in). */ - public historicalHeader: Header, + public historicalHeader: BlockHeader, /** * Transaction context. 
* @@ -173,7 +173,7 @@ export class PrivateCircuitPublicInputs { reader.readArray(MAX_CONTRACT_CLASS_LOGS_PER_CALL, LogHash), reader.readObject(Fr), reader.readObject(Fr), - reader.readObject(Header), + reader.readObject(BlockHeader), reader.readObject(TxContext), ); } @@ -200,7 +200,7 @@ export class PrivateCircuitPublicInputs { reader.readArray(MAX_CONTRACT_CLASS_LOGS_PER_CALL, LogHash), reader.readField(), reader.readField(), - reader.readObject(Header), + reader.readObject(BlockHeader), reader.readObject(TxContext), ); } @@ -230,7 +230,7 @@ export class PrivateCircuitPublicInputs { makeTuple(MAX_CONTRACT_CLASS_LOGS_PER_CALL, LogHash.empty), Fr.ZERO, Fr.ZERO, - Header.empty(), + BlockHeader.empty(), TxContext.empty(), ); } diff --git a/yarn-project/circuits.js/src/structs/public_circuit_public_inputs.ts b/yarn-project/circuits.js/src/structs/public_circuit_public_inputs.ts index 46437e0da402..9834e2f4cb93 100644 --- a/yarn-project/circuits.js/src/structs/public_circuit_public_inputs.ts +++ b/yarn-project/circuits.js/src/structs/public_circuit_public_inputs.ts @@ -27,12 +27,12 @@ import { PUBLIC_CIRCUIT_PUBLIC_INPUTS_LENGTH, } from '../constants.gen.js'; import { isEmptyArray } from '../utils/index.js'; +import { BlockHeader } from './block_header.js'; import { CallContext } from './call_context.js'; import { ContractStorageRead } from './contract_storage_read.js'; import { ContractStorageUpdateRequest } from './contract_storage_update_request.js'; import { Gas } from './gas.js'; import { GlobalVariables } from './global_variables.js'; -import { Header } from './header.js'; import { L2ToL1Message } from './l2_to_l1_message.js'; import { LogHash } from './log_hash.js'; import { NoteHash } from './note_hash.js'; @@ -121,7 +121,7 @@ export class PublicCircuitPublicInputs { * Header of a block whose state is used during public execution. Set by sequencer to be a header of a block * previous to the one in which the tx is included. */ - public historicalHeader: Header, + public historicalHeader: BlockHeader, /** Global variables for the block. 
*/ public globalVariables: GlobalVariables, /** @@ -175,7 +175,7 @@ export class PublicCircuitPublicInputs { Fr.ZERO, Fr.ZERO, makeTuple(MAX_UNENCRYPTED_LOGS_PER_CALL, LogHash.empty), - Header.empty(), + BlockHeader.empty(), GlobalVariables.empty(), AztecAddress.ZERO, RevertCode.OK, @@ -287,7 +287,7 @@ export class PublicCircuitPublicInputs { reader.readObject(Fr), reader.readObject(Fr), reader.readArray(MAX_UNENCRYPTED_LOGS_PER_CALL, LogHash), - reader.readObject(Header), + reader.readObject(BlockHeader), reader.readObject(GlobalVariables), reader.readObject(AztecAddress), reader.readObject(RevertCode), @@ -317,7 +317,7 @@ export class PublicCircuitPublicInputs { reader.readField(), reader.readField(), reader.readArray(MAX_UNENCRYPTED_LOGS_PER_CALL, LogHash), - Header.fromFields(reader), + BlockHeader.fromFields(reader), GlobalVariables.fromFields(reader), AztecAddress.fromFields(reader), RevertCode.fromFields(reader), diff --git a/yarn-project/circuits.js/src/tests/factories.ts b/yarn-project/circuits.js/src/tests/factories.ts index 79d5d63d2c6e..52bbcd9b5355 100644 --- a/yarn-project/circuits.js/src/tests/factories.ts +++ b/yarn-project/circuits.js/src/tests/factories.ts @@ -117,12 +117,12 @@ import { computePublicBytecodeCommitment, makeRecursiveProof, } from '../index.js'; +import { BlockHeader } from '../structs/block_header.js'; import { ContentCommitment, NUM_BYTES_PER_SHA256 } from '../structs/content_commitment.js'; import { Gas } from '../structs/gas.js'; import { GasFees } from '../structs/gas_fees.js'; import { GasSettings } from '../structs/gas_settings.js'; import { GlobalVariables } from '../structs/global_variables.js'; -import { Header } from '../structs/header.js'; import { AvmAccumulatedData, AvmAppendTreeHint, @@ -879,8 +879,8 @@ export function makeHeader( blockNumber: number | undefined = undefined, slotNumber: number | undefined = undefined, txsEffectsHash: Buffer | undefined = undefined, -): Header { - return new Header( +): BlockHeader { + return new BlockHeader( makeAppendOnlyTreeSnapshot(seed + 0x100), makeContentCommitment(seed + 0x200, txsEffectsHash), makeStateReference(seed + 0x600), @@ -1296,6 +1296,7 @@ export function makeAvmBytecodeHints(seed = 0): AvmContractBytecodeHints { instance.contractClassId, instance.initializationHash, instance.publicKeys, + makeAvmNullifierReadTreeHints(seed + 0x2000), ); const publicBytecodeCommitment = computePublicBytecodeCommitment(packedBytecode); @@ -1366,6 +1367,7 @@ export function makeAvmContractInstanceHint(seed = 0): AvmContractInstanceHint { new Point(new Fr(seed + 0x10), new Fr(seed + 0x11), false), new Point(new Fr(seed + 0x12), new Fr(seed + 0x13), false), ), + makeAvmNullifierReadTreeHints(seed + 0x1000), ); } @@ -1398,13 +1400,13 @@ export function makeAvmExecutionHints( externalCalls: makeVector(baseLength + 4, makeAvmExternalCallHint, seed + 0x4600), contractInstances: makeVector(baseLength + 5, makeAvmContractInstanceHint, seed + 0x4700), contractBytecodeHints: makeVector(baseLength + 6, makeAvmBytecodeHints, seed + 0x4800), - storageReadRequest: makeVector(baseLength + 7, makeAvmStorageReadTreeHints, seed + 0x4900), - storageUpdateRequest: makeVector(baseLength + 8, makeAvmStorageUpdateTreeHints, seed + 0x4a00), - nullifierReadRequest: makeVector(baseLength + 9, makeAvmNullifierReadTreeHints, seed + 0x4b00), - nullifierWriteHints: makeVector(baseLength + 10, makeAvmNullifierInsertionTreeHints, seed + 0x4c00), - noteHashReadRequest: makeVector(baseLength + 11, makeAvmTreeHints, seed + 0x4d00), - 
noteHashWriteRequest: makeVector(baseLength + 12, makeAvmTreeHints, seed + 0x4e00), - l1ToL2MessageReadRequest: makeVector(baseLength + 13, makeAvmTreeHints, seed + 0x4f00), + publicDataReads: makeVector(baseLength + 7, makeAvmStorageReadTreeHints, seed + 0x4900), + publicDataWrites: makeVector(baseLength + 8, makeAvmStorageUpdateTreeHints, seed + 0x4a00), + nullifierReads: makeVector(baseLength + 9, makeAvmNullifierReadTreeHints, seed + 0x4b00), + nullifierWrites: makeVector(baseLength + 10, makeAvmNullifierInsertionTreeHints, seed + 0x4c00), + noteHashReads: makeVector(baseLength + 11, makeAvmTreeHints, seed + 0x4d00), + noteHashWrites: makeVector(baseLength + 12, makeAvmTreeHints, seed + 0x4e00), + l1ToL2MessageReads: makeVector(baseLength + 13, makeAvmTreeHints, seed + 0x4f00), ...overrides, }); }
diff --git a/yarn-project/cli-wallet/package.json b/yarn-project/cli-wallet/package.json index e8fe9b83dfb5..6706f3273b31 100644 --- a/yarn-project/cli-wallet/package.json +++ b/yarn-project/cli-wallet/package.json @@ -19,7 +19,7 @@ "scripts": { "start": "node --no-warnings ./dest/bin", "start:debug": "node --inspect=0.0.0.0:9221 --no-warnings ./dest/bin", - "dev": "DEBUG='aztec:*' LOG_LEVEL=debug && node ./dest/bin", + "dev": "LOG_LEVEL=debug node ./dest/bin", "build": "yarn clean && tsc -b", "build:dev": "tsc -b --watch", "clean": "rm -rf ./dest .tsbuildinfo", @@ -75,6 +75,7 @@ "@aztec/foundation": "workspace:^", "@aztec/kv-store": "workspace:^", "@aztec/noir-contracts.js": "workspace:^", + "@aztec/pxe": "workspace:^", "commander": "^12.1.0", "inquirer": "^10.1.8", "source-map-support": "^0.5.21",
diff --git a/yarn-project/cli-wallet/src/bin/index.ts b/yarn-project/cli-wallet/src/bin/index.ts index 638c800d1907..8d70d048bf97 100644 --- a/yarn-project/cli-wallet/src/bin/index.ts +++ b/yarn-project/cli-wallet/src/bin/index.ts @@ -1,14 +1,17 @@ import { Fr, computeSecretHash, fileURLToPath } from '@aztec/aztec.js'; +import { LOCALHOST } from '@aztec/cli/cli-utils'; import { type LogFn, createConsoleLogger, createDebugLogger } from '@aztec/foundation/log'; import { AztecLmdbStore } from '@aztec/kv-store/lmdb'; +import { type PXEService } from '@aztec/pxe'; -import { Argument, Command } from 'commander'; +import { Argument, Command, Option } from 'commander'; import { readFileSync } from 'fs'; -import { dirname, resolve } from 'path'; +import { dirname, join, resolve } from 'path'; import { injectCommands } from '../cmds/index.js'; import { Aliases, WalletDB } from '../storage/wallet_db.js'; import { createAliasOption } from '../utils/options/index.js'; +import { PXEWrapper } from '../utils/pxe_wrapper.js'; const userLog = createConsoleLogger(); const debugLogger = createDebugLogger('aztec:wallet'); @@ -66,18 +69,39 @@ async function main() { const walletVersion: string = JSON.parse(readFileSync(packageJsonPath).toString()).version; const db = WalletDB.getInstance(); + const pxeWrapper = new PXEWrapper(); const program = new Command('wallet'); program .description('Aztec wallet') .version(walletVersion) .option('-d, --data-dir <string>', 'Storage directory for wallet data', WALLET_DATA_DIRECTORY) - .hook('preSubcommand', command => { - const dataDir = command.optsWithGlobals().dataDir; + .addOption( + new Option('--remote-pxe', 'Connect to an external PXE RPC server, instead of the local one') + .env('REMOTE_PXE') + .default(false) + .conflicts('rpc-url'), + ) + .addOption( + new Option('-n, --node-url <string>', 'URL of the Aztec node to connect to') + .env('AZTEC_NODE_URL')
+ .default(`http://${LOCALHOST}:8080`), + ) + .hook('preSubcommand', async command => { + const { dataDir, remotePxe, nodeUrl } = command.optsWithGlobals(); + if (!remotePxe) { + debugLogger.info('Using local PXE service'); + await pxeWrapper.init(nodeUrl, join(dataDir, 'pxe')); + } db.init(AztecLmdbStore.open(dataDir)); + }) + .hook('postAction', async () => { + if (pxeWrapper.getPXE()) { + await (pxeWrapper.getPXE() as PXEService).stop(); + } }); - injectCommands(program, userLog, debugLogger, db); + injectCommands(program, userLog, debugLogger, db, pxeWrapper); injectInternalCommands(program, userLog, db); await program.parseAsync(process.argv); }
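
The new wallet flags above lean on three commander `Option` features at once: an environment-variable fallback, a default value, and mutual exclusion with another flag. A small self-contained sketch of the same combination (an illustrative program, not the wallet code):

```typescript
// Sketch: env fallback + default + conflicts, as used for --remote-pxe above.
import { Command, Option } from 'commander';

const program = new Command('demo');
program.addOption(
  new Option('--remote-pxe', 'Use an external PXE')
    .env('REMOTE_PXE')     // picked up from the environment when the flag is absent
    .default(false)
    .conflicts('rpc-url'), // error out if --rpc-url is also given
);
program.addOption(new Option('--rpc-url <string>', 'PXE RPC endpoint'));
program.parse(process.argv);
console.log(program.opts());
```
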
-import { Fr, PublicKeys } from '@aztec/circuits.js'; +import { PublicKeys } from '@aztec/circuits.js'; import { ETHEREUM_HOST, PRIVATE_KEY, @@ -39,8 +39,15 @@ import { integerArgParser, parsePaymentMethod, } from '../utils/options/index.js'; - -export function injectCommands(program: Command, log: LogFn, debugLogger: DebugLogger, db?: WalletDB) { +import { type PXEWrapper } from '../utils/pxe_wrapper.js'; + +export function injectCommands( + program: Command, + log: LogFn, + debugLogger: DebugLogger, + db?: WalletDB, + pxeWrapper?: PXEWrapper, +) { const createAccountCommand = program .command('create-account') .description( @@ -91,7 +98,7 @@ ]); publicKey = answers.identity.split(' ')[1]; } - const client = await createCompatibleClient(rpcUrl, debugLogger); + const client = pxeWrapper?.getPXE() ?? (await createCompatibleClient(rpcUrl, debugLogger)); const accountCreationResult = await createAccount( client, type, @@ -128,7 +135,7 @@ const options = command.optsWithGlobals(); const { rpcUrl, wait, from: parsedFromAddress, json } = options; - const client = await createCompatibleClient(rpcUrl, debugLogger); + const client = pxeWrapper?.getPXE() ?? (await createCompatibleClient(rpcUrl, debugLogger)); const account = await createOrRetrieveAccount(client, parsedFromAddress, db); await deployAccount(account, wait, await FeeOpts.fromCli(options, client, log, db), json, debugLogger, log); @@ -158,7 +165,6 @@ ) .addOption(createAccountOption('Alias or address of the account to deploy from', !db, db)) .addOption(createAliasOption('Alias for the contract. Used for easy reference in subsequent commands.', !db)) - .addOption(createTypeOption(false)) .option('--json', 'Emit output as json') // `options.wait` is default true. Passing `--no-wait` will set it to false. // https://github.com/tj/commander.js#other-option-types-negatable-boolean-and-booleanvalue @@ -183,10 +189,9 @@ rpcUrl, from: parsedFromAddress, alias, - type, } = options; - const client = await createCompatibleClient(rpcUrl, debugLogger); - const account = await createOrRetrieveAccount(client, parsedFromAddress, db, type, secretKey, Fr.ZERO, publicKey); + const client = pxeWrapper?.getPXE() ?? (await createCompatibleClient(rpcUrl, debugLogger)); + const account = await createOrRetrieveAccount(client, parsedFromAddress, db, secretKey); const wallet = await getWalletWithScopes(account, db); const artifactPath = await artifactPathPromise; @@ -231,7 +236,6 @@ createSecretKeyOption("The sender's secret key", !db, sk => aliasedSecretKeyParser(sk, db)).conflicts('account'), ) .addOption(createAccountOption('Alias or address of the account to send the transaction from', !db, db)) - .addOption(createTypeOption(false)) .option('--no-wait', 'Print transaction hash without waiting for it to be mined') .option('--no-cancel', 'Do not allow the transaction to be cancelled. 
This makes for cheaper transactions.'); @@ -245,14 +249,12 @@ export function injectCommands(program: Command, log: LogFn, debugLogger: DebugL from: parsedFromAddress, wait, rpcUrl, - type, secretKey, - publicKey, alias, cancel, } = options; - const client = await createCompatibleClient(rpcUrl, debugLogger); - const account = await createOrRetrieveAccount(client, parsedFromAddress, db, type, secretKey, Fr.ZERO, publicKey); + const client = pxeWrapper?.getPXE() ?? (await createCompatibleClient(rpcUrl, debugLogger)); + const account = await createOrRetrieveAccount(client, parsedFromAddress, db, secretKey); const wallet = await getWalletWithScopes(account, db); const artifactPath = await artifactPathFromPromiseOrAlias(artifactPathPromise, contractAddress, db); @@ -287,7 +289,6 @@ export function injectCommands(program: Command, log: LogFn, debugLogger: DebugL createSecretKeyOption("The sender's secret key", !db, sk => aliasedSecretKeyParser(sk, db)).conflicts('account'), ) .addOption(createAccountOption('Alias or address of the account to simulate from', !db, db)) - .addOption(createTypeOption(false)) .addOption(createProfileOption()) .action(async (functionName, _options, command) => { const { simulate } = await import('./simulate.js'); @@ -298,14 +299,12 @@ export function injectCommands(program: Command, log: LogFn, debugLogger: DebugL contractAddress, from: parsedFromAddress, rpcUrl, - type, secretKey, - publicKey, profile, } = options; - const client = await createCompatibleClient(rpcUrl, debugLogger); - const account = await createOrRetrieveAccount(client, parsedFromAddress, db, type, secretKey, Fr.ZERO, publicKey); + const client = pxeWrapper?.getPXE() ?? (await createCompatibleClient(rpcUrl, debugLogger)); + const account = await createOrRetrieveAccount(client, parsedFromAddress, db, secretKey); const wallet = await getWalletWithScopes(account, db); const artifactPath = await artifactPathFromPromiseOrAlias(artifactPathPromise, contractAddress, db); await simulate(wallet, functionName, args, artifactPath, contractAddress, profile, log); @@ -344,10 +343,12 @@ export function injectCommands(program: Command, log: LogFn, debugLogger: DebugL .action(async (amount, recipient, options) => { const { bridgeL1FeeJuice } = await import('./bridge_fee_juice.js'); const { rpcUrl, l1RpcUrl, l1ChainId, l1PrivateKey, mnemonic, mint, json, wait, interval: intervalS } = options; + const client = pxeWrapper?.getPXE() ?? (await createCompatibleClient(rpcUrl, debugLogger)); + const [secret, messageLeafIndex] = await bridgeL1FeeJuice( amount, recipient, - rpcUrl, + client, l1RpcUrl, l1ChainId, l1PrivateKey, @@ -404,7 +405,7 @@ export function injectCommands(program: Command, log: LogFn, debugLogger: DebugL transactionHash, } = options; const artifactPath = await artifactPathFromPromiseOrAlias(artifactPathPromise, contractAddress, db); - const client = await createCompatibleClient(rpcUrl, debugLogger); + const client = pxeWrapper?.getPXE() ?? 
(await createCompatibleClient(rpcUrl, debugLogger)); const account = await createOrRetrieveAccount(client, address, db, undefined, secretKey); const wallet = await getWalletWithScopes(account, db); @@ -438,7 +439,6 @@ export function injectCommands(program: Command, log: LogFn, debugLogger: DebugL createSecretKeyOption("The sender's secret key", !db, sk => aliasedSecretKeyParser(sk, db)).conflicts('account'), ) .addOption(createAccountOption('Alias or address of the account to simulate from', !db, db)) - .addOption(createTypeOption(false)) .addOption( createAliasOption('Alias for the authorization witness. Used for easy reference in subsequent commands.', !db), ) @@ -451,14 +451,12 @@ export function injectCommands(program: Command, log: LogFn, debugLogger: DebugL contractAddress, from: parsedFromAddress, rpcUrl, - type, secretKey, - publicKey, alias, } = options; - const client = await createCompatibleClient(rpcUrl, debugLogger); - const account = await createOrRetrieveAccount(client, parsedFromAddress, db, type, secretKey, Fr.ZERO, publicKey); + const client = pxeWrapper?.getPXE() ?? (await createCompatibleClient(rpcUrl, debugLogger)); + const account = await createOrRetrieveAccount(client, parsedFromAddress, db, secretKey); const wallet = await getWalletWithScopes(account, db); const artifactPath = await artifactPathFromPromiseOrAlias(artifactPathPromise, contractAddress, db); const witness = await createAuthwit(wallet, functionName, caller, args, artifactPath, contractAddress, log); @@ -485,7 +483,6 @@ export function injectCommands(program: Command, log: LogFn, debugLogger: DebugL createSecretKeyOption("The sender's secret key", !db, sk => aliasedSecretKeyParser(sk, db)).conflicts('account'), ) .addOption(createAccountOption('Alias or address of the account to simulate from', !db, db)) - .addOption(createTypeOption(false)) .action(async (functionName, caller, _options, command) => { const { authorizeAction } = await import('./authorize_action.js'); const options = command.optsWithGlobals(); @@ -495,13 +492,11 @@ export function injectCommands(program: Command, log: LogFn, debugLogger: DebugL contractAddress, from: parsedFromAddress, rpcUrl, - type, secretKey, - publicKey, } = options; - const client = await createCompatibleClient(rpcUrl, debugLogger); - const account = await createOrRetrieveAccount(client, parsedFromAddress, db, type, secretKey, Fr.ZERO, publicKey); + const client = pxeWrapper?.getPXE() ?? (await createCompatibleClient(rpcUrl, debugLogger)); + const account = await createOrRetrieveAccount(client, parsedFromAddress, db, secretKey); const wallet = await getWalletWithScopes(account, db); const artifactPath = await artifactPathFromPromiseOrAlias(artifactPathPromise, contractAddress, db); await authorizeAction(wallet, functionName, caller, args, artifactPath, contractAddress, log); @@ -521,17 +516,16 @@ export function injectCommands(program: Command, log: LogFn, debugLogger: DebugL createSecretKeyOption("The sender's secret key", !db, sk => aliasedSecretKeyParser(sk, db)).conflicts('account'), ) .addOption(createAccountOption('Alias or address of the account to simulate from', !db, db)) - .addOption(createTypeOption(false)) .addOption( createAliasOption('Alias for the authorization witness. 
Used for easy reference in subsequent commands.', !db), ) .action(async (authwit, authorizer, _options, command) => { const { addAuthwit } = await import('./add_authwit.js'); const options = command.optsWithGlobals(); - const { from: parsedFromAddress, rpcUrl, type, secretKey, publicKey } = options; + const { from: parsedFromAddress, rpcUrl, secretKey } = options; - const client = await createCompatibleClient(rpcUrl, debugLogger); - const account = await createOrRetrieveAccount(client, parsedFromAddress, db, type, secretKey, Fr.ZERO, publicKey); + const client = pxeWrapper?.getPXE() ?? (await createCompatibleClient(rpcUrl, debugLogger)); + const account = await createOrRetrieveAccount(client, parsedFromAddress, db, secretKey); const wallet = await getWalletWithScopes(account, db); await addAuthwit(wallet, authwit, authorizer, log); await addScopeToWallet(wallet, authorizer, db); @@ -553,7 +547,7 @@ export function injectCommands(program: Command, log: LogFn, debugLogger: DebugL const { checkTx } = await import('./check_tx.js'); const { rpcUrl, pageSize } = options; let { page } = options; - const client = await createCompatibleClient(rpcUrl, debugLogger); + const client = pxeWrapper?.getPXE() ?? (await createCompatibleClient(rpcUrl, debugLogger)); if (txHash) { await checkTx(client, txHash, false, log); @@ -592,13 +586,12 @@ export function injectCommands(program: Command, log: LogFn, debugLogger: DebugL createSecretKeyOption("The sender's secret key", !db, sk => aliasedSecretKeyParser(sk, db)).conflicts('account'), ) .addOption(createAccountOption('Alias or address of the account to simulate from', !db, db)) - .addOption(createTypeOption(false)) .addOption(FeeOpts.paymentMethodOption().default('method=none')) .action(async (txHash, options) => { const { cancelTx } = await import('./cancel_tx.js'); - const { from: parsedFromAddress, rpcUrl, type, secretKey, publicKey, payment } = options; - const client = await createCompatibleClient(rpcUrl, debugLogger); - const account = await createOrRetrieveAccount(client, parsedFromAddress, db, type, secretKey, Fr.ZERO, publicKey); + const { from: parsedFromAddress, rpcUrl, secretKey, payment } = options; + const client = pxeWrapper?.getPXE() ?? (await createCompatibleClient(rpcUrl, debugLogger)); + const account = await createOrRetrieveAccount(client, parsedFromAddress, db, secretKey); const wallet = await getWalletWithScopes(account, db); const txData = db?.retrieveTxData(txHash); @@ -611,5 +604,91 @@ export function injectCommands(program: Command, log: LogFn, debugLogger: DebugL await cancelTx(wallet, txData, paymentMethod, log); }); + program + .command('register-contact') + .description( + "Registers a contact's address in the wallet, so the note synching process will look for notes sent by them", + ) + .argument('[address]', 'The address of the contact to register', address => + aliasedAddressParser('accounts', address, db), + ) + .addOption(pxeOption) + .addOption(createAccountOption('Alias or address of the account to simulate from', !db, db)) + .addOption(createAliasOption('Alias for the contact. Used for easy reference in subsequent commands.', !db)) + .action(async (address, options) => { + const { registerContact } = await import('./register_contact.js'); + const { from: parsedFromAddress, rpcUrl, secretKey, alias } = options; + const client = pxeWrapper?.getPXE() ?? 
(await createCompatibleClient(rpcUrl, debugLogger)); + const account = await createOrRetrieveAccount(client, parsedFromAddress, db, secretKey); + const wallet = await getWalletWithScopes(account, db); + + await registerContact(wallet, address, log); + + if (db && alias) { + await db.storeContact(address, alias, log); + } + }); + + program + .command('register-contract') + .description("Registers a contract in this wallet's PXE") + .argument('[address]', 'The address of the contract to register', address => + aliasedAddressParser('accounts', address, db), + ) + .argument('[artifact]', ARTIFACT_DESCRIPTION, artifactPathParser) + .option('--init <string>', 'The contract initializer function to call', 'constructor') + .option( + '-k, --public-key <string>', + 'Optional encryption public key for this address. Set this value only if this contract is expected to receive private notes, which will be encrypted using this public key.', + parsePublicKey, + ) + .option( + '-s, --salt <string>', + 'Optional deployment salt as a hex string for generating the deployment address.', + parseFieldFromHexString, + ) + .option('--deployer <string>', 'The address of the account that deployed the contract', address => + aliasedAddressParser('accounts', address, db), + ) + .addOption(createArgsOption(true, db)) + .addOption(pxeOption) + .addOption(createAccountOption('Alias or address of the account to simulate from', !db, db)) + .addOption(createAliasOption('Alias for the contract. Used for easy reference in subsequent commands.', !db)) + .action(async (address, artifactPathPromise, _options, command) => { + const { registerContract } = await import('./register_contract.js'); + const { + from: parsedFromAddress, + rpcUrl, + secretKey, + alias, + init, + publicKey, + salt, + deployer, + args, + } = command.optsWithGlobals(); + const client = pxeWrapper?.getPXE() ?? (await createCompatibleClient(rpcUrl, debugLogger)); + const account = await createOrRetrieveAccount(client, parsedFromAddress, db, secretKey); + const wallet = await getWalletWithScopes(account, db); + + const artifactPath = await artifactPathPromise; + + const instance = await registerContract( + wallet, + address, + artifactPath, + init, + publicKey ? 
PublicKeys.fromString(publicKey) : undefined, + args, + salt, + deployer, + log, + ); + + if (db && alias) { + await db.storeContract(instance.address, artifactPath, log, alias); + } + }); + return program; } diff --git a/yarn-project/cli-wallet/src/cmds/register_contact.ts b/yarn-project/cli-wallet/src/cmds/register_contact.ts new file mode 100644 index 000000000000..8a3c9ea9d6f6 --- /dev/null +++ b/yarn-project/cli-wallet/src/cmds/register_contact.ts @@ -0,0 +1,7 @@ +import { type AccountWalletWithSecretKey, type AztecAddress } from '@aztec/aztec.js'; +import { type LogFn } from '@aztec/foundation/log'; + +export async function registerContact(wallet: AccountWalletWithSecretKey, address: AztecAddress, log: LogFn) { + await wallet.registerContact(address); + log(`Contact registered: ${address}`); +} diff --git a/yarn-project/cli-wallet/src/cmds/register_contract.ts b/yarn-project/cli-wallet/src/cmds/register_contract.ts new file mode 100644 index 000000000000..530d8f238f5a --- /dev/null +++ b/yarn-project/cli-wallet/src/cmds/register_contract.ts @@ -0,0 +1,38 @@ +import { + type AccountWalletWithSecretKey, + type AztecAddress, + type Fr, + PublicKeys, + getContractInstanceFromDeployParams, +} from '@aztec/aztec.js'; +import { getContractArtifact } from '@aztec/cli/cli-utils'; +import { getInitializer } from '@aztec/foundation/abi'; +import { type LogFn } from '@aztec/foundation/log'; + +export async function registerContract( + wallet: AccountWalletWithSecretKey, + address: AztecAddress, + artifactPath: string, + initializer: string, + publicKeys: PublicKeys | undefined, + rawArgs: any[], + salt: Fr, + deployer: AztecAddress | undefined, + log: LogFn, +) { + const contractArtifact = await getContractArtifact(artifactPath, log); + const constructorArtifact = getInitializer(contractArtifact, initializer); + const contractInstance = getContractInstanceFromDeployParams(contractArtifact, { + constructorArtifact, + publicKeys: publicKeys ?? PublicKeys.default(), + constructorArgs: rawArgs, + salt, + deployer, + }); + if (!contractInstance.address.equals(address)) { + throw new Error(`Contract address mismatch: expected ${address}, got ${contractInstance.address}`); + } + await wallet.registerContract({ instance: contractInstance, artifact: contractArtifact }); + log(`Contract registered at ${contractInstance.address}`); + return contractInstance; +} diff --git a/yarn-project/cli-wallet/src/storage/wallet_db.ts b/yarn-project/cli-wallet/src/storage/wallet_db.ts index aeaf2f40cf42..3bef8ed01531 100644 --- a/yarn-project/cli-wallet/src/storage/wallet_db.ts +++ b/yarn-project/cli-wallet/src/storage/wallet_db.ts @@ -82,6 +82,11 @@ export class WalletDB { log(`Account stored in database with alias${alias ? 
`es last & ${alias}` : ' last'}`); } + async storeContact(address: AztecAddress, alias: string, log: LogFn) { + await this.#aliases.set(`accounts:${alias}`, Buffer.from(address.toString())); + log(`Contact stored in database with alias ${alias}`); + } + async storeContract(address: AztecAddress, artifactPath: string, log: LogFn, alias?: string) { if (alias) { await this.#aliases.set(`contracts:${alias}`, Buffer.from(address.toString())); diff --git a/yarn-project/cli-wallet/src/utils/accounts.ts b/yarn-project/cli-wallet/src/utils/accounts.ts index c0b9bd910e40..8dbca3df63bb 100644 --- a/yarn-project/cli-wallet/src/utils/accounts.ts +++ b/yarn-project/cli-wallet/src/utils/accounts.ts @@ -15,8 +15,8 @@ export async function createOrRetrieveAccount( pxe: PXE, address?: AztecAddress, db?: WalletDB, - type: AccountType = 'schnorr', secretKey?: Fr, + type: AccountType = 'schnorr', salt?: Fr, publicKey?: string | undefined, ) { diff --git a/yarn-project/cli-wallet/src/utils/pxe_wrapper.ts b/yarn-project/cli-wallet/src/utils/pxe_wrapper.ts new file mode 100644 index 000000000000..4fcf026373ab --- /dev/null +++ b/yarn-project/cli-wallet/src/utils/pxe_wrapper.ts @@ -0,0 +1,21 @@ +import { type PXE, createAztecNodeClient } from '@aztec/circuit-types'; +import { createPXEService, getPXEServiceConfig } from '@aztec/pxe'; + +/* + * Wrapper class for PXE service, avoids initialization issues due to + * closures when providing PXE service to injected commander.js commands + */ +export class PXEWrapper { + private static pxe: PXE | undefined; + + getPXE(): PXE | undefined { + return PXEWrapper.pxe; + } + + async init(nodeUrl: string, dataDir: string) { + const aztecNode = createAztecNodeClient(nodeUrl); + const pxeConfig = getPXEServiceConfig(); + pxeConfig.dataDirectory = dataDir; + PXEWrapper.pxe = await createPXEService(aztecNode, pxeConfig); + } +} diff --git a/yarn-project/cli-wallet/test/test.sh b/yarn-project/cli-wallet/test/test.sh index 58e68ad29259..c0c7255782bb 100755 --- a/yarn-project/cli-wallet/test/test.sh +++ b/yarn-project/cli-wallet/test/test.sh @@ -17,6 +17,10 @@ while [[ $# -gt 0 ]]; do FILTER="$2" shift 2 ;; + -r|--remote-pxe) + REMOTE_PXE="1" + shift + ;; -*|--*) echo "Unknown option $1" exit 1 @@ -37,6 +41,11 @@ mkdir -p $WALLET_DATA_DIRECTORY COMMAND="node --no-warnings $(realpath ../dest/bin/index.js)" +if [ "${REMOTE_PXE:-}" = "1" ]; then + echo "Using remote PXE" + export REMOTE_PXE="1" +fi + if [ "${USE_DOCKER:-}" = "1" ]; then echo "Using docker" COMMAND="aztec-wallet" diff --git a/yarn-project/cli-wallet/tsconfig.json b/yarn-project/cli-wallet/tsconfig.json index 66251395644d..8d99fdc9b021 100644 --- a/yarn-project/cli-wallet/tsconfig.json +++ b/yarn-project/cli-wallet/tsconfig.json @@ -32,6 +32,9 @@ }, { "path": "../noir-contracts.js" + }, + { + "path": "../pxe" } ], "include": ["src"] diff --git a/yarn-project/cli/src/cmds/devnet/bootstrap_network.ts b/yarn-project/cli/src/cmds/devnet/bootstrap_network.ts index de452290aad0..12b283cbdfd5 100644 --- a/yarn-project/cli/src/cmds/devnet/bootstrap_network.ts +++ b/yarn-project/cli/src/cmds/devnet/bootstrap_network.ts @@ -1,5 +1,5 @@ import { getSchnorrAccount } from '@aztec/accounts/schnorr'; -import { BatchCall, type PXE, type Wallet, createCompatibleClient, retryUntil } from '@aztec/aztec.js'; +import { BatchCall, type PXE, type WaitOpts, type Wallet, createCompatibleClient, retryUntil } from '@aztec/aztec.js'; import { L1FeeJuicePortalManager } from '@aztec/aztec.js'; import { type AztecAddress, type 
EthAddress, FEE_FUNDING_FOR_TESTER_ACCOUNT, Fq, Fr } from '@aztec/circuits.js'; import { @@ -20,6 +20,12 @@ type ContractDeploymentInfo = { salt: Fr; }; +const waitOpts: WaitOpts = { + timeout: 120, + provenTimeout: 1200, + interval: 1, +}; + export async function bootstrapNetwork( pxeUrl: string, l1Url: string, @@ -90,9 +96,17 @@ export async function bootstrapNetwork( log(`DevCoin L1: ${erc20Address}`); log(`DevCoin L1 Portal: ${portalAddress}`); log(`DevCoin L2: ${token.address}`); + log(`DevCoin L2 init hash: ${token.initHash}`); + log(`DevCoin L2 salt: ${token.salt}`); log(`DevCoin L2 Bridge: ${bridge.address}`); + log(`DevCoin L2 Bridge init hash: ${bridge.initHash}`); + log(`DevCoin L2 Bridge salt: ${bridge.salt}`); log(`DevCoin FPC: ${fpc.address}`); + log(`DevCoin FPC init hash: ${fpc.initHash}`); + log(`DevCoin FPC salt: ${fpc.salt}`); log(`Counter: ${counter.address}`); + log(`Counter init hash: ${counter.initHash}`); + log(`Counter salt: ${counter.salt}`); } } @@ -116,6 +130,7 @@ async function deployERC20({ walletClient, publicClient }: L1Clients) { publicClient, erc20.contractAbi, erc20.contractBytecode, + ['DevCoin', 'DEV', walletClient.account.address], ); const { address: portalAddress } = await deployL1Contract( walletClient, @@ -142,17 +157,17 @@ async function deployToken( const { TokenContract, TokenBridgeContract } = await import('@aztec/noir-contracts.js'); const devCoin = await TokenContract.deploy(wallet, wallet.getAddress(), 'DevCoin', 'DEV', 18) .send({ universalDeploy: true }) - .deployed(); + .deployed(waitOpts); const bridge = await TokenBridgeContract.deploy(wallet, devCoin.address, l1Portal) .send({ universalDeploy: true }) - .deployed(); + .deployed(waitOpts); await new BatchCall(wallet, [ devCoin.methods.set_minter(bridge.address, true).request(), devCoin.methods.set_admin(bridge.address).request(), ]) .send() - .wait(); + .wait(waitOpts); return { token: { @@ -202,7 +217,9 @@ async function deployFPC( // eslint-disable-next-line @typescript-eslint/ban-ts-comment // @ts-ignore - Importing noir-contracts.js even in devDeps results in a circular dependency error. 
Need to ignore because this line doesn't cause an error in a dev environment const { FPCContract } = await import('@aztec/noir-contracts.js'); - const fpc = await FPCContract.deploy(wallet, tokenAddress, feeRecipient).send({ universalDeploy: true }).deployed(); + const fpc = await FPCContract.deploy(wallet, tokenAddress, feeRecipient) + .send({ universalDeploy: true }) + .deployed(waitOpts); const info: ContractDeploymentInfo = { address: fpc.address, initHash: fpc.instance.initializationHash, @@ -217,7 +234,7 @@ async function deployCounter(wallet: Wallet): Promise<ContractDeploymentInfo> { const { CounterContract } = await import('@aztec/noir-contracts.js'); const counter = await CounterContract.deploy(wallet, 1, wallet.getAddress(), wallet.getAddress()) .send({ universalDeploy: true }) - .deployed(); + .deployed(waitOpts); const info: ContractDeploymentInfo = { address: counter.address, initHash: counter.instance.initializationHash, @@ -263,11 +280,11 @@ async function fundFPC( // TODO (alexg) remove this once sequencer builds blocks continuously // advance the chain - await counter.methods.increment(wallet.getAddress(), wallet.getAddress()).send().wait(); - await counter.methods.increment(wallet.getAddress(), wallet.getAddress()).send().wait(); + await counter.methods.increment(wallet.getAddress(), wallet.getAddress()).send().wait(waitOpts); + await counter.methods.increment(wallet.getAddress(), wallet.getAddress()).send().wait(waitOpts); await feeJuiceContract.methods .claim(fpcAddress, claimAmount, claimSecret, messageLeafIndex) .send() - .wait({ proven: true }); + .wait({ ...waitOpts, proven: true }); } diff --git a/yarn-project/cli/src/cmds/infrastructure/sequencers.ts b/yarn-project/cli/src/cmds/infrastructure/sequencers.ts index 341744f0a6c4..3e82bcaacc22 100644 --- a/yarn-project/cli/src/cmds/infrastructure/sequencers.ts +++ b/yarn-project/cli/src/cmds/infrastructure/sequencers.ts @@ -1,7 +1,7 @@ import { createCompatibleClient } from '@aztec/aztec.js'; -import { createEthereumChain } from '@aztec/ethereum'; +import { MINIMUM_STAKE, createEthereumChain } from '@aztec/ethereum'; import { type DebugLogger, type LogFn } from '@aztec/foundation/log'; -import { RollupAbi } from '@aztec/l1-artifacts'; +import { RollupAbi, TestERC20Abi } from '@aztec/l1-artifacts'; import { createPublicClient, createWalletClient, getContract, http } from 'viem'; import { mnemonicToAccount } from 'viem/accounts'; @@ -49,7 +49,7 @@ export async function sequencers(opts: { const who = (maybeWho as `0x${string}`) ?? 
walletClient?.account.address.toString(); if (command === 'list') { - const sequencers = await rollup.read.getValidators(); + const sequencers = await rollup.read.getAttesters(); if (sequencers.length === 0) { log(`No sequencers registered on rollup`); } else { @@ -59,11 +59,26 @@ export async function sequencers(opts: { } } } else if (command === 'add') { - if (!who || !writeableRollup) { + if (!who || !writeableRollup || !walletClient) { throw new Error(`Missing sequencer address`); } + log(`Adding ${who} as sequencer`); - const hash = await writeableRollup.write.addValidator([who]); + + const stakingAsset = getContract({ + address: await rollup.read.STAKING_ASSET(), + abi: TestERC20Abi, + client: walletClient, + }); + + await Promise.all( + [ + await stakingAsset.write.mint([walletClient.account.address, MINIMUM_STAKE], {} as any), + await stakingAsset.write.approve([rollup.address, MINIMUM_STAKE], {} as any), + ].map(txHash => publicClient.waitForTransactionReceipt({ hash: txHash })), + ); + + const hash = await writeableRollup.write.deposit([who, who, who, MINIMUM_STAKE]); await publicClient.waitForTransactionReceipt({ hash }); log(`Added in tx ${hash}`); } else if (command === 'remove') { @@ -71,7 +86,7 @@ export async function sequencers(opts: { throw new Error(`Missing sequencer address`); } log(`Removing ${who} as sequencer`); - const hash = await writeableRollup.write.removeValidator([who]); + const hash = await writeableRollup.write.initiateWithdraw([who, who]); await publicClient.waitForTransactionReceipt({ hash }); log(`Removed in tx ${hash}`); } else if (command === 'who-next') { diff --git a/yarn-project/cli/src/cmds/l1/deploy_l1_contracts.ts b/yarn-project/cli/src/cmds/l1/deploy_l1_contracts.ts index 2d7165ff442e..4052658acc97 100644 --- a/yarn-project/cli/src/cmds/l1/deploy_l1_contracts.ts +++ b/yarn-project/cli/src/cmds/l1/deploy_l1_contracts.ts @@ -42,6 +42,7 @@ export async function deployL1Contracts( log(`L1 -> L2 Inbox Address: ${l1ContractAddresses.inboxAddress.toString()}`); log(`L2 -> L1 Outbox Address: ${l1ContractAddresses.outboxAddress.toString()}`); log(`Fee Juice Address: ${l1ContractAddresses.feeJuiceAddress.toString()}`); + log(`Staking Asset Address: ${l1ContractAddresses.stakingAssetAddress.toString()}`); log(`Fee Juice Portal Address: ${l1ContractAddresses.feeJuicePortalAddress.toString()}`); log(`CoinIssuer Address: ${l1ContractAddresses.coinIssuerAddress.toString()}`); log(`RewardDistributor Address: ${l1ContractAddresses.rewardDistributorAddress.toString()}`); diff --git a/yarn-project/cli/src/cmds/l1/update_l1_validators.ts b/yarn-project/cli/src/cmds/l1/update_l1_validators.ts index 4f3d33eb5745..916d0c351fe3 100644 --- a/yarn-project/cli/src/cmds/l1/update_l1_validators.ts +++ b/yarn-project/cli/src/cmds/l1/update_l1_validators.ts @@ -1,8 +1,8 @@ import { EthCheatCodes } from '@aztec/aztec.js'; import { type EthAddress } from '@aztec/circuits.js'; -import { createEthereumChain, getL1ContractsConfigEnvVars, isAnvilTestChain } from '@aztec/ethereum'; +import { MINIMUM_STAKE, createEthereumChain, getL1ContractsConfigEnvVars, isAnvilTestChain } from '@aztec/ethereum'; import { type DebugLogger, type LogFn } from '@aztec/foundation/log'; -import { RollupAbi } from '@aztec/l1-artifacts'; +import { RollupAbi, TestERC20Abi } from '@aztec/l1-artifacts'; import { createPublicClient, createWalletClient, getContract, http } from 'viem'; import { generatePrivateKey, mnemonicToAccount, privateKeyToAccount } from 'viem/accounts'; @@ -49,8 +49,26 @@ export async 
function addL1Validator({ client: walletClient, }); + const stakingAsset = getContract({ + address: await rollup.read.STAKING_ASSET(), + abi: TestERC20Abi, + client: walletClient, + }); + + await Promise.all( + [ + await stakingAsset.write.mint([walletClient.account.address, MINIMUM_STAKE], {} as any), + await stakingAsset.write.approve([rollupAddress.toString(), MINIMUM_STAKE], {} as any), + ].map(txHash => publicClient.waitForTransactionReceipt({ hash: txHash })), + ); + dualLog(`Adding validator ${validatorAddress.toString()} to rollup ${rollupAddress.toString()}`); - const txHash = await rollup.write.addValidator([validatorAddress.toString()]); + const txHash = await rollup.write.deposit([ + validatorAddress.toString(), + validatorAddress.toString(), + validatorAddress.toString(), + MINIMUM_STAKE, + ]); dualLog(`Transaction hash: ${txHash}`); await publicClient.waitForTransactionReceipt({ hash: txHash }); if (isAnvilTestChain(chainId)) { @@ -87,7 +105,7 @@ export async function removeL1Validator({ }); dualLog(`Removing validator ${validatorAddress.toString()} from rollup ${rollupAddress.toString()}`); - const txHash = await rollup.write.removeValidator([validatorAddress.toString()]); + const txHash = await rollup.write.initiateWithdraw([validatorAddress.toString(), validatorAddress.toString()]); dualLog(`Transaction hash: ${txHash}`); await publicClient.waitForTransactionReceipt({ hash: txHash }); } @@ -163,7 +181,7 @@ export async function debugRollup({ rpcUrl, chainId, rollupAddress, log }: Rollu log(`Pending block num: ${pendingNum}`); const provenNum = await rollup.read.getProvenBlockNumber(); log(`Proven block num: ${provenNum}`); - const validators = await rollup.read.getValidators(); + const validators = await rollup.read.getAttesters(); log(`Validators: ${validators.map(v => v.toString()).join(', ')}`); const committee = await rollup.read.getCurrentEpochCommittee(); log(`Committee: ${committee.map(v => v.toString()).join(', ')}`); diff --git a/yarn-project/cli/src/cmds/pxe/get_node_info.ts b/yarn-project/cli/src/cmds/pxe/get_node_info.ts index bbef7fde3e80..ea13af3a6dd8 100644 --- a/yarn-project/cli/src/cmds/pxe/get_node_info.ts +++ b/yarn-project/cli/src/cmds/pxe/get_node_info.ts @@ -19,6 +19,7 @@ export async function getNodeInfo(rpcUrl: string, pxeRequest: boolean, debugLogg log(` L1 -> L2 Inbox Address: ${info.l1ContractAddresses.inboxAddress.toString()}`); log(` L2 -> L1 Outbox Address: ${info.l1ContractAddresses.outboxAddress.toString()}`); log(` Fee Juice Address: ${info.l1ContractAddresses.feeJuiceAddress.toString()}`); + log(` Staking Asset Address: ${info.l1ContractAddresses.stakingAssetAddress.toString()}`); log(` Fee Juice Portal Address: ${info.l1ContractAddresses.feeJuicePortalAddress.toString()}`); log(` CoinIssuer Address: ${info.l1ContractAddresses.coinIssuerAddress.toString()}`); log(` RewardDistributor Address: ${info.l1ContractAddresses.rewardDistributorAddress.toString()}`); diff --git a/yarn-project/cli/src/cmds/pxe/index.ts b/yarn-project/cli/src/cmds/pxe/index.ts index ec3fec68ee7a..8bae3b797051 100644 --- a/yarn-project/cli/src/cmds/pxe/index.ts +++ b/yarn-project/cli/src/cmds/pxe/index.ts @@ -4,7 +4,6 @@ import { type DebugLogger, type LogFn } from '@aztec/foundation/log'; import { type Command } from 'commander'; import { - LOCALHOST, logJson, makePxeOption, parseAztecAddress, @@ -145,7 +144,7 @@ export function injectCommands(program: Command, log: LogFn, debugLogger: DebugL program .command('get-node-info') .description('Gets the information of an 
Aztec node from a PXE or directly from an Aztec node.') - .option('--node-url <string>', 'URL of the node.', `http://${LOCALHOST}:8080`) + .option('--node-url <string>', 'URL of the node.') .addOption(makePxeOption(false)) .action(async options => { const { getNodeInfo } = await import('./get_node_info.js'); diff --git a/yarn-project/cli/src/utils/commands.ts b/yarn-project/cli/src/utils/commands.ts index 5bc66a8a5c0f..6df225aed63f 100644 --- a/yarn-project/cli/src/utils/commands.ts +++ b/yarn-project/cli/src/utils/commands.ts @@ -33,6 +33,7 @@ export const makePxeOption = (mandatory: boolean) => new Option('-u, --rpc-url <string>', 'URL of the PXE') .env('PXE_URL') .default(`http://${LOCALHOST}:8080`) + .conflicts('remote-pxe') .makeOptionMandatory(mandatory); export const pxeOption = makePxeOption(true); diff --git a/yarn-project/end-to-end/package.json b/yarn-project/end-to-end/package.json index d9514c42dfb4..97faceb22f79 100644 --- a/yarn-project/end-to-end/package.json +++ b/yarn-project/end-to-end/package.json @@ -15,11 +15,11 @@ "clean": "rm -rf ./dest .tsbuildinfo", "formatting": "run -T prettier --check ./src \"!src/web/main.js\" && run -T eslint ./src", "formatting:fix": "run -T eslint --fix ./src && run -T prettier -w ./src", - "test": "LOG_LEVEL=${LOG_LEVEL:-verbose} DEBUG_COLORS=1 NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --testTimeout=300000 --forceExit", + "test": "LOG_LEVEL=${LOG_LEVEL:-verbose} NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --testTimeout=300000 --forceExit", "test:with-alerts": "./scripts/test-with-alerts.sh", - "test:profile": "LOG_LEVEL=${LOG_LEVEL:-verbose} DEBUG_COLORS=1 NODE_NO_WARNINGS=1 0x --output-dir \"flame_graph/{pid}.0x\" -- node --experimental-vm-modules ../node_modules/jest/bin/jest.js --runInBand --testTimeout=300000 --forceExit", + "test:profile": "LOG_LEVEL=${LOG_LEVEL:-verbose} NODE_NO_WARNINGS=1 0x --output-dir \"flame_graph/{pid}.0x\" -- node --experimental-vm-modules ../node_modules/jest/bin/jest.js --runInBand --testTimeout=300000 --forceExit", "serve:flames": "python3 -m http.server --directory \"flame_graph\" 8000", - "test:debug": "LOG_LEVEL=${LOG_LEVEL:-verbose} DEBUG_COLORS=1 NODE_NO_WARNINGS=1 node --inspect --experimental-vm-modules ../node_modules/.bin/jest --testTimeout=300000 --forceExit", + "test:debug": "LOG_LEVEL=${LOG_LEVEL:-verbose} NODE_NO_WARNINGS=1 node --inspect --experimental-vm-modules ../node_modules/.bin/jest --testTimeout=300000 --forceExit", "test:integration": "concurrently -k -s first -c reset,dim -n test,anvil \"yarn test:integration:run\" \"anvil\"", "test:integration:run": "NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --no-cache --runInBand --config jest.integration.config.json", "test:unit": "NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest src/fixtures" @@ -93,12 +93,12 @@ "viem": "^2.7.15", "webpack": "^5.88.2", "webpack-cli": "^5.1.4", - "winston": "^3.10.0", "zod": "^3.23.8" }, "devDependencies": { "0x": "^5.7.0", "@jest/globals": "^29.5.0", + "@sinonjs/fake-timers": "^13.0.5", "@types/jest": "^29.5.0", "@types/js-yaml": "^4.0.9", "@types/lodash.chunk": "^4.2.9", @@ -145,9 +145,9 @@ }, "reporters": [ [ - "default", + "summary", { - "summaryThreshold": 9999 + "summaryThreshold": 0 } ] ], diff --git a/yarn-project/end-to-end/package.local.json b/yarn-project/end-to-end/package.local.json index a5214893419b..39b8bc052ef2 100644 --- a/yarn-project/end-to-end/package.local.json +++ 
b/yarn-project/end-to-end/package.local.json @@ -2,7 +2,17 @@ "scripts": { "build": "yarn clean && tsc -b && webpack", "formatting": "run -T prettier --check ./src \"!src/web/main.js\" && run -T eslint ./src", - "test": "LOG_LEVEL=${LOG_LEVEL:-verbose} DEBUG_COLORS=1 NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --testTimeout=300000 --forceExit", + "test": "LOG_LEVEL=${LOG_LEVEL:-verbose} NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --testTimeout=300000 --forceExit", "test:unit": "NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest src/fixtures" + }, + "jest": { + "reporters": [ + [ + "summary", + { + "summaryThreshold": 0 + } + ] + ] } } diff --git a/yarn-project/end-to-end/scripts/docker-compose-devnet.yml b/yarn-project/end-to-end/scripts/docker-compose-devnet.yml index 4f6efe441ad2..d8ce6a6b7b47 100644 --- a/yarn-project/end-to-end/scripts/docker-compose-devnet.yml +++ b/yarn-project/end-to-end/scripts/docker-compose-devnet.yml @@ -7,8 +7,7 @@ services: - aztec-node-url - faucet-url environment: - DEBUG: ${DEBUG:-'aztec:*'} - DEBUG_COLORS: 1 + LOG_LEVEL: ${LOG_LEVEL:-'verbose'} ETHEREUM_HOST: JOB_NAME: ${JOB_NAME:-''} PXE_PROVER_ENABLED: ${PXE_PROVER_ENABLED:-1} diff --git a/yarn-project/end-to-end/scripts/docker-compose-no-sandbox.yml b/yarn-project/end-to-end/scripts/docker-compose-no-sandbox.yml index 26aee9130358..2fbbabb4d6dd 100644 --- a/yarn-project/end-to-end/scripts/docker-compose-no-sandbox.yml +++ b/yarn-project/end-to-end/scripts/docker-compose-no-sandbox.yml @@ -11,8 +11,7 @@ services: image: aztecprotocol/end-to-end:${AZTEC_DOCKER_TAG:-latest} environment: BENCHMARK: 'true' - DEBUG: ${DEBUG:-'aztec:*'} - DEBUG_COLORS: 1 + LOG_LEVEL: ${LOG_LEVEL:-'verbose'} ETHEREUM_HOST: http://fork:8545 L1_CHAIN_ID: 31337 ARCHIVER_POLLING_INTERVAL_MS: 50 diff --git a/yarn-project/end-to-end/scripts/docker-compose-p2p.yml b/yarn-project/end-to-end/scripts/docker-compose-p2p.yml index f5c79b9c0c28..c16d467ac9e2 100644 --- a/yarn-project/end-to-end/scripts/docker-compose-p2p.yml +++ b/yarn-project/end-to-end/scripts/docker-compose-p2p.yml @@ -14,8 +14,8 @@ services: - '40400:40400/tcp' - '40400:40400/udp' environment: - DEBUG: 'aztec:*' - DEBUG_COLORS: 1 + LOG_LEVEL: ${LOG_LEVEL:-'verbose'} + DEBUG: ${DEBUG:-""} P2P_TCP_LISTEN_ADDR: '0.0.0.0:40400' P2P_TCP_ANNOUNCE_ADDR: 'p2p-bootstrap:40400' P2P_UDP_ANNOUNCE_ADDR: 'p2p-bootstrap:40400' @@ -25,8 +25,8 @@ services: image: aztecprotocol/end-to-end:${AZTEC_DOCKER_TAG:-latest} environment: BENCHMARK: true - DEBUG: ${DEBUG:-'aztec:*'} - DEBUG_COLORS: 1 + LOG_LEVEL: ${LOG_LEVEL:-'verbose'} + DEBUG: ${DEBUG:-""} ETHEREUM_HOST: http://fork:8545 L1_CHAIN_ID: 31337 ARCHIVER_POLLING_INTERVAL_MS: 500 diff --git a/yarn-project/end-to-end/scripts/docker-compose-wallet.yml b/yarn-project/end-to-end/scripts/docker-compose-wallet.yml index 7aa1646aa727..b1a421ed5752 100644 --- a/yarn-project/end-to-end/scripts/docker-compose-wallet.yml +++ b/yarn-project/end-to-end/scripts/docker-compose-wallet.yml @@ -11,8 +11,7 @@ services: image: aztecprotocol/aztec:${AZTEC_DOCKER_TAG:-latest} command: 'start --sandbox' environment: - DEBUG: 'aztec:*' - DEBUG_COLORS: 1 + LOG_LEVEL: ${LOG_LEVEL:-'verbose'} ETHEREUM_HOST: http://fork:8545 L1_CHAIN_ID: 31337 ARCHIVER_POLLING_INTERVAL_MS: 50 @@ -29,8 +28,7 @@ services: end-to-end: image: aztecprotocol/end-to-end:${AZTEC_DOCKER_TAG:-latest} environment: - DEBUG: ${DEBUG:-aztec:*} - DEBUG_COLORS: 1 + LOG_LEVEL: ${LOG_LEVEL:-'verbose'} 
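The compose-file changes in this stretch finish swapping `DEBUG`/`DEBUG_COLORS` for a single `LOG_LEVEL` variable. The e2e test config below also uses a compound value, e.g. `verbose; debug: aztec:sequencer,aztec:world_state` — a default level followed by per-namespace overrides. The consumer of that format is not part of this diff, so the following parser is only a plausible reading of it, not the logging library's actual implementation:

```typescript
// Hypothetical parser for values like "verbose; debug: aztec:sequencer,aztec:world_state".
type Level = 'silent' | 'error' | 'warn' | 'info' | 'verbose' | 'debug' | 'trace';
const ORDER: Level[] = ['silent', 'error', 'warn', 'info', 'verbose', 'debug', 'trace'];

interface LogFilter {
  defaultLevel: Level;
  overrides: Map<string, Level>; // namespace prefix -> level
}

function parseLogLevel(raw: string): LogFilter {
  const [head, ...clauses] = raw.split(';').map(s => s.trim());
  const overrides = new Map<string, Level>();
  for (const clause of clauses) {
    // Each clause is "level: ns1,ns2"; namespaces themselves contain colons,
    // so split only on the first one.
    const sep = clause.indexOf(':');
    const level = clause.slice(0, sep).trim() as Level;
    for (const ns of clause.slice(sep + 1).split(',')) {
      overrides.set(ns.trim(), level);
    }
  }
  return { defaultLevel: head as Level, overrides };
}

function isEnabled(filter: LogFilter, module: string, level: Level): boolean {
  // Longest matching namespace prefix wins; otherwise use the default level.
  let effective = filter.defaultLevel;
  let bestLen = -1;
  for (const [ns, lvl] of filter.overrides) {
    if (module.startsWith(ns) && ns.length > bestLen) {
      effective = lvl;
      bestLen = ns.length;
    }
  }
  return ORDER.indexOf(level) <= ORDER.indexOf(effective);
}

const filter = parseLogLevel('verbose; debug: aztec:sequencer,aztec:world_state');
console.log(isEnabled(filter, 'aztec:sequencer:publisher', 'debug')); // true
console.log(isEnabled(filter, 'aztec:pxe', 'debug'));                 // false
```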
ETHEREUM_HOST: http://fork:8545 L1_CHAIN_ID: 31337 PXE_URL: http://sandbox:8080 diff --git a/yarn-project/end-to-end/scripts/docker-compose.yml b/yarn-project/end-to-end/scripts/docker-compose.yml index 75b9cd0d36ff..4a96de1088dc 100644 --- a/yarn-project/end-to-end/scripts/docker-compose.yml +++ b/yarn-project/end-to-end/scripts/docker-compose.yml @@ -11,8 +11,7 @@ services: image: aztecprotocol/aztec:${AZTEC_DOCKER_TAG:-latest} command: 'start --sandbox' environment: - DEBUG: ${DEBUG:-aztec:*,-aztec:avm_simulator:memory} - DEBUG_COLORS: 1 + LOG_LEVEL: ${LOG_LEVEL:-'verbose'} ETHEREUM_HOST: http://fork:8545 L1_CHAIN_ID: 31337 ARCHIVER_POLLING_INTERVAL_MS: 50 @@ -30,11 +29,11 @@ services: image: aztecprotocol/end-to-end:${AZTEC_DOCKER_TAG:-latest} environment: BENCHMARK: 'true' - DEBUG: ${DEBUG:-aztec:*,-aztec:avm_simulator:memory} - DEBUG_COLORS: 1 + LOG_LEVEL: ${LOG_LEVEL:-'verbose'} ETHEREUM_HOST: http://fork:8545 L1_CHAIN_ID: 31337 PXE_URL: http://sandbox:8080 + AZTEC_NODE_URL: http://sandbox:8080 entrypoint: > sh -c ' while ! nc -z sandbox 8080; do sleep 1; done; diff --git a/yarn-project/end-to-end/scripts/e2e_compose_test.sh b/yarn-project/end-to-end/scripts/e2e_compose_test.sh index e6a74f587457..2cfdda8f712e 100755 --- a/yarn-project/end-to-end/scripts/e2e_compose_test.sh +++ b/yarn-project/end-to-end/scripts/e2e_compose_test.sh @@ -3,7 +3,7 @@ # Usage: ./e2e_compose_test.sh # Optional environment variables: # COMPOSE_FILE (default: ./scripts/docker-compose.yml) -# DEBUG (default: "aztec:*") +# LOG_LEVEL (default: "verbose") # HARDWARE_CONCURRENCY (default: "") # ENABLE_GAS (default: "") # AZTEC_DOCKER_TAG (default: current git commit) @@ -16,7 +16,7 @@ set -eu export TEST="$1" # Variables with defaults COMPOSE_FILE="${COMPOSE_FILE:-./scripts/docker-compose.yml}" -export DEBUG="${DEBUG:-aztec:*,-aztec:avm_simulator:*}" +export LOG_LEVEL="${LOG_LEVEL:-verbose}" export HARDWARE_CONCURRENCY="${HARDWARE_CONCURRENCY:-}" export AZTEC_DOCKER_TAG="${AZTEC_DOCKER_TAG:-$(git rev-parse HEAD)}" diff --git a/yarn-project/end-to-end/scripts/e2e_test_config.yml b/yarn-project/end-to-end/scripts/e2e_test_config.yml index 8a65a011708d..2fb7902c93f8 100644 --- a/yarn-project/end-to-end/scripts/e2e_test_config.yml +++ b/yarn-project/end-to-end/scripts/e2e_test_config.yml @@ -4,19 +4,19 @@ tests: env: HARDWARE_CONCURRENCY: '32' COMPOSE_FILE: 'scripts/docker-compose-no-sandbox.yml' - DEBUG: 'aztec:benchmarks:*,aztec:sequencer,aztec:sequencer:*,aztec:world_state,aztec:merkle_trees' + LOG_LEVEL: 'verbose; debug: aztec:benchmarks,aztec:sequencer,aztec:world_state,aztec:merkle_trees' command: './scripts/e2e_compose_test.sh bench_prover' bench_publish_rollup: env: HARDWARE_CONCURRENCY: '32' COMPOSE_FILE: 'scripts/docker-compose-no-sandbox.yml' - DEBUG: 'aztec:benchmarks:*,aztec:sequencer,aztec:sequencer:*,aztec:world_state,aztec:merkle_trees' + LOG_LEVEL: 'verbose; debug: aztec:benchmarks,aztec:sequencer,aztec:world_state,aztec:merkle_trees' command: './scripts/e2e_compose_test.sh bench_publish_rollup' bench_tx_size: env: HARDWARE_CONCURRENCY: '32' COMPOSE_FILE: 'scripts/docker-compose-no-sandbox.yml' - DEBUG: 'aztec:benchmarks:*,aztec:sequencer,aztec:sequencer:*,aztec:world_state,aztec:merkle_trees' + LOG_LEVEL: 'verbose; debug: aztec:benchmarks,aztec:sequencer,aztec:world_state,aztec:merkle_trees' command: './scripts/e2e_compose_test.sh bench_tx_size' e2e_2_pxes: {} e2e_account_contracts: {} diff --git a/yarn-project/end-to-end/scripts/native-network/boot-node.sh 
b/yarn-project/end-to-end/scripts/native-network/boot-node.sh index 39067971ab94..0a569f93b10d 100755 --- a/yarn-project/end-to-end/scripts/native-network/boot-node.sh +++ b/yarn-project/end-to-end/scripts/native-network/boot-node.sh @@ -11,8 +11,8 @@ exec > >(tee -a "$(dirname $0)/logs/${SCRIPT_NAME}.log") 2> >(tee -a "$(dirname # Set environment variables export PORT=${PORT:-"8080"} -export DEBUG=${DEBUG:-"aztec:*,-aztec:avm_simulator*,-aztec:libp2p_service*,-aztec:circuits:artifact_hash,-json-rpc*,-aztec:l2_block_stream,-aztec:world-state:*"} -export LOG_LEVEL=${LOG_LEVEL:-"debug"} +export DEBUG=${DEBUG:-""} +export LOG_LEVEL=${LOG_LEVEL:-"verbose"} export ETHEREUM_HOST=${ETHEREUM_HOST:-"http://127.0.0.1:8545"} export P2P_ENABLED="true" export VALIDATOR_DISABLED="true" diff --git a/yarn-project/end-to-end/scripts/native-network/deploy-l1-contracts.sh b/yarn-project/end-to-end/scripts/native-network/deploy-l1-contracts.sh index 2d4677b16604..2f1d670620ce 100755 --- a/yarn-project/end-to-end/scripts/native-network/deploy-l1-contracts.sh +++ b/yarn-project/end-to-end/scripts/native-network/deploy-l1-contracts.sh @@ -2,6 +2,7 @@ # Get the name of the script without the path and extension SCRIPT_NAME=$(basename "$0" .sh) +REPO=$(git rev-parse --show-toplevel) # Redirect stdout and stderr to .log while also printing to the console exec > >(tee -a "$(dirname $0)/logs/${SCRIPT_NAME}.log") 2> >(tee -a "$(dirname $0)/logs/${SCRIPT_NAME}.log" >&2) @@ -13,7 +14,9 @@ set -eu # Check for validator addresses if [ $# -gt 0 ]; then INIT_VALIDATORS="true" - VALIDATOR_ADDRESSES="$1" + NUMBER_OF_VALIDATORS="$1" + # Generate validator keys, this will set the VALIDATOR_ADDRESSES variable + source $REPO/yarn-project/end-to-end/scripts/native-network/generate-aztec-validator-keys.sh $NUMBER_OF_VALIDATORS else INIT_VALIDATORS="false" fi @@ -54,6 +57,7 @@ REGISTRY_CONTRACT_ADDRESS=$(echo "$output" | grep -oP 'Registry Address: \K0x[a- INBOX_CONTRACT_ADDRESS=$(echo "$output" | grep -oP 'L1 -> L2 Inbox Address: \K0x[a-fA-F0-9]{40}') OUTBOX_CONTRACT_ADDRESS=$(echo "$output" | grep -oP 'L2 -> L1 Outbox Address: \K0x[a-fA-F0-9]{40}') FEE_JUICE_CONTRACT_ADDRESS=$(echo "$output" | grep -oP 'Fee Juice Address: \K0x[a-fA-F0-9]{40}') +STAKING_ASSET_CONTRACT_ADDRESS=$(echo "$output" | grep -oP 'Staking Asset Address: \K0x[a-fA-F0-9]{40}') FEE_JUICE_PORTAL_CONTRACT_ADDRESS=$(echo "$output" | grep -oP 'Fee Juice Portal Address: \K0x[a-fA-F0-9]{40}') COIN_ISSUER_CONTRACT_ADDRESS=$(echo "$output" | grep -oP 'CoinIssuer Address: \K0x[a-fA-F0-9]{40}') REWARD_DISTRIBUTOR_CONTRACT_ADDRESS=$(echo "$output" | grep -oP 'RewardDistributor Address: \K0x[a-fA-F0-9]{40}') @@ -67,6 +71,7 @@ export REGISTRY_CONTRACT_ADDRESS=$REGISTRY_CONTRACT_ADDRESS export INBOX_CONTRACT_ADDRESS=$INBOX_CONTRACT_ADDRESS export OUTBOX_CONTRACT_ADDRESS=$OUTBOX_CONTRACT_ADDRESS export FEE_JUICE_CONTRACT_ADDRESS=$FEE_JUICE_CONTRACT_ADDRESS +export STAKING_ASSET_CONTRACT_ADDRESS=$STAKING_ASSET_CONTRACT_ADDRESS export FEE_JUICE_PORTAL_CONTRACT_ADDRESS=$FEE_JUICE_PORTAL_CONTRACT_ADDRESS export COIN_ISSUER_CONTRACT_ADDRESS=$COIN_ISSUER_CONTRACT_ADDRESS export REWARD_DISTRIBUTOR_CONTRACT_ADDRESS=$REWARD_DISTRIBUTOR_CONTRACT_ADDRESS diff --git a/yarn-project/end-to-end/scripts/native-network/generate-aztec-validator-keys.sh b/yarn-project/end-to-end/scripts/native-network/generate-aztec-validator-keys.sh new file mode 100755 index 000000000000..85fb8ac8a895 --- /dev/null +++ b/yarn-project/end-to-end/scripts/native-network/generate-aztec-validator-keys.sh @@ -0,0 
+1,28 @@ +#!/bin/bash + +# Generate Aztec validator keys + +NUMBER_OF_KEYS=${1:-16} +MNEMONIC=${2:-"test test test test test test test test test test test junk"} + +# Initialize arrays to store private keys and addresses +declare -a VALIDATOR_PRIVATE_KEYS +declare -a VALIDATOR_ADDRESSES_LIST + +for i in $(seq 0 $(($NUMBER_OF_KEYS - 1))); do + # Get private key and store it in array + PRIVATE_KEY=$(cast wallet private-key "$MNEMONIC" --mnemonic-index $i) + VALIDATOR_PRIVATE_KEYS+=("$PRIVATE_KEY") + + # Get address from private key and store it in array + ADDRESS=$(cast wallet address "$PRIVATE_KEY") + VALIDATOR_ADDRESSES_LIST+=("$ADDRESS") +done + +# Join addresses with commas +VALIDATOR_ADDRESSES=$(IFS=, ; echo "${VALIDATOR_ADDRESSES_LIST[*]}") + +# Optionally, if you need the arrays for further use, you can export them: +export VALIDATOR_PRIVATE_KEYS +export VALIDATOR_ADDRESSES_LIST +export VALIDATOR_ADDRESSES diff --git a/yarn-project/end-to-end/scripts/native-network/prover-node.sh b/yarn-project/end-to-end/scripts/native-network/prover-node.sh index 866ee0f73e9d..08a4c748855f 100755 --- a/yarn-project/end-to-end/scripts/native-network/prover-node.sh +++ b/yarn-project/end-to-end/scripts/native-network/prover-node.sh @@ -32,8 +32,8 @@ output=$(node --no-warnings "$REPO"/yarn-project/aztec/dest/bin/index.js get-nod export BOOTSTRAP_NODES=$(echo "$output" | grep -oP 'Node ENR: \K.*') # Set environment variables -export LOG_LEVEL=${LOG_LEVEL:-"debug"} -export DEBUG=${DEBUG:-"aztec:*,-aztec:avm_simulator*,-aztec:libp2p_service*,-aztec:circuits:artifact_hash,-json-rpc*,-aztec:l2_block_stream,-aztec:world-state:*"} +export LOG_LEVEL=${LOG_LEVEL:-"verbose"} +export DEBUG=${DEBUG:-""} export ETHEREUM_HOST=${ETHEREUM_HOST:-"http://127.0.0.1:8545"} export PROVER_AGENT_COUNT="1" export PROVER_AGENT_ENABLED="true" diff --git a/yarn-project/end-to-end/scripts/native-network/pxe.sh b/yarn-project/end-to-end/scripts/native-network/pxe.sh index c7db13a4c562..ee1fbca03e5a 100755 --- a/yarn-project/end-to-end/scripts/native-network/pxe.sh +++ b/yarn-project/end-to-end/scripts/native-network/pxe.sh @@ -12,8 +12,7 @@ exec > >(tee -a "$(dirname $0)/logs/${SCRIPT_NAME}.log") 2> >(tee -a "$(dirname export ETHEREUM_HOST=${ETHEREUM_HOST:-"http://127.0.0.1:8545"} export AZTEC_NODE_URL=${AZTEC_NODE_URL:-"http://127.0.0.1:8080"} export VALIDATOR_NODE_URL=${VALIDATOR_NODE_URL:-"http://127.0.0.1:8081"} -export LOG_LEVEL=${LOG_LEVEL:-"debug"} -export DEBUG="aztec:*" +export LOG_LEVEL=${LOG_LEVEL:-"verbose"} echo "Waiting for Aztec Node..." until curl -s $AZTEC_NODE_URL/status >/dev/null; do diff --git a/yarn-project/end-to-end/scripts/native-network/test-4epochs.sh b/yarn-project/end-to-end/scripts/native-network/test-4epochs.sh index 3edbcd486372..3e00718517c3 100755 --- a/yarn-project/end-to-end/scripts/native-network/test-4epochs.sh +++ b/yarn-project/end-to-end/scripts/native-network/test-4epochs.sh @@ -31,7 +31,6 @@ until [ -f "$REPO"/yarn-project/end-to-end/scripts/native-network/state/l2-contr done echo "Done waiting." 
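The new generate-aztec-validator-keys.sh above derives one key pair per validator index from a shared mnemonic by shelling out to foundry's `cast wallet`. For reference, the same derivation can be done in-process with viem, which this diff already uses elsewhere; a sketch, assuming the default m/44'/60'/0'/0/i path that `--mnemonic-index` walks:

```typescript
import { mnemonicToAccount } from 'viem/accounts';

// Anvil's well-known test mnemonic, the script's default.
const MNEMONIC = 'test test test test test test test test test test test junk';

// Produces the same addresses as `cast wallet private-key "$MNEMONIC" --mnemonic-index $i`
// followed by `cast wallet address`: both walk m/44'/60'/0'/0/<index>.
function deriveValidatorAddresses(count: number): string[] {
  return Array.from({ length: count }, (_, i) => mnemonicToAccount(MNEMONIC, { addressIndex: i }).address);
}

// Comma-joined, like the VALIDATOR_ADDRESSES variable the script exports.
console.log(deriveValidatorAddresses(4).join(','));
```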
-export DEBUG="aztec:*" -export LOG_LEVEL=${LOG_LEVEL:-"debug"} +export LOG_LEVEL=${LOG_LEVEL:-"verbose"} cd $(git rev-parse --show-toplevel)/yarn-project/end-to-end -DEBUG=aztec:* yarn test src/spartan/4epochs.test.ts \ No newline at end of file +yarn test src/spartan/4epochs.test.ts \ No newline at end of file diff --git a/yarn-project/end-to-end/scripts/native-network/test-transfer.sh b/yarn-project/end-to-end/scripts/native-network/test-transfer.sh index e54d8966ede1..a58483c3fc06 100755 --- a/yarn-project/end-to-end/scripts/native-network/test-transfer.sh +++ b/yarn-project/end-to-end/scripts/native-network/test-transfer.sh @@ -32,7 +32,6 @@ until [ -f "$REPO"/yarn-project/end-to-end/scripts/native-network/state/l2-contr done echo "Done waiting." -export DEBUG="aztec:*" -export LOG_LEVEL=${LOG_LEVEL:-"debug"} +export LOG_LEVEL=${LOG_LEVEL:-"verbose"} cd $(git rev-parse --show-toplevel)/yarn-project/end-to-end -DEBUG=aztec:* yarn test src/spartan/transfer.test.ts \ No newline at end of file +yarn test src/spartan/transfer.test.ts \ No newline at end of file diff --git a/yarn-project/end-to-end/scripts/native-network/transaction-bot.sh b/yarn-project/end-to-end/scripts/native-network/transaction-bot.sh index a42c2417ffde..038236dd63e3 100755 --- a/yarn-project/end-to-end/scripts/native-network/transaction-bot.sh +++ b/yarn-project/end-to-end/scripts/native-network/transaction-bot.sh @@ -37,8 +37,7 @@ fi # Set environment variables export ETHEREUM_HOST=${ETHEREUM_HOST:-"http://127.0.0.1:8545"} export AZTEC_NODE_URL=${AZTEC_NODE_URL:-"http://127.0.0.1:8080"} -export LOG_LEVEL=${LOG_LEVEL:-"debug"} -export DEBUG="aztec:*,-aztec:avm_simulator*,-aztec:libp2p_service*,-aztec:circuits:artifact_hash,-json-rpc*,-aztec:l2_block_stream,-aztec:world-state:*" +export LOG_LEVEL=${LOG_LEVEL:-"verbose"} export BOT_PRIVATE_KEY="0xcafe" export BOT_TX_INTERVAL_SECONDS="5" export BOT_PRIVATE_TRANSFERS_PER_TX="1" diff --git a/yarn-project/end-to-end/scripts/native-network/validator.sh b/yarn-project/end-to-end/scripts/native-network/validator.sh index fa183829d61a..ecc04b6eedba 100755 --- a/yarn-project/end-to-end/scripts/native-network/validator.sh +++ b/yarn-project/end-to-end/scripts/native-network/validator.sh @@ -49,7 +49,8 @@ fi export L1_PRIVATE_KEY=$VALIDATOR_PRIVATE_KEY export SEQ_PUBLISHER_PRIVATE_KEY=$VALIDATOR_PRIVATE_KEY -export DEBUG=${DEBUG:-"aztec:*,-aztec:avm_simulator*,-aztec:libp2p_service*,-aztec:circuits:artifact_hash,-json-rpc*,-aztec:l2_block_stream,-aztec:world-state:*"} +export DEBUG=${DEBUG:-""} +export LOG_LEVEL=${LOG_LEVEL:-"verbose"} export ETHEREUM_HOST=${ETHEREUM_HOST:-"http://127.0.0.1:8545"} # Automatically detect if we're using Anvil @@ -85,11 +86,12 @@ else node --no-warnings "$REPO"/yarn-project/aztec/dest/bin/index.js add-l1-validator --validator $ADDRESS --rollup $ROLLUP_CONTRACT_ADDRESS && break sleep 1 done -fi -# Fast forward epochs if we're on an anvil chain -if [ "$IS_ANVIL" = "true" ]; then - node --no-warnings "$REPO"/yarn-project/aztec/dest/bin/index.js fast-forward-epochs --rollup $ROLLUP_CONTRACT_ADDRESS --count 1 + # Fast forward epochs if we're on an anvil chain + if [ "$IS_ANVIL" = "true" ]; then + node --no-warnings "$REPO"/yarn-project/aztec/dest/bin/index.js fast-forward-epochs --rollup $ROLLUP_CONTRACT_ADDRESS --count 1 + fi fi + # Start the Validator Node with the sequencer and archiver node --no-warnings "$REPO"/yarn-project/aztec/dest/bin/index.js start --port="$PORT" --node --archiver --sequencer diff --git 
a/yarn-project/end-to-end/scripts/native-network/validators.sh b/yarn-project/end-to-end/scripts/native-network/validators.sh index c2454b87481a..b3a75886368b 100755 --- a/yarn-project/end-to-end/scripts/native-network/validators.sh +++ b/yarn-project/end-to-end/scripts/native-network/validators.sh @@ -4,6 +4,7 @@ set -eu # Get the name of the script without the path and extension SCRIPT_NAME=$(basename "$0" .sh) +REPO=$(git rev-parse --show-toplevel) # Redirect stdout and stderr to .log while also printing to the console exec > >(tee -a "$(dirname $0)/logs/${SCRIPT_NAME}.log") 2> >(tee -a "$(dirname $0)/logs/${SCRIPT_NAME}.log" >&2) @@ -15,17 +16,17 @@ cd "$(dirname "${BASH_SOURCE[0]}")" CMD=() +# Generate validator private keys +source $REPO/yarn-project/end-to-end/scripts/native-network/generate-aztec-validator-keys.sh $NUM_VALIDATORS + # Generate validator commands for ((i = 0; i < NUM_VALIDATORS; i++)); do PORT=$((8081 + i)) P2P_PORT=$((40401 + i)) - IDX=$((i + 1)) - # These variables should be set in public networks if we have funded validators already. Leave empty for test environments. - ADDRESS_VAR="ADDRESS_${IDX}" - PRIVATE_KEY_VAR="VALIDATOR_PRIVATE_KEY_${IDX}" - ADDRESS="${!ADDRESS_VAR:-}" - VALIDATOR_PRIVATE_KEY="${!PRIVATE_KEY_VAR:-}" + # Use the arrays generated from generate-aztec-validator-keys.sh + ADDRESS="${VALIDATOR_ADDRESSES_LIST[$i]}" + VALIDATOR_PRIVATE_KEY="${VALIDATOR_PRIVATE_KEYS[$i]}" CMD+=("./validator.sh $PORT $P2P_PORT $ADDRESS $VALIDATOR_PRIVATE_KEY") done diff --git a/yarn-project/end-to-end/scripts/network_test.sh b/yarn-project/end-to-end/scripts/network_test.sh index e5de5ca71856..b9083df3d7e4 100755 --- a/yarn-project/end-to-end/scripts/network_test.sh +++ b/yarn-project/end-to-end/scripts/network_test.sh @@ -53,11 +53,13 @@ if [ "$FRESH_INSTALL" = "true" ]; then kubectl delete namespace "$NAMESPACE" --ignore-not-found=true --wait=true --now --timeout=10m fi -STERN_PID="" +# STERN_PID="" function copy_stern_to_log() { - ulimit -n 4096 - stern spartan -n $NAMESPACE > $SCRIPT_DIR/network-test.log & - STERN_PID=$! + # TODO(AD) we need to figure out a less resource intensive solution than stern + # ulimit -n 4096 + # stern spartan -n $NAMESPACE > $SCRIPT_DIR/network-test.log & + echo "disabled until less resource intensive solution than stern implemented" > $SCRIPT_DIR/network-test.log & + # STERN_PID=$! } function show_status_until_pxe_ready() { @@ -113,7 +115,7 @@ show_status_until_pxe_ready & function cleanup() { # kill everything in our process group except our process - trap - SIGTERM && kill -9 $(pgrep -g $$ | grep -v $$) $(jobs -p) $STERN_PID &>/dev/null || true + trap - SIGTERM && kill -9 $(pgrep -g $$ | grep -v $$) $(jobs -p) &>/dev/null || true if [ "$CLEANUP_CLUSTER" = "true" ]; then kind delete cluster || true @@ -176,7 +178,7 @@ docker run --rm --network=host \ -e HOST_METRICS_PORT=$METRICS_PORT \ -e CONTAINER_METRICS_PORT=80 \ -e GRAFANA_PASSWORD=$GRAFANA_PASSWORD \ - -e DEBUG="aztec:*" \ + -e DEBUG=${DEBUG:-""} \ -e LOG_JSON=1 \ - -e LOG_LEVEL=debug \ + -e LOG_LEVEL=verbose \ aztecprotocol/end-to-end:$AZTEC_DOCKER_TAG $TEST diff --git a/yarn-project/end-to-end/scripts/start_p2p_e2e.sh b/yarn-project/end-to-end/scripts/start_p2p_e2e.sh index edb89f08d827..d407b5623b70 100755 --- a/yarn-project/end-to-end/scripts/start_p2p_e2e.sh +++ b/yarn-project/end-to-end/scripts/start_p2p_e2e.sh @@ -1,6 +1,7 @@ #! 
/bin/bash set -eu -export DEBUG='aztec:*' +export LOG_LEVEL='verbose' +export DEBUG='libp2p:*' export ARCHIVER_POLLING_INTERVAL_MS=500 export P2P_CHECK_INTERVAL=50 export WS_CHECK_INTERVAL=50 @@ -13,6 +14,5 @@ export BOOTSTRAP_NODES='/ip4/127.0.0.1/tcp/40400/p2p/12D3KooWGBpbC6qQFkaCYphjNeY export P2P_TCP_LISTEN_ADDR='0.0.0.0:40400' export P2P_NAT_ENABLED='false' export P2P_ENABLED='true' -export DEBUG='aztec:*,libp2p:*' yarn test e2e_p2p_network.test.ts diff --git a/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts b/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts index 1d9f1c178015..8529d43e1c48 100644 --- a/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts +++ b/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts @@ -9,18 +9,18 @@ import { } from '@aztec/circuit-types'; import { makeBloatedProcessedTx } from '@aztec/circuit-types/test'; import { + type BlockHeader, EthAddress, GENESIS_ARCHIVE_ROOT, GasFees, GasSettings, - type Header, MAX_NULLIFIERS_PER_TX, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, } from '@aztec/circuits.js'; import { fr } from '@aztec/circuits.js/testing'; import { type L1ContractAddresses, createEthereumChain } from '@aztec/ethereum'; import { range } from '@aztec/foundation/array'; -import { openTmpStore } from '@aztec/kv-store/utils'; +import { openTmpStore } from '@aztec/kv-store/lmdb'; import { OutboxAbi, RollupAbi } from '@aztec/l1-artifacts'; import { SHA256Trunc, StandardTree } from '@aztec/merkle-tree'; import { getVKTreeRoot } from '@aztec/noir-protocol-circuits-types'; @@ -84,7 +84,7 @@ describe('L1Publisher integration', () => { let builderDb: MerkleTreeAdminDatabase; // The header of the last block - let prevHeader: Header; + let prevHeader: BlockHeader; let baseFee: GasFees; diff --git a/yarn-project/end-to-end/src/e2e_epochs.test.ts b/yarn-project/end-to-end/src/e2e_epochs.test.ts index 3ac4e07afd86..ee0dc3197ee6 100644 --- a/yarn-project/end-to-end/src/e2e_epochs.test.ts +++ b/yarn-project/end-to-end/src/e2e_epochs.test.ts @@ -1,5 +1,5 @@ -import { type EpochConstants, getTimestampRangeForEpoch } from '@aztec/archiver/epoch'; -import { type DebugLogger, retryUntil } from '@aztec/aztec.js'; +// eslint-disable-next-line no-restricted-imports +import { type DebugLogger, type EpochConstants, getTimestampRangeForEpoch, retryUntil } from '@aztec/aztec.js'; import { RollupContract } from '@aztec/ethereum/contracts'; import { type Delayer, waitUntilL1Timestamp } from '@aztec/ethereum/test'; diff --git a/yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts b/yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts index 6b8401823a4c..afa5cdbb1cd9 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/gossip_network.test.ts @@ -21,7 +21,7 @@ const DATA_DIR = './data/gossip'; const qosAlerts: AlertConfig[] = [ { alert: 'SequencerTimeToCollectAttestations', - expr: 'aztec_sequencer_time_to_collect_attestations > 2500', + expr: 'aztec_sequencer_time_to_collect_attestations > 3500', labels: { severity: 'error' }, for: '10m', annotations: {}, @@ -37,11 +37,12 @@ describe('e2e_p2p_network', () => { testName: 'e2e_p2p_network', numberOfNodes: NUM_NODES, basePort: BOOT_NODE_UDP_PORT, - // To collect metrics - run in aztec-packages `docker compose --profile metrics up` and set COLLECT_METRICS=true metricsPort: shouldCollectMetrics(), }); + await t.applyBaseSnapshots(); await t.setup(); + await t.removeInitialNode(); }); 
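For orientation, the ordering added above matters: applyBaseSnapshots stakes and registers the validators before setup restores the subsystems, and the node used during the initial deployment is dropped afterwards. A minimal sketch of the resulting lifecycle, using only method names that appear in this diff (the test body itself is elided):

// Sketch only, not part of the patch: lifecycle of the reworked P2PNetworkTest.
const t = await P2PNetworkTest.create({ testName: 'example', numberOfNodes: 4, basePort: 40400 });
await t.applyBaseSnapshots();  // mint/approve the staking asset, cheat-initialise the validator set, warp to the next epoch
await t.setup();               // restore subsystems and start the mock-system-time sync loop
await t.removeInitialNode();   // stop the node used for the initial deployment
// ... assertions ...
await t.teardown();            // stop the bootstrap node and clear the time-sync interval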
afterEach(async () => { diff --git a/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts b/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts index 3289add932a1..478d2fccf697 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/p2p_network.ts @@ -1,10 +1,9 @@ import { getSchnorrAccount } from '@aztec/accounts/schnorr'; import { type AztecNodeConfig, type AztecNodeService } from '@aztec/aztec-node'; -import { type AccountWalletWithSecretKey, EthCheatCodes } from '@aztec/aztec.js'; -import { EthAddress } from '@aztec/circuits.js'; -import { getL1ContractsConfigEnvVars } from '@aztec/ethereum'; +import { type AccountWalletWithSecretKey } from '@aztec/aztec.js'; +import { EthCheatCodes, MINIMUM_STAKE, getL1ContractsConfigEnvVars } from '@aztec/ethereum'; import { type DebugLogger, createDebugLogger } from '@aztec/foundation/log'; -import { RollupAbi } from '@aztec/l1-artifacts'; +import { RollupAbi, TestERC20Abi } from '@aztec/l1-artifacts'; import { SpamContract } from '@aztec/noir-contracts.js'; import { type BootstrapNode } from '@aztec/p2p'; import { createBootstrapNodeFromPrivateKey } from '@aztec/p2p/mocks'; @@ -14,9 +13,10 @@ import { getContract } from 'viem'; import { privateKeyToAccount } from 'viem/accounts'; import { - PRIVATE_KEYS_START_INDEX, + ATTESTER_PRIVATE_KEYS_START_INDEX, + PROPOSER_PRIVATE_KEYS_START_INDEX, createValidatorConfig, - generateNodePrivateKeys, + generatePrivateKeys, } from '../fixtures/setup_p2p_test.js'; import { type ISnapshotManager, @@ -39,8 +39,9 @@ export class P2PNetworkTest { public logger: DebugLogger; public ctx!: SubsystemsContext; - public nodePrivateKeys: `0x${string}`[] = []; - public nodePublicKeys: string[] = []; + public attesterPrivateKeys: `0x${string}`[] = []; + public attesterPublicKeys: string[] = []; + public proposerPrivateKeys: `0x${string}`[] = []; public peerIdPrivateKeys: string[] = []; public bootstrapNodeEnr: string = ''; @@ -49,12 +50,13 @@ export class P2PNetworkTest { public wallet?: AccountWalletWithSecretKey; public spamContract?: SpamContract; + private cleanupInterval: NodeJS.Timeout | undefined = undefined; + constructor( testName: string, public bootstrapNode: BootstrapNode, public bootNodePort: number, private numberOfNodes: number, - initialValidatorAddress: string, initialValidatorConfig: AztecNodeConfig, // If set enable metrics collection metricsPort?: number, @@ -63,22 +65,49 @@ export class P2PNetworkTest { // Set up the base account and node private keys for the initial network deployment this.baseAccount = privateKeyToAccount(`0x${getPrivateKeyFromIndex(0)!.toString('hex')}`); - this.nodePrivateKeys = generateNodePrivateKeys(PRIVATE_KEYS_START_INDEX, numberOfNodes); - this.nodePublicKeys = this.nodePrivateKeys.map(privateKey => privateKeyToAccount(privateKey).address); + this.proposerPrivateKeys = generatePrivateKeys(PROPOSER_PRIVATE_KEYS_START_INDEX, numberOfNodes); + this.attesterPrivateKeys = generatePrivateKeys(ATTESTER_PRIVATE_KEYS_START_INDEX, numberOfNodes); + this.attesterPublicKeys = this.attesterPrivateKeys.map(privateKey => privateKeyToAccount(privateKey).address); this.bootstrapNodeEnr = bootstrapNode.getENR().encodeTxt(); - const initialValidators = [EthAddress.fromString(initialValidatorAddress)]; - this.snapshotManager = createSnapshotManager(`e2e_p2p_network/${testName}`, process.env.E2E_DATA_PATH, { ...initialValidatorConfig, ethereumSlotDuration: l1ContractsConfig.ethereumSlotDuration, salt: 420, - initialValidators, metricsPort: metricsPort, 
}); } + /** + * Start a loop to sync the mock system time with the L1 block time + */ + public startSyncMockSystemTimeInterval() { + this.cleanupInterval = setInterval(async () => { + await this.syncMockSystemTime(); + }, l1ContractsConfig.aztecSlotDuration * 1000); + } + + /** + * When using fake timers, we need to keep the system and anvil clocks in sync. + */ + public async syncMockSystemTime() { + this.logger.info('Syncing mock system time'); + const { timer, deployL1ContractsValues } = this.ctx!; + // Send a tx and only update the time after the tx is mined, as eth time is not continuous + const tx = await deployL1ContractsValues.walletClient.sendTransaction({ + to: this.baseAccount.address, + value: 1n, + account: this.baseAccount, + }); + const receipt = await deployL1ContractsValues.publicClient.waitForTransactionReceipt({ + hash: tx, + }); + const timestamp = await deployL1ContractsValues.publicClient.getBlock({ blockNumber: receipt.blockNumber }); + timer.setSystemTime(Number(timestamp.timestamp) * 1000); + this.logger.info(`Synced mock system time to ${timestamp.timestamp * 1000n}`); + } + static async create({ testName, numberOfNodes, @@ -92,73 +121,91 @@ export class P2PNetworkTest { }) { const port = basePort || (await getPort()); - const telemetry = await getEndToEndTestTelemetryClient(metricsPort, /*service name*/ `bootstrapnode`); + const telemetry = await getEndToEndTestTelemetryClient(metricsPort); const bootstrapNode = await createBootstrapNodeFromPrivateKey(BOOTSTRAP_NODE_PRIVATE_KEY, port, telemetry); const bootstrapNodeEnr = bootstrapNode.getENR().encodeTxt(); const initialValidatorConfig = await createValidatorConfig({} as AztecNodeConfig, bootstrapNodeEnr); - const intiailValidatorAddress = privateKeyToAccount(initialValidatorConfig.publisherPrivateKey).address; - - return new P2PNetworkTest( - testName, - bootstrapNode, - port, - numberOfNodes, - intiailValidatorAddress, - initialValidatorConfig, - ); + + return new P2PNetworkTest(testName, bootstrapNode, port, numberOfNodes, initialValidatorConfig); } async applyBaseSnapshots() { - await this.snapshotManager.snapshot('add-validators', async ({ deployL1ContractsValues, aztecNodeConfig }) => { - const rollup = getContract({ - address: deployL1ContractsValues.l1ContractAddresses.rollupAddress.toString(), - abi: RollupAbi, - client: deployL1ContractsValues.walletClient, - }); - - this.logger.verbose(`Adding ${this.numberOfNodes} validators`); - - const txHashes: `0x${string}`[] = []; - for (let i = 0; i < this.numberOfNodes; i++) { - const account = privateKeyToAccount(this.nodePrivateKeys[i]!); - this.logger.debug(`Adding ${account.address} as validator`); - const txHash = await rollup.write.addValidator([account.address]); - txHashes.push(txHash); - - this.logger.debug(`Adding ${account.address} as validator`); - } - - // Wait for all the transactions adding validators to be mined - await Promise.all( - txHashes.map(txHash => - deployL1ContractsValues.publicClient.waitForTransactionReceipt({ - hash: txHash, + await this.snapshotManager.snapshot( + 'add-validators', + async ({ deployL1ContractsValues, aztecNodeConfig, timer }) => { + const rollup = getContract({ + address: deployL1ContractsValues.l1ContractAddresses.rollupAddress.toString(), + abi: RollupAbi, + client: deployL1ContractsValues.walletClient, + }); + + this.logger.verbose(`Adding ${this.numberOfNodes} validators`); + + const stakingAsset = getContract({ + address: deployL1ContractsValues.l1ContractAddresses.stakingAssetAddress.toString(), + abi: 
TestERC20Abi, + client: deployL1ContractsValues.walletClient, + }); + + const stakeNeeded = MINIMUM_STAKE * BigInt(this.numberOfNodes); + await Promise.all( + [ + await stakingAsset.write.mint( + [deployL1ContractsValues.walletClient.account.address, stakeNeeded], + {} as any, + ), + await stakingAsset.write.approve( + [deployL1ContractsValues.l1ContractAddresses.rollupAddress.toString(), stakeNeeded], + {} as any, + ), + ].map(txHash => deployL1ContractsValues.publicClient.waitForTransactionReceipt({ hash: txHash })), + ); + + const validators = []; + + for (let i = 0; i < this.numberOfNodes; i++) { + const attester = privateKeyToAccount(this.attesterPrivateKeys[i]!); + const proposer = privateKeyToAccount(this.proposerPrivateKeys[i]!); + validators.push({ + attester: attester.address, + proposer: proposer.address, + withdrawer: attester.address, + amount: MINIMUM_STAKE, + } as const); + + this.logger.verbose( + `Adding (attester, proposer) pair: (${attester.address}, ${proposer.address}) as validator`, + ); + } + + await deployL1ContractsValues.publicClient.waitForTransactionReceipt({ + hash: await rollup.write.cheat__InitialiseValidatorSet([validators]), + }); + + const slotsInEpoch = await rollup.read.EPOCH_DURATION(); + const timestamp = await rollup.read.getTimestampForSlot([slotsInEpoch]); + const cheatCodes = new EthCheatCodes(aztecNodeConfig.l1RpcUrl); + try { + await cheatCodes.warp(Number(timestamp)); + } catch (err) { + this.logger.debug('Warp failed, time already satisfied'); + } + + // Send and await a tx to make sure we mine a block for the warp to correctly progress. + await deployL1ContractsValues.publicClient.waitForTransactionReceipt({ + hash: await deployL1ContractsValues.walletClient.sendTransaction({ + to: this.baseAccount.address, + value: 1n, + account: this.baseAccount, }), - ), - ); - - //@note Now we jump ahead to the next epoch such that the validator committee is picked - // INTERVAL MINING: If we are using anvil interval mining this will NOT progress the time! - // Which means that the validator set will still be empty! So anyone can propose. - const slotsInEpoch = await rollup.read.EPOCH_DURATION(); - const timestamp = await rollup.read.getTimestampForSlot([slotsInEpoch]); - const cheatCodes = new EthCheatCodes(aztecNodeConfig.l1RpcUrl); - try { - await cheatCodes.warp(Number(timestamp)); - } catch (err) { - this.logger.debug('Warp failed, time already satisfied'); - } - - // Send and await a tx to make sure we mine a block for the warp to correctly progress. 
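+        // (With anvil interval mining, a warped timestamp only becomes observable once the next block is mined.)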
- await deployL1ContractsValues.publicClient.waitForTransactionReceipt({ - hash: await deployL1ContractsValues.walletClient.sendTransaction({ - to: this.baseAccount.address, - value: 1n, - account: this.baseAccount, - }), - }); - }); + }); + + // Set the system time in the node, only after we have warped the time and waited for a block + // Time is only set in the NEXT block + timer.setSystemTime(Number(timestamp) * 1000); + }, + ); } async setupAccount() { @@ -197,49 +244,28 @@ export class P2PNetworkTest { async removeInitialNode() { await this.snapshotManager.snapshot( 'remove-inital-validator', - async ({ deployL1ContractsValues, aztecNodeConfig }) => { - const rollup = getContract({ - address: deployL1ContractsValues.l1ContractAddresses.rollupAddress.toString(), - abi: RollupAbi, - client: deployL1ContractsValues.walletClient, - }); - - // Remove the setup validator - const initialValidatorAddress = privateKeyToAccount(`0x${getPrivateKeyFromIndex(0)!.toString('hex')}`).address; - const txHash = await rollup.write.removeValidator([initialValidatorAddress]); - - await deployL1ContractsValues.publicClient.waitForTransactionReceipt({ - hash: txHash, - }); - - //@note Now we jump ahead to the next epoch such that the validator committee is picked - // INTERVAL MINING: If we are using anvil interval mining this will NOT progress the time! - // Which means that the validator set will still be empty! So anyone can propose. - const slotsInEpoch = await rollup.read.EPOCH_DURATION(); - const timestamp = await rollup.read.getTimestampForSlot([slotsInEpoch]); - const cheatCodes = new EthCheatCodes(aztecNodeConfig.l1RpcUrl); - try { - await cheatCodes.warp(Number(timestamp)); - } catch (err) { - this.logger.debug('Warp failed, time already satisfied'); - } - + async ({ deployL1ContractsValues, aztecNode, timer }) => { // Send and await a tx to make sure we mine a block for the warp to correctly progress. 
- await deployL1ContractsValues.publicClient.waitForTransactionReceipt({ + const receipt = await deployL1ContractsValues.publicClient.waitForTransactionReceipt({ hash: await deployL1ContractsValues.walletClient.sendTransaction({ to: this.baseAccount.address, value: 1n, account: this.baseAccount, }), }); + const block = await deployL1ContractsValues.publicClient.getBlock({ + blockNumber: receipt.blockNumber, + }); + timer.setSystemTime(Number(block.timestamp) * 1000); - await this.ctx.aztecNode.stop(); + await aztecNode.stop(); }, ); } async setup() { this.ctx = await this.snapshotManager.setup(); + this.startSyncMockSystemTimeInterval(); } async stopNodes(nodes: AztecNodeService[]) { @@ -259,5 +285,8 @@ export class P2PNetworkTest { async teardown() { await this.bootstrapNode.stop(); await this.snapshotManager.teardown(); + if (this.cleanupInterval) { + clearInterval(this.cleanupInterval); + } } } diff --git a/yarn-project/end-to-end/src/e2e_p2p/reqresp.test.ts b/yarn-project/end-to-end/src/e2e_p2p/reqresp.test.ts index c7644b77f3dd..d9be627e8610 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/reqresp.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/reqresp.test.ts @@ -123,6 +123,11 @@ describe('e2e_p2p_reqresp_tx', () => { client: t.ctx.deployL1ContractsValues.publicClient, }); + const attesters = await rollupContract.read.getAttesters(); + const mappedProposers = await Promise.all( + attesters.map(async attester => await rollupContract.read.getProposerForAttester([attester])), + ); + const currentTime = await t.ctx.cheatCodes.eth.timestamp(); const slotDuration = await rollupContract.read.SLOT_DURATION(); @@ -133,9 +138,11 @@ describe('e2e_p2p_reqresp_tx', () => { const proposer = await rollupContract.read.getProposerAt([nextSlot]); proposers.push(proposer); } - // Get the indexes of the nodes that are responsible for the next two slots - const proposerIndexes = proposers.map(proposer => t.nodePublicKeys.indexOf(proposer)); + const proposerIndexes = proposers.map(proposer => mappedProposers.indexOf(proposer as `0x${string}`)); + + t.logger.info('proposerIndexes: ' + proposerIndexes.join(', ')); + const nodesToTurnOffTxGossip = Array.from({ length: NUM_NODES }, (_, i) => i).filter( i => !proposerIndexes.includes(i), ); diff --git a/yarn-project/end-to-end/src/fixtures/get_acvm_config.ts b/yarn-project/end-to-end/src/fixtures/get_acvm_config.ts index d11a078c78cf..f7f840c31d20 100644 --- a/yarn-project/end-to-end/src/fixtures/get_acvm_config.ts +++ b/yarn-project/end-to-end/src/fixtures/get_acvm_config.ts @@ -1,7 +1,7 @@ import { type DebugLogger } from '@aztec/aztec.js'; import { randomBytes } from '@aztec/foundation/crypto'; -import * as fs from 'fs/promises'; +import { promises as fs } from 'fs'; export { deployAndInitializeTokenAndBridgeContracts } from '../shared/cross_chain_test_harness.js'; diff --git a/yarn-project/end-to-end/src/fixtures/logging.ts b/yarn-project/end-to-end/src/fixtures/logging.ts index 5e16e5585f80..8c2488158c53 100644 --- a/yarn-project/end-to-end/src/fixtures/logging.ts +++ b/yarn-project/end-to-end/src/fixtures/logging.ts @@ -1,10 +1,5 @@ -import { onLog } from '@aztec/aztec.js'; - import { mkdirpSync } from 'fs-extra'; import { dirname } from 'path'; -import * as winston from 'winston'; - -const { format, transports } = winston; let metricsLoggerSet = false; @@ -22,15 +17,6 @@ export function setupMetricsLogger(filename: string) { return; } mkdirpSync(dirname(filename)); - const logger = winston.createLogger({ - level: 'debug', - format: 
format.combine(format.timestamp(), format.json()), - transports: [new transports.File({ filename })], - }); - onLog((level, namespace, message, data) => { - if (data && data['eventName']) { - logger.log({ ...data, level, namespace, message }); - } - }); + // TODO(palla/log): Reenable or kill metrics logger metricsLoggerSet = true; } diff --git a/yarn-project/end-to-end/src/fixtures/setup_p2p_test.ts b/yarn-project/end-to-end/src/fixtures/setup_p2p_test.ts index f8d4fdacaaa4..7f70fd2c735c 100644 --- a/yarn-project/end-to-end/src/fixtures/setup_p2p_test.ts +++ b/yarn-project/end-to-end/src/fixtures/setup_p2p_test.ts @@ -13,7 +13,8 @@ import { getEndToEndTestTelemetryClient } from './with_telemetry_utils.js'; // Setup snapshots will create a node with index 0, so all of our loops here // need to start from 1 to avoid running validators with the same key -export const PRIVATE_KEYS_START_INDEX = 1; +export const PROPOSER_PRIVATE_KEYS_START_INDEX = 1; +export const ATTESTER_PRIVATE_KEYS_START_INDEX = 1001; export interface NodeContext { node: AztecNodeService; @@ -22,13 +23,13 @@ export interface NodeContext { account: AztecAddress; } -export function generateNodePrivateKeys(startIndex: number, numberOfNodes: number): `0x${string}`[] { - const nodePrivateKeys: `0x${string}`[] = []; +export function generatePrivateKeys(startIndex: number, numberOfKeys: number): `0x${string}`[] { + const privateKeys: `0x${string}`[] = []; // Do not start from 0 as it is used during setup - for (let i = startIndex; i < startIndex + numberOfNodes; i++) { - nodePrivateKeys.push(`0x${getPrivateKeyFromIndex(i)!.toString('hex')}`); + for (let i = startIndex; i < startIndex + numberOfKeys; i++) { + privateKeys.push(`0x${getPrivateKeyFromIndex(i)!.toString('hex')}`); } - return nodePrivateKeys; + return privateKeys; } export function createNodes( @@ -45,7 +46,7 @@ export function createNodes( const port = bootNodePort + i + 1; const dataDir = dataDirectory ? `${dataDirectory}-${i}` : undefined; - const nodePromise = createNode(config, port, bootstrapNodeEnr, i + PRIVATE_KEYS_START_INDEX, dataDir, metricsPort); + const nodePromise = createNode(config, port, bootstrapNodeEnr, i, dataDir, metricsPort); nodePromises.push(nodePromise); } return Promise.all(nodePromises); @@ -56,19 +57,13 @@ export async function createNode( config: AztecNodeConfig, tcpPort: number, bootstrapNode: string | undefined, - publisherAddressIndex: number, + accountIndex: number, dataDirectory?: string, metricsPort?: number, ) { - const validatorConfig = await createValidatorConfig( - config, - bootstrapNode, - tcpPort, - publisherAddressIndex, - dataDirectory, - ); + const validatorConfig = await createValidatorConfig(config, bootstrapNode, tcpPort, accountIndex, dataDirectory); - const telemetryClient = await getEndToEndTestTelemetryClient(metricsPort, /*serviceName*/ `node:${tcpPort}`); + const telemetryClient = await getEndToEndTestTelemetryClient(metricsPort); return await AztecNodeService.createAndSync(validatorConfig, { telemetry: telemetryClient, @@ -85,11 +80,15 @@ export async function createValidatorConfig( ) { port = port ?? 
(await getPort()); - const privateKey = getPrivateKeyFromIndex(accountIndex); - const privateKeyHex: `0x${string}` = `0x${privateKey!.toString('hex')}`; + const attesterPrivateKey: `0x${string}` = `0x${getPrivateKeyFromIndex( + ATTESTER_PRIVATE_KEYS_START_INDEX + accountIndex, + )!.toString('hex')}`; + const proposerPrivateKey: `0x${string}` = `0x${getPrivateKeyFromIndex( + PROPOSER_PRIVATE_KEYS_START_INDEX + accountIndex, + )!.toString('hex')}`; - config.publisherPrivateKey = privateKeyHex; - config.validatorPrivateKey = privateKeyHex; + config.validatorPrivateKey = attesterPrivateKey; + config.publisherPrivateKey = proposerPrivateKey; const nodeConfig: AztecNodeConfig = { ...config, diff --git a/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts b/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts index 488e7291bda4..b37e4044bab2 100644 --- a/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts +++ b/yarn-project/end-to-end/src/fixtures/snapshot_manager.ts @@ -24,6 +24,7 @@ import { type ProverNode } from '@aztec/prover-node'; import { type PXEService, createPXEService, getPXEServiceConfig } from '@aztec/pxe'; import { createAndStartTelemetryClient, getConfigEnvVars as getTelemetryConfig } from '@aztec/telemetry-client/start'; +import { type InstalledClock, install } from '@sinonjs/fake-timers'; import { type Anvil } from '@viem/anvil'; import { existsSync, mkdirSync, readFileSync, writeFileSync } from 'fs'; import { copySync, removeSync } from 'fs-extra/esm'; @@ -49,6 +50,7 @@ export type SubsystemsContext = { proverNode?: ProverNode; watcher: AnvilTestWatcher; cheatCodes: CheatCodes; + timer: InstalledClock; }; type SnapshotEntry = { @@ -247,6 +249,7 @@ async function teardown(context: SubsystemsContext | undefined) { await context.acvmConfig?.cleanup(); await context.anvil.stop(); await context.watcher.stop(); + context.timer?.uninstall(); } /** @@ -265,6 +268,9 @@ async function setupFromFresh( ): Promise { logger.verbose(`Initializing state...`); + // Use sinonjs fake timers + const timer = install({ shouldAdvanceTime: true, advanceTimeDelta: 20, toFake: ['Date'] }); + // Fetch the AztecNode config. // TODO: For some reason this is currently the union of a bunch of subsystems. That needs fixing. 
const aztecNodeConfig: AztecNodeConfig & SetupOptions = { ...getConfigEnvVars(), ...opts }; @@ -296,9 +302,9 @@ async function setupFromFresh( const deployL1ContractsValues = await setupL1Contracts(aztecNodeConfig.l1RpcUrl, hdAccount, logger, { salt: opts.salt, - initialValidators: opts.initialValidators, ...deployL1ContractsArgs, ...getL1ContractsConfigEnvVars(), + initialValidators: opts.initialValidators, }); aztecNodeConfig.l1Contracts = deployL1ContractsValues.l1ContractAddresses; aztecNodeConfig.l1PublishRetryIntervalMS = 100; @@ -317,7 +323,7 @@ async function setupFromFresh( const feeJuice = getContract({ address: deployL1ContractsValues.l1ContractAddresses.feeJuiceAddress.toString(), - abi: l1Artifacts.feeJuice.contractAbi, + abi: l1Artifacts.feeAsset.contractAbi, client: deployL1ContractsValues.walletClient, }); @@ -345,7 +351,7 @@ async function setupFromFresh( aztecNodeConfig.bbWorkingDirectory = bbConfig.bbWorkingDirectory; } - const telemetry = await getEndToEndTestTelemetryClient(opts.metricsPort, /*serviceName*/ statePath); + const telemetry = await getEndToEndTestTelemetryClient(opts.metricsPort); logger.verbose('Creating and synching an aztec node...'); const aztecNode = await AztecNodeService.createAndSync(aztecNodeConfig, { telemetry }); @@ -382,6 +388,7 @@ async function setupFromFresh( proverNode, watcher, cheatCodes, + timer, }; } @@ -391,6 +398,9 @@ async function setupFromFresh( async function setupFromState(statePath: string, logger: Logger): Promise { logger.verbose(`Initializing with saved state at ${statePath}...`); + // TODO: make one function + const timer = install({ shouldAdvanceTime: true, advanceTimeDelta: 20, toFake: ['Date'] }); + // TODO: For some reason this is currently the union of a bunch of subsystems. That needs fixing. const aztecNodeConfig: AztecNodeConfig & SetupOptions = JSON.parse( readFileSync(`${statePath}/aztec_node_config.json`, 'utf-8'), @@ -463,6 +473,7 @@ async function setupFromState(statePath: string, logger: Logger): Promise { - return !metricsPort - ? 
Promise.resolve(new NoopTelemetryClient()) - : createAndStartTelemetryClient(getEndToEndTestTelemetryConfig(metricsPort, serviceName)); +let telemetryClient: Promise | undefined; +export function getEndToEndTestTelemetryClient(metricsPort?: number): Promise { + if (!metricsPort) { + return Promise.resolve(new NoopTelemetryClient()); + } + if (!telemetryClient) { + telemetryClient = createEndToEndTestOtelClient(metricsPort); + } + return telemetryClient; +} + +function createEndToEndTestOtelClient(metricsPort: number): Promise { + const otelStream = new OTelPinoStream({ levels }); + registerLoggingStream(otelStream); + return createAndStartTelemetryClient(getEndToEndTestTelemetryConfig(metricsPort)); } /** @@ -17,7 +30,7 @@ export function getEndToEndTestTelemetryClient(metricsPort?: number, serviceName * * Read from env vars, override if metricsPort is set */ -export function getEndToEndTestTelemetryConfig(metricsPort?: number, serviceName?: string) { +function getEndToEndTestTelemetryConfig(metricsPort?: number) { const telemetryConfig: TelemetryClientConfig = getTelemetryConfig(); if (metricsPort) { telemetryConfig.metricsCollectorUrl = new URL(`http://127.0.0.1:${metricsPort}/v1/metrics`); @@ -27,8 +40,5 @@ export function getEndToEndTestTelemetryConfig(metricsPort?: number, serviceName telemetryConfig.otelCollectIntervalMs = 5000; telemetryConfig.otelExportTimeoutMs = 2500; } - if (serviceName) { - telemetryConfig.serviceName = serviceName; - } return telemetryConfig; } diff --git a/yarn-project/end-to-end/src/guides/up_quick_start.test.ts b/yarn-project/end-to-end/src/guides/up_quick_start.test.ts index 15f31c364e26..1fc6920a1f91 100644 --- a/yarn-project/end-to-end/src/guides/up_quick_start.test.ts +++ b/yarn-project/end-to-end/src/guides/up_quick_start.test.ts @@ -1,17 +1,20 @@ -import { createPXEClient, waitForPXE } from '@aztec/aztec.js'; +import { createAztecNodeClient, waitForNode } from '@aztec/aztec.js'; import { execSync } from 'child_process'; -const { PXE_URL = '' } = process.env; +const { AZTEC_NODE_URL = '' } = process.env; // Entrypoint for running the up-quick-start script on the CI describe('guides/up_quick_start', () => { // TODO: update to not use CLI it('works', async () => { - await waitForPXE(createPXEClient(PXE_URL)); - execSync(`DEBUG="aztec:*" PXE_URL=\${PXE_URL:-http://localhost:8080} ./src/guides/up_quick_start.sh`, { - shell: '/bin/bash', - stdio: 'inherit', - }); + await waitForNode(createAztecNodeClient(AZTEC_NODE_URL)); + execSync( + `LOG_LEVEL=\${LOG_LEVEL:-verbose} AZTEC_NODE_URL=\${AZTEC_NODE_URL:-http://localhost:8080} ./src/guides/up_quick_start.sh`, + { + shell: '/bin/bash', + stdio: 'inherit', + }, + ); }); }); diff --git a/yarn-project/end-to-end/src/shared/browser.ts b/yarn-project/end-to-end/src/shared/browser.ts index ea9dde82e564..50e09adf0225 100644 --- a/yarn-project/end-to-end/src/shared/browser.ts +++ b/yarn-project/end-to-end/src/shared/browser.ts @@ -6,6 +6,7 @@ import * as AztecJs from '@aztec/aztec.js'; import { TokenContractArtifact } from '@aztec/noir-contracts.js/Token'; import { contractArtifactToBuffer } from '@aztec/types/abi'; +import getPort from 'get-port'; import { type Server } from 'http'; import Koa from 'koa'; import serve from 'koa-static'; @@ -77,16 +78,18 @@ export const browserTestSuite = ( app = new Koa(); app.use(serve(path.resolve(__dirname, './web'))); + const debuggingPort = await getPort({ port: 9222 }); browser = await launch({ executablePath: process.env.CHROME_BIN, headless: true, + debuggingPort, args: [ 
'--no-sandbox', '--headless', '--disable-gpu', '--disable-dev-shm-usage', '--disable-software-rasterizer', - '--remote-debugging-port=9222', + `--remote-debugging-port=${debuggingPort}`, ], }); page = await browser.newPage(); diff --git a/yarn-project/end-to-end/src/shared/cross_chain_test_harness.ts b/yarn-project/end-to-end/src/shared/cross_chain_test_harness.ts index d68750db2302..a72729961bf9 100644 --- a/yarn-project/end-to-end/src/shared/cross_chain_test_harness.ts +++ b/yarn-project/end-to-end/src/shared/cross_chain_test_harness.ts @@ -76,9 +76,11 @@ export async function deployAndInitializeTokenAndBridgeContracts( underlyingERC20: any; }> { if (!underlyingERC20Address) { - underlyingERC20Address = await deployL1Contract(walletClient, publicClient, TestERC20Abi, TestERC20Bytecode).then( - ({ address }) => address, - ); + underlyingERC20Address = await deployL1Contract(walletClient, publicClient, TestERC20Abi, TestERC20Bytecode, [ + 'Underlying', + 'UND', + walletClient.account.address, + ]).then(({ address }) => address); } const underlyingERC20 = getContract({ address: underlyingERC20Address!.toString(), @@ -86,6 +88,9 @@ export async function deployAndInitializeTokenAndBridgeContracts( client: walletClient, }); + // allow anyone to mint + await underlyingERC20.write.setFreeForAll([true], {} as any); + // deploy the token portal const { address: tokenPortalAddress } = await deployL1Contract( walletClient, diff --git a/yarn-project/end-to-end/src/spartan/gating-passive.test.ts b/yarn-project/end-to-end/src/spartan/gating-passive.test.ts index 6369f912a7a9..6d8b52261aa8 100644 --- a/yarn-project/end-to-end/src/spartan/gating-passive.test.ts +++ b/yarn-project/end-to-end/src/spartan/gating-passive.test.ts @@ -41,15 +41,15 @@ const { SPARTAN_DIR, INSTANCE_NAME, } = config; -const debugLogger = createDebugLogger('aztec:spartan-test:reorg'); +const debugLogger = createDebugLogger('aztec:spartan-test:gating-passive'); describe('a test that passively observes the network in the presence of network chaos', () => { jest.setTimeout(60 * 60 * 1000); // 60 minutes const ETHEREUM_HOST = `http://127.0.0.1:${HOST_ETHEREUM_PORT}`; const PXE_URL = `http://127.0.0.1:${HOST_PXE_PORT}`; - // 50% is the max that we expect to miss - const MAX_MISSED_SLOT_PERCENT = 0.5; + // 60% is the max that we expect to miss + const MAX_MISSED_SLOT_PERCENT = 0.6; afterAll(async () => { await runAlertCheck(config, qosAlerts, debugLogger); @@ -126,14 +126,16 @@ describe('a test that passively observes the network in the presence of network await sleep(Number(epochDuration * slotDuration) * 1000); const newTips = await rollupCheatCodes.getTips(); - const expectedPending = - controlTips.pending + BigInt(Math.floor((1 - MAX_MISSED_SLOT_PERCENT) * Number(epochDuration))); - expect(newTips.pending).toBeGreaterThan(expectedPending); // calculate the percentage of slots missed const perfectPending = controlTips.pending + BigInt(Math.floor(Number(epochDuration))); const missedSlots = Number(perfectPending) - Number(newTips.pending); const missedSlotsPercentage = (missedSlots / Number(epochDuration)) * 100; debugLogger.info(`Missed ${missedSlots} slots, ${missedSlotsPercentage.toFixed(2)}%`); + + // Ensure we missed at most the max allowed slots + // This is in place to ensure that we don't have a bad regression in the network + const maxMissedSlots = Math.floor(Number(epochDuration) * MAX_MISSED_SLOT_PERCENT); + expect(missedSlots).toBeLessThanOrEqual(maxMissedSlots); } }); }); diff --git 
a/yarn-project/end-to-end/webpack.config.js b/yarn-project/end-to-end/webpack.config.js index 88f6bb5178c1..3ae5808f82fe 100644 --- a/yarn-project/end-to-end/webpack.config.js +++ b/yarn-project/end-to-end/webpack.config.js @@ -60,6 +60,7 @@ export default { fs: false, path: false, url: false, + tty: false, worker_threads: false, buffer: require.resolve('buffer/'), util: require.resolve('util/'), diff --git a/yarn-project/epoch-cache/.eslintrc.cjs b/yarn-project/epoch-cache/.eslintrc.cjs new file mode 100644 index 000000000000..e659927475c0 --- /dev/null +++ b/yarn-project/epoch-cache/.eslintrc.cjs @@ -0,0 +1 @@ +module.exports = require('@aztec/foundation/eslint'); diff --git a/yarn-project/epoch-cache/READMD.md b/yarn-project/epoch-cache/READMD.md new file mode 100644 index 000000000000..c5a9f339f131 --- /dev/null +++ b/yarn-project/epoch-cache/READMD.md @@ -0,0 +1,3 @@ +## Epoch Cache + +Stores the current validator set. \ No newline at end of file diff --git a/yarn-project/epoch-cache/package.json b/yarn-project/epoch-cache/package.json new file mode 100644 index 000000000000..ed86b9c1b590 --- /dev/null +++ b/yarn-project/epoch-cache/package.json @@ -0,0 +1,94 @@ +{ + "name": "@aztec/epoch-cache", + "version": "0.1.0", + "type": "module", + "exports": { + ".": "./dest/index.js", + "./test": "./dest/test/index.js", + "./contracts": "./dest/contracts/index.js" + }, + "typedocOptions": { + "entryPoints": [ + "./src/index.ts" + ], + "name": "Epoch Cache", + "tsconfig": "./tsconfig.json" + }, + "scripts": { + "build": "yarn clean && tsc -b", + "build:dev": "tsc -b --watch", + "clean": "rm -rf ./dest .tsbuildinfo", + "formatting": "run -T prettier --check ./src && run -T eslint ./src", + "formatting:fix": "run -T eslint --fix ./src && run -T prettier -w ./src", + "start:dev": "tsc-watch -p tsconfig.json --onSuccess 'yarn start'", + "start": "node ./dest/index.js", + "test": "NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --passWithNoTests" + }, + "inherits": [ + "../package.common.json" + ], + "dependencies": { + "@aztec/circuit-types": "workspace:*", + "@aztec/ethereum": "workspace:*", + "@aztec/foundation": "workspace:^", + "@aztec/l1-artifacts": "workspace:^", + "@viem/anvil": "^0.0.10", + "dotenv": "^16.0.3", + "get-port": "^7.1.0", + "jest-mock-extended": "^3.0.7", + "tslib": "^2.4.0", + "viem": "^2.7.15", + "zod": "^3.23.8" + }, + "devDependencies": { + "@jest/globals": "^29.5.0", + "@types/jest": "^29.5.0", + "@types/node": "^18.14.6", + "jest": "^29.5.0", + "ts-node": "^10.9.1", + "typescript": "^5.0.4" + }, + "files": [ + "dest", + "src", + "!*.test.*" + ], + "types": "./dest/index.d.ts", + "jest": { + "moduleNameMapper": { + "^(\\.{1,2}/.*)\\.[cm]?js$": "$1" + }, + "testRegex": "./src/.*\\.test\\.(js|mjs|ts)$", + "rootDir": "./src", + "transform": { + "^.+\\.tsx?$": [ + "@swc/jest", + { + "jsc": { + "parser": { + "syntax": "typescript", + "decorators": true + }, + "transform": { + "decoratorVersion": "2022-03" + } + } + } + ] + }, + "extensionsToTreatAsEsm": [ + ".ts" + ], + "reporters": [ + [ + "default", + { + "summaryThreshold": 9999 + } + ] + ] + }, + "engines": { + "node": ">=18" + } +} diff --git a/yarn-project/epoch-cache/src/config.ts b/yarn-project/epoch-cache/src/config.ts new file mode 100644 index 000000000000..3da15946771e --- /dev/null +++ b/yarn-project/epoch-cache/src/config.ts @@ -0,0 +1,12 @@ +import { + type L1ContractsConfig, + type L1ReaderConfig, + getL1ContractsConfigEnvVars, + getL1ReaderConfigFromEnv, +} from '@aztec/ethereum'; 
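The EpochCacheConfig defined just below is simply the L1 reader and L1 contracts configs merged from env vars. A usage sketch of how the package is meant to be consumed, based on the EpochCache API added later in this diff (the rollup address env var name is an assumption for illustration):

// Sketch only: wiring the epoch cache from environment config.
import { EthAddress } from '@aztec/foundation/eth-address';
import { EpochCache } from '@aztec/epoch-cache';

// create() falls back to getEpochCacheConfigEnvVars() when no explicit config is passed.
const cache = await EpochCache.create(EthAddress.fromString(process.env.ROLLUP_CONTRACT_ADDRESS!));
const committee = await cache.getCommittee(); // cached for the length of an epoch
const [proposerNow, proposerNext] = await cache.getProposerInCurrentOrNextSlot();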
+ +export type EpochCacheConfig = L1ReaderConfig & L1ContractsConfig; + +export function getEpochCacheConfigEnvVars(): EpochCacheConfig { + return { ...getL1ReaderConfigFromEnv(), ...getL1ContractsConfigEnvVars() }; +} diff --git a/yarn-project/epoch-cache/src/epoch_cache.test.ts b/yarn-project/epoch-cache/src/epoch_cache.test.ts new file mode 100644 index 000000000000..4cde40eff1e9 --- /dev/null +++ b/yarn-project/epoch-cache/src/epoch_cache.test.ts @@ -0,0 +1,117 @@ +import { type RollupContract } from '@aztec/ethereum'; +import { EthAddress } from '@aztec/foundation/eth-address'; + +import { afterEach, beforeEach, describe, expect, it, jest } from '@jest/globals'; +import { type MockProxy, mock } from 'jest-mock-extended'; + +import { EpochCache } from './epoch_cache.js'; + +describe('EpochCache', () => { + let rollupContract: MockProxy; + let epochCache: EpochCache; + + // Test constants + const SLOT_DURATION = 12; + const EPOCH_DURATION = 32; // 384 seconds + // const L1_GENESIS_TIME = 1000n; + let l1GenesisTime: bigint; + + const testCommittee = [ + EthAddress.fromString('0x0000000000000000000000000000000000000001'), + EthAddress.fromString('0x0000000000000000000000000000000000000002'), + EthAddress.fromString('0x0000000000000000000000000000000000000003'), + ]; + + const extraTestValidator = EthAddress.fromString('0x0000000000000000000000000000000000000004'); + + beforeEach(() => { + rollupContract = mock(); + + // Mock the getCommitteeAt method + rollupContract.getCommitteeAt.mockResolvedValue(testCommittee.map(v => v.toString())); + rollupContract.getSampleSeedAt.mockResolvedValue(0n); + + l1GenesisTime = BigInt(Math.floor(Date.now() / 1000)); + + // Setup fake timers + jest.useFakeTimers(); + + // Initialize with test constants + const testConstants = { + l1StartBlock: 0n, + l1GenesisTime, + slotDuration: SLOT_DURATION, + ethereumSlotDuration: SLOT_DURATION, + epochDuration: EPOCH_DURATION, + }; + + epochCache = new EpochCache(rollupContract, testCommittee, 0n, testConstants); + }); + + afterEach(() => { + jest.useRealTimers(); + }); + + it('should cache the validator set for the length of an epoch', async () => { + // Initial call to get validators + const initialCommittee = await epochCache.getCommittee(); + expect(initialCommittee).toEqual(testCommittee); + // Not called as we should cache with the initial validator set + expect(rollupContract.getCommitteeAt).toHaveBeenCalledTimes(0); + + // Move time forward within the same epoch (less than EPOCH_DURATION) (x 1000 for milliseconds) + jest.setSystemTime(Date.now() + (Number(EPOCH_DURATION * SLOT_DURATION) / 4) * 1000); + + // Add another validator to the set + rollupContract.getCommitteeAt.mockResolvedValue([...testCommittee, extraTestValidator].map(v => v.toString())); + + // Should use cached validators + const midEpochCommittee = await epochCache.getCommittee(); + expect(midEpochCommittee).toEqual(testCommittee); + expect(rollupContract.getCommitteeAt).toHaveBeenCalledTimes(0); // Still cached + + // Move time forward to next epoch (x 1000 for milliseconds) + jest.setSystemTime(Date.now() + Number(EPOCH_DURATION * SLOT_DURATION) * 1000); + + // Should fetch new validator + const nextEpochCommittee = await epochCache.getCommittee(); + expect(nextEpochCommittee).toEqual([...testCommittee, extraTestValidator]); + expect(rollupContract.getCommitteeAt).toHaveBeenCalledTimes(1); // Called again for new epoch + }); + + it('should correctly get current validator based on slot number', async () => { + // Set initial time to a known 
slot + const initialTime = Number(l1GenesisTime) * 1000; // Convert to milliseconds + jest.setSystemTime(initialTime); + + // The valid proposer has been calculated in advance to be [1,1,0] for the slots chosen + // Hence the chosen values for testCommittee below + + // Get validator for slot 0 + let [currentValidator] = await epochCache.getProposerInCurrentOrNextSlot(); + expect(currentValidator).toEqual(testCommittee[1]); + + // Move to next slot + jest.setSystemTime(initialTime + Number(SLOT_DURATION) * 1000); + [currentValidator] = await epochCache.getProposerInCurrentOrNextSlot(); + expect(currentValidator).toEqual(testCommittee[1]); + + // Move to slot that wraps around validator set + jest.setSystemTime(initialTime + Number(SLOT_DURATION) * 3 * 1000); + [currentValidator] = await epochCache.getProposerInCurrentOrNextSlot(); + expect(currentValidator).toEqual(testCommittee[0]); + }); + + it('Should request to update the validator set when on the epoch boundary', async () => { + // Set initial time to a known slot + const initialTime = Number(l1GenesisTime) * 1000; // Convert to milliseconds + jest.setSystemTime(initialTime); + + // Move forward to slot before the epoch boundary + jest.setSystemTime(initialTime + Number(SLOT_DURATION) * (EPOCH_DURATION - 1) * 1000); + + // Should request to update the validator set + await epochCache.getProposerInCurrentOrNextSlot(); + expect(rollupContract.getCommitteeAt).toHaveBeenCalledTimes(1); + }); +}); diff --git a/yarn-project/epoch-cache/src/epoch_cache.ts b/yarn-project/epoch-cache/src/epoch_cache.ts new file mode 100644 index 000000000000..7ebee68ef149 --- /dev/null +++ b/yarn-project/epoch-cache/src/epoch_cache.ts @@ -0,0 +1,192 @@ +import { + EmptyL1RollupConstants, + type L1RollupConstants, + getEpochNumberAtTimestamp, + getSlotAtTimestamp, +} from '@aztec/circuit-types'; +import { RollupContract, createEthereumChain } from '@aztec/ethereum'; +import { EthAddress } from '@aztec/foundation/eth-address'; +import { type Logger, createDebugLogger } from '@aztec/foundation/log'; + +import { createPublicClient, encodeAbiParameters, http, keccak256 } from 'viem'; + +import { type EpochCacheConfig, getEpochCacheConfigEnvVars } from './config.js'; + +type EpochAndSlot = { + epoch: bigint; + slot: bigint; + ts: bigint; +}; + +/** + * Epoch cache + * + * This class is responsible for managing traffic to the l1 node, by caching the validator set. + * It also provides a method to get the current or next proposer, and to check who is in the current slot. + * + * If the epoch changes, then we update the stored validator set. + * + * Note: This class is very dependent on the system clock being in sync. + */ +export class EpochCache { + private committee: EthAddress[]; + private cachedEpoch: bigint; + private cachedSampleSeed: bigint; + private readonly log: Logger = createDebugLogger('aztec:EpochCache'); + + constructor( + private rollup: RollupContract, + initialValidators: EthAddress[] = [], + initialSampleSeed: bigint = 0n, + private readonly l1constants: L1RollupConstants = EmptyL1RollupConstants, + ) { + this.committee = initialValidators; + this.cachedSampleSeed = initialSampleSeed; + + this.log.debug(`Initialized EpochCache with constants and validators`, { l1constants, initialValidators }); + + this.cachedEpoch = getEpochNumberAtTimestamp(BigInt(Math.floor(Date.now() / 1000)), this.l1constants); + } + + static async create(rollupAddress: EthAddress, config?: EpochCacheConfig) { + config = config ?? 
getEpochCacheConfigEnvVars(); + + const chain = createEthereumChain(config.l1RpcUrl, config.l1ChainId); + const publicClient = createPublicClient({ + chain: chain.chainInfo, + transport: http(chain.rpcUrl), + pollingInterval: config.viemPollingIntervalMS, + }); + + const rollup = new RollupContract(publicClient, rollupAddress.toString()); + const [l1StartBlock, l1GenesisTime, initialValidators, sampleSeed] = await Promise.all([ + rollup.getL1StartBlock(), + rollup.getL1GenesisTime(), + rollup.getCurrentEpochCommittee(), + rollup.getCurrentSampleSeed(), + ] as const); + + const l1RollupConstants: L1RollupConstants = { + l1StartBlock, + l1GenesisTime, + slotDuration: config.aztecSlotDuration, + epochDuration: config.aztecEpochDuration, + ethereumSlotDuration: config.ethereumSlotDuration, + }; + + return new EpochCache( + rollup, + initialValidators.map(v => EthAddress.fromString(v)), + sampleSeed, + l1RollupConstants, + ); + } + + getEpochAndSlotNow(): EpochAndSlot { + const now = BigInt(Math.floor(Date.now() / 1000)); + return this.getEpochAndSlotAtTimestamp(now); + } + + getEpochAndSlotInNextSlot(): EpochAndSlot { + const nextSlotTs = BigInt(Math.floor(Date.now() / 1000) + this.l1constants.slotDuration); + return this.getEpochAndSlotAtTimestamp(nextSlotTs); + } + + getEpochAndSlotAtTimestamp(ts: bigint): EpochAndSlot { + return { + epoch: getEpochNumberAtTimestamp(ts, this.l1constants), + slot: getSlotAtTimestamp(ts, this.l1constants), + ts, + }; + } + + /** + * Get the current validator set + * + * @param nextSlot - If true, get the validator set for the next slot. + * @returns The current validator set. + */ + async getCommittee(nextSlot: boolean = false): Promise { + // If the current epoch has changed, then we need to make a request to update the validator set + const { epoch: calculatedEpoch, ts } = nextSlot ? this.getEpochAndSlotInNextSlot() : this.getEpochAndSlotNow(); + + if (calculatedEpoch !== this.cachedEpoch) { + this.log.debug(`Epoch changed, updating validator set`, { calculatedEpoch, cachedEpoch: this.cachedEpoch }); + this.cachedEpoch = calculatedEpoch; + const [committeeAtTs, sampleSeedAtTs] = await Promise.all([ + this.rollup.getCommitteeAt(ts), + this.rollup.getSampleSeedAt(ts), + ]); + this.committee = committeeAtTs.map((v: `0x${string}`) => EthAddress.fromString(v)); + this.cachedSampleSeed = sampleSeedAtTs; + } + + return this.committee; + } + + /** + * Get the ABI encoding of the proposer index - see Leonidas.sol _computeProposerIndex + */ + getProposerIndexEncoding(epoch: bigint, slot: bigint, seed: bigint): `0x${string}` { + return encodeAbiParameters( + [ + { type: 'uint256', name: 'epoch' }, + { type: 'uint256', name: 'slot' }, + { type: 'uint256', name: 'seed' }, + ], + [epoch, slot, seed], + ); + } + + computeProposerIndex(slot: bigint, epoch: bigint, seed: bigint, size: bigint): bigint { + return BigInt(keccak256(this.getProposerIndexEncoding(epoch, slot, seed))) % size; + } + + /** + * Returns the current and next proposer + * + * We return the next proposer as the node will check if it is the proposer at the next ethereum block, which + * can be the next slot. If this is the case, then it will send proposals early. + * + * If we are at an epoch boundary, then we can update the cache for the next epoch, this is the last check + * we do in the validator client, so we can update the cache here. 
+ */ + async getProposerInCurrentOrNextSlot(): Promise<[EthAddress, EthAddress]> { + // Validators are sorted by their index in the committee, and getValidatorSet will cache + const committee = await this.getCommittee(); + const { slot: currentSlot, epoch: currentEpoch } = this.getEpochAndSlotNow(); + const { slot: nextSlot, epoch: nextEpoch } = this.getEpochAndSlotInNextSlot(); + + // Compute the proposer in this and the next slot + const proposerIndex = this.computeProposerIndex( + currentSlot, + this.cachedEpoch, + this.cachedSampleSeed, + BigInt(committee.length), + ); + + // Check if the next proposer is in the next epoch + if (nextEpoch !== currentEpoch) { + await this.getCommittee(/*next slot*/ true); + } + const nextProposerIndex = this.computeProposerIndex( + nextSlot, + this.cachedEpoch, + this.cachedSampleSeed, + BigInt(committee.length), + ); + + const calculatedProposer = committee[Number(proposerIndex)]; + const nextCalculatedProposer = committee[Number(nextProposerIndex)]; + + return [calculatedProposer, nextCalculatedProposer]; + } + + /** + * Check if a validator is in the current epoch's committee + */ + async isInCommittee(validator: EthAddress): Promise { + const committee = await this.getCommittee(); + return committee.some(v => v.equals(validator)); + } +} diff --git a/yarn-project/epoch-cache/src/index.ts b/yarn-project/epoch-cache/src/index.ts new file mode 100644 index 000000000000..f6a7dba8382b --- /dev/null +++ b/yarn-project/epoch-cache/src/index.ts @@ -0,0 +1,2 @@ +export * from './epoch_cache.js'; +export * from './config.js'; diff --git a/yarn-project/epoch-cache/src/timestamp_provider.ts b/yarn-project/epoch-cache/src/timestamp_provider.ts new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/yarn-project/epoch-cache/tsconfig.json b/yarn-project/epoch-cache/tsconfig.json new file mode 100644 index 000000000000..249d08ef855c --- /dev/null +++ b/yarn-project/epoch-cache/tsconfig.json @@ -0,0 +1,23 @@ +{ + "extends": "..", + "compilerOptions": { + "outDir": "dest", + "rootDir": "src", + "tsBuildInfoFile": ".tsbuildinfo" + }, + "include": ["src"], + "references": [ + { + "path": "../circuit-types" + }, + { + "path": "../ethereum" + }, + { + "path": "../foundation" + }, + { + "path": "../l1-artifacts" + } + ] +} diff --git a/yarn-project/ethereum/src/constants.ts b/yarn-project/ethereum/src/constants.ts index c1f4b34d7321..2fea0175acac 100644 --- a/yarn-project/ethereum/src/constants.ts +++ b/yarn-project/ethereum/src/constants.ts @@ -2,3 +2,4 @@ import { type Hex } from 'viem'; export const NULL_KEY: Hex = `0x${'0000000000000000000000000000000000000000000000000000000000000000'}`; export const AZTEC_TEST_CHAIN_ID = 677692; +export const MINIMUM_STAKE = BigInt(100e18); diff --git a/yarn-project/ethereum/src/contracts/rollup.ts b/yarn-project/ethereum/src/contracts/rollup.ts index 98e3ac29fe2d..69bc1065653d 100644 --- a/yarn-project/ethereum/src/contracts/rollup.ts +++ b/yarn-project/ethereum/src/contracts/rollup.ts @@ -44,6 +44,26 @@ export class RollupContract { return this.rollup.read.getCurrentSlot(); } + getCommitteeAt(timestamp: bigint) { + return this.rollup.read.getCommitteeAt([timestamp]); + } + + getSampleSeedAt(timestamp: bigint) { + return this.rollup.read.getSampleSeedAt([timestamp]); + } + + getCurrentSampleSeed() { + return this.rollup.read.getCurrentSampleSeed(); + } + + getCurrentEpochCommittee() { + return this.rollup.read.getCurrentEpochCommittee(); + } + + getCurrentProposer() { + return this.rollup.read.getCurrentProposer(); + } + 
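These read helpers give the node the on-chain view; the epoch cache computes the same proposer locally. A worked sketch of that selection, mirroring computeProposerIndex and getProposerIndexEncoding from epoch_cache.ts above (the concrete values are illustrative):

// Sketch only: off-chain proposer selection, matching Leonidas.sol _computeProposerIndex.
import { encodeAbiParameters, keccak256 } from 'viem';

const epoch = 7n, slot = 224n, seed = 0n;
const encoded = encodeAbiParameters(
  [{ type: 'uint256', name: 'epoch' }, { type: 'uint256', name: 'slot' }, { type: 'uint256', name: 'seed' }],
  [epoch, slot, seed],
);
const committeeSize = 3n;
const proposerIndex = BigInt(keccak256(encoded)) % committeeSize; // index into the sorted committee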
async getEpochNumber(blockNumber?: bigint) { blockNumber ??= await this.getBlockNumber(); return this.rollup.read.getEpochForBlock([BigInt(blockNumber)]); diff --git a/yarn-project/ethereum/src/deploy_l1_contracts.ts b/yarn-project/ethereum/src/deploy_l1_contracts.ts index 32832708c2a3..d6efa076de82 100644 --- a/yarn-project/ethereum/src/deploy_l1_contracts.ts +++ b/yarn-project/ethereum/src/deploy_l1_contracts.ts @@ -53,6 +53,7 @@ import { type HDAccount, type PrivateKeyAccount, mnemonicToAccount, privateKeyTo import { foundry } from 'viem/chains'; import { type L1ContractsConfig } from './config.js'; +import { MINIMUM_STAKE } from './constants.js'; import { isAnvilTestChain } from './ethereum_chain.js'; import { type L1ContractAddresses } from './l1_contract_addresses.js'; import { L1TxUtils } from './l1_tx_utils.js'; @@ -127,10 +128,14 @@ export interface L1ContractArtifactsForDeployment { * Rollup contract artifacts */ rollup: ContractArtifacts; + /** + * The token to stake. + */ + stakingAsset: ContractArtifacts; /** * The token to pay for gas. This will be bridged to L2 via the feeJuicePortal below */ - feeJuice: ContractArtifacts; + feeAsset: ContractArtifacts; /** * Fee juice portal contract artifacts. Optional for now as gas is not strictly enforced */ @@ -183,7 +188,11 @@ export const l1Artifacts: L1ContractArtifactsForDeployment = { }, }, }, - feeJuice: { + stakingAsset: { + contractAbi: TestERC20Abi, + contractBytecode: TestERC20Bytecode, + }, + feeAsset: { contractAbi: TestERC20Abi, contractBytecode: TestERC20Bytecode, }, @@ -256,6 +265,7 @@ export function createL1Clients( const publicClient = createPublicClient({ chain, transport: http(rpcUrl), + pollingInterval: 100, }); return { walletClient, publicClient }; @@ -307,8 +317,19 @@ export const deployL1Contracts = async ( const registryAddress = await govDeployer.deploy(l1Artifacts.registry, [account.address.toString()]); logger.info(`Deployed Registry at ${registryAddress}`); - const feeJuiceAddress = await govDeployer.deploy(l1Artifacts.feeJuice); - logger.info(`Deployed Fee Juice at ${feeJuiceAddress}`); + const feeAssetAddress = await govDeployer.deploy(l1Artifacts.feeAsset, [ + 'FeeJuice', + 'FEE', + account.address.toString(), + ]); + logger.info(`Deployed Fee Juice at ${feeAssetAddress}`); + + const stakingAssetAddress = await govDeployer.deploy(l1Artifacts.stakingAsset, [ + 'Staking', + 'STK', + account.address.toString(), + ]); + logger.info(`Deployed Staking Asset at ${stakingAssetAddress}`); // @todo #8084 // @note These numbers are just chosen to make testing simple. @@ -321,26 +342,29 @@ export const deployL1Contracts = async ( ]); logger.info(`Deployed GovernanceProposer at ${governanceProposerAddress}`); + // @note @LHerskind the assets are expected to be the same at some point, but for better + // configurability they are different for now. 
const governanceAddress = await govDeployer.deploy(l1Artifacts.governance, [ - feeJuiceAddress.toString(), + feeAssetAddress.toString(), governanceProposerAddress.toString(), ]); logger.info(`Deployed Governance at ${governanceAddress}`); const coinIssuerAddress = await govDeployer.deploy(l1Artifacts.coinIssuer, [ - feeJuiceAddress.toString(), + feeAssetAddress.toString(), 1n * 10n ** 18n, // @todo #8084 governanceAddress.toString(), ]); logger.info(`Deployed CoinIssuer at ${coinIssuerAddress}`); const rewardDistributorAddress = await govDeployer.deploy(l1Artifacts.rewardDistributor, [ - feeJuiceAddress.toString(), + feeAssetAddress.toString(), registryAddress.toString(), governanceAddress.toString(), ]); logger.info(`Deployed RewardDistributor at ${rewardDistributorAddress}`); + logger.verbose(`Waiting for governance contracts to be deployed`); await govDeployer.waitForDeployments(); logger.info(`All governance contracts deployed`); @@ -348,27 +372,29 @@ export const deployL1Contracts = async ( const feeJuicePortalAddress = await deployer.deploy(l1Artifacts.feeJuicePortal, [ registryAddress.toString(), - feeJuiceAddress.toString(), + feeAssetAddress.toString(), args.l2FeeJuiceAddress.toString(), ]); logger.info(`Deployed Fee Juice Portal at ${feeJuicePortalAddress}`); - const rollupArgs = { + const rollupConfigArgs = { aztecSlotDuration: args.aztecSlotDuration, aztecEpochDuration: args.aztecEpochDuration, targetCommitteeSize: args.aztecTargetCommitteeSize, aztecEpochProofClaimWindowInL2Slots: args.aztecEpochProofClaimWindowInL2Slots, + minimumStake: MINIMUM_STAKE, }; - const rollupAddress = await deployer.deploy(l1Artifacts.rollup, [ + const rollupArgs = [ feeJuicePortalAddress.toString(), rewardDistributorAddress.toString(), + stakingAssetAddress.toString(), args.vkTreeRoot.toString(), args.protocolContractTreeRoot.toString(), account.address.toString(), - args.initialValidators?.map(v => v.toString()) ?? [], - rollupArgs, - ]); - logger.info(`Deployed Rollup at ${rollupAddress}`, rollupArgs); + rollupConfigArgs, + ]; + const rollupAddress = await deployer.deploy(l1Artifacts.rollup, rollupArgs); + logger.info(`Deployed Rollup at ${rollupAddress}`, rollupConfigArgs); await deployer.waitForDeployments(); logger.info(`All core contracts deployed`); @@ -379,9 +405,15 @@ export const deployL1Contracts = async ( client: walletClient, }); - const feeJuice = getContract({ - address: feeJuiceAddress.toString(), - abi: l1Artifacts.feeJuice.contractAbi, + const feeAsset = getContract({ + address: feeAssetAddress.toString(), + abi: l1Artifacts.feeAsset.contractAbi, + client: walletClient, + }); + + const stakingAsset = getContract({ + address: stakingAssetAddress.toString(), + abi: l1Artifacts.stakingAsset.contractAbi, client: walletClient, }); @@ -394,12 +426,40 @@ export const deployL1Contracts = async ( // Transaction hashes to await const txHashes: Hex[] = []; + { + const txHash = await feeAsset.write.setFreeForAll([true], {} as any); + logger.info(`Fee asset set to free for all in ${txHash}`); + txHashes.push(txHash); + } + + if (args.initialValidators && args.initialValidators.length > 0) { + // Mint tokens, approve them, use cheat code to initialise validator set without setting up the epoch. 
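+    // Each entry stakes MINIMUM_STAKE (100e18, from constants.ts), so stakeNeeded below covers all of args.initialValidators.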
+ const stakeNeeded = MINIMUM_STAKE * BigInt(args.initialValidators.length); + await Promise.all( + [ + await stakingAsset.write.mint([walletClient.account.address, stakeNeeded], {} as any), + await stakingAsset.write.approve([rollupAddress.toString(), stakeNeeded], {} as any), + ].map(txHash => publicClient.waitForTransactionReceipt({ hash: txHash })), + ); + + const initiateValidatorSetTxHash = await rollup.write.cheat__InitialiseValidatorSet([ + args.initialValidators.map(v => ({ + attester: v.toString(), + proposer: v.toString(), + withdrawer: v.toString(), + amount: MINIMUM_STAKE, + })), + ]); + txHashes.push(initiateValidatorSetTxHash); + logger.info(`Initialized validator set (${args.initialValidators.join(', ')}) in tx ${initiateValidatorSetTxHash}`); + } + // @note This value MUST match what is in `constants.nr`. It is currently specified here instead of just importing // because there is circular dependency hell. This is a temporary solution. #3342 // @todo #8084 // fund the portal contract with Fee Juice - const FEE_JUICE_INITIAL_MINT = 200000000000000000000; - const mintTxHash = await feeJuice.write.mint([feeJuicePortalAddress.toString(), FEE_JUICE_INITIAL_MINT], {} as any); + const FEE_JUICE_INITIAL_MINT = 200000000000000000000n; + const mintTxHash = await feeAsset.write.mint([feeJuicePortalAddress.toString(), FEE_JUICE_INITIAL_MINT], {} as any); // @note This is used to ensure we fully wait for the transaction when running against a real chain // otherwise we execute subsequent transactions too soon @@ -415,7 +475,7 @@ export const deployL1Contracts = async ( } logger.info( - `Initialized Fee Juice Portal at ${feeJuicePortalAddress} to bridge between L1 ${feeJuiceAddress} to L2 ${args.l2FeeJuiceAddress}`, + `Initialized Fee Juice Portal at ${feeJuicePortalAddress} to bridge between L1 ${feeAssetAddress} to L2 ${args.l2FeeJuiceAddress}`, ); if (isAnvilTestChain(chain.id)) { @@ -493,7 +553,8 @@ export const deployL1Contracts = async ( registryAddress, inboxAddress, outboxAddress, - feeJuiceAddress, + feeJuiceAddress: feeAssetAddress, + stakingAssetAddress, feeJuicePortalAddress, coinIssuerAddress, rewardDistributorAddress, diff --git a/yarn-project/ethereum/src/index.ts b/yarn-project/ethereum/src/index.ts index d63935600939..10f28f3bd0f1 100644 --- a/yarn-project/ethereum/src/index.ts +++ b/yarn-project/ethereum/src/index.ts @@ -8,3 +8,4 @@ export * from './l1_reader.js'; export * from './utils.js'; export * from './config.js'; export * from './types.js'; +export * from './contracts/index.js'; diff --git a/yarn-project/ethereum/src/l1_contract_addresses.ts b/yarn-project/ethereum/src/l1_contract_addresses.ts index 1733e15fc066..eca35f4edead 100644 --- a/yarn-project/ethereum/src/l1_contract_addresses.ts +++ b/yarn-project/ethereum/src/l1_contract_addresses.ts @@ -20,6 +20,7 @@ export const L1ContractsNames = [ 'rewardDistributorAddress', 'governanceProposerAddress', 'governanceAddress', + 'stakingAssetAddress', ] as const; /** Provides the directory of current L1 contract addresses */ @@ -33,6 +34,7 @@ export const L1ContractAddressesSchema = z.object({ inboxAddress: schemas.EthAddress, outboxAddress: schemas.EthAddress, feeJuiceAddress: schemas.EthAddress, + stakingAssetAddress: schemas.EthAddress, feeJuicePortalAddress: schemas.EthAddress, coinIssuerAddress: schemas.EthAddress, rewardDistributorAddress: schemas.EthAddress, @@ -68,6 +70,11 @@ export const l1ContractAddressesMapping: ConfigMappingsType description: 'The deployed L1 Fee Juice contract address.', parseEnv, }, + 
stakingAssetAddress: { + env: 'STAKING_ASSET_CONTRACT_ADDRESS', + description: 'The deployed L1 staking asset contract address.', + parseEnv, + }, feeJuicePortalAddress: { env: 'FEE_JUICE_PORTAL_CONTRACT_ADDRESS', description: 'The deployed L1 Fee Juice portal contract address.', diff --git a/yarn-project/ethereum/src/l1_reader.ts b/yarn-project/ethereum/src/l1_reader.ts index f2f481968244..2c6340ba0250 100644 --- a/yarn-project/ethereum/src/l1_reader.ts +++ b/yarn-project/ethereum/src/l1_reader.ts @@ -1,4 +1,4 @@ -import { type ConfigMappingsType, numberConfigHelper } from '@aztec/foundation/config'; +import { type ConfigMappingsType, getConfigFromMappings, numberConfigHelper } from '@aztec/foundation/config'; import { type L1ContractAddresses, l1ContractAddressesMapping } from './l1_contract_addresses.js'; @@ -36,3 +36,7 @@ export const l1ReaderConfigMappings: ConfigMappingsType = { ...numberConfigHelper(1_000), }, }; + +export function getL1ReaderConfigFromEnv(): L1ReaderConfig { + return getConfigFromMappings(l1ReaderConfigMappings); +} diff --git a/yarn-project/ethereum/src/test/tx_delayer.test.ts b/yarn-project/ethereum/src/test/tx_delayer.test.ts index f85bcd453cf7..1fc1435e80c9 100644 --- a/yarn-project/ethereum/src/test/tx_delayer.test.ts +++ b/yarn-project/ethereum/src/test/tx_delayer.test.ts @@ -72,7 +72,11 @@ describe('tx_delayer', () => { }, 20000); it('delays a tx sent through a contract', async () => { - const deployTxHash = await client.deployContract({ abi: TestERC20Abi, bytecode: TestERC20Bytecode, args: [] }); + const deployTxHash = await client.deployContract({ + abi: TestERC20Abi, + bytecode: TestERC20Bytecode, + args: ['test', 'TST', account.address], + }); const { contractAddress, blockNumber } = await client.waitForTransactionReceipt({ hash: deployTxHash, pollingInterval: 100, diff --git a/yarn-project/foundation/package.json b/yarn-project/foundation/package.json index cdaaafa04e9a..ae009fa83048 100644 --- a/yarn-project/foundation/package.json +++ b/yarn-project/foundation/package.json @@ -103,6 +103,7 @@ "@koa/cors": "^5.0.0", "@noble/curves": "^1.2.0", "bn.js": "^5.2.1", + "colorette": "^2.0.20", "debug": "^4.3.4", "detect-node": "^2.1.0", "elliptic": "^6.5.4", @@ -117,6 +118,8 @@ "lodash.clonedeepwith": "^4.5.0", "memdown": "^6.1.1", "pako": "^2.1.0", + "pino": "^9.5.0", + "pino-pretty": "^13.0.0", "sha3": "^2.1.4", "zod": "^3.23.8" }, diff --git a/yarn-project/foundation/src/config/env_var.ts b/yarn-project/foundation/src/config/env_var.ts index 41a41143c91c..41e2d61b49f6 100644 --- a/yarn-project/foundation/src/config/env_var.ts +++ b/yarn-project/foundation/src/config/env_var.ts @@ -8,6 +8,7 @@ export type EnvVar = | 'ARCHIVER_POLLING_INTERVAL_MS' | 'ARCHIVER_URL' | 'ARCHIVER_VIEM_POLLING_INTERVAL_MS' + | 'ARCHIVER_BATCH_SIZE' | 'ASSUME_PROVEN_THROUGH_BLOCK_NUMBER' | 'AZTEC_NODE_URL' | 'AZTEC_PORT' @@ -59,6 +60,7 @@ export type EnvVar = | 'MNEMONIC' | 'NETWORK_NAME' | 'NETWORK' + | 'NO_PXE' | 'COIN_ISSUER_CONTRACT_ADDRESS' | 'OTEL_EXPORTER_OTLP_METRICS_ENDPOINT' | 'OTEL_EXPORTER_OTLP_TRACES_ENDPOINT' @@ -145,6 +147,7 @@ export type EnvVar = | 'SEQ_REQUIRED_CONFIRMATIONS' | 'SEQ_TX_POLLING_INTERVAL_MS' | 'SEQ_ENFORCE_TIME_TABLE' + | 'STAKING_ASSET_CONTRACT_ADDRESS' | 'REWARD_DISTRIBUTOR_CONTRACT_ADDRESS' | 'TELEMETRY' | 'TEST_ACCOUNTS' diff --git a/yarn-project/foundation/src/crypto/random/randomness_singleton.ts b/yarn-project/foundation/src/crypto/random/randomness_singleton.ts index f226874a9216..a848f85a6064 100644 --- 
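// Aside: this is the standard env-mapping pattern; each entry names the env var that feeds
// it plus a parser. A minimal sketch of the resolution step, under the assumption (suggested
// by the shapes above) that getConfigFromMappings walks the mapping and reads process.env;
// the real helper lives in @aztec/foundation/config, not here:
//   type Mapping = { env?: string; description: string; parseEnv?: (raw: string) => unknown; defaultValue?: unknown };
//   function resolveFromEnv<T>(mappings: Record<string, Mapping>): T {
//     const cfg: Record<string, unknown> = {};
//     for (const [key, m] of Object.entries(mappings)) {
//       const raw = m.env ? process.env[m.env] : undefined; // e.g. STAKING_ASSET_CONTRACT_ADDRESS
//       cfg[key] = raw !== undefined ? (m.parseEnv ? m.parseEnv(raw) : raw) : m.defaultValue;
//     }
//     return cfg as T;
//   }
// which is all getL1ReaderConfigFromEnv (added above in l1_reader.ts) needs to produce an L1ReaderConfig.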
a/yarn-project/foundation/src/crypto/random/randomness_singleton.ts
+++ b/yarn-project/foundation/src/crypto/random/randomness_singleton.ts
@@ -1,4 +1,4 @@
-import { createDebugLogger } from '../../log/logger.js';
+import { createDebugLogger } from '../../log/pino-logger.js';
 /**
  * A number generator which is used as a source of randomness in the system. If the SEED env variable is set, the
diff --git a/yarn-project/foundation/src/fs/run_in_dir.ts b/yarn-project/foundation/src/fs/run_in_dir.ts
index 9ff007a49b6f..5bd47da89754 100644
--- a/yarn-project/foundation/src/fs/run_in_dir.ts
+++ b/yarn-project/foundation/src/fs/run_in_dir.ts
@@ -1,4 +1,4 @@
-import * as fs from 'fs/promises';
+import { promises as fs } from 'fs';
 import * as path from 'path';
 // Create a random directory underneath a 'base' directory
diff --git a/yarn-project/foundation/src/iterable/index.ts b/yarn-project/foundation/src/iterable/index.ts
index 364baf20342e..ca18f4de0be5 100644
--- a/yarn-project/foundation/src/iterable/index.ts
+++ b/yarn-project/foundation/src/iterable/index.ts
@@ -4,3 +4,4 @@ export * from './sort.js';
 export * from './take.js';
 export * from './all.js';
 export * from './peek.js';
+export * from './toArray.js';
diff --git a/yarn-project/foundation/src/iterable/toArray.ts b/yarn-project/foundation/src/iterable/toArray.ts
new file mode 100644
index 000000000000..af7554d01f8f
--- /dev/null
+++ b/yarn-project/foundation/src/iterable/toArray.ts
@@ -0,0 +1,7 @@
+export async function toArray<T>(iterator: AsyncIterableIterator<T> | IterableIterator<T>): Promise<T[]> {
+  const arr = [];
+  for await (const i of iterator) {
+    arr.push(i);
+  }
+  return arr;
+}
diff --git a/yarn-project/foundation/src/json-rpc/client/safe_json_rpc_client.ts b/yarn-project/foundation/src/json-rpc/client/safe_json_rpc_client.ts
index 3c2bf3c32ba3..2de143063b6b 100644
--- a/yarn-project/foundation/src/json-rpc/client/safe_json_rpc_client.ts
+++ b/yarn-project/foundation/src/json-rpc/client/safe_json_rpc_client.ts
@@ -1,6 +1,6 @@
 import { format } from 'util';
-import { createDebugLogger } from '../../log/logger.js';
+import { createDebugLogger } from '../../log/pino-logger.js';
 import { type ApiSchema, type ApiSchemaFor, schemaHasMethod } from '../../schemas/api.js';
 import { defaultFetch } from './fetch.js';
diff --git a/yarn-project/foundation/src/log/index.ts b/yarn-project/foundation/src/log/index.ts
index 2bf44ed88ba4..8e61bc817825 100644
--- a/yarn-project/foundation/src/log/index.ts
+++ b/yarn-project/foundation/src/log/index.ts
@@ -1,5 +1,5 @@
 export * from './console.js';
 export * from './debug.js';
-export * from './logger.js';
+export * from './pino-logger.js';
 export * from './log_history.js';
 export * from './log_fn.js';
diff --git a/yarn-project/foundation/src/log/log-filters.test.ts b/yarn-project/foundation/src/log/log-filters.test.ts
new file mode 100644
index 000000000000..11cabca8ed08
--- /dev/null
+++ b/yarn-project/foundation/src/log/log-filters.test.ts
@@ -0,0 +1,50 @@
+import { parseEnv } from './log-filters.js';
+
+describe('parseEnv', () => {
+  const defaultLevel = 'info';
+
+  it('returns default level and empty filters when env is empty', () => {
+    const env = '';
+    const [level, filters] = parseEnv(env, defaultLevel);
+    expect(level).toBe(defaultLevel);
+    expect(filters).toEqual([]);
+  });
+
+  it('parses level and filters from env string', () => {
+    const env = 'debug;warn:module1,module2;error:module3';
+    const [level, filters] = parseEnv(env, defaultLevel);
+    expect(level).toBe('debug');
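// Aside: usage sketch for the toArray helper introduced above. It drains a sync or async
// iterator into a plain array, which is what the async kv-store accessors later in this
// diff (keysAsync, entriesAsync) get wrapped with:
//   async function* gen() { yield 1; yield 2; yield 3; }  // illustrative generator
//   const xs = await toArray(gen());                       // xs: number[] === [1, 2, 3]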
expect(filters).toEqual([ + ['module3', 'error'], + ['module2', 'warn'], + ['module1', 'warn'], + ]); + }); + + it('handles spaces in env string', () => { + const env = 'debug; warn: module1, module2; error: module3'; + const [level, filters] = parseEnv(env, defaultLevel); + expect(level).toBe('debug'); + expect(filters).toEqual([ + ['module3', 'error'], + ['module2', 'warn'], + ['module1', 'warn'], + ]); + }); + + it('throws an error for invalid default log level', () => { + const env = 'invalid;module1:warn'; + expect(() => parseEnv(env, defaultLevel)).toThrow('Invalid log level: invalid'); + }); + + it('throws an error for invalid log level in filter', () => { + const env = 'invalid;warn:module'; + expect(() => parseEnv(env, defaultLevel)).toThrow('Invalid log level: invalid'); + }); + + it('throws an error for invalid log filter statement', () => { + const defaultLevel = 'info'; + const env = 'debug;warn:module1;error:'; + expect(() => parseEnv(env, defaultLevel)).toThrow('Invalid log filter statement: error'); + }); +}); diff --git a/yarn-project/foundation/src/log/log-filters.ts b/yarn-project/foundation/src/log/log-filters.ts new file mode 100644 index 000000000000..808818c3fd59 --- /dev/null +++ b/yarn-project/foundation/src/log/log-filters.ts @@ -0,0 +1,49 @@ +import { type LogLevel, LogLevels } from './log-levels.js'; + +export type LogFilters = [string, LogLevel][]; + +export function getLogLevelFromFilters(filters: LogFilters, module: string): LogLevel | undefined { + for (const [filterModule, level] of filters) { + if (module.startsWith(filterModule)) { + return level as LogLevel; + } + } + return undefined; +} + +export function assertLogLevel(level: string): asserts level is LogLevel { + if (!LogLevels.includes(level as LogLevel)) { + throw new Error(`Invalid log level: ${level}`); + } +} + +export function parseEnv(env: string | undefined, defaultLevel: LogLevel): [LogLevel, LogFilters] { + if (!env) { + return [defaultLevel, []]; + } + const [level] = env.split(';', 1); + assertLogLevel(level); + return [level, parseFilters(env.slice(level.length + 1))]; +} + +export function parseFilters(definition: string | undefined): LogFilters { + if (!definition) { + return []; + } + + const statements = definition.split(';'); + const filters: LogFilters = []; + for (const statement of statements) { + const [level] = statement.split(':', 1); + const modules = statement.slice(level.length + 1); + if (!modules || !level) { + throw new Error(`Invalid log filter statement: ${statement}`); + } + const sanitizedLevel = level.trim().toLowerCase(); + assertLogLevel(sanitizedLevel); + for (const module of modules.split(',')) { + filters.push([module.trim().toLowerCase(), sanitizedLevel as LogLevel | 'silent']); + } + } + return filters.reverse(); +} diff --git a/yarn-project/foundation/src/log/log-levels.ts b/yarn-project/foundation/src/log/log-levels.ts new file mode 100644 index 000000000000..d2a630de9f30 --- /dev/null +++ b/yarn-project/foundation/src/log/log-levels.ts @@ -0,0 +1,3 @@ +export const LogLevels = ['silent', 'fatal', 'error', 'warn', 'info', 'verbose', 'debug', 'trace'] as const; + +export type LogLevel = (typeof LogLevels)[number]; diff --git a/yarn-project/foundation/src/log/logger.ts b/yarn-project/foundation/src/log/logger.ts deleted file mode 100644 index 2f5954f6eb07..000000000000 --- a/yarn-project/foundation/src/log/logger.ts +++ /dev/null @@ -1,179 +0,0 @@ -import debug from 'debug'; -import { inspect } from 'util'; - -import { type LogData, type LogFn } from 
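// Aside: a worked example of the LOG_LEVEL grammar handled above. After an initial default
// level, statements are semicolon-separated `level: module1, module2` filters; parseFilters
// reverses the list, so later statements win, and getLogLevelFromFilters matches by
// module-name prefix:
//   parseEnv('info; verbose: sequencer; debug: sequencer:block_builder', 'info')
//     => ['info', [['sequencer:block_builder', 'debug'], ['sequencer', 'verbose']]]
//   getLogLevelFromFilters(filters, 'sequencer:block_builder') => 'debug'
//   getLogLevelFromFilters(filters, 'sequencer')               => 'verbose'
//   getLogLevelFromFilters(filters, 'archiver')                => undefined (default level applies)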
'./log_fn.js'; - -const LogLevels = ['silent', 'error', 'warn', 'info', 'verbose', 'debug'] as const; - -/** - * A valid log severity level. - */ -export type LogLevel = (typeof LogLevels)[number]; - -function getLogLevel() { - const envLogLevel = process.env.LOG_LEVEL?.toLowerCase() as LogLevel; - let defaultLogLevel: LogLevel = 'info'; - if (process.env.DEBUG) { - // if we set DEBUG to a non-empty string, use debug as default - defaultLogLevel = 'debug'; - } else if (process.env.NODE_ENV === 'test') { - // otherwise, be silent in tests as these are frequently ran en-masse - defaultLogLevel = 'silent'; - } - return LogLevels.includes(envLogLevel) ? envLogLevel : defaultLogLevel; -} - -export let currentLevel = getLogLevel(); - -const logElapsedTime = ['1', 'true'].includes(process.env.LOG_ELAPSED_TIME ?? ''); -const firstTimestamp: number = Date.now(); - -function filterNegativePatterns(debugString: string): string { - return debugString - .split(',') - .filter(p => !p.startsWith('-')) - .join(','); -} -function extractNegativePatterns(debugString: string): string[] { - return ( - debugString - .split(',') - .filter(p => p.startsWith('-')) - // Remove the leading '-' from the pattern - .map(p => p.slice(1)) - ); -} - -const namespaces = process.env.DEBUG ?? 'aztec:*'; -debug.enable(filterNegativePatterns(namespaces)); - -/** Log function that accepts an exception object */ -type ErrorLogFn = (msg: string, err?: Error | unknown, data?: LogData) => void; - -/** - * Logger that supports multiple severity levels. - */ -export type Logger = { [K in LogLevel]: LogFn } & { /** Error log function */ error: ErrorLogFn }; - -/** - * Logger that supports multiple severity levels and can be called directly to issue a debug statement. - * Intended as a drop-in replacement for the debug module. - */ -export type DebugLogger = Logger; - -/** - * Creates a new DebugLogger for the current module, defaulting to the LOG_LEVEL env var. - * If DEBUG="[module]" env is set, will enable debug logging if the module matches. - * Uses npm debug for debug level and console.error for other levels. - * @param name - Name of the module. - * @param fixedLogData - Additional data to include in the log message. - * @usage createDebugLogger('aztec:validator'); - * // will always add the validator address to the log labels - * @returns A debug logger. - */ - -export function createDebugLogger(name: string): DebugLogger { - const debugLogger = debug(name); - - const negativePatterns = extractNegativePatterns(namespaces); - const accepted = () => { - return !negativePatterns.some(pattern => name.match(pattern)); - }; - const log = (level: LogLevel, msg: string, data?: LogData) => { - if (accepted()) { - logWithDebug(debugLogger, level, msg, data); - } - }; - const logger = { - silent: () => {}, - error: (msg: string, err?: unknown, data?: LogData) => log('error', fmtErr(msg, err), data), - warn: (msg: string, data?: LogData) => log('warn', msg, data), - info: (msg: string, data?: LogData) => log('info', msg, data), - verbose: (msg: string, data?: LogData) => log('verbose', msg, data), - debug: (msg: string, data?: LogData) => log('debug', msg, data), - }; - return Object.assign((msg: string, data?: LogData) => log('debug', msg, data), logger); -} - -/** - * A function to create a logger that automatically includes fixed data in each log entry. - * @param debugLogger - The base DebugLogger instance to which we attach fixed log data. - * @param fixedLogData - The data to be included in every log entry. 
- * @returns A DebugLogger with log level methods (error, warn, info, verbose, debug) that - * automatically attach `fixedLogData` to every log message. - */ -export function attachedFixedDataToLogger(debugLogger: DebugLogger, fixedLogData: LogData): DebugLogger { - // Helper function to merge fixed data with additional data passed to log entries. - const attach = (data?: LogData) => ({ ...fixedLogData, ...data }); - // Define the logger with all the necessary log level methods. - const logger = { - // Silent log level does nothing. - silent: () => {}, - error: (msg: string, err?: unknown, data?: LogData) => debugLogger.error(fmtErr(msg, err), attach(data)), - warn: (msg: string, data?: LogData) => debugLogger.warn(msg, attach(data)), - info: (msg: string, data?: LogData) => debugLogger.info(msg, attach(data)), - verbose: (msg: string, data?: LogData) => debugLogger.verbose(msg, attach(data)), - debug: (msg: string, data?: LogData) => debugLogger.debug(msg, attach(data)), - }; - return Object.assign((msg: string, data?: LogData) => debugLogger.debug(msg, attach(data)), logger); -} - -/** A callback to capture all logs. */ -export type LogHandler = (level: LogLevel, namespace: string, msg: string, data?: LogData) => void; - -const logHandlers: LogHandler[] = []; - -/** - * Registers a callback for all logs, whether they are emitted in the current log level or not. - * @param handler - Callback to be called on every log. - */ -export function onLog(handler: LogHandler) { - logHandlers.push(handler); -} - -/** Overrides current log level. */ -export function setLevel(level: LogLevel) { - currentLevel = level; -} - -/** - * Logs args to npm debug if enabled or log level is debug, console.error otherwise. - * @param debug - Instance of npm debug. - * @param level - Intended log level. - * @param args - Args to log. - */ -function logWithDebug(debug: debug.Debugger, level: LogLevel, msg: string, data?: LogData) { - for (const handler of logHandlers) { - handler(level, debug.namespace, msg, data); - } - - msg = data ? `${msg} ${fmtLogData(data)}` : msg; - if (debug.enabled && LogLevels.indexOf(level) <= LogLevels.indexOf(currentLevel)) { - if (logElapsedTime) { - const ts = ((Date.now() - firstTimestamp) / 1000).toFixed(3); - debug('%ss [%s] %s', ts, level.toUpperCase(), msg); - } else { - debug('[%s] %s', level.toUpperCase(), msg); - } - } -} - -/** - * Concatenates a log message and an exception. - * @param msg - Log message - * @param err - Error to log - * @returns A string with both the log message and the error message. - */ -function fmtErr(msg: string, err?: Error | unknown): string { - return err ? `${msg}: ${inspect(err)}` : msg; -} - -/** - * Formats structured log data as a string for console output. - * @param data - Optional log data. - */ -export function fmtLogData(data?: LogData): string { - return Object.entries(data ?? {}) - .map(([key, value]) => `${key}=${typeof value === 'object' && 'toString' in value ? 
value.toString() : value}`) - .join(' '); -} diff --git a/yarn-project/foundation/src/log/pino-logger.ts b/yarn-project/foundation/src/log/pino-logger.ts new file mode 100644 index 000000000000..1eafd070c5a0 --- /dev/null +++ b/yarn-project/foundation/src/log/pino-logger.ts @@ -0,0 +1,196 @@ +import { createColors } from 'colorette'; +import isNode from 'detect-node'; +import { pino, symbols } from 'pino'; +import pretty from 'pino-pretty'; +import { type Writable } from 'stream'; +import { inspect } from 'util'; + +import { compactArray } from '../collection/array.js'; +import { getLogLevelFromFilters, parseEnv } from './log-filters.js'; +import { type LogLevel } from './log-levels.js'; +import { type LogData, type LogFn } from './log_fn.js'; + +// TODO(palla/log): Rename to createLogger +export function createDebugLogger(module: string): DebugLogger { + // TODO(palla/log): Rename all module names to remove the aztec prefix + const pinoLogger = logger.child( + { module: module.replace(/^aztec:/, '') }, + { level: getLogLevelFromFilters(logFilters, module) }, + ); + + // We check manually for isLevelEnabled to avoid calling processLogData unnecessarily. + // Note that isLevelEnabled is missing from the browser version of pino. + const logFn = (level: LogLevel, msg: string, data?: LogData) => + isLevelEnabled(pinoLogger, level) && pinoLogger[level](processLogData(data ?? {}), msg); + + return { + silent: () => {}, + // TODO(palla/log): Should we move err to data instead of the text message? + /** Log as fatal. Use when an error has brought down the system. */ + fatal: (msg: string, err?: unknown, data?: LogData) => logFn('fatal', formatErr(msg, err), data), + /** Log as error. Use for errors in general. */ + error: (msg: string, err?: unknown, data?: LogData) => logFn('error', formatErr(msg, err), data), + /** Log as warn. Use for when we stray from the happy path. */ + warn: (msg: string, data?: LogData) => logFn('warn', msg, data), + /** Log as info. Use for providing an operator with info on what the system is doing. */ + info: (msg: string, data?: LogData) => logFn('info', msg, data), + /** Log as verbose. Use for when we need additional insight on what a subsystem is doing. */ + verbose: (msg: string, data?: LogData) => logFn('verbose', msg, data), + /** Log as debug. Use for when we need debugging info to troubleshoot an issue on a specific component. */ + debug: (msg: string, data?: LogData) => logFn('debug', msg, data), + /** Log as trace. Use for when we want to denial-of-service any recipient of the logs. */ + trace: (msg: string, data?: LogData) => logFn('trace', msg, data), + level: pinoLogger.level as LogLevel, + isLevelEnabled: (level: LogLevel) => isLevelEnabled(pinoLogger, level), + }; +} + +// Allow global hooks for processing log data. +// Used for injecting OTEL trace_id in telemetry client. +type LogDataHandler = (data: LogData) => LogData; +const logDataHandlers: LogDataHandler[] = []; + +export function addLogDataHandler(handler: LogDataHandler): void { + logDataHandlers.push(handler); +} + +function processLogData(data: LogData): LogData { + return logDataHandlers.reduce((accum, handler) => handler(accum), data); +} + +// Patch isLevelEnabled missing from pino/browser. +function isLevelEnabled(logger: pino.Logger<'verbose', boolean>, level: LogLevel): boolean { + return typeof logger.isLevelEnabled === 'function' + ? logger.isLevelEnabled(level) + : logger.levels.values[level] >= logger.levels.values[logger.level]; +} + +// Load log levels from environment variables. 
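// Aside: addLogDataHandler above is the seam the telemetry client uses to inject OTEL trace
// context into every log line (which is also why the pino-pretty options below ignore
// trace_id/span_id/trace_flags). A sketch of the intended registration; the span-context
// accessor here is illustrative, not a real export:
//   addLogDataHandler((data: LogData) => {
//     const ctx = getActiveSpanContext(); // hypothetical tracing-layer lookup
//     return ctx ? { ...data, trace_id: ctx.traceId, span_id: ctx.spanId } : data;
//   });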
+const defaultLogLevel = process.env.NODE_ENV === 'test' ? 'silent' : 'info'; +const [logLevel, logFilters] = parseEnv(process.env.LOG_LEVEL, defaultLogLevel); + +// Transport options for pretty logging to stderr via pino-pretty. +const useColor = true; +const { bold, reset } = createColors({ useColor }); +const pinoPrettyOpts = { + destination: 2, + sync: true, + colorize: useColor, + ignore: 'module,pid,hostname,trace_id,span_id,trace_flags', + messageFormat: `${bold('{module}')} ${reset('{msg}')}`, + customLevels: 'fatal:60,error:50,warn:40,info:30,verbose:25,debug:20,trace:10', + customColors: 'fatal:bgRed,error:red,warn:yellow,info:green,verbose:magenta,debug:blue,trace:gray', + minimumLevel: 'trace' as const, +}; +const prettyTransport: pino.TransportSingleOptions = { + target: 'pino-pretty', + options: pinoPrettyOpts, +}; + +// Transport for vanilla stdio logging as JSON. +const stdioTransport: pino.TransportSingleOptions = { + target: 'pino/file', + options: { destination: 2 }, +}; + +// Define custom logging levels for pino. +const customLevels = { verbose: 25 }; +const pinoOpts = { customLevels, useOnlyCustomLevels: false, level: logLevel }; + +export const levels = { + labels: { ...pino.levels.labels, ...Object.fromEntries(Object.entries(customLevels).map(e => e.reverse())) }, + values: { ...pino.levels.values, ...customLevels }, +}; + +// Transport for OpenTelemetry logging. While defining this here is an abstraction leakage since this +// should live in the telemetry-client, it is necessary to ensure that the logger is initialized with +// the correct transport. Tweaking transports of a live pino instance is tricky, and creating a new instance +// would mean that all child loggers created before the telemetry-client is initialized would not have +// this transport configured. Note that the target is defined as the export in the telemetry-client, +// since pino will load this transport separately on a worker thread, to minimize disruption to the main loop. +const otlpEndpoint = process.env.OTEL_EXPORTER_OTLP_LOGS_ENDPOINT; +const otelOpts = { levels }; +const otelTransport: pino.TransportSingleOptions = { + target: '@aztec/telemetry-client/otel-pino-stream', + options: otelOpts, +}; + +function makeLogger() { + if (!isNode) { + // We are on the browser + return pino({ ...pinoOpts, browser: { asObject: false } }); + } else if (process.env.JEST_WORKER_ID) { + // We are on jest, so we need sync logging. We stream to stderr with pretty. + return pino(pinoOpts, pretty(pinoPrettyOpts)); + } else { + // Regular nodejs with transports on worker thread, using pino-pretty for console logging if LOG_JSON + // is not set, and an optional OTLP transport if the OTLP endpoint is provided. + const targets: pino.TransportSingleOptions[] = compactArray([ + ['1', 'true', 'TRUE'].includes(process.env.LOG_JSON ?? '') ? stdioTransport : prettyTransport, + otlpEndpoint ? otelTransport : undefined, + ]); + return pino(pinoOpts, pino.transport({ targets })); + } +} + +const logger = makeLogger(); + +// Log the logger configuration. +logger.verbose( + { + module: 'logger', + ...logFilters.reduce((accum, [module, level]) => ({ ...accum, [`log.${module}`]: level }), {}), + }, + isNode + ? `Logger initialized with level ${logLevel}` + (otlpEndpoint ? ` with OTLP exporter to ${otlpEndpoint}` : '') + : `Browser console logger initialized with level ${logLevel}`, +); + +/** + * Registers an additional destination to the pino logger. + * Use only when working with destinations, not worker transports. 
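+ * @example Mirror every log line into an in-memory sink during a test (sketch; the capture
+ * stream here is illustrative):
+ * ```
+ * import { PassThrough } from 'stream';
+ * const lines: string[] = [];
+ * const capture = new PassThrough().on('data', chunk => lines.push(chunk.toString()));
+ * registerLoggingStream(capture); // subsequent logs go to both the original sink and `capture`
+ * ```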
+ */ +export function registerLoggingStream(stream: Writable): void { + logger.verbose({ module: 'logger' }, `Registering additional logging stream`); + const original = (logger as any)[symbols.streamSym]; + const destination = original + ? pino.multistream( + [ + // Set streams to lowest logging level, and control actual logging from the parent logger + // otherwise streams default to info and refuse to log anything below that. + { level: 'trace', stream: original }, + { level: 'trace', stream }, + ], + { levels: levels.values }, + ) + : stream; + (logger as any)[symbols.streamSym] = destination; +} + +/** Log function that accepts an exception object */ +type ErrorLogFn = (msg: string, err?: Error | unknown, data?: LogData) => void; + +/** + * Logger that supports multiple severity levels. + */ +export type Logger = { [K in LogLevel]: LogFn } & { /** Error log function */ error: ErrorLogFn } & { + level: LogLevel; + isLevelEnabled: (level: LogLevel) => boolean; +}; + +/** + * Logger that supports multiple severity levels and can be called directly to issue a debug statement. + * Intended as a drop-in replacement for the debug module. + * TODO(palla/log): Remove this alias + */ +export type DebugLogger = Logger; + +/** + * Concatenates a log message and an exception. + * @param msg - Log message + * @param err - Error to log + * @returns A string with both the log message and the error message. + */ +function formatErr(msg: string, err?: Error | unknown): string { + return err ? `${msg}: ${inspect(err)}` : msg; +} diff --git a/yarn-project/foundation/src/queue/fifo_memory_queue.ts b/yarn-project/foundation/src/queue/fifo_memory_queue.ts index e2271143ac6f..080133ed71c7 100644 --- a/yarn-project/foundation/src/queue/fifo_memory_queue.ts +++ b/yarn-project/foundation/src/queue/fifo_memory_queue.ts @@ -1,4 +1,4 @@ -import { type DebugLogger } from '../log/logger.js'; +import { type DebugLogger } from '../log/index.js'; import { BaseMemoryQueue } from './base_memory_queue.js'; /** diff --git a/yarn-project/ivc-integration/src/avm_integration.test.ts b/yarn-project/ivc-integration/src/avm_integration.test.ts index 31a14eac16cb..820744c12392 100644 --- a/yarn-project/ivc-integration/src/avm_integration.test.ts +++ b/yarn-project/ivc-integration/src/avm_integration.test.ts @@ -12,7 +12,7 @@ import { BufferReader } from '@aztec/foundation/serialize'; import { type FixedLengthArray } from '@aztec/noir-protocol-circuits-types/types'; import { simulateAvmTestContractGenerateCircuitInputs } from '@aztec/simulator/public/fixtures'; -import fs from 'fs/promises'; +import { promises as fs } from 'fs'; import { tmpdir } from 'node:os'; import os from 'os'; import path from 'path'; @@ -123,14 +123,13 @@ async function proveAvmTestContract(functionName: string, calldata: Fr[] = []): const avmCircuitInputs = await simulateAvmTestContractGenerateCircuitInputs(functionName, calldata); const internalLogger = createDebugLogger('aztec:avm-proving-test'); - const logger = (msg: string, _data?: any) => internalLogger.verbose(msg); // The paths for the barretenberg binary and the write path are hardcoded for now. const bbPath = path.resolve('../../barretenberg/cpp/build/bin/bb'); const bbWorkingDirectory = await fs.mkdtemp(path.join(tmpdir(), 'bb-')); // Then we prove. 
- const proofRes = await generateAvmProof(bbPath, bbWorkingDirectory, avmCircuitInputs, logger); + const proofRes = await generateAvmProof(bbPath, bbWorkingDirectory, avmCircuitInputs, internalLogger); if (proofRes.status === BB_RESULT.FAILURE) { internalLogger.error(`Proof generation failed: ${proofRes.reason}`); } diff --git a/yarn-project/ivc-integration/src/index.ts b/yarn-project/ivc-integration/src/index.ts index 932345648646..dab8804a187e 100644 --- a/yarn-project/ivc-integration/src/index.ts +++ b/yarn-project/ivc-integration/src/index.ts @@ -1,3 +1,5 @@ +import { type CLIENT_IVC_VERIFICATION_KEY_LENGTH_IN_FIELDS } from '@aztec/circuits.js'; + import { type ForeignCallOutput, Noir } from '@noir-lang/noir_js'; import createDebug from 'debug'; import { ungzip } from 'pako'; @@ -5,6 +7,12 @@ import { type Page } from 'playwright'; import MockAppCreatorCircuit from '../artifacts/app_creator.json' assert { type: 'json' }; import MockAppReaderCircuit from '../artifacts/app_reader.json' assert { type: 'json' }; +import MockAppCreatorVk from '../artifacts/keys/app_creator.vk.data.json' assert { type: 'json' }; +import MockAppReaderVk from '../artifacts/keys/app_reader.vk.data.json' assert { type: 'json' }; +import MockPrivateKernelInitVk from '../artifacts/keys/mock_private_kernel_init.vk.data.json' assert { type: 'json' }; +import MockPrivateKernelInnerVk from '../artifacts/keys/mock_private_kernel_inner.vk.data.json' assert { type: 'json' }; +import MockPrivateKernelResetVk from '../artifacts/keys/mock_private_kernel_reset.vk.data.json' assert { type: 'json' }; +import MockPrivateKernelTailVk from '../artifacts/keys/mock_private_kernel_tail.vk.data.json' assert { type: 'json' }; import MockPrivateKernelInitCircuit from '../artifacts/mock_private_kernel_init.json' assert { type: 'json' }; import MockPrivateKernelInnerCircuit from '../artifacts/mock_private_kernel_inner.json' assert { type: 'json' }; import MockPrivateKernelResetCircuit from '../artifacts/mock_private_kernel_reset.json' assert { type: 'json' }; @@ -14,6 +22,7 @@ import type { AppCreatorInputType, AppPublicInputs, AppReaderInputType, + FixedLengthArray, KernelPublicInputs, MockPrivateKernelInitInputType, MockPrivateKernelInnerInputType, @@ -33,6 +42,12 @@ export { MockPrivateKernelResetCircuit, MockPrivateKernelTailCircuit, MockPublicBaseCircuit, + MockAppCreatorVk, + MockAppReaderVk, + MockPrivateKernelInitVk, + MockPrivateKernelInnerVk, + MockPrivateKernelResetVk, + MockPrivateKernelTailVk, }; createDebug.enable('*'); @@ -126,6 +141,13 @@ export async function witnessGenMockPublicBaseCircuit(args: MockPublicBaseInputT }; } +export function getVkAsFields(vk: { + keyAsBytes: string; + keyAsFields: string[]; +}): FixedLengthArray { + return vk.keyAsFields as FixedLengthArray; +} + export async function generate3FunctionTestingIVCStack(): Promise<[string[], Uint8Array[]]> { const tx = { number_of_calls: '0x1', @@ -138,11 +160,13 @@ export async function generate3FunctionTestingIVCStack(): Promise<[string[], Uin const initWitnessGenResult = await witnessGenMockPrivateKernelInitCircuit({ app_inputs: appWitnessGenResult.publicInputs, tx, + app_vk: getVkAsFields(MockAppCreatorVk), }); logger('generated mock private kernel init witness'); const tailWitnessGenResult = await witnessGenMockPrivateKernelTailCircuit({ prev_kernel_public_inputs: initWitnessGenResult.publicInputs, + kernel_vk: getVkAsFields(MockPrivateKernelResetVk), }); logger('generated mock private kernel tail witness'); @@ -168,10 +192,13 @@ export async function 
generate6FunctionTestingIVCStack(): Promise<[string[], Uin const initWitnessGenResult = await witnessGenMockPrivateKernelInitCircuit({ app_inputs: creatorAppWitnessGenResult.publicInputs, tx, + app_vk: getVkAsFields(MockAppCreatorVk), }); const innerWitnessGenResult = await witnessGenMockPrivateKernelInnerCircuit({ prev_kernel_public_inputs: initWitnessGenResult.publicInputs, app_inputs: readerAppWitnessGenResult.publicInputs, + app_vk: getVkAsFields(MockAppReaderVk), + kernel_vk: getVkAsFields(MockPrivateKernelInitVk), }); const resetWitnessGenResult = await witnessGenMockPrivateKernelResetCircuit({ @@ -182,10 +209,12 @@ export async function generate6FunctionTestingIVCStack(): Promise<[string[], Uin MOCK_MAX_COMMITMENTS_PER_TX.toString(), MOCK_MAX_COMMITMENTS_PER_TX.toString(), ], + kernel_vk: getVkAsFields(MockPrivateKernelInnerVk), }); const tailWitnessGenResult = await witnessGenMockPrivateKernelTailCircuit({ prev_kernel_public_inputs: resetWitnessGenResult.publicInputs, + kernel_vk: getVkAsFields(MockPrivateKernelResetVk), }); // Create client IVC proof diff --git a/yarn-project/ivc-integration/src/native_client_ivc_integration.test.ts b/yarn-project/ivc-integration/src/native_client_ivc_integration.test.ts index 6f413b19a949..c08645a932a3 100644 --- a/yarn-project/ivc-integration/src/native_client_ivc_integration.test.ts +++ b/yarn-project/ivc-integration/src/native_client_ivc_integration.test.ts @@ -4,7 +4,7 @@ import { createDebugLogger } from '@aztec/foundation/log'; import { jest } from '@jest/globals'; import { encode } from '@msgpack/msgpack'; -import fs from 'fs/promises'; +import { promises as fs } from 'fs'; import os from 'os'; import path from 'path'; import { fileURLToPath } from 'url'; @@ -40,6 +40,7 @@ describe('Client IVC Integration', () => { path.join(bbWorkingDirectory, 'acir.msgpack'), path.join(bbWorkingDirectory, 'witnesses.msgpack'), logger.info, + true, ); if (provingResult.status === BB_RESULT.FAILURE) { diff --git a/yarn-project/ivc-integration/src/scripts/generate_ts_from_abi.ts b/yarn-project/ivc-integration/src/scripts/generate_ts_from_abi.ts index 80545c2587ac..af44a1f6bdad 100644 --- a/yarn-project/ivc-integration/src/scripts/generate_ts_from_abi.ts +++ b/yarn-project/ivc-integration/src/scripts/generate_ts_from_abi.ts @@ -3,7 +3,7 @@ import { createConsoleLogger } from '@aztec/foundation/log'; import { codegen } from '@noir-lang/noir_codegen'; import { type CompiledCircuit } from '@noir-lang/types'; import { pascalCase } from 'change-case'; -import fs from 'fs/promises'; +import { promises as fs } from 'fs'; const log = createConsoleLogger('aztec:mock-circuits'); diff --git a/yarn-project/ivc-integration/src/wasm_client_ivc_integration.test.ts b/yarn-project/ivc-integration/src/wasm_client_ivc_integration.test.ts index 44c3b7ba2345..5f105b7f71ec 100644 --- a/yarn-project/ivc-integration/src/wasm_client_ivc_integration.test.ts +++ b/yarn-project/ivc-integration/src/wasm_client_ivc_integration.test.ts @@ -6,11 +6,17 @@ import { ungzip } from 'pako'; import { MOCK_MAX_COMMITMENTS_PER_TX, MockAppCreatorCircuit, + MockAppCreatorVk, MockAppReaderCircuit, + MockAppReaderVk, MockPrivateKernelInitCircuit, + MockPrivateKernelInitVk, MockPrivateKernelInnerCircuit, + MockPrivateKernelInnerVk, MockPrivateKernelResetCircuit, + MockPrivateKernelResetVk, MockPrivateKernelTailCircuit, + getVkAsFields, witnessGenCreatorAppMockCircuit, witnessGenMockPrivateKernelInitCircuit, witnessGenMockPrivateKernelInnerCircuit, @@ -70,11 +76,13 @@ describe('Client IVC Integration', 
() => { const initWitnessGenResult = await witnessGenMockPrivateKernelInitCircuit({ app_inputs: appWitnessGenResult.publicInputs, tx, + app_vk: getVkAsFields(MockAppCreatorVk), }); logger.debug('generated mock private kernel init witness'); const tailWitnessGenResult = await witnessGenMockPrivateKernelTailCircuit({ prev_kernel_public_inputs: initWitnessGenResult.publicInputs, + kernel_vk: getVkAsFields(MockPrivateKernelInitVk), }); logger.debug('generated mock private kernel tail witness'); @@ -112,10 +120,13 @@ describe('Client IVC Integration', () => { const initWitnessGenResult = await witnessGenMockPrivateKernelInitCircuit({ app_inputs: creatorAppWitnessGenResult.publicInputs, tx, + app_vk: getVkAsFields(MockAppCreatorVk), }); const innerWitnessGenResult = await witnessGenMockPrivateKernelInnerCircuit({ prev_kernel_public_inputs: initWitnessGenResult.publicInputs, app_inputs: readerAppWitnessGenResult.publicInputs, + app_vk: getVkAsFields(MockAppReaderVk), + kernel_vk: getVkAsFields(MockPrivateKernelInitVk), }); const resetWitnessGenResult = await witnessGenMockPrivateKernelResetCircuit({ @@ -126,10 +137,12 @@ describe('Client IVC Integration', () => { MOCK_MAX_COMMITMENTS_PER_TX.toString(), MOCK_MAX_COMMITMENTS_PER_TX.toString(), ], + kernel_vk: getVkAsFields(MockPrivateKernelInnerVk), }); const tailWitnessGenResult = await witnessGenMockPrivateKernelTailCircuit({ prev_kernel_public_inputs: resetWitnessGenResult.publicInputs, + kernel_vk: getVkAsFields(MockPrivateKernelResetVk), }); // Create client IVC proof diff --git a/yarn-project/ivc-integration/webpack.config.js b/yarn-project/ivc-integration/webpack.config.js index 679267bc82cc..09dafd51eff5 100644 --- a/yarn-project/ivc-integration/webpack.config.js +++ b/yarn-project/ivc-integration/webpack.config.js @@ -30,6 +30,9 @@ export default { ], resolve: { plugins: [new ResolveTypeScriptPlugin()], + fallback: { + tty: false, + }, }, devServer: { hot: false, diff --git a/yarn-project/key-store/src/key_store.test.ts b/yarn-project/key-store/src/key_store.test.ts index a816660a9a79..1435225b6dec 100644 --- a/yarn-project/key-store/src/key_store.test.ts +++ b/yarn-project/key-store/src/key_store.test.ts @@ -1,5 +1,5 @@ import { AztecAddress, Fr, deriveKeys, derivePublicKeyFromSecretKey } from '@aztec/circuits.js'; -import { openTmpStore } from '@aztec/kv-store/utils'; +import { openTmpStore } from '@aztec/kv-store/lmdb'; import { KeyStore } from './key_store.js'; diff --git a/yarn-project/key-store/src/key_store.ts b/yarn-project/key-store/src/key_store.ts index c0d41bd9318d..10d539c40c3a 100644 --- a/yarn-project/key-store/src/key_store.ts +++ b/yarn-project/key-store/src/key_store.ts @@ -15,16 +15,17 @@ import { derivePublicKeyFromSecretKey, } from '@aztec/circuits.js'; import { poseidon2HashWithSeparator } from '@aztec/foundation/crypto'; +import { toArray } from '@aztec/foundation/iterable'; import { type Bufferable, serializeToBuffer } from '@aztec/foundation/serialize'; -import { type AztecKVStore, type AztecMap } from '@aztec/kv-store'; +import { type AztecAsyncKVStore, type AztecAsyncMap } from '@aztec/kv-store'; /** * Used for managing keys. Can hold keys of multiple accounts. 
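 * @example Reads now go through the async kv-store API; the pattern used throughout this
 * class (sketch, with keys as stored by addAccount):
 * ```
 * const buf = await this.#keys.getAsync(`${account.toString()}-npk_m`); // was: this.#keys.get(...)
 * for await (const [k, v] of this.#keys.entriesAsync()) {               // was: this.#keys.entries()
 *   const val = Buffer.from(v); // browser backends yield Uint8Array, node yields Buffer
 * }
 * ```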
*/ export class KeyStore { - #keys: AztecMap; + #keys: AztecAsyncMap; - constructor(database: AztecKVStore) { + constructor(database: AztecAsyncKVStore) { this.#keys = database.openMap('key_store'); } @@ -81,18 +82,18 @@ export class KeyStore { await this.#keys.set(`${account.toString()}-tpk_m_hash`, publicKeys.masterTaggingPublicKey.hash().toBuffer()); // At last, we return the newly derived account address - return Promise.resolve(completeAddress); + return completeAddress; } /** * Retrieves addresses of accounts stored in the key store. * @returns A Promise that resolves to an array of account addresses. */ - public getAccounts(): Promise { - const allMapKeys = Array.from(this.#keys.keys()); + public async getAccounts(): Promise { + const allMapKeys = await toArray(this.#keys.keysAsync()); // We return account addresses based on the map keys that end with '-ivsk_m' const accounts = allMapKeys.filter(key => key.endsWith('-ivsk_m')).map(key => key.split('-')[0]); - return Promise.resolve(accounts.map(account => AztecAddress.fromString(account))); + return accounts.map(account => AztecAddress.fromString(account)); } /** @@ -102,11 +103,11 @@ export class KeyStore { * @param contractAddress - The contract address to silo the secret key in the key validation request with. * @returns The key validation request. */ - public getKeyValidationRequest(pkMHash: Fr, contractAddress: AztecAddress): Promise { - const [keyPrefix, account] = this.getKeyPrefixAndAccount(pkMHash); + public async getKeyValidationRequest(pkMHash: Fr, contractAddress: AztecAddress): Promise { + const [keyPrefix, account] = await this.getKeyPrefixAndAccount(pkMHash); // Now we find the master public key for the account - const pkMBuffer = this.#keys.get(`${account.toString()}-${keyPrefix}pk_m`); + const pkMBuffer = await this.#keys.getAsync(`${account.toString()}-${keyPrefix}pk_m`); if (!pkMBuffer) { throw new Error( `Could not find ${keyPrefix}pk_m for account ${account.toString()} whose address was successfully obtained with ${keyPrefix}pk_m_hash ${pkMHash.toString()}.`, @@ -120,7 +121,7 @@ export class KeyStore { } // Now we find the secret key for the public key - const skMBuffer = this.#keys.get(`${account.toString()}-${keyPrefix}sk_m`); + const skMBuffer = await this.#keys.getAsync(`${account.toString()}-${keyPrefix}sk_m`); if (!skMBuffer) { throw new Error( `Could not find ${keyPrefix}sk_m for account ${account.toString()} whose address was successfully obtained with ${keyPrefix}pk_m_hash ${pkMHash.toString()}.`, @@ -137,7 +138,7 @@ export class KeyStore { // At last we silo the secret key and return the key validation request const skApp = computeAppSecretKey(skM, contractAddress, keyPrefix!); - return Promise.resolve(new KeyValidationRequest(pkM, skApp)); + return new KeyValidationRequest(pkM, skApp); } /** @@ -147,13 +148,13 @@ export class KeyStore { * @returns The master nullifier public key for the account. */ public async getMasterNullifierPublicKey(account: AztecAddress): Promise { - const masterNullifierPublicKeyBuffer = this.#keys.get(`${account.toString()}-npk_m`); + const masterNullifierPublicKeyBuffer = await this.#keys.getAsync(`${account.toString()}-npk_m`); if (!masterNullifierPublicKeyBuffer) { throw new Error( `Account ${account.toString()} does not exist. 
Registered accounts: ${await this.getAccounts()}.`, ); } - return Promise.resolve(Point.fromBuffer(masterNullifierPublicKeyBuffer)); + return Point.fromBuffer(masterNullifierPublicKeyBuffer); } /** @@ -163,13 +164,13 @@ export class KeyStore { * @returns The master incoming viewing public key for the account. */ public async getMasterIncomingViewingPublicKey(account: AztecAddress): Promise { - const masterIncomingViewingPublicKeyBuffer = this.#keys.get(`${account.toString()}-ivpk_m`); + const masterIncomingViewingPublicKeyBuffer = await this.#keys.getAsync(`${account.toString()}-ivpk_m`); if (!masterIncomingViewingPublicKeyBuffer) { throw new Error( `Account ${account.toString()} does not exist. Registered accounts: ${await this.getAccounts()}.`, ); } - return Promise.resolve(Point.fromBuffer(masterIncomingViewingPublicKeyBuffer)); + return Point.fromBuffer(masterIncomingViewingPublicKeyBuffer); } /** @@ -179,13 +180,13 @@ export class KeyStore { * @returns A Promise that resolves to the master outgoing viewing key. */ public async getMasterOutgoingViewingPublicKey(account: AztecAddress): Promise { - const masterOutgoingViewingPublicKeyBuffer = this.#keys.get(`${account.toString()}-ovpk_m`); + const masterOutgoingViewingPublicKeyBuffer = await this.#keys.getAsync(`${account.toString()}-ovpk_m`); if (!masterOutgoingViewingPublicKeyBuffer) { throw new Error( `Account ${account.toString()} does not exist. Registered accounts: ${await this.getAccounts()}.`, ); } - return Promise.resolve(Point.fromBuffer(masterOutgoingViewingPublicKeyBuffer)); + return Point.fromBuffer(masterOutgoingViewingPublicKeyBuffer); } /** @@ -195,13 +196,13 @@ export class KeyStore { * @returns A Promise that resolves to the master tagging key. */ public async getMasterTaggingPublicKey(account: AztecAddress): Promise { - const masterTaggingPublicKeyBuffer = this.#keys.get(`${account.toString()}-tpk_m`); + const masterTaggingPublicKeyBuffer = await this.#keys.getAsync(`${account.toString()}-tpk_m`); if (!masterTaggingPublicKeyBuffer) { throw new Error( `Account ${account.toString()} does not exist. Registered accounts: ${await this.getAccounts()}.`, ); } - return Promise.resolve(Point.fromBuffer(masterTaggingPublicKeyBuffer)); + return Point.fromBuffer(masterTaggingPublicKeyBuffer); } /** @@ -211,15 +212,13 @@ export class KeyStore { * @returns A Promise that resolves to the master incoming viewing secret key. */ public async getMasterIncomingViewingSecretKey(account: AztecAddress): Promise { - const masterIncomingViewingSecretKeyBuffer = this.#keys.get(`${account.toString()}-ivsk_m`); + const masterIncomingViewingSecretKeyBuffer = await this.#keys.getAsync(`${account.toString()}-ivsk_m`); if (!masterIncomingViewingSecretKeyBuffer) { throw new Error( `Account ${account.toString()} does not exist. Registered accounts: ${await this.getAccounts()}.`, ); } - const masterIncomingViewingSecretKey = GrumpkinScalar.fromBuffer(masterIncomingViewingSecretKeyBuffer); - - return Promise.resolve(masterIncomingViewingSecretKey); + return GrumpkinScalar.fromBuffer(masterIncomingViewingSecretKeyBuffer); } /** @@ -230,7 +229,7 @@ export class KeyStore { * @returns A Promise that resolves to the application outgoing viewing secret key. 
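 * @example The app-siloing formula implemented below, in one line:
 * skApp = poseidon2HashWithSeparator([ovskM.hi, ovskM.lo, app], GeneratorIndex.OVSK_M)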
*/ public async getAppOutgoingViewingSecretKey(account: AztecAddress, app: AztecAddress): Promise { - const masterOutgoingViewingSecretKeyBuffer = this.#keys.get(`${account.toString()}-ovsk_m`); + const masterOutgoingViewingSecretKeyBuffer = await this.#keys.getAsync(`${account.toString()}-ovsk_m`); if (!masterOutgoingViewingSecretKeyBuffer) { throw new Error( `Account ${account.toString()} does not exist. Registered accounts: ${await this.getAccounts()}.`, @@ -238,11 +237,9 @@ export class KeyStore { } const masterOutgoingViewingSecretKey = GrumpkinScalar.fromBuffer(masterOutgoingViewingSecretKeyBuffer); - return Promise.resolve( - poseidon2HashWithSeparator( - [masterOutgoingViewingSecretKey.hi, masterOutgoingViewingSecretKey.lo, app], - GeneratorIndex.OVSK_M, - ), + return poseidon2HashWithSeparator( + [masterOutgoingViewingSecretKey.hi, masterOutgoingViewingSecretKey.lo, app], + GeneratorIndex.OVSK_M, ); } @@ -253,10 +250,10 @@ export class KeyStore { * @returns A Promise that resolves to sk_m. * @dev Used when feeding the sk_m to the kernel circuit for keys verification. */ - public getMasterSecretKey(pkM: PublicKey): Promise { - const [keyPrefix, account] = this.getKeyPrefixAndAccount(pkM); + public async getMasterSecretKey(pkM: PublicKey): Promise { + const [keyPrefix, account] = await this.getKeyPrefixAndAccount(pkM); - const secretKeyBuffer = this.#keys.get(`${account.toString()}-${keyPrefix}sk_m`); + const secretKeyBuffer = await this.#keys.getAsync(`${account.toString()}-${keyPrefix}sk_m`); if (!secretKeyBuffer) { throw new Error( `Could not find ${keyPrefix}sk_m for ${keyPrefix}pk_m ${pkM.toString()}. This should not happen.`, @@ -277,10 +274,11 @@ export class KeyStore { * @dev Note that this is quite inefficient but it should not matter because there should never be too many keys * in the key store. 
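 * @example Looking up a master nullifier public key pkM yields its prefix/account pair by
 * scanning entriesAsync() and comparing serialized values, e.g. ['n', accountAddress]
 * (assuming the 'n' prefix that the `npk_m` entries use).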
*/ - public getKeyPrefixAndAccount(value: Bufferable): [KeyPrefix, AztecAddress] { + public async getKeyPrefixAndAccount(value: Bufferable): Promise<[KeyPrefix, AztecAddress]> { const valueBuffer = serializeToBuffer(value); - for (const [key, val] of this.#keys.entries()) { - if (val.equals(valueBuffer)) { + for await (const [key, val] of this.#keys.entriesAsync()) { + // Browser returns Uint8Array, Node.js returns Buffer + if (Buffer.from(val).equals(valueBuffer)) { for (const prefix of KEY_PREFIXES) { if (key.includes(`-${prefix}`)) { const account = AztecAddress.fromString(key.split('-')[0]); diff --git a/yarn-project/kv-store/.gitignore b/yarn-project/kv-store/.gitignore new file mode 100644 index 000000000000..68c5d18f00dc --- /dev/null +++ b/yarn-project/kv-store/.gitignore @@ -0,0 +1,5 @@ +node_modules/ +/test-results/ +/playwright-report/ +/blob-report/ +/playwright/.cache/ diff --git a/yarn-project/kv-store/.mocharc.json b/yarn-project/kv-store/.mocharc.json new file mode 100644 index 000000000000..f6d8ef05651a --- /dev/null +++ b/yarn-project/kv-store/.mocharc.json @@ -0,0 +1,13 @@ +{ + "require": "ts-node/register", + "extensions": [ + "ts" + ], + "spec": [ + "./src/**/!(indexeddb)/*.test.ts" + ], + "node-option": [ + "experimental-specifier-resolution=node", + "loader=ts-node/esm" + ] + } diff --git a/yarn-project/kv-store/package.json b/yarn-project/kv-store/package.json index bacc49e1a380..6bdc1eeb3e3f 100644 --- a/yarn-project/kv-store/package.json +++ b/yarn-project/kv-store/package.json @@ -5,7 +5,7 @@ "exports": { ".": "./dest/interfaces/index.js", "./lmdb": "./dest/lmdb/index.js", - "./utils": "./dest/utils.js", + "./indexeddb": "./dest/indexeddb/index.js", "./stores": "./dest/stores/index.js", "./config": "./dest/config.js" }, @@ -15,19 +15,53 @@ "clean": "rm -rf ./dest .tsbuildinfo", "formatting": "run -T prettier --check ./src && run -T eslint ./src", "formatting:fix": "run -T eslint --fix ./src && run -T prettier -w ./src", - "test": "NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --passWithNoTests", - "start": "DEBUG='aztec:*' && node ./dest/bin/index.js" + "test:node": "NODE_NO_WARNINGS=1 mocha --config ./.mocharc.json", + "test:browser": "wtr --config ./web-test-runner.config.mjs", + "test": "yarn test:node && yarn test:browser" }, "inherits": [ - "../package.common.json" + "../package.common.json", + "./package.local.json" ], + "dependencies": { + "@aztec/circuit-types": "workspace:^", + "@aztec/ethereum": "workspace:^", + "@aztec/foundation": "workspace:^", + "idb": "^8.0.0", + "lmdb": "^3.0.6" + }, + "devDependencies": { + "@aztec/circuits.js": "workspace:^", + "@jest/globals": "^29.5.0", + "@types/chai": "^5.0.1", + "@types/chai-as-promised": "^8.0.1", + "@types/jest": "^29.5.0", + "@types/mocha": "^10.0.10", + "@types/mocha-each": "^2.0.4", + "@types/node": "^18.7.23", + "@web/dev-server-esbuild": "^1.0.3", + "@web/test-runner": "^0.19.0", + "@web/test-runner-playwright": "^0.11.0", + "chai": "^5.1.2", + "chai-as-promised": "^8.0.1", + "jest": "^29.5.0", + "mocha": "^10.8.2", + "mocha-each": "^2.0.1", + "ts-node": "^10.9.1", + "typescript": "^5.0.4" + }, + "files": [ + "dest", + "src", + "!*.test.*" + ], + "engines": { + "node": ">=18" + }, "jest": { - "moduleNameMapper": { - "^(\\.{1,2}/.*)\\.[cm]?js$": "$1" - }, - "testRegex": "./src/.*\\.test\\.(js|mjs|ts)$", - "rootDir": "./src", - "workerThreads": true, + "extensionsToTreatAsEsm": [ + ".ts" + ], "transform": { "^.+\\.tsx?$": [ "@swc/jest", @@ -44,9 +78,9 @@ } ] }, - 
"extensionsToTreatAsEsm": [ - ".ts" - ], + "moduleNameMapper": { + "^(\\.{1,2}/.*)\\.[cm]?js$": "$1" + }, "reporters": [ [ "default", @@ -54,30 +88,8 @@ "summaryThreshold": 9999 } ] - ] - }, - "dependencies": { - "@aztec/circuit-types": "workspace:^", - "@aztec/ethereum": "workspace:^", - "@aztec/foundation": "workspace:^", - "lmdb": "^3.0.6" - }, - "devDependencies": { - "@aztec/circuits.js": "workspace:^", - "@jest/globals": "^29.5.0", - "@types/jest": "^29.5.0", - "@types/node": "^18.7.23", - "jest": "^29.5.0", - "jest-mock-extended": "^3.0.3", - "ts-node": "^10.9.1", - "typescript": "^5.0.4" - }, - "files": [ - "dest", - "src", - "!*.test.*" - ], - "engines": { - "node": ">=18" + ], + "testRegex": "./src/.*\\.test\\.(js|mjs|ts)$", + "rootDir": "./src" } } diff --git a/yarn-project/kv-store/package.local.json b/yarn-project/kv-store/package.local.json new file mode 100644 index 000000000000..1604caf6ae89 --- /dev/null +++ b/yarn-project/kv-store/package.local.json @@ -0,0 +1,5 @@ +{ + "scripts": { + "test": "yarn test:node && yarn test:browser" + } +} \ No newline at end of file diff --git a/yarn-project/kv-store/src/indexeddb/array.test.ts b/yarn-project/kv-store/src/indexeddb/array.test.ts new file mode 100644 index 000000000000..37d108fc99a5 --- /dev/null +++ b/yarn-project/kv-store/src/indexeddb/array.test.ts @@ -0,0 +1,7 @@ +import { describeAztecArray } from '../interfaces/array_test_suite.js'; +import { mockLogger } from '../interfaces/utils.js'; +import { AztecIndexedDBStore } from './store.js'; + +describe('IndexedDBArray', () => { + describeAztecArray('AztecArray', async () => await AztecIndexedDBStore.open(mockLogger, undefined, true)); +}); diff --git a/yarn-project/kv-store/src/indexeddb/array.ts b/yarn-project/kv-store/src/indexeddb/array.ts new file mode 100644 index 000000000000..f1297143ae22 --- /dev/null +++ b/yarn-project/kv-store/src/indexeddb/array.ts @@ -0,0 +1,118 @@ +import { type IDBPDatabase, type IDBPObjectStore } from 'idb'; + +import { type AztecAsyncArray } from '../interfaces/array.js'; +import { type AztecIDBSchema } from './store.js'; + +/** + * A persistent array backed by IndexedDB. + */ +export class IndexedDBAztecArray implements AztecAsyncArray { + #_db?: IDBPObjectStore; + #rootDB: IDBPDatabase; + #container: string; + #name: string; + + constructor(rootDB: IDBPDatabase, name: string) { + this.#rootDB = rootDB; + this.#name = name; + this.#container = `array:${this.#name}`; + } + + set db(db: IDBPObjectStore | undefined) { + this.#_db = db; + } + + get db(): IDBPObjectStore { + return this.#_db ? this.#_db : this.#rootDB.transaction('data', 'readwrite').store; + } + + async lengthAsync(): Promise { + return ( + (await this.db + .index('key') + .count(IDBKeyRange.bound([this.#container, this.#name], [this.#container, this.#name]))) ?? 
0 + ); + } + + async push(...vals: T[]): Promise { + let length = await this.lengthAsync(); + for (const val of vals) { + await this.db.put({ + value: val, + container: this.#container, + key: this.#name, + keyCount: length + 1, + slot: this.#slot(length), + }); + length += 1; + } + return length; + } + + async pop(): Promise { + const length = await this.lengthAsync(); + if (length === 0) { + return undefined; + } + + const slot = this.#slot(length - 1); + const data = await this.db.get(slot); + await this.db.delete(slot); + + return data?.value; + } + + async atAsync(index: number): Promise { + const length = await this.lengthAsync(); + + if (index < 0) { + index = length + index; + } + + const data = await this.db.get(this.#slot(index)); + return data?.value; + } + + async setAt(index: number, val: T): Promise { + const length = await this.lengthAsync(); + + if (index < 0) { + index = length + index; + } + + if (index < 0 || index >= length) { + return Promise.resolve(false); + } + + await this.db.put({ + value: val, + container: this.#container, + key: this.#name, + keyCount: index + 1, + slot: this.#slot(index), + }); + return true; + } + + async *entriesAsync(): AsyncIterableIterator<[number, T]> { + const index = this.db.index('key'); + const rangeQuery = IDBKeyRange.bound([this.#container, this.#name], [this.#container, this.#name]); + for await (const cursor of index.iterate(rangeQuery)) { + yield [cursor.value.keyCount - 1, cursor.value.value] as [number, T]; + } + } + + async *valuesAsync(): AsyncIterableIterator { + for await (const [_, value] of this.entriesAsync()) { + yield value; + } + } + + [Symbol.asyncIterator](): AsyncIterableIterator { + return this.valuesAsync(); + } + + #slot(index: number): string { + return `array:${this.#name}:slot:${index}`; + } +} diff --git a/yarn-project/kv-store/src/indexeddb/index.ts b/yarn-project/kv-store/src/indexeddb/index.ts new file mode 100644 index 000000000000..3d42a057da9a --- /dev/null +++ b/yarn-project/kv-store/src/indexeddb/index.ts @@ -0,0 +1,37 @@ +import { type Logger, createDebugLogger } from '@aztec/foundation/log'; + +import { type DataStoreConfig } from '../config.js'; +import { initStoreForRollup } from '../utils.js'; +import { AztecIndexedDBStore } from './store.js'; + +export { AztecIndexedDBStore } from './store.js'; + +export async function createStore( + name: string, + config: DataStoreConfig, + log: Logger = createDebugLogger('aztec:kv-store'), +) { + let { dataDirectory } = config; + if (typeof dataDirectory !== 'undefined') { + dataDirectory = `${dataDirectory}/${name}`; + } + + log.info( + dataDirectory + ? `Creating ${name} data store at directory ${dataDirectory} with map size ${config.dataStoreMapSizeKB} KB` + : `Creating ${name} ephemeral data store with map size ${config.dataStoreMapSizeKB} KB`, + ); + const store = await AztecIndexedDBStore.open( + createDebugLogger('aztec:kv-store:indexeddb'), + dataDirectory ?? 
'', + false, + ); + if (config.l1Contracts?.rollupAddress) { + return initStoreForRollup(store, config.l1Contracts.rollupAddress, log); + } + return store; +} + +export function openTmpStore(ephemeral: boolean = false): Promise { + return AztecIndexedDBStore.open(createDebugLogger('aztec:kv-store:indexeddb'), undefined, ephemeral); +} diff --git a/yarn-project/kv-store/src/indexeddb/map.test.ts b/yarn-project/kv-store/src/indexeddb/map.test.ts new file mode 100644 index 000000000000..f8b12c808191 --- /dev/null +++ b/yarn-project/kv-store/src/indexeddb/map.test.ts @@ -0,0 +1,7 @@ +import { describeAztecMap } from '../interfaces/map_test_suite.js'; +import { mockLogger } from '../interfaces/utils.js'; +import { AztecIndexedDBStore } from './store.js'; + +describe('IndexedDBMap', () => { + describeAztecMap('AztecMap', async () => await AztecIndexedDBStore.open(mockLogger, undefined, true)); +}); diff --git a/yarn-project/kv-store/src/indexeddb/map.ts b/yarn-project/kv-store/src/indexeddb/map.ts new file mode 100644 index 000000000000..5002877b6c67 --- /dev/null +++ b/yarn-project/kv-store/src/indexeddb/map.ts @@ -0,0 +1,142 @@ +import { type IDBPDatabase, type IDBPObjectStore } from 'idb'; + +import { type Key, type Range } from '../interfaces/common.js'; +import { type AztecAsyncMultiMap } from '../interfaces/map.js'; +import { type AztecIDBSchema } from './store.js'; + +/** + * A map backed by IndexedDB. + */ +export class IndexedDBAztecMap implements AztecAsyncMultiMap { + protected name: string; + #container: string; + + #_db?: IDBPObjectStore; + #rootDB: IDBPDatabase; + + constructor(rootDB: IDBPDatabase, mapName: string) { + this.name = mapName; + this.#container = `map:${mapName}`; + this.#rootDB = rootDB; + } + + set db(db: IDBPObjectStore | undefined) { + this.#_db = db; + } + + get db(): IDBPObjectStore { + return this.#_db ? 
this.#_db : this.#rootDB.transaction('data', 'readwrite').store; + } + + async getAsync(key: K): Promise { + const data = await this.db.get(this.#slot(key)); + return data?.value as V; + } + + async *getValuesAsync(key: K): AsyncIterableIterator { + const index = this.db.index('keyCount'); + const rangeQuery = IDBKeyRange.bound( + [this.#container, this.#normalizeKey(key), 0], + [this.#container, this.#normalizeKey(key), Number.MAX_SAFE_INTEGER], + false, + false, + ); + for await (const cursor of index.iterate(rangeQuery)) { + yield cursor.value.value as V; + } + } + + async hasAsync(key: K): Promise { + const result = (await this.getAsync(key)) !== undefined; + return result; + } + + async set(key: K, val: V): Promise { + const count = await this.db + .index('key') + .count(IDBKeyRange.bound([this.#container, this.#normalizeKey(key)], [this.#container, this.#normalizeKey(key)])); + await this.db.put({ + value: val, + container: this.#container, + key: this.#normalizeKey(key), + keyCount: count + 1, + slot: this.#slot(key, count), + }); + } + + swap(_key: K, _fn: (val: V | undefined) => V): Promise { + throw new Error('Not implemented'); + } + + async setIfNotExists(key: K, val: V): Promise { + if (!(await this.hasAsync(key))) { + await this.set(key, val); + return true; + } + return false; + } + + async delete(key: K): Promise { + await this.db.delete(this.#slot(key)); + } + + async deleteValue(key: K, val: V): Promise { + const index = this.db.index('keyCount'); + const rangeQuery = IDBKeyRange.bound( + [this.#container, this.#normalizeKey(key), 0], + [this.#container, this.#normalizeKey(key), Number.MAX_SAFE_INTEGER], + false, + false, + ); + for await (const cursor of index.iterate(rangeQuery)) { + if (JSON.stringify(cursor.value.value) === JSON.stringify(val)) { + await cursor.delete(); + return; + } + } + } + + async *entriesAsync(range: Range = {}): AsyncIterableIterator<[K, V]> { + const index = this.db.index('key'); + const rangeQuery = IDBKeyRange.bound( + [this.#container, range.start ?? ''], + [this.#container, range.end ?? '\uffff'], + !!range.reverse, + !range.reverse, + ); + let count = 0; + for await (const cursor of index.iterate(rangeQuery, range.reverse ? 'prev' : 'next')) { + if (range.limit && count >= range.limit) { + return; + } + yield [cursor.value.key, cursor.value.value] as [K, V]; + count++; + } + } + + async *valuesAsync(range: Range = {}): AsyncIterableIterator { + for await (const [_, value] of this.entriesAsync(range)) { + yield value; + } + } + + async *keysAsync(range: Range = {}): AsyncIterableIterator { + for await (const [key, _] of this.entriesAsync(range)) { + yield this.#denormalizeKey(key as string); + } + } + + #denormalizeKey(key: string): K { + const denormalizedKey = (key as string).split(',').map(part => (isNaN(parseInt(part)) ? part : parseInt(part))); + return (denormalizedKey.length > 1 ? denormalizedKey : key) as K; + } + + #normalizeKey(key: K): string { + const arrayKey = Array.isArray(key) ? 
key : [key]; + return arrayKey.join(','); + } + + #slot(key: K, index: number = 0): string { + return `map:${this.name}:slot:${this.#normalizeKey(key)}:${index}`; + } +} diff --git a/yarn-project/kv-store/src/indexeddb/set.test.ts b/yarn-project/kv-store/src/indexeddb/set.test.ts new file mode 100644 index 000000000000..28255ae9a499 --- /dev/null +++ b/yarn-project/kv-store/src/indexeddb/set.test.ts @@ -0,0 +1,7 @@ +import { describeAztecSet } from '../interfaces/set_test_suite.js'; +import { mockLogger } from '../interfaces/utils.js'; +import { AztecIndexedDBStore } from './store.js'; + +describe('IndexedDBSet', () => { + describeAztecSet('AztecSet', async () => await AztecIndexedDBStore.open(mockLogger, undefined, true)); +}); diff --git a/yarn-project/kv-store/src/indexeddb/set.ts b/yarn-project/kv-store/src/indexeddb/set.ts new file mode 100644 index 000000000000..c4c4db9a306f --- /dev/null +++ b/yarn-project/kv-store/src/indexeddb/set.ts @@ -0,0 +1,37 @@ +import { type IDBPDatabase, type IDBPObjectStore } from 'idb'; + +import { type Key, type Range } from '../interfaces/common.js'; +import { type AztecAsyncSet } from '../interfaces/set.js'; +import { IndexedDBAztecMap } from './map.js'; +import { type AztecIDBSchema } from './store.js'; + +/** + * A set backed by IndexedDB. + */ +export class IndexedDBAztecSet implements AztecAsyncSet { + private map: IndexedDBAztecMap; + + constructor(rootDb: IDBPDatabase, mapName: string) { + this.map = new IndexedDBAztecMap(rootDb, mapName); + } + + set db(db: IDBPObjectStore | undefined) { + this.map.db = db; + } + + hasAsync(key: K): Promise { + return this.map.hasAsync(key); + } + + add(key: K): Promise { + return this.map.set(key, true); + } + + delete(key: K): Promise { + return this.map.delete(key); + } + + async *entriesAsync(range: Range = {}): AsyncIterableIterator { + yield* this.map.keysAsync(range); + } +} diff --git a/yarn-project/kv-store/src/indexeddb/singleton.test.ts b/yarn-project/kv-store/src/indexeddb/singleton.test.ts new file mode 100644 index 000000000000..08cb63034525 --- /dev/null +++ b/yarn-project/kv-store/src/indexeddb/singleton.test.ts @@ -0,0 +1,7 @@ +import { describeAztecSingleton } from '../interfaces/singleton_test_suite.js'; +import { mockLogger } from '../interfaces/utils.js'; +import { AztecIndexedDBStore } from './store.js'; + +describe('IndexedDBSingleton', () => { + describeAztecSingleton('AztecSingleton', async () => await AztecIndexedDBStore.open(mockLogger, undefined, true)); +}); diff --git a/yarn-project/kv-store/src/indexeddb/singleton.ts b/yarn-project/kv-store/src/indexeddb/singleton.ts new file mode 100644 index 000000000000..19750cfbdb61 --- /dev/null +++ b/yarn-project/kv-store/src/indexeddb/singleton.ts @@ -0,0 +1,49 @@ +import { type IDBPDatabase, type IDBPObjectStore } from 'idb'; + +import { type AztecAsyncSingleton } from '../interfaces/singleton.js'; +import { type AztecIDBSchema } from './store.js'; + +/** + * Stores a single value in IndexedDB. + */ +export class IndexedDBAztecSingleton implements AztecAsyncSingleton { + #_db?: IDBPObjectStore; + #rootDB: IDBPDatabase; + #container: string; + #slot: string; + + constructor(rootDB: IDBPDatabase, name: string) { + this.#rootDB = rootDB; + this.#container = `singleton:${name}`; + this.#slot = `singleton:${name}:value`; + } + + set db(db: IDBPObjectStore | undefined) { + this.#_db = db; + } + + get db(): IDBPObjectStore { + return this.#_db ? 
this.#_db : this.#rootDB.transaction('data', 'readwrite').store; + } + + async getAsync(): Promise { + const data = await this.db.get(this.#slot); + return data?.value as T; + } + + async set(val: T): Promise { + const result = await this.db.put({ + container: this.#container, + slot: this.#slot, + key: this.#slot, + keyCount: 1, + value: val, + }); + return result !== undefined; + } + + async delete(): Promise { + await this.db.delete(this.#slot); + return true; + } +} diff --git a/yarn-project/kv-store/src/indexeddb/store.test.ts b/yarn-project/kv-store/src/indexeddb/store.test.ts new file mode 100644 index 000000000000..d2f529b62215 --- /dev/null +++ b/yarn-project/kv-store/src/indexeddb/store.test.ts @@ -0,0 +1,12 @@ +import { describeAztecStore } from '../interfaces/store_test_suite.js'; +import { mockLogger } from '../interfaces/utils.js'; +import { AztecIndexedDBStore } from './store.js'; + +describe('AztecIndexedDBStore', () => { + describeAztecStore( + 'AztecStore', + async () => await AztecIndexedDBStore.open(mockLogger, 'test', false), + async () => await AztecIndexedDBStore.open(mockLogger, undefined, false), + async () => await AztecIndexedDBStore.open(mockLogger, undefined, true), + ); +}); diff --git a/yarn-project/kv-store/src/indexeddb/store.ts b/yarn-project/kv-store/src/indexeddb/store.ts new file mode 100644 index 000000000000..ad841c0a9ab6 --- /dev/null +++ b/yarn-project/kv-store/src/indexeddb/store.ts @@ -0,0 +1,192 @@ +import { type Logger } from '@aztec/foundation/log'; + +import { type DBSchema, type IDBPDatabase, openDB } from 'idb'; + +import { type AztecAsyncArray } from '../interfaces/array.js'; +import { type Key } from '../interfaces/common.js'; +import { type AztecAsyncCounter } from '../interfaces/counter.js'; +import { type AztecAsyncMap, type AztecAsyncMultiMap } from '../interfaces/map.js'; +import { type AztecAsyncSet } from '../interfaces/set.js'; +import { type AztecAsyncSingleton } from '../interfaces/singleton.js'; +import { type AztecAsyncKVStore } from '../interfaces/store.js'; +import { IndexedDBAztecArray } from './array.js'; +import { IndexedDBAztecMap } from './map.js'; +import { IndexedDBAztecSet } from './set.js'; +import { IndexedDBAztecSingleton } from './singleton.js'; + +export type StoredData = { value: V; container: string; key: string; keyCount: number; slot: string }; + +export interface AztecIDBSchema extends DBSchema { + data: { + value: StoredData; + key: string; + indexes: { container: string; key: string; keyCount: number }; + }; +} + +/** + * A key-value store backed by IndexedDB. + */ + +export class AztecIndexedDBStore implements AztecAsyncKVStore { + #log: Logger; + #rootDB: IDBPDatabase; + #name: string; + + #containers = new Set< + IndexedDBAztecArray | IndexedDBAztecMap | IndexedDBAztecSet | IndexedDBAztecSingleton + >(); + + constructor(rootDB: IDBPDatabase, public readonly isEphemeral: boolean, log: Logger, name: string) { + this.#rootDB = rootDB; + this.#log = log; + this.#name = name; + } + /** + * Creates a new AztecKVStore backed by IndexedDB. The path to the database is optional. If not provided, + * the database will be stored in a temporary location and be deleted when the process exits. + * + * + * @param path - A path on the disk to store the database. Optional + * @param ephemeral - true if the store should only exist in memory and not automatically be flushed to disk. Optional + * @param log - A logger to use. 
Optional + * @returns The store + */ + static async open(log: Logger, name?: string, ephemeral: boolean = false): Promise { + name = name && !ephemeral ? name : self.crypto.getRandomValues(new Uint8Array(16)).join(''); + log.debug(`Opening IndexedDB ${ephemeral ? 'temp ' : ''}database with name ${name}`); + const rootDB = await openDB(name, 1, { + upgrade(db) { + const objectStore = db.createObjectStore('data', { keyPath: 'slot' }); + + objectStore.createIndex('key', ['container', 'key'], { unique: false }); + objectStore.createIndex('keyCount', ['container', 'key', 'keyCount'], { unique: false }); + }, + }); + + const kvStore = new AztecIndexedDBStore(rootDB, ephemeral, log, name); + return kvStore; + } + + /** + * Forks the current DB into a new DB by backing it up to a temporary location and opening a new indexedb. + * @returns A new AztecIndexedDBStore. + */ + async fork(): Promise { + const forkedStore = await AztecIndexedDBStore.open(this.#log, undefined, true); + this.#log.verbose(`Forking store to ${forkedStore.#name}`); + + // Copy old data to new store + const oldData = this.#rootDB.transaction('data').store; + const dataToWrite = []; + for await (const cursor of oldData.iterate()) { + dataToWrite.push(cursor.value); + } + const tx = forkedStore.#rootDB.transaction('data', 'readwrite').store; + for (const data of dataToWrite) { + await tx.add(data); + } + + this.#log.debug(`Forked store at ${forkedStore.#name} opened successfully`); + return forkedStore; + } + + /** + * Creates a new AztecMap in the store. + * @param name - Name of the map + * @returns A new AztecMap + */ + openMap(name: string): AztecAsyncMap { + const map = new IndexedDBAztecMap(this.#rootDB, name); + this.#containers.add(map); + return map; + } + + /** + * Creates a new AztecSet in the store. + * @param name - Name of the set + * @returns A new AztecSet + */ + openSet(name: string): AztecAsyncSet { + const set = new IndexedDBAztecSet(this.#rootDB, name); + this.#containers.add(set); + return set; + } + + /** + * Creates a new AztecMultiMap in the store. A multi-map stores multiple values for a single key automatically. + * @param name - Name of the map + * @returns A new AztecMultiMap + */ + openMultiMap(name: string): AztecAsyncMultiMap { + const multimap = new IndexedDBAztecMap(this.#rootDB, name); + this.#containers.add(multimap); + return multimap; + } + + openCounter>(_name: string): AztecAsyncCounter { + throw new Error('Method not implemented.'); + } + + /** + * Creates a new AztecArray in the store. + * @param name - Name of the array + * @returns A new AztecArray + */ + openArray(name: string): AztecAsyncArray { + const array = new IndexedDBAztecArray(this.#rootDB, name); + this.#containers.add(array); + return array; + } + + /** + * Creates a new AztecSingleton in the store. + * @param name - Name of the singleton + * @returns A new AztecSingleton + */ + openSingleton(name: string): AztecAsyncSingleton { + const singleton = new IndexedDBAztecSingleton(this.#rootDB, name); + this.#containers.add(singleton); + return singleton; + } + + /** + * Runs a callback in a transaction. 
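For readers of the diff, a minimal usage sketch of the transactional API defined here, assuming the generic container signatures (e.g. `AztecAsyncMap<K, V>`) and a hypothetical `balances` map; during the callback every container reads and writes through the shared IndexedDB transaction, so the writes below commit or roll back together:

```ts
// Sketch only: both writes land in the same IndexedDB transaction.
async function transfer(store: AztecAsyncKVStore, balances: AztecAsyncMap<string, number>) {
  await store.transactionAsync(async () => {
    const from = (await balances.getAsync('alice')) ?? 0;
    const to = (await balances.getAsync('bob')) ?? 0;
    await balances.set('alice', from - 1);
    await balances.set('bob', to + 1);
  });
}
```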
+ * @param callback - Function to execute in a transaction + * @returns A promise that resolves to the return value of the callback + */ + async transactionAsync(callback: () => Promise): Promise { + const tx = this.#rootDB.transaction('data', 'readwrite'); + for (const container of this.#containers) { + container.db = tx.store; + } + // Avoid awaiting this promise so it doesn't get scheduled in the next microtask + // By then, the tx would be closed + const runningPromise = callback(); + // Wait for the transaction to finish + await tx.done; + for (const container of this.#containers) { + container.db = undefined; + } + // Return the result of the callback. + // Tx is guaranteed to already be closed, so the await doesn't hurt anything here + return await runningPromise; + } + + /** + * Clears all entries in the store & sub DBs. + */ + async clear() { + await this.#rootDB.transaction('data', 'readwrite').store.clear(); + } + + /** Deletes this store and removes the database */ + delete() { + this.#containers.clear(); + return Promise.resolve(this.#rootDB.deleteObjectStore('data')); + } + + estimateSize(): { mappingSize: number; actualSize: number; numItems: number } { + return { mappingSize: 0, actualSize: 0, numItems: 0 }; + } +} diff --git a/yarn-project/kv-store/src/interfaces/array.ts b/yarn-project/kv-store/src/interfaces/array.ts index e24922042129..70e98a24c3e9 100644 --- a/yarn-project/kv-store/src/interfaces/array.ts +++ b/yarn-project/kv-store/src/interfaces/array.ts @@ -1,12 +1,7 @@ /** * An array backed by a persistent store. Can not have any holes in it. */ -export interface AztecArray { - /** - * The size of the array - */ - length: number; - +interface BaseAztecArray { /** * Pushes values to the end of the array * @param vals - The values to push to the end of the array @@ -20,6 +15,24 @@ export interface AztecArray { */ pop(): Promise; + /** + * Updates the value at the given index. Index can be in the range [-length, length - 1). + * @param index - The index to set the value at + * @param val - The value to set + * @returns Whether the value was set + */ + setAt(index: number, val: T): Promise; +} + +/** + * An array backed by a persistent store. Can not have any holes in it. + */ +export interface AztecAsyncArray extends BaseAztecArray { + /** + * The size of the array + */ + lengthAsync(): Promise; + /** * Gets the value at the given index. Index can be in the range [-length, length - 1). * If the index is negative, it will be treated as an offset from the end of the array. @@ -27,15 +40,38 @@ export interface AztecArray { * @param index - The index to get the value from * @returns The value at the given index or undefined if the index is out of bounds */ - at(index: number): T | undefined; + atAsync(index: number): Promise; /** - * Updates the value at the given index. Index can be in the range [-length, length - 1). - * @param index - The index to set the value at - * @param val - The value to set - * @returns Whether the value was set + * Iterates over the array with indexes. */ - setAt(index: number, val: T): Promise; + entriesAsync(): AsyncIterableIterator<[number, T]>; + + /** + * Iterates over the array. + */ + valuesAsync(): AsyncIterableIterator; + + /** + * Iterates over the array. + */ + [Symbol.asyncIterator](): AsyncIterableIterator; +} + +export interface AztecArray extends BaseAztecArray { + /** + * The size of the array + */ + length: number; + + /** + * Gets the value at the given index. Index can be in the range [-length, length - 1). 
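The split of the array interface into `BaseAztecArray` plus sync and async variants reads as follows from calling code; a hedged sketch assuming the generic `openArray<T>` signature and an async (IndexedDB-style) store:

```ts
// Hypothetical caller of the async array API.
async function arrayDemo(store: AztecAsyncKVStore) {
  const arr = store.openArray<string>('demo');
  await arr.push('a', 'b', 'c'); // resolves to the new length, 3
  await arr.setAt(-1, 'C'); // negative indices count back from the end
  console.log(await arr.atAsync(2)); // 'C'
  console.log(await arr.pop()); // 'C', leaving ['a', 'b']
}
```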
+ * If the index is negative, it will be treated as an offset from the end of the array. + * + * @param index - The index to get the value from + * @returns The value at the given index or undefined if the index is out of bounds + */ + at(index: number): T | undefined; /** * Iterates over the array with indexes. diff --git a/yarn-project/kv-store/src/interfaces/array_test_suite.ts b/yarn-project/kv-store/src/interfaces/array_test_suite.ts new file mode 100644 index 000000000000..0affe23305e8 --- /dev/null +++ b/yarn-project/kv-store/src/interfaces/array_test_suite.ts @@ -0,0 +1,126 @@ +import { toArray } from '@aztec/foundation/iterable'; + +import { expect } from 'chai'; + +import { type AztecArray, type AztecAsyncArray } from './array.js'; +import { type AztecAsyncKVStore, type AztecKVStore } from './store.js'; +import { isSyncStore } from './utils.js'; + +export function describeAztecArray( + testName: string, + getStore: () => AztecKVStore | Promise, + forceAsync: boolean = false, +) { + describe(testName, () => { + let store: AztecKVStore | AztecAsyncKVStore; + let arr: AztecArray | AztecAsyncArray; + + beforeEach(async () => { + store = await getStore(); + arr = store.openArray('test'); + }); + + async function length(sut: AztecAsyncArray | AztecArray = arr) { + return isSyncStore(store) && !forceAsync + ? (sut as AztecArray).length + : await (sut as AztecAsyncArray).lengthAsync(); + } + + async function at(index: number) { + return isSyncStore(store) && !forceAsync + ? (arr as AztecArray).at(index) + : await (arr as AztecAsyncArray).atAsync(index); + } + + async function entries() { + return isSyncStore(store) && !forceAsync + ? await toArray((arr as AztecArray).entries()) + : await toArray((arr as AztecAsyncArray).entriesAsync()); + } + + async function values(sut: AztecAsyncArray | AztecArray = arr) { + return isSyncStore(store) && !forceAsync + ? 
await toArray((sut as AztecArray).values()) + : await toArray((sut as AztecAsyncArray).valuesAsync()); + } + + it('should be able to push and pop values', async () => { + await arr.push(1); + await arr.push(2); + await arr.push(3); + + expect(await length()).to.equal(3); + + expect(await arr.pop()).to.equal(3); + expect(await arr.pop()).to.equal(2); + expect(await arr.pop()).to.equal(1); + expect(await arr.pop()).to.equal(undefined); + }); + + it('should be able to get values by index', async () => { + await arr.push(1); + await arr.push(2); + await arr.push(3); + + expect(await at(0)).to.equal(1); + expect(await at(1)).to.equal(2); + expect(await at(2)).to.equal(3); + expect(await at(3)).to.equal(undefined); + expect(await at(-1)).to.equal(3); + expect(await at(-2)).to.equal(2); + expect(await at(-3)).to.equal(1); + expect(await at(-4)).to.equal(undefined); + }); + + it('should be able to set values by index', async () => { + await arr.push(1); + await arr.push(2); + await arr.push(3); + + expect(await arr.setAt(0, 4)).to.equal(true); + expect(await arr.setAt(1, 5)).to.equal(true); + expect(await arr.setAt(2, 6)).to.equal(true); + + expect(await arr.setAt(3, 7)).to.equal(false); + + expect(await at(0)).to.equal(4); + expect(await at(1)).to.equal(5); + expect(await at(2)).to.equal(6); + expect(await at(3)).to.equal(undefined); + + expect(await arr.setAt(-1, 8)).to.equal(true); + expect(await arr.setAt(-2, 9)).to.equal(true); + expect(await arr.setAt(-3, 10)).to.equal(true); + + expect(await arr.setAt(-4, 11)).to.equal(false); + + expect(await at(-1)).to.equal(8); + expect(await at(-2)).to.equal(9); + expect(await at(-3)).to.equal(10); + expect(await at(-4)).to.equal(undefined); + }); + + it('should be able to iterate over values', async () => { + await arr.push(1); + await arr.push(2); + await arr.push(3); + + expect(await values()).to.deep.equal([1, 2, 3]); + expect(await entries()).to.deep.equal([ + [0, 1], + [1, 2], + [2, 3], + ]); + }); + + it('should be able to restore state', async () => { + await arr.push(1); + await arr.push(2); + await arr.push(3); + + const arr2 = store.openArray('test'); + expect(await length(arr2)).to.equal(3); + expect(await values(arr2)).to.deep.equal(await values()); + }); + }); +} diff --git a/yarn-project/kv-store/src/interfaces/counter.ts b/yarn-project/kv-store/src/interfaces/counter.ts index 0fa636c58ddb..2b94a2ff8475 100644 --- a/yarn-project/kv-store/src/interfaces/counter.ts +++ b/yarn-project/kv-store/src/interfaces/counter.ts @@ -6,7 +6,7 @@ import { type Key, type Range } from './common.js'; * * Keys are stored in sorted order */ -export interface AztecCounter { +interface AztecBaseCounter { /** * Resets the count of the given key to the given value. * @param key - The key to reset @@ -22,7 +22,9 @@ export interface AztecCounter { * @param delta - The amount to modify the key by */ update(key: K, delta: number): Promise; +} +export interface AztecCounter extends AztecBaseCounter { /** * Gets the current count. * @param key - The key to get the count of @@ -41,3 +43,23 @@ export interface AztecCounter { */ entries(range: Range): IterableIterator<[K, number]>; } + +export interface AztecAsyncCounter extends AztecBaseCounter { + /** + * Gets the current count. + * @param key - The key to get the count of + */ + getAsync(key: K): Promise; + + /** + * Returns keys in the map in sorted order. Only returns keys that have been seen at least once. 
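Each suite added in this diff dispatches through the same pattern: probe the store flavour once, then exercise either the sync getters or the `*Async` wrappers, so identical expectations run against both LMDB and IndexedDB. Condensed into a standalone sketch (types as declared in the interfaces above):

```ts
// forceAsync exercises the async surface even on a sync store.
async function lengthOf(
  store: AztecKVStore | AztecAsyncKVStore,
  arr: AztecArray<number> | AztecAsyncArray<number>,
  forceAsync = false,
): Promise<number> {
  return isSyncStore(store) && !forceAsync
    ? (arr as AztecArray<number>).length
    : await (arr as AztecAsyncArray<number>).lengthAsync();
}
```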
+ * @param range - The range of keys to iterate over + */ + keysAsync(range: Range): AsyncIterableIterator; + + /** + * Returns keys and their counts in the map sorted by the key. Only returns keys that have been seen at least once. + * @param range - The range of keys to iterate over + */ + entriesAsync(range: Range): AsyncIterableIterator<[K, number]>; +} diff --git a/yarn-project/kv-store/src/interfaces/map.ts b/yarn-project/kv-store/src/interfaces/map.ts index 9b0d0ca4d177..6ded76080db4 100644 --- a/yarn-project/kv-store/src/interfaces/map.ts +++ b/yarn-project/kv-store/src/interfaces/map.ts @@ -3,20 +3,7 @@ import { type Key, type Range } from './common.js'; /** * A map backed by a persistent store. */ -export interface AztecMap { - /** - * Gets the value at the given key. - * @param key - The key to get the value from - */ - get(key: K): V | undefined; - - /** - * Checks if a key exists in the map. - * @param key - The key to check - * @returns True if the key exists, false otherwise - */ - has(key: K): boolean; - +interface AztecBaseMap { /** * Sets the value at the given key. * @param key - The key to set the value at @@ -43,6 +30,20 @@ export interface AztecMap { * @param key - The key to delete the value at */ delete(key: K): Promise; +} +export interface AztecMap extends AztecBaseMap { + /** + * Gets the value at the given key. + * @param key - The key to get the value from + */ + get(key: K): V | undefined; + + /** + * Checks if a key exists in the map. + * @param key - The key to check + * @returns True if the key exists, false otherwise + */ + has(key: K): boolean; /** * Iterates over the map's key-value entries in the key's natural order @@ -80,3 +81,57 @@ export interface AztecMultiMap extends AztecMap { */ deleteValue(key: K, val: V): Promise; } + +/** + * A map backed by a persistent store. + */ +export interface AztecAsyncMap extends AztecBaseMap { + /** + * Gets the value at the given key. + * @param key - The key to get the value from + */ + getAsync(key: K): Promise; + + /** + * Checks if a key exists in the map. + * @param key - The key to check + * @returns True if the key exists, false otherwise + */ + hasAsync(key: K): Promise; + + /** + * Iterates over the map's key-value entries in the key's natural order + * @param range - The range of keys to iterate over + */ + entriesAsync(range?: Range): AsyncIterableIterator<[K, V]>; + + /** + * Iterates over the map's values in the key's natural order + * @param range - The range of keys to iterate over + */ + valuesAsync(range?: Range): AsyncIterableIterator; + + /** + * Iterates over the map's keys in the key's natural order + * @param range - The range of keys to iterate over + */ + keysAsync(range?: Range): AsyncIterableIterator; +} + +/** + * A map backed by a persistent store that can have multiple values for a single key. + */ +export interface AztecAsyncMultiMap extends AztecAsyncMap { + /** + * Gets all the values at the given key. + * @param key - The key to get the values from + */ + getValuesAsync(key: K): AsyncIterableIterator; + + /** + * Deletes a specific value at the given key. 
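A short sketch of the counter contract defined here, matching the LMDB-backed tests later in the diff (the IndexedDB store's `openCounter` currently throws, so a sync store is shown; the generic parameter is assumed):

```ts
// Counts are per-key, updated by deltas, and may never go negative.
async function counterDemo(store: AztecKVStore) {
  const votes = store.openCounter<string>('votes');
  await votes.update('alice', 2); // count becomes 2
  await votes.update('alice', -1); // count becomes 1
  // await votes.update('alice', -5); // would reject: counts cannot drop below zero
  console.log(votes.get('alice')); // 1
}
```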
+ * @param key - The key to delete the value at + * @param val - The value to delete + */ + deleteValue(key: K, val: V): Promise; +} diff --git a/yarn-project/kv-store/src/interfaces/map_test_suite.ts b/yarn-project/kv-store/src/interfaces/map_test_suite.ts new file mode 100644 index 000000000000..3c999f01b2c0 --- /dev/null +++ b/yarn-project/kv-store/src/interfaces/map_test_suite.ts @@ -0,0 +1,154 @@ +import { toArray } from '@aztec/foundation/iterable'; + +import { expect } from 'chai'; + +import { type Key, type Range } from './common.js'; +import { type AztecAsyncMap, type AztecAsyncMultiMap, type AztecMap, type AztecMultiMap } from './map.js'; +import { type AztecAsyncKVStore, type AztecKVStore } from './store.js'; +import { isSyncStore } from './utils.js'; + +export function describeAztecMap( + testName: string, + getStore: () => AztecKVStore | Promise, + forceAsync: boolean = false, +) { + describe(testName, () => { + let store: AztecKVStore | AztecAsyncKVStore; + let map: AztecMultiMap | AztecAsyncMultiMap; + + beforeEach(async () => { + store = await getStore(); + map = store.openMultiMap('test'); + }); + + async function get(key: Key, sut: AztecAsyncMap | AztecMap = map) { + return isSyncStore(store) && !forceAsync + ? (sut as AztecMultiMap).get(key) + : await (sut as AztecAsyncMultiMap).getAsync(key); + } + + async function entries() { + return isSyncStore(store) && !forceAsync + ? await toArray((map as AztecMultiMap).entries()) + : await toArray((map as AztecAsyncMultiMap).entriesAsync()); + } + + async function values() { + return isSyncStore(store) && !forceAsync + ? await toArray((map as AztecMultiMap).values()) + : await toArray((map as AztecAsyncMultiMap).valuesAsync()); + } + + async function keys(range?: Range, sut: AztecAsyncMap | AztecMap = map) { + return isSyncStore(store) && !forceAsync + ? await toArray((sut as AztecMultiMap).keys(range)) + : await toArray((sut as AztecAsyncMultiMap).keysAsync(range)); + } + + async function getValues(key: Key) { + return isSyncStore(store) && !forceAsync + ? 
await toArray((map as AztecMultiMap).getValues(key)) + : await toArray((map as AztecAsyncMultiMap).getValuesAsync(key)); + } + + it('should be able to set and get values', async () => { + await map.set('foo', 'bar'); + await map.set('baz', 'qux'); + + expect(await get('foo')).to.equal('bar'); + expect(await get('baz')).to.equal('qux'); + expect(await get('quux')).to.equal(undefined); + }); + + it('should be able to set values if they do not exist', async () => { + expect(await map.setIfNotExists('foo', 'bar')).to.equal(true); + expect(await map.setIfNotExists('foo', 'baz')).to.equal(false); + + expect(await get('foo')).to.equal('bar'); + }); + + it('should be able to delete values', async () => { + await map.set('foo', 'bar'); + await map.set('baz', 'qux'); + + await map.delete('foo'); + + expect(await get('foo')).to.equal(undefined); + expect(await get('baz')).to.equal('qux'); + }); + + it('should be able to iterate over entries when there are no keys', async () => { + expect(await entries()).to.deep.equal([]); + }); + + it('should be able to iterate over entries', async () => { + await map.set('foo', 'bar'); + await map.set('baz', 'qux'); + + expect(await entries()).to.deep.equal([ + ['baz', 'qux'], + ['foo', 'bar'], + ]); + }); + + it('should be able to iterate over values', async () => { + await map.set('foo', 'bar'); + await map.set('baz', 'quux'); + + expect(await values()).to.deep.equal(['quux', 'bar']); + }); + + it('should be able to iterate over keys', async () => { + await map.set('foo', 'bar'); + await map.set('baz', 'qux'); + + expect(await keys()).to.deep.equal(['baz', 'foo']); + }); + + it('should be able to get multiple values for a single key', async () => { + await map.set('foo', 'bar'); + await map.set('foo', 'baz'); + + expect(await getValues('foo')).to.deep.equal(['bar', 'baz']); + }); + + it('should be able to delete individual values for a single key', async () => { + await map.set('foo', 'bar'); + await map.set('foo', 'baz'); + + await map.deleteValue('foo', 'bar'); + + expect(await getValues('foo')).to.deep.equal(['baz']); + }); + + it('supports tuple keys', async () => { + // Use a new map because key structure has changed + const tupleMap = store.openMap<[number, string], string>('test-tuple'); + + await tupleMap.set([5, 'bar'], 'val'); + await tupleMap.set([0, 'foo'], 'val'); + + expect(await keys(undefined, tupleMap)).to.deep.equal([ + [0, 'foo'], + [5, 'bar'], + ]); + + expect(await get([5, 'bar'], tupleMap)).to.equal('val'); + }); + + it('supports range queries', async () => { + await map.set('a', 'a'); + await map.set('b', 'b'); + await map.set('c', 'c'); + await map.set('d', 'd'); + + expect(await keys({ start: 'b', end: 'c' })).to.deep.equal(['b']); + expect(await keys({ start: 'b' })).to.deep.equal(['b', 'c', 'd']); + expect(await keys({ end: 'c' })).to.deep.equal(['a', 'b']); + expect(await keys({ start: 'b', end: 'c', reverse: true })).to.deep.equal(['c']); + expect(await keys({ start: 'b', limit: 1 })).to.deep.equal(['b']); + expect(await keys({ start: 'b', reverse: true })).to.deep.equal(['d', 'c']); + expect(await keys({ end: 'b', reverse: true })).to.deep.equal(['b', 'a']); + }); + }); +} diff --git a/yarn-project/kv-store/src/interfaces/set.ts b/yarn-project/kv-store/src/interfaces/set.ts index 4eaf1fe14421..a37f0d2ea202 100644 --- a/yarn-project/kv-store/src/interfaces/set.ts +++ b/yarn-project/kv-store/src/interfaces/set.ts @@ -3,14 +3,7 @@ import { type Key, type Range } from './common.js'; /** * A set backed by a persistent store. 
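The range-query tests above pin down a contract worth stating once: forward iteration treats `start` as inclusive and `end` as exclusive, `reverse: true` walks from `end` (inclusive) back towards `start` (exclusive), and `limit` caps the number of results. For example, against keys `'a'` through `'d'`:

```ts
// Mirrors the expectations in the suite above; `map` is any multimap from these stores.
for await (const k of map.keysAsync({ start: 'b', end: 'd' })) {
  console.log(k); // 'b', then 'c': start inclusive, end exclusive
}
for await (const k of map.keysAsync({ end: 'b', reverse: true })) {
  console.log(k); // 'b', then 'a': end becomes inclusive when reversed
}
```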
*/ -export interface AztecSet { - /** - * Checks if a key exists in the set. - * @param key - The key to check - * @returns True if the key exists, false otherwise - */ - has(key: K): boolean; - +interface AztecBaseSet { /** * Adds the given value. * @param key - The key to add. @@ -22,6 +15,15 @@ export interface AztecSet { * @param key - The key to delete. */ delete(key: K): Promise; +} + +export interface AztecSet extends AztecBaseSet { + /** + * Checks if a key exists in the set. + * @param key - The key to check + * @returns True if the key exists, false otherwise + */ + has(key: K): boolean; /** * Iterates over the set's keys in their natural order @@ -29,3 +31,18 @@ export interface AztecSet { */ entries(range?: Range): IterableIterator; } + +export interface AztecAsyncSet extends AztecBaseSet { + /** + * Checks if a key exists in the set. + * @param key - The key to check + * @returns True if the key exists, false otherwise + */ + hasAsync(key: K): Promise; + + /** + * Iterates over the set's keys in their natural order + * @param range - The range of keys to iterate over + */ + entriesAsync(range?: Range): AsyncIterableIterator; +} diff --git a/yarn-project/kv-store/src/interfaces/set_test_suite.ts b/yarn-project/kv-store/src/interfaces/set_test_suite.ts new file mode 100644 index 000000000000..08f2758ebf3c --- /dev/null +++ b/yarn-project/kv-store/src/interfaces/set_test_suite.ts @@ -0,0 +1,77 @@ +import { toArray } from '@aztec/foundation/iterable'; + +import { expect } from 'chai'; + +import { type Range } from './common.js'; +import { type AztecAsyncSet, type AztecSet } from './set.js'; +import { type AztecAsyncKVStore, type AztecKVStore } from './store.js'; +import { isSyncStore } from './utils.js'; + +export function describeAztecSet( + testName: string, + getStore: () => AztecKVStore | Promise, + forceAsync: boolean = false, +) { + describe(testName, () => { + let store: AztecKVStore | AztecAsyncKVStore; + let set: AztecSet | AztecAsyncSet; + + beforeEach(async () => { + store = await getStore(); + set = store.openSet('test'); + }); + + async function has(key: string) { + return isSyncStore(store) && !forceAsync + ? (set as AztecSet).has(key) + : await (set as AztecAsyncSet).hasAsync(key); + } + + async function entries(range?: Range) { + return isSyncStore(store) && !forceAsync + ? 
await toArray((set as AztecSet).entries(range)) + : await toArray((set as AztecAsyncSet).entriesAsync(range)); + } + + it('should be able to set and get values', async () => { + await set.add('foo'); + await set.add('baz'); + + expect(await has('foo')).to.equal(true); + expect(await has('baz')).to.equal(true); + expect(await has('bar')).to.equal(false); + }); + + it('should be able to delete values', async () => { + await set.add('foo'); + await set.add('baz'); + + await set.delete('foo'); + + expect(await has('foo')).to.equal(false); + expect(await has('baz')).to.equal(true); + }); + + it('should be able to iterate over entries', async () => { + await set.add('baz'); + await set.add('foo'); + + expect(await entries()).to.deep.equal(['baz', 'foo']); + }); + + it('supports range queries', async () => { + await set.add('a'); + await set.add('b'); + await set.add('c'); + await set.add('d'); + + expect(await entries({ start: 'b', end: 'c' })).to.deep.equal(['b']); + expect(await entries({ start: 'b' })).to.deep.equal(['b', 'c', 'd']); + expect(await entries({ end: 'c' })).to.deep.equal(['a', 'b']); + expect(await entries({ start: 'b', end: 'c', reverse: true })).to.deep.equal(['c']); + expect(await entries({ start: 'b', limit: 1 })).to.deep.equal(['b']); + expect(await entries({ start: 'b', reverse: true })).to.deep.equal(['d', 'c']); + expect(await entries({ end: 'b', reverse: true })).to.deep.equal(['b', 'a']); + }); + }); +} diff --git a/yarn-project/kv-store/src/interfaces/singleton.ts b/yarn-project/kv-store/src/interfaces/singleton.ts index eba620e18b53..6d079ae22a2f 100644 --- a/yarn-project/kv-store/src/interfaces/singleton.ts +++ b/yarn-project/kv-store/src/interfaces/singleton.ts @@ -2,12 +2,7 @@ * Represents a singleton value in the database. * Note: The singleton loses type info so it's recommended to serialize to buffer when storing it. */ -export interface AztecSingleton { - /** - * Gets the value. - */ - get(): T | undefined; - +interface AztecBaseSingleton { /** * Sets the value. * @param val - The new value @@ -19,3 +14,16 @@ export interface AztecSingleton { */ delete(): Promise; } +export interface AztecSingleton extends AztecBaseSingleton { + /** + * Gets the value. + */ + get(): T | undefined; +} + +export interface AztecAsyncSingleton extends AztecBaseSingleton { + /** + * Gets the value. + */ + getAsync(): Promise; +} diff --git a/yarn-project/kv-store/src/interfaces/singleton_test_suite.ts b/yarn-project/kv-store/src/interfaces/singleton_test_suite.ts new file mode 100644 index 000000000000..b3ad148f8d00 --- /dev/null +++ b/yarn-project/kv-store/src/interfaces/singleton_test_suite.ts @@ -0,0 +1,42 @@ +import { expect } from 'chai'; + +import { type AztecAsyncSingleton, type AztecSingleton } from './singleton.js'; +import { type AztecAsyncKVStore, type AztecKVStore } from './store.js'; +import { isSyncStore } from './utils.js'; + +export function describeAztecSingleton( + testName: string, + getStore: () => AztecKVStore | Promise, + forceAsync: boolean = false, +) { + describe(testName, () => { + let store: AztecKVStore | AztecAsyncKVStore; + let singleton: AztecSingleton | AztecAsyncSingleton; + + beforeEach(async () => { + store = await getStore(); + singleton = store.openSingleton('test'); + }); + + async function get() { + return isSyncStore(store) && !forceAsync + ? 
(singleton as AztecSingleton).get() + : await (singleton as AztecAsyncSingleton).getAsync(); + } + + it('returns undefined if the value is not set', async () => { + expect(await get()).to.equal(undefined); + }); + + it('should be able to set and get values', async () => { + expect(await singleton.set('foo')).to.equal(true); + expect(await get()).to.equal('foo'); + }); + + it('overwrites the value if it is set again', async () => { + expect(await singleton.set('foo')).to.equal(true); + expect(await singleton.set('bar')).to.equal(true); + expect(await get()).to.equal('bar'); + }); + }); +} diff --git a/yarn-project/kv-store/src/interfaces/store.ts b/yarn-project/kv-store/src/interfaces/store.ts index 9764a4745460..81c4d956bed2 100644 --- a/yarn-project/kv-store/src/interfaces/store.ts +++ b/yarn-project/kv-store/src/interfaces/store.ts @@ -1,32 +1,33 @@ -import { type AztecArray } from './array.js'; +import { type AztecArray, type AztecAsyncArray } from './array.js'; import { type Key } from './common.js'; -import { type AztecCounter } from './counter.js'; -import { type AztecMap, type AztecMultiMap } from './map.js'; -import { type AztecSet } from './set.js'; -import { type AztecSingleton } from './singleton.js'; +import { type AztecAsyncCounter, type AztecCounter } from './counter.js'; +import { type AztecAsyncMap, type AztecAsyncMultiMap, type AztecMap, type AztecMultiMap } from './map.js'; +import { type AztecAsyncSet, type AztecSet } from './set.js'; +import { type AztecAsyncSingleton, type AztecSingleton } from './singleton.js'; /** A key-value store */ export interface AztecKVStore { + syncGetters: true; /** * Creates a new map. * @param name - The name of the map * @returns The map */ - openMap(name: string): AztecMap; + openMap(name: string): AztecMap; /** * Creates a new set. * @param name - The name of the set * @returns The set */ - openSet(name: string): AztecSet; + openSet(name: string): AztecSet; /** * Creates a new multi-map. * @param name - The name of the multi-map * @returns The multi-map */ - openMultiMap(name: string): AztecMultiMap; + openMultiMap(name: string): AztecMultiMap; /** * Creates a new array. @@ -74,3 +75,72 @@ export interface AztecKVStore { */ estimateSize(): { mappingSize: number; actualSize: number; numItems: number }; } + +export interface AztecAsyncKVStore { + /** + * Creates a new map. + * @param name - The name of the map + * @returns The map + */ + openMap(name: string): AztecAsyncMap; + + /** + * Creates a new set. + * @param name - The name of the set + * @returns The set + */ + openSet(name: string): AztecAsyncSet; + + /** + * Creates a new multi-map. + * @param name - The name of the multi-map + * @returns The multi-map + */ + openMultiMap(name: string): AztecAsyncMultiMap; + + /** + * Creates a new array. + * @param name - The name of the array + * @returns The array + */ + openArray(name: string): AztecAsyncArray; + + /** + * Creates a new singleton. + * @param name - The name of the singleton + * @returns The singleton + */ + openSingleton(name: string): AztecAsyncSingleton; + + /** + * Creates a new count map. + * @param name - name of the counter + */ + openCounter(name: string): AztecAsyncCounter; + + /** + * Starts a transaction. All calls to read/write data while in a transaction are queued and executed atomically. + * @param callback - The callback to execute in a transaction + */ + transactionAsync>>(callback: () => Promise): Promise; + + /** + * Clears all entries in the store + */ + clear(): Promise; + + /** + * Forks the store. 
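The `syncGetters: true` marker added to `AztecKVStore` here is the runtime discriminant between the two store flavours; the `isSyncStore` helper in `utils.ts` below keys off it. A sketch of branching on it in shared code:

```ts
// Hypothetical helper: sync stores expose blocking getters, async stores the *Async variants.
function storeKind(store: AztecKVStore | AztecAsyncKVStore): 'sync' | 'async' {
  return isSyncStore(store) ? 'sync' : 'async';
}
```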
+ */ + fork(): Promise; + + /** + * Deletes the store + */ + delete(): Promise; + + /** + * Estimates the size of the store in bytes. + */ + estimateSize(): { mappingSize: number; actualSize: number; numItems: number }; +} diff --git a/yarn-project/kv-store/src/interfaces/store_test_suite.ts b/yarn-project/kv-store/src/interfaces/store_test_suite.ts new file mode 100644 index 000000000000..91c2240ecc30 --- /dev/null +++ b/yarn-project/kv-store/src/interfaces/store_test_suite.ts @@ -0,0 +1,52 @@ +import { expect } from 'chai'; + +import { type AztecAsyncSingleton, type AztecSingleton } from './singleton.js'; +import { type AztecAsyncKVStore, type AztecKVStore } from './store.js'; +import { isSyncStore } from './utils.js'; + +export function describeAztecStore( + testName: string, + getPersistentStore: () => Promise, + getPersistentNoPathStore: () => Promise, + getEphemeralStore: () => Promise, +) { + describe(testName, () => { + async function get( + store: AztecKVStore | AztecAsyncKVStore, + singleton: AztecSingleton | AztecAsyncSingleton, + ) { + return isSyncStore(store) + ? (singleton as AztecSingleton).get() + : await (singleton as AztecAsyncSingleton).getAsync(); + } + + const itForks = async (store: AztecKVStore | AztecAsyncKVStore) => { + const singleton = store.openSingleton('singleton'); + await singleton.set('foo'); + + const forkedStore = await store.fork(); + const forkedSingleton = forkedStore.openSingleton('singleton'); + expect(await get(store, singleton)).to.equal('foo'); + await forkedSingleton.set('bar'); + expect(await get(store, singleton)).to.equal('foo'); + expect(await get(forkedStore, forkedSingleton)).to.equal('bar'); + await forkedSingleton.delete(); + expect(await get(store, singleton)).to.equal('foo'); + }; + + it('forks a persistent store', async () => { + const store = await getPersistentStore(); + await itForks(store); + }); + + it('forks a persistent store with no path', async () => { + const store = await getPersistentNoPathStore(); + await itForks(store); + }); + + it('forks an ephemeral store', async () => { + const store = await getEphemeralStore(); + await itForks(store); + }); + }); +} diff --git a/yarn-project/kv-store/src/interfaces/utils.ts b/yarn-project/kv-store/src/interfaces/utils.ts new file mode 100644 index 000000000000..2176a996bbf9 --- /dev/null +++ b/yarn-project/kv-store/src/interfaces/utils.ts @@ -0,0 +1,20 @@ +import { type AztecAsyncKVStore, type AztecKVStore } from './store.js'; + +/* eslint-disable no-console */ +export const mockLogger = { + debug: (msg: string, data: any) => console.log(msg, data), + info: (msg: string, data: any) => console.log(msg, data), + warn: (msg: string, data: any) => console.log(msg, data), + error: (msg: string, data: any) => console.error(msg, data), + fatal: (msg: string, data: any) => console.error(msg, data), + silent: (_msg: string, _data: any) => {}, + verbose: (msg: string, data: any) => console.log(msg, data), + trace: (msg: string, data: any) => console.log(msg, data), + level: 'trace' as const, + isLevelEnabled: (_level: string) => true, +}; +/* eslint-enable no-console */ + +export function isSyncStore(store: AztecKVStore | AztecAsyncKVStore): store is AztecKVStore { + return (store as AztecKVStore).syncGetters === true; +} diff --git a/yarn-project/kv-store/src/lmdb/array.test.ts b/yarn-project/kv-store/src/lmdb/array.test.ts index 140d620a87d5..31aed9cd80cd 100644 --- a/yarn-project/kv-store/src/lmdb/array.test.ts +++ b/yarn-project/kv-store/src/lmdb/array.test.ts @@ -1,91 +1,8 @@ 
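The forking contract the suite above verifies, restated as a compact sketch (names are illustrative, and the generic `openSingleton<T>` signature is assumed): writes on either side of a `fork()` stay invisible to the other.

```ts
async function forkDemo(store: AztecAsyncKVStore) {
  const original = store.openSingleton<string>('config');
  await original.set('v1');
  const forked = await store.fork();
  await forked.openSingleton<string>('config').set('v2');
  console.log(await original.getAsync()); // still 'v1'
}
```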
-import { type Database, open } from 'lmdb'; +import { describeAztecArray } from '../interfaces/array_test_suite.js'; +import { openTmpStore } from './index.js'; -import { LmdbAztecArray } from './array.js'; +describe('LMDBArray', () => { + describeAztecArray('Sync AztecArray', () => openTmpStore(true)); -describe('LmdbAztecArray', () => { - let db: Database; - let arr: LmdbAztecArray; - - beforeEach(() => { - db = open({} as any); - arr = new LmdbAztecArray(db, 'test'); - }); - - it('should be able to push and pop values', async () => { - await arr.push(1); - await arr.push(2); - await arr.push(3); - - expect(arr.length).toEqual(3); - expect(await arr.pop()).toEqual(3); - expect(await arr.pop()).toEqual(2); - expect(await arr.pop()).toEqual(1); - expect(await arr.pop()).toEqual(undefined); - }); - - it('should be able to get values by index', async () => { - await arr.push(1); - await arr.push(2); - await arr.push(3); - - expect(arr.at(0)).toEqual(1); - expect(arr.at(1)).toEqual(2); - expect(arr.at(2)).toEqual(3); - expect(arr.at(3)).toEqual(undefined); - expect(arr.at(-1)).toEqual(3); - expect(arr.at(-2)).toEqual(2); - expect(arr.at(-3)).toEqual(1); - expect(arr.at(-4)).toEqual(undefined); - }); - - it('should be able to set values by index', async () => { - await arr.push(1); - await arr.push(2); - await arr.push(3); - - expect(await arr.setAt(0, 4)).toEqual(true); - expect(await arr.setAt(1, 5)).toEqual(true); - expect(await arr.setAt(2, 6)).toEqual(true); - - expect(await arr.setAt(3, 7)).toEqual(false); - - expect(arr.at(0)).toEqual(4); - expect(arr.at(1)).toEqual(5); - expect(arr.at(2)).toEqual(6); - expect(arr.at(3)).toEqual(undefined); - - expect(await arr.setAt(-1, 8)).toEqual(true); - expect(await arr.setAt(-2, 9)).toEqual(true); - expect(await arr.setAt(-3, 10)).toEqual(true); - - expect(await arr.setAt(-4, 11)).toEqual(false); - - expect(arr.at(-1)).toEqual(8); - expect(arr.at(-2)).toEqual(9); - expect(arr.at(-3)).toEqual(10); - expect(arr.at(-4)).toEqual(undefined); - }); - - it('should be able to iterate over values', async () => { - await arr.push(1); - await arr.push(2); - await arr.push(3); - - expect([...arr.values()]).toEqual([1, 2, 3]); - expect([...arr.entries()]).toEqual([ - [0, 1], - [1, 2], - [2, 3], - ]); - }); - - it('should be able to restore state', async () => { - await arr.push(1); - await arr.push(2); - await arr.push(3); - - const arr2 = new LmdbAztecArray(db, 'test'); - expect(arr2.length).toEqual(3); - expect([...arr2.values()]).toEqual([...arr.values()]); - }); + describeAztecArray('Async AztecArray', () => Promise.resolve(openTmpStore(true)), true); }); diff --git a/yarn-project/kv-store/src/lmdb/array.ts b/yarn-project/kv-store/src/lmdb/array.ts index 3ba338d028fa..19537dd7598c 100644 --- a/yarn-project/kv-store/src/lmdb/array.ts +++ b/yarn-project/kv-store/src/lmdb/array.ts @@ -1,6 +1,6 @@ import { type Database, type Key } from 'lmdb'; -import { type AztecArray } from '../interfaces/array.js'; +import { type AztecArray, type AztecAsyncArray } from '../interfaces/array.js'; import { LmdbAztecSingleton } from './singleton.js'; /** The shape of a key that stores a value in an array */ @@ -9,7 +9,7 @@ type ArrayIndexSlot = ['array', string, 'slot', number]; /** * An persistent array backed by LMDB. 
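Because the LMDB containers below implement both the sync and the async interfaces, code written against the async surface runs unchanged on either backend; a hypothetical consumer:

```ts
// Works for LmdbAztecArray and IndexedDBAztecArray alike.
async function sum(arr: AztecAsyncArray<number>): Promise<number> {
  let total = 0;
  for await (const value of arr.valuesAsync()) {
    total += value;
  }
  return total;
}
```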
*/ -export class LmdbAztecArray implements AztecArray { +export class LmdbAztecArray implements AztecArray, AztecAsyncArray { #db: Database; #name: string; #length: LmdbAztecSingleton; @@ -24,6 +24,10 @@ export class LmdbAztecArray implements AztecArray { return this.#length.get() ?? 0; } + lengthAsync(): Promise { + return Promise.resolve(this.length); + } + push(...vals: T[]): Promise { return this.#db.childTransaction(() => { let length = this.length; @@ -69,6 +73,10 @@ export class LmdbAztecArray implements AztecArray { return this.#db.get(this.#slot(index)); } + atAsync(index: number): Promise { + return Promise.resolve(this.at(index)); + } + setAt(index: number, val: T): Promise { if (index < 0) { index = this.length + index; @@ -93,16 +101,32 @@ export class LmdbAztecArray implements AztecArray { } } + async *entriesAsync(): AsyncIterableIterator<[number, T]> { + for (const [key, value] of this.entries()) { + yield [key, value]; + } + } + *values(): IterableIterator { for (const [_, value] of this.entries()) { yield value; } } + async *valuesAsync(): AsyncIterableIterator { + for (const [_, value] of this.entries()) { + yield value; + } + } + [Symbol.iterator](): IterableIterator { return this.values(); } + [Symbol.asyncIterator](): AsyncIterableIterator { + return this.valuesAsync(); + } + #slot(index: number): ArrayIndexSlot { return ['array', this.#name, 'slot', index]; } diff --git a/yarn-project/kv-store/src/lmdb/counter.test.ts b/yarn-project/kv-store/src/lmdb/counter.test.ts index 36ac3e426799..7e1c8524cf96 100644 --- a/yarn-project/kv-store/src/lmdb/counter.test.ts +++ b/yarn-project/kv-store/src/lmdb/counter.test.ts @@ -1,9 +1,15 @@ import { randomBytes } from '@aztec/foundation/crypto'; +import { toArray } from '@aztec/foundation/iterable'; +import { expect, use } from 'chai'; +import chaiAsPromised from 'chai-as-promised'; import { type Database, open } from 'lmdb'; +import forEach from 'mocha-each'; import { LmdbAztecCounter } from './counter.js'; +use(chaiAsPromised); + describe('LmdbAztecCounter', () => { let db: Database; @@ -11,26 +17,26 @@ describe('LmdbAztecCounter', () => { db = open({} as any); }); - describe.each([ + forEach([ ['floating point number', () => Math.random()], ['integers', () => (Math.random() * 1000) | 0], ['strings', () => randomBytes(8).toString('hex')], ['strings', () => [Math.random(), randomBytes(8).toString('hex')]], - ])('counts occurrences of %s values', (_, genKey) => { + ]).describe('counts occurrences of %s values', (_, genKey) => { let counter: LmdbAztecCounter>; beforeEach(() => { counter = new LmdbAztecCounter(db, 'test'); }); it('returns 0 for unknown keys', () => { - expect(counter.get(genKey())).toEqual(0); + expect(counter.get(genKey())).to.equal(0); }); it('increments values', async () => { const key = genKey(); await counter.update(key, 1); - expect(counter.get(key)).toEqual(1); + expect(counter.get(key)).to.equal(1); }); it('decrements values', async () => { @@ -38,14 +44,14 @@ describe('LmdbAztecCounter', () => { await counter.update(key, 1); await counter.update(key, -1); - expect(counter.get(key)).toEqual(0); + expect(counter.get(key)).to.equal(0); }); it('throws when decrementing below zero', async () => { const key = genKey(); await counter.update(key, 1); - await expect(counter.update(key, -2)).rejects.toThrow(); + await expect(counter.update(key, -2)).to.be.rejected; }); it('increments values by a delta', async () => { @@ -53,7 +59,7 @@ describe('LmdbAztecCounter', () => { await counter.update(key, 1); await 
counter.update(key, 2); - expect(counter.get(key)).toEqual(3); + expect(counter.get(key)).to.equal(3); }); it('resets the counter', async () => { @@ -62,7 +68,7 @@ describe('LmdbAztecCounter', () => { await counter.update(key, 2); await counter.set(key, 0); - expect(counter.get(key)).toEqual(0); + expect(counter.get(key)).to.equal(0); }); it('iterates over entries', async () => { @@ -70,11 +76,11 @@ describe('LmdbAztecCounter', () => { await counter.update(key, 1); await counter.update(key, 2); - expect([...counter.entries()]).toEqual([[key, 3]]); + expect(await toArray(counter.entries())).to.deep.equal([[key, 3]]); }); }); - it.each([ + forEach([ [ [ ['c', 2342], @@ -115,9 +121,9 @@ describe('LmdbAztecCounter', () => { [[11, 'b'], 1], ], ], - ])('iterates in key order', async (insertOrder, expectedOrder) => { + ]).it('iterates in key order', async (insertOrder: [string, number][], expectedOrder) => { const counter = new LmdbAztecCounter(db, 'test'); await Promise.all(insertOrder.map(([key, value]) => counter.update(key, value as number))); - expect([...counter.entries()]).toEqual(expectedOrder); + expect(await toArray(counter.entries())).to.deep.equal(expectedOrder); }); }); diff --git a/yarn-project/kv-store/src/lmdb/counter.ts b/yarn-project/kv-store/src/lmdb/counter.ts index 78e70f5d3ff7..10f564abf779 100644 --- a/yarn-project/kv-store/src/lmdb/counter.ts +++ b/yarn-project/kv-store/src/lmdb/counter.ts @@ -1,13 +1,13 @@ import { type Key as BaseKey, type Database } from 'lmdb'; import { type Key, type Range } from '../interfaces/common.js'; -import { type AztecCounter } from '../interfaces/counter.js'; +import { type AztecAsyncCounter, type AztecCounter } from '../interfaces/counter.js'; import { LmdbAztecMap } from './map.js'; /** * A counter implementation backed by LMDB */ -export class LmdbAztecCounter implements AztecCounter { +export class LmdbAztecCounter implements AztecCounter, AztecAsyncCounter { #db: Database; #name: string; #map: LmdbAztecMap; @@ -45,11 +45,23 @@ export class LmdbAztecCounter implements AztecCounter { return this.#map.get(key) ?? 0; } + getAsync(key: K): Promise { + return Promise.resolve(this.get(key)); + } + entries(range: Range = {}): IterableIterator<[K, number]> { return this.#map.entries(range); } + async *entriesAsync(range: Range = {}): AsyncIterableIterator<[K, number]> { + yield* this.entries(range); + } + keys(range: Range = {}): IterableIterator { return this.#map.keys(range); } + + async *keysAsync(range: Range = {}): AsyncIterableIterator { + yield* this.keys(range); + } } diff --git a/yarn-project/kv-store/src/lmdb/index.ts b/yarn-project/kv-store/src/lmdb/index.ts index 53e012fa5392..54903fe8cf25 100644 --- a/yarn-project/kv-store/src/lmdb/index.ts +++ b/yarn-project/kv-store/src/lmdb/index.ts @@ -1 +1,37 @@ +import { type Logger, createDebugLogger } from '@aztec/foundation/log'; + +import { join } from 'path'; + +import { type DataStoreConfig } from '../config.js'; +import { initStoreForRollup } from '../utils.js'; +import { AztecLmdbStore } from './store.js'; + export { AztecLmdbStore } from './store.js'; + +export function createStore(name: string, config: DataStoreConfig, log: Logger = createDebugLogger('aztec:kv-store')) { + let { dataDirectory } = config; + if (typeof dataDirectory !== 'undefined') { + dataDirectory = join(dataDirectory, name); + } + + log.info( + dataDirectory + ? 
`Creating ${name} data store at directory ${dataDirectory} with map size ${config.dataStoreMapSizeKB} KB` + : `Creating ${name} ephemeral data store with map size ${config.dataStoreMapSizeKB} KB`, + ); + + const store = AztecLmdbStore.open(dataDirectory, config.dataStoreMapSizeKB, false); + if (config.l1Contracts?.rollupAddress) { + return initStoreForRollup(store, config.l1Contracts.rollupAddress, log); + } + return store; +} +/** + * Opens a temporary store for testing purposes. + * @param ephemeral - true if the store should only exist in memory and not automatically be flushed to disk. Optional + * @returns A new store + */ +export function openTmpStore(ephemeral: boolean = false): AztecLmdbStore { + const mapSize = 1024 * 1024 * 10; // 10 GB map size + return AztecLmdbStore.open(undefined, mapSize, ephemeral); +} diff --git a/yarn-project/kv-store/src/lmdb/map.test.ts b/yarn-project/kv-store/src/lmdb/map.test.ts index a13aa16e1266..224750df8e91 100644 --- a/yarn-project/kv-store/src/lmdb/map.test.ts +++ b/yarn-project/kv-store/src/lmdb/map.test.ts @@ -1,99 +1,8 @@ -import { type Database, open } from 'lmdb'; +import { describeAztecMap } from '../interfaces/map_test_suite.js'; +import { openTmpStore } from './index.js'; -import { LmdbAztecMap } from './map.js'; +describe('LMDBMap', () => { + describeAztecMap('Sync AztecMap', () => openTmpStore(true)); -describe('LmdbAztecMap', () => { - let db: Database; - let map: LmdbAztecMap; - - beforeEach(() => { - db = open({ dupSort: true } as any); - map = new LmdbAztecMap(db, 'test'); - }); - - it('should be able to set and get values', async () => { - await map.set('foo', 'bar'); - await map.set('baz', 'qux'); - - expect(map.get('foo')).toEqual('bar'); - expect(map.get('baz')).toEqual('qux'); - expect(map.get('quux')).toEqual(undefined); - }); - - it('should be able to set values if they do not exist', async () => { - expect(await map.setIfNotExists('foo', 'bar')).toEqual(true); - expect(await map.setIfNotExists('foo', 'baz')).toEqual(false); - - expect(map.get('foo')).toEqual('bar'); - }); - - it('should be able to delete values', async () => { - await map.set('foo', 'bar'); - await map.set('baz', 'qux'); - - await map.delete('foo'); - - expect(map.get('foo')).toEqual(undefined); - expect(map.get('baz')).toEqual('qux'); - }); - - it('should be able to iterate over entries', async () => { - await map.set('foo', 'bar'); - await map.set('baz', 'qux'); - - expect([...map.entries()]).toEqual([ - ['baz', 'qux'], - ['foo', 'bar'], - ]); - }); - - it('should be able to iterate over values', async () => { - await map.set('foo', 'bar'); - await map.set('baz', 'quux'); - - expect([...map.values()]).toEqual(['quux', 'bar']); - }); - - it('should be able to iterate over keys', async () => { - await map.set('foo', 'bar'); - await map.set('baz', 'qux'); - - expect([...map.keys()]).toEqual(['baz', 'foo']); - }); - - it('should be able to get multiple values for a single key', async () => { - await map.set('foo', 'bar'); - await map.set('foo', 'baz'); - - expect([...map.getValues('foo')]).toEqual(['bar', 'baz']); - }); - - it('supports tuple keys', async () => { - const map = new LmdbAztecMap<[number, string], string>(db, 'test'); - - await map.set([5, 'bar'], 'val'); - await map.set([0, 'foo'], 'val'); - - expect([...map.keys()]).toEqual([ - [0, 'foo'], - [5, 'bar'], - ]); - - expect(map.get([5, 'bar'])).toEqual('val'); - }); - - it('supports range queries', async () => { - await map.set('a', 'a'); - await map.set('b', 'b'); - await map.set('c', 'c'); - 
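A hypothetical call into the LMDB `createStore` defined above; the field names follow `DataStoreConfig` as used in this diff, and `dataStoreMapSizeKB` is in kibibytes, so the `1024 * 1024 * 10` used by `openTmpStore` is a 10 GiB map:

```ts
// Sketch: persistent store under <dataDirectory>/archiver with a 10 GiB map.
const store = await createStore('archiver', {
  dataDirectory: './data',
  dataStoreMapSizeKB: 1024 * 1024 * 10,
} as DataStoreConfig);
```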
await map.set('d', 'd'); - - expect([...map.keys({ start: 'b', end: 'c' })]).toEqual(['b']); - expect([...map.keys({ start: 'b' })]).toEqual(['b', 'c', 'd']); - expect([...map.keys({ end: 'c' })]).toEqual(['a', 'b']); - expect([...map.keys({ start: 'b', end: 'c', reverse: true })]).toEqual(['c']); - expect([...map.keys({ start: 'b', limit: 1 })]).toEqual(['b']); - expect([...map.keys({ start: 'b', reverse: true })]).toEqual(['d', 'c']); - expect([...map.keys({ end: 'b', reverse: true })]).toEqual(['b', 'a']); - }); + describeAztecMap('Async AztecMap', () => Promise.resolve(openTmpStore(true)), true); }); diff --git a/yarn-project/kv-store/src/lmdb/map.ts b/yarn-project/kv-store/src/lmdb/map.ts index 3711fa36f55f..38d87cf9c6b0 100644 --- a/yarn-project/kv-store/src/lmdb/map.ts +++ b/yarn-project/kv-store/src/lmdb/map.ts @@ -1,7 +1,7 @@ import { type Database, type RangeOptions } from 'lmdb'; import { type Key, type Range } from '../interfaces/common.js'; -import { type AztecMultiMap } from '../interfaces/map.js'; +import { type AztecAsyncMultiMap, type AztecMultiMap } from '../interfaces/map.js'; /** The slot where a key-value entry would be stored */ type MapValueSlot = ['map', string, 'slot', K]; @@ -9,7 +9,7 @@ type MapValueSlot = ['map', string, 'slot', K]; /** * A map backed by LMDB. */ -export class LmdbAztecMap implements AztecMultiMap { +export class LmdbAztecMap implements AztecMultiMap, AztecAsyncMultiMap { protected db: Database<[K, V], MapValueSlot>; protected name: string; @@ -35,6 +35,10 @@ export class LmdbAztecMap implements AztecMultiMap { return this.db.get(this.#slot(key))?.[1]; } + getAsync(key: K): Promise { + return Promise.resolve(this.get(key)); + } + *getValues(key: K): IterableIterator { const values = this.db.getValues(this.#slot(key)); for (const value of values) { @@ -42,10 +46,20 @@ export class LmdbAztecMap implements AztecMultiMap { } } + async *getValuesAsync(key: K): AsyncIterableIterator { + for (const value of this.getValues(key)) { + yield value; + } + } + has(key: K): boolean { return this.db.doesExist(this.#slot(key)); } + hasAsync(key: K): Promise { + return Promise.resolve(this.has(key)); + } + async set(key: K, val: V): Promise { await this.db.put(this.#slot(key), [key, val]); } @@ -109,18 +123,36 @@ export class LmdbAztecMap implements AztecMultiMap { } } + async *entriesAsync(range?: Range | undefined): AsyncIterableIterator<[K, V]> { + for (const entry of this.entries(range)) { + yield entry; + } + } + *values(range: Range = {}): IterableIterator { for (const [_, value] of this.entries(range)) { yield value; } } + async *valuesAsync(range: Range = {}): AsyncIterableIterator { + for await (const [_, value] of this.entriesAsync(range)) { + yield value; + } + } + *keys(range: Range = {}): IterableIterator { for (const [key, _] of this.entries(range)) { yield key; } } + async *keysAsync(range: Range = {}): AsyncIterableIterator { + for await (const [key, _] of this.entriesAsync(range)) { + yield key; + } + } + #slot(key: K): MapValueSlot { return ['map', this.name, 'slot', key]; } diff --git a/yarn-project/kv-store/src/lmdb/set.test.ts b/yarn-project/kv-store/src/lmdb/set.test.ts index 677b17e543fb..4b7486154c4c 100644 --- a/yarn-project/kv-store/src/lmdb/set.test.ts +++ b/yarn-project/kv-store/src/lmdb/set.test.ts @@ -1,54 +1,8 @@ -import { type Database, open } from 'lmdb'; +import { describeAztecSet } from '../interfaces/set_test_suite.js'; +import { openTmpStore } from './index.js'; -import { LmdbAztecSet } from './set.js'; +describe('LMDBSet', 
() => {
+  describeAztecSet('Sync AztecSet', () => openTmpStore(true));
-describe('LmdbAztecSet', () => {
-  let db: Database;
-  let set: LmdbAztecSet<string>;
-
-  beforeEach(() => {
-    db = open({ dupSort: true } as any);
-    set = new LmdbAztecSet<string>(db, 'test');
-  });
-
-  it('should be able to set and get values', async () => {
-    await set.add('foo');
-    await set.add('baz');
-
-    expect(set.has('foo')).toEqual(true);
-    expect(set.has('baz')).toEqual(true);
-    expect(set.has('bar')).toEqual(false);
-  });
-
-  it('should be able to delete values', async () => {
-    await set.add('foo');
-    await set.add('baz');
-
-    await set.delete('foo');
-
-    expect(set.has('foo')).toEqual(false);
-    expect(set.has('baz')).toEqual(true);
-  });
-
-  it('should be able to iterate over entries', async () => {
-    await set.add('baz');
-    await set.add('foo');
-
-    expect([...set.entries()]).toEqual(['baz', 'foo']);
-  });
-
-  it('supports range queries', async () => {
-    await set.add('a');
-    await set.add('b');
-    await set.add('c');
-    await set.add('d');
-
-    expect([...set.entries({ start: 'b', end: 'c' })]).toEqual(['b']);
-    expect([...set.entries({ start: 'b' })]).toEqual(['b', 'c', 'd']);
-    expect([...set.entries({ end: 'c' })]).toEqual(['a', 'b']);
-    expect([...set.entries({ start: 'b', end: 'c', reverse: true })]).toEqual(['c']);
-    expect([...set.entries({ start: 'b', limit: 1 })]).toEqual(['b']);
-    expect([...set.entries({ start: 'b', reverse: true })]).toEqual(['d', 'c']);
-    expect([...set.entries({ end: 'b', reverse: true })]).toEqual(['b', 'a']);
-  });
+  describeAztecSet('Async AztecSet', () => Promise.resolve(openTmpStore(true)), true);
 });
diff --git a/yarn-project/kv-store/src/lmdb/set.ts b/yarn-project/kv-store/src/lmdb/set.ts
index 887185b1b482..f92dce32f4a8 100644
--- a/yarn-project/kv-store/src/lmdb/set.ts
+++ b/yarn-project/kv-store/src/lmdb/set.ts
@@ -1,13 +1,13 @@
 import { type Database } from 'lmdb';
 import { type Key, type Range } from '../interfaces/common.js';
-import { type AztecSet } from '../interfaces/set.js';
+import { type AztecAsyncSet, type AztecSet } from '../interfaces/set.js';
 import { LmdbAztecMap } from './map.js';
 
 /**
  * A set backed by LMDB.
*/ -export class LmdbAztecSet implements AztecSet { +export class LmdbAztecSet implements AztecSet, AztecAsyncSet { private map: LmdbAztecMap; constructor(rootDb: Database, mapName: string) { this.map = new LmdbAztecMap(rootDb, mapName); @@ -21,6 +21,10 @@ export class LmdbAztecSet implements AztecSet { return this.map.has(key); } + hasAsync(key: K): Promise { + return Promise.resolve(this.has(key)); + } + add(key: K): Promise { return this.map.set(key, true); } @@ -32,4 +36,10 @@ export class LmdbAztecSet implements AztecSet { entries(range: Range = {}): IterableIterator { return this.map.keys(range); } + + async *entriesAsync(range: Range = {}): AsyncIterableIterator { + for await (const key of this.map.keysAsync(range)) { + yield key; + } + } } diff --git a/yarn-project/kv-store/src/lmdb/singleton.test.ts b/yarn-project/kv-store/src/lmdb/singleton.test.ts index de1eefae462f..24f9bca78fbc 100644 --- a/yarn-project/kv-store/src/lmdb/singleton.test.ts +++ b/yarn-project/kv-store/src/lmdb/singleton.test.ts @@ -1,25 +1,8 @@ -import { open } from 'lmdb'; +import { describeAztecSingleton } from '../interfaces/singleton_test_suite.js'; +import { openTmpStore } from './index.js'; -import { LmdbAztecSingleton } from './singleton.js'; +describe('LMDBSingleton', () => { + describeAztecSingleton('Sync AztecSingleton', () => openTmpStore(true)); -describe('LmdbAztecSingleton', () => { - let singleton: LmdbAztecSingleton; - beforeEach(() => { - singleton = new LmdbAztecSingleton(open({} as any), 'test'); - }); - - it('returns undefined if the value is not set', () => { - expect(singleton.get()).toEqual(undefined); - }); - - it('should be able to set and get values', async () => { - expect(await singleton.set('foo')).toEqual(true); - expect(singleton.get()).toEqual('foo'); - }); - - it('overwrites the value if it is set again', async () => { - expect(await singleton.set('foo')).toEqual(true); - expect(await singleton.set('bar')).toEqual(true); - expect(singleton.get()).toEqual('bar'); - }); + describeAztecSingleton('Async AztecSingleton', () => Promise.resolve(openTmpStore(true)), true); }); diff --git a/yarn-project/kv-store/src/lmdb/singleton.ts b/yarn-project/kv-store/src/lmdb/singleton.ts index abeefec242cd..8b4e5d28ff39 100644 --- a/yarn-project/kv-store/src/lmdb/singleton.ts +++ b/yarn-project/kv-store/src/lmdb/singleton.ts @@ -1,6 +1,6 @@ import { type Database, type Key } from 'lmdb'; -import { type AztecSingleton } from '../interfaces/singleton.js'; +import { type AztecAsyncSingleton, type AztecSingleton } from '../interfaces/singleton.js'; /** The slot where this singleton will store its value */ type ValueSlot = ['singleton', string, 'value']; @@ -8,7 +8,7 @@ type ValueSlot = ['singleton', string, 'value']; /** * Stores a single value in LMDB. 
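The kv-store hunks above all follow one pattern: each LMDB-backed collection keeps its synchronous API and additionally satisfies a new `AztecAsync*` interface, with `getAsync` wrapping the sync read in `Promise.resolve` and the `*Async` iterators delegating to the sync generators. A minimal usage sketch of the resulting dual surface, assuming the `openTmpStore` test factory from `lmdb/index.ts` above (generic parameters are illustrative, since several were dropped in this rendering of the diff):

```ts
import { openTmpStore } from '@aztec/kv-store/lmdb';

const store = openTmpStore(true); // ephemeral, in-memory only
const map = store.openMap<string, number>('scores');

await map.set('alice', 1);

// Sync view: LMDB reads are synchronous under the hood.
const direct = map.get('alice'); // 1

// Async view, as required by AztecAsyncMap consumers
// (e.g. backends where reads cannot be synchronous).
const viaAsync = await map.getAsync('alice'); // 1
for await (const [key, value] of map.entriesAsync()) {
  console.log(key, value, direct === viaAsync);
}
```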
*/ -export class LmdbAztecSingleton implements AztecSingleton { +export class LmdbAztecSingleton implements AztecSingleton, AztecAsyncSingleton { #db: Database; #slot: ValueSlot; @@ -21,6 +21,10 @@ export class LmdbAztecSingleton implements AztecSingleton { return this.#db.get(this.#slot); } + getAsync(): Promise { + return Promise.resolve(this.get()); + } + set(val: T): Promise { return this.#db.put(this.#slot, val); } diff --git a/yarn-project/kv-store/src/lmdb/store.test.ts b/yarn-project/kv-store/src/lmdb/store.test.ts index c09953cb55b8..1c47ab90a07d 100644 --- a/yarn-project/kv-store/src/lmdb/store.test.ts +++ b/yarn-project/kv-store/src/lmdb/store.test.ts @@ -1,39 +1,20 @@ -import { mkdtemp } from 'fs/promises'; +import { promises as fs } from 'fs'; import { tmpdir } from 'os'; import { join } from 'path'; +import { describeAztecStore } from '../interfaces/store_test_suite.js'; import { AztecLmdbStore } from './store.js'; const defaultMapSize = 1024 * 1024 * 1024 * 10; describe('AztecLmdbStore', () => { - const itForks = async (store: AztecLmdbStore) => { - const singleton = store.openSingleton('singleton'); - await singleton.set('foo'); - - const forkedStore = await store.fork(); - const forkedSingleton = forkedStore.openSingleton('singleton'); - expect(forkedSingleton.get()).toEqual('foo'); - await forkedSingleton.set('bar'); - expect(singleton.get()).toEqual('foo'); - expect(forkedSingleton.get()).toEqual('bar'); - await forkedSingleton.delete(); - expect(singleton.get()).toEqual('foo'); - }; - - it('forks a persistent store', async () => { - const path = await mkdtemp(join(tmpdir(), 'aztec-store-test-')); - const store = AztecLmdbStore.open(path, defaultMapSize, false); - await itForks(store); - }); - - it('forks a persistent store with no path', async () => { - const store = AztecLmdbStore.open(undefined, defaultMapSize, false); - await itForks(store); - }); - - it('forks an ephemeral store', async () => { - const store = AztecLmdbStore.open(undefined, defaultMapSize, true); - await itForks(store); - }); + describeAztecStore( + 'AztecStore', + async () => { + const path = await fs.mkdtemp(join(tmpdir(), 'aztec-store-test-')); + return AztecLmdbStore.open(path, defaultMapSize, false); + }, + () => Promise.resolve(AztecLmdbStore.open(undefined, defaultMapSize, false)), + () => Promise.resolve(AztecLmdbStore.open(undefined, defaultMapSize, true)), + ); }); diff --git a/yarn-project/kv-store/src/lmdb/store.ts b/yarn-project/kv-store/src/lmdb/store.ts index 3e43972f088a..e66bfb9b4ca6 100644 --- a/yarn-project/kv-store/src/lmdb/store.ts +++ b/yarn-project/kv-store/src/lmdb/store.ts @@ -1,17 +1,17 @@ import { createDebugLogger } from '@aztec/foundation/log'; -import { mkdirSync } from 'fs'; -import { mkdtemp, rm } from 'fs/promises'; -import { type Database, type Key, type RootDatabase, open } from 'lmdb'; +import { promises as fs, mkdirSync } from 'fs'; +import { type Database, type RootDatabase, open } from 'lmdb'; import { tmpdir } from 'os'; import { dirname, join } from 'path'; -import { type AztecArray } from '../interfaces/array.js'; -import { type AztecCounter } from '../interfaces/counter.js'; -import { type AztecMap, type AztecMultiMap } from '../interfaces/map.js'; -import { type AztecSet } from '../interfaces/set.js'; -import { type AztecSingleton } from '../interfaces/singleton.js'; -import { type AztecKVStore } from '../interfaces/store.js'; +import { type AztecArray, type AztecAsyncArray } from '../interfaces/array.js'; +import { type Key } from 
'../interfaces/common.js'; +import { type AztecAsyncCounter, type AztecCounter } from '../interfaces/counter.js'; +import { type AztecAsyncMap, type AztecAsyncMultiMap, type AztecMap, type AztecMultiMap } from '../interfaces/map.js'; +import { type AztecAsyncSet, type AztecSet } from '../interfaces/set.js'; +import { type AztecAsyncSingleton, type AztecSingleton } from '../interfaces/singleton.js'; +import { type AztecAsyncKVStore, type AztecKVStore } from '../interfaces/store.js'; import { LmdbAztecArray } from './array.js'; import { LmdbAztecCounter } from './counter.js'; import { LmdbAztecMap } from './map.js'; @@ -21,7 +21,9 @@ import { LmdbAztecSingleton } from './singleton.js'; /** * A key-value store backed by LMDB. */ -export class AztecLmdbStore implements AztecKVStore { +export class AztecLmdbStore implements AztecKVStore, AztecAsyncKVStore { + syncGetters = true as const; + #rootDb: RootDatabase; #data: Database; #multiMapData: Database; @@ -79,7 +81,7 @@ export class AztecLmdbStore implements AztecKVStore { const baseDir = this.path ? dirname(this.path) : tmpdir(); this.#log.debug(`Forking store with basedir ${baseDir}`); const forkPath = - (await mkdtemp(join(baseDir, 'aztec-store-fork-'))) + (this.isEphemeral || !this.path ? '/data.mdb' : ''); + (await fs.mkdtemp(join(baseDir, 'aztec-store-fork-'))) + (this.isEphemeral || !this.path ? '/data.mdb' : ''); this.#log.verbose(`Forking store to ${forkPath}`); await this.#rootDb.backup(forkPath, false); const forkDb = open(forkPath, { noSync: this.isEphemeral }); @@ -92,7 +94,7 @@ export class AztecLmdbStore implements AztecKVStore { * @param name - Name of the map * @returns A new AztecMap */ - openMap(name: string): AztecMap { + openMap(name: string): AztecMap & AztecAsyncMap { return new LmdbAztecMap(this.#data, name); } @@ -101,7 +103,7 @@ export class AztecLmdbStore implements AztecKVStore { * @param name - Name of the set * @returns A new AztecSet */ - openSet(name: string): AztecSet { + openSet(name: string): AztecSet & AztecAsyncSet { return new LmdbAztecSet(this.#data, name); } @@ -110,11 +112,11 @@ export class AztecLmdbStore implements AztecKVStore { * @param name - Name of the map * @returns A new AztecMultiMap */ - openMultiMap(name: string): AztecMultiMap { + openMultiMap(name: string): AztecMultiMap & AztecAsyncMultiMap { return new LmdbAztecMap(this.#multiMapData, name); } - openCounter>(name: string): AztecCounter { + openCounter(name: string): AztecCounter & AztecAsyncCounter { return new LmdbAztecCounter(this.#data, name); } @@ -123,7 +125,7 @@ export class AztecLmdbStore implements AztecKVStore { * @param name - Name of the array * @returns A new AztecArray */ - openArray(name: string): AztecArray { + openArray(name: string): AztecArray & AztecAsyncArray { return new LmdbAztecArray(this.#data, name); } @@ -132,7 +134,7 @@ export class AztecLmdbStore implements AztecKVStore { * @param name - Name of the singleton * @returns A new AztecSingleton */ - openSingleton(name: string): AztecSingleton { + openSingleton(name: string): AztecSingleton & AztecAsyncSingleton { return new LmdbAztecSingleton(this.#data, name); } @@ -145,6 +147,15 @@ export class AztecLmdbStore implements AztecKVStore { return this.#rootDb.transaction(callback); } + /** + * Runs a callback in a transaction. 
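Because every `open*` method now returns an intersection type (e.g. `AztecMap & AztecAsyncMap`), one `AztecLmdbStore` instance can be handed to code written purely against the async interfaces. A sketch of such a consumer, assuming the async store interface declares the same `open*` constructors and promise-returning writers that the LMDB implementations above provide (names in the body are illustrative):

```ts
import { type AztecAsyncKVStore } from '@aztec/kv-store';

async function recordCheckpoint(store: AztecAsyncKVStore, block: number): Promise<void> {
  const tip = store.openSingleton<number>('tip');
  const seen = store.openSet<number>('seenBlocks');

  // transactionAsync groups the writes; with the LMDB backend this
  // delegates to the same RootDatabase.transaction as the sync variant.
  await store.transactionAsync(async () => {
    await tip.set(block);
    await seen.add(block);
  });
}
```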
+ * @param callback - Function to execute in a transaction + * @returns A promise that resolves to the return value of the callback + */ + async transactionAsync(callback: () => Promise): Promise { + return await this.#rootDb.transaction(callback); + } + /** * Clears all entries in the store & sub DBs. */ @@ -177,7 +188,7 @@ export class AztecLmdbStore implements AztecKVStore { await this.drop(); await this.close(); if (this.path) { - await rm(this.path, { recursive: true, force: true }); + await fs.rm(this.path, { recursive: true, force: true }); this.#log.verbose(`Deleted database files at ${this.path}`); } } diff --git a/yarn-project/kv-store/src/stores/l2_tips_store.test.ts b/yarn-project/kv-store/src/stores/l2_tips_store.test.ts index 2b820aaf4327..d9ec9845fc19 100644 --- a/yarn-project/kv-store/src/stores/l2_tips_store.test.ts +++ b/yarn-project/kv-store/src/stores/l2_tips_store.test.ts @@ -1,13 +1,15 @@ import { type L2Block } from '@aztec/circuit-types'; -import { Fr, type Header } from '@aztec/circuits.js'; +import { type BlockHeader, Fr } from '@aztec/circuits.js'; import { times } from '@aztec/foundation/collection'; -import { type AztecKVStore } from '@aztec/kv-store'; -import { openTmpStore } from '@aztec/kv-store/utils'; +import { type AztecAsyncKVStore } from '@aztec/kv-store'; +import { openTmpStore } from '@aztec/kv-store/lmdb'; + +import { expect } from 'chai'; import { L2TipsStore } from './l2_tips_store.js'; describe('L2TipsStore', () => { - let kvStore: AztecKVStore; + let kvStore: AztecAsyncKVStore; let tipsStore: L2TipsStore; beforeEach(() => { @@ -16,7 +18,7 @@ describe('L2TipsStore', () => { }); const makeBlock = (number: number): L2Block => - ({ number, header: { hash: () => new Fr(number) } as Header } as L2Block); + ({ number, header: { hash: () => new Fr(number) } as BlockHeader } as L2Block); const makeTip = (number: number) => ({ number, hash: number === 0 ? 
undefined : new Fr(number).toString() }); @@ -28,7 +30,7 @@ describe('L2TipsStore', () => { it('returns zero if no tips are stored', async () => { const tips = await tipsStore.getL2Tips(); - expect(tips).toEqual(makeTips(0, 0, 0)); + expect(tips).to.deep.equal(makeTips(0, 0, 0)); }); it('stores chain tips', async () => { @@ -39,18 +41,18 @@ describe('L2TipsStore', () => { await tipsStore.handleBlockStreamEvent({ type: 'chain-pruned', blockNumber: 10 }); const tips = await tipsStore.getL2Tips(); - expect(tips).toEqual(makeTips(10, 8, 5)); + expect(tips).to.deep.equal(makeTips(10, 8, 5)); }); it('sets latest tip from blocks added', async () => { await tipsStore.handleBlockStreamEvent({ type: 'blocks-added', blocks: times(3, i => makeBlock(i + 1)) }); const tips = await tipsStore.getL2Tips(); - expect(tips).toEqual(makeTips(3, 0, 0)); + expect(tips).to.deep.equal(makeTips(3, 0, 0)); - expect(await tipsStore.getL2BlockHash(1)).toEqual(new Fr(1).toString()); - expect(await tipsStore.getL2BlockHash(2)).toEqual(new Fr(2).toString()); - expect(await tipsStore.getL2BlockHash(3)).toEqual(new Fr(3).toString()); + expect(await tipsStore.getL2BlockHash(1)).to.deep.equal(new Fr(1).toString()); + expect(await tipsStore.getL2BlockHash(2)).to.deep.equal(new Fr(2).toString()); + expect(await tipsStore.getL2BlockHash(3)).to.deep.equal(new Fr(3).toString()); }); it('clears block hashes when setting finalized chain', async () => { @@ -59,13 +61,13 @@ describe('L2TipsStore', () => { await tipsStore.handleBlockStreamEvent({ type: 'chain-finalized', blockNumber: 3 }); const tips = await tipsStore.getL2Tips(); - expect(tips).toEqual(makeTips(5, 3, 3)); + expect(tips).to.deep.equal(makeTips(5, 3, 3)); - expect(await tipsStore.getL2BlockHash(1)).toBeUndefined(); - expect(await tipsStore.getL2BlockHash(2)).toBeUndefined(); + expect(await tipsStore.getL2BlockHash(1)).to.be.undefined; + expect(await tipsStore.getL2BlockHash(2)).to.be.undefined; - expect(await tipsStore.getL2BlockHash(3)).toEqual(new Fr(3).toString()); - expect(await tipsStore.getL2BlockHash(4)).toEqual(new Fr(4).toString()); - expect(await tipsStore.getL2BlockHash(5)).toEqual(new Fr(5).toString()); + expect(await tipsStore.getL2BlockHash(3)).to.deep.equal(new Fr(3).toString()); + expect(await tipsStore.getL2BlockHash(4)).to.deep.equal(new Fr(4).toString()); + expect(await tipsStore.getL2BlockHash(5)).to.deep.equal(new Fr(5).toString()); }); }); diff --git a/yarn-project/kv-store/src/stores/l2_tips_store.ts b/yarn-project/kv-store/src/stores/l2_tips_store.ts index 8141d804ce00..149f7be80b39 100644 --- a/yarn-project/kv-store/src/stores/l2_tips_store.ts +++ b/yarn-project/kv-store/src/stores/l2_tips_store.ts @@ -7,37 +7,37 @@ import { type L2Tips, } from '@aztec/circuit-types'; -import { type AztecMap } from '../interfaces/map.js'; -import { type AztecKVStore } from '../interfaces/store.js'; +import { type AztecAsyncMap } from '../interfaces/map.js'; +import { type AztecAsyncKVStore } from '../interfaces/store.js'; /** Stores currently synced L2 tips and unfinalized block hashes. 
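These tests also swap jest matchers for chai (`to.deep.equal`, `to.be.undefined`) so the same suites can run under a browser test runner, and async iterators are materialized before being compared. A `toArray` helper like the one the counter tests above import can be as small as the following sketch (the real helper's home in the codebase may differ; `for await` accepts sync iterables too, so it covers both API flavours):

```ts
async function toArray<T>(iter: Iterable<T> | AsyncIterable<T>): Promise<T[]> {
  const result: T[] = [];
  for await (const item of iter) {
    result.push(item);
  }
  return result;
}

// e.g.: expect(await toArray(map.entriesAsync())).to.deep.equal([['foo', 'bar']]);
```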
*/ export class L2TipsStore implements L2BlockStreamEventHandler, L2BlockStreamLocalDataProvider { - private readonly l2TipsStore: AztecMap; - private readonly l2BlockHashesStore: AztecMap; + private readonly l2TipsStore: AztecAsyncMap; + private readonly l2BlockHashesStore: AztecAsyncMap; - constructor(store: AztecKVStore, namespace: string) { + constructor(store: AztecAsyncKVStore, namespace: string) { this.l2TipsStore = store.openMap([namespace, 'l2_tips'].join('_')); this.l2BlockHashesStore = store.openMap([namespace, 'l2_block_hashes'].join('_')); } public getL2BlockHash(number: number): Promise { - return Promise.resolve(this.l2BlockHashesStore.get(number)); + return this.l2BlockHashesStore.getAsync(number); } - public getL2Tips(): Promise { - return Promise.resolve({ - latest: this.getL2Tip('latest'), - finalized: this.getL2Tip('finalized'), - proven: this.getL2Tip('proven'), - }); + public async getL2Tips(): Promise { + return { + latest: await this.getL2Tip('latest'), + finalized: await this.getL2Tip('finalized'), + proven: await this.getL2Tip('proven'), + }; } - private getL2Tip(tag: L2BlockTag): L2BlockId { - const blockNumber = this.l2TipsStore.get(tag); + private async getL2Tip(tag: L2BlockTag): Promise { + const blockNumber = await this.l2TipsStore.getAsync(tag); if (blockNumber === undefined || blockNumber === 0) { return { number: 0, hash: undefined }; } - const blockHash = this.l2BlockHashesStore.get(blockNumber); + const blockHash = await this.l2BlockHashesStore.getAsync(blockNumber); if (!blockHash) { throw new Error(`Block hash not found for block number ${blockNumber}`); } @@ -60,7 +60,7 @@ export class L2TipsStore implements L2BlockStreamEventHandler, L2BlockStreamLoca break; case 'chain-finalized': await this.l2TipsStore.set('finalized', event.blockNumber); - for (const key of this.l2BlockHashesStore.keys({ end: event.blockNumber })) { + for await (const key of this.l2BlockHashesStore.keysAsync({ end: event.blockNumber })) { await this.l2BlockHashesStore.delete(key); } break; diff --git a/yarn-project/kv-store/src/utils.ts b/yarn-project/kv-store/src/utils.ts index 25b651d0922e..03e0f327f383 100644 --- a/yarn-project/kv-store/src/utils.ts +++ b/yarn-project/kv-store/src/utils.ts @@ -1,30 +1,9 @@ import { type EthAddress } from '@aztec/foundation/eth-address'; -import { type Logger, createDebugLogger } from '@aztec/foundation/log'; +import { type Logger } from '@aztec/foundation/log'; -import { join } from 'path'; - -import { type DataStoreConfig } from './config.js'; -import { type AztecKVStore } from './interfaces/store.js'; -import { AztecLmdbStore } from './lmdb/store.js'; - -export function createStore(name: string, config: DataStoreConfig, log: Logger = createDebugLogger('aztec:kv-store')) { - let { dataDirectory } = config; - if (typeof dataDirectory !== 'undefined') { - dataDirectory = join(dataDirectory, name); - } - - log.info( - dataDirectory - ? 
`Creating ${name} data store at directory ${dataDirectory} with map size ${config.dataStoreMapSizeKB} KB`
-      : `Creating ${name} ephemeral data store with map size ${config.dataStoreMapSizeKB} KB`,
-  );
-
-  const store = AztecLmdbStore.open(dataDirectory, config.dataStoreMapSizeKB, false);
-  if (config.l1Contracts?.rollupAddress) {
-    return initStoreForRollup(store, config.l1Contracts.rollupAddress, log);
-  }
-  return store;
-}
+import { type AztecAsyncSingleton, type AztecSingleton } from './interfaces/singleton.js';
+import { type AztecAsyncKVStore, type AztecKVStore } from './interfaces/store.js';
+import { isSyncStore } from './interfaces/utils.js';
 
 /**
  * Clears the store if the rollup address does not match the one stored in the database.
@@ -33,7 +12,7 @@ export function createStore(name: string, config: DataStoreConfig, log: Logger =
  * @param rollupAddress - The ETH address of the rollup contract
  * @returns A promise that resolves when the store is cleared, or rejects if the rollup address does not match
  */
-async function initStoreForRollup<T extends AztecKVStore>(
+export async function initStoreForRollup<T extends AztecKVStore | AztecAsyncKVStore>(
   store: T,
   rollupAddress: EthAddress,
   log?: Logger,
@@ -43,7 +22,9 @@ async function initStoreForRollup(
   }
   const rollupAddressValue = store.openSingleton<ReturnType<EthAddress['toString']>>('rollupAddress');
   const rollupAddressString = rollupAddress.toString();
-  const storedRollupAddressString = rollupAddressValue.get();
+  const storedRollupAddressString = isSyncStore(store)
+    ? (rollupAddressValue as AztecSingleton<ReturnType<EthAddress['toString']>>).get()
+    : await (rollupAddressValue as AztecAsyncSingleton<ReturnType<EthAddress['toString']>>).getAsync();
 
   if (typeof storedRollupAddressString !== 'undefined' && storedRollupAddressString !== rollupAddressString) {
     log?.warn(`Rollup address mismatch. Clearing entire database...`, {
@@ -57,13 +38,3 @@
   await rollupAddressValue.set(rollupAddressString);
   return store;
 }
-
-/**
- * Opens a temporary store for testing purposes.
- * @param ephemeral - true if the store should only exist in memory and not automatically be flushed to disk. Optional
- * @returns A new store
- */
-export function openTmpStore(ephemeral: boolean = false): AztecLmdbStore {
-  const mapSize = 1024 * 1024 * 10; // 10 GB map size
-  return AztecLmdbStore.open(undefined, mapSize, ephemeral);
-}
diff --git a/yarn-project/kv-store/web-test-runner.config.mjs b/yarn-project/kv-store/web-test-runner.config.mjs
new file mode 100644
index 000000000000..19811eec4879
--- /dev/null
+++ b/yarn-project/kv-store/web-test-runner.config.mjs
@@ -0,0 +1,24 @@
+import { defaultReporter } from '@web/test-runner';
+import { summaryReporter } from '@web/test-runner';
+import { fileURLToPath } from 'url';
+import { esbuildPlugin } from '@web/dev-server-esbuild';
+import { playwrightLauncher } from '@web/test-runner-playwright';
+
+const reporter = process.env.CI ?
summaryReporter() : defaultReporter(); + +export default { + browsers: [ + playwrightLauncher({ product: 'chromium' }), + // playwrightLauncher({ product: "webkit" }), + // playwrightLauncher({ product: "firefox" }), + ], + plugins: [ + esbuildPlugin({ + ts: true, + }), + ], + files: ['./src/**/indexeddb/*.test.ts'], + rootDir: fileURLToPath(new URL('../', import.meta.url)), + nodeResolve: true, + reporters: [reporter], +}; diff --git a/yarn-project/merkle-tree/src/snapshots/append_only_snapshot.test.ts b/yarn-project/merkle-tree/src/snapshots/append_only_snapshot.test.ts index e68bde0ee71e..094953b0661b 100644 --- a/yarn-project/merkle-tree/src/snapshots/append_only_snapshot.test.ts +++ b/yarn-project/merkle-tree/src/snapshots/append_only_snapshot.test.ts @@ -1,7 +1,7 @@ import { Fr } from '@aztec/foundation/fields'; import { type FromBuffer } from '@aztec/foundation/serialize'; import { type AztecKVStore } from '@aztec/kv-store'; -import { openTmpStore } from '@aztec/kv-store/utils'; +import { openTmpStore } from '@aztec/kv-store/lmdb'; import { Pedersen, StandardTree, newTree } from '../index.js'; import { AppendOnlySnapshotBuilder } from './append_only_snapshot.js'; diff --git a/yarn-project/merkle-tree/src/snapshots/full_snapshot.test.ts b/yarn-project/merkle-tree/src/snapshots/full_snapshot.test.ts index be8a8572f6c1..35b0a3a9c4af 100644 --- a/yarn-project/merkle-tree/src/snapshots/full_snapshot.test.ts +++ b/yarn-project/merkle-tree/src/snapshots/full_snapshot.test.ts @@ -1,7 +1,7 @@ import { Fr } from '@aztec/foundation/fields'; import { type FromBuffer } from '@aztec/foundation/serialize'; import { type AztecKVStore } from '@aztec/kv-store'; -import { openTmpStore } from '@aztec/kv-store/utils'; +import { openTmpStore } from '@aztec/kv-store/lmdb'; import { Pedersen, StandardTree, newTree } from '../index.js'; import { FullTreeSnapshotBuilder } from './full_snapshot.js'; diff --git a/yarn-project/merkle-tree/src/snapshots/indexed_tree_snapshot.test.ts b/yarn-project/merkle-tree/src/snapshots/indexed_tree_snapshot.test.ts index 75679d4904a3..ed25a419e5e6 100644 --- a/yarn-project/merkle-tree/src/snapshots/indexed_tree_snapshot.test.ts +++ b/yarn-project/merkle-tree/src/snapshots/indexed_tree_snapshot.test.ts @@ -1,6 +1,6 @@ import { Fr, NullifierLeaf, NullifierLeafPreimage } from '@aztec/circuits.js'; import { type AztecKVStore } from '@aztec/kv-store'; -import { openTmpStore } from '@aztec/kv-store/utils'; +import { openTmpStore } from '@aztec/kv-store/lmdb'; import { type Hasher } from '@aztec/types/interfaces'; import { Pedersen, newTree } from '../index.js'; diff --git a/yarn-project/merkle-tree/src/sparse_tree/sparse_tree.test.ts b/yarn-project/merkle-tree/src/sparse_tree/sparse_tree.test.ts index 56cbaee0a58c..fac97ade47ee 100644 --- a/yarn-project/merkle-tree/src/sparse_tree/sparse_tree.test.ts +++ b/yarn-project/merkle-tree/src/sparse_tree/sparse_tree.test.ts @@ -3,7 +3,7 @@ import { randomBigInt } from '@aztec/foundation/crypto'; import { Fr } from '@aztec/foundation/fields'; import { createDebugLogger } from '@aztec/foundation/log'; import { type AztecKVStore } from '@aztec/kv-store'; -import { openTmpStore } from '@aztec/kv-store/utils'; +import { openTmpStore } from '@aztec/kv-store/lmdb'; import { type Hasher } from '@aztec/types/interfaces'; import { INITIAL_LEAF, newTree } from '../index.js'; diff --git a/yarn-project/merkle-tree/src/standard_indexed_tree/test/standard_indexed_tree.test.ts 
b/yarn-project/merkle-tree/src/standard_indexed_tree/test/standard_indexed_tree.test.ts index d15aa61326ec..0cd8dcc4ead6 100644 --- a/yarn-project/merkle-tree/src/standard_indexed_tree/test/standard_indexed_tree.test.ts +++ b/yarn-project/merkle-tree/src/standard_indexed_tree/test/standard_indexed_tree.test.ts @@ -9,7 +9,7 @@ import { import { toBufferBE } from '@aztec/foundation/bigint-buffer'; import { type FromBuffer } from '@aztec/foundation/serialize'; import { type AztecKVStore } from '@aztec/kv-store'; -import { openTmpStore } from '@aztec/kv-store/utils'; +import { openTmpStore } from '@aztec/kv-store/lmdb'; import { type Hasher } from '@aztec/types/interfaces'; import { INITIAL_LEAF, type MerkleTree, Pedersen, loadTree, newTree } from '../../index.js'; diff --git a/yarn-project/merkle-tree/src/standard_tree/standard_tree.test.ts b/yarn-project/merkle-tree/src/standard_tree/standard_tree.test.ts index b01409eb14c5..5068474e0242 100644 --- a/yarn-project/merkle-tree/src/standard_tree/standard_tree.test.ts +++ b/yarn-project/merkle-tree/src/standard_tree/standard_tree.test.ts @@ -1,7 +1,7 @@ import { Fr } from '@aztec/foundation/fields'; import { type FromBuffer } from '@aztec/foundation/serialize'; import { type AztecKVStore } from '@aztec/kv-store'; -import { openTmpStore } from '@aztec/kv-store/utils'; +import { openTmpStore } from '@aztec/kv-store/lmdb'; import { type Hasher } from '@aztec/types/interfaces'; import { loadTree } from '../load_tree.js'; diff --git a/yarn-project/merkle-tree/src/test/standard_based_test_suite.ts b/yarn-project/merkle-tree/src/test/standard_based_test_suite.ts index 6f8fc6d09873..f11033ae76e6 100644 --- a/yarn-project/merkle-tree/src/test/standard_based_test_suite.ts +++ b/yarn-project/merkle-tree/src/test/standard_based_test_suite.ts @@ -1,7 +1,7 @@ import { SiblingPath } from '@aztec/circuit-types'; import { Fr } from '@aztec/foundation/fields'; import { type AztecKVStore } from '@aztec/kv-store'; -import { openTmpStore } from '@aztec/kv-store/utils'; +import { openTmpStore } from '@aztec/kv-store/lmdb'; import { type Hasher } from '@aztec/types/interfaces'; import { INITIAL_LEAF, Pedersen } from '../index.js'; diff --git a/yarn-project/merkle-tree/src/test/test_suite.ts b/yarn-project/merkle-tree/src/test/test_suite.ts index dba34f92d8fb..3c6b9772dcd3 100644 --- a/yarn-project/merkle-tree/src/test/test_suite.ts +++ b/yarn-project/merkle-tree/src/test/test_suite.ts @@ -1,6 +1,6 @@ import { SiblingPath } from '@aztec/circuit-types'; import { type AztecKVStore } from '@aztec/kv-store'; -import { openTmpStore } from '@aztec/kv-store/utils'; +import { openTmpStore } from '@aztec/kv-store/lmdb'; import { type Hasher } from '@aztec/types/interfaces'; import { Pedersen } from '../index.js'; diff --git a/yarn-project/merkle-tree/src/unbalanced_tree.test.ts b/yarn-project/merkle-tree/src/unbalanced_tree.test.ts index 14ee0252ee44..7851151a64dc 100644 --- a/yarn-project/merkle-tree/src/unbalanced_tree.test.ts +++ b/yarn-project/merkle-tree/src/unbalanced_tree.test.ts @@ -1,7 +1,7 @@ import { sha256Trunc } from '@aztec/foundation/crypto'; import { Fr } from '@aztec/foundation/fields'; import { type FromBuffer } from '@aztec/foundation/serialize'; -import { openTmpStore } from '@aztec/kv-store/utils'; +import { openTmpStore } from '@aztec/kv-store/lmdb'; import { type Hasher } from '@aztec/types/interfaces'; import { SHA256Trunc } from './sha_256.js'; diff --git a/yarn-project/noir-protocol-circuits-types/src/scripts/generate_private_kernel_reset_data.ts 
b/yarn-project/noir-protocol-circuits-types/src/scripts/generate_private_kernel_reset_data.ts index f415b887b724..3ce3065dd95b 100644 --- a/yarn-project/noir-protocol-circuits-types/src/scripts/generate_private_kernel_reset_data.ts +++ b/yarn-project/noir-protocol-circuits-types/src/scripts/generate_private_kernel_reset_data.ts @@ -12,7 +12,7 @@ import { } from '@aztec/circuits.js'; import { createConsoleLogger } from '@aztec/foundation/log'; -import fs from 'fs/promises'; +import { promises as fs } from 'fs'; const log = createConsoleLogger('aztec:autogenerate'); diff --git a/yarn-project/noir-protocol-circuits-types/src/scripts/generate_ts_from_abi.ts b/yarn-project/noir-protocol-circuits-types/src/scripts/generate_ts_from_abi.ts index a6b6ceae2ea0..e5c07dead20c 100644 --- a/yarn-project/noir-protocol-circuits-types/src/scripts/generate_ts_from_abi.ts +++ b/yarn-project/noir-protocol-circuits-types/src/scripts/generate_ts_from_abi.ts @@ -3,7 +3,7 @@ import { createConsoleLogger } from '@aztec/foundation/log'; import { codegen } from '@noir-lang/noir_codegen'; import { type CompiledCircuit } from '@noir-lang/types'; import { pascalCase } from 'change-case'; -import fs from 'fs/promises'; +import { promises as fs } from 'fs'; const log = createConsoleLogger('aztec:autogenerate'); diff --git a/yarn-project/noir-protocol-circuits-types/src/scripts/generate_vk_hashes.ts b/yarn-project/noir-protocol-circuits-types/src/scripts/generate_vk_hashes.ts index a0a2573e9e0a..418f2451b44e 100644 --- a/yarn-project/noir-protocol-circuits-types/src/scripts/generate_vk_hashes.ts +++ b/yarn-project/noir-protocol-circuits-types/src/scripts/generate_vk_hashes.ts @@ -3,7 +3,7 @@ import { hashVK } from '@aztec/circuits.js/hash'; import { createConsoleLogger } from '@aztec/foundation/log'; import { fileURLToPath } from '@aztec/foundation/url'; -import fs from 'fs/promises'; +import { promises as fs } from 'fs'; import { join } from 'path'; const log = createConsoleLogger('aztec:autogenerate'); diff --git a/yarn-project/noir-protocol-circuits-types/src/type_conversion.ts b/yarn-project/noir-protocol-circuits-types/src/type_conversion.ts index 77ec114a2a84..91727ff4918d 100644 --- a/yarn-project/noir-protocol-circuits-types/src/type_conversion.ts +++ b/yarn-project/noir-protocol-circuits-types/src/type_conversion.ts @@ -9,6 +9,7 @@ import { AztecAddress, BaseOrMergeRollupPublicInputs, type BaseParityInputs, + BlockHeader, type BlockMergeRollupInputs, BlockRootOrBlockMergePublicInputs, type BlockRootRollupInputs, @@ -32,7 +33,6 @@ import { GlobalVariables, GrumpkinScalar, HONK_VERIFICATION_KEY_LENGTH_IN_FIELDS, - Header, KernelCircuitPublicInputs, type KeyValidationHint, KeyValidationRequest, @@ -138,6 +138,7 @@ import type { AvmProofData as AvmProofDataNoir, BaseOrMergeRollupPublicInputs as BaseOrMergeRollupPublicInputsNoir, BaseParityInputs as BaseParityInputsNoir, + BlockHeader as BlockHeaderNoir, BlockMergeRollupInputs as BlockMergeRollupInputsNoir, BlockRootOrBlockMergePublicInputs as BlockRootOrBlockMergePublicInputsNoir, BlockRootRollupInputs as BlockRootRollupInputsNoir, @@ -159,7 +160,6 @@ import type { GasSettings as GasSettingsNoir, GlobalVariables as GlobalVariablesNoir, EmbeddedCurveScalar as GrumpkinScalarNoir, - Header as HeaderNoir, KernelCircuitPublicInputs as KernelCircuitPublicInputsNoir, KeyValidationHint as KeyValidationHintNoir, KeyValidationRequestAndGenerator as KeyValidationRequestAndGeneratorNoir, @@ -1864,11 +1864,11 @@ export function mapParityPublicInputsFromNoir(parityPublicInputs: 
ParityPublicIn } /** - * Maps header to Noir - * @param header - The header. - * @returns Header. + * Maps a block header to Noir + * @param header - The block header. + * @returns BlockHeader. */ -export function mapHeaderToNoir(header: Header): HeaderNoir { +export function mapHeaderToNoir(header: BlockHeader): BlockHeaderNoir { return { last_archive: mapAppendOnlyTreeSnapshotToNoir(header.lastArchive), content_commitment: mapContentCommitmentToNoir(header.contentCommitment), @@ -1880,12 +1880,12 @@ export function mapHeaderToNoir(header: Header): HeaderNoir { } /** - * Maps header from Noir. - * @param header - The header. - * @returns Header. + * Maps a block header from Noir. + * @param header - The block header. + * @returns BlockHeader. */ -export function mapHeaderFromNoir(header: HeaderNoir): Header { - return new Header( +export function mapHeaderFromNoir(header: BlockHeaderNoir): BlockHeader { + return new BlockHeader( mapAppendOnlyTreeSnapshotFromNoir(header.last_archive), mapContentCommitmentFromNoir(header.content_commitment), mapStateReferenceFromNoir(header.state), diff --git a/yarn-project/p2p-bootstrap/scripts/docker-compose-bootstrap.yml b/yarn-project/p2p-bootstrap/scripts/docker-compose-bootstrap.yml index 1d6be73b473e..508bbf6918fe 100644 --- a/yarn-project/p2p-bootstrap/scripts/docker-compose-bootstrap.yml +++ b/yarn-project/p2p-bootstrap/scripts/docker-compose-bootstrap.yml @@ -6,6 +6,6 @@ services: ports: - '40400:40400' environment: - DEBUG: 'aztec:*' + LOG_LEVEL: 'verbose' P2P_TCP_LISTEN_ADDR: '0.0.0.0:40400' PEER_ID: '0a260024080112205ea53185db2e52dae74d0d4d6cadc494174810d0a713cd09b0ac517c38bc781e1224080112205ea53185db2e52dae74d0d4d6cadc494174810d0a713cd09b0ac517c38bc781e1a44080112402df8b977f356c6e34fa021c9647973234dff4df706c185794405aafb556723cf5ea53185db2e52dae74d0d4d6cadc494174810d0a713cd09b0ac517c38bc781e' diff --git a/yarn-project/p2p-bootstrap/src/index.ts b/yarn-project/p2p-bootstrap/src/index.ts index 373281518908..e48edcff8e27 100644 --- a/yarn-project/p2p-bootstrap/src/index.ts +++ b/yarn-project/p2p-bootstrap/src/index.ts @@ -1,5 +1,5 @@ import { createDebugLogger } from '@aztec/foundation/log'; -import { createStore } from '@aztec/kv-store/utils'; +import { createStore } from '@aztec/kv-store/lmdb'; import { type BootnodeConfig, BootstrapNode } from '@aztec/p2p'; import { type TelemetryClient } from '@aztec/telemetry-client'; import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; diff --git a/yarn-project/p2p-bootstrap/terraform/main.tf b/yarn-project/p2p-bootstrap/terraform/main.tf index 31b76cb33cd4..e48152f282d8 100644 --- a/yarn-project/p2p-bootstrap/terraform/main.tf +++ b/yarn-project/p2p-bootstrap/terraform/main.tf @@ -137,7 +137,11 @@ resource "aws_ecs_task_definition" "p2p-bootstrap" { }, { "name": "DEBUG", - "value": "aztec:*,discv5:*" + "value": "discv5:*" + }, + { + "name": "LOG_LEVEL", + "value": "debug" }, { "name": "P2P_MIN_PEERS", diff --git a/yarn-project/p2p/package.json b/yarn-project/p2p/package.json index a458dcc94a55..9fffdafad29d 100644 --- a/yarn-project/p2p/package.json +++ b/yarn-project/p2p/package.json @@ -91,7 +91,9 @@ "libp2p": "1.5.0", "semver": "^7.6.0", "sha3": "^2.1.4", - "tslib": "^2.4.0" + "snappy": "^7.2.2", + "tslib": "^2.4.0", + "xxhash-wasm": "^1.1.0" }, "devDependencies": { "@aztec/archiver": "workspace:^", diff --git a/yarn-project/p2p/src/client/index.ts b/yarn-project/p2p/src/client/index.ts index 05056a3c54a2..ad0af77b4fd4 100644 --- a/yarn-project/p2p/src/client/index.ts +++ 
b/yarn-project/p2p/src/client/index.ts @@ -2,7 +2,7 @@ import type { ClientProtocolCircuitVerifier, L2BlockSource, WorldStateSynchroniz import { createDebugLogger } from '@aztec/foundation/log'; import { type AztecKVStore } from '@aztec/kv-store'; import { type DataStoreConfig } from '@aztec/kv-store/config'; -import { createStore } from '@aztec/kv-store/utils'; +import { createStore } from '@aztec/kv-store/lmdb'; import { type TelemetryClient } from '@aztec/telemetry-client'; import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; diff --git a/yarn-project/p2p/src/client/p2p_client.test.ts b/yarn-project/p2p/src/client/p2p_client.test.ts index 219d2caeded7..24a112ae839b 100644 --- a/yarn-project/p2p/src/client/p2p_client.test.ts +++ b/yarn-project/p2p/src/client/p2p_client.test.ts @@ -4,7 +4,7 @@ import { Fr } from '@aztec/circuits.js'; import { retryUntil } from '@aztec/foundation/retry'; import { sleep } from '@aztec/foundation/sleep'; import { type AztecKVStore } from '@aztec/kv-store'; -import { openTmpStore } from '@aztec/kv-store/utils'; +import { openTmpStore } from '@aztec/kv-store/lmdb'; import { expect, jest } from '@jest/globals'; diff --git a/yarn-project/p2p/src/mem_pools/instrumentation.ts b/yarn-project/p2p/src/mem_pools/instrumentation.ts index e4271029ba2c..d80b2f69d558 100644 --- a/yarn-project/p2p/src/mem_pools/instrumentation.ts +++ b/yarn-project/p2p/src/mem_pools/instrumentation.ts @@ -3,6 +3,7 @@ import { Attributes, type Histogram, LmdbMetrics, + type LmdbStatsCallback, Metrics, type TelemetryClient, type UpDownCounter, @@ -58,7 +59,7 @@ export class PoolInstrumentation { private defaultAttributes; - constructor(telemetry: TelemetryClient, name: PoolName) { + constructor(telemetry: TelemetryClient, name: PoolName, dbStats?: LmdbStatsCallback) { const meter = telemetry.getMeter(name); this.defaultAttributes = { [Attributes.POOL_NAME]: name }; @@ -98,13 +99,10 @@ export class PoolInstrumentation { name: Metrics.MEMPOOL_DB_NUM_ITEMS, description: 'Num items in database for the Tx mempool', }, + dbStats, ); } - public recordDBMetrics(metrics: { mappingSize: number; numItems: number; actualSize: number }) { - this.dbMetrics.recordDBMetrics(metrics); - } - public recordSize(poolObject: PoolObject) { this.objectSize.record(poolObject.getSize()); } diff --git a/yarn-project/p2p/src/mem_pools/tx_pool/aztec_kv_tx_pool.test.ts b/yarn-project/p2p/src/mem_pools/tx_pool/aztec_kv_tx_pool.test.ts index 4bd4f3e63f43..dfc5df7f105b 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool/aztec_kv_tx_pool.test.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool/aztec_kv_tx_pool.test.ts @@ -1,4 +1,4 @@ -import { openTmpStore } from '@aztec/kv-store/utils'; +import { openTmpStore } from '@aztec/kv-store/lmdb'; import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; import { AztecKVTxPool } from './aztec_kv_tx_pool.js'; diff --git a/yarn-project/p2p/src/mem_pools/tx_pool/aztec_kv_tx_pool.ts b/yarn-project/p2p/src/mem_pools/tx_pool/aztec_kv_tx_pool.ts index 865fbd8fdf28..18ba3c5fc1d6 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool/aztec_kv_tx_pool.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool/aztec_kv_tx_pool.ts @@ -37,7 +37,7 @@ export class AztecKVTxPool implements TxPool { this.#store = store; this.#log = log; - this.#metrics = new PoolInstrumentation(telemetry, PoolName.TX_POOL); + this.#metrics = new PoolInstrumentation(telemetry, PoolName.TX_POOL, () => store.estimateSize()); } public markAsMined(txHashes: TxHash[], blockNumber: number): Promise { @@ -53,8 +53,6 
@@ export class AztecKVTxPool implements TxPool {
       }
       this.#metrics.recordRemovedObjects(deleted, 'pending');
       this.#metrics.recordAddedObjects(txHashes.length, 'mined');
-      const storeSizes = this.#store.estimateSize();
-      this.#metrics.recordDBMetrics(storeSizes);
     });
   }
diff --git a/yarn-project/p2p/src/mocks/index.ts b/yarn-project/p2p/src/mocks/index.ts
index f0fc6cd2ecf1..d05243788062 100644
--- a/yarn-project/p2p/src/mocks/index.ts
+++ b/yarn-project/p2p/src/mocks/index.ts
@@ -5,7 +5,7 @@ import {
   type WorldStateSynchronizer,
 } from '@aztec/circuit-types';
 import { type DataStoreConfig } from '@aztec/kv-store/config';
-import { openTmpStore } from '@aztec/kv-store/utils';
+import { openTmpStore } from '@aztec/kv-store/lmdb';
 import { type TelemetryClient } from '@aztec/telemetry-client';
 import { NoopTelemetryClient } from '@aztec/telemetry-client/noop';
@@ -148,7 +148,7 @@ export type ReqRespNode = {
 export const MOCK_SUB_PROTOCOL_HANDLERS: ReqRespSubProtocolHandlers = {
   [PING_PROTOCOL]: pingHandler,
   [STATUS_PROTOCOL]: statusHandler,
-  [TX_REQ_PROTOCOL]: (_msg: any) => Promise.resolve(Uint8Array.from(Buffer.from('tx'))),
+  [TX_REQ_PROTOCOL]: (_msg: any) => Promise.resolve(Buffer.from('tx')),
 };
 
 // By default, all requests are valid
diff --git a/yarn-project/p2p/src/service/discv5_service.test.ts b/yarn-project/p2p/src/service/discv5_service.test.ts
index 42f207ada159..1f2ce69cb34a 100644
--- a/yarn-project/p2p/src/service/discv5_service.test.ts
+++ b/yarn-project/p2p/src/service/discv5_service.test.ts
@@ -1,6 +1,6 @@
 import { sleep } from '@aztec/foundation/sleep';
 import { type AztecKVStore } from '@aztec/kv-store';
-import { openTmpStore } from '@aztec/kv-store/utils';
+import { openTmpStore } from '@aztec/kv-store/lmdb';
 import { NoopTelemetryClient } from '@aztec/telemetry-client/noop';
 
 import { jest } from '@jest/globals';
diff --git a/yarn-project/p2p/src/service/encoding.ts b/yarn-project/p2p/src/service/encoding.ts
new file mode 100644
index 000000000000..0713b7e8a26e
--- /dev/null
+++ b/yarn-project/p2p/src/service/encoding.ts
@@ -0,0 +1,61 @@
+// Taken from lodestar: https://github.com/ChainSafe/lodestar
+import { sha256 } from '@aztec/foundation/crypto';
+
+import { type RPC } from '@chainsafe/libp2p-gossipsub/message';
+import { type DataTransform } from '@chainsafe/libp2p-gossipsub/types';
+import { type Message } from '@libp2p/interface';
+import { compressSync, uncompressSync } from 'snappy';
+import xxhashFactory from 'xxhash-wasm';
+
+// Load WASM
+const xxhash = await xxhashFactory();
+
+// Use salt to prevent msgId from being mined for collisions
+const h64Seed = BigInt(Math.floor(Math.random() * 1e9));
+
+// Shared buffer to convert msgId to string
+const sharedMsgIdBuf = Buffer.alloc(20);
+
+/**
+ * The fast message id used by gossipsub for de-duplication:
+ * a seeded xxhash64 over the raw message data (the canonical id below uses SHA256)
+ */
+export function fastMsgIdFn(rpcMsg: RPC.Message): string {
+  if (rpcMsg.data) {
+    return xxhash.h64Raw(rpcMsg.data, h64Seed).toString(16);
+  }
+  return '0000000000000000';
+}
+
+export function msgIdToStrFn(msgId: Uint8Array): string {
+  // This happens serially, no need to reallocate the buffer
+  sharedMsgIdBuf.set(msgId);
+  return `0x${sharedMsgIdBuf.toString('hex')}`;
+}
+
+/**
+ * Get the message identifier from a libp2p message
+ *
+ * Follows similarly to:
+ * https://github.com/ethereum/consensus-specs/blob/v1.1.0-alpha.7/specs/altair/p2p-interface.md#topics-and-messages
+ *
+ * @param message - The libp2p message
+ *
@returns The message identifier + */ +export function getMsgIdFn(message: Message) { + const { topic } = message; + + const vec = [Buffer.from(topic), message.data]; + return sha256(Buffer.concat(vec)).subarray(0, 20); +} + +export class SnappyTransform implements DataTransform { + inboundTransform(_topicStr: string, data: Uint8Array): Uint8Array { + const uncompressed = Buffer.from(uncompressSync(Buffer.from(data), { asBuffer: true })); + return new Uint8Array(uncompressed); + } + + outboundTransform(_topicStr: string, data: Uint8Array): Uint8Array { + return new Uint8Array(compressSync(Buffer.from(data))); + } +} diff --git a/yarn-project/p2p/src/service/libp2p_service.ts b/yarn-project/p2p/src/service/libp2p_service.ts index 18d2d180a4a5..f1e9df0c992a 100644 --- a/yarn-project/p2p/src/service/libp2p_service.ts +++ b/yarn-project/p2p/src/service/libp2p_service.ts @@ -43,6 +43,7 @@ import { } from '../tx_validator/index.js'; import { type PubSubLibp2p, convertToMultiaddr } from '../util.js'; import { AztecDatastore } from './data_store.js'; +import { SnappyTransform, fastMsgIdFn, getMsgIdFn, msgIdToStrFn } from './encoding.js'; import { PeerManager } from './peer_manager.js'; import { PeerErrorSeverity } from './peer_scoring.js'; import { pingHandler, statusHandler } from './reqresp/handlers.js'; @@ -139,7 +140,7 @@ export class LibP2PService extends WithTracer implements P2PService { // add GossipSub listener this.node.services.pubsub.addEventListener('gossipsub:message', async e => { const { msg, propagationSource: peerId } = e.detail; - this.logger.debug(`Received PUBSUB message.`); + this.logger.trace(`Received PUBSUB message.`); await this.jobQueue.put(() => this.handleNewGossipMessage(msg, peerId)); }); @@ -242,6 +243,10 @@ export class LibP2PService extends WithTracer implements P2PService { heartbeatInterval: config.gossipsubInterval, mcacheLength: config.gossipsubMcacheLength, mcacheGossip: config.gossipsubMcacheGossip, + msgIdFn: getMsgIdFn, + msgIdToStrFn: msgIdToStrFn, + fastMsgIdFn: fastMsgIdFn, + dataTransform: new SnappyTransform(), metricsRegister: otelMetricsAdapter, metricsTopicStrToLabel: metricsTopicStrToLabels(), scoreParams: createPeerScoreParams({ @@ -278,11 +283,11 @@ export class LibP2PService extends WithTracer implements P2PService { * @param msg - the tx request message * @returns the tx response message */ - const txHandler = (msg: Buffer): Promise => { + const txHandler = (msg: Buffer): Promise => { const txHash = TxHash.fromBuffer(msg); const foundTx = mempools.txPool.getTxByHash(txHash); - const asUint8Array = Uint8Array.from(foundTx ? foundTx.toBuffer() : Buffer.alloc(0)); - return Promise.resolve(asUint8Array); + const buf = foundTx ? foundTx.toBuffer() : Buffer.alloc(0); + return Promise.resolve(buf); }; const requestResponseHandlers = { @@ -451,7 +456,7 @@ export class LibP2PService extends WithTracer implements P2PService { * @param message - The message to propagate. */ public propagate(message: T): void { - this.logger.debug(`[${message.p2pMessageIdentifier()}] queued`); + this.logger.trace(`[${message.p2pMessageIdentifier()}] queued`); void this.jobQueue.put(async () => { await this.sendToPeers(message); }); diff --git a/yarn-project/p2p/src/service/reqresp/handlers.ts b/yarn-project/p2p/src/service/reqresp/handlers.ts index 688fab959e3e..20a9163f88e5 100644 --- a/yarn-project/p2p/src/service/reqresp/handlers.ts +++ b/yarn-project/p2p/src/service/reqresp/handlers.ts @@ -3,8 +3,8 @@ * @param _msg - The ping request message. 
* @returns A resolved promise with the pong response. */ -export function pingHandler(_msg: any): Promise { - return Promise.resolve(Uint8Array.from(Buffer.from('pong'))); +export function pingHandler(_msg: any): Promise { + return Promise.resolve(Buffer.from('pong')); } /** @@ -12,6 +12,6 @@ export function pingHandler(_msg: any): Promise { * @param _msg - The status request message. * @returns A resolved promise with the ok response. */ -export function statusHandler(_msg: any): Promise { - return Promise.resolve(Uint8Array.from(Buffer.from('ok'))); +export function statusHandler(_msg: any): Promise { + return Promise.resolve(Buffer.from('ok')); } diff --git a/yarn-project/p2p/src/service/reqresp/interface.ts b/yarn-project/p2p/src/service/reqresp/interface.ts index 8370b8a8a21b..e23608c3665f 100644 --- a/yarn-project/p2p/src/service/reqresp/interface.ts +++ b/yarn-project/p2p/src/service/reqresp/interface.ts @@ -16,7 +16,7 @@ export type ReqRespSubProtocol = typeof PING_PROTOCOL | typeof STATUS_PROTOCOL | * A handler for a sub protocol * The message will arrive as a buffer, and the handler must return a buffer */ -export type ReqRespSubProtocolHandler = (msg: Buffer) => Promise; +export type ReqRespSubProtocolHandler = (msg: Buffer) => Promise; /** * A type mapping from supprotocol to it's rate limits @@ -83,8 +83,8 @@ export type SubProtocolMap = { * Default handler for unimplemented sub protocols, this SHOULD be overwritten * by the service, but is provided as a fallback */ -const defaultHandler = (_msg: any): Promise => { - return Promise.resolve(Uint8Array.from(Buffer.from('unimplemented'))); +const defaultHandler = (_msg: any): Promise => { + return Promise.resolve(Buffer.from('unimplemented')); }; /** diff --git a/yarn-project/p2p/src/service/reqresp/reqresp.integration.test.ts b/yarn-project/p2p/src/service/reqresp/reqresp.integration.test.ts index c6545c5b4930..4584db095e76 100644 --- a/yarn-project/p2p/src/service/reqresp/reqresp.integration.test.ts +++ b/yarn-project/p2p/src/service/reqresp/reqresp.integration.test.ts @@ -5,7 +5,7 @@ import { createDebugLogger } from '@aztec/foundation/log'; import { sleep } from '@aztec/foundation/sleep'; import { type AztecKVStore } from '@aztec/kv-store'; import { type DataStoreConfig } from '@aztec/kv-store/config'; -import { openTmpStore } from '@aztec/kv-store/utils'; +import { openTmpStore } from '@aztec/kv-store/lmdb'; import { SignableENR } from '@chainsafe/enr'; import { describe, expect, it, jest } from '@jest/globals'; diff --git a/yarn-project/p2p/src/service/reqresp/reqresp.test.ts b/yarn-project/p2p/src/service/reqresp/reqresp.test.ts index 1807a3185227..349b3a8f6b50 100644 --- a/yarn-project/p2p/src/service/reqresp/reqresp.test.ts +++ b/yarn-project/p2p/src/service/reqresp/reqresp.test.ts @@ -8,6 +8,7 @@ import { CollectiveReqRespTimeoutError, IndiviualReqRespTimeoutError } from '../ import { MOCK_SUB_PROTOCOL_HANDLERS, MOCK_SUB_PROTOCOL_VALIDATORS, + type ReqRespNode, connectToPeers, createNodes, startNodes, @@ -23,15 +24,22 @@ const PING_REQUEST = RequestableBuffer.fromBuffer(Buffer.from('ping')); // and ask for specific data that they missed via the traditional gossip protocol. 
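With `ReqRespSubProtocolHandler` now returning `Promise<Buffer>`, custom sub-protocols no longer need to round-trip through `Uint8Array`. A sketch of a handler under the new signature; the protocol and the in-memory lookup are hypothetical, and the empty-buffer convention for "not found" mirrors `txHandler` above:

```ts
import { type ReqRespSubProtocolHandler } from './interface.js';

// Hypothetical lookup table, used only for illustration.
const txCountByBlock = new Map<number, number>([[1, 4]]);

const blockTxCountHandler: ReqRespSubProtocolHandler = (msg: Buffer) => {
  const requestedBlock = msg.readUInt32BE(0);
  const count = txCountByBlock.get(requestedBlock);
  if (count === undefined) {
    // An empty buffer signals "not found" to the requesting peer.
    return Promise.resolve(Buffer.alloc(0));
  }
  const response = Buffer.alloc(4);
  response.writeUInt32BE(count, 0);
  return Promise.resolve(response);
};
```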
describe('ReqResp', () => { let peerManager: MockProxy; + let nodes: ReqRespNode[]; beforeEach(() => { peerManager = mock(); }); + afterEach(async () => { + if (nodes) { + await stopNodes(nodes as ReqRespNode[]); + } + }); + it('Should perform a ping request', async () => { // Create two nodes // They need to discover each other - const nodes = await createNodes(peerManager, 2); + nodes = await createNodes(peerManager, 2); const { req: pinger } = nodes[0]; await startNodes(nodes); @@ -45,12 +53,10 @@ describe('ReqResp', () => { await sleep(500); expect(res?.toBuffer().toString('utf-8')).toEqual('pong'); - - await stopNodes(nodes); }); it('Should handle gracefully if a peer connected peer is offline', async () => { - const nodes = await createNodes(peerManager, 2); + nodes = await createNodes(peerManager, 2); const { req: pinger } = nodes[0]; const { req: ponger } = nodes[1]; @@ -66,12 +72,10 @@ describe('ReqResp', () => { const res = await pinger.sendRequest(PING_PROTOCOL, PING_REQUEST); expect(res).toBeUndefined(); - - await stopNodes(nodes); }); it('Should request from a later peer if other peers are offline', async () => { - const nodes = await createNodes(peerManager, 4); + nodes = await createNodes(peerManager, 4); await startNodes(nodes); await sleep(500); @@ -86,12 +90,10 @@ describe('ReqResp', () => { const res = await nodes[0].req.sendRequest(PING_PROTOCOL, PING_REQUEST); expect(res?.toBuffer().toString('utf-8')).toEqual('pong'); - - await stopNodes(nodes); }); it('Should hit a rate limit if too many requests are made in quick succession', async () => { - const nodes = await createNodes(peerManager, 2); + nodes = await createNodes(peerManager, 2); await startNodes(nodes); @@ -110,8 +112,6 @@ describe('ReqResp', () => { // Make sure the error message is logged const errorMessage = `Rate limit exceeded for ${PING_PROTOCOL} from ${nodes[0].p2p.peerId.toString()}`; expect(loggerSpy).toHaveBeenCalledWith(errorMessage); - - await stopNodes(nodes); }); describe('TX REQ PROTOCOL', () => { @@ -120,15 +120,15 @@ describe('ReqResp', () => { const txHash = tx.getTxHash(); const protocolHandlers = MOCK_SUB_PROTOCOL_HANDLERS; - protocolHandlers[TX_REQ_PROTOCOL] = (message: Buffer): Promise => { + protocolHandlers[TX_REQ_PROTOCOL] = (message: Buffer): Promise => { const receivedHash = TxHash.fromBuffer(message); if (txHash.equals(receivedHash)) { - return Promise.resolve(Uint8Array.from(tx.toBuffer())); + return Promise.resolve(tx.toBuffer()); } - return Promise.resolve(Uint8Array.from(Buffer.from(''))); + return Promise.resolve(Buffer.from('')); }; - const nodes = await createNodes(peerManager, 2); + nodes = await createNodes(peerManager, 2); await startNodes(nodes, protocolHandlers); await sleep(500); @@ -137,8 +137,6 @@ describe('ReqResp', () => { const res = await nodes[0].req.sendRequest(TX_REQ_PROTOCOL, txHash); expect(res).toEqual(tx); - - await stopNodes(nodes); }); it('Does not crash if tx hash returns undefined', async () => { @@ -147,11 +145,11 @@ describe('ReqResp', () => { const protocolHandlers = MOCK_SUB_PROTOCOL_HANDLERS; // Return nothing - protocolHandlers[TX_REQ_PROTOCOL] = (_message: Buffer): Promise => { - return Promise.resolve(Uint8Array.from(Buffer.from(''))); + protocolHandlers[TX_REQ_PROTOCOL] = (_message: Buffer): Promise => { + return Promise.resolve(Buffer.from('')); }; - const nodes = await createNodes(peerManager, 2); + nodes = await createNodes(peerManager, 2); await startNodes(nodes, protocolHandlers); await sleep(500); @@ -160,12 +158,10 @@ describe('ReqResp', 
() => { const res = await nodes[0].req.sendRequest(TX_REQ_PROTOCOL, txHash); expect(res).toBeUndefined(); - - await stopNodes(nodes); }); it('Should hit individual timeout if nothing is returned over the stream', async () => { - const nodes = await createNodes(peerManager, 2); + nodes = await createNodes(peerManager, 2); await startNodes(nodes); @@ -197,12 +193,10 @@ describe('ReqResp', () => { }), PeerErrorSeverity.HighToleranceError, ); - - await stopNodes(nodes); }); it('Should hit collective timeout if nothing is returned over the stream from multiple peers', async () => { - const nodes = await createNodes(peerManager, 4); + nodes = await createNodes(peerManager, 4); await startNodes(nodes); @@ -226,8 +220,6 @@ describe('ReqResp', () => { // Make sure the error message is logged const errorMessage = `${new CollectiveReqRespTimeoutError().message} | subProtocol: ${TX_REQ_PROTOCOL}`; expect(loggerSpy).toHaveBeenCalledWith(errorMessage); - - await stopNodes(nodes); }); it('Should penalize peer if transaction validation fails', async () => { @@ -236,12 +228,12 @@ describe('ReqResp', () => { // Mock that the node will respond with the tx const protocolHandlers = MOCK_SUB_PROTOCOL_HANDLERS; - protocolHandlers[TX_REQ_PROTOCOL] = (message: Buffer): Promise<Uint8Array> => { + protocolHandlers[TX_REQ_PROTOCOL] = (message: Buffer): Promise<Buffer> => { const receivedHash = TxHash.fromBuffer(message); if (txHash.equals(receivedHash)) { - return Promise.resolve(Uint8Array.from(tx.toBuffer())); + return Promise.resolve(tx.toBuffer()); } - return Promise.resolve(Uint8Array.from(Buffer.from(''))); + return Promise.resolve(Buffer.from('')); }; // Mock that the receiving node will find that the transaction is invalid @@ -251,7 +243,7 @@ describe('ReqResp', () => { return Promise.resolve(false); }; - const nodes = await createNodes(peerManager, 2); + nodes = await createNodes(peerManager, 2); await startNodes(nodes, protocolHandlers, protocolValidators); await sleep(500); @@ -268,8 +260,6 @@ describe('ReqResp', () => { }), PeerErrorSeverity.LowToleranceError, ); - - await stopNodes(nodes); }); }); }); diff --git a/yarn-project/p2p/src/service/reqresp/reqresp.ts b/yarn-project/p2p/src/service/reqresp/reqresp.ts index a2249015c2f0..9d67de5c367c 100644 --- a/yarn-project/p2p/src/service/reqresp/reqresp.ts +++ b/yarn-project/p2p/src/service/reqresp/reqresp.ts @@ -5,6 +5,7 @@ import { executeTimeoutWithCustomError } from '@aztec/foundation/timer'; import { type IncomingStreamData, type PeerId, type Stream } from '@libp2p/interface'; import { pipe } from 'it-pipe'; import { type Libp2p } from 'libp2p'; +import { compressSync, uncompressSync } from 'snappy'; import { type Uint8ArrayList } from 'uint8arraylist'; import { CollectiveReqRespTimeoutError, IndiviualReqRespTimeoutError } from '../../errors/reqresp.error.js'; @@ -31,6 +32,9 @@ import { RequestResponseRateLimiter } from './rate_limiter/rate_limiter.js'; * This service implements the request response sub protocol, it is heavily inspired by * ethereum implementations of the same name. * + * Note: responses are compressed in streamHandler, + * so they are decompressed in readMessage + * * see: https://github.com/ethereum/consensus-specs/blob/dev/specs/phase0/p2p-interface.md#the-reqresp-domain */
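For context, a minimal sketch of the snappy round-trip the two ends of a request now perform; compressSync and uncompressSync are the same calls used in streamHandler and readMessage below, while the sample buffer is illustrative:

import { compressSync, uncompressSync } from 'snappy';

const response = Buffer.from('pong');
// what streamHandler yields onto the wire
const onWire = new Uint8Array(compressSync(response));
// what readMessage recovers on the requesting side
const decoded = uncompressSync(Buffer.from(onWire), { asBuffer: true }) as Buffer;
// decoded.equals(response) === true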
export class ReqResp { @@ -232,7 +236,7 @@ export class ReqResp { chunks.push(chunk.subarray()); } const messageData = chunks.concat(); - return Buffer.concat(messageData); + return uncompressSync(Buffer.concat(messageData), { asBuffer: true }) as Buffer; } /** @@ -269,7 +273,8 @@ export class ReqResp { async function* (source: any) { for await (const chunkList of source) { const msg = Buffer.from(chunkList.subarray()); - yield handler(msg); + const response = await handler(msg); + yield new Uint8Array(compressSync(response)); } }, stream, diff --git a/yarn-project/p2p/src/utils.test.ts b/yarn-project/p2p/src/utils.test.ts index dc00b340060f..89e9174b577e 100644 --- a/yarn-project/p2p/src/utils.test.ts +++ b/yarn-project/p2p/src/utils.test.ts @@ -1,5 +1,5 @@ import { type AztecKVStore } from '@aztec/kv-store'; -import { openTmpStore } from '@aztec/kv-store/utils'; +import { openTmpStore } from '@aztec/kv-store/lmdb'; import { generateKeyPair, marshalPrivateKey } from '@libp2p/crypto/keys'; import { createSecp256k1PeerId } from '@libp2p/peer-id-factory'; diff --git a/yarn-project/package.json b/yarn-project/package.json index a1547a4ccda6..7d31e95a5c88 100644 --- a/yarn-project/package.json +++ b/yarn-project/package.json @@ -38,6 +38,7 @@ "docs", "end-to-end", "entrypoints", + "epoch-cache", "ethereum", "foundation", "key-store", diff --git a/yarn-project/protocol-contracts/src/scripts/generate_data.ts b/yarn-project/protocol-contracts/src/scripts/generate_data.ts index eebd16860fe6..d1044644e62a 100644 --- a/yarn-project/protocol-contracts/src/scripts/generate_data.ts +++ b/yarn-project/protocol-contracts/src/scripts/generate_data.ts @@ -18,7 +18,7 @@ import { createConsoleLogger } from '@aztec/foundation/log'; import { loadContractArtifact } from '@aztec/types/abi'; import { type NoirCompiledContract } from '@aztec/types/noir'; -import fs from 'fs/promises'; +import { promises as fs } from 'fs'; import path from 'path'; import { buildProtocolContractTree } from '../build_protocol_contract_tree.js'; diff --git a/yarn-project/prover-client/package.json b/yarn-project/prover-client/package.json index b87665420836..2c01d5617c0f 100644 --- a/yarn-project/prover-client/package.json +++ b/yarn-project/prover-client/package.json @@ -28,8 +28,8 @@ "formatting": "run -T prettier --check ./src && run -T eslint ./src", "formatting:fix": "run -T eslint --fix ./src && run -T prettier -w ./src", "bb": "node --no-warnings ./dest/bb/index.js", - "test": "DEBUG_COLORS=1 NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --testTimeout=1500000 --forceExit", - "test:debug": "LOG_LEVEL=debug DEBUG_COLORS=1 NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --testTimeout=1500000 --forceExit --testNamePattern prover/bb_prover/parity" + "test": "NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --testTimeout=1500000 --forceExit", + "test:debug": "LOG_LEVEL=debug NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --testTimeout=1500000 --forceExit --testNamePattern prover/bb_prover/parity" }, "jest": { "moduleNameMapper": { diff --git a/yarn-project/prover-client/package.local.json b/yarn-project/prover-client/package.local.json index bc11a5330d00..74d761e17a1b 100644 ---
a/yarn-project/prover-client/package.local.json +++ b/yarn-project/prover-client/package.local.json @@ -1,5 +1,5 @@ { "scripts": { - "test": "DEBUG_COLORS=1 NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --testTimeout=1500000 --forceExit" + "test": "NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --testTimeout=1500000 --forceExit" } } diff --git a/yarn-project/prover-client/src/mocks/fixtures.ts b/yarn-project/prover-client/src/mocks/fixtures.ts index c6f54f98d413..cbcc2596890f 100644 --- a/yarn-project/prover-client/src/mocks/fixtures.ts +++ b/yarn-project/prover-client/src/mocks/fixtures.ts @@ -15,7 +15,7 @@ import { type DebugLogger } from '@aztec/foundation/log'; import { fileURLToPath } from '@aztec/foundation/url'; import { NativeACVMSimulator, type SimulationProvider, WASMSimulator } from '@aztec/simulator'; -import * as fs from 'fs/promises'; +import { promises as fs } from 'fs'; import path from 'path'; const { diff --git a/yarn-project/prover-client/src/mocks/test_context.ts b/yarn-project/prover-client/src/mocks/test_context.ts index e2df1346c11f..2211b10cb3bb 100644 --- a/yarn-project/prover-client/src/mocks/test_context.ts +++ b/yarn-project/prover-client/src/mocks/test_context.ts @@ -9,7 +9,7 @@ import { type TxValidator, } from '@aztec/circuit-types'; import { makeBloatedProcessedTx } from '@aztec/circuit-types/test'; -import { type AppendOnlyTreeSnapshot, type Gas, type GlobalVariables, Header } from '@aztec/circuits.js'; +import { type AppendOnlyTreeSnapshot, BlockHeader, type Gas, type GlobalVariables } from '@aztec/circuits.js'; import { times } from '@aztec/foundation/collection'; import { Fr } from '@aztec/foundation/fields'; import { type DebugLogger } from '@aztec/foundation/log'; @@ -27,7 +27,7 @@ import { type MerkleTreeAdminDatabase } from '@aztec/world-state'; import { NativeWorldStateService } from '@aztec/world-state/native'; import { jest } from '@jest/globals'; -import * as fs from 'fs/promises'; +import { promises as fs } from 'fs'; import { mock } from 'jest-mock-extended'; import { TestCircuitProver } from '../../../bb-prover/src/test/test_circuit_prover.js'; @@ -40,7 +40,7 @@ import { ProverAgent } from '../prover-agent/prover-agent.js'; import { getEnvironmentConfig, getSimulationProvider, makeGlobals } from './fixtures.js'; export class TestContext { - private headers: Map<number, Header> = new Map(); + private headers: Map<number, BlockHeader> = new Map(); constructor( public publicTxSimulator: PublicTxSimulator, @@ -83,7 +83,7 @@ export class TestContext { const processor = new PublicProcessor( publicDb, globalVariables, - Header.empty(), + BlockHeader.empty(), worldStateDB, publicTxSimulator, telemetry, @@ -138,9 +138,9 @@ export class TestContext { return this.worldState.fork(); } - public getHeader(blockNumber: 0): Header; - public getHeader(blockNumber: number): Header | undefined; - public getHeader(blockNumber = 0) { + public getBlockHeader(blockNumber: 0): BlockHeader; + public getBlockHeader(blockNumber: number): BlockHeader | undefined; + public getBlockHeader(blockNumber = 0) { return blockNumber === 0 ? this.worldState.getCommitted().getInitialHeader() : this.headers.get(blockNumber); } @@ -156,7 +156,7 @@ export class TestContext { public makeProcessedTx(seedOrOpts?: Parameters<typeof makeBloatedProcessedTx>[0] | number): ProcessedTx { const opts = typeof seedOrOpts === 'number' ? { seed: seedOrOpts } : seedOrOpts; const blockNum = (opts?.globalVariables ??
this.globalVariables).blockNumber.toNumber(); - const header = this.getHeader(blockNum - 1); + const header = this.getBlockHeader(blockNum - 1); return makeBloatedProcessedTx({ header, vkTreeRoot: getVKTreeRoot(), diff --git a/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts b/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts index 9dc700689b62..ca78b4395152 100644 --- a/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts +++ b/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts @@ -10,13 +10,13 @@ import { ARCHIVE_HEIGHT, AppendOnlyTreeSnapshot, type BaseOrMergeRollupPublicInputs, + BlockHeader, BlockMergeRollupInputs, type BlockRootOrBlockMergePublicInputs, ConstantRollupData, ContentCommitment, Fr, type GlobalVariables, - Header, MAX_NOTE_HASHES_PER_TX, MAX_NULLIFIERS_PER_TX, MAX_TOTAL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, @@ -310,7 +310,7 @@ export function buildHeaderFromCircuitOutputs( sha256Trunc(Buffer.concat([previousMergeData[0].outHash.toBuffer(), previousMergeData[1].outHash.toBuffer()])), ); const state = new StateReference(updatedL1ToL2TreeSnapshot, previousMergeData[1].end); - const header = new Header( + const header = new BlockHeader( rootRollupOutputs.previousArchive, contentCommitment, state, @@ -371,7 +371,7 @@ export async function buildHeaderAndBodyFromTxs( const fees = body.txEffects.reduce((acc, tx) => acc.add(tx.transactionFee), Fr.ZERO); const manaUsed = txs.reduce((acc, tx) => acc.add(new Fr(tx.gasUsed.totalGas.l2Gas)), Fr.ZERO); - const header = new Header(previousArchive, contentCommitment, stateReference, globalVariables, fees, manaUsed); + const header = new BlockHeader(previousArchive, contentCommitment, stateReference, globalVariables, fees, manaUsed); return { header, body }; } @@ -379,7 +379,7 @@ export async function buildHeaderAndBodyFromTxs( // Validate that the roots of all local trees match the output of the root circuit simulation export async function validateBlockRootOutput( blockRootOutput: BlockRootOrBlockMergePublicInputs, - blockHeader: Header, + blockHeader: BlockHeader, db: MerkleTreeReadOperations, ) { await Promise.all([ diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator.ts b/yarn-project/prover-client/src/orchestrator/orchestrator.ts index 713e6350c6bd..73a7a425b030 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator.ts @@ -19,12 +19,12 @@ import { type BaseOrMergeRollupPublicInputs, BaseParityInputs, type BaseRollupHints, + type BlockHeader, type BlockRootOrBlockMergePublicInputs, BlockRootRollupInputs, EmptyBlockRootRollupInputs, Fr, type GlobalVariables, - type Header, L1_TO_L2_MSG_SUBTREE_HEIGHT, L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH, type NESTED_RECURSIVE_PROOF_LENGTH, @@ -279,7 +279,7 @@ export class ProvingOrchestrator implements EpochProver { @trackSpan('ProvingOrchestrator.setBlockCompleted', (blockNumber: number) => ({ [Attributes.BLOCK_NUMBER]: blockNumber, })) - public async setBlockCompleted(blockNumber: number, expectedHeader?: Header): Promise<L2Block> { + public async setBlockCompleted(blockNumber: number, expectedHeader?: BlockHeader): Promise<L2Block> { const provingState = this.provingState?.getBlockProvingStateByBlockNumber(blockNumber); if (!provingState) { throw new Error(`Block proving state for ${blockNumber} not found`); @@ -407,7 +407,7 @@ export class ProvingOrchestrator implements EpochProver { return Promise.resolve(); } - private async
buildBlock(provingState: BlockProvingState, expectedHeader?: Header) { + private async buildBlock(provingState: BlockProvingState, expectedHeader?: BlockHeader) { // Collect all new nullifiers, commitments, and contracts from all txs in this block to build body const txs = provingState!.allTxs.map(a => a.processedTx); diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator_multi_public_functions.test.ts b/yarn-project/prover-client/src/orchestrator/orchestrator_multi_public_functions.test.ts index 26997fca8d55..5b4adf7d34d9 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator_multi_public_functions.test.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator_multi_public_functions.test.ts @@ -35,7 +35,7 @@ describe('prover/orchestrator/public-functions', () => { }), ); for (const tx of txs) { - tx.data.constants.historicalHeader = context.getHeader(0); + tx.data.constants.historicalHeader = context.getBlockHeader(0); tx.data.constants.vkTreeRoot = getVKTreeRoot(); tx.data.constants.protocolContractTreeRoot = protocolContractTreeRoot; } diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator_public_functions.test.ts b/yarn-project/prover-client/src/orchestrator/orchestrator_public_functions.test.ts index 393329094f18..040ab5ad44d6 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator_public_functions.test.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator_public_functions.test.ts @@ -35,7 +35,7 @@ describe('prover/orchestrator/public-functions', () => { numberOfNonRevertiblePublicCallRequests, numberOfRevertiblePublicCallRequests, }); - tx.data.constants.historicalHeader = context.getHeader(0); + tx.data.constants.historicalHeader = context.getBlockHeader(0); tx.data.constants.vkTreeRoot = getVKTreeRoot(); tx.data.constants.protocolContractTreeRoot = protocolContractTreeRoot; diff --git a/yarn-project/prover-client/src/prover-client/prover-client.ts b/yarn-project/prover-client/src/prover-client/prover-client.ts index 3cc5b9aa32b0..d41e3ad8851b 100644 --- a/yarn-project/prover-client/src/prover-client/prover-client.ts +++ b/yarn-project/prover-client/src/prover-client/prover-client.ts @@ -137,7 +137,15 @@ export class ProverClient implements EpochProverManager { const prover = await buildServerCircuitProver(this.config, this.telemetry); this.agents = times( this.config.proverAgentCount, - () => new ProvingAgent(this.agentClient!, proofStore, prover, [], this.config.proverAgentPollIntervalMs), + () => + new ProvingAgent( + this.agentClient!, + proofStore, + prover, + this.telemetry, + [], + this.config.proverAgentPollIntervalMs, + ), ); await Promise.all(this.agents.map(agent => agent.start())); diff --git a/yarn-project/prover-client/src/proving_broker/factory.ts b/yarn-project/prover-client/src/proving_broker/factory.ts index 02a5fcb314b2..67295fb60116 100644 --- a/yarn-project/prover-client/src/proving_broker/factory.ts +++ b/yarn-project/prover-client/src/proving_broker/factory.ts @@ -1,16 +1,20 @@ import { type ProverBrokerConfig } from '@aztec/circuit-types'; import { AztecLmdbStore } from '@aztec/kv-store/lmdb'; +import { type TelemetryClient } from '@aztec/telemetry-client'; import { ProvingBroker } from './proving_broker.js'; import { InMemoryBrokerDatabase } from './proving_broker_database/memory.js'; import { KVBrokerDatabase } from './proving_broker_database/persisted.js'; -export async function createAndStartProvingBroker(config: ProverBrokerConfig): Promise<ProvingBroker> { +export async function
createAndStartProvingBroker( + config: ProverBrokerConfig, + client: TelemetryClient, +): Promise<ProvingBroker> { const database = config.proverBrokerDataDirectory - ? new KVBrokerDatabase(AztecLmdbStore.open(config.proverBrokerDataDirectory)) + ? new KVBrokerDatabase(AztecLmdbStore.open(config.proverBrokerDataDirectory), client) : new InMemoryBrokerDatabase(); - const broker = new ProvingBroker(database, { + const broker = new ProvingBroker(database, client, { jobTimeoutMs: config.proverBrokerJobTimeoutMs, maxRetries: config.proverBrokerJobMaxRetries, timeoutIntervalMs: config.proverBrokerPollIntervalMs, diff --git a/yarn-project/prover-client/src/proving_broker/proving_agent.test.ts b/yarn-project/prover-client/src/proving_broker/proving_agent.test.ts index cc49057ab6da..5a33598a31d3 100644 --- a/yarn-project/prover-client/src/proving_broker/proving_agent.test.ts +++ b/yarn-project/prover-client/src/proving_broker/proving_agent.test.ts @@ -19,6 +19,7 @@ import { makeBaseParityInputs, makeParityPublicInputs } from '@aztec/circuits.js import { randomBytes } from '@aztec/foundation/crypto'; import { AbortError } from '@aztec/foundation/error'; import { promiseWithResolvers } from '@aztec/foundation/promise'; +import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; import { jest } from '@jest/globals'; @@ -50,7 +51,7 @@ describe('ProvingAgent', () => { saveProofOutput: jest.fn(), }; - agent = new ProvingAgent(jobSource, proofDB, prover, [ProvingRequestType.BASE_PARITY]); + agent = new ProvingAgent(jobSource, proofDB, prover, new NoopTelemetryClient(), [ProvingRequestType.BASE_PARITY]); }); afterEach(async () => { diff --git a/yarn-project/prover-client/src/proving_broker/proving_agent.ts b/yarn-project/prover-client/src/proving_broker/proving_agent.ts index 6d17c8176b5d..333ac91a4a94 100644 --- a/yarn-project/prover-client/src/proving_broker/proving_agent.ts +++ b/yarn-project/prover-client/src/proving_broker/proving_agent.ts @@ -10,8 +10,11 @@ import { } from '@aztec/circuit-types'; import { createDebugLogger } from '@aztec/foundation/log'; import { RunningPromise } from '@aztec/foundation/running-promise'; +import { Timer } from '@aztec/foundation/timer'; +import { type TelemetryClient } from '@aztec/telemetry-client'; import { type ProofStore } from './proof_store.js'; +import { ProvingAgentInstrumentation } from './proving_agent_instrumentation.js'; import { ProvingJobController, ProvingJobControllerStatus } from './proving_job_controller.js'; /** @@ -20,6 +23,8 @@ import { ProvingJobController, ProvingJobControllerStatus } from './proving_job_ export class ProvingAgent { private currentJobController?: ProvingJobController; private runningPromise: RunningPromise; + private instrumentation: ProvingAgentInstrumentation; + private idleTimer: Timer | undefined; constructor( /** The source of proving jobs */ @@ -28,12 +33,15 @@ export class ProvingAgent { private proofStore: ProofStore, /** The prover implementation to defer jobs to */ private circuitProver: ServerCircuitProver, + /** A telemetry client through which to emit metrics */ + client: TelemetryClient, /** Optional list of allowed proof types to build */ private proofAllowList: Array<ProvingRequestType> = [], /** How long to wait between jobs */ private pollIntervalMs = 1000, private log = createDebugLogger('aztec:prover-client:proving-agent'), ) { + this.instrumentation = new ProvingAgentInstrumentation(client); this.runningPromise = new RunningPromise(this.safeWork, this.pollIntervalMs); } @@ -46,6 +54,7 @@ export class ProvingAgent { } public start():
void { + this.idleTimer = new Timer(); this.runningPromise.start(); } @@ -114,6 +123,11 @@ export class ProvingAgent { ); } + if (this.idleTimer) { + this.instrumentation.recordIdleTime(this.idleTimer); + } + this.idleTimer = undefined; + this.currentJobController.start(); } catch (err) { this.log.error(`Error in ProvingAgent: ${String(err)}`); @@ -126,6 +140,7 @@ export class ProvingAgent { err: Error | undefined, result: ProvingJobResultsMap[T] | undefined, ) => { + this.idleTimer = new Timer(); if (err) { const retry = err.name === ProvingError.NAME ? (err as ProvingError).retry : false; this.log.error(`Job id=${jobId} type=${ProvingRequestType[type]} failed err=${err.message} retry=${retry}`, err); diff --git a/yarn-project/prover-client/src/proving_broker/proving_agent_instrumentation.ts b/yarn-project/prover-client/src/proving_broker/proving_agent_instrumentation.ts new file mode 100644 index 000000000000..573b71f2e932 --- /dev/null +++ b/yarn-project/prover-client/src/proving_broker/proving_agent_instrumentation.ts @@ -0,0 +1,21 @@ +import { type Timer } from '@aztec/foundation/timer'; +import { type Histogram, Metrics, type TelemetryClient, ValueType } from '@aztec/telemetry-client'; + +export class ProvingAgentInstrumentation { + private idleTime: Histogram; + + constructor(client: TelemetryClient, name = 'ProvingAgent') { + const meter = client.getMeter(name); + + this.idleTime = meter.createHistogram(Metrics.PROVING_AGENT_IDLE, { + description: 'Records how long an agent was idle', + unit: 'ms', + valueType: ValueType.INT, + }); + } + + recordIdleTime(msOrTimer: Timer | number) { + const duration = typeof msOrTimer === 'number' ? msOrTimer : Math.floor(msOrTimer.ms()); + this.idleTime.record(duration); + } +} diff --git a/yarn-project/prover-client/src/proving_broker/proving_broker.test.ts b/yarn-project/prover-client/src/proving_broker/proving_broker.test.ts index 543843a6e15e..304d30a3b37d 100644 --- a/yarn-project/prover-client/src/proving_broker/proving_broker.test.ts +++ b/yarn-project/prover-client/src/proving_broker/proving_broker.test.ts @@ -1,6 +1,7 @@ import { type ProofUri, type ProvingJob, type ProvingJobId, ProvingRequestType } from '@aztec/circuit-types'; import { randomBytes } from '@aztec/foundation/crypto'; -import { openTmpStore } from '@aztec/kv-store/utils'; +import { openTmpStore } from '@aztec/kv-store/lmdb'; +import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; import { jest } from '@jest/globals'; @@ -17,7 +18,7 @@ describe.each([ () => ({ database: new InMemoryBrokerDatabase(), cleanup: undefined }), () => { const store = openTmpStore(true); - const database = new KVBrokerDatabase(store); + const database = new KVBrokerDatabase(store, new NoopTelemetryClient()); const cleanup = () => store.close(); return { database, cleanup }; }, @@ -35,7 +36,7 @@ describe.each([ maxRetries = 2; ({ database, cleanup } = createDb()); - broker = new ProvingBroker(database, { + broker = new ProvingBroker(database, new NoopTelemetryClient(), { jobTimeoutMs, timeoutIntervalMs: jobTimeoutMs / 4, maxRetries, @@ -409,7 +410,7 @@ describe.each([ // fake some time passing while the broker restarts await jest.advanceTimersByTimeAsync(10_000); - broker = new ProvingBroker(database); + broker = new ProvingBroker(database, new NoopTelemetryClient()); await broker.start(); await assertJobStatus(job1.id, 'in-queue'); @@ -470,7 +471,7 @@ describe.each([ // fake some time passing while the broker restarts await jest.advanceTimersByTimeAsync(10_000); - broker = new 
ProvingBroker(database); + broker = new ProvingBroker(database, new NoopTelemetryClient()); await broker.start(); await assertJobStatus(job1.id, 'in-queue'); @@ -521,7 +522,7 @@ describe.each([ // fake some time passing while the broker restarts await jest.advanceTimersByTimeAsync(100 * jobTimeoutMs); - broker = new ProvingBroker(database); + broker = new ProvingBroker(database, new NoopTelemetryClient()); await broker.start(); await assertJobStatus(job1.id, 'in-queue'); diff --git a/yarn-project/prover-client/src/proving_broker/proving_broker.ts b/yarn-project/prover-client/src/proving_broker/proving_broker.ts index 62667821ec7d..1c73b62b84ab 100644 --- a/yarn-project/prover-client/src/proving_broker/proving_broker.ts +++ b/yarn-project/prover-client/src/proving_broker/proving_broker.ts @@ -12,10 +12,13 @@ import { import { createDebugLogger } from '@aztec/foundation/log'; import { type PromiseWithResolvers, RunningPromise, promiseWithResolvers } from '@aztec/foundation/promise'; import { PriorityMemoryQueue } from '@aztec/foundation/queue'; +import { Timer } from '@aztec/foundation/timer'; +import { type TelemetryClient } from '@aztec/telemetry-client'; import assert from 'assert'; import { type ProvingBrokerDatabase } from './proving_broker_database.js'; +import { type MonitorCallback, ProvingBrokerInstrumentation } from './proving_broker_instrumentation.js'; type InProgressMetadata = { id: ProvingJobId; @@ -58,6 +61,9 @@ export class ProvingBroker implements ProvingJobProducer, ProvingJobConsumer { // as above, but for results private resultsCache = new Map<ProvingJobId, ProvingJobSettledResult>(); + // tracks when each job was enqueued + private enqueuedAt = new Map<ProvingJobId, Timer>(); + // keeps track of which jobs are currently being processed // in the event of a crash this information is lost, but that's ok // the next time the broker starts it will recreate jobsCache and still @@ -75,18 +81,37 @@ export class ProvingBroker implements ProvingJobProducer, ProvingJobConsumer { private jobTimeoutMs: number; private maxRetries: number; + private instrumentation: ProvingBrokerInstrumentation; + public constructor( private database: ProvingBrokerDatabase, - { jobTimeoutMs = 30, timeoutIntervalMs = 10, maxRetries = 3 }: ProofRequestBrokerConfig = {}, + client: TelemetryClient, + { jobTimeoutMs = 30_000, timeoutIntervalMs = 10_000, maxRetries = 3 }: ProofRequestBrokerConfig = {}, private logger = createDebugLogger('aztec:prover-client:proving-broker'), ) { + this.instrumentation = new ProvingBrokerInstrumentation(client); this.timeoutPromise = new RunningPromise(this.timeoutCheck, timeoutIntervalMs); this.jobTimeoutMs = jobTimeoutMs; this.maxRetries = maxRetries; } - // eslint-disable-next-line require-await - public async start(): Promise<void> { + private measureQueueDepth: MonitorCallback = (type: ProvingRequestType) => { + return this.queues[type].length(); + }; + + private countActiveJobs: MonitorCallback = (type: ProvingRequestType) => { + let count = 0; + for (const { id } of this.inProgress.values()) { + const job = this.jobsCache.get(id); + if (job?.type === type) { + count++; + } + } + + return count; + }; + + public start(): Promise<void> { for (const [item, result] of this.database.allProvingJobs()) { this.logger.info(`Restoring proving job id=${item.id} settled=${!!result}`); @@ -103,6 +128,11 @@ } this.timeoutPromise.start(); + + this.instrumentation.monitorQueueDepth(this.measureQueueDepth); + this.instrumentation.monitorActiveJobs(this.countActiveJobs); + + return Promise.resolve(); }
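The monitor callbacks registered above make the gauges pull-based: the telemetry SDK invokes them at metric-collection time, so queue depth and active-job counts are sampled on demand rather than updated on every enqueue. A minimal sketch of the wiring, assuming a telemetry client and the broker's queues are already in hand (countActiveJobsOfType is a hypothetical helper):

const instrumentation = new ProvingBrokerInstrumentation(client);
// Each callback is invoked once per ProvingRequestType when metrics are collected.
instrumentation.monitorQueueDepth(type => queues[type].length());
instrumentation.monitorActiveJobs(type => countActiveJobsOfType(type));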
public stop(): Promise<void> { @@ -187,6 +217,10 @@ startedAt: time, lastUpdatedAt: time, }); + const enqueuedAt = this.enqueuedAt.get(job.id); + if (enqueuedAt) { + this.instrumentation.recordJobWait(job.type, enqueuedAt); + } return { job, time }; } @@ -216,6 +250,7 @@ this.logger.info(`Retrying proving job id=${id} type=${ProvingRequestType[item.type]} retry=${retries + 1}`); this.retries.set(id, retries + 1); this.enqueueJobInternal(item); + this.instrumentation.incRetriedJobs(item.type); return; } @@ -228,6 +263,11 @@ const result: ProvingJobSettledResult = { status: 'rejected', reason: String(err) }; this.resultsCache.set(id, result); this.promises.get(id)!.resolve(result); + this.instrumentation.incRejectedJobs(item.type); + if (info) { + const duration = this.timeSource() - info.startedAt; + this.instrumentation.recordJobDuration(item.type, duration * 1000); + } } reportProvingJobProgress( @@ -303,6 +343,7 @@ const result: ProvingJobSettledResult = { status: 'fulfilled', value }; this.resultsCache.set(id, result); this.promises.get(id)!.resolve(result); + this.instrumentation.incResolvedJobs(item.type); } private timeoutCheck = () => { @@ -320,6 +361,7 @@ this.logger.warn(`Proving job id=${id} timed out. Adding it back to the queue.`); this.inProgress.delete(id); this.enqueueJobInternal(item); + this.instrumentation.incTimedOutJobs(item.type); } } }; @@ -329,6 +371,7 @@ this.promises.set(job.id, promiseWithResolvers()); } this.queues[job.type].put(job); + this.enqueuedAt.set(job.id, new Timer()); this.logger.debug(`Enqueued new proving job id=${job.id}`); } } diff --git a/yarn-project/prover-client/src/proving_broker/proving_broker_database/persisted.ts b/yarn-project/prover-client/src/proving_broker/proving_broker_database/persisted.ts index 909b2d6e4e1f..61ca5232015b 100644 --- a/yarn-project/prover-client/src/proving_broker/proving_broker_database/persisted.ts +++ b/yarn-project/prover-client/src/proving_broker/proving_broker_database/persisted.ts @@ -1,14 +1,29 @@ import { type ProofUri, ProvingJob, type ProvingJobId, ProvingJobSettledResult } from '@aztec/circuit-types'; import { jsonParseWithSchema, jsonStringify } from '@aztec/foundation/json-rpc'; import { type AztecKVStore, type AztecMap } from '@aztec/kv-store'; +import { LmdbMetrics, Metrics, type TelemetryClient } from '@aztec/telemetry-client'; import { type ProvingBrokerDatabase } from '../proving_broker_database.js'; export class KVBrokerDatabase implements ProvingBrokerDatabase { private jobs: AztecMap<ProvingJobId, string>; private jobResults: AztecMap<ProvingJobId, string>; - - constructor(private store: AztecKVStore) { + private metrics: LmdbMetrics; + + constructor(private store: AztecKVStore, client: TelemetryClient) { + this.metrics = new LmdbMetrics( + client.getMeter('KVBrokerDatabase'), + { + name: Metrics.PROVING_QUEUE_DB_MAP_SIZE, + description: 'Database map size for the proving broker', + }, + { + name: Metrics.PROVING_QUEUE_DB_USED_SIZE, + description: 'Database used size for the proving broker', + }, + { name: Metrics.PROVING_QUEUE_DB_NUM_ITEMS, description:
'Number of items in the broker database' }, + () => store.estimateSize(), + ); this.jobs = store.openMap('proving_jobs'); this.jobResults = store.openMap('proving_job_results'); } diff --git a/yarn-project/prover-client/src/proving_broker/proving_broker_instrumentation.ts b/yarn-project/prover-client/src/proving_broker/proving_broker_instrumentation.ts new file mode 100644 index 000000000000..2379bdd8a323 --- /dev/null +++ b/yarn-project/prover-client/src/proving_broker/proving_broker_instrumentation.ts @@ -0,0 +1,130 @@ +import { ProvingRequestType } from '@aztec/circuit-types'; +import { type Timer } from '@aztec/foundation/timer'; +import { + Attributes, + type Histogram, + Metrics, + type ObservableGauge, + type ObservableResult, + type TelemetryClient, + type UpDownCounter, + ValueType, + millisecondBuckets, +} from '@aztec/telemetry-client'; + +export type MonitorCallback = (proofType: ProvingRequestType) => number; + +export class ProvingBrokerInstrumentation { + private queueSize: ObservableGauge; + private activeJobs: ObservableGauge; + private resolvedJobs: UpDownCounter; + private rejectedJobs: UpDownCounter; + private timedOutJobs: UpDownCounter; + private jobWait: Histogram; + private jobDuration: Histogram; + private retriedJobs: UpDownCounter; + + constructor(client: TelemetryClient, name = 'ProvingBroker') { + const meter = client.getMeter(name); + + this.queueSize = meter.createObservableGauge(Metrics.PROVING_QUEUE_SIZE, { + valueType: ValueType.INT, + }); + + this.activeJobs = meter.createObservableGauge(Metrics.PROVING_QUEUE_ACTIVE_JOBS, { + valueType: ValueType.INT, + }); + + this.resolvedJobs = meter.createUpDownCounter(Metrics.PROVING_QUEUE_RESOLVED_JOBS, { + valueType: ValueType.INT, + }); + + this.rejectedJobs = meter.createUpDownCounter(Metrics.PROVING_QUEUE_REJECTED_JOBS, { + valueType: ValueType.INT, + }); + + this.retriedJobs = meter.createUpDownCounter(Metrics.PROVING_QUEUE_RETRIED_JOBS, { + valueType: ValueType.INT, + }); + + this.timedOutJobs = meter.createUpDownCounter(Metrics.PROVING_QUEUE_TIMED_OUT_JOBS, { + valueType: ValueType.INT, + }); + + this.jobWait = meter.createHistogram(Metrics.PROVING_QUEUE_JOB_WAIT, { + description: 'Records how long a job sits in the queue', + unit: 'ms', + valueType: ValueType.INT, + advice: { + explicitBucketBoundaries: millisecondBuckets(1), // 10ms -> ~327s + }, + }); + + this.jobDuration = meter.createHistogram(Metrics.PROVING_QUEUE_JOB_DURATION, { + description: 'Records how long a job takes to complete', + unit: 'ms', + valueType: ValueType.INT, + advice: { + explicitBucketBoundaries: millisecondBuckets(1), // 10ms -> ~327s + }, + }); + } + + monitorQueueDepth(fn: MonitorCallback) { + this.queueSize.addCallback(obs => this.observe(obs, fn)); + } + + monitorActiveJobs(fn: MonitorCallback) { + this.activeJobs.addCallback(obs => this.observe(obs, fn)); + } + + incResolvedJobs(proofType: ProvingRequestType) { + this.resolvedJobs.add(1, { + [Attributes.PROVING_JOB_TYPE]: ProvingRequestType[proofType], + }); + } + + incRejectedJobs(proofType: ProvingRequestType) { + this.rejectedJobs.add(1, { + [Attributes.PROVING_JOB_TYPE]: ProvingRequestType[proofType], + }); + } + + incRetriedJobs(proofType: ProvingRequestType) { + this.retriedJobs.add(1, { + [Attributes.PROVING_JOB_TYPE]: ProvingRequestType[proofType], + }); + } + + incTimedOutJobs(proofType: ProvingRequestType) { + this.timedOutJobs.add(1, { + [Attributes.PROVING_JOB_TYPE]: ProvingRequestType[proofType], + }); + } + + recordJobWait(proofType: ProvingRequestType, 
msOrTimer: Timer | number) { + const duration = typeof msOrTimer === 'number' ? msOrTimer : Math.floor(msOrTimer.ms()); + this.jobWait.record(duration, { + [Attributes.PROVING_JOB_TYPE]: ProvingRequestType[proofType], + }); + } + + recordJobDuration(proofType: ProvingRequestType, msOrTimer: Timer | number) { + const duration = typeof msOrTimer === 'number' ? msOrTimer : Math.floor(msOrTimer.ms()); + this.jobDuration.record(duration, { + [Attributes.PROVING_JOB_TYPE]: ProvingRequestType[proofType], + }); + } + + private observe(obs: ObservableResult, fn: MonitorCallback) { + for (const proofType of Object.values(ProvingRequestType)) { + // a type predicate for TypeScript to recognize that we're only iterating over enum values + if (typeof proofType !== 'number') { + continue; + } + obs.observe(fn(proofType), { + [Attributes.PROVING_JOB_TYPE]: ProvingRequestType[proofType], + }); + } + } +} diff --git a/yarn-project/prover-client/src/test/bb_prover_base_rollup.test.ts b/yarn-project/prover-client/src/test/bb_prover_base_rollup.test.ts index 154ac6c71dd9..09945d2010e1 100644 --- a/yarn-project/prover-client/src/test/bb_prover_base_rollup.test.ts +++ b/yarn-project/prover-client/src/test/bb_prover_base_rollup.test.ts @@ -35,7 +35,7 @@ describe('prover/bb_prover/base-rollup', () => { }); it('proves the base rollup', async () => { - const header = context.getHeader(0); + const header = context.getBlockHeader(0); const chainId = context.globalVariables.chainId; const version = context.globalVariables.version; const vkTreeRoot = getVKTreeRoot(); diff --git a/yarn-project/prover-client/src/test/bb_prover_full_rollup.test.ts b/yarn-project/prover-client/src/test/bb_prover_full_rollup.test.ts index 43684e6f1a9d..182742183e69 100644 --- a/yarn-project/prover-client/src/test/bb_prover_full_rollup.test.ts +++ b/yarn-project/prover-client/src/test/bb_prover_full_rollup.test.ts @@ -39,7 +39,7 @@ describe('prover/bb_prover/full-rollup', () => { async (blockCount, totalBlocks, nonEmptyTxs, totalTxs) => { log.info(`Proving epoch with ${blockCount}/${totalBlocks} blocks with ${nonEmptyTxs}/${totalTxs} non-empty txs`); - const initialHeader = context.getHeader(0); + const initialHeader = context.getBlockHeader(0); context.orchestrator.startNewEpoch(1, 1, totalBlocks); for (let blockNum = 1; blockNum <= blockCount; blockNum++) { @@ -94,7 +94,7 @@ describe('prover/bb_prover/full-rollup', () => { }), ); for (const tx of txs) { - tx.data.constants.historicalHeader = context.getHeader(0); + tx.data.constants.historicalHeader = context.getBlockHeader(0); } const l1ToL2Messages = makeTuple( diff --git a/yarn-project/prover-client/src/test/mock_prover.ts b/yarn-project/prover-client/src/test/mock_prover.ts index c0ea23c26430..30a26cd78383 100644 --- a/yarn-project/prover-client/src/test/mock_prover.ts +++ b/yarn-project/prover-client/src/test/mock_prover.ts @@ -43,6 +43,7 @@ import { makeRootRollupPublicInputs, } from '@aztec/circuits.js/testing'; import { times } from '@aztec/foundation/collection'; +import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; import { InlineProofStore, type ProofStore } from '../proving_broker/proof_store.js'; import { ProvingAgent } from '../proving_broker/proving_agent.js'; @@ -50,7 +51,7 @@ import { ProvingBroker } from '../proving_broker/proving_broker.js'; import { InMemoryBrokerDatabase } from '../proving_broker/proving_broker_database/memory.js'; export class TestBroker implements ProvingJobProducer { - private broker = new ProvingBroker(new InMemoryBrokerDatabase()); 
+ private broker = new ProvingBroker(new InMemoryBrokerDatabase(), new NoopTelemetryClient()); private agents: ProvingAgent[]; constructor( @@ -58,7 +59,7 @@ export class TestBroker implements ProvingJobProducer { prover: ServerCircuitProver, private proofStore: ProofStore = new InlineProofStore(), ) { - this.agents = times(agentCount, () => new ProvingAgent(this.broker, proofStore, prover)); + this.agents = times(agentCount, () => new ProvingAgent(this.broker, proofStore, prover, new NoopTelemetryClient())); } public async start() { diff --git a/yarn-project/prover-node/src/factory.ts b/yarn-project/prover-node/src/factory.ts index 7190d81ee66a..8d2db37c6233 100644 --- a/yarn-project/prover-node/src/factory.ts +++ b/yarn-project/prover-node/src/factory.ts @@ -47,7 +47,7 @@ export async function createProverNode( const worldStateSynchronizer = await createWorldStateSynchronizer(worldStateConfig, archiver, telemetry); await worldStateSynchronizer.start(); - const broker = deps.broker ?? (await createAndStartProvingBroker(config)); + const broker = deps.broker ?? (await createAndStartProvingBroker(config, telemetry)); const prover = await createProverClient(config, worldStateSynchronizer, broker, telemetry); // REFACTOR: Move publisher out of sequencer package and into an L1-related package diff --git a/yarn-project/prover-node/src/prover-node.test.ts b/yarn-project/prover-node/src/prover-node.test.ts index bc8ca80897be..a9f4be5f2e01 100644 --- a/yarn-project/prover-node/src/prover-node.test.ts +++ b/yarn-project/prover-node/src/prover-node.test.ts @@ -16,7 +16,7 @@ import { type ContractDataSource, EthAddress, Fr } from '@aztec/circuits.js'; import { times } from '@aztec/foundation/collection'; import { Signature } from '@aztec/foundation/eth-signature'; import { sleep } from '@aztec/foundation/sleep'; -import { openTmpStore } from '@aztec/kv-store/utils'; +import { openTmpStore } from '@aztec/kv-store/lmdb'; import { type BootstrapNode, InMemoryAttestationPool, diff --git a/yarn-project/pxe/.gitignore b/yarn-project/pxe/.gitignore new file mode 100644 index 000000000000..3993ba71067b --- /dev/null +++ b/yarn-project/pxe/.gitignore @@ -0,0 +1 @@ +src/config/package_info.ts diff --git a/yarn-project/pxe/package.json b/yarn-project/pxe/package.json index 2d84ed81f134..69ced063f46a 100644 --- a/yarn-project/pxe/package.json +++ b/yarn-project/pxe/package.json @@ -2,7 +2,13 @@ "name": "@aztec/pxe", "version": "0.1.0", "type": "module", - "exports": "./dest/index.js", + "exports": { + ".": "./dest/index.js", + "./service": "./dest/pxe_service/index.js", + "./config": "./dest/config/index.js", + "./database": "./dest/database/index.js", + "./kernel_prover": "./dest/kernel_prover/index.js" + }, "bin": "./dest/bin/index.js", "typedocOptions": { "entryPoints": [ @@ -12,16 +18,18 @@ "tsconfig": "./tsconfig.json" }, "scripts": { - "build": "yarn clean && tsc -b", + "build": "yarn clean && yarn generate:package_info && tsc -b", "build:dev": "tsc -b --watch", - "clean": "rm -rf ./dest .tsbuildinfo", + "clean": "rm -rf ./dest .tsbuildinfo ./src/config/package_info.ts", "formatting": "run -T prettier --check ./src && run -T eslint ./src", "formatting:fix": "run -T eslint --fix ./src && run -T prettier -w ./src", + "generate:package_info": "node ./scripts/generate_package_info.js", "test": "NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --passWithNoTests", "start": "DEBUG='aztec:*' && node ./dest/bin/index.js" }, "inherits": [ - "../package.common.json" + 
"../package.common.json", + "./package.local.json" ], "jest": { "moduleNameMapper": { diff --git a/yarn-project/pxe/package.local.json b/yarn-project/pxe/package.local.json new file mode 100644 index 000000000000..ee72375cfea5 --- /dev/null +++ b/yarn-project/pxe/package.local.json @@ -0,0 +1,7 @@ +{ + "scripts": { + "build": "yarn clean && yarn generate:package_info && tsc -b", + "clean": "rm -rf ./dest .tsbuildinfo ./src/config/package_info.ts", + "generate:package_info": "node ./scripts/generate_package_info.js" + } +} \ No newline at end of file diff --git a/yarn-project/pxe/scripts/generate_package_info.js b/yarn-project/pxe/scripts/generate_package_info.js new file mode 100644 index 000000000000..2b0f2e8212f8 --- /dev/null +++ b/yarn-project/pxe/scripts/generate_package_info.js @@ -0,0 +1,14 @@ +import { readFileSync, writeFileSync } from 'fs'; +import { dirname, join } from 'path'; +import { fileURLToPath } from 'url'; + +const currentDir = dirname(fileURLToPath(import.meta.url)); + +const { version, name } = JSON.parse(readFileSync(join(currentDir, '../package.json'))); + +writeFileSync(join(currentDir, '../src/config/package_info.ts'), +`export function getPackageInfo() { + return { version: '${version}', name: '${name}' }; +} +`); + diff --git a/yarn-project/pxe/src/bin/index.ts b/yarn-project/pxe/src/bin/index.ts index ac3f0d6f84e8..fcf72856dc45 100644 --- a/yarn-project/pxe/src/bin/index.ts +++ b/yarn-project/pxe/src/bin/index.ts @@ -5,7 +5,7 @@ import { createDebugLogger } from '@aztec/foundation/log'; import { getPXEServiceConfig } from '../config/index.js'; import { startPXEHttpServer } from '../pxe_http/index.js'; -import { createPXEService } from '../pxe_service/index.js'; +import { createPXEService } from '../utils/index.js'; const { PXE_PORT = 8080, AZTEC_NODE_URL = 'http://localhost:8079' } = process.env; diff --git a/yarn-project/pxe/src/config/index.ts b/yarn-project/pxe/src/config/index.ts index d97cacbdb924..4841f7ce6fd5 100644 --- a/yarn-project/pxe/src/config/index.ts +++ b/yarn-project/pxe/src/config/index.ts @@ -8,10 +8,6 @@ import { import { type DataStoreConfig, dataConfigMappings } from '@aztec/kv-store/config'; import { type Network } from '@aztec/types/network'; -import { readFileSync } from 'fs'; -import { dirname, resolve } from 'path'; -import { fileURLToPath } from 'url'; - /** * Temporary configuration until WASM can be used instead of native */ @@ -128,13 +124,3 @@ export function getCliPXEOptions(): CliPXEOptions & PXEServiceConfig { proverEnabled: pxeConfig.proverEnabled || !!cliOptions.network, }; } - -/** - * Returns package name and version. 
- */ -export function getPackageInfo() { - const packageJsonPath = resolve(dirname(fileURLToPath(import.meta.url)), '../../package.json'); - const { version, name } = JSON.parse(readFileSync(packageJsonPath).toString()); - - return { version, name }; -} diff --git a/yarn-project/pxe/src/contract_data_oracle/index.ts b/yarn-project/pxe/src/contract_data_oracle/index.ts index d5f6f9e5c0d2..981366f3602d 100644 --- a/yarn-project/pxe/src/contract_data_oracle/index.ts +++ b/yarn-project/pxe/src/contract_data_oracle/index.ts @@ -7,7 +7,7 @@ import { getFunctionDebugMetadata, } from '@aztec/foundation/abi'; import { type Fr } from '@aztec/foundation/fields'; -import { ContractClassNotFoundError, ContractNotFoundError } from '@aztec/simulator'; +import { ContractClassNotFoundError, ContractNotFoundError } from '@aztec/simulator/client'; import { type ContractArtifactDatabase } from '../database/contracts/contract_artifact_db.js'; import { type ContractInstanceDatabase } from '../database/contracts/contract_instance_db.js'; diff --git a/yarn-project/pxe/src/database/incoming_note_dao.ts b/yarn-project/pxe/src/database/incoming_note_dao.ts index d2dc2d388153..2c9c62821cb7 100644 --- a/yarn-project/pxe/src/database/incoming_note_dao.ts +++ b/yarn-project/pxe/src/database/incoming_note_dao.ts @@ -3,7 +3,7 @@ import { AztecAddress, Fr, Point, type PublicKey } from '@aztec/circuits.js'; import { NoteSelector } from '@aztec/foundation/abi'; import { toBigIntBE } from '@aztec/foundation/bigint-buffer'; import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; -import { type NoteData } from '@aztec/simulator'; +import { type NoteData } from '@aztec/simulator/acvm'; import { type NoteInfo } from '../note_decryption_utils/index.js'; diff --git a/yarn-project/pxe/src/database/kv_pxe_database.test.ts b/yarn-project/pxe/src/database/kv_pxe_database.test.ts index 501e8de99212..a965cc964a62 100644 --- a/yarn-project/pxe/src/database/kv_pxe_database.test.ts +++ b/yarn-project/pxe/src/database/kv_pxe_database.test.ts @@ -1,4 +1,4 @@ -import { openTmpStore } from '@aztec/kv-store/utils'; +import { openTmpStore } from '@aztec/kv-store/lmdb'; import { KVPxeDatabase } from './kv_pxe_database.js'; import { describePxeDatabase } from './pxe_database_test_suite.js'; @@ -6,8 +6,8 @@ import { describePxeDatabase } from './pxe_database_test_suite.js'; describe('KVPxeDatabase', () => { let database: KVPxeDatabase; - beforeEach(() => { - database = new KVPxeDatabase(openTmpStore()); + beforeEach(async () => { + database = await KVPxeDatabase.create(openTmpStore()); }); describePxeDatabase(() => database); diff --git a/yarn-project/pxe/src/database/kv_pxe_database.ts b/yarn-project/pxe/src/database/kv_pxe_database.ts index 287af7b6bbd2..ba9e59a5e983 100644 --- a/yarn-project/pxe/src/database/kv_pxe_database.ts +++ b/yarn-project/pxe/src/database/kv_pxe_database.ts @@ -7,9 +7,9 @@ import { } from '@aztec/circuit-types'; import { AztecAddress, + BlockHeader, CompleteAddress, type ContractInstanceWithAddress, - Header, type IndexedTaggingSecret, type PublicKey, SerializableContractInstance, @@ -17,13 +17,14 @@ import { import { type ContractArtifact, FunctionSelector, FunctionType } from '@aztec/foundation/abi'; import { toBufferBE } from '@aztec/foundation/bigint-buffer'; import { Fr } from '@aztec/foundation/fields'; +import { toArray } from '@aztec/foundation/iterable'; import { - type AztecArray, - type AztecKVStore, - type AztecMap, - type AztecMultiMap, - type AztecSet, - type AztecSingleton, + type 
AztecAsyncArray, + type AztecAsyncKVStore, + type AztecAsyncMap, + type AztecAsyncMultiMap, + type AztecAsyncSet, + type AztecAsyncSingleton, } from '@aztec/kv-store'; import { contractArtifactFromBuffer, contractArtifactToBuffer } from '@aztec/types/abi'; @@ -35,48 +36,48 @@ import { type PxeDatabase } from './pxe_database.js'; * A PXE database backed by LMDB. */ export class KVPxeDatabase implements PxeDatabase { - #synchronizedBlock: AztecSingleton<Buffer>; - #completeAddresses: AztecArray<Buffer>; - #completeAddressIndex: AztecMap<string, number>; - #addressBook: AztecSet<string>; - #authWitnesses: AztecMap<string, Buffer[]>; - #capsules: AztecArray<Buffer[]>; - #notes: AztecMap<string, Buffer>; - #nullifiedNotes: AztecMap<string, Buffer>; - #nullifierToNoteId: AztecMap<string, string>; - #nullifiersByBlockNumber: AztecMultiMap<number, string>; - - #nullifiedNotesToScope: AztecMultiMap<string, string>; - #nullifiedNotesByContract: AztecMultiMap<string, string>; - #nullifiedNotesByStorageSlot: AztecMultiMap<string, string>; - #nullifiedNotesByTxHash: AztecMultiMap<string, string>; - #nullifiedNotesByAddressPoint: AztecMultiMap<string, string>; - #nullifiedNotesByNullifier: AztecMap<string, string>; - #syncedBlockPerPublicKey: AztecMap<string, number>; - #contractArtifacts: AztecMap<string, Buffer>; - #contractInstances: AztecMap<string, Buffer>; - #db: AztecKVStore; - - #outgoingNotes: AztecMap<string, Buffer>; - #outgoingNotesByContract: AztecMultiMap<string, string>; - #outgoingNotesByStorageSlot: AztecMultiMap<string, string>; - #outgoingNotesByTxHash: AztecMultiMap<string, string>; - #outgoingNotesByOvpkM: AztecMultiMap<string, string>; - - #scopes: AztecSet<string>; - #notesToScope: AztecMultiMap<string, string>; - #notesByContractAndScope: Map<string, AztecMultiMap<string, string>>; - #notesByStorageSlotAndScope: Map<string, AztecMultiMap<string, string>>; - #notesByTxHashAndScope: Map<string, AztecMultiMap<string, string>>; - #notesByAddressPointAndScope: Map<string, AztecMultiMap<string, string>>; + #synchronizedBlock: AztecAsyncSingleton<Buffer>; + #completeAddresses: AztecAsyncArray<Buffer>; + #completeAddressIndex: AztecAsyncMap<string, number>; + #addressBook: AztecAsyncSet<string>; + #authWitnesses: AztecAsyncMap<string, Buffer[]>; + #capsules: AztecAsyncArray<Buffer[]>; + #notes: AztecAsyncMap<string, Buffer>; + #nullifiedNotes: AztecAsyncMap<string, Buffer>; + #nullifierToNoteId: AztecAsyncMap<string, string>; + #nullifiersByBlockNumber: AztecAsyncMultiMap<number, string>; + + #nullifiedNotesToScope: AztecAsyncMultiMap<string, string>; + #nullifiedNotesByContract: AztecAsyncMultiMap<string, string>; + #nullifiedNotesByStorageSlot: AztecAsyncMultiMap<string, string>; + #nullifiedNotesByTxHash: AztecAsyncMultiMap<string, string>; + #nullifiedNotesByAddressPoint: AztecAsyncMultiMap<string, string>; + #nullifiedNotesByNullifier: AztecAsyncMap<string, string>; + #syncedBlockPerPublicKey: AztecAsyncMap<string, number>; + #contractArtifacts: AztecAsyncMap<string, Buffer>; + #contractInstances: AztecAsyncMap<string, Buffer>; + #db: AztecAsyncKVStore; + + #outgoingNotes: AztecAsyncMap<string, Buffer>; + #outgoingNotesByContract: AztecAsyncMultiMap<string, string>; + #outgoingNotesByStorageSlot: AztecAsyncMultiMap<string, string>; + #outgoingNotesByTxHash: AztecAsyncMultiMap<string, string>; + #outgoingNotesByOvpkM: AztecAsyncMultiMap<string, string>; + + #scopes: AztecAsyncSet<string>; + #notesToScope: AztecAsyncMultiMap<string, string>; + #notesByContractAndScope: Map<string, AztecAsyncMultiMap<string, string>>; + #notesByStorageSlotAndScope: Map<string, AztecAsyncMultiMap<string, string>>; + #notesByTxHashAndScope: Map<string, AztecAsyncMultiMap<string, string>>; + #notesByAddressPointAndScope: Map<string, AztecAsyncMultiMap<string, string>>; // Stores the last index used for each tagging secret, taking direction into account // This is necessary to avoid reusing the same index for the same secret, which happens if // sender and recipient are the same - #taggingSecretIndexesForSenders: AztecMap<string, number>; - #taggingSecretIndexesForRecipients: AztecMap<string, number>; + #taggingSecretIndexesForSenders: AztecAsyncMap<string, number>; + #taggingSecretIndexesForRecipients: AztecAsyncMap<string, number>; - constructor(private db: AztecKVStore) { + protected constructor(private db: AztecAsyncKVStore) { this.#db = db; this.#completeAddresses = db.openArray('complete_addresses'); @@ -113,22 +114,26 @@ export class KVPxeDatabase implements PxeDatabase { this.#scopes = db.openSet('scopes'); this.#notesToScope = db.openMultiMap('notes_to_scope'); - this.#notesByContractAndScope = new Map<string, AztecMultiMap<string, string>>(); - this.#notesByStorageSlotAndScope = new Map<string, AztecMultiMap<string, string>>(); - this.#notesByTxHashAndScope = new Map<string, AztecMultiMap<string, string>>(); - this.#notesByAddressPointAndScope = new Map<string, AztecMultiMap<string, string>>(); - - for (const scope of this.#scopes.entries()) { - this.#notesByContractAndScope.set(scope, db.openMultiMap(`${scope}:notes_by_contract`)); - this.#notesByStorageSlotAndScope.set(scope, db.openMultiMap(`${scope}:notes_by_storage_slot`)); - this.#notesByTxHashAndScope.set(scope, db.openMultiMap(`${scope}:notes_by_tx_hash`)); - this.#notesByAddressPointAndScope.set(scope, db.openMultiMap(`${scope}:notes_by_address_point`)); - } + this.#notesByContractAndScope = new Map<string, AztecAsyncMultiMap<string, string>>(); + this.#notesByStorageSlotAndScope = new Map<string, AztecAsyncMultiMap<string, string>>(); + this.#notesByTxHashAndScope = new Map<string, AztecAsyncMultiMap<string, string>>(); + this.#notesByAddressPointAndScope = new Map<string, AztecAsyncMultiMap<string, string>>(); this.#taggingSecretIndexesForSenders = db.openMap('tagging_secret_indexes_for_senders'); this.#taggingSecretIndexesForRecipients = db.openMap('tagging_secret_indexes_for_recipients'); }
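The protected constructor plus static create() that follows is the usual workaround for constructors being unable to await. A minimal sketch of the pattern with placeholder types, independent of the PXE classes:

class AsyncBacked {
  protected constructor(private data: Map<string, string>) {}

  // All async initialization happens here, since constructors cannot await.
  static async create(load: () => Promise<Map<string, string>>): Promise<AsyncBacked> {
    return new AsyncBacked(await load());
  }
}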
+ public static async create(db: AztecAsyncKVStore): Promise<KVPxeDatabase> { + const pxeDB = new KVPxeDatabase(db); + for await (const scope of pxeDB.#scopes.entriesAsync()) { + pxeDB.#notesByContractAndScope.set(scope, db.openMultiMap(`${scope}:notes_by_contract`)); + pxeDB.#notesByStorageSlotAndScope.set(scope, db.openMultiMap(`${scope}:notes_by_storage_slot`)); + pxeDB.#notesByTxHashAndScope.set(scope, db.openMultiMap(`${scope}:notes_by_tx_hash`)); + pxeDB.#notesByAddressPointAndScope.set(scope, db.openMultiMap(`${scope}:notes_by_address_point`)); + } + return pxeDB; + } + public async getContract( address: AztecAddress, ): Promise<(ContractInstanceWithAddress & ContractArtifact) | undefined> { @@ -157,10 +162,10 @@ await this.#contractArtifacts.set(id.toString(), contractArtifactToBuffer(contract)); } - public getContractArtifact(id: Fr): Promise<ContractArtifact | undefined> { - const contract = this.#contractArtifacts.get(id.toString()); - // TODO(@spalladino): AztecMap lies and returns Uint8Arrays instead of Buffers, hence the extra Buffer.from. - return Promise.resolve(contract && contractArtifactFromBuffer(Buffer.from(contract))); + public async getContractArtifact(id: Fr): Promise<ContractArtifact | undefined> { + const contract = await this.#contractArtifacts.getAsync(id.toString()); + // TODO(@spalladino): AztecAsyncMap lies and returns Uint8Arrays instead of Buffers, hence the extra Buffer.from.
+ return contract && contractArtifactFromBuffer(Buffer.from(contract)); } async addContractInstance(contract: ContractInstanceWithAddress): Promise<void> { await this.#contractInstances.set( @@ -170,13 +175,14 @@ contract.address.toString(), new SerializableContractInstance(contract).toBuffer(), ); } - getContractInstance(address: AztecAddress): Promise<ContractInstanceWithAddress | undefined> { - const contract = this.#contractInstances.get(address.toString()); - return Promise.resolve(contract && SerializableContractInstance.fromBuffer(contract).withAddress(address)); + async getContractInstance(address: AztecAddress): Promise<ContractInstanceWithAddress | undefined> { + const contract = await this.#contractInstances.getAsync(address.toString()); + return contract && SerializableContractInstance.fromBuffer(contract).withAddress(address); } - getContractsAddresses(): Promise<AztecAddress[]> { - return Promise.resolve(Array.from(this.#contractInstances.keys()).map(AztecAddress.fromString)); + async getContractsAddresses(): Promise<AztecAddress[]> { + const keys = await toArray(this.#contractInstances.keysAsync()); + return keys.map(AztecAddress.fromString); } async addAuthWitness(messageHash: Fr, witness: Fr[]): Promise<void> { @@ -186,8 +192,8 @@ - getAuthWitness(messageHash: Fr): Promise<Fr[] | undefined> { - const witness = this.#authWitnesses.get(messageHash.toString()); + async getAuthWitness(messageHash: Fr): Promise<Fr[] | undefined> { + const witness = await this.#authWitnesses.getAsync(messageHash.toString()); return Promise.resolve(witness?.map(w => Fr.fromBuffer(w))); } @@ -209,65 +215,69 @@ outgoingNotes: OutgoingNoteDao[], scope: AztecAddress = AztecAddress.ZERO, ): Promise<void> { - if (!this.#scopes.has(scope.toString())) { + if (!(await this.#scopes.hasAsync(scope.toString()))) { await this.#addScope(scope); } - return this.db.transaction(() => { + return this.db.transactionAsync(async () => { for (const dao of incomingNotes) { // store notes by their index in the notes hash tree // this provides the uniqueness we need to store individual notes // and should also return notes in the order that they were created. // Had we stored them by their nullifier, they would be returned in random order
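// Illustration: fixed-width big-endian keys make lexicographic order match numeric order,
// which is what keeps iteration in creation order here. For example:
//   [1n, 10n, 2n].map(i => toBufferBE(i, 32).toString('hex')).sort()
//   // yields the keys for 1, 2, 10, whereas decimal strings would sort as '1', '10', '2'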
const noteIndex = toBufferBE(dao.index, 32).toString('hex'); - void this.#notes.set(noteIndex, dao.toBuffer()); - void this.#notesToScope.set(noteIndex, scope.toString()); - void this.#nullifierToNoteId.set(dao.siloedNullifier.toString(), noteIndex); - - void this.#notesByContractAndScope.get(scope.toString())!.set(dao.contractAddress.toString(), noteIndex); - void this.#notesByStorageSlotAndScope.get(scope.toString())!.set(dao.storageSlot.toString(), noteIndex); - void this.#notesByTxHashAndScope.get(scope.toString())!.set(dao.txHash.toString(), noteIndex); - void this.#notesByAddressPointAndScope.get(scope.toString())!.set(dao.addressPoint.toString(), noteIndex); + await this.#notes.set(noteIndex, dao.toBuffer()); + await this.#notesToScope.set(noteIndex, scope.toString()); + await this.#nullifierToNoteId.set(dao.siloedNullifier.toString(), noteIndex); + + await this.#notesByContractAndScope.get(scope.toString())!.set(dao.contractAddress.toString(), noteIndex); + await this.#notesByStorageSlotAndScope.get(scope.toString())!.set(dao.storageSlot.toString(), noteIndex); + await this.#notesByTxHashAndScope.get(scope.toString())!.set(dao.txHash.toString(), noteIndex); + await this.#notesByAddressPointAndScope.get(scope.toString())!.set(dao.addressPoint.toString(), noteIndex); } for (const dao of outgoingNotes) { const noteIndex = toBufferBE(dao.index, 32).toString('hex'); - void this.#outgoingNotes.set(noteIndex, dao.toBuffer()); - void this.#outgoingNotesByContract.set(dao.contractAddress.toString(), noteIndex); - void this.#outgoingNotesByStorageSlot.set(dao.storageSlot.toString(), noteIndex); - void this.#outgoingNotesByTxHash.set(dao.txHash.toString(), noteIndex); - void this.#outgoingNotesByOvpkM.set(dao.ovpkM.toString(), noteIndex); + await this.#outgoingNotes.set(noteIndex, dao.toBuffer()); + await this.#outgoingNotesByContract.set(dao.contractAddress.toString(), noteIndex); + await this.#outgoingNotesByStorageSlot.set(dao.storageSlot.toString(), noteIndex); + await this.#outgoingNotesByTxHash.set(dao.txHash.toString(), noteIndex); + await this.#outgoingNotesByOvpkM.set(dao.ovpkM.toString(), noteIndex); } }); } public removeNotesAfter(blockNumber: number): Promise<void> { - return this.db.transaction(() => { - for (const note of this.#notes.values()) { + return this.db.transactionAsync(async () => { + const notes = await toArray(this.#notes.valuesAsync()); + for (const note of notes) { const noteDao = IncomingNoteDao.fromBuffer(note); if (noteDao.l2BlockNumber > blockNumber) { const noteIndex = toBufferBE(noteDao.index, 32).toString('hex'); - void this.#notes.delete(noteIndex); - void this.#notesToScope.delete(noteIndex); - void this.#nullifierToNoteId.delete(noteDao.siloedNullifier.toString()); - for (const scope of this.#scopes.entries()) { - void this.#notesByAddressPointAndScope.get(scope)!.deleteValue(noteDao.addressPoint.toString(), noteIndex); - void this.#notesByTxHashAndScope.get(scope)!.deleteValue(noteDao.txHash.toString(), noteIndex); - void this.#notesByContractAndScope.get(scope)!.deleteValue(noteDao.contractAddress.toString(), noteIndex); - void this.#notesByStorageSlotAndScope.get(scope)!.deleteValue(noteDao.storageSlot.toString(), noteIndex); + await this.#notes.delete(noteIndex); + await this.#notesToScope.delete(noteIndex); + await this.#nullifierToNoteId.delete(noteDao.siloedNullifier.toString()); + const scopes = await toArray(this.#scopes.entriesAsync()); + for (const scope of scopes) { +
await this.#notesByAddressPointAndScope.get(scope)!.deleteValue(noteDao.addressPoint.toString(), noteIndex); + await this.#notesByTxHashAndScope.get(scope)!.deleteValue(noteDao.txHash.toString(), noteIndex); + await this.#notesByContractAndScope.get(scope)!.deleteValue(noteDao.contractAddress.toString(), noteIndex); + await this.#notesByStorageSlotAndScope.get(scope)!.deleteValue(noteDao.storageSlot.toString(), noteIndex); } } } - for (const note of this.#outgoingNotes.values()) { + const outgoingNotes = await toArray(this.#outgoingNotes.valuesAsync()); + + for (const note of outgoingNotes) { const noteDao = OutgoingNoteDao.fromBuffer(note); if (noteDao.l2BlockNumber > blockNumber) { const noteIndex = toBufferBE(noteDao.index, 32).toString('hex'); - void this.#outgoingNotes.delete(noteIndex); - void this.#outgoingNotesByContract.deleteValue(noteDao.contractAddress.toString(), noteIndex); - void this.#outgoingNotesByStorageSlot.deleteValue(noteDao.storageSlot.toString(), noteIndex); - void this.#outgoingNotesByTxHash.deleteValue(noteDao.txHash.toString(), noteIndex); - void this.#outgoingNotesByOvpkM.deleteValue(noteDao.ovpkM.toString(), noteIndex); + await this.#outgoingNotes.delete(noteIndex); + await this.#outgoingNotesByContract.deleteValue(noteDao.contractAddress.toString(), noteIndex); + await this.#outgoingNotesByStorageSlot.deleteValue(noteDao.storageSlot.toString(), noteIndex); + await this.#outgoingNotesByTxHash.deleteValue(noteDao.txHash.toString(), noteIndex); + await this.#outgoingNotesByOvpkM.deleteValue(noteDao.ovpkM.toString(), noteIndex); } } }); @@ -276,101 +286,112 @@ export class KVPxeDatabase implements PxeDatabase { public async unnullifyNotesAfter(blockNumber: number): Promise { const nullifiersToUndo: string[] = []; const currentBlockNumber = blockNumber + 1; - const maxBlockNumber = this.getBlockNumber() ?? currentBlockNumber; + const maxBlockNumber = (await this.getBlockNumber()) ?? 
currentBlockNumber; for (let i = currentBlockNumber; i <= maxBlockNumber; i++) { - nullifiersToUndo.push(...this.#nullifiersByBlockNumber.getValues(i)); + nullifiersToUndo.push(...(await toArray(this.#nullifiersByBlockNumber.getValuesAsync(i)))); } - const notesIndexesToReinsert = await this.db.transaction(() => - nullifiersToUndo.map(nullifier => this.#nullifiedNotesByNullifier.get(nullifier)), + const notesIndexesToReinsert = await Promise.all( + nullifiersToUndo.map(nullifier => this.#nullifiedNotesByNullifier.getAsync(nullifier)), + ); + const notNullNoteIndexes = notesIndexesToReinsert.filter(noteIndex => noteIndex != undefined); + const nullifiedNoteBuffers = await Promise.all( + notNullNoteIndexes.map(noteIndex => this.#nullifiedNotes.getAsync(noteIndex!)), ); - const nullifiedNoteBuffers = await this.db.transaction(() => { - return notesIndexesToReinsert - .filter(noteIndex => noteIndex != undefined) - .map(noteIndex => this.#nullifiedNotes.get(noteIndex!)); - }); const noteDaos = nullifiedNoteBuffers .filter(buffer => buffer != undefined) .map(buffer => IncomingNoteDao.fromBuffer(buffer!)); - await this.db.transaction(() => { + await this.db.transactionAsync(async () => { for (const dao of noteDaos) { const noteIndex = toBufferBE(dao.index, 32).toString('hex'); - void this.#notes.set(noteIndex, dao.toBuffer()); - void this.#nullifierToNoteId.set(dao.siloedNullifier.toString(), noteIndex); + await this.#notes.set(noteIndex, dao.toBuffer()); + await this.#nullifierToNoteId.set(dao.siloedNullifier.toString(), noteIndex); - let scopes = Array.from(this.#nullifiedNotesToScope.getValues(noteIndex) ?? []); + let scopes = (await toArray(this.#nullifiedNotesToScope.getValuesAsync(noteIndex))) ?? []; if (scopes.length === 0) { scopes = [new AztecAddress(dao.addressPoint.x).toString()]; } for (const scope of scopes) { - void this.#notesByContractAndScope.get(scope)!.set(dao.contractAddress.toString(), noteIndex); - void this.#notesByStorageSlotAndScope.get(scope)!.set(dao.storageSlot.toString(), noteIndex); - void this.#notesByTxHashAndScope.get(scope)!.set(dao.txHash.toString(), noteIndex); - void this.#notesByAddressPointAndScope.get(scope)!.set(dao.addressPoint.toString(), noteIndex); - void this.#notesToScope.set(noteIndex, scope); + await this.#notesByContractAndScope.get(scope.toString())!.set(dao.contractAddress.toString(), noteIndex); + await this.#notesByStorageSlotAndScope.get(scope.toString())!.set(dao.storageSlot.toString(), noteIndex); + await this.#notesByTxHashAndScope.get(scope.toString())!.set(dao.txHash.toString(), noteIndex); + await this.#notesByAddressPointAndScope.get(scope.toString())!.set(dao.addressPoint.toString(), noteIndex); + await this.#notesToScope.set(noteIndex, scope); } - void this.#nullifiedNotes.delete(noteIndex); - void this.#nullifiedNotesToScope.delete(noteIndex); - void this.#nullifiersByBlockNumber.deleteValue(dao.l2BlockNumber, dao.siloedNullifier.toString()); - void this.#nullifiedNotesByContract.deleteValue(dao.contractAddress.toString(), noteIndex); - void this.#nullifiedNotesByStorageSlot.deleteValue(dao.storageSlot.toString(), noteIndex); - void this.#nullifiedNotesByTxHash.deleteValue(dao.txHash.toString(), noteIndex); - void this.#nullifiedNotesByAddressPoint.deleteValue(dao.addressPoint.toString(), noteIndex); - void this.#nullifiedNotesByNullifier.delete(dao.siloedNullifier.toString()); + await this.#nullifiedNotes.delete(noteIndex); + await this.#nullifiedNotesToScope.delete(noteIndex); + await 
this.#nullifiersByBlockNumber.deleteValue(dao.l2BlockNumber, dao.siloedNullifier.toString());
+        await this.#nullifiedNotesByContract.deleteValue(dao.contractAddress.toString(), noteIndex);
+        await this.#nullifiedNotesByStorageSlot.deleteValue(dao.storageSlot.toString(), noteIndex);
+        await this.#nullifiedNotesByTxHash.deleteValue(dao.txHash.toString(), noteIndex);
+        await this.#nullifiedNotesByAddressPoint.deleteValue(dao.addressPoint.toString(), noteIndex);
+        await this.#nullifiedNotesByNullifier.delete(dao.siloedNullifier.toString());
      }
    });
  }

-  getIncomingNotes(filter: IncomingNotesFilter): Promise<IncomingNoteDao[]> {
+  async getIncomingNotes(filter: IncomingNotesFilter): Promise<IncomingNoteDao[]> {
    const publicKey: PublicKey | undefined = filter.owner ? filter.owner.toAddressPoint() : undefined;

    filter.status = filter.status ?? NoteStatus.ACTIVE;

    const candidateNoteSources = [];

-    filter.scopes ??= [...this.#scopes.entries()].map(addressString => AztecAddress.fromString(addressString));
+    filter.scopes ??= (await toArray(this.#scopes.entriesAsync())).map(addressString =>
+      AztecAddress.fromString(addressString),
+    );

-    const activeNoteIdsPerScope: IterableIterator<string>[] = [];
+    const activeNoteIdsPerScope: string[][] = [];

    for (const scope of new Set(filter.scopes)) {
      const formattedScopeString = scope.toString();
-      if (!this.#scopes.has(formattedScopeString)) {
+      if (!(await this.#scopes.hasAsync(formattedScopeString))) {
        throw new Error('Trying to get incoming notes of a scope that is not in the PXE database');
      }

      activeNoteIdsPerScope.push(
        publicKey
-          ? this.#notesByAddressPointAndScope.get(formattedScopeString)!.getValues(publicKey.toString())
+          ? await toArray(
+              this.#notesByAddressPointAndScope.get(formattedScopeString)!.getValuesAsync(publicKey.toString()),
+            )
          : filter.txHash
-          ? this.#notesByTxHashAndScope.get(formattedScopeString)!.getValues(filter.txHash.toString())
+          ? await toArray(
+              this.#notesByTxHashAndScope.get(formattedScopeString)!.getValuesAsync(filter.txHash.toString()),
+            )
          : filter.contractAddress
-          ? this.#notesByContractAndScope.get(formattedScopeString)!.getValues(filter.contractAddress.toString())
+          ? await toArray(
+              this.#notesByContractAndScope
+                .get(formattedScopeString)!
+                .getValuesAsync(filter.contractAddress.toString()),
+            )
          : filter.storageSlot
-          ? this.#notesByStorageSlotAndScope.get(formattedScopeString)!.getValues(filter.storageSlot.toString())
-          : this.#notesByAddressPointAndScope.get(formattedScopeString)!.values(),
+          ? await toArray(
+              this.#notesByStorageSlotAndScope.get(formattedScopeString)!.getValuesAsync(filter.storageSlot.toString()),
+            )
+          : await toArray(this.#notesByAddressPointAndScope.get(formattedScopeString)!.valuesAsync()),
      );
    }

    candidateNoteSources.push({
-      ids: new Set(activeNoteIdsPerScope.flatMap(iterableIterator => [...iterableIterator])),
+      ids: new Set(activeNoteIdsPerScope.flat()),
      notes: this.#notes,
    });

    if (filter.status == NoteStatus.ACTIVE_OR_NULLIFIED) {
      candidateNoteSources.push({
        ids: publicKey
-          ? this.#nullifiedNotesByAddressPoint.getValues(publicKey.toString())
+          ? await toArray(this.#nullifiedNotesByAddressPoint.getValuesAsync(publicKey.toString()))
          : filter.txHash
-          ? this.#nullifiedNotesByTxHash.getValues(filter.txHash.toString())
+          ? await toArray(this.#nullifiedNotesByTxHash.getValuesAsync(filter.txHash.toString()))
          : filter.contractAddress
-          ? this.#nullifiedNotesByContract.getValues(filter.contractAddress.toString())
+          ? await toArray(this.#nullifiedNotesByContract.getValuesAsync(filter.contractAddress.toString()))
          : filter.storageSlot
-          ?
this.#nullifiedNotesByStorageSlot.getValues(filter.storageSlot.toString()) - : this.#nullifiedNotes.keys(), + ? await toArray(this.#nullifiedNotesByStorageSlot.getValuesAsync(filter.storageSlot.toString())) + : await toArray(this.#nullifiedNotes.keysAsync()), notes: this.#nullifiedNotes, }); } @@ -378,7 +399,7 @@ export class KVPxeDatabase implements PxeDatabase { const result: IncomingNoteDao[] = []; for (const { ids, notes } of candidateNoteSources) { for (const id of ids) { - const serializedNote = notes.get(id); + const serializedNote = await notes.getAsync(id); if (!serializedNote) { continue; } @@ -408,32 +429,34 @@ export class KVPxeDatabase implements PxeDatabase { } } - return Promise.resolve(result); + return result; } - getOutgoingNotes(filter: OutgoingNotesFilter): Promise { + async getOutgoingNotes(filter: OutgoingNotesFilter): Promise { const ovpkM: PublicKey | undefined = filter.owner - ? this.#getCompleteAddress(filter.owner)?.publicKeys.masterOutgoingViewingPublicKey + ? (await this.#getCompleteAddress(filter.owner))?.publicKeys.masterOutgoingViewingPublicKey : undefined; // Check if ovpkM is truthy - const ids = ovpkM - ? this.#outgoingNotesByOvpkM.getValues(ovpkM.toString()) + const idsIterator = ovpkM + ? this.#outgoingNotesByOvpkM.getValuesAsync(ovpkM.toString()) : // If ovpkM is falsy, check if filter.txHash is truthy filter.txHash - ? this.#outgoingNotesByTxHash.getValues(filter.txHash.toString()) + ? this.#outgoingNotesByTxHash.getValuesAsync(filter.txHash.toString()) : // If both ovpkM and filter.txHash are falsy, check if filter.contractAddress is truthy filter.contractAddress - ? this.#outgoingNotesByContract.getValues(filter.contractAddress.toString()) + ? this.#outgoingNotesByContract.getValuesAsync(filter.contractAddress.toString()) : // If ovpkM, filter.txHash, and filter.contractAddress are all falsy, check if filter.storageSlot is truthy filter.storageSlot - ? this.#outgoingNotesByStorageSlot.getValues(filter.storageSlot.toString()) + ? this.#outgoingNotesByStorageSlot.getValuesAsync(filter.storageSlot.toString()) : // If none of the above conditions are met, retrieve all keys from this.#outgoingNotes - this.#outgoingNotes.keys(); + this.#outgoingNotes.keysAsync(); const notes: OutgoingNoteDao[] = []; + + const ids = await toArray(idsIterator); for (const id of ids) { - const serializedNote = this.#outgoingNotes.get(id); + const serializedNote = await this.#outgoingNotes.getAsync(id); if (!serializedNote) { continue; } @@ -458,7 +481,7 @@ export class KVPxeDatabase implements PxeDatabase { notes.push(note); } - return Promise.resolve(notes); + return notes; } removeNullifiedNotes(nullifiers: InBlock[], accountAddressPoint: PublicKey): Promise { @@ -466,23 +489,23 @@ export class KVPxeDatabase implements PxeDatabase { return Promise.resolve([]); } - return this.#db.transaction(() => { + return this.db.transactionAsync(async () => { const nullifiedNotes: IncomingNoteDao[] = []; for (const blockScopedNullifier of nullifiers) { const { data: nullifier, l2BlockNumber: blockNumber } = blockScopedNullifier; - const noteIndex = this.#nullifierToNoteId.get(nullifier.toString()); + const noteIndex = await this.#nullifierToNoteId.getAsync(nullifier.toString()); if (!noteIndex) { continue; } - const noteBuffer = noteIndex ? this.#notes.get(noteIndex) : undefined; + const noteBuffer = noteIndex ? await this.#notes.getAsync(noteIndex) : undefined; if (!noteBuffer) { // note doesn't exist. 
Maybe it got nullified already continue; } - const noteScopes = this.#notesToScope.getValues(noteIndex) ?? []; + const noteScopes = (await toArray(this.#notesToScope.getValuesAsync(noteIndex))) ?? []; const note = IncomingNoteDao.fromBuffer(noteBuffer); if (!note.addressPoint.equals(accountAddressPoint)) { // tried to nullify someone else's note @@ -491,32 +514,33 @@ export class KVPxeDatabase implements PxeDatabase { nullifiedNotes.push(note); - void this.#notes.delete(noteIndex); - void this.#notesToScope.delete(noteIndex); + await this.#notes.delete(noteIndex); + await this.#notesToScope.delete(noteIndex); - for (const scope of this.#scopes.entries()) { - void this.#notesByAddressPointAndScope.get(scope)!.deleteValue(accountAddressPoint.toString(), noteIndex); - void this.#notesByTxHashAndScope.get(scope)!.deleteValue(note.txHash.toString(), noteIndex); - void this.#notesByContractAndScope.get(scope)!.deleteValue(note.contractAddress.toString(), noteIndex); - void this.#notesByStorageSlotAndScope.get(scope)!.deleteValue(note.storageSlot.toString(), noteIndex); + const scopes = await toArray(this.#scopes.entriesAsync()); + + for (const scope of scopes) { + await this.#notesByAddressPointAndScope.get(scope)!.deleteValue(accountAddressPoint.toString(), noteIndex); + await this.#notesByTxHashAndScope.get(scope)!.deleteValue(note.txHash.toString(), noteIndex); + await this.#notesByContractAndScope.get(scope)!.deleteValue(note.contractAddress.toString(), noteIndex); + await this.#notesByStorageSlotAndScope.get(scope)!.deleteValue(note.storageSlot.toString(), noteIndex); } if (noteScopes !== undefined) { for (const scope of noteScopes) { - void this.#nullifiedNotesToScope.set(noteIndex, scope); + await this.#nullifiedNotesToScope.set(noteIndex, scope); } } - void this.#nullifiedNotes.set(noteIndex, note.toBuffer()); - void this.#nullifiersByBlockNumber.set(blockNumber, nullifier.toString()); - void this.#nullifiedNotesByContract.set(note.contractAddress.toString(), noteIndex); - void this.#nullifiedNotesByStorageSlot.set(note.storageSlot.toString(), noteIndex); - void this.#nullifiedNotesByTxHash.set(note.txHash.toString(), noteIndex); - void this.#nullifiedNotesByAddressPoint.set(note.addressPoint.toString(), noteIndex); - void this.#nullifiedNotesByNullifier.set(nullifier.toString(), noteIndex); - - void this.#nullifierToNoteId.delete(nullifier.toString()); + await this.#nullifiedNotes.set(noteIndex, note.toBuffer()); + await this.#nullifiersByBlockNumber.set(blockNumber, nullifier.toString()); + await this.#nullifiedNotesByContract.set(note.contractAddress.toString(), noteIndex); + await this.#nullifiedNotesByStorageSlot.set(note.storageSlot.toString(), noteIndex); + await this.#nullifiedNotesByTxHash.set(note.txHash.toString(), noteIndex); + await this.#nullifiedNotesByAddressPoint.set(note.addressPoint.toString(), noteIndex); + await this.#nullifiedNotesByNullifier.set(nullifier.toString(), noteIndex); + + await this.#nullifierToNoteId.delete(nullifier.toString()); } - return nullifiedNotes; }); } @@ -529,36 +553,34 @@ export class KVPxeDatabase implements PxeDatabase { await this.#nullifiedNotesByStorageSlot.set(note.storageSlot.toString(), noteIndex); await this.#nullifiedNotesByTxHash.set(note.txHash.toString(), noteIndex); await this.#nullifiedNotesByAddressPoint.set(note.addressPoint.toString(), noteIndex); - - return Promise.resolve(); } - async setHeader(header: Header): Promise { + async setHeader(header: BlockHeader): Promise { await 
this.#synchronizedBlock.set(header.toBuffer());
  }

-  getBlockNumber(): number | undefined {
-    const headerBuffer = this.#synchronizedBlock.get();
+  async getBlockNumber(): Promise<number | undefined> {
+    const headerBuffer = await this.#synchronizedBlock.getAsync();
    if (!headerBuffer) {
      return undefined;
    }

-    return Number(Header.fromBuffer(headerBuffer).globalVariables.blockNumber.toBigInt());
+    return Number(BlockHeader.fromBuffer(headerBuffer).globalVariables.blockNumber.toBigInt());
  }

-  getHeader(): Header {
-    const headerBuffer = this.#synchronizedBlock.get();
+  async getBlockHeader(): Promise<BlockHeader> {
+    const headerBuffer = await this.#synchronizedBlock.getAsync();

    if (!headerBuffer) {
      throw new Error(`Header not set`);
    }

-    return Header.fromBuffer(headerBuffer);
+    return BlockHeader.fromBuffer(headerBuffer);
  }

  async #addScope(scope: AztecAddress): Promise<boolean> {
    const scopeString = scope.toString();

-    if (this.#scopes.has(scopeString)) {
+    if (await this.#scopes.hasAsync(scopeString)) {
      return false;
    }
@@ -571,23 +593,23 @@ export class KVPxeDatabase implements PxeDatabase {
    return true;
  }

-  async addCompleteAddress(completeAddress: CompleteAddress): Promise<boolean> {
-    await this.#addScope(completeAddress.address);
+  addCompleteAddress(completeAddress: CompleteAddress): Promise<boolean> {
+    return this.db.transactionAsync(async () => {
+      await this.#addScope(completeAddress.address);

-    return this.#db.transaction(() => {
      const addressString = completeAddress.address.toString();
      const buffer = completeAddress.toBuffer();
-      const existing = this.#completeAddressIndex.get(addressString);
-      if (typeof existing === 'undefined') {
-        const index = this.#completeAddresses.length;
-        void this.#completeAddresses.push(buffer);
-        void this.#completeAddressIndex.set(addressString, index);
+      const existing = await this.#completeAddressIndex.getAsync(addressString);
+      if (existing === undefined) {
+        const index = await this.#completeAddresses.lengthAsync();
+        await this.#completeAddresses.push(buffer);
+        await this.#completeAddressIndex.set(addressString, index);
        return true;
      } else {
-        const existingBuffer = this.#completeAddresses.at(existing);
+        const existingBuffer = await this.#completeAddresses.atAsync(existing);

-        if (existingBuffer?.equals(buffer)) {
+        if (existingBuffer && Buffer.from(existingBuffer).equals(buffer)) {
          return false;
        }
@@ -598,26 +620,26 @@ export class KVPxeDatabase implements PxeDatabase {
    });
  }

-  #getCompleteAddress(address: AztecAddress): CompleteAddress | undefined {
-    const index = this.#completeAddressIndex.get(address.toString());
-    if (typeof index === 'undefined') {
+  async #getCompleteAddress(address: AztecAddress): Promise<CompleteAddress | undefined> {
+    const index = await this.#completeAddressIndex.getAsync(address.toString());
+    if (index === undefined) {
      return undefined;
    }

-    const value = this.#completeAddresses.at(index);
+    const value = await this.#completeAddresses.atAsync(index);
    return value ?
CompleteAddress.fromBuffer(value) : undefined;
  }

  getCompleteAddress(account: AztecAddress): Promise<CompleteAddress | undefined> {
-    return Promise.resolve(this.#getCompleteAddress(account));
+    return this.#getCompleteAddress(account);
  }

-  getCompleteAddresses(): Promise<CompleteAddress[]> {
-    return Promise.resolve(Array.from(this.#completeAddresses).map(v => CompleteAddress.fromBuffer(v)));
+  async getCompleteAddresses(): Promise<CompleteAddress[]> {
+    return (await toArray(this.#completeAddresses.valuesAsync())).map(v => CompleteAddress.fromBuffer(v));
  }

  async addContactAddress(address: AztecAddress): Promise<boolean> {
-    if (this.#addressBook.has(address.toString())) {
+    if (await this.#addressBook.hasAsync(address.toString())) {
      return false;
    }
@@ -626,12 +648,12 @@ export class KVPxeDatabase implements PxeDatabase {
    return true;
  }

-  getContactAddresses(): AztecAddress[] {
-    return [...this.#addressBook.entries()].map(AztecAddress.fromString);
+  async getContactAddresses(): Promise<AztecAddress[]> {
+    return (await toArray(this.#addressBook.entriesAsync())).map(AztecAddress.fromString);
  }

  async removeContactAddress(address: AztecAddress): Promise<boolean> {
-    if (!this.#addressBook.has(address.toString())) {
+    if (!(await this.#addressBook.hasAsync(address.toString()))) {
      return false;
    }
@@ -640,8 +662,8 @@ export class KVPxeDatabase implements PxeDatabase {
    return true;
  }

-  getSynchedBlockNumberForAccount(account: AztecAddress): number | undefined {
-    return this.#syncedBlockPerPublicKey.get(account.toString());
+  getSynchedBlockNumberForAccount(account: AztecAddress): Promise<number | undefined> {
+    return this.#syncedBlockPerPublicKey.getAsync(account.toString());
  }

  setSynchedBlockNumberForAccount(account: AztecAddress, blockNumber: number): Promise<void> {
@@ -649,20 +671,14 @@ export class KVPxeDatabase implements PxeDatabase {
  }

  async estimateSize(): Promise<number> {
-    const incomingNotesSize = Array.from(await this.getIncomingNotes({})).reduce(
-      (sum, note) => sum + note.getSize(),
-      0,
-    );
-    const outgoingNotesSize = Array.from(await this.getOutgoingNotes({})).reduce(
-      (sum, note) => sum + note.getSize(),
-      0,
-    );
+    const incomingNotesSize = (await this.getIncomingNotes({})).reduce((sum, note) => sum + note.getSize(), 0);
+    const outgoingNotesSize = (await this.getOutgoingNotes({})).reduce((sum, note) => sum + note.getSize(), 0);

-    const authWitsSize = Array.from(this.#authWitnesses.values()).reduce(
+    const authWitsSize = (await toArray(this.#authWitnesses.valuesAsync())).reduce(
      (sum, value) => sum + value.length * Fr.SIZE_IN_BYTES,
      0,
    );
-    const addressesSize = this.#completeAddresses.length * CompleteAddress.SIZE_IN_BYTES;
+    const addressesSize = (await this.#completeAddresses.lengthAsync()) * CompleteAddress.SIZE_IN_BYTES;
    const treeRootsSize = Object.keys(MerkleTreeId).length * Fr.SIZE_IN_BYTES;

    return incomingNotesSize + outgoingNotesSize + treeRootsSize + authWitsSize + addressesSize;
@@ -676,12 +692,10 @@ export class KVPxeDatabase implements PxeDatabase {
    await this.#setTaggingSecretsIndexes(indexedSecrets, this.#taggingSecretIndexesForRecipients);
  }

-  #setTaggingSecretsIndexes(indexedSecrets: IndexedTaggingSecret[], storageMap: AztecMap<string, number>) {
-    return this.db.transaction(() => {
-      indexedSecrets.forEach(
-        indexedSecret => void storageMap.set(indexedSecret.secret.toString(), indexedSecret.index),
-      );
-    });
+  async #setTaggingSecretsIndexes(indexedSecrets: IndexedTaggingSecret[], storageMap: AztecAsyncMap<string, number>) {
+    await Promise.all(
+      indexedSecrets.map(indexedSecret => storageMap.set(indexedSecret.secret.toString(), indexedSecret.index)),
+    );
  }

  async getTaggingSecretsIndexesAsRecipient(appTaggingSecrets:
Fr[]) {
@@ -692,18 +706,16 @@ export class KVPxeDatabase implements PxeDatabase {
    return await this.#getTaggingSecretsIndexes(appTaggingSecrets, this.#taggingSecretIndexesForSenders);
  }

-  #getTaggingSecretsIndexes(appTaggingSecrets: Fr[], storageMap: AztecMap<string, number>): Promise<number[]> {
-    return this.db.transaction(() => appTaggingSecrets.map(secret => storageMap.get(`${secret.toString()}`) ?? 0));
+  #getTaggingSecretsIndexes(appTaggingSecrets: Fr[], storageMap: AztecAsyncMap<string, number>): Promise<number[]> {
+    return Promise.all(appTaggingSecrets.map(async secret => (await storageMap.getAsync(`${secret.toString()}`)) ?? 0));
  }

-  async resetNoteSyncData(): Promise<void> {
-    await this.db.transaction(() => {
-      for (const recipient of this.#taggingSecretIndexesForRecipients.keys()) {
-        void this.#taggingSecretIndexesForRecipients.delete(recipient);
-      }
-      for (const sender of this.#taggingSecretIndexesForSenders.keys()) {
-        void this.#taggingSecretIndexesForSenders.delete(sender);
-      }
+  resetNoteSyncData(): Promise<void> {
+    return this.db.transactionAsync(async () => {
+      const recipients = await toArray(this.#taggingSecretIndexesForRecipients.keysAsync());
+      await Promise.all(recipients.map(recipient => this.#taggingSecretIndexesForRecipients.delete(recipient)));
+      const senders = await toArray(this.#taggingSecretIndexesForSenders.keysAsync());
+      await Promise.all(senders.map(sender => this.#taggingSecretIndexesForSenders.delete(sender)));
    });
  }
}
diff --git a/yarn-project/pxe/src/database/pxe_database.ts b/yarn-project/pxe/src/database/pxe_database.ts
index 8b884041bb97..211b83bd626c 100644
--- a/yarn-project/pxe/src/database/pxe_database.ts
+++ b/yarn-project/pxe/src/database/pxe_database.ts
@@ -1,8 +1,8 @@
 import { type InBlock, type IncomingNotesFilter, type OutgoingNotesFilter } from '@aztec/circuit-types';
 import {
+  type BlockHeader,
   type CompleteAddress,
   type ContractInstanceWithAddress,
-  type Header,
   type IndexedTaggingSecret,
   type PublicKey,
 } from '@aztec/circuits.js';
@@ -102,7 +102,7 @@ export interface PxeDatabase extends ContractArtifactDatabase, ContractInstanceD
   * Gets the most recently processed block number.
   * @returns The most recently processed block number or undefined if never synched.
   */
-  getBlockNumber(): number | undefined;
+  getBlockNumber(): Promise<number | undefined>;

  /**
   * Retrieve the stored Block Header from the database.
@@ -115,7 +115,7 @@ export interface PxeDatabase extends ContractArtifactDatabase, ContractInstanceD
   * @returns The Block Header.
   * @throws If no blocks have been processed yet.
   */
-  getHeader(): Header;
+  getBlockHeader(): Promise<BlockHeader>;

  /**
   * Set the latest Block Header.
@@ -124,7 +124,7 @@ export interface PxeDatabase extends ContractArtifactDatabase, ContractInstanceD
   * @param header - An object containing the most recent block header.
   * @returns A Promise that resolves when the header has been successfully updated in the database.
   */
-  setHeader(header: Header): Promise<void>;
+  setHeader(header: BlockHeader): Promise<void>;

  /**
   * Adds a contact address to the database.
@@ -137,7 +137,7 @@ export interface PxeDatabase extends ContractArtifactDatabase, ContractInstanceD
   * Retrieves the list of contact addresses in the address book.
   * @returns An array of Aztec addresses.
   */
-  getContactAddresses(): AztecAddress[];
+  getContactAddresses(): Promise<AztecAddress[]>;

  /**
   * Removes a contact address from the database.
@@ -179,7 +179,7 @@ export interface PxeDatabase extends ContractArtifactDatabase, ContractInstanceD
   * Get the synched block number for a given public key.
   * @param account - The account to get the synched block number for.
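   * @returns The synched block number for the account, or undefined if the account has never synched.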
*/ - getSynchedBlockNumberForAccount(account: AztecAddress): number | undefined; + getSynchedBlockNumberForAccount(account: AztecAddress): Promise; /** * Returns the estimated size in bytes of this db. diff --git a/yarn-project/pxe/src/database/pxe_database_test_suite.ts b/yarn-project/pxe/src/database/pxe_database_test_suite.ts index 4fcf59993b12..3f683a8814e5 100644 --- a/yarn-project/pxe/src/database/pxe_database_test_suite.ts +++ b/yarn-project/pxe/src/database/pxe_database_test_suite.ts @@ -380,11 +380,11 @@ export function describePxeDatabase(getDatabase: () => PxeDatabase) { const header = makeHeader(randomInt(1000), INITIAL_L2_BLOCK_NUM, 0 /** slot number */); await database.setHeader(header); - expect(database.getHeader()).toEqual(header); + await expect(database.getBlockHeader()).resolves.toEqual(header); }); - it('rejects getting header if no block set', () => { - expect(() => database.getHeader()).toThrow(); + it('rejects getting header if no block set', async () => { + await expect(() => database.getBlockHeader()).rejects.toThrow(); }); }); @@ -423,6 +423,16 @@ export function describePxeDatabase(getDatabase: () => PxeDatabase) { expect(result).toEqual(expect.arrayContaining(addresses)); }); + it('returns a single address', async () => { + const addresses = Array.from({ length: 10 }).map(() => CompleteAddress.random()); + for (const address of addresses) { + await database.addCompleteAddress(address); + } + + const result = await database.getCompleteAddress(addresses[3].address); + expect(result).toEqual(addresses[3]); + }); + it("returns an empty array if it doesn't have addresses", async () => { expect(await database.getCompleteAddresses()).toEqual([]); }); diff --git a/yarn-project/pxe/src/index.ts b/yarn-project/pxe/src/index.ts index d6e46f5ad840..ae998668621e 100644 --- a/yarn-project/pxe/src/index.ts +++ b/yarn-project/pxe/src/index.ts @@ -10,5 +10,6 @@ export * from '@aztec/foundation/eth-address'; export * from '@aztec/foundation/aztec-address'; export * from '@aztec/key-store'; export * from './database/index.js'; +export * from './utils/index.js'; export { ContractDataOracle } from './contract_data_oracle/index.js'; export { PrivateFunctionsTree } from './contract_data_oracle/private_functions_tree.js'; diff --git a/yarn-project/pxe/src/kernel_prover/index.ts b/yarn-project/pxe/src/kernel_prover/index.ts index 94bc7f6892de..f37c2c857ca9 100644 --- a/yarn-project/pxe/src/kernel_prover/index.ts +++ b/yarn-project/pxe/src/kernel_prover/index.ts @@ -1,2 +1,4 @@ +export { TestPrivateKernelProver } from './test/test_circuit_prover.js'; + export * from './kernel_prover.js'; export * from './proving_data_oracle.js'; diff --git a/yarn-project/pxe/src/note_decryption_utils/add_public_values_to_payload.ts b/yarn-project/pxe/src/note_decryption_utils/add_public_values_to_payload.ts index 1d9c3806eea0..4d36c3a46e36 100644 --- a/yarn-project/pxe/src/note_decryption_utils/add_public_values_to_payload.ts +++ b/yarn-project/pxe/src/note_decryption_utils/add_public_values_to_payload.ts @@ -1,5 +1,5 @@ import { type L1NotePayload, Note } from '@aztec/circuit-types'; -import { ContractNotFoundError } from '@aztec/simulator'; +import { ContractNotFoundError } from '@aztec/simulator/client'; import { type PxeDatabase } from '../database/pxe_database.js'; diff --git a/yarn-project/pxe/src/note_decryption_utils/brute_force_note_info.ts b/yarn-project/pxe/src/note_decryption_utils/brute_force_note_info.ts index d24ef47c420f..7ce0ade4c4a1 100644 --- 
a/yarn-project/pxe/src/note_decryption_utils/brute_force_note_info.ts +++ b/yarn-project/pxe/src/note_decryption_utils/brute_force_note_info.ts @@ -3,7 +3,7 @@ import { type AztecAddress } from '@aztec/circuits.js'; import { computeNoteHashNonce, siloNullifier } from '@aztec/circuits.js/hash'; import { type NoteSelector } from '@aztec/foundation/abi'; import { Fr } from '@aztec/foundation/fields'; -import { type AcirSimulator } from '@aztec/simulator'; +import { type AcirSimulator } from '@aztec/simulator/client'; export interface NoteInfo { noteHashIndex: number; diff --git a/yarn-project/pxe/src/note_decryption_utils/produce_note_daos.ts b/yarn-project/pxe/src/note_decryption_utils/produce_note_daos.ts index fc3e1918ce1f..dafbad9afdde 100644 --- a/yarn-project/pxe/src/note_decryption_utils/produce_note_daos.ts +++ b/yarn-project/pxe/src/note_decryption_utils/produce_note_daos.ts @@ -1,7 +1,7 @@ import { type L1NotePayload, type PublicKey, type TxHash } from '@aztec/circuit-types'; import { type Fr } from '@aztec/foundation/fields'; import { type Logger } from '@aztec/foundation/log'; -import { type AcirSimulator } from '@aztec/simulator'; +import { type AcirSimulator } from '@aztec/simulator/client'; import { IncomingNoteDao } from '../database/incoming_note_dao.js'; import { OutgoingNoteDao } from '../database/outgoing_note_dao.js'; diff --git a/yarn-project/pxe/src/note_decryption_utils/produce_note_daos_for_key.ts b/yarn-project/pxe/src/note_decryption_utils/produce_note_daos_for_key.ts index eeeb6c9ee9e4..291d9efd80d9 100644 --- a/yarn-project/pxe/src/note_decryption_utils/produce_note_daos_for_key.ts +++ b/yarn-project/pxe/src/note_decryption_utils/produce_note_daos_for_key.ts @@ -1,7 +1,7 @@ import { type L1NotePayload, type Note, type TxHash } from '@aztec/circuit-types'; import { type Fr, type PublicKey } from '@aztec/circuits.js'; import { type Logger } from '@aztec/foundation/log'; -import { type AcirSimulator } from '@aztec/simulator'; +import { type AcirSimulator } from '@aztec/simulator/client'; import { type PxeDatabase } from '../database/pxe_database.js'; import { getOrderedNoteItems } from './add_public_values_to_payload.js'; diff --git a/yarn-project/pxe/src/pxe_service/create_pxe_service.ts b/yarn-project/pxe/src/pxe_service/create_pxe_service.ts index a3bdd43b105d..c92693951805 100644 --- a/yarn-project/pxe/src/pxe_service/create_pxe_service.ts +++ b/yarn-project/pxe/src/pxe_service/create_pxe_service.ts @@ -3,8 +3,8 @@ import { type AztecNode, type PrivateKernelProver } from '@aztec/circuit-types'; import { randomBytes } from '@aztec/foundation/crypto'; import { createDebugLogger } from '@aztec/foundation/log'; import { KeyStore } from '@aztec/key-store'; +import { createStore } from '@aztec/kv-store/lmdb'; import { L2TipsStore } from '@aztec/kv-store/stores'; -import { createStore } from '@aztec/kv-store/utils'; import { type PXEServiceConfig } from '../config/index.js'; import { KVPxeDatabase } from '../database/kv_pxe_database.js'; @@ -43,7 +43,7 @@ export async function createPXEService( const store = await createStore('pxe_data', configWithContracts, createDebugLogger('aztec:pxe:data:lmdb')); - const db = new KVPxeDatabase(store); + const db = await KVPxeDatabase.create(store); const tips = new L2TipsStore(store, 'pxe'); const prover = proofCreator ?? 
(await createProver(config, logSuffix)); diff --git a/yarn-project/pxe/src/pxe_service/error_enriching.ts b/yarn-project/pxe/src/pxe_service/error_enriching.ts index 938d391ada77..f9c26ba876ed 100644 --- a/yarn-project/pxe/src/pxe_service/error_enriching.ts +++ b/yarn-project/pxe/src/pxe_service/error_enriching.ts @@ -1,7 +1,7 @@ import { type SimulationError, isNoirCallStackUnresolved } from '@aztec/circuit-types'; import { AztecAddress, Fr, FunctionSelector, PUBLIC_DISPATCH_SELECTOR } from '@aztec/circuits.js'; import { type DebugLogger } from '@aztec/foundation/log'; -import { resolveAssertionMessageFromRevertData, resolveOpcodeLocations } from '@aztec/simulator'; +import { resolveAssertionMessageFromRevertData, resolveOpcodeLocations } from '@aztec/simulator/errors'; import { type ContractDataOracle, type PxeDatabase } from '../index.js'; diff --git a/yarn-project/pxe/src/pxe_service/index.ts b/yarn-project/pxe/src/pxe_service/index.ts index c9018d7ba8c8..66f9aae2adda 100644 --- a/yarn-project/pxe/src/pxe_service/index.ts +++ b/yarn-project/pxe/src/pxe_service/index.ts @@ -1,4 +1,3 @@ export * from './pxe_service.js'; -export * from './create_pxe_service.js'; export { enrichPublicSimulationError } from './error_enriching.js'; export { pxeTestSuite } from './test/pxe_test_suite.js'; diff --git a/yarn-project/pxe/src/pxe_service/pxe_service.ts b/yarn-project/pxe/src/pxe_service/pxe_service.ts index eadab26de2d7..691e7a95152b 100644 --- a/yarn-project/pxe/src/pxe_service/pxe_service.ts +++ b/yarn-project/pxe/src/pxe_service/pxe_service.ts @@ -65,11 +65,12 @@ import { getCanonicalProtocolContract, protocolContractNames, } from '@aztec/protocol-contracts'; -import { type AcirSimulator } from '@aztec/simulator'; +import { type AcirSimulator } from '@aztec/simulator/client'; import { inspect } from 'util'; -import { type PXEServiceConfig, getPackageInfo } from '../config/index.js'; +import { type PXEServiceConfig } from '../config/index.js'; +import { getPackageInfo } from '../config/package_info.js'; import { ContractDataOracle } from '../contract_data_oracle/index.js'; import { IncomingNoteDao } from '../database/incoming_note_dao.js'; import { type PxeDatabase } from '../database/index.js'; @@ -915,7 +916,7 @@ export class PXEService implements PXE { const vsks = await Promise.all( vpks.map(async vpk => { - const [keyPrefix, account] = this.keyStore.getKeyPrefixAndAccount(vpk); + const [keyPrefix, account] = await this.keyStore.getKeyPrefixAndAccount(vpk); let secretKey = await this.keyStore.getMasterSecretKey(vpk); if (keyPrefix === 'iv') { const registeredAccount = await this.getRegisteredAccount(account); diff --git a/yarn-project/pxe/src/pxe_service/test/pxe_service.test.ts b/yarn-project/pxe/src/pxe_service/test/pxe_service.test.ts index 4acbc87e2a4f..678f6c4bb761 100644 --- a/yarn-project/pxe/src/pxe_service/test/pxe_service.test.ts +++ b/yarn-project/pxe/src/pxe_service/test/pxe_service.test.ts @@ -3,8 +3,8 @@ import { INITIAL_L2_BLOCK_NUM } from '@aztec/circuits.js/constants'; import { type L1ContractAddresses } from '@aztec/ethereum'; import { EthAddress } from '@aztec/foundation/eth-address'; import { KeyStore } from '@aztec/key-store'; +import { openTmpStore } from '@aztec/kv-store/lmdb'; import { L2TipsStore } from '@aztec/kv-store/stores'; -import { openTmpStore } from '@aztec/kv-store/utils'; import { type MockProxy, mock } from 'jest-mock-extended'; @@ -15,11 +15,11 @@ import { TestPrivateKernelProver } from '../../kernel_prover/test/test_circuit_p import { PXEService } from 
'../pxe_service.js'; import { pxeTestSuite } from './pxe_test_suite.js'; -function createPXEService(): Promise { +async function createPXEService(): Promise { const kvStore = openTmpStore(); const keyStore = new KeyStore(kvStore); const node = mock(); - const db = new KVPxeDatabase(kvStore); + const db = await KVPxeDatabase.create(kvStore); const tips = new L2TipsStore(kvStore, 'pxe'); const config: PXEServiceConfig = { l2BlockPollingIntervalMS: 100, @@ -39,6 +39,7 @@ function createPXEService(): Promise { inboxAddress: EthAddress.random(), outboxAddress: EthAddress.random(), feeJuiceAddress: EthAddress.random(), + stakingAssetAddress: EthAddress.random(), feeJuicePortalAddress: EthAddress.random(), governanceAddress: EthAddress.random(), coinIssuerAddress: EthAddress.random(), @@ -59,12 +60,12 @@ describe('PXEService', () => { let config: PXEServiceConfig; let tips: L2TipsStore; - beforeEach(() => { + beforeEach(async () => { const kvStore = openTmpStore(); keyStore = new KeyStore(kvStore); node = mock(); tips = new L2TipsStore(kvStore, 'pxe'); - db = new KVPxeDatabase(kvStore); + db = await KVPxeDatabase.create(kvStore); config = { l2BlockPollingIntervalMS: 100, l2StartingBlock: INITIAL_L2_BLOCK_NUM, diff --git a/yarn-project/pxe/src/simulator/index.ts b/yarn-project/pxe/src/simulator/index.ts index 0a4dc3abf736..8f41547a15e4 100644 --- a/yarn-project/pxe/src/simulator/index.ts +++ b/yarn-project/pxe/src/simulator/index.ts @@ -1,6 +1,6 @@ import { type AztecNode } from '@aztec/circuit-types'; import { type KeyStore } from '@aztec/key-store'; -import { AcirSimulator } from '@aztec/simulator'; +import { AcirSimulator } from '@aztec/simulator/client'; import { ContractDataOracle } from '../contract_data_oracle/index.js'; import { type PxeDatabase } from '../database/pxe_database.js'; diff --git a/yarn-project/pxe/src/simulator_oracle/index.ts b/yarn-project/pxe/src/simulator_oracle/index.ts index 4a18b6bf7581..366df568fd6f 100644 --- a/yarn-project/pxe/src/simulator_oracle/index.ts +++ b/yarn-project/pxe/src/simulator_oracle/index.ts @@ -14,11 +14,11 @@ import { } from '@aztec/circuit-types'; import { type AztecAddress, + type BlockHeader, type CompleteAddress, type ContractInstance, Fr, type FunctionSelector, - type Header, IndexedTaggingSecret, type KeyValidationRequest, type L1_TO_L2_MSG_TREE_HEIGHT, @@ -31,7 +31,8 @@ import { poseidon2Hash } from '@aztec/foundation/crypto'; import { tryJsonStringify } from '@aztec/foundation/json-rpc'; import { createDebugLogger } from '@aztec/foundation/log'; import { type KeyStore } from '@aztec/key-store'; -import { type AcirSimulator, type DBOracle, MessageLoadOracleInputs } from '@aztec/simulator'; +import { MessageLoadOracleInputs } from '@aztec/simulator/acvm'; +import { type AcirSimulator, type DBOracle } from '@aztec/simulator/client'; import { type ContractDataOracle } from '../contract_data_oracle/index.js'; import { type IncomingNoteDao } from '../database/incoming_note_dao.js'; @@ -229,10 +230,10 @@ export class SimulatorOracle implements DBOracle { * Retrieve the databases view of the Block Header object. * This structure is fed into the circuits simulator and is used to prove against certain historical roots. * - * @returns A Promise that resolves to a Header object. + * @returns A Promise that resolves to a BlockHeader object. */ - getHeader(): Promise
{ - return Promise.resolve(this.db.getHeader()); + getBlockHeader(): Promise { + return Promise.resolve(this.db.getBlockHeader()); } /** @@ -253,7 +254,7 @@ export class SimulatorOracle implements DBOracle { * finally the index specified tag. We will then query the node with this tag for each address in the address book. * @returns The full list of the users contact addresses. */ - public getContacts(): AztecAddress[] { + public getContacts(): Promise { return this.db.getContactAddresses(); } @@ -325,7 +326,7 @@ export class SimulatorOracle implements DBOracle { const recipientIvsk = await this.keyStore.getMasterIncomingViewingSecretKey(recipient); // We implicitly add all PXE accounts as contacts, this helps us decrypt tags on notes that we send to ourselves (recipient = us, sender = us) - const contacts = [...this.db.getContactAddresses(), ...(await this.keyStore.getAccounts())].filter( + const contacts = [...(await this.db.getContactAddresses()), ...(await this.keyStore.getAccounts())].filter( (address, index, self) => index === self.findIndex(otherAddress => otherAddress.equals(address)), ); const appTaggingSecrets = contacts.map(contact => { diff --git a/yarn-project/pxe/src/simulator_oracle/simulator_oracle.test.ts b/yarn-project/pxe/src/simulator_oracle/simulator_oracle.test.ts index a9804de5eecd..02620bbe16cc 100644 --- a/yarn-project/pxe/src/simulator_oracle/simulator_oracle.test.ts +++ b/yarn-project/pxe/src/simulator_oracle/simulator_oracle.test.ts @@ -25,8 +25,8 @@ import { } from '@aztec/circuits.js'; import { pedersenHash, poseidon2Hash } from '@aztec/foundation/crypto'; import { KeyStore } from '@aztec/key-store'; -import { openTmpStore } from '@aztec/kv-store/utils'; -import { type AcirSimulator } from '@aztec/simulator'; +import { openTmpStore } from '@aztec/kv-store/lmdb'; +import { type AcirSimulator } from '@aztec/simulator/client'; import { jest } from '@jest/globals'; import { type MockProxy, mock } from 'jest-mock-extended'; @@ -128,7 +128,7 @@ describe('Simulator oracle', () => { beforeEach(async () => { const db = openTmpStore(); aztecNode = mock(); - database = new KVPxeDatabase(db); + database = await KVPxeDatabase.create(db); contractDataOracle = new ContractDataOracle(database); jest.spyOn(contractDataOracle, 'getDebugContractName').mockImplementation(() => Promise.resolve('TestContract')); keyStore = new KeyStore(db); diff --git a/yarn-project/pxe/src/synchronizer/synchronizer.test.ts b/yarn-project/pxe/src/synchronizer/synchronizer.test.ts index e78c0dbb4abc..a5cedf499cde 100644 --- a/yarn-project/pxe/src/synchronizer/synchronizer.test.ts +++ b/yarn-project/pxe/src/synchronizer/synchronizer.test.ts @@ -1,6 +1,6 @@ import { type AztecNode, L2Block, type L2BlockStream } from '@aztec/circuit-types'; +import { openTmpStore } from '@aztec/kv-store/lmdb'; import { L2TipsStore } from '@aztec/kv-store/stores'; -import { openTmpStore } from '@aztec/kv-store/utils'; import { jest } from '@jest/globals'; import { type MockProxy, mock } from 'jest-mock-extended'; @@ -24,11 +24,11 @@ describe('Synchronizer', () => { } }; - beforeEach(() => { + beforeEach(async () => { const store = openTmpStore(); blockStream = mock(); aztecNode = mock(); - database = new KVPxeDatabase(store); + database = await KVPxeDatabase.create(store); tipsStore = new L2TipsStore(store, 'pxe'); synchronizer = new TestSynchronizer(aztecNode, database, tipsStore); }); @@ -37,7 +37,7 @@ describe('Synchronizer', () => { const block = L2Block.random(1, 4); await synchronizer.handleBlockStreamEvent({ type: 
'blocks-added', blocks: [block] });

-    const obtainedHeader = database.getHeader();
+    const obtainedHeader = await database.getBlockHeader();
    expect(obtainedHeader).toEqual(block.header);
  });
diff --git a/yarn-project/pxe/src/synchronizer/synchronizer.ts b/yarn-project/pxe/src/synchronizer/synchronizer.ts
index d527a38b5357..ee8c22a06d29 100644
--- a/yarn-project/pxe/src/synchronizer/synchronizer.ts
+++ b/yarn-project/pxe/src/synchronizer/synchronizer.ts
@@ -79,8 +79,18 @@ export class Synchronizer implements L2BlockStreamEventHandler {
    }
    this.running = true;

-    // REFACTOR: We should know the header of the genesis block without having to request it from the node.
-    await this.db.setHeader(await this.node.getBlockHeader(0));
+    let currentHeader;
+
+    try {
+      currentHeader = await this.db.getBlockHeader();
+    } catch (e) {
+      this.log.debug('Header is not set, requesting from the node');
+    }
+    if (!currentHeader) {
+      // REFACTOR: We should know the header of the genesis block without having to request it from the node.
+      const storedBlockNumber = await this.db.getBlockNumber();
+      await this.db.setHeader(await this.node.getBlockHeader(storedBlockNumber ?? 0));
+    }

    await this.trigger();
    this.log.info('Initial sync complete');
@@ -106,8 +116,8 @@ export class Synchronizer implements L2BlockStreamEventHandler {
    await this.blockStream.sync();
  }

-  private getSynchedBlockNumber() {
-    return this.db.getBlockNumber() ?? this.initialSyncBlockNumber;
+  private async getSynchedBlockNumber() {
+    return (await this.db.getBlockNumber()) ?? this.initialSyncBlockNumber;
  }

  /**
@@ -118,15 +128,15 @@ export class Synchronizer implements L2BlockStreamEventHandler {
   */
  public async isGlobalStateSynchronized() {
    const latest = await this.node.getBlockNumber();
-    return latest <= this.getSynchedBlockNumber();
+    return latest <= (await this.getSynchedBlockNumber());
  }

  /**
   * Returns the latest block that has been synchronized by the synchronizer and each account.
   * @returns The latest block synchronized for blocks, and the latest block synched for notes for each public key being tracked.
   */
-  public getSyncStatus() {
-    const lastBlockNumber = this.getSynchedBlockNumber();
+  public async getSyncStatus() {
+    const lastBlockNumber = await this.getSynchedBlockNumber();
    return {
      blocks: lastBlockNumber,
    };
diff --git a/yarn-project/pxe/src/utils/index.ts b/yarn-project/pxe/src/utils/index.ts
new file mode 100644
index 000000000000..f54e87e2cb26
--- /dev/null
+++ b/yarn-project/pxe/src/utils/index.ts
@@ -0,0 +1,67 @@
+import { BBNativePrivateKernelProver } from '@aztec/bb-prover';
+import { type AztecNode, type PrivateKernelProver } from '@aztec/circuit-types';
+import { randomBytes } from '@aztec/foundation/crypto';
+import { createDebugLogger } from '@aztec/foundation/log';
+import { KeyStore } from '@aztec/key-store';
+import { createStore } from '@aztec/kv-store/lmdb';
+import { L2TipsStore } from '@aztec/kv-store/stores';
+
+import { type PXEServiceConfig } from '../config/index.js';
+import { KVPxeDatabase } from '../database/kv_pxe_database.js';
+import { TestPrivateKernelProver } from '../kernel_prover/test/test_circuit_prover.js';
+import { PXEService } from '../pxe_service/pxe_service.js';
+
+/**
+ * Create and start a PXEService instance with the given AztecNode.
+ * Creates its own LMDB-backed key store and PXE database.
+ * Returns a Promise that resolves to the started PXEService instance.
+ *
+ * @param aztecNode - The AztecNode instance to be used by the server.
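+ * @example Minimal usage sketch (assumes `createAztecNodeClient` from '@aztec/circuit-types' and a config obtained elsewhere; the URL is hypothetical):
+ *   const node = createAztecNodeClient('http://localhost:8080');
+ *   const pxe = await createPXEService(node, config);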
+ * @param config - The PXE Service Config to use
+ * @param useLogSuffix - Optional suffix to attach to log lines (pass true to generate a random one).
+ * @param proofCreator - An optional proof creator to use in place of any other configuration
+ * @returns A Promise that resolves to the started PXEService instance.
+ */
+export async function createPXEService(
+  aztecNode: AztecNode,
+  config: PXEServiceConfig,
+  useLogSuffix: string | boolean | undefined = undefined,
+  proofCreator?: PrivateKernelProver,
+) {
+  const logSuffix =
+    typeof useLogSuffix === 'boolean' ? (useLogSuffix ? randomBytes(3).toString('hex') : undefined) : useLogSuffix;
+
+  const l1Contracts = await aztecNode.getL1ContractAddresses();
+  const configWithContracts = {
+    ...config,
+    l1Contracts,
+  } as PXEServiceConfig;
+
+  const keyStore = new KeyStore(
+    await createStore('pxe_key_store', configWithContracts, createDebugLogger('aztec:pxe:keystore:lmdb')),
+  );
+
+  const store = await createStore('pxe_data', configWithContracts, createDebugLogger('aztec:pxe:data:lmdb'));
+
+  const db = await KVPxeDatabase.create(store);
+  const tips = new L2TipsStore(store, 'pxe');
+
+  const prover = proofCreator ?? (await createProver(config, logSuffix));
+  const server = new PXEService(keyStore, aztecNode, db, tips, prover, config, logSuffix);
+  await server.start();
+  return server;
+}
+
+function createProver(config: PXEServiceConfig, logSuffix?: string) {
+  if (!config.proverEnabled) {
+    return new TestPrivateKernelProver();
+  }
+
+  // (@PhilWindle) Temporary validation until WASM is implemented
+  if (!config.bbBinaryPath || !config.bbWorkingDirectory) {
+    throw new Error(`Prover must be configured with binary path and working directory`);
+  }
+  const bbConfig = config as Required<Pick<PXEServiceConfig, 'bbBinaryPath' | 'bbWorkingDirectory'>> & PXEServiceConfig;
+  const log = createDebugLogger('aztec:pxe:bb-native-prover' + (logSuffix ?
`:${logSuffix}` : '')); + return BBNativePrivateKernelProver.new({ bbSkipCleanup: false, ...bbConfig }, log); +} diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts index d7e139d4dde1..204dd065b062 100644 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts +++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts @@ -11,9 +11,9 @@ import { type L1PublishBlockStats, type L1PublishProofStats } from '@aztec/circu import { AGGREGATION_OBJECT_LENGTH, AZTEC_MAX_EPOCH_DURATION, + type BlockHeader, EthAddress, type FeeRecipient, - type Header, type Proof, type RootRollupPublicInputs, } from '@aztec/circuits.js'; @@ -208,6 +208,10 @@ export class L1Publisher { this.l1TxUtils = new L1TxUtils(this.publicClient, this.walletClient, this.log, config); } + get publisherAddress() { + return this.account.address; + } + protected createWalletClient( account: PrivateKeyAccount, chain: EthereumChain, @@ -354,7 +358,7 @@ export class L1Publisher { * */ public async validateBlockForSubmission( - header: Header, + header: BlockHeader, attestationData: { digest: Buffer; signatures: Signature[] } = { digest: Buffer.alloc(32), signatures: [], diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.ts index 325a2dd2d44a..d1bd7d3df1d4 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.ts @@ -13,10 +13,10 @@ import type { AllowedElement, Signature, WorldStateSynchronizerStatus } from '@a import { type L2BlockBuiltStats } from '@aztec/circuit-types/stats'; import { AppendOnlyTreeSnapshot, + BlockHeader, ContentCommitment, GENESIS_ARCHIVE_ROOT, type GlobalVariables, - Header, StateReference, } from '@aztec/circuits.js'; import { AztecAddress } from '@aztec/foundation/aztec-address'; @@ -289,7 +289,7 @@ export class Sequencer { this.log.debug(`Retrieved ${pendingTxs.length} txs from P2P pool`); // If I created a "partial" header here that should make our job much easier. - const proposalHeader = new Header( + const proposalHeader = new BlockHeader( new AppendOnlyTreeSnapshot(Fr.fromBuffer(chainTipArchive), 1), ContentCommitment.empty(), StateReference.empty(), @@ -343,7 +343,7 @@ export class Sequencer { } /** Whether to skip the check of min txs per block if more than maxSecondsBetweenBlocks has passed since the previous block. 
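   * When no historical header is available, the previous block time is treated as 0 (the Unix epoch).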
*/ - private skipMinTxsPerBlockCheck(historicalHeader: Header | undefined): boolean { + private skipMinTxsPerBlockCheck(historicalHeader: BlockHeader | undefined): boolean { const lastBlockTime = historicalHeader?.globalVariables.timestamp.toNumber() || 0; const currentTime = Math.floor(Date.now() / 1000); const elapsed = currentTime - lastBlockTime; @@ -362,7 +362,9 @@ export class Sequencer { throw new Error(msg); } - this.log.verbose(`Can propose block ${proposalBlockNumber} at slot ${slot}`); + this.log.verbose(`Can propose block ${proposalBlockNumber} at slot ${slot}`, { + publisherAddress: this.publisher.publisherAddress, + }); return slot; } catch (err) { const msg = prettyLogViemErrorMsg(err); @@ -420,7 +422,7 @@ export class Sequencer { this.state = proposedState; } - shouldProposeBlock(historicalHeader: Header | undefined, args: ShouldProposeArgs): boolean { + shouldProposeBlock(historicalHeader: BlockHeader | undefined, args: ShouldProposeArgs): boolean { if (this.isFlushing) { this.log.verbose(`Flushing all pending txs in new block`); return true; @@ -499,7 +501,7 @@ export class Sequencer { private async buildBlock( validTxs: Tx[], newGlobalVariables: GlobalVariables, - historicalHeader?: Header, + historicalHeader?: BlockHeader, interrupt?: (processedTxs: ProcessedTx[]) => Promise, ) { this.log.debug('Requesting L1 to L2 messages from contract'); @@ -567,8 +569,8 @@ export class Sequencer { })) private async buildBlockAndAttemptToPublish( validTxs: Tx[], - proposalHeader: Header, - historicalHeader: Header | undefined, + proposalHeader: BlockHeader, + historicalHeader: BlockHeader | undefined, ): Promise { await this.publisher.validateBlockForSubmission(proposalHeader); @@ -691,6 +693,10 @@ export class Sequencer { this.log.info('Creating block proposal'); const proposal = await this.validatorClient.createBlockProposal(block.header, block.archive.root, txHashes); + if (!proposal) { + this.log.verbose(`Failed to create block proposal, skipping`); + return undefined; + } const slotNumber = block.header.globalVariables.slotNumber.toBigInt(); diff --git a/yarn-project/simulator/package.json b/yarn-project/simulator/package.json index 2832153c30a7..4902b9f74d41 100644 --- a/yarn-project/simulator/package.json +++ b/yarn-project/simulator/package.json @@ -4,6 +4,9 @@ "type": "module", "exports": { ".": "./dest/index.js", + "./client": "./dest/client/index.js", + "./acvm": "./dest/acvm/index.js", + "./errors": "./dest/common/errors.js", "./public/fixtures": "./dest/public/fixtures/index.js" }, "typedocOptions": { diff --git a/yarn-project/simulator/src/acvm/oracle/oracle.ts b/yarn-project/simulator/src/acvm/oracle/oracle.ts index cdf274883f41..99d5d5f29f2c 100644 --- a/yarn-project/simulator/src/acvm/oracle/oracle.ts +++ b/yarn-project/simulator/src/acvm/oracle/oracle.ts @@ -146,10 +146,10 @@ export class Oracle { return witness.toFields().map(toACVMField); } - async getHeader([blockNumber]: ACVMField[]): Promise { + async getBlockHeader([blockNumber]: ACVMField[]): Promise { const parsedBlockNumber = frToNumber(fromACVMField(blockNumber)); - const header = await this.typedOracle.getHeader(parsedBlockNumber); + const header = await this.typedOracle.getBlockHeader(parsedBlockNumber); if (!header) { throw new Error(`Block header not found for block ${parsedBlockNumber}.`); } diff --git a/yarn-project/simulator/src/acvm/oracle/typed_oracle.ts b/yarn-project/simulator/src/acvm/oracle/typed_oracle.ts index 197d235296a1..2505a0478b02 100644 --- 
a/yarn-project/simulator/src/acvm/oracle/typed_oracle.ts +++ b/yarn-project/simulator/src/acvm/oracle/typed_oracle.ts @@ -9,8 +9,8 @@ import { type UnencryptedL2Log, } from '@aztec/circuit-types'; import { + type BlockHeader, type ContractInstance, - type Header, type IndexedTaggingSecret, type KeyValidationRequest, type L1_TO_L2_MSG_TREE_HEIGHT, @@ -127,8 +127,8 @@ export abstract class TypedOracle { throw new OracleMethodNotAvailableError('getLowNullifierMembershipWitness'); } - getHeader(_blockNumber: number): Promise
{ - throw new OracleMethodNotAvailableError('getHeader'); + getBlockHeader(_blockNumber: number): Promise { + throw new OracleMethodNotAvailableError('getBlockHeader'); } getCompleteAddress(_account: AztecAddress): Promise { diff --git a/yarn-project/simulator/src/avm/avm_memory_types.ts b/yarn-project/simulator/src/avm/avm_memory_types.ts index 3acd31600830..974719f6a106 100644 --- a/yarn-project/simulator/src/avm/avm_memory_types.ts +++ b/yarn-project/simulator/src/avm/avm_memory_types.ts @@ -259,7 +259,7 @@ export class TaggedMemory implements TaggedMemoryInterface { public getAs(offset: number): T { assert(offset < TaggedMemory.MAX_MEMORY_SIZE); const word = this._mem[offset]; - TaggedMemory.log.debug(`get(${offset}) = ${word}`); + TaggedMemory.log.trace(`get(${offset}) = ${word}`); if (word === undefined) { TaggedMemory.log.debug(`WARNING: Memory at offset ${offset} is undefined!`); return new Field(0) as T; @@ -270,7 +270,7 @@ export class TaggedMemory implements TaggedMemoryInterface { public getSlice(offset: number, size: number): MemoryValue[] { assert(offset + size <= TaggedMemory.MAX_MEMORY_SIZE); const value = this._mem.slice(offset, offset + size); - TaggedMemory.log.debug(`getSlice(${offset}, ${size}) = ${value}`); + TaggedMemory.log.trace(`getSlice(${offset}, ${size}) = ${value}`); for (let i = 0; i < value.length; i++) { if (value[i] === undefined) { value[i] = new Field(0); @@ -293,7 +293,7 @@ export class TaggedMemory implements TaggedMemoryInterface { public set(offset: number, v: MemoryValue) { assert(offset < TaggedMemory.MAX_MEMORY_SIZE); this._mem[offset] = v; - TaggedMemory.log.debug(`set(${offset}, ${v})`); + TaggedMemory.log.trace(`set(${offset}, ${v})`); } public setSlice(offset: number, vs: MemoryValue[]) { @@ -303,7 +303,7 @@ export class TaggedMemory implements TaggedMemoryInterface { this._mem.length = offset + vs.length; } this._mem.splice(offset, vs.length, ...vs); - TaggedMemory.log.debug(`setSlice(${offset}, ${vs})`); + TaggedMemory.log.trace(`setSlice(${offset}, ${vs})`); } public getTag(offset: number): TypeTag { diff --git a/yarn-project/simulator/src/avm/avm_simulator.test.ts b/yarn-project/simulator/src/avm/avm_simulator.test.ts index 72889ea63c11..ef06934d7c54 100644 --- a/yarn-project/simulator/src/avm/avm_simulator.test.ts +++ b/yarn-project/simulator/src/avm/avm_simulator.test.ts @@ -1,9 +1,9 @@ import { MerkleTreeId, type MerkleTreeWriteOperations } from '@aztec/circuit-types'; import { + DEPLOYER_CONTRACT_ADDRESS, GasFees, GlobalVariables, PublicDataTreeLeafPreimage, - type PublicFunction, PublicKeys, SerializableContractInstance, } from '@aztec/circuits.js'; @@ -15,7 +15,7 @@ import { AztecAddress } from '@aztec/foundation/aztec-address'; import { keccak256, keccakf1600, pedersenCommit, pedersenHash, poseidon2Hash, sha256 } from '@aztec/foundation/crypto'; import { Fq, Fr, Point } from '@aztec/foundation/fields'; import { type Fieldable } from '@aztec/foundation/serialize'; -import { openTmpStore } from '@aztec/kv-store/utils'; +import { openTmpStore } from '@aztec/kv-store/lmdb'; import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; import { MerkleTrees } from '@aztec/world-state'; @@ -23,7 +23,8 @@ import { randomInt } from 'crypto'; import { mock } from 'jest-mock-extended'; import { PublicEnqueuedCallSideEffectTrace } from '../public/enqueued_call_side_effect_trace.js'; -import { type WorldStateDB } from '../public/public_db_sources.js'; +import { MockedAvmTestContractDataSource } from '../public/fixtures/index.js'; +import { 
WorldStateDB } from '../public/public_db_sources.js'; import { type PublicSideEffectTraceInterface } from '../public/side_effect_trace_interface.js'; import { type AvmContext } from './avm_context.js'; import { type AvmExecutionEnvironment } from './avm_execution_environment.js'; @@ -73,6 +74,14 @@ import { mockTraceFork, } from './test_utils.js'; +const siloAddress = (contractAddress: AztecAddress) => { + const contractAddressNullifier = siloNullifier( + AztecAddress.fromNumber(DEPLOYER_CONTRACT_ADDRESS), + contractAddress.toField(), + ); + return contractAddressNullifier; +}; + describe('AVM simulator: injected bytecode', () => { let calldata: Fr[]; let bytecode: Buffer; @@ -127,46 +136,29 @@ describe('AVM simulator: transpiled Noir contracts', () => { const globals = GlobalVariables.empty(); globals.timestamp = TIMESTAMP; - const bytecode = getAvmTestContractBytecode('public_dispatch'); - const fnSelector = getAvmTestContractFunctionSelector('public_dispatch'); - const publicFn: PublicFunction = { bytecode, selector: fnSelector }; - const contractClass = makeContractClassPublic(0, publicFn); - const contractInstance = makeContractInstanceFromClassId(contractClass.id); - - // The values here should match those in getContractInstance test case - const instanceGet = new SerializableContractInstance({ - version: 1, - salt: new Fr(0x123), - deployer: AztecAddress.fromNumber(0x456), - contractClassId: new Fr(0x789), - initializationHash: new Fr(0x101112), - publicKeys: new PublicKeys( - new Point(new Fr(0x131415), new Fr(0x161718), false), - new Point(new Fr(0x192021), new Fr(0x222324), false), - new Point(new Fr(0x252627), new Fr(0x282930), false), - new Point(new Fr(0x313233), new Fr(0x343536), false), - ), - }).withAddress(contractInstance.address); - const worldStateDB = mock(); - const tmp = openTmpStore(); - const telemetryClient = new NoopTelemetryClient(); - const merkleTree = await (await MerkleTrees.new(tmp, telemetryClient)).fork(); - worldStateDB.getMerkleInterface.mockReturnValue(merkleTree); - - worldStateDB.getContractInstance - .mockResolvedValueOnce(contractInstance) - .mockResolvedValueOnce(instanceGet) // test gets deployer - .mockResolvedValueOnce(instanceGet) // test gets class id - .mockResolvedValueOnce(instanceGet) // test gets init hash - .mockResolvedValue(contractInstance); - worldStateDB.getContractClass.mockResolvedValue(contractClass); - - const storageValue = new Fr(5); - mockStorageRead(worldStateDB, storageValue); + const telemetry = new NoopTelemetryClient(); + const merkleTrees = await (await MerkleTrees.new(openTmpStore(), telemetry)).fork(); + const contractDataSource = new MockedAvmTestContractDataSource(); + const worldStateDB = new WorldStateDB(merkleTrees, contractDataSource); + + const contractInstance = contractDataSource.contractInstance; + const contractAddressNullifier = siloNullifier( + AztecAddress.fromNumber(DEPLOYER_CONTRACT_ADDRESS), + contractInstance.address.toField(), + ); + await merkleTrees.batchInsert(MerkleTreeId.NULLIFIER_TREE, [contractAddressNullifier.toBuffer()], 0); + // other contract address used by the bulk test's GETCONTRACTINSTANCE test + const otherContractAddressNullifier = siloNullifier( + AztecAddress.fromNumber(DEPLOYER_CONTRACT_ADDRESS), + contractDataSource.otherContractInstance.address.toField(), + ); + await merkleTrees.batchInsert(MerkleTreeId.NULLIFIER_TREE, [otherContractAddressNullifier.toBuffer()], 0); const trace = mock(); - const merkleTrees = await AvmEphemeralForest.create(worldStateDB.getMerkleInterface()); 
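// A minimal sketch (illustrative only; not part of this diff) of the deployment-nullifier
// convention the setup above relies on: per the assertions elsewhere in this PR, a contract
// instance counts as deployed only if its address, siloed as a nullifier under the canonical
// deployer contract, is present in the nullifier tree, so tests pre-insert that nullifier
// before exercising GETCONTRACTINSTANCE. `registerDeployment` is a hypothetical helper name;
// the other identifiers are the ones already imported in this test file.
//
//   async function registerDeployment(trees: MerkleTreeWriteOperations, instanceAddress: AztecAddress) {
//     // Silo the instance address under the canonical deployer contract, as the AVM expects.
//     const deploymentNullifier = siloNullifier(
//       AztecAddress.fromNumber(DEPLOYER_CONTRACT_ADDRESS),
//       instanceAddress.toField(),
//     );
//     // Insert it into the nullifier tree so membership checks for this instance pass.
//     await trees.batchInsert(MerkleTreeId.NULLIFIER_TREE, [deploymentNullifier.toBuffer()], 0);
//   }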
- const persistableState = initPersistableStateManager({ worldStateDB, trace, merkleTrees }); + const nestedTrace = mock(); + mockTraceFork(trace, nestedTrace); + const ephemeralTrees = await AvmEphemeralForest.create(worldStateDB.getMerkleInterface()); + const persistableState = initPersistableStateManager({ worldStateDB, trace, merkleTrees: ephemeralTrees }); const environment = initExecutionEnvironment({ functionSelector, calldata, @@ -176,10 +168,6 @@ describe('AVM simulator: transpiled Noir contracts', () => { }); const context = initContext({ env: environment, persistableState }); - const nestedTrace = mock(); - mockTraceFork(trace, nestedTrace); - mockGetBytecode(worldStateDB, bytecode); - // First we simulate (though it's not needed in this simple case). const simulator = new AvmSimulator(context); const results = await simulator.execute(); @@ -234,18 +222,6 @@ describe('AVM simulator: transpiled Noir contracts', () => { expect(results.output).toEqual([computeVarArgsHash(calldata)]); }); - it('functionSelector getter via dispatch', async () => { - const selector = FunctionSelector.fromSignature('get_function_selector()').toField(); - const dispatchCalldata = [selector]; - - const context = initContext({ env: initExecutionEnvironment({ calldata: dispatchCalldata }) }); - const bytecode = getAvmTestContractBytecode('public_dispatch'); - const results = await new AvmSimulator(context).executeBytecode(bytecode); - - expect(results.reverted).toBe(false); - expect(results.output).toEqual([selector]); - }); - it('modulo and u1', async () => { const calldata: Fr[] = [new Fr(2)]; const context = initContext({ env: initExecutionEnvironment({ calldata }) }); @@ -591,7 +567,7 @@ describe('AVM simulator: transpiled Noir contracts', () => { const bytecode = getAvmTestContractBytecode('nullifier_exists'); if (exists) { - mockNullifierExists(worldStateDB, leafIndex, value0); + mockNullifierExists(worldStateDB, leafIndex, siloedNullifier0); } const results = await new AvmSimulator(context).executeBytecode(bytecode); @@ -883,7 +859,14 @@ describe('AVM simulator: transpiled Noir contracts', () => { new Point(new Fr(0x313233), new Fr(0x343536), false), ), }); - mockGetContractInstance(worldStateDB, contractInstance.withAddress(address)); + const contractInstanceWithAddress = contractInstance.withAddress(address); + // mock once per enum value (deployer, classId, initializationHash) + mockGetContractInstance(worldStateDB, contractInstanceWithAddress); + mockGetContractInstance(worldStateDB, contractInstanceWithAddress); + mockGetContractInstance(worldStateDB, contractInstanceWithAddress); + mockNullifierExists(worldStateDB, siloAddress(contractInstanceWithAddress.address)); + mockNullifierExists(worldStateDB, siloAddress(contractInstanceWithAddress.address)); + mockNullifierExists(worldStateDB, siloAddress(contractInstanceWithAddress.address)); const bytecode = getAvmTestContractBytecode('test_get_contract_instance'); @@ -952,6 +935,7 @@ describe('AVM simulator: transpiled Noir contracts', () => { mockGetContractClass(worldStateDB, contractClass); const contractInstance = makeContractInstanceFromClassId(contractClass.id); mockGetContractInstance(worldStateDB, contractInstance); + mockNullifierExists(worldStateDB, siloAddress(contractInstance.address)); const nestedTrace = mock(); mockTraceFork(trace, nestedTrace); @@ -977,6 +961,7 @@ describe('AVM simulator: transpiled Noir contracts', () => { mockGetContractClass(worldStateDB, contractClass); const contractInstance = 
makeContractInstanceFromClassId(contractClass.id); mockGetContractInstance(worldStateDB, contractInstance); + mockNullifierExists(worldStateDB, siloAddress(contractInstance.address)); const nestedTrace = mock(); mockTraceFork(trace, nestedTrace); @@ -1005,6 +990,7 @@ describe('AVM simulator: transpiled Noir contracts', () => { mockGetContractClass(worldStateDB, contractClass); const contractInstance = makeContractInstanceFromClassId(contractClass.id); mockGetContractInstance(worldStateDB, contractInstance); + mockNullifierExists(worldStateDB, siloAddress(contractInstance.address)); mockTraceFork(trace); @@ -1029,6 +1015,7 @@ describe('AVM simulator: transpiled Noir contracts', () => { mockGetContractClass(worldStateDB, contractClass); const contractInstance = makeContractInstanceFromClassId(contractClass.id); mockGetContractInstance(worldStateDB, contractInstance); + mockNullifierExists(worldStateDB, siloAddress(contractInstance.address)); const nestedTrace = mock(); mockTraceFork(trace, nestedTrace); @@ -1060,6 +1047,7 @@ describe('AVM simulator: transpiled Noir contracts', () => { mockGetContractClass(worldStateDB, contractClass); const contractInstance = makeContractInstanceFromClassId(contractClass.id); mockGetContractInstance(worldStateDB, contractInstance); + mockNullifierExists(worldStateDB, siloAddress(contractInstance.address)); mockTraceFork(trace); @@ -1084,6 +1072,7 @@ describe('AVM simulator: transpiled Noir contracts', () => { mockGetContractClass(worldStateDB, contractClass); const contractInstance = makeContractInstanceFromClassId(contractClass.id); mockGetContractInstance(worldStateDB, contractInstance); + mockNullifierExists(worldStateDB, siloAddress(contractInstance.address)); mockTraceFork(trace); diff --git a/yarn-project/simulator/src/avm/avm_simulator.ts b/yarn-project/simulator/src/avm/avm_simulator.ts index 643fae72da0c..480d668959f9 100644 --- a/yarn-project/simulator/src/avm/avm_simulator.ts +++ b/yarn-project/simulator/src/avm/avm_simulator.ts @@ -55,7 +55,8 @@ export class AvmSimulator { `Cannot allocate more than ${MAX_L2_GAS_PER_ENQUEUED_CALL} to the AVM for execution of an enqueued call`, ); this.log = createDebugLogger(`aztec:avm_simulator:core(f:${context.environment.functionSelector.toString()})`); - if (process.env.LOG_LEVEL === 'debug') { + // TODO(palla/log): Should tallies be printed on debug, or only on trace? 
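// A hedged sketch of the pattern adopted just below (assuming only the logger API visible in
// this diff: createDebugLogger and isLevelEnabled): gate expensive instrumentation on the
// logger's effective level rather than on process.env.LOG_LEVEL directly, so the check stays
// consistent with however the logger itself is configured and the hot path pays nothing when
// debug output is off. The names `tally` and `counts` are illustrative, not from the PR.
//
//   const log = createDebugLogger('aztec:avm_simulator:example');
//   let tally: ((opcode: string) => void) | undefined;
//   if (log.isLevelEnabled('debug')) {
//     // Only allocate and wire up the tally when debug logging is actually enabled.
//     const counts = new Map<string, number>();
//     tally = opcode => counts.set(opcode, (counts.get(opcode) ?? 0) + 1);
//   }
//   // In the instruction loop, a single optional call keeps the fast path cheap:
//   tally?.('ADD'); // e.g. once per executed opcode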
+ if (this.log.isLevelEnabled('debug')) { this.tallyPrintFunction = this.printOpcodeTallies; this.tallyInstructionFunction = this.tallyInstruction; } @@ -144,7 +145,7 @@ export class AvmSimulator { const instrStartGas = machineState.gasLeft; // Save gas before executing instruction (for profiling) const instrPc = machineState.pc; // Save PC before executing instruction (for profiling) - this.log.debug( + this.log.trace( `[PC:${machineState.pc}] [IC:${instrCounter++}] ${instruction.toString()} (gasLeft l2=${ machineState.l2GasLeft } da=${machineState.daGasLeft})`, @@ -185,7 +186,7 @@ export class AvmSimulator { } catch (err: any) { this.log.verbose('Exceptional halt (revert by something other than REVERT opcode)'); if (!(err instanceof AvmExecutionError || err instanceof SideEffectLimitReachedError)) { - this.log.verbose(`Unknown error thrown by AVM: ${err}`); + this.log.error(`Unknown error thrown by AVM: ${err}`); throw err; } diff --git a/yarn-project/simulator/src/avm/avm_tree.test.ts b/yarn-project/simulator/src/avm/avm_tree.test.ts index b30ef226cbb4..ead35b02af24 100644 --- a/yarn-project/simulator/src/avm/avm_tree.test.ts +++ b/yarn-project/simulator/src/avm/avm_tree.test.ts @@ -16,7 +16,7 @@ import { import { poseidon2Hash } from '@aztec/foundation/crypto'; import { Fr } from '@aztec/foundation/fields'; import { type IndexedTreeLeafPreimage } from '@aztec/foundation/trees'; -import { openTmpStore } from '@aztec/kv-store/utils'; +import { openTmpStore } from '@aztec/kv-store/lmdb'; import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; import { MerkleTrees, NativeWorldStateService } from '@aztec/world-state'; diff --git a/yarn-project/simulator/src/avm/avm_tree.ts b/yarn-project/simulator/src/avm/avm_tree.ts index f9cc70f745e6..5ac80dd87fd2 100644 --- a/yarn-project/simulator/src/avm/avm_tree.ts +++ b/yarn-project/simulator/src/avm/avm_tree.ts @@ -680,7 +680,12 @@ export class EphemeralAvmTree { for (let i = 0; i < siblingPath.length; i++) { // Flip(XOR) the last bit because we are inserting siblings of the leaf const sibIndex = index ^ 1n; - this.updateLeaf(siblingPath[i], sibIndex, this.depth - i); + const node = this.getNode(sibIndex, this.depth - i); + // If we are inserting a sibling path and we already have a branch at that index in our + // ephemeral tree, we should not overwrite it + if (node === undefined || node.tag === TreeType.LEAF) { + this.updateLeaf(siblingPath[i], sibIndex, this.depth - i); + } index >>= 1n; } } diff --git a/yarn-project/simulator/src/avm/journal/journal.test.ts b/yarn-project/simulator/src/avm/journal/journal.test.ts index 2665b1aec57c..dd50228ab55b 100644 --- a/yarn-project/simulator/src/avm/journal/journal.test.ts +++ b/yarn-project/simulator/src/avm/journal/journal.test.ts @@ -1,5 +1,6 @@ -import { AztecAddress, SerializableContractInstance } from '@aztec/circuits.js'; +import { AztecAddress, SerializableContractInstance, computePublicBytecodeCommitment } from '@aztec/circuits.js'; import { siloNullifier } from '@aztec/circuits.js/hash'; +import { makeContractClassPublic } from '@aztec/circuits.js/testing'; import { Fr } from '@aztec/foundation/fields'; import { mock } from 'jest-mock-extended'; @@ -8,6 +9,8 @@ import { type WorldStateDB } from '../../public/public_db_sources.js'; import { type PublicSideEffectTraceInterface } from '../../public/side_effect_trace_interface.js'; import { initPersistableStateManager } from '../fixtures/index.js'; import { + mockGetBytecode, + mockGetContractClass, mockGetContractInstance, 
mockL1ToL2MessageExists, mockNoteHashExists, @@ -132,7 +135,7 @@ describe('journal', () => { describe('Getting contract instances', () => { it('Should get contract instance', async () => { const contractInstance = SerializableContractInstance.default(); - mockGetContractInstance(worldStateDB, contractInstance.withAddress(address)); + mockNullifierExists(worldStateDB, leafIndex, utxo); mockGetContractInstance(worldStateDB, contractInstance.withAddress(address)); await persistableState.getContractInstance(address); expect(trace.traceGetContractInstance).toHaveBeenCalledTimes(1); @@ -145,6 +148,41 @@ describe('journal', () => { }); }); + describe('Getting bytecode', () => { + it('Should get bytecode', async () => { + const bytecode = Buffer.from('0xdeadbeef'); + const bytecodeCommitment = computePublicBytecodeCommitment(bytecode); + const contractInstance = SerializableContractInstance.default(); + const contractClass = makeContractClassPublic(); + + mockNullifierExists(worldStateDB, leafIndex, utxo); + mockGetContractInstance(worldStateDB, contractInstance.withAddress(address)); + mockGetContractClass(worldStateDB, contractClass); + mockGetBytecode(worldStateDB, bytecode); + + const expectedContractClassPreimage = { + artifactHash: contractClass.artifactHash, + privateFunctionsRoot: contractClass.privateFunctionsRoot, + publicBytecodeCommitment: bytecodeCommitment, + }; + + await persistableState.getBytecode(address); + expect(trace.traceGetBytecode).toHaveBeenCalledTimes(1); + expect(trace.traceGetBytecode).toHaveBeenCalledWith( + address, + /*exists=*/ true, + contractClass.packedBytecode, + contractInstance, + expectedContractClassPreimage, + ); + }); + it('Can get undefined contract instance', async () => { + await persistableState.getBytecode(address); + expect(trace.traceGetBytecode).toHaveBeenCalledTimes(1); + expect(trace.traceGetBytecode).toHaveBeenCalledWith(address, /*exists=*/ false); + }); + }); + //it('Should merge two successful journals together', async () => { // // Fundamentally checking that insert ordering of public storage is preserved upon journal merge // // time | journal | op | value diff --git a/yarn-project/simulator/src/avm/journal/journal.ts b/yarn-project/simulator/src/avm/journal/journal.ts index 63dbf59f09e8..7d27597a30ee 100644 --- a/yarn-project/simulator/src/avm/journal/journal.ts +++ b/yarn-project/simulator/src/avm/journal/journal.ts @@ -1,10 +1,16 @@ import { MerkleTreeId } from '@aztec/circuit-types'; import { - type AztecAddress, + AztecAddress, + CANONICAL_AUTH_REGISTRY_ADDRESS, + DEPLOYER_CONTRACT_ADDRESS, + FEE_JUICE_ADDRESS, type Gas, - type NullifierLeafPreimage, + MULTI_CALL_ENTRYPOINT_ADDRESS, + NullifierLeafPreimage, type PublicCallRequest, type PublicDataTreeLeafPreimage, + REGISTERER_CONTRACT_ADDRESS, + ROUTER_ADDRESS, SerializableContractInstance, } from '@aztec/circuits.js'; import { computePublicDataTreeLeafSlot, siloNoteHash, siloNullifier } from '@aztec/circuits.js/hash'; @@ -155,7 +161,7 @@ export class AvmPersistableStateManager { const leafSlot = computePublicDataTreeLeafSlot(contractAddress, slot); if (this.doMerkleOperations) { const result = await this.merkleTrees.writePublicStorage(leafSlot, value); - assert(result !== undefined, 'Public data tree insertion error. You might want to disable skipMerkleOperations.'); + assert(result !== undefined, 'Public data tree insertion error. 
You might want to disable doMerkleOperations.');
       this.log.debug(`Inserted public data tree leaf at leafSlot ${leafSlot}, value: ${value}`);
 
       const lowLeafInfo = result.lowWitness;
@@ -303,8 +309,47 @@ export class AvmPersistableStateManager {
   public async checkNullifierExists(contractAddress: AztecAddress, nullifier: Fr): Promise<boolean> {
     this.log.debug(`Checking existence of nullifier (address=${contractAddress}, nullifier=${nullifier})`);
     const siloedNullifier = siloNullifier(contractAddress, nullifier);
+    const [exists, leafOrLowLeafPreimage, leafOrLowLeafIndex, leafOrLowLeafPath] = await this.getNullifierMembership(
+      siloedNullifier,
+    );
+
+    if (this.doMerkleOperations) {
+      this.trace.traceNullifierCheck(
+        siloedNullifier,
+        exists,
+        leafOrLowLeafPreimage,
+        leafOrLowLeafIndex,
+        leafOrLowLeafPath,
+      );
+    } else {
+      this.trace.traceNullifierCheck(siloedNullifier, exists);
+    }
+    return Promise.resolve(exists);
+  }
+
+  /**
+   * Helper to get membership information for a siloed nullifier when checking its existence.
+   * Optionally trace the nullifier check.
+   *
+   * @param siloedNullifier - the siloed nullifier to get membership information for
+   * @returns
+   * - exists - whether the nullifier exists in the nullifier set
+   * - leafOrLowLeafPreimage - the preimage of the nullifier leaf or its low-leaf if it doesn't exist
+   * - leafOrLowLeafIndex - the leaf index of the nullifier leaf or its low-leaf if it doesn't exist
+   * - leafOrLowLeafPath - the sibling path of the nullifier leaf or its low-leaf if it doesn't exist
+   */
+  private async getNullifierMembership(
+    siloedNullifier: Fr,
+  ): Promise<
+    [
+      /*exists=*/ boolean,
+      /*leafOrLowLeafPreimage=*/ NullifierLeafPreimage,
+      /*leafOrLowLeafIndex=*/ Fr,
+      /*leafOrLowLeafPath=*/ Fr[],
+    ]
+  > {
     const [exists, isPending, _] = await this.nullifiers.checkExists(siloedNullifier);
     this.log.debug(`Checked siloed nullifier ${siloedNullifier} (exists=${exists}, pending=${isPending})`);
 
     if (this.doMerkleOperations) {
       // Get leaf if present, low leaf if absent
@@ -319,7 +364,7 @@ export class AvmPersistableStateManager {
       assert(
         alreadyPresent == exists,
-        'WorldStateDB contains nullifier leaf, but merkle tree does not.... This is a bug!',
+        'WorldStateDB contains nullifier leaf, but merkle tree does not (or vice versa).... This is a bug!',
       );
 
       if (exists) {
@@ -332,12 +377,10 @@ export class AvmPersistableStateManager {
           'Nullifier tree low leaf should skip the target leaf nullifier when the target leaf does not exist.',
         );
       }
-
-      this.trace.traceNullifierCheck(siloedNullifier, exists, leafPreimage, new Fr(leafIndex), leafPath);
+      return [exists, leafPreimage, new Fr(leafIndex), leafPath];
     } else {
-      this.trace.traceNullifierCheck(siloedNullifier, exists);
+      return [exists, NullifierLeafPreimage.empty(), Fr.ZERO, []];
     }
-    return Promise.resolve(exists);
   }
 
   /**
@@ -386,6 +429,11 @@ export class AvmPersistableStateManager {
     // Cache pending nullifiers for later access
     await this.nullifiers.append(siloedNullifier); // We append the new nullifier
+    this.log.debug(
+      `Nullifier tree root before insertion ${this.merkleTrees.treeMap
+        .get(MerkleTreeId.NULLIFIER_TREE)!
+ .getRoot()}`, + ); const appendResult = await this.merkleTrees.appendNullifier(siloedNullifier); this.log.debug( `Nullifier tree root after insertion ${this.merkleTrees.treeMap.get(MerkleTreeId.NULLIFIER_TREE)!.getRoot()}`, @@ -479,18 +527,59 @@ export class AvmPersistableStateManager { const instanceWithAddress = await this.worldStateDB.getContractInstance(contractAddress); const exists = instanceWithAddress !== undefined; - // TODO: nullifier check! + let [existsInTree, leafOrLowLeafPreimage, leafOrLowLeafIndex, leafOrLowLeafPath] = [ + exists, + NullifierLeafPreimage.empty(), + Fr.ZERO, + new Array(), + ]; + if (!contractAddressIsCanonical(contractAddress)) { + const contractAddressNullifier = siloNullifier( + AztecAddress.fromNumber(DEPLOYER_CONTRACT_ADDRESS), + contractAddress.toField(), + ); + [existsInTree, leafOrLowLeafPreimage, leafOrLowLeafIndex, leafOrLowLeafPath] = await this.getNullifierMembership( + /*siloedNullifier=*/ contractAddressNullifier, + ); + assert( + exists == existsInTree, + 'WorldStateDB contains contract instance, but nullifier tree does not contain contract address (or vice versa).... This is a bug!', + ); + } + if (exists) { const instance = new SerializableContractInstance(instanceWithAddress); this.log.debug( `Got contract instance (address=${contractAddress}): exists=${exists}, instance=${jsonStringify(instance)}`, ); - this.trace.traceGetContractInstance(contractAddress, exists, instance); + if (this.doMerkleOperations) { + this.trace.traceGetContractInstance( + contractAddress, + exists, + instance, + leafOrLowLeafPreimage, + leafOrLowLeafIndex, + leafOrLowLeafPath, + ); + } else { + this.trace.traceGetContractInstance(contractAddress, exists, instance); + } return Promise.resolve(instance); } else { this.log.debug(`Contract instance NOT FOUND (address=${contractAddress})`); - this.trace.traceGetContractInstance(contractAddress, exists); + if (this.doMerkleOperations) { + this.trace.traceGetContractInstance( + contractAddress, + exists, + /*instance=*/ undefined, + leafOrLowLeafPreimage, + leafOrLowLeafIndex, + leafOrLowLeafPath, + ); + } else { + this.trace.traceGetContractInstance(contractAddress, exists); + } return Promise.resolve(undefined); } } @@ -503,6 +592,26 @@ export class AvmPersistableStateManager { const instanceWithAddress = await this.worldStateDB.getContractInstance(contractAddress); const exists = instanceWithAddress !== undefined; + let [existsInTree, leafOrLowLeafPreimage, leafOrLowLeafIndex, leafOrLowLeafPath] = [ + exists, + NullifierLeafPreimage.empty(), + Fr.ZERO, + new Array(), + ]; + if (!contractAddressIsCanonical(contractAddress)) { + const contractAddressNullifier = siloNullifier( + AztecAddress.fromNumber(DEPLOYER_CONTRACT_ADDRESS), + contractAddress.toField(), + ); + [existsInTree, leafOrLowLeafPreimage, leafOrLowLeafIndex, leafOrLowLeafPath] = await this.getNullifierMembership( + /*siloedNullifier=*/ contractAddressNullifier, + ); + assert( + exists == existsInTree, + 'WorldStateDB contains contract instance, but nullifier tree does not contain contract address (or vice versa).... 
This is a bug!', + ); + } + if (exists) { const instance = new SerializableContractInstance(instanceWithAddress); const contractClass = await this.worldStateDB.getContractClass(instance.contractClassId); @@ -524,20 +633,46 @@ export class AvmPersistableStateManager { publicBytecodeCommitment: bytecodeCommitment, }; - this.trace.traceGetBytecode( - contractAddress, - exists, - contractClass.packedBytecode, - instance, - contractClassPreimage, - ); + if (this.doMerkleOperations) { + this.trace.traceGetBytecode( + contractAddress, + exists, + contractClass.packedBytecode, + instance, + contractClassPreimage, + leafOrLowLeafPreimage, + leafOrLowLeafIndex, + leafOrLowLeafPath, + ); + } else { + this.trace.traceGetBytecode( + contractAddress, + exists, + contractClass.packedBytecode, + instance, + contractClassPreimage, + ); + } return contractClass.packedBytecode; } else { // If the contract instance is not found, we assume it has not been deployed. // It doesnt matter what the values of the contract instance are in this case, as long as we tag it with exists=false. // This will hint to the avm circuit to just perform the non-membership check on the address and disregard the bytecode hash - this.trace.traceGetBytecode(contractAddress, exists); // bytecode, instance, class undefined + if (this.doMerkleOperations) { + this.trace.traceGetBytecode( + contractAddress, + exists, + /*instance=*/ undefined, + /*contractClass=*/ undefined, + /*bytecode=*/ undefined, + leafOrLowLeafPreimage, + leafOrLowLeafIndex, + leafOrLowLeafPath, + ); + } else { + this.trace.traceGetBytecode(contractAddress, exists); // bytecode, instance, class undefined + } return undefined; } } @@ -572,3 +707,14 @@ export class AvmPersistableStateManager { this.trace.traceEnqueuedCall(publicCallRequest, calldata, reverted); } } + +function contractAddressIsCanonical(contractAddress: AztecAddress): boolean { + return ( + contractAddress.equals(AztecAddress.fromNumber(CANONICAL_AUTH_REGISTRY_ADDRESS)) || + contractAddress.equals(AztecAddress.fromNumber(DEPLOYER_CONTRACT_ADDRESS)) || + contractAddress.equals(AztecAddress.fromNumber(REGISTERER_CONTRACT_ADDRESS)) || + contractAddress.equals(AztecAddress.fromNumber(MULTI_CALL_ENTRYPOINT_ADDRESS)) || + contractAddress.equals(AztecAddress.fromNumber(FEE_JUICE_ADDRESS)) || + contractAddress.equals(AztecAddress.fromNumber(ROUTER_ADDRESS)) + ); +} diff --git a/yarn-project/simulator/src/avm/opcodes/contract.test.ts b/yarn-project/simulator/src/avm/opcodes/contract.test.ts index 236d49f4d7df..c1703f70b83e 100644 --- a/yarn-project/simulator/src/avm/opcodes/contract.test.ts +++ b/yarn-project/simulator/src/avm/opcodes/contract.test.ts @@ -8,7 +8,7 @@ import { type AvmContext } from '../avm_context.js'; import { Field, TypeTag, Uint1 } from '../avm_memory_types.js'; import { initContext, initPersistableStateManager } from '../fixtures/index.js'; import { type AvmPersistableStateManager } from '../journal/journal.js'; -import { mockGetContractInstance } from '../test_utils.js'; +import { mockGetContractInstance, mockNullifierExists } from '../test_utils.js'; import { ContractInstanceMember, GetContractInstance } from './contract.js'; describe('Contract opcodes', () => { @@ -59,6 +59,7 @@ describe('Contract opcodes', () => { ])('GETCONTRACTINSTANCE member instruction ', (memberEnum: ContractInstanceMember, value: Fr) => { it(`Should read '${ContractInstanceMember[memberEnum]}' correctly`, async () => { mockGetContractInstance(worldStateDB, contractInstance.withAddress(address)); + 
mockNullifierExists(worldStateDB, address.toField()); context.machineState.memory.set(0, new Field(address.toField())); await new GetContractInstance( diff --git a/yarn-project/simulator/src/avm/opcodes/environment_getters.test.ts b/yarn-project/simulator/src/avm/opcodes/environment_getters.test.ts index c13cc8a70e9d..cd7c8fafab0d 100644 --- a/yarn-project/simulator/src/avm/opcodes/environment_getters.test.ts +++ b/yarn-project/simulator/src/avm/opcodes/environment_getters.test.ts @@ -63,7 +63,6 @@ describe('Environment getters', () => { describe.each([ [EnvironmentVariable.ADDRESS, address.toField()], [EnvironmentVariable.SENDER, sender.toField()], - [EnvironmentVariable.FUNCTIONSELECTOR, functionSelector.toField(), TypeTag.UINT32], [EnvironmentVariable.TRANSACTIONFEE, transactionFee.toField()], [EnvironmentVariable.CHAINID, chainId.toField()], [EnvironmentVariable.VERSION, version.toField()], diff --git a/yarn-project/simulator/src/avm/opcodes/environment_getters.ts b/yarn-project/simulator/src/avm/opcodes/environment_getters.ts index da29aa3948ba..97c78488d1bd 100644 --- a/yarn-project/simulator/src/avm/opcodes/environment_getters.ts +++ b/yarn-project/simulator/src/avm/opcodes/environment_getters.ts @@ -1,5 +1,5 @@ import type { AvmContext } from '../avm_context.js'; -import { Field, Uint32, Uint64 } from '../avm_memory_types.js'; +import { Field, Uint64 } from '../avm_memory_types.js'; import { InstructionExecutionError } from '../errors.js'; import { Opcode, OperandType } from '../serialization/instruction_serialization.js'; import { Addressing } from './addressing_mode.js'; @@ -8,7 +8,6 @@ import { Instruction } from './instruction.js'; export enum EnvironmentVariable { ADDRESS, SENDER, - FUNCTIONSELECTOR, TRANSACTIONFEE, CHAINID, VERSION, @@ -27,8 +26,6 @@ function getValue(e: EnvironmentVariable, ctx: AvmContext) { return new Field(ctx.environment.address.toField()); case EnvironmentVariable.SENDER: return new Field(ctx.environment.sender.toField()); - case EnvironmentVariable.FUNCTIONSELECTOR: - return new Uint32(ctx.environment.functionSelector.value); case EnvironmentVariable.TRANSACTIONFEE: return new Field(ctx.environment.transactionFee); case EnvironmentVariable.CHAINID: diff --git a/yarn-project/simulator/src/avm/opcodes/external_calls.test.ts b/yarn-project/simulator/src/avm/opcodes/external_calls.test.ts index 3dbefe89fe90..dbc32d22a9e2 100644 --- a/yarn-project/simulator/src/avm/opcodes/external_calls.test.ts +++ b/yarn-project/simulator/src/avm/opcodes/external_calls.test.ts @@ -13,7 +13,13 @@ import { initContext, initPersistableStateManager } from '../fixtures/index.js'; import { type AvmPersistableStateManager } from '../journal/journal.js'; import { encodeToBytecode } from '../serialization/bytecode_serialization.js'; import { Opcode } from '../serialization/instruction_serialization.js'; -import { mockGetBytecode, mockGetContractClass, mockGetContractInstance, mockTraceFork } from '../test_utils.js'; +import { + mockGetBytecode, + mockGetContractClass, + mockGetContractInstance, + mockNullifierExists, + mockTraceFork, +} from '../test_utils.js'; import { EnvironmentVariable, GetEnvVar } from './environment_getters.js'; import { Call, Return, Revert, StaticCall } from './external_calls.js'; import { type Instruction } from './instruction.js'; @@ -123,6 +129,7 @@ describe('External Calls', () => { mockGetContractClass(worldStateDB, contractClass); const contractInstance = makeContractInstanceFromClassId(contractClass.id); mockGetContractInstance(worldStateDB, 
contractInstance); + mockNullifierExists(worldStateDB, contractInstance.address.toField()); const { l2GasLeft: initialL2Gas, daGasLeft: initialDaGas } = context.machineState; @@ -166,6 +173,7 @@ describe('External Calls', () => { ]), ); mockGetBytecode(worldStateDB, otherContextInstructionsBytecode); + mockNullifierExists(worldStateDB, addr); const contractClass = makeContractClassPublic(0, { bytecode: otherContextInstructionsBytecode, @@ -174,6 +182,7 @@ describe('External Calls', () => { mockGetContractClass(worldStateDB, contractClass); const contractInstance = makeContractInstanceFromClassId(contractClass.id); mockGetContractInstance(worldStateDB, contractInstance); + mockNullifierExists(worldStateDB, contractInstance.address.toField()); const { l2GasLeft: initialL2Gas, daGasLeft: initialDaGas } = context.machineState; @@ -251,6 +260,7 @@ describe('External Calls', () => { const otherContextInstructionsBytecode = markBytecodeAsAvm(encodeToBytecode(otherContextInstructions)); mockGetBytecode(worldStateDB, otherContextInstructionsBytecode); + mockNullifierExists(worldStateDB, addr.toFr()); const contractClass = makeContractClassPublic(0, { bytecode: otherContextInstructionsBytecode, diff --git a/yarn-project/simulator/src/client/client_execution_context.ts b/yarn-project/simulator/src/client/client_execution_context.ts index 3eeb487d81f7..1baf1c434b62 100644 --- a/yarn-project/simulator/src/client/client_execution_context.ts +++ b/yarn-project/simulator/src/client/client_execution_context.ts @@ -11,9 +11,9 @@ import { type UnencryptedL2Log, } from '@aztec/circuit-types'; import { + type BlockHeader, CallContext, FunctionSelector, - type Header, PRIVATE_CONTEXT_INPUTS_LENGTH, PUBLIC_DISPATCH_SELECTOR, PrivateContextInputs, @@ -66,7 +66,7 @@ export class ClientExecutionContext extends ViewDataOracle { private readonly txContext: TxContext, private readonly callContext: CallContext, /** Header of a block whose state is used during private execution (not the block the transaction is included in). */ - protected readonly historicalHeader: Header, + protected readonly historicalHeader: BlockHeader, /** List of transient auth witnesses to be used during this simulation */ authWitnesses: AuthWitness[], private readonly packedValuesCache: PackedValuesCache, diff --git a/yarn-project/simulator/src/client/db_oracle.ts b/yarn-project/simulator/src/client/db_oracle.ts index 4047c17d83bc..6702810c86a3 100644 --- a/yarn-project/simulator/src/client/db_oracle.ts +++ b/yarn-project/simulator/src/client/db_oracle.ts @@ -8,9 +8,9 @@ import { type TxScopedL2Log, } from '@aztec/circuit-types'; import { + type BlockHeader, type CompleteAddress, type ContractInstance, - type Header, type IndexedTaggingSecret, type KeyValidationRequest, } from '@aztec/circuits.js'; @@ -138,7 +138,7 @@ export interface DBOracle extends CommitmentsDB { * * @returns A Promise that resolves to a Header object. */ - getHeader(): Promise
; + getBlockHeader(): Promise; /** * Fetch the index of the leaf in the respective tree diff --git a/yarn-project/simulator/src/client/private_execution.test.ts b/yarn-project/simulator/src/client/private_execution.test.ts index 815f48c36bba..b7cd2b47457a 100644 --- a/yarn-project/simulator/src/client/private_execution.test.ts +++ b/yarn-project/simulator/src/client/private_execution.test.ts @@ -10,13 +10,13 @@ import { } from '@aztec/circuit-types'; import { AppendOnlyTreeSnapshot, + BlockHeader, CallContext, CompleteAddress, GasFees, GasSettings, GeneratorIndex, type GrumpkinScalar, - Header, IndexedTaggingSecret, KeyValidationRequest, L1_TO_L2_MSG_TREE_HEIGHT, @@ -54,7 +54,7 @@ import { EthAddress } from '@aztec/foundation/eth-address'; import { Fr } from '@aztec/foundation/fields'; import { type DebugLogger, createDebugLogger } from '@aztec/foundation/log'; import { type FieldsOf } from '@aztec/foundation/types'; -import { openTmpStore } from '@aztec/kv-store/utils'; +import { openTmpStore } from '@aztec/kv-store/lmdb'; import { type AppendOnlyTree, Poseidon, StandardTree, newTree } from '@aztec/merkle-tree'; import { ChildContractArtifact, @@ -82,7 +82,7 @@ describe('Private Execution test suite', () => { let acirSimulator: AcirSimulator; - let header = Header.empty(); + let header = BlockHeader.empty(); let logger: DebugLogger; const defaultContractAddress = AztecAddress.random(); @@ -154,7 +154,7 @@ describe('Private Execution test suite', () => { const newSnap = new AppendOnlyTreeSnapshot(Fr.fromBuffer(tree.getRoot(true)), Number(tree.getNumLeaves(true))); if (name === 'noteHash' || name === 'l1ToL2Messages' || name === 'publicData') { - header = new Header( + header = new BlockHeader( header.lastArchive, header.contentCommitment, new StateReference( @@ -170,7 +170,7 @@ describe('Private Execution test suite', () => { header.totalManaUsed, ); } else { - header = new Header( + header = new BlockHeader( header.lastArchive, header.contentCommitment, new StateReference(newSnap, header.state.partial), @@ -241,7 +241,7 @@ describe('Private Execution test suite', () => { // We call insertLeaves here with no leaves to populate empty public data tree root --> this is necessary to be // able to get ivpk_m during execution await insertLeaves([], 'publicData'); - oracle.getHeader.mockResolvedValue(header); + oracle.getBlockHeader.mockResolvedValue(header); oracle.getCompleteAddress.mockImplementation((address: AztecAddress) => { if (address.equals(owner)) { @@ -605,7 +605,7 @@ describe('Private Execution test suite', () => { return Promise.resolve(new MessageLoadOracleInputs(0n, await tree.getSiblingPath(0n, true))); }); if (updateHeader) { - oracle.getHeader.mockResolvedValue(header); + oracle.getBlockHeader.mockResolvedValue(header); } }; @@ -653,7 +653,7 @@ describe('Private Execution test suite', () => { await mockOracles(); // Update state - oracle.getHeader.mockResolvedValue(header); + oracle.getBlockHeader.mockResolvedValue(header); await expect( runSimulator({ @@ -673,7 +673,7 @@ describe('Private Execution test suite', () => { await mockOracles(); // Update state - oracle.getHeader.mockResolvedValue(header); + oracle.getBlockHeader.mockResolvedValue(header); await expect( runSimulator({ @@ -692,7 +692,7 @@ describe('Private Execution test suite', () => { await mockOracles(); // Update state - oracle.getHeader.mockResolvedValue(header); + oracle.getBlockHeader.mockResolvedValue(header); await expect( runSimulator({ @@ -711,7 +711,7 @@ describe('Private Execution test suite', () => { 
await mockOracles(); // Update state - oracle.getHeader.mockResolvedValue(header); + oracle.getBlockHeader.mockResolvedValue(header); await expect( runSimulator({ @@ -731,7 +731,7 @@ describe('Private Execution test suite', () => { await mockOracles(); // Update state - oracle.getHeader.mockResolvedValue(header); + oracle.getBlockHeader.mockResolvedValue(header); await expect( runSimulator({ @@ -751,7 +751,7 @@ describe('Private Execution test suite', () => { await mockOracles(); // Update state - oracle.getHeader.mockResolvedValue(header); + oracle.getBlockHeader.mockResolvedValue(header); await expect( runSimulator({ @@ -1122,8 +1122,8 @@ describe('Private Execution test suite', () => { header = makeHeader(); - oracle.getHeader.mockClear(); - oracle.getHeader.mockResolvedValue(header); + oracle.getBlockHeader.mockClear(); + oracle.getBlockHeader.mockResolvedValue(header); }); it('Header is correctly set', async () => { diff --git a/yarn-project/simulator/src/client/simulator.ts b/yarn-project/simulator/src/client/simulator.ts index d434f75fa287..a60f634a2883 100644 --- a/yarn-project/simulator/src/client/simulator.ts +++ b/yarn-project/simulator/src/client/simulator.ts @@ -57,7 +57,7 @@ export class AcirSimulator { ); } - const header = await this.db.getHeader(); + const header = await this.db.getBlockHeader(); // reserve the first side effect for the tx hash (inserted by the private kernel) const startSideEffectCounter = 1; diff --git a/yarn-project/simulator/src/client/unconstrained_execution.test.ts b/yarn-project/simulator/src/client/unconstrained_execution.test.ts index 99bb3e3842d2..c285da49d9ad 100644 --- a/yarn-project/simulator/src/client/unconstrained_execution.test.ts +++ b/yarn-project/simulator/src/client/unconstrained_execution.test.ts @@ -1,5 +1,5 @@ import { type AztecNode, type FunctionCall, Note } from '@aztec/circuit-types'; -import { CompleteAddress, Header } from '@aztec/circuits.js'; +import { BlockHeader, CompleteAddress } from '@aztec/circuits.js'; import { FunctionSelector, FunctionType, encodeArguments } from '@aztec/foundation/abi'; import { AztecAddress } from '@aztec/foundation/aztec-address'; import { Fr } from '@aztec/foundation/fields'; @@ -55,7 +55,7 @@ describe('Unconstrained Execution test suite', () => { oracle.syncTaggedLogs.mockResolvedValue(new Map()); oracle.processTaggedLogs.mockResolvedValue(); - oracle.getHeader.mockResolvedValue(Header.empty()); + oracle.getBlockHeader.mockResolvedValue(BlockHeader.empty()); oracle.getNotes.mockResolvedValue( notes.map((note, index) => ({ contractAddress, diff --git a/yarn-project/simulator/src/client/view_data_oracle.ts b/yarn-project/simulator/src/client/view_data_oracle.ts index bd5633f2103e..67af9e77df39 100644 --- a/yarn-project/simulator/src/client/view_data_oracle.ts +++ b/yarn-project/simulator/src/client/view_data_oracle.ts @@ -8,8 +8,8 @@ import { type PublicDataWitness, } from '@aztec/circuit-types'; import { + type BlockHeader, type ContractInstance, - type Header, type IndexedTaggingSecret, type KeyValidationRequest, } from '@aztec/circuits.js'; @@ -139,7 +139,7 @@ export class ViewDataOracle extends TypedOracle { * @param blockNumber - The number of a block of which to get the block header. * @returns Block extracted from a block with block number `blockNumber`. */ - public override async getHeader(blockNumber: number): Promise
{ + public override async getBlockHeader(blockNumber: number): Promise { const block = await this.db.getBlock(blockNumber); if (!block) { return undefined; diff --git a/yarn-project/simulator/src/providers/acvm_native.ts b/yarn-project/simulator/src/providers/acvm_native.ts index 3bf1bdf01572..27fe7c043707 100644 --- a/yarn-project/simulator/src/providers/acvm_native.ts +++ b/yarn-project/simulator/src/providers/acvm_native.ts @@ -5,7 +5,7 @@ import { type NoirCompiledCircuit } from '@aztec/types/noir'; import { type WitnessMap } from '@noir-lang/types'; import * as proc from 'child_process'; -import fs from 'fs/promises'; +import { promises as fs } from 'fs'; import { type SimulationProvider } from './simulation_provider.js'; diff --git a/yarn-project/simulator/src/providers/factory.ts b/yarn-project/simulator/src/providers/factory.ts index 73f7c70cd550..06d88a2ffe96 100644 --- a/yarn-project/simulator/src/providers/factory.ts +++ b/yarn-project/simulator/src/providers/factory.ts @@ -1,6 +1,6 @@ import { type DebugLogger, createDebugLogger } from '@aztec/foundation/log'; -import * as fs from 'fs/promises'; +import { promises as fs } from 'fs'; import { NativeACVMSimulator } from './acvm_native.js'; import { WASMSimulator } from './acvm_wasm.js'; diff --git a/yarn-project/simulator/src/public/dual_side_effect_trace.ts b/yarn-project/simulator/src/public/dual_side_effect_trace.ts index f6285e0e355c..1500ba12cf1e 100644 --- a/yarn-project/simulator/src/public/dual_side_effect_trace.ts +++ b/yarn-project/simulator/src/public/dual_side_effect_trace.ts @@ -140,9 +140,26 @@ export class DualSideEffectTrace implements PublicSideEffectTraceInterface { contractAddress: AztecAddress, exists: boolean, instance: SerializableContractInstance | undefined, + lowLeafPreimage: NullifierLeafPreimage | undefined, + lowLeafIndex: Fr | undefined, + lowLeafPath: Fr[] | undefined, ) { - this.innerCallTrace.traceGetContractInstance(contractAddress, exists, instance); - this.enqueuedCallTrace.traceGetContractInstance(contractAddress, exists, instance); + this.innerCallTrace.traceGetContractInstance( + contractAddress, + exists, + instance, + lowLeafPreimage, + lowLeafIndex, + lowLeafPath, + ); + this.enqueuedCallTrace.traceGetContractInstance( + contractAddress, + exists, + instance, + lowLeafPreimage, + lowLeafIndex, + lowLeafPath, + ); } public traceGetBytecode( @@ -151,9 +168,30 @@ export class DualSideEffectTrace implements PublicSideEffectTraceInterface { bytecode: Buffer, contractInstance: SerializableContractInstance | undefined, contractClass: ContractClassIdPreimage | undefined, + lowLeafPreimage: NullifierLeafPreimage | undefined, + lowLeafIndex: Fr | undefined, + lowLeafPath: Fr[] | undefined, ) { - this.innerCallTrace.traceGetBytecode(contractAddress, exists, bytecode, contractInstance, contractClass); - this.enqueuedCallTrace.traceGetBytecode(contractAddress, exists, bytecode, contractInstance, contractClass); + this.innerCallTrace.traceGetBytecode( + contractAddress, + exists, + bytecode, + contractInstance, + contractClass, + lowLeafPreimage, + lowLeafIndex, + lowLeafPath, + ); + this.enqueuedCallTrace.traceGetBytecode( + contractAddress, + exists, + bytecode, + contractInstance, + contractClass, + lowLeafPreimage, + lowLeafIndex, + lowLeafPath, + ); } /** diff --git a/yarn-project/simulator/src/public/enqueued_call_side_effect_trace.test.ts b/yarn-project/simulator/src/public/enqueued_call_side_effect_trace.test.ts index 6f84f4de2adb..60b73bc54393 100644 --- 
a/yarn-project/simulator/src/public/enqueued_call_side_effect_trace.test.ts +++ b/yarn-project/simulator/src/public/enqueued_call_side_effect_trace.test.ts @@ -6,6 +6,7 @@ import { AvmPublicDataReadTreeHint, AvmPublicDataWriteTreeHint, AztecAddress, + type ContractClassIdPreimage, EthAddress, L2ToL1Message, LogHash, @@ -31,6 +32,7 @@ import { SideEffectLimitReachedError } from './side_effect_errors.js'; describe('Enqueued-call Side Effect Trace', () => { const address = AztecAddress.random(); + const bytecode = Buffer.from('0xdeadbeef'); const utxo = Fr.random(); const leafIndex = Fr.random(); const lowLeafIndex = Fr.random(); @@ -59,7 +61,7 @@ describe('Enqueued-call Side Effect Trace', () => { expect(trace.getCounter()).toBe(startCounterPlus1); const expected = new AvmPublicDataReadTreeHint(leafPreimage, leafIndex, siblingPath); - expect(trace.getAvmCircuitHints().storageReadRequest.items).toEqual([expected]); + expect(trace.getAvmCircuitHints().publicDataReads.items).toEqual([expected]); }); it('Should trace storage writes', () => { @@ -84,14 +86,14 @@ describe('Enqueued-call Side Effect Trace', () => { const readHint = new AvmPublicDataReadTreeHint(lowLeafPreimage, lowLeafIndex, lowLeafSiblingPath); const expectedHint = new AvmPublicDataWriteTreeHint(readHint, newLeafPreimage, siblingPath); - expect(trace.getAvmCircuitHints().storageUpdateRequest.items).toEqual([expectedHint]); + expect(trace.getAvmCircuitHints().publicDataWrites.items).toEqual([expectedHint]); }); it('Should trace note hash checks', () => { const exists = true; trace.traceNoteHashCheck(address, utxo, leafIndex, exists, siblingPath); const expected = new AvmAppendTreeHint(leafIndex, utxo, siblingPath); - expect(trace.getAvmCircuitHints().noteHashReadRequest.items).toEqual([expected]); + expect(trace.getAvmCircuitHints().noteHashReads.items).toEqual([expected]); }); it('Should trace note hashes', () => { @@ -102,7 +104,7 @@ describe('Enqueued-call Side Effect Trace', () => { expect(trace.getSideEffects().noteHashes).toEqual(expected); const expectedHint = new AvmAppendTreeHint(leafIndex, utxo, siblingPath); - expect(trace.getAvmCircuitHints().noteHashWriteRequest.items).toEqual([expectedHint]); + expect(trace.getAvmCircuitHints().noteHashWrites.items).toEqual([expectedHint]); }); it('Should trace nullifier checks', () => { @@ -112,7 +114,7 @@ describe('Enqueued-call Side Effect Trace', () => { expect(trace.getCounter()).toBe(startCounterPlus1); const expected = new AvmNullifierReadTreeHint(lowLeafPreimage, leafIndex, siblingPath); - expect(trace.getAvmCircuitHints().nullifierReadRequest.items).toEqual([expected]); + expect(trace.getAvmCircuitHints().nullifierReads.items).toEqual([expected]); }); it('Should trace nullifiers', () => { @@ -125,14 +127,14 @@ describe('Enqueued-call Side Effect Trace', () => { const readHint = new AvmNullifierReadTreeHint(lowLeafPreimage, lowLeafIndex, lowLeafSiblingPath); const expectedHint = new AvmNullifierWriteTreeHint(readHint, siblingPath); - expect(trace.getAvmCircuitHints().nullifierWriteHints.items).toEqual([expectedHint]); + expect(trace.getAvmCircuitHints().nullifierWrites.items).toEqual([expectedHint]); }); it('Should trace L1ToL2 Message checks', () => { const exists = true; trace.traceL1ToL2MessageCheck(address, utxo, leafIndex, exists, siblingPath); const expected = new AvmAppendTreeHint(leafIndex, utxo, siblingPath); - expect(trace.getAvmCircuitHints().l1ToL2MessageReadRequest.items).toEqual([expected]); + 
expect(trace.getAvmCircuitHints().l1ToL2MessageReads.items).toEqual([expected]); }); it('Should trace new L2ToL1 messages', () => { @@ -159,18 +161,53 @@ describe('Enqueued-call Side Effect Trace', () => { it('Should trace get contract instance', () => { const instance = SerializableContractInstance.random(); const { version: _, ...instanceWithoutVersion } = instance; + const lowLeafPreimage = new NullifierLeafPreimage(/*siloedNullifier=*/ address.toField(), Fr.ZERO, 0n); const exists = true; - trace.traceGetContractInstance(address, exists, instance); + trace.traceGetContractInstance(address, exists, instance, lowLeafPreimage, lowLeafIndex, lowLeafSiblingPath); expect(trace.getCounter()).toBe(startCounterPlus1); + const membershipHint = new AvmNullifierReadTreeHint(lowLeafPreimage, lowLeafIndex, lowLeafSiblingPath); expect(trace.getAvmCircuitHints().contractInstances.items).toEqual([ { address, exists, ...instanceWithoutVersion, + membershipHint, }, ]); }); + + it('Should trace get bytecode', () => { + const instance = SerializableContractInstance.random(); + const contractClass: ContractClassIdPreimage = { + artifactHash: Fr.random(), + privateFunctionsRoot: Fr.random(), + publicBytecodeCommitment: Fr.random(), + }; + const { version: _, ...instanceWithoutVersion } = instance; + const lowLeafPreimage = new NullifierLeafPreimage(/*siloedNullifier=*/ address.toField(), Fr.ZERO, 0n); + const exists = true; + trace.traceGetBytecode( + address, + exists, + bytecode, + instance, + contractClass, + lowLeafPreimage, + lowLeafIndex, + lowLeafSiblingPath, + ); + + const membershipHint = new AvmNullifierReadTreeHint(lowLeafPreimage, lowLeafIndex, lowLeafSiblingPath); + expect(trace.getAvmCircuitHints().contractBytecodeHints.items).toEqual([ + { + bytecode, + contractInstanceHint: { address, exists, ...instanceWithoutVersion, membershipHint: { ...membershipHint } }, + contractClassHint: contractClass, + }, + ]); + }); + describe('Maximum accesses', () => { it('Should enforce maximum number of public storage writes', () => { for (let i = 0; i < MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX; i++) { @@ -286,9 +323,9 @@ describe('Enqueued-call Side Effect Trace', () => { testCounter++; nestedTrace.traceUnencryptedLog(address, log); testCounter++; - nestedTrace.traceGetContractInstance(address, /*exists=*/ true, contractInstance); + nestedTrace.traceGetContractInstance(address, /*exists=*/ true, contractInstance, lowLeafPreimage, Fr.ZERO, []); testCounter++; - nestedTrace.traceGetContractInstance(address, /*exists=*/ false, contractInstance); + nestedTrace.traceGetContractInstance(address, /*exists=*/ false, contractInstance, lowLeafPreimage, Fr.ZERO, []); testCounter++; trace.merge(nestedTrace, reverted); @@ -321,13 +358,13 @@ describe('Enqueued-call Side Effect Trace', () => { expect(parentHints.externalCalls.items).toEqual(childHints.externalCalls.items); expect(parentHints.contractInstances.items).toEqual(childHints.contractInstances.items); expect(parentHints.contractBytecodeHints.items).toEqual(childHints.contractBytecodeHints.items); - expect(parentHints.storageReadRequest.items).toEqual(childHints.storageReadRequest.items); - expect(parentHints.storageUpdateRequest.items).toEqual(childHints.storageUpdateRequest.items); - expect(parentHints.nullifierReadRequest.items).toEqual(childHints.nullifierReadRequest.items); - expect(parentHints.nullifierWriteHints.items).toEqual(childHints.nullifierWriteHints.items); - expect(parentHints.noteHashReadRequest.items).toEqual(childHints.noteHashReadRequest.items); - 
expect(parentHints.noteHashWriteRequest.items).toEqual(childHints.noteHashWriteRequest.items); - expect(parentHints.l1ToL2MessageReadRequest.items).toEqual(childHints.l1ToL2MessageReadRequest.items); + expect(parentHints.publicDataReads.items).toEqual(childHints.publicDataReads.items); + expect(parentHints.publicDataWrites.items).toEqual(childHints.publicDataWrites.items); + expect(parentHints.nullifierReads.items).toEqual(childHints.nullifierReads.items); + expect(parentHints.nullifierWrites.items).toEqual(childHints.nullifierWrites.items); + expect(parentHints.noteHashReads.items).toEqual(childHints.noteHashReads.items); + expect(parentHints.noteHashWrites.items).toEqual(childHints.noteHashWrites.items); + expect(parentHints.l1ToL2MessageReads.items).toEqual(childHints.l1ToL2MessageReads.items); }); }); }); diff --git a/yarn-project/simulator/src/public/enqueued_call_side_effect_trace.ts b/yarn-project/simulator/src/public/enqueued_call_side_effect_trace.ts index 84e85adcd640..d49ad8321d3c 100644 --- a/yarn-project/simulator/src/public/enqueued_call_side_effect_trace.ts +++ b/yarn-project/simulator/src/public/enqueued_call_side_effect_trace.ts @@ -179,15 +179,13 @@ export class PublicEnqueuedCallSideEffectTrace implements PublicSideEffectTraceI this.avmCircuitHints.contractInstances.items.push(...forkedTrace.avmCircuitHints.contractInstances.items); this.avmCircuitHints.contractBytecodeHints.items.push(...forkedTrace.avmCircuitHints.contractBytecodeHints.items); - this.avmCircuitHints.storageReadRequest.items.push(...forkedTrace.avmCircuitHints.storageReadRequest.items); - this.avmCircuitHints.storageUpdateRequest.items.push(...forkedTrace.avmCircuitHints.storageUpdateRequest.items); - this.avmCircuitHints.nullifierReadRequest.items.push(...forkedTrace.avmCircuitHints.nullifierReadRequest.items); - this.avmCircuitHints.nullifierWriteHints.items.push(...forkedTrace.avmCircuitHints.nullifierWriteHints.items); - this.avmCircuitHints.noteHashReadRequest.items.push(...forkedTrace.avmCircuitHints.noteHashReadRequest.items); - this.avmCircuitHints.noteHashWriteRequest.items.push(...forkedTrace.avmCircuitHints.noteHashWriteRequest.items); - this.avmCircuitHints.l1ToL2MessageReadRequest.items.push( - ...forkedTrace.avmCircuitHints.l1ToL2MessageReadRequest.items, - ); + this.avmCircuitHints.publicDataReads.items.push(...forkedTrace.avmCircuitHints.publicDataReads.items); + this.avmCircuitHints.publicDataWrites.items.push(...forkedTrace.avmCircuitHints.publicDataWrites.items); + this.avmCircuitHints.nullifierReads.items.push(...forkedTrace.avmCircuitHints.nullifierReads.items); + this.avmCircuitHints.nullifierWrites.items.push(...forkedTrace.avmCircuitHints.nullifierWrites.items); + this.avmCircuitHints.noteHashReads.items.push(...forkedTrace.avmCircuitHints.noteHashReads.items); + this.avmCircuitHints.noteHashWrites.items.push(...forkedTrace.avmCircuitHints.noteHashWrites.items); + this.avmCircuitHints.l1ToL2MessageReads.items.push(...forkedTrace.avmCircuitHints.l1ToL2MessageReads.items); } public getCounter() { @@ -211,7 +209,7 @@ export class PublicEnqueuedCallSideEffectTrace implements PublicSideEffectTraceI assert(leafPreimage.value.equals(value), 'Value mismatch when tracing in public data write'); } - this.avmCircuitHints.storageReadRequest.items.push(new AvmPublicDataReadTreeHint(leafPreimage, leafIndex, path)); + this.avmCircuitHints.publicDataReads.items.push(new AvmPublicDataReadTreeHint(leafPreimage, leafIndex, path)); this.log.debug(`SLOAD cnt: ${this.sideEffectCounter} val: ${value} 
slot: ${slot}`); this.incrementSideEffectCounter(); } @@ -245,7 +243,7 @@ export class PublicEnqueuedCallSideEffectTrace implements PublicSideEffectTraceI // New hinting const readHint = new AvmPublicDataReadTreeHint(lowLeafPreimage, lowLeafIndex, lowLeafPath); - this.avmCircuitHints.storageUpdateRequest.items.push( + this.avmCircuitHints.publicDataWrites.items.push( new AvmPublicDataWriteTreeHint(readHint, newLeafPreimage, insertionPath), ); @@ -264,7 +262,7 @@ export class PublicEnqueuedCallSideEffectTrace implements PublicSideEffectTraceI path: Fr[] = emptyNoteHashPath(), ) { // New Hinting - this.avmCircuitHints.noteHashReadRequest.items.push(new AvmAppendTreeHint(leafIndex, noteHash, path)); + this.avmCircuitHints.noteHashReads.items.push(new AvmAppendTreeHint(leafIndex, noteHash, path)); // NOTE: counter does not increment for note hash checks (because it doesn't rely on pending note hashes) } @@ -282,7 +280,7 @@ export class PublicEnqueuedCallSideEffectTrace implements PublicSideEffectTraceI //const siloedNoteHash = siloNoteHash(contractAddress, noteHash); this.noteHashes.push(new NoteHash(noteHash, this.sideEffectCounter).scope(contractAddress)); this.log.debug(`NEW_NOTE_HASH cnt: ${this.sideEffectCounter}`); - this.avmCircuitHints.noteHashWriteRequest.items.push(new AvmAppendTreeHint(leafIndex, noteHash, path)); + this.avmCircuitHints.noteHashWrites.items.push(new AvmAppendTreeHint(leafIndex, noteHash, path)); this.incrementSideEffectCounter(); } @@ -293,7 +291,7 @@ export class PublicEnqueuedCallSideEffectTrace implements PublicSideEffectTraceI lowLeafIndex: Fr = Fr.zero(), lowLeafPath: Fr[] = emptyNullifierPath(), ) { - this.avmCircuitHints.nullifierReadRequest.items.push( + this.avmCircuitHints.nullifierReads.items.push( new AvmNullifierReadTreeHint(lowLeafPreimage, lowLeafIndex, lowLeafPath), ); this.log.debug(`NULLIFIER_EXISTS cnt: ${this.sideEffectCounter}`); @@ -314,7 +312,7 @@ export class PublicEnqueuedCallSideEffectTrace implements PublicSideEffectTraceI this.nullifiers.push(new Nullifier(siloedNullifier, this.sideEffectCounter, /*noteHash=*/ Fr.ZERO)); const lowLeafReadHint = new AvmNullifierReadTreeHint(lowLeafPreimage, lowLeafIndex, lowLeafPath); - this.avmCircuitHints.nullifierWriteHints.items.push(new AvmNullifierWriteTreeHint(lowLeafReadHint, insertionPath)); + this.avmCircuitHints.nullifierWrites.items.push(new AvmNullifierWriteTreeHint(lowLeafReadHint, insertionPath)); this.log.debug(`NEW_NULLIFIER cnt: ${this.sideEffectCounter}`); this.incrementSideEffectCounter(); } @@ -327,7 +325,7 @@ export class PublicEnqueuedCallSideEffectTrace implements PublicSideEffectTraceI _exists: boolean, path: Fr[] = emptyL1ToL2MessagePath(), ) { - this.avmCircuitHints.l1ToL2MessageReadRequest.items.push(new AvmAppendTreeHint(msgLeafIndex, msgHash, path)); + this.avmCircuitHints.l1ToL2MessageReads.items.push(new AvmAppendTreeHint(msgLeafIndex, msgHash, path)); } public traceNewL2ToL1Message(contractAddress: AztecAddress, recipient: Fr, content: Fr) { @@ -367,7 +365,11 @@ export class PublicEnqueuedCallSideEffectTrace implements PublicSideEffectTraceI contractAddress: AztecAddress, exists: boolean, instance: SerializableContractInstance = SerializableContractInstance.default(), + lowLeafPreimage: NullifierLeafPreimage = NullifierLeafPreimage.empty(), + lowLeafIndex: Fr = Fr.zero(), + lowLeafPath: Fr[] = emptyNullifierPath(), ) { + const membershipHint = new AvmNullifierReadTreeHint(lowLeafPreimage, lowLeafIndex, lowLeafPath); this.avmCircuitHints.contractInstances.items.push( new 
AvmContractInstanceHint( contractAddress, @@ -377,6 +379,7 @@ export class PublicEnqueuedCallSideEffectTrace implements PublicSideEffectTraceI instance.contractClassId, instance.initializationHash, instance.publicKeys, + membershipHint, ), ); this.log.debug(`CONTRACT_INSTANCE cnt: ${this.sideEffectCounter}`); @@ -396,7 +399,11 @@ export class PublicEnqueuedCallSideEffectTrace implements PublicSideEffectTraceI privateFunctionsRoot: Fr.zero(), publicBytecodeCommitment: Fr.zero(), }, + lowLeafPreimage: NullifierLeafPreimage = NullifierLeafPreimage.empty(), + lowLeafIndex: Fr = Fr.zero(), + lowLeafPath: Fr[] = emptyNullifierPath(), ) { + const membershipHint = new AvmNullifierReadTreeHint(lowLeafPreimage, lowLeafIndex, lowLeafPath); const instance = new AvmContractInstanceHint( contractAddress, exists, @@ -405,6 +412,7 @@ export class PublicEnqueuedCallSideEffectTrace implements PublicSideEffectTraceI contractInstance.contractClassId, contractInstance.initializationHash, contractInstance.publicKeys, + membershipHint, ); // We need to deduplicate the contract instances based on addresses this.avmCircuitHints.contractBytecodeHints.items.push( diff --git a/yarn-project/simulator/src/public/fixtures/index.ts b/yarn-project/simulator/src/public/fixtures/index.ts index 512cbf93d30b..acbefc4be15b 100644 --- a/yarn-project/simulator/src/public/fixtures/index.ts +++ b/yarn-project/simulator/src/public/fixtures/index.ts @@ -1,16 +1,17 @@ -import { PublicExecutionRequest, Tx } from '@aztec/circuit-types'; +import { MerkleTreeId, PublicExecutionRequest, Tx } from '@aztec/circuit-types'; import { type AvmCircuitInputs, + BlockHeader, CallContext, type ContractClassPublic, type ContractInstanceWithAddress, DEFAULT_GAS_LIMIT, + DEPLOYER_CONTRACT_ADDRESS, FunctionSelector, Gas, GasFees, GasSettings, GlobalVariables, - Header, MAX_L2_GAS_PER_ENQUEUED_CALL, PartialPrivateTailPublicInputsForPublic, PrivateKernelTailCircuitPublicInputs, @@ -22,11 +23,12 @@ import { TxContext, computePublicBytecodeCommitment, } from '@aztec/circuits.js'; +import { siloNullifier } from '@aztec/circuits.js/hash'; import { makeContractClassPublic, makeContractInstanceFromClassId } from '@aztec/circuits.js/testing'; import { type ContractArtifact, type FunctionArtifact } from '@aztec/foundation/abi'; import { AztecAddress } from '@aztec/foundation/aztec-address'; import { Fr, Point } from '@aztec/foundation/fields'; -import { openTmpStore } from '@aztec/kv-store/utils'; +import { openTmpStore } from '@aztec/kv-store/lmdb'; import { AvmTestContractArtifact } from '@aztec/noir-contracts.js'; import { PublicTxSimulator, WorldStateDB } from '@aztec/simulator'; import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; @@ -34,14 +36,10 @@ import { MerkleTrees } from '@aztec/world-state'; import { strict as assert } from 'assert'; -/** - * If assertionErrString is set, we expect a (non exceptional halting) revert due to a failing assertion and - * we check that the revert reason error contains this string. However, the circuit must correctly prove the - * execution. 
- */ export async function simulateAvmTestContractGenerateCircuitInputs( functionName: string, calldata: Fr[] = [], + expectRevert: boolean = false, assertionErrString?: string, ): Promise<AvmCircuitInputs> { const sender = AztecAddress.random(); @@ -58,13 +56,23 @@ export async function simulateAvmTestContractGenerateCircuitInputs( const worldStateDB = new WorldStateDB(merkleTrees, contractDataSource); const contractInstance = contractDataSource.contractInstance; + const contractAddressNullifier = siloNullifier( + AztecAddress.fromNumber(DEPLOYER_CONTRACT_ADDRESS), + contractInstance.address.toField(), + ); + await merkleTrees.batchInsert(MerkleTreeId.NULLIFIER_TREE, [contractAddressNullifier.toBuffer()], 0); + // other contract address used by the bulk test's GETCONTRACTINSTANCE test + const otherContractAddressNullifier = siloNullifier( + AztecAddress.fromNumber(DEPLOYER_CONTRACT_ADDRESS), + contractDataSource.otherContractInstance.address.toField(), + ); + await merkleTrees.batchInsert(MerkleTreeId.NULLIFIER_TREE, [otherContractAddressNullifier.toBuffer()], 0); const simulator = new PublicTxSimulator( merkleTrees, worldStateDB, new NoopTelemetryClient(), globalVariables, - /*realAvmProving=*/ true, /*doMerkleOperations=*/ true, ); @@ -80,13 +88,15 @@ export async function simulateAvmTestContractGenerateCircuitInputs( const avmResult = await simulator.simulate(tx); - if (assertionErrString == undefined) { + if (!expectRevert) { expect(avmResult.revertCode.isOK()).toBe(true); } else { // Explicit revert when an assertion failed. expect(avmResult.revertCode.isOK()).toBe(false); expect(avmResult.revertReason).toBeDefined(); - expect(avmResult.revertReason?.getMessage()).toContain(assertionErrString); + if (assertionErrString !== undefined) { + expect(avmResult.revertReason?.getMessage()).toContain(assertionErrString); + } } const avmCircuitInputs: AvmCircuitInputs = avmResult.avmProvingRequest.inputs; @@ -117,7 +127,7 @@ export function createTxForPublicCall( const teardownGasLimits = isTeardown ?
gasLimits : Gas.empty(); const gasSettings = new GasSettings(gasLimits, teardownGasLimits, GasFees.empty()); const txContext = new TxContext(Fr.zero(), Fr.zero(), gasSettings); - const constantData = new TxConstantData(Header.empty(), txContext, Fr.zero(), Fr.zero()); + const constantData = new TxConstantData(BlockHeader.empty(), txContext, Fr.zero(), Fr.zero()); const txData = new PrivateKernelTailCircuitPublicInputs( constantData, @@ -134,7 +144,7 @@ export function createTxForPublicCall( return tx; } -class MockedAvmTestContractDataSource { +export class MockedAvmTestContractDataSource { private fnName = 'public_dispatch'; private bytecode: Buffer; public fnSelector: FunctionSelector; @@ -142,7 +152,7 @@ class MockedAvmTestContractDataSource { private contractClass: ContractClassPublic; public contractInstance: ContractInstanceWithAddress; private bytecodeCommitment: Fr; - private otherContractInstance: ContractInstanceWithAddress; + public otherContractInstance: ContractInstanceWithAddress; constructor() { this.bytecode = getAvmTestContractBytecode(this.fnName); @@ -152,6 +162,7 @@ class MockedAvmTestContractDataSource { this.contractInstance = makeContractInstanceFromClassId(this.contractClass.id); this.bytecodeCommitment = computePublicBytecodeCommitment(this.bytecode); // The values here should match those in `avm_simulator.test.ts` + // Used for GETCONTRACTINSTANCE test this.otherContractInstance = new SerializableContractInstance({ version: 1, salt: new Fr(0x123), @@ -164,7 +175,7 @@ class MockedAvmTestContractDataSource { new Point(new Fr(0x252627), new Fr(0x282930), false), new Point(new Fr(0x313233), new Fr(0x343536), false), ), - }).withAddress(this.contractInstance.address); + }).withAddress(AztecAddress.fromNumber(0x4444)); } getPublicFunction(_address: AztecAddress, _selector: FunctionSelector): Promise { diff --git a/yarn-project/simulator/src/public/public_processor.test.ts b/yarn-project/simulator/src/public/public_processor.test.ts index 7db6b283dc8a..2b2a0a27fbf1 100644 --- a/yarn-project/simulator/src/public/public_processor.test.ts +++ b/yarn-project/simulator/src/public/public_processor.test.ts @@ -12,11 +12,11 @@ import { AvmCircuitInputs, type AvmCircuitPublicInputs, AztecAddress, + BlockHeader, Fr, Gas, GasFees, GlobalVariables, - Header, PublicDataWrite, RevertCode, countAccumulatedItems, @@ -86,7 +86,7 @@ describe('public_processor', () => { processor = new PublicProcessor( db, globalVariables, - Header.empty(), + BlockHeader.empty(), worldStateDB, publicTxProcessor, new NoopTelemetryClient(), diff --git a/yarn-project/simulator/src/public/public_processor.ts b/yarn-project/simulator/src/public/public_processor.ts index d11ac645e592..7b1677e776ff 100644 --- a/yarn-project/simulator/src/public/public_processor.ts +++ b/yarn-project/simulator/src/public/public_processor.ts @@ -13,10 +13,10 @@ import { } from '@aztec/circuit-types'; import { type AztecAddress, + type BlockHeader, type ContractDataSource, Fr, type GlobalVariables, - type Header, MAX_NOTE_HASHES_PER_TX, MAX_NULLIFIERS_PER_TX, NULLIFIER_SUBTREE_HEIGHT, @@ -47,13 +47,19 @@ export class PublicProcessorFactory { */ public create( merkleTree: MerkleTreeWriteOperations, - maybeHistoricalHeader: Header | undefined, + maybeHistoricalHeader: BlockHeader | undefined, globalVariables: GlobalVariables, ): PublicProcessor { const historicalHeader = maybeHistoricalHeader ?? 
merkleTree.getInitialHeader(); const worldStateDB = new WorldStateDB(merkleTree, this.contractDataSource); - const publicTxSimulator = new PublicTxSimulator(merkleTree, worldStateDB, this.telemetryClient, globalVariables); + const publicTxSimulator = new PublicTxSimulator( + merkleTree, + worldStateDB, + this.telemetryClient, + globalVariables, + /*doMerkleOperations=*/ true, + ); return new PublicProcessor( merkleTree, @@ -75,7 +81,7 @@ export class PublicProcessor { constructor( protected db: MerkleTreeWriteOperations, protected globalVariables: GlobalVariables, - protected historicalHeader: Header, + protected historicalHeader: BlockHeader, protected worldStateDB: WorldStateDB, protected publicTxSimulator: PublicTxSimulator, telemetryClient: TelemetryClient, diff --git a/yarn-project/simulator/src/public/public_tx_context.ts b/yarn-project/simulator/src/public/public_tx_context.ts index f6fd8e7c9e19..94057597a186 100644 --- a/yarn-project/simulator/src/public/public_tx_context.ts +++ b/yarn-project/simulator/src/public/public_tx_context.ts @@ -2,6 +2,7 @@ import { type AvmProvingRequest, MerkleTreeId, type MerkleTreeReadOperations, + ProvingRequestType, type PublicExecutionRequest, type SimulationError, type Tx, @@ -9,16 +10,19 @@ import { TxHash, } from '@aztec/circuit-types'; import { + AppendOnlyTreeSnapshot, + AvmCircuitInputs, type AvmCircuitPublicInputs, Fr, Gas, type GasSettings, type GlobalVariables, - type Header, type PrivateToPublicAccumulatedData, type PublicCallRequest, + PublicCircuitPublicInputs, RevertCode, type StateReference, + TreeSnapshots, countAccumulatedItems, } from '@aztec/circuits.js'; import { type DebugLogger, createDebugLogger } from '@aztec/foundation/log'; @@ -26,13 +30,12 @@ import { type DebugLogger, createDebugLogger } from '@aztec/foundation/log'; import { strict as assert } from 'assert'; import { inspect } from 'util'; -import { type AvmFinalizedCallResult } from '../avm/avm_contract_call_result.js'; import { AvmPersistableStateManager } from '../avm/index.js'; import { DualSideEffectTrace } from './dual_side_effect_trace.js'; import { PublicEnqueuedCallSideEffectTrace, SideEffectArrayLengths } from './enqueued_call_side_effect_trace.js'; import { type WorldStateDB } from './public_db_sources.js'; import { PublicSideEffectTrace } from './side_effect_trace.js'; -import { generateAvmCircuitPublicInputs, generateAvmProvingRequest } from './transitional_adapters.js'; +import { generateAvmCircuitPublicInputs } from './transitional_adapters.js'; import { getCallRequestsByPhase, getExecutionRequestsByPhase } from './utils.js'; /** @@ -58,7 +61,6 @@ export class PublicTxContext { constructor( public readonly state: PhaseStateManager, private readonly globalVariables: GlobalVariables, - private readonly historicalHeader: Header, // FIXME(dbanks12): remove private readonly startStateReference: StateReference, private readonly startGasUsed: Gas, private readonly gasSettings: GasSettings, @@ -89,7 +91,7 @@ export class PublicTxContext { const previousAccumulatedDataArrayLengths = new SideEffectArrayLengths( /*publicDataWrites*/ 0, countAccumulatedItems(nonRevertibleAccumulatedDataFromPrivate.noteHashes), - countAccumulatedItems(nonRevertibleAccumulatedDataFromPrivate.nullifiers), + /*nullifiers=*/ 0, countAccumulatedItems(nonRevertibleAccumulatedDataFromPrivate.l2ToL1Msgs), /*unencryptedLogsHashes*/ 0, ); @@ -105,7 +107,6 @@ export class PublicTxContext { return new PublicTxContext( new PhaseStateManager(txStateManager), globalVariables, - 
tx.data.constants.historicalHeader, await db.getStateReference(), tx.data.gasUsed, tx.data.constants.txContext.gasSettings, @@ -301,11 +302,24 @@ */ private generateAvmCircuitPublicInputs(endStateReference: StateReference): AvmCircuitPublicInputs { assert(this.halted, 'Can only get AvmCircuitPublicInputs after tx execution ends'); - // TODO(dbanks12): use the state roots from ephemeral trees - endStateReference.partial.nullifierTree.root = this.state - .getActiveStateManager() - .merkleTrees.treeMap.get(MerkleTreeId.NULLIFIER_TREE)! - .getRoot(); + const ephemeralTrees = this.state.getActiveStateManager().merkleTrees.treeMap; + + const getAppendSnapshot = (id: MerkleTreeId) => { + const tree = ephemeralTrees.get(id)!; + return new AppendOnlyTreeSnapshot(tree.getRoot(), Number(tree.leafCount)); + }; + + const noteHashTree = getAppendSnapshot(MerkleTreeId.NOTE_HASH_TREE); + const nullifierTree = getAppendSnapshot(MerkleTreeId.NULLIFIER_TREE); + const publicDataTree = getAppendSnapshot(MerkleTreeId.PUBLIC_DATA_TREE); + + const endTreeSnapshots = new TreeSnapshots( + endStateReference.l1ToL2MessageTree, + noteHashTree, + nullifierTree, + publicDataTree, + ); + return generateAvmCircuitPublicInputs( this.trace, this.globalVariables, @@ -317,7 +331,7 @@ this.teardownCallRequests, this.nonRevertibleAccumulatedDataFromPrivate, this.revertibleAccumulatedDataFromPrivate, - endStateReference, + endTreeSnapshots, /*endGasUsed=*/ this.gasUsed, this.getTransactionFeeUnsafe(), this.revertCode, @@ -328,38 +342,17 @@ * Generate the proving request for the AVM circuit. */ generateProvingRequest(endStateReference: StateReference): AvmProvingRequest { - // TODO(dbanks12): Once we actually have tx-level proving, this will generate the entire - // proving request for the first time - this.avmProvingRequest!.inputs.output = this.generateAvmCircuitPublicInputs(endStateReference); - return this.avmProvingRequest!; - } - - // TODO(dbanks12): remove once AVM proves entire public tx - updateProvingRequest( - real: boolean, - phase: TxExecutionPhase, - fnName: string, - stateManager: AvmPersistableStateManager, - executionRequest: PublicExecutionRequest, - result: AvmFinalizedCallResult, - allocatedGas: Gas, - ) { - if (this.avmProvingRequest === undefined) { - // Propagate the very first avmProvingRequest of the tx for now. - // Eventually this will be the proof for the entire public portion of the transaction. - this.avmProvingRequest = generateAvmProvingRequest( - real, - fnName, - stateManager, - this.historicalHeader, - this.globalVariables, - executionRequest, - // TODO(dbanks12): do we need this return type unless we are doing an isolated call?
- stateManager.trace.toPublicEnqueuedCallExecutionResult(result), - allocatedGas, - this.getTransactionFee(phase), - ); - } + const hints = this.trace.getAvmCircuitHints(); + return { + type: ProvingRequestType.PUBLIC_VM, + inputs: new AvmCircuitInputs( + 'public_dispatch', + [], + PublicCircuitPublicInputs.empty(), + hints, + this.generateAvmCircuitPublicInputs(endStateReference), + ), + }; } } diff --git a/yarn-project/simulator/src/public/public_tx_simulator.test.ts b/yarn-project/simulator/src/public/public_tx_simulator.test.ts index c17d1d03bf5c..871368471460 100644 --- a/yarn-project/simulator/src/public/public_tx_simulator.test.ts +++ b/yarn-project/simulator/src/public/public_tx_simulator.test.ts @@ -7,12 +7,12 @@ import { } from '@aztec/circuit-types'; import { AppendOnlyTreeSnapshot, + BlockHeader, Fr, Gas, GasFees, GasSettings, GlobalVariables, - Header, NULLIFIER_SUBTREE_HEIGHT, PUBLIC_DATA_TREE_HEIGHT, PartialStateReference, @@ -24,7 +24,7 @@ import { import { computePublicDataTreeLeafSlot } from '@aztec/circuits.js/hash'; import { fr } from '@aztec/circuits.js/testing'; import { type AztecKVStore } from '@aztec/kv-store'; -import { openTmpStore } from '@aztec/kv-store/utils'; +import { openTmpStore } from '@aztec/kv-store/lmdb'; import { type AppendOnlyTree, Poseidon, StandardTree, newTree } from '@aztec/merkle-tree'; import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; import { MerkleTrees } from '@aztec/world-state'; @@ -194,7 +194,7 @@ describe('public_tx_simulator', () => { Fr.fromBuffer(publicDataTree.getRoot(true)), Number(publicDataTree.getNumLeaves(true)), ); - const header = Header.empty(); + const header = BlockHeader.empty(); const stateReference = new StateReference( header.state.l1ToL2MessageTree, new PartialStateReference(header.state.partial.noteHashTree, header.state.partial.nullifierTree, snap), @@ -209,7 +209,6 @@ describe('public_tx_simulator', () => { worldStateDB, new NoopTelemetryClient(), GlobalVariables.from({ ...GlobalVariables.empty(), gasFees }), - /*realAvmProvingRequest=*/ false, /*doMerkleOperations=*/ true, ); diff --git a/yarn-project/simulator/src/public/public_tx_simulator.ts b/yarn-project/simulator/src/public/public_tx_simulator.ts index 7cf250cc1d58..3fd7afd59056 100644 --- a/yarn-project/simulator/src/public/public_tx_simulator.ts +++ b/yarn-project/simulator/src/public/public_tx_simulator.ts @@ -60,7 +60,6 @@ export class PublicTxSimulator { private worldStateDB: WorldStateDB, telemetryClient: TelemetryClient, private globalVariables: GlobalVariables, - private realAvmProvingRequests: boolean = true, private doMerkleOperations: boolean = false, ) { this.log = createDebugLogger(`aztec:public_tx_simulator`); @@ -290,17 +289,6 @@ export class PublicTxSimulator { `[AVM] Enqueued public call consumed ${gasUsed.l2Gas} L2 gas ending with ${result.gasLeft.l2Gas} L2 gas left.`, ); - // TODO(dbanks12): remove once AVM proves entire public tx - context.updateProvingRequest( - this.realAvmProvingRequests, - phase, - fnName, - stateManager, - executionRequest, - result, - allocatedGas, - ); - stateManager.traceEnqueuedCall(callRequest, executionRequest.args, result.reverted); if (result.reverted) { diff --git a/yarn-project/simulator/src/public/side_effect_trace.test.ts b/yarn-project/simulator/src/public/side_effect_trace.test.ts index 7d7e024e967c..528c7dac1a46 100644 --- a/yarn-project/simulator/src/public/side_effect_trace.test.ts +++ b/yarn-project/simulator/src/public/side_effect_trace.test.ts @@ -230,23 +230,6 @@ describe('Side 
Effect Trace', () => { ]); }); - it('Should trace get contract instance', () => { - const instance = SerializableContractInstance.random(); - const { version: _, ...instanceWithoutVersion } = instance; - const exists = true; - trace.traceGetContractInstance(address, exists, instance); - expect(trace.getCounter()).toBe(startCounterPlus1); - - const pxResult = toPxResult(trace); - expect(pxResult.avmCircuitHints.contractInstances.items).toEqual([ - { - // hint omits "version" - address, - exists, - ...instanceWithoutVersion, - }, - ]); - }); describe('Maximum accesses', () => { it('Should enforce maximum number of public storage reads', () => { for (let i = 0; i < MAX_PUBLIC_DATA_READS_PER_TX; i++) { diff --git a/yarn-project/simulator/src/public/side_effect_trace.ts b/yarn-project/simulator/src/public/side_effect_trace.ts index 474e3ff155dd..bb7e48791cda 100644 --- a/yarn-project/simulator/src/public/side_effect_trace.ts +++ b/yarn-project/simulator/src/public/side_effect_trace.ts @@ -138,7 +138,7 @@ export class PublicSideEffectTrace implements PublicSideEffectTraceInterface { ); // New hinting - this.avmCircuitHints.storageReadRequest.items.push(new AvmPublicDataReadTreeHint(leafPreimage, leafIndex, path)); + this.avmCircuitHints.publicDataReads.items.push(new AvmPublicDataReadTreeHint(leafPreimage, leafIndex, path)); this.log.debug(`SLOAD cnt: ${this.sideEffectCounter} val: ${value} slot: ${slot}`); this.incrementSideEffectCounter(); @@ -168,7 +168,7 @@ export class PublicSideEffectTrace implements PublicSideEffectTraceInterface { // New hinting const readHint = new AvmPublicDataReadTreeHint(lowLeafPreimage, lowLeafIndex, lowLeafPath); - this.avmCircuitHints.storageUpdateRequest.items.push( + this.avmCircuitHints.publicDataWrites.items.push( new AvmPublicDataWriteTreeHint(readHint, newLeafPreimage, insertionPath), ); this.log.debug(`SSTORE cnt: ${this.sideEffectCounter} val: ${value} slot: ${slot}`); @@ -193,7 +193,7 @@ export class PublicSideEffectTrace implements PublicSideEffectTraceInterface { new AvmKeyValueHint(/*key=*/ new Fr(leafIndex), /*value=*/ exists ? 
Fr.ONE : Fr.ZERO), ); // New Hinting - this.avmCircuitHints.noteHashReadRequest.items.push(new AvmAppendTreeHint(leafIndex, noteHash, path)); + this.avmCircuitHints.noteHashReads.items.push(new AvmAppendTreeHint(leafIndex, noteHash, path)); // NOTE: counter does not increment for note hash checks (because it doesn't rely on pending note hashes) } @@ -210,7 +210,7 @@ export class PublicSideEffectTrace implements PublicSideEffectTraceInterface { this.log.debug(`NEW_NOTE_HASH cnt: ${this.sideEffectCounter}`); // New Hinting - this.avmCircuitHints.noteHashWriteRequest.items.push(new AvmAppendTreeHint(leafIndex, noteHash, path)); + this.avmCircuitHints.noteHashWrites.items.push(new AvmAppendTreeHint(leafIndex, noteHash, path)); this.incrementSideEffectCounter(); } @@ -237,7 +237,7 @@ export class PublicSideEffectTrace implements PublicSideEffectTraceInterface { ); // New Hints - this.avmCircuitHints.nullifierReadRequest.items.push( + this.avmCircuitHints.nullifierReads.items.push( new AvmNullifierReadTreeHint(lowLeafPreimage, lowLeafIndex, lowLeafPath), ); this.log.debug(`NULLIFIER_EXISTS cnt: ${this.sideEffectCounter}`); @@ -259,7 +259,7 @@ export class PublicSideEffectTrace implements PublicSideEffectTraceInterface { this.nullifiers.push(new Nullifier(siloedNullifier, this.sideEffectCounter, /*noteHash=*/ Fr.ZERO)); // New hinting const lowLeafReadHint = new AvmNullifierReadTreeHint(lowLeafPreimage, lowLeafIndex, lowLeafPath); - this.avmCircuitHints.nullifierWriteHints.items.push(new AvmNullifierWriteTreeHint(lowLeafReadHint, insertionPath)); + this.avmCircuitHints.nullifierWrites.items.push(new AvmNullifierWriteTreeHint(lowLeafReadHint, insertionPath)); this.log.debug(`NEW_NULLIFIER cnt: ${this.sideEffectCounter}`); this.incrementSideEffectCounter(); } @@ -282,7 +282,7 @@ export class PublicSideEffectTrace implements PublicSideEffectTraceInterface { ); // New Hinting - this.avmCircuitHints.l1ToL2MessageReadRequest.items.push(new AvmAppendTreeHint(msgLeafIndex, msgHash, path)); + this.avmCircuitHints.l1ToL2MessageReads.items.push(new AvmAppendTreeHint(msgLeafIndex, msgHash, path)); // NOTE: counter does not increment for l1tol2 message checks (because it doesn't rely on pending messages) } @@ -315,6 +315,9 @@ export class PublicSideEffectTrace implements PublicSideEffectTraceInterface { contractAddress: AztecAddress, exists: boolean, instance: SerializableContractInstance = SerializableContractInstance.default(), + _lowLeafPreimage: NullifierLeafPreimage = NullifierLeafPreimage.empty(), + _lowLeafIndex: Fr = Fr.zero(), + _lowLeafPath: Fr[] = emptyNullifierPath(), ) { this.enforceLimitOnNullifierChecks('(contract address nullifier from GETCONTRACTINSTANCE)'); @@ -347,6 +350,9 @@ export class PublicSideEffectTrace implements PublicSideEffectTraceInterface { privateFunctionsRoot: Fr.zero(), publicBytecodeCommitment: Fr.zero(), }, + _lowLeafPreimage: NullifierLeafPreimage = NullifierLeafPreimage.empty(), + _lowLeafIndex: Fr = Fr.zero(), + _lowLeafPath: Fr[] = emptyNullifierPath(), ) { const instance = new AvmContractInstanceHint( contractAddress, diff --git a/yarn-project/simulator/src/public/side_effect_trace_interface.ts b/yarn-project/simulator/src/public/side_effect_trace_interface.ts index 06a1c6eb5637..eaf2382cefd0 100644 --- a/yarn-project/simulator/src/public/side_effect_trace_interface.ts +++ b/yarn-project/simulator/src/public/side_effect_trace_interface.ts @@ -66,6 +66,9 @@ export interface PublicSideEffectTraceInterface { contractAddress: AztecAddress, exists: boolean, instance?: 
SerializableContractInstance, + lowLeafPreimage?: NullifierLeafPreimage, + lowLeafIndex?: Fr, + lowLeafPath?: Fr[], ): void; traceGetBytecode( contractAddress: AztecAddress, @@ -73,6 +76,9 @@ export interface PublicSideEffectTraceInterface { bytecode?: Buffer, contractInstance?: SerializableContractInstance, contractClass?: ContractClassIdPreimage, + lowLeafPreimage?: NullifierLeafPreimage, + lowLeafIndex?: Fr, + lowLeafPath?: Fr[], ): void; traceNestedCall( /** The trace of the nested call. */ diff --git a/yarn-project/simulator/src/public/transitional_adapters.ts b/yarn-project/simulator/src/public/transitional_adapters.ts index 5bb11a5c7bb2..29b9d3e284e2 100644 --- a/yarn-project/simulator/src/public/transitional_adapters.ts +++ b/yarn-project/simulator/src/public/transitional_adapters.ts @@ -1,57 +1,28 @@ -import { type AvmProvingRequest, ProvingRequestType, type PublicExecutionRequest } from '@aztec/circuit-types'; import { - AvmCircuitInputs, - AvmCircuitPublicInputs, - AztecAddress, - ContractStorageRead, - ContractStorageUpdateRequest, - Fr, - Gas, + type AvmCircuitPublicInputs, + type Fr, + type Gas, type GasSettings, type GlobalVariables, - type Header, - L2ToL1Message, - LogHash, - MAX_ENQUEUED_CALLS_PER_CALL, - MAX_L1_TO_L2_MSG_READ_REQUESTS_PER_CALL, - MAX_L2_TO_L1_MSGS_PER_CALL, MAX_L2_TO_L1_MSGS_PER_TX, - MAX_NOTE_HASHES_PER_CALL, MAX_NOTE_HASHES_PER_TX, - MAX_NOTE_HASH_READ_REQUESTS_PER_CALL, - MAX_NULLIFIERS_PER_CALL, - MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_CALL, - MAX_NULLIFIER_READ_REQUESTS_PER_CALL, - MAX_PUBLIC_DATA_READS_PER_CALL, - MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL, MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, - MAX_UNENCRYPTED_LOGS_PER_CALL, - NoteHash, - Nullifier, PrivateToAvmAccumulatedData, PrivateToAvmAccumulatedDataArrayLengths, type PrivateToPublicAccumulatedData, PublicCallRequest, - PublicCircuitPublicInputs, PublicDataWrite, - PublicInnerCallRequest, - ReadRequest, - RevertCode, + type RevertCode, type StateReference, - TreeLeafReadRequest, TreeSnapshots, countAccumulatedItems, mergeAccumulatedData, } from '@aztec/circuits.js'; -import { computeNoteHashNonce, computeUniqueNoteHash, computeVarArgsHash, siloNoteHash } from '@aztec/circuits.js/hash'; +import { computeNoteHashNonce, computeUniqueNoteHash, siloNoteHash } from '@aztec/circuits.js/hash'; import { padArrayEnd } from '@aztec/foundation/collection'; import { assertLength } from '@aztec/foundation/serialize'; -import { AvmFinalizedCallResult } from '../avm/avm_contract_call_result.js'; -import { AvmExecutionEnvironment } from '../avm/avm_execution_environment.js'; -import { type AvmPersistableStateManager } from '../avm/journal/journal.js'; import { type PublicEnqueuedCallSideEffectTrace } from './enqueued_call_side_effect_trace.js'; -import { type EnqueuedPublicCallExecutionResult, type PublicFunctionCallResult } from './execution.js'; export function generateAvmCircuitPublicInputs( trace: PublicEnqueuedCallSideEffectTrace, @@ -64,7 +35,7 @@ export function generateAvmCircuitPublicInputs( teardownCallRequests: PublicCallRequest[], nonRevertibleAccumulatedDataFromPrivate: PrivateToPublicAccumulatedData, revertibleAccumulatedDataFromPrivate: PrivateToPublicAccumulatedData, - endStateReference: StateReference, + endTreeSnapshots: TreeSnapshots, endGasUsed: Gas, transactionFee: Fr, revertCode: RevertCode, @@ -75,12 +46,6 @@ export function generateAvmCircuitPublicInputs( startStateReference.partial.nullifierTree, startStateReference.partial.publicDataTree, ); - const endTreeSnapshots = new 
TreeSnapshots( - endStateReference.l1ToL2MessageTree, - endStateReference.partial.noteHashTree, - endStateReference.partial.nullifierTree, - endStateReference.partial.publicDataTree, - ); const avmCircuitPublicInputs = trace.toAvmCircuitPublicInputs( globalVariables, @@ -200,155 +165,3 @@ export function generateAvmCircuitPublicInputs( //console.log(`AvmCircuitPublicInputs:\n${inspect(avmCircuitPublicInputs)}`); return avmCircuitPublicInputs; } - -export function generateAvmProvingRequest( - real: boolean, - fnName: string, - stateManager: AvmPersistableStateManager, - historicalHeader: Header, - globalVariables: GlobalVariables, - executionRequest: PublicExecutionRequest, - result: EnqueuedPublicCallExecutionResult, - allocatedGas: Gas, - transactionFee: Fr, -): AvmProvingRequest { - const avmExecutionEnv = new AvmExecutionEnvironment( - executionRequest.callContext.contractAddress, - executionRequest.callContext.msgSender, - executionRequest.callContext.functionSelector, - /*contractCallDepth=*/ Fr.zero(), - transactionFee, - globalVariables, - executionRequest.callContext.isStaticCall, - executionRequest.args, - ); - - const avmCallResult = new AvmFinalizedCallResult(result.reverted, result.returnValues, result.endGasLeft); - - // Generate an AVM proving request - let avmProvingRequest: AvmProvingRequest; - if (real) { - const deprecatedFunctionCallResult = stateManager.trace.toPublicFunctionCallResult( - avmExecutionEnv, - /*startGasLeft=*/ allocatedGas, - Buffer.alloc(0), - avmCallResult, - fnName, - ); - const publicInputs = getPublicCircuitPublicInputs(historicalHeader, globalVariables, deprecatedFunctionCallResult); - avmProvingRequest = makeAvmProvingRequest(publicInputs, deprecatedFunctionCallResult); - } else { - avmProvingRequest = emptyAvmProvingRequest(); - } - return avmProvingRequest; -} - -function emptyAvmProvingRequest(): AvmProvingRequest { - return { - type: ProvingRequestType.PUBLIC_VM, - inputs: AvmCircuitInputs.empty(), - }; -} -function makeAvmProvingRequest(inputs: PublicCircuitPublicInputs, result: PublicFunctionCallResult): AvmProvingRequest { - return { - type: ProvingRequestType.PUBLIC_VM, - inputs: new AvmCircuitInputs( - result.functionName, - result.calldata, - inputs, - result.avmCircuitHints, - AvmCircuitPublicInputs.empty(), - ), - }; -} - -function getPublicCircuitPublicInputs( - historicalHeader: Header, - globalVariables: GlobalVariables, - result: PublicFunctionCallResult, -) { - const header = historicalHeader.clone(); // don't modify the original - header.state.partial.publicDataTree.root = Fr.zero(); // AVM doesn't check this yet - - return PublicCircuitPublicInputs.from({ - callContext: result.executionRequest.callContext, - proverAddress: AztecAddress.ZERO, - argsHash: computeVarArgsHash(result.executionRequest.args), - noteHashes: padArrayEnd( - result.noteHashes, - NoteHash.empty(), - MAX_NOTE_HASHES_PER_CALL, - `Too many note hashes. Got ${result.noteHashes.length} with max being ${MAX_NOTE_HASHES_PER_CALL}`, - ), - nullifiers: padArrayEnd( - result.nullifiers, - Nullifier.empty(), - MAX_NULLIFIERS_PER_CALL, - `Too many nullifiers. Got ${result.nullifiers.length} with max being ${MAX_NULLIFIERS_PER_CALL}`, - ), - l2ToL1Msgs: padArrayEnd( - result.l2ToL1Messages, - L2ToL1Message.empty(), - MAX_L2_TO_L1_MSGS_PER_CALL, - `Too many L2 to L1 messages. 
Got ${result.l2ToL1Messages.length} with max being ${MAX_L2_TO_L1_MSGS_PER_CALL}`, - ), - startSideEffectCounter: result.startSideEffectCounter, - endSideEffectCounter: result.endSideEffectCounter, - returnsHash: computeVarArgsHash(result.returnValues), - noteHashReadRequests: padArrayEnd( - result.noteHashReadRequests, - TreeLeafReadRequest.empty(), - MAX_NOTE_HASH_READ_REQUESTS_PER_CALL, - `Too many note hash read requests. Got ${result.noteHashReadRequests.length} with max being ${MAX_NOTE_HASH_READ_REQUESTS_PER_CALL}`, - ), - nullifierReadRequests: padArrayEnd( - result.nullifierReadRequests, - ReadRequest.empty(), - MAX_NULLIFIER_READ_REQUESTS_PER_CALL, - `Too many nullifier read requests. Got ${result.nullifierReadRequests.length} with max being ${MAX_NULLIFIER_READ_REQUESTS_PER_CALL}`, - ), - nullifierNonExistentReadRequests: padArrayEnd( - result.nullifierNonExistentReadRequests, - ReadRequest.empty(), - MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_CALL, - `Too many nullifier non-existent read requests. Got ${result.nullifierNonExistentReadRequests.length} with max being ${MAX_NULLIFIER_NON_EXISTENT_READ_REQUESTS_PER_CALL}`, - ), - l1ToL2MsgReadRequests: padArrayEnd( - result.l1ToL2MsgReadRequests, - TreeLeafReadRequest.empty(), - MAX_L1_TO_L2_MSG_READ_REQUESTS_PER_CALL, - `Too many L1 to L2 message read requests. Got ${result.l1ToL2MsgReadRequests.length} with max being ${MAX_L1_TO_L2_MSG_READ_REQUESTS_PER_CALL}`, - ), - contractStorageReads: padArrayEnd( - result.contractStorageReads, - ContractStorageRead.empty(), - MAX_PUBLIC_DATA_READS_PER_CALL, - `Too many public data reads. Got ${result.contractStorageReads.length} with max being ${MAX_PUBLIC_DATA_READS_PER_CALL}`, - ), - contractStorageUpdateRequests: padArrayEnd( - result.contractStorageUpdateRequests, - ContractStorageUpdateRequest.empty(), - MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL, - `Too many public data update requests. Got ${result.contractStorageUpdateRequests.length} with max being ${MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL}`, - ), - publicCallRequests: padArrayEnd( - result.publicCallRequests, - PublicInnerCallRequest.empty(), - MAX_ENQUEUED_CALLS_PER_CALL, - `Too many public call requests. Got ${result.publicCallRequests.length} with max being ${MAX_ENQUEUED_CALLS_PER_CALL}`, - ), - unencryptedLogsHashes: padArrayEnd( - result.unencryptedLogsHashes, - LogHash.empty(), - MAX_UNENCRYPTED_LOGS_PER_CALL, - `Too many unencrypted logs. Got ${result.unencryptedLogsHashes.length} with max being ${MAX_UNENCRYPTED_LOGS_PER_CALL}`, - ), - historicalHeader: header, - globalVariables: globalVariables, - startGasLeft: Gas.from(result.startGasLeft), - endGasLeft: Gas.from(result.endGasLeft), - transactionFee: result.transactionFee, - // TODO(@just-mitch): need better mapping from simulator to revert code. - revertCode: result.reverted ? 
RevertCode.APP_LOGIC_REVERTED : RevertCode.OK, - }); -} diff --git a/yarn-project/telemetry-client/package.json b/yarn-project/telemetry-client/package.json index fdd9d252fafb..52702de6db9b 100644 --- a/yarn-project/telemetry-client/package.json +++ b/yarn-project/telemetry-client/package.json @@ -7,7 +7,8 @@ "exports": { ".": "./dest/index.js", "./start": "./dest/start.js", - "./noop": "./dest/noop.js" + "./noop": "./dest/noop.js", + "./otel-pino-stream": "./dest/vendor/otel-pino-stream.js" }, "scripts": { "build": "yarn clean && tsc -b", @@ -28,21 +29,20 @@ "dependencies": { "@aztec/foundation": "workspace:^", "@opentelemetry/api": "^1.9.0", - "@opentelemetry/api-logs": "^0.54.0", - "@opentelemetry/exporter-logs-otlp-http": "^0.54.0", - "@opentelemetry/exporter-metrics-otlp-http": "^0.52.0", - "@opentelemetry/exporter-trace-otlp-http": "^0.54.0", - "@opentelemetry/host-metrics": "^0.35.2", - "@opentelemetry/otlp-exporter-base": "^0.54.0", - "@opentelemetry/resource-detector-aws": "^1.5.2", - "@opentelemetry/resources": "^1.25.0", - "@opentelemetry/sdk-logs": "^0.54.0", - "@opentelemetry/sdk-metrics": "^1.25.0", - "@opentelemetry/sdk-trace-node": "^1.25.0", - "@opentelemetry/semantic-conventions": "^1.25.0", - "@opentelemetry/winston-transport": "^0.7.0", - "prom-client": "^15.1.3", - "winston": "^3.10.0" + "@opentelemetry/api-logs": "^0.55.0", + "@opentelemetry/core": "^1.28.0", + "@opentelemetry/exporter-logs-otlp-http": "^0.55.0", + "@opentelemetry/exporter-metrics-otlp-http": "^0.55.0", + "@opentelemetry/exporter-trace-otlp-http": "^0.55.0", + "@opentelemetry/host-metrics": "^0.35.4", + "@opentelemetry/otlp-exporter-base": "^0.55.0", + "@opentelemetry/resource-detector-aws": "^1.8.0", + "@opentelemetry/resources": "^1.28.0", + "@opentelemetry/sdk-logs": "^0.55.0", + "@opentelemetry/sdk-metrics": "^1.28.0", + "@opentelemetry/sdk-trace-node": "^1.28.0", + "@opentelemetry/semantic-conventions": "^1.28.0", + "prom-client": "^15.1.3" }, "devDependencies": { "@jest/globals": "^29.5.0", diff --git a/yarn-project/telemetry-client/src/lmdb_metrics.ts b/yarn-project/telemetry-client/src/lmdb_metrics.ts index c8efc91a801b..a8e70662d653 100644 --- a/yarn-project/telemetry-client/src/lmdb_metrics.ts +++ b/yarn-project/telemetry-client/src/lmdb_metrics.ts @@ -1,38 +1,47 @@ -import { type Gauge, type Meter, type Metrics, ValueType } from './telemetry.js'; +import { type BatchObservableResult, type Meter, type Metrics, type ObservableGauge, ValueType } from './telemetry.js'; export type LmdbMetricDescriptor = { name: Metrics; description: string; }; +export type LmdbStatsCallback = () => { mappingSize: number; numItems: number; actualSize: number }; + export class LmdbMetrics { - private dbMapSize: Gauge; - private dbUsedSize: Gauge; - private dbNumItems: Gauge; + private dbMapSize: ObservableGauge; + private dbUsedSize: ObservableGauge; + private dbNumItems: ObservableGauge; constructor( meter: Meter, dbMapSizeDescriptor: LmdbMetricDescriptor, dbUsedSizeDescriptor: LmdbMetricDescriptor, dbNumItemsDescriptor: LmdbMetricDescriptor, + private getStats?: LmdbStatsCallback, ) { - this.dbMapSize = meter.createGauge(dbMapSizeDescriptor.name, { + this.dbMapSize = meter.createObservableGauge(dbMapSizeDescriptor.name, { description: dbMapSizeDescriptor.description, valueType: ValueType.INT, }); - this.dbUsedSize = meter.createGauge(dbUsedSizeDescriptor.name, { + this.dbUsedSize = meter.createObservableGauge(dbUsedSizeDescriptor.name, { description: dbUsedSizeDescriptor.description, valueType: ValueType.INT, 
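+ // (Editor's note, a hedged aside on the pattern introduced here: an ObservableGauge is
+ // pull-based, so nothing calls .record() anymore; instead, the batch callback
+ // registered below via meter.addBatchObservableCallback() runs on each metric
+ // collection cycle and reports all three values in one observation pass.)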
}); - this.dbNumItems = meter.createGauge(dbNumItemsDescriptor.name, { + this.dbNumItems = meter.createObservableGauge(dbNumItemsDescriptor.name, { description: dbNumItemsDescriptor.description, valueType: ValueType.INT, }); - } - public recordDBMetrics(metrics: { mappingSize: number; numItems: number; actualSize: number }) { - this.dbMapSize.record(metrics.mappingSize); - this.dbNumItems.record(metrics.actualSize); - this.dbUsedSize.record(metrics.actualSize); + meter.addBatchObservableCallback(this.recordDBMetrics, [this.dbMapSize, this.dbUsedSize, this.dbNumItems]); } + + private recordDBMetrics = (observable: BatchObservableResult) => { + if (!this.getStats) { + return; + } + const metrics = this.getStats(); + observable.observe(this.dbMapSize, metrics.mappingSize); + observable.observe(this.dbNumItems, metrics.numItems); + observable.observe(this.dbUsedSize, metrics.actualSize); + }; } diff --git a/yarn-project/telemetry-client/src/metrics.ts b/yarn-project/telemetry-client/src/metrics.ts index 853ce0bb58f6..d737e6dd8634 100644 --- a/yarn-project/telemetry-client/src/metrics.ts +++ b/yarn-project/telemetry-client/src/metrics.ts @@ -79,6 +79,18 @@ export const PROVING_ORCHESTRATOR_BASE_ROLLUP_INPUTS_DURATION = export const PROVING_QUEUE_JOB_SIZE = 'aztec.proving_queue.job_size'; export const PROVING_QUEUE_SIZE = 'aztec.proving_queue.size'; +export const PROVING_QUEUE_ACTIVE_JOBS = 'aztec.proving_queue.active_jobs'; +export const PROVING_QUEUE_RESOLVED_JOBS = 'aztec.proving_queue.resolved_jobs'; +export const PROVING_QUEUE_REJECTED_JOBS = 'aztec.proving_queue.rejected_jobs'; +export const PROVING_QUEUE_RETRIED_JOBS = 'aztec.proving_queue.retried_jobs'; +export const PROVING_QUEUE_TIMED_OUT_JOBS = 'aztec.proving_queue.timed_out_jobs'; +export const PROVING_QUEUE_JOB_WAIT = 'aztec.proving_queue.job_wait'; +export const PROVING_QUEUE_JOB_DURATION = 'aztec.proving_queue.job_duration'; +export const PROVING_QUEUE_DB_NUM_ITEMS = 'aztec.proving_queue.db.num_items'; +export const PROVING_QUEUE_DB_MAP_SIZE = 'aztec.proving_queue.db.map_size'; +export const PROVING_QUEUE_DB_USED_SIZE = 'aztec.proving_queue.db.used_size'; + +export const PROVING_AGENT_IDLE = 'aztec.proving_queue.agent.idle'; export const PROVER_NODE_JOB_DURATION = 'aztec.prover_node.job_duration'; diff --git a/yarn-project/telemetry-client/src/otel.ts b/yarn-project/telemetry-client/src/otel.ts index f94d054cb314..46b0b8d0ff8e 100644 --- a/yarn-project/telemetry-client/src/otel.ts +++ b/yarn-project/telemetry-client/src/otel.ts @@ -1,4 +1,4 @@ -import { type DebugLogger } from '@aztec/foundation/log'; +import { type DebugLogger, type LogData, addLogDataHandler } from '@aztec/foundation/log'; import { DiagConsoleLogger, @@ -6,28 +6,23 @@ import { type Meter, type Tracer, type TracerProvider, + context, diag, + isSpanContextValid, + trace, } from '@opentelemetry/api'; import { OTLPMetricExporter } from '@opentelemetry/exporter-metrics-otlp-http'; import { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-http'; import { HostMetrics } from '@opentelemetry/host-metrics'; -import { awsEc2Detector, awsEcsDetector } from '@opentelemetry/resource-detector-aws'; -import { - type IResource, - detectResourcesSync, - envDetectorSync, - osDetectorSync, - processDetectorSync, - serviceInstanceIdDetectorSync, -} from '@opentelemetry/resources'; +import { type IResource } from '@opentelemetry/resources'; import { type LoggerProvider } from '@opentelemetry/sdk-logs'; import { MeterProvider, PeriodicExportingMetricReader } from 
'@opentelemetry/sdk-metrics'; import { BatchSpanProcessor, NodeTracerProvider } from '@opentelemetry/sdk-trace-node'; -import { SEMRESATTRS_SERVICE_NAME, SEMRESATTRS_SERVICE_VERSION } from '@opentelemetry/semantic-conventions'; +import { ATTR_SERVICE_NAME, ATTR_SERVICE_VERSION } from '@opentelemetry/semantic-conventions'; -import { aztecDetector } from './aztec_resource_detector.js'; import { type TelemetryClientConfig } from './config.js'; -import { registerOtelLoggerProvider } from './otelLoggerProvider.js'; +import { registerOtelLoggerProvider } from './otel_logger_provider.js'; +import { getOtelResource } from './otel_resource.js'; import { type Gauge, type TelemetryClient } from './telemetry.js'; export class OpenTelemetryClient implements TelemetryClient { @@ -43,19 +38,33 @@ export class OpenTelemetryClient implements TelemetryClient { ) {} getMeter(name: string): Meter { - return this.meterProvider.getMeter(name, this.resource.attributes[SEMRESATTRS_SERVICE_VERSION] as string); + return this.meterProvider.getMeter(name, this.resource.attributes[ATTR_SERVICE_VERSION] as string); } getTracer(name: string): Tracer { - return this.traceProvider.getTracer(name, this.resource.attributes[SEMRESATTRS_SERVICE_VERSION] as string); + return this.traceProvider.getTracer(name, this.resource.attributes[ATTR_SERVICE_VERSION] as string); } public start() { this.log.info('Starting OpenTelemetry client'); diag.setLogger(new DiagConsoleLogger(), DiagLogLevel.INFO); + // Add a callback to the logger to set context data from current trace + // Adapted from open-telemetry/opentelemetry-js-contrib PinoInstrumentation._getMixinFunction + addLogDataHandler((data: LogData) => { + const spanContext = trace.getSpan(context.active())?.spanContext(); + return spanContext && isSpanContextValid(spanContext) + ? { + ...data, + ['trace_id']: spanContext.traceId, + ['span_id']: spanContext.spanId, + ['trace_flags']: `0${spanContext.traceFlags.toString(16)}`, + } + : data; + }); + this.hostMetrics = new HostMetrics({ - name: this.resource.attributes[SEMRESATTRS_SERVICE_NAME] as string, + name: this.resource.attributes[ATTR_SERVICE_NAME] as string, meterProvider: this.meterProvider, }); @@ -88,33 +97,16 @@ export class OpenTelemetryClient implements TelemetryClient { } public static async createAndStart(config: TelemetryClientConfig, log: DebugLogger): Promise { - const resource = detectResourcesSync({ - detectors: [ - osDetectorSync, - envDetectorSync, - processDetectorSync, - serviceInstanceIdDetectorSync, - awsEc2Detector, - awsEcsDetector, - aztecDetector, - ], - }); - - if (resource.asyncAttributesPending) { - await resource.waitForAsyncAttributes!(); - } + const resource = await getOtelResource(); + // TODO(palla/log): Should we show traces as logs in stdout when otel collection is disabled? const tracerProvider = new NodeTracerProvider({ resource, + spanProcessors: config.tracesCollectorUrl + ? 
[new BatchSpanProcessor(new OTLPTraceExporter({ url: config.tracesCollectorUrl.href }))] + : [], }); - // optionally push traces to an OTEL collector instance - if (config.tracesCollectorUrl) { - tracerProvider.addSpanProcessor( - new BatchSpanProcessor(new OTLPTraceExporter({ url: config.tracesCollectorUrl.href })), - ); - } - tracerProvider.register(); const meterProvider = new MeterProvider({ @@ -129,7 +121,8 @@ export class OpenTelemetryClient implements TelemetryClient { }), ], }); - const loggerProvider = registerOtelLoggerProvider(resource, config.logsCollectorUrl); + + const loggerProvider = await registerOtelLoggerProvider(resource, config.logsCollectorUrl); const service = new OpenTelemetryClient(resource, meterProvider, tracerProvider, loggerProvider, log); service.start(); diff --git a/yarn-project/telemetry-client/src/otelLoggerProvider.ts b/yarn-project/telemetry-client/src/otel_logger_provider.ts similarity index 85% rename from yarn-project/telemetry-client/src/otelLoggerProvider.ts rename to yarn-project/telemetry-client/src/otel_logger_provider.ts index e5289b606c5a..7566520afbaa 100644 --- a/yarn-project/telemetry-client/src/otelLoggerProvider.ts +++ b/yarn-project/telemetry-client/src/otel_logger_provider.ts @@ -4,7 +4,11 @@ import { CompressionAlgorithm } from '@opentelemetry/otlp-exporter-base'; import { type IResource } from '@opentelemetry/resources'; import { BatchLogRecordProcessor, LoggerProvider } from '@opentelemetry/sdk-logs'; -export function registerOtelLoggerProvider(resource: IResource, otelLogsUrl?: URL) { +import { getOtelResource } from './otel_resource.js'; + +export async function registerOtelLoggerProvider(resource?: IResource, otelLogsUrl?: URL) { + resource ??= await getOtelResource(); + const loggerProvider = new LoggerProvider({ resource }); if (!otelLogsUrl) { // If no URL provided, return it disconnected. @@ -24,7 +28,7 @@ export function registerOtelLoggerProvider(resource: IResource, otelLogsUrl?: UR maxQueueSize: 4096, }), ); - otelLogs.setGlobalLoggerProvider(loggerProvider); + otelLogs.setGlobalLoggerProvider(loggerProvider); return loggerProvider; } diff --git a/yarn-project/telemetry-client/src/otel_resource.ts b/yarn-project/telemetry-client/src/otel_resource.ts new file mode 100644 index 000000000000..3810d3e73be6 --- /dev/null +++ b/yarn-project/telemetry-client/src/otel_resource.ts @@ -0,0 +1,32 @@ +import { awsEc2Detector, awsEcsDetector } from '@opentelemetry/resource-detector-aws'; +import { + type IResource, + detectResourcesSync, + envDetectorSync, + osDetectorSync, + processDetectorSync, + serviceInstanceIdDetectorSync, +} from '@opentelemetry/resources'; + +import { aztecDetector } from './aztec_resource_detector.js'; + +export async function getOtelResource(): Promise { + // TODO(palla/log): Do we really need *all* this info? 
+ const resource = detectResourcesSync({ + detectors: [ + osDetectorSync, + envDetectorSync, + processDetectorSync, + serviceInstanceIdDetectorSync, + awsEc2Detector, + awsEcsDetector, + aztecDetector, + ], + }); + + if (resource.asyncAttributesPending) { + await resource.waitForAsyncAttributes!(); + } + + return resource; +} diff --git a/yarn-project/telemetry-client/src/telemetry.ts b/yarn-project/telemetry-client/src/telemetry.ts index a481690f1550..60e55b8b1c66 100644 --- a/yarn-project/telemetry-client/src/telemetry.ts +++ b/yarn-project/telemetry-client/src/telemetry.ts @@ -1,9 +1,13 @@ import { type AttributeValue, + type BatchObservableCallback, type MetricOptions, + type Observable, + type BatchObservableResult as OtelBatchObservableResult, type Gauge as OtelGauge, type Histogram as OtelHistogram, type ObservableGauge as OtelObservableGauge, + type ObservableResult as OtelObservableResult, type ObservableUpDownCounter as OtelObservableUpDownCounter, type UpDownCounter as OtelUpDownCounter, type Span, @@ -31,6 +35,8 @@ export type Histogram = OtelHistogram; export type UpDownCounter = OtelUpDownCounter; export type ObservableGauge = OtelObservableGauge; export type ObservableUpDownCounter = OtelObservableUpDownCounter; +export type ObservableResult = OtelObservableResult; +export type BatchObservableResult = OtelBatchObservableResult; export { Tracer }; @@ -53,6 +59,16 @@ export interface Meter { */ createObservableGauge(name: Metrics, options?: MetricOptions): ObservableGauge; + addBatchObservableCallback( + callback: BatchObservableCallback, + observables: Observable[], + ): void; + + removeBatchObservableCallback( + callback: BatchObservableCallback, + observables: Observable[], + ): void; + /** * Creates a new histogram instrument. A histogram is a metric that samples observations (usually things like request durations or response sizes) and counts them in configurable buckets. * @param name - The name of the histogram diff --git a/yarn-project/telemetry-client/src/vendor/otel-pino-stream.ts b/yarn-project/telemetry-client/src/vendor/otel-pino-stream.ts new file mode 100644 index 000000000000..3f1361df7a78 --- /dev/null +++ b/yarn-project/telemetry-client/src/vendor/otel-pino-stream.ts @@ -0,0 +1,280 @@ +/* + * Adapted from open-telemetry/opentelemetry-js-contrib + * All changes are prefixed with [aztec] to make them easy to identify + * + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { type Logger, SeverityNumber, logs } from '@opentelemetry/api-logs'; +import { millisToHrTime } from '@opentelemetry/core'; +import { Writable } from 'stream'; + +import { registerOtelLoggerProvider } from '../otel_logger_provider.js'; + +/* eslint-disable @typescript-eslint/ban-types */ +/* eslint-disable camelcase */ + +// This block is a copy (modulo code style and TypeScript types) of the Pino +// code that defines log level value and names. 
This file is part of +// *instrumenting* Pino, so we want to avoid a dependency on the library. +const DEFAULT_LEVELS = { + trace: 10, + debug: 20, + info: 30, + warn: 40, + error: 50, + fatal: 60, +}; + +const OTEL_SEV_NUM_FROM_PINO_LEVEL: { [level: number]: SeverityNumber } = { + [DEFAULT_LEVELS.trace]: SeverityNumber.TRACE, + [DEFAULT_LEVELS.debug]: SeverityNumber.DEBUG, + [DEFAULT_LEVELS.info]: SeverityNumber.INFO, + [DEFAULT_LEVELS.warn]: SeverityNumber.WARN, + [DEFAULT_LEVELS.error]: SeverityNumber.ERROR, + [DEFAULT_LEVELS.fatal]: SeverityNumber.FATAL, +}; + +const EXTRA_SEV_NUMS = [ + SeverityNumber.TRACE2, + SeverityNumber.TRACE3, + SeverityNumber.TRACE4, + SeverityNumber.DEBUG2, + SeverityNumber.DEBUG3, + SeverityNumber.DEBUG4, + SeverityNumber.INFO2, + SeverityNumber.INFO3, + SeverityNumber.INFO4, + SeverityNumber.WARN2, + SeverityNumber.WARN3, + SeverityNumber.WARN4, + SeverityNumber.ERROR2, + SeverityNumber.ERROR3, + SeverityNumber.ERROR4, + SeverityNumber.FATAL2, + SeverityNumber.FATAL3, + SeverityNumber.FATAL4, +]; + +function severityNumberFromPinoLevel(lvl: number) { + // Fast common case: one of the known levels + const sev = OTEL_SEV_NUM_FROM_PINO_LEVEL[lvl]; + if (sev !== undefined) { + return sev; + } + + // Otherwise, scale the Pino level range -- 10 (trace) to 70 (fatal+10) + // -- onto the extra OTel severity numbers (TRACE2, TRACE3, ..., FATAL4). + // Values below trace (10) map to SeverityNumber.TRACE2, which may be + // considered a bit weird, but it means the unnumbered levels are always + // just for exactly matching values. + const relativeLevelWeight = (lvl - 10) / (70 - 10); + const otelSevIdx = Math.floor(relativeLevelWeight * EXTRA_SEV_NUMS.length); + const cappedOTelIdx = Math.min(EXTRA_SEV_NUMS.length - 1, Math.max(0, otelSevIdx)); + const otelSevValue = EXTRA_SEV_NUMS[cappedOTelIdx]; + return otelSevValue; +} + +// [aztec] Custom function to map Aztec logging levels to OpenTelemetry severity numbers +function severityNumberFromAztecPinoLevel(lvl: number) { + return ( + OTEL_SEV_NUM_FROM_PINO_LEVEL[lvl] ?? + /* verbose */ (lvl === 25 ? SeverityNumber.DEBUG3 : undefined) ?? + severityNumberFromPinoLevel(lvl) + ); +} + +/** + * Return a function that knows how to convert the "time" field value on a + * Pino log record to an OTel LogRecord timestamp value. + * + * How to convert the serialized "time" on a Pino log record + * depends on the Logger's `Symbol(pino.time)` prop, configurable + * via https://getpino.io/#/docs/api?id=timestamp-boolean-function + * + * For example: + * const logger = pino({timestamp: pino.stdTimeFunctions.isoTime}) + * results in log record entries of the form: + * ,"time":"2024-05-17T22:03:25.969Z" + * `otelTimestampFromTime` will be given the value of the "time" field: + * "2024-05-17T22:03:25.969Z" + * which should be parsed to a number of milliseconds since the epoch. + */ +export function getTimeConverter(pinoLogger: any, pinoMod: any) { + const stdTimeFns = pinoMod.stdTimeFunctions; + const loggerTimeFn = pinoLogger[pinoMod.symbols.timeSym]; + if (loggerTimeFn === stdTimeFns.epochTime) { + return (time: number) => time; + } else if (loggerTimeFn === stdTimeFns.unixTime) { + return (time: number) => time * 1e3; + } else if (loggerTimeFn === stdTimeFns.isoTime) { + return (time: string) => new Date(time).getTime(); + } else if (loggerTimeFn === stdTimeFns.nullTime) { + return () => Date.now(); + } else { + // The logger has a custom time function. Don't guess. 
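+ // (Editor's note: returning NaN here is deliberate rather than a bug. The _write
+ // path below checks isNaN(timestamp), stashes the unrecognized raw value under
+ // attributes['time'], and substitutes Date.now(), so an exotic pino time function
+ // degrades gracefully instead of crashing the stream.)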
+ return () => NaN; + } +} + +interface OTelPinoStreamOptions { + messageKey?: string; + levels: any; // Pino.LevelMapping + otelTimestampFromTime?: (time: any) => number; +} + +/** + * A Pino stream for sending records to the OpenTelemetry Logs API. + * + * - This stream emits an 'unknown' event on an unprocessable pino record. + * The event arguments are: `logLine: string`, `err: string | Error`. + */ +export class OTelPinoStream extends Writable { + private _otelLogger: Logger; + private _messageKey: string; + private _levels; + private _otelTimestampFromTime; + + constructor(options: OTelPinoStreamOptions) { + super(); + + // Note: A PINO_CONFIG event was added to pino (2024-04-04) to send config + // to transports. Eventually OTelPinoStream might be able to use this + // for auto-configuration in newer pino versions. The event currently does + // not include the `timeSym` value that is needed here, however. + this._messageKey = options.messageKey ?? 'msg'; + this._levels = options.levels; + + // [aztec] The following will break if we set up a custom time function in our logger + this._otelTimestampFromTime = options.otelTimestampFromTime ?? ((time: number) => time); + + // Cannot use `instrumentation.logger` until we have a delegating LoggerProvider: + // https://github.com/open-telemetry/opentelemetry-js/issues/4399 + // [aztec] Use the name of this package + this._otelLogger = logs.getLogger('@aztec/telemetry-client/otel-pino-stream', '0.1.0'); + } + + override _write(s: string, _encoding: string, callback: Function) { + try { + /* istanbul ignore if */ + if (!s) { + return; + } + + // Parse, and handle edge cases similar to how `pino-abstract-transport` does: + // https://github.com/pinojs/pino-abstract-transport/blob/v1.2.0/index.js#L28-L45 + // - Emitting an 'unknown' event on parse error mimics pino-abstract-transport. + let recObj; + try { + recObj = JSON.parse(s); + } catch (parseErr) { + // Invalid JSON suggests a bug in Pino, or a logger configuration bug + // (a bogus `options.timestamp` or serializer). + this.emit('unknown', s.toString(), parseErr); + callback(); + return; + } + /* istanbul ignore if */ + if (recObj === null) { + this.emit('unknown', s.toString(), 'Null value ignored'); + callback(); + return; + } + /* istanbul ignore if */ + if (typeof recObj !== 'object') { + recObj = { + data: recObj, + }; + } + + const { + time, + [this._messageKey]: body, + level, // eslint-disable-line @typescript-eslint/no-unused-vars + + // The typical Pino `hostname` and `pid` fields are removed because they + // are redundant with the OpenTelemetry `host.name` and `process.pid` + // Resource attributes, respectively. This code cannot change the + // LoggerProvider's `resource`, so getting the OpenTelemetry equivalents + // depends on the user using the OpenTelemetry HostDetector and + // ProcessDetector. + // https://getpino.io/#/docs/api?id=opt-base + hostname, // eslint-disable-line @typescript-eslint/no-unused-vars + pid, // eslint-disable-line @typescript-eslint/no-unused-vars + + // The `trace_id` et al fields that may have been added by the + // "log correlation" feature are stripped, because they are redundant. + // trace_id, // eslint-disable-line @typescript-eslint/no-unused-vars + // span_id, // eslint-disable-line @typescript-eslint/no-unused-vars + // trace_flags, // eslint-disable-line @typescript-eslint/no-unused-vars + + // [aztec] They are not redundant, we depend on them for correlation.
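+ // (Editor's note, an illustrative example only, not part of the vendored code: a
+ // pino record such as
+ //   {"level":30,"time":1731600000000,"msg":"node started","trace_id":"..."}
+ // destructures here into body = 'node started' and level = 30, which
+ // severityNumberFromAztecPinoLevel maps to SeverityNumber.INFO, while trace_id
+ // falls through into ...attributes, which is exactly the correlation field the
+ // [aztec] note above wants to preserve.)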
+ // The instrumentation package seems to be adding these fields via a custom hook. + // We push them from the logger module in foundation, so we don't want to clear them here. + + ...attributes + } = recObj; + + let timestamp = this._otelTimestampFromTime(time); + if (isNaN(timestamp)) { + attributes['time'] = time; // save the unexpected "time" field to attributes + timestamp = Date.now(); + } + + // This avoids a possible subtle bug when a Pino logger uses + // `time: pino.stdTimeFunctions.unixTime` and logs in the first half-second + // since process start. The rounding involved results in: + // timestamp < performance.timeOrigin + // If that is passed to Logger.emit() it will be misinterpreted by + // `timeInputToHrTime` as a `performance.now()` value. + const timestampHrTime = millisToHrTime(timestamp); + + // Prefer using `stream.lastLevel`, because `recObj.level` can be customized + // to anything via `formatters.level` + // (https://getpino.io/#/docs/api?id=formatters-object). + // const lastLevel = (this as any).lastLevel; + + // [aztec] We do not prefer stream.lastLevel since it's undefined here, as we are running + // on a worker thread, so we use recObj.level because we know that we won't customize it. + const lastLevel = recObj.level; + + const otelRec = { + timestamp: timestampHrTime, + observedTimestamp: timestampHrTime, + severityNumber: severityNumberFromAztecPinoLevel(lastLevel), + severityText: this._levels.labels[lastLevel], + body, + attributes, + }; + + this._otelLogger.emit(otelRec); + } catch (err) { + // [aztec] Log errors to stderr + // eslint-disable-next-line no-console + console.error(`Error in OTelPinoStream: ${err}`); + } + callback(); + } +} + +// [aztec] Default export that loads the resource information and creates a new otel pino stream. +// Invoked by pino when creating a transport in a worker thread out of this stream. +// Note that the original open-telemetry/opentelemetry-js-contrib was set up to run on the main +// nodejs loop, as opposed to in a worker as pino recommends. +export default async function (options: OTelPinoStreamOptions) { + const url = process.env.OTEL_EXPORTER_OTLP_LOGS_ENDPOINT; + // We re-register here because this runs on a worker thread + await registerOtelLoggerProvider(undefined, url ? 
new URL(url) : undefined);
+  return new OTelPinoStream(options);
+}
diff --git a/yarn-project/txe/package.json b/yarn-project/txe/package.json
index efa06e6bf574..0fa9709f348e 100644
--- a/yarn-project/txe/package.json
+++ b/yarn-project/txe/package.json
@@ -18,7 +18,7 @@
     "formatting": "run -T prettier --check ./src && run -T eslint ./src",
     "formatting:fix": "run -T eslint --fix ./src && run -T prettier -w ./src",
     "test": "NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --passWithNoTests",
-    "dev": "DEBUG='aztec:*,-aztec:avm_simulator:*' LOG_LEVEL=debug node ./dest/bin/index.js",
+    "dev": "LOG_LEVEL=debug node ./dest/bin/index.js",
     "start": "node ./dest/bin/index.js"
   },
   "inherits": [
diff --git a/yarn-project/txe/src/oracle/txe_oracle.ts b/yarn-project/txe/src/oracle/txe_oracle.ts
index e9c6d1c01c98..eda766bdce72 100644
--- a/yarn-project/txe/src/oracle/txe_oracle.ts
+++ b/yarn-project/txe/src/oracle/txe_oracle.ts
@@ -11,13 +11,14 @@ import {
 } from '@aztec/circuit-types';
 import { type CircuitWitnessGenerationStats } from '@aztec/circuit-types/stats';
 import {
+  BlockHeader,
   CallContext,
   type ContractInstance,
   type ContractInstanceWithAddress,
+  DEPLOYER_CONTRACT_ADDRESS,
   Gas,
   GasFees,
   GlobalVariables,
-  Header,
   IndexedTaggingSecret,
   type KeyValidationRequest,
   type L1_TO_L2_MSG_TREE_HEIGHT,
@@ -210,7 +211,7 @@ export class TXE implements TypedOracle {
   }
 
   async addAuthWitness(address: AztecAddress, messageHash: Fr) {
-    const account = this.txeDatabase.getAccount(address);
+    const account = await this.txeDatabase.getAccount(address);
     const privateKey = await this.keyStore.getMasterSecretKey(account.publicKeys.masterIncomingViewingPublicKey);
     const schnorr = new Schnorr();
     const signature = schnorr.constructSignature(messageHash.toBuffer(), privateKey).toBuffer();
@@ -362,8 +363,8 @@ export class TXE implements TypedOracle {
     throw new Error('Method not implemented.');
   }
 
-  async getHeader(blockNumber: number): Promise<Header | undefined> {
-    const header = Header.empty();
+  async getBlockHeader(blockNumber: number): Promise<BlockHeader | undefined> {
+    const header = BlockHeader.empty();
     const db = await this.#getTreesAt(blockNumber);
     header.state = await db.getStateReference();
     header.globalVariables.blockNumber = new Fr(blockNumber);
@@ -634,6 +635,7 @@
     const executionRequest = new PublicExecutionRequest(callContext, args);
 
     const db = await this.trees.getLatest();
+    const worldStateDb = new TXEWorldStateDB(db, new TXEPublicContractDataSource(this));
 
     const globalVariables = GlobalVariables.empty();
     globalVariables.chainId = this.chainId;
@@ -641,12 +643,23 @@
     globalVariables.blockNumber = new Fr(this.blockNumber);
     globalVariables.gasFees = new GasFees(1, 1);
 
+    // If the contract instance exists in the TXE's world state, make sure its nullifier is present in the tree
+    // so its nullifier check passes.
+    if ((await worldStateDb.getContractInstance(callContext.contractAddress)) !== undefined) {
+      const contractAddressNullifier = siloNullifier(
+        AztecAddress.fromNumber(DEPLOYER_CONTRACT_ADDRESS),
+        callContext.contractAddress.toField(),
+      );
+      if ((await worldStateDb.getNullifierIndex(contractAddressNullifier)) === undefined) {
+        await db.batchInsert(MerkleTreeId.NULLIFIER_TREE, [contractAddressNullifier.toBuffer()], 0);
+      }
+    }
+
     const simulator = new PublicTxSimulator(
       db,
       new TXEWorldStateDB(db, new TXEPublicContractDataSource(this)),
       new NoopTelemetryClient(),
       globalVariables,
-      /*realAvmProvingRequests=*/ false,
     );
 
     // When setting up a teardown call, we tell it that
diff --git a/yarn-project/txe/src/txe_service/txe_service.ts b/yarn-project/txe/src/txe_service/txe_service.ts
index 1fdaee4d6359..4965ef1e0a31 100644
--- a/yarn-project/txe/src/txe_service/txe_service.ts
+++ b/yarn-project/txe/src/txe_service/txe_service.ts
@@ -1,9 +1,9 @@
 import { SchnorrAccountContractArtifact } from '@aztec/accounts/schnorr';
 import { L2Block, MerkleTreeId, SimulationError } from '@aztec/circuit-types';
 import {
+  BlockHeader,
   Fr,
   FunctionSelector,
-  Header,
   PublicDataTreeLeaf,
   PublicKeys,
   computePartialAddress,
@@ -14,7 +14,7 @@ import { type ContractArtifact, NoteSelector } from '@aztec/foundation/abi';
 import { AztecAddress } from '@aztec/foundation/aztec-address';
 import { type Logger } from '@aztec/foundation/log';
 import { KeyStore } from '@aztec/key-store';
-import { openTmpStore } from '@aztec/kv-store/utils';
+import { openTmpStore } from '@aztec/kv-store/lmdb';
 import { getCanonicalProtocolContract, protocolContractNames } from '@aztec/protocol-contracts';
 import { enrichPublicSimulationError } from '@aztec/pxe';
 import { ExecutionNoteCache, PackedValuesCache, type TypedOracle } from '@aztec/simulator';
@@ -72,7 +72,7 @@ export class TXEService {
     const trees = (this.typedOracle as TXE).getTrees();
     for (let i = 0; i < nBlocks; i++) {
       const blockNumber = await this.typedOracle.getBlockNumber();
-      const header = Header.empty();
+      const header = BlockHeader.empty();
       const l2Block = L2Block.empty();
       header.state = await trees.getStateReference(true);
       header.globalVariables.blockNumber = new Fr(blockNumber);
@@ -550,8 +550,8 @@ export class TXEService {
     return toForeignCallResult([]);
   }
 
-  async getHeader(blockNumber: ForeignCallSingle) {
-    const header = await this.typedOracle.getHeader(fromSingle(blockNumber).toNumber());
+  async getBlockHeader(blockNumber: ForeignCallSingle) {
+    const header = await this.typedOracle.getBlockHeader(fromSingle(blockNumber).toNumber());
    if
(!header) { throw new Error(`Block header not found for block ${blockNumber}.`); } @@ -661,11 +661,6 @@ export class TXEService { return toForeignCallResult([toSingle(new Fr(blockNumber))]); } - avmOpcodeFunctionSelector() { - const functionSelector = (this.typedOracle as TXE).getFunctionSelector(); - return toForeignCallResult([toSingle(functionSelector.toField())]); - } - avmOpcodeIsStaticCall() { const isStaticCall = (this.typedOracle as TXE).getIsStaticCall(); return toForeignCallResult([toSingle(new Fr(isStaticCall ? 1 : 0))]); diff --git a/yarn-project/txe/src/util/txe_database.ts b/yarn-project/txe/src/util/txe_database.ts index b154fd8702a2..5bb4621ec7f9 100644 --- a/yarn-project/txe/src/util/txe_database.ts +++ b/yarn-project/txe/src/util/txe_database.ts @@ -1,17 +1,17 @@ import { type AztecAddress, CompleteAddress } from '@aztec/circuits.js'; -import { type AztecKVStore, type AztecMap } from '@aztec/kv-store'; +import { type AztecAsyncKVStore, type AztecAsyncMap } from '@aztec/kv-store'; import { KVPxeDatabase } from '@aztec/pxe'; export class TXEDatabase extends KVPxeDatabase { - #accounts: AztecMap; + #accounts: AztecAsyncMap; - constructor(db: AztecKVStore) { + constructor(db: AztecAsyncKVStore) { super(db); this.#accounts = db.openMap('accounts'); } - getAccount(key: AztecAddress) { - const completeAddress = this.#accounts.get(key.toString()); + async getAccount(key: AztecAddress) { + const completeAddress = await this.#accounts.getAsync(key.toString()); if (!completeAddress) { throw new Error(`Account not found: ${key.toString()}`); } diff --git a/yarn-project/validator-client/package.json b/yarn-project/validator-client/package.json index 7fff0272c513..23cba4d90368 100644 --- a/yarn-project/validator-client/package.json +++ b/yarn-project/validator-client/package.json @@ -61,6 +61,7 @@ "dependencies": { "@aztec/circuit-types": "workspace:^", "@aztec/circuits.js": "workspace:^", + "@aztec/epoch-cache": "workspace:^", "@aztec/ethereum": "workspace:^", "@aztec/foundation": "workspace:^", "@aztec/p2p": "workspace:^", diff --git a/yarn-project/validator-client/src/duties/validation_service.ts b/yarn-project/validator-client/src/duties/validation_service.ts index ee5718b2e007..e79fe5fa8c13 100644 --- a/yarn-project/validator-client/src/duties/validation_service.ts +++ b/yarn-project/validator-client/src/duties/validation_service.ts @@ -5,7 +5,7 @@ import { SignatureDomainSeperator, type TxHash, } from '@aztec/circuit-types'; -import { type Header } from '@aztec/circuits.js'; +import { type BlockHeader } from '@aztec/circuits.js'; import { Buffer32 } from '@aztec/foundation/buffer'; import { keccak256 } from '@aztec/foundation/crypto'; import { type Fr } from '@aztec/foundation/fields'; @@ -24,7 +24,7 @@ export class ValidationService { * * @returns A block proposal signing the above information (not the current implementation!!!) 
 */
-  createBlockProposal(header: Header, archive: Fr, txs: TxHash[]): Promise<BlockProposal> {
+  createBlockProposal(header: BlockHeader, archive: Fr, txs: TxHash[]): Promise<BlockProposal> {
     const payloadSigner = (payload: Buffer32) => this.keyStore.signMessage(payload);
     return BlockProposal.createProposalFromSigner(new ConsensusPayload(header, archive, txs), payloadSigner);
diff --git a/yarn-project/validator-client/src/factory.ts b/yarn-project/validator-client/src/factory.ts
index 56b5306969b1..7ea8b09e8d6e 100644
--- a/yarn-project/validator-client/src/factory.ts
+++ b/yarn-project/validator-client/src/factory.ts
@@ -1,3 +1,5 @@
+import { EpochCache, type EpochCacheConfig } from '@aztec/epoch-cache';
+import { type EthAddress } from '@aztec/foundation/eth-address';
 import { type P2P } from '@aztec/p2p';
 import { type TelemetryClient } from '@aztec/telemetry-client';
@@ -6,7 +8,12 @@ import { generatePrivateKey } from 'viem/accounts';
 import { type ValidatorClientConfig } from './config.js';
 import { ValidatorClient } from './validator.js';
 
-export function createValidatorClient(config: ValidatorClientConfig, p2pClient: P2P, telemetry: TelemetryClient) {
+export async function createValidatorClient(
+  config: ValidatorClientConfig & EpochCacheConfig,
+  rollupAddress: EthAddress,
+  p2pClient: P2P,
+  telemetry: TelemetryClient,
+) {
   if (config.disableValidator) {
     return undefined;
   }
@@ -14,5 +21,8 @@ export function createValidatorClient(config: ValidatorClientConfig, p2pClient:
     config.validatorPrivateKey = generatePrivateKey();
   }
 
-  return ValidatorClient.new(config, p2pClient, telemetry);
+  // Create the epoch cache
+  const epochCache = await EpochCache.create(rollupAddress, /*l1TimestampSource,*/ config);
+
+  return ValidatorClient.new(config, epochCache, p2pClient, telemetry);
 }
diff --git a/yarn-project/validator-client/src/validator.test.ts b/yarn-project/validator-client/src/validator.test.ts
index d60937f2fcc3..df2bee4ccbb2 100644
--- a/yarn-project/validator-client/src/validator.test.ts
+++ b/yarn-project/validator-client/src/validator.test.ts
@@ -3,6 +3,7 @@
  */
 import { TxHash, mockTx } from '@aztec/circuit-types';
 import { makeHeader } from '@aztec/circuits.js/testing';
+import { type EpochCache } from '@aztec/epoch-cache';
 import { Secp256k1Signer } from '@aztec/foundation/crypto';
 import { EthAddress } from '@aztec/foundation/eth-address';
 import { Fr } from '@aztec/foundation/fields';
@@ -27,11 +28,13 @@ describe('ValidationService', () => {
   let config: ValidatorClientConfig;
   let validatorClient: ValidatorClient;
   let p2pClient: MockProxy<P2P>;
+  let epochCache: MockProxy<EpochCache>;
   let validatorAccount: PrivateKeyAccount;
 
   beforeEach(() => {
     p2pClient = mock<P2P>();
     p2pClient.getAttestationsForSlot.mockImplementation(() => Promise.resolve([]));
+    epochCache = mock<EpochCache>();
 
     const validatorPrivateKey = generatePrivateKey();
     validatorAccount = privateKeyToAccount(validatorPrivateKey);
@@ -43,12 +46,12 @@
       disableValidator: false,
       validatorReexecute: false,
     };
-    validatorClient = ValidatorClient.new(config, p2pClient, new NoopTelemetryClient());
+    validatorClient = ValidatorClient.new(config, epochCache, p2pClient, new NoopTelemetryClient());
   });
 
   it('Should throw error if an invalid private key is provided', () => {
     config.validatorPrivateKey = '0x1234567890123456789';
-    expect(() => ValidatorClient.new(config, p2pClient, new NoopTelemetryClient())).toThrow(
+    expect(() => ValidatorClient.new(config, epochCache, p2pClient, new NoopTelemetryClient())).toThrow(
       InvalidValidatorPrivateKeyError,
     );
   });
@@ -56,7 +59,7 @@ describe('ValidationService', () => {
   it('Should throw an error if re-execution is enabled but no block builder is provided', async () => {
     config.validatorReexecute = true;
     p2pClient.getTxByHash.mockImplementation(() => Promise.resolve(mockTx()));
-    const val = ValidatorClient.new(config, p2pClient);
+    const val = ValidatorClient.new(config, epochCache, p2pClient);
     await expect(val.reExecuteTransactions(makeBlockProposal())).rejects.toThrow(BlockBuilderNotProvidedError);
   });
 
@@ -70,7 +73,7 @@
     expect(blockProposal).toBeDefined();
 
     const validatorAddress = EthAddress.fromString(validatorAccount.address);
-    expect(blockProposal.getSender()).toEqual(validatorAddress);
+    expect(blockProposal?.getSender()).toEqual(validatorAddress);
   });
 
   it('Should timeout if we do not collect enough attestations in time', async () => {
@@ -97,8 +100,12 @@
     // mock the p2pClient.getTxStatus to return undefined for all transactions
     p2pClient.getTxStatus.mockImplementation(() => undefined);
+    epochCache.getProposerInCurrentOrNextSlot.mockImplementation(() =>
+      Promise.resolve([proposal.getSender(), proposal.getSender()]),
+    );
+    epochCache.isInCommittee.mockImplementation(() => Promise.resolve(true));
 
-    const val = ValidatorClient.new(config, p2pClient, new NoopTelemetryClient());
+    const val = ValidatorClient.new(config, epochCache, p2pClient);
     val.registerBlockBuilder(() => {
       throw new Error('Failed to build block');
     });
@@ -107,6 +114,32 @@
     expect(attestation).toBeUndefined();
   });
 
+  it('Should not return an attestation if the validator is not in the committee', async () => {
+    const proposal = makeBlockProposal();
+
+    // Setup epoch cache mocks
+    epochCache.getProposerInCurrentOrNextSlot.mockImplementation(() =>
+      Promise.resolve([proposal.getSender(), proposal.getSender()]),
+    );
+    epochCache.isInCommittee.mockImplementation(() => Promise.resolve(false));
+
+    const attestation = await validatorClient.attestToProposal(proposal);
+    expect(attestation).toBeUndefined();
+  });
+
+  it('Should not return an attestation if the proposer is not the current proposer', async () => {
+    const proposal = makeBlockProposal();
+
+    // Setup epoch cache mocks
+    epochCache.getProposerInCurrentOrNextSlot.mockImplementation(() =>
+      Promise.resolve([EthAddress.random(), EthAddress.random()]),
+    );
+    epochCache.isInCommittee.mockImplementation(() => Promise.resolve(true));
+
+    const attestation = await validatorClient.attestToProposal(proposal);
+    expect(attestation).toBeUndefined();
+  });
+
   it('Should collect attestations for a proposal', async () => {
     const signer = Secp256k1Signer.random();
     const attestor1 = Secp256k1Signer.random();
diff --git a/yarn-project/validator-client/src/validator.ts b/yarn-project/validator-client/src/validator.ts
index 7ced2639ab1f..5067875db34b 100644
--- a/yarn-project/validator-client/src/validator.ts
+++ b/yarn-project/validator-client/src/validator.ts
@@ -6,7 +6,8 @@ import {
   type Tx,
   type TxHash,
 } from '@aztec/circuit-types';
-import { type GlobalVariables, type Header } from '@aztec/circuits.js';
+import { type BlockHeader, type GlobalVariables } from '@aztec/circuits.js';
+import { type EpochCache } from '@aztec/epoch-cache';
 import { Buffer32 } from '@aztec/foundation/buffer';
 import { type Fr } from '@aztec/foundation/fields';
 import { createDebugLogger } from '@aztec/foundation/log';
@@ -37,7 +38,7 @@ import { ValidatorMetrics } from './metrics.js';
 
 type BlockBuilderCallback = (
   txs: Tx[],
   globalVariables: GlobalVariables,
-  historicalHeader?: Header,
+  historicalHeader?: BlockHeader,
   interrupt?: (processedTxs: ProcessedTx[]) => Promise<void>,
 ) => Promise<{ block: L2Block; publicProcessorDuration: number; numProcessedTxs: number; blockBuildingTimer: Timer }>;
 
@@ -47,7 +48,7 @@ export interface Validator {
   registerBlockBuilder(blockBuilder: BlockBuilderCallback): void;
 
   // Block validation responsibilities
-  createBlockProposal(header: Header, archive: Fr, txs: TxHash[]): Promise<BlockProposal | undefined>;
+  createBlockProposal(header: BlockHeader, archive: Fr, txs: TxHash[]): Promise<BlockProposal | undefined>;
   attestToProposal(proposal: BlockProposal): void;
 
   broadcastBlockProposal(proposal: BlockProposal): void;
@@ -61,11 +62,15 @@ export class ValidatorClient extends WithTracer implements Validator {
   private validationService: ValidationService;
   private metrics: ValidatorMetrics;
 
+  // Used to check if we are sending the same proposal twice
+  private previousProposal?: BlockProposal;
+
   // Callback registered to: sequencer.buildBlock
   private blockBuilder?: BlockBuilderCallback = undefined;
 
   constructor(
-    keyStore: ValidatorKeyStore,
+    private keyStore: ValidatorKeyStore,
+    private epochCache: EpochCache,
     private p2pClient: P2P,
     private config: ValidatorClientConfig,
     telemetry: TelemetryClient = new NoopTelemetryClient(),
@@ -75,12 +80,16 @@ export class ValidatorClient extends WithTracer implements Validator {
     super(telemetry, 'Validator');
     this.metrics = new ValidatorMetrics(telemetry);
 
-    //TODO: We need to setup and store all of the currently active validators https://github.com/AztecProtocol/aztec-packages/issues/7962
     this.validationService = new ValidationService(keyStore);
     this.log.verbose('Initialized validator');
   }
 
-  static new(config: ValidatorClientConfig, p2pClient: P2P, telemetry: TelemetryClient = new NoopTelemetryClient()) {
+  static new(
+    config: ValidatorClientConfig,
+    epochCache: EpochCache,
+    p2pClient: P2P,
+    telemetry: TelemetryClient = new NoopTelemetryClient(),
+  ) {
     if (!config.validatorPrivateKey) {
       throw new InvalidValidatorPrivateKeyError();
     }
@@ -88,7 +97,7 @@
     const privateKey = validatePrivateKey(config.validatorPrivateKey);
 
     const localKeyStore = new LocalKeyStore(privateKey);
-    const validator = new ValidatorClient(localKeyStore, p2pClient, config, telemetry);
+    const validator = new ValidatorClient(localKeyStore, epochCache, p2pClient, config, telemetry);
     validator.registerBlockProposalHandler();
     return validator;
   }
@@ -118,6 +127,19 @@
   }
 
   async attestToProposal(proposal: BlockProposal): Promise<BlockAttestation | undefined> {
+    // Check that I am in the committee
+    if (!(await this.epochCache.isInCommittee(this.keyStore.getAddress()))) {
+      this.log.verbose(`Not in the committee, skipping attestation`);
+      return undefined;
+    }
+
+    // Check that the proposal is from the current proposer, or the next proposer.
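+    // Accepting either the current or the next slot's proposer presumably
+    // tolerates proposals that are built and broadcast just before their
+    // slot begins.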
+    const [currentProposer, nextSlotProposer] = await this.epochCache.getProposerInCurrentOrNextSlot();
+    if (!proposal.getSender().equals(currentProposer) && !proposal.getSender().equals(nextSlotProposer)) {
+      this.log.verbose(`Not the current or next proposer, skipping attestation`);
+      return undefined;
+    }
+
     // Check that all of the transactions in the proposal are available in the tx pool before attesting
     this.log.verbose(`request to attest`, {
       archive: proposal.payload.archive.toString(),
@@ -131,14 +153,18 @@
         await this.reExecuteTransactions(proposal);
       }
     } catch (error: any) {
+      // If the transactions are not available, then we should not attempt to attest
      if (error instanceof TransactionsNotAvailableError) {
        this.log.error(`Transactions not available, skipping attestation ${error.message}`);
      } else {
+        // This branch will most commonly be hit if the transactions are available, but the re-execution fails
        // Catch all error handler
        this.log.error(`Failed to attest to proposal: ${error.message}`);
      }
      return undefined;
    }
+
+    // Provided all of the above checks pass, we can attest to the proposal
    this.log.verbose(
      `Transactions available, attesting to proposal with ${proposal.payload.txHashes.length} transactions`,
    );
@@ -210,8 +236,15 @@
    }
  }
 
-  createBlockProposal(header: Header, archive: Fr, txs: TxHash[]): Promise<BlockProposal | undefined> {
-    return this.validationService.createBlockProposal(header, archive, txs);
+  async createBlockProposal(header: BlockHeader, archive: Fr, txs: TxHash[]): Promise<BlockProposal | undefined> {
+    if (this.previousProposal?.slotNumber.equals(header.globalVariables.slotNumber)) {
+      this.log.verbose(`Already made a proposal for the same slot, skipping proposal`);
+      return Promise.resolve(undefined);
+    }
+
+    const newProposal = await this.validationService.createBlockProposal(header, archive, txs);
+    this.previousProposal = newProposal;
+    return newProposal;
  }
 
  broadcastBlockProposal(proposal: BlockProposal): void {
diff --git a/yarn-project/validator-client/tsconfig.json b/yarn-project/validator-client/tsconfig.json
index 17533523097e..d2409d81fdba 100644
--- a/yarn-project/validator-client/tsconfig.json
+++ b/yarn-project/validator-client/tsconfig.json
@@ -12,6 +12,9 @@
     {
       "path": "../circuits.js"
     },
+    {
+      "path": "../epoch-cache"
+    },
     {
       "path": "../ethereum"
     },
diff --git a/yarn-project/world-state/src/native/merkle_trees_facade.ts b/yarn-project/world-state/src/native/merkle_trees_facade.ts
index 9ce07806d0e2..6d3ea76d7fda 100644
--- a/yarn-project/world-state/src/native/merkle_trees_facade.ts
+++ b/yarn-project/world-state/src/native/merkle_trees_facade.ts
@@ -10,8 +10,8 @@ import {
   type TreeInfo,
 } from '@aztec/circuit-types';
 import {
+  type BlockHeader,
   Fr,
-  type Header,
   NullifierLeaf,
   NullifierLeafPreimage,
   PartialStateReference,
@@ -37,11 +37,11 @@ import { type NativeWorldStateInstance } from './native_world_state_instance.js'
 
 export class MerkleTreesFacade implements MerkleTreeReadOperations {
   constructor(
     protected readonly instance: NativeWorldStateInstance,
-    protected readonly initialHeader: Header,
+    protected readonly initialHeader: BlockHeader,
     protected readonly revision: WorldStateRevision,
   ) {}
 
-  getInitialHeader(): Header {
+  getInitialHeader(): BlockHeader {
     return this.initialHeader;
   }
@@ -182,13 +182,13 @@
 }
 
 export class MerkleTreesForkFacade extends MerkleTreesFacade implements MerkleTreeWriteOperations {
-  constructor(instance: NativeWorldStateInstance, initialHeader: Header, revision: WorldStateRevision) {
+  constructor(instance: NativeWorldStateInstance, initialHeader: BlockHeader, revision: WorldStateRevision) {
     assert.notEqual(revision.forkId, 0, 'Fork ID must be set');
     assert.equal(revision.includeUncommitted, true, 'Fork must include uncommitted data');
     super(instance, initialHeader, revision);
   }
 
-  async updateArchive(header: Header): Promise<void> {
+  async updateArchive(header: BlockHeader): Promise<void> {
     await this.instance.call(WorldStateMessageType.UPDATE_ARCHIVE, {
       forkId: this.revision.forkId,
       blockHeaderHash: header.hash().toBuffer(),
diff --git a/yarn-project/world-state/src/native/native_world_state.test.ts b/yarn-project/world-state/src/native/native_world_state.test.ts
index 91044fdef56e..ff2767e5971b 100644
--- a/yarn-project/world-state/src/native/native_world_state.test.ts
+++ b/yarn-project/world-state/src/native/native_world_state.test.ts
@@ -2,9 +2,9 @@ import { type L2Block, MerkleTreeId } from '@aztec/circuit-types';
 import {
   ARCHIVE_HEIGHT,
   AppendOnlyTreeSnapshot,
+  BlockHeader,
   EthAddress,
   Fr,
-  Header,
   L1_TO_L2_MSG_TREE_HEIGHT,
   MAX_L2_TO_L1_MSGS_PER_TX,
   MAX_NOTE_HASHES_PER_TX,
@@ -188,7 +188,7 @@ describe('NativeWorldState', () => {
       const stateReference = await fork.getStateReference();
       const archiveInfo = await fork.getTreeInfo(MerkleTreeId.ARCHIVE);
 
-      const header = new Header(
+      const header = new BlockHeader(
         new AppendOnlyTreeSnapshot(new Fr(archiveInfo.root), Number(archiveInfo.size)),
         makeContentCommitment(),
         stateReference,
@@ -218,7 +218,7 @@
       const fork = await ws.fork(3);
       const stateReference = await fork.getStateReference();
       const archiveInfo = await fork.getTreeInfo(MerkleTreeId.ARCHIVE);
-      const header = new Header(
+      const header = new BlockHeader(
         new AppendOnlyTreeSnapshot(new Fr(archiveInfo.root), Number(archiveInfo.size)),
         makeContentCommitment(),
         stateReference,
diff --git a/yarn-project/world-state/src/native/native_world_state.ts b/yarn-project/world-state/src/native/native_world_state.ts
index 9e0b175ac6c5..6ba046827694 100644
--- a/yarn-project/world-state/src/native/native_world_state.ts
+++ b/yarn-project/world-state/src/native/native_world_state.ts
@@ -7,9 +7,9 @@ import {
   TxEffect,
 } from '@aztec/circuit-types';
 import {
+  BlockHeader,
   EthAddress,
   Fr,
-  Header,
   MAX_NOTE_HASHES_PER_TX,
   MAX_NULLIFIERS_PER_TX,
   NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP,
@@ -52,7 +52,7 @@ export const WORLD_STATE_VERSION_FILE = 'version';
 export const WORLD_STATE_DB_VERSION = 1; // The initial version
 
 export class NativeWorldStateService implements MerkleTreeDatabase {
-  protected initialHeader: Header | undefined;
+  protected initialHeader: BlockHeader | undefined;
 
   // This is read heavily and only changes when data is persisted, so we cache it
   private cachedStatusSummary: WorldStateStatusSummary | undefined;
@@ -156,7 +156,7 @@
     return new MerkleTreesForkFacade(this.instance, this.initialHeader!, worldStateRevision(true, resp.forkId, 0));
   }
 
-  public getInitialHeader(): Header {
+  public getInitialHeader(): BlockHeader {
     return this.initialHeader!;
   }
@@ -208,9 +208,9 @@
     await this.cleanup();
   }
 
-  private async buildInitialHeader(): Promise<Header> {
+  private async buildInitialHeader(): Promise<BlockHeader> {
     const state = await this.getInitialStateReference();
-    return Header.empty({ state });
+    return BlockHeader.empty({ state });
   }
 
   private sanitiseAndCacheSummaryFromFull(response: WorldStateStatusFull) {
diff --git a/yarn-project/world-state/src/native/native_world_state_instance.ts b/yarn-project/world-state/src/native/native_world_state_instance.ts
index a1fa6baed48f..f2af45b2acb8 100644
--- a/yarn-project/world-state/src/native/native_world_state_instance.ts
+++ b/yarn-project/world-state/src/native/native_world_state_instance.ts
@@ -10,7 +10,7 @@ import {
   NULLIFIER_TREE_HEIGHT,
   PUBLIC_DATA_TREE_HEIGHT,
 } from '@aztec/circuits.js';
-import { createDebugLogger, fmtLogData } from '@aztec/foundation/log';
+import { createDebugLogger } from '@aztec/foundation/log';
 import { SerialQueue } from '@aztec/foundation/queue';
 import { Timer } from '@aztec/foundation/timer';
@@ -203,9 +203,9 @@
       data['publicDataWritesCount'] = body.publicDataWrites.length;
     }
 
-    this.log.debug(`Calling messageId=${messageId} ${WorldStateMessageType[messageType]} with ${fmtLogData(data)}`);
+    this.log.trace(`Calling messageId=${messageId} ${WorldStateMessageType[messageType]}`, data);
   } else {
-    this.log.debug(`Calling messageId=${messageId} ${WorldStateMessageType[messageType]}`);
+    this.log.trace(`Calling messageId=${messageId} ${WorldStateMessageType[messageType]}`);
   }
 
   const timer = new Timer();
@@ -248,14 +248,12 @@
     const response = TypedMessage.fromMessagePack(decodedResponse);
     const decodingDuration = timer.ms() - callDuration;
     const totalDuration = timer.ms();
-    this.log.debug(
-      `Call messageId=${messageId} ${WorldStateMessageType[messageType]} took (ms) ${fmtLogData({
-        totalDuration,
-        encodingDuration,
-        callDuration,
-        decodingDuration,
-      })}`,
-    );
+    this.log.trace(`Call messageId=${messageId} ${WorldStateMessageType[messageType]} took (ms)`, {
+      totalDuration,
+      encodingDuration,
+      callDuration,
+      decodingDuration,
+    });
 
     if (response.header.requestId !== request.header.messageId) {
       throw new Error(
diff --git a/yarn-project/world-state/src/synchronizer/factory.ts b/yarn-project/world-state/src/synchronizer/factory.ts
index 10f174e2d9a1..92d863e4d11f 100644
--- a/yarn-project/world-state/src/synchronizer/factory.ts
+++ b/yarn-project/world-state/src/synchronizer/factory.ts
@@ -1,7 +1,7 @@
 import { type L1ToL2MessageSource, type L2BlockSource } from '@aztec/circuit-types';
 import { createDebugLogger } from '@aztec/foundation/log';
 import { type DataStoreConfig } from '@aztec/kv-store/config';
-import { createStore } from '@aztec/kv-store/utils';
+import { createStore } from '@aztec/kv-store/lmdb';
 import { type TelemetryClient } from '@aztec/telemetry-client';
 import { NoopTelemetryClient } from '@aztec/telemetry-client/noop';
diff --git a/yarn-project/world-state/src/world-state-db/merkle_tree_operations_facade.ts b/yarn-project/world-state/src/world-state-db/merkle_tree_operations_facade.ts
index a1b93999290c..69f01189aef9 100644
--- a/yarn-project/world-state/src/world-state-db/merkle_tree_operations_facade.ts
+++ b/yarn-project/world-state/src/world-state-db/merkle_tree_operations_facade.ts
@@ -6,7 +6,7 @@ import {
   type SequentialInsertionResult,
   type TreeInfo,
 } from '@aztec/circuit-types/interfaces';
-import { type Header, type StateReference } from '@aztec/circuits.js';
+import { type BlockHeader, type StateReference } from '@aztec/circuits.js';
 import { type IndexedTreeLeafPreimage } from '@aztec/foundation/trees';
 
 import { type MerkleTrees } from './merkle_trees.js';
@@ -39,7 +39,7 @@ export class MerkleTreeReadOperationsFacade implements MerkleTreeWriteOperations
    * Returns the initial header for the chain before the first block.
    * @returns The initial header.
    */
-  getInitialHeader(): Header {
+  getInitialHeader(): BlockHeader {
     return this.trees.getInitialHeader();
   }
@@ -149,7 +149,7 @@
    * This includes all of the current roots of all of the data trees and the current block's global vars.
    * @param header - The header to insert into the archive.
    */
-  public updateArchive(header: Header): Promise<void> {
+  public updateArchive(header: BlockHeader): Promise<void> {
     return this.trees.updateArchive(header);
   }
diff --git a/yarn-project/world-state/src/world-state-db/merkle_tree_snapshot_operations_facade.ts b/yarn-project/world-state/src/world-state-db/merkle_tree_snapshot_operations_facade.ts
index 7f4e4bc9d6ac..5e703e9c3131 100644
--- a/yarn-project/world-state/src/world-state-db/merkle_tree_snapshot_operations_facade.ts
+++ b/yarn-project/world-state/src/world-state-db/merkle_tree_snapshot_operations_facade.ts
@@ -5,7 +5,13 @@ import {
   type MerkleTreeReadOperations,
   type TreeInfo,
 } from '@aztec/circuit-types/interfaces';
-import { AppendOnlyTreeSnapshot, Fr, type Header, PartialStateReference, StateReference } from '@aztec/circuits.js';
+import {
+  AppendOnlyTreeSnapshot,
+  type BlockHeader,
+  Fr,
+  PartialStateReference,
+  StateReference,
+} from '@aztec/circuits.js';
 import { type IndexedTreeLeafPreimage } from '@aztec/foundation/trees';
 import { type IndexedTreeSnapshot } from '@aztec/merkle-tree';
@@ -136,7 +142,7 @@
     );
   }
 
-  getInitialHeader(): Header {
+  getInitialHeader(): BlockHeader {
     throw new Error('Getting initial header not supported on snapshot.');
   }
 }
diff --git a/yarn-project/world-state/src/world-state-db/merkle_trees.ts b/yarn-project/world-state/src/world-state-db/merkle_trees.ts
index 2842eebdae1d..16640e78bdfc 100644
--- a/yarn-project/world-state/src/world-state-db/merkle_trees.ts
+++ b/yarn-project/world-state/src/world-state-db/merkle_trees.ts
@@ -10,8 +10,8 @@ import {
 import {
   ARCHIVE_HEIGHT,
   AppendOnlyTreeSnapshot,
+  BlockHeader,
   Fr,
-  Header,
   L1_TO_L2_MSG_TREE_HEIGHT,
   MAX_NOTE_HASHES_PER_TX,
   MAX_NULLIFIERS_PER_TX,
@@ -36,7 +36,7 @@ import { SerialQueue } from '@aztec/foundation/queue';
 import { Timer, elapsed } from '@aztec/foundation/timer';
 import { type IndexedTreeLeafPreimage } from '@aztec/foundation/trees';
 import { type AztecKVStore, type AztecSingleton } from '@aztec/kv-store';
-import { openTmpStore } from '@aztec/kv-store/utils';
+import { openTmpStore } from '@aztec/kv-store/lmdb';
 import {
   type AppendOnlyTree,
   type IndexedTree,
@@ -249,8 +249,8 @@
     await this.store.delete();
   }
 
-  public getInitialHeader(): Header {
-    return Header.empty({ state: this.#loadInitialStateReference() });
+  public getInitialHeader(): BlockHeader {
+    return BlockHeader.empty({ state: this.#loadInitialStateReference() });
   }
 
   /**
@@ -285,7 +285,7 @@
    * @param header - The header whose hash to insert into the archive.
    * @param includeUncommitted - Indicates whether to include uncommitted data.
*/ - public async updateArchive(header: Header) { + public async updateArchive(header: BlockHeader) { await this.synchronize(() => this.#updateArchive(header)); } @@ -519,7 +519,7 @@ export class MerkleTrees implements MerkleTreeAdminDatabase { return StateReference.fromBuffer(serialized); } - async #updateArchive(header: Header) { + async #updateArchive(header: BlockHeader) { const state = await this.getStateReference(true); // This method should be called only when the block builder already updated the state so we sanity check that it's diff --git a/yarn-project/yarn.lock b/yarn-project/yarn.lock index c53310ab30d3..df1012b94986 100644 --- a/yarn-project/yarn.lock +++ b/yarn-project/yarn.lock @@ -260,7 +260,6 @@ __metadata: "@aztec/txe": "workspace:^" "@aztec/types": "workspace:^" "@jest/globals": ^29.5.0 - "@opentelemetry/winston-transport": ^0.7.0 "@types/chalk": ^2.2.0 "@types/jest": ^29.5.0 "@types/koa": ^2.13.6 @@ -273,8 +272,6 @@ __metadata: ts-node: ^10.9.1 typescript: ^5.0.4 viem: ^2.7.15 - winston: ^3.10.0 - winston-daily-rotate-file: ^4.7.1 bin: aztec: ./dest/bin/index.js languageName: unknown @@ -376,7 +373,7 @@ __metadata: languageName: unknown linkType: soft -"@aztec/circuit-types@workspace:^, @aztec/circuit-types@workspace:circuit-types": +"@aztec/circuit-types@workspace:*, @aztec/circuit-types@workspace:^, @aztec/circuit-types@workspace:circuit-types": version: 0.0.0-use.local resolution: "@aztec/circuit-types@workspace:circuit-types" dependencies: @@ -440,6 +437,7 @@ __metadata: "@aztec/foundation": "workspace:^" "@aztec/kv-store": "workspace:^" "@aztec/noir-contracts.js": "workspace:^" + "@aztec/pxe": "workspace:^" "@jest/globals": ^29.5.0 "@types/jest": ^29.5.0 "@types/node": ^18.7.23 @@ -548,6 +546,7 @@ __metadata: "@aztec/world-state": "workspace:^" "@jest/globals": ^29.5.0 "@noble/curves": ^1.0.0 + "@sinonjs/fake-timers": ^13.0.5 "@swc/core": ^1.4.11 "@swc/jest": ^0.2.36 "@types/fs-extra": ^11.0.2 @@ -592,7 +591,6 @@ __metadata: viem: ^2.7.15 webpack: ^5.88.2 webpack-cli: ^5.1.4 - winston: ^3.10.0 zod: ^3.23.8 languageName: unknown linkType: soft @@ -615,7 +613,31 @@ __metadata: languageName: unknown linkType: soft -"@aztec/ethereum@workspace:^, @aztec/ethereum@workspace:ethereum": +"@aztec/epoch-cache@workspace:^, @aztec/epoch-cache@workspace:epoch-cache": + version: 0.0.0-use.local + resolution: "@aztec/epoch-cache@workspace:epoch-cache" + dependencies: + "@aztec/circuit-types": "workspace:*" + "@aztec/ethereum": "workspace:*" + "@aztec/foundation": "workspace:^" + "@aztec/l1-artifacts": "workspace:^" + "@jest/globals": ^29.5.0 + "@types/jest": ^29.5.0 + "@types/node": ^18.14.6 + "@viem/anvil": ^0.0.10 + dotenv: ^16.0.3 + get-port: ^7.1.0 + jest: ^29.5.0 + jest-mock-extended: ^3.0.7 + ts-node: ^10.9.1 + tslib: ^2.4.0 + typescript: ^5.0.4 + viem: ^2.7.15 + zod: ^3.23.8 + languageName: unknown + linkType: soft + +"@aztec/ethereum@workspace:*, @aztec/ethereum@workspace:^, @aztec/ethereum@workspace:ethereum": version: 0.0.0-use.local resolution: "@aztec/ethereum@workspace:ethereum" dependencies: @@ -666,6 +688,7 @@ __metadata: "@typescript-eslint/eslint-plugin": ^6.2.1 "@typescript-eslint/parser": ^6.2.1 bn.js: ^5.2.1 + colorette: ^2.0.20 comlink: ^4.4.1 debug: ^4.3.4 detect-node: ^2.1.0 @@ -687,6 +710,8 @@ __metadata: lodash.clonedeepwith: ^4.5.0 memdown: ^6.1.1 pako: ^2.1.0 + pino: ^9.5.0 + pino-pretty: ^13.0.0 prettier: ^2.7.1 sha3: ^2.1.4 supertest: ^6.3.3 @@ -771,11 +796,22 @@ __metadata: "@aztec/ethereum": "workspace:^" "@aztec/foundation": "workspace:^" 
"@jest/globals": ^29.5.0 + "@types/chai": ^5.0.1 + "@types/chai-as-promised": ^8.0.1 "@types/jest": ^29.5.0 + "@types/mocha": ^10.0.10 + "@types/mocha-each": ^2.0.4 "@types/node": ^18.7.23 + "@web/dev-server-esbuild": ^1.0.3 + "@web/test-runner": ^0.19.0 + "@web/test-runner-playwright": ^0.11.0 + chai: ^5.1.2 + chai-as-promised: ^8.0.1 + idb: ^8.0.0 jest: ^29.5.0 - jest-mock-extended: ^3.0.3 lmdb: ^3.0.6 + mocha: ^10.8.2 + mocha-each: ^2.0.1 ts-node: ^10.9.1 typescript: ^5.0.4 languageName: unknown @@ -919,11 +955,13 @@ __metadata: libp2p: 1.5.0 semver: ^7.6.0 sha3: ^2.1.4 + snappy: ^7.2.2 ts-node: ^10.9.1 tslib: ^2.4.0 typescript: ^5.0.4 uint8arrays: ^5.0.3 viem: ^2.7.15 + xxhash-wasm: ^1.1.0 languageName: unknown linkType: soft @@ -1198,25 +1236,24 @@ __metadata: "@aztec/foundation": "workspace:^" "@jest/globals": ^29.5.0 "@opentelemetry/api": ^1.9.0 - "@opentelemetry/api-logs": ^0.54.0 - "@opentelemetry/exporter-logs-otlp-http": ^0.54.0 - "@opentelemetry/exporter-metrics-otlp-http": ^0.52.0 - "@opentelemetry/exporter-trace-otlp-http": ^0.54.0 - "@opentelemetry/host-metrics": ^0.35.2 - "@opentelemetry/otlp-exporter-base": ^0.54.0 - "@opentelemetry/resource-detector-aws": ^1.5.2 - "@opentelemetry/resources": ^1.25.0 - "@opentelemetry/sdk-logs": ^0.54.0 - "@opentelemetry/sdk-metrics": ^1.25.0 - "@opentelemetry/sdk-trace-node": ^1.25.0 - "@opentelemetry/semantic-conventions": ^1.25.0 - "@opentelemetry/winston-transport": ^0.7.0 + "@opentelemetry/api-logs": ^0.55.0 + "@opentelemetry/core": ^1.28.0 + "@opentelemetry/exporter-logs-otlp-http": ^0.55.0 + "@opentelemetry/exporter-metrics-otlp-http": ^0.55.0 + "@opentelemetry/exporter-trace-otlp-http": ^0.55.0 + "@opentelemetry/host-metrics": ^0.35.4 + "@opentelemetry/otlp-exporter-base": ^0.55.0 + "@opentelemetry/resource-detector-aws": ^1.8.0 + "@opentelemetry/resources": ^1.28.0 + "@opentelemetry/sdk-logs": ^0.55.0 + "@opentelemetry/sdk-metrics": ^1.28.0 + "@opentelemetry/sdk-trace-node": ^1.28.0 + "@opentelemetry/semantic-conventions": ^1.28.0 "@types/jest": ^29.5.0 jest: ^29.5.0 prom-client: ^15.1.3 ts-node: ^10.9.1 typescript: ^5.0.4 - winston: ^3.10.0 languageName: unknown linkType: soft @@ -1283,6 +1320,7 @@ __metadata: dependencies: "@aztec/circuit-types": "workspace:^" "@aztec/circuits.js": "workspace:^" + "@aztec/epoch-cache": "workspace:^" "@aztec/ethereum": "workspace:^" "@aztec/foundation": "workspace:^" "@aztec/p2p": "workspace:^" @@ -1344,6 +1382,17 @@ __metadata: languageName: node linkType: hard +"@babel/code-frame@npm:^7.12.11": + version: 7.26.2 + resolution: "@babel/code-frame@npm:7.26.2" + dependencies: + "@babel/helper-validator-identifier": ^7.25.9 + js-tokens: ^4.0.0 + picocolors: ^1.0.0 + checksum: db13f5c42d54b76c1480916485e6900748bbcb0014a8aca87f50a091f70ff4e0d0a6db63cade75eb41fcc3d2b6ba0a7f89e343def4f96f00269b41b8ab8dd7b8 + languageName: node + linkType: hard + "@babel/compat-data@npm:^7.23.5": version: 7.24.4 resolution: "@babel/compat-data@npm:7.24.4" @@ -1499,6 +1548,13 @@ __metadata: languageName: node linkType: hard +"@babel/helper-validator-identifier@npm:^7.25.9": + version: 7.25.9 + resolution: "@babel/helper-validator-identifier@npm:7.25.9" + checksum: 5b85918cb1a92a7f3f508ea02699e8d2422fe17ea8e82acd445006c0ef7520fbf48e3dbcdaf7b0a1d571fc3a2715a29719e5226636cb6042e15fe6ed2a590944 + languageName: node + linkType: hard + "@babel/helper-validator-option@npm:^7.23.5": version: 7.23.5 resolution: "@babel/helper-validator-option@npm:7.23.5" @@ -1903,13 +1959,6 @@ __metadata: languageName: node linkType: hard 
-"@colors/colors@npm:1.6.0, @colors/colors@npm:^1.6.0": - version: 1.6.0 - resolution: "@colors/colors@npm:1.6.0" - checksum: aa209963e0c3218e80a4a20553ba8c0fbb6fa13140540b4e5f97923790be06801fc90172c1114fc8b7e888b3d012b67298cde6b9e81521361becfaee400c662f - languageName: node - linkType: hard - "@cspotcode/source-map-support@npm:^0.8.0": version: 0.8.1 resolution: "@cspotcode/source-map-support@npm:0.8.1" @@ -1919,17 +1968,6 @@ __metadata: languageName: node linkType: hard -"@dabh/diagnostics@npm:^2.0.2": - version: 2.0.3 - resolution: "@dabh/diagnostics@npm:2.0.3" - dependencies: - colorspace: 1.1.x - enabled: 2.0.x - kuler: ^2.0.0 - checksum: 4879600c55c8315a0fb85fbb19057bad1adc08f0a080a8cb4e2b63f723c379bfc4283b68123a2b078d367b327dd8df12fcb27464efe791addc0a48b9df6d79a1 - languageName: node - linkType: hard - "@dependents/detective-less@npm:^3.0.1": version: 3.0.2 resolution: "@dependents/detective-less@npm:3.0.2" @@ -1958,6 +1996,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/aix-ppc64@npm:0.24.0": + version: 0.24.0 + resolution: "@esbuild/aix-ppc64@npm:0.24.0" + conditions: os=aix & cpu=ppc64 + languageName: node + linkType: hard + "@esbuild/android-arm64@npm:0.18.20": version: 0.18.20 resolution: "@esbuild/android-arm64@npm:0.18.20" @@ -1965,6 +2010,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/android-arm64@npm:0.24.0": + version: 0.24.0 + resolution: "@esbuild/android-arm64@npm:0.24.0" + conditions: os=android & cpu=arm64 + languageName: node + linkType: hard + "@esbuild/android-arm@npm:0.18.20": version: 0.18.20 resolution: "@esbuild/android-arm@npm:0.18.20" @@ -1972,6 +2024,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/android-arm@npm:0.24.0": + version: 0.24.0 + resolution: "@esbuild/android-arm@npm:0.24.0" + conditions: os=android & cpu=arm + languageName: node + linkType: hard + "@esbuild/android-x64@npm:0.18.20": version: 0.18.20 resolution: "@esbuild/android-x64@npm:0.18.20" @@ -1979,6 +2038,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/android-x64@npm:0.24.0": + version: 0.24.0 + resolution: "@esbuild/android-x64@npm:0.24.0" + conditions: os=android & cpu=x64 + languageName: node + linkType: hard + "@esbuild/darwin-arm64@npm:0.18.20": version: 0.18.20 resolution: "@esbuild/darwin-arm64@npm:0.18.20" @@ -1986,6 +2052,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/darwin-arm64@npm:0.24.0": + version: 0.24.0 + resolution: "@esbuild/darwin-arm64@npm:0.24.0" + conditions: os=darwin & cpu=arm64 + languageName: node + linkType: hard + "@esbuild/darwin-x64@npm:0.18.20": version: 0.18.20 resolution: "@esbuild/darwin-x64@npm:0.18.20" @@ -1993,6 +2066,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/darwin-x64@npm:0.24.0": + version: 0.24.0 + resolution: "@esbuild/darwin-x64@npm:0.24.0" + conditions: os=darwin & cpu=x64 + languageName: node + linkType: hard + "@esbuild/freebsd-arm64@npm:0.18.20": version: 0.18.20 resolution: "@esbuild/freebsd-arm64@npm:0.18.20" @@ -2000,6 +2080,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/freebsd-arm64@npm:0.24.0": + version: 0.24.0 + resolution: "@esbuild/freebsd-arm64@npm:0.24.0" + conditions: os=freebsd & cpu=arm64 + languageName: node + linkType: hard + "@esbuild/freebsd-x64@npm:0.18.20": version: 0.18.20 resolution: "@esbuild/freebsd-x64@npm:0.18.20" @@ -2007,6 +2094,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/freebsd-x64@npm:0.24.0": + version: 0.24.0 + resolution: "@esbuild/freebsd-x64@npm:0.24.0" + conditions: 
os=freebsd & cpu=x64 + languageName: node + linkType: hard + "@esbuild/linux-arm64@npm:0.18.20": version: 0.18.20 resolution: "@esbuild/linux-arm64@npm:0.18.20" @@ -2014,6 +2108,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/linux-arm64@npm:0.24.0": + version: 0.24.0 + resolution: "@esbuild/linux-arm64@npm:0.24.0" + conditions: os=linux & cpu=arm64 + languageName: node + linkType: hard + "@esbuild/linux-arm@npm:0.18.20": version: 0.18.20 resolution: "@esbuild/linux-arm@npm:0.18.20" @@ -2021,6 +2122,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/linux-arm@npm:0.24.0": + version: 0.24.0 + resolution: "@esbuild/linux-arm@npm:0.24.0" + conditions: os=linux & cpu=arm + languageName: node + linkType: hard + "@esbuild/linux-ia32@npm:0.18.20": version: 0.18.20 resolution: "@esbuild/linux-ia32@npm:0.18.20" @@ -2028,6 +2136,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/linux-ia32@npm:0.24.0": + version: 0.24.0 + resolution: "@esbuild/linux-ia32@npm:0.24.0" + conditions: os=linux & cpu=ia32 + languageName: node + linkType: hard + "@esbuild/linux-loong64@npm:0.18.20": version: 0.18.20 resolution: "@esbuild/linux-loong64@npm:0.18.20" @@ -2035,6 +2150,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/linux-loong64@npm:0.24.0": + version: 0.24.0 + resolution: "@esbuild/linux-loong64@npm:0.24.0" + conditions: os=linux & cpu=loong64 + languageName: node + linkType: hard + "@esbuild/linux-mips64el@npm:0.18.20": version: 0.18.20 resolution: "@esbuild/linux-mips64el@npm:0.18.20" @@ -2042,6 +2164,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/linux-mips64el@npm:0.24.0": + version: 0.24.0 + resolution: "@esbuild/linux-mips64el@npm:0.24.0" + conditions: os=linux & cpu=mips64el + languageName: node + linkType: hard + "@esbuild/linux-ppc64@npm:0.18.20": version: 0.18.20 resolution: "@esbuild/linux-ppc64@npm:0.18.20" @@ -2049,6 +2178,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/linux-ppc64@npm:0.24.0": + version: 0.24.0 + resolution: "@esbuild/linux-ppc64@npm:0.24.0" + conditions: os=linux & cpu=ppc64 + languageName: node + linkType: hard + "@esbuild/linux-riscv64@npm:0.18.20": version: 0.18.20 resolution: "@esbuild/linux-riscv64@npm:0.18.20" @@ -2056,6 +2192,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/linux-riscv64@npm:0.24.0": + version: 0.24.0 + resolution: "@esbuild/linux-riscv64@npm:0.24.0" + conditions: os=linux & cpu=riscv64 + languageName: node + linkType: hard + "@esbuild/linux-s390x@npm:0.18.20": version: 0.18.20 resolution: "@esbuild/linux-s390x@npm:0.18.20" @@ -2063,6 +2206,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/linux-s390x@npm:0.24.0": + version: 0.24.0 + resolution: "@esbuild/linux-s390x@npm:0.24.0" + conditions: os=linux & cpu=s390x + languageName: node + linkType: hard + "@esbuild/linux-x64@npm:0.18.20": version: 0.18.20 resolution: "@esbuild/linux-x64@npm:0.18.20" @@ -2070,6 +2220,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/linux-x64@npm:0.24.0": + version: 0.24.0 + resolution: "@esbuild/linux-x64@npm:0.24.0" + conditions: os=linux & cpu=x64 + languageName: node + linkType: hard + "@esbuild/netbsd-x64@npm:0.18.20": version: 0.18.20 resolution: "@esbuild/netbsd-x64@npm:0.18.20" @@ -2077,6 +2234,20 @@ __metadata: languageName: node linkType: hard +"@esbuild/netbsd-x64@npm:0.24.0": + version: 0.24.0 + resolution: "@esbuild/netbsd-x64@npm:0.24.0" + conditions: os=netbsd & cpu=x64 + languageName: node + linkType: hard + 
+"@esbuild/openbsd-arm64@npm:0.24.0": + version: 0.24.0 + resolution: "@esbuild/openbsd-arm64@npm:0.24.0" + conditions: os=openbsd & cpu=arm64 + languageName: node + linkType: hard + "@esbuild/openbsd-x64@npm:0.18.20": version: 0.18.20 resolution: "@esbuild/openbsd-x64@npm:0.18.20" @@ -2084,6 +2255,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/openbsd-x64@npm:0.24.0": + version: 0.24.0 + resolution: "@esbuild/openbsd-x64@npm:0.24.0" + conditions: os=openbsd & cpu=x64 + languageName: node + linkType: hard + "@esbuild/sunos-x64@npm:0.18.20": version: 0.18.20 resolution: "@esbuild/sunos-x64@npm:0.18.20" @@ -2091,6 +2269,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/sunos-x64@npm:0.24.0": + version: 0.24.0 + resolution: "@esbuild/sunos-x64@npm:0.24.0" + conditions: os=sunos & cpu=x64 + languageName: node + linkType: hard + "@esbuild/win32-arm64@npm:0.18.20": version: 0.18.20 resolution: "@esbuild/win32-arm64@npm:0.18.20" @@ -2098,6 +2283,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/win32-arm64@npm:0.24.0": + version: 0.24.0 + resolution: "@esbuild/win32-arm64@npm:0.24.0" + conditions: os=win32 & cpu=arm64 + languageName: node + linkType: hard + "@esbuild/win32-ia32@npm:0.18.20": version: 0.18.20 resolution: "@esbuild/win32-ia32@npm:0.18.20" @@ -2105,6 +2297,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/win32-ia32@npm:0.24.0": + version: 0.24.0 + resolution: "@esbuild/win32-ia32@npm:0.24.0" + conditions: os=win32 & cpu=ia32 + languageName: node + linkType: hard + "@esbuild/win32-x64@npm:0.18.20": version: 0.18.20 resolution: "@esbuild/win32-x64@npm:0.18.20" @@ -2112,6 +2311,13 @@ __metadata: languageName: node linkType: hard +"@esbuild/win32-x64@npm:0.24.0": + version: 0.24.0 + resolution: "@esbuild/win32-x64@npm:0.24.0" + conditions: os=win32 & cpu=x64 + languageName: node + linkType: hard + "@eslint-community/eslint-utils@npm:^4.2.0, @eslint-community/eslint-utils@npm:^4.4.0": version: 4.4.0 resolution: "@eslint-community/eslint-utils@npm:4.4.0" @@ -2154,6 +2360,13 @@ __metadata: languageName: node linkType: hard +"@hapi/bourne@npm:^3.0.0": + version: 3.0.0 + resolution: "@hapi/bourne@npm:3.0.0" + checksum: 7174cab6c33191918fcdb1953fe3169a1106e6ac79a67ef5fd08b351f0813f8f608170f2239786cbe5519e03cdfe5ab748ea1635caa06dcd5802410295514ef8 + languageName: node + linkType: hard + "@humanwhocodes/config-array@npm:^0.11.14": version: 0.11.14 resolution: "@humanwhocodes/config-array@npm:0.11.14" @@ -3227,6 +3440,13 @@ __metadata: languageName: node linkType: hard +"@mdn/browser-compat-data@npm:^4.0.0": + version: 4.2.1 + resolution: "@mdn/browser-compat-data@npm:4.2.1" + checksum: 76eaa7dafed154040e769ba6d23f2dcb58e805ed3ccb376a5c4b76326c92643753c20194faed363870800dc3c1af26c107b8562710c8bb37aaee8c5ffe2a89cd + languageName: node + linkType: hard + "@microsoft/tsdoc-config@npm:0.16.2": version: 0.16.2 resolution: "@microsoft/tsdoc-config@npm:0.16.2" @@ -3401,6 +3621,97 @@ __metadata: languageName: node linkType: hard +"@napi-rs/snappy-android-arm-eabi@npm:7.2.2": + version: 7.2.2 + resolution: "@napi-rs/snappy-android-arm-eabi@npm:7.2.2" + conditions: os=android & cpu=arm + languageName: node + linkType: hard + +"@napi-rs/snappy-android-arm64@npm:7.2.2": + version: 7.2.2 + resolution: "@napi-rs/snappy-android-arm64@npm:7.2.2" + conditions: os=android & cpu=arm64 + languageName: node + linkType: hard + +"@napi-rs/snappy-darwin-arm64@npm:7.2.2": + version: 7.2.2 + resolution: "@napi-rs/snappy-darwin-arm64@npm:7.2.2" + conditions: 
os=darwin & cpu=arm64 + languageName: node + linkType: hard + +"@napi-rs/snappy-darwin-x64@npm:7.2.2": + version: 7.2.2 + resolution: "@napi-rs/snappy-darwin-x64@npm:7.2.2" + conditions: os=darwin & cpu=x64 + languageName: node + linkType: hard + +"@napi-rs/snappy-freebsd-x64@npm:7.2.2": + version: 7.2.2 + resolution: "@napi-rs/snappy-freebsd-x64@npm:7.2.2" + conditions: os=freebsd & cpu=x64 + languageName: node + linkType: hard + +"@napi-rs/snappy-linux-arm-gnueabihf@npm:7.2.2": + version: 7.2.2 + resolution: "@napi-rs/snappy-linux-arm-gnueabihf@npm:7.2.2" + conditions: os=linux & cpu=arm + languageName: node + linkType: hard + +"@napi-rs/snappy-linux-arm64-gnu@npm:7.2.2": + version: 7.2.2 + resolution: "@napi-rs/snappy-linux-arm64-gnu@npm:7.2.2" + conditions: os=linux & cpu=arm64 + languageName: node + linkType: hard + +"@napi-rs/snappy-linux-arm64-musl@npm:7.2.2": + version: 7.2.2 + resolution: "@napi-rs/snappy-linux-arm64-musl@npm:7.2.2" + conditions: os=linux & cpu=arm64 + languageName: node + linkType: hard + +"@napi-rs/snappy-linux-x64-gnu@npm:7.2.2": + version: 7.2.2 + resolution: "@napi-rs/snappy-linux-x64-gnu@npm:7.2.2" + conditions: os=linux & cpu=x64 + languageName: node + linkType: hard + +"@napi-rs/snappy-linux-x64-musl@npm:7.2.2": + version: 7.2.2 + resolution: "@napi-rs/snappy-linux-x64-musl@npm:7.2.2" + conditions: os=linux & cpu=x64 + languageName: node + linkType: hard + +"@napi-rs/snappy-win32-arm64-msvc@npm:7.2.2": + version: 7.2.2 + resolution: "@napi-rs/snappy-win32-arm64-msvc@npm:7.2.2" + conditions: os=win32 & cpu=arm64 + languageName: node + linkType: hard + +"@napi-rs/snappy-win32-ia32-msvc@npm:7.2.2": + version: 7.2.2 + resolution: "@napi-rs/snappy-win32-ia32-msvc@npm:7.2.2" + conditions: os=win32 & cpu=ia32 + languageName: node + linkType: hard + +"@napi-rs/snappy-win32-x64-msvc@npm:7.2.2": + version: 7.2.2 + resolution: "@napi-rs/snappy-win32-x64-msvc@npm:7.2.2" + conditions: os=win32 & cpu=x64 + languageName: node + linkType: hard + "@noble/ciphers@npm:^0.4.0": version: 0.4.1 resolution: "@noble/ciphers@npm:0.4.1" @@ -3548,37 +3859,28 @@ __metadata: languageName: node linkType: hard -"@opentelemetry/api-logs@npm:0.52.0": - version: 0.52.0 - resolution: "@opentelemetry/api-logs@npm:0.52.0" - dependencies: - "@opentelemetry/api": ^1.0.0 - checksum: 502f60fd3a4b08fb7e54eaf22d0415e34dcbc9995696945eff8a4a12910e933149900cc470fb476b9411b4bbb98f8b598e3f4d4a37137698fcf0a7ea6ab240d6 - languageName: node - linkType: hard - -"@opentelemetry/api-logs@npm:0.54.0, @opentelemetry/api-logs@npm:^0.54.0": - version: 0.54.0 - resolution: "@opentelemetry/api-logs@npm:0.54.0" +"@opentelemetry/api-logs@npm:0.55.0, @opentelemetry/api-logs@npm:^0.55.0": + version: 0.55.0 + resolution: "@opentelemetry/api-logs@npm:0.55.0" dependencies: "@opentelemetry/api": ^1.3.0 - checksum: 5fc91054a290663844049cd9eb66419ea06d191b82220f2513147acdbd82579d1d3703a7e09f58a0014118d52b96d8b6340f9b43dd33a2c4469a31f13b3abc62 + checksum: 07833624711b4146ea4450b4ca714ada33e07a3c354feb4df08e4312a69d9fd200726deb910c8aaba17b13c52645252845ca9aa7113b78d277a806a28d0b2b90 languageName: node linkType: hard -"@opentelemetry/api@npm:^1.0.0, @opentelemetry/api@npm:^1.3.0, @opentelemetry/api@npm:^1.4.0, @opentelemetry/api@npm:^1.9.0": +"@opentelemetry/api@npm:^1.3.0, @opentelemetry/api@npm:^1.4.0, @opentelemetry/api@npm:^1.9.0": version: 1.9.0 resolution: "@opentelemetry/api@npm:1.9.0" checksum: 
9e88e59d53ced668f3daaecfd721071c5b85a67dd386f1c6f051d1be54375d850016c881f656ffbe9a03bedae85f7e89c2f2b635313f9c9b195ad033cdc31020 languageName: node linkType: hard -"@opentelemetry/context-async-hooks@npm:1.25.0": - version: 1.25.0 - resolution: "@opentelemetry/context-async-hooks@npm:1.25.0" +"@opentelemetry/context-async-hooks@npm:1.28.0": + version: 1.28.0 + resolution: "@opentelemetry/context-async-hooks@npm:1.28.0" peerDependencies: "@opentelemetry/api": ">=1.0.0 <1.10.0" - checksum: f50f6ef621b6cfaa1d0919e4470b7c8326371beaf6be9a635c6f3221677bf9f5429a81a29b5518a41d3c002e35d4a89cb748ae61f650d61aa2ae3cbe123c0301 + checksum: 23288e78e25bb8d3af216825f7108a0380044d3ca3d9d427e6a33c8dbea3c67617e5024371190a9f09e171f13c4b40afc9135a807e40e866d9b98227c6b95a89 languageName: node linkType: hard @@ -3593,179 +3895,150 @@ __metadata: languageName: node linkType: hard -"@opentelemetry/core@npm:1.25.1, @opentelemetry/core@npm:^1.0.0": - version: 1.25.1 - resolution: "@opentelemetry/core@npm:1.25.1" +"@opentelemetry/core@npm:1.28.0, @opentelemetry/core@npm:^1.28.0": + version: 1.28.0 + resolution: "@opentelemetry/core@npm:1.28.0" dependencies: - "@opentelemetry/semantic-conventions": 1.25.1 + "@opentelemetry/semantic-conventions": 1.27.0 peerDependencies: "@opentelemetry/api": ">=1.0.0 <1.10.0" - checksum: ba1672fde4a1cfd9b55bf6070db71b808702fe59c4a70cda52a6156b2c813827954a6b4d3c3641283d394ff75a69b6359a0487459b4d26cd7d714ab3d21bc780 + checksum: ed80e0640df8ba8387e6f16ed3242891a08491f93d18106bd02ef0e6e75ad111e5f312ccf412edf8479e5800a6f27101a40d5023bd8f8566213a50a5a83e76ee languageName: node linkType: hard -"@opentelemetry/core@npm:1.27.0": - version: 1.27.0 - resolution: "@opentelemetry/core@npm:1.27.0" +"@opentelemetry/core@npm:^1.0.0": + version: 1.25.1 + resolution: "@opentelemetry/core@npm:1.25.1" dependencies: - "@opentelemetry/semantic-conventions": 1.27.0 + "@opentelemetry/semantic-conventions": 1.25.1 peerDependencies: "@opentelemetry/api": ">=1.0.0 <1.10.0" - checksum: 33ff551f89f0bb95830c9f9464c43b11adf88882ec1d3a03a5b9afcc89d2aafab33c36cb5047f18667d7929d6ab40ed0121649c42d0105f1cb33ffdca48f8b13 + checksum: ba1672fde4a1cfd9b55bf6070db71b808702fe59c4a70cda52a6156b2c813827954a6b4d3c3641283d394ff75a69b6359a0487459b4d26cd7d714ab3d21bc780 languageName: node linkType: hard -"@opentelemetry/exporter-logs-otlp-http@npm:^0.54.0": - version: 0.54.0 - resolution: "@opentelemetry/exporter-logs-otlp-http@npm:0.54.0" +"@opentelemetry/exporter-logs-otlp-http@npm:^0.55.0": + version: 0.55.0 + resolution: "@opentelemetry/exporter-logs-otlp-http@npm:0.55.0" dependencies: - "@opentelemetry/api-logs": 0.54.0 - "@opentelemetry/core": 1.27.0 - "@opentelemetry/otlp-exporter-base": 0.54.0 - "@opentelemetry/otlp-transformer": 0.54.0 - "@opentelemetry/sdk-logs": 0.54.0 + "@opentelemetry/api-logs": 0.55.0 + "@opentelemetry/core": 1.28.0 + "@opentelemetry/otlp-exporter-base": 0.55.0 + "@opentelemetry/otlp-transformer": 0.55.0 + "@opentelemetry/sdk-logs": 0.55.0 peerDependencies: "@opentelemetry/api": ^1.3.0 - checksum: 407cde2dd930aa19c0c826147d15aba84f94a58f1afbf86cfa1c41576be4492b689e1e9c7971a92805b051851cd6fab063bf24f29160b14c2d3b2cf1fded2bec + checksum: f639babc0bc62407577c1e34367a42577d242a43fb82f4c1af1fbb596fa805c6ea1b273bbfd3a49527fecfd51c42b4d43df25c308f592d5fb7dee056920e6297 languageName: node linkType: hard -"@opentelemetry/exporter-metrics-otlp-http@npm:^0.52.0": - version: 0.52.0 - resolution: "@opentelemetry/exporter-metrics-otlp-http@npm:0.52.0" +"@opentelemetry/exporter-metrics-otlp-http@npm:^0.55.0": + 
version: 0.55.0 + resolution: "@opentelemetry/exporter-metrics-otlp-http@npm:0.55.0" dependencies: - "@opentelemetry/core": 1.25.0 - "@opentelemetry/otlp-exporter-base": 0.52.0 - "@opentelemetry/otlp-transformer": 0.52.0 - "@opentelemetry/resources": 1.25.0 - "@opentelemetry/sdk-metrics": 1.25.0 + "@opentelemetry/core": 1.28.0 + "@opentelemetry/otlp-exporter-base": 0.55.0 + "@opentelemetry/otlp-transformer": 0.55.0 + "@opentelemetry/resources": 1.28.0 + "@opentelemetry/sdk-metrics": 1.28.0 peerDependencies: "@opentelemetry/api": ^1.3.0 - checksum: 8438733189879e3162ab4a374d7f22a4f9655257cbcde156f1041954cbc86bfab7299e696df49187684f1c219a76b263e6489c411b7008b81a05d5b0e7dcd92d + checksum: 236713540b5a5d0c6921cf0756ced5ffdd5952a4ec5d15d11363dff8ff4a576857d39007e99da45097b69bd09ae62119b1fa49de65edc1f5fa22037582c43cfe languageName: node linkType: hard -"@opentelemetry/exporter-trace-otlp-http@npm:^0.54.0": - version: 0.54.0 - resolution: "@opentelemetry/exporter-trace-otlp-http@npm:0.54.0" +"@opentelemetry/exporter-trace-otlp-http@npm:^0.55.0": + version: 0.55.0 + resolution: "@opentelemetry/exporter-trace-otlp-http@npm:0.55.0" dependencies: - "@opentelemetry/core": 1.27.0 - "@opentelemetry/otlp-exporter-base": 0.54.0 - "@opentelemetry/otlp-transformer": 0.54.0 - "@opentelemetry/resources": 1.27.0 - "@opentelemetry/sdk-trace-base": 1.27.0 + "@opentelemetry/core": 1.28.0 + "@opentelemetry/otlp-exporter-base": 0.55.0 + "@opentelemetry/otlp-transformer": 0.55.0 + "@opentelemetry/resources": 1.28.0 + "@opentelemetry/sdk-trace-base": 1.28.0 peerDependencies: "@opentelemetry/api": ^1.3.0 - checksum: e53263c3ffcfe62d7d299efac9515a977d284aabc4c89a961cec60853095f24e439abae348c326c7bd88911a85d08dd57833a64769e20254d02df8ac73e9b277 + checksum: e842ea7b24b6db7e0f9adc3ace81f682e7634c3ca25721c63313fea7711cf77085e77111cb113c0b7dad098332aa88a7ae96d7420e371cb1ff1b3116908b750f languageName: node linkType: hard -"@opentelemetry/host-metrics@npm:^0.35.2": - version: 0.35.2 - resolution: "@opentelemetry/host-metrics@npm:0.35.2" +"@opentelemetry/host-metrics@npm:^0.35.4": + version: 0.35.4 + resolution: "@opentelemetry/host-metrics@npm:0.35.4" dependencies: "@opentelemetry/sdk-metrics": ^1.8.0 systeminformation: 5.22.9 peerDependencies: "@opentelemetry/api": ^1.3.0 - checksum: 541df2585f9cbf8b6606f6782a2d351383f7a5b0a92b92ad4011ac46adac513474463d0c2474d6902d9d6d3b633be67c60ea0716ea2de277cebc1cb2538fa7a4 + checksum: d0be6116f5ffb81937820f887721da6a1ae841816d3c98159b94adc6146cf1ad1558527b9ed033b58db5439049bfde076842ac0b8ed57e819664f5a03f9e6c73 languageName: node linkType: hard -"@opentelemetry/otlp-exporter-base@npm:0.52.0": - version: 0.52.0 - resolution: "@opentelemetry/otlp-exporter-base@npm:0.52.0" +"@opentelemetry/otlp-exporter-base@npm:0.55.0, @opentelemetry/otlp-exporter-base@npm:^0.55.0": + version: 0.55.0 + resolution: "@opentelemetry/otlp-exporter-base@npm:0.55.0" dependencies: - "@opentelemetry/core": 1.25.0 - "@opentelemetry/otlp-transformer": 0.52.0 - peerDependencies: - "@opentelemetry/api": ^1.0.0 - checksum: 5230ba86d274f4d05fa2820a21e8278d796a299299e2af96150085c871427fe5ef4c6fa4954cdc1b8cdd0a87d5d6677ca0e547cc51253968572a6ede51f63ea2 - languageName: node - linkType: hard - -"@opentelemetry/otlp-exporter-base@npm:0.54.0, @opentelemetry/otlp-exporter-base@npm:^0.54.0": - version: 0.54.0 - resolution: "@opentelemetry/otlp-exporter-base@npm:0.54.0" - dependencies: - "@opentelemetry/core": 1.27.0 - "@opentelemetry/otlp-transformer": 0.54.0 + "@opentelemetry/core": 1.28.0 + "@opentelemetry/otlp-transformer": 
0.55.0 peerDependencies: "@opentelemetry/api": ^1.3.0 - checksum: ded78325f22cd98314971216eb18d8f021a6cf7f3b1f69d08b0d257880deb2d409d598bfc3a6016b0557a1ec3b0c50527ba9acf09d4e3902f48d003f763441c0 + checksum: 1d4806bd7d36565c5165699b98f99cf00a7aaed23938bf039082550b5b2f973cfb056f71a6ec0317b5113647434d17dd48d9ab0b5fd4c7103beef97c8aa3bec6 languageName: node linkType: hard -"@opentelemetry/otlp-transformer@npm:0.52.0": - version: 0.52.0 - resolution: "@opentelemetry/otlp-transformer@npm:0.52.0" +"@opentelemetry/otlp-transformer@npm:0.55.0": + version: 0.55.0 + resolution: "@opentelemetry/otlp-transformer@npm:0.55.0" dependencies: - "@opentelemetry/api-logs": 0.52.0 - "@opentelemetry/core": 1.25.0 - "@opentelemetry/resources": 1.25.0 - "@opentelemetry/sdk-logs": 0.52.0 - "@opentelemetry/sdk-metrics": 1.25.0 - "@opentelemetry/sdk-trace-base": 1.25.0 - protobufjs: ^7.3.0 - peerDependencies: - "@opentelemetry/api": ">=1.3.0 <1.10.0" - checksum: 5f75f41a710e5e536faecdec7b1687352e450d185d12613bbcbb206570d96ca2833db15e1d7945cb27040a04c017135b07df2f607ccf9ca9a061f86ad87e8c35 - languageName: node - linkType: hard - -"@opentelemetry/otlp-transformer@npm:0.54.0": - version: 0.54.0 - resolution: "@opentelemetry/otlp-transformer@npm:0.54.0" - dependencies: - "@opentelemetry/api-logs": 0.54.0 - "@opentelemetry/core": 1.27.0 - "@opentelemetry/resources": 1.27.0 - "@opentelemetry/sdk-logs": 0.54.0 - "@opentelemetry/sdk-metrics": 1.27.0 - "@opentelemetry/sdk-trace-base": 1.27.0 + "@opentelemetry/api-logs": 0.55.0 + "@opentelemetry/core": 1.28.0 + "@opentelemetry/resources": 1.28.0 + "@opentelemetry/sdk-logs": 0.55.0 + "@opentelemetry/sdk-metrics": 1.28.0 + "@opentelemetry/sdk-trace-base": 1.28.0 protobufjs: ^7.3.0 peerDependencies: "@opentelemetry/api": ^1.3.0 - checksum: 69451290ec2c65ee27f35b29d41a1b961d169ff928d231805c2694cbc4b4bda788027cf8149a6a1325da7c3bc2ca20dc939ef91a4f3e2af481ed187653386610 + checksum: eaca0c0a428e4b5fb24a770d89767d278f1fbfbafcec7e126d5f06fc090ef74af8b5feadc031682749bb9231862c412a7452029d16c281f7a9a3f791130c2ec5 languageName: node linkType: hard -"@opentelemetry/propagator-b3@npm:1.25.0": - version: 1.25.0 - resolution: "@opentelemetry/propagator-b3@npm:1.25.0" +"@opentelemetry/propagator-b3@npm:1.28.0": + version: 1.28.0 + resolution: "@opentelemetry/propagator-b3@npm:1.28.0" dependencies: - "@opentelemetry/core": 1.25.0 + "@opentelemetry/core": 1.28.0 peerDependencies: "@opentelemetry/api": ">=1.0.0 <1.10.0" - checksum: 5e8a0feec400ebb20644ee217f904ec8894ccad49b753e80c5e131a4f3390504ca3fd17de58ff546313dedc6498dbd198ff83acc3d8084a205e1d901cfc0bb2d + checksum: 793812d47fde1cd55239ebc941b2439e18c226c4d29eb92168cce89c9305865a7058f397ff2f7d9ef5dc3d9cd7ac29c52bcb6c7d25947269d3d02c05643e371b languageName: node linkType: hard -"@opentelemetry/propagator-jaeger@npm:1.25.0": - version: 1.25.0 - resolution: "@opentelemetry/propagator-jaeger@npm:1.25.0" +"@opentelemetry/propagator-jaeger@npm:1.28.0": + version: 1.28.0 + resolution: "@opentelemetry/propagator-jaeger@npm:1.28.0" dependencies: - "@opentelemetry/core": 1.25.0 + "@opentelemetry/core": 1.28.0 peerDependencies: "@opentelemetry/api": ">=1.0.0 <1.10.0" - checksum: c652b4285e254041654a5153649f822b8e2eaa526b67e0a8c56c4eb173d9d0d0efa41ffed3f7dcdd1c2c2b85365cd05e001ee145e8701e4af9d7eef79488ca18 + checksum: 887589595a906a309e9962efcbc7940f37b85b6934d4910141de73b034c155d1309d336e259743a23684a7aa4669d6eeee89639ed33f97d1a0e8d8394251857f languageName: node linkType: hard -"@opentelemetry/resource-detector-aws@npm:^1.5.2": - version: 1.5.2 - 
resolution: "@opentelemetry/resource-detector-aws@npm:1.5.2" +"@opentelemetry/resource-detector-aws@npm:^1.8.0": + version: 1.8.0 + resolution: "@opentelemetry/resource-detector-aws@npm:1.8.0" dependencies: "@opentelemetry/core": ^1.0.0 - "@opentelemetry/resources": ^1.0.0 - "@opentelemetry/semantic-conventions": ^1.22.0 + "@opentelemetry/resources": ^1.10.0 + "@opentelemetry/semantic-conventions": ^1.27.0 peerDependencies: "@opentelemetry/api": ^1.0.0 - checksum: c58274117fb6a44593aab1135f11d39770a69a1a14108a826086a36a7108de13d0d9df333cf5533e98d40f751b20d8a3284426bfcd5dcc941157458bbba7fe1c + checksum: 7f393a3b3a9e1c015db188ea4b7ee651c0d7dc196bd574eb6bebec0a7ff93cbd652afcf1cdd02e97e56c0c53b3987487483f73ddd3323f2ba427af5f752ff806 languageName: node linkType: hard -"@opentelemetry/resources@npm:1.25.0, @opentelemetry/resources@npm:^1.25.0": +"@opentelemetry/resources@npm:1.25.0": version: 1.25.0 resolution: "@opentelemetry/resources@npm:1.25.0" dependencies: @@ -3777,57 +4050,44 @@ __metadata: languageName: node linkType: hard -"@opentelemetry/resources@npm:1.27.0": - version: 1.27.0 - resolution: "@opentelemetry/resources@npm:1.27.0" +"@opentelemetry/resources@npm:1.28.0, @opentelemetry/resources@npm:^1.10.0, @opentelemetry/resources@npm:^1.28.0": + version: 1.28.0 + resolution: "@opentelemetry/resources@npm:1.28.0" dependencies: - "@opentelemetry/core": 1.27.0 + "@opentelemetry/core": 1.28.0 "@opentelemetry/semantic-conventions": 1.27.0 peerDependencies: "@opentelemetry/api": ">=1.0.0 <1.10.0" - checksum: 43d298afea7daf7524e6b98c1441bcce9fa73b76aecf17e36cabb1a4cfaae6818acf9759d3e42706b1fd91243644076d2291e78c3ed81641d3b351fcff6cb9a9 - languageName: node - linkType: hard - -"@opentelemetry/resources@npm:^1.0.0": - version: 1.25.1 - resolution: "@opentelemetry/resources@npm:1.25.1" - dependencies: - "@opentelemetry/core": 1.25.1 - "@opentelemetry/semantic-conventions": 1.25.1 - peerDependencies: - "@opentelemetry/api": ">=1.0.0 <1.10.0" - checksum: 806e5aabbc93afcab767dc84707f702ca51bbc93e4565eb69a8591ed2fe78439aca19c5ca0d9f044c85ed97b9efb35936fdb65bef01f5f3e68504002c8a07220 + checksum: b5cb13b75e5da1ef306885cef06e68dc41197c0a25f37fc3029941de8912b0efac089b084fd38c2819a70d01c3b70bc781a60f776bb68ec901b9dfd24eb3a834 languageName: node linkType: hard -"@opentelemetry/sdk-logs@npm:0.52.0": - version: 0.52.0 - resolution: "@opentelemetry/sdk-logs@npm:0.52.0" +"@opentelemetry/sdk-logs@npm:0.55.0, @opentelemetry/sdk-logs@npm:^0.55.0": + version: 0.55.0 + resolution: "@opentelemetry/sdk-logs@npm:0.55.0" dependencies: - "@opentelemetry/api-logs": 0.52.0 - "@opentelemetry/core": 1.25.0 - "@opentelemetry/resources": 1.25.0 + "@opentelemetry/api-logs": 0.55.0 + "@opentelemetry/core": 1.28.0 + "@opentelemetry/resources": 1.28.0 peerDependencies: "@opentelemetry/api": ">=1.4.0 <1.10.0" - checksum: 7bf7aed40a168866d76e2260237f6cec9c82acaebcc02a3597985b2be644e4aebf69e0f57739e7fd7cc8e75ecd0bdc98b0429ea985d7de6064148477ffd6432e + checksum: 7e8d05d302759341f10d2c853378a0556bea295660419103b2be906b933ca660704bf1cdac30fc803a4bd7ed852f0a626e8774a6307e57aafcf08e67d3fcd737 languageName: node linkType: hard -"@opentelemetry/sdk-logs@npm:0.54.0, @opentelemetry/sdk-logs@npm:^0.54.0": - version: 0.54.0 - resolution: "@opentelemetry/sdk-logs@npm:0.54.0" +"@opentelemetry/sdk-metrics@npm:1.28.0, @opentelemetry/sdk-metrics@npm:^1.28.0": + version: 1.28.0 + resolution: "@opentelemetry/sdk-metrics@npm:1.28.0" dependencies: - "@opentelemetry/api-logs": 0.54.0 - "@opentelemetry/core": 1.27.0 - "@opentelemetry/resources": 1.27.0 + 
"@opentelemetry/core": 1.28.0 + "@opentelemetry/resources": 1.28.0 peerDependencies: - "@opentelemetry/api": ">=1.4.0 <1.10.0" - checksum: fd6db65af6d7afdb454eac1df8a4029d3d287d37e9289a4d128bea07995e8843b7b1e5d1f39aa39538397ce1b6bf624cc2548f40dc18324ba3bbaec86dd845b9 + "@opentelemetry/api": ">=1.3.0 <1.10.0" + checksum: b1a42fbad2046f21e384185b1559e198bb23bdfcd5970fc7f3a3cc4cfe5fb37ab8a6f29deef1b6753eb6a68e2c3b19c6d8a4957be4024af0ac0165eea24c051f languageName: node linkType: hard -"@opentelemetry/sdk-metrics@npm:1.25.0, @opentelemetry/sdk-metrics@npm:^1.25.0, @opentelemetry/sdk-metrics@npm:^1.8.0": +"@opentelemetry/sdk-metrics@npm:^1.8.0": version: 1.25.0 resolution: "@opentelemetry/sdk-metrics@npm:1.25.0" dependencies: @@ -3840,68 +4100,43 @@ __metadata: languageName: node linkType: hard -"@opentelemetry/sdk-metrics@npm:1.27.0": - version: 1.27.0 - resolution: "@opentelemetry/sdk-metrics@npm:1.27.0" - dependencies: - "@opentelemetry/core": 1.27.0 - "@opentelemetry/resources": 1.27.0 - peerDependencies: - "@opentelemetry/api": ">=1.3.0 <1.10.0" - checksum: c8776577063a3a5199d5717247270daf5820ce6636530b5ea4b5a8d6b40170cec9bb6b56dacb5c118d2e90588af83d0ebbb13f4d370c7efe50f69d22e5d13463 - languageName: node - linkType: hard - -"@opentelemetry/sdk-trace-base@npm:1.25.0": - version: 1.25.0 - resolution: "@opentelemetry/sdk-trace-base@npm:1.25.0" - dependencies: - "@opentelemetry/core": 1.25.0 - "@opentelemetry/resources": 1.25.0 - "@opentelemetry/semantic-conventions": 1.25.0 - peerDependencies: - "@opentelemetry/api": ">=1.0.0 <1.10.0" - checksum: 4c0ce40dbe9dcf5e5f79c60c44ffadb6806f1a8cf45c13d901ea6a2345f6cf26a83a1dad4358859fcf941e01f8bd8654f907f88137d5051e023211f8d645e959 - languageName: node - linkType: hard - -"@opentelemetry/sdk-trace-base@npm:1.27.0": - version: 1.27.0 - resolution: "@opentelemetry/sdk-trace-base@npm:1.27.0" +"@opentelemetry/sdk-trace-base@npm:1.28.0": + version: 1.28.0 + resolution: "@opentelemetry/sdk-trace-base@npm:1.28.0" dependencies: - "@opentelemetry/core": 1.27.0 - "@opentelemetry/resources": 1.27.0 + "@opentelemetry/core": 1.28.0 + "@opentelemetry/resources": 1.28.0 "@opentelemetry/semantic-conventions": 1.27.0 peerDependencies: "@opentelemetry/api": ">=1.0.0 <1.10.0" - checksum: d28c36724aeaf4884f7957e2ab138d9a0ca715a68b2ad23e2935ff0e39cd438c57fd0c8cc85fd5e280464857ede1ae8f9c8e40a37088a1e34d2e625e77276fee + checksum: 13828679153d1690384a57e17709c18a76dcee680e92c7f64c85bf6dc5771cc05f1eb70f64c726859718fe494428aab049511d26bd39fa4d9ebd5270ca39eca0 languageName: node linkType: hard -"@opentelemetry/sdk-trace-node@npm:^1.25.0": - version: 1.25.0 - resolution: "@opentelemetry/sdk-trace-node@npm:1.25.0" +"@opentelemetry/sdk-trace-node@npm:^1.28.0": + version: 1.28.0 + resolution: "@opentelemetry/sdk-trace-node@npm:1.28.0" dependencies: - "@opentelemetry/context-async-hooks": 1.25.0 - "@opentelemetry/core": 1.25.0 - "@opentelemetry/propagator-b3": 1.25.0 - "@opentelemetry/propagator-jaeger": 1.25.0 - "@opentelemetry/sdk-trace-base": 1.25.0 + "@opentelemetry/context-async-hooks": 1.28.0 + "@opentelemetry/core": 1.28.0 + "@opentelemetry/propagator-b3": 1.28.0 + "@opentelemetry/propagator-jaeger": 1.28.0 + "@opentelemetry/sdk-trace-base": 1.28.0 semver: ^7.5.2 peerDependencies: "@opentelemetry/api": ">=1.0.0 <1.10.0" - checksum: 22a0a61a6c092841ef4438f914edd259d3025078cc9331aaac340c624c2963aa6fdc4970ade5a0e6647c64e92e893ebde0b8ecdd021abac5358ea3c814a5c01c + checksum: 
60868374d1eda9de8835b819a3fab2db2dae640a6253c0d12affe254dcdb02a7e79e7f151e2e09773bbf167e428c5582b810884870b8497dbd28886eb144241d languageName: node linkType: hard -"@opentelemetry/semantic-conventions@npm:1.25.0, @opentelemetry/semantic-conventions@npm:^1.25.0": +"@opentelemetry/semantic-conventions@npm:1.25.0": version: 1.25.0 resolution: "@opentelemetry/semantic-conventions@npm:1.25.0" checksum: 8c9d36f57f0d3d1d4945effe626894ffea860b4be4d5257666ee28b90843ce22694c5b01f9b25ed47a08043958b7e89a65b7ae8e4128f5ed72dcdfe71ac7a19a languageName: node linkType: hard -"@opentelemetry/semantic-conventions@npm:1.25.1, @opentelemetry/semantic-conventions@npm:^1.22.0": +"@opentelemetry/semantic-conventions@npm:1.25.1": version: 1.25.1 resolution: "@opentelemetry/semantic-conventions@npm:1.25.1" checksum: fea418a4b09c55121c6da11c49dd2105116533838c484aead17e8acf8029dad711e145849812f9c61f9e48fad8e2b6cf103d2c18847ca993032ce9b27c2f863d @@ -3915,13 +4150,10 @@ __metadata: languageName: node linkType: hard -"@opentelemetry/winston-transport@npm:^0.7.0": - version: 0.7.0 - resolution: "@opentelemetry/winston-transport@npm:0.7.0" - dependencies: - "@opentelemetry/api-logs": ^0.54.0 - winston-transport: 4.* - checksum: a75d1915e90ab9beaec842fe2f2ce053ea2b43001d8be7cfd47945fa6e1dee6e1d1b5850becb72c9553edb6904844b685df838a1a2cbea0f2f6edf6ce85dc3bb +"@opentelemetry/semantic-conventions@npm:^1.27.0, @opentelemetry/semantic-conventions@npm:^1.28.0": + version: 1.28.0 + resolution: "@opentelemetry/semantic-conventions@npm:1.28.0" + checksum: 1d708afa654990236cdb6b5da84f7ab899b70bff9f753bc49d93616a5c7f7f339ba1eba6a9fbb57dee596995334f4e7effa57a4624741882ab5b3c419c3511e2 languageName: node linkType: hard @@ -4052,6 +4284,184 @@ __metadata: languageName: node linkType: hard +"@puppeteer/browsers@npm:2.4.1": + version: 2.4.1 + resolution: "@puppeteer/browsers@npm:2.4.1" + dependencies: + debug: ^4.3.7 + extract-zip: ^2.0.1 + progress: ^2.0.3 + proxy-agent: ^6.4.0 + semver: ^7.6.3 + tar-fs: ^3.0.6 + unbzip2-stream: ^1.4.3 + yargs: ^17.7.2 + bin: + browsers: lib/cjs/main-cli.js + checksum: 1e8193ae4ad84cf5a4f1ad59abe92f77a8ba4a902670f8a54002b9b772bf4df081de9e2ccb47efc465c31a0c6a649cbf7b7be8318f9fd3a6af466836b6835327 + languageName: node + linkType: hard + +"@rollup/plugin-node-resolve@npm:^15.0.1": + version: 15.3.0 + resolution: "@rollup/plugin-node-resolve@npm:15.3.0" + dependencies: + "@rollup/pluginutils": ^5.0.1 + "@types/resolve": 1.20.2 + deepmerge: ^4.2.2 + is-module: ^1.0.0 + resolve: ^1.22.1 + peerDependencies: + rollup: ^2.78.0||^3.0.0||^4.0.0 + peerDependenciesMeta: + rollup: + optional: true + checksum: 90e4e94b173e7edd57e374ac0cc0a69cc6f1b4507e83731132ac6fa1747d96a5648a48441e4452728429b6db5e67561439b7b2f4d2c6a941a33d38be56d871b4 + languageName: node + linkType: hard + +"@rollup/pluginutils@npm:^5.0.1": + version: 5.1.3 + resolution: "@rollup/pluginutils@npm:5.1.3" + dependencies: + "@types/estree": ^1.0.0 + estree-walker: ^2.0.2 + picomatch: ^4.0.2 + peerDependencies: + rollup: ^1.20.0||^2.0.0||^3.0.0||^4.0.0 + peerDependenciesMeta: + rollup: + optional: true + checksum: a6e9bac8ae94da39679dae390b53b43fe7a218f8fa2bfecf86e59be4da4ba02ac004f166daf55f03506e49108399394f13edeb62cce090f8cfc967b29f4738bf + languageName: node + linkType: hard + +"@rollup/rollup-android-arm-eabi@npm:4.27.4": + version: 4.27.4 + resolution: "@rollup/rollup-android-arm-eabi@npm:4.27.4" + conditions: os=android & cpu=arm + languageName: node + linkType: hard + +"@rollup/rollup-android-arm64@npm:4.27.4": + version: 4.27.4 + resolution: 
"@rollup/rollup-android-arm64@npm:4.27.4" + conditions: os=android & cpu=arm64 + languageName: node + linkType: hard + +"@rollup/rollup-darwin-arm64@npm:4.27.4": + version: 4.27.4 + resolution: "@rollup/rollup-darwin-arm64@npm:4.27.4" + conditions: os=darwin & cpu=arm64 + languageName: node + linkType: hard + +"@rollup/rollup-darwin-x64@npm:4.27.4": + version: 4.27.4 + resolution: "@rollup/rollup-darwin-x64@npm:4.27.4" + conditions: os=darwin & cpu=x64 + languageName: node + linkType: hard + +"@rollup/rollup-freebsd-arm64@npm:4.27.4": + version: 4.27.4 + resolution: "@rollup/rollup-freebsd-arm64@npm:4.27.4" + conditions: os=freebsd & cpu=arm64 + languageName: node + linkType: hard + +"@rollup/rollup-freebsd-x64@npm:4.27.4": + version: 4.27.4 + resolution: "@rollup/rollup-freebsd-x64@npm:4.27.4" + conditions: os=freebsd & cpu=x64 + languageName: node + linkType: hard + +"@rollup/rollup-linux-arm-gnueabihf@npm:4.27.4": + version: 4.27.4 + resolution: "@rollup/rollup-linux-arm-gnueabihf@npm:4.27.4" + conditions: os=linux & cpu=arm & libc=glibc + languageName: node + linkType: hard + +"@rollup/rollup-linux-arm-musleabihf@npm:4.27.4": + version: 4.27.4 + resolution: "@rollup/rollup-linux-arm-musleabihf@npm:4.27.4" + conditions: os=linux & cpu=arm & libc=musl + languageName: node + linkType: hard + +"@rollup/rollup-linux-arm64-gnu@npm:4.27.4": + version: 4.27.4 + resolution: "@rollup/rollup-linux-arm64-gnu@npm:4.27.4" + conditions: os=linux & cpu=arm64 & libc=glibc + languageName: node + linkType: hard + +"@rollup/rollup-linux-arm64-musl@npm:4.27.4": + version: 4.27.4 + resolution: "@rollup/rollup-linux-arm64-musl@npm:4.27.4" + conditions: os=linux & cpu=arm64 & libc=musl + languageName: node + linkType: hard + +"@rollup/rollup-linux-powerpc64le-gnu@npm:4.27.4": + version: 4.27.4 + resolution: "@rollup/rollup-linux-powerpc64le-gnu@npm:4.27.4" + conditions: os=linux & cpu=ppc64 & libc=glibc + languageName: node + linkType: hard + +"@rollup/rollup-linux-riscv64-gnu@npm:4.27.4": + version: 4.27.4 + resolution: "@rollup/rollup-linux-riscv64-gnu@npm:4.27.4" + conditions: os=linux & cpu=riscv64 & libc=glibc + languageName: node + linkType: hard + +"@rollup/rollup-linux-s390x-gnu@npm:4.27.4": + version: 4.27.4 + resolution: "@rollup/rollup-linux-s390x-gnu@npm:4.27.4" + conditions: os=linux & cpu=s390x & libc=glibc + languageName: node + linkType: hard + +"@rollup/rollup-linux-x64-gnu@npm:4.27.4": + version: 4.27.4 + resolution: "@rollup/rollup-linux-x64-gnu@npm:4.27.4" + conditions: os=linux & cpu=x64 & libc=glibc + languageName: node + linkType: hard + +"@rollup/rollup-linux-x64-musl@npm:4.27.4": + version: 4.27.4 + resolution: "@rollup/rollup-linux-x64-musl@npm:4.27.4" + conditions: os=linux & cpu=x64 & libc=musl + languageName: node + linkType: hard + +"@rollup/rollup-win32-arm64-msvc@npm:4.27.4": + version: 4.27.4 + resolution: "@rollup/rollup-win32-arm64-msvc@npm:4.27.4" + conditions: os=win32 & cpu=arm64 + languageName: node + linkType: hard + +"@rollup/rollup-win32-ia32-msvc@npm:4.27.4": + version: 4.27.4 + resolution: "@rollup/rollup-win32-ia32-msvc@npm:4.27.4" + conditions: os=win32 & cpu=ia32 + languageName: node + linkType: hard + +"@rollup/rollup-win32-x64-msvc@npm:4.27.4": + version: 4.27.4 + resolution: "@rollup/rollup-win32-x64-msvc@npm:4.27.4" + conditions: os=win32 & cpu=x64 + languageName: node + linkType: hard + "@scure/base@npm:~1.1.0, @scure/base@npm:~1.1.2, @scure/base@npm:~1.1.4": version: 1.1.6 resolution: "@scure/base@npm:1.1.6" @@ -4122,7 +4532,7 @@ __metadata: languageName: 
node linkType: hard -"@sinonjs/commons@npm:^3.0.0": +"@sinonjs/commons@npm:^3.0.0, @sinonjs/commons@npm:^3.0.1": version: 3.0.1 resolution: "@sinonjs/commons@npm:3.0.1" dependencies: @@ -4140,6 +4550,15 @@ __metadata: languageName: node linkType: hard +"@sinonjs/fake-timers@npm:^13.0.5": + version: 13.0.5 + resolution: "@sinonjs/fake-timers@npm:13.0.5" + dependencies: + "@sinonjs/commons": ^3.0.1 + checksum: b1c6ba87fadb7666d3aa126c9e8b4ac32b2d9e84c9e5fd074aa24cab3c8342fd655459de014b08e603be1e6c24c9f9716d76d6d2a36c50f59bb0091be61601dd + languageName: node + linkType: hard + "@swc/core-darwin-arm64@npm:1.5.5": version: 1.5.5 resolution: "@swc/core-darwin-arm64@npm:1.5.5" @@ -4356,6 +4775,13 @@ __metadata: languageName: node linkType: hard +"@types/babel__code-frame@npm:^7.0.2": + version: 7.0.6 + resolution: "@types/babel__code-frame@npm:7.0.6" + checksum: 5325ab85d95e58fe84279757788ddb0de68bfd6814bc636e868f9ff7b5229915873f28847c4baf48fd3a4a460a73b4ea87bc9e1d78a3a5a60cfc7ca627a722c5 + languageName: node + linkType: hard + "@types/babel__core@npm:^7.1.14": version: 7.20.5 resolution: "@types/babel__core@npm:7.20.5" @@ -4434,6 +4860,24 @@ __metadata: languageName: node linkType: hard +"@types/chai-as-promised@npm:^8.0.1": + version: 8.0.1 + resolution: "@types/chai-as-promised@npm:8.0.1" + dependencies: + "@types/chai": "*" + checksum: 7b298bab0cc16e244f9402de25eed8d626e803d328b67241487e3296bf68e95792f572491a2995ba409288263344fdc0da380064c5b6562a2a4d69bf78624d74 + languageName: node + linkType: hard + +"@types/chai@npm:*, @types/chai@npm:^5.0.1": + version: 5.0.1 + resolution: "@types/chai@npm:5.0.1" + dependencies: + "@types/deep-eql": "*" + checksum: 53d813cbca3755c025381ad4ac8b51b17897df90316350247f9527bdba3adb48b3b1315308fbd717d9013d8e60375c0ab4bd004dc72330133486ff5db4cb0b2c + languageName: node + linkType: hard + "@types/chalk@npm:^2.2.0": version: 2.2.0 resolution: "@types/chalk@npm:2.2.0" @@ -4443,6 +4887,23 @@ __metadata: languageName: node linkType: hard +"@types/co-body@npm:^6.1.0": + version: 6.1.3 + resolution: "@types/co-body@npm:6.1.3" + dependencies: + "@types/node": "*" + "@types/qs": "*" + checksum: e93fdc177f69ee0535cf401783258e4255f5eb8235c58b5a2a5a8958cf341fadf3d0bf2c75907ed6b7d188ce2c2f2cf9593a71d4eef12900beba54ebbbdd5cc1 + languageName: node + linkType: hard + +"@types/command-line-args@npm:^5.0.0": + version: 5.2.3 + resolution: "@types/command-line-args@npm:5.2.3" + checksum: 3d90db5b4bbaabd049654a0d12fa378989ab0d76a0f98d4c606761b5a08ce76458df0f9bb175219e187b4cd57e285e6f836d23e86b2c3d997820854cc3ed9121 + languageName: node + linkType: hard + "@types/connect-history-api-fallback@npm:^1.5.4": version: 1.5.4 resolution: "@types/connect-history-api-fallback@npm:1.5.4" @@ -4469,6 +4930,13 @@ __metadata: languageName: node linkType: hard +"@types/convert-source-map@npm:^2.0.0": + version: 2.0.3 + resolution: "@types/convert-source-map@npm:2.0.3" + checksum: 411cf9a02cf5dbe204e325dd5ebf50de00b58b38d1d2a3064c6ea28417c23bae956206eaa9ed3a75a994909b4ab3f9c6389073d0636a62500fa6d6333c64d45a + languageName: node + linkType: hard + "@types/cookiejar@npm:^2.1.5": version: 2.1.5 resolution: "@types/cookiejar@npm:2.1.5" @@ -4488,6 +4956,13 @@ __metadata: languageName: node linkType: hard +"@types/debounce@npm:^1.2.0": + version: 1.2.4 + resolution: "@types/debounce@npm:1.2.4" + checksum: decef3eee65d681556d50f7fac346f1b33134f6b21f806d41326f9dfb362fa66b0282ff0640ae6791b690694c9dc3dad4e146e909e707e6f96650f3aa325b9da + languageName: node + linkType: hard + "@types/debug@npm:^4.1.7": 
version: 4.1.12 resolution: "@types/debug@npm:4.1.12" @@ -4497,6 +4972,13 @@ __metadata: languageName: node linkType: hard +"@types/deep-eql@npm:*": + version: 4.0.2 + resolution: "@types/deep-eql@npm:4.0.2" + checksum: 249a27b0bb22f6aa28461db56afa21ec044fa0e303221a62dff81831b20c8530502175f1a49060f7099e7be06181078548ac47c668de79ff9880241968d43d0c + languageName: node + linkType: hard + "@types/detect-node@npm:^2.0.0": version: 2.0.2 resolution: "@types/detect-node@npm:2.0.2" @@ -4549,7 +5031,7 @@ __metadata: languageName: node linkType: hard -"@types/estree@npm:^1.0.6": +"@types/estree@npm:1.0.6, @types/estree@npm:^1.0.0, @types/estree@npm:^1.0.6": version: 1.0.6 resolution: "@types/estree@npm:1.0.6" checksum: 8825d6e729e16445d9a1dd2fb1db2edc5ed400799064cd4d028150701031af012ba30d6d03fe9df40f4d7a437d0de6d2b256020152b7b09bde9f2e420afdffd9 @@ -4641,7 +5123,7 @@ __metadata: languageName: node linkType: hard -"@types/istanbul-lib-coverage@npm:*, @types/istanbul-lib-coverage@npm:^2.0.0, @types/istanbul-lib-coverage@npm:^2.0.1": +"@types/istanbul-lib-coverage@npm:*, @types/istanbul-lib-coverage@npm:^2.0.0, @types/istanbul-lib-coverage@npm:^2.0.1, @types/istanbul-lib-coverage@npm:^2.0.3": version: 2.0.6 resolution: "@types/istanbul-lib-coverage@npm:2.0.6" checksum: 3feac423fd3e5449485afac999dcfcb3d44a37c830af898b689fadc65d26526460bedb889db278e0d4d815a670331796494d073a10ee6e3a6526301fe7415778 @@ -4778,7 +5260,7 @@ __metadata: languageName: node linkType: hard -"@types/koa@npm:*, @types/koa@npm:^2.13.5, @types/koa@npm:^2.13.6, @types/koa@npm:^2.13.9, @types/koa@npm:^2.15.0": +"@types/koa@npm:*, @types/koa@npm:^2.11.6, @types/koa@npm:^2.13.5, @types/koa@npm:^2.13.6, @types/koa@npm:^2.13.9, @types/koa@npm:^2.15.0": version: 2.15.0 resolution: "@types/koa@npm:2.15.0" dependencies: @@ -4967,6 +5449,22 @@ __metadata: languageName: node linkType: hard +"@types/mocha-each@npm:^2.0.4": + version: 2.0.4 + resolution: "@types/mocha-each@npm:2.0.4" + dependencies: + "@types/mocha": "*" + checksum: 2588284db079e2d0a17735c8fb5c12cba9feabf2de55c9ab49e1f3b38cc522691d30ed3abb1bcb21c087b27f373e3f4123ef7bd8d9a4f95cef38f6c8045c71f3 + languageName: node + linkType: hard + +"@types/mocha@npm:*, @types/mocha@npm:^10.0.10": + version: 10.0.10 + resolution: "@types/mocha@npm:10.0.10" + checksum: 17a56add60a8cc8362d3c62cb6798be3f89f4b6ccd5b9abd12b46e31ff299be21ff2faebf5993de7e0099559f58ca5a3b49a505d302dfa5d65c5a4edfc089195 + languageName: node + linkType: hard + "@types/ms@npm:*": version: 0.7.34 resolution: "@types/ms@npm:0.7.34" @@ -5058,6 +5556,13 @@ __metadata: languageName: node linkType: hard +"@types/parse5@npm:^6.0.1": + version: 6.0.3 + resolution: "@types/parse5@npm:6.0.3" + checksum: ddb59ee4144af5dfcc508a8dcf32f37879d11e12559561e65788756b95b33e6f03ea027d88e1f5408f9b7bfb656bf630ace31a2169edf44151daaf8dd58df1b7 + languageName: node + linkType: hard + "@types/qs@npm:*": version: 6.9.15 resolution: "@types/qs@npm:6.9.15" @@ -5072,6 +5577,13 @@ __metadata: languageName: node linkType: hard +"@types/resolve@npm:1.20.2": + version: 1.20.2 + resolution: "@types/resolve@npm:1.20.2" + checksum: 61c2cad2499ffc8eab36e3b773945d337d848d3ac6b7b0a87c805ba814bc838ef2f262fc0f109bfd8d2e0898ff8bd80ad1025f9ff64f1f71d3d4294c9f14e5f6 + languageName: node + linkType: hard + "@types/retry@npm:0.12.2": version: 0.12.2 resolution: "@types/retry@npm:0.12.2" @@ -5195,13 +5707,6 @@ __metadata: languageName: node linkType: hard -"@types/triple-beam@npm:^1.3.2": - version: 1.3.5 - resolution: "@types/triple-beam@npm:1.3.5" - checksum: 
519b6a1b30d4571965c9706ad5400a200b94e4050feca3e7856e3ea7ac00ec9903e32e9a10e2762d0f7e472d5d03e5f4b29c16c0bd8c1f77c8876c683b2231f1 - languageName: node - linkType: hard - "@types/wrap-ansi@npm:^3.0.0": version: 3.0.0 resolution: "@types/wrap-ansi@npm:3.0.0" @@ -5209,6 +5714,15 @@ __metadata: languageName: node linkType: hard +"@types/ws@npm:^7.4.0": + version: 7.4.7 + resolution: "@types/ws@npm:7.4.7" + dependencies: + "@types/node": "*" + checksum: b4c9b8ad209620c9b21e78314ce4ff07515c0cadab9af101c1651e7bfb992d7fd933bd8b9c99d110738fd6db523ed15f82f29f50b45510288da72e964dedb1a3 + languageName: node + linkType: hard + "@types/ws@npm:^8.5.10": version: 8.5.13 resolution: "@types/ws@npm:8.5.13" @@ -5476,6 +5990,227 @@ __metadata: languageName: node linkType: hard +"@web/browser-logs@npm:^0.4.0": + version: 0.4.0 + resolution: "@web/browser-logs@npm:0.4.0" + dependencies: + errorstacks: ^2.2.0 + checksum: 65c6c4312b1ff00ff40fd15c07708f5e113bb8b4f87c972356010546a0664287dd4f64c5475f8be27b6abb29b9272ecc4162aada90ffb732f1779ceec5fd8ad6 + languageName: node + linkType: hard + +"@web/config-loader@npm:^0.3.0": + version: 0.3.2 + resolution: "@web/config-loader@npm:0.3.2" + checksum: 8f9b9d9283279b65702e9251c18311d5f8b67cc6c2df3d1d2c9a0d7f9e311f112001879e1328a4c88907b5aa66a279386d126a51b0e98f5ecd9da6ccbb4fd5da + languageName: node + linkType: hard + +"@web/dev-server-core@npm:^0.7.2, @web/dev-server-core@npm:^0.7.3, @web/dev-server-core@npm:^0.7.4": + version: 0.7.4 + resolution: "@web/dev-server-core@npm:0.7.4" + dependencies: + "@types/koa": ^2.11.6 + "@types/ws": ^7.4.0 + "@web/parse5-utils": ^2.1.0 + chokidar: ^4.0.1 + clone: ^2.1.2 + es-module-lexer: ^1.0.0 + get-stream: ^6.0.0 + is-stream: ^2.0.0 + isbinaryfile: ^5.0.0 + koa: ^2.13.0 + koa-etag: ^4.0.0 + koa-send: ^5.0.1 + koa-static: ^5.0.0 + lru-cache: ^8.0.4 + mime-types: ^2.1.27 + parse5: ^6.0.1 + picomatch: ^2.2.2 + ws: ^7.5.10 + checksum: 93493c07fc732989f575bdd17f860063e7d78d4f410fb47ea41aa81f3274d29246b7ea449709ab946650e4bb09bb06c878926d721af2702d1b365012bfc7ba63 + languageName: node + linkType: hard + +"@web/dev-server-esbuild@npm:^1.0.3": + version: 1.0.3 + resolution: "@web/dev-server-esbuild@npm:1.0.3" + dependencies: + "@mdn/browser-compat-data": ^4.0.0 + "@web/dev-server-core": ^0.7.4 + esbuild: ^0.24.0 + parse5: ^6.0.1 + ua-parser-js: ^1.0.33 + checksum: 8dba030098191260eca9aef5d2f8a986a8829300882f3563a925df36807ab49b1cacd33c9a581ae7d6c83fc32fdbaf779a2b4d3b47cb61edbdd2b67eb09534a7 + languageName: node + linkType: hard + +"@web/dev-server-rollup@npm:^0.6.1": + version: 0.6.4 + resolution: "@web/dev-server-rollup@npm:0.6.4" + dependencies: + "@rollup/plugin-node-resolve": ^15.0.1 + "@web/dev-server-core": ^0.7.2 + nanocolors: ^0.2.1 + parse5: ^6.0.1 + rollup: ^4.4.0 + whatwg-url: ^14.0.0 + checksum: 19ab662541fe9f7364b43745e4e2179619afc016fa293b0b0ee9bb8b8a064987f144ebaf531c0b963e676a801a5e93658e3b160148022879a0c86d2acaed95bf + languageName: node + linkType: hard + +"@web/dev-server@npm:^0.4.0": + version: 0.4.6 + resolution: "@web/dev-server@npm:0.4.6" + dependencies: + "@babel/code-frame": ^7.12.11 + "@types/command-line-args": ^5.0.0 + "@web/config-loader": ^0.3.0 + "@web/dev-server-core": ^0.7.2 + "@web/dev-server-rollup": ^0.6.1 + camelcase: ^6.2.0 + command-line-args: ^5.1.1 + command-line-usage: ^7.0.1 + debounce: ^1.2.0 + deepmerge: ^4.2.2 + internal-ip: ^6.2.0 + nanocolors: ^0.2.1 + open: ^8.0.2 + portfinder: ^1.0.32 + bin: + wds: dist/bin.js + web-dev-server: dist/bin.js + checksum: 
7dc70749b3bdd9eaa0e8a8f491927bd4562a0a718ac79dbce53f80f55f945e8448a7f8fad34b94c7e056ab10ee3e513ac4b85aa1ed26d9b93ca89252de9b2bdf + languageName: node + linkType: hard + +"@web/parse5-utils@npm:^2.1.0": + version: 2.1.0 + resolution: "@web/parse5-utils@npm:2.1.0" + dependencies: + "@types/parse5": ^6.0.1 + parse5: ^6.0.1 + checksum: 0faa93c51d61934e0006bebc2e257036f8cedeb455c7bf22b8fdbc17919929518c2cc99ced3769f8eb3b1d6694dd4a7186d66ad2b3c4330140fd2ce03dc6c4d2 + languageName: node + linkType: hard + +"@web/test-runner-chrome@npm:^0.17.0": + version: 0.17.0 + resolution: "@web/test-runner-chrome@npm:0.17.0" + dependencies: + "@web/test-runner-core": ^0.13.0 + "@web/test-runner-coverage-v8": ^0.8.0 + async-mutex: 0.4.0 + chrome-launcher: ^0.15.0 + puppeteer-core: ^23.2.0 + checksum: 6779c82d8989b57f90d95fbe3cf1a62974583862129c988b81cb1b3327c865a9b8631d0172a762ebcbffd156ba3f61585b849a2268fecdfd9a95e58305feacae + languageName: node + linkType: hard + +"@web/test-runner-commands@npm:^0.9.0": + version: 0.9.0 + resolution: "@web/test-runner-commands@npm:0.9.0" + dependencies: + "@web/test-runner-core": ^0.13.0 + mkdirp: ^1.0.4 + checksum: df226f76148c5967df68c2589549b10ffe75f3d34a31d63bea132447271cdf073de7350aa680fcbf4315737b909fc44faad23d9f8e7e3ce37e93e05e67a7f295 + languageName: node + linkType: hard + +"@web/test-runner-core@npm:^0.13.0": + version: 0.13.4 + resolution: "@web/test-runner-core@npm:0.13.4" + dependencies: + "@babel/code-frame": ^7.12.11 + "@types/babel__code-frame": ^7.0.2 + "@types/co-body": ^6.1.0 + "@types/convert-source-map": ^2.0.0 + "@types/debounce": ^1.2.0 + "@types/istanbul-lib-coverage": ^2.0.3 + "@types/istanbul-reports": ^3.0.0 + "@web/browser-logs": ^0.4.0 + "@web/dev-server-core": ^0.7.3 + chokidar: ^4.0.1 + cli-cursor: ^3.1.0 + co-body: ^6.1.0 + convert-source-map: ^2.0.0 + debounce: ^1.2.0 + dependency-graph: ^0.11.0 + globby: ^11.0.1 + internal-ip: ^6.2.0 + istanbul-lib-coverage: ^3.0.0 + istanbul-lib-report: ^3.0.1 + istanbul-reports: ^3.0.2 + log-update: ^4.0.0 + nanocolors: ^0.2.1 + nanoid: ^3.1.25 + open: ^8.0.2 + picomatch: ^2.2.2 + source-map: ^0.7.3 + checksum: cc80f0873e59d2809d6438c1e5a5a110fa43ce5ce33bf3d0e5c65aa05b214e71d8ffba9f67ee45f75d36c6d6fb1c23662725be0b4ecb2f294a932d5eb76eb911 + languageName: node + linkType: hard + +"@web/test-runner-coverage-v8@npm:^0.8.0": + version: 0.8.0 + resolution: "@web/test-runner-coverage-v8@npm:0.8.0" + dependencies: + "@web/test-runner-core": ^0.13.0 + istanbul-lib-coverage: ^3.0.0 + lru-cache: ^8.0.4 + picomatch: ^2.2.2 + v8-to-istanbul: ^9.0.1 + checksum: 343f834372b3aeb2c24f4b03ce956d8ad851ef2a85b94507651c2a65321fcdff1b26a2c44d7516e97d9c42786bb003b9c245ad0798a414a814d0264fdbe0761e + languageName: node + linkType: hard + +"@web/test-runner-mocha@npm:^0.9.0": + version: 0.9.0 + resolution: "@web/test-runner-mocha@npm:0.9.0" + dependencies: + "@web/test-runner-core": ^0.13.0 + checksum: bcc9410ac9d679e7bb804fc5720b2a0ed3b4d08f2b49c03f2157f5b54c7f525a432712e1da644f04e5190c2480af2dc46a4c736cdba3fda3ba5fa98fd0f01a94 + languageName: node + linkType: hard + +"@web/test-runner-playwright@npm:^0.11.0": + version: 0.11.0 + resolution: "@web/test-runner-playwright@npm:0.11.0" + dependencies: + "@web/test-runner-core": ^0.13.0 + "@web/test-runner-coverage-v8": ^0.8.0 + playwright: ^1.22.2 + checksum: 3618b0b559d865af1211b3d86ec57487b32722a0dd640c9a3faca6c692cceec75176d742d0db73c823d300fbe761e5c15b2fad0c5096c3dbfedab313ed1aa7fe + languageName: node + linkType: hard + +"@web/test-runner@npm:^0.19.0": + version: 0.19.0 + 
resolution: "@web/test-runner@npm:0.19.0" + dependencies: + "@web/browser-logs": ^0.4.0 + "@web/config-loader": ^0.3.0 + "@web/dev-server": ^0.4.0 + "@web/test-runner-chrome": ^0.17.0 + "@web/test-runner-commands": ^0.9.0 + "@web/test-runner-core": ^0.13.0 + "@web/test-runner-mocha": ^0.9.0 + camelcase: ^6.2.0 + command-line-args: ^5.1.1 + command-line-usage: ^7.0.1 + convert-source-map: ^2.0.0 + diff: ^5.0.0 + globby: ^11.0.1 + nanocolors: ^0.2.1 + portfinder: ^1.0.32 + source-map: ^0.7.3 + bin: + web-test-runner: dist/bin.js + wtr: dist/bin.js + checksum: b1e0cdd53540c3c9d4c79389e0045e942731e0011d489705ca9913579ba15d7d7cce167dceadd62ecf32dbed9d44a4218cee82bc999a51888701fa10b77b40c5 + languageName: node + linkType: hard + "@webassemblyjs/ast@npm:1.12.1, @webassemblyjs/ast@npm:^1.12.1": version: 1.12.1 resolution: "@webassemblyjs/ast@npm:1.12.1" @@ -5728,15 +6463,6 @@ __metadata: languageName: node linkType: hard -"abort-controller@npm:^3.0.0": - version: 3.0.0 - resolution: "abort-controller@npm:3.0.0" - dependencies: - event-target-shim: ^5.0.0 - checksum: 170bdba9b47b7e65906a28c8ce4f38a7a369d78e2271706f020849c1bfe0ee2067d4261df8bbb66eb84f79208fd5b710df759d64191db58cfba7ce8ef9c54b75 - languageName: node - linkType: hard - "abortable-iterator@npm:^5.0.1": version: 5.0.1 resolution: "abortable-iterator@npm:5.0.1" @@ -5939,7 +6665,14 @@ __metadata: languageName: node linkType: hard -"ansi-escapes@npm:^4.2.1, ansi-escapes@npm:^4.3.2": +"ansi-colors@npm:^4.1.3": + version: 4.1.3 + resolution: "ansi-colors@npm:4.1.3" + checksum: a9c2ec842038a1fabc7db9ece7d3177e2fe1c5dc6f0c51ecfbf5f39911427b89c00b5dc6b8bd95f82a26e9b16aaae2e83d45f060e98070ce4d1333038edceb0e + languageName: node + linkType: hard + +"ansi-escapes@npm:^4.2.1, ansi-escapes@npm:^4.3.0, ansi-escapes@npm:^4.3.2": version: 4.3.2 resolution: "ansi-escapes@npm:4.3.2" dependencies: @@ -6106,6 +6839,13 @@ __metadata: languageName: node linkType: hard +"array-back@npm:^6.2.2": + version: 6.2.2 + resolution: "array-back@npm:6.2.2" + checksum: baae1e3a1687300a307d3bdf09715f6415e1099b5729d3d8e397309fb1e43d90b939d694602892172aaca7e0aeed38da89d04aa4951637d31c2a21350809e003 + languageName: node + linkType: hard + "array-buffer-byte-length@npm:^1.0.1": version: 1.0.1 resolution: "array-buffer-byte-length@npm:1.0.1" @@ -6244,6 +6984,13 @@ __metadata: languageName: node linkType: hard +"assertion-error@npm:^2.0.1": + version: 2.0.1 + resolution: "assertion-error@npm:2.0.1" + checksum: a0789dd882211b87116e81e2648ccb7f60340b34f19877dd020b39ebb4714e475eb943e14ba3e22201c221ef6645b7bfe10297e76b6ac95b48a9898c1211ce66 + languageName: node + linkType: hard + "ast-module-types@npm:^2.7.1": version: 2.7.1 resolution: "ast-module-types@npm:2.7.1" @@ -6274,10 +7021,28 @@ __metadata: languageName: node linkType: hard -"async@npm:^3.2.3": - version: 3.2.5 - resolution: "async@npm:3.2.5" - checksum: 5ec77f1312301dee02d62140a6b1f7ee0edd2a0f983b6fd2b0849b969f245225b990b47b8243e7b9ad16451a53e7f68e753700385b706198ced888beedba3af4 +"astral-regex@npm:^2.0.0": + version: 2.0.0 + resolution: "astral-regex@npm:2.0.0" + checksum: 876231688c66400473ba505731df37ea436e574dd524520294cc3bbc54ea40334865e01fa0d074d74d036ee874ee7e62f486ea38bc421ee8e6a871c06f011766 + languageName: node + linkType: hard + +"async-mutex@npm:0.4.0": + version: 0.4.0 + resolution: "async-mutex@npm:0.4.0" + dependencies: + tslib: ^2.4.0 + checksum: 813a71728b35a4fbfd64dba719f04726d9133c67b577fcd951b7028c4a675a13ee34e69beb82d621f87bf81f5d4f135c4c44be0448550c7db728547244ef71fc + languageName: node + 
linkType: hard + +"async@npm:^2.6.4": + version: 2.6.4 + resolution: "async@npm:2.6.4" + dependencies: + lodash: ^4.17.14 + checksum: a52083fb32e1ebe1d63e5c5624038bb30be68ff07a6c8d7dfe35e47c93fc144bd8652cbec869e0ac07d57dde387aa5f1386be3559cdee799cb1f789678d88e19 languageName: node linkType: hard @@ -6288,6 +7053,13 @@ __metadata: languageName: node linkType: hard +"atomic-sleep@npm:^1.0.0": + version: 1.0.0 + resolution: "atomic-sleep@npm:1.0.0" + checksum: b95275afb2f80732f22f43a60178430c468906a415a7ff18bcd0feeebc8eec3930b51250aeda91a476062a90e07132b43a1794e8d8ffcf9b650e8139be75fa36 + languageName: node + linkType: hard + "available-typed-arrays@npm:^1.0.7": version: 1.0.7 resolution: "available-typed-arrays@npm:1.0.7" @@ -6660,6 +7432,13 @@ __metadata: languageName: node linkType: hard +"browser-stdout@npm:^1.3.1": + version: 1.3.1 + resolution: "browser-stdout@npm:1.3.1" + checksum: b717b19b25952dd6af483e368f9bcd6b14b87740c3d226c2977a65e84666ffd67000bddea7d911f111a9b6ddc822b234de42d52ab6507bce4119a4cc003ef7b3 + languageName: node + linkType: hard + "browserify-aes@npm:^1.0.4, browserify-aes@npm:^1.2.0": version: 1.2.0 resolution: "browserify-aes@npm:1.2.0" @@ -7021,7 +7800,7 @@ __metadata: languageName: node linkType: hard -"camelcase@npm:^6.2.0": +"camelcase@npm:^6.0.0, camelcase@npm:^6.2.0": version: 6.3.0 resolution: "camelcase@npm:6.3.0" checksum: 8c96818a9076434998511251dcb2761a94817ea17dbdc37f47ac080bd088fc62c7369429a19e2178b993497132c8cbcf5cc1f44ba963e76782ba469c0474938d @@ -7056,7 +7835,31 @@ __metadata: languageName: node linkType: hard -"chalk-template@npm:0.4.0": +"chai-as-promised@npm:^8.0.1": + version: 8.0.1 + resolution: "chai-as-promised@npm:8.0.1" + dependencies: + check-error: ^2.0.0 + peerDependencies: + chai: ">= 2.1.2 < 6" + checksum: 7fa517bc70fd355ec91b543c8c1a1cf545355224cf75063f93b3ed32bb562f1f23a850135737d6337fa4e098c8baffbb9c74b05b1fcb3c4d290985a8c3c4890d + languageName: node + linkType: hard + +"chai@npm:^5.1.2": + version: 5.1.2 + resolution: "chai@npm:5.1.2" + dependencies: + assertion-error: ^2.0.1 + check-error: ^2.1.1 + deep-eql: ^5.0.1 + loupe: ^3.1.0 + pathval: ^2.0.0 + checksum: f2341967ab5632612548d372c27b46219adad3af35021d8cba2ae3c262f588de2c60cb3f004e6ad40e363a9cad6d20d0de51f00e7e9ac31cce17fb05d4efa316 + languageName: node + linkType: hard + +"chalk-template@npm:0.4.0, chalk-template@npm:^0.4.0": version: 0.4.0 resolution: "chalk-template@npm:0.4.0" dependencies: @@ -7134,7 +7937,14 @@ __metadata: languageName: node linkType: hard -"chokidar@npm:^3.6.0": +"check-error@npm:^2.0.0, check-error@npm:^2.1.1": + version: 2.1.1 + resolution: "check-error@npm:2.1.1" + checksum: d785ed17b1d4a4796b6e75c765a9a290098cf52ff9728ce0756e8ffd4293d2e419dd30c67200aee34202463b474306913f2fcfaf1890641026d9fc6966fea27a + languageName: node + linkType: hard + +"chokidar@npm:^3.5.3, chokidar@npm:^3.6.0": version: 3.6.0 resolution: "chokidar@npm:3.6.0" dependencies: @@ -7153,6 +7963,15 @@ __metadata: languageName: node linkType: hard +"chokidar@npm:^4.0.1": + version: 4.0.1 + resolution: "chokidar@npm:4.0.1" + dependencies: + readdirp: ^4.0.1 + checksum: 193da9786b0422a895d59c7552195d15c6c636e6a2293ae43d09e34e243e24ccd02d693f007c767846a65abbeae5fea6bfacb8fc2ddec4ea4d397620d552010d + languageName: node + linkType: hard + "chownr@npm:^2.0.0": version: 2.0.0 resolution: "chownr@npm:2.0.0" @@ -7160,6 +7979,20 @@ __metadata: languageName: node linkType: hard +"chrome-launcher@npm:^0.15.0": + version: 0.15.2 + resolution: "chrome-launcher@npm:0.15.2" + dependencies: + 
"@types/node": "*" + escape-string-regexp: ^4.0.0 + is-wsl: ^2.2.0 + lighthouse-logger: ^1.0.0 + bin: + print-chrome-path: bin/print-chrome-path.js + checksum: e1f8131b9f7bd931248ea85f413c6cdb93a0d41440ff5bf0987f36afb081d2b2c7b60ba6062ee7ae2dd9b052143f6b275b38c9eb115d11b49c3ea8829bad7db0 + languageName: node + linkType: hard + "chrome-trace-event@npm:^1.0.2": version: 1.0.3 resolution: "chrome-trace-event@npm:1.0.3" @@ -7193,6 +8026,19 @@ __metadata: languageName: node linkType: hard +"chromium-bidi@npm:0.8.0": + version: 0.8.0 + resolution: "chromium-bidi@npm:0.8.0" + dependencies: + mitt: 3.0.1 + urlpattern-polyfill: 10.0.0 + zod: 3.23.8 + peerDependencies: + devtools-protocol: "*" + checksum: 5c5b12e00564b6f145511ca4c159db24b07bfc6a1eb6add26d88c4331d74ef6cc8cd2f58bc169e0726c689910a1888313722f39ccab1bac14284d1918155d5e9 + languageName: node + linkType: hard + "ci-info@npm:^3.2.0": version: 3.9.0 resolution: "ci-info@npm:3.9.0" @@ -7274,6 +8120,17 @@ __metadata: languageName: node linkType: hard +"cliui@npm:^7.0.2": + version: 7.0.4 + resolution: "cliui@npm:7.0.4" + dependencies: + string-width: ^4.2.0 + strip-ansi: ^6.0.0 + wrap-ansi: ^7.0.0 + checksum: ce2e8f578a4813806788ac399b9e866297740eecd4ad1823c27fd344d78b22c5f8597d548adbcc46f0573e43e21e751f39446c5a5e804a12aace402b7a315d7f + languageName: node + linkType: hard + "cliui@npm:^8.0.1": version: 8.0.1 resolution: "cliui@npm:8.0.1" @@ -7303,6 +8160,13 @@ __metadata: languageName: node linkType: hard +"clone@npm:^2.1.2": + version: 2.1.2 + resolution: "clone@npm:2.1.2" + checksum: aaf106e9bc025b21333e2f4c12da539b568db4925c0501a1bf4070836c9e848c892fa22c35548ce0d1132b08bbbfa17a00144fe58fccdab6fa900fec4250f67d + languageName: node + linkType: hard + "co-body@npm:^6.0.0": version: 6.1.0 resolution: "co-body@npm:6.1.0" @@ -7315,6 +8179,19 @@ __metadata: languageName: node linkType: hard +"co-body@npm:^6.1.0": + version: 6.2.0 + resolution: "co-body@npm:6.2.0" + dependencies: + "@hapi/bourne": ^3.0.0 + inflation: ^2.0.0 + qs: ^6.5.2 + raw-body: ^2.3.3 + type-is: ^1.6.16 + checksum: c89336086bb746291b5efd8999403eadce34810f2f1936ab4d38d2cb4290b7fc6b966d1d4e993a2788b3e954b8df63195dbdcb431a06ef2b0ac086fce8ae5c4c + languageName: node + linkType: hard + "co@npm:^4.6.0": version: 4.6.0 resolution: "co@npm:4.6.0" @@ -7336,7 +8213,7 @@ __metadata: languageName: node linkType: hard -"color-convert@npm:^1.9.0, color-convert@npm:^1.9.3": +"color-convert@npm:^1.9.0": version: 1.9.3 resolution: "color-convert@npm:1.9.3" dependencies: @@ -7361,50 +8238,20 @@ __metadata: languageName: node linkType: hard -"color-name@npm:^1.0.0, color-name@npm:^1.1.4, color-name@npm:~1.1.4": +"color-name@npm:^1.1.4, color-name@npm:~1.1.4": version: 1.1.4 resolution: "color-name@npm:1.1.4" checksum: b0445859521eb4021cd0fb0cc1a75cecf67fceecae89b63f62b201cca8d345baf8b952c966862a9d9a2632987d4f6581f0ec8d957dfacece86f0a7919316f610 languageName: node linkType: hard -"color-string@npm:^1.6.0": - version: 1.9.1 - resolution: "color-string@npm:1.9.1" - dependencies: - color-name: ^1.0.0 - simple-swizzle: ^0.2.2 - checksum: c13fe7cff7885f603f49105827d621ce87f4571d78ba28ef4a3f1a104304748f620615e6bf065ecd2145d0d9dad83a3553f52bb25ede7239d18e9f81622f1cc5 - languageName: node - linkType: hard - -"color@npm:^3.1.3": - version: 3.2.1 - resolution: "color@npm:3.2.1" - dependencies: - color-convert: ^1.9.3 - color-string: ^1.6.0 - checksum: f81220e8b774d35865c2561be921f5652117638dcda7ca4029262046e37fc2444ac7bbfdd110cf1fd9c074a4ee5eda8f85944ffbdda26186b602dd9bb05f6400 - languageName: node - 
linkType: hard - -"colorette@npm:^2.0.10, colorette@npm:^2.0.14": +"colorette@npm:^2.0.10, colorette@npm:^2.0.14, colorette@npm:^2.0.20, colorette@npm:^2.0.7": version: 2.0.20 resolution: "colorette@npm:2.0.20" checksum: 0c016fea2b91b733eb9f4bcdb580018f52c0bc0979443dad930e5037a968237ac53d9beb98e218d2e9235834f8eebce7f8e080422d6194e957454255bde71d3d languageName: node linkType: hard -"colorspace@npm:1.1.x": - version: 1.1.4 - resolution: "colorspace@npm:1.1.4" - dependencies: - color: ^3.1.3 - text-hex: 1.0.x - checksum: bb3934ef3c417e961e6d03d7ca60ea6e175947029bfadfcdb65109b01881a1c0ecf9c2b0b59abcd0ee4a0d7c1eae93beed01b0e65848936472270a0b341ebce8 - languageName: node - linkType: hard - "combine-source-map@npm:^0.8.0, combine-source-map@npm:~0.8.0": version: 0.8.0 resolution: "combine-source-map@npm:0.8.0" @@ -7464,6 +8311,18 @@ __metadata: languageName: node linkType: hard +"command-line-usage@npm:^7.0.1": + version: 7.0.3 + resolution: "command-line-usage@npm:7.0.3" + dependencies: + array-back: ^6.2.2 + chalk-template: ^0.4.0 + table-layout: ^4.1.0 + typical: ^7.1.1 + checksum: cb65d94c71ac380d6133460fa16d15c3d6dde00746498d60dcd12989fffeb90d1373230135c97e0bd7019874edd913f9df8b87b0afc7180811117342ae950ff4 + languageName: node + linkType: hard + "commander@npm:^10.0.1": version: 10.0.1 resolution: "commander@npm:10.0.1" @@ -8161,6 +9020,13 @@ __metadata: languageName: node linkType: hard +"dateformat@npm:^4.6.3": + version: 4.6.3 + resolution: "dateformat@npm:4.6.3" + checksum: c3aa0617c0a5b30595122bc8d1bee6276a9221e4d392087b41cbbdf175d9662ae0e50d0d6dcdf45caeac5153c4b5b0844265f8cd2b2245451e3da19e39e3b65d + languageName: node + linkType: hard + "debounce@npm:^1.2.0": version: 1.2.1 resolution: "debounce@npm:1.2.1" @@ -8168,7 +9034,7 @@ __metadata: languageName: node linkType: hard -"debug@npm:2.6.9": +"debug@npm:2.6.9, debug@npm:^2.6.9": version: 2.6.9 resolution: "debug@npm:2.6.9" dependencies: @@ -8198,7 +9064,7 @@ __metadata: languageName: node linkType: hard -"debug@npm:^4.3.5, debug@npm:^4.3.6": +"debug@npm:^4.3.5, debug@npm:^4.3.6, debug@npm:^4.3.7": version: 4.3.7 resolution: "debug@npm:4.3.7" dependencies: @@ -8227,6 +9093,13 @@ __metadata: languageName: node linkType: hard +"decamelize@npm:^4.0.0": + version: 4.0.0 + resolution: "decamelize@npm:4.0.0" + checksum: b7d09b82652c39eead4d6678bb578e3bebd848add894b76d0f6b395bc45b2d692fb88d977e7cfb93c4ed6c119b05a1347cef261174916c2e75c0a8ca57da1809 + languageName: node + linkType: hard + "dedent@npm:^1.0.0": version: 1.5.3 resolution: "dedent@npm:1.5.3" @@ -8239,6 +9112,13 @@ __metadata: languageName: node linkType: hard +"deep-eql@npm:^5.0.1": + version: 5.0.2 + resolution: "deep-eql@npm:5.0.2" + checksum: 6aaaadb4c19cbce42e26b2bbe5bd92875f599d2602635dc97f0294bae48da79e89470aedee05f449e0ca8c65e9fd7e7872624d1933a1db02713d99c2ca8d1f24 + languageName: node + linkType: hard + "deep-equal@npm:~1.0.1": version: 1.0.1 resolution: "deep-equal@npm:1.0.1" @@ -8284,6 +9164,15 @@ __metadata: languageName: node linkType: hard +"default-gateway@npm:^6.0.0": + version: 6.0.3 + resolution: "default-gateway@npm:6.0.3" + dependencies: + execa: ^5.0.0 + checksum: 126f8273ecac8ee9ff91ea778e8784f6cd732d77c3157e8c5bdd6ed03651b5291f71446d05bc02d04073b1e67583604db5394ea3cf992ede0088c70ea15b7378 + languageName: node + linkType: hard + "defaults@npm:^1.0.3": version: 1.0.4 resolution: "defaults@npm:1.0.4" @@ -8314,6 +9203,13 @@ __metadata: languageName: node linkType: hard +"define-lazy-prop@npm:^2.0.0": + version: 2.0.0 + resolution: "define-lazy-prop@npm:2.0.0" + 
checksum: 0115fdb065e0490918ba271d7339c42453d209d4cb619dfe635870d906731eff3e1ade8028bb461ea27ce8264ec5e22c6980612d332895977e89c1bbc80fcee2 + languageName: node + linkType: hard + "define-lazy-prop@npm:^3.0.0": version: 3.0.0 resolution: "define-lazy-prop@npm:3.0.0" @@ -8392,6 +9288,13 @@ __metadata: languageName: node linkType: hard +"dependency-graph@npm:^0.11.0": + version: 0.11.0 + resolution: "dependency-graph@npm:0.11.0" + checksum: 477204beaa9be69e642bc31ffe7a8c383d0cf48fa27acbc91c5df01431ab913e65c154213d2ef83d034c98d77280743ec85e5da018a97a18dd43d3c0b78b28cd + languageName: node + linkType: hard + "dependency-tree@npm:^9.0.0": version: 9.0.0 resolution: "dependency-tree@npm:9.0.0" @@ -8671,6 +9574,13 @@ __metadata: languageName: node linkType: hard +"devtools-protocol@npm:0.0.1367902": + version: 0.0.1367902 + resolution: "devtools-protocol@npm:0.0.1367902" + checksum: ef1115f4b287ab033c5342f7ba7fbf45314c3b46db2195978db0096b368ffbb79157a69dc361fa539874a37fea87101267049957285b1ecbaa1a96f6df6cf344 + languageName: node + linkType: hard + "dezalgo@npm:^1.0.4": version: 1.0.4 resolution: "dezalgo@npm:1.0.4" @@ -8695,6 +9605,13 @@ __metadata: languageName: node linkType: hard +"diff@npm:^5.0.0, diff@npm:^5.2.0": + version: 5.2.0 + resolution: "diff@npm:5.2.0" + checksum: 12b63ca9c36c72bafa3effa77121f0581b4015df18bc16bac1f8e263597735649f1a173c26f7eba17fb4162b073fee61788abe49610e6c70a2641fe1895443fd + languageName: node + linkType: hard + "diffie-hellman@npm:^5.0.0": version: 5.0.3 resolution: "diffie-hellman@npm:5.0.3" @@ -8915,13 +9832,6 @@ __metadata: languageName: node linkType: hard -"enabled@npm:2.0.x": - version: 2.0.0 - resolution: "enabled@npm:2.0.0" - checksum: 9d256d89f4e8a46ff988c6a79b22fa814b4ffd82826c4fdacd9b42e9b9465709d3b748866d0ab4d442dfc6002d81de7f7b384146ccd1681f6a7f868d2acca063 - languageName: node - linkType: hard - "encodeurl@npm:^1.0.2, encodeurl@npm:~1.0.2": version: 1.0.2 resolution: "encodeurl@npm:1.0.2" @@ -9027,6 +9937,13 @@ __metadata: languageName: node linkType: hard +"errorstacks@npm:^2.2.0": + version: 2.4.1 + resolution: "errorstacks@npm:2.4.1" + checksum: 1b46bdd3c40d3e30dbb6945c0529ffbef6ccdf2260eeecff6cc1ee95b708ec732094597d6adaa53ffe18d045150b366e3f7472d8594946f430941bfa4ad54479 + languageName: node + linkType: hard + "es-abstract@npm:^1.22.1, es-abstract@npm:^1.22.3, es-abstract@npm:^1.23.0, es-abstract@npm:^1.23.2": version: 1.23.3 resolution: "es-abstract@npm:1.23.3" @@ -9097,6 +10014,13 @@ __metadata: languageName: node linkType: hard +"es-module-lexer@npm:^1.0.0": + version: 1.5.4 + resolution: "es-module-lexer@npm:1.5.4" + checksum: a0cf04fb92d052647ac7d818d1913b98d3d3d0f5b9d88f0eafb993436e4c3e2c958599db68839d57f2dfa281fdf0f60e18d448eb78fc292c33c0f25635b6854f + languageName: node + linkType: hard + "es-module-lexer@npm:^1.2.1": version: 1.5.2 resolution: "es-module-lexer@npm:1.5.2" @@ -9221,6 +10145,89 @@ __metadata: languageName: node linkType: hard +"esbuild@npm:^0.24.0": + version: 0.24.0 + resolution: "esbuild@npm:0.24.0" + dependencies: + "@esbuild/aix-ppc64": 0.24.0 + "@esbuild/android-arm": 0.24.0 + "@esbuild/android-arm64": 0.24.0 + "@esbuild/android-x64": 0.24.0 + "@esbuild/darwin-arm64": 0.24.0 + "@esbuild/darwin-x64": 0.24.0 + "@esbuild/freebsd-arm64": 0.24.0 + "@esbuild/freebsd-x64": 0.24.0 + "@esbuild/linux-arm": 0.24.0 + "@esbuild/linux-arm64": 0.24.0 + "@esbuild/linux-ia32": 0.24.0 + "@esbuild/linux-loong64": 0.24.0 + "@esbuild/linux-mips64el": 0.24.0 + "@esbuild/linux-ppc64": 0.24.0 + "@esbuild/linux-riscv64": 0.24.0 + 
"@esbuild/linux-s390x": 0.24.0 + "@esbuild/linux-x64": 0.24.0 + "@esbuild/netbsd-x64": 0.24.0 + "@esbuild/openbsd-arm64": 0.24.0 + "@esbuild/openbsd-x64": 0.24.0 + "@esbuild/sunos-x64": 0.24.0 + "@esbuild/win32-arm64": 0.24.0 + "@esbuild/win32-ia32": 0.24.0 + "@esbuild/win32-x64": 0.24.0 + dependenciesMeta: + "@esbuild/aix-ppc64": + optional: true + "@esbuild/android-arm": + optional: true + "@esbuild/android-arm64": + optional: true + "@esbuild/android-x64": + optional: true + "@esbuild/darwin-arm64": + optional: true + "@esbuild/darwin-x64": + optional: true + "@esbuild/freebsd-arm64": + optional: true + "@esbuild/freebsd-x64": + optional: true + "@esbuild/linux-arm": + optional: true + "@esbuild/linux-arm64": + optional: true + "@esbuild/linux-ia32": + optional: true + "@esbuild/linux-loong64": + optional: true + "@esbuild/linux-mips64el": + optional: true + "@esbuild/linux-ppc64": + optional: true + "@esbuild/linux-riscv64": + optional: true + "@esbuild/linux-s390x": + optional: true + "@esbuild/linux-x64": + optional: true + "@esbuild/netbsd-x64": + optional: true + "@esbuild/openbsd-arm64": + optional: true + "@esbuild/openbsd-x64": + optional: true + "@esbuild/sunos-x64": + optional: true + "@esbuild/win32-arm64": + optional: true + "@esbuild/win32-ia32": + optional: true + "@esbuild/win32-x64": + optional: true + bin: + esbuild: bin/esbuild + checksum: dd386d92a05c7eb03078480522cdd8b40c434777b5f08487c27971d30933ecaae3f08bd221958dd8f9c66214915cdc85f844283ca9bdbf8ee703d889ae526edd + languageName: node + linkType: hard + "escalade@npm:^3.1.1, escalade@npm:^3.1.2": version: 3.1.2 resolution: "escalade@npm:3.1.2" @@ -9536,6 +10543,13 @@ __metadata: languageName: node linkType: hard +"estree-walker@npm:^2.0.2": + version: 2.0.2 + resolution: "estree-walker@npm:2.0.2" + checksum: 6151e6f9828abe2259e57f5fd3761335bb0d2ebd76dc1a01048ccee22fabcfef3c0859300f6d83ff0d1927849368775ec5a6d265dde2f6de5a1be1721cd94efc + languageName: node + linkType: hard + "esutils@npm:^2.0.2": version: 2.0.3 resolution: "esutils@npm:2.0.3" @@ -9543,7 +10557,7 @@ __metadata: languageName: node linkType: hard -"etag@npm:~1.8.1": +"etag@npm:^1.8.1, etag@npm:~1.8.1": version: 1.8.1 resolution: "etag@npm:1.8.1" checksum: 571aeb3dbe0f2bbd4e4fadbdb44f325fc75335cd5f6f6b6a091e6a06a9f25ed5392f0863c5442acb0646787446e816f13cbfc6edce5b07658541dff573cab1ff @@ -9577,13 +10591,6 @@ __metadata: languageName: node linkType: hard -"event-target-shim@npm:^5.0.0": - version: 5.0.1 - resolution: "event-target-shim@npm:5.0.1" - checksum: 1ffe3bb22a6d51bdeb6bf6f7cf97d2ff4a74b017ad12284cc9e6a279e727dc30a5de6bb613e5596ff4dc3e517841339ad09a7eec44266eccb1aa201a30448166 - languageName: node - linkType: hard - "eventemitter3@npm:^4.0.0": version: 4.0.7 resolution: "eventemitter3@npm:4.0.7" @@ -9753,6 +10760,13 @@ __metadata: languageName: node linkType: hard +"fast-copy@npm:^3.0.2": + version: 3.0.2 + resolution: "fast-copy@npm:3.0.2" + checksum: 47f584bcede08ab3198559d3e0e093a547d567715b86be2198da6e3366c3c73eed550d97b86f9fb90dae179982b89c15d68187def960f522cdce14bacdfc6184 + languageName: node + linkType: hard + "fast-deep-equal@npm:^3.1.1, fast-deep-equal@npm:^3.1.3": version: 3.1.3 resolution: "fast-deep-equal@npm:3.1.3" @@ -9794,6 +10808,13 @@ __metadata: languageName: node linkType: hard +"fast-redact@npm:^3.1.1": + version: 3.5.0 + resolution: "fast-redact@npm:3.5.0" + checksum: ef03f0d1849da074a520a531ad299bf346417b790a643931ab4e01cb72275c8d55b60dc8512fb1f1818647b696790edefaa96704228db9f012da935faa1940af + languageName: node + linkType: 
hard + "fast-safe-stringify@npm:^2.0.7, fast-safe-stringify@npm:^2.1.1": version: 2.1.1 resolution: "fast-safe-stringify@npm:2.1.1" @@ -9851,13 +10872,6 @@ __metadata: languageName: node linkType: hard -"fecha@npm:^4.2.0": - version: 4.2.3 - resolution: "fecha@npm:4.2.3" - checksum: f94e2fb3acf5a7754165d04549460d3ae6c34830394d20c552197e3e000035d69732d74af04b9bed3283bf29fe2a9ebdcc0085e640b0be3cc3658b9726265e31 - languageName: node - linkType: hard - "fflate@npm:^0.8.0": version: 0.8.2 resolution: "fflate@npm:0.8.2" @@ -9874,15 +10888,6 @@ __metadata: languageName: node linkType: hard -"file-stream-rotator@npm:^0.6.1": - version: 0.6.1 - resolution: "file-stream-rotator@npm:0.6.1" - dependencies: - moment: ^2.29.1 - checksum: ebdf6a9e7ca886a50f4dafb2284d4569cefd5bdf4e4451ead25f4d68b7f9776b2620a3d110d534edd40935d1e17f37d818e2129303201870ff89c71b19b49ac1 - languageName: node - linkType: hard - "file-uri-to-path@npm:1.0.0": version: 1.0.0 resolution: "file-uri-to-path@npm:1.0.0" @@ -10009,13 +11014,6 @@ __metadata: languageName: node linkType: hard -"fn.name@npm:1.x.x": - version: 1.1.0 - resolution: "fn.name@npm:1.1.0" - checksum: e357144f48cfc9a7f52a82bbc6c23df7c8de639fce049cac41d41d62cabb740cdb9f14eddc6485e29c933104455bdd7a69bb14a9012cef9cd4fa252a4d0cf293 - languageName: node - linkType: hard - "follow-redirects@npm:^1.0.0, follow-redirects@npm:^1.12.1, follow-redirects@npm:^1.15.6": version: 1.15.6 resolution: "follow-redirects@npm:1.15.6" @@ -10398,6 +11396,19 @@ __metadata: languageName: node linkType: hard +"glob@npm:^8.1.0": + version: 8.1.0 + resolution: "glob@npm:8.1.0" + dependencies: + fs.realpath: ^1.0.0 + inflight: ^1.0.4 + inherits: 2 + minimatch: ^5.0.1 + once: ^1.3.0 + checksum: 92fbea3221a7d12075f26f0227abac435de868dd0736a17170663783296d0dd8d3d532a5672b4488a439bf5d7fb85cdd07c11185d6cd39184f0385cbdfb86a47 + languageName: node + linkType: hard + "globals@npm:^11.1.0": version: 11.12.0 resolution: "globals@npm:11.12.0" @@ -10639,6 +11650,13 @@ __metadata: languageName: node linkType: hard +"help-me@npm:^5.0.0": + version: 5.0.0 + resolution: "help-me@npm:5.0.0" + checksum: 474436627b6c7d2f406a2768453895889eb2712c8ded4c47658d5c6dd46c2ff3f742be4e4e8dedd57b7f1ac6b28803896a2e026a32a977f507222c16f23ab2e1 + languageName: node + linkType: hard + "hexoid@npm:^1.0.0": version: 1.0.0 resolution: "hexoid@npm:1.0.0" @@ -10934,6 +11952,13 @@ __metadata: languageName: node linkType: hard +"idb@npm:^8.0.0": + version: 8.0.0 + resolution: "idb@npm:8.0.0" + checksum: a9c6176c176dc1a73520ae906d33fcda8a6f6068cf64027e196763d4ad70b088b7141650ed68f3604e0f0ccd1a123f6b8a435ba5e4514f42ada3460c23b6747a + languageName: node + linkType: hard + "ieee754@npm:^1.1.13, ieee754@npm:^1.1.4, ieee754@npm:^1.2.1": version: 1.2.1 resolution: "ieee754@npm:1.2.1" @@ -11090,6 +12115,18 @@ __metadata: languageName: node linkType: hard +"internal-ip@npm:^6.2.0": + version: 6.2.0 + resolution: "internal-ip@npm:6.2.0" + dependencies: + default-gateway: ^6.0.0 + ipaddr.js: ^1.9.1 + is-ip: ^3.1.0 + p-event: ^4.2.0 + checksum: 6d08299c052c4ec926fa4e3643049d81daacbc33d3ab90fc30cd59cd7f12902152107bc75778e1202a269eb82cab0c412ff04a78f0361e056de0c5c2628881fa + languageName: node + linkType: hard + "internal-slot@npm:^1.0.7": version: 1.0.7 resolution: "internal-slot@npm:1.0.7" @@ -11125,6 +12162,13 @@ __metadata: languageName: node linkType: hard +"ip-regex@npm:^4.0.0": + version: 4.3.0 + resolution: "ip-regex@npm:4.3.0" + checksum: 
7ff904b891221b1847f3fdf3dbb3e6a8660dc39bc283f79eb7ed88f5338e1a3d1104b779bc83759159be266249c59c2160e779ee39446d79d4ed0890dfd06f08 + languageName: node + linkType: hard + "ip-regex@npm:^5.0.0": version: 5.0.0 resolution: "ip-regex@npm:5.0.0" @@ -11132,7 +12176,7 @@ __metadata: languageName: node linkType: hard -"ipaddr.js@npm:1.9.1": +"ipaddr.js@npm:1.9.1, ipaddr.js@npm:^1.9.1": version: 1.9.1 resolution: "ipaddr.js@npm:1.9.1" checksum: f88d3825981486f5a1942414c8d77dd6674dd71c065adcfa46f578d677edcb99fda25af42675cb59db492fdf427b34a5abfcde3982da11a8fd83a500b41cfe77 @@ -11173,13 +12217,6 @@ __metadata: languageName: node linkType: hard -"is-arrayish@npm:^0.3.1": - version: 0.3.2 - resolution: "is-arrayish@npm:0.3.2" - checksum: 977e64f54d91c8f169b59afcd80ff19227e9f5c791fa28fa2e5bce355cbaf6c2c356711b734656e80c9dd4a854dd7efcf7894402f1031dfc5de5d620775b4d5f - languageName: node - linkType: hard - "is-bigint@npm:^1.0.1": version: 1.0.4 resolution: "is-bigint@npm:1.0.4" @@ -11263,7 +12300,7 @@ __metadata: languageName: node linkType: hard -"is-docker@npm:^2.0.0": +"is-docker@npm:^2.0.0, is-docker@npm:^2.1.1": version: 2.2.1 resolution: "is-docker@npm:2.2.1" bin: @@ -11354,6 +12391,15 @@ __metadata: languageName: node linkType: hard +"is-ip@npm:^3.1.0": + version: 3.1.0 + resolution: "is-ip@npm:3.1.0" + dependencies: + ip-regex: ^4.0.0 + checksum: da2c2b282407194adf2320bade0bad94be9c9d0bdab85ff45b1b62d8185f31c65dff3884519d57bf270277e5ea2046c7916a6e5a6db22fe4b7ddcdd3760f23eb + languageName: node + linkType: hard + "is-lambda@npm:^1.0.1": version: 1.0.1 resolution: "is-lambda@npm:1.0.1" @@ -11368,6 +12414,13 @@ __metadata: languageName: node linkType: hard +"is-module@npm:^1.0.0": + version: 1.0.0 + resolution: "is-module@npm:1.0.0" + checksum: 8cd5390730c7976fb4e8546dd0b38865ee6f7bacfa08dfbb2cc07219606755f0b01709d9361e01f13009bbbd8099fa2927a8ed665118a6105d66e40f1b838c3f + languageName: node + linkType: hard + "is-negative-zero@npm:^2.0.3": version: 2.0.3 resolution: "is-negative-zero@npm:2.0.3" @@ -11592,6 +12645,13 @@ __metadata: languageName: node linkType: hard +"isbinaryfile@npm:^5.0.0": + version: 5.0.4 + resolution: "isbinaryfile@npm:5.0.4" + checksum: d88982a889369d83a5937b4b4d2288ed3b3dbbcee8fc74db40058f3c089a2c7beb9e5305b7177e82d87ff38fb62be8d60960f7a2d669ca08240ef31c1435b884 + languageName: node + linkType: hard + "isexe@npm:^2.0.0": version: 2.0.0 resolution: "isexe@npm:2.0.0" @@ -11655,7 +12715,7 @@ __metadata: languageName: node linkType: hard -"istanbul-lib-report@npm:^3.0.0": +"istanbul-lib-report@npm:^3.0.0, istanbul-lib-report@npm:^3.0.1": version: 3.0.1 resolution: "istanbul-lib-report@npm:3.0.1" dependencies: @@ -11677,7 +12737,7 @@ __metadata: languageName: node linkType: hard -"istanbul-reports@npm:^3.1.3": +"istanbul-reports@npm:^3.0.2, istanbul-reports@npm:^3.1.3": version: 3.1.7 resolution: "istanbul-reports@npm:3.1.7" dependencies: @@ -12434,6 +13494,13 @@ __metadata: languageName: node linkType: hard +"joycon@npm:^3.1.1": + version: 3.1.1 + resolution: "joycon@npm:3.1.1" + checksum: 8003c9c3fc79c5c7602b1c7e9f7a2df2e9916f046b0dbad862aa589be78c15734d11beb9fe846f5e06138df22cb2ad29961b6a986ba81c4920ce2b15a7f11067 + languageName: node + linkType: hard + "js-sha3@npm:0.8.0": version: 0.8.0 resolution: "js-sha3@npm:0.8.0" @@ -12668,6 +13735,15 @@ __metadata: languageName: node linkType: hard +"koa-etag@npm:^4.0.0": + version: 4.0.0 + resolution: "koa-etag@npm:4.0.0" + dependencies: + etag: ^1.8.1 + checksum: 
b5f413574e1edbd60fbbd0d31720e66565d51bfcb407d1bc3f48d9dd5b45fa5a9e4f69a60e749fad7397348e90de23e943307578d007a69da30faaae432deaf6 + languageName: node + linkType: hard + "koa-is-json@npm:^1.0.0": version: 1.0.0 resolution: "koa-is-json@npm:1.0.0" @@ -12688,7 +13764,7 @@ __metadata: languageName: node linkType: hard -"koa-send@npm:^5.0.0": +"koa-send@npm:^5.0.0, koa-send@npm:^5.0.1": version: 5.0.1 resolution: "koa-send@npm:5.0.1" dependencies: @@ -12709,7 +13785,7 @@ __metadata: languageName: node linkType: hard -"koa@npm:^2.14.2, koa@npm:^2.15.3": +"koa@npm:^2.13.0, koa@npm:^2.14.2, koa@npm:^2.15.3": version: 2.15.3 resolution: "koa@npm:2.15.3" dependencies: @@ -12740,13 +13816,6 @@ __metadata: languageName: node linkType: hard -"kuler@npm:^2.0.0": - version: 2.0.0 - resolution: "kuler@npm:2.0.0" - checksum: 9e10b5a1659f9ed8761d38df3c35effabffbd19fc6107324095238e4ef0ff044392cae9ac64a1c2dda26e532426485342226b93806bd97504b174b0dcf04ed81 - languageName: node - linkType: hard - "labeled-stream-splicer@npm:^2.0.0": version: 2.0.2 resolution: "labeled-stream-splicer@npm:2.0.2" @@ -12875,6 +13944,16 @@ __metadata: languageName: node linkType: hard +"lighthouse-logger@npm:^1.0.0": + version: 1.4.2 + resolution: "lighthouse-logger@npm:1.4.2" + dependencies: + debug: ^2.6.9 + marky: ^1.2.2 + checksum: ba6b73d93424318fab58b4e07c9ed246e3e969a3313f26b69515ed4c06457dd9a0b11bc706948398fdaef26aa4ba5e65cb848c37ce59f470d3c6c450b9b79a33 + languageName: node + linkType: hard + "lines-and-columns@npm:^1.1.6": version: 1.2.4 resolution: "lines-and-columns@npm:1.2.4" @@ -13059,7 +14138,7 @@ __metadata: languageName: node linkType: hard -"lodash@npm:^4.17.20, lodash@npm:^4.17.21": +"lodash@npm:^4.17.14, lodash@npm:^4.17.20, lodash@npm:^4.17.21": version: 4.17.21 resolution: "lodash@npm:4.17.21" checksum: eb835a2e51d381e561e508ce932ea50a8e5a68f4ebdd771ea240d3048244a8d13658acbd502cd4829768c56f2e16bdd4340b9ea141297d472517b83868e677f7 @@ -13076,31 +14155,15 @@ __metadata: languageName: node linkType: hard -"logform@npm:^2.3.2, logform@npm:^2.4.0": - version: 2.6.0 - resolution: "logform@npm:2.6.0" - dependencies: - "@colors/colors": 1.6.0 - "@types/triple-beam": ^1.3.2 - fecha: ^4.2.0 - ms: ^2.1.1 - safe-stable-stringify: ^2.3.1 - triple-beam: ^1.3.0 - checksum: b9ea74bb75e55379ad0eb3e4d65ae6e8d02bc45b431c218162878bf663997ab9258a73104c2b30e09dd2db288bb83c8bf8748e46689d75f5e7e34cf69378d6df - languageName: node - linkType: hard - -"logform@npm:^2.6.1": - version: 2.6.1 - resolution: "logform@npm:2.6.1" +"log-update@npm:^4.0.0": + version: 4.0.0 + resolution: "log-update@npm:4.0.0" dependencies: - "@colors/colors": 1.6.0 - "@types/triple-beam": ^1.3.2 - fecha: ^4.2.0 - ms: ^2.1.1 - safe-stable-stringify: ^2.3.1 - triple-beam: ^1.3.0 - checksum: 0c6b95fa8350ccc33c7c33d77de2a9920205399706fc1b125151c857b61eb90873f4670d9e0e58e58c165b68a363206ae670d6da8b714527c838da3c84449605 + ansi-escapes: ^4.3.0 + cli-cursor: ^3.1.0 + slice-ansi: ^4.0.0 + wrap-ansi: ^6.2.0 + checksum: ae2f85bbabc1906034154fb7d4c4477c79b3e703d22d78adee8b3862fa913942772e7fa11713e3d96fb46de4e3cabefbf5d0a544344f03b58d3c4bff52aa9eb2 languageName: node linkType: hard @@ -13111,6 +14174,13 @@ __metadata: languageName: node linkType: hard +"loupe@npm:^3.1.0": + version: 3.1.2 + resolution: "loupe@npm:3.1.2" + checksum: 4a75bbe8877a1ced3603e08b1095cd6f4c987c50fe63719fdc3009029560f91e07a915e7f6eff1322bb62bfb2a2beeef06b13ccb3c12f81bda9f3674434dcab9 + languageName: node + linkType: hard + "lower-case@npm:^1.1.1": version: 1.1.4 resolution: "lower-case@npm:1.1.4" @@ 
-13159,6 +14229,13 @@ __metadata: languageName: node linkType: hard +"lru-cache@npm:^8.0.4": + version: 8.0.5 + resolution: "lru-cache@npm:8.0.5" + checksum: 87d72196d8f46e8299c4ab576ed2ec8a07e3cbef517dc9874399c0b2470bd9bf62aacec3b67f84ed6d74aaa1ef31636d048edf996f76248fd17db72bfb631609 + languageName: node + linkType: hard + "ltgt@npm:^2.2.0": version: 2.2.1 resolution: "ltgt@npm:2.2.1" @@ -13303,6 +14380,13 @@ __metadata: languageName: node linkType: hard +"marky@npm:^1.2.2": + version: 1.2.5 + resolution: "marky@npm:1.2.5" + checksum: 823b946677749551cdfc3b5221685478b5d1b9cc0dc03eff977c6f9a615fb05c67559f9556cb3c0fcb941a9ea0e195e37befd83026443396ccee8b724f54f4c5 + languageName: node + linkType: hard + "md5.js@npm:^1.3.4": version: 1.3.5 resolution: "md5.js@npm:1.3.5" @@ -13553,6 +14637,15 @@ __metadata: languageName: node linkType: hard +"minimatch@npm:^5.0.1, minimatch@npm:^5.1.6": + version: 5.1.6 + resolution: "minimatch@npm:5.1.6" + dependencies: + brace-expansion: ^2.0.1 + checksum: 7564208ef81d7065a370f788d337cd80a689e981042cb9a1d0e6580b6c6a8c9279eba80010516e258835a988363f99f54a6f711a315089b8b42694f5da9d0d77 + languageName: node + linkType: hard + "minimatch@npm:^9.0.0, minimatch@npm:^9.0.1": version: 9.0.4 resolution: "minimatch@npm:9.0.4" @@ -13678,7 +14771,18 @@ __metadata: languageName: node linkType: hard -"mkdirp@npm:^1.0.3": +"mkdirp@npm:^0.5.6": + version: 0.5.6 + resolution: "mkdirp@npm:0.5.6" + dependencies: + minimist: ^1.2.6 + bin: + mkdirp: bin/cmd.js + checksum: 0c91b721bb12c3f9af4b77ebf73604baf350e64d80df91754dc509491ae93bf238581e59c7188360cec7cb62fc4100959245a42cfe01834efedc5e9d068376c2 + languageName: node + linkType: hard + +"mkdirp@npm:^1.0.3, mkdirp@npm:^1.0.4": version: 1.0.4 resolution: "mkdirp@npm:1.0.4" bin: @@ -13687,6 +14791,46 @@ __metadata: languageName: node linkType: hard +"mocha-each@npm:^2.0.1": + version: 2.0.1 + resolution: "mocha-each@npm:2.0.1" + dependencies: + sprintf-js: ^1.0.3 + checksum: 0de01ce517c2f7e7c3e19ef3f444809913f2f1602cb2571e6a3f8cb7ef3040f4f01b0f9f11a317e4ec1aeb9d39ceae4947c96668560cf638fe4d02ea549c2d4c + languageName: node + linkType: hard + +"mocha@npm:^10.8.2": + version: 10.8.2 + resolution: "mocha@npm:10.8.2" + dependencies: + ansi-colors: ^4.1.3 + browser-stdout: ^1.3.1 + chokidar: ^3.5.3 + debug: ^4.3.5 + diff: ^5.2.0 + escape-string-regexp: ^4.0.0 + find-up: ^5.0.0 + glob: ^8.1.0 + he: ^1.2.0 + js-yaml: ^4.1.0 + log-symbols: ^4.1.0 + minimatch: ^5.1.6 + ms: ^2.1.3 + serialize-javascript: ^6.0.2 + strip-json-comments: ^3.1.1 + supports-color: ^8.1.1 + workerpool: ^6.5.1 + yargs: ^16.2.0 + yargs-parser: ^20.2.9 + yargs-unparser: ^2.0.0 + bin: + _mocha: bin/_mocha + mocha: bin/mocha.js + checksum: 68cb519503f1e8ffd9b0651e1aef75dfe4754425186756b21e53169da44b5bcb1889e2b743711205082763d3f9a42eb8eb2c13bb1a718a08cb3a5f563bfcacdc + languageName: node + linkType: hard + "module-definition@npm:^3.3.1": version: 3.4.0 resolution: "module-definition@npm:3.4.0" @@ -13751,13 +14895,6 @@ __metadata: languageName: node linkType: hard -"moment@npm:^2.29.1": - version: 2.30.1 - resolution: "moment@npm:2.30.1" - checksum: 859236bab1e88c3e5802afcf797fc801acdbd0ee509d34ea3df6eea21eb6bcc2abd4ae4e4e64aa7c986aa6cba563c6e62806218e6412a765010712e5fa121ba6 - languageName: node - linkType: hard - "morphdom@npm:^2.3.3": version: 2.7.4 resolution: "morphdom@npm:2.7.4" @@ -13923,6 +15060,13 @@ __metadata: languageName: node linkType: hard +"nanocolors@npm:^0.2.1": + version: 0.2.13 + resolution: "nanocolors@npm:0.2.13" + checksum: 
01ac5aab77295c66cef83ea5f595e22f5f91518f19fae12f93ca2cba98703f971e32611fea2983f333eb7e60604043005690f61d9759e7c0a32314942fe6ddb8 + languageName: node + linkType: hard + "nanohtml@npm:^1.4.0": version: 1.10.0 resolution: "nanohtml@npm:1.10.0" @@ -13942,6 +15086,15 @@ __metadata: languageName: node linkType: hard +"nanoid@npm:^3.1.25": + version: 3.3.8 + resolution: "nanoid@npm:3.3.8" + bin: + nanoid: bin/nanoid.cjs + checksum: dfe0adbc0c77e9655b550c333075f51bb28cfc7568afbf3237249904f9c86c9aaaed1f113f0fddddba75673ee31c758c30c43d4414f014a52a7a626efc5958c9 + languageName: node + linkType: hard + "nanoid@npm:^3.3.7": version: 3.3.7 resolution: "nanoid@npm:3.3.7" @@ -14200,13 +15353,6 @@ __metadata: languageName: node linkType: hard -"object-hash@npm:^2.0.1": - version: 2.2.0 - resolution: "object-hash@npm:2.2.0" - checksum: 55ba841e3adce9c4f1b9b46b41983eda40f854e0d01af2802d3ae18a7085a17168d6b81731d43fdf1d6bcbb3c9f9c56d22c8fea992203ad90a38d7d919bc28f1 - languageName: node - linkType: hard - "object-inspect@npm:^1.13.1": version: 1.13.1 resolution: "object-inspect@npm:1.13.1" @@ -14281,6 +15427,13 @@ __metadata: languageName: node linkType: hard +"on-exit-leak-free@npm:^2.1.0": + version: 2.1.2 + resolution: "on-exit-leak-free@npm:2.1.2" + checksum: 6ce7acdc7b9ceb51cf029b5239cbf41937ee4c8dcd9d4e475e1777b41702564d46caa1150a744e00da0ac6d923ab83471646a39a4470f97481cf6e2d8d253c3f + languageName: node + linkType: hard + "on-finished@npm:2.4.1, on-finished@npm:^2.3.0, on-finished@npm:^2.4.1": version: 2.4.1 resolution: "on-finished@npm:2.4.1" @@ -14313,15 +15466,6 @@ __metadata: languageName: node linkType: hard -"one-time@npm:^1.0.0": - version: 1.0.0 - resolution: "one-time@npm:1.0.0" - dependencies: - fn.name: 1.x.x - checksum: fd008d7e992bdec1c67f53a2f9b46381ee12a9b8c309f88b21f0223546003fb47e8ad7c1fd5843751920a8d276c63bd4b45670ef80c61fb3e07dbccc962b5c7d - languageName: node - linkType: hard - "onetime@npm:^5.1.0, onetime@npm:^5.1.2": version: 5.1.2 resolution: "onetime@npm:5.1.2" @@ -14359,6 +15503,17 @@ __metadata: languageName: node linkType: hard +"open@npm:^8.0.2": + version: 8.4.2 + resolution: "open@npm:8.4.2" + dependencies: + define-lazy-prop: ^2.0.0 + is-docker: ^2.1.1 + is-wsl: ^2.2.0 + checksum: 6388bfff21b40cb9bd8f913f9130d107f2ed4724ea81a8fd29798ee322b361ca31fa2cdfb491a5c31e43a3996cfe9566741238c7a741ada8d7af1cb78d85cf26 + languageName: node + linkType: hard + "opn@npm:^5.4.0": version: 5.5.0 resolution: "opn@npm:5.5.0" @@ -14427,6 +15582,15 @@ __metadata: languageName: node linkType: hard +"p-event@npm:^4.2.0": + version: 4.2.0 + resolution: "p-event@npm:4.2.0" + dependencies: + p-timeout: ^3.1.0 + checksum: 8a3588f7a816a20726a3262dfeee70a631e3997e4773d23219176333eda55cce9a76219e3d2b441b331eb746e14fdb381eb2694ab9ff2fcf87c846462696fe89 + languageName: node + linkType: hard + "p-event@npm:^6.0.0": version: 6.0.1 resolution: "p-event@npm:6.0.1" @@ -14436,6 +15600,13 @@ __metadata: languageName: node linkType: hard +"p-finally@npm:^1.0.0": + version: 1.0.0 + resolution: "p-finally@npm:1.0.0" + checksum: 93a654c53dc805dd5b5891bab16eb0ea46db8f66c4bfd99336ae929323b1af2b70a8b0654f8f1eae924b2b73d037031366d645f1fd18b3d30cbd15950cc4b1d4 + languageName: node + linkType: hard + "p-limit@npm:^2.2.0": version: 2.3.0 resolution: "p-limit@npm:2.3.0" @@ -14512,6 +15683,15 @@ __metadata: languageName: node linkType: hard +"p-timeout@npm:^3.1.0": + version: 3.2.0 + resolution: "p-timeout@npm:3.2.0" + dependencies: + p-finally: ^1.0.0 + checksum: 
3dd0eaa048780a6f23e5855df3dd45c7beacff1f820476c1d0d1bcd6648e3298752ba2c877aa1c92f6453c7dd23faaf13d9f5149fc14c0598a142e2c5e8d649c + languageName: node + linkType: hard + "p-timeout@npm:^5.0.2": version: 5.1.0 resolution: "p-timeout@npm:5.1.0" @@ -14634,6 +15814,13 @@ __metadata: languageName: node linkType: hard +"parse5@npm:^6.0.1": + version: 6.0.1 + resolution: "parse5@npm:6.0.1" + checksum: 7d569a176c5460897f7c8f3377eff640d54132b9be51ae8a8fa4979af940830b2b0c296ce75e5bd8f4041520aadde13170dbdec44889975f906098ea0002f4bd + languageName: node + linkType: hard + "parseurl@npm:^1.3.2, parseurl@npm:~1.3.2, parseurl@npm:~1.3.3": version: 1.3.3 resolution: "parseurl@npm:1.3.3" @@ -14752,6 +15939,13 @@ __metadata: languageName: node linkType: hard +"pathval@npm:^2.0.0": + version: 2.0.0 + resolution: "pathval@npm:2.0.0" + checksum: 682b6a6289de7990909effef7dae9aa7bb6218c0426727bccf66a35b34e7bfbc65615270c5e44e3c9557a5cb44b1b9ef47fc3cb18bce6ad3ba92bcd28467ed7d + languageName: node + linkType: hard + "pause-stream@npm:0.0.11": version: 0.0.11 resolution: "pause-stream@npm:0.0.11" @@ -14795,13 +15989,80 @@ __metadata: languageName: node linkType: hard -"picomatch@npm:^2.0.4, picomatch@npm:^2.2.1, picomatch@npm:^2.2.3, picomatch@npm:^2.3.1": +"picomatch@npm:^2.0.4, picomatch@npm:^2.2.1, picomatch@npm:^2.2.2, picomatch@npm:^2.2.3, picomatch@npm:^2.3.1": version: 2.3.1 resolution: "picomatch@npm:2.3.1" checksum: 050c865ce81119c4822c45d3c84f1ced46f93a0126febae20737bd05ca20589c564d6e9226977df859ed5e03dc73f02584a2b0faad36e896936238238b0446cf languageName: node linkType: hard +"picomatch@npm:^4.0.2": + version: 4.0.2 + resolution: "picomatch@npm:4.0.2" + checksum: a7a5188c954f82c6585720e9143297ccd0e35ad8072231608086ca950bee672d51b0ef676254af0788205e59bd4e4deb4e7708769226bed725bf13370a7d1464 + languageName: node + linkType: hard + +"pino-abstract-transport@npm:^2.0.0": + version: 2.0.0 + resolution: "pino-abstract-transport@npm:2.0.0" + dependencies: + split2: ^4.0.0 + checksum: 4db0cd8a1a7b6d13e76dbb58e6adc057c39e4591c70f601f4a427c030d57dff748ab53954e1ecd3aa6e21c1a22dd38de96432606c6d906a7b9f610543bf1d6e2 + languageName: node + linkType: hard + +"pino-pretty@npm:^13.0.0": + version: 13.0.0 + resolution: "pino-pretty@npm:13.0.0" + dependencies: + colorette: ^2.0.7 + dateformat: ^4.6.3 + fast-copy: ^3.0.2 + fast-safe-stringify: ^2.1.1 + help-me: ^5.0.0 + joycon: ^3.1.1 + minimist: ^1.2.6 + on-exit-leak-free: ^2.1.0 + pino-abstract-transport: ^2.0.0 + pump: ^3.0.0 + secure-json-parse: ^2.4.0 + sonic-boom: ^4.0.1 + strip-json-comments: ^3.1.1 + bin: + pino-pretty: bin.js + checksum: a529219b3ccc99ed6a3e2de00ae6a8d4003344614bce39f836352317c962db8c3f4e9ee45843edc218cb9be618a7318b06fa6fab366d4314b9297d0130bc06f5 + languageName: node + linkType: hard + +"pino-std-serializers@npm:^7.0.0": + version: 7.0.0 + resolution: "pino-std-serializers@npm:7.0.0" + checksum: 08cd1d7b7adc4cfca39e42c2d5fd21bcf4513153734e7b8fa278b0e9e9f62df78c4c202886343fe882a462539c931cb8110b661775ad7f7217c96856795b5a86 + languageName: node + linkType: hard + +"pino@npm:^9.5.0": + version: 9.5.0 + resolution: "pino@npm:9.5.0" + dependencies: + atomic-sleep: ^1.0.0 + fast-redact: ^3.1.1 + on-exit-leak-free: ^2.1.0 + pino-abstract-transport: ^2.0.0 + pino-std-serializers: ^7.0.0 + process-warning: ^4.0.0 + quick-format-unescaped: ^4.0.3 + real-require: ^0.2.0 + safe-stable-stringify: ^2.3.1 + sonic-boom: ^4.0.1 + thread-stream: ^3.0.0 + bin: + pino: bin.js + checksum: 
650c3087619a619e92948641f0d9acc60cca594175b02fe1ce9c0923a8d07a8d120866f50b0848c26a5898837b8c1ae086adf67066180f686ea21e6e515a8558 + languageName: node + linkType: hard + "pirates@npm:^4.0.4": version: 4.0.6 resolution: "pirates@npm:4.0.6" @@ -14827,6 +16088,15 @@ __metadata: languageName: node linkType: hard +"playwright-core@npm:1.49.0": + version: 1.49.0 + resolution: "playwright-core@npm:1.49.0" + bin: + playwright-core: cli.js + checksum: d8423ad0cab2e672856529bf6b98b406e7e605da098b847b9b54ee8ebd8d716ed8880a9afff4b38f0a2e3f59b95661c74589116ce3ff2b5e0ae3561507086c94 + languageName: node + linkType: hard + "playwright@npm:1.48.2, playwright@npm:^1.48.2": version: 1.48.2 resolution: "playwright@npm:1.48.2" @@ -14842,6 +16112,21 @@ __metadata: languageName: node linkType: hard +"playwright@npm:^1.22.2": + version: 1.49.0 + resolution: "playwright@npm:1.49.0" + dependencies: + fsevents: 2.3.2 + playwright-core: 1.49.0 + dependenciesMeta: + fsevents: + optional: true + bin: + playwright: cli.js + checksum: f1bfb2fff65cad2ce996edab74ec231dfd21aeb5961554b765ce1eaec27efb87eaba37b00e91ecd27727b82861e5d8c230abe4960e93f6ada8be5ad1020df306 + languageName: node + linkType: hard + "pluralize@npm:^8.0.0": version: 8.0.0 resolution: "pluralize@npm:8.0.0" @@ -14849,6 +16134,17 @@ __metadata: languageName: node linkType: hard +"portfinder@npm:^1.0.32": + version: 1.0.32 + resolution: "portfinder@npm:1.0.32" + dependencies: + async: ^2.6.4 + debug: ^3.2.7 + mkdirp: ^0.5.6 + checksum: 116b4aed1b9e16f6d5503823d966d9ffd41b1c2339e27f54c06cd2f3015a9d8ef53e2a53b57bc0a25af0885977b692007353aa28f9a0a98a44335cb50487240d + languageName: node + linkType: hard + "possible-typed-array-names@npm:^1.0.0": version: 1.0.0 resolution: "possible-typed-array-names@npm:1.0.0" @@ -15022,6 +16318,13 @@ __metadata: languageName: node linkType: hard +"process-warning@npm:^4.0.0": + version: 4.0.0 + resolution: "process-warning@npm:4.0.0" + checksum: 39d5cee53649132f12479965857cb01793d62ee1a702f06d079ee8aceee935cd0f79c250faab60e86705d8a5226856a61c419778d48ac67f72e160cceb60a1e5 + languageName: node + linkType: hard + "process@npm:^0.11.10, process@npm:~0.11.0": version: 0.11.10 resolution: "process@npm:0.11.10" @@ -15190,7 +16493,7 @@ __metadata: languageName: node linkType: hard -"punycode@npm:^2.1.0": +"punycode@npm:^2.1.0, punycode@npm:^2.3.1": version: 2.3.1 resolution: "punycode@npm:2.3.1" checksum: bb0a0ceedca4c3c57a9b981b90601579058903c62be23c5e8e843d2c2d4148a3ecf029d5133486fb0e1822b098ba8bba09e89d6b21742d02fa26bda6441a6fb2 @@ -15223,6 +16526,20 @@ __metadata: languageName: node linkType: hard +"puppeteer-core@npm:^23.2.0": + version: 23.9.0 + resolution: "puppeteer-core@npm:23.9.0" + dependencies: + "@puppeteer/browsers": 2.4.1 + chromium-bidi: 0.8.0 + debug: ^4.3.7 + devtools-protocol: 0.0.1367902 + typed-query-selector: ^2.12.0 + ws: ^8.18.0 + checksum: 0195e3422eee84d0645e8b5f2bcd33a304cb54378bf6a5c9f2a020a59c1435e502987fdbe2766864b7cddb1732d8a27524023c75fbba83c8d2ed9b5209bb931b + languageName: node + linkType: hard + "puppeteer@npm:^22.2": version: 22.8.0 resolution: "puppeteer@npm:22.8.0" @@ -15313,6 +16630,13 @@ __metadata: languageName: node linkType: hard +"quick-format-unescaped@npm:^4.0.3": + version: 4.0.4 + resolution: "quick-format-unescaped@npm:4.0.4" + checksum: 7bc32b99354a1aa46c089d2a82b63489961002bb1d654cee3e6d2d8778197b68c2d854fd23d8422436ee1fdfd0abaddc4d4da120afe700ade68bd357815b26fd + languageName: node + linkType: hard + "quick-lru@npm:^4.0.1": version: 4.0.1 resolution: "quick-lru@npm:4.0.1" @@ 
-15465,16 +16789,10 @@ __metadata: languageName: node linkType: hard -"readable-stream@npm:^4.5.2": - version: 4.5.2 - resolution: "readable-stream@npm:4.5.2" - dependencies: - abort-controller: ^3.0.0 - buffer: ^6.0.3 - events: ^3.3.0 - process: ^0.11.10 - string_decoder: ^1.3.0 - checksum: c4030ccff010b83e4f33289c535f7830190773e274b3fcb6e2541475070bdfd69c98001c3b0cb78763fc00c8b62f514d96c2b10a8bd35d5ce45203a25fa1d33a +"readdirp@npm:^4.0.1": + version: 4.0.2 + resolution: "readdirp@npm:4.0.2" + checksum: 309376e717f94fb7eb61bec21e2603243a9e2420cd2e9bf94ddf026aefea0d7377ed1a62f016d33265682e44908049a55c3cfc2307450a1421654ea008489b39 languageName: node linkType: hard @@ -15487,6 +16805,13 @@ __metadata: languageName: node linkType: hard +"real-require@npm:^0.2.0": + version: 0.2.0 + resolution: "real-require@npm:0.2.0" + checksum: fa060f19f2f447adf678d1376928c76379dce5f72bd334da301685ca6cdcb7b11356813332cc243c88470796bc2e2b1e2917fc10df9143dd93c2ea608694971d + languageName: node + linkType: hard + "receptacle@npm:^1.3.2": version: 1.3.2 resolution: "receptacle@npm:1.3.2" @@ -15693,7 +17018,7 @@ __metadata: languageName: node linkType: hard -"resolve@npm:^1.1.4, resolve@npm:^1.10.0, resolve@npm:^1.17.0, resolve@npm:^1.20.0, resolve@npm:^1.21.0, resolve@npm:^1.22.4, resolve@npm:^1.4.0": +"resolve@npm:^1.1.4, resolve@npm:^1.10.0, resolve@npm:^1.17.0, resolve@npm:^1.20.0, resolve@npm:^1.21.0, resolve@npm:^1.22.1, resolve@npm:^1.22.4, resolve@npm:^1.4.0": version: 1.22.8 resolution: "resolve@npm:1.22.8" dependencies: @@ -15716,7 +17041,7 @@ __metadata: languageName: node linkType: hard -"resolve@patch:resolve@^1.1.4#~builtin, resolve@patch:resolve@^1.10.0#~builtin, resolve@patch:resolve@^1.17.0#~builtin, resolve@patch:resolve@^1.20.0#~builtin, resolve@patch:resolve@^1.21.0#~builtin, resolve@patch:resolve@^1.22.4#~builtin, resolve@patch:resolve@^1.4.0#~builtin": +"resolve@patch:resolve@^1.1.4#~builtin, resolve@patch:resolve@^1.10.0#~builtin, resolve@patch:resolve@^1.17.0#~builtin, resolve@patch:resolve@^1.20.0#~builtin, resolve@patch:resolve@^1.21.0#~builtin, resolve@patch:resolve@^1.22.1#~builtin, resolve@patch:resolve@^1.22.4#~builtin, resolve@patch:resolve@^1.4.0#~builtin": version: 1.22.8 resolution: "resolve@patch:resolve@npm%3A1.22.8#~builtin::version=1.22.8&hash=c3c19d" dependencies: @@ -15816,6 +17141,75 @@ __metadata: languageName: node linkType: hard +"rollup@npm:^4.4.0": + version: 4.27.4 + resolution: "rollup@npm:4.27.4" + dependencies: + "@rollup/rollup-android-arm-eabi": 4.27.4 + "@rollup/rollup-android-arm64": 4.27.4 + "@rollup/rollup-darwin-arm64": 4.27.4 + "@rollup/rollup-darwin-x64": 4.27.4 + "@rollup/rollup-freebsd-arm64": 4.27.4 + "@rollup/rollup-freebsd-x64": 4.27.4 + "@rollup/rollup-linux-arm-gnueabihf": 4.27.4 + "@rollup/rollup-linux-arm-musleabihf": 4.27.4 + "@rollup/rollup-linux-arm64-gnu": 4.27.4 + "@rollup/rollup-linux-arm64-musl": 4.27.4 + "@rollup/rollup-linux-powerpc64le-gnu": 4.27.4 + "@rollup/rollup-linux-riscv64-gnu": 4.27.4 + "@rollup/rollup-linux-s390x-gnu": 4.27.4 + "@rollup/rollup-linux-x64-gnu": 4.27.4 + "@rollup/rollup-linux-x64-musl": 4.27.4 + "@rollup/rollup-win32-arm64-msvc": 4.27.4 + "@rollup/rollup-win32-ia32-msvc": 4.27.4 + "@rollup/rollup-win32-x64-msvc": 4.27.4 + "@types/estree": 1.0.6 + fsevents: ~2.3.2 + dependenciesMeta: + "@rollup/rollup-android-arm-eabi": + optional: true + "@rollup/rollup-android-arm64": + optional: true + "@rollup/rollup-darwin-arm64": + optional: true + "@rollup/rollup-darwin-x64": + optional: true + 
"@rollup/rollup-freebsd-arm64": + optional: true + "@rollup/rollup-freebsd-x64": + optional: true + "@rollup/rollup-linux-arm-gnueabihf": + optional: true + "@rollup/rollup-linux-arm-musleabihf": + optional: true + "@rollup/rollup-linux-arm64-gnu": + optional: true + "@rollup/rollup-linux-arm64-musl": + optional: true + "@rollup/rollup-linux-powerpc64le-gnu": + optional: true + "@rollup/rollup-linux-riscv64-gnu": + optional: true + "@rollup/rollup-linux-s390x-gnu": + optional: true + "@rollup/rollup-linux-x64-gnu": + optional: true + "@rollup/rollup-linux-x64-musl": + optional: true + "@rollup/rollup-win32-arm64-msvc": + optional: true + "@rollup/rollup-win32-ia32-msvc": + optional: true + "@rollup/rollup-win32-x64-msvc": + optional: true + fsevents: + optional: true + bin: + rollup: dist/bin/rollup + checksum: 7268678ce9a645fda79efa2dc3c9b458357683b0bbd8cc44f8e52d406df4d40468ea3efdf24ad01e25210594cd40902b2b3d20730e2d58e9b226cb3c48dcbd8b + languageName: node + linkType: hard + "run-applescript@npm:^7.0.0": version: 7.0.0 resolution: "run-applescript@npm:7.0.0" @@ -15933,6 +17327,13 @@ __metadata: languageName: node linkType: hard +"secure-json-parse@npm:^2.4.0": + version: 2.7.0 + resolution: "secure-json-parse@npm:2.7.0" + checksum: d9d7d5a01fc6db6115744ba23cf9e67ecfe8c524d771537c062ee05ad5c11b64c730bc58c7f33f60bd6877f96b86f0ceb9ea29644e4040cb757f6912d4dd6737 + languageName: node + linkType: hard + "select-hose@npm:^2.0.0": version: 2.0.0 resolution: "select-hose@npm:2.0.0" @@ -16265,15 +17666,6 @@ __metadata: languageName: node linkType: hard -"simple-swizzle@npm:^0.2.2": - version: 0.2.2 - resolution: "simple-swizzle@npm:0.2.2" - dependencies: - is-arrayish: ^0.3.1 - checksum: a7f3f2ab5c76c4472d5c578df892e857323e452d9f392e1b5cf74b74db66e6294a1e1b8b390b519fa1b96b5b613f2a37db6cffef52c3f1f8f3c5ea64eb2d54c0 - languageName: node - linkType: hard - "single-line-log@npm:^1.0.1": version: 1.1.2 resolution: "single-line-log@npm:1.1.2" @@ -16304,6 +17696,17 @@ __metadata: languageName: node linkType: hard +"slice-ansi@npm:^4.0.0": + version: 4.0.0 + resolution: "slice-ansi@npm:4.0.0" + dependencies: + ansi-styles: ^4.0.0 + astral-regex: ^2.0.0 + is-fullwidth-code-point: ^3.0.0 + checksum: 4a82d7f085b0e1b070e004941ada3c40d3818563ac44766cca4ceadd2080427d337554f9f99a13aaeb3b4a94d9964d9466c807b3d7b7541d1ec37ee32d308756 + languageName: node + linkType: hard + "smart-buffer@npm:^4.2.0": version: 4.2.0 resolution: "smart-buffer@npm:4.2.0" @@ -16311,6 +17714,54 @@ __metadata: languageName: node linkType: hard +"snappy@npm:^7.2.2": + version: 7.2.2 + resolution: "snappy@npm:7.2.2" + dependencies: + "@napi-rs/snappy-android-arm-eabi": 7.2.2 + "@napi-rs/snappy-android-arm64": 7.2.2 + "@napi-rs/snappy-darwin-arm64": 7.2.2 + "@napi-rs/snappy-darwin-x64": 7.2.2 + "@napi-rs/snappy-freebsd-x64": 7.2.2 + "@napi-rs/snappy-linux-arm-gnueabihf": 7.2.2 + "@napi-rs/snappy-linux-arm64-gnu": 7.2.2 + "@napi-rs/snappy-linux-arm64-musl": 7.2.2 + "@napi-rs/snappy-linux-x64-gnu": 7.2.2 + "@napi-rs/snappy-linux-x64-musl": 7.2.2 + "@napi-rs/snappy-win32-arm64-msvc": 7.2.2 + "@napi-rs/snappy-win32-ia32-msvc": 7.2.2 + "@napi-rs/snappy-win32-x64-msvc": 7.2.2 + dependenciesMeta: + "@napi-rs/snappy-android-arm-eabi": + optional: true + "@napi-rs/snappy-android-arm64": + optional: true + "@napi-rs/snappy-darwin-arm64": + optional: true + "@napi-rs/snappy-darwin-x64": + optional: true + "@napi-rs/snappy-freebsd-x64": + optional: true + "@napi-rs/snappy-linux-arm-gnueabihf": + optional: true + "@napi-rs/snappy-linux-arm64-gnu": + 
optional: true + "@napi-rs/snappy-linux-arm64-musl": + optional: true + "@napi-rs/snappy-linux-x64-gnu": + optional: true + "@napi-rs/snappy-linux-x64-musl": + optional: true + "@napi-rs/snappy-win32-arm64-msvc": + optional: true + "@napi-rs/snappy-win32-ia32-msvc": + optional: true + "@napi-rs/snappy-win32-x64-msvc": + optional: true + checksum: cc6ee627d32325c3b3a7220f57bf7f87906372431072b77dfacf5d875a21c54043df8d6f328eadf8d58bda3d9bb558b3f00e1daaa757441cfa1ec20004f715f1 + languageName: node + linkType: hard + "sockjs@npm:^0.3.24": version: 0.3.24 resolution: "sockjs@npm:0.3.24" @@ -16360,6 +17811,15 @@ __metadata: languageName: node linkType: hard +"sonic-boom@npm:^4.0.1": + version: 4.2.0 + resolution: "sonic-boom@npm:4.2.0" + dependencies: + atomic-sleep: ^1.0.0 + checksum: e5e1ffdd3bcb0dee3bf6f7b2ff50dd3ffa2df864dc9d53463f33e225021a28601e91d0ec7e932739824bafd6f4ff3b7090939ac3e34ab1022e01692b41f7e8a3 + languageName: node + linkType: hard + "source-map-js@npm:^1.2.0": version: 1.2.0 resolution: "source-map-js@npm:1.2.0" @@ -16401,7 +17861,7 @@ __metadata: languageName: node linkType: hard -"source-map@npm:^0.7.4": +"source-map@npm:^0.7.3, source-map@npm:^0.7.4": version: 0.7.4 resolution: "source-map@npm:0.7.4" checksum: 01cc5a74b1f0e1d626a58d36ad6898ea820567e87f18dfc9d24a9843a351aaa2ec09b87422589906d6ff1deed29693e176194dc88bcae7c9a852dc74b311dbf5 @@ -16499,7 +17959,7 @@ __metadata: languageName: node linkType: hard -"sprintf-js@npm:^1.1.3": +"sprintf-js@npm:^1.0.3, sprintf-js@npm:^1.1.3": version: 1.1.3 resolution: "sprintf-js@npm:1.1.3" checksum: a3fdac7b49643875b70864a9d9b469d87a40dfeaf5d34d9d0c5b1cda5fd7d065531fcb43c76357d62254c57184a7b151954156563a4d6a747015cfb41021cad0 @@ -16522,13 +17982,6 @@ __metadata: languageName: node linkType: hard -"stack-trace@npm:0.0.x": - version: 0.0.10 - resolution: "stack-trace@npm:0.0.10" - checksum: 473036ad32f8c00e889613153d6454f9be0536d430eb2358ca51cad6b95cea08a3cc33cc0e34de66b0dad221582b08ed2e61ef8e13f4087ab690f388362d6610 - languageName: node - linkType: hard - "stack-utils@npm:^2.0.3": version: 2.0.6 resolution: "stack-utils@npm:2.0.6" @@ -16740,7 +18193,7 @@ __metadata: languageName: node linkType: hard -"string_decoder@npm:^1.1.1, string_decoder@npm:^1.3.0": +"string_decoder@npm:^1.1.1": version: 1.3.0 resolution: "string_decoder@npm:1.3.0" dependencies: @@ -16968,6 +18421,16 @@ __metadata: languageName: node linkType: hard +"table-layout@npm:^4.1.0": + version: 4.1.1 + resolution: "table-layout@npm:4.1.1" + dependencies: + array-back: ^6.2.2 + wordwrapjs: ^5.1.0 + checksum: 6de52785440b3b2ca9522a06b9ce20f81a3a999c15ef7e5d10c38a2e0008b286bf145e7f88b00f0346e874a548a922906107c492d6da5d438332e7c1bb62307a + languageName: node + linkType: hard + "tachyons@npm:^4.9.1": version: 4.12.0 resolution: "tachyons@npm:4.12.0" @@ -17111,13 +18574,6 @@ __metadata: languageName: node linkType: hard -"text-hex@npm:1.0.x": - version: 1.0.0 - resolution: "text-hex@npm:1.0.0" - checksum: 1138f68adc97bf4381a302a24e2352f04992b7b1316c5003767e9b0d3367ffd0dc73d65001ea02b07cd0ecc2a9d186de0cf02f3c2d880b8a522d4ccb9342244a - languageName: node - linkType: hard - "text-table@npm:^0.2.0": version: 0.2.0 resolution: "text-table@npm:0.2.0" @@ -17134,6 +18590,15 @@ __metadata: languageName: node linkType: hard +"thread-stream@npm:^3.0.0": + version: 3.1.0 + resolution: "thread-stream@npm:3.1.0" + dependencies: + real-require: ^0.2.0 + checksum: 3c5b494ce776f832dfd696792cc865f78c1e850db93e07979349bbc1a5845857cd447aea95808892906cc0178a2fd3233907329f3376e7fc9951e2833f5b7896 + 
languageName: node + linkType: hard + "through2@npm:^2.0.0, through2@npm:^2.0.3": version: 2.0.5 resolution: "through2@npm:2.0.5" @@ -17225,6 +18690,15 @@ __metadata: languageName: node linkType: hard +"tr46@npm:^5.0.0": + version: 5.0.0 + resolution: "tr46@npm:5.0.0" + dependencies: + punycode: ^2.3.1 + checksum: 8d8b021f8e17675ebf9e672c224b6b6cfdb0d5b92141349e9665c14a2501c54a298d11264bbb0b17b447581e1e83d4fc3c038c929f3d210e3964d4be47460288 + languageName: node + linkType: hard + "transform-ast@npm:^2.4.0": version: 2.4.4 resolution: "transform-ast@npm:2.4.4" @@ -17265,13 +18739,6 @@ __metadata: languageName: node linkType: hard -"triple-beam@npm:^1.3.0": - version: 1.4.1 - resolution: "triple-beam@npm:1.4.1" - checksum: 2e881a3e8e076b6f2b85b9ec9dd4a900d3f5016e6d21183ed98e78f9abcc0149e7d54d79a3f432b23afde46b0885bdcdcbff789f39bc75de796316961ec07f61 - languageName: node - linkType: hard - "ts-api-utils@npm:^1.0.1": version: 1.3.0 resolution: "ts-api-utils@npm:1.3.0" @@ -17624,6 +19091,13 @@ __metadata: languageName: node linkType: hard +"typed-query-selector@npm:^2.12.0": + version: 2.12.0 + resolution: "typed-query-selector@npm:2.12.0" + checksum: c4652f2eec16112d69e0da30c2effab3f03d1710f9559da1e1209bbfc9a20990d5de4ba97890c11f9d17d85c8ae3310953a86c198166599d4c36abc63664f169 + languageName: node + linkType: hard + "typedarray@npm:^0.0.6": version: 0.0.6 resolution: "typedarray@npm:0.0.6" @@ -17721,6 +19195,22 @@ __metadata: languageName: node linkType: hard +"typical@npm:^7.1.1": + version: 7.3.0 + resolution: "typical@npm:7.3.0" + checksum: edbb9beed7ffb355806d434d1dd0d41a2b78be0a41d9f1684fabbd4fb512ee220989b5ff91b04c79d19b850d6025d6c07417d63b8e7c9a3b2229a4a0676e17da + languageName: node + linkType: hard + +"ua-parser-js@npm:^1.0.33": + version: 1.0.39 + resolution: "ua-parser-js@npm:1.0.39" + bin: + ua-parser-js: script/cli.js + checksum: 19455df8c2348ef53f2e150e7406d3a025a619c2fd69722a1e63363d5ba8d91731ef7585f2dce7d8f14c8782734b4d704c05f246dca5f7565b5ae7d318084f2a + languageName: node + linkType: hard + "uint8-varint@npm:^2.0.0, uint8-varint@npm:^2.0.1, uint8-varint@npm:^2.0.2, uint8-varint@npm:^2.0.4": version: 2.0.4 resolution: "uint8-varint@npm:2.0.4" @@ -18176,6 +19666,13 @@ __metadata: languageName: node linkType: hard +"webidl-conversions@npm:^7.0.0": + version: 7.0.0 + resolution: "webidl-conversions@npm:7.0.0" + checksum: f05588567a2a76428515333eff87200fae6c83c3948a7482ebb109562971e77ef6dc49749afa58abb993391227c5697b3ecca52018793e0cb4620a48f10bd21b + languageName: node + linkType: hard + "webpack-cli@npm:^5.1.4": version: 5.1.4 resolution: "webpack-cli@npm:5.1.4" @@ -18381,6 +19878,16 @@ __metadata: languageName: node linkType: hard +"whatwg-url@npm:^14.0.0": + version: 14.0.0 + resolution: "whatwg-url@npm:14.0.0" + dependencies: + tr46: ^5.0.0 + webidl-conversions: ^7.0.0 + checksum: 4b5887e50f786583bead70916413e67a381d2126899b9eb5c67ce664bba1e7ec07cdff791404581ce73c6190d83c359c9ca1d50711631217905db3877dec075c + languageName: node + linkType: hard + "wherearewe@npm:^2.0.1": version: 2.0.1 resolution: "wherearewe@npm:2.0.1" @@ -18454,61 +19961,6 @@ __metadata: languageName: node linkType: hard -"winston-daily-rotate-file@npm:^4.7.1": - version: 4.7.1 - resolution: "winston-daily-rotate-file@npm:4.7.1" - dependencies: - file-stream-rotator: ^0.6.1 - object-hash: ^2.0.1 - triple-beam: ^1.3.0 - winston-transport: ^4.4.0 - peerDependencies: - winston: ^3 - checksum: 
227daea41f722caa017fc7d6f1f80d0e6c428491e57693e6bebc8312b85bcf3aace53cb3a925bda72fab59a6898fa127411d29348ec4b295e2263a7544cda611 - languageName: node - linkType: hard - -"winston-transport@npm:4.*": - version: 4.8.0 - resolution: "winston-transport@npm:4.8.0" - dependencies: - logform: ^2.6.1 - readable-stream: ^4.5.2 - triple-beam: ^1.3.0 - checksum: f84092188176d49a6f4f75321ba3e50107ac0942a51a6d7e36b80af19dafb22b57258aaa6d8220763044ea23e30bffd597d3280d2a2298e6a491fe424896bac7 - languageName: node - linkType: hard - -"winston-transport@npm:^4.4.0, winston-transport@npm:^4.7.0": - version: 4.7.0 - resolution: "winston-transport@npm:4.7.0" - dependencies: - logform: ^2.3.2 - readable-stream: ^3.6.0 - triple-beam: ^1.3.0 - checksum: ce074b5c76a99bee5236cf2b4d30fadfaf1e551d566f654f1eba303dc5b5f77169c21545ff5c5e4fdad9f8e815fc6d91b989f1db34161ecca6e860e62fd3a862 - languageName: node - linkType: hard - -"winston@npm:^3.10.0": - version: 3.13.0 - resolution: "winston@npm:3.13.0" - dependencies: - "@colors/colors": ^1.6.0 - "@dabh/diagnostics": ^2.0.2 - async: ^3.2.3 - is-stream: ^2.0.0 - logform: ^2.4.0 - one-time: ^1.0.0 - readable-stream: ^3.4.0 - safe-stable-stringify: ^2.3.1 - stack-trace: 0.0.x - triple-beam: ^1.3.0 - winston-transport: ^4.7.0 - checksum: 66f9fbbadb58e1632701e9c89391f217310c9455462148e163e060dcd25aed21351b0413bdbbf90e5c5fe9bc945fc5de6f53875ac7c7ef3061133a354fc678c0 - languageName: node - linkType: hard - "word-wrap@npm:^1.2.5": version: 1.2.5 resolution: "word-wrap@npm:1.2.5" @@ -18526,6 +19978,20 @@ __metadata: languageName: node linkType: hard +"wordwrapjs@npm:^5.1.0": + version: 5.1.0 + resolution: "wordwrapjs@npm:5.1.0" + checksum: 063c7a5a85b694be1a5fd96f7ae0c0f4d717a087909e5c70cf25edec6eb5df5f2f5561f23e939cf6d7514cf81902610f74f288ef1612a49bf5451de15e0e29db + languageName: node + linkType: hard + +"workerpool@npm:^6.5.1": + version: 6.5.1 + resolution: "workerpool@npm:6.5.1" + checksum: f86d13f9139c3a57c5a5867e81905cd84134b499849405dec2ffe5b1acd30dabaa1809f6f6ee603a7c65e1e4325f21509db6b8398eaf202c8b8f5809e26a2e16 + languageName: node + linkType: hard + "wrap-ansi-cjs@npm:wrap-ansi@^7.0.0, wrap-ansi@npm:^7.0.0": version: 7.0.0 resolution: "wrap-ansi@npm:7.0.0" @@ -18606,6 +20072,21 @@ __metadata: languageName: node linkType: hard +"ws@npm:^7.5.10": + version: 7.5.10 + resolution: "ws@npm:7.5.10" + peerDependencies: + bufferutil: ^4.0.1 + utf-8-validate: ^5.0.2 + peerDependenciesMeta: + bufferutil: + optional: true + utf-8-validate: + optional: true + checksum: f9bb062abf54cc8f02d94ca86dcd349c3945d63851f5d07a3a61c2fcb755b15a88e943a63cf580cbdb5b74436d67ef6b67f745b8f7c0814e411379138e1863cb + languageName: node + linkType: hard + "ws@npm:^8.18.0": version: 8.18.0 resolution: "ws@npm:8.18.0" @@ -18628,6 +20109,13 @@ __metadata: languageName: node linkType: hard +"xxhash-wasm@npm:^1.1.0": + version: 1.1.0 + resolution: "xxhash-wasm@npm:1.1.0" + checksum: 2ccecb3b1dac5fefe11002d5ff5d106bbb5b506f9ee817ecf1bda65e132ebff3c82701c6727df3cb90b94a6dc1d8b294337678606f2304bcb0fd6b8dc68afe0d + languageName: node + linkType: hard + "y18n@npm:^5.0.5": version: 5.0.8 resolution: "y18n@npm:5.0.8" @@ -18668,6 +20156,13 @@ __metadata: languageName: node linkType: hard +"yargs-parser@npm:^20.2.2, yargs-parser@npm:^20.2.9": + version: 20.2.9 + resolution: "yargs-parser@npm:20.2.9" + checksum: 8bb69015f2b0ff9e17b2c8e6bfe224ab463dd00ca211eece72a4cd8a906224d2703fb8a326d36fdd0e68701e201b2a60ed7cf81ce0fd9b3799f9fe7745977ae3 + languageName: node + linkType: hard + "yargs-parser@npm:^21.0.1, 
yargs-parser@npm:^21.1.1": version: 21.1.1 resolution: "yargs-parser@npm:21.1.1" @@ -18675,6 +20170,18 @@ __metadata: languageName: node linkType: hard +"yargs-unparser@npm:^2.0.0": + version: 2.0.0 + resolution: "yargs-unparser@npm:2.0.0" + dependencies: + camelcase: ^6.0.0 + decamelize: ^4.0.0 + flat: ^5.0.2 + is-plain-obj: ^2.1.0 + checksum: 68f9a542c6927c3768c2f16c28f71b19008710abd6b8f8efbac6dcce26bbb68ab6503bed1d5994bdbc2df9a5c87c161110c1dfe04c6a3fe5c6ad1b0e15d9a8a3 + languageName: node + linkType: hard + "yargs@npm:17.7.2, yargs@npm:^17.3.1, yargs@npm:^17.7.2": version: 17.7.2 resolution: "yargs@npm:17.7.2" @@ -18690,6 +20197,21 @@ __metadata: languageName: node linkType: hard +"yargs@npm:^16.2.0": + version: 16.2.0 + resolution: "yargs@npm:16.2.0" + dependencies: + cliui: ^7.0.2 + escalade: ^3.1.1 + get-caller-file: ^2.0.5 + require-directory: ^2.1.1 + string-width: ^4.2.0 + y18n: ^5.0.5 + yargs-parser: ^20.2.2 + checksum: b14afbb51e3251a204d81937c86a7e9d4bdbf9a2bcee38226c900d00f522969ab675703bee2a6f99f8e20103f608382936034e64d921b74df82b63c07c5e8f59 + languageName: node + linkType: hard + "yauzl@npm:^2.10.0": version: 2.10.0 resolution: "yauzl@npm:2.10.0"