diff --git a/.env b/.env deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/.github/scripts/rate_limit_check.sh b/.github/scripts/rate_limit_check.sh new file mode 100755 index 000000000000..6594c685d847 --- /dev/null +++ b/.github/scripts/rate_limit_check.sh @@ -0,0 +1,43 @@ +#!/bin/bash + +set -o errexit +set -o pipefail + + +api_endpoint="https://api.github.com/users/zksync-era-bot" +wait_time=60 +max_retries=60 +retry_count=0 + +while [[ $retry_count -lt $max_retries ]]; do + response=$(run_retried curl -s -w "%{http_code}" -o temp.json "$api_endpoint") + http_code=$(echo "$response" | tail -n1) + + if [[ "$http_code" == "200" ]]; then + echo "Request successful. Not rate-limited." + cat temp.json + rm temp.json + exit 0 + elif [[ "$http_code" == "403" ]]; then + rate_limit_exceeded=$(jq -r '.message' temp.json | grep -i "API rate limit exceeded") + if [[ -n "$rate_limit_exceeded" ]]; then + retry_count=$((retry_count+1)) + echo "API rate limit exceeded. Retry $retry_count of $max_retries. Retrying in $wait_time seconds..." + sleep $wait_time + else + echo "Request failed with HTTP status $http_code." + cat temp.json + rm temp.json + exit 1 + fi + else + echo "Request failed with HTTP status $http_code." + cat temp.json + rm temp.json + exit 1 + fi +done + +echo "Reached the maximum number of retries ($max_retries). Exiting." 
+rm temp.json +exit 1 diff --git a/.github/workflows/build-base.yml b/.github/workflows/build-base.yml new file mode 100644 index 000000000000..d8e557225620 --- /dev/null +++ b/.github/workflows/build-base.yml @@ -0,0 +1,159 @@ +name: Build base Docker image +on: + workflow_dispatch: + inputs: + repo_ref: + description: "git reference of the zksync-era to build" + required: true + default: main +jobs: + build-images: + name: Build and Push Docker Images + runs-on: ${{ fromJSON('["matterlabs-ci-runner-high-performance", "matterlabs-ci-runner-arm"]')[contains(matrix.arch, 'arm')] }} + outputs: + image_tag_sha: ${{ steps.get-sha.outputs.image_tag_sha }} + # Needed to push to GitHub Package Registry + permissions: + packages: write + contents: read + env: + DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }} + DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} + REPO_REF: ${{ github.event.inputs.repo_ref }} + strategy: + matrix: + name: [ build-base ] + repository: [ zksync-build-base ] + arch: [ amd64, arm64 ] + + steps: + - uses: actions/checkout@6d193bf28034eafb982f37bd894289fe649468fc # v4.1.7 + with: + submodules: "recursive" + + - name: Login to google container registry + run: | + gcloud auth configure-docker us-docker.pkg.dev -q + + - name: Login to DockerHub + uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0 + with: + username: ${{ secrets.DOCKERHUB_USER }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + + - name: Login to GitHub Container Registry + uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Get tag + id: get-sha + run: | + echo IMAGE_TAG_SHA=$(git rev-parse --short HEAD) >> $GITHUB_ENV + echo image_tag_sha=$(git rev-parse --short HEAD) >> $GITHUB_OUTPUT + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@988b5a0280414f521da01fcc63a27aeeb4b104db # v3.6.1 + + - name: Build and push + 
uses: docker/build-push-action@5cd11c3a4ced054e52742c5fd54dca954e0edd85 # v6.7.0 + with: + push: true + context: . + file: docker/build-base/Dockerfile + labels: | + org.opencontainers.image.source=https://github.com/matter-labs/zksync-era + org.opencontainers.image.licenses="MIT OR Apache-2.0" + tags: | + matterlabs/zksync-build-base:${{ steps.get-sha.outputs.image_tag_sha }}-${{ matrix.arch }} + us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/zksync-build-base:${{ steps.get-sha.outputs.image_tag_sha }}-${{ matrix.arch }} + ghcr.io/${{ github.repository_owner }}/zksync-build-base:${{ steps.get-sha.outputs.image_tag_sha }}-${{ matrix.arch }} + + multiarch_manifest: + # Needed to push to GitHub Package Registry + permissions: + packages: write + contents: read + needs: [ build-images ] + env: + IMAGE_TAG_SUFFIX: ${{ needs.build-images.outputs.image_tag_sha }} + runs-on: [ matterlabs-ci-runner-high-performance ] + steps: + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@988b5a0280414f521da01fcc63a27aeeb4b104db # v3.6.1 + + - name: Login to google container registry + run: | + gcloud auth configure-docker us-docker.pkg.dev -q + + - name: Login to DockerHub + uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0 + with: + username: ${{ secrets.DOCKERHUB_USER }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + + - name: Login to GitHub Container Registry + uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Create and push multi-arch manifests for Dockerhub + shell: bash + run: | + images=("zksync-build-base") + archs=("amd64" "arm64") + + for img in "${images[@]}"; do + multiarch_tag="matterlabs/zksync-build-base:latest" + individual_images=() + + for arch in "${archs[@]}"; do + TAG="$IMAGE_TAG_SUFFIX" + docker pull matterlabs/zksync-build-base:${TAG}-${arch} --platform linux/${arch} + 
individual_images+=("matterlabs/zksync-build-base:${TAG}-${arch}") + done + + docker buildx imagetools create --tag "${multiarch_tag}" "${individual_images[@]}" + done + + - name: Create and push multi-arch manifests for GitHub Container Registry + shell: bash + run: | + images=("zksync-build-base") + archs=("amd64" "arm64") + + for img in "${images[@]}"; do + multiarch_tag="ghcr.io/${{ github.repository_owner }}/zksync-build-base:latest" + individual_images=() + + for arch in "${archs[@]}"; do + TAG="$IMAGE_TAG_SUFFIX" + docker pull ghcr.io/${{ github.repository_owner }}/zksync-build-base:${TAG}-${arch} --platform linux/${arch} + individual_images+=("ghcr.io/${{ github.repository_owner }}/zksync-build-base:${TAG}-${arch}") + done + + docker buildx imagetools create --tag "${multiarch_tag}" "${individual_images[@]}" + done + + - name: Create and push multi-arch manifests for Google Artifact Registry + shell: bash + run: | + images=("zksync-build-base") + archs=("amd64" "arm64") + + for img in "${images[@]}"; do + multiarch_tag="us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/zksync-build-base:latest" + individual_images=() + + for arch in "${archs[@]}"; do + TAG="$IMAGE_TAG_SUFFIX" + docker pull us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/zksync-build-base:${TAG}-${arch} --platform linux/${arch} + individual_images+=("us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/zksync-build-base:${TAG}-${arch}") + done + + docker buildx imagetools create --tag "${multiarch_tag}" "${individual_images[@]}" + done diff --git a/.github/workflows/build-contract-verifier-template.yml b/.github/workflows/build-contract-verifier-template.yml index db7c4ba387f4..bb385b2797b2 100644 --- a/.github/workflows/build-contract-verifier-template.yml +++ b/.github/workflows/build-contract-verifier-template.yml @@ -113,18 +113,15 @@ jobs: ci_run git config --global --add safe.directory /usr/src/zksync/sdk/binaryen ci_run git config --global --add safe.directory 
/usr/src/zksync/contracts/system-contracts ci_run git config --global --add safe.directory /usr/src/zksync/contracts - ci_run zk || true - ci_run yarn zk build + ci_run ./bin/zkt || true + ci_run ./bin/zk || true ci_run run_retried curl -LO https://storage.googleapis.com/matterlabs-setup-keys-us/setup-keys/setup_2\^26.key - name: build contracts if: env.BUILD_CONTRACTS == 'true' run: | - ci_run zk run yarn ci_run cp etc/tokens/{test,localhost}.json - ci_run zk compiler all - ci_run zk contract build - ci_run zk f yarn run l2-contracts build + ci_run zk_supervisor contracts - name: Login to Docker registries if: ${{ inputs.action == 'push' }} diff --git a/.github/workflows/build-core-template.yml b/.github/workflows/build-core-template.yml index 7e5dcc10a939..dc46c4ba95e9 100644 --- a/.github/workflows/build-core-template.yml +++ b/.github/workflows/build-core-template.yml @@ -71,11 +71,15 @@ jobs: if [ $(jq length <<<"$tags") -eq 0 ]; then echo "No tag found on all pages." echo "BUILD_CONTRACTS=true" >> "$GITHUB_ENV" + # TODO Remove it when we migrate to foundry inside contracts repository + mkdir -p contracts/l1-contracts/artifacts/ exit 0 fi filtered_tag=$(jq -r --arg commit_sha "$commit_sha" 'map(select(.commit.sha == $commit_sha)) | .[].name' <<<"$tags") if [[ ! 
-z "$filtered_tag" ]]; then echo "BUILD_CONTRACTS=false" >> "$GITHUB_ENV" + # TODO Remove it when we migrate to foundry inside contracts repository + mkdir -p contracts/l1-contracts/out break fi ((page++)) @@ -122,18 +126,15 @@ jobs: ci_run git config --global --add safe.directory /usr/src/zksync/sdk/binaryen ci_run git config --global --add safe.directory /usr/src/zksync/contracts/system-contracts ci_run git config --global --add safe.directory /usr/src/zksync/contracts - ci_run zk || true - ci_run yarn zk build + ci_run ./bin/zk || true + ci_run ./bin/zkt || true ci_run run_retried curl -LO https://storage.googleapis.com/matterlabs-setup-keys-us/setup-keys/setup_2\^26.key - name: build contracts if: env.BUILD_CONTRACTS == 'true' run: | - ci_run zk run yarn ci_run cp etc/tokens/{test,localhost}.json - ci_run zk compiler all - ci_run zk contract build - ci_run zk f yarn run l2-contracts build + ci_run zk_supervisor contracts - name: Login to Docker registries if: ${{ inputs.action == 'push' }} diff --git a/.github/workflows/build-docker-from-tag.yml b/.github/workflows/build-docker-from-tag.yml index 791f44117477..29d26a713d89 100644 --- a/.github/workflows/build-docker-from-tag.yml +++ b/.github/workflows/build-docker-from-tag.yml @@ -102,7 +102,7 @@ jobs: with: image_tag_suffix: ${{ needs.setup.outputs.image_tag_suffix }}-avx512 ERA_BELLMAN_CUDA_RELEASE: ${{ vars.ERA_BELLMAN_CUDA_RELEASE }} - CUDA_ARCH: "60;70;75;89" + CUDA_ARCH: "60;70;75;80;89" WITNESS_GENERATOR_RUST_FLAGS: "-Ctarget_feature=+avx512bw,+avx512cd,+avx512dq,+avx512f,+avx512vl" secrets: DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }} diff --git a/.github/workflows/build-local-node-docker.yml b/.github/workflows/build-local-node-docker.yml index 7f36f28f2864..f664bfaaa00a 100644 --- a/.github/workflows/build-local-node-docker.yml +++ b/.github/workflows/build-local-node-docker.yml @@ -16,7 +16,7 @@ on: jobs: build-images: name: Local Node - Build and Push Docker Image - runs-on: 
[matterlabs-ci-runner-high-performance] + runs-on: [ matterlabs-ci-runner-high-performance ] steps: - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4 with: @@ -61,9 +61,9 @@ jobs: ci_run git config --global --add safe.directory /usr/src/zksync/contracts ci_run zk + ci_run zkt ci_run cp etc/tokens/{test,localhost}.json - ci_run zk compiler all - ci_run zk contract build + ci_run zk_supervisor contracts - name: update-image run: | diff --git a/.github/workflows/ci-common-reusable.yml b/.github/workflows/ci-common-reusable.yml index d4667a273ef4..2e5d36feebff 100644 --- a/.github/workflows/ci-common-reusable.yml +++ b/.github/workflows/ci-common-reusable.yml @@ -18,21 +18,23 @@ jobs: echo ZKSYNC_HOME=$(pwd) >> $GITHUB_ENV echo $(pwd)/bin >> $GITHUB_PATH echo IN_DOCKER=1 >> .env + echo "SCCACHE_GCS_BUCKET=matterlabs-infra-sccache-storage" >> .env + echo "SCCACHE_GCS_SERVICE_ACCOUNT=gha-ci-runners@matterlabs-infra.iam.gserviceaccount.com" >> .env + echo "SCCACHE_GCS_RW_MODE=READ_WRITE" >> .env + echo "RUSTC_WRAPPER=sccache" >> .env - name: Start services run: | run_retried docker-compose -f ${RUNNER_COMPOSE_FILE} pull mkdir -p ./volumes/postgres docker-compose -f ${RUNNER_COMPOSE_FILE} up --build -d zk postgres - ci_run sccache --start-server - name: Init run: | - ci_run zk - ci_run run_retried rustup show - ci_run zk db setup + ci_run zkt # This does both linting and "building". 
We're using `zk lint prover` as it's common practice within our repo # `zk lint prover` = cargo clippy, which does cargo check behind the scenes, which is a lightweight version of cargo build - name: Lints - run: ci_run zk lint prover + run: ci_run zk_supervisor lint -t rs --check + diff --git a/.github/workflows/ci-core-lint-reusable.yml b/.github/workflows/ci-core-lint-reusable.yml index e46a67dd8af4..404f0966b405 100644 --- a/.github/workflows/ci-core-lint-reusable.yml +++ b/.github/workflows/ci-core-lint-reusable.yml @@ -15,13 +15,16 @@ jobs: echo ZKSYNC_HOME=$(pwd) >> $GITHUB_ENV echo $(pwd)/bin >> $GITHUB_PATH echo IN_DOCKER=1 >> .env + echo "SCCACHE_GCS_BUCKET=matterlabs-infra-sccache-storage" >> .env + echo "SCCACHE_GCS_SERVICE_ACCOUNT=gha-ci-runners@matterlabs-infra.iam.gserviceaccount.com" >> .env + echo "SCCACHE_GCS_RW_MODE=READ_WRITE" >> .env + echo "RUSTC_WRAPPER=sccache" >> .env echo "prover_url=postgres://postgres:notsecurepassword@localhost:5432/zksync_local_prover" >> $GITHUB_ENV echo "core_url=postgres://postgres:notsecurepassword@localhost:5432/zksync_local" >> $GITHUB_ENV - name: Start services run: | ci_localnet_up - ci_run sccache --start-server - name: Build run: | diff --git a/.github/workflows/ci-core-reusable.yml b/.github/workflows/ci-core-reusable.yml index 18cbc2c2afa3..53ff64398291 100644 --- a/.github/workflows/ci-core-reusable.yml +++ b/.github/workflows/ci-core-reusable.yml @@ -26,6 +26,10 @@ jobs: echo ZKSYNC_HOME=$(pwd) >> $GITHUB_ENV echo $(pwd)/bin >> $GITHUB_PATH echo IN_DOCKER=1 >> .env + echo "SCCACHE_GCS_BUCKET=matterlabs-infra-sccache-storage" >> .env + echo "SCCACHE_GCS_SERVICE_ACCOUNT=gha-ci-runners@matterlabs-infra.iam.gserviceaccount.com" >> .env + echo "SCCACHE_GCS_RW_MODE=READ_WRITE" >> .env + echo "RUSTC_WRAPPER=sccache" >> .env # TODO: Remove when we after upgrade of hardhat-plugins - name: pre-download compilers @@ -48,7 +52,6 @@ jobs: - name: Start services run: | ci_localnet_up - ci_run sccache --start-server - 
name: Init run: | @@ -71,7 +74,7 @@ jobs: strategy: fail-fast: false matrix: - vm_mode: [ "old", "new" ] + vm_mode: [ "OLD", "NEW" ] steps: - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4 @@ -84,16 +87,19 @@ jobs: echo ZKSYNC_HOME=$(pwd) >> $GITHUB_ENV echo $(pwd)/bin >> $GITHUB_PATH echo IN_DOCKER=1 >> .env + echo "SCCACHE_GCS_BUCKET=matterlabs-infra-sccache-storage" >> .env + echo "SCCACHE_GCS_SERVICE_ACCOUNT=gha-ci-runners@matterlabs-infra.iam.gserviceaccount.com" >> .env + echo "SCCACHE_GCS_RW_MODE=READ_WRITE" >> .env + echo "RUSTC_WRAPPER=sccache" >> .env - name: Loadtest configuration run: | - echo EXPECTED_TX_COUNT=${{ matrix.vm_mode == 'new' && 21000 || 16000 }} >> .env + echo EXPECTED_TX_COUNT=${{ matrix.vm_mode == 'NEW' && 21000 || 16000 }} >> .env echo ACCOUNTS_AMOUNT="100" >> .env echo MAX_INFLIGHT_TXS="10" >> .env echo SYNC_API_REQUESTS_LIMIT="15" >> .env echo FAIL_FAST=true >> .env echo IN_DOCKER=1 >> .env - echo DATABASE_MERKLE_TREE_MODE=lightweight >> .env - name: Start services run: | @@ -107,23 +113,32 @@ jobs: ci_run git config --global --add safe.directory /usr/src/zksync/contracts/system-contracts ci_run git config --global --add safe.directory /usr/src/zksync/contracts - ci_run zk - ci_run zk init --local-legacy-bridge-testing + ci_run ./bin/zkt + ci_run zk_inception chain create \ + --chain-name legacy \ + --chain-id sequential \ + --prover-mode no-proofs \ + --wallet-creation localhost \ + --l1-batch-commit-data-generator-mode rollup \ + --base-token-address 0x0000000000000000000000000000000000000001 \ + --base-token-price-nominator 1 \ + --base-token-price-denominator 1 \ + --set-as-default false \ + --ignore-prerequisites \ + --legacy-bridge + + ci_run zk_inception ecosystem init --dev --verbose + ci_run zk_supervisor contracts --test-contracts # `sleep 60` because we need to wait until server added all the tokens - name: Run server run: | - EXPERIMENTAL_VM_STATE_KEEPER_FAST_VM_MODE=${{ matrix.vm_mode }} \ - 
CHAIN_MEMPOOL_DELAY_INTERVAL=50 \ - PASSED_ENV_VARS="EXPERIMENTAL_VM_STATE_KEEPER_FAST_VM_MODE,CHAIN_MEMPOOL_DELAY_INTERVAL" \ - ci_run zk server --uring --components api,tree,eth,state_keeper,housekeeper,commitment_generator,vm_runner_protective_reads &>server.log & + ci_run zk_supervisor config-writer --path ${{ matrix.vm_mode == 'NEW' && 'etc/env/file_based/overrides/tests/loadtest-new.yaml' || 'etc/env/file_based/overrides/tests/loadtest-old.yaml' }} --chain legacy + ci_run zk_inception server --uring --chain=legacy --components api,tree,eth,state_keeper,housekeeper,commitment_generator,vm_runner_protective_reads &>server.log & ci_run sleep 60 - - name: Deploy legacy era contracts - run: ci_run zk contract setup-legacy-bridge-era - - name: Perform loadtest - run: ci_run zk run loadtest + run: ci_run zk_supervisor t loadtest -v --chain=legacy - name: Show server.log logs if: always() @@ -135,304 +150,344 @@ jobs: ci_run sccache --show-stats || true ci_run cat /tmp/sccache_log.txt || true - integration: - name: Integration (consensus=${{ matrix.consensus }}, base_token=${{ matrix.base_token }}, deployment_mode=${{ matrix.deployment_mode }}) - strategy: - # In matrix jobs, fail-fast is true by default. - # To be consistent with the rest of the workflow we disable it explicitly. 
- fail-fast: false - matrix: - consensus: [ false, true ] - base_token: [ "Eth", "Custom" ] - deployment_mode: [ "Rollup", "Validium" ] - env: - SERVER_COMPONENTS: "api,tree,eth,state_keeper,housekeeper,commitment_generator,vm_runner_protective_reads,vm_runner_bwip,vm_playground,da_dispatcher${{ matrix.consensus && ',consensus' || '' }}${{ matrix.base_token == 'Custom' && ',base_token_ratio_persister' || '' }}" - - runs-on: [ matterlabs-ci-runner-highmem-long ] + integration-tests: + runs-on: [ matterlabs-ci-runner-ultra-performance ] steps: - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4 with: submodules: "recursive" fetch-depth: 0 + - name: Setup environment run: | echo ZKSYNC_HOME=$(pwd) >> $GITHUB_ENV echo $(pwd)/bin >> $GITHUB_PATH echo IN_DOCKER=1 >> .env - echo RUN_CONTRACT_VERIFICATION_TEST=true >> .env - echo ZKSYNC_DEBUG_LOGS=true >> .env - - - name: Download zksolc/solc and zkvyper/vyper - run: | - sudo apt update && sudo apt install wget -y - - mkdir -p $(pwd)/etc/solc-bin/0.8.23 - wget https://github.com/ethereum/solc-bin/raw/gh-pages/linux-amd64/solc-linux-amd64-v0.8.23%2Bcommit.f704f362 - mv solc-linux-amd64-v0.8.23+commit.f704f362 $(pwd)/etc/solc-bin/0.8.23/solc - chmod +x $(pwd)/etc/solc-bin/0.8.23/solc - - mkdir -p $(pwd)/etc/solc-bin/zkVM-0.8.23-1.0.0 - wget https://github.com/matter-labs/era-solidity/releases/download/0.8.23-1.0.0/solc-linux-amd64-0.8.23-1.0.0 -O $(pwd)/etc/solc-bin/zkVM-0.8.23-1.0.0/solc - chmod +x $(pwd)/etc/solc-bin/zkVM-0.8.23-1.0.0/solc - - mkdir -p $(pwd)/etc/zksolc-bin/v1.3.21 - wget https://github.com/matter-labs/zksolc-bin/raw/main/linux-amd64/zksolc-linux-amd64-musl-v1.3.21 - mv zksolc-linux-amd64-musl-v1.3.21 $(pwd)/etc/zksolc-bin/v1.3.21/zksolc - chmod +x $(pwd)/etc/zksolc-bin/v1.3.21/zksolc - - mkdir -p $(pwd)/etc/vyper-bin/0.3.10 - wget -O vyper0.3.10 https://github.com/vyperlang/vyper/releases/download/v0.3.10/vyper.0.3.10%2Bcommit.91361694.linux - mv 
vyper0.3.10 $(pwd)/etc/vyper-bin/0.3.10/vyper - chmod +x $(pwd)/etc/vyper-bin/0.3.10/vyper - - mkdir -p $(pwd)/etc/zkvyper-bin/v1.3.13 - wget https://github.com/matter-labs/zkvyper-bin/raw/main/linux-amd64/zkvyper-linux-amd64-musl-v1.3.13 - mv zkvyper-linux-amd64-musl-v1.3.13 $(pwd)/etc/zkvyper-bin/v1.3.13/zkvyper - chmod +x $(pwd)/etc/zkvyper-bin/v1.3.13/zkvyper + echo "SCCACHE_GCS_BUCKET=matterlabs-infra-sccache-storage" >> .env + echo "SCCACHE_GCS_SERVICE_ACCOUNT=gha-ci-runners@matterlabs-infra.iam.gserviceaccount.com" >> .env + echo "SCCACHE_GCS_RW_MODE=READ_WRITE" >> .env + echo "RUSTC_WRAPPER=sccache" >> .env + echo RUN_CONTRACT_VERIFICATION_TEST=true >> $GITHUB_ENV - name: Start services run: | ci_localnet_up - ci_run pre_download_compilers.sh - ci_run sccache --start-server - - name: Init + - name: Build zk_toolbox + run: ci_run bash -c "./bin/zkt" + + - name: Create log directories + run: | + SERVER_LOGS_DIR=logs/server + INTEGRATION_TESTS_LOGS_DIR=logs/integration_tests + INTEGRATION_TESTS_EN_LOGS_DIR=logs/integration_tests/en + SNAPSHOT_RECOVERY_LOGS_DIR=logs/snapshot_recovery/ + GENESIS_RECOVERY_LOGS_DIR=logs/genesis_recovery/ + EXTERNAL_NODE_LOGS_DIR=logs/external_node + REVERT_LOGS_DIR=logs/revert + + mkdir -p $SERVER_LOGS_DIR + mkdir -p $INTEGRATION_TESTS_LOGS_DIR + mkdir -p $INTEGRATION_TESTS_EN_LOGS_DIR + mkdir -p $SNAPSHOT_RECOVERY_LOGS_DIR + mkdir -p $GENESIS_RECOVERY_LOGS_DIR + mkdir -p $EXTERNAL_NODE_LOGS_DIR + mkdir -p $REVERT_LOGS_DIR + + echo "SERVER_LOGS_DIR=$SERVER_LOGS_DIR" >> $GITHUB_ENV + echo "INTEGRATION_TESTS_LOGS_DIR=$INTEGRATION_TESTS_LOGS_DIR" >> $GITHUB_ENV + echo "INTEGRATION_TESTS_EN_LOGS_DIR=$INTEGRATION_TESTS_EN_LOGS_DIR" >> $GITHUB_ENV + echo "SNAPSHOT_RECOVERY_LOGS_DIR=$SNAPSHOT_RECOVERY_LOGS_DIR" >> $GITHUB_ENV + echo "GENESIS_RECOVERY_LOGS_DIR=$GENESIS_RECOVERY_LOGS_DIR" >> $GITHUB_ENV + echo "EXTERNAL_NODE_LOGS_DIR=$EXTERNAL_NODE_LOGS_DIR" >> $GITHUB_ENV + echo "REVERT_LOGS_DIR=$REVERT_LOGS_DIR" >> $GITHUB_ENV + 
+ - name: Initialize ecosystem run: | ci_run git config --global --add safe.directory /usr/src/zksync - ci_run git config --global --add safe.directory /usr/src/zksync/sdk/binaryen ci_run git config --global --add safe.directory /usr/src/zksync/contracts/system-contracts ci_run git config --global --add safe.directory /usr/src/zksync/contracts - ci_run zk - ci_run run_retried rustup show - if [[ "${{ matrix.deployment_mode }}" == "Validium" ]]; then - ci_run zk env dev_validium_docker - ci_run zk config compile dev_validium_docker - else - ci_run zk config compile - fi - ci_run zk init ${{ matrix.base_token == 'Custom' && '--base-token-name BAT' || ''}} ${{ matrix.deployment_mode == 'Validium' && '--validium-mode' || ''}} - # `sleep 5` because we need to wait until server started properly - - name: Run server + ci_run zk_inception ecosystem init --deploy-paymaster --deploy-erc20 \ + --deploy-ecosystem --l1-rpc-url=http://localhost:8545 \ + --server-db-url=postgres://postgres:notsecurepassword@localhost:5432 \ + --server-db-name=zksync_server_localhost_era \ + --prover-db-url=postgres://postgres:notsecurepassword@localhost:5432 \ + --prover-db-name=zksync_prover_localhost_era \ + --ignore-prerequisites --verbose \ + --observability=false + + - name: Read Custom Token address and set as environment variable run: | - ci_run zk server --components=$SERVER_COMPONENTS &>server.log & - ci_run sleep 5 + address=$(awk -F": " '/tokens:/ {found_tokens=1} found_tokens && /DAI:/ {found_dai=1} found_dai && /address:/ {print $2; exit}' ./configs/erc20.yaml) + echo "address=$address" + echo "address=$address" >> $GITHUB_ENV - - name: Run contract verifier + - name: Create and initialize Validium chain + run: | + ci_run zk_inception chain create \ + --chain-name validium \ + --chain-id sequential \ + --prover-mode no-proofs \ + --wallet-creation localhost \ + --l1-batch-commit-data-generator-mode validium \ + --base-token-address 0x0000000000000000000000000000000000000001 \ + 
--base-token-price-nominator 1 \ + --base-token-price-denominator 1 \ + --set-as-default false \ + --ignore-prerequisites + + ci_run zk_inception chain init \ + --deploy-paymaster \ + --l1-rpc-url=http://localhost:8545 \ + --server-db-url=postgres://postgres:notsecurepassword@localhost:5432 \ + --server-db-name=zksync_server_localhost_validium \ + --prover-db-url=postgres://postgres:notsecurepassword@localhost:5432 \ + --prover-db-name=zksync_prover_localhost_validium \ + --port-offset 2000 \ + --chain validium + + - name: Create and initialize chain with Custom Token run: | - ci_run zk contract_verifier &>contract_verifier.log & - ci_run sleep 2 - - - name: Server integration tests - run: ci_run zk test i server - - - name: Snapshot recovery test - # We use `yarn` directly because the test launches `zk` commands in both server and EN envs. - # An empty topmost environment helps avoid a mess when redefining env vars shared between both envs - # (e.g., DATABASE_URL). - # - # Since `base_token` doesn't meaningfully influence the test, we use it as a flag for - # enabling / disabling tree during pruning. 
+ ci_run zk_inception chain create \ + --chain-name custom_token \ + --chain-id sequential \ + --prover-mode no-proofs \ + --wallet-creation localhost \ + --l1-batch-commit-data-generator-mode rollup \ + --base-token-address ${{ env.address }} \ + --base-token-price-nominator 3 \ + --base-token-price-denominator 2 \ + --set-as-default false \ + --ignore-prerequisites + + ci_run zk_inception chain init \ + --deploy-paymaster \ + --l1-rpc-url=http://localhost:8545 \ + --server-db-url=postgres://postgres:notsecurepassword@localhost:5432 \ + --server-db-name=zksync_server_localhost_custom_token \ + --prover-db-url=postgres://postgres:notsecurepassword@localhost:5432 \ + --prover-db-name=zksync_prover_localhost_custom_token \ + --port-offset 3000 \ + --chain custom_token + + - name: Create and register chain with transactions signed "offline" run: | - if [[ "${{ matrix.deployment_mode }}" == "Validium" ]]; then - ci_run zk config compile ext-node-validium - ci_run zk config compile ext-node-validium-docker + ci_run zk_inception chain create \ + --chain-name offline_chain \ + --chain-id sequential \ + --prover-mode no-proofs \ + --wallet-creation localhost \ + --l1-batch-commit-data-generator-mode rollup \ + --base-token-address 0x0000000000000000000000000000000000000001 \ + --base-token-price-nominator 1 \ + --base-token-price-denominator 1 \ + --set-as-default false \ + --ignore-prerequisites + + ci_run zk_inception chain build-transactions --chain offline_chain --l1-rpc-url http://127.0.0.1:8545 + + governor_pk=$(awk '/governor:/ {flag=1} flag && /private_key:/ {print $2; exit}' ./configs/wallets.yaml) + + ci_run zk_supervisor send-transactions \ + --file ./transactions/chain/offline_chain/register-hyperchain-txns.json \ + --l1-rpc-url http://127.0.0.1:8545 \ + --private-key $governor_pk + + bridge_hub=$(awk '/bridgehub_proxy_addr/ {print $2}' ./configs/contracts.yaml) + chain_id=$(awk '/chain_id:/ {print $2}' ./chains/offline_chain/ZkStack.yaml) + + 
hyperchain_output=$(ci_run cast call $bridge_hub "getHyperchain(uint256)" $chain_id) + + if [[ $hyperchain_output == 0x* && ${#hyperchain_output} -eq 66 ]]; then + echo "Chain successfully registered: $hyperchain_output" + else + echo "Failed to register chain: $hyperchain_output" + exit 1 fi - ENABLE_CONSENSUS=${{ matrix.consensus }} \ - DEPLOYMENT_MODE=${{ matrix.deployment_mode }} \ - SNAPSHOTS_CREATOR_VERSION=${{ matrix.deployment_mode == 'Validium' && '0' || '1' }} \ - DISABLE_TREE_DURING_PRUNING=${{ matrix.base_token == 'Eth' }} \ - ETH_CLIENT_WEB3_URL="http://localhost:8545" \ - PASSED_ENV_VARS="ENABLE_CONSENSUS,DEPLOYMENT_MODE,DISABLE_TREE_DURING_PRUNING,SNAPSHOTS_CREATOR_VERSION,ETH_CLIENT_WEB3_URL" \ - ci_run yarn recovery-test snapshot-recovery-test - - - name: Genesis recovery test + + - name: Create and initialize Consensus chain run: | - ENABLE_CONSENSUS=${{ matrix.consensus }} \ - DEPLOYMENT_MODE=${{ matrix.deployment_mode }} \ - ETH_CLIENT_WEB3_URL="http://localhost:8545" \ - PASSED_ENV_VARS="ENABLE_CONSENSUS,DEPLOYMENT_MODE,ETH_CLIENT_WEB3_URL" \ - ci_run yarn recovery-test genesis-recovery-test + ci_run zk_inception chain create \ + --chain-name consensus \ + --chain-id sequential \ + --prover-mode no-proofs \ + --wallet-creation localhost \ + --l1-batch-commit-data-generator-mode validium \ + --base-token-address ${{ env.address }} \ + --base-token-price-nominator 3 \ + --base-token-price-denominator 2 \ + --set-as-default false \ + --ignore-prerequisites + + ci_run zk_inception chain init \ + --deploy-paymaster \ + --l1-rpc-url=http://localhost:8545 \ + --server-db-url=postgres://postgres:notsecurepassword@localhost:5432 \ + --server-db-name=zksync_server_localhost_consensus \ + --prover-db-url=postgres://postgres:notsecurepassword@localhost:5432 \ + --prover-db-name=zksync_prover_localhost_consensus \ + --port-offset 4000 \ + --chain consensus + + - name: Build test dependencies + run: | + ci_run zk_supervisor test build - - name: Fee 
projection tests - run: ci_run zk test i fees + - name: Initialize Contract verifier + run: | + ci_run zk_inception contract-verifier init --zksolc-version=v1.5.3 --zkvyper-version=v1.5.4 --solc-version=0.8.26 --vyper-version=v0.3.10 --era-vm-solc-version=0.8.26-1.0.1 --only --chain era + ci_run zk_inception contract-verifier run --chain era &> ${{ env.SERVER_LOGS_DIR }}/contract-verifier-rollup.log & - - name: Run revert test + - name: Run servers run: | - ci_run pkill zksync_server || true - ci_run sleep 2 - ENABLE_CONSENSUS=${{ matrix.consensus }} DEPLOYMENT_MODE=${{ matrix.deployment_mode }} PASSED_ENV_VARS="ENABLE_CONSENSUS,DEPLOYMENT_MODE" ci_run zk test i revert + ci_run zk_inception server --ignore-prerequisites --chain era &> ${{ env.SERVER_LOGS_DIR }}/rollup.log & + ci_run zk_inception server --ignore-prerequisites --chain validium &> ${{ env.SERVER_LOGS_DIR }}/validium.log & + ci_run zk_inception server --ignore-prerequisites --chain custom_token &> ${{ env.SERVER_LOGS_DIR }}/custom_token.log & + ci_run zk_inception server --ignore-prerequisites --chain consensus \ + --components=api,tree,eth,state_keeper,housekeeper,commitment_generator,vm_runner_protective_reads,vm_runner_bwip,vm_playground,da_dispatcher,consensus \ + &> ${{ env.SERVER_LOGS_DIR }}/consensus.log & + ci_run sleep 5 - # This test should be the last one as soon as it - # finished bootloader will be different - - name: Run upgrade test + - name: Run integration tests run: | - ci_run pkill zksync_server || true - ci_run sleep 10 - ci_run zk test i upgrade + PASSED_ENV_VARS="RUN_CONTRACT_VERIFICATION_TEST" \ + ci_run zk_supervisor test integration --no-deps --ignore-prerequisites --chain era &> ${{ env.INTEGRATION_TESTS_LOGS_DIR }}/rollup.log & + PID1=$! - - name: Show server.log logs - if: always() - run: ci_run cat server.log || true + ci_run zk_supervisor test integration --no-deps --ignore-prerequisites --chain validium &> ${{ env.INTEGRATION_TESTS_LOGS_DIR }}/validium.log & + PID2=$! 
- - name: Show contract_verifier.log logs - if: always() - run: ci_run cat contract_verifier.log || true + ci_run zk_supervisor test integration --no-deps --ignore-prerequisites --chain custom_token &> ${{ env.INTEGRATION_TESTS_LOGS_DIR }}/custom_token.log & + PID3=$! - - name: Show snapshot-creator.log logs - if: always() - run: ci_run cat core/tests/recovery-test/snapshot-creator.log || true - - name: Show snapshot-recovery.log logs - if: always() - run: ci_run cat core/tests/recovery-test/snapshot-recovery.log || true - - name: Show genesis-recovery.log logs - if: always() - run: ci_run cat core/tests/recovery-test/genesis-recovery.log || true + ci_run zk_supervisor test integration --no-deps --ignore-prerequisites --chain consensus &> ${{ env.INTEGRATION_TESTS_LOGS_DIR }}/consensus.log & + PID4=$! - - name: Show revert.log logs - if: always() - run: ci_run cat logs/revert/default/server.log || true + wait $PID1 + wait $PID2 + wait $PID3 + wait $PID4 - - name: Show upgrade.log logs - if: always() - run: ci_run cat core/tests/upgrade-test/upgrade.log || true + - name: Init external nodes + run: | + ci_run zk_inception external-node configs --db-url=postgres://postgres:notsecurepassword@localhost:5432 \ + --db-name=zksync_en_localhost_era_rollup --l1-rpc-url=http://localhost:8545 --chain era + ci_run zk_inception external-node init --ignore-prerequisites --chain era - - name: Show fee-projection.log logs - if: always() - run: ci_run cat core/tests/ts-integration/fees.log || true + ci_run zk_inception external-node configs --db-url=postgres://postgres:notsecurepassword@localhost:5432 \ + --db-name=zksync_en_localhost_era_validium1 --l1-rpc-url=http://localhost:8545 --chain validium + ci_run zk_inception external-node init --ignore-prerequisites --chain validium - - name: Show sccache logs - if: always() + ci_run zk_inception external-node configs --db-url=postgres://postgres:notsecurepassword@localhost:5432 \ + --db-name=zksync_en_localhost_era_custom_token 
--l1-rpc-url=http://localhost:8545 --chain custom_token + ci_run zk_inception external-node init --ignore-prerequisites --chain custom_token + + ci_run zk_inception external-node configs --db-url=postgres://postgres:notsecurepassword@localhost:5432 \ + --db-name=zksync_en_localhost_era_consensus --l1-rpc-url=http://localhost:8545 --chain consensus + ci_run zk_inception external-node init --ignore-prerequisites --chain consensus + + - name: Run recovery tests (from snapshot) run: | - ci_run sccache --show-stats || true - ci_run cat /tmp/sccache_log.txt || true - external-node: - name: External node (consensus=${{ matrix.consensus }}, base_token=${{ matrix.base_token }}, deployment_mode=${{ matrix.deployment_mode }}) - strategy: - fail-fast: false - matrix: - consensus: [ false, true ] - base_token: [ "Eth", "Custom" ] - deployment_mode: [ "Rollup", "Validium" ] - runs-on: [ matterlabs-ci-runner-highmem-long ] + ci_run zk_supervisor test recovery --snapshot --no-deps --ignore-prerequisites --verbose --chain era &> ${{ env.SNAPSHOT_RECOVERY_LOGS_DIR }}/rollup.log & + PID1=$! - env: - SERVER_COMPONENTS: "api,tree,eth,state_keeper,housekeeper,commitment_generator,vm_runner_protective_reads,vm_runner_bwip,da_dispatcher${{ matrix.consensus && ',consensus' || '' }}${{ matrix.base_token == 'Custom' && ',base_token_ratio_persister' || '' }}" - EXT_NODE_FLAGS: "${{ matrix.consensus && '-- --enable-consensus' || '' }}" + ci_run zk_supervisor test recovery --snapshot --no-deps --ignore-prerequisites --verbose --chain validium &> ${{ env.SNAPSHOT_RECOVERY_LOGS_DIR }}/validium.log & + PID2=$! - steps: - - name: Checkout code # Checks out the repository under $GITHUB_WORKSPACE, so the job can access it. 
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4 - with: - submodules: "recursive" - fetch-depth: 0 + ci_run zk_supervisor test recovery --snapshot --no-deps --ignore-prerequisites --verbose --chain custom_token &> ${{ env.SNAPSHOT_RECOVERY_LOGS_DIR }}/custom_token.log & + PID3=$! - - name: Setup environment - run: | - echo ZKSYNC_HOME=$(pwd) >> $GITHUB_ENV - echo $(pwd)/bin >> $GITHUB_PATH - echo IN_DOCKER=1 >> .env - echo RUN_CONTRACT_VERIFICATION_TEST=true >> .env - echo ZKSYNC_DEBUG_LOGS=true >> .env + ci_run zk_supervisor test recovery --snapshot --no-deps --ignore-prerequisites --verbose --chain consensus &> ${{ env.SNAPSHOT_RECOVERY_LOGS_DIR }}/consensus.log & + PID4=$! - - name: Start services - run: | - ci_localnet_up - ci_run pre_download_compilers.sh - ci_run sccache --start-server + wait $PID1 + wait $PID2 + wait $PID3 + wait $PID4 - - name: Init + - name: Run recovery tests (from genesis) run: | - ci_run git config --global --add safe.directory /usr/src/zksync - ci_run git config --global --add safe.directory /usr/src/zksync/sdk/binaryen - ci_run git config --global --add safe.directory /usr/src/zksync/contracts/system-contracts - ci_run git config --global --add safe.directory /usr/src/zksync/contracts - ci_run zk - ci_run run_retried rustup show - if [[ "${{ matrix.deployment_mode }}" == "Rollup" ]]; then - ci_run zk config compile - elif [[ "${{ matrix.deployment_mode }}" == "Validium" ]]; then - ci_run zk env dev_validium_docker - ci_run zk config compile dev_validium_docker - fi - ci_run zk init ${{ matrix.base_token == 'Custom' && '--base-token-name BAT' || ''}} ${{ matrix.deployment_mode == 'Validium' && '--validium-mode' || ''}} + ci_run zk_supervisor test recovery --no-deps --no-kill --ignore-prerequisites --verbose --chain era &> ${{ env.GENESIS_RECOVERY_LOGS_DIR }}/rollup.log & + PID1=$! 
- # `sleep 30` because we need to wait until server started properly - - name: Run server + ci_run zk_supervisor test recovery --no-deps --no-kill --ignore-prerequisites --verbose --chain validium &> ${{ env.GENESIS_RECOVERY_LOGS_DIR }}/validium.log & + PID2=$! + + ci_run zk_supervisor test recovery --no-deps --no-kill --ignore-prerequisites --verbose --chain custom_token &> ${{ env.GENESIS_RECOVERY_LOGS_DIR }}/custom_token.log & + PID3=$! + + ci_run zk_supervisor test recovery --no-deps --no-kill --ignore-prerequisites --verbose --chain consensus &> ${{ env.GENESIS_RECOVERY_LOGS_DIR }}/consensus.log & + PID4=$! + + wait $PID1 + wait $PID2 + wait $PID3 + wait $PID4 + + - name: Run external node server run: | - ci_run zk server --components=$SERVER_COMPONENTS &>>server.log & - ci_run sleep 30 + ci_run zk_inception external-node run --ignore-prerequisites --chain era &> ${{ env.EXTERNAL_NODE_LOGS_DIR }}/rollup.log & + ci_run zk_inception external-node run --ignore-prerequisites --chain validium &> ${{ env.EXTERNAL_NODE_LOGS_DIR }}/validium.log & + ci_run zk_inception external-node run --ignore-prerequisites --chain custom_token &> ${{ env.EXTERNAL_NODE_LOGS_DIR }}/custom_token.log & + ci_run zk_inception external-node run --ignore-prerequisites --chain consensus --enable-consensus &> ${{ env.EXTERNAL_NODE_LOGS_DIR }}/consensus.log & - - name: Run external node + - name: Run integration tests en run: | - if [[ "${{ matrix.deployment_mode }}" == "Rollup" ]]; then - ci_run zk env ext-node-docker - elif [[ "${{ matrix.deployment_mode }}" == "Validium" ]]; then - ci_run zk env ext-node-validium-docker - fi - ci_run zk db setup - ci_run zk external-node $EXT_NODE_FLAGS &>>ext-node.log & - ci_run sleep 30 + ci_run zk_supervisor test integration --no-deps --ignore-prerequisites --external-node --chain era &> ${{ env.INTEGRATION_TESTS_EN_LOGS_DIR }}/rollup.log & + PID1=$! 
- - name: Integration tests - run: ci_run zk test i server --testPathIgnorePatterns 'contract-verification|snapshots-creator' + ci_run zk_supervisor test integration --no-deps --ignore-prerequisites --external-node --chain validium &> ${{ env.INTEGRATION_TESTS_EN_LOGS_DIR }}/validium.log & + PID2=$! - - name: Run revert test - run: | - ENABLE_CONSENSUS=${{ matrix.consensus }} \ - DEPLOYMENT_MODE=${{ matrix.deployment_mode }} \ - PASSED_ENV_VARS="ENABLE_CONSENSUS,DEPLOYMENT_MODE" \ - ci_run zk test i revert-en - - # test terminates the nodes, so we restart them. - if [[ "${{ matrix.deployment_mode }}" == "Rollup" ]]; then - ZKSYNC_ENV=docker ci_run zk server --components=$SERVER_COMPONENTS &>>server.log & - ZKSYNC_ENV=ext-node-docker ci_run zk external-node $EXT_NODE_FLAGS &>>ext-node.log & - elif [[ "${{ matrix.deployment_mode }}" == "Validium" ]]; then - ZKSYNC_ENV=dev_validium_docker ci_run zk server --components=$SERVER_COMPONENTS &>>server.log & - ZKSYNC_ENV=ext-node-validium-docker ci_run zk external-node $EXT_NODE_FLAGS &>>ext-node.log & - fi - ci_run sleep 30 + ci_run zk_supervisor test integration --no-deps --ignore-prerequisites --external-node --chain custom_token &> ${{ env.INTEGRATION_TESTS_EN_LOGS_DIR }}/custom_token.log & + PID3=$! - - name: Run upgrade test + ci_run zk_supervisor test integration --no-deps --ignore-prerequisites --external-node --chain consensus &> ${{ env.INTEGRATION_TESTS_EN_LOGS_DIR }}/consensus.log & + PID4=$! 
+ + wait $PID1 + wait $PID2 + wait $PID3 + wait $PID4 + + - name: Run revert tests run: | - if [[ "${{ matrix.deployment_mode }}" == "Rollup" ]]; then - ci_run zk env docker - elif [[ "${{ matrix.deployment_mode }}" == "Validium" ]]; then - ci_run zk env dev_validium_docker - fi - CHECK_EN_URL="http://0.0.0.0:3060" ci_run zk test i upgrade + ci_run killall -INT zksync_server || true + ci_run killall -INT zksync_external_node || true - - name: Show server.log logs - if: always() - run: ci_run cat server.log || true + ci_run zk_supervisor test revert --no-deps --external-node --no-kill --ignore-prerequisites --chain era &> ${{ env.REVERT_LOGS_DIR }}/rollup.log & + PID1=$! - - name: Show ext-node.log logs - if: always() - run: ci_run cat ext-node.log || true + ci_run zk_supervisor test revert --no-deps --external-node --no-kill --ignore-prerequisites --chain validium &> ${{ env.REVERT_LOGS_DIR }}/validium.log & + PID2=$! - - name: Show contract_verifier.log logs - if: always() - run: ci_run cat ext-node.log || true + ci_run zk_supervisor test revert --no-deps --external-node --no-kill --ignore-prerequisites --chain custom_token &> ${{ env.REVERT_LOGS_DIR }}/custom_token.log & + PID3=$! - - name: Show revert logs (main node) - if: always() - run: ci_run cat logs/revert/en/default/server.log || true + ci_run zk_supervisor test revert --no-deps --external-node --no-kill --ignore-prerequisites --chain consensus &> ${{ env.REVERT_LOGS_DIR }}/consensus.log & + PID4=$! 
- - name: Show revert logs (EN) - if: always() - run: ci_run cat logs/revert/en/default/external_node.log || true + wait $PID1 + wait $PID2 + wait $PID3 + wait $PID4 - - name: Show upgrade.log logs - if: always() - run: ci_run cat core/tests/upgrade-test/upgrade.log || true - - name: Show sccache logs - if: always() + # Upgrade tests should run last, because as soon as they + # finish the bootloader will be different + # TODO make upgrade tests safe to run multiple times + - name: Run upgrade test run: | - ci_run sccache --show-stats || true - ci_run cat /tmp/sccache_log.txt || true + ci_run zk_supervisor test upgrade --no-deps --chain era + + - name: Upload logs + uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0 + if: always() + with: + name: logs + path: logs diff --git a/.github/workflows/ci-docs-reusable.yml b/.github/workflows/ci-docs-reusable.yml index 2b8eea15a827..5b1d5a9bcdfa 100644 --- a/.github/workflows/ci-docs-reusable.yml +++ b/.github/workflows/ci-docs-reusable.yml @@ -17,12 +17,16 @@ jobs: echo ZKSYNC_HOME=$(pwd) >> $GITHUB_ENV echo $(pwd)/bin >> $GITHUB_PATH echo IN_DOCKER=1 >> .env + echo "SCCACHE_GCS_BUCKET=matterlabs-infra-sccache-storage" >> .env + echo "SCCACHE_GCS_SERVICE_ACCOUNT=gha-ci-runners@matterlabs-infra.iam.gserviceaccount.com" >> .env + echo "SCCACHE_GCS_RW_MODE=READ_WRITE" >> .env + echo "RUSTC_WRAPPER=sccache" >> .env - name: Start services run: | run_retried docker compose pull zk docker compose up -d zk - + - name: Build run: | ci_run ./bin/zkt diff --git a/.github/workflows/ci-prover-reusable.yml b/.github/workflows/ci-prover-reusable.yml index d1d4a9ab96b2..367a86c5f40f 100644 --- a/.github/workflows/ci-prover-reusable.yml +++ b/.github/workflows/ci-prover-reusable.yml @@ -3,7 +3,7 @@ on: workflow_call: jobs: lint: - runs-on: [matterlabs-ci-runner-highmem-long] + runs-on: [ matterlabs-ci-runner-highmem-long ] env: RUNNER_COMPOSE_FILE: "docker-compose-runner-nightly.yml" @@ -17,24 +17,29 @@ jobs: 
echo ZKSYNC_HOME=$(pwd) >> $GITHUB_ENV echo $(pwd)/bin >> $GITHUB_PATH echo IN_DOCKER=1 >> .env + echo "SCCACHE_GCS_BUCKET=matterlabs-infra-sccache-storage" >> .env + echo "SCCACHE_GCS_SERVICE_ACCOUNT=gha-ci-runners@matterlabs-infra.iam.gserviceaccount.com" >> .env + echo "SCCACHE_GCS_RW_MODE=READ_WRITE" >> .env + echo "RUSTC_WRAPPER=sccache" >> .env + echo "prover_url=postgres://postgres:notsecurepassword@localhost:5432/zksync_local_prover" >> $GITHUB_ENV + echo "core_url=postgres://postgres:notsecurepassword@localhost:5432/zksync_local" >> $GITHUB_ENV - name: Start services run: | run_retried docker-compose -f ${RUNNER_COMPOSE_FILE} pull mkdir -p ./volumes/postgres docker-compose -f ${RUNNER_COMPOSE_FILE} up --build -d zk postgres - ci_run sccache --start-server - name: Init run: | - ci_run zk - ci_run zk db setup + ci_run zkt + ci_run zk_supervisor db setup --prover-url=${{ env.prover_url }} --core-url=${{ env.core_url }} - name: Formatting run: ci_run bash -c "cd prover && cargo fmt --check" unit-tests: - runs-on: [matterlabs-ci-runner-highmem-long] + runs-on: [ matterlabs-ci-runner-highmem-long ] env: RUNNER_COMPOSE_FILE: "docker-compose-runner-nightly.yml" @@ -48,21 +53,23 @@ jobs: echo ZKSYNC_HOME=$(pwd) >> $GITHUB_ENV echo $(pwd)/bin >> $GITHUB_PATH echo IN_DOCKER=1 >> .env + echo "SCCACHE_GCS_BUCKET=matterlabs-infra-sccache-storage" >> .env + echo "SCCACHE_GCS_SERVICE_ACCOUNT=gha-ci-runners@matterlabs-infra.iam.gserviceaccount.com" >> .env + echo "SCCACHE_GCS_RW_MODE=READ_WRITE" >> .env + echo "RUSTC_WRAPPER=sccache" >> .env - name: Start services run: | run_retried docker-compose -f ${RUNNER_COMPOSE_FILE} pull mkdir -p ./volumes/postgres docker-compose -f ${RUNNER_COMPOSE_FILE} up --build -d zk postgres - ci_run sccache --start-server - name: Init run: | - ci_run zk + ci_run zkt ci_run run_retried rustup show - ci_run zk db setup - name: Prover unit tests run: | # Not all tests are enabled, since prover and setup_key_generator_and_server requires 
bellman-cuda to be present - ci_run zk test prover + ci_run zk_supervisor test prover diff --git a/.github/workflows/ci-zk-toolbox-reusable.yml b/.github/workflows/ci-zk-toolbox-reusable.yml deleted file mode 100644 index 78e1e485cafc..000000000000 --- a/.github/workflows/ci-zk-toolbox-reusable.yml +++ /dev/null @@ -1,261 +0,0 @@ -name: Workflow template for CI jobs for Core Components -on: - workflow_call: - -env: - CLICOLOR: 1 - # We run multiple binaries in parallel, and by default they will try to utilize all the - # available CPUs. In tests, there is not much CPU-intensive work (rayon), but a lot of - # async work (tokio), so we prioritize tokio. - TOKIO_WORKER_THREADS: 4 - RAYON_NUM_THREADS: 2 - -jobs: - lint: - name: lint - uses: ./.github/workflows/ci-core-lint-reusable.yml - - tests: - runs-on: [ matterlabs-ci-runner-ultra-performance ] - steps: - - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4 - with: - submodules: "recursive" - fetch-depth: 0 - - - - name: Setup environment - run: | - echo ZKSYNC_HOME=$(pwd) >> $GITHUB_ENV - echo $(pwd)/bin >> $GITHUB_PATH - echo IN_DOCKER=1 >> .env - - - name: Start services - run: | - ci_localnet_up - ci_run sccache --start-server - - - name: Build zk_toolbox - run: ci_run bash -c "./bin/zkt" - - - name: Create log directories - run: | - SERVER_LOGS_DIR=logs/server - INTEGRATION_TESTS_LOGS_DIR=logs/integration_tests - INTEGRATION_TESTS_EN_LOGS_DIR=logs/integration_tests/en - SNAPSHOT_RECOVERY_LOGS_DIR=logs/integration_tests/en - GENESIS_RECOVERY_LOGS_DIR=logs/integration_tests/en - EXTERNAL_NODE_LOGS_DIR=logs/external_node - REVERT_LOGS_DIR=logs/revert - - mkdir -p $SERVER_LOGS_DIR - mkdir -p $INTEGRATION_TESTS_LOGS_DIR - mkdir -p $INTEGRATION_TESTS_EN_LOGS_DIR - mkdir -p $SNAPSHOT_RECOVERY_LOGS_DIR - mkdir -p $GENESIS_RECOVERY_LOGS_DIR - mkdir -p $EXTERNAL_NODE_LOGS_DIR - mkdir -p $REVERT_LOGS_DIR - - echo "SERVER_LOGS_DIR=$SERVER_LOGS_DIR" >> $GITHUB_ENV - echo 
"INTEGRATION_TESTS_LOGS_DIR=$INTEGRATION_TESTS_LOGS_DIR" >> $GITHUB_ENV - echo "INTEGRATION_TESTS_EN_LOGS_DIR=$INTEGRATION_TESTS_EN_LOGS_DIR" >> $GITHUB_ENV - echo "SNAPSHOT_RECOVERY_LOGS_DIR=$SNAPSHOT_RECOVERY_LOGS_DIR" >> $GITHUB_ENV - echo "GENESIS_RECOVERY_LOGS_DIR=$GENESIS_RECOVERY_LOGS_DIR" >> $GITHUB_ENV - echo "EXTERNAL_NODE_LOGS_DIR=$EXTERNAL_NODE_LOGS_DIR" >> $GITHUB_ENV - echo "REVERT_LOGS_DIR=$REVERT_LOGS_DIR" >> $GITHUB_ENV - - - name: Initialize ecosystem - run: | - ci_run git config --global --add safe.directory /usr/src/zksync - ci_run git config --global --add safe.directory /usr/src/zksync/contracts/system-contracts - ci_run git config --global --add safe.directory /usr/src/zksync/contracts - - ci_run zk_inception ecosystem init --deploy-paymaster --deploy-erc20 \ - --deploy-ecosystem --l1-rpc-url=http://localhost:8545 \ - --server-db-url=postgres://postgres:notsecurepassword@localhost:5432 \ - --server-db-name=zksync_server_localhost_era \ - --prover-db-url=postgres://postgres:notsecurepassword@localhost:5432 \ - --prover-db-name=zksync_prover_localhost_era \ - --ignore-prerequisites --verbose \ - --observability=false - - - name: Read Custom Token address and set as environment variable - run: | - address=$(awk -F": " '/tokens:/ {found_tokens=1} found_tokens && /DAI:/ {found_dai=1} found_dai && /address:/ {print $2; exit}' ./configs/erc20.yaml) - echo "address=$address" - echo "address=$address" >> $GITHUB_ENV - - - name: Create and initialize Validium chain - run: | - ci_run zk_inception chain create \ - --chain-name validium \ - --chain-id sequential \ - --prover-mode no-proofs \ - --wallet-creation localhost \ - --l1-batch-commit-data-generator-mode validium \ - --base-token-address 0x0000000000000000000000000000000000000001 \ - --base-token-price-nominator 1 \ - --base-token-price-denominator 1 \ - --set-as-default false \ - --ignore-prerequisites - - ci_run zk_inception chain init \ - --deploy-paymaster \ - 
--l1-rpc-url=http://localhost:8545 \ - --server-db-url=postgres://postgres:notsecurepassword@localhost:5432 \ - --server-db-name=zksync_server_localhost_validium \ - --prover-db-url=postgres://postgres:notsecurepassword@localhost:5432 \ - --prover-db-name=zksync_prover_localhost_validium \ - --port-offset 2000 \ - --chain validium - - - name: Create and initialize chain with Custom Token - run: | - ci_run zk_inception chain create \ - --chain-name custom_token \ - --chain-id sequential \ - --prover-mode no-proofs \ - --wallet-creation localhost \ - --l1-batch-commit-data-generator-mode rollup \ - --base-token-address ${{ env.address }} \ - --base-token-price-nominator 3 \ - --base-token-price-denominator 2 \ - --set-as-default false \ - --ignore-prerequisites - - ci_run zk_inception chain init \ - --deploy-paymaster \ - --l1-rpc-url=http://localhost:8545 \ - --server-db-url=postgres://postgres:notsecurepassword@localhost:5432 \ - --server-db-name=zksync_server_localhost_custom_token \ - --prover-db-url=postgres://postgres:notsecurepassword@localhost:5432 \ - --prover-db-name=zksync_prover_localhost_custom_token \ - --port-offset 3000 \ - --chain custom_token - - - name: Build test dependencies - run: | - ci_run zk_supervisor test build - - - name: Run servers - run: | - ci_run zk_inception server --ignore-prerequisites --chain era &> ${{ env.SERVER_LOGS_DIR }}/rollup.log & - ci_run zk_inception server --ignore-prerequisites --chain validium &> ${{ env.SERVER_LOGS_DIR }}/validium.log & - ci_run zk_inception server --ignore-prerequisites --chain custom_token &> ${{ env.SERVER_LOGS_DIR }}/custom_token.log & - ci_run sleep 5 - - - name: Run integration tests - run: | - ci_run zk_supervisor test integration --no-deps --ignore-prerequisites --chain era &> ${{ env.INTEGRATION_TESTS_LOGS_DIR }}/rollup.log & - PID1=$! - - ci_run zk_supervisor test integration --no-deps --ignore-prerequisites --chain validium &> ${{ env.INTEGRATION_TESTS_LOGS_DIR }}/validium.log & - PID2=$! 
- - ci_run zk_supervisor test integration --no-deps --ignore-prerequisites --chain custom_token &> ${{ env.INTEGRATION_TESTS_LOGS_DIR }}/custom_token.log & - PID3=$! - - wait $PID1 - wait $PID2 - wait $PID3 - - - name: Init external nodes - run: | - ci_run zk_inception external-node configs --db-url=postgres://postgres:notsecurepassword@localhost:5432 \ - --db-name=zksync_en_localhost_era_rollup --l1-rpc-url=http://localhost:8545 --chain era - ci_run zk_inception external-node init --ignore-prerequisites --chain era - - ci_run zk_inception external-node configs --db-url=postgres://postgres:notsecurepassword@localhost:5432 \ - --db-name=zksync_en_localhost_era_validium1 --l1-rpc-url=http://localhost:8545 --chain validium - ci_run zk_inception external-node init --ignore-prerequisites --chain validium - - ci_run zk_inception external-node configs --db-url=postgres://postgres:notsecurepassword@localhost:5432 \ - --db-name=zksync_en_localhost_era_custom_token --l1-rpc-url=http://localhost:8545 --chain custom_token - ci_run zk_inception external-node init --ignore-prerequisites --chain custom_token - - - name: Run recovery tests (from snapshot) - run: | - - ci_run zk_supervisor test recovery --snapshot --no-deps --ignore-prerequisites --verbose --chain era &> ${{ env.SNAPSHOT_RECOVERY_LOGS_DIR }}/rollup.log & - PID1=$! - - ci_run zk_supervisor test recovery --snapshot --no-deps --ignore-prerequisites --verbose --chain validium &> ${{ env.SNAPSHOT_RECOVERY_LOGS_DIR }}//validium.log & - PID2=$! - - ci_run zk_supervisor test recovery --snapshot --no-deps --ignore-prerequisites --verbose --chain custom_token &> ${{ env.SNAPSHOT_RECOVERY_LOGS_DIR }}//custom_token.log & - PID3=$! - - wait $PID1 - wait $PID2 - wait $PID3 - - - name: Run recovery tests (from genesis) - run: | - ci_run zk_supervisor test recovery --no-deps --no-kill --ignore-prerequisites --verbose --chain era &> ${{ env.GENESIS_RECOVERY_LOGS_DIR }}/rollup.log & - PID1=$! 
- - ci_run zk_supervisor test recovery --no-deps --no-kill --ignore-prerequisites --verbose --chain validium &> ${{ env.GENESIS_RECOVERY_LOGS_DIR }}/validium.log & - PID2=$! - - ci_run zk_supervisor test recovery --no-deps --no-kill --ignore-prerequisites --verbose --chain custom_token &> ${{ env.GENESIS_RECOVERY_LOGS_DIR }}/custom_token.log & - PID3=$! - - wait $PID1 - wait $PID2 - wait $PID3 - - - name: Run external node server - run: | - ci_run zk_inception external-node run --ignore-prerequisites --chain era &> ${{ env.EXTERNAL_NODE_LOGS_DIR }}/rollup.log & - ci_run zk_inception external-node run --ignore-prerequisites --chain validium &> ${{ env.EXTERNAL_NODE_LOGS_DIR }}/validium.log & - ci_run zk_inception external-node run --ignore-prerequisites --chain custom_token &> ${{ env.EXTERNAL_NODE_LOGS_DIR }}/custom_token.log & - ci_run sleep 5 - - - name: Run integration tests en - run: | - ci_run zk_supervisor test integration --no-deps --ignore-prerequisites --external-node --chain era &> ${{ env.INTEGRATION_TESTS_EN_LOGS_DIR }}/rollup.log & - PID1=$! - - ci_run zk_supervisor test integration --no-deps --ignore-prerequisites --external-node --chain validium &> ${{ env.INTEGRATION_TESTS_EN_LOGS_DIR }}/validium.log & - PID2=$! - - ci_run zk_supervisor test integration --no-deps --ignore-prerequisites --external-node --chain custom_token &> ${{ env.INTEGRATION_TESTS_EN_LOGS_DIR }}/custom_token.log & - PID3=$! - - wait $PID1 - wait $PID2 - wait $PID3 - - - name: Run revert tests - run: | - ci_run killall -INT zksync_server || true - ci_run killall -INT zksync_external_node || true - - ci_run zk_supervisor test revert --no-deps --external-node --no-kill --ignore-prerequisites --chain era &> ${{ env.REVERT_LOGS_DIR }}/rollup.log & - PID1=$! - - ci_run zk_supervisor test revert --no-deps --external-node --no-kill --ignore-prerequisites --chain validium &> ${{ env.REVERT_LOGS_DIR }}/validium.log & - PID2=$! 
- - ci_run zk_supervisor test revert --no-deps --external-node --no-kill --ignore-prerequisites --chain custom_token &> ${{ env.REVERT_LOGS_DIR }}/custom_token.log & - PID3=$! - - wait $PID1 - wait $PID2 - wait $PID3 - - - # Upgrade tests should run last, because as soon as they - # finish the bootloader will be different - # TODO make upgrade tests safe to run multiple times - - name: Run upgrade test - run: | - ci_run zk_supervisor test upgrade --no-deps --chain era - - - - name: Upload logs - uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0 - if: always() - with: - name: logs - path: logs diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 53c169114915..0a27a719aeb6 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -58,14 +58,10 @@ jobs: - '.github/workflows/ci-core-lint-reusable.yml' - 'Cargo.toml' - 'Cargo.lock' - - '!**/*.md' - - '!**/*.MD' - - 'docker-compose.yml' - zk_toolbox: - - '.github/workflows/ci-zk-toolbox-reusable.yml' - 'zk_toolbox/**' - '!**/*.md' - '!**/*.MD' + - 'docker-compose.yml' docs: - '**/*.md' - '**/*.MD' @@ -76,6 +72,7 @@ jobs: - 'etc/**' - 'contracts/**' - 'infrastructure/zk/**' + - 'docker/zk-environment/**' - '!**/*.md' - '!**/*.MD' @@ -97,12 +94,6 @@ jobs: name: CI for Prover Components uses: ./.github/workflows/ci-prover-reusable.yml - ci-for-zk-toolbox: - needs: changed_files - if: ${{ (needs.changed_files.outputs.core == 'true' || needs.changed_files.outputs.zk_toolbox == 'true' || needs.changed_files.outputs.all == 'true') && !contains(github.ref_name, 'release-please--branches') }} - name: CI for zk_toolbox - uses: ./.github/workflows/ci-zk-toolbox-reusable.yml - ci-for-docs: needs: changed_files if: needs.changed_files.outputs.docs == 'true' @@ -120,7 +111,7 @@ jobs: name: Build core images needs: changed_files if: ${{ (needs.changed_files.outputs.core == 'true' || needs.changed_files.outputs.all == 'true') && !contains(github.ref_name, 
'release-please--branches') }} - uses: ./.github/workflows/build-core-template.yml + uses: ./.github/workflows/new-build-core-template.yml with: image_tag_suffix: ${{ needs.setup.outputs.image_tag_suffix }} action: "build" @@ -145,7 +136,7 @@ jobs: name: Build contract verifier needs: changed_files if: ${{ (needs.changed_files.outputs.core == 'true' || needs.changed_files.outputs.all == 'true') && !contains(github.ref_name, 'release-please--branches') }} - uses: ./.github/workflows/build-contract-verifier-template.yml + uses: ./.github/workflows/new-build-contract-verifier-template.yml with: image_tag_suffix: ${{ needs.setup.outputs.image_tag_suffix }} action: "build" @@ -157,7 +148,7 @@ jobs: name: Build prover images needs: changed_files if: ${{ (needs.changed_files.outputs.prover == 'true' || needs.changed_files.outputs.all == 'true') && !contains(github.ref_name, 'release-please--branches') }} - uses: ./.github/workflows/build-prover-template.yml + uses: ./.github/workflows/new-build-prover-template.yml with: image_tag_suffix: ${{ needs.setup.outputs.image_tag_suffix }} action: "build" @@ -171,12 +162,10 @@ jobs: name: Build prover images with avx512 instructions needs: changed_files if: ${{ (needs.changed_files.outputs.prover == 'true' || needs.changed_files.outputs.all == 'true') && !contains(github.ref_name, 'release-please--branches') }} - uses: ./.github/workflows/build-witness-generator-template.yml + uses: ./.github/workflows/new-build-witness-generator-template.yml with: image_tag_suffix: ${{ needs.setup.outputs.image_tag_suffix }}-avx512 action: "build" - ERA_BELLMAN_CUDA_RELEASE: ${{ vars.ERA_BELLMAN_CUDA_RELEASE }} - is_pr_from_fork: ${{ github.event.pull_request.head.repo.fork == true }} WITNESS_GENERATOR_RUST_FLAGS: "-Ctarget_feature=+avx512bw,+avx512cd,+avx512dq,+avx512f,+avx512vl" secrets: DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }} @@ -186,7 +175,7 @@ jobs: name: Github Status Check runs-on: ubuntu-latest if: always() && !cancelled() - needs: 
[ci-for-core-lint, ci-for-common, ci-for-core, ci-for-prover, ci-for-docs, build-core-images, build-contract-verifier, build-prover-images] + needs: [ ci-for-core-lint, ci-for-common, ci-for-core, ci-for-prover, ci-for-docs, build-core-images, build-contract-verifier, build-prover-images ] steps: - name: Status run: | diff --git a/.github/workflows/new-build-contract-verifier-template.yml b/.github/workflows/new-build-contract-verifier-template.yml new file mode 100644 index 000000000000..42791eab6669 --- /dev/null +++ b/.github/workflows/new-build-contract-verifier-template.yml @@ -0,0 +1,271 @@ +name: Build contract verifier +on: + workflow_call: + secrets: + DOCKERHUB_USER: + description: "DOCKERHUB_USER" + required: true + DOCKERHUB_TOKEN: + description: "DOCKERHUB_TOKEN" + required: true + inputs: + image_tag_suffix: + description: "Optional suffix to override tag name generation" + type: string + required: false + compilers: + description: 'JSON of required compilers and their versions' + type: string + required: false + default: '[{ "zksolc": ["1.3.14", "1.3.16", "1.3.17", "1.3.1", "1.3.7", "1.3.18", "1.3.19", "1.3.21"] } , { "zkvyper": ["1.3.13"] }]' + action: + type: string + default: non-push + required: false + +jobs: + prepare-contracts: + name: Prepare contracts + runs-on: matterlabs-ci-runner-high-performance + steps: + - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + with: + submodules: "recursive" + + - name: Prepare ENV + shell: bash + run: | + echo ZKSYNC_HOME=$(pwd) >> $GITHUB_ENV + echo CI=1 >> $GITHUB_ENV + echo $(pwd)/bin >> $GITHUB_PATH + echo CI=1 >> .env + echo IN_DOCKER=1 >> .env + + - name: Download contracts + shell: bash + run: | + commit_sha=$(git submodule status contracts | awk '{print $1}' | tr -d '-') + page=1 + filtered_tag="" + while [ true ]; do + echo "Page: $page" + tags=$(run_retried curl -s -H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN }}" -H "Accept: application/vnd.github+json" \ + 
"https://api.github.com/repos/matter-labs/era-contracts/tags?per_page=100&page=${page}" | jq .) + if [ $(jq length <<<"$tags") -eq 0 ]; then + echo "No tag found on all pages." + echo "BUILD_CONTRACTS=true" >> "$GITHUB_ENV" + exit 0 + fi + filtered_tag=$(jq -r --arg commit_sha "$commit_sha" 'map(select(.commit.sha == $commit_sha)) | .[].name' <<<"$tags") + if [[ ! -z "$filtered_tag" ]]; then + echo "BUILD_CONTRACTS=false" >> "$GITHUB_ENV" + break + fi + ((page++)) + done + echo "Contracts tag is: ${filtered_tag}" + mkdir -p ./contracts + run_retried curl -s -LO https://github.com/matter-labs/era-contracts/releases/download/${filtered_tag}/l1-contracts.tar.gz + run_retried curl -s -LO https://github.com/matter-labs/era-contracts/releases/download/${filtered_tag}/l2-contracts.tar.gz + run_retried curl -s -LO https://github.com/matter-labs/era-contracts/releases/download/${filtered_tag}/system-contracts.tar.gz + tar -C ./contracts -zxf l1-contracts.tar.gz + tar -C ./contracts -zxf l2-contracts.tar.gz + tar -C ./contracts -zxf system-contracts.tar.gz + + - name: Install Apt dependencies + shell: bash + run: | + sudo apt-get update && sudo apt-get install -y libssl-dev pkg-config + + - name: Install Node + uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4.0.3 + with: + node-version: 20 + cache: 'npm' + + - name: Install Yarn + run: npm install -g yarn + + - name: Setup rust + uses: actions-rust-lang/setup-rust-toolchain@1fbea72663f6d4c03efaab13560c8a24cfd2a7cc # v1.9.0 + with: + toolchain: nightly-2024-08-01 + + - name: Install cargo-nextest from crates.io + uses: baptiste0928/cargo-install@904927dbe77864e0f2281519fe9d5bd097a220b3 # v3.1.1 + with: + crate: cargo-nextest + + - name: Install sqlx-cli from crates.io + uses: baptiste0928/cargo-install@904927dbe77864e0f2281519fe9d5bd097a220b3 # v3.1.1 + with: + crate: sqlx-cli + tag: 0.8.1 + + - name: Install Foundry + uses: 
foundry-rs/foundry-toolchain@8f1998e9878d786675189ef566a2e4bf24869773 # v1.2.0 + + - name: Pre-download compilers + shell: bash + run: | + # Download needed versions of vyper compiler + # Not sanitized due to unconventional path and tags + mkdir -p ./hardhat-nodejs/compilers-v2/vyper/linux + wget -nv -O ./hardhat-nodejs/compilers-v2/vyper/linux/0.3.10 https://github.com/vyperlang/vyper/releases/download/v0.3.10/vyper.0.3.10+commit.91361694.linux + wget -nv -O ./hardhat-nodejs/compilers-v2/vyper/linux/0.3.3 https://github.com/vyperlang/vyper/releases/download/v0.3.3/vyper.0.3.3+commit.48e326f0.linux + chmod +x ./hardhat-nodejs/compilers-v2/vyper/linux/0.3.10 + chmod +x ./hardhat-nodejs/compilers-v2/vyper/linux/0.3.3 + + COMPILERS_JSON='${{ inputs.compilers }}' + echo "$COMPILERS_JSON" | jq -r '.[] | to_entries[] | .key as $compiler | .value[] | "\(.),\($compiler)"' | while IFS=, read -r version compiler; do + mkdir -p "./hardhat-nodejs/compilers-v2/$compiler" + wget -nv -O "./hardhat-nodejs/compilers-v2/$compiler/${compiler}-v${version}" "https://github.com/matter-labs/${compiler}-bin/releases/download/v${version}/${compiler}-linux-amd64-musl-v${version}" + chmod +x "./hardhat-nodejs/compilers-v2/$compiler/${compiler}-v${version}" + done + + - name: init + shell: bash + run: | + mkdir -p ./volumes/postgres + docker compose up -d postgres + zkt || true + + - name: build contracts + shell: bash + run: | + cp etc/tokens/{test,localhost}.json + zk_supervisor contracts + + - name: Upload contracts + uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0 + with: + name: contacts-verifier + path: | + ./contracts + + build-images: + name: Build and Push Docker Images + needs: prepare-contracts + runs-on: ${{ fromJSON('["matterlabs-ci-runner-high-performance", "matterlabs-ci-runner-arm"]')[contains(matrix.platforms, 'arm')] }} + strategy: + matrix: + components: + - contract-verifier + - verified-sources-fetcher + platforms: + 
- linux/amd64 + + steps: + - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + with: + submodules: "recursive" + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@988b5a0280414f521da01fcc63a27aeeb4b104db # v3.6.1 + + - name: Setup env + shell: bash + run: | + echo ZKSYNC_HOME=$(pwd) >> $GITHUB_ENV + echo CI=1 >> $GITHUB_ENV + echo $(pwd)/bin >> $GITHUB_PATH + echo CI=1 >> .env + echo IN_DOCKER=1 >> .env + + - name: Download setup key + shell: bash + run: | + run_retried curl -LO https://storage.googleapis.com/matterlabs-setup-keys-us/setup-keys/setup_2\^26.key + + - name: Set env vars + shell: bash + run: | + echo PLATFORM=$(echo ${{ matrix.platforms }} | tr '/' '-') >> $GITHUB_ENV + echo IMAGE_TAG_SHA=$(git rev-parse --short HEAD) >> $GITHUB_ENV + # Support for custom tag suffix + if [ -n "${{ inputs.image_tag_suffix }}" ]; then + echo IMAGE_TAG_SHA_TS="${{ inputs.image_tag_suffix }}" >> $GITHUB_ENV + else + echo IMAGE_TAG_SHA_TS=$(git rev-parse --short HEAD)-$(date +%s) >> $GITHUB_ENV + fi + + - name: Download contracts + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 + with: + name: contacts-verifier + path: | + ./contracts + + - name: login to Docker registries + if: ${{ inputs.action == 'push' }} + shell: bash + run: | + docker login -u ${{ secrets.DOCKERHUB_USER }} -p ${{ secrets.DOCKERHUB_TOKEN }} + gcloud auth configure-docker us-docker.pkg.dev -q + + - name: Build and push + uses: docker/build-push-action@5cd11c3a4ced054e52742c5fd54dca954e0edd85 # v6.7.0 + with: + context: . 
+ push: ${{ inputs.action == 'push' }} + file: docker/${{ matrix.components }}/Dockerfile + build-args: | + SCCACHE_GCS_BUCKET=matterlabs-infra-sccache-storage + SCCACHE_GCS_SERVICE_ACCOUNT=gha-ci-runners@matterlabs-infra.iam.gserviceaccount.com + SCCACHE_GCS_RW_MODE=READ_WRITE + RUSTC_WRAPPER=sccache + tags: | + us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:latest + matterlabs/${{ matrix.components }}:latest + us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:latest2.0 + matterlabs/${{ matrix.components }}:latest2.0 + us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:2.0-${{ env.IMAGE_TAG_SHA }} + matterlabs/${{ matrix.components }}:2.0-${{ env.IMAGE_TAG_SHA }} + us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:${{ env.IMAGE_TAG_SHA }} + matterlabs/${{ matrix.components }}:${{ env.IMAGE_TAG_SHA }} + us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:2.0-${{ env.IMAGE_TAG_SHA_TS }} + matterlabs/${{ matrix.components }}:2.0-${{ env.IMAGE_TAG_SHA_TS }} + us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:${{ env.IMAGE_TAG_SHA_TS }} + matterlabs/${{ matrix.components }}:${{ env.IMAGE_TAG_SHA_TS }} + us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:2.0-${{ env.IMAGE_TAG_SHA_TS }} + matterlabs/${{ matrix.components }}:2.0-${{ env.IMAGE_TAG_SHA_TS }}-${{ env.PLATFORM }} + us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:${{ env.IMAGE_TAG_SHA_TS }} + matterlabs/${{ matrix.components }}:${{ env.IMAGE_TAG_SHA_TS }}-${{ env.PLATFORM }} + + create_manifest: + name: Create release manifest + runs-on: matterlabs-ci-runner + needs: build-images + if: ${{ inputs.action == 'push' }} + strategy: + matrix: + component: + - name: contract-verifier + platform: linux/amd64 + - name: verified-sources-fetcher + platform: linux/amd64 + env: + IMAGE_TAG_SUFFIX: ${{ 
inputs.image_tag_suffix }} + steps: + - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4 + + - name: login to Docker registries + run: | + docker login -u ${{ secrets.DOCKERHUB_USER }} -p ${{ secrets.DOCKERHUB_TOKEN }} + gcloud auth configure-docker us-docker.pkg.dev -q + + - name: Create Docker manifest + run: | + docker_repositories=("matterlabs/${{ matrix.component.name }}" "us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.component.name }}") + platforms=${{ matrix.component.platform }} + for repo in "${docker_repositories[@]}"; do + platform_tags="" + for platform in ${platforms//,/ }; do + platform=$(echo $platform | tr '/' '-') + platform_tags+=" --amend ${repo}:${IMAGE_TAG_SUFFIX}-${platform}" + done + for manifest in "${repo}:${IMAGE_TAG_SUFFIX}" "${repo}:2.0-${IMAGE_TAG_SUFFIX}" "${repo}:latest" "${repo}:latest2.0"; do + docker manifest create ${manifest} ${platform_tags} + docker manifest push ${manifest} + done + done diff --git a/.github/workflows/new-build-core-template.yml b/.github/workflows/new-build-core-template.yml new file mode 100644 index 000000000000..fba6a68b8eec --- /dev/null +++ b/.github/workflows/new-build-core-template.yml @@ -0,0 +1,287 @@ +name: Build Core images +on: + workflow_call: + secrets: + DOCKERHUB_USER: + description: "DOCKERHUB_USER" + required: true + DOCKERHUB_TOKEN: + description: "DOCKERHUB_TOKEN" + required: true + inputs: + image_tag_suffix: + description: "Optional suffix to override tag name generation" + type: string + required: false + compilers: + description: 'JSON of required compilers and their versions' + type: string + required: false + default: '[{ "zksolc": ["1.3.14", "1.3.16", "1.3.17", "1.3.1", "1.3.7", "1.3.18", "1.3.19", "1.3.21"] } , { "zkvyper": ["1.3.13"] }]' + en_alpha_release: + description: 'Flag that determines if EN release should be marked as alpha' + type: boolean + required: false + default: false + action: + type: string + required: false + default: "do
nothing" + +jobs: + prepare-contracts: + name: Prepare contracts + runs-on: matterlabs-ci-runner-high-performance + steps: + - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + with: + submodules: "recursive" + + - name: Prepare ENV + shell: bash + run: | + echo ZKSYNC_HOME=$(pwd) >> $GITHUB_ENV + echo CI=1 >> $GITHUB_ENV + echo $(pwd)/bin >> $GITHUB_PATH + echo CI=1 >> .env + echo IN_DOCKER=1 >> .env + + - name: Download contracts + shell: bash + run: | + commit_sha=$(git submodule status contracts | awk '{print $1}' | tr -d '-') + page=1 + filtered_tag="" + while [ true ]; do + echo "Page: $page" + tags=$(run_retried curl -s -H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN }}" -H "Accept: application/vnd.github+json" \ + "https://api.github.com/repos/matter-labs/era-contracts/tags?per_page=100&page=${page}" | jq .) + if [ $(jq length <<<"$tags") -eq 0 ]; then + echo "No tag found on all pages." + echo "BUILD_CONTRACTS=true" >> "$GITHUB_ENV" + exit 0 + fi + filtered_tag=$(jq -r --arg commit_sha "$commit_sha" 'map(select(.commit.sha == $commit_sha)) | .[].name' <<<"$tags") + if [[ ! 
-z "$filtered_tag" ]]; then + echo "BUILD_CONTRACTS=false" >> "$GITHUB_ENV" + break + fi + ((page++)) + done + echo "Contracts tag is: ${filtered_tag}" + mkdir -p ./contracts + run_retried curl -s -LO https://github.com/matter-labs/era-contracts/releases/download/${filtered_tag}/l1-contracts.tar.gz + run_retried curl -s -LO https://github.com/matter-labs/era-contracts/releases/download/${filtered_tag}/l2-contracts.tar.gz + run_retried curl -s -LO https://github.com/matter-labs/era-contracts/releases/download/${filtered_tag}/system-contracts.tar.gz + tar -C ./contracts -zxf l1-contracts.tar.gz + tar -C ./contracts -zxf l2-contracts.tar.gz + tar -C ./contracts -zxf system-contracts.tar.gz + + - name: Install Apt dependencies + shell: bash + run: | + sudo apt-get update && sudo apt-get install -y libssl-dev pkg-config + + - name: Install Node + uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4.0.3 + with: + node-version: 20 + cache: 'npm' + + - name: Install Yarn + run: npm install -g yarn + + - name: Setup rust + uses: actions-rust-lang/setup-rust-toolchain@1fbea72663f6d4c03efaab13560c8a24cfd2a7cc # v1.9.0 + with: + toolchain: nightly-2024-08-01 + + - name: Install cargo-nextest from crates.io + uses: baptiste0928/cargo-install@904927dbe77864e0f2281519fe9d5bd097a220b3 # v3.1.1 + with: + crate: cargo-nextest + + - name: Install sqlx-cli from crates.io + uses: baptiste0928/cargo-install@904927dbe77864e0f2281519fe9d5bd097a220b3 # v3.1.1 + with: + crate: sqlx-cli + tag: 0.8.1 + + - name: Install Foundry + uses: foundry-rs/foundry-toolchain@8f1998e9878d786675189ef566a2e4bf24869773 # v1.2.0 + + - name: Pre-download compilers + shell: bash + run: | + # Download needed versions of vyper compiler + # Not sanitized due to unconventional path and tags + mkdir -p ./hardhat-nodejs/compilers-v2/vyper/linux + wget -nv -O ./hardhat-nodejs/compilers-v2/vyper/linux/0.3.10 
https://github.com/vyperlang/vyper/releases/download/v0.3.10/vyper.0.3.10+commit.91361694.linux + wget -nv -O ./hardhat-nodejs/compilers-v2/vyper/linux/0.3.3 https://github.com/vyperlang/vyper/releases/download/v0.3.3/vyper.0.3.3+commit.48e326f0.linux + chmod +x ./hardhat-nodejs/compilers-v2/vyper/linux/0.3.10 + chmod +x ./hardhat-nodejs/compilers-v2/vyper/linux/0.3.3 + + COMPILERS_JSON='${{ inputs.compilers }}' + echo "$COMPILERS_JSON" | jq -r '.[] | to_entries[] | .key as $compiler | .value[] | "\(.),\($compiler)"' | while IFS=, read -r version compiler; do + mkdir -p "./hardhat-nodejs/compilers-v2/$compiler" + wget -nv -O "./hardhat-nodejs/compilers-v2/$compiler/${compiler}-v${version}" "https://github.com/matter-labs/${compiler}-bin/releases/download/v${version}/${compiler}-linux-amd64-musl-v${version}" + chmod +x "./hardhat-nodejs/compilers-v2/$compiler/${compiler}-v${version}" + done + + - name: init + shell: bash + run: | + mkdir -p ./volumes/postgres + docker compose up -d postgres + zkt || true + + - name: build contracts + shell: bash + run: | + cp etc/tokens/{test,localhost}.json + zk_supervisor contracts + + - name: Upload contracts + uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0 + with: + name: contacts + path: | + ./contracts + + build-images: + name: Build and Push Docker Images + needs: prepare-contracts + env: + IMAGE_TAG_SUFFIX: ${{ inputs.image_tag_suffix }}${{ (inputs.en_alpha_release && matrix.components == 'external-node') && '-alpha' || '' }} + runs-on: ${{ fromJSON('["matterlabs-ci-runner-high-performance", "matterlabs-ci-runner-arm"]')[contains(matrix.platforms, 'arm')] }} + strategy: + matrix: + components: + - server-v2 + - external-node + - snapshots-creator + platforms: + - linux/amd64 + include: + - components: external-node + platforms: linux/arm64 + + steps: + - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + with: + submodules: "recursive" + + - 
name: Set up Docker Buildx + uses: docker/setup-buildx-action@988b5a0280414f521da01fcc63a27aeeb4b104db # v3.6.1 + + - name: Setup env + shell: bash + run: | + echo ZKSYNC_HOME=$(pwd) >> $GITHUB_ENV + echo CI=1 >> $GITHUB_ENV + echo $(pwd)/bin >> $GITHUB_PATH + echo CI=1 >> .env + echo IN_DOCKER=1 >> .env + + - name: Download setup key + shell: bash + run: | + run_retried curl -LO https://storage.googleapis.com/matterlabs-setup-keys-us/setup-keys/setup_2\^26.key + + - name: Set env vars + shell: bash + run: | + echo PLATFORM=$(echo ${{ matrix.platforms }} | tr '/' '-') >> $GITHUB_ENV + echo IMAGE_TAG_SHA=$(git rev-parse --short HEAD) >> $GITHUB_ENV + # Support for custom tag suffix + if [ -n "${{ inputs.image_tag_suffix }}" ]; then + echo IMAGE_TAG_SHA_TS="${{ inputs.image_tag_suffix }}" >> $GITHUB_ENV + else + echo IMAGE_TAG_SHA_TS=$(git rev-parse --short HEAD)-$(date +%s) >> $GITHUB_ENV + fi + + - name: Download contracts + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 + with: + name: contacts + path: | + ./contracts + + - name: login to Docker registries + if: ${{ inputs.action == 'push' }} + shell: bash + run: | + docker login -u ${{ secrets.DOCKERHUB_USER }} -p ${{ secrets.DOCKERHUB_TOKEN }} + gcloud auth configure-docker us-docker.pkg.dev -q + + - name: Build and push + uses: docker/build-push-action@5cd11c3a4ced054e52742c5fd54dca954e0edd85 # v6.7.0 + with: + context: . 
+ push: ${{ inputs.action == 'push' }} + file: docker/${{ matrix.components }}/Dockerfile + build-args: | + SCCACHE_GCS_BUCKET=matterlabs-infra-sccache-storage + SCCACHE_GCS_SERVICE_ACCOUNT=gha-ci-runners@matterlabs-infra.iam.gserviceaccount.com + SCCACHE_GCS_RW_MODE=READ_WRITE + RUSTC_WRAPPER=sccache + tags: | + us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:latest + matterlabs/${{ matrix.components }}:latest + us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:latest2.0 + matterlabs/${{ matrix.components }}:latest2.0 + us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:2.0-${{ env.IMAGE_TAG_SHA }} + matterlabs/${{ matrix.components }}:2.0-${{ env.IMAGE_TAG_SHA }} + us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:${{ env.IMAGE_TAG_SHA }} + matterlabs/${{ matrix.components }}:${{ env.IMAGE_TAG_SHA }} + us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:2.0-${{ env.IMAGE_TAG_SHA_TS }} + matterlabs/${{ matrix.components }}:2.0-${{ env.IMAGE_TAG_SHA_TS }} + us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:${{ env.IMAGE_TAG_SHA_TS }} + matterlabs/${{ matrix.components }}:${{ env.IMAGE_TAG_SHA_TS }} + us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:2.0-${{ env.IMAGE_TAG_SHA_TS }} + matterlabs/${{ matrix.components }}:2.0-${{ env.IMAGE_TAG_SHA_TS }}-${{ env.PLATFORM }} + us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:${{ env.IMAGE_TAG_SHA_TS }} + matterlabs/${{ matrix.components }}:${{ env.IMAGE_TAG_SHA_TS }}-${{ env.PLATFORM }} + + create_manifest: + name: Create release manifest + runs-on: matterlabs-ci-runner + needs: build-images + if: ${{ inputs.action == 'push' }} + strategy: + matrix: + component: + - name: server-v2 + platform: linux/amd64 + - name: external-node + platform: linux/amd64,linux/arm64 + - name: snapshots-creator + 
platform: linux/amd64 + + env: + IMAGE_TAG_SUFFIX: ${{ inputs.image_tag_suffix }}${{ (inputs.en_alpha_release && matrix.component.name == 'external-node') && '-alpha' || '' }} + steps: + - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4 + + - name: login to Docker registries + shell: bash + run: | + docker login -u ${{ secrets.DOCKERHUB_USER }} -p ${{ secrets.DOCKERHUB_TOKEN }} + gcloud auth configure-docker us-docker.pkg.dev -q + + - name: Create Docker manifest + shell: bash + run: | + docker_repositories=("matterlabs/${{ matrix.component.name }}" "us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.component.name }}") + platforms=${{ matrix.component.platform }} + for repo in "${docker_repositories[@]}"; do + platform_tags="" + for platform in ${platforms//,/ }; do + platform=$(echo $platform | tr '/' '-') + platform_tags+=" --amend ${repo}:${IMAGE_TAG_SUFFIX}-${platform}" + done + for manifest in "${repo}:${IMAGE_TAG_SUFFIX}" "${repo}:2.0-${IMAGE_TAG_SUFFIX}" "${repo}:latest" "${repo}:latest2.0"; do + docker manifest create ${manifest} ${platform_tags} + docker manifest push ${manifest} + done + done diff --git a/.github/workflows/new-build-prover-template.yml b/.github/workflows/new-build-prover-template.yml new file mode 100644 index 000000000000..60c152213e60 --- /dev/null +++ b/.github/workflows/new-build-prover-template.yml @@ -0,0 +1,198 @@ +name: Build Prover images +on: + workflow_call: + secrets: + DOCKERHUB_USER: + description: "DOCKERHUB_USER" + required: true + DOCKERHUB_TOKEN: + description: "DOCKERHUB_TOKEN" + required: true + inputs: + ERA_BELLMAN_CUDA_RELEASE: + description: "ERA_BELLMAN_CUDA_RELEASE" + type: string + required: true + image_tag_suffix: + description: "Optional suffix to override tag name generation" + type: string + required: false + action: + description: "Action with docker image" + type: string + default: "push" + required: false + is_pr_from_fork: + description: "Indicates whether the 
workflow is invoked from a PR created from fork" + type: boolean + default: false + required: false + CUDA_ARCH: + description: "CUDA Arch to build" + type: string + default: "89" + required: false + outputs: + protocol_version: + description: "Protocol version of the binary" + value: ${{ jobs.get-protocol-version.outputs.protocol_version }} + +jobs: + get-protocol-version: + name: Get protocol version + runs-on: [ matterlabs-ci-runner-high-performance ] + outputs: + protocol_version: ${{ steps.protocolversion.outputs.protocol_version }} + steps: + - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + with: + submodules: "recursive" + + - name: setup-env + run: | + echo ZKSYNC_HOME=$(pwd) >> $GITHUB_ENV + echo CI=1 >> $GITHUB_ENV + echo $(pwd)/bin >> $GITHUB_PATH + echo CI=1 >> .env + echo IN_DOCKER=1 >> .env + + - name: setup rust + uses: actions-rust-lang/setup-rust-toolchain@1fbea72663f6d4c03efaab13560c8a24cfd2a7cc # v1.9.0 + with: + toolchain: nightly-2024-08-01 + + - name: Prepare sccache-cache env vars + shell: bash + run: | + echo SCCACHE_GCS_BUCKET=matterlabs-infra-sccache-storage >> $GITHUB_ENV + echo SCCACHE_GCS_SERVICE_ACCOUNT=gha-ci-runners@matterlabs-infra.iam.gserviceaccount.com >> $GITHUB_ENV + echo SCCACHE_ERROR_LOG=/tmp/sccache_log.txt >> $GITHUB_ENV + echo SCCACHE_GCS_RW_MODE=READ_WRITE >> $GITHUB_ENV + echo RUSTC_WRAPPER=sccache >> $GITHUB_ENV + + - name: protocol-version + id: protocolversion + # TODO: use -C flag, when it will become stable. 
+ shell: bash + run: | + cd prover + cargo build --release --bin prover_version + PPV=$(target/release/prover_version) + echo Protocol version is ${PPV} + echo "protocol_version=${PPV}" >> $GITHUB_OUTPUT + + build-images: + name: Build and Push Docker Images + needs: get-protocol-version + env: + PROTOCOL_VERSION: ${{ needs.get-protocol-version.outputs.protocol_version }} + runs-on: [ matterlabs-ci-runner-high-performance ] + strategy: + matrix: + components: + - witness-generator + - prover-gpu-fri + - witness-vector-generator + - prover-fri-gateway + - prover-job-monitor + - proof-fri-gpu-compressor + steps: + - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + with: + submodules: "recursive" + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@988b5a0280414f521da01fcc63a27aeeb4b104db # v3.6.1 + + - name: setup-env + run: | + echo ZKSYNC_HOME=$(pwd) >> $GITHUB_ENV + echo CI=1 >> $GITHUB_ENV + echo $(pwd)/bin >> $GITHUB_PATH + echo CI=1 >> .env + echo IN_DOCKER=1 >> .env + + - name: Set env vars + shell: bash + run: | + # Support for custom tag suffix + if [ -n "${{ inputs.image_tag_suffix }}" ]; then + echo IMAGE_TAG_SHA_TS="${{ inputs.image_tag_suffix }}" >> $GITHUB_ENV + else + echo IMAGE_TAG_SHA_TS=$(git rev-parse --short HEAD)-$(date +%s) >> $GITHUB_ENV + fi + + - name: download CRS for GPU compressor + if: matrix.components == 'proof-fri-gpu-compressor' + run: | + run_retried curl -LO https://storage.googleapis.com/matterlabs-setup-keys-us/setup-keys/setup_2\^24.key + + # We need to run this only when ERA_BELLMAN_CUDA_RELEASE is not available + # In our case it happens only when PR is created from fork + - name: Wait for runner IP to be not rate-limited against GH API + if: ( inputs.is_pr_from_fork == true && matrix.components == 'proof-fri-gpu-compressor' ) + run: ./.github/scripts/rate_limit_check.sh + + - name: Hack to set env vars inside docker container + shell: bash + run: | + sed -i '/^FROM 
matterlabs\/zksync-build-base:latest as builder/a ENV SCCACHE_GCS_BUCKET=matterlabs-infra-sccache-storage\nENV SCCACHE_GCS_SERVICE_ACCOUNT=gha-ci-runners@matterlabs-infra.iam.gserviceaccount.com\nENV SCCACHE_GCS_RW_MODE=READ_WRITE\nENV RUSTC_WRAPPER=sccache' ./docker/${{ matrix.components }}/Dockerfile + #TODO: remove AS version =) + sed -i '/^FROM matterlabs\/zksync-build-base:latest AS builder/a ENV SCCACHE_GCS_BUCKET=matterlabs-infra-sccache-storage\nENV SCCACHE_GCS_SERVICE_ACCOUNT=gha-ci-runners@matterlabs-infra.iam.gserviceaccount.com\nENV SCCACHE_GCS_RW_MODE=READ_WRITE\nENV RUSTC_WRAPPER=sccache' ./docker/${{ matrix.components }}/Dockerfile + cat ./docker/${{ matrix.components }}/Dockerfile + + - name: login to Docker registries + if: github.event_name != 'pull_request' && (github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/')) + shell: bash + run: | + docker login -u ${{ secrets.DOCKERHUB_USER }} -p ${{ secrets.DOCKERHUB_TOKEN }} + gcloud auth configure-docker us-docker.pkg.dev -q + + - name: Build and push + uses: docker/build-push-action@5cd11c3a4ced054e52742c5fd54dca954e0edd85 # v6.7.0 + with: + context: . 
+ push: ${{ inputs.action == 'push' }} + build-args: | + CUDA_ARCH=${{ inputs.CUDA_ARCH }} + SCCACHE_GCS_BUCKET=matterlabs-infra-sccache-storage + SCCACHE_GCS_SERVICE_ACCOUNT=gha-ci-runners@matterlabs-infra.iam.gserviceaccount.com + SCCACHE_GCS_RW_MODE=READ_WRITE + RUSTC_WRAPPER=sccache + file: docker/${{ matrix.components }}/Dockerfile + tags: | + us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:2.0-${{ env.PROTOCOL_VERSION }}-${{ env.IMAGE_TAG_SHA_TS }} + matterlabs/${{ matrix.components }}:2.0-${{ env.PROTOCOL_VERSION }}-${{ env.IMAGE_TAG_SHA_TS }} + + copy-images: + name: Copy images between docker registries + needs: [ build-images, get-protocol-version ] + env: + PROTOCOL_VERSION: ${{ needs.get-protocol-version.outputs.protocol_version }} + runs-on: matterlabs-ci-runner + if: ${{ inputs.action == 'push' }} + strategy: + matrix: + components: + - witness-vector-generator + steps: + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@988b5a0280414f521da01fcc63a27aeeb4b104db # v3.6.1 + + - name: Login to us-central1 GAR + run: | + gcloud auth print-access-token --lifetime=7200 --impersonate-service-account=gha-ci-runners@matterlabs-infra.iam.gserviceaccount.com | docker login -u oauth2accesstoken --password-stdin https://us-docker.pkg.dev + + - name: Login and push to Asia GAR + run: | + gcloud auth print-access-token --lifetime=7200 --impersonate-service-account=gha-ci-runners@matterlabs-infra.iam.gserviceaccount.com | docker login -u oauth2accesstoken --password-stdin https://asia-docker.pkg.dev + docker buildx imagetools create \ + --tag asia-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:2.0-${{ needs.build-images.outputs.protocol_version }}-${{ inputs.image_tag_suffix }} \ + us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:2.0-${{ needs.build-images.outputs.protocol_version }}-${{ inputs.image_tag_suffix }} + + - name: Login and push to Europe GAR + run: | + 
gcloud auth print-access-token --lifetime=7200 --impersonate-service-account=gha-ci-runners@matterlabs-infra.iam.gserviceaccount.com | docker login -u oauth2accesstoken --password-stdin https://europe-docker.pkg.dev + docker buildx imagetools create \ + --tag europe-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:2.0-${{ needs.build-images.outputs.protocol_version }}-${{ inputs.image_tag_suffix }} \ + us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:2.0-${{ needs.build-images.outputs.protocol_version }}-${{ inputs.image_tag_suffix }} diff --git a/.github/workflows/new-build-witness-generator-template.yml b/.github/workflows/new-build-witness-generator-template.yml new file mode 100644 index 000000000000..2f1fc0b2dd86 --- /dev/null +++ b/.github/workflows/new-build-witness-generator-template.yml @@ -0,0 +1,133 @@ +name: Build witness generator image with custom compiler flags +on: + workflow_call: + secrets: + DOCKERHUB_USER: + description: "DOCKERHUB_USER" + required: true + DOCKERHUB_TOKEN: + description: "DOCKERHUB_TOKEN" + required: true + inputs: + image_tag_suffix: + description: "Optional suffix to override tag name generation" + type: string + required: false + action: + type: string + default: non-push + required: false + CUDA_ARCH: + description: "CUDA Arch to build" + type: string + default: "89" + required: false + WITNESS_GENERATOR_RUST_FLAGS: + description: "Rust flags for witness_generator compilation" + type: string + default: "" + required: false + outputs: + protocol_version: + description: "Protocol version of the binary" + value: ${{ jobs.get-protocol-version.outputs.protocol_version }} + +jobs: + get-protocol-version: + name: Get protocol version + runs-on: [ matterlabs-ci-runner-high-performance ] + outputs: + protocol_version: ${{ steps.protocolversion.outputs.protocol_version }} + steps: + - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + with: + submodules: 
"recursive" + + - name: setup-env + run: | + echo ZKSYNC_HOME=$(pwd) >> $GITHUB_ENV + echo CI=1 >> $GITHUB_ENV + echo $(pwd)/bin >> $GITHUB_PATH + echo CI=1 >> .env + echo IN_DOCKER=1 >> .env + + - name: setup rust + uses: actions-rust-lang/setup-rust-toolchain@1fbea72663f6d4c03efaab13560c8a24cfd2a7cc # v1.9.0 + with: + toolchain: nightly-2024-08-01 + + - name: Prepare sccache-cache env vars + shell: bash + run: | + echo SCCACHE_GCS_BUCKET=matterlabs-infra-sccache-storage >> $GITHUB_ENV + echo SCCACHE_GCS_SERVICE_ACCOUNT=gha-ci-runners@matterlabs-infra.iam.gserviceaccount.com >> $GITHUB_ENV + echo SCCACHE_ERROR_LOG=/tmp/sccache_log.txt >> $GITHUB_ENV + echo SCCACHE_GCS_RW_MODE=READ_WRITE >> $GITHUB_ENV + echo RUSTC_WRAPPER=sccache >> $GITHUB_ENV + + - name: protocol-version + id: protocolversion + # TODO: use -C flag, when it will become stable. + shell: bash + run: | + cd prover + cargo build --release --bin prover_version + PPV=$(target/release/prover_version) + echo Protocol version is ${PPV} + echo "protocol_version=${PPV}" >> $GITHUB_OUTPUT + + build-images: + name: Build and Push Docker Images + needs: get-protocol-version + env: + PROTOCOL_VERSION: ${{ needs.get-protocol-version.outputs.protocol_version }} + runs-on: [ matterlabs-ci-runner-c3d ] + strategy: + matrix: + components: + - witness-generator + steps: + - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + with: + submodules: "recursive" + + - name: setup-env + run: | + echo ZKSYNC_HOME=$(pwd) >> $GITHUB_ENV + echo CI=1 >> $GITHUB_ENV + echo $(pwd)/bin >> $GITHUB_PATH + echo CI=1 >> .env + echo IN_DOCKER=1 >> .env + + - name: Set env vars + shell: bash + run: | + # Support for custom tag suffix + if [ -n "${{ inputs.image_tag_suffix }}" ]; then + echo IMAGE_TAG_SHA_TS="${{ inputs.image_tag_suffix }}" >> $GITHUB_ENV + else + echo IMAGE_TAG_SHA_TS=$(git rev-parse --short HEAD)-$(date +%s) >> $GITHUB_ENV + fi + + - name: login to Docker registries + if: github.event_name != 
'pull_request' && (github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/')) + shell: bash + run: | + docker login -u ${{ secrets.DOCKERHUB_USER }} -p ${{ secrets.DOCKERHUB_TOKEN }} + gcloud auth configure-docker us-docker.pkg.dev -q + + - name: Build and push + uses: docker/build-push-action@5cd11c3a4ced054e52742c5fd54dca954e0edd85 # v6.7.0 + with: + context: . + push: ${{ inputs.action == 'push' }} + build-args: | + CUDA_ARCH=${{ inputs.CUDA_ARCH }} + SCCACHE_GCS_BUCKET=matterlabs-infra-sccache-storage + SCCACHE_GCS_SERVICE_ACCOUNT=gha-ci-runners@matterlabs-infra.iam.gserviceaccount.com + SCCACHE_GCS_RW_MODE=READ_WRITE + RUSTC_WRAPPER=sccache + file: docker/${{ matrix.components }}/Dockerfile + tags: | + us-docker.pkg.dev/matterlabs-infra/matterlabs-docker/${{ matrix.components }}:2.0-${{ env.PROTOCOL_VERSION }}-${{ env.IMAGE_TAG_SHA_TS }} + matterlabs/${{ matrix.components }}:2.0-${{ env.PROTOCOL_VERSION }}-${{ env.IMAGE_TAG_SHA_TS }} diff --git a/.github/workflows/release-test-stage.yml b/.github/workflows/release-test-stage.yml index 1da5aa9ac928..ce74b76a6b7c 100644 --- a/.github/workflows/release-test-stage.yml +++ b/.github/workflows/release-test-stage.yml @@ -39,7 +39,7 @@ jobs: - '!prover/**' setup: name: Setup - runs-on: [matterlabs-deployer-stage] + runs-on: [ matterlabs-deployer-stage ] outputs: image_tag_suffix: ${{ steps.generate-tag-suffix.outputs.image_tag_suffix }} prover_fri_gpu_key_id: ${{ steps.extract-prover-fri-setup-key-ids.outputs.gpu_short_commit_sha }} @@ -61,7 +61,7 @@ jobs: build-push-core-images: name: Build and push images - needs: [setup, changed_files] + needs: [ setup, changed_files ] uses: ./.github/workflows/build-core-template.yml if: needs.changed_files.outputs.core == 'true' || needs.changed_files.outputs.all == 'true' with: @@ -72,7 +72,7 @@ jobs: build-push-tee-prover-images: name: Build and push images - needs: [setup, changed_files] + needs: [ setup, changed_files ] uses: 
./.github/workflows/build-tee-prover-template.yml if: needs.changed_files.outputs.core == 'true' || needs.changed_files.outputs.all == 'true' with: @@ -84,7 +84,7 @@ jobs: build-push-contract-verifier: name: Build and push images - needs: [setup, changed_files] + needs: [ setup, changed_files ] uses: ./.github/workflows/build-contract-verifier-template.yml if: needs.changed_files.outputs.core == 'true' || needs.changed_files.outputs.all == 'true' with: @@ -95,26 +95,26 @@ jobs: build-push-prover-images: name: Build and push images - needs: [setup, changed_files] + needs: [ setup, changed_files ] uses: ./.github/workflows/build-prover-template.yml if: needs.changed_files.outputs.prover == 'true' || needs.changed_files.outputs.all == 'true' with: image_tag_suffix: ${{ needs.setup.outputs.image_tag_suffix }} ERA_BELLMAN_CUDA_RELEASE: ${{ vars.ERA_BELLMAN_CUDA_RELEASE }} - CUDA_ARCH: "60;70;75;89" + CUDA_ARCH: "60;70;75;80;89" secrets: DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }} DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }} build-push-witness-generator-image-avx512: name: Build and push prover images with avx512 instructions - needs: [setup, changed_files] + needs: [ setup, changed_files ] uses: ./.github/workflows/build-witness-generator-template.yml if: needs.changed_files.outputs.prover == 'true' || needs.changed_files.outputs.all == 'true' with: image_tag_suffix: ${{ needs.setup.outputs.image_tag_suffix }}-avx512 ERA_BELLMAN_CUDA_RELEASE: ${{ vars.ERA_BELLMAN_CUDA_RELEASE }} - CUDA_ARCH: "60;70;75;89" + CUDA_ARCH: "60;70;75;80;89" WITNESS_GENERATOR_RUST_FLAGS: "-Ctarget_feature=+avx512bw,+avx512cd,+avx512dq,+avx512f,+avx512vl" secrets: DOCKERHUB_USER: ${{ secrets.DOCKERHUB_USER }} @@ -122,7 +122,7 @@ jobs: build-gar-prover-fri-gpu: name: Build GAR prover FRI GPU - needs: [setup, build-push-prover-images] + needs: [ setup, build-push-prover-images ] uses: ./.github/workflows/build-prover-fri-gpu-gar.yml if: needs.changed_files.outputs.prover == 'true' || 
needs.changed_files.outputs.all == 'true' with: diff --git a/.github/workflows/vm-perf-comparison.yml b/.github/workflows/vm-perf-comparison.yml index cfcfff93037f..6e044287ad3d 100644 --- a/.github/workflows/vm-perf-comparison.yml +++ b/.github/workflows/vm-perf-comparison.yml @@ -8,7 +8,7 @@ on: jobs: vm-benchmarks: name: Run VM benchmarks - runs-on: [matterlabs-ci-runner-highmem-long] + runs-on: [ matterlabs-ci-runner-highmem-long ] steps: - name: checkout base branch @@ -35,6 +35,10 @@ jobs: touch .env echo ZKSYNC_HOME=$(pwd) >> $GITHUB_ENV echo $(pwd)/bin >> $GITHUB_PATH + echo "SCCACHE_GCS_BUCKET=matterlabs-infra-sccache-storage" >> .env + echo "SCCACHE_GCS_SERVICE_ACCOUNT=gha-ci-runners@matterlabs-infra.iam.gserviceaccount.com" >> .env + echo "SCCACHE_GCS_RW_MODE=READ_WRITE" >> .env + echo "RUSTC_WRAPPER=sccache" >> .env - name: init run: | @@ -44,20 +48,20 @@ jobs: - name: run benchmarks on base branch shell: bash run: | - ci_run zk - ci_run zk compiler system-contracts + ci_run zkt + ci_run zk_supervisor contracts --system-contracts ci_run cargo bench --package vm-benchmark --bench iai | tee base-iai ci_run cargo run --package vm-benchmark --release --bin instruction_counts | tee base-opcodes || touch base-opcodes - ci_run yarn workspace system-contracts clean - name: checkout PR - run: git checkout --force FETCH_HEAD --recurse-submodules + run: | + git checkout --force FETCH_HEAD --recurse-submodules - name: run benchmarks on PR shell: bash run: | - ci_run zk - ci_run zk compiler system-contracts + ci_run zkt + ci_run zk_supervisor contracts --system-contracts ci_run cargo bench --package vm-benchmark --bench iai | tee pr-iai ci_run cargo run --package vm-benchmark --release --bin instruction_counts | tee pr-opcodes || touch pr-opcodes diff --git a/.github/workflows/vm-perf-to-prometheus.yml b/.github/workflows/vm-perf-to-prometheus.yml index 4d90b2a24ebb..2c82b796d70e 100644 --- a/.github/workflows/vm-perf-to-prometheus.yml +++ 
b/.github/workflows/vm-perf-to-prometheus.yml @@ -12,7 +12,7 @@ concurrency: vm-benchmarks jobs: vm-benchmarks: name: Run VM benchmarks - runs-on: [matterlabs-ci-runner-highmem-long] + runs-on: [ matterlabs-ci-runner-highmem-long ] steps: - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4 @@ -22,7 +22,10 @@ jobs: - name: setup-env run: | echo BENCHMARK_PROMETHEUS_PUSHGATEWAY_URL=${{ secrets.BENCHMARK_PROMETHEUS_PUSHGATEWAY_URL }} >> .env - + echo "SCCACHE_GCS_BUCKET=matterlabs-infra-sccache-storage" >> .env + echo "SCCACHE_GCS_SERVICE_ACCOUNT=gha-ci-runners@matterlabs-infra.iam.gserviceaccount.com" >> .env + echo "SCCACHE_GCS_RW_MODE=READ_WRITE" >> .env + echo "RUSTC_WRAPPER=sccache" >> .env echo ZKSYNC_HOME=$(pwd) >> $GITHUB_ENV echo $(pwd)/bin >> $GITHUB_PATH @@ -30,8 +33,8 @@ jobs: run: | run_retried docker compose pull zk docker compose up -d zk - ci_run zk - ci_run zk compiler all + ci_run zkt + ci_run zk_supervisor contracts all - name: run benchmarks run: | diff --git a/.gitignore b/.gitignore index 725b5940afeb..c3de7a2df84d 100644 --- a/.gitignore +++ b/.gitignore @@ -117,3 +117,4 @@ chains/era/configs/* configs/* era-observability/ core/tests/ts-integration/deployments-zk +transactions/ diff --git a/Cargo.lock b/Cargo.lock index 8634bff8a594..3d425b0d0a46 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -44,7 +44,7 @@ version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ac1f845298e95f983ff1944b728ae08b8cebab80d684f0a832ed0fc74dfa27e2" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "cipher", "cpufeatures", ] @@ -80,7 +80,7 @@ version = "0.8.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "77c3a9648d43b9cd48db467b3f87fdd6e146bcc88ab0180006cef2179fe11d01" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "once_cell", "version_check", "zerocopy", @@ -239,6 +239,132 @@ version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" 
checksum = "9b34d609dfbaf33d6889b2b7106d3ca345eacad44200913df5ba02bfd31d2ba9" +[[package]] +name = "async-channel" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89b47800b0be77592da0afd425cc03468052844aff33b84e33cc696f64e77b6a" +dependencies = [ + "concurrent-queue", + "event-listener-strategy", + "futures-core", + "pin-project-lite", +] + +[[package]] +name = "async-executor" +version = "1.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30ca9a001c1e8ba5149f91a74362376cc6bc5b919d92d988668657bd570bdcec" +dependencies = [ + "async-task", + "concurrent-queue", + "fastrand", + "futures-lite", + "slab", +] + +[[package]] +name = "async-fs" +version = "2.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebcd09b382f40fcd159c2d695175b2ae620ffa5f3bd6f664131efff4e8b9e04a" +dependencies = [ + "async-lock", + "blocking", + "futures-lite", +] + +[[package]] +name = "async-io" +version = "2.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "444b0228950ee6501b3568d3c93bf1176a1fdbc3b758dcd9475046d30f4dc7e8" +dependencies = [ + "async-lock", + "cfg-if", + "concurrent-queue", + "futures-io", + "futures-lite", + "parking", + "polling", + "rustix", + "slab", + "tracing", + "windows-sys 0.59.0", +] + +[[package]] +name = "async-lock" +version = "3.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff6e472cdea888a4bd64f342f09b3f50e1886d32afe8df3d663c01140b811b18" +dependencies = [ + "event-listener 5.3.1", + "event-listener-strategy", + "pin-project-lite", +] + +[[package]] +name = "async-net" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b948000fad4873c1c9339d60f2623323a0cfd3816e5181033c6a5cb68b2accf7" +dependencies = [ + "async-io", + "blocking", + "futures-lite", +] + +[[package]] +name = 
"async-process" +version = "2.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8a07789659a4d385b79b18b9127fc27e1a59e1e89117c78c5ea3b806f016374" +dependencies = [ + "async-channel", + "async-io", + "async-lock", + "async-signal", + "async-task", + "blocking", + "cfg-if", + "event-listener 5.3.1", + "futures-lite", + "rustix", + "tracing", + "windows-sys 0.59.0", +] + +[[package]] +name = "async-recursion" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b43422f69d8ff38f95f1b2bb76517c91589a924d1559a0e935d7c8ce0274c11" +dependencies = [ + "proc-macro2 1.0.86", + "quote 1.0.36", + "syn 2.0.72", +] + +[[package]] +name = "async-signal" +version = "0.2.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "637e00349800c0bdf8bfc21ebbc0b6524abea702b0da4168ac00d070d0c0b9f3" +dependencies = [ + "async-io", + "async-lock", + "atomic-waker", + "cfg-if", + "futures-core", + "futures-io", + "rustix", + "signal-hook-registry", + "slab", + "windows-sys 0.59.0", +] + [[package]] name = "async-stream" version = "0.3.5" @@ -261,6 +387,12 @@ dependencies = [ "syn 2.0.72", ] +[[package]] +name = "async-task" +version = "4.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b75356056920673b02621b35afd0f7dda9306d03c79a30f5c56c44cf256e3de" + [[package]] name = "async-trait" version = "0.1.74" @@ -281,6 +413,12 @@ dependencies = [ "num-traits", ] +[[package]] +name = "atomic-take" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8ab6b55fe97976e46f91ddbed8d147d966475dc29b2032757ba47e02376fbc3" + [[package]] name = "atomic-waker" version = "1.1.2" @@ -407,7 +545,7 @@ checksum = "2089b7e3f35b9dd2d0ed921ead4f6d318c27680d4a5bd167b3ee120edb105837" dependencies = [ "addr2line", "cc", - "cfg-if 1.0.0", + "cfg-if", "libc", "miniz_oxide", "object", @@ -426,6 +564,12 
@@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4c7f02d4ea65f2c1853089ffd8d2787bdbc63de2f0d29dedbcf8ccdfa0ccd4cf" +[[package]] +name = "base58" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6107fe1be6682a68940da878d9e9f5e90ca5745b3dec9fd1bb393c8777d4f581" + [[package]] name = "base64" version = "0.13.1" @@ -468,30 +612,6 @@ dependencies = [ "serde", ] -[[package]] -name = "bellman_ce" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ea340d5c1394ee4daf4415dd80e06f74e0ad9b08e21f73f6bb1fa3a9dfae80d" -dependencies = [ - "arrayvec 0.7.4", - "bit-vec", - "blake2s_const", - "blake2s_simd", - "byteorder", - "cfg-if 1.0.0", - "crossbeam 0.7.3", - "futures 0.3.28", - "hex", - "lazy_static", - "num_cpus", - "pairing_ce", - "rand 0.4.6", - "serde", - "smallvec", - "tiny-keccak 1.5.0", -] - [[package]] name = "bigdecimal" version = "0.4.5" @@ -558,6 +678,17 @@ dependencies = [ "which", ] +[[package]] +name = "bip39" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93f2635620bf0b9d4576eb7bb9a38a55df78bd1205d26fa994b25911a69f212f" +dependencies = [ + "bitcoin_hashes", + "serde", + "unicode-normalization", +] + [[package]] name = "bit-vec" version = "0.6.3" @@ -567,6 +698,12 @@ dependencies = [ "serde", ] +[[package]] +name = "bitcoin_hashes" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90064b8dee6815a6470d60bad07bbbaee885c0e12d04177138fa3291a01b7bc4" + [[package]] name = "bitflags" version = "1.3.2" @@ -623,6 +760,16 @@ dependencies = [ "digest 0.10.7", ] +[[package]] +name = "blake2-rfc" +version = "0.2.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d6d530bdd2d52966a6d03b7a964add7ae1a288d25214066fd4b600f0f796400" +dependencies = [ + "arrayvec 
0.4.12", + "constant_time_eq 0.1.5", +] + [[package]] name = "blake2-rfc_bellman_edition" version = "0.0.1" @@ -631,7 +778,7 @@ checksum = "fdc60350286c7c3db13b98e91dbe5c8b6830a6821bc20af5b0c310ce94d74915" dependencies = [ "arrayvec 0.4.12", "byteorder", - "constant_time_eq", + "constant_time_eq 0.1.5", ] [[package]] @@ -644,14 +791,14 @@ dependencies = [ ] [[package]] -name = "blake2s_const" -version = "0.7.0" +name = "blake2b_simd" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f39d933cb38939f885001867874c65699c36f30f0c78aae9f4c9f01b3e4b306a" +checksum = "23285ad32269793932e830392f2fe2f83e26488fd3ec778883a93c8323735780" dependencies = [ "arrayref", - "arrayvec 0.5.2", - "constant_time_eq", + "arrayvec 0.7.4", + "constant_time_eq 0.3.1", ] [[package]] @@ -662,7 +809,7 @@ checksum = "9e461a7034e85b211a4acb57ee2e6730b32912b06c08cc242243c39fc21ae6a2" dependencies = [ "arrayref", "arrayvec 0.5.2", - "constant_time_eq", + "constant_time_eq 0.1.5", ] [[package]] @@ -709,6 +856,19 @@ dependencies = [ "zksync_vlog", ] +[[package]] +name = "blocking" +version = "1.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "703f41c54fc768e63e091340b424302bb1c29ef4aa0c7f10fe849dfb114d29ea" +dependencies = [ + "async-channel", + "async-task", + "futures-io", + "futures-lite", + "piper", +] + [[package]] name = "blst" version = "0.3.13" @@ -723,18 +883,17 @@ dependencies = [ [[package]] name = "boojum" -version = "0.2.2" +version = "0.30.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df88daa33db46d683967ca09a4f04817c38950483f2501a771d497669a8a4bb1" +checksum = "68ec2f007ff8f90cc459f03e9f30ca1065440170f013c868823646e2e48d0234" dependencies = [ "arrayvec 0.7.4", "bincode", "blake2 0.10.6", "const_format", - "convert_case", - "crossbeam 0.8.4", + "convert_case 0.6.0", + "crossbeam", "crypto-bigint 0.5.3", - "cs_derive", "derivative", 
"ethereum-types", "firestorm", @@ -742,7 +901,6 @@ dependencies = [ "lazy_static", "num-modular", "num_cpus", - "pairing_ce", "rand 0.8.5", "rayon", "serde", @@ -750,6 +908,8 @@ dependencies = [ "sha3_ce", "smallvec", "unroll", + "zksync_cs_derive", + "zksync_pairing", ] [[package]] @@ -776,6 +936,15 @@ dependencies = [ "syn_derive", ] +[[package]] +name = "bs58" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf88ba1141d185c399bee5288d850d63b8369520c1eafc32a0430b5b6c287bf4" +dependencies = [ + "tinyvec", +] + [[package]] name = "build_html" version = "2.5.0" @@ -914,12 +1083,6 @@ dependencies = [ "nom", ] -[[package]] -name = "cfg-if" -version = "0.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822" - [[package]] name = "cfg-if" version = "1.0.0" @@ -938,7 +1101,7 @@ version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c3613f74bd2eac03dad61bd53dbe620703d4371614fe0bc3b9f04dd36fe4e818" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "cipher", "cpufeatures", ] @@ -1047,82 +1210,82 @@ dependencies = [ [[package]] name = "circuit_encodings" -version = "0.150.4" +version = "0.150.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2593c02ad6b4b31ba63506c3f807f666133dd36bf47422f99b1d2947cf3c8dc1" +checksum = "e67617688c66640c84f9b98ff26d48f7898dca4faeb45241a4f21ec333788e7b" dependencies = [ "derivative", "serde", - "zk_evm 0.150.4", - "zkevm_circuits 0.150.4", + "zk_evm 0.150.5", + "zkevm_circuits 0.150.5", ] [[package]] name = "circuit_sequencer_api" -version = "0.133.0" +version = "0.133.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a87dc7bee6630d4954ac7982eb77e2007476662250cf18e5c460bbc5ee435f1" +checksum = "eb959b1f8c6bbd8be711994d182e85452a26a5d2213a709290b71c8262af1331" 
dependencies = [ - "bellman_ce", "derivative", "rayon", "serde", "zk_evm 0.133.0", + "zksync_bellman", ] [[package]] name = "circuit_sequencer_api" -version = "0.140.0" +version = "0.140.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b5138e6524c73e6d49fc1d0822b26e62a8d78b2c07e4e1c56061a447c10bec0" +checksum = "fa5f22311ce609d852d7d9f4943535ea4610aeb785129ae6ff83d5201c4fb387" dependencies = [ - "bellman_ce", "circuit_encodings 0.140.1", "derivative", "rayon", "serde", "zk_evm 0.140.0", + "zksync_bellman", ] [[package]] name = "circuit_sequencer_api" -version = "0.141.1" +version = "0.141.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55a257b31a8ea1c1723cab4fb5661c6b4c0ebe022d4b73bea9eb7c9150bd3bc1" +checksum = "4c47c71d6ba83a8beb0af13af70beffd627f5497caf3d44c6f96363e788b07ea" dependencies = [ - "bellman_ce", "circuit_encodings 0.141.1", "derivative", "rayon", "serde", "zk_evm 0.141.0", + "zksync_bellman", ] [[package]] name = "circuit_sequencer_api" -version = "0.142.0" +version = "0.142.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1d861a7a9b8df9389c63092985fc993c46954771da86462d7cab8cbf55a6497" +checksum = "e264723359e6a1aad98110bdccf1ae3ad596e93e7d31da9e40f6adc07e4add54" dependencies = [ - "bellman_ce", "circuit_encodings 0.142.1", "derivative", "rayon", "serde", "zk_evm 0.141.0", + "zksync_bellman", ] [[package]] name = "circuit_sequencer_api" -version = "0.150.4" +version = "0.150.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42d1a86b9c2207f3bb2dff5f00d1af1cb95004b6d07e9bacb6519fe08f12c04b" +checksum = "21017310971d4a051e4a52ad70eed11d1ae69defeca8314f73a3a4bad16705a9" dependencies = [ - "bellman_ce", - "circuit_encodings 0.150.4", + "circuit_encodings 0.150.5", "derivative", "rayon", "serde", + "zksync_bellman", ] [[package]] @@ -1258,7 +1421,7 @@ version = "2.5.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "4ca0197aee26d1ae37445ee532fefce43251d24cc7c166799f4d46817f1d3973" dependencies = [ - "crossbeam-utils 0.8.20", + "crossbeam-utils", ] [[package]] @@ -1305,6 +1468,18 @@ version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "245097e9a4535ee1e3e3931fcfcd55a796a44c643e8596ff6566d68f09b87bbc" +[[package]] +name = "constant_time_eq" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7c74b8349d32d297c9134b8c88677813a227df8f779daa29bfc29c183fe3dca6" + +[[package]] +name = "convert_case" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" + [[package]] name = "convert_case" version = "0.6.0" @@ -1360,7 +1535,7 @@ version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", ] [[package]] @@ -1399,41 +1574,17 @@ dependencies = [ "itertools 0.10.5", ] -[[package]] -name = "crossbeam" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69323bff1fb41c635347b8ead484a5ca6c3f11914d784170b158d8449ab07f8e" -dependencies = [ - "cfg-if 0.1.10", - "crossbeam-channel 0.4.4", - "crossbeam-deque 0.7.4", - "crossbeam-epoch 0.8.2", - "crossbeam-queue 0.2.3", - "crossbeam-utils 0.7.2", -] - [[package]] name = "crossbeam" version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1137cd7e7fc0fb5d3c5a8678be38ec56e819125d8d7907411fe24ccb943faca8" dependencies = [ - "crossbeam-channel 0.5.13", - "crossbeam-deque 0.8.5", - "crossbeam-epoch 0.9.18", - "crossbeam-queue 0.3.11", - "crossbeam-utils 0.8.20", -] - -[[package]] -name = "crossbeam-channel" 
-version = "0.4.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b153fe7cbef478c567df0f972e02e6d736db11affe43dfc9c56a9374d1adfb87" -dependencies = [ - "crossbeam-utils 0.7.2", - "maybe-uninit", + "crossbeam-channel", + "crossbeam-deque", + "crossbeam-epoch", + "crossbeam-queue", + "crossbeam-utils", ] [[package]] @@ -1442,18 +1593,7 @@ version = "0.5.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "33480d6946193aa8033910124896ca395333cae7e2d1113d1fef6c3272217df2" dependencies = [ - "crossbeam-utils 0.8.20", -] - -[[package]] -name = "crossbeam-deque" -version = "0.7.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c20ff29ded3204c5106278a81a38f4b482636ed4fa1e6cfbeef193291beb29ed" -dependencies = [ - "crossbeam-epoch 0.8.2", - "crossbeam-utils 0.7.2", - "maybe-uninit", + "crossbeam-utils", ] [[package]] @@ -1462,23 +1602,8 @@ version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "613f8cc01fe9cf1a3eb3d7f488fd2fa8388403e97039e2f73692932e291a770d" dependencies = [ - "crossbeam-epoch 0.9.18", - "crossbeam-utils 0.8.20", -] - -[[package]] -name = "crossbeam-epoch" -version = "0.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "058ed274caafc1f60c4997b5fc07bf7dc7cca454af7c6e81edffe5f33f70dace" -dependencies = [ - "autocfg", - "cfg-if 0.1.10", - "crossbeam-utils 0.7.2", - "lazy_static", - "maybe-uninit", - "memoffset", - "scopeguard", + "crossbeam-epoch", + "crossbeam-utils", ] [[package]] @@ -1487,18 +1612,7 @@ version = "0.9.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e" dependencies = [ - "crossbeam-utils 0.8.20", -] - -[[package]] -name = "crossbeam-queue" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum 
= "774ba60a54c213d409d5353bda12d49cd68d14e45036a285234c8d6f91f92570" -dependencies = [ - "cfg-if 0.1.10", - "crossbeam-utils 0.7.2", - "maybe-uninit", + "crossbeam-utils", ] [[package]] @@ -1507,18 +1621,7 @@ version = "0.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "df0346b5d5e76ac2fe4e327c5fd1118d6be7c51dfb18f9b7922923f287471e35" dependencies = [ - "crossbeam-utils 0.8.20", -] - -[[package]] -name = "crossbeam-utils" -version = "0.7.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3c7c73a2d1e9fc0886a08b93e98eb643461230d5f1925e4036204d5f2e261a8" -dependencies = [ - "autocfg", - "cfg-if 0.1.10", - "lazy_static", + "crossbeam-utils", ] [[package]] @@ -1625,7 +1728,7 @@ version = "4.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "97fb8b7c4503de7d6ae7b42ab72a5a59857b4c937ec27a3d4539dba95b5ab2be" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "cpufeatures", "curve25519-dalek-derive", "digest 0.10.7", @@ -1652,8 +1755,28 @@ version = "0.13.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a01d95850c592940db9b8194bc39f4bc0e89dee5c4265e4b1807c34a9aba453c" dependencies = [ - "darling_core", - "darling_macro", + "darling_core 0.13.4", + "darling_macro 0.13.4", +] + +[[package]] +name = "darling" +version = "0.14.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b750cb3417fd1b327431a470f388520309479ab0bf5e323505daf0290cd3850" +dependencies = [ + "darling_core 0.14.4", + "darling_macro 0.14.4", +] + +[[package]] +name = "darling" +version = "0.20.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f63b86c8a8826a49b8c21f08a2d07338eec8d900540f8630dc76284be802989" +dependencies = [ + "darling_core 0.20.10", + "darling_macro 0.20.10", ] [[package]] @@ -1670,24 +1793,74 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = 
"darling_core" +version = "0.14.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "109c1ca6e6b7f82cc233a97004ea8ed7ca123a9af07a8230878fcfda9b158bf0" +dependencies = [ + "fnv", + "ident_case", + "proc-macro2 1.0.86", + "quote 1.0.36", + "strsim 0.10.0", + "syn 1.0.109", +] + +[[package]] +name = "darling_core" +version = "0.20.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95133861a8032aaea082871032f5815eb9e98cef03fa916ab4500513994df9e5" +dependencies = [ + "fnv", + "ident_case", + "proc-macro2 1.0.86", + "quote 1.0.36", + "strsim 0.11.1", + "syn 2.0.72", +] + [[package]] name = "darling_macro" version = "0.13.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c972679f83bdf9c42bd905396b6c3588a843a17f0f16dfcfa3e2c5d57441835" dependencies = [ - "darling_core", + "darling_core 0.13.4", "quote 1.0.36", "syn 1.0.109", ] +[[package]] +name = "darling_macro" +version = "0.14.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4aab4dbc9f7611d8b55048a3a16d2d010c2c8334e46304b40ac1cc14bf3b48e" +dependencies = [ + "darling_core 0.14.4", + "quote 1.0.36", + "syn 1.0.109", +] + +[[package]] +name = "darling_macro" +version = "0.20.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806" +dependencies = [ + "darling_core 0.20.10", + "quote 1.0.36", + "syn 2.0.72", +] + [[package]] name = "dashmap" version = "5.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "978747c1d849a7d2ee5e8adc0159961c48fb7e5db2f06af6723b80123bb53856" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "hashbrown 0.14.5", "lock_api", "once_cell", @@ -1746,6 +1919,19 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "derive_more" +version = "0.99.18" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f33878137e4dafd7fa914ad4e259e18a4e8e532b9617a2d0150262bf53abfce" +dependencies = [ + "convert_case 0.4.0", + "proc-macro2 1.0.86", + "quote 1.0.36", + "rustc_version", + "syn 2.0.72", +] + [[package]] name = "derive_more" version = "1.0.0-beta.6" @@ -1800,6 +1986,12 @@ version = "0.15.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b" +[[package]] +name = "downcast-rs" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75b325c5dbd37f80359721ad39aca5a29fb04c89279657cffdda8736d0c0b9d2" + [[package]] name = "dtoa" version = "1.0.9" @@ -1862,6 +2054,21 @@ dependencies = [ "zeroize", ] +[[package]] +name = "ed25519-zebra" +version = "4.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d9ce6874da5d4415896cd45ffbc4d1cfc0c4f9c079427bd870742c30f2f65a9" +dependencies = [ + "curve25519-dalek", + "ed25519", + "hashbrown 0.14.5", + "hex", + "rand_core 0.6.4", + "sha2 0.10.8", + "zeroize", +] + [[package]] name = "either" version = "1.9.0" @@ -1932,7 +2139,7 @@ version = "0.8.33" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7268b386296a025e474d5140678f75d6de9493ae55a5d709eeb9dd08149945e1" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", ] [[package]] @@ -1981,14 +2188,6 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" -[[package]] -name = "eravm-stable-interface" -version = "0.1.0" -source = "git+https://github.com/matter-labs/vm2.git?rev=4ef15d46410ffc11744771a3a6c7c09dd9470c90#4ef15d46410ffc11744771a3a6c7c09dd9470c90" -dependencies = [ - "primitive-types", -] - [[package]] name = "errno" version = "0.3.9" @@ -2014,7 +2213,7 @@ version = 
"0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "136d1b5283a1ab77bd9257427ffd09d8667ced0570b6f938942bc7568ed5b943" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "home", "windows-sys 0.48.0", ] @@ -2063,6 +2262,16 @@ dependencies = [ "uint", ] +[[package]] +name = "event-listener" +version = "4.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67b215c49b2b248c855fb73579eb1f4f26c38ffdc12973e20e07b91d78d5646e" +dependencies = [ + "concurrent-queue", + "pin-project-lite", +] + [[package]] name = "event-listener" version = "5.3.1" @@ -2074,6 +2283,16 @@ dependencies = [ "pin-project-lite", ] +[[package]] +name = "event-listener-strategy" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0f214dc438f977e6d4e3500aaa277f5ad94ca83fbbd9b1a15713ce2344ccc5a1" +dependencies = [ + "event-listener 5.3.1", + "pin-project-lite", +] + [[package]] name = "fastrand" version = "2.0.1" @@ -2107,27 +2326,11 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5b538e4231443a5b9c507caee3356f016d832cf7393d2d90f03ea3180d4e3fbc" dependencies = [ "byteorder", - "ff_derive_ce", "hex", "rand 0.4.6", "serde", ] -[[package]] -name = "ff_derive_ce" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b96fbccd88dbb1fac4ee4a07c2fcc4ca719a74ffbd9d2b9d41d8c8eb073d8b20" -dependencies = [ - "num-bigint 0.4.6", - "num-integer", - "num-traits", - "proc-macro2 1.0.86", - "quote 1.0.36", - "serde", - "syn 1.0.109", -] - [[package]] name = "fiat-crypto" version = "0.2.3" @@ -2220,9 +2423,9 @@ checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" [[package]] name = "form_urlencoded" -version = "1.2.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"a62bc1cf6f830c2ec14a513a9fb124d0a213a629668a4186f329db21fe045652" +checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" dependencies = [ "percent-encoding", ] @@ -2237,19 +2440,43 @@ dependencies = [ "num", ] +[[package]] +name = "frame-metadata" +version = "15.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "878babb0b136e731cc77ec2fd883ff02745ff21e6fb662729953d44923df009c" +dependencies = [ + "cfg-if", + "parity-scale-codec", + "scale-info", +] + +[[package]] +name = "frame-metadata" +version = "16.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87cf1549fba25a6fcac22785b61698317d958e96cac72a59102ea45b9ae64692" +dependencies = [ + "cfg-if", + "parity-scale-codec", + "scale-info", + "serde", +] + [[package]] name = "franklin-crypto" -version = "0.1.0" +version = "0.30.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "178bca54fc449a6f4cb45321ed9d769353143ac7ef314ea310f3a0c61bed2da2" +checksum = "971289216ea5c91872e5e0bb6989214b537bbce375d09fabea5c3ccfe031b204" dependencies = [ "arr_macro", - "bellman_ce", "bit-vec", "blake2 0.9.2", "blake2-rfc_bellman_edition", "blake2s_simd", + "boojum", "byteorder", + "derivative", "digest 0.9.0", "hex", "indexmap 1.9.3", @@ -2266,6 +2493,7 @@ dependencies = [ "smallvec", "splitmut", "tiny-keccak 1.5.0", + "zksync_bellman", ] [[package]] @@ -2294,9 +2522,9 @@ checksum = "3a471a38ef8ed83cd6e40aa59c1ffe17db6855c18e3604d9c4ed8c08ebc28678" [[package]] name = "futures" -version = "0.3.28" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23342abe12aba583913b2e62f22225ff9c950774065e4bfb61a19cd9770fec40" +checksum = "645c6916888f6cb6350d2550b80fb63e734897a8498abe35cfb732b6487804b0" dependencies = [ "futures-channel", "futures-core", @@ -2309,9 +2537,9 @@ dependencies = [ [[package]] name = "futures-channel" -version = "0.3.28" 
+version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "955518d47e09b25bbebc7a18df10b81f0c766eaf4c4f1cccef2fca5f2a4fb5f2" +checksum = "eac8f7d7865dcb88bd4373ab671c8cf4508703796caa2b1985a9ca867b3fcb78" dependencies = [ "futures-core", "futures-sink", @@ -2319,15 +2547,15 @@ dependencies = [ [[package]] name = "futures-core" -version = "0.3.28" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4bca583b7e26f571124fe5b7561d49cb2868d79116cfa0eefce955557c6fee8c" +checksum = "dfc6580bb841c5a68e9ef15c77ccc837b40a7504914d52e47b8b0e9bbda25a1d" [[package]] name = "futures-executor" -version = "0.3.28" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ccecee823288125bd88b4d7f565c9e58e41858e47ab72e8ea2d64e93624386e0" +checksum = "a576fc72ae164fca6b9db127eaa9a9dda0d61316034f33a0a0d4eda41f02b01d" dependencies = [ "futures-core", "futures-task", @@ -2348,15 +2576,28 @@ dependencies = [ [[package]] name = "futures-io" -version = "0.3.28" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a44623e20b9681a318efdd71c299b6b222ed6f231972bfe2f224ebad6311f0c1" + +[[package]] +name = "futures-lite" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fff74096e71ed47f8e023204cfd0aa1289cd54ae5430a9523be060cdb849964" +checksum = "52527eb5074e35e9339c6b4e8d12600c7128b68fb25dcb9fa9dec18f7c25f3a5" +dependencies = [ + "fastrand", + "futures-core", + "futures-io", + "parking", + "pin-project-lite", +] [[package]] name = "futures-macro" -version = "0.3.28" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72" +checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" dependencies = [ "proc-macro2 
1.0.86", "quote 1.0.36", @@ -2365,15 +2606,15 @@ dependencies = [ [[package]] name = "futures-sink" -version = "0.3.28" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f43be4fe21a13b9781a69afa4985b0f6ee0e1afab2c6f454a8cf30e2b2237b6e" +checksum = "9fb8e00e87438d937621c1c6269e53f536c14d3fbd6a042bb24879e57d474fb5" [[package]] name = "futures-task" -version = "0.3.28" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76d3d132be6c0e6aa1534069c705a74a5997a356c0dc2f86a47765e5617c5b65" +checksum = "38d84fa142264698cdce1a9f9172cf383a0c82de1bddcf3092901442c4097004" [[package]] name = "futures-timer" @@ -2387,9 +2628,9 @@ dependencies = [ [[package]] name = "futures-util" -version = "0.3.28" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26b01e40b772d54cf6c6d721c1d1abd0647a0106a12ecaa1c186273392a69533" +checksum = "3d6401deb83407ab3da39eba7e33987a73c3df0c82b4bb5813ee871c19c41d48" dependencies = [ "futures 0.1.31", "futures-channel", @@ -2421,7 +2662,7 @@ version = "0.1.0" dependencies = [ "anyhow", "clap 4.4.6", - "futures 0.3.28", + "futures 0.3.30", "serde", "serde_json", "serde_yaml", @@ -2445,13 +2686,23 @@ version = "0.2.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "be4136b2a15dd319360be1c07d9933517ccf0be8f16bf62a3bee4f0d618df427" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "js-sys", "libc", "wasi 0.11.0+wasi-snapshot-preview1", "wasm-bindgen", ] +[[package]] +name = "getrandom_or_panic" +version = "0.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ea1015b5a70616b688dc230cfe50c8af89d972cb132d5a622814d29773b10b9" +dependencies = [ + "rand 0.8.5", + "rand_core 0.6.4", +] + [[package]] name = "ghash" version = "0.5.0" @@ -2602,7 +2853,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" 
checksum = "19775995ee20209163239355bc3ad2f33f83da35d9ef72dea26e5af753552c87" dependencies = [ "dashmap", - "futures 0.3.28", + "futures 0.3.30", "futures-timer", "no-std-compat", "nonzero_ext", @@ -2709,6 +2960,7 @@ checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" dependencies = [ "ahash 0.8.7", "allocator-api2", + "serde", ] [[package]] @@ -2756,6 +3008,12 @@ version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" +[[package]] +name = "hermit-abi" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fbf6a919d6cf397374f7dfeeea91d974c7c0a7221d0d0f4f20d859d329e53fcc" + [[package]] name = "hex" version = "0.4.3" @@ -2768,7 +3026,17 @@ version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "791a029f6b9fc27657f6f188ec6e5e43f6911f6f878e0dc5501396e09809d437" dependencies = [ - "hmac", + "hmac 0.12.1", +] + +[[package]] +name = "hmac" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "126888268dcc288495a26bf004b38c5fdbb31682f992c84ceb046a1f0fe38840" +dependencies = [ + "crypto-mac", + "digest 0.9.0", ] [[package]] @@ -2780,6 +3048,17 @@ dependencies = [ "digest 0.10.7", ] +[[package]] +name = "hmac-drbg" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "17ea0a1394df5b6574da6e0c1ade9e78868c9fb0a4e5ef4428e32da4676b85b1" +dependencies = [ + "digest 0.9.0", + "generic-array", + "hmac 0.8.1", +] + [[package]] name = "home" version = "0.5.5" @@ -2913,6 +3192,22 @@ dependencies = [ "want", ] +[[package]] +name = "hyper-rustls" +version = "0.24.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" +dependencies = [ + "futures-util", 
+ "http 0.2.9", + "hyper 0.14.29", + "log", + "rustls 0.21.12", + "rustls-native-certs 0.6.3", + "tokio", + "tokio-rustls 0.24.1", +] + [[package]] name = "hyper-rustls" version = "0.27.2" @@ -2924,10 +3219,10 @@ dependencies = [ "hyper 1.3.1", "hyper-util", "log", - "rustls", + "rustls 0.23.10", "rustls-pki-types", "tokio", - "tokio-rustls", + "tokio-rustls 0.26.0", "tower-service", ] @@ -3030,9 +3325,9 @@ checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" [[package]] name = "idna" -version = "0.4.0" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d20d6b07bfbc108882d88ed8e37d39636dcc260e15e30c45e6ba089610b917c" +checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6" dependencies = [ "unicode-bidi", "unicode-normalization", @@ -3110,6 +3405,12 @@ dependencies = [ "hashbrown 0.14.5", ] +[[package]] +name = "indexmap-nostd" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e04e2fd2b8188ea827b32ef11de88377086d690286ab35747ef7f9bf3ccb590" + [[package]] name = "inout" version = "0.1.3" @@ -3133,6 +3434,15 @@ dependencies = [ "yaml-rust", ] +[[package]] +name = "instant" +version = "0.1.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e0242819d153cba4b4b05a5a8f2a7e9bbf97b6055b2a002b395c96b5ff3c0222" +dependencies = [ + "cfg-if", +] + [[package]] name = "ipnet" version = "2.9.0" @@ -3210,24 +3520,57 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "jsonrpsee" +version = "0.21.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9579d0ca9fb30da026bac2f0f7d9576ec93489aeb7cd4971dd5b4617d82c79b2" +dependencies = [ + "jsonrpsee-client-transport 0.21.0", + "jsonrpsee-core 0.21.0", + "jsonrpsee-http-client 0.21.0", + "jsonrpsee-types 0.21.0", +] + [[package]] name = "jsonrpsee" version = "0.23.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "62b089779ad7f80768693755a031cc14a7766aba707cbe886674e3f79e9b7e47" dependencies = [ - "jsonrpsee-client-transport", - "jsonrpsee-core", - "jsonrpsee-http-client", + "jsonrpsee-client-transport 0.23.2", + "jsonrpsee-core 0.23.2", + "jsonrpsee-http-client 0.23.2", "jsonrpsee-proc-macros", "jsonrpsee-server", - "jsonrpsee-types", + "jsonrpsee-types 0.23.2", "jsonrpsee-wasm-client", "jsonrpsee-ws-client", "tokio", "tracing", ] +[[package]] +name = "jsonrpsee-client-transport" +version = "0.21.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9f9ed46590a8d5681975f126e22531698211b926129a40a2db47cbca429220" +dependencies = [ + "futures-util", + "http 0.2.9", + "jsonrpsee-core 0.21.0", + "pin-project", + "rustls-native-certs 0.7.0", + "rustls-pki-types", + "soketto 0.7.1", + "thiserror", + "tokio", + "tokio-rustls 0.25.0", + "tokio-util", + "tracing", + "url", +] + [[package]] name = "jsonrpsee-client-transport" version = "0.23.2" @@ -3239,20 +3582,44 @@ dependencies = [ "futures-util", "gloo-net", "http 1.1.0", - "jsonrpsee-core", + "jsonrpsee-core 0.23.2", "pin-project", - "rustls", + "rustls 0.23.10", "rustls-pki-types", "rustls-platform-verifier", - "soketto", + "soketto 0.8.0", "thiserror", "tokio", - "tokio-rustls", + "tokio-rustls 0.26.0", "tokio-util", "tracing", "url", ] +[[package]] +name = "jsonrpsee-core" +version = "0.21.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "776d009e2f591b78c038e0d053a796f94575d66ca4e77dd84bfc5e81419e436c" +dependencies = [ + "anyhow", + "async-lock", + "async-trait", + "beef", + "futures-timer", + "futures-util", + "hyper 0.14.29", + "jsonrpsee-types 0.21.0", + "pin-project", + "rustc-hash", + "serde", + "serde_json", + "thiserror", + "tokio", + "tokio-stream", + "tracing", +] + [[package]] name = "jsonrpsee-core" version = "0.23.2" @@ -3268,7 +3635,7 @@ dependencies = [ "http 
1.1.0", "http-body 1.0.0", "http-body-util", - "jsonrpsee-types", + "jsonrpsee-types 0.23.2", "parking_lot", "pin-project", "rand 0.8.5", @@ -3282,6 +3649,26 @@ dependencies = [ "wasm-bindgen-futures", ] +[[package]] +name = "jsonrpsee-http-client" +version = "0.21.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78b7de9f3219d95985eb77fd03194d7c1b56c19bce1abfcc9d07462574b15572" +dependencies = [ + "async-trait", + "hyper 0.14.29", + "hyper-rustls 0.24.2", + "jsonrpsee-core 0.21.0", + "jsonrpsee-types 0.21.0", + "serde", + "serde_json", + "thiserror", + "tokio", + "tower", + "tracing", + "url", +] + [[package]] name = "jsonrpsee-http-client" version = "0.23.2" @@ -3292,11 +3679,11 @@ dependencies = [ "base64 0.22.1", "http-body 1.0.0", "hyper 1.3.1", - "hyper-rustls", + "hyper-rustls 0.27.2", "hyper-util", - "jsonrpsee-core", - "jsonrpsee-types", - "rustls", + "jsonrpsee-core 0.23.2", + "jsonrpsee-types 0.23.2", + "rustls 0.23.10", "rustls-platform-verifier", "serde", "serde_json", @@ -3333,13 +3720,13 @@ dependencies = [ "http-body-util", "hyper 1.3.1", "hyper-util", - "jsonrpsee-core", - "jsonrpsee-types", + "jsonrpsee-core 0.23.2", + "jsonrpsee-types 0.23.2", "pin-project", "route-recognizer", "serde", "serde_json", - "soketto", + "soketto 0.8.0", "thiserror", "tokio", "tokio-stream", @@ -3348,6 +3735,19 @@ dependencies = [ "tracing", ] +[[package]] +name = "jsonrpsee-types" +version = "0.21.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3266dfb045c9174b24c77c2dfe0084914bb23a6b2597d70c9dc6018392e1cd1b" +dependencies = [ + "anyhow", + "beef", + "serde", + "serde_json", + "thiserror", +] + [[package]] name = "jsonrpsee-types" version = "0.23.2" @@ -3367,9 +3767,9 @@ version = "0.23.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4727ac037f834c6f04c0912cada7532dbddb54e92fbc64e33d6cb8c24af313c9" dependencies = [ - "jsonrpsee-client-transport", - 
"jsonrpsee-core", - "jsonrpsee-types", + "jsonrpsee-client-transport 0.23.2", + "jsonrpsee-core 0.23.2", + "jsonrpsee-types 0.23.2", ] [[package]] @@ -3379,9 +3779,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1c28759775f5cb2f1ea9667672d3fe2b0e701d1f4b7b67954e60afe7fd058b5e" dependencies = [ "http 1.1.0", - "jsonrpsee-client-transport", - "jsonrpsee-core", - "jsonrpsee-types", + "jsonrpsee-client-transport 0.23.2", + "jsonrpsee-core 0.23.2", + "jsonrpsee-types 0.23.2", "url", ] @@ -3406,7 +3806,7 @@ version = "0.11.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72c1e0b51e7ec0a97369623508396067a486bd0cbed95a2659a4b863d28cfc8b" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "ecdsa 0.14.8", "elliptic-curve 0.12.3", "sha2 0.10.8", @@ -3418,7 +3818,7 @@ version = "0.13.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "956ff9b67e26e1a6a866cb758f12c6f8746208489e3e4a4b5580802f2f0a587b" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "ecdsa 0.16.9", "elliptic-curve 0.13.8", "once_cell", @@ -3468,7 +3868,7 @@ version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b67380fd3b2fbe7527a606e18729d21c6f3951633d0500574c4dc22d2d638b9f" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "winapi", ] @@ -3494,6 +3894,54 @@ dependencies = [ "zstd-sys", ] +[[package]] +name = "libsecp256k1" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95b09eff1b35ed3b33b877ced3a691fc7a481919c7e29c53c906226fcf55e2a1" +dependencies = [ + "arrayref", + "base64 0.13.1", + "digest 0.9.0", + "hmac-drbg", + "libsecp256k1-core", + "libsecp256k1-gen-ecmult", + "libsecp256k1-gen-genmult", + "rand 0.8.5", + "serde", + "sha2 0.9.9", + "typenum", +] + +[[package]] +name = "libsecp256k1-core" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"5be9b9bb642d8522a44d533eab56c16c738301965504753b03ad1de3425d5451" +dependencies = [ + "crunchy", + "digest 0.9.0", + "subtle", +] + +[[package]] +name = "libsecp256k1-gen-ecmult" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3038c808c55c87e8a172643a7d87187fc6c4174468159cb3090659d55bcb4809" +dependencies = [ + "libsecp256k1-core", +] + +[[package]] +name = "libsecp256k1-gen-genmult" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3db8d6ba2cec9eacc40e6e8ccc98931840301f1006e95647ceb2dd5c3aa06f7c" +dependencies = [ + "libsecp256k1-core", +] + [[package]] name = "libsqlite3-sys" version = "0.30.1" @@ -3535,7 +3983,7 @@ dependencies = [ "anyhow", "async-trait", "envy", - "futures 0.3.28", + "futures 0.3.30", "hex", "num", "once_cell", @@ -3613,6 +4061,9 @@ name = "lru" version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2994eeba8ed550fd9b47a0b38f0242bc3344e496483c6180b69139cc2fa5d1d7" +dependencies = [ + "hashbrown 0.14.5", +] [[package]] name = "lz4-sys" @@ -3654,19 +4105,13 @@ version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94" -[[package]] -name = "maybe-uninit" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60302e4db3a61da70c0cb7991976248362f30319e88850c487b9b95bbf059e00" - [[package]] name = "md-5" version = "0.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d89e7ee0cfbedfc4da3340218492196241d89eefb6dab27de5df917a6d2e78cf" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "digest 0.10.7", ] @@ -3676,15 +4121,6 @@ version = "2.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f665ee40bc4a3c5590afb1e9677db74a508659dfd71e126420da8274909a0167" 
-[[package]] -name = "memoffset" -version = "0.5.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "043175f069eda7b85febe4a74abbaeff828d9f8b448515d3151a14a3542811aa" -dependencies = [ - "autocfg", -] - [[package]] name = "merkle_tree_consistency_checker" version = "0.1.0" @@ -3700,6 +4136,18 @@ dependencies = [ "zksync_vlog", ] +[[package]] +name = "merlin" +version = "3.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "58c38e2799fc0978b65dfff8023ec7843e2330bb462f19198840b34b6582397d" +dependencies = [ + "byteorder", + "keccak", + "rand_core 0.6.4", + "zeroize", +] + [[package]] name = "miette" version = "5.10.0" @@ -3745,8 +4193,8 @@ version = "0.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "23e0b72e7c9042467008b10279fc732326bd605459ae03bda88825909dd19b56" dependencies = [ - "crossbeam-channel 0.5.13", - "crossbeam-utils 0.8.20", + "crossbeam-channel", + "crossbeam-utils", "dashmap", "skeptic", "smallvec", @@ -3835,7 +4283,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2eb04e9c688eff1c89d72b407f168cf79bb9e867a9d3323ed6c01519eb9cc053" dependencies = [ "bitflags 2.6.0", - "cfg-if 1.0.0", + "cfg-if", "libc", ] @@ -3845,6 +4293,12 @@ version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b93853da6d84c2e3c7d730d6473e8817692dd89be387eb01b94d7f108ecb5b8c" +[[package]] +name = "no-std-net" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43794a0ace135be66a25d3ae77d41b91615fb68ae937f904090203e81f755b65" + [[package]] name = "nodrop" version = "0.1.14" @@ -4095,7 +4549,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9529f4786b70a3e8c61e11179af17ab6188ad8d0ded78c5529441ed39d4bd9c1" dependencies = [ "bitflags 2.6.0", - "cfg-if 1.0.0", + "cfg-if", "foreign-types", 
"libc", "once_cell", @@ -4274,24 +4728,11 @@ dependencies = [ "sha2 0.10.8", ] -[[package]] -name = "pairing_ce" -version = "0.28.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "843b5b6fb63f00460f611dbc87a50bbbb745f0dfe5cbf67ca89299c79098640e" -dependencies = [ - "byteorder", - "cfg-if 1.0.0", - "ff_ce", - "rand 0.4.6", - "serde", -] - [[package]] name = "parity-scale-codec" -version = "3.6.5" +version = "3.6.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0dec8a8073036902368c2cdc0387e85ff9a37054d7e7c98e592145e0c92cd4fb" +checksum = "306800abfa29c7f16596b5970a588435e3d5b3149683d00c12b699cc19f895ee" dependencies = [ "arrayvec 0.7.4", "bitvec", @@ -4303,11 +4744,11 @@ dependencies = [ [[package]] name = "parity-scale-codec-derive" -version = "3.6.5" +version = "3.6.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "312270ee71e1cd70289dacf597cab7b207aa107d2f28191c2ae45b2ece18a260" +checksum = "d830939c76d294956402033aee57a6da7b438f2294eb94864c37b0569053a42c" dependencies = [ - "proc-macro-crate 1.3.1", + "proc-macro-crate 3.1.0", "proc-macro2 1.0.86", "quote 1.0.36", "syn 1.0.109", @@ -4335,7 +4776,7 @@ version = "0.9.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4c42a9226546d68acdd9c0a280d17ce19bfe27a46bf68784e4066115788d008e" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "libc", "redox_syscall", "smallvec", @@ -4348,6 +4789,15 @@ version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "de3145af08024dea9fa9914f381a17b8fc6034dfb00f3a84013f7ff43f29ed4c" +[[package]] +name = "pbkdf2" +version = "0.12.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8ed6a7761f76e3b9f92dfb0a60a6a6477c61024b775147ff0973a02653abaf2" +dependencies = [ + "digest 0.10.7", +] + [[package]] name = "peeking_take_while" version = "0.1.2" @@ 
-4375,9 +4825,9 @@ dependencies = [ [[package]] name = "percent-encoding" -version = "2.3.0" +version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b2a4787296e9989611394c33f193f676704af1686e70b8f8033ab5ba9a35a94" +checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" [[package]] name = "pest" @@ -4466,6 +4916,17 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" +[[package]] +name = "piper" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96c8c490f422ef9a4efd2cb5b42b76c8613d7e7dfc1caf667b8a3350a5acc066" +dependencies = [ + "atomic-waker", + "fastrand", + "futures-io", +] + [[package]] name = "pkcs1" version = "0.7.5" @@ -4531,6 +4992,21 @@ dependencies = [ "plotters-backend", ] +[[package]] +name = "polling" +version = "3.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc2790cd301dec6cd3b7a025e4815cf825724a51c98dccfe6a3e55f05ffb6511" +dependencies = [ + "cfg-if", + "concurrent-queue", + "hermit-abi 0.4.0", + "pin-project-lite", + "rustix", + "tracing", + "windows-sys 0.59.0", +] + [[package]] name = "poly1305" version = "0.8.0" @@ -4548,7 +5024,7 @@ version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d52cff9d1d4dee5fe6d03729099f4a310a41179e0a10dbf542039873f2e826fb" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "cpufeatures", "opaque-debug", "universal-hash", @@ -4605,6 +5081,7 @@ dependencies = [ "impl-codec", "impl-rlp", "impl-serde", + "scale-info", "uint", ] @@ -4864,7 +5341,7 @@ version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "20afe714292d5e879d8b12740aa223c6a88f118af41870e8b6196e39a02238a8" dependencies = [ - "crossbeam-utils 0.8.20", + "crossbeam-utils", "libc", 
"mach", "once_cell", @@ -5005,8 +5482,8 @@ version = "1.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1465873a3dfdaa8ae7cb14b4383657caab0b3e8a0aa9ae8e04b044854c8dfce2" dependencies = [ - "crossbeam-deque 0.8.5", - "crossbeam-utils 0.8.20", + "crossbeam-deque", + "crossbeam-utils", ] [[package]] @@ -5029,13 +5506,13 @@ dependencies = [ [[package]] name = "regex" -version = "1.10.2" +version = "1.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "380b951a9c5e80ddfd6136919eef32310721aa4aacd4889a8d39124b026ab343" +checksum = "4219d74c6b67a3654a9fbebc4b419e22126d13d2f3c4a07ee0cb61ff79a79619" dependencies = [ "aho-corasick", "memchr", - "regex-automata 0.4.3", + "regex-automata 0.4.7", "regex-syntax 0.8.2", ] @@ -5050,9 +5527,9 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.4.3" +version = "0.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f804c7828047e88b2d32e2d7fe5a105da8ee3264f01902f796c8e067dc2483f" +checksum = "38caf58cc5ef2fed281f89292ef23f6365465ed9a41b7a7754eb4e26496c92df" dependencies = [ "aho-corasick", "memchr", @@ -5135,7 +5612,7 @@ dependencies = [ "http-body 1.0.0", "http-body-util", "hyper 1.3.1", - "hyper-rustls", + "hyper-rustls 0.27.2", "hyper-tls 0.6.0", "hyper-util", "ipnet", @@ -5147,7 +5624,7 @@ dependencies = [ "once_cell", "percent-encoding", "pin-project-lite", - "rustls-pemfile", + "rustls-pemfile 2.0.0", "serde", "serde_json", "serde_urlencoded", @@ -5182,15 +5659,18 @@ dependencies = [ [[package]] name = "rescue_poseidon" -version = "0.4.1" +version = "0.30.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ada2124f92cf32b813e50f6f7d9e92f05addc321edb8b68f9b4e2bb6e0d5af8b" +checksum = "82900c877a0ba5362ac5756efbd82c5b795dc509011c1253e2389d8708f1389d" dependencies = [ "addchain", "arrayvec 0.7.4", "blake2 0.10.6", "byteorder", + "derivative", 
"franklin-crypto", + "lazy_static", + "log", "num-bigint 0.3.3", "num-integer", "num-iter", @@ -5199,6 +5679,7 @@ dependencies = [ "serde", "sha3 0.9.1", "smallvec", + "typemap_rev", ] [[package]] @@ -5208,7 +5689,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7743f17af12fa0b03b803ba12cd6a8d9483a587e89c69445e3909655c0b9fabb" dependencies = [ "crypto-bigint 0.4.9", - "hmac", + "hmac 0.12.1", "zeroize", ] @@ -5218,7 +5699,7 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f8dd2a808d456c4a54e300a23e9f5a67e122c3024119acbfd73e3bf664491cb2" dependencies = [ - "hmac", + "hmac 0.12.1", "subtle", ] @@ -5367,6 +5848,32 @@ dependencies = [ "windows-sys 0.52.0", ] +[[package]] +name = "rustls" +version = "0.21.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f56a14d1f48b391359b22f731fd4bd7e43c97f3c50eee276f3aa09c94784d3e" +dependencies = [ + "log", + "ring", + "rustls-webpki 0.101.7", + "sct", +] + +[[package]] +name = "rustls" +version = "0.22.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf4ef73721ac7bcd79b2b315da7779d8fc09718c6b3d2d1b2d94850eb8c18432" +dependencies = [ + "log", + "ring", + "rustls-pki-types", + "rustls-webpki 0.102.4", + "subtle", + "zeroize", +] + [[package]] name = "rustls" version = "0.23.10" @@ -5378,11 +5885,23 @@ dependencies = [ "once_cell", "ring", "rustls-pki-types", - "rustls-webpki", + "rustls-webpki 0.102.4", "subtle", "zeroize", ] +[[package]] +name = "rustls-native-certs" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9aace74cb666635c918e9c12bc0d348266037aa8eb599b5cba565709a8dff00" +dependencies = [ + "openssl-probe", + "rustls-pemfile 1.0.4", + "schannel", + "security-framework", +] + [[package]] name = "rustls-native-certs" version = "0.7.0" @@ -5390,12 +5909,21 @@ source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "8f1fb85efa936c42c6d5fc28d2629bb51e4b2f4b8a5211e297d599cc5a093792" dependencies = [ "openssl-probe", - "rustls-pemfile", + "rustls-pemfile 2.0.0", "rustls-pki-types", "schannel", "security-framework", ] +[[package]] +name = "rustls-pemfile" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c" +dependencies = [ + "base64 0.21.5", +] + [[package]] name = "rustls-pemfile" version = "2.0.0" @@ -5423,10 +5951,10 @@ dependencies = [ "jni", "log", "once_cell", - "rustls", - "rustls-native-certs", + "rustls 0.23.10", + "rustls-native-certs 0.7.0", "rustls-platform-verifier-android", - "rustls-webpki", + "rustls-webpki 0.102.4", "security-framework", "security-framework-sys", "webpki-roots", @@ -5439,6 +5967,16 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "84e217e7fdc8466b5b35d30f8c0a30febd29173df4a3a0c2115d306b9c4117ad" +[[package]] +name = "rustls-webpki" +version = "0.101.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765" +dependencies = [ + "ring", + "untrusted", +] + [[package]] name = "rustls-webpki" version = "0.102.4" @@ -5457,6 +5995,17 @@ version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7ffc183a10b4478d04cbbbfc96d0873219d962dd5accaff2ffbd4ceb7df837f4" +[[package]] +name = "ruzstd" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "58c4eb8a81997cf040a091d1f7e1938aeab6749d3a0dfa73af43cdc32393483d" +dependencies = [ + "byteorder", + "derive_more 0.99.18", + "twox-hash", +] + [[package]] name = "ryu" version = "1.0.15" @@ -5472,6 +6021,132 @@ dependencies = [ "winapi-util", ] +[[package]] +name = 
"scale-bits" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "036575c29af9b6e4866ffb7fa055dbf623fe7a9cc159b33786de6013a6969d89" +dependencies = [ + "parity-scale-codec", + "scale-info", + "serde", +] + +[[package]] +name = "scale-decode" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7caaf753f8ed1ab4752c6afb20174f03598c664724e0e32628e161c21000ff76" +dependencies = [ + "derive_more 0.99.18", + "parity-scale-codec", + "primitive-types", + "scale-bits", + "scale-decode-derive", + "scale-info", + "smallvec", +] + +[[package]] +name = "scale-decode-derive" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3475108a1b62c7efd1b5c65974f30109a598b2f45f23c9ae030acb9686966db" +dependencies = [ + "darling 0.14.4", + "proc-macro-crate 1.3.1", + "proc-macro2 1.0.86", + "quote 1.0.36", + "syn 1.0.109", +] + +[[package]] +name = "scale-encode" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d70cb4b29360105483fac1ed567ff95d65224a14dd275b6303ed0a654c78de5" +dependencies = [ + "derive_more 0.99.18", + "parity-scale-codec", + "primitive-types", + "scale-bits", + "scale-encode-derive", + "scale-info", + "smallvec", +] + +[[package]] +name = "scale-encode-derive" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "995491f110efdc6bea96d6a746140e32bfceb4ea47510750a5467295a4707a25" +dependencies = [ + "darling 0.14.4", + "proc-macro-crate 1.3.1", + "proc-macro2 1.0.86", + "quote 1.0.36", + "syn 1.0.109", +] + +[[package]] +name = "scale-info" +version = "2.11.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eca070c12893629e2cc820a9761bedf6ce1dcddc9852984d1dc734b8bd9bd024" +dependencies = [ + "bitvec", + "cfg-if", + "derive_more 0.99.18", + "parity-scale-codec", + 
"scale-info-derive", + "serde", +] + +[[package]] +name = "scale-info-derive" +version = "2.11.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2d35494501194174bda522a32605929eefc9ecf7e0a326c26db1fdd85881eb62" +dependencies = [ + "proc-macro-crate 3.1.0", + "proc-macro2 1.0.86", + "quote 1.0.36", + "syn 1.0.109", +] + +[[package]] +name = "scale-typegen" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00860983481ac590ac87972062909bef0d6a658013b592ccc0f2feb272feab11" +dependencies = [ + "proc-macro2 1.0.86", + "quote 1.0.36", + "scale-info", + "syn 2.0.72", + "thiserror", +] + +[[package]] +name = "scale-value" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "58223c7691bf0bd46b43c9aea6f0472d1067f378d574180232358d7c6e0a8089" +dependencies = [ + "base58", + "blake2 0.10.6", + "derive_more 0.99.18", + "either", + "frame-metadata 15.1.0", + "parity-scale-codec", + "scale-bits", + "scale-decode", + "scale-encode", + "scale-info", + "serde", + "yap", +] + [[package]] name = "schannel" version = "0.1.22" @@ -5481,12 +6156,41 @@ dependencies = [ "windows-sys 0.48.0", ] +[[package]] +name = "schnorrkel" +version = "0.11.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8de18f6d8ba0aad7045f5feae07ec29899c1112584a38509a84ad7b04451eaa0" +dependencies = [ + "aead", + "arrayref", + "arrayvec 0.7.4", + "curve25519-dalek", + "getrandom_or_panic", + "merlin", + "rand_core 0.6.4", + "serde_bytes", + "sha2 0.10.8", + "subtle", + "zeroize", +] + [[package]] name = "scopeguard" version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" +[[package]] +name = "sct" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" +dependencies = [ + "ring", + "untrusted", +] + [[package]] name = "seahash" version = "4.1.0" @@ -5573,6 +6277,20 @@ dependencies = [ "libc", ] +[[package]] +name = "selector_generator" +version = "0.1.0" +dependencies = [ + "anyhow", + "clap 4.4.6", + "ethabi", + "glob", + "hex", + "serde", + "serde_json", + "tokio", +] + [[package]] name = "semver" version = "1.0.23" @@ -5704,9 +6422,9 @@ checksum = "a3f0bf26fd526d2a95683cd0f87bf103b8539e2ca1ef48ce002d67aad59aa0b4" [[package]] name = "serde" -version = "1.0.208" +version = "1.0.210" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cff085d2cb684faa248efb494c39b68e522822ac0de72ccf08109abde717cfb2" +checksum = "c8e3592472072e6e22e0a54d5904d9febf8508f65fb8552499a1abc7d1078c3a" dependencies = [ "serde_derive", ] @@ -5721,11 +6439,20 @@ dependencies = [ "serde", ] +[[package]] +name = "serde_bytes" +version = "0.11.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "387cc504cb06bb40a96c8e04e951fe01854cf6bc921053c954e4a606d9675c6a" +dependencies = [ + "serde", +] + [[package]] name = "serde_derive" -version = "1.0.208" +version = "1.0.210" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24008e81ff7613ed8e5ba0cfaf24e2c2f1e5b8a0495711e44fcd4882fca62bcf" +checksum = "243902eda00fad750862fc144cea25caca5e20d615af0a81bee94ca738f1df1f" dependencies = [ "proc-macro2 1.0.86", "quote 1.0.36", @@ -5734,11 +6461,12 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.107" +version = "1.0.128" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b420ce6e3d8bd882e9b243c6eed35dbc9a6110c9769e74b584e0d68d1f20c65" +checksum = "6ff5456707a1de34e7e37f2a6fd3d3f808c318259cbd01ab6377795054b483d8" dependencies = [ "itoa", + "memchr", "ryu", "serde", ] @@ -5783,7 +6511,7 @@ version = "1.5.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "e182d6ec6f05393cc0e5ed1bf81ad6db3a8feedf8ee515ecdd369809bcce8082" dependencies = [ - "darling", + "darling 0.13.4", "proc-macro2 1.0.86", "quote 1.0.36", "syn 1.0.109", @@ -5802,13 +6530,26 @@ dependencies = [ "unsafe-libyaml", ] +[[package]] +name = "sha-1" +version = "0.9.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "99cd6713db3cf16b6c84e06321e049a9b9f699826e16096d23bbcc44d15d51a6" +dependencies = [ + "block-buffer 0.9.0", + "cfg-if", + "cpufeatures", + "digest 0.9.0", + "opaque-debug", +] + [[package]] name = "sha1" version = "0.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "cpufeatures", "digest 0.10.7", ] @@ -5820,7 +6561,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4d58a1e1bf39749807d89cf2d98ac2dfa0ff1cb3faa38fbb64dd88ac8013d800" dependencies = [ "block-buffer 0.9.0", - "cfg-if 1.0.0", + "cfg-if", "cpufeatures", "digest 0.9.0", "opaque-debug", @@ -5832,7 +6573,7 @@ version = "0.10.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "cpufeatures", "digest 0.10.7", ] @@ -5843,7 +6584,7 @@ version = "0.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "eca2daa77078f4ddff27e75c4bf59e4c2697525f56dbb3c842d34a5d1f2b04a2" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "cpufeatures", "digest 0.10.7", ] @@ -5948,6 +6689,12 @@ dependencies = [ "time", ] +[[package]] +name = "siphasher" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56199f7ddabf13fe5074ce809e7d3f42b42ae711800501b5b16ea82ad029c39d" + [[package]] 
name = "sized-chunks" version = "0.6.5" @@ -5991,14 +6738,123 @@ dependencies = [ "serde", ] +[[package]] +name = "smol" +version = "2.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a33bd3e260892199c3ccfc487c88b2da2265080acb316cd920da72fdfd7c599f" +dependencies = [ + "async-channel", + "async-executor", + "async-fs", + "async-io", + "async-lock", + "async-net", + "async-process", + "blocking", + "futures-lite", +] + +[[package]] +name = "smoldot" +version = "0.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6d1eaa97d77be4d026a1e7ffad1bb3b78448763b357ea6f8188d3e6f736a9b9" +dependencies = [ + "arrayvec 0.7.4", + "async-lock", + "atomic-take", + "base64 0.21.5", + "bip39", + "blake2-rfc", + "bs58", + "chacha20", + "crossbeam-queue", + "derive_more 0.99.18", + "ed25519-zebra", + "either", + "event-listener 4.0.3", + "fnv", + "futures-lite", + "futures-util", + "hashbrown 0.14.5", + "hex", + "hmac 0.12.1", + "itertools 0.12.0", + "libm", + "libsecp256k1", + "merlin", + "no-std-net", + "nom", + "num-bigint 0.4.6", + "num-rational", + "num-traits", + "pbkdf2", + "pin-project", + "poly1305", + "rand 0.8.5", + "rand_chacha", + "ruzstd", + "schnorrkel", + "serde", + "serde_json", + "sha2 0.10.8", + "sha3 0.10.8", + "siphasher", + "slab", + "smallvec", + "soketto 0.7.1", + "twox-hash", + "wasmi", + "x25519-dalek", + "zeroize", +] + +[[package]] +name = "smoldot-light" +version = "0.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5496f2d116b7019a526b1039ec2247dd172b8670633b1a64a614c9ea12c9d8c7" +dependencies = [ + "async-channel", + "async-lock", + "base64 0.21.5", + "blake2-rfc", + "derive_more 0.99.18", + "either", + "event-listener 4.0.3", + "fnv", + "futures-channel", + "futures-lite", + "futures-util", + "hashbrown 0.14.5", + "hex", + "itertools 0.12.0", + "log", + "lru", + "no-std-net", + "parking_lot", + "pin-project", + "rand 0.8.5", + 
"rand_chacha", + "serde", + "serde_json", + "siphasher", + "slab", + "smol", + "smoldot", + "zeroize", +] + [[package]] name = "snapshots_creator" version = "0.1.0" dependencies = [ "anyhow", - "futures 0.3.28", + "futures 0.3.30", "rand 0.8.5", "structopt", + "test-casing", "tokio", "tracing", "vise", @@ -6037,6 +6893,21 @@ dependencies = [ "windows-sys 0.48.0", ] +[[package]] +name = "soketto" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41d1c5305e39e09653383c2c7244f2f78b3bcae37cf50c64cb4789c9f5096ec2" +dependencies = [ + "base64 0.13.1", + "bytes", + "futures 0.3.30", + "httparse", + "log", + "rand 0.8.5", + "sha-1", +] + [[package]] name = "soketto" version = "0.8.0" @@ -6045,7 +6916,7 @@ checksum = "37468c595637c10857701c990f93a40ce0e357cedb0953d1c26c8d8027f9bb53" dependencies = [ "base64 0.22.1", "bytes", - "futures 0.3.28", + "futures 0.3.30", "http 1.1.0", "httparse", "log", @@ -6053,6 +6924,20 @@ dependencies = [ "sha1", ] +[[package]] +name = "sp-core-hashing" +version = "15.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e0f4990add7b2cefdeca883c0efa99bb4d912cb2196120e1500c0cc099553b0" +dependencies = [ + "blake2b_simd", + "byteorder", + "digest 0.10.7", + "sha2 0.10.8", + "sha3 0.10.8", + "twox-hash", +] + [[package]] name = "spin" version = "0.9.8" @@ -6124,9 +7009,9 @@ dependencies = [ "bytes", "chrono", "crc", - "crossbeam-queue 0.3.11", + "crossbeam-queue", "either", - "event-listener", + "event-listener 5.3.1", "futures-channel", "futures-core", "futures-intrusive", @@ -6219,7 +7104,7 @@ dependencies = [ "generic-array", "hex", "hkdf", - "hmac", + "hmac 0.12.1", "itoa", "log", "md-5", @@ -6261,7 +7146,7 @@ dependencies = [ "futures-util", "hex", "hkdf", - "hmac", + "hmac 0.12.1", "home", "ipnetwork", "itoa", @@ -6342,6 +7227,12 @@ version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" +[[package]] +name = "strsim" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" + [[package]] name = "structopt" version = "0.3.26" @@ -6354,45 +7245,168 @@ dependencies = [ ] [[package]] -name = "structopt-derive" -version = "0.4.18" +name = "structopt-derive" +version = "0.4.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dcb5ae327f9cc13b68763b5749770cb9e048a99bd9dfdfa58d0cf05d5f64afe0" +dependencies = [ + "heck 0.3.3", + "proc-macro-error", + "proc-macro2 1.0.86", + "quote 1.0.36", + "syn 1.0.109", +] + +[[package]] +name = "strum" +version = "0.26.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fec0f0aef304996cf250b31b5a10dee7980c85da9d759361292b8bca5a18f06" +dependencies = [ + "strum_macros", +] + +[[package]] +name = "strum_macros" +version = "0.26.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c6bee85a5a24955dc440386795aa378cd9cf82acd5f764469152d2270e581be" +dependencies = [ + "heck 0.5.0", + "proc-macro2 1.0.86", + "quote 1.0.36", + "rustversion", + "syn 2.0.72", +] + +[[package]] +name = "subtle" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc" + +[[package]] +name = "subxt" +version = "0.34.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b3323d5c27898b139d043dc1ee971f602f937b99354ee33ee933bd90e0009fbd" +dependencies = [ + "async-trait", + "base58", + "blake2 0.10.6", + "derivative", + "either", + "frame-metadata 16.0.0", + "futures 0.3.30", + "hex", + "impl-serde", + "instant", + "jsonrpsee 0.21.0", + "parity-scale-codec", + "primitive-types", + "scale-bits", + "scale-decode", + 
"scale-encode", + "scale-info", + "scale-value", + "serde", + "serde_json", + "sp-core-hashing", + "subxt-lightclient", + "subxt-macro", + "subxt-metadata", + "thiserror", + "tokio-util", + "tracing", + "url", +] + +[[package]] +name = "subxt-codegen" +version = "0.34.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dcb5ae327f9cc13b68763b5749770cb9e048a99bd9dfdfa58d0cf05d5f64afe0" +checksum = "2d0e58c3f88651cff26aa52bae0a0a85f806a2e923a20eb438c16474990743ea" dependencies = [ - "heck 0.3.3", - "proc-macro-error", + "frame-metadata 16.0.0", + "heck 0.4.1", + "hex", + "jsonrpsee 0.21.0", + "parity-scale-codec", "proc-macro2 1.0.86", "quote 1.0.36", - "syn 1.0.109", + "scale-info", + "scale-typegen", + "subxt-metadata", + "syn 2.0.72", + "thiserror", + "tokio", ] [[package]] -name = "strum" -version = "0.26.3" +name = "subxt-lightclient" +version = "0.34.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8fec0f0aef304996cf250b31b5a10dee7980c85da9d759361292b8bca5a18f06" +checksum = "ecec7066ba7bc0c3608fcd1d0c7d9584390990cd06095b6ae4f114f74c4b8550" dependencies = [ - "strum_macros", + "futures 0.3.30", + "futures-util", + "serde", + "serde_json", + "smoldot-light", + "thiserror", + "tokio", + "tokio-stream", + "tracing", ] [[package]] -name = "strum_macros" -version = "0.26.4" +name = "subxt-macro" +version = "0.34.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c6bee85a5a24955dc440386795aa378cd9cf82acd5f764469152d2270e581be" +checksum = "365251668613323064803427af8c7c7bc366cd8b28e33639640757669dafebd5" dependencies = [ - "heck 0.5.0", - "proc-macro2 1.0.86", + "darling 0.20.10", + "parity-scale-codec", + "proc-macro-error", "quote 1.0.36", - "rustversion", + "scale-typegen", + "subxt-codegen", "syn 2.0.72", ] [[package]] -name = "subtle" -version = "2.5.0" +name = "subxt-metadata" +version = "0.34.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc" +checksum = "c02aca8d39a1f6c55fff3a8fd81557d30a610fedc1cef03f889a81bc0f8f0b52" +dependencies = [ + "frame-metadata 16.0.0", + "parity-scale-codec", + "scale-info", + "sp-core-hashing", + "thiserror", +] + +[[package]] +name = "subxt-signer" +version = "0.34.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f88a76a5d114bfae2f6f9cc1491c46173ecc3fb2b9e53948eb3c8d43d4b43ab5" +dependencies = [ + "bip39", + "hex", + "hmac 0.12.1", + "parity-scale-codec", + "pbkdf2", + "regex", + "schnorrkel", + "secrecy", + "sha2 0.10.8", + "sp-core-hashing", + "subxt", + "thiserror", + "zeroize", +] [[package]] name = "syn" @@ -6504,7 +7518,7 @@ version = "3.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "85b77fafb263dd9d05cbeac119526425676db3784113aa9295c88498cbf8bff1" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "fastrand", "rustix", "windows-sys 0.52.0", @@ -6577,18 +7591,18 @@ checksum = "222a222a5bfe1bba4a77b45ec488a741b3cb8872e5e499451fd7d0129c9c7c3d" [[package]] name = "thiserror" -version = "1.0.50" +version = "1.0.63" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9a7210f5c9a7156bb50aa36aed4c95afb51df0df00713949448cf9e97d382d2" +checksum = "c0342370b38b6a11b6cc11d6a805569958d54cfa061a29969c3b5ce2ea405724" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.50" +version = "1.0.63" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "266b2e40bc00e5a6c09c3584011e08b06f123c00362c92b975ba9843aaaa14b8" +checksum = "a4558b58466b9ad7ca0f102865eccc95938dca1a74a856f2b57b6629050da261" dependencies = [ "proc-macro2 1.0.86", "quote 1.0.36", @@ -6601,7 +7615,7 @@ version = "1.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" 
checksum = "3fdd6f064ccff2d6567adcb3873ca630700f00b5ad3f060c25b5dcfd9a4ce152" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "once_cell", ] @@ -6718,7 +7732,7 @@ dependencies = [ "pin-project-lite", "thiserror", "tokio", - "tokio-rustls", + "tokio-rustls 0.26.0", ] [[package]] @@ -6760,13 +7774,34 @@ dependencies = [ "tokio", ] +[[package]] +name = "tokio-rustls" +version = "0.24.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" +dependencies = [ + "rustls 0.21.12", + "tokio", +] + +[[package]] +name = "tokio-rustls" +version = "0.25.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "775e0c0f0adb3a2f22a00c4745d728b479985fc15ee7ca6a2608388c5569860f" +dependencies = [ + "rustls 0.22.4", + "rustls-pki-types", + "tokio", +] + [[package]] name = "tokio-rustls" version = "0.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0c7bc40d0e5a97695bb96e27995cd3a08538541b0a846f65bba7a359f36700d4" dependencies = [ - "rustls", + "rustls 0.23.10", "rustls-pki-types", "tokio", ] @@ -6785,9 +7820,9 @@ dependencies = [ [[package]] name = "tokio-util" -version = "0.7.9" +version = "0.7.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d68074620f57a0b21594d9735eb2e98ab38b17f80d3fcb189fca266771ca60d" +checksum = "61e7c3654c13bcd040d4a03abee2c75b1d14a37b423cf5a813ceae1cc903ec6a" dependencies = [ "bytes", "futures-core", @@ -6795,7 +7830,6 @@ dependencies = [ "futures-sink", "pin-project-lite", "tokio", - "tracing", ] [[package]] @@ -7048,6 +8082,23 @@ dependencies = [ "termcolor", ] +[[package]] +name = "twox-hash" +version = "1.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97fee6b57c6a41524a810daee9286c02d7752c4253064d0b05472833a438f675" +dependencies = [ + "cfg-if", + "digest 0.10.7", + "static_assertions", +] + 
+[[package]] +name = "typemap_rev" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74b08b0c1257381af16a5c3605254d529d3e7e109f3c62befc5d168968192998" + [[package]] name = "typenum" version = "1.17.0" @@ -7188,9 +8239,9 @@ dependencies = [ [[package]] name = "url" -version = "2.4.1" +version = "2.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "143b538f18257fac9cad154828a57c6bf5157e1aa604d4816b5995bf6de87ae5" +checksum = "22784dbdf76fdde8af1aeda5622b546b422b6fc585325248a2bf9f5e41e94d6c" dependencies = [ "form_urlencoded", "idna", @@ -7312,18 +8363,6 @@ dependencies = [ "zksync_vlog", ] -[[package]] -name = "vm2" -version = "0.1.0" -source = "git+https://github.com/matter-labs/vm2.git?rev=4ef15d46410ffc11744771a3a6c7c09dd9470c90#4ef15d46410ffc11744771a3a6c7c09dd9470c90" -dependencies = [ - "enum_dispatch", - "eravm-stable-interface", - "primitive-types", - "zk_evm_abstractions 0.150.4", - "zkevm_opcode_defs 0.150.4", -] - [[package]] name = "walkdir" version = "2.4.0" @@ -7367,7 +8406,7 @@ version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7706a72ab36d8cb1f80ffbf0e071533974a60d0a308d01a5d0375bf60499a342" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "wasm-bindgen-macro", ] @@ -7392,7 +8431,7 @@ version = "0.4.37" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c02dbc21516f9f1f04f187958890d7e6026df8d16540b7ad9492bc34a67cea03" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "js-sys", "wasm-bindgen", "web-sys", @@ -7440,6 +8479,46 @@ dependencies = [ "web-sys", ] +[[package]] +name = "wasmi" +version = "0.31.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77a8281d1d660cdf54c76a3efa9ddd0c270cada1383a995db3ccb43d166456c7" +dependencies = [ + "smallvec", + "spin", + "wasmi_arena", + "wasmi_core", + "wasmparser-nostd", +] + +[[package]] 
+name = "wasmi_arena" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "104a7f73be44570cac297b3035d76b169d6599637631cf37a1703326a0727073" + +[[package]] +name = "wasmi_core" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dcf1a7db34bff95b85c261002720c00c3a6168256dcb93041d3fa2054d19856a" +dependencies = [ + "downcast-rs", + "libm", + "num-traits", + "paste", +] + +[[package]] +name = "wasmparser-nostd" +version = "0.100.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d5a015fe95f3504a94bb1462c717aae75253e39b9dd6c3fb1062c934535c64aa" +dependencies = [ + "indexmap-nostd", +] + [[package]] name = "web-sys" version = "0.3.64" @@ -7558,6 +8637,15 @@ dependencies = [ "windows-targets 0.52.6", ] +[[package]] +name = "windows-sys" +version = "0.59.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +dependencies = [ + "windows-targets 0.52.6", +] + [[package]] name = "windows-targets" version = "0.42.2" @@ -7751,7 +8839,7 @@ version = "0.50.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "windows-sys 0.48.0", ] @@ -7761,7 +8849,7 @@ version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a277a57398d4bfa075df44f501a17cfdf8542d224f0d36095a2adc7aee4ef0a5" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "windows-sys 0.48.0", ] @@ -7774,6 +8862,18 @@ dependencies = [ "tap", ] +[[package]] +name = "x25519-dalek" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7e468321c81fb07fa7f4c636c3972b9100f0346e5b6a9f2bd0603a52f7ed277" +dependencies = [ + "curve25519-dalek", + 
"rand_core 0.6.4", + "serde", + "zeroize", +] + [[package]] name = "yaml-rust" version = "0.4.5" @@ -7789,6 +8889,12 @@ version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec" +[[package]] +name = "yap" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff4524214bc4629eba08d78ceb1d6507070cc0bcbbed23af74e19e6e924a24cf" + [[package]] name = "zerocopy" version = "0.7.31" @@ -7897,9 +9003,9 @@ dependencies = [ [[package]] name = "zk_evm" -version = "0.150.4" +version = "0.150.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2dbb0ed38d61fbd04bd7575755924d1303e129c04c909abba7f5bfcc6260bcf" +checksum = "5a6e69931f24db5cf333b714721e8d80ff88bfdb7da8c3dc7882612ffddb8d27" dependencies = [ "anyhow", "lazy_static", @@ -7907,7 +9013,7 @@ dependencies = [ "serde", "serde_json", "static_assertions", - "zk_evm_abstractions 0.150.4", + "zk_evm_abstractions 0.150.5", ] [[package]] @@ -7938,15 +9044,15 @@ dependencies = [ [[package]] name = "zk_evm_abstractions" -version = "0.150.4" +version = "0.150.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31460aacfe65b39ac484a2a2e0bbb02baf141f65264bf48e1e4f59ab375fe933" +checksum = "93d6b0720261ab55490fe3a96e96de30d5d7b277940b52ea7f52dbf564eb1748" dependencies = [ "anyhow", "num_enum 0.6.1", "serde", "static_assertions", - "zkevm_opcode_defs 0.150.4", + "zkevm_opcode_defs 0.150.5", ] [[package]] @@ -7995,13 +9101,12 @@ dependencies = [ [[package]] name = "zkevm_circuits" -version = "0.150.4" +version = "0.150.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "abdfaa95dfe0878fda219dd17a6cc8c28711e2067785910c0e06d3ffdca78629" +checksum = "784fa7cfb51e17c5ced112bca43da30b3468b2347b7af0427ad9638759fb140e" dependencies = [ "arrayvec 0.7.4", "boojum", - 
"cs_derive", "derivative", "hex", "itertools 0.10.5", @@ -8010,7 +9115,8 @@ dependencies = [ "seq-macro", "serde", "smallvec", - "zkevm_opcode_defs 0.150.4", + "zkevm_opcode_defs 0.150.5", + "zksync_cs_derive", ] [[package]] @@ -8057,9 +9163,9 @@ dependencies = [ [[package]] name = "zkevm_opcode_defs" -version = "0.150.4" +version = "0.150.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb7c5c7b4481a646f8696b08cee64a8dec097509a6378d18242f81022f327f1e" +checksum = "79055eae1b6c1ab80793ed9d77d2964c9c896afa4b5dfed278cf58cd10acfe8f" dependencies = [ "bitflags 2.6.0", "blake2 0.10.6", @@ -8114,6 +9220,29 @@ dependencies = [ "url", ] +[[package]] +name = "zksync_bellman" +version = "0.30.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ffa03efe9bdb137a4b36b97d1a74237e18c9ae42b755163d903a9d48c1a5d80" +dependencies = [ + "arrayvec 0.7.4", + "bit-vec", + "blake2s_simd", + "byteorder", + "cfg-if", + "crossbeam", + "futures 0.3.30", + "hex", + "lazy_static", + "num_cpus", + "rand 0.4.6", + "serde", + "smallvec", + "tiny-keccak 1.5.0", + "zksync_pairing", +] + [[package]] name = "zksync_block_reverter" version = "0.1.0" @@ -8121,7 +9250,7 @@ dependencies = [ "anyhow", "assert_matches", "async-trait", - "futures 0.3.28", + "futures 0.3.30", "serde", "tempfile", "test-casing", @@ -8157,10 +9286,10 @@ name = "zksync_commitment_generator" version = "0.1.0" dependencies = [ "anyhow", - "circuit_sequencer_api 0.140.0", - "circuit_sequencer_api 0.141.1", - "circuit_sequencer_api 0.150.4", - "futures 0.3.28", + "circuit_sequencer_api 0.140.3", + "circuit_sequencer_api 0.141.2", + "circuit_sequencer_api 0.150.5", + "futures 0.3.30", "itertools 0.10.5", "num_cpus", "rand 0.8.5", @@ -8171,7 +9300,7 @@ dependencies = [ "vise", "zk_evm 0.133.0", "zk_evm 0.141.0", - "zk_evm 0.150.4", + "zk_evm 0.150.5", "zksync_contracts", "zksync_dal", "zksync_eth_client", @@ -8187,9 +9316,9 @@ dependencies = [ [[package]] 
name = "zksync_concurrency" -version = "0.1.0-rc.12" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a49ad68bfaf6fb8542c68894b68b28be31514786549855aaa8a46b36defbb100" +checksum = "c1c8cf6c689ab5922b52d81b775cd2d9cffbfc8fb8da65985e11b06546dfb3bf" dependencies = [ "anyhow", "once_cell", @@ -8224,9 +9353,9 @@ dependencies = [ [[package]] name = "zksync_consensus_bft" -version = "0.1.0-rc.12" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1dcab481683131c093271c19602bd495b1d682f7a94f764f2227111a0a104f0" +checksum = "45c409ae915056cf9cadd9304dbc8718fa38edfcb346d06e5b3582dcd2489ef9" dependencies = [ "anyhow", "async-trait", @@ -8246,9 +9375,9 @@ dependencies = [ [[package]] name = "zksync_consensus_crypto" -version = "0.1.0-rc.12" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ace39bdf50b8421c4d546381fe1ecc5212f953ce61cf93d4fa69172078dbe4af" +checksum = "cc7baced4e811015038322dad10239f2d631d9e339e8d6b7b6e6b146bee30f41" dependencies = [ "anyhow", "blst", @@ -8259,7 +9388,6 @@ dependencies = [ "k256 0.13.3", "num-bigint 0.4.6", "num-traits", - "pairing_ce", "rand 0.4.6", "rand 0.8.5", "sha3 0.10.8", @@ -8270,9 +9398,9 @@ dependencies = [ [[package]] name = "zksync_consensus_executor" -version = "0.1.0-rc.12" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "216e3d9f3df8c119e037e44c41db12fa6448dafbf1eaf5015d13b22400866980" +checksum = "6b018b8a76fc2cbecb51683ce97532501c45d44cbc8bb856d1956e5998259335" dependencies = [ "anyhow", "async-trait", @@ -8292,9 +9420,9 @@ dependencies = [ [[package]] name = "zksync_consensus_network" -version = "0.1.0-rc.12" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19d7dd832b1bbcd0a2b977b2d85986437105fd5e1e82bd4becb2e6a9db112655" +checksum = 
"f5bb2988e41af3083cebfc11f47f2615adae8d829bf9237aa084dede9629a687" dependencies = [ "anyhow", "async-trait", @@ -8314,7 +9442,7 @@ dependencies = [ "thiserror", "tls-listener", "tokio", - "tokio-rustls", + "tokio-rustls 0.26.0", "tracing", "vise", "zksync_concurrency", @@ -8328,9 +9456,9 @@ dependencies = [ [[package]] name = "zksync_consensus_roles" -version = "0.1.0-rc.12" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06277266e31efdc1465f6a27ce96c7435392a270978a91956b8a848732df2cfa" +checksum = "0aab4ddf62f6001903c5fe9f65afb1bdc42464928c9d1c6ce52e4d7e9944f5dc" dependencies = [ "anyhow", "bit-vec", @@ -8350,9 +9478,9 @@ dependencies = [ [[package]] name = "zksync_consensus_storage" -version = "0.1.0-rc.12" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9099b2295f550176d824b5287f2f31b7739c4d10247faec1132f1c6e9d18059c" +checksum = "7b9dbcb923fa201af03f49f70c11a923b416915d2ddf8b2de3a2e861f22898a4" dependencies = [ "anyhow", "async-trait", @@ -8370,9 +9498,9 @@ dependencies = [ [[package]] name = "zksync_consensus_utils" -version = "0.1.0-rc.12" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d624f55e2449f43b2c85588b5dd2a28b3c5ea629effc89df76e3254f8d9d2fb" +checksum = "29e69dffc0fbc7c096548c997f5ca157a490b34b3d49fd524fa3d51840f7fb22" dependencies = [ "anyhow", "rand 0.8.5", @@ -8428,7 +9556,7 @@ version = "0.1.0" dependencies = [ "anyhow", "ctrlc", - "futures 0.3.28", + "futures 0.3.30", "structopt", "tokio", "tracing", @@ -8514,6 +9642,18 @@ dependencies = [ "zksync_utils", ] +[[package]] +name = "zksync_cs_derive" +version = "0.30.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5939e2df4288c263c706ff18ac718e984149223ad4289d6d957d767dcfc04c81" +dependencies = [ + "proc-macro-error", + "proc-macro2 1.0.86", + "quote 1.0.36", + "syn 1.0.109", +] + 
[[package]] name = "zksync_da_client" version = "0.1.0" @@ -8523,13 +9663,41 @@ dependencies = [ "serde", ] +[[package]] +name = "zksync_da_clients" +version = "0.1.0" +dependencies = [ + "anyhow", + "async-trait", + "base58", + "blake2 0.10.6", + "blake2b_simd", + "flate2", + "futures 0.3.30", + "hex", + "jsonrpsee 0.23.2", + "parity-scale-codec", + "scale-encode", + "serde", + "serde_json", + "subxt-metadata", + "subxt-signer", + "tokio", + "tracing", + "zksync_config", + "zksync_da_client", + "zksync_env_config", + "zksync_object_store", + "zksync_types", +] + [[package]] name = "zksync_da_dispatcher" version = "0.1.0" dependencies = [ "anyhow", "chrono", - "futures 0.3.28", + "futures 0.3.30", "rand 0.8.5", "tokio", "tracing", @@ -8593,23 +9761,6 @@ dependencies = [ "zksync_basic_types", ] -[[package]] -name = "zksync_default_da_clients" -version = "0.1.0" -dependencies = [ - "anyhow", - "async-trait", - "flate2", - "serde", - "tracing", - "zksync_config", - "zksync_da_client", - "zksync_env_config", - "zksync_node_framework", - "zksync_object_store", - "zksync_types", -] - [[package]] name = "zksync_env_config" version = "0.1.0" @@ -8629,7 +9780,7 @@ dependencies = [ "assert_matches", "async-trait", "hex", - "jsonrpsee", + "jsonrpsee 0.23.2", "pretty_assertions", "rlp", "serde_json", @@ -8689,6 +9840,7 @@ name = "zksync_eth_watch" version = "0.1.0" dependencies = [ "anyhow", + "async-recursion", "async-trait", "thiserror", "tokio", @@ -8712,7 +9864,7 @@ dependencies = [ "async-trait", "clap 4.4.6", "envy", - "futures 0.3.28", + "futures 0.3.30", "rustc_version", "serde", "serde_json", @@ -8795,13 +9947,41 @@ dependencies = [ "zksync_prover_interface", ] +[[package]] +name = "zksync_ff" +version = "0.30.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9524b06780b5e164e84b38840c7c428c739f051f35af6efc4d1285f629ceb88e" +dependencies = [ + "byteorder", + "hex", + "rand 0.4.6", + "serde", + "zksync_ff_derive", +] + 
+[[package]] +name = "zksync_ff_derive" +version = "0.30.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f91e58e75d65877f09f83bc3dca8f054847ae7ec4f3e64bfa610a557edd8e8e" +dependencies = [ + "num-bigint 0.4.6", + "num-integer", + "num-traits", + "proc-macro2 1.0.86", + "quote 1.0.36", + "serde", + "syn 1.0.109", +] + [[package]] name = "zksync_health_check" version = "0.1.0" dependencies = [ "assert_matches", "async-trait", - "futures 0.3.28", + "futures 0.3.30", "serde", "serde_json", "thiserror", @@ -8827,9 +10007,9 @@ dependencies = [ [[package]] name = "zksync_kzg" -version = "0.150.4" +version = "0.150.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9949f48ea1a9f9a0e73242d4d1e87e681095181827486b3fcc2cf93e5aa03280" +checksum = "edb8a9c76c172a6d639855ee342b9a670e3ba472f5ae302f771b1c3ee777dc88" dependencies = [ "boojum", "derivative", @@ -8839,7 +10019,7 @@ dependencies = [ "serde", "serde_json", "serde_with", - "zkevm_circuits 0.150.4", + "zkevm_circuits 0.150.5", ] [[package]] @@ -8917,7 +10097,7 @@ dependencies = [ "assert_matches", "async-trait", "axum", - "futures 0.3.28", + "futures 0.3.30", "itertools 0.10.5", "once_cell", "reqwest 0.12.5", @@ -8960,11 +10140,11 @@ version = "0.1.0" dependencies = [ "anyhow", "assert_matches", - "circuit_sequencer_api 0.133.0", - "circuit_sequencer_api 0.140.0", - "circuit_sequencer_api 0.141.1", - "circuit_sequencer_api 0.142.0", - "circuit_sequencer_api 0.150.4", + "circuit_sequencer_api 0.133.1", + "circuit_sequencer_api 0.140.3", + "circuit_sequencer_api 0.141.2", + "circuit_sequencer_api 0.142.2", + "circuit_sequencer_api 0.150.5", "ethabi", "hex", "itertools 0.10.5", @@ -8974,12 +10154,11 @@ dependencies = [ "tokio", "tracing", "vise", - "vm2", "zk_evm 0.131.0-rc.2", "zk_evm 0.133.0", "zk_evm 0.140.0", "zk_evm 0.141.0", - "zk_evm 0.150.4", + "zk_evm 0.150.5", "zksync_contracts", "zksync_eth_signer", "zksync_state", @@ -8987,6 +10166,7 
@@ dependencies = [ "zksync_test_account", "zksync_types", "zksync_utils", + "zksync_vm2", "zksync_vm_interface", ] @@ -8999,7 +10179,7 @@ dependencies = [ "async-trait", "axum", "chrono", - "futures 0.3.28", + "futures 0.3.30", "governor", "hex", "http 1.1.0", @@ -9135,7 +10315,7 @@ dependencies = [ "assert_matches", "async-trait", "ctrlc", - "futures 0.3.28", + "futures 0.3.30", "pin-project-lite", "semver", "thiserror", @@ -9152,6 +10332,7 @@ dependencies = [ "zksync_contract_verification_server", "zksync_contracts", "zksync_da_client", + "zksync_da_clients", "zksync_da_dispatcher", "zksync_dal", "zksync_db_connection", @@ -9247,7 +10428,7 @@ dependencies = [ "assert_matches", "async-trait", "chrono", - "futures 0.3.28", + "futures 0.3.30", "once_cell", "serde", "serde_json", @@ -9312,6 +10493,19 @@ dependencies = [ "zksync_types", ] +[[package]] +name = "zksync_pairing" +version = "0.30.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8412ae5574472fa567a097e183f9a01974b99dd0b5da3bfa1bbe6c57c579aa2" +dependencies = [ + "byteorder", + "cfg-if", + "rand 0.4.6", + "serde", + "zksync_ff", +] + [[package]] name = "zksync_proof_data_handler" version = "0.1.0" @@ -9337,9 +10531,9 @@ dependencies = [ [[package]] name = "zksync_protobuf" -version = "0.1.0-rc.12" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d26fb2beb3aeafb5e9babf1acf6494662cc7157b893fa248bd151494f931d07f" +checksum = "df5467dfe2f845ca1fd6ceec623bbd32187589793d3c4023dcd2f5172369d198" dependencies = [ "anyhow", "bit-vec", @@ -9358,9 +10552,9 @@ dependencies = [ [[package]] name = "zksync_protobuf_build" -version = "0.1.0-rc.12" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "58e86c198e056d921b4f3f1d2755c23d090e942b5a70b03bcb7e7c02445aa491" +checksum = "33d35280660b11be2a4ebdf531184eb729acebfdc3368d27176ec104f8bf9c5f" dependencies = [ "anyhow", "heck 
0.5.0", @@ -9398,7 +10592,7 @@ version = "0.1.0" dependencies = [ "bincode", "chrono", - "circuit_sequencer_api 0.150.4", + "circuit_sequencer_api 0.150.5", "serde", "serde_json", "serde_with", @@ -9448,7 +10642,7 @@ version = "0.1.0" dependencies = [ "anyhow", "clap 4.4.6", - "futures 0.3.28", + "futures 0.3.30", "serde_json", "tikv-jemallocator", "tokio", @@ -9459,7 +10653,7 @@ dependencies = [ "zksync_consensus_executor", "zksync_consensus_roles", "zksync_core_leftovers", - "zksync_default_da_clients", + "zksync_da_clients", "zksync_env_config", "zksync_eth_client", "zksync_metadata_calculator", @@ -9491,7 +10685,7 @@ dependencies = [ "anyhow", "assert_matches", "async-trait", - "futures 0.3.28", + "futures 0.3.30", "serde", "test-casing", "thiserror", @@ -9509,9 +10703,9 @@ dependencies = [ [[package]] name = "zksync_solidity_vk_codegen" -version = "0.1.0" +version = "0.30.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6bac71750012656b207e8cdb67415823318909077d8c8e235111f0d2feeeeeda" +checksum = "b310ab8a21681270e73f177ddf7974cabb7a96f0624ab8b008fd6ee1f9b4f687" dependencies = [ "ethereum-types", "franklin-crypto", @@ -9557,7 +10751,7 @@ dependencies = [ "anyhow", "assert_matches", "async-trait", - "futures 0.3.28", + "futures 0.3.30", "hex", "itertools 0.10.5", "once_cell", @@ -9696,7 +10890,7 @@ dependencies = [ "bincode", "blake2 0.10.6", "chrono", - "derive_more", + "derive_more 1.0.0-beta.6", "hex", "itertools 0.10.5", "num", @@ -9730,7 +10924,7 @@ dependencies = [ "assert_matches", "bigdecimal", "bincode", - "futures 0.3.28", + "futures 0.3.30", "hex", "num", "once_cell", @@ -9771,6 +10965,26 @@ dependencies = [ "vise-exporter", ] +[[package]] +name = "zksync_vm2" +version = "0.1.0" +source = "git+https://github.com/matter-labs/vm2.git?rev=cd6136c42ec56856e0abcf2a98d1a9e120161482#cd6136c42ec56856e0abcf2a98d1a9e120161482" +dependencies = [ + "enum_dispatch", + "primitive-types", + "zk_evm_abstractions 
0.150.5", + "zkevm_opcode_defs 0.150.5", + "zksync_vm2_interface", +] + +[[package]] +name = "zksync_vm2_interface" +version = "0.1.0" +source = "git+https://github.com/matter-labs/vm2.git?rev=cd6136c42ec56856e0abcf2a98d1a9e120161482#cd6136c42ec56856e0abcf2a98d1a9e120161482" +dependencies = [ + "primitive-types", +] + [[package]] name = "zksync_vm_executor" version = "0.1.0" @@ -9815,7 +11029,7 @@ dependencies = [ "async-trait", "backon", "dashmap", - "futures 0.3.28", + "futures 0.3.30", "once_cell", "rand 0.8.5", "serde", @@ -9847,12 +11061,12 @@ dependencies = [ "anyhow", "assert_matches", "async-trait", - "futures 0.3.28", - "jsonrpsee", + "futures 0.3.30", + "jsonrpsee 0.23.2", "pin-project-lite", "rand 0.8.5", "rlp", - "rustls", + "rustls 0.23.10", "serde", "serde_json", "test-casing", diff --git a/Cargo.toml b/Cargo.toml index 075f5007be4c..1e2fb9e0c7aa 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -6,6 +6,7 @@ members = [ "core/bin/external_node", "core/bin/merkle_tree_consistency_checker", "core/bin/snapshots_creator", + "core/bin/selector_generator", "core/bin/system-constants-generator", "core/bin/verified_sources_fetcher", "core/bin/zksync_server", @@ -38,6 +39,7 @@ members = [ "core/node/base_token_adjuster", "core/node/external_proof_integration_api", "core/node/logs_bloom_backfill", + "core/node/da_clients", # Libraries "core/lib/db_connection", "core/lib/zksync_core_leftovers", @@ -50,7 +52,6 @@ members = [ "core/lib/dal", "core/lib/env_config", "core/lib/da_client", - "core/lib/default_da_clients", "core/lib/eth_client", "core/lib/eth_signer", "core/lib/l1_contract_interface", @@ -104,6 +105,7 @@ categories = ["cryptography"] anyhow = "1" assert_matches = "1.5" async-trait = "0.1" +async-recursion = "1" axum = "0.7.5" backon = "0.4.4" bigdecimal = "0.4.5" @@ -120,6 +122,7 @@ envy = "0.4" ethabi = "18.0.0" flate2 = "1.0.28" futures = "0.3" +glob = "0.3" google-cloud-auth = "0.16.0" google-cloud-storage = "0.20.0" governor = "0.4.2" @@ -196,6 
+199,15 @@ trybuild = "1.0" vise = "0.2.0" vise-exporter = "0.2.0" +# DA clients' dependencies +# Avail +base58 = "0.2.0" +scale-encode = "0.5.0" +blake2b_simd = "1.0.2" +subxt-metadata = "0.34.0" +parity-scale-codec = { version = "3.6.9", default-features = false } +subxt-signer = { version = "0.34", default-features = false } + # Here and below: # We *always* pin the latest version of protocol to disallow accidental changes in the execution logic. # However, for the historical version of protocol crates, we have lax requirements. Otherwise, @@ -204,30 +216,30 @@ circuit_sequencer_api_1_3_3 = { package = "circuit_sequencer_api", version = "0. circuit_sequencer_api_1_4_0 = { package = "circuit_sequencer_api", version = "0.140" } circuit_sequencer_api_1_4_1 = { package = "circuit_sequencer_api", version = "0.141" } circuit_sequencer_api_1_4_2 = { package = "circuit_sequencer_api", version = "0.142" } -circuit_sequencer_api_1_5_0 = { package = "circuit_sequencer_api", version = "=0.150.4" } -crypto_codegen = { package = "zksync_solidity_vk_codegen", version = "=0.1.0" } -kzg = { package = "zksync_kzg", version = "=0.150.4" } +circuit_sequencer_api_1_5_0 = { package = "circuit_sequencer_api", version = "=0.150.5" } +crypto_codegen = { package = "zksync_solidity_vk_codegen", version = "=0.30.1" } +kzg = { package = "zksync_kzg", version = "=0.150.5" } zk_evm = { version = "=0.133.0" } zk_evm_1_3_1 = { package = "zk_evm", version = "0.131.0-rc.2" } -zk_evm_1_3_3 = { package = "zk_evm", version = "0.133.0" } -zk_evm_1_4_0 = { package = "zk_evm", version = "0.140.0" } -zk_evm_1_4_1 = { package = "zk_evm", version = "0.141.0" } -zk_evm_1_5_0 = { package = "zk_evm", version = "=0.150.4" } +zk_evm_1_3_3 = { package = "zk_evm", version = "0.133" } +zk_evm_1_4_0 = { package = "zk_evm", version = "0.140" } +zk_evm_1_4_1 = { package = "zk_evm", version = "0.141" } +zk_evm_1_5_0 = { package = "zk_evm", version = "=0.150.5" } # New VM; pinned to a specific commit because of 
instability -vm2 = { git = "https://github.com/matter-labs/vm2.git", rev = "4ef15d46410ffc11744771a3a6c7c09dd9470c90" } +zksync_vm2 = { git = "https://github.com/matter-labs/vm2.git", rev = "cd6136c42ec56856e0abcf2a98d1a9e120161482" } # Consensus dependencies. -zksync_concurrency = "=0.1.0-rc.12" -zksync_consensus_bft = "=0.1.0-rc.12" -zksync_consensus_crypto = "=0.1.0-rc.12" -zksync_consensus_executor = "=0.1.0-rc.12" -zksync_consensus_network = "=0.1.0-rc.12" -zksync_consensus_roles = "=0.1.0-rc.12" -zksync_consensus_storage = "=0.1.0-rc.12" -zksync_consensus_utils = "=0.1.0-rc.12" -zksync_protobuf = "=0.1.0-rc.12" -zksync_protobuf_build = "=0.1.0-rc.12" +zksync_concurrency = "=0.1.1" +zksync_consensus_bft = "=0.1.1" +zksync_consensus_crypto = "=0.1.1" +zksync_consensus_executor = "=0.1.1" +zksync_consensus_network = "=0.1.1" +zksync_consensus_roles = "=0.1.1" +zksync_consensus_storage = "=0.1.1" +zksync_consensus_utils = "=0.1.1" +zksync_protobuf = "=0.1.1" +zksync_protobuf_build = "=0.1.1" # "Local" dependencies zksync_multivm = { version = "0.1.0", path = "core/lib/multivm" } @@ -245,7 +257,6 @@ zksync_db_connection = { version = "0.1.0", path = "core/lib/db_connection" } zksync_env_config = { version = "0.1.0", path = "core/lib/env_config" } zksync_eth_client = { version = "0.1.0", path = "core/lib/eth_client" } zksync_da_client = { version = "0.1.0", path = "core/lib/da_client" } -zksync_default_da_clients = { version = "0.1.0", path = "core/lib/default_da_clients" } zksync_eth_signer = { version = "0.1.0", path = "core/lib/eth_signer" } zksync_health_check = { version = "0.1.0", path = "core/lib/health_check" } zksync_l1_contract_interface = { version = "0.1.0", path = "core/lib/l1_contract_interface" } @@ -279,6 +290,7 @@ zksync_commitment_generator = { version = "0.1.0", path = "core/node/commitment_ zksync_house_keeper = { version = "0.1.0", path = "core/node/house_keeper" } zksync_node_genesis = { version = "0.1.0", path = 
"core/node/genesis" } zksync_da_dispatcher = { version = "0.1.0", path = "core/node/da_dispatcher" } +zksync_da_clients = { version = "0.1.0", path = "core/node/da_clients" } zksync_eth_sender = { version = "0.1.0", path = "core/node/eth_sender" } zksync_node_db_pruner = { version = "0.1.0", path = "core/node/db_pruner" } zksync_node_fee_model = { version = "0.1.0", path = "core/node/fee_model" } diff --git a/contracts b/contracts index 361d19af8346..2edbd6912c6a 160000 --- a/contracts +++ b/contracts @@ -1 +1 @@ -Subproject commit 361d19af8346efde0b53243e29612b15f232459e +Subproject commit 2edbd6912c6a73b120377f5e53fe1cc5343407fc diff --git a/core/bin/external_node/src/config/mod.rs b/core/bin/external_node/src/config/mod.rs index cd4e845b8f3e..f8241deae26c 100644 --- a/core/bin/external_node/src/config/mod.rs +++ b/core/bin/external_node/src/config/mod.rs @@ -1217,6 +1217,7 @@ pub(crate) struct ExternalNodeConfig { pub observability: ObservabilityENConfig, pub experimental: ExperimentalENConfig, pub consensus: Option, + pub consensus_secrets: Option, pub api_component: ApiComponentConfig, pub tree_component: TreeComponentConfig, pub remote: R, @@ -1240,6 +1241,8 @@ impl ExternalNodeConfig<()> { tree_component: envy::prefixed("EN_TREE_") .from_env::() .context("could not load external node config (tree component params)")?, + consensus_secrets: read_consensus_secrets() + .context("config::read_consensus_secrets()")?, remote: (), }) } @@ -1262,7 +1265,7 @@ impl ExternalNodeConfig<()> { .map(read_yaml_repr::) .transpose() .context("failed decoding consensus YAML config")?; - + let consensus_secrets = secrets_config.consensus.clone(); let required = RequiredENConfig::from_configs( &general_config, &external_node_config, @@ -1298,6 +1301,7 @@ impl ExternalNodeConfig<()> { consensus, api_component, tree_component, + consensus_secrets, remote: (), }) } @@ -1332,6 +1336,7 @@ impl ExternalNodeConfig<()> { consensus: self.consensus, tree_component: self.tree_component, 
api_component: self.api_component, + consensus_secrets: self.consensus_secrets, remote, }) } @@ -1348,6 +1353,7 @@ impl ExternalNodeConfig { observability: ObservabilityENConfig::default(), experimental: ExperimentalENConfig::mock(), consensus: None, + consensus_secrets: None, api_component: ApiComponentConfig { tree_api_remote_url: None, }, diff --git a/core/bin/external_node/src/config/observability.rs b/core/bin/external_node/src/config/observability.rs index 0dd83f3bd35b..91b721bf77c9 100644 --- a/core/bin/external_node/src/config/observability.rs +++ b/core/bin/external_node/src/config/observability.rs @@ -95,11 +95,10 @@ impl ObservabilityENConfig { ) }) .transpose()?; - let guard = zksync_vlog::ObservabilityBuilder::new() + zksync_vlog::ObservabilityBuilder::new() .with_logs(Some(logs)) .with_sentry(sentry) - .build(); - Ok(guard) + .try_build() } pub(crate) fn from_configs(general_config: &GeneralConfig) -> anyhow::Result { diff --git a/core/bin/external_node/src/config/tests.rs b/core/bin/external_node/src/config/tests.rs index 43210a765723..a32be3eff725 100644 --- a/core/bin/external_node/src/config/tests.rs +++ b/core/bin/external_node/src/config/tests.rs @@ -63,7 +63,10 @@ fn parsing_observability_config() { fn using_unset_sentry_url() { let env_vars = MockEnvironment::new(&[("MISC_SENTRY_URL", "unset")]); let config = ObservabilityENConfig::new(&env_vars).unwrap(); - config.build_observability().unwrap(); + if let Err(err) = config.build_observability() { + // Global tracer may be installed by another test, but the logic shouldn't fail before that. 
+ assert!(format!("{err:?}").contains("global tracer"), "{err:?}"); + } } #[test] diff --git a/core/bin/external_node/src/node_builder.rs b/core/bin/external_node/src/node_builder.rs index 7b94ca7a0c2a..98e286c253a2 100644 --- a/core/bin/external_node/src/node_builder.rs +++ b/core/bin/external_node/src/node_builder.rs @@ -56,11 +56,7 @@ use zksync_node_framework::{ }; use zksync_state::RocksdbStorageOptions; -use crate::{ - config::{self, ExternalNodeConfig}, - metrics::framework::ExternalNodeMetricsLayer, - Component, -}; +use crate::{config::ExternalNodeConfig, metrics::framework::ExternalNodeMetricsLayer, Component}; /// Builder for the external node. #[derive(Debug)] @@ -240,8 +236,7 @@ impl ExternalNodeBuilder { fn add_consensus_layer(mut self) -> anyhow::Result { let config = self.config.consensus.clone(); - let secrets = - config::read_consensus_secrets().context("config::read_consensus_secrets()")?; + let secrets = self.config.consensus_secrets.clone(); let layer = ExternalNodeConsensusLayer { build_version: crate::metadata::SERVER_VERSION .parse() diff --git a/core/bin/external_node/src/tests/mod.rs b/core/bin/external_node/src/tests/mod.rs index 5e9e7b3eeb38..b21dbd0db9a3 100644 --- a/core/bin/external_node/src/tests/mod.rs +++ b/core/bin/external_node/src/tests/mod.rs @@ -21,7 +21,7 @@ const POLL_INTERVAL: Duration = Duration::from_millis(100); #[tokio::test] #[tracing::instrument] // Add args to the test logs async fn external_node_basics(components_str: &'static str) { - let _guard = zksync_vlog::ObservabilityBuilder::new().build(); // Enable logging to simplify debugging + let _guard = zksync_vlog::ObservabilityBuilder::new().try_build().ok(); // Enable logging to simplify debugging let (env, env_handles) = utils::TestEnvironment::with_genesis_block(components_str).await; @@ -92,7 +92,7 @@ async fn external_node_basics(components_str: &'static str) { #[tokio::test] async fn node_reacts_to_stop_signal_during_initial_reorg_detection() { - let _guard = 
zksync_vlog::ObservabilityBuilder::new().build(); // Enable logging to simplify debugging + let _guard = zksync_vlog::ObservabilityBuilder::new().try_build().ok(); // Enable logging to simplify debugging let (env, env_handles) = utils::TestEnvironment::with_genesis_block("core").await; let l2_client = utils::mock_l2_client_hanging(); @@ -128,7 +128,7 @@ async fn node_reacts_to_stop_signal_during_initial_reorg_detection() { #[tokio::test] async fn running_tree_without_core_is_not_allowed() { - let _guard = zksync_vlog::ObservabilityBuilder::new().build(); // Enable logging to simplify debugging + let _guard = zksync_vlog::ObservabilityBuilder::new().try_build().ok(); // Enable logging to simplify debugging let (env, _env_handles) = utils::TestEnvironment::with_genesis_block("tree").await; let l2_client = utils::mock_l2_client(&env); @@ -165,7 +165,7 @@ async fn running_tree_without_core_is_not_allowed() { #[tokio::test] async fn running_tree_api_without_tree_is_not_allowed() { - let _guard = zksync_vlog::ObservabilityBuilder::new().build(); // Enable logging to simplify debugging + let _guard = zksync_vlog::ObservabilityBuilder::new().try_build().ok(); // Enable logging to simplify debugging let (env, _env_handles) = utils::TestEnvironment::with_genesis_block("core,tree_api").await; let l2_client = utils::mock_l2_client(&env); diff --git a/core/bin/selector_generator/Cargo.toml b/core/bin/selector_generator/Cargo.toml new file mode 100644 index 000000000000..b3425c11b4ec --- /dev/null +++ b/core/bin/selector_generator/Cargo.toml @@ -0,0 +1,21 @@ +[package] +name = "selector_generator" +version = "0.1.0" +edition.workspace = true +authors.workspace = true +homepage.workspace = true +repository.workspace = true +license.workspace = true +keywords.workspace = true +categories.workspace = true +publish = false + +[dependencies] +anyhow.workspace = true +serde = { workspace = true, features = ["derive"] } +serde_json.workspace = true +glob.workspace = true +clap = { 
workspace = true, features = ["derive"] } +ethabi.workspace = true +hex.workspace = true +tokio = { workspace = true, features = ["full"] } diff --git a/core/bin/selector_generator/README.md b/core/bin/selector_generator/README.md new file mode 100644 index 000000000000..a954613c7e45 --- /dev/null +++ b/core/bin/selector_generator/README.md @@ -0,0 +1,13 @@ +# Generates the list of solidity selectors + +This tool generates a mapping from solidity selectors to function names. + +The output json file can be used by multiple tools to improve debugging and readability. + +By default, it appends the newly found selectors into the list. + +To run, first make sure that you have your contracts compiled and then run: + +``` +cargo run ../../../contracts ../../../etc/selector-generator-data/selectors.json +``` diff --git a/core/bin/selector_generator/src/app.rs b/core/bin/selector_generator/src/app.rs new file mode 100644 index 000000000000..425bf9f42826 --- /dev/null +++ b/core/bin/selector_generator/src/app.rs @@ -0,0 +1,105 @@ +use std::path::PathBuf; + +use anyhow::Context; +use glob::glob; +use tokio::io::AsyncWriteExt as _; + +use crate::selectors::Selectors; + +#[derive(Debug, Default)] +pub(crate) struct App { + /// Selectors file. + file_path: PathBuf, + /// All the selectors. Initially, will be loaded from the file. + /// All the discovered selectors will be merged into it. + selectors: Selectors, + /// Number of selectors before processing the files. + /// Used for reporting. + selectors_before: usize, + /// Number of files analyzed. + /// Used for reporting. + analyzed_files: usize, +} + +impl App { + /// Loads the selectors from the file, or returns a new instance if the file doesn't exist. + pub async fn load(file_path: impl Into) -> anyhow::Result { + let file_path = file_path.into(); + // If doesn't exist, return default. 
+ if !file_path.exists() { + return Ok(Self::default()); + } + + let file = tokio::fs::read(&file_path) + .await + .context("Failed to read file")?; + let selectors: Selectors = + serde_json::from_slice(&file).context("Failed to deserialize file")?; + let selectors_before = selectors.len(); + Ok(Self { + file_path, + selectors, + selectors_before, + analyzed_files: 0, + }) + } + + /// Analyses all the JSON files, looking for 'abi' entries, and then computing the selectors for them. + pub async fn process_files(&mut self, directory: &str) -> anyhow::Result<()> { + for file_path in Self::load_file_paths(directory) { + let Ok(new_selectors) = Selectors::load(&file_path).await.inspect_err(|e| { + eprintln!("Error parsing file {file_path:?}: {e:?}"); + }) else { + continue; + }; + self.merge(new_selectors); + } + Ok(()) + } + + /// Saves the selectors to the file. + pub async fn save(self) -> anyhow::Result<()> { + let mut file = tokio::fs::OpenOptions::new() + .write(true) + .create(true) + .truncate(true) + .open(self.file_path) + .await + .context("Failed to open file")?; + let json = serde_json::to_string_pretty(&self.selectors)?; + file.write_all(json.as_bytes()) + .await + .context("Failed to save file")?; + Ok(()) + } + + /// Merges the new selectors into the current ones. + pub fn merge(&mut self, new: Selectors) { + self.selectors.merge(new); + self.analyzed_files += 1; + } + + /// Reports the number of analyzed files and the number of added selectors. + pub fn report(&self) { + println!( + "Analyzed {} files. 
Added {} selectors (before: {} after: {})", + self.analyzed_files, + self.selectors.len() - self.selectors_before, + self.selectors_before, + self.selectors.len() + ); + } + + fn load_file_paths(dir: &str) -> Vec { + glob(&format!("{}/**/*.json", dir)) + .expect("Failed to read glob pattern") + .filter_map(|entry| match entry { + Ok(path) => Some(path), + Err(e) => { + eprintln!("Error reading file: {:?}", e); + None + } + }) + .collect() + } +} diff --git a/core/bin/selector_generator/src/main.rs b/core/bin/selector_generator/src/main.rs new file mode 100644 index 000000000000..f5ed2e01c582 --- /dev/null +++ b/core/bin/selector_generator/src/main.rs @@ -0,0 +1,33 @@ +use app::App; +use clap::Parser; + +pub(crate) mod app; +pub(crate) mod selectors; + +/// Selector generator tool. +/// +/// Generates a mapping of short (4-byte) function selectors to their corresponding function names. +/// +/// The generated JSON can be used to lookup function names by their selectors, when interacting +/// with Ethereum contracts. +#[derive(Debug, Parser)] +#[command(author, version, about, long_about)] +struct Cli { + /// Path to the directory with JSON files containing ABI. + /// All JSON files in this directory will be processed. + contracts_dir: String, + /// Path to the output file. + /// The file will contain the list of function selectors. + /// If the file already exists, new selectors will be appended to it. 
+ output_file: String, +} + +#[tokio::main] +async fn main() -> anyhow::Result<()> { + let args = Cli::parse(); + let mut app = App::load(args.output_file).await?; + app.process_files(&args.contracts_dir).await?; + app.report(); + app.save().await?; + Ok(()) +} diff --git a/core/bin/selector_generator/src/selectors.rs b/core/bin/selector_generator/src/selectors.rs new file mode 100644 index 000000000000..3b69854a9478 --- /dev/null +++ b/core/bin/selector_generator/src/selectors.rs @@ -0,0 +1,118 @@ +use std::{collections::HashMap, path::PathBuf}; + +use anyhow::Context; +use serde::{Deserialize, Serialize}; + +/// Short (4-byte) function selector. +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Hash)] +#[serde(transparent)] +struct Selector(String); + +/// Function name without parameters. +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] +#[serde(transparent)] +struct FunctionName(String); + +/// A set of function selectors and their corresponding function names. +#[derive(Debug, Default, Serialize, Deserialize)] +pub(crate) struct Selectors { + #[serde(flatten)] + selectors: HashMap, +} + +impl Selectors { + /// Loads the selectors from the file, or returns a new instance if the file is a valid + /// JSON, but doesn't contain `ABI` section. + /// + /// Will return an error if file doesn't exist or cannot be deserialized. + pub async fn load(file_path: &PathBuf) -> anyhow::Result { + let file = tokio::fs::read(file_path) + .await + .context("Failed to read file")?; + let json: serde_json::Value = + serde_json::from_slice(&file).context("Failed to deserialize file")?; + let Some(abi) = json.get("abi").cloned() else { + return Ok(Selectors::default()); + }; + + let contract: ethabi::Contract = + serde_json::from_value(abi).context("Failed to parse abi")?; + Ok(Self::new(contract)) + } + + /// Loads selectors from a given contract. 
+ pub fn new(contract: ethabi::Contract) -> Self { + let selectors: HashMap<_, _> = contract + .functions + .into_values() + .flatten() + .map(|function| { + let selector = hex::encode(function.short_signature()); + (Selector(selector), FunctionName(function.name)) + }) + .collect(); + Self { selectors } + } + + /// Merges new selectors into the existing set. + pub fn merge(&mut self, new: Self) { + for (selector, name) in new.selectors { + self.selectors + .entry(selector.clone()) + .and_modify(|e| { + assert_eq!( + e, &name, + "Function name mismatch for selector '{:?}'", + selector + ) + }) + .or_insert(name); + } + } + + pub fn len(&self) -> usize { + self.selectors.len() + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_selectors() { + let contract_json = r#"[ + { + "type": "function", + "name": "transfer", + "inputs": [ + { "name": "to", "type": "address" }, + { "name": "value", "type": "uint256" } + ], + "outputs": [], + "stateMutability": "nonpayable" + }, + { + "type": "function", + "name": "bar", + "inputs": [], + "outputs": [], + "stateMutability": "nonpayable" + } + ] + "#; + + let contract: ethabi::Contract = serde_json::from_str(contract_json).unwrap(); + let selectors = Selectors::new(contract); + assert_eq!(selectors.len(), 2); + + // Check the generated selectors. 
+ assert_eq!( + selectors + .selectors + .get(&Selector("a9059cbb".to_string())) + .expect("No selector for transfer found"), + &FunctionName("transfer".to_string()) + ); + } +} diff --git a/core/bin/snapshots_creator/Cargo.toml b/core/bin/snapshots_creator/Cargo.toml index 530b9635cd4f..5a36c646e88e 100644 --- a/core/bin/snapshots_creator/Cargo.toml +++ b/core/bin/snapshots_creator/Cargo.toml @@ -29,3 +29,4 @@ futures.workspace = true [dev-dependencies] rand.workspace = true +test-casing.workspace = true diff --git a/core/bin/snapshots_creator/src/creator.rs b/core/bin/snapshots_creator/src/creator.rs index 18212a7d2055..29150cd6b698 100644 --- a/core/bin/snapshots_creator/src/creator.rs +++ b/core/bin/snapshots_creator/src/creator.rs @@ -291,25 +291,38 @@ impl SnapshotCreator { .get_sealed_l1_batch_number() .await?; let sealed_l1_batch_number = sealed_l1_batch_number.context("No L1 batches in Postgres")?; - let requested_l1_batch_number = if let Some(l1_batch_number) = config.l1_batch_number { + let (requested_l1_batch_number, existing_snapshot) = if let Some(l1_batch_number) = + config.l1_batch_number + { anyhow::ensure!( l1_batch_number <= sealed_l1_batch_number, "Requested a snapshot for L1 batch #{l1_batch_number} that doesn't exist in Postgres (latest L1 batch: {sealed_l1_batch_number})" ); - l1_batch_number + + let existing_snapshot = master_conn + .snapshots_dal() + .get_snapshot_metadata(l1_batch_number) + .await?; + (l1_batch_number, existing_snapshot) } else { // We subtract 1 so that after restore, EN node has at least one L1 batch to fetch. 
anyhow::ensure!( sealed_l1_batch_number != L1BatchNumber(0), "Cannot create snapshot when only the genesis L1 batch is present in Postgres" ); - sealed_l1_batch_number - 1 - }; + let requested_l1_batch_number = sealed_l1_batch_number - 1; - let existing_snapshot = master_conn - .snapshots_dal() - .get_snapshot_metadata(requested_l1_batch_number) - .await?; + // Continue creating a pending snapshot if it exists, even if it doesn't correspond to the latest L1 batch. + // OTOH, a completed snapshot does not matter, unless it corresponds to `requested_l1_batch_number` (in which case it doesn't need to be created again). + let existing_snapshot = master_conn + .snapshots_dal() + .get_newest_snapshot_metadata() + .await? + .filter(|snapshot| { + !snapshot.is_complete() || snapshot.l1_batch_number == requested_l1_batch_number + }); + (requested_l1_batch_number, existing_snapshot) + }; drop(master_conn); match existing_snapshot { @@ -317,18 +330,7 @@ impl SnapshotCreator { tracing::info!("Snapshot for the requested L1 batch is complete: {snapshot:?}"); Ok(None) } - Some(snapshot) if config.l1_batch_number.is_some() => { - Ok(Some(SnapshotProgress::from_existing_snapshot(&snapshot))) - } - Some(snapshot) => { - // Unless creating a snapshot for a specific L1 batch is requested, we never continue an existing snapshot, even if it's incomplete. - // This it to make running multiple snapshot creator instances in parallel easier to reason about. - tracing::warn!( - "Snapshot at expected L1 batch #{requested_l1_batch_number} exists, but is incomplete: {snapshot:?}. 
If you need to resume creating it, \ - specify the L1 batch number in the snapshot creator config" - ); - Ok(None) - } + Some(snapshot) => Ok(Some(SnapshotProgress::from_existing_snapshot(&snapshot))), None => { Self::initialize_snapshot_progress( config, diff --git a/core/bin/snapshots_creator/src/tests.rs b/core/bin/snapshots_creator/src/tests.rs index 990dd672975a..a440d836b4c9 100644 --- a/core/bin/snapshots_creator/src/tests.rs +++ b/core/bin/snapshots_creator/src/tests.rs @@ -10,6 +10,7 @@ use std::{ }; use rand::{thread_rng, Rng}; +use test_casing::test_casing; use zksync_config::SnapshotsCreatorConfig; use zksync_dal::{Connection, CoreDal}; use zksync_object_store::{MockObjectStore, ObjectStore}; @@ -64,6 +65,15 @@ impl HandleEvent for TestEventListener { } } +#[derive(Debug)] +struct UnreachableEventListener; + +impl HandleEvent for UnreachableEventListener { + fn on_chunk_started(&self) -> TestBehavior { + unreachable!("should not be reached"); + } +} + impl SnapshotCreator { fn for_tests(blob_store: Arc, pool: ConnectionPool) -> Self { Self { @@ -80,6 +90,13 @@ impl SnapshotCreator { ..self } } + + fn panic_on_chunk_start(self) -> Self { + Self { + event_listener: Box::new(UnreachableEventListener), + ..self + } + } } #[derive(Debug)] @@ -431,8 +448,9 @@ async fn persisting_snapshot_logs_for_v0_snapshot() { assert_eq!(actual_logs, expected_outputs.storage_logs); } +#[test_casing(2, [false, true])] #[tokio::test] -async fn recovery_workflow() { +async fn recovery_workflow(specify_batch_after_recovery: bool) { let pool = ConnectionPool::::test_pool().await; let mut rng = thread_rng(); let object_store = MockObjectStore::arc(); @@ -462,29 +480,9 @@ async fn recovery_workflow() { let actual_deps: HashSet<_> = factory_deps.into_iter().collect(); assert_eq!(actual_deps, expected_outputs.deps); - // Check that the creator does nothing unless it's requested to create a new snapshot. 
- SnapshotCreator::for_tests(object_store.clone(), pool.clone()) - .stop_after_chunk_count(2) - .run(SEQUENTIAL_TEST_CONFIG, MIN_CHUNK_COUNT) - .await - .unwrap(); - let snapshot_metadata = conn - .snapshots_dal() - .get_snapshot_metadata(snapshot_l1_batch_number) - .await - .unwrap() - .expect("No snapshot metadata"); - assert!( - snapshot_metadata - .storage_logs_filepaths - .iter() - .all(Option::is_none), - "{snapshot_metadata:?}" - ); - // Process 2 storage log chunks, then stop. let recovery_config = SnapshotsCreatorConfig { - l1_batch_number: Some(snapshot_l1_batch_number), + l1_batch_number: specify_batch_after_recovery.then_some(snapshot_l1_batch_number), ..SEQUENTIAL_TEST_CONFIG }; SnapshotCreator::for_tests(object_store.clone(), pool.clone()) @@ -510,11 +508,68 @@ async fn recovery_workflow() { // Process the remaining chunks. SnapshotCreator::for_tests(object_store.clone(), pool.clone()) + .run(recovery_config.clone(), MIN_CHUNK_COUNT) + .await + .unwrap(); + + assert_storage_logs(&*object_store, snapshot_l1_batch_number, &expected_outputs).await; + + // Check that the snapshot is not created anew after it is completed. 
+ SnapshotCreator::for_tests(object_store.clone(), pool.clone()) + .panic_on_chunk_start() .run(recovery_config, MIN_CHUNK_COUNT) .await .unwrap(); + let snapshot_metadata = conn + .snapshots_dal() + .get_snapshot_metadata(snapshot_l1_batch_number) + .await + .unwrap() + .expect("No snapshot metadata"); + assert!(snapshot_metadata.is_complete(), "{snapshot_metadata:#?}"); +} + +#[tokio::test] +async fn recovery_workflow_with_new_l1_batch() { + let pool = ConnectionPool::::test_pool().await; + let mut rng = thread_rng(); + let object_store = MockObjectStore::arc(); + let mut conn = pool.connection().await.unwrap(); + let expected_outputs = prepare_postgres(&mut rng, &mut conn, 10).await; + + SnapshotCreator::for_tests(object_store.clone(), pool.clone()) + .stop_after_chunk_count(2) + .run(SEQUENTIAL_TEST_CONFIG, MIN_CHUNK_COUNT) + .await + .unwrap(); + + let snapshot_l1_batch_number = L1BatchNumber(8); + let snapshot_metadata = conn + .snapshots_dal() + .get_snapshot_metadata(snapshot_l1_batch_number) + .await + .unwrap() + .expect("No snapshot metadata"); + assert!(!snapshot_metadata.is_complete(), "{snapshot_metadata:#?}"); + + let new_logs = gen_storage_logs(&mut thread_rng(), 50); + create_l1_batch(&mut conn, snapshot_l1_batch_number + 2, &new_logs).await; + + // The old snapshot should be completed. 
+ SnapshotCreator::for_tests(object_store.clone(), pool.clone()) + .run(SEQUENTIAL_TEST_CONFIG, MIN_CHUNK_COUNT) + .await + .unwrap(); assert_storage_logs(&*object_store, snapshot_l1_batch_number, &expected_outputs).await; + + let snapshot_metadata = conn + .snapshots_dal() + .get_snapshot_metadata(snapshot_l1_batch_number) + .await + .unwrap() + .expect("No snapshot metadata"); + assert!(snapshot_metadata.is_complete(), "{snapshot_metadata:#?}"); } #[tokio::test] diff --git a/core/bin/system-constants-generator/src/utils.rs b/core/bin/system-constants-generator/src/utils.rs index 989b235de61c..32b4d04506d1 100644 --- a/core/bin/system-constants-generator/src/utils.rs +++ b/core/bin/system-constants-generator/src/utils.rs @@ -2,7 +2,7 @@ use std::{cell::RefCell, rc::Rc}; use once_cell::sync::Lazy; use zksync_contracts::{ - load_sys_contract, read_bootloader_code, read_sys_contract_bytecode, read_zbin_bytecode, + load_sys_contract, read_bootloader_code, read_bytecode_from_path, read_sys_contract_bytecode, BaseSystemContracts, ContractLanguage, SystemContractCode, }; use zksync_multivm::{ @@ -176,9 +176,8 @@ pub(super) fn get_l1_txs(number_of_txs: usize) -> (Vec, Vec Vec { - read_zbin_bytecode(format!( - "contracts/system-contracts/bootloader/tests/artifacts/{}.yul.zbin", - test + read_bytecode_from_path(format!( + "contracts/system-contracts/zkout/{test}.yul/contracts-preprocessed/bootloader/{test}.yul.json", )) } diff --git a/core/bin/zksync_server/Cargo.toml b/core/bin/zksync_server/Cargo.toml index 72eff1384e2d..031183924064 100644 --- a/core/bin/zksync_server/Cargo.toml +++ b/core/bin/zksync_server/Cargo.toml @@ -21,7 +21,7 @@ zksync_utils.workspace = true zksync_types.workspace = true zksync_core_leftovers.workspace = true zksync_node_genesis.workspace = true -zksync_default_da_clients.workspace = true +zksync_da_clients.workspace = true # Consensus dependenices zksync_consensus_crypto.workspace = true diff --git a/core/bin/zksync_server/src/main.rs 
b/core/bin/zksync_server/src/main.rs index 7e0ff0e49201..84898d6da067 100644 --- a/core/bin/zksync_server/src/main.rs +++ b/core/bin/zksync_server/src/main.rs @@ -17,9 +17,9 @@ use zksync_config::{ L1Secrets, ObservabilityConfig, PrometheusConfig, ProofDataHandlerConfig, ProtectiveReadsWriterConfig, Secrets, }, - ApiConfig, BaseTokenAdjusterConfig, ContractVerifierConfig, DADispatcherConfig, DBConfig, - EthConfig, EthWatchConfig, ExternalProofIntegrationApiConfig, GasAdjusterConfig, GenesisConfig, - ObjectStoreConfig, PostgresConfig, SnapshotsCreatorConfig, + ApiConfig, BaseTokenAdjusterConfig, ContractVerifierConfig, DAClientConfig, DADispatcherConfig, + DBConfig, EthConfig, EthWatchConfig, ExternalProofIntegrationApiConfig, GasAdjusterConfig, + GenesisConfig, ObjectStoreConfig, PostgresConfig, SnapshotsCreatorConfig, }; use zksync_core_leftovers::{ temp_config_store::{decode_yaml_repr, TempConfigStore}, @@ -199,6 +199,7 @@ fn load_env_config() -> anyhow::Result { gas_adjuster_config: GasAdjusterConfig::from_env().ok(), observability: ObservabilityConfig::from_env().ok(), snapshot_creator: SnapshotsCreatorConfig::from_env().ok(), + da_client_config: DAClientConfig::from_env().ok(), da_dispatcher_config: DADispatcherConfig::from_env().ok(), protective_reads_writer_config: ProtectiveReadsWriterConfig::from_env().ok(), basic_witness_input_producer_config: BasicWitnessInputProducerConfig::from_env().ok(), diff --git a/core/bin/zksync_server/src/node_builder.rs b/core/bin/zksync_server/src/node_builder.rs index e2a0c5846b5d..069a7a799ab5 100644 --- a/core/bin/zksync_server/src/node_builder.rs +++ b/core/bin/zksync_server/src/node_builder.rs @@ -3,14 +3,13 @@ use anyhow::Context; use zksync_config::{ - configs::{eth_sender::PubdataSendingMode, wallets::Wallets, GeneralConfig, Secrets}, + configs::{ + da_client::DAClient, eth_sender::PubdataSendingMode, wallets::Wallets, GeneralConfig, + Secrets, + }, ContractsConfig, GenesisConfig, }; use 
zksync_core_leftovers::Component; -use zksync_default_da_clients::{ - no_da::wiring_layer::NoDAClientWiringLayer, - object_store::{config::DAObjectStoreConfig, wiring_layer::ObjectStorageClientWiringLayer}, -}; use zksync_metadata_calculator::MetadataCalculatorConfig; use zksync_node_api_server::{ tx_sender::{ApiContracts, TxSenderConfig}, @@ -28,6 +27,10 @@ use zksync_node_framework::{ commitment_generator::CommitmentGeneratorLayer, consensus::MainNodeConsensusLayer, contract_verification_api::ContractVerificationApiLayer, + da_clients::{ + avail::AvailWiringLayer, no_da::NoDAClientWiringLayer, + object_store::ObjectStorageClientWiringLayer, + }, da_dispatcher::DataAvailabilityDispatcherLayer, eth_sender::{EthTxAggregatorLayer, EthTxManagerLayer}, eth_watch::EthWatchLayer, @@ -500,16 +503,23 @@ impl MainNodeBuilder { Ok(self) } - fn add_no_da_client_layer(mut self) -> anyhow::Result { - self.node.add_layer(NoDAClientWiringLayer); - Ok(self) - } + fn add_da_client_layer(mut self) -> anyhow::Result { + let Some(da_client_config) = self.configs.da_client_config.clone() else { + tracing::warn!("No config for DA client, using the NoDA client"); + self.node.add_layer(NoDAClientWiringLayer); + return Ok(self); + }; + + match da_client_config.client { + DAClient::Avail(config) => { + self.node.add_layer(AvailWiringLayer::new(config)); + } + DAClient::ObjectStore(config) => { + self.node + .add_layer(ObjectStorageClientWiringLayer::new(config)); + } + } - #[allow(dead_code)] - fn add_object_storage_da_client_layer(mut self) -> anyhow::Result { - let object_store_config = DAObjectStoreConfig::from_env()?; - self.node - .add_layer(ObjectStorageClientWiringLayer::new(object_store_config.0)); Ok(self) } @@ -750,7 +760,7 @@ impl MainNodeBuilder { self = self.add_commitment_generator_layer()?; } Component::DADispatcher => { - self = self.add_no_da_client_layer()?.add_da_dispatcher_layer()?; + self = self.add_da_client_layer()?.add_da_dispatcher_layer()?; } 
Component::VmRunnerProtectiveReads => { self = self.add_vm_runner_protective_reads_layer()?; diff --git a/core/lib/basic_types/src/basic_fri_types.rs b/core/lib/basic_types/src/basic_fri_types.rs index 5969cca6b8c0..9de9920e86f6 100644 --- a/core/lib/basic_types/src/basic_fri_types.rs +++ b/core/lib/basic_types/src/basic_fri_types.rs @@ -152,6 +152,29 @@ impl AggregationRound { AggregationRound::Scheduler => None, } } + + /// Returns all the circuit IDs that correspond to a particular + /// aggregation round. + /// + /// For example, in aggregation round 0, the circuit ids should be 1 to 15 + 255 (EIP4844). + /// In aggregation round 1, the circuit ids should be 3 to 18. + /// In aggregation round 2, the circuit ids should be 2. + /// In aggregation round 3, the circuit ids should be 255. + /// In aggregation round 4, the circuit ids should be 1. + pub fn circuit_ids(self) -> Vec { + match self { + AggregationRound::BasicCircuits => (1..=15) + .chain(once(255)) + .map(|circuit_id| CircuitIdRoundTuple::new(circuit_id, self as u8)) + .collect(), + AggregationRound::LeafAggregation => (3..=18) + .map(|circuit_id| CircuitIdRoundTuple::new(circuit_id, self as u8)) + .collect(), + AggregationRound::NodeAggregation => vec![CircuitIdRoundTuple::new(2, self as u8)], + AggregationRound::RecursionTip => vec![CircuitIdRoundTuple::new(255, self as u8)], + AggregationRound::Scheduler => vec![CircuitIdRoundTuple::new(1, self as u8)], + } + } } impl std::fmt::Display for AggregationRound { @@ -265,33 +288,17 @@ impl CircuitProverStats { impl Default for CircuitProverStats { fn default() -> Self { - let mut stats = HashMap::new(); - for circuit in (1..=15).chain(once(255)) { - stats.insert( - CircuitIdRoundTuple::new(circuit, 0), - JobCountStatistics::default(), - ); - } - for circuit in 3..=18 { - stats.insert( - CircuitIdRoundTuple::new(circuit, 1), - JobCountStatistics::default(), - ); - } - stats.insert( - CircuitIdRoundTuple::new(2, 2), - JobCountStatistics::default(), - ); - 
stats.insert( - CircuitIdRoundTuple::new(255, 3), - JobCountStatistics::default(), - ); - stats.insert( - CircuitIdRoundTuple::new(1, 4), - JobCountStatistics::default(), - ); + let circuits_prover_stats = AggregationRound::ALL_ROUNDS + .into_iter() + .flat_map(|round| { + let circuit_ids = round.circuit_ids(); + circuit_ids.into_iter().map(|circuit_id_round_tuple| { + (circuit_id_round_tuple, JobCountStatistics::default()) + }) + }) + .collect(); Self { - circuits_prover_stats: stats, + circuits_prover_stats, } } } diff --git a/core/lib/config/src/configs/consensus.rs b/core/lib/config/src/configs/consensus.rs index e5e01f880feb..759e13128338 100644 --- a/core/lib/config/src/configs/consensus.rs +++ b/core/lib/config/src/configs/consensus.rs @@ -1,6 +1,7 @@ use std::collections::{BTreeMap, BTreeSet}; -use secrecy::{ExposeSecret as _, Secret}; +use secrecy::ExposeSecret as _; +pub use secrecy::Secret; use zksync_basic_types::{ethabi, L2ChainId}; use zksync_concurrency::{limiter, time}; diff --git a/core/lib/config/src/configs/da_client/avail.rs b/core/lib/config/src/configs/da_client/avail.rs new file mode 100644 index 000000000000..e8d119787912 --- /dev/null +++ b/core/lib/config/src/configs/da_client/avail.rs @@ -0,0 +1,11 @@ +use serde::Deserialize; + +#[derive(Clone, Debug, PartialEq, Deserialize)] +pub struct AvailConfig { + pub api_node_url: String, + pub bridge_api_url: String, + pub seed: String, + pub app_id: u32, + pub timeout: usize, + pub max_retries: usize, +} diff --git a/core/lib/config/src/configs/da_client/mod.rs b/core/lib/config/src/configs/da_client/mod.rs new file mode 100644 index 000000000000..38337438c10e --- /dev/null +++ b/core/lib/config/src/configs/da_client/mod.rs @@ -0,0 +1,20 @@ +use serde::Deserialize; + +use crate::{AvailConfig, ObjectStoreConfig}; + +pub mod avail; + +pub const AVAIL_CLIENT_CONFIG_NAME: &str = "Avail"; +pub const OBJECT_STORE_CLIENT_CONFIG_NAME: &str = "ObjectStore"; + +#[derive(Debug, Clone, PartialEq)] +pub 
struct DAClientConfig { + pub client: DAClient, +} + +#[derive(Debug, Clone, PartialEq, Deserialize)] +#[serde(tag = "client")] +pub enum DAClient { + Avail(AvailConfig), + ObjectStore(ObjectStoreConfig), +} diff --git a/core/lib/config/src/configs/fri_prover.rs b/core/lib/config/src/configs/fri_prover.rs index f6a21beaa6dc..32558dd2244b 100644 --- a/core/lib/config/src/configs/fri_prover.rs +++ b/core/lib/config/src/configs/fri_prover.rs @@ -4,7 +4,7 @@ use serde::Deserialize; use crate::ObjectStoreConfig; -#[derive(Debug, Deserialize, Clone, PartialEq)] +#[derive(Debug, Deserialize, Clone, Copy, PartialEq)] pub enum SetupLoadMode { FromDisk, FromMemory, diff --git a/core/lib/config/src/configs/fri_prover_group.rs b/core/lib/config/src/configs/fri_prover_group.rs index 0fd752b5c286..294d4d1bbd44 100644 --- a/core/lib/config/src/configs/fri_prover_group.rs +++ b/core/lib/config/src/configs/fri_prover_group.rs @@ -1,7 +1,7 @@ use std::collections::HashSet; use serde::Deserialize; -use zksync_basic_types::basic_fri_types::CircuitIdRoundTuple; +use zksync_basic_types::basic_fri_types::{AggregationRound, CircuitIdRoundTuple}; /// Configuration for the grouping of specialized provers. #[derive(Debug, Deserialize, Clone, PartialEq)] @@ -81,6 +81,7 @@ impl FriProverGroupConfig { .flatten() .collect() } + /// check all_circuit ids present exactly once /// and For each aggregation round, check that the circuit ids are in the correct range. /// For example, in aggregation round 0, the circuit ids should be 1 to 15 + 255 (EIP4844). @@ -89,7 +90,6 @@ impl FriProverGroupConfig { /// In aggregation round 3, the circuit ids should be 255. /// In aggregation round 4, the circuit ids should be 1. 
pub fn validate(&self) -> anyhow::Result<()> { - let mut rounds: Vec> = vec![Vec::new(); 5]; let groups = [ &self.group_0, &self.group_1, @@ -107,110 +107,45 @@ impl FriProverGroupConfig { &self.group_13, &self.group_14, ]; - for group in groups { - for circuit_round in group { - let round = match rounds.get_mut(circuit_round.aggregation_round as usize) { - Some(round) => round, - None => anyhow::bail!( - "Invalid aggregation round {}.", - circuit_round.aggregation_round - ), - }; - round.push(circuit_round.clone()); - } - } - - for (round, round_data) in rounds.iter().enumerate() { - let circuit_ids: Vec = round_data.iter().map(|x| x.circuit_id).collect(); - let unique_circuit_ids: HashSet = circuit_ids.iter().copied().collect(); - let duplicates: HashSet = circuit_ids - .iter() - .filter(|id| circuit_ids.iter().filter(|x| x == id).count() > 1) - .copied() - .collect(); + let mut expected_circuit_ids: HashSet<_> = AggregationRound::ALL_ROUNDS + .into_iter() + .flat_map(|r| r.circuit_ids()) + .collect(); - let (missing_ids, not_in_range, expected_circuits_description) = match round { - 0 => { - let mut expected_range: Vec<_> = (1..=15).collect(); - expected_range.push(255); - let missing_ids: Vec<_> = expected_range - .iter() - .copied() - .filter(|id| !circuit_ids.contains(id)) - .collect(); - - let not_in_range: Vec<_> = circuit_ids - .iter() - .filter(|&id| !expected_range.contains(id)) - .collect(); - (missing_ids, not_in_range, "circuit IDs 1 to 15 and 255") - } - 1 => { - let expected_range: Vec<_> = (3..=18).collect(); - let missing_ids: Vec<_> = expected_range - .iter() - .copied() - .filter(|id| !circuit_ids.contains(id)) - .collect(); - let not_in_range: Vec<_> = circuit_ids - .iter() - .filter(|&id| !expected_range.contains(id)) - .collect(); - (missing_ids, not_in_range, "circuit IDs 3 to 18") - } - 2 => { - let expected_range: Vec<_> = vec![2]; - let missing_ids: Vec<_> = expected_range - .iter() - .copied() - .filter(|id| !circuit_ids.contains(id)) - 
.collect(); - let not_in_range: Vec<_> = circuit_ids - .iter() - .filter(|&id| !expected_range.contains(id)) - .collect(); - (missing_ids, not_in_range, "circuit ID 2") + let mut provided_circuit_ids = HashSet::new(); + for (group_id, group) in groups.iter().enumerate() { + for circuit_id_round in group.iter() { + // Make sure that it's a known circuit. + if !expected_circuit_ids.contains(circuit_id_round) { + anyhow::bail!( + "Group {} contains unexpected circuit id: {:?}", + group_id, + circuit_id_round + ); } - 3 => { - let expected_range: Vec<_> = vec![255]; - let missing_ids: Vec<_> = expected_range - .iter() - .copied() - .filter(|id| !circuit_ids.contains(id)) - .collect(); - let not_in_range: Vec<_> = circuit_ids - .iter() - .filter(|&id| !expected_range.contains(id)) - .collect(); - (missing_ids, not_in_range, "circuit ID 255") - } - 4 => { - let expected_range: Vec<_> = vec![1]; - let missing_ids: Vec<_> = expected_range - .iter() - .copied() - .filter(|id| !circuit_ids.contains(id)) - .collect(); - let not_in_range: Vec<_> = circuit_ids - .iter() - .filter(|&id| !expected_range.contains(id)) - .collect(); - (missing_ids, not_in_range, "circuit ID 1") - } - _ => { - anyhow::bail!("Unknown round {}", round); + // Remove this circuit from the expected set: later we will check that all circuits + // are present. + expected_circuit_ids.remove(circuit_id_round); + + // Make sure that the circuit is not duplicated. + if provided_circuit_ids.contains(circuit_id_round) { + anyhow::bail!( + "Group {} contains duplicate circuit id: {:?}", + group_id, + circuit_id_round + ); } - }; - if !missing_ids.is_empty() { - anyhow::bail!("Circuit IDs for round {round} are missing: {missing_ids:?}"); - } - if circuit_ids.len() != unique_circuit_ids.len() { - anyhow::bail!("Circuit IDs: {duplicates:?} should be unique for round {round}.",); - } - if !not_in_range.is_empty() { - anyhow::bail!("Aggregation round {round} should only contain {expected_circuits_description}. 
Ids out of range: {not_in_range:?}"); + provided_circuit_ids.insert(circuit_id_round.clone()); } } + // All the circuit IDs should have been removed from the expected set. + if !expected_circuit_ids.is_empty() { + anyhow::bail!( + "Some circuit ids are missing from the groups: {:?}", + expected_circuit_ids + ); + } + Ok(()) } } diff --git a/core/lib/config/src/configs/general.rs b/core/lib/config/src/configs/general.rs index 38ffd3d45fac..bb733510f77d 100644 --- a/core/lib/config/src/configs/general.rs +++ b/core/lib/config/src/configs/general.rs @@ -3,6 +3,7 @@ use crate::{ base_token_adjuster::BaseTokenAdjusterConfig, chain::{CircuitBreakerConfig, MempoolConfig, OperationsManagerConfig, StateKeeperConfig}, consensus::ConsensusConfig, + da_client::DAClientConfig, da_dispatcher::DADispatcherConfig, fri_prover_group::FriProverGroupConfig, house_keeper::HouseKeeperConfig, @@ -41,6 +42,7 @@ pub struct GeneralConfig { pub eth: Option, pub snapshot_creator: Option, pub observability: Option, + pub da_client_config: Option, pub da_dispatcher_config: Option, pub protective_reads_writer_config: Option, pub basic_witness_input_producer_config: Option, diff --git a/core/lib/config/src/configs/mod.rs b/core/lib/config/src/configs/mod.rs index b213060f7ced..1ad503e0687f 100644 --- a/core/lib/config/src/configs/mod.rs +++ b/core/lib/config/src/configs/mod.rs @@ -5,6 +5,7 @@ pub use self::{ commitment_generator::CommitmentGeneratorConfig, contract_verifier::ContractVerifierConfig, contracts::{ContractsConfig, EcosystemContracts}, + da_client::{avail::AvailConfig, DAClientConfig}, da_dispatcher::DADispatcherConfig, database::{DBConfig, PostgresConfig}, eth_sender::{EthConfig, GasAdjusterConfig}, @@ -38,6 +39,7 @@ mod commitment_generator; pub mod consensus; pub mod contract_verifier; pub mod contracts; +pub mod da_client; pub mod da_dispatcher; pub mod database; pub mod en_config; diff --git a/core/lib/config/src/lib.rs b/core/lib/config/src/lib.rs index 
ae8288fa72ea..9191edc39822 100644 --- a/core/lib/config/src/lib.rs +++ b/core/lib/config/src/lib.rs @@ -1,9 +1,10 @@ #![allow(clippy::upper_case_acronyms, clippy::derive_partial_eq_without_eq)] pub use crate::configs::{ - ApiConfig, BaseTokenAdjusterConfig, ContractVerifierConfig, ContractsConfig, - DADispatcherConfig, DBConfig, EthConfig, EthWatchConfig, ExternalProofIntegrationApiConfig, - GasAdjusterConfig, GenesisConfig, ObjectStoreConfig, PostgresConfig, SnapshotsCreatorConfig, + ApiConfig, AvailConfig, BaseTokenAdjusterConfig, ContractVerifierConfig, ContractsConfig, + DAClientConfig, DADispatcherConfig, DBConfig, EthConfig, EthWatchConfig, + ExternalProofIntegrationApiConfig, GasAdjusterConfig, GenesisConfig, ObjectStoreConfig, + PostgresConfig, SnapshotsCreatorConfig, }; pub mod configs; diff --git a/core/lib/config/src/testonly.rs b/core/lib/config/src/testonly.rs index f6e6fcafdb5e..4b26c94a0cd9 100644 --- a/core/lib/config/src/testonly.rs +++ b/core/lib/config/src/testonly.rs @@ -12,8 +12,12 @@ use zksync_basic_types::{ use zksync_consensus_utils::EncodeDist; use zksync_crypto_primitives::K256PrivateKey; -use crate::configs::{ - self, eth_sender::PubdataSendingMode, external_price_api_client::ForcedPriceClientConfig, +use crate::{ + configs::{ + self, da_client::DAClient::Avail, eth_sender::PubdataSendingMode, + external_price_api_client::ForcedPriceClientConfig, + }, + AvailConfig, }; trait Sample { @@ -924,6 +928,21 @@ impl Distribution for EncodeDist { } } +impl Distribution for EncodeDist { + fn sample(&self, rng: &mut R) -> configs::da_client::DAClientConfig { + configs::da_client::DAClientConfig { + client: Avail(AvailConfig { + api_node_url: self.sample(rng), + bridge_api_url: self.sample(rng), + seed: self.sample(rng), + app_id: self.sample(rng), + timeout: self.sample(rng), + max_retries: self.sample(rng), + }), + } + } +} + impl Distribution for EncodeDist { fn sample(&self, rng: &mut R) -> configs::da_dispatcher::DADispatcherConfig { 
configs::da_dispatcher::DADispatcherConfig { @@ -1123,6 +1142,7 @@ impl Distribution for EncodeDist { eth: self.sample(rng), snapshot_creator: self.sample(rng), observability: self.sample(rng), + da_client_config: self.sample(rng), da_dispatcher_config: self.sample(rng), protective_reads_writer_config: self.sample(rng), basic_witness_input_producer_config: self.sample(rng), diff --git a/core/lib/constants/src/contracts.rs b/core/lib/constants/src/contracts.rs index 3edfc3585d92..fe37ef6c69fd 100644 --- a/core/lib/constants/src/contracts.rs +++ b/core/lib/constants/src/contracts.rs @@ -100,6 +100,11 @@ pub const SHA256_PRECOMPILE_ADDRESS: Address = H160([ 0x00, 0x00, 0x00, 0x02, ]); +pub const SECP256R1_VERIFY_PRECOMPILE_ADDRESS: Address = H160([ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x01, 0x00, +]); + pub const EC_ADD_PRECOMPILE_ADDRESS: Address = H160([ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, diff --git a/core/lib/contracts/src/lib.rs b/core/lib/contracts/src/lib.rs index 5ab977a5dfd5..0d3a2a853f0d 100644 --- a/core/lib/contracts/src/lib.rs +++ b/core/lib/contracts/src/lib.rs @@ -215,7 +215,7 @@ pub fn known_codes_contract() -> Contract { } /// Reads bytecode from a given path. 
-fn read_bytecode_from_path(artifact_path: impl AsRef + std::fmt::Debug) -> Vec { +pub fn read_bytecode_from_path(artifact_path: impl AsRef + std::fmt::Debug) -> Vec { let artifact = read_file_to_json_value(&artifact_path); let bytecode = artifact["bytecode"] diff --git a/core/lib/da_client/src/types.rs b/core/lib/da_client/src/types.rs index 2b15cbe905ed..e7e4453d727e 100644 --- a/core/lib/da_client/src/types.rs +++ b/core/lib/da_client/src/types.rs @@ -35,6 +35,12 @@ pub struct DispatchResponse { pub blob_id: String, } +impl From for DispatchResponse { + fn from(blob_id: String) -> Self { + DispatchResponse { blob_id } + } +} + /// `InclusionData` is the data needed to verify on L1 that a blob is included in the DA layer. #[derive(Default, Serialize)] pub struct InclusionData { diff --git a/core/lib/dal/src/consensus/mod.rs b/core/lib/dal/src/consensus/mod.rs index 88620575c88a..f54938e8ec1a 100644 --- a/core/lib/dal/src/consensus/mod.rs +++ b/core/lib/dal/src/consensus/mod.rs @@ -401,11 +401,10 @@ impl ProtoRepr for proto::Transaction { } }, execute: Execute { - contract_address: Some( - required(&execute.contract_address) - .and_then(|x| parse_h160(x)) - .context("execute.contract_address")?, - ), + contract_address: execute + .contract_address + .as_ref() + .and_then(|x| parse_h160(x).ok()), calldata: required(&execute.calldata).context("calldata")?.clone(), value: required(&execute.value) .and_then(|x| parse_h256(x)) @@ -489,13 +488,7 @@ impl ProtoRepr for proto::Transaction { } }; let execute = proto::Execute { - contract_address: Some( - this.execute - .contract_address - .unwrap_or_default() - .as_bytes() - .into(), - ), + contract_address: this.execute.contract_address.map(|x| x.as_bytes().into()), calldata: Some(this.execute.calldata.clone()), value: Some(u256_to_h256(this.execute.value).as_bytes().into()), factory_deps: this.execute.factory_deps.clone(), diff --git a/core/lib/dal/src/consensus/proto/mod.proto b/core/lib/dal/src/consensus/proto/mod.proto 
index da9151f10f4d..3ea49e9c0cd6 100644 --- a/core/lib/dal/src/consensus/proto/mod.proto +++ b/core/lib/dal/src/consensus/proto/mod.proto @@ -102,7 +102,7 @@ message ProtocolUpgradeTxCommonData { } message Execute { - optional bytes contract_address = 1; // required; H160 + optional bytes contract_address = 1; // optional; H160 optional bytes calldata = 2; // required optional bytes value = 3; // required; U256 repeated bytes factory_deps = 4; diff --git a/core/lib/dal/src/consensus/tests.rs b/core/lib/dal/src/consensus/tests.rs index f21d09290a2f..7059f1a74ea0 100644 --- a/core/lib/dal/src/consensus/tests.rs +++ b/core/lib/dal/src/consensus/tests.rs @@ -17,7 +17,7 @@ use crate::tests::mock_protocol_upgrade_transaction; fn execute(rng: &mut impl Rng) -> Execute { Execute { - contract_address: rng.gen(), + contract_address: Some(rng.gen()), value: rng.gen::().into(), calldata: (0..10 * 32).map(|_| rng.gen()).collect(), // TODO: find a way to generate valid random bytecode. diff --git a/core/lib/dal/src/transactions_dal.rs b/core/lib/dal/src/transactions_dal.rs index bc44083ea319..0a72289b48a4 100644 --- a/core/lib/dal/src/transactions_dal.rs +++ b/core/lib/dal/src/transactions_dal.rs @@ -59,12 +59,7 @@ impl TransactionsDal<'_, '_> { l1_block_number: L1BlockNumber, ) -> DalResult<()> { let contract_address = tx.execute.contract_address; - let unwrapped_contract_address = contract_address.unwrap_or_default(); - let contract_address_b: &[u8] = if contract_address.is_none() { - &[] - } else { - unwrapped_contract_address.as_bytes() - }; + let contract_address_as_bytes = contract_address.map(|addr| addr.as_bytes().to_vec()); let tx_hash = tx.hash(); let tx_hash_bytes = tx_hash.as_bytes(); let json_data = serde_json::to_value(&tx.execute) @@ -149,7 +144,7 @@ impl TransactionsDal<'_, '_> { serial_id, full_fee, layer_2_tip_fee, - contract_address_b, + contract_address_as_bytes, l1_block_number.0 as i32, value, empty_address.as_bytes(), @@ -168,12 +163,7 @@ impl 
TransactionsDal<'_, '_> { pub async fn insert_system_transaction(&mut self, tx: &ProtocolUpgradeTx) -> DalResult<()> { let contract_address = tx.execute.contract_address; - let unwrapped_contract_address = contract_address.unwrap_or_default(); - let contract_address_b: &[u8] = if contract_address.is_none() { - &[] - } else { - unwrapped_contract_address.as_bytes() - }; + let contract_address_as_bytes = contract_address.map(|addr| addr.as_bytes().to_vec()); let tx_hash = tx.common_data.hash().0.to_vec(); let json_data = serde_json::to_value(&tx.execute) .unwrap_or_else(|_| panic!("cannot serialize tx {:?} to json", tx.common_data.hash())); @@ -250,7 +240,7 @@ impl TransactionsDal<'_, '_> { gas_per_pubdata_limit, json_data, upgrade_id, - contract_address_b, + contract_address_as_bytes, l1_block_number, value, &Address::default().0.to_vec(), @@ -297,12 +287,7 @@ impl TransactionsDal<'_, '_> { let initiator_address = tx.initiator_account(); let contract_address = tx.execute.contract_address; - let unwrapped_contract_address = contract_address.unwrap_or_default(); - let contract_address_b: &[u8] = if contract_address.is_none() { - &[] - } else { - unwrapped_contract_address.as_bytes() - }; + let contract_address_as_bytes = contract_address.map(|addr| addr.as_bytes().to_vec()); let json_data = serde_json::to_value(&tx.execute) .unwrap_or_else(|_| panic!("cannot serialize tx {:?} to json", tx.hash())); let gas_limit = u256_to_big_decimal(tx.common_data.fee.gas_limit); @@ -431,7 +416,7 @@ impl TransactionsDal<'_, '_> { input_data, &json_data, tx_format, - contract_address_b, + contract_address_as_bytes, value, &paymaster, &paymaster_input, @@ -716,14 +701,9 @@ impl TransactionsDal<'_, '_> { })?; let contract_address = transaction.execute.contract_address; - let unwrapped_contract_address = contract_address.unwrap_or_default(); - let contract_address_b: Vec = if contract_address.is_none() { - Vec::new() - } else { - unwrapped_contract_address.as_bytes().to_vec() - }; + let 
contract_address_as_bytes = contract_address.map(|addr| addr.as_bytes().to_vec()); l2_values.push(u256_to_big_decimal(transaction.execute.value)); - l2_contract_addresses.push(contract_address_b); + l2_contract_addresses.push(contract_address_as_bytes); l2_paymaster_input.push(&common_data.paymaster_params.paymaster_input[..]); l2_paymaster.push(common_data.paymaster_params.paymaster.as_bytes()); l2_hashes.push(tx_res.hash.as_bytes()); @@ -843,7 +823,7 @@ impl TransactionsDal<'_, '_> { &l2_inputs as &[&[u8]], &l2_datas, &l2_tx_formats, - &l2_contract_addresses, + &l2_contract_addresses as &[Option>], &l2_values, &l2_paymaster as &[&[u8]], &l2_paymaster_input as &[&[u8]], @@ -927,14 +907,9 @@ impl TransactionsDal<'_, '_> { })?; let contract_address = transaction.execute.contract_address; - let unwrapped_contract_address = contract_address.unwrap_or_default(); - let contract_address_b: Vec = if contract_address.is_none() { - Vec::new() - } else { - unwrapped_contract_address.as_bytes().to_vec() - }; + let contract_address_as_bytes = contract_address.map(|addr| addr.as_bytes().to_vec()); l2_values.push(u256_to_big_decimal(transaction.execute.value)); - l2_contract_addresses.push(contract_address_b); + l2_contract_addresses.push(contract_address_as_bytes); l2_paymaster_input.push(&common_data.paymaster_params.paymaster_input[..]); l2_paymaster.push(common_data.paymaster_params.paymaster.as_bytes()); l2_hashes.push(tx_res.hash.as_bytes()); @@ -1045,7 +1020,7 @@ impl TransactionsDal<'_, '_> { &l2_datas, &l2_refunded_gas, &l2_values, - &l2_contract_addresses, + &l2_contract_addresses as &[Option>], &l2_paymaster as &[&[u8]], &l2_paymaster_input as &[&[u8]], l2_block_number.0 as i32, @@ -1116,12 +1091,7 @@ impl TransactionsDal<'_, '_> { })?; let contract_address = transaction.execute.contract_address; - let unwrapped_contract_address = contract_address.unwrap_or_default(); - let contract_address_b: Vec = if contract_address.is_none() { - Vec::new() - } else { - 
unwrapped_contract_address.as_bytes().to_vec() - }; + let contract_address_as_bytes = contract_address.map(|addr| addr.as_bytes().to_vec()); let tx = &tx_res.transaction; l1_hashes.push(tx_res.hash.as_bytes()); l1_initiator_address.push(common_data.sender.as_bytes()); @@ -1135,7 +1105,7 @@ impl TransactionsDal<'_, '_> { l1_priority_op_id.push(common_data.serial_id.0 as i64); l1_full_fee.push(u256_to_big_decimal(common_data.full_fee)); l1_layer_2_tip_fee.push(u256_to_big_decimal(common_data.layer_2_tip_fee)); - l1_contract_address.push(contract_address_b); + l1_contract_address.push(contract_address_as_bytes); l1_l1_block_number.push(common_data.eth_block as i32); l1_value.push(u256_to_big_decimal(tx.execute.value)); l1_tx_format.push(common_data.tx_format() as i32); @@ -1242,7 +1212,7 @@ impl TransactionsDal<'_, '_> { &l1_priority_op_id, &l1_full_fee, &l1_layer_2_tip_fee, - &l1_contract_address, + &l1_contract_address as &[Option>], &l1_l1_block_number, &l1_value, &l1_tx_format, @@ -1413,12 +1383,7 @@ impl TransactionsDal<'_, '_> { })?; let contract_address = transaction.execute.contract_address; - let unwrapped_contract_address = contract_address.unwrap_or_default(); - let contract_address_b: Vec = if contract_address.is_none() { - Vec::new() - } else { - unwrapped_contract_address.as_bytes().to_vec() - }; + let contract_address_as_bytes = contract_address.map(|addr| addr.as_bytes().to_vec()); let tx = &tx_res.transaction; upgrade_hashes.push(tx_res.hash.as_bytes()); upgrade_initiator_address.push(common_data.sender.as_bytes()); @@ -1431,7 +1396,7 @@ impl TransactionsDal<'_, '_> { .unwrap_or_else(|_| panic!("cannot serialize tx {:?} to json", tx.hash())), ); upgrade_upgrade_id.push(common_data.upgrade_id as i32); - upgrade_contract_address.push(contract_address_b); + upgrade_contract_address.push(contract_address_as_bytes); upgrade_l1_block_number.push(common_data.eth_block as i32); upgrade_value.push(u256_to_big_decimal(tx.execute.value)); 
upgrade_tx_format.push(common_data.tx_format() as i32); @@ -1530,7 +1495,7 @@ impl TransactionsDal<'_, '_> { &upgrade_gas_per_pubdata_limit, &upgrade_data, &upgrade_upgrade_id, - &upgrade_contract_address, + &upgrade_contract_address as &[Option>], &upgrade_l1_block_number, &upgrade_value, &upgrade_tx_format, diff --git a/core/lib/default_da_clients/README.md b/core/lib/default_da_clients/README.md deleted file mode 100644 index 17ced715b268..000000000000 --- a/core/lib/default_da_clients/README.md +++ /dev/null @@ -1,11 +0,0 @@ -# Default DA Clients - -This crate contains the default implementations of the Data Availability clients. Default clients are maintained within -this repo because they are tightly coupled with the codebase, and would cause the circular dependency if they were to be -moved to the [hyperchain-da](https://github.com/matter-labs/hyperchain-da) repository. - -Currently, the following DataAvailability clients are implemented: - -- `NoDA client` that does not send or store any pubdata, it is needed to run the zkSync network in the "no-DA" mode - utilizing the DA framework. -- `Object Store client` that stores the pubdata in the Object Store(GCS). 
diff --git a/core/lib/default_da_clients/src/no_da/mod.rs b/core/lib/default_da_clients/src/no_da/mod.rs deleted file mode 100644 index 814cf30c2cbd..000000000000 --- a/core/lib/default_da_clients/src/no_da/mod.rs +++ /dev/null @@ -1,2 +0,0 @@ -pub mod client; -pub mod wiring_layer; diff --git a/core/lib/default_da_clients/src/object_store/config.rs b/core/lib/default_da_clients/src/object_store/config.rs deleted file mode 100644 index 285c39827c79..000000000000 --- a/core/lib/default_da_clients/src/object_store/config.rs +++ /dev/null @@ -1,12 +0,0 @@ -use zksync_config::ObjectStoreConfig; -use zksync_env_config::envy_load; - -#[derive(Debug)] -pub struct DAObjectStoreConfig(pub ObjectStoreConfig); - -impl DAObjectStoreConfig { - pub fn from_env() -> anyhow::Result { - let config = envy_load("object_store", "DA_CLIENT_OBJECT_STORE_")?; - Ok(Self(config)) - } -} diff --git a/core/lib/default_da_clients/src/object_store/mod.rs b/core/lib/default_da_clients/src/object_store/mod.rs deleted file mode 100644 index 1600941b0572..000000000000 --- a/core/lib/default_da_clients/src/object_store/mod.rs +++ /dev/null @@ -1,4 +0,0 @@ -pub mod client; -pub mod config; -mod types; -pub mod wiring_layer; diff --git a/core/lib/default_da_clients/src/object_store/types.rs b/core/lib/default_da_clients/src/object_store/types.rs deleted file mode 100644 index b8ec9303e71e..000000000000 --- a/core/lib/default_da_clients/src/object_store/types.rs +++ /dev/null @@ -1,38 +0,0 @@ -use std::io::{Read, Write}; - -use flate2::{read::GzDecoder, write::GzEncoder, Compression}; -use zksync_object_store::{Bucket, StoredObject, _reexports::BoxedError}; -use zksync_types::L1BatchNumber; - -/// Used as a wrapper for the pubdata to be stored in the GCS. 
-#[derive(Debug)] -pub struct StorablePubdata { - pub data: Vec, -} - -impl StoredObject for StorablePubdata { - const BUCKET: Bucket = Bucket::DataAvailability; - type Key<'a> = L1BatchNumber; - - fn encode_key(key: Self::Key<'_>) -> String { - format!("l1_batch_{key}_pubdata.gzip") - } - - fn serialize(&self) -> Result, BoxedError> { - let mut encoder = GzEncoder::new(Vec::new(), Compression::default()); - encoder.write_all(&self.data[..])?; - encoder.finish().map_err(From::from) - } - - fn deserialize(bytes: Vec) -> Result { - let mut decoder = GzDecoder::new(&bytes[..]); - let mut decompressed_bytes = Vec::new(); - decoder - .read_to_end(&mut decompressed_bytes) - .map_err(BoxedError::from)?; - - Ok(Self { - data: decompressed_bytes, - }) - } -} diff --git a/core/lib/env_config/src/da_client.rs b/core/lib/env_config/src/da_client.rs new file mode 100644 index 000000000000..f2da3b83f18a --- /dev/null +++ b/core/lib/env_config/src/da_client.rs @@ -0,0 +1,115 @@ +use zksync_config::configs::da_client::{ + DAClient, DAClientConfig, AVAIL_CLIENT_CONFIG_NAME, OBJECT_STORE_CLIENT_CONFIG_NAME, +}; + +use crate::{envy_load, FromEnv}; + +impl FromEnv for DAClientConfig { + fn from_env() -> anyhow::Result { + let client_tag = std::env::var("DA_CLIENT")?; + let client = match client_tag.as_str() { + AVAIL_CLIENT_CONFIG_NAME => DAClient::Avail(envy_load("da_avail_config", "DA_")?), + OBJECT_STORE_CLIENT_CONFIG_NAME => { + DAClient::ObjectStore(envy_load("da_object_store", "DA_")?) 
+ } + _ => anyhow::bail!("Unknown DA client name: {}", client_tag), + }; + + Ok(Self { client }) + } +} + +#[cfg(test)] +mod tests { + use zksync_config::{ + configs::{ + da_client::{DAClient, DAClient::ObjectStore}, + object_store::ObjectStoreMode::GCS, + }, + AvailConfig, DAClientConfig, ObjectStoreConfig, + }; + + use super::*; + use crate::test_utils::EnvMutex; + + static MUTEX: EnvMutex = EnvMutex::new(); + + fn expected_object_store_da_client_config(url: String, max_retries: u16) -> DAClientConfig { + DAClientConfig { + client: ObjectStore(ObjectStoreConfig { + mode: GCS { + bucket_base_url: url, + }, + max_retries, + local_mirror_path: None, + }), + } + } + + #[test] + fn from_env_object_store() { + let mut lock = MUTEX.lock(); + let config = r#" + DA_CLIENT="ObjectStore" + + DA_BUCKET_BASE_URL="sometestpath" + DA_MODE="GCS" + DA_MAX_RETRIES="5" + "#; + lock.set_env(config); + let actual = DAClientConfig::from_env().unwrap(); + assert_eq!( + actual, + expected_object_store_da_client_config("sometestpath".to_string(), 5) + ); + } + + fn expected_avail_da_layer_config( + api_node_url: &str, + bridge_api_url: &str, + seed: &str, + app_id: u32, + timeout: usize, + max_retries: usize, + ) -> DAClientConfig { + DAClientConfig { + client: DAClient::Avail(AvailConfig { + api_node_url: api_node_url.to_string(), + bridge_api_url: bridge_api_url.to_string(), + seed: seed.to_string(), + app_id, + timeout, + max_retries, + }), + } + } + + #[test] + fn from_env_avail_client() { + let mut lock = MUTEX.lock(); + let config = r#" + DA_CLIENT="Avail" + DA_API_NODE_URL="localhost:12345" + DA_BRIDGE_API_URL="localhost:54321" + DA_SEED="bottom drive obey lake curtain smoke basket hold race lonely fit walk" + DA_APP_ID="1" + DA_TIMEOUT="2" + DA_MAX_RETRIES="3" + "#; + + lock.set_env(config); + + let actual = DAClientConfig::from_env().unwrap(); + assert_eq!( + actual, + expected_avail_da_layer_config( + "localhost:12345", + "localhost:54321", + "bottom drive obey lake curtain 
smoke basket hold race lonely fit walk", + "1".parse::().unwrap(), + "2".parse::().unwrap(), + "3".parse::().unwrap(), + ) + ); + } +} diff --git a/core/lib/env_config/src/lib.rs b/core/lib/env_config/src/lib.rs index 8cfa7b58a31c..b72c2c5d5b94 100644 --- a/core/lib/env_config/src/lib.rs +++ b/core/lib/env_config/src/lib.rs @@ -32,6 +32,8 @@ mod test_utils; mod vm_runner; mod wallets; +mod da_client; + pub trait FromEnv: Sized { fn from_env() -> anyhow::Result; } diff --git a/core/lib/multivm/Cargo.toml b/core/lib/multivm/Cargo.toml index 28495da33021..6156c1138890 100644 --- a/core/lib/multivm/Cargo.toml +++ b/core/lib/multivm/Cargo.toml @@ -16,7 +16,7 @@ zk_evm_1_4_1.workspace = true zk_evm_1_4_0.workspace = true zk_evm_1_3_3.workspace = true zk_evm_1_3_1.workspace = true -vm2.workspace = true +zksync_vm2.workspace = true circuit_sequencer_api_1_3_3.workspace = true circuit_sequencer_api_1_4_0.workspace = true diff --git a/core/lib/multivm/src/versions/vm_1_4_1/tests/gas_limit.rs b/core/lib/multivm/src/versions/vm_1_4_1/tests/gas_limit.rs index 0ec921450daf..9dfda9e1a68c 100644 --- a/core/lib/multivm/src/versions/vm_1_4_1/tests/gas_limit.rs +++ b/core/lib/multivm/src/versions/vm_1_4_1/tests/gas_limit.rs @@ -21,10 +21,8 @@ fn test_tx_gas_limit_offset() { let gas_limit = 9999.into(); let tx = vm.rich_accounts[0].get_l2_tx_for_execute( Execute { - contract_address: Default::default(), - calldata: vec![], - value: Default::default(), - factory_deps: None, + contract_address: Some(Default::default()), + ..Default::default() }, Some(Fee { gas_limit, diff --git a/core/lib/multivm/src/versions/vm_1_4_2/tests/gas_limit.rs b/core/lib/multivm/src/versions/vm_1_4_2/tests/gas_limit.rs index 6a57fd07ae71..b84e9d32126c 100644 --- a/core/lib/multivm/src/versions/vm_1_4_2/tests/gas_limit.rs +++ b/core/lib/multivm/src/versions/vm_1_4_2/tests/gas_limit.rs @@ -20,10 +20,8 @@ fn test_tx_gas_limit_offset() { let gas_limit = 9999.into(); let tx = 
vm.rich_accounts[0].get_l2_tx_for_execute( Execute { - contract_address: Default::default(), - calldata: vec![], - value: Default::default(), - factory_deps: None, + contract_address: Some(Default::default()), + ..Default::default() }, Some(Fee { gas_limit, diff --git a/core/lib/multivm/src/versions/vm_boojum_integration/tests/gas_limit.rs b/core/lib/multivm/src/versions/vm_boojum_integration/tests/gas_limit.rs index 30a65097111d..637fd94c1c89 100644 --- a/core/lib/multivm/src/versions/vm_boojum_integration/tests/gas_limit.rs +++ b/core/lib/multivm/src/versions/vm_boojum_integration/tests/gas_limit.rs @@ -21,10 +21,8 @@ fn test_tx_gas_limit_offset() { let gas_limit = 9999.into(); let tx = vm.rich_accounts[0].get_l2_tx_for_execute( Execute { - contract_address: Default::default(), - calldata: vec![], - value: Default::default(), - factory_deps: None, + contract_address: Some(Default::default()), + ..Default::default() }, Some(Fee { gas_limit, diff --git a/core/lib/multivm/src/versions/vm_fast/circuits_tracer.rs b/core/lib/multivm/src/versions/vm_fast/circuits_tracer.rs index 061d91be60b7..de6ead71e655 100644 --- a/core/lib/multivm/src/versions/vm_fast/circuits_tracer.rs +++ b/core/lib/multivm/src/versions/vm_fast/circuits_tracer.rs @@ -1,5 +1,5 @@ use circuit_sequencer_api_1_5_0::{geometry_config::get_geometry_config, toolset::GeometryConfig}; -use vm2::{CycleStats, Opcode, OpcodeType, StateInterface, Tracer}; +use zksync_vm2::{CycleStats, Opcode, OpcodeType, StateInterface, Tracer}; use zksync_vm_interface::CircuitStatistic; use crate::vm_latest::tracers::circuits_capacity::*; diff --git a/core/lib/multivm/src/versions/vm_fast/events.rs b/core/lib/multivm/src/versions/vm_fast/events.rs index 798a1e12bdd8..2312c3d97b40 100644 --- a/core/lib/multivm/src/versions/vm_fast/events.rs +++ b/core/lib/multivm/src/versions/vm_fast/events.rs @@ -1,6 +1,6 @@ -use vm2::Event; use zksync_types::{L1BatchNumber, H256}; use zksync_utils::h256_to_account_address; +use 
zksync_vm2::Event; use crate::interface::VmEvent; diff --git a/core/lib/multivm/src/versions/vm_fast/glue.rs b/core/lib/multivm/src/versions/vm_fast/glue.rs index cbf22f9122b0..f24c82af11e9 100644 --- a/core/lib/multivm/src/versions/vm_fast/glue.rs +++ b/core/lib/multivm/src/versions/vm_fast/glue.rs @@ -3,9 +3,9 @@ use zksync_utils::u256_to_h256; use crate::glue::GlueFrom; -impl GlueFrom<&vm2::L2ToL1Log> for SystemL2ToL1Log { - fn glue_from(value: &vm2::L2ToL1Log) -> Self { - let vm2::L2ToL1Log { +impl GlueFrom<&zksync_vm2::L2ToL1Log> for SystemL2ToL1Log { + fn glue_from(value: &zksync_vm2::L2ToL1Log) -> Self { + let zksync_vm2::L2ToL1Log { key, value, is_service, diff --git a/core/lib/multivm/src/versions/vm_fast/tests/bootloader.rs b/core/lib/multivm/src/versions/vm_fast/tests/bootloader.rs index 8e1a273bc7b1..5c1158a5909d 100644 --- a/core/lib/multivm/src/versions/vm_fast/tests/bootloader.rs +++ b/core/lib/multivm/src/versions/vm_fast/tests/bootloader.rs @@ -1,5 +1,6 @@ use assert_matches::assert_matches; use zksync_types::U256; +use zksync_vm2::HeapId; use crate::{ interface::{ExecutionResult, Halt, TxExecutionMode, VmExecutionMode, VmInterfaceExt}, @@ -25,10 +26,7 @@ fn test_dummy_bootloader() { let correct_first_cell = U256::from_str_radix("123123123", 16).unwrap(); - verify_required_memory( - &vm.vm.inner.state, - vec![(correct_first_cell, vm2::FIRST_HEAP, 0)], - ); + verify_required_memory(&vm.vm.inner, vec![(correct_first_cell, HeapId::FIRST, 0)]); } #[test] diff --git a/core/lib/multivm/src/versions/vm_fast/tests/code_oracle.rs b/core/lib/multivm/src/versions/vm_fast/tests/code_oracle.rs index f29684260051..5e7b7748fb3a 100644 --- a/core/lib/multivm/src/versions/vm_fast/tests/code_oracle.rs +++ b/core/lib/multivm/src/versions/vm_fast/tests/code_oracle.rs @@ -210,7 +210,7 @@ fn refunds_in_code_oracle() { let account = &mut vm.rich_accounts[0]; if decommit { - let (_, is_fresh) = vm.vm.inner.world_diff.decommit_opcode( + let (_, is_fresh) = 
vm.vm.inner.world_diff_mut().decommit_opcode( &mut vm.vm.world, &mut CircuitsTracer::default(), h256_to_u256(normal_zkevm_bytecode_hash), diff --git a/core/lib/multivm/src/versions/vm_fast/tests/default_aa.rs b/core/lib/multivm/src/versions/vm_fast/tests/default_aa.rs index f809af81b165..c2ce02d39fe1 100644 --- a/core/lib/multivm/src/versions/vm_fast/tests/default_aa.rs +++ b/core/lib/multivm/src/versions/vm_fast/tests/default_aa.rs @@ -61,7 +61,7 @@ fn test_default_aa_interaction() { verify_required_storage( &expected_slots, &mut vm.vm.world.storage, - vm.vm.inner.world_diff.get_storage_state(), + vm.vm.inner.world_diff().get_storage_state(), ); let expected_fee = maximal_fee @@ -71,7 +71,7 @@ fn test_default_aa_interaction() { AccountTreeId::new(L2_BASE_TOKEN_ADDRESS), &vm.fee_account, &mut vm.vm.world.storage, - vm.vm.inner.world_diff.get_storage_state(), + vm.vm.inner.world_diff().get_storage_state(), ); assert_eq!( diff --git a/core/lib/multivm/src/versions/vm_fast/tests/gas_limit.rs b/core/lib/multivm/src/versions/vm_fast/tests/gas_limit.rs index e0c55c5a685a..3f0a47b980e2 100644 --- a/core/lib/multivm/src/versions/vm_fast/tests/gas_limit.rs +++ b/core/lib/multivm/src/versions/vm_fast/tests/gas_limit.rs @@ -18,7 +18,10 @@ fn test_tx_gas_limit_offset() { let gas_limit = 9999.into(); let tx = vm.rich_accounts[0].get_l2_tx_for_execute( - Execute::default(), + Execute { + contract_address: Some(Default::default()), + ..Default::default() + }, Some(Fee { gas_limit, ..Account::default_fee() @@ -27,7 +30,7 @@ fn test_tx_gas_limit_offset() { vm.vm.push_transaction(tx); - assert!(vm.vm.inner.state.previous_frames.is_empty()); + assert!(!vm.vm.has_previous_far_calls()); let gas_limit_from_memory = vm .vm .read_word_from_bootloader_heap(TX_DESCRIPTION_OFFSET + TX_GAS_LIMIT_OFFSET); diff --git a/core/lib/multivm/src/versions/vm_fast/tests/l1_tx_execution.rs b/core/lib/multivm/src/versions/vm_fast/tests/l1_tx_execution.rs index 2079629fadc9..5897ec5f2662 100644 --- 
a/core/lib/multivm/src/versions/vm_fast/tests/l1_tx_execution.rs +++ b/core/lib/multivm/src/versions/vm_fast/tests/l1_tx_execution.rs @@ -82,7 +82,7 @@ fn test_l1_tx_execution() { ] { assert_eq!( expected_value, - vm.vm.inner.world_diff.get_storage_state()[&( + vm.vm.inner.world_diff().get_storage_state()[&( *storage_location.address(), h256_to_u256(*storage_location.key()) )] diff --git a/core/lib/multivm/src/versions/vm_fast/tests/require_eip712.rs b/core/lib/multivm/src/versions/vm_fast/tests/require_eip712.rs index 89b8788f52fe..bc0a07381b00 100644 --- a/core/lib/multivm/src/versions/vm_fast/tests/require_eip712.rs +++ b/core/lib/multivm/src/versions/vm_fast/tests/require_eip712.rs @@ -26,7 +26,7 @@ impl VmTester { ); self.vm .inner - .world_diff + .world_diff() .get_storage_state() .get(&(L2_BASE_TOKEN_ADDRESS, h256_to_u256(*key.key()))) .copied() diff --git a/core/lib/multivm/src/versions/vm_fast/tests/rollbacks.rs b/core/lib/multivm/src/versions/vm_fast/tests/rollbacks.rs index 446de84b19cb..81d7303b74dc 100644 --- a/core/lib/multivm/src/versions/vm_fast/tests/rollbacks.rs +++ b/core/lib/multivm/src/versions/vm_fast/tests/rollbacks.rs @@ -74,7 +74,7 @@ fn test_vm_rollbacks() { ), ]); - assert_eq!(result_without_rollbacks, result_with_rollbacks); + pretty_assertions::assert_eq!(result_without_rollbacks, result_with_rollbacks); } #[test] diff --git a/core/lib/multivm/src/versions/vm_fast/tests/tester/transaction_test_info.rs b/core/lib/multivm/src/versions/vm_fast/tests/tester/transaction_test_info.rs index 359ba38d07a5..6e51dcd2615f 100644 --- a/core/lib/multivm/src/versions/vm_fast/tests/tester/transaction_test_info.rs +++ b/core/lib/multivm/src/versions/vm_fast/tests/tester/transaction_test_info.rs @@ -1,3 +1,5 @@ +use std::fmt; + use zksync_types::{ExecuteTransactionCommon, Transaction, H160, U256}; use super::VmTester; @@ -7,7 +9,7 @@ use crate::{ VmExecutionMode, VmExecutionResultAndLogs, VmInterface, VmInterfaceExt, VmInterfaceHistoryEnabled, 
VmRevertReason, }, - vm_fast::{circuits_tracer::CircuitsTracer, vm::World, Vm}, + vm_fast::Vm, }; #[derive(Debug, Clone)] @@ -177,12 +179,12 @@ impl TransactionTestInfo { // TODO this doesn't include all the state of ModifiedWorld #[derive(Debug)] struct VmStateDump { - state: vm2::State>, + state: S, storage_writes: Vec<((H160, U256), U256)>, - events: Box<[vm2::Event]>, + events: Box<[zksync_vm2::Event]>, } -impl PartialEq for VmStateDump { +impl PartialEq for VmStateDump { fn eq(&self, other: &Self) -> bool { self.state == other.state && self.storage_writes == other.storage_writes @@ -191,17 +193,17 @@ impl PartialEq for VmStateDump { } impl Vm { - fn dump_state(&self) -> VmStateDump { + fn dump_state(&self) -> VmStateDump { VmStateDump { - state: self.inner.state.clone(), + state: self.inner.dump_state(), storage_writes: self .inner - .world_diff + .world_diff() .get_storage_state() .iter() .map(|(k, v)| (*k, *v)) .collect(), - events: self.inner.world_diff.events().into(), + events: self.inner.world_diff().events().into(), } } } diff --git a/core/lib/multivm/src/versions/vm_fast/tests/tester/vm_tester.rs b/core/lib/multivm/src/versions/vm_fast/tests/tester/vm_tester.rs index 335ec752c7d4..8071bcf51d4a 100644 --- a/core/lib/multivm/src/versions/vm_fast/tests/tester/vm_tester.rs +++ b/core/lib/multivm/src/versions/vm_fast/tests/tester/vm_tester.rs @@ -1,6 +1,5 @@ use std::{cell::RefCell, rc::Rc}; -use vm2::WorldDiff; use zksync_contracts::BaseSystemContracts; use zksync_test_account::{Account, TxType}; use zksync_types::{ @@ -13,6 +12,7 @@ use zksync_types::{ StorageKey, U256, }; use zksync_utils::{bytecode::hash_bytecode, u256_to_h256}; +use zksync_vm2::WorldDiff; use crate::{ interface::{ @@ -53,7 +53,7 @@ impl VmTester { pub(crate) fn reset_with_empty_storage(&mut self) { self.storage = Rc::new(RefCell::new(get_empty_storage())); - self.vm.inner.world_diff = WorldDiff::default(); + *self.vm.inner.world_diff_mut() = WorldDiff::default(); 
self.reset_state(false); } @@ -78,7 +78,7 @@ impl VmTester { { let mut storage = storage.borrow_mut(); // Commit pending storage changes (old VM versions commit them on successful execution) - for (&(address, slot), &value) in self.vm.inner.world_diff.get_storage_state() { + for (&(address, slot), &value) in self.vm.inner.world_diff().get_storage_state() { let key = StorageKey::new(AccountTreeId::new(address), u256_to_h256(slot)); storage.set_value(key, u256_to_h256(value)); } diff --git a/core/lib/multivm/src/versions/vm_fast/tests/transfer.rs b/core/lib/multivm/src/versions/vm_fast/tests/transfer.rs index 3370f8bce354..662e014ef85b 100644 --- a/core/lib/multivm/src/versions/vm_fast/tests/transfer.rs +++ b/core/lib/multivm/src/versions/vm_fast/tests/transfer.rs @@ -92,7 +92,7 @@ fn test_send_or_transfer(test_option: TestOptions) { AccountTreeId::new(L2_BASE_TOKEN_ADDRESS), &recipient_address, &mut vm.vm.world.storage, - vm.vm.inner.world_diff.get_storage_state(), + vm.vm.inner.world_diff().get_storage_state(), ); assert_eq!(new_recipient_balance, value); diff --git a/core/lib/multivm/src/versions/vm_fast/tests/upgrade.rs b/core/lib/multivm/src/versions/vm_fast/tests/upgrade.rs index f0ed2fb30c87..ba4863f7c457 100644 --- a/core/lib/multivm/src/versions/vm_fast/tests/upgrade.rs +++ b/core/lib/multivm/src/versions/vm_fast/tests/upgrade.rs @@ -164,7 +164,7 @@ fn test_force_deploy_upgrade() { verify_required_storage( &expected_slots, &mut *vm.storage.borrow_mut(), - vm.vm.inner.world_diff.get_storage_state(), + vm.vm.inner.world_diff().get_storage_state(), ); } @@ -223,7 +223,7 @@ fn test_complex_upgrader() { verify_required_storage( &expected_slots, &mut *vm.storage.borrow_mut(), - vm.vm.inner.world_diff.get_storage_state(), + vm.vm.inner.world_diff().get_storage_state(), ); } diff --git a/core/lib/multivm/src/versions/vm_fast/tests/utils.rs b/core/lib/multivm/src/versions/vm_fast/tests/utils.rs index d75ae12c30c1..d91e13076514 100644 --- 
a/core/lib/multivm/src/versions/vm_fast/tests/utils.rs +++ b/core/lib/multivm/src/versions/vm_fast/tests/utils.rs @@ -2,7 +2,6 @@ use std::collections::BTreeMap; use ethabi::Contract; use once_cell::sync::Lazy; -use vm2::{instruction_handlers::HeapInterface, HeapId, State}; use zksync_contracts::{ load_contract, read_bytecode, read_zbin_bytecode, BaseSystemContracts, SystemContractCode, }; @@ -11,18 +10,19 @@ use zksync_types::{ U256, }; use zksync_utils::{bytecode::hash_bytecode, bytes_to_be_words, h256_to_u256, u256_to_h256}; +use zksync_vm2::{HeapId, StateInterface}; use crate::interface::storage::ReadStorage; pub(crate) static BASE_SYSTEM_CONTRACTS: Lazy = Lazy::new(BaseSystemContracts::load_from_disk); -pub(crate) fn verify_required_memory( - state: &State, +pub(crate) fn verify_required_memory( + state: &impl StateInterface, required_values: Vec<(U256, HeapId, u32)>, ) { for (required_value, memory_page, cell) in required_values { - let current_value = state.heaps[memory_page].read_u256(cell * 32); + let current_value = state.read_heap_u256(memory_page, cell * 32); assert_eq!(current_value, required_value); } } diff --git a/core/lib/multivm/src/versions/vm_fast/vm.rs b/core/lib/multivm/src/versions/vm_fast/vm.rs index 0cc6f1baa688..5b6a6708f9b7 100644 --- a/core/lib/multivm/src/versions/vm_fast/vm.rs +++ b/core/lib/multivm/src/versions/vm_fast/vm.rs @@ -1,9 +1,5 @@ use std::{collections::HashMap, fmt}; -use vm2::{ - decode::decode_program, fat_pointer::FatPointer, instruction_handlers::HeapInterface, - ExecutionEnd, Program, Settings, Tracer, VirtualMachine, -}; use zk_evm_1_5_0::zkevm_opcode_defs::system_params::INITIAL_FRAME_FORMAL_EH_LOCATION; use zksync_contracts::SystemContractCode; use zksync_types::{ @@ -19,6 +15,10 @@ use zksync_types::{ L2_BASE_TOKEN_ADDRESS, U256, }; use zksync_utils::{bytecode::hash_bytecode, h256_to_u256, u256_to_h256}; +use zksync_vm2::{ + decode::decode_program, CallframeInterface, ExecutionEnd, FatPointer, HeapId, Program, + 
Settings, StateInterface, Tracer, VirtualMachine, +}; use super::{ bootloader_state::{BootloaderState, BootloaderStateSnapshot}, @@ -79,7 +79,7 @@ impl Vm { operator_suggested_refund: 0, }; let mut last_tx_result = None; - let mut pubdata_before = self.inner.world_diff.pubdata() as u32; + let mut pubdata_before = self.inner.world_diff().pubdata() as u32; let result = loop { let hook = match self.inner.run(&mut self.world, tracer) { @@ -93,7 +93,7 @@ impl Vm { } ExecutionEnd::Panicked => { break ExecutionResult::Halt { - reason: if self.inner.state.current_frame.gas == 0 { + reason: if self.gas_remaining() == 0 { Halt::BootloaderOutOfGas } else { Halt::VMPanic @@ -125,7 +125,7 @@ impl Vm { ) .as_u64(); - let pubdata_published = self.inner.world_diff.pubdata() as u32; + let pubdata_published = self.inner.world_diff().pubdata() as u32; refunds.operator_suggested_refund = compute_refund( &self.batch_env, @@ -161,10 +161,7 @@ impl Vm { let result = self.get_hook_params()[0]; let value = self.get_hook_params()[1]; let fp = FatPointer::from(value); - assert_eq!(fp.offset, 0); - - let return_data = self.inner.state.heaps[fp.memory_page] - .read_range_big_endian(fp.start..fp.start + fp.length); + let return_data = self.read_bytes_from_heap(fp); last_tx_result = Some(if result.is_zero() { ExecutionResult::Revert { @@ -190,7 +187,7 @@ impl Vm { } let events = - merge_events(self.inner.world_diff.events(), self.batch_env.number); + merge_events(self.inner.world_diff().events(), self.batch_env.number); let published_bytecodes = events .iter() @@ -276,7 +273,20 @@ impl Vm { /// Should only be used when the bootloader is executing (e.g., when handling hooks). 
pub(crate) fn read_word_from_bootloader_heap(&self, word: usize) -> U256 { - self.inner.state.heaps[vm2::FIRST_HEAP].read_u256(word as u32 * 32) + let start_address = word as u32 * 32; + self.inner.read_heap_u256(HeapId::FIRST, start_address) + } + + fn read_bytes_from_heap(&self, ptr: FatPointer) -> Vec { + assert_eq!(ptr.offset, 0); + (ptr.start..ptr.start + ptr.length) + .map(|addr| self.inner.read_heap_byte(ptr.memory_page, addr)) + .collect() + } + + pub(crate) fn has_previous_far_calls(&mut self) -> bool { + let callframe_count = self.inner.number_of_callframes(); + (1..callframe_count).any(|i| !self.inner.callframe(i).is_near_call()) } /// Should only be used when the bootloader is executing (e.g., when handling hooks). @@ -284,12 +294,15 @@ impl Vm { &mut self, memory: impl IntoIterator, ) { - assert!(self.inner.state.previous_frames.is_empty()); + assert!( + !self.has_previous_far_calls(), + "Cannot write to bootloader heap when not in root call frame" + ); + for (slot, value) in memory { + let start_address = slot as u32 * 32; self.inner - .state - .heaps - .write_u256(vm2::FIRST_HEAP, slot as u32 * 32, value); + .write_heap_u256(HeapId::FIRST, start_address, value); } } @@ -317,7 +330,7 @@ impl Vm { } else { compress_bytecodes(&tx.factory_deps, |hash| { self.inner - .world_diff + .world_diff() .get_storage_state() .get(&(KNOWN_CODES_STORAGE_ADDRESS, h256_to_u256(hash))) .map(|x| !x.is_zero()) @@ -351,7 +364,7 @@ impl Vm { } let storage = &mut self.world.storage; - let diffs = self.inner.world_diff.get_storage_changes().map( + let diffs = self.inner.world_diff().get_storage_changes().map( move |((address, key), (initial_value, final_value))| { let storage_key = StorageKey::new(AccountTreeId::new(address), u256_to_h256(key)); StateDiffRecord { @@ -375,11 +388,11 @@ impl Vm { } pub(crate) fn decommitted_hashes(&self) -> impl Iterator + '_ { - self.inner.world_diff.decommitted_hashes() + self.inner.world_diff().decommitted_hashes() } - pub(super) fn 
gas_remaining(&self) -> u32 { - self.inner.state.current_frame.gas + pub(super) fn gas_remaining(&mut self) -> u32 { + self.inner.current_frame().gas() } } @@ -429,12 +442,13 @@ impl Vm { }, ); - inner.state.current_frame.sp = 0; - + inner.current_frame().set_stack_pointer(0); // The bootloader writes results to high addresses in its heap, so it makes sense to preallocate it. - inner.state.current_frame.heap_size = u32::MAX; - inner.state.current_frame.aux_heap_size = u32::MAX; - inner.state.current_frame.exception_handler = INITIAL_FRAME_FORMAL_EH_LOCATION; + inner.current_frame().set_heap_bound(u32::MAX); + inner.current_frame().set_aux_heap_bound(u32::MAX); + inner + .current_frame() + .set_exception_handler(INITIAL_FRAME_FORMAL_EH_LOCATION); let mut this = Self { world: World::new(storage, program_cache), @@ -457,7 +471,7 @@ impl Vm { // visible for testing pub(super) fn get_current_execution_state(&self) -> CurrentExecutionState { - let world_diff = &self.inner.world_diff; + let world_diff = self.inner.world_diff(); let events = merge_events(world_diff.events(), self.batch_env.number); let user_l2_to_l1_logs = extract_l2tol1logs_from_l1_messenger(&events) @@ -489,7 +503,7 @@ impl Vm { } fn delete_history_if_appropriate(&mut self) { - if self.snapshot.is_none() && self.inner.state.previous_frames.is_empty() { + if self.snapshot.is_none() && !self.has_previous_far_calls() { self.inner.delete_history(); } } @@ -515,8 +529,8 @@ impl VmInterface for Vm { } let mut tracer = CircuitsTracer::default(); - let start = self.inner.world_diff.snapshot(); - let pubdata_before = self.inner.world_diff.pubdata(); + let start = self.inner.world_diff().snapshot(); + let pubdata_before = self.inner.world_diff().pubdata(); let gas_before = self.gas_remaining(); let (result, refunds) = self.run(execution_mode, &mut tracer, track_refunds); @@ -530,7 +544,7 @@ impl VmInterface for Vm { } else { let storage_logs = self .inner - .world_diff + .world_diff() 
.get_storage_changes_after(&start) .map(|((address, key), change)| StorageLogWithPreviousValue { log: StorageLog { @@ -546,7 +560,7 @@ impl VmInterface for Vm { }) .collect(); let events = merge_events( - self.inner.world_diff.events_after(&start), + self.inner.world_diff().events_after(&start), self.batch_env.number, ); let user_l2_to_l1_logs = extract_l2tol1logs_from_l1_messenger(&events) @@ -556,7 +570,7 @@ impl VmInterface for Vm { .collect(); let system_l2_to_l1_logs = self .inner - .world_diff + .world_diff() .l2_to_l1_logs_after(&start) .iter() .map(|x| x.glue_into()) @@ -570,7 +584,7 @@ impl VmInterface for Vm { } }; - let pubdata_after = self.inner.world_diff.pubdata(); + let pubdata_after = self.inner.world_diff().pubdata(); let circuit_statistic = tracer.circuit_statistic(); let gas_remaining = self.gas_remaining(); VmExecutionResultAndLogs { @@ -646,7 +660,7 @@ impl VmInterface for Vm { #[derive(Debug)] struct VmSnapshot { - vm_snapshot: vm2::Snapshot, + vm_snapshot: zksync_vm2::Snapshot, bootloader_snapshot: BootloaderStateSnapshot, gas_for_account_validation: u32, } @@ -756,7 +770,7 @@ impl World { } } -impl vm2::StorageInterface for World { +impl zksync_vm2::StorageInterface for World { fn read_storage(&mut self, contract: H160, key: U256) -> Option { let key = &StorageKey::new(AccountTreeId::new(contract), u256_to_h256(key)); if self.storage.is_write_initial(key) { @@ -801,7 +815,7 @@ impl vm2::StorageInterface for World { } } -impl vm2::World for World { +impl zksync_vm2::World for World { fn decommit(&mut self, hash: U256) -> Program { self.program_cache .entry(hash) diff --git a/core/lib/multivm/src/versions/vm_latest/tests/gas_limit.rs b/core/lib/multivm/src/versions/vm_latest/tests/gas_limit.rs index 34e1e2d25f31..cc9aac5bb91b 100644 --- a/core/lib/multivm/src/versions/vm_latest/tests/gas_limit.rs +++ b/core/lib/multivm/src/versions/vm_latest/tests/gas_limit.rs @@ -21,7 +21,10 @@ fn test_tx_gas_limit_offset() { let gas_limit = 9999.into(); let 
tx = vm.rich_accounts[0].get_l2_tx_for_execute( - Execute::default(), + Execute { + contract_address: Some(Default::default()), + ..Default::default() + }, Some(Fee { gas_limit, ..Account::default_fee() diff --git a/core/lib/multivm/src/versions/vm_latest/tracers/utils.rs b/core/lib/multivm/src/versions/vm_latest/tracers/utils.rs index 1ecb75c28071..0a11f5d3f849 100644 --- a/core/lib/multivm/src/versions/vm_latest/tracers/utils.rs +++ b/core/lib/multivm/src/versions/vm_latest/tracers/utils.rs @@ -6,7 +6,8 @@ use zk_evm_1_5_0::{ }, }; use zksync_system_constants::{ - ECRECOVER_PRECOMPILE_ADDRESS, KECCAK256_PRECOMPILE_ADDRESS, SHA256_PRECOMPILE_ADDRESS, + ECRECOVER_PRECOMPILE_ADDRESS, KECCAK256_PRECOMPILE_ADDRESS, + SECP256R1_VERIFY_PRECOMPILE_ADDRESS, SHA256_PRECOMPILE_ADDRESS, }; use zksync_types::U256; use zksync_utils::u256_to_h256; @@ -187,6 +188,7 @@ pub(crate) fn computational_gas_price( if address == KECCAK256_PRECOMPILE_ADDRESS || address == SHA256_PRECOMPILE_ADDRESS || address == ECRECOVER_PRECOMPILE_ADDRESS + || address == SECP256R1_VERIFY_PRECOMPILE_ADDRESS { data.src1_value.value.low_u32() } else { diff --git a/core/lib/multivm/src/versions/vm_refunds_enhancement/tests/gas_limit.rs b/core/lib/multivm/src/versions/vm_refunds_enhancement/tests/gas_limit.rs index 0ea1669cf217..1ff6ce12557f 100644 --- a/core/lib/multivm/src/versions/vm_refunds_enhancement/tests/gas_limit.rs +++ b/core/lib/multivm/src/versions/vm_refunds_enhancement/tests/gas_limit.rs @@ -21,10 +21,8 @@ fn test_tx_gas_limit_offset() { let gas_limit = 9999.into(); let tx = vm.rich_accounts[0].get_l2_tx_for_execute( Execute { - contract_address: Default::default(), - calldata: vec![], - value: Default::default(), - factory_deps: None, + contract_address: Some(Default::default()), + ..Default::default() }, Some(Fee { gas_limit, diff --git a/core/lib/multivm/src/versions/vm_virtual_blocks/tests/gas_limit.rs b/core/lib/multivm/src/versions/vm_virtual_blocks/tests/gas_limit.rs index 
01ebe4c0d225..e51b8cab570e 100644 --- a/core/lib/multivm/src/versions/vm_virtual_blocks/tests/gas_limit.rs +++ b/core/lib/multivm/src/versions/vm_virtual_blocks/tests/gas_limit.rs @@ -21,10 +21,8 @@ fn test_tx_gas_limit_offset() { let gas_limit = 9999.into(); let tx = vm.rich_accounts[0].get_l2_tx_for_execute( Execute { - contract_address: Default::default(), - calldata: vec![], - value: Default::default(), - factory_deps: None, + contract_address: Some(Default::default()), + ..Default::default() }, Some(Fee { gas_limit, diff --git a/core/lib/protobuf_config/src/da_client.rs b/core/lib/protobuf_config/src/da_client.rs new file mode 100644 index 000000000000..2009d32db17c --- /dev/null +++ b/core/lib/protobuf_config/src/da_client.rs @@ -0,0 +1,61 @@ +use anyhow::Context; +use zksync_config::{ + configs::{ + da_client::DAClient::{Avail, ObjectStore}, + {self}, + }, + AvailConfig, +}; +use zksync_protobuf::{required, ProtoRepr}; + +use crate::proto::{da_client as proto, object_store as object_store_proto}; + +impl ProtoRepr for proto::DataAvailabilityClient { + type Type = configs::DAClientConfig; + + fn read(&self) -> anyhow::Result { + let config = required(&self.config).context("config")?; + + let client = match config { + proto::data_availability_client::Config::Avail(conf) => Avail(AvailConfig { + api_node_url: required(&conf.api_node_url) + .context("api_node_url")? + .clone(), + bridge_api_url: required(&conf.bridge_api_url) + .context("bridge_api_url")? + .clone(), + seed: required(&conf.seed).context("seed")?.clone(), + app_id: *required(&conf.app_id).context("app_id")?, + timeout: *required(&conf.timeout).context("timeout")? as usize, + max_retries: *required(&conf.max_retries).context("max_retries")? as usize, + }), + proto::data_availability_client::Config::ObjectStore(conf) => { + ObjectStore(object_store_proto::ObjectStore::read(conf)?) 
+ } + }; + + Ok(configs::DAClientConfig { client }) + } + + fn build(this: &Self::Type) -> Self { + match &this.client { + Avail(config) => Self { + config: Some(proto::data_availability_client::Config::Avail( + proto::AvailConfig { + api_node_url: Some(config.api_node_url.clone()), + bridge_api_url: Some(config.bridge_api_url.clone()), + seed: Some(config.seed.clone()), + app_id: Some(config.app_id), + timeout: Some(config.timeout as u64), + max_retries: Some(config.max_retries as u64), + }, + )), + }, + ObjectStore(config) => Self { + config: Some(proto::data_availability_client::Config::ObjectStore( + object_store_proto::ObjectStore::build(config), + )), + }, + } + } +} diff --git a/core/lib/protobuf_config/src/general.rs b/core/lib/protobuf_config/src/general.rs index 87bca88db387..b73539a0897f 100644 --- a/core/lib/protobuf_config/src/general.rs +++ b/core/lib/protobuf_config/src/general.rs @@ -28,6 +28,7 @@ impl ProtoRepr for proto::GeneralConfig { eth: read_optional_repr(&self.eth), snapshot_creator: read_optional_repr(&self.snapshot_creator), observability: read_optional_repr(&self.observability), + da_client_config: read_optional_repr(&self.da_client), da_dispatcher_config: read_optional_repr(&self.da_dispatcher), protective_reads_writer_config: read_optional_repr(&self.protective_reads_writer), basic_witness_input_producer_config: read_optional_repr( @@ -76,6 +77,7 @@ impl ProtoRepr for proto::GeneralConfig { eth: this.eth.as_ref().map(ProtoRepr::build), snapshot_creator: this.snapshot_creator.as_ref().map(ProtoRepr::build), observability: this.observability.as_ref().map(ProtoRepr::build), + da_client: this.da_client_config.as_ref().map(ProtoRepr::build), da_dispatcher: this.da_dispatcher_config.as_ref().map(ProtoRepr::build), protective_reads_writer: this .protective_reads_writer_config diff --git a/core/lib/protobuf_config/src/lib.rs b/core/lib/protobuf_config/src/lib.rs index f4d0188ea20f..a4822edbe8e4 100644 --- a/core/lib/protobuf_config/src/lib.rs 
+++ b/core/lib/protobuf_config/src/lib.rs @@ -29,6 +29,7 @@ mod pruning; mod secrets; mod snapshots_creator; +mod da_client; mod external_price_api_client; mod external_proof_integration_api; mod prover_job_monitor; diff --git a/core/lib/protobuf_config/src/proto/config/da_client.proto b/core/lib/protobuf_config/src/proto/config/da_client.proto new file mode 100644 index 000000000000..ef58fbcecb4f --- /dev/null +++ b/core/lib/protobuf_config/src/proto/config/da_client.proto @@ -0,0 +1,22 @@ +syntax = "proto3"; + +package zksync.config.da_client; + +import "zksync/config/object_store.proto"; + +message AvailConfig { + optional string api_node_url = 1; + optional string bridge_api_url = 2; + optional string seed = 3; + optional uint32 app_id = 4; + optional uint64 timeout = 5; + optional uint64 max_retries = 6; +} + +message DataAvailabilityClient { + // oneof in protobuf allows for None + oneof config { + AvailConfig avail = 1; + object_store.ObjectStore object_store = 2; + } +} diff --git a/core/lib/protobuf_config/src/proto/config/general.proto b/core/lib/protobuf_config/src/proto/config/general.proto index 3595468949b1..ee70b61b18b3 100644 --- a/core/lib/protobuf_config/src/proto/config/general.proto +++ b/core/lib/protobuf_config/src/proto/config/general.proto @@ -25,6 +25,7 @@ import "zksync/config/external_price_api_client.proto"; import "zksync/config/external_proof_integration_api.proto"; import "zksync/core/consensus.proto"; import "zksync/config/prover_job_monitor.proto"; +import "zksync/config/da_client.proto"; message GeneralConfig { optional database.Postgres postgres = 1; @@ -60,4 +61,5 @@ message GeneralConfig { optional external_proof_integration_api.ExternalProofIntegrationApi external_proof_integration_api = 43; optional experimental.Vm experimental_vm = 44; optional prover_job_monitor.ProverJobMonitor prover_job_monitor = 45; + optional da_client.DataAvailabilityClient da_client = 46; } diff --git a/core/lib/protobuf_config/src/tests.rs 
b/core/lib/protobuf_config/src/tests.rs index 695f404f64d1..d653b9b92bfd 100644 --- a/core/lib/protobuf_config/src/tests.rs +++ b/core/lib/protobuf_config/src/tests.rs @@ -42,6 +42,7 @@ fn test_encoding() { test_encode_all_formats::>(rng); test_encode_all_formats::>(rng); test_encode_all_formats::>(rng); + test_encode_all_formats::>(rng); test_encode_all_formats::>(rng); test_encode_all_formats::>(rng); test_encode_all_formats::>(rng); diff --git a/core/lib/state/src/lib.rs b/core/lib/state/src/lib.rs index 205579552a30..fa06599357ca 100644 --- a/core/lib/state/src/lib.rs +++ b/core/lib/state/src/lib.rs @@ -21,6 +21,7 @@ pub use self::{ shadow_storage::ShadowStorage, storage_factory::{ BatchDiff, CommonStorage, OwnedStorage, ReadStorageFactory, RocksdbWithMemory, + SnapshotStorage, }, }; diff --git a/core/lib/state/src/storage_factory/metrics.rs b/core/lib/state/src/storage_factory/metrics.rs new file mode 100644 index 000000000000..822db90820ce --- /dev/null +++ b/core/lib/state/src/storage_factory/metrics.rs @@ -0,0 +1,37 @@ +use std::time::Duration; + +use vise::{Buckets, EncodeLabelSet, EncodeLabelValue, Family, Histogram, Metrics, Unit}; + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, EncodeLabelValue, EncodeLabelSet)] +#[metrics(label = "stage", rename_all = "snake_case")] +pub(super) enum SnapshotStage { + BatchHeader, + ProtectiveReads, + TouchedSlots, + PreviousValues, + InitialWrites, + Bytecodes, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, EncodeLabelValue, EncodeLabelSet)] +#[metrics(label = "kind", rename_all = "snake_case")] +pub(super) enum AccessKind { + ReadValue, + IsWriteInitial, + LoadFactoryDep, + GetEnumerationIndex, +} + +#[derive(Debug, Metrics)] +#[metrics(prefix = "state_snapshot")] +pub(super) struct SnapshotMetrics { + /// Latency of loading a batch snapshot split by stage. 
+ #[metrics(buckets = Buckets::LATENCIES, unit = Unit::Seconds)] + pub load_latency: Family>, + /// Latency of accessing the fallback storage for a batch snapshot. + #[metrics(buckets = Buckets::LATENCIES, unit = Unit::Seconds)] + pub fallback_access_latency: Family>, +} + +#[vise::register] +pub(super) static SNAPSHOT_METRICS: vise::Global = vise::Global::new(); diff --git a/core/lib/state/src/storage_factory.rs b/core/lib/state/src/storage_factory/mod.rs similarity index 80% rename from core/lib/state/src/storage_factory.rs rename to core/lib/state/src/storage_factory/mod.rs index 2ef9b249af2e..0b514f8f9644 100644 --- a/core/lib/state/src/storage_factory.rs +++ b/core/lib/state/src/storage_factory/mod.rs @@ -1,7 +1,4 @@ -use std::{ - collections::{HashMap, HashSet}, - fmt::Debug, -}; +use std::{collections::HashSet, fmt}; use anyhow::Context as _; use async_trait::async_trait; @@ -10,64 +7,18 @@ use zksync_dal::{Connection, ConnectionPool, Core, CoreDal}; use zksync_storage::RocksDB; use zksync_types::{L1BatchNumber, StorageKey, StorageValue, H256}; use zksync_utils::u256_to_h256; -use zksync_vm_interface::storage::{ReadStorage, StorageSnapshot, StorageWithSnapshot}; +use zksync_vm_interface::storage::{ReadStorage, StorageSnapshot}; +use self::metrics::{SnapshotStage, SNAPSHOT_METRICS}; +pub use self::{ + rocksdb_with_memory::{BatchDiff, RocksdbWithMemory}, + snapshot::SnapshotStorage, +}; use crate::{PostgresStorage, RocksdbStorage, RocksdbStorageBuilder, StateKeeperColumnFamily}; -/// Storage with a static lifetime that can be sent to Tokio tasks etc. -pub type OwnedStorage = CommonStorage<'static>; - -/// Factory that can produce storage instances on demand. The storage type is encapsulated as a type param -/// (mostly for testing purposes); the default is [`OwnedStorage`]. -#[async_trait] -pub trait ReadStorageFactory: Debug + Send + Sync + 'static { - /// Creates a storage instance, e.g. over a Postgres connection or a RocksDB instance. 
- /// The specific criteria on which one are left up to the implementation. - /// - /// Implementations may be cancel-aware and return `Ok(None)` iff `stop_receiver` receives - /// a stop signal; this is the only case in which `Ok(None)` should be returned. - async fn access_storage( - &self, - stop_receiver: &watch::Receiver, - l1_batch_number: L1BatchNumber, - ) -> anyhow::Result>; -} - -/// [`ReadStorageFactory`] producing Postgres-backed storage instances. Hence, it is slower than more advanced -/// alternatives with RocksDB caches and should be used sparingly (e.g., for testing). -#[async_trait] -impl ReadStorageFactory for ConnectionPool { - async fn access_storage( - &self, - _stop_receiver: &watch::Receiver, - l1_batch_number: L1BatchNumber, - ) -> anyhow::Result> { - let connection = self.connection().await?; - let storage = OwnedStorage::postgres(connection, l1_batch_number).await?; - Ok(Some(storage.into())) - } -} - -/// DB difference introduced by one batch. -#[derive(Debug, Clone)] -pub struct BatchDiff { - /// Storage slots touched by this batch along with new values there. - pub state_diff: HashMap, - /// Initial write indices introduced by this batch. - pub enum_index_diff: HashMap, - /// Factory dependencies introduced by this batch. - pub factory_dep_diff: HashMap>, -} - -/// A RocksDB cache instance with in-memory DB diffs that gives access to DB state at batches `N` to -/// `N + K`, where `K` is the number of diffs. -#[derive(Debug)] -pub struct RocksdbWithMemory { - /// RocksDB cache instance caught up to batch `N`. - pub rocksdb: RocksdbStorage, - /// Diffs for batches `N + 1` to `N + K`. - pub batch_diffs: Vec, -} +mod metrics; +mod rocksdb_with_memory; +mod snapshot; /// Union of all [`ReadStorage`] implementations that are returned by [`ReadStorageFactory`], such as /// Postgres- and RocksDB-backed storages. @@ -83,7 +34,7 @@ pub enum CommonStorage<'a> { /// Implementation over a RocksDB cache instance with in-memory DB diffs. 
RocksdbWithMemory(RocksdbWithMemory), /// In-memory storage snapshot with the Postgres storage fallback. - Snapshot(StorageWithSnapshot>), + Snapshot(SnapshotStorage<'a>), /// Generic implementation. Should be used for testing purposes only since it has performance penalty because /// of the dynamic dispatch. Boxed(Box), @@ -176,6 +127,7 @@ impl CommonStorage<'static> { connection: &mut Connection<'static, Core>, l1_batch_number: L1BatchNumber, ) -> anyhow::Result> { + let latency = SNAPSHOT_METRICS.load_latency[&SnapshotStage::BatchHeader].start(); let Some(header) = connection .blocks_dal() .get_l1_batch_header(l1_batch_number) @@ -188,8 +140,10 @@ impl CommonStorage<'static> { .into_iter() .map(u256_to_h256) .collect(); + latency.observe(); // Check protective reads early on. + let latency = SNAPSHOT_METRICS.load_latency[&SnapshotStage::ProtectiveReads].start(); let protective_reads = connection .storage_logs_dedup_dal() .get_protective_reads_for_l1_batch(l1_batch_number) @@ -199,14 +153,18 @@ impl CommonStorage<'static> { return Ok(None); } let protective_reads_len = protective_reads.len(); - tracing::debug!("Loaded {protective_reads_len} protective reads"); + let latency = latency.observe(); + tracing::debug!("Loaded {protective_reads_len} protective reads in {latency:?}"); + let latency = SNAPSHOT_METRICS.load_latency[&SnapshotStage::TouchedSlots].start(); let touched_slots = connection .storage_logs_dal() .get_touched_slots_for_l1_batch(l1_batch_number) .await?; - tracing::debug!("Loaded {} touched keys", touched_slots.len()); + let latency = latency.observe(); + tracing::debug!("Loaded {} touched keys in {latency:?}", touched_slots.len()); + let latency = SNAPSHOT_METRICS.load_latency[&SnapshotStage::PreviousValues].start(); let all_accessed_keys: Vec<_> = protective_reads .into_iter() .map(|key| key.hashed_key()) @@ -216,21 +174,31 @@ impl CommonStorage<'static> { .storage_logs_dal() .get_previous_storage_values(&all_accessed_keys, l1_batch_number) 
.await?; + let latency = latency.observe(); tracing::debug!( - "Obtained {} previous values for accessed keys", + "Obtained {} previous values for accessed keys in {latency:?}", previous_values.len() ); + + let latency = SNAPSHOT_METRICS.load_latency[&SnapshotStage::InitialWrites].start(); let initial_write_info = connection .storage_logs_dal() .get_l1_batches_and_indices_for_initial_writes(&all_accessed_keys) .await?; - tracing::debug!("Obtained initial write info for accessed keys"); + let latency = latency.observe(); + tracing::debug!("Obtained initial write info for accessed keys in {latency:?}"); + let latency = SNAPSHOT_METRICS.load_latency[&SnapshotStage::Bytecodes].start(); let bytecodes = connection .factory_deps_dal() .get_factory_deps(&bytecode_hashes) .await; - tracing::debug!("Loaded {} bytecodes used in the batch", bytecodes.len()); + let latency = latency.observe(); + tracing::debug!( + "Loaded {} bytecodes used in the batch in {latency:?}", + bytecodes.len() + ); + let factory_deps = bytecodes .into_iter() .map(|(hash_u256, words)| { @@ -256,54 +224,6 @@ impl CommonStorage<'static> { } } -impl ReadStorage for RocksdbWithMemory { - fn read_value(&mut self, key: &StorageKey) -> StorageValue { - let hashed_key = key.hashed_key(); - match self - .batch_diffs - .iter() - .rev() - .find_map(|b| b.state_diff.get(&hashed_key)) - { - None => self.rocksdb.read_value(key), - Some(value) => *value, - } - } - - fn is_write_initial(&mut self, key: &StorageKey) -> bool { - match self - .batch_diffs - .iter() - .find_map(|b| b.enum_index_diff.get(&key.hashed_key())) - { - None => self.rocksdb.is_write_initial(key), - Some(_) => false, - } - } - - fn load_factory_dep(&mut self, hash: H256) -> Option> { - match self - .batch_diffs - .iter() - .find_map(|b| b.factory_dep_diff.get(&hash)) - { - None => self.rocksdb.load_factory_dep(hash), - Some(value) => Some(value.clone()), - } - } - - fn get_enumeration_index(&mut self, key: &StorageKey) -> Option { - match self - 
.batch_diffs - .iter() - .find_map(|b| b.enum_index_diff.get(&key.hashed_key())) - { - None => self.rocksdb.get_enumeration_index(key), - Some(value) => Some(*value), - } - } -} - impl ReadStorage for CommonStorage<'_> { fn read_value(&mut self, key: &StorageKey) -> StorageValue { match self { @@ -358,8 +278,42 @@ impl From for CommonStorage<'_> { } } -impl<'a> From>> for CommonStorage<'a> { - fn from(value: StorageWithSnapshot>) -> Self { +impl<'a> From> for CommonStorage<'a> { + fn from(value: SnapshotStorage<'a>) -> Self { Self::Snapshot(value) } } + +/// Storage with a static lifetime that can be sent to Tokio tasks etc. +pub type OwnedStorage = CommonStorage<'static>; + +/// Factory that can produce storage instances on demand. The storage type is encapsulated as a type param +/// (mostly for testing purposes); the default is [`OwnedStorage`]. +#[async_trait] +pub trait ReadStorageFactory: fmt::Debug + Send + Sync + 'static { + /// Creates a storage instance, e.g. over a Postgres connection or a RocksDB instance. + /// The specific criteria on which one are left up to the implementation. + /// + /// Implementations may be cancel-aware and return `Ok(None)` iff `stop_receiver` receives + /// a stop signal; this is the only case in which `Ok(None)` should be returned. + async fn access_storage( + &self, + stop_receiver: &watch::Receiver, + l1_batch_number: L1BatchNumber, + ) -> anyhow::Result>; +} + +/// [`ReadStorageFactory`] producing Postgres-backed storage instances. Hence, it is slower than more advanced +/// alternatives with RocksDB caches and should be used sparingly (e.g., for testing). 
+#[async_trait] +impl ReadStorageFactory for ConnectionPool { + async fn access_storage( + &self, + _stop_receiver: &watch::Receiver, + l1_batch_number: L1BatchNumber, + ) -> anyhow::Result> { + let connection = self.connection().await?; + let storage = OwnedStorage::postgres(connection, l1_batch_number).await?; + Ok(Some(storage.into())) + } +} diff --git a/core/lib/state/src/storage_factory/rocksdb_with_memory.rs b/core/lib/state/src/storage_factory/rocksdb_with_memory.rs new file mode 100644 index 000000000000..411460dad18e --- /dev/null +++ b/core/lib/state/src/storage_factory/rocksdb_with_memory.rs @@ -0,0 +1,75 @@ +use std::collections::HashMap; + +use zksync_types::{StorageKey, StorageValue, H256}; +use zksync_vm_interface::storage::ReadStorage; + +use crate::RocksdbStorage; + +/// DB difference introduced by one batch. +#[derive(Debug, Clone)] +pub struct BatchDiff { + /// Storage slots touched by this batch along with new values there. + pub state_diff: HashMap, + /// Initial write indices introduced by this batch. + pub enum_index_diff: HashMap, + /// Factory dependencies introduced by this batch. + pub factory_dep_diff: HashMap>, +} + +/// A RocksDB cache instance with in-memory DB diffs that gives access to DB state at batches `N` to +/// `N + K`, where `K` is the number of diffs. +#[derive(Debug)] +pub struct RocksdbWithMemory { + /// RocksDB cache instance caught up to batch `N`. + pub rocksdb: RocksdbStorage, + /// Diffs for batches `N + 1` to `N + K`. 
+ pub batch_diffs: Vec, +} + +impl ReadStorage for RocksdbWithMemory { + fn read_value(&mut self, key: &StorageKey) -> StorageValue { + let hashed_key = key.hashed_key(); + match self + .batch_diffs + .iter() + .rev() + .find_map(|b| b.state_diff.get(&hashed_key)) + { + None => self.rocksdb.read_value(key), + Some(value) => *value, + } + } + + fn is_write_initial(&mut self, key: &StorageKey) -> bool { + match self + .batch_diffs + .iter() + .find_map(|b| b.enum_index_diff.get(&key.hashed_key())) + { + None => self.rocksdb.is_write_initial(key), + Some(_) => false, + } + } + + fn load_factory_dep(&mut self, hash: H256) -> Option> { + match self + .batch_diffs + .iter() + .find_map(|b| b.factory_dep_diff.get(&hash)) + { + None => self.rocksdb.load_factory_dep(hash), + Some(value) => Some(value.clone()), + } + } + + fn get_enumeration_index(&mut self, key: &StorageKey) -> Option { + match self + .batch_diffs + .iter() + .find_map(|b| b.enum_index_diff.get(&key.hashed_key())) + { + None => self.rocksdb.get_enumeration_index(key), + Some(value) => Some(*value), + } + } +} diff --git a/core/lib/state/src/storage_factory/snapshot.rs b/core/lib/state/src/storage_factory/snapshot.rs new file mode 100644 index 000000000000..05a79125dd30 --- /dev/null +++ b/core/lib/state/src/storage_factory/snapshot.rs @@ -0,0 +1,49 @@ +use zksync_types::{StorageKey, StorageValue, H256}; +use zksync_vm_interface::storage::StorageWithSnapshot; + +use super::metrics::{AccessKind, SNAPSHOT_METRICS}; +use crate::{interface::ReadStorage, PostgresStorage}; + +/// Wrapper around [`PostgresStorage`] used to track frequency of fallback access. 
+#[derive(Debug)] +pub struct FallbackStorage<'a>(PostgresStorage<'a>); + +impl<'a> From> for FallbackStorage<'a> { + fn from(storage: PostgresStorage<'a>) -> Self { + Self(storage) + } +} + +impl ReadStorage for FallbackStorage<'_> { + fn read_value(&mut self, key: &StorageKey) -> StorageValue { + let latency = SNAPSHOT_METRICS.fallback_access_latency[&AccessKind::ReadValue].start(); + let output = self.0.read_value(key); + latency.observe(); + output + } + + fn is_write_initial(&mut self, key: &StorageKey) -> bool { + let latency = SNAPSHOT_METRICS.fallback_access_latency[&AccessKind::IsWriteInitial].start(); + let output = self.0.is_write_initial(key); + latency.observe(); + output + } + + fn load_factory_dep(&mut self, hash: H256) -> Option> { + let latency = SNAPSHOT_METRICS.fallback_access_latency[&AccessKind::LoadFactoryDep].start(); + let output = self.0.load_factory_dep(hash); + latency.observe(); + output + } + + fn get_enumeration_index(&mut self, key: &StorageKey) -> Option { + let latency = + SNAPSHOT_METRICS.fallback_access_latency[&AccessKind::GetEnumerationIndex].start(); + let output = self.0.get_enumeration_index(key); + latency.observe(); + output + } +} + +/// Snapshot-backed storage used for batch processing. +pub type SnapshotStorage<'a> = StorageWithSnapshot>; diff --git a/core/lib/types/src/l1/mod.rs b/core/lib/types/src/l1/mod.rs index 215836cb52f5..e8144c75db2e 100644 --- a/core/lib/types/src/l1/mod.rs +++ b/core/lib/types/src/l1/mod.rs @@ -274,7 +274,9 @@ impl From for abi::NewPriorityRequest { transaction: abi::L2CanonicalTransaction { tx_type: PRIORITY_OPERATION_L2_TX_TYPE.into(), from: address_to_u256(&t.common_data.sender), - to: address_to_u256(&t.execute.contract_address.unwrap_or_default()), + // Unwrap used here because the contract address should always be present for L1 transactions. + // TODO: Consider restricting the contract address to not be optional in L1Tx. 
+ to: address_to_u256(&t.execute.contract_address.unwrap()), gas_limit: t.common_data.gas_limit, gas_per_pubdata_byte_limit: t.common_data.gas_per_pubdata_limit, max_fee_per_gas: t.common_data.max_fee_per_gas, diff --git a/core/lib/types/src/l2/mod.rs b/core/lib/types/src/l2/mod.rs index abd60491af38..036d2a7a036d 100644 --- a/core/lib/types/src/l2/mod.rs +++ b/core/lib/types/src/l2/mod.rs @@ -232,8 +232,8 @@ impl L2Tx { } /// Returns recipient account of the transaction. - pub fn recipient_account(&self) -> Address { - self.execute.contract_address.unwrap_or_default() + pub fn recipient_account(&self) -> Option
{ + self.execute.contract_address } /// Returns the account nonce associated with transaction. @@ -324,7 +324,7 @@ impl From for TransactionRequest { let mut base_tx_req = TransactionRequest { nonce: U256::from(tx.common_data.nonce.0), from: Some(tx.common_data.initiator_address), - to: Some(tx.recipient_account()), + to: tx.recipient_account(), value: tx.execute.value, gas_price: tx.common_data.fee.max_fee_per_gas, max_priority_fee_per_gas: None, @@ -400,7 +400,7 @@ impl From for api::Transaction { chain_id: U256::from(tx.common_data.extract_chain_id().unwrap_or_default()), nonce: U256::from(tx.common_data.nonce.0), from: Some(tx.common_data.initiator_address), - to: Some(tx.recipient_account()), + to: tx.recipient_account(), value: tx.execute.value, gas_price: Some(tx.common_data.fee.max_fee_per_gas), max_priority_fee_per_gas: Some(tx.common_data.fee.max_priority_fee_per_gas), diff --git a/core/lib/types/src/lib.rs b/core/lib/types/src/lib.rs index 54e1b40b5b33..86b2e3f03d51 100644 --- a/core/lib/types/src/lib.rs +++ b/core/lib/types/src/lib.rs @@ -104,8 +104,8 @@ impl Eq for Transaction {} impl Transaction { /// Returns recipient account of the transaction. - pub fn recipient_account(&self) -> Address { - self.execute.contract_address.unwrap_or_default() + pub fn recipient_account(&self) -> Option
{ + self.execute.contract_address } pub fn nonce(&self) -> Option { diff --git a/core/lib/types/src/tx/execute.rs b/core/lib/types/src/tx/execute.rs index c5d31c0f8a03..c133261bc232 100644 --- a/core/lib/types/src/tx/execute.rs +++ b/core/lib/types/src/tx/execute.rs @@ -72,10 +72,9 @@ impl EIP712TypedStructure for Execute { const TYPE_NAME: &'static str = "Transaction"; fn build_structure(&self, builder: &mut BUILDER) { - builder.add_member( - "to", - &U256::from(self.contract_address.unwrap_or_default().as_bytes()), - ); + if let Some(contract_address) = self.contract_address { + builder.add_member("to", &contract_address); + } builder.add_member("value", &self.value); builder.add_member("data", &self.calldata.as_slice()); // Factory deps are not included into the transaction signature, since they are parsed from the diff --git a/core/lib/vlog/src/lib.rs b/core/lib/vlog/src/lib.rs index 268fbd0b39eb..598d17879b84 100644 --- a/core/lib/vlog/src/lib.rs +++ b/core/lib/vlog/src/lib.rs @@ -4,6 +4,7 @@ use std::time::Duration; use ::sentry::ClientInitGuard; +use anyhow::Context as _; use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt}; pub use crate::{logs::Logs, opentelemetry::OpenTelemetry, sentry::Sentry}; @@ -126,8 +127,9 @@ impl ObservabilityBuilder { self } - /// Initializes the observability subsystem. - pub fn build(self) -> ObservabilityGuard { + /// Tries to initialize the observability subsystem. Returns an error if it's already initialized. + /// This is mostly useful in tests. 
+ pub fn try_build(self) -> anyhow::Result { let logs = self.logs.unwrap_or_default(); logs.install_panic_hook(); @@ -151,14 +153,20 @@ impl ObservabilityBuilder { .with(logs_layer) .with(otlp_tracing_layer) .with(otlp_logging_layer) - .init(); + .try_init() + .context("failed installing global tracer / logger")?; let sentry_guard = self.sentry.map(|sentry| sentry.install()); - ObservabilityGuard { + Ok(ObservabilityGuard { otlp_tracing_provider, otlp_logging_provider, sentry_guard, - } + }) + } + + /// Initializes the observability subsystem. + pub fn build(self) -> ObservabilityGuard { + self.try_build().unwrap() } } diff --git a/core/lib/vm_executor/src/batch/factory.rs b/core/lib/vm_executor/src/batch/factory.rs index 68a3769ee622..d6f7555b7672 100644 --- a/core/lib/vm_executor/src/batch/factory.rs +++ b/core/lib/vm_executor/src/batch/factory.rs @@ -1,4 +1,4 @@ -use std::{marker::PhantomData, rc::Rc, sync::Arc}; +use std::{marker::PhantomData, rc::Rc, sync::Arc, time::Duration}; use anyhow::Context as _; use once_cell::sync::OnceCell; @@ -6,7 +6,7 @@ use tokio::sync::mpsc; use zksync_multivm::{ interface::{ executor::{BatchExecutor, BatchExecutorFactory}, - storage::{ReadStorage, StorageView}, + storage::{ReadStorage, StorageView, StorageViewStats}, BatchTransactionExecutionResult, ExecutionResult, FinishedL1Batch, Halt, L1BatchEnv, L2BlockEnv, SystemEnv, VmInterface, VmInterfaceHistoryEnabled, }, @@ -20,7 +20,7 @@ use super::{ executor::{Command, MainBatchExecutor}, metrics::{TxExecutionStage, BATCH_TIP_METRICS, EXECUTOR_METRICS, KEEPER_METRICS}, }; -use crate::shared::InteractionType; +use crate::shared::{InteractionType, STORAGE_METRICS}; /// The default implementation of [`BatchExecutorFactory`]. /// Creates real batch executors which maintain the VM (as opposed to the test factories which don't use the VM). @@ -35,6 +35,7 @@ pub struct MainBatchExecutorFactory { /// regardless of its configuration, this flag should be set to `true`. 
optional_bytecode_compression: bool, fast_vm_mode: FastVmMode, + observe_storage_metrics: bool, } impl MainBatchExecutorFactory { @@ -43,9 +44,11 @@ impl MainBatchExecutorFactory { save_call_traces, optional_bytecode_compression, fast_vm_mode: FastVmMode::Old, + observe_storage_metrics: false, } } + /// Sets the fast VM mode used by this executor. pub fn set_fast_vm_mode(&mut self, fast_vm_mode: FastVmMode) { if !matches!(fast_vm_mode, FastVmMode::Old) { tracing::warn!( @@ -54,6 +57,13 @@ impl MainBatchExecutorFactory { } self.fast_vm_mode = fast_vm_mode; } + + /// Enables storage metrics reporting for this executor. Storage metrics will be reported for each transaction. + // The reason this isn't on by default is that storage metrics don't distinguish between "batch-executed" and "oneshot-executed" transactions; + // this optimally needs some improvements in `vise` (ability to add labels for groups of metrics). + pub fn observe_storage_metrics(&mut self) { + self.observe_storage_metrics = true; + } } impl BatchExecutorFactory for MainBatchExecutorFactory { @@ -70,6 +80,7 @@ impl BatchExecutorFactory for MainBatchExecu save_call_traces: self.save_call_traces, optional_bytecode_compression: self.optional_bytecode_compression, fast_vm_mode: self.fast_vm_mode, + observe_storage_metrics: self.observe_storage_metrics, commands: commands_receiver, _storage: PhantomData, }; @@ -91,6 +102,7 @@ struct CommandReceiver { save_call_traces: bool, optional_bytecode_compression: bool, fast_vm_mode: FastVmMode, + observe_storage_metrics: bool, commands: mpsc::Receiver, _storage: PhantomData, } @@ -112,14 +124,22 @@ impl CommandReceiver { self.fast_vm_mode, ); let mut batch_finished = false; + let mut prev_storage_stats = StorageViewStats::default(); while let Some(cmd) = self.commands.blocking_recv() { match cmd { Command::ExecuteTx(tx, resp) => { let tx_hash = tx.hash(); - let result = self.execute_tx(*tx, &mut vm).with_context(|| { + let (result, latency) = self.execute_tx(*tx, 
&mut vm).with_context(|| { format!("fatal error executing transaction {tx_hash:?}") })?; + + if self.observe_storage_metrics { + let storage_stats = storage_view.borrow().stats(); + let stats_diff = storage_stats.saturating_sub(&prev_storage_stats); + STORAGE_METRICS.observe(&format!("Tx {tx_hash:?}"), latency, &stats_diff); + prev_storage_stats = storage_stats; + } if resp.send(result).is_err() { break; } @@ -152,11 +172,11 @@ impl CommandReceiver { .context("storage view leaked")? .into_inner(); if batch_finished { - let metrics = storage_view.metrics(); + let stats = storage_view.stats(); EXECUTOR_METRICS.batch_storage_interaction_duration[&InteractionType::GetValue] - .observe(metrics.time_spent_on_get_value); + .observe(stats.time_spent_on_get_value); EXECUTOR_METRICS.batch_storage_interaction_duration[&InteractionType::SetValue] - .observe(metrics.time_spent_on_set_value); + .observe(stats.time_spent_on_set_value); } else { // State keeper can exit because of stop signal, so it's OK to exit mid-batch. tracing::info!("State keeper exited with an unfinished L1 batch"); @@ -168,7 +188,7 @@ impl CommandReceiver { &self, transaction: Transaction, vm: &mut VmInstance, - ) -> anyhow::Result { + ) -> anyhow::Result<(BatchTransactionExecutionResult, Duration)> { // Executing a next transaction means that a previous transaction was either rolled back (in which case its snapshot // was already removed), or that we build on top of it (in which case, it can be removed now). vm.pop_snapshot_no_rollback(); @@ -182,9 +202,8 @@ impl CommandReceiver { } else { self.execute_tx_in_vm(&transaction, vm)? 
}; - latency.observe(); - Ok(result) + Ok((result, latency.observe())) } fn rollback_last_tx(&self, vm: &mut VmInstance) { diff --git a/core/lib/vm_executor/src/oneshot/metrics.rs b/core/lib/vm_executor/src/oneshot/metrics.rs index 8a89ce0a9a4f..475463300f16 100644 --- a/core/lib/vm_executor/src/oneshot/metrics.rs +++ b/core/lib/vm_executor/src/oneshot/metrics.rs @@ -1,9 +1,9 @@ use std::time::Duration; use vise::{Buckets, EncodeLabelSet, EncodeLabelValue, Family, Histogram, Metrics}; -use zksync_multivm::interface::{storage::StorageViewMetrics, VmMemoryMetrics}; +use zksync_multivm::interface::{storage::StorageViewStats, VmMemoryMetrics}; -use crate::shared::InteractionType; +use crate::shared::STORAGE_METRICS; #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, EncodeLabelValue, EncodeLabelSet)] #[metrics(label = "type", rename_all = "snake_case")] @@ -46,29 +46,11 @@ struct RuntimeContextMemoryMetrics { #[vise::register] static MEMORY_METRICS: vise::Global = vise::Global::new(); -const INTERACTION_AMOUNT_BUCKETS: Buckets = Buckets::exponential(10.0..=10_000_000.0, 10.0); - -#[derive(Debug, Metrics)] -#[metrics(prefix = "runtime_context_storage_interaction")] -struct RuntimeContextStorageMetrics { - #[metrics(buckets = INTERACTION_AMOUNT_BUCKETS)] - amount: Family>, - #[metrics(buckets = Buckets::LATENCIES)] - duration: Family>, - #[metrics(buckets = Buckets::LATENCIES)] - duration_per_unit: Family>, - #[metrics(buckets = Buckets::ZERO_TO_ONE)] - ratio: Histogram, -} - -#[vise::register] -static STORAGE_METRICS: vise::Global = vise::Global::new(); - pub(super) fn report_vm_memory_metrics( tx_id: &str, memory_metrics: &VmMemoryMetrics, vm_execution_took: Duration, - storage_metrics: StorageViewMetrics, + storage_metrics: &StorageViewStats, ) { MEMORY_METRICS.event_sink_size[&SizeType::Inner].observe(memory_metrics.event_sink_inner); MEMORY_METRICS.event_sink_size[&SizeType::History].observe(memory_metrics.event_sink_history); @@ -88,56 +70,5 @@ pub(super) fn 
report_vm_memory_metrics( .full .observe(memory_metrics.full_size() + storage_metrics.cache_size); - let total_storage_invocations = storage_metrics.get_value_storage_invocations - + storage_metrics.set_value_storage_invocations; - let total_time_spent_in_storage = - storage_metrics.time_spent_on_get_value + storage_metrics.time_spent_on_set_value; - - STORAGE_METRICS.amount[&InteractionType::Missed] - .observe(storage_metrics.storage_invocations_missed); - STORAGE_METRICS.amount[&InteractionType::GetValue] - .observe(storage_metrics.get_value_storage_invocations); - STORAGE_METRICS.amount[&InteractionType::SetValue] - .observe(storage_metrics.set_value_storage_invocations); - STORAGE_METRICS.amount[&InteractionType::Total].observe(total_storage_invocations); - - STORAGE_METRICS.duration[&InteractionType::Missed] - .observe(storage_metrics.time_spent_on_storage_missed); - STORAGE_METRICS.duration[&InteractionType::GetValue] - .observe(storage_metrics.time_spent_on_get_value); - STORAGE_METRICS.duration[&InteractionType::SetValue] - .observe(storage_metrics.time_spent_on_set_value); - STORAGE_METRICS.duration[&InteractionType::Total].observe(total_time_spent_in_storage); - - if total_storage_invocations > 0 { - STORAGE_METRICS.duration_per_unit[&InteractionType::Total] - .observe(total_time_spent_in_storage.div_f64(total_storage_invocations as f64)); - } - if storage_metrics.storage_invocations_missed > 0 { - let duration_per_unit = storage_metrics - .time_spent_on_storage_missed - .div_f64(storage_metrics.storage_invocations_missed as f64); - STORAGE_METRICS.duration_per_unit[&InteractionType::Missed].observe(duration_per_unit); - } - - STORAGE_METRICS - .ratio - .observe(total_time_spent_in_storage.as_secs_f64() / vm_execution_took.as_secs_f64()); - - const STORAGE_INVOCATIONS_DEBUG_THRESHOLD: usize = 1_000; - - if total_storage_invocations > STORAGE_INVOCATIONS_DEBUG_THRESHOLD { - tracing::info!( - "Tx {tx_id} resulted in {total_storage_invocations} 
storage_invocations, {} new_storage_invocations, \ - {} get_value_storage_invocations, {} set_value_storage_invocations, \ - vm execution took {vm_execution_took:?}, storage interaction took {total_time_spent_in_storage:?} \ - (missed: {:?} get: {:?} set: {:?})", - storage_metrics.storage_invocations_missed, - storage_metrics.get_value_storage_invocations, - storage_metrics.set_value_storage_invocations, - storage_metrics.time_spent_on_storage_missed, - storage_metrics.time_spent_on_get_value, - storage_metrics.time_spent_on_set_value, - ); - } + STORAGE_METRICS.observe(&format!("Tx {tx_id}"), vm_execution_took, storage_metrics); } diff --git a/core/lib/vm_executor/src/oneshot/mod.rs b/core/lib/vm_executor/src/oneshot/mod.rs index cac8edfdfdf8..1838381d2a01 100644 --- a/core/lib/vm_executor/src/oneshot/mod.rs +++ b/core/lib/vm_executor/src/oneshot/mod.rs @@ -284,7 +284,7 @@ impl VmSandbox { &tx_id, &memory_metrics, vm_execution_took, - self.storage_view.as_ref().borrow_mut().metrics(), + &self.storage_view.borrow().stats(), ); result } diff --git a/core/lib/vm_executor/src/shared.rs b/core/lib/vm_executor/src/shared.rs index 420005be05d6..8ac4dce2e01e 100644 --- a/core/lib/vm_executor/src/shared.rs +++ b/core/lib/vm_executor/src/shared.rs @@ -1,6 +1,9 @@ //! Functionality shared among different types of executors. 
-use vise::{EncodeLabelSet, EncodeLabelValue}; +use std::time::Duration; + +use vise::{Buckets, EncodeLabelSet, EncodeLabelValue, Family, Histogram, Metrics}; +use zksync_multivm::interface::storage::StorageViewStats; #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, EncodeLabelValue, EncodeLabelSet)] #[metrics(label = "interaction", rename_all = "snake_case")] @@ -10,3 +13,79 @@ pub(crate) enum InteractionType { SetValue, Total, } + +const INTERACTION_AMOUNT_BUCKETS: Buckets = Buckets::exponential(10.0..=10_000_000.0, 10.0); + +#[derive(Debug, Metrics)] +#[metrics(prefix = "runtime_context_storage_interaction")] +pub(crate) struct RuntimeContextStorageMetrics { + #[metrics(buckets = INTERACTION_AMOUNT_BUCKETS)] + amount: Family>, + #[metrics(buckets = Buckets::LATENCIES)] + duration: Family>, + #[metrics(buckets = Buckets::LATENCIES)] + duration_per_unit: Family>, + #[metrics(buckets = Buckets::ZERO_TO_ONE)] + ratio: Histogram, +} + +impl RuntimeContextStorageMetrics { + pub fn observe( + &self, + op: &str, + total_vm_latency: Duration, + storage_metrics: &StorageViewStats, + ) { + const STORAGE_INVOCATIONS_DEBUG_THRESHOLD: usize = 1_000; + + let total_storage_invocations = storage_metrics.get_value_storage_invocations + + storage_metrics.set_value_storage_invocations; + let total_time_spent_in_storage = + storage_metrics.time_spent_on_get_value + storage_metrics.time_spent_on_set_value; + + self.amount[&InteractionType::Missed].observe(storage_metrics.storage_invocations_missed); + self.amount[&InteractionType::GetValue] + .observe(storage_metrics.get_value_storage_invocations); + self.amount[&InteractionType::SetValue] + .observe(storage_metrics.set_value_storage_invocations); + self.amount[&InteractionType::Total].observe(total_storage_invocations); + + self.duration[&InteractionType::Missed] + .observe(storage_metrics.time_spent_on_storage_missed); + self.duration[&InteractionType::GetValue].observe(storage_metrics.time_spent_on_get_value); + 
self.duration[&InteractionType::SetValue].observe(storage_metrics.time_spent_on_set_value); + self.duration[&InteractionType::Total].observe(total_time_spent_in_storage); + + if total_storage_invocations > 0 { + self.duration_per_unit[&InteractionType::Total] + .observe(total_time_spent_in_storage.div_f64(total_storage_invocations as f64)); + } + if storage_metrics.storage_invocations_missed > 0 { + let duration_per_unit = storage_metrics + .time_spent_on_storage_missed + .div_f64(storage_metrics.storage_invocations_missed as f64); + self.duration_per_unit[&InteractionType::Missed].observe(duration_per_unit); + } + + self.ratio + .observe(total_time_spent_in_storage.as_secs_f64() / total_vm_latency.as_secs_f64()); + + if total_storage_invocations > STORAGE_INVOCATIONS_DEBUG_THRESHOLD { + tracing::info!( + "{op} resulted in {total_storage_invocations} storage_invocations, {} new_storage_invocations, \ + {} get_value_storage_invocations, {} set_value_storage_invocations, \ + vm execution took {total_vm_latency:?}, storage interaction took {total_time_spent_in_storage:?} \ + (missed: {:?} get: {:?} set: {:?})", + storage_metrics.storage_invocations_missed, + storage_metrics.get_value_storage_invocations, + storage_metrics.set_value_storage_invocations, + storage_metrics.time_spent_on_storage_missed, + storage_metrics.time_spent_on_get_value, + storage_metrics.time_spent_on_set_value, + ); + } + } +} + +#[vise::register] +pub(crate) static STORAGE_METRICS: vise::Global = vise::Global::new(); diff --git a/core/lib/vm_interface/src/storage/mod.rs b/core/lib/vm_interface/src/storage/mod.rs index 9b92ef8b7705..6cdcd33db682 100644 --- a/core/lib/vm_interface/src/storage/mod.rs +++ b/core/lib/vm_interface/src/storage/mod.rs @@ -6,7 +6,7 @@ pub use self::{ // Note, that `test_infra` of the bootloader tests relies on this value to be exposed in_memory::{InMemoryStorage, IN_MEMORY_STORAGE_DEFAULT_NETWORK_ID}, snapshot::{StorageSnapshot, StorageWithSnapshot}, - 
view::{ImmutableStorageView, StorageView, StorageViewCache, StorageViewMetrics}, + view::{ImmutableStorageView, StorageView, StorageViewCache, StorageViewStats}, }; mod in_memory; diff --git a/core/lib/vm_interface/src/storage/view.rs b/core/lib/vm_interface/src/storage/view.rs index 101f5c82f497..ec9267609e23 100644 --- a/core/lib/vm_interface/src/storage/view.rs +++ b/core/lib/vm_interface/src/storage/view.rs @@ -10,9 +10,9 @@ use zksync_types::{StorageKey, StorageValue, H256}; use super::{ReadStorage, StoragePtr, WriteStorage}; -/// Metrics for [`StorageView`]. +/// Statistics for [`StorageView`]. #[derive(Debug, Default, Clone, Copy)] -pub struct StorageViewMetrics { +pub struct StorageViewStats { /// Estimated byte size of the cache used by the `StorageView`. pub cache_size: usize, /// Number of read / write ops for which the value was read from the underlying storage. @@ -29,6 +29,33 @@ pub struct StorageViewMetrics { pub time_spent_on_set_value: Duration, } +impl StorageViewStats { + /// Subtracts two sets of statistics. This can be used to measure increment between these stats and older stats for the same VM. 
+ pub fn saturating_sub(&self, older: &Self) -> Self { + Self { + cache_size: self.cache_size.saturating_sub(older.cache_size), + storage_invocations_missed: self + .storage_invocations_missed + .saturating_sub(older.storage_invocations_missed), + get_value_storage_invocations: self + .get_value_storage_invocations + .saturating_sub(older.get_value_storage_invocations), + set_value_storage_invocations: self + .set_value_storage_invocations + .saturating_sub(older.set_value_storage_invocations), + time_spent_on_storage_missed: self + .time_spent_on_storage_missed + .saturating_sub(older.time_spent_on_storage_missed), + time_spent_on_get_value: self + .time_spent_on_get_value + .saturating_sub(older.time_spent_on_get_value), + time_spent_on_set_value: self + .time_spent_on_set_value + .saturating_sub(older.time_spent_on_set_value), + } + } +} + /// `StorageView` is a buffer for `StorageLog`s between storage and transaction execution code. /// In order to commit transactions logs should be submitted to the underlying storage /// after a transaction is executed. @@ -46,7 +73,7 @@ pub struct StorageView { // Used for caching and to get the list/count of modified keys modified_storage_keys: HashMap, cache: StorageViewCache, - metrics: StorageViewMetrics, + stats: StorageViewStats, } /// `StorageViewCache` is a struct for caching storage reads and `contains_key()` checks. 
@@ -112,7 +139,7 @@ impl StorageView { read_storage_keys: HashMap::new(), initial_writes: HashMap::new(), }, - metrics: StorageViewMetrics::default(), + stats: StorageViewStats::default(), } } @@ -126,8 +153,8 @@ impl StorageView { cached_value.copied().unwrap_or_else(|| { let value = self.storage_handle.read_value(key); self.cache.read_storage_keys.insert(*key, value); - self.metrics.time_spent_on_storage_missed += started_at.elapsed(); - self.metrics.storage_invocations_missed += 1; + self.stats.time_spent_on_storage_missed += started_at.elapsed(); + self.stats.storage_invocations_missed += 1; value }) } @@ -138,11 +165,11 @@ impl StorageView { + self.cache.read_storage_keys.len() * mem::size_of::<(StorageKey, StorageValue)>() } - /// Returns the current metrics. - pub fn metrics(&self) -> StorageViewMetrics { - StorageViewMetrics { + /// Returns the current storage access stats. + pub fn stats(&self) -> StorageViewStats { + StorageViewStats { cache_size: self.cache_size(), - ..self.metrics + ..self.stats } } @@ -155,7 +182,7 @@ impl StorageView { impl ReadStorage for StorageView { fn read_value(&mut self, key: &StorageKey) -> StorageValue { let started_at = Instant::now(); - self.metrics.get_value_storage_invocations += 1; + self.stats.get_value_storage_invocations += 1; let value = self.get_value_no_log(key); tracing::trace!( @@ -166,7 +193,7 @@ impl ReadStorage for StorageView { key.key() ); - self.metrics.time_spent_on_get_value += started_at.elapsed(); + self.stats.time_spent_on_get_value += started_at.elapsed(); value } @@ -198,7 +225,7 @@ impl WriteStorage for StorageView { fn set_value(&mut self, key: StorageKey, value: StorageValue) -> StorageValue { let started_at = Instant::now(); - self.metrics.set_value_storage_invocations += 1; + self.stats.set_value_storage_invocations += 1; let original = self.get_value_no_log(&key); tracing::trace!( @@ -210,7 +237,7 @@ impl WriteStorage for StorageView { key.key() ); self.modified_storage_keys.insert(key, value); 
- self.metrics.time_spent_on_set_value += started_at.elapsed(); + self.stats.time_spent_on_set_value += started_at.elapsed(); original } @@ -220,7 +247,7 @@ impl WriteStorage for StorageView { } fn missed_storage_invocations(&self) -> usize { - self.metrics.storage_invocations_missed + self.stats.storage_invocations_missed } } @@ -245,8 +272,8 @@ impl ReadStorage for ImmutableStorageView { cached_value.copied().unwrap_or_else(|| { let value = this.storage_handle.read_value(key); this.cache.read_storage_keys.insert(*key, value); - this.metrics.time_spent_on_storage_missed += started_at.elapsed(); - this.metrics.storage_invocations_missed += 1; + this.stats.time_spent_on_storage_missed += started_at.elapsed(); + this.stats.storage_invocations_missed += 1; value }) } @@ -289,7 +316,7 @@ mod test { assert_eq!(storage_view.read_value(&key), value); assert!(storage_view.is_write_initial(&key)); // key was inserted during the view lifetime - assert_eq!(storage_view.metrics().storage_invocations_missed, 1); + assert_eq!(storage_view.stats().storage_invocations_missed, 1); // ^ We should only read a value at `key` once, and then used the cached value. 
raw_storage.set_value(key, value); @@ -307,10 +334,10 @@ mod test { assert_eq!(storage_view.read_value(&new_key), new_value); assert!(storage_view.is_write_initial(&new_key)); - let metrics = storage_view.metrics(); - assert_eq!(metrics.storage_invocations_missed, 2); - assert_eq!(metrics.get_value_storage_invocations, 3); - assert_eq!(metrics.set_value_storage_invocations, 2); + let stats = storage_view.stats(); + assert_eq!(stats.storage_invocations_missed, 2); + assert_eq!(stats.get_value_storage_invocations, 3); + assert_eq!(stats.set_value_storage_invocations, 2); } #[test] diff --git a/core/lib/zksync_core_leftovers/src/temp_config_store/mod.rs b/core/lib/zksync_core_leftovers/src/temp_config_store/mod.rs index 8224b03da071..2d6af705f482 100644 --- a/core/lib/zksync_core_leftovers/src/temp_config_store/mod.rs +++ b/core/lib/zksync_core_leftovers/src/temp_config_store/mod.rs @@ -18,8 +18,8 @@ use zksync_config::{ GeneralConfig, ObservabilityConfig, PrometheusConfig, ProofDataHandlerConfig, ProtectiveReadsWriterConfig, ProverJobMonitorConfig, PruningConfig, SnapshotRecoveryConfig, }, - ApiConfig, BaseTokenAdjusterConfig, ContractVerifierConfig, DADispatcherConfig, DBConfig, - EthConfig, EthWatchConfig, ExternalProofIntegrationApiConfig, GasAdjusterConfig, + ApiConfig, BaseTokenAdjusterConfig, ContractVerifierConfig, DAClientConfig, DADispatcherConfig, + DBConfig, EthConfig, EthWatchConfig, ExternalProofIntegrationApiConfig, GasAdjusterConfig, ObjectStoreConfig, PostgresConfig, SnapshotsCreatorConfig, }; use zksync_env_config::FromEnv; @@ -68,6 +68,7 @@ pub struct TempConfigStore { pub gas_adjuster_config: Option, pub observability: Option, pub snapshot_creator: Option, + pub da_client_config: Option, pub da_dispatcher_config: Option, pub protective_reads_writer_config: Option, pub basic_witness_input_producer_config: Option, @@ -105,6 +106,7 @@ impl TempConfigStore { eth: self.eth_sender_config.clone(), snapshot_creator: self.snapshot_creator.clone(), 
observability: self.observability.clone(), + da_client_config: self.da_client_config.clone(), da_dispatcher_config: self.da_dispatcher_config.clone(), protective_reads_writer_config: self.protective_reads_writer_config.clone(), basic_witness_input_producer_config: self.basic_witness_input_producer_config.clone(), @@ -188,6 +190,7 @@ fn load_env_config() -> anyhow::Result { gas_adjuster_config: GasAdjusterConfig::from_env().ok(), observability: ObservabilityConfig::from_env().ok(), snapshot_creator: SnapshotsCreatorConfig::from_env().ok(), + da_client_config: DAClientConfig::from_env().ok(), da_dispatcher_config: DADispatcherConfig::from_env().ok(), protective_reads_writer_config: ProtectiveReadsWriterConfig::from_env().ok(), basic_witness_input_producer_config: BasicWitnessInputProducerConfig::from_env().ok(), diff --git a/core/lib/default_da_clients/Cargo.toml b/core/node/da_clients/Cargo.toml similarity index 51% rename from core/lib/default_da_clients/Cargo.toml rename to core/node/da_clients/Cargo.toml index 737d209aed31..60b65067f48d 100644 --- a/core/lib/default_da_clients/Cargo.toml +++ b/core/node/da_clients/Cargo.toml @@ -1,6 +1,6 @@ [package] -name = "zksync_default_da_clients" -description = "ZKsync DA client implementations" +name = "zksync_da_clients" +description = "ZKsync data availability clients" version.workspace = true edition.workspace = true authors.workspace = true @@ -16,10 +16,24 @@ tracing.workspace = true async-trait.workspace = true anyhow.workspace = true flate2.workspace = true +tokio.workspace = true zksync_config.workspace = true zksync_types.workspace = true zksync_object_store.workspace = true zksync_da_client.workspace = true -zksync_node_framework.workspace = true zksync_env_config.workspace = true +futures.workspace = true + +# Avail dependencies +scale-encode.workspace = true +subxt-metadata.workspace = true +blake2.workspace = true +base58.workspace = true +serde_json.workspace = true +hex.workspace = true 
+blake2b_simd.workspace = true + +jsonrpsee = { workspace = true, features = ["ws-client"] } +parity-scale-codec = { workspace = true, features = ["derive"] } +subxt-signer = { workspace = true, features = ["sr25519", "native"] } diff --git a/core/node/da_clients/README.md b/core/node/da_clients/README.md new file mode 100644 index 000000000000..df06cef24197 --- /dev/null +++ b/core/node/da_clients/README.md @@ -0,0 +1,10 @@ +# Data Availability Clients + +This crate contains the implementations of the Data Availability clients. + +Currently, the following DataAvailability clients are implemented: + +- `NoDA client` that does not send or store any pubdata, it is needed to run the zkSync network in the "no-DA" mode + utilizing the DA framework. +- `Object Store client` that stores the pubdata in the Object Store(GCS). +- `Avail` that sends the pubdata to the Avail DA layer. diff --git a/core/node/da_clients/src/avail/client.rs b/core/node/da_clients/src/avail/client.rs new file mode 100644 index 000000000000..021906d73a01 --- /dev/null +++ b/core/node/da_clients/src/avail/client.rs @@ -0,0 +1,85 @@ +use std::{fmt::Debug, sync::Arc}; + +use async_trait::async_trait; +use jsonrpsee::ws_client::WsClientBuilder; +use zksync_config::AvailConfig; +use zksync_da_client::{ + types::{DAError, DispatchResponse, InclusionData}, + DataAvailabilityClient, +}; + +use crate::avail::sdk::RawAvailClient; + +/// An implementation of the `DataAvailabilityClient` trait that interacts with the Avail network. 
+#[derive(Debug, Clone)] +pub struct AvailClient { + config: AvailConfig, + sdk_client: Arc, +} + +impl AvailClient { + pub async fn new(config: AvailConfig) -> anyhow::Result { + let sdk_client = RawAvailClient::new(config.app_id, config.seed.clone()).await?; + + Ok(Self { + config, + sdk_client: Arc::new(sdk_client), + }) + } +} + +#[async_trait] +impl DataAvailabilityClient for AvailClient { + async fn dispatch_blob( + &self, + _: u32, // batch_number + data: Vec, + ) -> anyhow::Result { + let client = WsClientBuilder::default() + .build(self.config.api_node_url.as_str()) + .await + .map_err(to_non_retriable_da_error)?; + + let extrinsic = self + .sdk_client + .build_extrinsic(&client, data) + .await + .map_err(to_non_retriable_da_error)?; + + let block_hash = self + .sdk_client + .submit_extrinsic(&client, extrinsic.as_str()) + .await + .map_err(to_non_retriable_da_error)?; + let tx_id = self + .sdk_client + .get_tx_id(&client, block_hash.as_str(), extrinsic.as_str()) + .await + .map_err(to_non_retriable_da_error)?; + + Ok(DispatchResponse::from(format!("{}:{}", block_hash, tx_id))) + } + + async fn get_inclusion_data( + &self, + _blob_id: &str, + ) -> anyhow::Result, DAError> { + // TODO: implement inclusion data retrieval + Ok(Some(InclusionData { data: vec![] })) + } + + fn clone_boxed(&self) -> Box { + Box::new(self.clone()) + } + + fn blob_size_limit(&self) -> Option { + Some(RawAvailClient::MAX_BLOB_SIZE) + } +} + +pub fn to_non_retriable_da_error(error: impl Into) -> DAError { + DAError { + error: error.into(), + is_retriable: false, + } +} diff --git a/core/node/da_clients/src/avail/mod.rs b/core/node/da_clients/src/avail/mod.rs new file mode 100644 index 000000000000..82073448ba15 --- /dev/null +++ b/core/node/da_clients/src/avail/mod.rs @@ -0,0 +1,4 @@ +mod client; +mod sdk; + +pub use self::client::AvailClient; diff --git a/core/node/da_clients/src/avail/sdk.rs b/core/node/da_clients/src/avail/sdk.rs new file mode 100644 index 
000000000000..5e67540fcc69 --- /dev/null +++ b/core/node/da_clients/src/avail/sdk.rs @@ -0,0 +1,371 @@ +//! Minimal reimplementation of the Avail SDK client required for the DA client implementation. +//! This is considered to be a temporary solution until a mature SDK is available on crates.io + +use std::fmt::Debug; + +use jsonrpsee::{ + core::client::{Client, ClientT, Subscription, SubscriptionClientT}, + rpc_params, +}; +use parity_scale_codec::{Compact, Decode, Encode}; +use scale_encode::EncodeAsFields; +use subxt_signer::{ + bip39::Mnemonic, + sr25519::{Keypair, Signature}, +}; + +use crate::avail::client::to_non_retriable_da_error; + +const PROTOCOL_VERSION: u8 = 4; + +/// An implementation of the `DataAvailabilityClient` trait that interacts with the Avail network. +#[derive(Debug, Clone)] +pub(crate) struct RawAvailClient { + app_id: u32, + keypair: Keypair, +} + +/// Utility type needed for encoding the call data +#[derive(parity_scale_codec::Encode, scale_encode::EncodeAsType)] +#[encode_as_type(crate_path = "scale_encode")] +struct SubmitData { + pub data: BoundedVec, +} + +/// Utility type needed for encoding the call data +#[derive(parity_scale_codec::Encode, scale_encode::EncodeAsType)] +#[encode_as_type(crate_path = "scale_encode")] +struct BoundedVec<_0>(pub Vec<_0>); + +impl RawAvailClient { + pub(crate) const MAX_BLOB_SIZE: usize = 512 * 1024; // 512kb + + pub(crate) async fn new(app_id: u32, seed: String) -> anyhow::Result { + let mnemonic = Mnemonic::parse(seed)?; + let keypair = Keypair::from_phrase(&mnemonic, None)?; + + Ok(Self { app_id, keypair }) + } + + /// Returns a hex-encoded extrinsic + pub(crate) async fn build_extrinsic( + &self, + client: &Client, + data: Vec, + ) -> anyhow::Result { + let call_data = self + .get_encoded_call(client, data) + .await + .map_err(to_non_retriable_da_error)?; + let extra_params = self + .get_extended_params(client) + .await + .map_err(to_non_retriable_da_error)?; + let additional_params = self + 
.get_additional_params(client) + .await + .map_err(to_non_retriable_da_error)?; + + let signature = self.get_signature( + call_data.as_slice(), + extra_params.as_slice(), + additional_params.as_slice(), + ); + + let ext = self.get_submittable_extrinsic( + signature, + extra_params.as_slice(), + call_data.as_slice(), + ); + + Ok(hex::encode(&ext)) + } + + /// Returns an encoded call data + async fn get_encoded_call( + &self, + client: &Client, + data: Vec, + ) -> anyhow::Result, anyhow::Error> { + let resp: serde_json::Value = client.request("state_getMetadata", rpc_params![]).await?; + + let resp = resp + .as_str() + .ok_or_else(|| anyhow::anyhow!("Invalid metadata"))? + .to_string(); + + let metadata_bytes = hex::decode( + resp.strip_prefix("0x") + .ok_or_else(|| anyhow::anyhow!("Metadata doesn't have 0x prefix"))?, + )?; + let meta = subxt_metadata::Metadata::decode(&mut &metadata_bytes[..])?; + + let pallet = meta + .pallet_by_name("DataAvailability") + .ok_or_else(|| anyhow::anyhow!("DataAvailability pallet not found"))?; + + let call = pallet + .call_variant_by_name("submit_data") + .ok_or_else(|| anyhow::anyhow!("submit_data call not found"))?; + + let mut fields = call + .fields + .iter() + .map(|f| scale_encode::Field::new(f.ty.id, f.name.as_deref())); + + let mut bytes = Vec::new(); + pallet.index().encode_to(&mut bytes); + call.index.encode_to(&mut bytes); + + SubmitData { + data: BoundedVec(data), + } + .encode_as_fields_to(&mut fields, meta.types(), &mut bytes)?; + + Ok(bytes) + } + + /// Queries a node for a nonce + async fn fetch_account_nonce(&self, client: &Client) -> anyhow::Result { + let address = to_addr(self.keypair.clone()); + let resp: serde_json::Value = client + .request("system_accountNextIndex", rpc_params![address]) + .await?; + + let nonce = resp + .as_u64() + .ok_or_else(|| anyhow::anyhow!("Invalid nonce"))?; + + Ok(nonce) + } + + /// Returns a Compact-encoded extended extrinsic parameters + /// Extrinsic params used here: + /// - 
CheckMortality + /// - CheckNonce + /// - ChargeTransactionPayment + /// - CheckAppId + async fn get_extended_params(&self, client: &Client) -> anyhow::Result> { + let era = 0u8; // immortal era + let tip = 0u128; // no tip + let nonce = self.fetch_account_nonce(client).await?; + + // Encode the params + let mut bytes = vec![era]; + Compact(nonce).encode_to(&mut bytes); + Compact(tip).encode_to(&mut bytes); + Compact(self.app_id).encode_to(&mut bytes); + + Ok(bytes) + } + + /// Returns a Compact-encoded additional extrinsic parameters + /// Extrinsic params used here + /// - CheckSpecVersion + /// - CheckTxVersion + /// - CheckGenesis + async fn get_additional_params(&self, client: &Client) -> anyhow::Result> { + let (spec_version, tx_version) = self.get_runtime_version(client).await?; + let genesis_hash = self.fetch_genesis_hash(client).await?; + + let mut bytes = Vec::new(); + spec_version.encode_to(&mut bytes); + tx_version.encode_to(&mut bytes); + // adding genesis hash twice (that's what API requires ¯\_(ツ)_/¯) + bytes.extend(hex::decode(&genesis_hash)?); + bytes.extend(hex::decode(&genesis_hash)?); + + Ok(bytes) + } + + /// Returns the specification and transaction versions of a runtime + async fn get_runtime_version(&self, client: &Client) -> anyhow::Result<(u32, u32)> { + let resp: serde_json::Value = client + .request("chain_getRuntimeVersion", rpc_params![]) + .await?; + + let sv = resp + .get("specVersion") + .ok_or_else(|| anyhow::anyhow!("Invalid runtime version"))?; + let tv = resp + .get("transactionVersion") + .ok_or_else(|| anyhow::anyhow!("Invalid runtime version"))?; + + let spec_version = sv + .as_u64() + .ok_or_else(|| anyhow::anyhow!("Invalid spec version"))?; + let transaction_version = tv + .as_u64() + .ok_or_else(|| anyhow::anyhow!("Invalid transaction version"))?; + + Ok((spec_version as u32, transaction_version as u32)) + } + + async fn fetch_genesis_hash(&self, client: &Client) -> anyhow::Result { + let resp: serde_json::Value = 
client.request("chain_getBlockHash", rpc_params![0]).await?; + + let genesis_hash = resp + .as_str() + .ok_or_else(|| anyhow::anyhow!("Invalid genesis hash"))?; + + Ok(genesis_hash + .strip_prefix("0x") + .ok_or_else(|| anyhow::anyhow!("Genesis hash doesn't have a 0x prefix"))? + .to_string()) + } + + /// Returns a signature for partially-encoded extrinsic + fn get_signature( + &self, + call_data: &[u8], + extra_params: &[u8], + additional_params: &[u8], + ) -> Signature { + let mut bytes = vec![]; + bytes.extend_from_slice(call_data); + bytes.extend_from_slice(extra_params); + bytes.extend_from_slice(additional_params); + + if bytes.len() > 256 { + bytes = blake2::<32>(bytes).to_vec(); + } + + self.keypair.sign(&bytes) + } + + /// Encodes all the components of an extrinsic into a single vector + fn get_submittable_extrinsic( + &self, + signature: Signature, + extra_params: &[u8], + call_data: &[u8], + ) -> Vec { + let mut encoded_inner = Vec::new(); + (0b10000000 + PROTOCOL_VERSION).encode_to(&mut encoded_inner); // "is signed" + transaction protocol version + + // sender + encoded_inner.push(0); // 0 as an id param in MultiAddress enum + self.keypair.public_key().0.encode_to(&mut encoded_inner); // from address for signature + + // signature + encoded_inner.push(1); // 1 as an Sr25519 in MultiSignature enum + signature.0.encode_to(&mut encoded_inner); + + // extra params + encoded_inner.extend_from_slice(extra_params); + + // call data + encoded_inner.extend_from_slice(call_data); + + // now, prefix with byte length: + let len = Compact( + u32::try_from(encoded_inner.len()).expect("extrinsic size expected to be <4GB"), + ); + let mut encoded = Vec::new(); + len.encode_to(&mut encoded); + encoded.extend(encoded_inner); + + encoded + } + + /// Submits an extrinsic. 
Subscribes to a stream and waits for a tx to be included in a block + /// to return the block hash + pub(crate) async fn submit_extrinsic( + &self, + client: &Client, + extrinsic: &str, + ) -> anyhow::Result { + let mut sub: Subscription = client + .subscribe( + "author_submitAndWatchExtrinsic", + rpc_params![extrinsic], + "author_unwatchExtrinsic", + ) + .await?; + + let block_hash = loop { + let status = sub.next().await.transpose()?; + + if status.is_some() && status.as_ref().unwrap().is_object() { + if let Some(block_hash) = status.unwrap().get("inBlock") { + break block_hash + .as_str() + .ok_or_else(|| anyhow::anyhow!("Invalid block hash"))? + .strip_prefix("0x") + .ok_or_else(|| anyhow::anyhow!("Block hash doesn't have 0x prefix"))? + .to_string(); + } + } + }; + sub.unsubscribe().await?; + + Ok(block_hash) + } + + /// Iterates over all transaction in the block and finds an ID of the one provided as an argument + pub(crate) async fn get_tx_id( + &self, + client: &Client, + block_hash: &str, + hex_ext: &str, + ) -> anyhow::Result { + let resp: serde_json::Value = client + .request("chain_getBlock", rpc_params![block_hash]) + .await?; + + let block = resp + .get("block") + .ok_or_else(|| anyhow::anyhow!("Invalid block"))?; + let extrinsics = block + .get("extrinsics") + .ok_or_else(|| anyhow::anyhow!("No field named extrinsics in block"))? 
+ .as_array() + .ok_or_else(|| anyhow::anyhow!("Extrinsics field is not an array"))?; + + let hex_ext = format!("0x{}", hex_ext); + + let tx_id = extrinsics + .iter() + .position(|extrinsic| extrinsic.as_str() == Some(hex_ext.as_str())) + .ok_or_else(|| anyhow::anyhow!("Extrinsic not found in block"))?; + + Ok(tx_id) + } +} + +fn blake2(data: Vec) -> [u8; N] { + blake2b_simd::Params::new() + .hash_length(N) + .hash(data.as_slice()) + .as_bytes() + .try_into() + .expect("slice is always the necessary length") +} + +// Taken from subxt accountId implementation +fn to_addr(keypair: Keypair) -> String { + // For serializing to a string to obtain the account nonce, we use the default substrate + // prefix (since we have no way to otherwise pick one). It doesn't really matter, since when + // it's deserialized back in system_accountNextIndex, we ignore this (so long as it's valid). + const SUBSTRATE_SS58_PREFIX: u8 = 42; + // prefix <= 63 just take up one byte at the start: + let mut v = vec![SUBSTRATE_SS58_PREFIX]; + // then push the account ID bytes. + v.extend(keypair.public_key().0); + // then push a 2 byte checksum of what we have so far. + let r = ss58hash(&v); + v.extend(&r[0..2]); + // then encode to base58. 
+ use base58::ToBase58; + v.to_base58() +} + +// Taken from subxt accountId implementation +fn ss58hash(data: &[u8]) -> Vec { + use blake2::{Blake2b512, Digest}; + const PREFIX: &[u8] = b"SS58PRE"; + let mut ctx = Blake2b512::new(); + ctx.update(PREFIX); + ctx.update(data); + ctx.finalize().to_vec() +} diff --git a/core/lib/default_da_clients/src/lib.rs b/core/node/da_clients/src/lib.rs similarity index 71% rename from core/lib/default_da_clients/src/lib.rs rename to core/node/da_clients/src/lib.rs index 3aa2a18cdcec..48311ce4c3f2 100644 --- a/core/lib/default_da_clients/src/lib.rs +++ b/core/node/da_clients/src/lib.rs @@ -1,2 +1,3 @@ +pub mod avail; pub mod no_da; pub mod object_store; diff --git a/core/lib/default_da_clients/src/no_da/client.rs b/core/node/da_clients/src/no_da.rs similarity index 100% rename from core/lib/default_da_clients/src/no_da/client.rs rename to core/node/da_clients/src/no_da.rs diff --git a/core/lib/default_da_clients/src/object_store/client.rs b/core/node/da_clients/src/object_store.rs similarity index 51% rename from core/lib/default_da_clients/src/object_store/client.rs rename to core/node/da_clients/src/object_store.rs index f05029a8eb1c..55764e8260e0 100644 --- a/core/lib/default_da_clients/src/object_store/client.rs +++ b/core/node/da_clients/src/object_store.rs @@ -1,16 +1,20 @@ -use std::sync::Arc; +use std::{ + io::{Read, Write}, + sync::Arc, +}; use async_trait::async_trait; +use flate2::{read::GzDecoder, write::GzEncoder, Compression}; use zksync_config::ObjectStoreConfig; use zksync_da_client::{ types::{DAError, DispatchResponse, InclusionData}, DataAvailabilityClient, }; -use zksync_object_store::{ObjectStore, ObjectStoreFactory}; +use zksync_object_store::{ + Bucket, ObjectStore, ObjectStoreFactory, StoredObject, _reexports::BoxedError, +}; use zksync_types::L1BatchNumber; -use crate::object_store::types::StorablePubdata; - /// An implementation of the `DataAvailabilityClient` trait that stores the pubdata in the GCS. 
#[derive(Clone, Debug)] pub struct ObjectStoreDAClient { @@ -84,3 +88,79 @@ impl DataAvailabilityClient for ObjectStoreDAClient { None } } + +/// Used as a wrapper for the pubdata to be stored in the GCS. +#[derive(Debug)] +struct StorablePubdata { + pub data: Vec, +} + +impl StoredObject for StorablePubdata { + const BUCKET: Bucket = Bucket::DataAvailability; + type Key<'a> = L1BatchNumber; + + fn encode_key(key: Self::Key<'_>) -> String { + format!("l1_batch_{key}_pubdata.gzip") + } + + fn serialize(&self) -> Result, BoxedError> { + let mut encoder = GzEncoder::new(Vec::new(), Compression::default()); + encoder.write_all(&self.data[..])?; + encoder.finish().map_err(From::from) + } + + fn deserialize(bytes: Vec) -> Result { + let mut decoder = GzDecoder::new(&bytes[..]); + let mut decompressed_bytes = Vec::new(); + decoder + .read_to_end(&mut decompressed_bytes) + .map_err(BoxedError::from)?; + + Ok(Self { + data: decompressed_bytes, + }) + } +} + +#[cfg(test)] +mod tests { + use tokio::fs; + use zksync_object_store::{MockObjectStore, StoredObject}; + use zksync_types::L1BatchNumber; + + use super::StorablePubdata; + + #[tokio::test] + async fn test_storable_pubdata_deserialization() { + let serialized = fs::read("./src/test_data/l1_batch_123_pubdata.gzip") + .await + .unwrap(); + + let data = StorablePubdata::deserialize(serialized).unwrap().data; + assert_eq!(data[12], 0); + assert_eq!(data[123], 129); + assert_eq!(data[1234], 153); + } + + #[tokio::test] + async fn stored_object_serialization() { + let batch_number = 123; + let data = vec![1, 2, 3, 4, 5, 6, 123, 255, 0, 0]; + + let store = MockObjectStore::arc(); + store + .put( + L1BatchNumber(batch_number), + &StorablePubdata { data: data.clone() }, + ) + .await + .unwrap(); + + let resp = store + .get::(L1BatchNumber(batch_number)) + .await + .unwrap(); + + assert_eq!(data, resp.data); + } +} diff --git a/core/node/da_clients/src/test_data/l1_batch_123_pubdata.gzip 
b/core/node/da_clients/src/test_data/l1_batch_123_pubdata.gzip new file mode 100644 index 000000000000..78bc67e94efb Binary files /dev/null and b/core/node/da_clients/src/test_data/l1_batch_123_pubdata.gzip differ diff --git a/core/node/eth_watch/Cargo.toml b/core/node/eth_watch/Cargo.toml index bbdc4ba27d34..a3d6325f4a24 100644 --- a/core/node/eth_watch/Cargo.toml +++ b/core/node/eth_watch/Cargo.toml @@ -24,6 +24,7 @@ anyhow.workspace = true thiserror.workspace = true async-trait.workspace = true tracing.workspace = true +async-recursion.workspace = true [dev-dependencies] zksync_concurrency.workspace = true diff --git a/core/node/eth_watch/src/client.rs b/core/node/eth_watch/src/client.rs index 67e603041e6c..237c8e5bc2e6 100644 --- a/core/node/eth_watch/src/client.rs +++ b/core/node/eth_watch/src/client.rs @@ -100,21 +100,24 @@ impl EthHttpQueryClient { .collect() } + #[async_recursion::async_recursion] async fn get_events_inner( &self, from: BlockNumber, to: BlockNumber, - topics1: Vec, - topics2: Vec, - addresses: Vec
, + topics1: Option>, + topics2: Option>, + addresses: Option>, retries_left: usize, ) -> EnrichedClientResult> { - let filter = FilterBuilder::default() + let mut builder = FilterBuilder::default() .from_block(from) .to_block(to) - .topics(Some(topics1), Some(topics2), None, None) - .address(addresses) - .build(); + .topics(topics1.clone(), topics2.clone(), None, None); + if let Some(addresses) = addresses.clone() { + builder = builder.address(addresses); + } + let filter = builder.build(); let mut result = self.client.logs(&filter).await; // This code is compatible with both Infura and Alchemy API providers. @@ -168,17 +171,33 @@ impl EthHttpQueryClient { tracing::warn!("Splitting block range in half: {from:?} - {mid:?} - {to:?}"); let mut first_half = self - .get_events(from, BlockNumber::Number(mid), RETRY_LIMIT) + .get_events_inner( + from, + BlockNumber::Number(mid), + topics1.clone(), + topics2.clone(), + addresses.clone(), + RETRY_LIMIT, + ) .await?; let mut second_half = self - .get_events(BlockNumber::Number(mid + 1u64), to, RETRY_LIMIT) + .get_events_inner( + BlockNumber::Number(mid + 1u64), + to, + topics1, + topics2, + addresses, + RETRY_LIMIT, + ) .await?; first_half.append(&mut second_half); result = Ok(first_half); } else if should_retry(err_code, err_message) && retries_left > 0 { tracing::warn!("Retrying. 
Retries left: {retries_left}"); - result = self.get_events(from, to, retries_left - 1).await; + result = self + .get_events_inner(from, to, topics1, topics2, addresses, retries_left - 1) + .await; } } @@ -216,9 +235,9 @@ impl EthClient for EthHttpQueryClient { .get_events_inner( from_block.into(), to_block.into(), - vec![self.new_upgrade_cut_data_signature], - vec![packed_version], - vec![state_transition_manager_address], + Some(vec![self.new_upgrade_cut_data_signature]), + Some(vec![packed_version]), + Some(vec![state_transition_manager_address]), RETRY_LIMIT, ) .await?; @@ -235,9 +254,9 @@ impl EthClient for EthHttpQueryClient { self.get_events_inner( from, to, - self.topics.clone(), - Vec::new(), - self.get_default_address_list(), + Some(self.topics.clone()), + None, + Some(self.get_default_address_list()), retries_left, ) .await diff --git a/core/node/external_proof_integration_api/src/lib.rs b/core/node/external_proof_integration_api/src/lib.rs index 4355896e2a2e..d152ea265cb8 100644 --- a/core/node/external_proof_integration_api/src/lib.rs +++ b/core/node/external_proof_integration_api/src/lib.rs @@ -19,10 +19,7 @@ use types::{ExternalProof, ProofGenerationDataResponse}; use zksync_basic_types::L1BatchNumber; pub use crate::processor::Processor; -use crate::{ - metrics::{CallOutcome, Method}, - middleware::MetricsMiddleware, -}; +use crate::{metrics::Method, middleware::MetricsMiddleware}; /// External API implementation. 
#[derive(Debug)] @@ -37,11 +34,7 @@ impl Api { axum::middleware::from_fn(move |req: Request, next: Next| async move { let middleware = MetricsMiddleware::new(method); let response = next.run(req).await; - let outcome = match response.status().is_success() { - true => CallOutcome::Success, - false => CallOutcome::Failure, - }; - middleware.observe(outcome); + middleware.observe(response.status()); response }) }; diff --git a/core/node/external_proof_integration_api/src/metrics.rs b/core/node/external_proof_integration_api/src/metrics.rs index f43b49b7b1c0..6b909a278d62 100644 --- a/core/node/external_proof_integration_api/src/metrics.rs +++ b/core/node/external_proof_integration_api/src/metrics.rs @@ -2,13 +2,6 @@ use std::time::Duration; use vise::{EncodeLabelSet, EncodeLabelValue, Histogram, LabeledFamily, Metrics}; -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, EncodeLabelSet, EncodeLabelValue)] -#[metrics(label = "outcome", rename_all = "snake_case")] -pub(crate) enum CallOutcome { - Success, - Failure, -} - #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, EncodeLabelSet, EncodeLabelValue)] #[metrics(label = "type", rename_all = "snake_case")] pub(crate) enum Method { @@ -20,8 +13,8 @@ pub(crate) enum Method { #[derive(Debug, Metrics)] #[metrics(prefix = "external_proof_integration_api")] pub(crate) struct ProofIntegrationApiMetrics { - #[metrics(labels = ["method", "outcome"], buckets = vise::Buckets::LATENCIES)] - pub call_latency: LabeledFamily<(Method, CallOutcome), Histogram, 2>, + #[metrics(labels = ["method", "status"], buckets = vise::Buckets::LATENCIES)] + pub call_latency: LabeledFamily<(Method, u16), Histogram, 2>, } #[vise::register] diff --git a/core/node/external_proof_integration_api/src/middleware.rs b/core/node/external_proof_integration_api/src/middleware.rs index 1dc6aefe9171..ebd4ef5bfb09 100644 --- a/core/node/external_proof_integration_api/src/middleware.rs +++ b/core/node/external_proof_integration_api/src/middleware.rs @@ -1,6 +1,7 
@@ +use axum::http::StatusCode; use tokio::time::Instant; -use crate::metrics::{CallOutcome, Method, METRICS}; +use crate::metrics::{Method, METRICS}; #[derive(Debug)] pub(crate) struct MetricsMiddleware { @@ -16,7 +17,8 @@ impl MetricsMiddleware { } } - pub fn observe(&self, outcome: CallOutcome) { - METRICS.call_latency[&(self.method, outcome)].observe(self.started_at.elapsed()); + pub fn observe(&self, status_code: StatusCode) { + METRICS.call_latency[&(self.method, status_code.as_u16())] + .observe(self.started_at.elapsed()); } } diff --git a/core/node/node_framework/Cargo.toml b/core/node/node_framework/Cargo.toml index 2288c0ddbe8f..1df47e775539 100644 --- a/core/node/node_framework/Cargo.toml +++ b/core/node/node_framework/Cargo.toml @@ -34,6 +34,7 @@ zksync_house_keeper.workspace = true zksync_node_fee_model.workspace = true zksync_eth_sender.workspace = true zksync_da_client.workspace = true +zksync_da_clients.workspace = true zksync_da_dispatcher.workspace = true zksync_block_reverter.workspace = true zksync_vm_executor.workspace = true diff --git a/core/node/node_framework/src/implementations/layers/da_clients/avail.rs b/core/node/node_framework/src/implementations/layers/da_clients/avail.rs new file mode 100644 index 000000000000..7c3d82b6d25b --- /dev/null +++ b/core/node/node_framework/src/implementations/layers/da_clients/avail.rs @@ -0,0 +1,45 @@ +use zksync_config::AvailConfig; +use zksync_da_client::DataAvailabilityClient; +use zksync_da_clients::avail::AvailClient; + +use crate::{ + implementations::resources::da_client::DAClientResource, + wiring_layer::{WiringError, WiringLayer}, + IntoContext, +}; + +#[derive(Debug)] +pub struct AvailWiringLayer { + config: AvailConfig, +} + +impl AvailWiringLayer { + pub fn new(config: AvailConfig) -> Self { + Self { config } + } +} + +#[derive(Debug, IntoContext)] +#[context(crate = crate)] +pub struct Output { + pub client: DAClientResource, +} + +#[async_trait::async_trait] +impl WiringLayer for 
AvailWiringLayer { + type Input = (); + type Output = Output; + + fn layer_name(&self) -> &'static str { + "avail_client_layer" + } + + async fn wire(self, _input: Self::Input) -> Result { + let client: Box = + Box::new(AvailClient::new(self.config).await?); + + Ok(Self::Output { + client: DAClientResource(client), + }) + } +} diff --git a/core/node/node_framework/src/implementations/layers/da_clients/mod.rs b/core/node/node_framework/src/implementations/layers/da_clients/mod.rs new file mode 100644 index 000000000000..48311ce4c3f2 --- /dev/null +++ b/core/node/node_framework/src/implementations/layers/da_clients/mod.rs @@ -0,0 +1,3 @@ +pub mod avail; +pub mod no_da; +pub mod object_store; diff --git a/core/lib/default_da_clients/src/no_da/wiring_layer.rs b/core/node/node_framework/src/implementations/layers/da_clients/no_da.rs similarity index 90% rename from core/lib/default_da_clients/src/no_da/wiring_layer.rs rename to core/node/node_framework/src/implementations/layers/da_clients/no_da.rs index 71a2ee7ce582..5a81ce9b3400 100644 --- a/core/lib/default_da_clients/src/no_da/wiring_layer.rs +++ b/core/node/node_framework/src/implementations/layers/da_clients/no_da.rs @@ -1,18 +1,19 @@ use std::fmt::Debug; use zksync_da_client::DataAvailabilityClient; -use zksync_node_framework::{ +use zksync_da_clients::no_da::NoDAClient; + +use crate::{ implementations::resources::da_client::DAClientResource, wiring_layer::{WiringError, WiringLayer}, IntoContext, }; -use crate::no_da::client::NoDAClient; - #[derive(Debug, Default)] pub struct NoDAClientWiringLayer; #[derive(Debug, IntoContext)] +#[context(crate = crate)] pub struct Output { pub client: DAClientResource, } diff --git a/core/lib/default_da_clients/src/object_store/wiring_layer.rs b/core/node/node_framework/src/implementations/layers/da_clients/object_store.rs similarity index 91% rename from core/lib/default_da_clients/src/object_store/wiring_layer.rs rename to 
core/node/node_framework/src/implementations/layers/da_clients/object_store.rs index 6fc84fb707b7..3fb720696da5 100644 --- a/core/lib/default_da_clients/src/object_store/wiring_layer.rs +++ b/core/node/node_framework/src/implementations/layers/da_clients/object_store.rs @@ -1,13 +1,13 @@ use zksync_config::ObjectStoreConfig; use zksync_da_client::DataAvailabilityClient; -use zksync_node_framework::{ +use zksync_da_clients::object_store::ObjectStoreDAClient; + +use crate::{ implementations::resources::da_client::DAClientResource, wiring_layer::{WiringError, WiringLayer}, IntoContext, }; -use crate::object_store::client::ObjectStoreDAClient; - #[derive(Debug)] pub struct ObjectStorageClientWiringLayer { config: ObjectStoreConfig, @@ -20,6 +20,7 @@ impl ObjectStorageClientWiringLayer { } #[derive(Debug, IntoContext)] +#[context(crate = crate)] pub struct Output { pub client: DAClientResource, } diff --git a/core/node/node_framework/src/implementations/layers/mod.rs b/core/node/node_framework/src/implementations/layers/mod.rs index 6f3500a82cb9..75828da19023 100644 --- a/core/node/node_framework/src/implementations/layers/mod.rs +++ b/core/node/node_framework/src/implementations/layers/mod.rs @@ -6,6 +6,7 @@ pub mod commitment_generator; pub mod consensus; pub mod consistency_checker; pub mod contract_verification_api; +pub mod da_clients; pub mod da_dispatcher; pub mod eth_sender; pub mod eth_watch; diff --git a/core/node/vm_runner/src/impls/bwip.rs b/core/node/vm_runner/src/impls/bwip.rs index 30473b6b3997..5364f93a9736 100644 --- a/core/node/vm_runner/src/impls/bwip.rs +++ b/core/node/vm_runner/src/impls/bwip.rs @@ -52,9 +52,9 @@ impl BasicWitnessInputProducer { ConcurrentOutputHandlerFactory::new(pool.clone(), io.clone(), output_handler_factory); let vm_runner = VmRunner::new( pool, - Box::new(io), + Arc::new(io), Arc::new(loader), - Box::new(output_handler_factory), + Arc::new(output_handler_factory), batch_executor_factory, ); Ok(( @@ -168,7 +168,7 @@ impl 
OutputHandler for BasicWitnessInputProducerOutputHandler { )] async fn handle_l1_batch(self: Box, output: Arc) -> anyhow::Result<()> { let l1_batch_number = self.l1_batch_number; - let mut connection = self.pool.connection().await?; + let mut connection = self.pool.connection_tagged("bwip").await?; tracing::info!(%l1_batch_number, "Started saving VM run data"); @@ -398,7 +398,7 @@ struct BasicWitnessInputProducerOutputHandlerFactory { #[async_trait] impl OutputHandlerFactory for BasicWitnessInputProducerOutputHandlerFactory { async fn create_handler( - &mut self, + &self, system_env: SystemEnv, l1_batch_env: L1BatchEnv, ) -> anyhow::Result> { diff --git a/core/node/vm_runner/src/impls/playground.rs b/core/node/vm_runner/src/impls/playground.rs index 091fa15fc953..dc21d5a32036 100644 --- a/core/node/vm_runner/src/impls/playground.rs +++ b/core/node/vm_runner/src/impls/playground.rs @@ -129,6 +129,7 @@ impl VmPlayground { let mut batch_executor_factory = MainBatchExecutorFactory::new(false, false); batch_executor_factory.set_fast_vm_mode(vm_mode); + batch_executor_factory.observe_storage_metrics(); let io = VmPlaygroundIo { cursor_file_path, @@ -246,9 +247,9 @@ impl VmPlayground { }; let vm_runner = VmRunner::new( self.pool, - Box::new(self.io), + Arc::new(self.io), loader, - Box::new(self.output_handler_factory), + Arc::new(self.output_handler_factory), Box::new(self.batch_executor_factory), ); vm_runner.run(&stop_receiver).await @@ -412,7 +413,7 @@ impl OutputHandler for VmPlaygroundOutputHandler { #[async_trait] impl OutputHandlerFactory for VmPlaygroundOutputHandler { async fn create_handler( - &mut self, + &self, _system_env: SystemEnv, _l1_batch_env: L1BatchEnv, ) -> anyhow::Result> { diff --git a/core/node/vm_runner/src/impls/protective_reads.rs b/core/node/vm_runner/src/impls/protective_reads.rs index b620675b78e2..b1aff9fe3825 100644 --- a/core/node/vm_runner/src/impls/protective_reads.rs +++ b/core/node/vm_runner/src/impls/protective_reads.rs @@ -41,9 +41,9 
@@ impl ProtectiveReadsWriter { let batch_processor = MainBatchExecutorFactory::new(false, false); let vm_runner = VmRunner::new( pool, - Box::new(io), + Arc::new(io), Arc::new(loader), - Box::new(output_handler_factory), + Arc::new(output_handler_factory), Box::new(batch_processor), ); Ok(( @@ -219,7 +219,7 @@ struct ProtectiveReadsOutputHandlerFactory { #[async_trait] impl OutputHandlerFactory for ProtectiveReadsOutputHandlerFactory { async fn create_handler( - &mut self, + &self, _system_env: SystemEnv, l1_batch_env: L1BatchEnv, ) -> anyhow::Result> { diff --git a/core/node/vm_runner/src/io.rs b/core/node/vm_runner/src/io.rs index 2e118f6cfd13..6d758f816f8a 100644 --- a/core/node/vm_runner/src/io.rs +++ b/core/node/vm_runner/src/io.rs @@ -1,4 +1,4 @@ -use std::fmt::Debug; +use std::{fmt::Debug, sync::Arc}; use async_trait::async_trait; use zksync_dal::{Connection, Core}; @@ -31,8 +31,9 @@ pub trait VmRunnerIo: Debug + Send + Sync + 'static { conn: &mut Connection<'_, Core>, ) -> anyhow::Result; - /// Marks the specified batch as being in progress. Must be called before a batch can be marked - /// as completed. + /// Marks the specified batch as being in progress. Will be called at least once before a batch can be marked + /// as completed; can be called multiple times in case of a crash. The order in which this method is called + /// is not specified; i.e., it is **not** guaranteed to be called sequentially. /// /// # Errors /// @@ -44,7 +45,8 @@ pub trait VmRunnerIo: Debug + Send + Sync + 'static { ) -> anyhow::Result<()>; /// Marks the specified batch as the latest completed batch. All earlier batches are considered - /// to be completed too. No guarantees about later batches. + /// to be completed too. No guarantees about later batches. This method is guaranteed to be called + /// with monotonically increasing batch numbers. 
/// /// # Errors /// @@ -55,3 +57,44 @@ pub trait VmRunnerIo: Debug + Send + Sync + 'static { l1_batch_number: L1BatchNumber, ) -> anyhow::Result<()>; } + +#[async_trait] +impl VmRunnerIo for Arc { + fn name(&self) -> &'static str { + (**self).name() + } + + async fn latest_processed_batch( + &self, + conn: &mut Connection<'_, Core>, + ) -> anyhow::Result { + (**self).latest_processed_batch(conn).await + } + + async fn last_ready_to_be_loaded_batch( + &self, + conn: &mut Connection<'_, Core>, + ) -> anyhow::Result { + (**self).last_ready_to_be_loaded_batch(conn).await + } + + async fn mark_l1_batch_as_processing( + &self, + conn: &mut Connection<'_, Core>, + l1_batch_number: L1BatchNumber, + ) -> anyhow::Result<()> { + (**self) + .mark_l1_batch_as_processing(conn, l1_batch_number) + .await + } + + async fn mark_l1_batch_as_completed( + &self, + conn: &mut Connection<'_, Core>, + l1_batch_number: L1BatchNumber, + ) -> anyhow::Result<()> { + (**self) + .mark_l1_batch_as_completed(conn, l1_batch_number) + .await + } +} diff --git a/core/node/vm_runner/src/metrics.rs b/core/node/vm_runner/src/metrics.rs index 4252ad5f0d4f..cc588fd02630 100644 --- a/core/node/vm_runner/src/metrics.rs +++ b/core/node/vm_runner/src/metrics.rs @@ -2,7 +2,28 @@ use std::time::Duration; -use vise::{Buckets, Gauge, Histogram, Metrics}; +use vise::{Buckets, EncodeLabelSet, EncodeLabelValue, Family, Gauge, Histogram, Metrics, Unit}; +use zksync_state::OwnedStorage; + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, EncodeLabelValue, EncodeLabelSet)] +#[metrics(label = "storage", rename_all = "snake_case")] +pub(super) enum StorageKind { + Postgres, + Snapshot, + Rocksdb, + Unknown, +} + +impl StorageKind { + pub fn new(storage: &OwnedStorage) -> Self { + match storage { + OwnedStorage::Rocksdb(_) | OwnedStorage::RocksdbWithMemory(_) => Self::Rocksdb, + OwnedStorage::Postgres(_) => Self::Postgres, + OwnedStorage::Snapshot(_) => Self::Snapshot, + OwnedStorage::Boxed(_) => Self::Unknown, + } + } 
+} #[derive(Debug, Metrics)] #[metrics(prefix = "vm_runner")] @@ -16,6 +37,9 @@ pub(super) struct VmRunnerMetrics { /// Total latency of loading an L1 batch (RocksDB mode only). #[metrics(buckets = Buckets::LATENCIES)] pub storage_load_time: Histogram, + /// Latency of loading data and storage for a batch, grouped by the storage kind. + #[metrics(buckets = Buckets::LATENCIES, unit = Unit::Seconds)] + pub data_and_storage_latency: Family>, /// Total latency of running VM on an L1 batch. #[metrics(buckets = Buckets::LATENCIES)] pub run_vm_time: Histogram, diff --git a/core/node/vm_runner/src/output_handler.rs b/core/node/vm_runner/src/output_handler.rs index 25eae5e36845..7a8d1e41e529 100644 --- a/core/node/vm_runner/src/output_handler.rs +++ b/core/node/vm_runner/src/output_handler.rs @@ -61,7 +61,7 @@ pub trait OutputHandler: fmt::Debug + Send { /// simultaneously. Implementing this trait signifies that this property is held for the data the /// implementation is responsible for. #[async_trait] -pub trait OutputHandlerFactory: fmt::Debug + Send { +pub trait OutputHandlerFactory: fmt::Debug + Send + Sync { /// Creates a [`StateKeeperOutputHandler`] implementation for the provided L1 batch. Only /// supposed to be used for the L1 batch data it was created against. Using it for anything else /// will lead to errors. @@ -70,7 +70,7 @@ pub trait OutputHandlerFactory: fmt::Debug + Send { /// /// Propagates DB errors. 
async fn create_handler( - &mut self, + &self, system_env: SystemEnv, l1_batch_env: L1BatchEnv, ) -> anyhow::Result>; @@ -139,7 +139,7 @@ impl OutputHandlerFactory for ConcurrentOutputHandlerFactory { async fn create_handler( - &mut self, + &self, system_env: SystemEnv, l1_batch_env: L1BatchEnv, ) -> anyhow::Result> { diff --git a/core/node/vm_runner/src/process.rs b/core/node/vm_runner/src/process.rs index e2a678ccdce4..4f7ac1f97284 100644 --- a/core/node/vm_runner/src/process.rs +++ b/core/node/vm_runner/src/process.rs @@ -1,20 +1,26 @@ -use std::{sync::Arc, time::Duration}; +use std::{ + sync::Arc, + time::{Duration, Instant}, +}; use anyhow::Context; -use tokio::{sync::watch, task::JoinHandle}; +use tokio::{ + sync::{watch, Mutex}, + task::JoinHandle, +}; use zksync_dal::{ConnectionPool, Core}; use zksync_state::OwnedStorage; -use zksync_types::{block::L2BlockExecutionData, L1BatchNumber}; -use zksync_vm_interface::{ - executor::{BatchExecutor, BatchExecutorFactory}, - L2BlockEnv, -}; +use zksync_types::L1BatchNumber; +use zksync_vm_interface::{executor::BatchExecutorFactory, L2BlockEnv}; use crate::{ - metrics::METRICS, output_handler::OutputHandler, storage::StorageLoader, L1BatchOutput, - L2BlockOutput, OutputHandlerFactory, VmRunnerIo, + metrics::{StorageKind, METRICS}, + storage::StorageLoader, + L1BatchOutput, L2BlockOutput, OutputHandlerFactory, VmRunnerIo, }; +const SLEEP_INTERVAL: Duration = Duration::from_millis(50); + /// VM runner represents a logic layer of L1 batch / L2 block processing flow akin to that of state /// keeper. The difference is that VM runner is designed to be run on batches/blocks that have /// already been processed by state keeper but still require some extra handling as regulated by @@ -26,13 +32,13 @@ use crate::{ /// /// You can think of VM runner as a concurrent processor of a continuous stream of newly committed /// batches/blocks. 
-#[derive(Debug)] +#[derive(Debug, Clone)] pub struct VmRunner { pool: ConnectionPool, - io: Box, + io: Arc, loader: Arc, - output_handler_factory: Box, - batch_executor_factory: Box>, + output_handler_factory: Arc, + batch_executor_factory: Arc>>>, } impl VmRunner { @@ -44,9 +50,9 @@ impl VmRunner { /// an underlying implementation of [`OutputHandlerFactory`]. pub fn new( pool: ConnectionPool, - io: Box, + io: Arc, loader: Arc, - output_handler_factory: Box, + output_handler_factory: Arc, batch_executor_factory: Box>, ) -> Self { Self { @@ -54,17 +60,42 @@ impl VmRunner { io, loader, output_handler_factory, - batch_executor_factory, + batch_executor_factory: Arc::new(Mutex::new(batch_executor_factory)), } } - async fn process_batch( - mut batch_executor: Box>, - l2_blocks: Vec, - mut output_handler: Box, - ) -> anyhow::Result<()> { + async fn process_batch(self, number: L1BatchNumber) -> anyhow::Result<()> { + let stage_started_at = Instant::now(); + let (batch_data, storage) = loop { + match self.loader.load_batch(number).await? 
{ + Some(data_and_storage) => break data_and_storage, + None => { + // Next batch has not been loaded yet + tokio::time::sleep(SLEEP_INTERVAL).await; + } + } + }; + let kind = StorageKind::new(&storage); + METRICS.data_and_storage_latency[&kind].observe(stage_started_at.elapsed()); + + let mut batch_executor = self.batch_executor_factory.lock().await.init_batch( + storage, + batch_data.l1_batch_env.clone(), + batch_data.system_env.clone(), + ); + let mut output_handler = self + .output_handler_factory + .create_handler(batch_data.system_env, batch_data.l1_batch_env) + .await?; + self.io + .mark_l1_batch_as_processing( + &mut self.pool.connection_tagged("vm_runner").await?, + number, + ) + .await?; + let latency = METRICS.run_vm_time.start(); - for (i, l2_block) in l2_blocks.into_iter().enumerate() { + for (i, l2_block) in batch_data.l2_blocks.into_iter().enumerate() { let block_env = L2BlockEnv::from_l2_block_data(&l2_block); if i > 0 { // First L2 block in every batch is already preloaded @@ -112,14 +143,12 @@ impl VmRunner { /// Consumes VM runner to execute a loop that continuously pulls data from [`VmRunnerIo`] and /// processes it. - pub async fn run(mut self, stop_receiver: &watch::Receiver) -> anyhow::Result<()> { - const SLEEP_INTERVAL: Duration = Duration::from_millis(50); - + pub async fn run(self, stop_receiver: &watch::Receiver) -> anyhow::Result<()> { // Join handles for asynchronous tasks that are being run in the background let mut task_handles: Vec<(L1BatchNumber, JoinHandle>)> = Vec::new(); let mut next_batch = self .io - .latest_processed_batch(&mut self.pool.connection().await?) + .latest_processed_batch(&mut self.pool.connection_tagged("vm_runner").await?) .await? + 1; loop { @@ -148,7 +177,7 @@ impl VmRunner { let last_ready_batch = self .io - .last_ready_to_be_loaded_batch(&mut self.pool.connection().await?) + .last_ready_to_be_loaded_batch(&mut self.pool.connection_tagged("vm_runner").await?) 
.await?; METRICS.last_ready_batch.set(last_ready_batch.0.into()); if next_batch > last_ready_batch { @@ -156,31 +185,8 @@ impl VmRunner { tokio::time::sleep(SLEEP_INTERVAL).await; continue; } - let Some((batch_data, storage)) = self.loader.load_batch(next_batch).await? else { - // Next batch has not been loaded yet - tokio::time::sleep(SLEEP_INTERVAL).await; - continue; - }; - let batch_executor = self.batch_executor_factory.init_batch( - storage, - batch_data.l1_batch_env.clone(), - batch_data.system_env.clone(), - ); - let output_handler = self - .output_handler_factory - .create_handler(batch_data.system_env, batch_data.l1_batch_env) - .await?; - - self.io - .mark_l1_batch_as_processing(&mut self.pool.connection().await?, next_batch) - .await?; - let handle = tokio::task::spawn(Self::process_batch( - batch_executor, - batch_data.l2_blocks, - output_handler, - )); + let handle = tokio::spawn(self.clone().process_batch(next_batch)); task_handles.push((next_batch, handle)); - next_batch += 1; } } diff --git a/core/node/vm_runner/src/storage.rs b/core/node/vm_runner/src/storage.rs index baee426007c5..cd746e4e1d97 100644 --- a/core/node/vm_runner/src/storage.rs +++ b/core/node/vm_runner/src/storage.rs @@ -49,7 +49,7 @@ pub(crate) struct PostgresLoader { impl PostgresLoader { pub async fn new(pool: ConnectionPool, chain_id: L2ChainId) -> anyhow::Result { let mut l1_batch_params_provider = L1BatchParamsProvider::new(); - let mut conn = pool.connection().await?; + let mut conn = pool.connection_tagged("vm_runner").await?; l1_batch_params_provider.initialize(&mut conn).await?; Ok(Self { pool, @@ -72,7 +72,7 @@ impl StorageLoader for PostgresLoader { &self, l1_batch_number: L1BatchNumber, ) -> anyhow::Result> { - let mut conn = self.pool.connection().await?; + let mut conn = self.pool.connection_tagged("vm_runner").await?; let Some(data) = load_batch_execute_data( &mut conn, l1_batch_number, @@ -86,7 +86,7 @@ impl StorageLoader for PostgresLoader { if let Some(snapshot) = 
OwnedStorage::snapshot(&mut conn, l1_batch_number).await? { let postgres = OwnedStorage::postgres(conn, l1_batch_number - 1).await?; - let storage = snapshot.with_fallback(postgres, self.shadow_snapshots); + let storage = snapshot.with_fallback(postgres.into(), self.shadow_snapshots); let storage = OwnedStorage::from(storage); return Ok(Some((data, storage))); } @@ -94,7 +94,7 @@ impl StorageLoader for PostgresLoader { tracing::info!( "Incomplete data to create storage snapshot for batch; will use sequential storage" ); - let conn = self.pool.connection().await?; + let conn = self.pool.connection_tagged("vm_runner").await?; let storage = OwnedStorage::postgres(conn, l1_batch_number - 1).await?; Ok(Some((data, storage.into()))) } diff --git a/core/node/vm_runner/src/tests/mod.rs b/core/node/vm_runner/src/tests/mod.rs index ef1d37ef00e7..15fc30259337 100644 --- a/core/node/vm_runner/src/tests/mod.rs +++ b/core/node/vm_runner/src/tests/mod.rs @@ -39,7 +39,7 @@ struct IoMock { } #[async_trait] -impl VmRunnerIo for Arc> { +impl VmRunnerIo for RwLock { fn name(&self) -> &'static str { "io_mock" } @@ -153,7 +153,7 @@ struct TestOutputFactory { #[async_trait] impl OutputHandlerFactory for TestOutputFactory { async fn create_handler( - &mut self, + &self, _system_env: SystemEnv, l1_batch_env: L1BatchEnv, ) -> anyhow::Result> { diff --git a/core/node/vm_runner/src/tests/process.rs b/core/node/vm_runner/src/tests/process.rs index fec3fd2ba60a..115410ce8fbd 100644 --- a/core/node/vm_runner/src/tests/process.rs +++ b/core/node/vm_runner/src/tests/process.rs @@ -57,9 +57,9 @@ async fn process_batches((batch_count, window): (u32, u32)) -> anyhow::Result<() let batch_executor = MainBatchExecutorFactory::new(false, false); let vm_runner = VmRunner::new( connection_pool, - Box::new(io.clone()), + io.clone(), storage, - Box::new(output_factory), + Arc::new(output_factory), Box::new(batch_executor), ); tokio::task::spawn(async move { vm_runner.run(&stop_receiver).await.unwrap() }); 
diff --git a/core/node/vm_runner/src/tests/storage_writer.rs b/core/node/vm_runner/src/tests/storage_writer.rs index 76d0867125a8..c377cf95b5a8 100644 --- a/core/node/vm_runner/src/tests/storage_writer.rs +++ b/core/node/vm_runner/src/tests/storage_writer.rs @@ -57,6 +57,8 @@ impl VmRunnerIo for StorageWriterIo { l1_batch_number: L1BatchNumber, ) -> anyhow::Result<()> { assert_eq!(l1_batch_number, self.batch() + 1); + // ^ The assertion works because of `last_ready_to_be_loaded_batch()` implementation; it wouldn't hold if we allowed + // to process multiple batches concurrently. Ok(()) } @@ -147,7 +149,7 @@ impl OutputHandler for StorageWriterIo { #[async_trait] impl OutputHandlerFactory for StorageWriterIo { async fn create_handler( - &mut self, + &self, _system_env: SystemEnv, l1_batch_env: L1BatchEnv, ) -> anyhow::Result> { @@ -167,7 +169,7 @@ pub(super) async fn write_storage_logs(pool: ConnectionPool, insert_protec .unwrap() .expect("No L1 batches in storage"); drop(conn); - let io = Box::new(StorageWriterIo { + let io = Arc::new(StorageWriterIo { last_processed_batch: Arc::new(watch::channel(L1BatchNumber(0)).0), last_processed_block: L2BlockNumber(0), pool: pool.clone(), @@ -240,9 +242,9 @@ async fn storage_writer_works(insert_protective_reads: bool) { let batch_executor = MainBatchExecutorFactory::new(false, false); let vm_runner = VmRunner::new( pool, - Box::new(io.clone()), + io.clone(), loader, - Box::new(output_factory), + Arc::new(output_factory), Box::new(batch_executor), ); diff --git a/core/tests/recovery-test/src/index.ts b/core/tests/recovery-test/src/index.ts index 6599e7c5d298..462404af6065 100644 --- a/core/tests/recovery-test/src/index.ts +++ b/core/tests/recovery-test/src/index.ts @@ -271,7 +271,7 @@ export class FundedWallet { await depositTx.waitFinalize(); } - /** Generates at least one L1 batch by transfering funds to itself. */ + /** Generates at least one L1 batch by transferring funds to itself. 
*/ async generateL1Batch(): Promise { const transactionResponse = await this.wallet.transfer({ to: this.wallet.address, @@ -279,15 +279,15 @@ export class FundedWallet { token: zksync.utils.ETH_ADDRESS }); console.log('Generated a transaction from funded wallet', transactionResponse); - const receipt = await transactionResponse.wait(); - console.log('Got finalized transaction receipt', receipt); - // Wait until an L1 batch with the transaction is sealed. - const pastL1BatchNumber = await this.wallet.provider.getL1BatchNumber(); - let newL1BatchNumber: number; - while ((newL1BatchNumber = await this.wallet.provider.getL1BatchNumber()) <= pastL1BatchNumber) { + let receipt: zksync.types.TransactionReceipt; + while (!(receipt = await transactionResponse.wait()).l1BatchNumber) { + console.log('Transaction is not included in L1 batch; sleeping'); await sleep(1000); } + + console.log('Got finalized transaction receipt', receipt); + const newL1BatchNumber = receipt.l1BatchNumber; console.log(`Sealed L1 batch #${newL1BatchNumber}`); return newL1BatchNumber; } diff --git a/core/tests/ts-integration/contracts/zkasm/deep_stak.zkasm b/core/tests/ts-integration/contracts/zkasm/deep_stak.zkasm index bdfb4e70de7c..4cdf8542cab7 100644 --- a/core/tests/ts-integration/contracts/zkasm/deep_stak.zkasm +++ b/core/tests/ts-integration/contracts/zkasm/deep_stak.zkasm @@ -1,26 +1,25 @@ .text - .file "loop.yul" - .globl __entry + .file "loop.yul" + .globl __entry __entry: .func_begin0: - and! 1, r2, r1 - jump.ne @.BB0_2 + and! 
1, r2, r1 + jump.ne @.BB0_2 __LOOP: - near_call r0, @__LOOP, @__LOOP + call r0, @__LOOP, @__LOOP .BB0_1: - add 1, r0, r2 - ret + add 1, r0, r2 + ret .BB0_2: - add 32, r0, r1 - st.2 256, r1 - st.2 288, r0 - add @CPI0_0[0], r0, r1 - ret.ok.to_label r1, @DEFAULT_FAR_RETURN + add 32, r0, r1 + stm.ah 256, r1 + stm.ah 288, r0 + add code[@CPI0_0], r0, r1 + retl r1, @DEFAULT_FAR_RETURN .func_end0: - -.func_end1: - - .note.GNU-stack - .rodata +;; landing pad for returns +DEFAULT_FAR_RETURN: + retl @DEFAULT_FAR_RETURN + .rodata CPI0_0: - .cell 53919893334301279589334030174039261352344891250716429051063678533632 \ No newline at end of file + .cell 53919893334301279589334030174039261352344891250716429051063678533632 diff --git a/core/tests/ts-integration/hardhat.config.ts b/core/tests/ts-integration/hardhat.config.ts index 00abe2b32efb..a96a83ca3ee3 100644 --- a/core/tests/ts-integration/hardhat.config.ts +++ b/core/tests/ts-integration/hardhat.config.ts @@ -4,14 +4,14 @@ import '@matterlabs/hardhat-zksync-vyper'; export default { zksolc: { - version: '1.3.21', + version: '1.5.3', compilerSource: 'binary', settings: { - isSystem: true + enableEraVMExtensions: true } }, zkvyper: { - version: '1.3.13', + version: '1.5.4', compilerSource: 'binary' }, networks: { @@ -20,7 +20,11 @@ export default { } }, solidity: { - version: '0.8.23' + version: '0.8.26', + eraVersion: '1.0.1', + settings: { + evmVersion: 'cancun' + } }, vyper: { version: '0.3.10' diff --git a/core/tests/ts-integration/jest.config.json b/core/tests/ts-integration/jest.config.json index 8fa5ea1eb721..1756de1bb02d 100644 --- a/core/tests/ts-integration/jest.config.json +++ b/core/tests/ts-integration/jest.config.json @@ -14,6 +14,7 @@ "testTimeout": 605000, "globalSetup": "/src/jest-setup/global-setup.ts", "globalTeardown": "/src/jest-setup/global-teardown.ts", + "testEnvironment": "/src/jest-setup/env.ts", "setupFilesAfterEnv": [ "/src/jest-setup/add-matchers.ts" ], diff --git 
a/core/tests/ts-integration/package.json b/core/tests/ts-integration/package.json index b2494ed3878b..6196355e95a0 100644 --- a/core/tests/ts-integration/package.json +++ b/core/tests/ts-integration/package.json @@ -13,9 +13,9 @@ "build-yul": "hardhat run scripts/compile-yul.ts" }, "devDependencies": { - "@matterlabs/hardhat-zksync-deploy": "^1.3.0", - "@matterlabs/hardhat-zksync-solc": "^1.1.4", - "@matterlabs/hardhat-zksync-vyper": "^1.0.8", + "@matterlabs/hardhat-zksync-deploy": "^1.5.0", + "@matterlabs/hardhat-zksync-solc": "^1.2.4", + "@matterlabs/hardhat-zksync-vyper": "^1.1.0", "@nomiclabs/hardhat-vyper": "^3.0.6", "@types/jest": "^29.0.3", "@types/node": "^18.19.15", @@ -25,6 +25,7 @@ "ethers": "^6.7.1", "hardhat": "=2.22.2", "jest": "^29.0.3", + "jest-environment-node": "^29.0.3", "jest-matcher-utils": "^29.0.3", "node-fetch": "^2.6.1", "ts-jest": "^29.0.1", diff --git a/core/tests/ts-integration/scripts/compile-yul.ts b/core/tests/ts-integration/scripts/compile-yul.ts index dda65456a6c6..876caacdfab3 100644 --- a/core/tests/ts-integration/scripts/compile-yul.ts +++ b/core/tests/ts-integration/scripts/compile-yul.ts @@ -7,7 +7,7 @@ import { getZksolcUrl, saltFromUrl } from '@matterlabs/hardhat-zksync-solc'; import { getCompilersDir } from 'hardhat/internal/util/global-dir'; import path from 'path'; -const COMPILER_VERSION = '1.3.21'; +const COMPILER_VERSION = '1.5.3'; const IS_COMPILER_PRE_RELEASE = false; async function compilerLocation(): Promise { @@ -48,18 +48,24 @@ export async function compile( } let paths = preparePaths(pathToHome, path, files, outputDirName); - let systemMode = type === 'yul' ? '--system-mode --optimization 3' : ''; + let eraVmExtensions = type === 'yul' ? 
'--enable-eravm-extensions --optimization 3' : ''; const zksolcLocation = await compilerLocation(); await spawn( - `${zksolcLocation} ${paths.absolutePathSources}/${paths.outputDir} ${systemMode} --${type} --bin --overwrite -o ${paths.absolutePathArtifacts}/${paths.outputDir}` + `${zksolcLocation} ${paths.absolutePathSources}/${paths.outputDir} ${eraVmExtensions} --${type} --bin --overwrite -o ${paths.absolutePathArtifacts}/${paths.outputDir}` ); } export async function compileFolder(pathToHome: string, path: string, type: string) { + let compilationMode; + if (type === 'zkasm') { + compilationMode = 'eravm-assembly'; + } else { + compilationMode = type; + } let files: string[] = (await fs.promises.readdir(path)).filter((fn) => fn.endsWith(`.${type}`)); for (const file of files) { - await compile(pathToHome, path, [file], `${file}`, type); + await compile(pathToHome, path, [file], `${file}`, compilationMode); } } diff --git a/core/tests/ts-integration/src/jest-setup/env.ts b/core/tests/ts-integration/src/jest-setup/env.ts new file mode 100644 index 000000000000..77bbfc929111 --- /dev/null +++ b/core/tests/ts-integration/src/jest-setup/env.ts @@ -0,0 +1,14 @@ +import NodeEnvironment from 'jest-environment-node'; +import type { EnvironmentContext, JestEnvironmentConfig } from '@jest/environment'; + +export default class IntegrationTestEnvironment extends NodeEnvironment { + constructor(config: JestEnvironmentConfig, context: EnvironmentContext) { + super(config, context); + } + + override async setup() { + await super.setup(); + // Provide access to raw console in order to produce less cluttered debug messages + this.global.rawWriteToConsole = console.log; + } +} diff --git a/core/tests/ts-integration/src/jest-setup/global-setup.ts b/core/tests/ts-integration/src/jest-setup/global-setup.ts index d84d70fe69da..ffb1a8c35030 100644 --- a/core/tests/ts-integration/src/jest-setup/global-setup.ts +++ b/core/tests/ts-integration/src/jest-setup/global-setup.ts @@ -11,11 
+11,12 @@ declare global { */ async function performSetup(_globalConfig: any, _projectConfig: any) { // Perform the test initialization. - // This is an expensive operation that preceeds running any tests, as we need + // This is an expensive operation that precedes running any tests, as we need // to deposit & distribute funds, deploy some contracts, and perform basic server checks. // Jest writes an initial message without a newline, so we have to do it manually. console.log(''); + globalThis.rawWriteToConsole = console.log; // Before starting any actual logic, we need to ensure that the server is running (it may not // be the case, for example, right after deployment on stage). diff --git a/core/tests/ts-integration/src/l1-provider.ts b/core/tests/ts-integration/src/l1-provider.ts new file mode 100644 index 000000000000..39b0397cd069 --- /dev/null +++ b/core/tests/ts-integration/src/l1-provider.ts @@ -0,0 +1,82 @@ +import { + ethers, + JsonRpcProvider, + Network, + TransactionRequest, + TransactionResponse, + TransactionResponseParams +} from 'ethers'; +import { Reporter } from './reporter'; +import { AugmentedTransactionResponse } from './transaction-response'; + +export class L1Provider extends JsonRpcProvider { + readonly reporter: Reporter; + + constructor(url: string, reporter?: Reporter) { + super(url, undefined, { batchMaxCount: 1 }); + this.reporter = reporter ?? 
new Reporter(); + } + + override _wrapTransactionResponse(tx: TransactionResponseParams, network: Network): L1TransactionResponse { + const base = super._wrapTransactionResponse(tx, network); + return new L1TransactionResponse(base, this.reporter); + } +} + +class L1TransactionResponse extends ethers.TransactionResponse implements AugmentedTransactionResponse { + public readonly kind = 'L1'; + private isWaitingReported: boolean = false; + private isReceiptReported: boolean = false; + + constructor(base: ethers.TransactionResponse, public readonly reporter: Reporter) { + super(base, base.provider); + } + + override async wait(confirmations?: number, timeout?: number) { + if (!this.isWaitingReported) { + this.reporter.debug( + `Started waiting for L1 transaction ${this.hash} (from=${this.from}, nonce=${this.nonce})` + ); + this.isWaitingReported = true; + } + + const receipt = await super.wait(confirmations, timeout); + if (receipt !== null && !this.isReceiptReported) { + this.reporter.debug( + `Obtained receipt for L1 transaction ${this.hash}: blockNumber=${receipt.blockNumber}, status=${receipt.status}` + ); + this.isReceiptReported = true; + } + return receipt; + } + + override replaceableTransaction(startBlock: number): L1TransactionResponse { + const base = super.replaceableTransaction(startBlock); + return new L1TransactionResponse(base, this.reporter); + } +} + +/** Wallet that retries `sendTransaction` requests on "nonce expired" errors, provided that it's possible (i.e., no nonce is set in the request). 
*/ +export class RetryableL1Wallet extends ethers.Wallet { + constructor(key: string, provider: L1Provider) { + super(key, provider); + } + + override async sendTransaction(tx: TransactionRequest): Promise<TransactionResponse> { + const reporter = (<L1Provider>this.provider!).reporter; + while (true) { + try { + return await super.sendTransaction(tx); + } catch (err: any) { + // For unknown reason, `reth` sometimes returns outdated transaction count under load, leading to transactions getting rejected. + // This is a workaround for this issue. + reporter.debug('L1 transaction request failed', tx, err); + if (err.code === 'NONCE_EXPIRED' && (tx.nonce === null || tx.nonce === undefined)) { + reporter.debug('Retrying L1 transaction request', tx); + } else { + throw err; + } + } + } + } +} diff --git a/core/tests/ts-integration/src/matchers/transaction.ts b/core/tests/ts-integration/src/matchers/transaction.ts index 89e90b6d5f16..ac5bf8e77eaf 100644 --- a/core/tests/ts-integration/src/matchers/transaction.ts +++ b/core/tests/ts-integration/src/matchers/transaction.ts @@ -1,7 +1,8 @@ import { TestMessage } from './matcher-helpers'; import { MatcherModifier } from '../modifiers'; import * as zksync from 'zksync-ethers'; -import { AugmentedTransactionResponse } from '../retry-provider'; +import { AugmentedTransactionResponse } from '../transaction-response'; +import { ethers } from 'ethers'; // This file contains implementation of matchers for ZKsync/ethereum transaction. // For actual doc-comments, see `typings/jest.d.ts` file. @@ -207,7 +208,7 @@ function fail(message: string) { * * @returns If check has failed, returns a Jest error object. Otherwise, returns `undefined`.
*/ -function checkReceiptFields(request: zksync.types.TransactionResponse, receipt: zksync.types.TransactionReceipt) { +function checkReceiptFields(request: ethers.TransactionResponseParams, receipt: zksync.types.TransactionReceipt) { const errorMessageBuilder = new TestMessage() .matcherHint('.checkReceiptFields') .line('Transaction receipt is not properly formatted. Transaction request:') diff --git a/core/tests/ts-integration/src/reporter.ts b/core/tests/ts-integration/src/reporter.ts index 903ff3101ef9..e6a11f0725bf 100644 --- a/core/tests/ts-integration/src/reporter.ts +++ b/core/tests/ts-integration/src/reporter.ts @@ -102,7 +102,7 @@ export class Reporter { // Timestamps only make sense to include in tests. const timestampString = testName === undefined ? '' : timestamp(`${new Date().toISOString()} `); const testString = testName ? info(` [${testName}]`) : ''; - console.log(this.indent(`${timestampString}DEBUG${testString}: ${message}`), ...args); + rawWriteToConsole(this.indent(`${timestampString}DEBUG${testString}: ${message}`), ...args); } } diff --git a/core/tests/ts-integration/src/retry-provider.ts b/core/tests/ts-integration/src/retry-provider.ts index 1763c0e4edf5..51d88357c6c3 100644 --- a/core/tests/ts-integration/src/retry-provider.ts +++ b/core/tests/ts-integration/src/retry-provider.ts @@ -1,12 +1,15 @@ import * as zksync from 'zksync-ethers'; import * as ethers from 'ethers'; import { Reporter } from './reporter'; +import { AugmentedTransactionResponse } from './transaction-response'; +import { L1Provider, RetryableL1Wallet } from './l1-provider'; /** * RetryProvider retries every RPC request if it detects a timeout-related issue on the server side. 
*/ export class RetryProvider extends zksync.Provider { private readonly reporter: Reporter; + private readonly knownTransactionHashes: Set<string> = new Set(); constructor(_url?: string | { url: string; timeout: number }, network?: ethers.Networkish, reporter?: Reporter) { let url; @@ -55,15 +58,63 @@ export class RetryProvider extends zksync.Provider { } } + override _wrapTransactionResponse(txResponse: any): L2TransactionResponse { + const base = super._wrapTransactionResponse(txResponse); + this.knownTransactionHashes.add(base.hash); + return new L2TransactionResponse(base, this.reporter); + } + override _wrapTransactionReceipt(receipt: any): zksync.types.TransactionReceipt { const wrapped = super._wrapTransactionReceipt(receipt); - this.reporter.debug( - `Obtained receipt for transaction ${receipt.transactionHash}: blockNumber=${receipt.blockNumber}, status=${receipt.status}` - ); + if (!this.knownTransactionHashes.has(receipt.transactionHash)) { + this.knownTransactionHashes.add(receipt.transactionHash); + this.reporter.debug( + `Obtained receipt for L2 transaction ${receipt.transactionHash}: blockNumber=${receipt.blockNumber}, status=${receipt.status}` + ); + } return wrapped; } } -export interface AugmentedTransactionResponse extends zksync.types.TransactionResponse { - readonly reporter?: Reporter; +class L2TransactionResponse extends zksync.types.TransactionResponse implements AugmentedTransactionResponse { + public readonly kind = 'L2'; + private isWaitingReported: boolean = false; + private isReceiptReported: boolean = false; + + constructor(base: zksync.types.TransactionResponse, public readonly reporter: Reporter) { + super(base, base.provider); + } + + override async wait(confirmations?: number) { + if (!this.isWaitingReported) { + this.reporter.debug( + `Started waiting for L2 transaction ${this.hash} (from=${this.from}, nonce=${this.nonce})` + ); + this.isWaitingReported = true; + } + const receipt = await super.wait(confirmations); + if (receipt !== null
&& !this.isReceiptReported) { + this.reporter.debug( + `Obtained receipt for L2 transaction ${this.hash}: blockNumber=${receipt.blockNumber}, status=${receipt.status}` + ); + this.isReceiptReported = true; + } + return receipt; + } + + override replaceableTransaction(startBlock: number): L2TransactionResponse { + const base = super.replaceableTransaction(startBlock); + return new L2TransactionResponse(base, this.reporter); + } +} + +/** Wallet that retries expired nonce errors for L1 transactions. */ +export class RetryableWallet extends zksync.Wallet { + constructor(privateKey: string, l2Provider: RetryProvider, l1Provider: L1Provider) { + super(privateKey, l2Provider, l1Provider); + } + + override ethWallet(): RetryableL1Wallet { + return new RetryableL1Wallet(this.privateKey, this._providerL1()); + } } diff --git a/core/tests/ts-integration/src/test-master.ts b/core/tests/ts-integration/src/test-master.ts index 09fddd1589ca..297116b0b512 100644 --- a/core/tests/ts-integration/src/test-master.ts +++ b/core/tests/ts-integration/src/test-master.ts @@ -2,9 +2,10 @@ import * as zksync from 'zksync-ethers'; import * as ethers from 'ethers'; import { TestEnvironment, TestContext } from './types'; import { claimEtherBack } from './context-owner'; -import { RetryProvider } from './retry-provider'; +import { RetryableWallet, RetryProvider } from './retry-provider'; import { Reporter } from './reporter'; import { bigIntReviver } from './helpers'; +import { L1Provider } from './l1-provider'; /** * Test master is a singleton class (per suite) that is capable of providing wallets to the suite. 
@@ -19,8 +20,8 @@ export class TestMaster { private readonly env: TestEnvironment; readonly reporter: Reporter; - private readonly l1Provider: ethers.JsonRpcProvider; - private readonly l2Provider: zksync.Provider; + private readonly l1Provider: L1Provider; + private readonly l2Provider: RetryProvider; private readonly mainWallet: zksync.Wallet; private readonly subAccounts: zksync.Wallet[] = []; @@ -52,7 +53,7 @@ export class TestMaster { if (!suiteWalletPK) { throw new Error(`Wallet for ${suiteName} suite was not provided`); } - this.l1Provider = new ethers.JsonRpcProvider(this.env.l1NodeUrl); + this.l1Provider = new L1Provider(this.env.l1NodeUrl, this.reporter); this.l2Provider = new RetryProvider( { url: this.env.l2NodeUrl, @@ -71,7 +72,7 @@ export class TestMaster { this.l2Provider.pollingInterval = 5000; } - this.mainWallet = new zksync.Wallet(suiteWalletPK, this.l2Provider, this.l1Provider); + this.mainWallet = new RetryableWallet(suiteWalletPK, this.l2Provider, this.l1Provider); } /** @@ -112,7 +113,7 @@ export class TestMaster { */ newEmptyAccount(): zksync.Wallet { const randomPK = ethers.Wallet.createRandom().privateKey; - const newWallet = new zksync.Wallet(randomPK, this.l2Provider, this.l1Provider); + const newWallet = new RetryableWallet(randomPK, this.l2Provider, this.l1Provider); this.subAccounts.push(newWallet); return newWallet; } diff --git a/core/tests/ts-integration/src/transaction-response.ts b/core/tests/ts-integration/src/transaction-response.ts new file mode 100644 index 000000000000..a104b0107edd --- /dev/null +++ b/core/tests/ts-integration/src/transaction-response.ts @@ -0,0 +1,9 @@ +import { ethers } from 'ethers'; +import { Reporter } from './reporter'; + +export interface AugmentedTransactionResponse extends ethers.TransactionResponseParams { + readonly kind: 'L1' | 'L2'; + readonly reporter?: Reporter; + + wait(confirmations?: number, timeout?: number): Promise; +} diff --git 
a/core/tests/ts-integration/tests/api/contract-verification.test.ts b/core/tests/ts-integration/tests/api/contract-verification.test.ts index c0cd887bcf7d..8f8830ce7516 100644 --- a/core/tests/ts-integration/tests/api/contract-verification.test.ts +++ b/core/tests/ts-integration/tests/api/contract-verification.test.ts @@ -10,11 +10,11 @@ import { NodeMode } from '../../src/types'; // Regular expression to match ISO dates. const DATE_REGEX = /\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d{6})?/; -const ZKSOLC_VERSION = 'v1.3.21'; -const SOLC_VERSION = '0.8.23'; -const ZK_VM_SOLC_VERSION = 'zkVM-0.8.23-1.0.0'; +const ZKSOLC_VERSION = 'v1.5.3'; +const SOLC_VERSION = '0.8.26'; +const ZK_VM_SOLC_VERSION = 'zkVM-0.8.26-1.0.1'; -const ZKVYPER_VERSION = 'v1.3.13'; +const ZKVYPER_VERSION = 'v1.5.4'; const VYPER_VERSION = '0.3.10'; type HttpMethod = 'POST' | 'GET'; @@ -54,32 +54,6 @@ describe('Tests for the contract verification API', () => { const counterContract = await deployContract(alice, contracts.counter, []); const constructorArguments = counterContract.interface.encodeDeploy([]); - const requestBody = { - contractAddress: await counterContract.getAddress(), - contractName: 'contracts/counter/counter.sol:Counter', - sourceCode: getContractSource('counter/counter.sol'), - compilerZksolcVersion: ZKSOLC_VERSION, - compilerSolcVersion: SOLC_VERSION, - optimizationUsed: true, - constructorArguments, - isSystem: true - }; - let requestId = await query('POST', '/contract_verification', undefined, requestBody); - - await expectVerifyRequestToSucceed(requestId, requestBody); - }); - - test('should test zkVM solc contract verification', async () => { - let artifact = contracts.counter; - // TODO: use plugin compilation when it's ready instead of pre-compiled bytecode. 
- artifact.bytecode = fs.readFileSync( - `${testMaster.environment().pathToHome}/core/tests/ts-integration/contracts/counter/zkVM_bytecode.txt`, - 'utf8' - ); - - const counterContract = await deployContract(alice, artifact, []); - const constructorArguments = counterContract.interface.encodeDeploy([]); - const requestBody = { contractAddress: await counterContract.getAddress(), contractName: 'contracts/counter/counter.sol:Counter', @@ -127,7 +101,7 @@ describe('Tests for the contract verification API', () => { sourceCode: standardJsonInput, codeFormat: 'solidity-standard-json-input', compilerZksolcVersion: ZKSOLC_VERSION, - compilerSolcVersion: SOLC_VERSION, + compilerSolcVersion: ZK_VM_SOLC_VERSION, optimizationUsed: true, constructorArguments }; @@ -144,8 +118,8 @@ describe('Tests for the contract verification API', () => { const bytecodePath = `${ testMaster.environment().pathToHome - }/core/tests/ts-integration/contracts/yul/artifacts/Empty.yul/Empty.yul.zbin`; - const bytecode = fs.readFileSync(bytecodePath); + }/core/tests/ts-integration/contracts/yul/artifacts/Empty.yul/yul/Empty.yul.zbin`; + const bytecode = fs.readFileSync(bytecodePath, 'utf8'); const contractFactory = new zksync.ContractFactory([], bytecode, alice); const deployTx = await contractFactory.deploy(); @@ -157,7 +131,7 @@ describe('Tests for the contract verification API', () => { sourceCode, codeFormat: 'yul-single-file', compilerZksolcVersion: ZKSOLC_VERSION, - compilerSolcVersion: SOLC_VERSION, + compilerSolcVersion: ZK_VM_SOLC_VERSION, optimizationUsed: true, constructorArguments: '0x', isSystem: true diff --git a/core/tests/ts-integration/tests/api/debug.test.ts b/core/tests/ts-integration/tests/api/debug.test.ts index dd1ea141a419..054aa57cf64e 100644 --- a/core/tests/ts-integration/tests/api/debug.test.ts +++ b/core/tests/ts-integration/tests/api/debug.test.ts @@ -29,8 +29,8 @@ describe('Debug methods', () => { test('Should not fail for infinity recursion', async () => { const 
bytecodePath = `${ testMaster.environment().pathToHome - }/core/tests/ts-integration/contracts/zkasm/artifacts/deep_stak.zkasm/deep_stak.zkasm.zbin`; - const bytecode = fs.readFileSync(bytecodePath); + }/core/tests/ts-integration/contracts/zkasm/artifacts/deep_stak.zkasm/zkasm/deep_stak.zkasm.zbin`; + const bytecode = fs.readFileSync(bytecodePath, 'utf-8'); const contractFactory = new zksync.ContractFactory([], bytecode, testMaster.mainAccount()); const deployTx = await contractFactory.deploy(); diff --git a/core/tests/ts-integration/typings/jest.d.ts b/core/tests/ts-integration/typings/jest.d.ts index 4d8f1c3530c5..3bb62732cf70 100644 --- a/core/tests/ts-integration/typings/jest.d.ts +++ b/core/tests/ts-integration/typings/jest.d.ts @@ -1,6 +1,8 @@ import { MatcherModifier } from '../src/matchers/transaction-modifiers'; export declare global { + function rawWriteToConsole(message: string, ...args: any[]); + namespace jest { interface Matchers<R> { // Generic matchers diff --git a/core/tests/upgrade-test/tests/upgrade.test.ts b/core/tests/upgrade-test/tests/upgrade.test.ts index 0f70e751b844..2e223b9d7441 100644 --- a/core/tests/upgrade-test/tests/upgrade.test.ts +++ b/core/tests/upgrade-test/tests/upgrade.test.ts @@ -89,25 +89,29 @@ describe('Upgrade test', function () { alice = tester.emptyWallet(); if (fileConfig.loadFromFile) { - let walletConfig = loadConfig({ pathToHome, chain: fileConfig.chain, config: 'wallets.yaml' }); + const chainWalletConfig = loadConfig({ pathToHome, chain: fileConfig.chain, config: 'wallets.yaml' }); - adminGovWallet = new ethers.Wallet(walletConfig.governor.private_key, alice._providerL1()); + adminGovWallet = new ethers.Wallet(chainWalletConfig.governor.private_key, alice._providerL1()); - walletConfig = loadConfig({ + const ecosystemWalletConfig = loadConfig({ pathToHome, chain: fileConfig.chain, configsFolder: '../../configs/', config: 'wallets.yaml' }); - ecosystemGovWallet = new ethers.Wallet(walletConfig.governor.private_key,
alice._providerL1()); + if (ecosystemWalletConfig.governor.private_key == chainWalletConfig.governor.private_key) { + ecosystemGovWallet = adminGovWallet; + } else { + ecosystemGovWallet = new ethers.Wallet(ecosystemWalletConfig.governor.private_key, alice._providerL1()); + } } else { let govMnemonic = ethers.Mnemonic.fromPhrase( require('../../../../etc/test_config/constant/eth.json').mnemonic ); let govWalletHD = ethers.HDNodeWallet.fromMnemonic(govMnemonic, "m/44'/60'/0'/0/1"); adminGovWallet = new ethers.Wallet(govWalletHD.privateKey, alice._providerL1()); - ecosystemGovWallet = new ethers.Wallet(govWalletHD.privateKey, alice._providerL1()); + ecosystemGovWallet = adminGovWallet; } logs = fs.createWriteStream('upgrade.log', { flags: 'a' }); diff --git a/deny.toml b/deny.toml index b840ec5176e8..c2775fc057c8 100644 --- a/deny.toml +++ b/deny.toml @@ -8,8 +8,6 @@ feature-depth = 1 [advisories] ignore = [ - "RUSTSEC-2023-0045", # memoffset vulnerability, dependency coming from bellman_ce - "RUSTSEC-2022-0041", # crossbeam-utils vulnerability, dependency coming from bellman_ce "RUSTSEC-2024-0320", # yaml_rust dependency being unmaintained, dependency in core, we should consider moving to yaml_rust2 fork "RUSTSEC-2020-0168", # mach dependency being unmaintained, dependency in consensus, we should consider moving to mach2 fork "RUSTSEC-2024-0370", # `cs_derive` needs to be updated to not rely on `proc-macro-error` @@ -31,6 +29,8 @@ allow = [ "BSD-3-Clause", "Zlib", "OpenSSL", + "Apache-2.0 WITH LLVM-exception", + "0BSD", ] confidence-threshold = 0.8 diff --git a/docker/build-base/Dockerfile b/docker/build-base/Dockerfile index be3c6ddb452e..16ea566cef70 100644 --- a/docker/build-base/Dockerfile +++ b/docker/build-base/Dockerfile @@ -13,3 +13,4 @@ RUN curl https://sh.rustup.rs -sSf | bash -s -- -y && \ rustup default nightly-2024-08-01 RUN cargo install sqlx-cli --version 0.8.0 +RUN cargo install sccache --version 0.8.1 diff --git a/docker/contract-verifier/Dockerfile 
b/docker/contract-verifier/Dockerfile index 7ed1906b8574..bc9a07c7d375 100644 --- a/docker/contract-verifier/Dockerfile +++ b/docker/contract-verifier/Dockerfile @@ -1,5 +1,18 @@ # syntax=docker/dockerfile:experimental -FROM matterlabs/zksync-build-base:latest as builder +FROM matterlabs/zksync-build-base:latest AS builder + +ARG CUDA_ARCH=89 +ENV CUDAARCHS=${CUDA_ARCH} +# set of args for use of sccache +ARG SCCACHE_GCS_BUCKET="" +ARG SCCACHE_GCS_SERVICE_ACCOUNT="" +ARG SCCACHE_GCS_RW_MODE="" +ARG RUSTC_WRAPPER="" + +ENV SCCACHE_GCS_BUCKET=${SCCACHE_GCS_BUCKET} +ENV SCCACHE_GCS_SERVICE_ACCOUNT=${SCCACHE_GCS_SERVICE_ACCOUNT} +ENV SCCACHE_GCS_RW_MODE=${SCCACHE_GCS_RW_MODE} +ENV RUSTC_WRAPPER=${RUSTC_WRAPPER} WORKDIR /usr/src/zksync COPY . . diff --git a/docker/external-node/Dockerfile b/docker/external-node/Dockerfile index dc989f9ba4e7..a12bd71bca39 100644 --- a/docker/external-node/Dockerfile +++ b/docker/external-node/Dockerfile @@ -1,6 +1,17 @@ # Will work locally only after prior contracts build -FROM matterlabs/zksync-build-base:latest as builder +FROM matterlabs/zksync-build-base:latest AS builder + +# set of args for use of sccache +ARG SCCACHE_GCS_BUCKET="" +ARG SCCACHE_GCS_SERVICE_ACCOUNT="" +ARG SCCACHE_GCS_RW_MODE="" +ARG RUSTC_WRAPPER="" + +ENV SCCACHE_GCS_BUCKET=${SCCACHE_GCS_BUCKET} +ENV SCCACHE_GCS_SERVICE_ACCOUNT=${SCCACHE_GCS_SERVICE_ACCOUNT} +ENV SCCACHE_GCS_RW_MODE=${SCCACHE_GCS_RW_MODE} +ENV RUSTC_WRAPPER=${RUSTC_WRAPPER} WORKDIR /usr/src/zksync COPY . . 
@@ -18,6 +29,8 @@ COPY contracts/system-contracts/bootloader/build/artifacts/ /contracts/system-co COPY contracts/system-contracts/contracts-preprocessed/artifacts/ /contracts/system-contracts/contracts-preprocessed/artifacts/ COPY contracts/system-contracts/contracts-preprocessed/precompiles/artifacts/ /contracts/system-contracts/contracts-preprocessed/precompiles/artifacts/ COPY contracts/system-contracts/artifacts-zk /contracts/system-contracts/artifacts-zk +COPY contracts/l1-contracts/out/ /contracts/l1-contracts/out/ +# TODO Remove once we use foundry inside contracts repo COPY contracts/l1-contracts/artifacts/ /contracts/l1-contracts/artifacts/ COPY contracts/l2-contracts/artifacts-zk/ /contracts/l2-contracts/artifacts-zk/ COPY etc/tokens/ /etc/tokens/ diff --git a/docker/proof-fri-gpu-compressor/Dockerfile b/docker/proof-fri-gpu-compressor/Dockerfile index 45f2ffa51b04..e744787c8259 100644 --- a/docker/proof-fri-gpu-compressor/Dockerfile +++ b/docker/proof-fri-gpu-compressor/Dockerfile @@ -1,10 +1,20 @@ # Will work locally only after prior universal setup key download -FROM nvidia/cuda:12.2.0-devel-ubuntu22.04 as builder +FROM nvidia/cuda:12.2.0-devel-ubuntu22.04 AS builder ARG DEBIAN_FRONTEND=noninteractive ARG CUDA_ARCH=89 ENV CUDAARCHS=${CUDA_ARCH} +# set of args for use of sccache +ARG SCCACHE_GCS_BUCKET="" +ARG SCCACHE_GCS_SERVICE_ACCOUNT="" +ARG SCCACHE_GCS_RW_MODE="" +ARG RUSTC_WRAPPER="" + +ENV SCCACHE_GCS_BUCKET=${SCCACHE_GCS_BUCKET} +ENV SCCACHE_GCS_SERVICE_ACCOUNT=${SCCACHE_GCS_SERVICE_ACCOUNT} +ENV SCCACHE_GCS_RW_MODE=${SCCACHE_GCS_RW_MODE} +ENV RUSTC_WRAPPER=${RUSTC_WRAPPER} RUN apt-get update && apt-get install -y curl clang openssl libssl-dev gcc g++ git \ pkg-config build-essential libclang-dev && \ @@ -22,6 +32,14 @@ RUN curl -Lo cmake-3.24.2-linux-x86_64.sh https://github.com/Kitware/CMake/relea chmod +x cmake-3.24.2-linux-x86_64.sh && \ ./cmake-3.24.2-linux-x86_64.sh --skip-license --prefix=/usr/local +# install sccache +RUN curl 
-Lo sccache-v0.8.1-x86_64-unknown-linux-musl.tar.gz https://github.com/mozilla/sccache/releases/download/v0.8.1/sccache-v0.8.1-x86_64-unknown-linux-musl.tar.gz && \ + tar -xzf sccache-v0.8.1-x86_64-unknown-linux-musl.tar.gz && \ + cp sccache-v0.8.1-x86_64-unknown-linux-musl/sccache /usr/local/sbin/ && \ + rm -rf sccache-v0.8.1-x86_64-unknown-linux-musl.tar.gz && \ + rm -rf sccache-v0.8.1-x86_64-unknown-linux-musl && \ + chmod +x /usr/local/sbin/sccache + WORKDIR /usr/src/zksync COPY . . diff --git a/docker/prover-fri-gateway/Dockerfile b/docker/prover-fri-gateway/Dockerfile index de59451fee8f..2ad8d346956c 100644 --- a/docker/prover-fri-gateway/Dockerfile +++ b/docker/prover-fri-gateway/Dockerfile @@ -1,7 +1,18 @@ -FROM matterlabs/zksync-build-base:latest as builder +FROM matterlabs/zksync-build-base:latest AS builder ARG DEBIAN_FRONTEND=noninteractive +# set of args for use of sccache +ARG SCCACHE_GCS_BUCKET="" +ARG SCCACHE_GCS_SERVICE_ACCOUNT="" +ARG SCCACHE_GCS_RW_MODE="" +ARG RUSTC_WRAPPER="" + +ENV SCCACHE_GCS_BUCKET=${SCCACHE_GCS_BUCKET} +ENV SCCACHE_GCS_SERVICE_ACCOUNT=${SCCACHE_GCS_SERVICE_ACCOUNT} +ENV SCCACHE_GCS_RW_MODE=${SCCACHE_GCS_RW_MODE} +ENV RUSTC_WRAPPER=${RUSTC_WRAPPER} + WORKDIR /usr/src/zksync COPY . . 
diff --git a/docker/prover-gpu-fri/Dockerfile b/docker/prover-gpu-fri/Dockerfile index ad3ff1ff7197..2a680a49c5de 100644 --- a/docker/prover-gpu-fri/Dockerfile +++ b/docker/prover-gpu-fri/Dockerfile @@ -1,10 +1,21 @@ -FROM nvidia/cuda:12.2.0-devel-ubuntu22.04 as builder +FROM nvidia/cuda:12.2.0-devel-ubuntu22.04 AS builder ARG DEBIAN_FRONTEND=noninteractive ARG CUDA_ARCH=89 ENV CUDAARCHS=${CUDA_ARCH} +# set of args for use of sccache +ARG SCCACHE_GCS_BUCKET="" +ARG SCCACHE_GCS_SERVICE_ACCOUNT="" +ARG SCCACHE_GCS_RW_MODE="" +ARG RUSTC_WRAPPER="" + +ENV SCCACHE_GCS_BUCKET=${SCCACHE_GCS_BUCKET} +ENV SCCACHE_GCS_SERVICE_ACCOUNT=${SCCACHE_GCS_SERVICE_ACCOUNT} +ENV SCCACHE_GCS_RW_MODE=${SCCACHE_GCS_RW_MODE} +ENV RUSTC_WRAPPER=${RUSTC_WRAPPER} + RUN apt-get update && apt-get install -y curl clang openssl libssl-dev gcc g++ \ pkg-config build-essential libclang-dev && \ rm -rf /var/lib/apt/lists/* @@ -21,6 +32,14 @@ RUN curl -Lo cmake-3.24.2-linux-x86_64.sh https://github.com/Kitware/CMake/relea chmod +x cmake-3.24.2-linux-x86_64.sh && \ ./cmake-3.24.2-linux-x86_64.sh --skip-license --prefix=/usr/local +# install sccache +RUN curl -Lo sccache-v0.8.1-x86_64-unknown-linux-musl.tar.gz https://github.com/mozilla/sccache/releases/download/v0.8.1/sccache-v0.8.1-x86_64-unknown-linux-musl.tar.gz && \ + tar -xzf sccache-v0.8.1-x86_64-unknown-linux-musl.tar.gz && \ + cp sccache-v0.8.1-x86_64-unknown-linux-musl/sccache /usr/local/sbin/ && \ + rm -rf sccache-v0.8.1-x86_64-unknown-linux-musl.tar.gz && \ + rm -rf sccache-v0.8.1-x86_64-unknown-linux-musl && \ + chmod +x /usr/local/sbin/sccache + WORKDIR /usr/src/zksync COPY . . 
diff --git a/docker/prover-job-monitor/Dockerfile b/docker/prover-job-monitor/Dockerfile index 25d5dcd3af95..b15379d06621 100644 --- a/docker/prover-job-monitor/Dockerfile +++ b/docker/prover-job-monitor/Dockerfile @@ -1,7 +1,18 @@ -FROM matterlabs/zksync-build-base:latest as builder +FROM matterlabs/zksync-build-base:latest AS builder ARG DEBIAN_FRONTEND=noninteractive +# set of args for use of sccache +ARG SCCACHE_GCS_BUCKET="" +ARG SCCACHE_GCS_SERVICE_ACCOUNT="" +ARG SCCACHE_GCS_RW_MODE="" +ARG RUSTC_WRAPPER="" + +ENV SCCACHE_GCS_BUCKET=${SCCACHE_GCS_BUCKET} +ENV SCCACHE_GCS_SERVICE_ACCOUNT=${SCCACHE_GCS_SERVICE_ACCOUNT} +ENV SCCACHE_GCS_RW_MODE=${SCCACHE_GCS_RW_MODE} +ENV RUSTC_WRAPPER=${RUSTC_WRAPPER} + WORKDIR /usr/src/zksync COPY . . diff --git a/docker/server-v2/Dockerfile b/docker/server-v2/Dockerfile index e5d378c3b6d8..e7b036274bda 100644 --- a/docker/server-v2/Dockerfile +++ b/docker/server-v2/Dockerfile @@ -1,6 +1,17 @@ # Will work locally only after prior contracts build # syntax=docker/dockerfile:experimental -FROM matterlabs/zksync-build-base:latest as builder +FROM matterlabs/zksync-build-base:latest AS builder + +# set of args for use of sccache +ARG SCCACHE_GCS_BUCKET="" +ARG SCCACHE_GCS_SERVICE_ACCOUNT="" +ARG SCCACHE_GCS_RW_MODE="" +ARG RUSTC_WRAPPER="" + +ENV SCCACHE_GCS_BUCKET=${SCCACHE_GCS_BUCKET} +ENV SCCACHE_GCS_SERVICE_ACCOUNT=${SCCACHE_GCS_SERVICE_ACCOUNT} +ENV SCCACHE_GCS_RW_MODE=${SCCACHE_GCS_RW_MODE} +ENV RUSTC_WRAPPER=${RUSTC_WRAPPER} WORKDIR /usr/src/zksync @@ -24,6 +35,8 @@ COPY contracts/system-contracts/bootloader/build/artifacts/ /contracts/system-co COPY contracts/system-contracts/contracts-preprocessed/artifacts/ /contracts/system-contracts/contracts-preprocessed/artifacts/ COPY contracts/system-contracts/contracts-preprocessed/precompiles/artifacts/ /contracts/system-contracts/contracts-preprocessed/precompiles/artifacts/ COPY contracts/system-contracts/artifacts-zk /contracts/system-contracts/artifacts-zk +COPY 
contracts/l1-contracts/out/ /contracts/l1-contracts/out/ +# TODO Remove once we use foundry inside contracts repo COPY contracts/l1-contracts/artifacts/ /contracts/l1-contracts/artifacts/ COPY contracts/l2-contracts/artifacts-zk/ /contracts/l2-contracts/artifacts-zk/ COPY etc/tokens/ /etc/tokens/ diff --git a/docker/snapshots-creator/Dockerfile b/docker/snapshots-creator/Dockerfile index 10eef06dfbbc..ee31c5c42d48 100644 --- a/docker/snapshots-creator/Dockerfile +++ b/docker/snapshots-creator/Dockerfile @@ -1,5 +1,16 @@ # syntax=docker/dockerfile:experimental -FROM matterlabs/zksync-build-base:latest as builder +FROM matterlabs/zksync-build-base:latest AS builder + +# set of args for use of sccache +ARG SCCACHE_GCS_BUCKET="" +ARG SCCACHE_GCS_SERVICE_ACCOUNT="" +ARG SCCACHE_GCS_RW_MODE="" +ARG RUSTC_WRAPPER="" + +ENV SCCACHE_GCS_BUCKET=${SCCACHE_GCS_BUCKET} +ENV SCCACHE_GCS_SERVICE_ACCOUNT=${SCCACHE_GCS_SERVICE_ACCOUNT} +ENV SCCACHE_GCS_RW_MODE=${SCCACHE_GCS_RW_MODE} +ENV RUSTC_WRAPPER=${RUSTC_WRAPPER} WORKDIR /usr/src/zksync COPY . . diff --git a/docker/verified-sources-fetcher/Dockerfile b/docker/verified-sources-fetcher/Dockerfile index 972f85d0faf5..faf36f27f5b0 100644 --- a/docker/verified-sources-fetcher/Dockerfile +++ b/docker/verified-sources-fetcher/Dockerfile @@ -1,7 +1,18 @@ -FROM matterlabs/zksync-build-base:latest as builder +FROM matterlabs/zksync-build-base:latest AS builder ARG DEBIAN_FRONTEND=noninteractive +# set of args for use of sccache +ARG SCCACHE_GCS_BUCKET="" +ARG SCCACHE_GCS_SERVICE_ACCOUNT="" +ARG SCCACHE_GCS_RW_MODE="" +ARG RUSTC_WRAPPER="" + +ENV SCCACHE_GCS_BUCKET=${SCCACHE_GCS_BUCKET} +ENV SCCACHE_GCS_SERVICE_ACCOUNT=${SCCACHE_GCS_SERVICE_ACCOUNT} +ENV SCCACHE_GCS_RW_MODE=${SCCACHE_GCS_RW_MODE} +ENV RUSTC_WRAPPER=${RUSTC_WRAPPER} + WORKDIR /usr/src/zksync COPY . . 
diff --git a/docker/witness-generator/Dockerfile b/docker/witness-generator/Dockerfile index 2eebe07515e4..5c5b2429aa80 100644 --- a/docker/witness-generator/Dockerfile +++ b/docker/witness-generator/Dockerfile @@ -4,6 +4,17 @@ ARG DEBIAN_FRONTEND=noninteractive ARG RUST_FLAGS="" ENV RUSTFLAGS=${RUST_FLAGS} +# set of args for use of sccache +ARG SCCACHE_GCS_BUCKET="" +ARG SCCACHE_GCS_SERVICE_ACCOUNT="" +ARG SCCACHE_GCS_RW_MODE="" +ARG RUSTC_WRAPPER="" + +ENV SCCACHE_GCS_BUCKET=${SCCACHE_GCS_BUCKET} +ENV SCCACHE_GCS_SERVICE_ACCOUNT=${SCCACHE_GCS_SERVICE_ACCOUNT} +ENV SCCACHE_GCS_RW_MODE=${SCCACHE_GCS_RW_MODE} +ENV RUSTC_WRAPPER=${RUSTC_WRAPPER} + WORKDIR /usr/src/zksync COPY . . diff --git a/docker/witness-vector-generator/Dockerfile b/docker/witness-vector-generator/Dockerfile index e315f670101a..cfcc8be7efaf 100644 --- a/docker/witness-vector-generator/Dockerfile +++ b/docker/witness-vector-generator/Dockerfile @@ -1,7 +1,18 @@ -FROM matterlabs/zksync-build-base:latest as builder +FROM matterlabs/zksync-build-base:latest AS builder ARG DEBIAN_FRONTEND=noninteractive +# set of args for use of sccache +ARG SCCACHE_GCS_BUCKET="" +ARG SCCACHE_GCS_SERVICE_ACCOUNT="" +ARG SCCACHE_GCS_RW_MODE="" +ARG RUSTC_WRAPPER="" + +ENV SCCACHE_GCS_BUCKET=${SCCACHE_GCS_BUCKET} +ENV SCCACHE_GCS_SERVICE_ACCOUNT=${SCCACHE_GCS_SERVICE_ACCOUNT} +ENV SCCACHE_GCS_RW_MODE=${SCCACHE_GCS_RW_MODE} +ENV RUSTC_WRAPPER=${RUSTC_WRAPPER} + WORKDIR /usr/src/zksync COPY . . diff --git a/docker/zk-environment/Dockerfile b/docker/zk-environment/Dockerfile index 375384bf7fca..53e532653111 100644 --- a/docker/zk-environment/Dockerfile +++ b/docker/zk-environment/Dockerfile @@ -25,10 +25,34 @@ RUN git submodule update --init --recursive # Build Solidity WORKDIR /solidity/build -RUN cmake .. -RUN make +# The default compilation is Release with Debug symbols, which is quite large. +RUN cmake .. 
-DCMAKE_BUILD_TYPE="Release" +RUN make -j -FROM debian:bookworm as rust-lightweight +# Rust binaries - with a separate builder. +FROM rust:slim-bookworm as rust-builder + +ARG ARCH=amd64 +RUN apt-get update && apt-get install -y \ + libssl-dev \ + pkg-config \ + libclang-15-dev \ + g++ \ + cmake \ + git + +RUN cargo install --version=0.8.0 sqlx-cli +RUN cargo install cargo-nextest +RUN cargo install cargo-spellcheck +RUN cargo install sccache + +RUN git clone https://github.com/matter-labs/foundry-zksync +RUN cd foundry-zksync && cargo build --release --bins +RUN mv ./foundry-zksync/target/release/forge /usr/local/cargo/bin/ +RUN mv ./foundry-zksync/target/release/cast /usr/local/cargo/bin/ + +# Main builder. +FROM debian:bookworm as rust-lightweight-base ARG ARCH=amd64 @@ -69,7 +93,7 @@ RUN apt-get update && \ lldb-15 \ lld-15 \ liburing-dev \ - libclang-dev + libclang-15-dev # Install Docker RUN apt-get update && \ @@ -97,27 +121,28 @@ ENV RUSTUP_HOME=/usr/local/rustup \ PATH=/usr/local/cargo/bin:$PATH # Install gloud for GCR/GAR login +# Google was super lazy, and their package is around 1 GB. 
+# So we trim it a little bit based on info from `https://github.com/GoogleCloudPlatform/gsutil/issues/1732` ENV GCLOUD_VERSION=451.0.1 RUN echo "deb [arch=${ARCH}] http://packages.cloud.google.com/apt cloud-sdk main" > /etc/apt/sources.list.d/google-cloud-sdk.list && \ wget -c -O - https://packages.cloud.google.com/apt/doc/apt-key.gpg | apt-key add - && \ apt-get update -y && apt-get install google-cloud-cli=${GCLOUD_VERSION}-0 --no-install-recommends -y && \ gcloud config set core/disable_usage_reporting true && \ gcloud config set component_manager/disable_update_check true && \ - gcloud config set metrics/environment github_docker_image - -RUN wget -c -O - https://sh.rustup.rs | bash -s -- -y && \ - rustup default stable - -RUN cargo install --version=0.8.0 sqlx-cli -RUN cargo install cargo-nextest - -# Installing foundry-zksync from git is failing, we will build it from sources -# Install foundry -RUN git clone https://github.com/matter-labs/foundry-zksync -RUN cd foundry-zksync && cargo build --release --bins -RUN mv ./foundry-zksync/target/release/forge /usr/local/bin/ -RUN mv ./foundry-zksync/target/release/cast /usr/local/bin/ - + gcloud config set metrics/environment github_docker_image && \ + rm -rf $(find /usr/lib/google-cloud-sdk/ -regex ".*/__pycache__") && \ + rm -rf /usr/lib/google-cloud-sdk/bin/anthoscli && \ + rm -rf /usr/lib/google-cloud-sdk/platform/bundledpythonunix && \ + rm -rf /usr/lib/google-cloud-sdk/data/gcloud.json + +COPY --from=rust-builder /usr/local/cargo/bin/sqlx \ + /usr/local/cargo/bin/cargo-sqlx \ + /usr/local/cargo/bin/cargo-nextest \ + /usr/local/cargo/bin/cargo-spellcheck \ + /usr/local/cargo/bin/sccache \ + /usr/local/cargo/bin/forge \ + /usr/local/cargo/bin/cast /usr/local/cargo/bin/ + # Copy compiler (both solc and zksolc) binaries # Obtain `solc` 0.8.20. 
COPY --from=solidity-builder /solidity/build/solc/solc /usr/bin/ @@ -133,7 +158,7 @@ RUN apt-get remove valgrind -y # We need valgrind 3.20, which is unavailable in repos or ppa, so we will build it from source RUN wget -c https://sourceware.org/pub/valgrind/valgrind-3.20.0.tar.bz2 && \ tar -xf valgrind-3.20.0.tar.bz2 && \ - cd valgrind-3.20.0 && ./configure && make && make install && \ + cd valgrind-3.20.0 && ./configure && make -j && make install && \ cd ../ && rm -rf valgrind-3.20.0.tar.bz2 && rm -rf valgrind-3.20.0 @@ -141,10 +166,13 @@ RUN wget -c https://sourceware.org/pub/valgrind/valgrind-3.20.0.tar.bz2 && \ ENV ZKSYNC_HOME=/usr/src/zksync ENV PATH="${ZKSYNC_HOME}/bin:${PATH}" ENV CI=1 -RUN cargo install sccache ENV RUSTC_WRAPPER=/usr/local/cargo/bin/sccache -FROM rust-lightweight as rust-lightweight-nightly +# If target is 'main' - then install default rust. +FROM rust-lightweight-base as rust-lightweight +RUN wget -c -O - https://sh.rustup.rs | bash -s -- -y + -RUN rustup install nightly-2024-08-01 && \ - rustup default nightly-2024-08-01 +# If target is nightly - then install only nightly rust. +FROM rust-lightweight-base as rust-lightweight-nightly +RUN wget -c -O - https://sh.rustup.rs | bash -s -- -y --default-toolchain nightly-2024-08-01 \ No newline at end of file diff --git a/docs/guides/external-node/09_decentralization.md b/docs/guides/external-node/09_decentralization.md index 41f59486bef6..caf93a85a923 100644 --- a/docs/guides/external-node/09_decentralization.md +++ b/docs/guides/external-node/09_decentralization.md @@ -7,85 +7,8 @@ will eventually be used instead of ZKsync API for synchronizing data. On the gossipnet, the data integrity will be protected by the BFT (byzantine fault-tolerant) consensus algorithm (currently data is signed just by the main node though). 
-## Enabling gossipnet on your node - -> [!NOTE] -> -> Because the data transmitted over the gossipnet is signed by the main node (and eventually by the consensus quorum), -> the signatures need to be backfilled to the node's local storage the first time you switch from centralized (ZKsync -> API based) synchronization to the decentralized (gossipnet based) synchronization (this is a one-time thing). With the -> current implementation it may take a couple of hours and gets faster the more nodes you add to the -> `gossip_static_outbound` list (see below). We are working to remove this inconvenience. - -> [!NOTE] -> -> The minimal supported server version for this is -> [24.11.0](https://github.com/matter-labs/zksync-era/releases/tag/core-v24.11.0) - -### Generating secrets - -Each participant node of the gossipnet has to have an identity (a public/secret key pair). When running your node for -the first time, generate the secrets by running: - -``` -docker run --entrypoint /usr/bin/zksync_external_node "matterlabs/external-node:2.0-v24.12.0" generate-secrets > consensus_secrets.yaml -chmod 600 consensus_secrets.yaml -``` - -> [!NOTE] -> -> NEVER reveal the secret keys used by your node. Otherwise, someone can impersonate your node on the gossipnet. If you -> suspect that your secret key has been leaked, you can generate fresh keys using the same tool. -> -> If you want someone else to connect to your node, give them your PUBLIC key instead. Both public and secret keys are -> present in the `consensus_secrets.yaml` (public keys are in comments). - -### Preparing configuration file - -Copy the template of the consensus configuration file (for -[mainnet](https://github.com/matter-labs/zksync-era/blob/main/docs/guides/external-node/prepared_configs/mainnet_consensus_config.yaml) -or -[testnet](https://github.com/matter-labs/zksync-era/blob/main/docs/guides/external-node/prepared_configs/testnet_consensus_config.yaml) -). 
- -> [!NOTE] -> -> You need to fill in the `public_addr` field. This is the address that will (not implemented yet) be advertised over -> gossipnet to other nodes, so that they can establish connections to your node. If you don't want to expose your node -> to the public internet, you can use IP in your local network. - -Currently the config contains the following fields (refer to config -[schema](https://github.com/matter-labs/zksync-era/blob/990676c5f84afd2ff8cd337f495c82e8d1f305a4/core/lib/protobuf_config/src/proto/core/consensus.proto#L66) -for more details): - -- `server_addr` - local TCP socket address that the node should listen on for incoming connections. Note that this is an - additional TCP port that will be opened by the node. -- `public_addr` - the public address of your node that will be advertised over the gossipnet. -- `max_payload_size` - limit (in bytes) on the sized of the ZKsync ERA block received from the gossipnet. This protects - your node from getting DoS`ed by too large network messages. Use the value from the template. -- `gossip_dynamic_inbound_limit` - maximal number of unauthenticated concurrent inbound connections that can be - established to your node. This is a DDoS protection measure. -- `gossip_static_outbound` - list of trusted peers that your node should always try to connect to. The template contains - the nodes maintained by Matterlabs, but you can add more if you know any. Note that the list contains both the network - address AND the public key of the node - this prevents spoofing attacks. - -### Setting environment variables - -Uncomment (or add) the following lines in your `.env` config: - -``` -EN_CONSENSUS_CONFIG_PATH=... -EN_CONSENSUS_SECRETS_PATH=... -``` - -These variables should point to your consensus config and secrets files that we have just created. Tweak the paths to -the files if you have placed them differently. 
- ### Add `--enable-consensus` flag to your entry point command -For the consensus configuration to take effect you have to add `--enable-consensus` flag to the command line when -running the node, for example: - -``` -docker run "matterlabs/external-node:2.0-v24.12.0" --enable-consensus -``` +For the consensus configuration to take effect you have to add `--enable-consensus` flag when running the node. You can +do that by editing the docker compose files (mainnet-external-node-docker-compose.yml or +testnet-external-node-docker-compose.yml) and uncommenting the line with `--enable-consensus`. diff --git a/docs/guides/external-node/building-from-scratch/Dockerfile b/docs/guides/external-node/building-from-scratch/Dockerfile index da098df91d51..5b015a4545b7 100644 --- a/docs/guides/external-node/building-from-scratch/Dockerfile +++ b/docs/guides/external-node/building-from-scratch/Dockerfile @@ -11,8 +11,7 @@ ENV ZKSYNC_HOME=/usr/src/zksync/zksync-era ENV PATH="${ZKSYNC_HOME}/bin:${PATH}" # build zk tool -RUN zk -RUN yarn zk build +RUN zkt # build rust RUN cargo build --release @@ -20,12 +19,7 @@ RUN cp target/release/zksync_external_node /usr/bin # build contracts RUN git submodule update --init --recursive -RUN zk run yarn -RUN zk compiler all || true -RUN rm /root/.cache/hardhat-nodejs/compilers-v2/linux-amd64/solc-*.does.not.work || true -RUN zk compiler all -RUN zk contract build -RUN zk f yarn run l2-contracts build +RUN zk_supervisor contracts # copy migrations (node expects them to be in specific directory) RUN cp -r core/lib/dal/migrations/ migrations diff --git a/docs/guides/external-node/docker-compose-examples/configs/generate_secrets.sh b/docs/guides/external-node/docker-compose-examples/configs/generate_secrets.sh new file mode 100755 index 000000000000..e4d8ceed67b6 --- /dev/null +++ b/docs/guides/external-node/docker-compose-examples/configs/generate_secrets.sh @@ -0,0 +1,5 @@ +#!/bin/bash + +if [ ! 
-s $1 ]; then + /usr/bin/zksync_external_node generate-secrets > $1 +fi diff --git a/docs/guides/external-node/docker-compose-examples/configs/mainnet_consensus_config.yaml b/docs/guides/external-node/docker-compose-examples/configs/mainnet_consensus_config.yaml new file mode 100644 index 000000000000..01c9d323a931 --- /dev/null +++ b/docs/guides/external-node/docker-compose-examples/configs/mainnet_consensus_config.yaml @@ -0,0 +1,10 @@ +server_addr: '0.0.0.0:3054' +public_addr: '127.0.0.1:3054' +max_payload_size: 5000000 +gossip_dynamic_inbound_limit: 100 +gossip_static_outbound: + # preconfigured ENs owned by Matterlabs that you can connect to + - key: 'node:public:ed25519:68d29127ab03408bf5c838553b19c32bdb3aaaae9bf293e5e078c3a0d265822a' + addr: 'external-node-consensus-mainnet.zksync.dev:3054' + - key: 'node:public:ed25519:b521e1bb173d04bc83d46b859d1296378e94a40427a6beb9e7fdd17cbd934c11' + addr: 'external-node-moby-consensus-mainnet.zksync.dev:3054' diff --git a/docs/guides/external-node/docker-compose-examples/configs/testnet_consensus_config.yaml b/docs/guides/external-node/docker-compose-examples/configs/testnet_consensus_config.yaml new file mode 100644 index 000000000000..cfcc6b9d43e5 --- /dev/null +++ b/docs/guides/external-node/docker-compose-examples/configs/testnet_consensus_config.yaml @@ -0,0 +1,10 @@ +server_addr: '0.0.0.0:3054' +public_addr: '127.0.0.1:3054' +max_payload_size: 5000000 +gossip_dynamic_inbound_limit: 100 +gossip_static_outbound: + # preconfigured ENs owned by Matterlabs that you can connect to + - key: 'node:public:ed25519:4a94067664e7b8d0927ab1443491dab71a1d0c63f861099e1852f2b6d0831c3e' + addr: 'external-node-consensus-sepolia.zksync.dev:3054' + - key: 'node:public:ed25519:cfbbebc74127099680584f07a051a2573e2dd7463abdd000d31aaa44a7985045' + addr: 'external-node-moby-consensus-sepolia.zksync.dev:3054' diff --git a/docs/guides/external-node/docker-compose-examples/mainnet-external-node-docker-compose.yml 
b/docs/guides/external-node/docker-compose-examples/mainnet-external-node-docker-compose.yml index 369ce50be0b2..64bef02b17a1 100644 --- a/docs/guides/external-node/docker-compose-examples/mainnet-external-node-docker-compose.yml +++ b/docs/guides/external-node/docker-compose-examples/mainnet-external-node-docker-compose.yml @@ -1,16 +1,16 @@ -version: "3.2" +name: "mainnet-node" services: prometheus: image: prom/prometheus:v2.35.0 volumes: - - mainnet-prometheus-data:/prometheus + - prometheus-data:/prometheus - ./prometheus/prometheus.yml:/etc/prometheus/prometheus.yml expose: - 9090 grafana: image: grafana/grafana:9.3.6 volumes: - - mainnet-grafana-data:/var/lib/grafana + - grafana-data:/var/lib/grafana - ./grafana/provisioning:/etc/grafana/provisioning environment: GF_AUTH_ANONYMOUS_ORG_ROLE: "Admin" @@ -37,7 +37,7 @@ services: expose: - 5430 volumes: - - mainnet-postgres:/var/lib/postgresql/data + - postgres:/var/lib/postgresql/data healthcheck: interval: 1s timeout: 3s @@ -49,17 +49,39 @@ services: environment: - POSTGRES_PASSWORD=notsecurepassword - PGPORT=5430 + # Generation of consensus secrets. + # The secrets are generated iff the secrets file doesn't already exist. 
+ generate-secrets: + image: "matterlabs/external-node:2.0-v24.16.0" + entrypoint: + [ + "/configs/generate_secrets.sh", + "/configs/mainnet_consensus_secrets.yaml", + ] + volumes: + - ./configs:/configs external-node: image: "matterlabs/external-node:2.0-v24.16.0" + entrypoint: + [ + "/usr/bin/entrypoint.sh", + # Uncomment the following line to enable consensus + # "--enable-consensus", + ] + restart: always depends_on: postgres: condition: service_healthy + generate-secrets: + condition: service_completed_successfully ports: + - "0.0.0.0:3054:3054" # consensus public port - "127.0.0.1:3060:3060" - "127.0.0.1:3061:3061" - "127.0.0.1:3081:3081" volumes: - - mainnet-rocksdb:/db + - rocksdb:/db + - ./configs:/configs expose: - 3322 environment: @@ -83,8 +105,11 @@ services: EN_SNAPSHOTS_OBJECT_STORE_MODE: "GCSAnonymousReadOnly" RUST_LOG: "warn,zksync=info,zksync_core::metadata_calculator=debug,zksync_state=debug,zksync_utils=debug,zksync_web3_decl::client=error" + EN_CONSENSUS_CONFIG_PATH: "/configs/mainnet_consensus_config.yaml" + EN_CONSENSUS_SECRETS_PATH: "/configs/mainnet_consensus_secrets.yaml" + volumes: - mainnet-postgres: {} - mainnet-rocksdb: {} - mainnet-prometheus-data: {} - mainnet-grafana-data: {} + postgres: {} + rocksdb: {} + prometheus-data: {} + grafana-data: {} diff --git a/docs/guides/external-node/docker-compose-examples/testnet-external-node-docker-compose.yml b/docs/guides/external-node/docker-compose-examples/testnet-external-node-docker-compose.yml index 1417c6cc360f..f865f500c5b3 100644 --- a/docs/guides/external-node/docker-compose-examples/testnet-external-node-docker-compose.yml +++ b/docs/guides/external-node/docker-compose-examples/testnet-external-node-docker-compose.yml @@ -1,16 +1,16 @@ -version: "3.2" +name: "testnet-node" services: prometheus: image: prom/prometheus:v2.35.0 volumes: - - testnet-prometheus-data:/prometheus + - prometheus-data:/prometheus - ./prometheus/prometheus.yml:/etc/prometheus/prometheus.yml expose: - 9090 
grafana: image: grafana/grafana:9.3.6 volumes: - - testnet-grafana-data:/var/lib/grafana + - grafana-data:/var/lib/grafana - ./grafana/provisioning:/etc/grafana/provisioning environment: GF_AUTH_ANONYMOUS_ORG_ROLE: "Admin" @@ -37,7 +37,7 @@ services: expose: - 5430 volumes: - - testnet-postgres:/var/lib/postgresql/data + - postgres:/var/lib/postgresql/data healthcheck: interval: 1s timeout: 3s @@ -49,17 +49,39 @@ services: environment: - POSTGRES_PASSWORD=notsecurepassword - PGPORT=5430 + # Generation of consensus secrets. + # The secrets are generated iff the secrets file doesn't already exist. + generate-secrets: + image: "matterlabs/external-node:2.0-v24.16.0" + entrypoint: + [ + "/configs/generate_secrets.sh", + "/configs/testnet_consensus_secrets.yaml", + ] + volumes: + - ./configs:/configs external-node: image: "matterlabs/external-node:2.0-v24.16.0" + entrypoint: + [ + "/usr/bin/entrypoint.sh", + # Uncomment the following line to enable consensus + # "--enable-consensus", + ] + restart: always depends_on: postgres: condition: service_healthy + generate-secrets: + condition: service_completed_successfully ports: + - "0.0.0.0:3054:3054" # consensus public port - "127.0.0.1:3060:3060" - "127.0.0.1:3061:3061" - "127.0.0.1:3081:3081" volumes: - - testnet-rocksdb:/db + - rocksdb:/db + - ./configs:/configs expose: - 3322 environment: @@ -83,8 +105,11 @@ services: EN_SNAPSHOTS_OBJECT_STORE_MODE: "GCSAnonymousReadOnly" RUST_LOG: "warn,zksync=info,zksync_core::metadata_calculator=debug,zksync_state=debug,zksync_utils=debug,zksync_web3_decl::client=error" + EN_CONSENSUS_CONFIG_PATH: "/configs/testnet_consensus_config.yaml" + EN_CONSENSUS_SECRETS_PATH: "/configs/testnet_consensus_secrets.yaml" + volumes: - testnet-postgres: {} - testnet-rocksdb: {} - testnet-prometheus-data: {} - testnet-grafana-data: {} + postgres: {} + rocksdb: {} + prometheus-data: {} + grafana-data: {} diff --git a/etc/env/base/chain.toml b/etc/env/base/chain.toml index b5a4e4b2e75c..5bdb08e3f952 
100644 --- a/etc/env/base/chain.toml +++ b/etc/env/base/chain.toml @@ -91,8 +91,8 @@ validation_computational_gas_limit = 300000 save_call_traces = true bootloader_hash = "0x010008bbde6fc402ea3a3d6cb15cb97e70245d3d4e48fb74362d4961b74c16b1" -default_aa_hash = "0x0100058d9eee51f4b9e9a9ecb7fd7e8301e90bef018c2bd913ed36e583fec8c2" -evm_simulator_hash = "0x01000ccb740e2345754450eda583f59b31a346920a22f968dfcfc63feae303ee" +default_aa_hash = "0x0100058de8a8fda78449f14bece247271bdbba5dc73fc96135c35a17ee4dd090" +evm_simulator_hash = "0x01000cdf5bb7dd8a97faf231a5e1e20f2fe308d6f200c3295c6e3629547cc4a4" protective_reads_persistence_enabled = false diff --git a/etc/env/base/contracts.toml b/etc/env/base/contracts.toml index fab072c07cec..69bcbf6dabec 100644 --- a/etc/env/base/contracts.toml +++ b/etc/env/base/contracts.toml @@ -26,8 +26,8 @@ RECURSION_NODE_LEVEL_VK_HASH = "0x1186ec268d49f1905f8d9c1e9d39fc33e98c74f91d91a2 RECURSION_LEAF_LEVEL_VK_HASH = "0x101e08b00193e529145ee09823378ef51a3bc8966504064f1f6ba3f1ba863210" RECURSION_CIRCUITS_SET_VKS_HASH = "0x18c1639094f58177409186e8c48d9f577c9410901d2f1d486b3e7d6cf553ae4c" GENESIS_TX_HASH = "0xb99ebfea46cbe05a21cd80fe5597d97b204befc52a16303f579c607dc1ac2e2e" -GENESIS_ROOT = "0xbfa72908d61bf6e208a1140eb8b66a8e9fc0cc3ecd76f73d994fa75c49778530" -GENESIS_BATCH_COMMITMENT = "0xeac224ce2445688015b7b88a168332657fb1de5ccb3c55407d6107fbd483459e" +GENESIS_ROOT = "0x79679719d4932b95d89e1ad1faeccb4982bed5ca79f738d3d72d3cfcea6f3722" +GENESIS_BATCH_COMMITMENT = "0xb793269d781342f2980720a9da3009d91cfebb5187977b69807fea8444c5cb2f" PRIORITY_TX_MAX_GAS_LIMIT = 72000000 DEPLOY_L2_BRIDGE_COUNTERPART_GAS_LIMIT = 10000000 GENESIS_ROLLUP_LEAF_INDEX = "56" diff --git a/etc/env/base/rust.toml b/etc/env/base/rust.toml index 1bb69374ab1a..d8bef020c642 100644 --- a/etc/env/base/rust.toml +++ b/etc/env/base/rust.toml @@ -7,7 +7,7 @@ RUST_LOG="""\ zksync_node_framework=info,\ zksync_block_reverter=info,\ -zksync_commitment_generator=info,\ 
+zksync_commitment_generator=debug,\ zksync_node_db_pruner=info,\ zksync_eth_sender=info,\ zksync_node_fee_model=info,\ diff --git a/etc/env/configs/ext-node.toml b/etc/env/configs/ext-node.toml index b2f740065591..a5eb22db5ec1 100644 --- a/etc/env/configs/ext-node.toml +++ b/etc/env/configs/ext-node.toml @@ -63,7 +63,7 @@ zksync_node_consensus=info,\ zksync_consensus_bft=info,\ zksync_consensus_network=info,\ zksync_consensus_storage=info,\ -zksync_commitment_generator=info,\ +zksync_commitment_generator=debug,\ zksync_core=debug,\ zksync_dal=info,\ zksync_db_connection=info,\ diff --git a/etc/env/file_based/general.yaml b/etc/env/file_based/general.yaml index 864bff15dedf..ca9c3fd0c796 100644 --- a/etc/env/file_based/general.yaml +++ b/etc/env/file_based/general.yaml @@ -36,7 +36,7 @@ api: filters_limit: 10000 subscriptions_limit: 10000 pubsub_polling_interval: 200 - max_nonce_ahead: 20 + max_nonce_ahead: 40 gas_price_scale_factor: 1.5 estimate_gas_scale_factor: 1.3 estimate_gas_acceptable_overestimation: 5000 @@ -311,7 +311,7 @@ prometheus: observability: log_format: plain - log_directives: 
"zksync_node_test_utils=info,zksync_state_keeper=info,zksync_reorg_detector=info,zksync_consistency_checker=info,zksync_metadata_calculator=info,zksync_node_sync=info,zksync_node_consensus=info,zksync_contract_verification_server=info,zksync_node_api_server=info,zksync_tee_verifier_input_producer=info,zksync_node_framework=info,zksync_block_reverter=info,zksync_commitment_generator=info,zksync_node_db_pruner=info,zksync_eth_sender=info,zksync_node_fee_model=info,zksync_node_genesis=info,zksync_house_keeper=info,zksync_proof_data_handler=info,zksync_shared_metrics=info,zksync_node_test_utils=info,zksync_vm_runner=info,zksync_consensus_bft=info,zksync_consensus_network=info,zksync_consensus_storage=info,zksync_core_leftovers=debug,zksync_server=debug,zksync_contract_verifier=debug,zksync_dal=info,zksync_db_connection=info,zksync_eth_client=info,zksync_eth_watch=debug,zksync_storage=info,zksync_db_manager=info,zksync_merkle_tree=info,zksync_state=debug,zksync_utils=debug,zksync_queued_job_processor=info,zksync_types=info,zksync_mempool=debug,loadnext=info,vm=info,zksync_object_store=info,zksync_external_node=info,zksync_witness_generator=info,zksync_prover_fri=info,zksync_witness_vector_generator=info,zksync_web3_decl=debug,zksync_health_check=debug,zksync_proof_fri_compressor=info,vise_exporter=error,snapshots_creator=debug,zksync_base_token_adjuster=debug,zksync_external_price_api=debug,zksync_external_proof_integration_api=info" + log_directives: 
"zksync_node_test_utils=info,zksync_state_keeper=info,zksync_reorg_detector=info,zksync_consistency_checker=info,zksync_metadata_calculator=info,zksync_node_sync=info,zksync_node_consensus=info,zksync_contract_verification_server=info,zksync_node_api_server=info,zksync_tee_verifier_input_producer=info,zksync_node_framework=info,zksync_block_reverter=info,zksync_commitment_generator=debug,zksync_node_db_pruner=info,zksync_eth_sender=info,zksync_node_fee_model=info,zksync_node_genesis=info,zksync_house_keeper=info,zksync_proof_data_handler=info,zksync_shared_metrics=info,zksync_node_test_utils=info,zksync_vm_runner=info,zksync_consensus_bft=info,zksync_consensus_network=info,zksync_consensus_storage=info,zksync_core_leftovers=debug,zksync_server=debug,zksync_contract_verifier=debug,zksync_dal=info,zksync_db_connection=info,zksync_eth_client=info,zksync_eth_watch=debug,zksync_storage=info,zksync_db_manager=info,zksync_merkle_tree=info,zksync_state=debug,zksync_utils=debug,zksync_queued_job_processor=info,zksync_types=info,zksync_mempool=debug,loadnext=info,vm=info,zksync_object_store=info,zksync_external_node=info,zksync_witness_generator=info,zksync_prover_fri=info,zksync_witness_vector_generator=info,zksync_web3_decl=debug,zksync_health_check=debug,zksync_proof_fri_compressor=info,vise_exporter=error,snapshots_creator=debug,zksync_base_token_adjuster=debug,zksync_external_price_api=debug,zksync_external_proof_integration_api=info" # Uncomment only if needed # sentry: # url: unset diff --git a/etc/env/file_based/overrides/mainnet/general.yaml b/etc/env/file_based/overrides/mainnet.yaml similarity index 92% rename from etc/env/file_based/overrides/mainnet/general.yaml rename to etc/env/file_based/overrides/mainnet.yaml index 7abe8eb54725..0600abf694c2 100644 --- a/etc/env/file_based/overrides/mainnet/general.yaml +++ b/etc/env/file_based/overrides/mainnet.yaml @@ -10,12 +10,13 @@ eth: aggregated_block_prove_deadline: 300 aggregated_block_execute_deadline: 300 
timestamp_criteria_max_allowed_lag: 104000 # 29h + wait_confirmations: null gas_adjuster: pricing_formula_parameter_a: 1.06 internal_l1_pricing_multiplier: 1 internal_pubdata_pricing_multiplier: 1.50 poll_period: 60 + watcher: + confirmations_for_eth_event: null observability: log_directives: zksync=info,zksync_state_keeper=debug,zksync_core=debug,zksync_server=debug,zksync_contract_verifier=debug,zksync_dal=debug,zksync_state=debug,zksync_utils=debug,zksync_eth_sender=debug,loadnext=debug,dev_ticker=info,vm=info,block_sizes_test=info,setup_key_generator_and_server=info,zksync_queued_job_processor=debug,slot_index_consistency_checker=debug,zksync_health_check=debug,zksync_consensus_bft=debug,zksync_consensus_network=debug,zksync_consensus_storage=debug,zksync_consensus_executor=debug, - -# remove eth_sender_wait_confirmations, eth_watcher_confirmations_for_eth_event variables diff --git a/etc/env/file_based/overrides/only_real_proofs.yaml b/etc/env/file_based/overrides/only_real_proofs.yaml new file mode 100644 index 000000000000..527474675116 --- /dev/null +++ b/etc/env/file_based/overrides/only_real_proofs.yaml @@ -0,0 +1,3 @@ +eth: + sender: + proof_sending_mode: ONLY_REAL_PROOFS diff --git a/etc/env/file_based/overrides/testnet/general.yaml b/etc/env/file_based/overrides/testnet.yaml similarity index 95% rename from etc/env/file_based/overrides/testnet/general.yaml rename to etc/env/file_based/overrides/testnet.yaml index 43a62f3f0dd8..e4da1ac96e26 100644 --- a/etc/env/file_based/overrides/testnet/general.yaml +++ b/etc/env/file_based/overrides/testnet.yaml @@ -10,6 +10,7 @@ eth: aggregated_block_prove_deadline: 300 aggregated_block_execute_deadline: 300 timestamp_criteria_max_allowed_lag: 104000 # 29h + wait_confirmations: null gas_adjuster: pricing_formula_parameter_a: 1.1 internal_l1_pricing_multiplier: 1 @@ -18,5 +19,3 @@ eth: confirmations_for_eth_event: 10 observability: log_directives: 
zksync=info,zksync_state_keeper=debug,zksync_core=debug,zksync_server=debug,zksync_contract_verifier=debug,zksync_dal=debug,zksync_state=debug,zksync_utils=debug,zksync_eth_sender=debug,loadnext=debug,dev_ticker=info,vm=info,block_sizes_test=info,setup_key_generator_and_server=info,zksync_queued_job_processor=debug,slot_index_consistency_checker=debug,zksync_health_check=debug,zksync_consensus_bft=debug,zksync_consensus_network=debug,zksync_consensus_storage=debug,zksync_consensus_executor=debug, - -# remove eth_sender_wait_confirmations variable diff --git a/etc/env/file_based/overrides/tests/loadtest-new.yaml b/etc/env/file_based/overrides/tests/loadtest-new.yaml new file mode 100644 index 000000000000..2167f7347e09 --- /dev/null +++ b/etc/env/file_based/overrides/tests/loadtest-new.yaml @@ -0,0 +1,7 @@ +db: + merkle_tree: + mode: LIGHTWEIGHT +experimental_vm: + state_keeper_fast_vm_mode: NEW +mempool: + delay_interval: 50 diff --git a/etc/env/file_based/overrides/tests/loadtest-old.yaml b/etc/env/file_based/overrides/tests/loadtest-old.yaml new file mode 100644 index 000000000000..a2d66d1cf4a7 --- /dev/null +++ b/etc/env/file_based/overrides/tests/loadtest-old.yaml @@ -0,0 +1,7 @@ +db: + merkle_tree: + mode: LIGHTWEIGHT +experimental_vm: + state_keeper_fast_vm_mode: OLD +mempool: + delay_interval: 50 diff --git a/etc/env/file_based/overrides/validium.yaml b/etc/env/file_based/overrides/validium.yaml new file mode 100644 index 000000000000..1af02dd95893 --- /dev/null +++ b/etc/env/file_based/overrides/validium.yaml @@ -0,0 +1,6 @@ +eth: + sender: + pubdata_sending_mode: CUSTOM +state_keeper: + pubdata_overhead_part: 0 + compute_overhead_part: 1 diff --git a/etc/lint-config/ignore.yaml b/etc/lint-config/ignore.yaml index 108192b18438..3d0c4869df84 100644 --- a/etc/lint-config/ignore.yaml +++ b/etc/lint-config/ignore.yaml @@ -22,5 +22,6 @@ dirs: [ "system-contracts", "artifacts-zk", "cache-zk", - "contracts/" + "contracts/", + "era-observability" ] diff --git 
a/etc/nix/tee_prover.nix b/etc/nix/tee_prover.nix index 50273b91fb5a..0b424522dffb 100644 --- a/etc/nix/tee_prover.nix +++ b/etc/nix/tee_prover.nix @@ -7,4 +7,8 @@ craneLib.buildPackage (commonArgs // { version = (builtins.fromTOML (builtins.readFile ../../core/bin/zksync_tee_prover/Cargo.toml)).package.version; cargoExtraArgs = "-p zksync_tee_prover --bin zksync_tee_prover"; inherit cargoArtifacts; + + postInstall = '' + strip $out/bin/zksync_tee_prover + ''; }) diff --git a/etc/selector-generator-data/README.md b/etc/selector-generator-data/README.md new file mode 100644 index 000000000000..ddba2769e4f2 --- /dev/null +++ b/etc/selector-generator-data/README.md @@ -0,0 +1,3 @@ +# List of selectors from our contracts + +To regenerate the list, please use the selector_generator tool from core/bin directory. diff --git a/etc/selector-generator-data/selectors.json b/etc/selector-generator-data/selectors.json new file mode 100644 index 000000000000..f8a6d2e825b2 --- /dev/null +++ b/etc/selector-generator-data/selectors.json @@ -0,0 +1,1025 @@ +{ + "e341eaa4": "sign", + "600903ad": "keyExistsToml", + "174dea71": "aggregate3Value", + "21ed2977": "assertApproxEqRelDecimal", + "90c5013b": "stopPrank", + "e4948f43": "proveL2MessageInclusion", + "4f1ef286": "upgradeToAndCall", + "e985e9c5": "isApprovedForAll", + "6ba3ba2b": "createFork", + "23b872dd": "transferFrom", + "740211ce": "commitValidatorCommittee", + "b292f5f1": "proveL1ToL2TransactionStatus", + "dbfe3e96": "updateSecurityCouncil", + "b12fc005": "assertLt", + "868085b1": "getBatchProofPublicInput", + "64d62353": "updateDelay", + "48c3241f": "closeFile", + "60f9bb11": "readFile", + "c88a5e6d": "deal", + "6f497ac6": "executeBatchesSharedBridge", + "966c523e": "blockAndAggregate", + "c438a9f2": "L2_LEGACY_SHARED_BRIDGE", + "59890bcb": "setExecutedBatches", + "e30c3978": "pendingOwner", + "31ba3498": "createFork", + "669efca7": "assertNotEqDecimal", + "c75ac8fa": "processL2Logs", + "ddc2651b": "envBytes", + "4de2e468": 
"getRawCodeHash", + "129de7eb": "blobhashes", + "b3e47705": "envOr", + "e543e5bf": "setChainCreationParams", + "8466f415": "assertLe", + "cc3fbc63": "setEraPostDiamondUpgradeFirstBatch", + "be6f11cf": "setPriorityTxMaxGasLimit", + "8da5cb5b": "owner", + "9f5684a2": "readLink", + "994057ef": "changeAttesterKey", + "711043ac": "assertEq", + "5875da2b": "changeValidatorKey", + "5139839c": "nodeOwners", + "7b048ccd": "parseJsonInt", + "a37dc1d4": "forwardedBridgeClaimFailedBurn", + "adfca15e": "facetFunctionSelectors", + "3e64a696": "getBasefee", + "99624cb6": "getAttesterCommittee", + "1497876c": "readDir", + "d48bfca7": "addToken", + "17d7de7c": "getName", + "eeb8cb09": "executeTransactionFromOutside", + "0c56efe9": "initializeV2", + "01eae183": "depositAmount", + "561cd6f3": "serializeString", + "b7909320": "assertNotEq", + "41c841c3": "L1_WETH_TOKEN", + "7475e9ea": "chainAdminAcceptAdmin", + "ca669fa7": "prank", + "c1899c1d": "createBatchCommitment", + "38a78092": "increaseMinNonce", + "a9059cbb": "transfer", + "f54266a2": "l1TokenAddress", + "e0eb04d4": "isFile", + "e9f18c17": "forceDeployOnAddresses", + "3d1fe08a": "assertGeDecimal", + "a2b1a1ae": "expectCall", + "5d83b6da": "__DEPRECATED_baseToken", + "7890e5da": "side", + "40f0b4e0": "assertLtDecimal", + "71aad10d": "toString", + "29f172ad": "unsafeOverrideBatch", + "8289e621": "assertApproxEqAbs", + "eccd2437": "assertGtDecimal", + "cdf25430": "L1_ASSET_ROUTER", + "a22cb465": "setApprovalForAll", + "9983c28a": "parseJsonIntArray", + "f67a965b": "broadcast", + "aa5cf788": "assertLeDecimal", + "8775a591": "assertNotEq", + "57f3921f": "stmAssetIdToAddress", + "2f2fd63f": "getMappingLength", + "2c431917": "scheduleTransparent", + "7b574586": "publishedBlobCommitments", + "31d50750": "isOperation", + "21f603d7": "setTransactionFilterer", + "8129fc1c": "initialize", + "728cb93b": "bridgeClaimFailedBurn", + "19698bc9": "infiniteFunction", + "3ce969e6": "revokePersistent", + "a322c40e": "toString", + "c29f093f": 
"setSTM", + "fd921be8": "parseJsonBytes", + "0d4aae9b": "stopMappingRecording", + "e03e9177": "assertEq", + "0a30b771": "assertGe", + "5e1ac65f": "hashOperation", + "9a188371": "requestL2TransactionDirect", + "62c6f9fb": "assertNotEq", + "b8c2f66f": "getTotalBatchesExecuted", + "42181150": "envInt", + "d17d4b0d": "assertLe", + "6ab8f82e": "proveL2LogInclusion", + "580d6bff": "updateAllNodesAtHeight", + "c3bbd2d7": "isFacetFreezable", + "8a75bb09": "saveL2LogsRootHash", + "71dce7da": "toString", + "1091a261": "assertNotEq", + "6ee1dc20": "validateNonceUsage", + "65bc9481": "accesses", + "f088ccdc": "callCodeOracle", + "0956441b": "stopExpectSafeMemory", + "5aa6fa1f": "NATIVE_TOKEN_VAULT", + "8310f2c6": "transferFundsFromSharedBridge", + "484f0505": "getHyperchainLegacy", + "3977d71c": "getAggregatedRoot", + "0008efda": "runDefaultUpgrader", + "328ef4fe": "setBaseTokenGasMultiplierPrice", + "ad31b9fa": "envAddress", + "65c428e7": "parseTomlAddressArray", + "bf529569": "setFreezability", + "ef3f0bae": "getTotalBatchesVerified", + "ce8365f9": "envExists", + "30bda03e": "setL1Erc20Bridge", + "cdc4878b": "nodeCount", + "015f58d7": "genesisUpgrade", + "a84328dd": "assertGe", + "1cc5d103": "setPorterAvailability", + "588570a5": "initialize", + "ae00b630": "runDeployConsensusRegistry", + "a31ee5b0": "initialize", + "9a7fbd8f": "assertNotEq", + "e23d2563": "getEraChainId", + "24fd57fb": "requestL2TransactionTwoBridges", + "0cc9ee84": "assertEq", + "f56ff18b": "getBlobhashes", + "f92ad219": "initialize", + "f2830f7b": "rollFork", + "2f90b184": "L1_CHAIN_ID", + "5de097b1": "nullifyChainBalanceByNTV", + "14e75680": "assertNotEqDecimal", + "204e1c7a": "getProxyImplementation", + "240f839d": "assertApproxEqAbs", + "71623274": "l2TransactionBaseCost", + "d9a3c4d2": "assertGt", + "e23cd19f": "writeJson", + "a0803ef7": "currentBlockInfo", + "aa4593dc": "revertReceive", + "d0468156": "getPendingAdmin", + "3e9173c5": "assertEq", + "a6368557": "deleteSnapshot", + "f413f0b6": 
"assertEq", + "e03fe177": "getCodeHash", + "812a44b2": "parseTomlKeys", + "80b41246": "getBlockHashEVM", + "1f7b4f30": "roll", + "c87b56dd": "tokenURI", + "dc8e4b26": "registerSettlementLayer", + "949431dc": "approvalBased", + "35e1349b": "eth_getLogs", + "5d4edca7": "BRIDGE_HUB", + "607457f2": "setShouldRevertOnCommitBatches", + "18717dc1": "setPorterAvailability", + "ede25608": "protocolVersionToUpgradeTimestamp", + "9ebf6827": "selectFork", + "84d9fedd": "popFront", + "06d49e5b": "getPubdataPricingMode", + "b3a056d7": "loadAllocs", + "49a7cc72": "payForTransaction", + "b381724e": "setFeeParams", + "d30dced6": "parseTomlBool", + "72c7e0b5": "assertNotEq", + "6223258e": "setDAValidatorPair", + "f90eb963": "getPorterAvailability", + "3f58f5b5": "createNewChain", + "c126e860": "hashOperation", + "491cc7c2": "expectEmit", + "7eff275e": "changeProxyAdmin", + "252dba42": "aggregate", + "01ffc9a7": "supportsInterface", + "a5748aad": "getNonce", + "69c76df2": "readUint32", + "d0bf6fd4": "setSharedBridge", + "dbe8d88b": "assertLtDecimal", + "3d1f16d4": "commitAttesterCommittee", + "48016c04": "assertEqDecimal", + "8dd14802": "setBridge", + "923b3b56": "forceDeployOnAddress", + "def9d6af": "protocolVersionIsActive", + "714a2f13": "assertEq", + "cf1c049c": "assertEq", + "087e6e81": "parseBytes32", + "6d016688": "expectSafeMemory", + "aa970773": "validateAndPayForPaymasterTransaction", + "a635f01d": "delegateCall", + "2f103f22": "activeFork", + "8102d70d": "readDir", + "3cf78e28": "assertNotEq", + "97949042": "envBytes32", + "f5e69a47": "publishCompressedBytecode", + "39509351": "increaseAllowance", + "97bb3ce9": "tokenAddress", + "f9f3ee2d": "setResult", + "ae65def1": "node", + "d124dc4f": "send", + "64b554ad": "forwardedBridgeBurn", + "f0259e92": "breakpoint", + "ebc73ab4": "getMappingSlotAt", + "0e18b681": "acceptAdmin", + "f3385fb6": "forceDeployOnAddress", + "667f9d70": "load", + "8f5d232d": "parseBytes", + "515361f6": "assertEq", + "6d315d7e": "blobBaseFee", + 
"4724c5b9": "assertNotEq", + "3425eb89": "tokenMultiplierSetter", + "2d0335ab": "getNonce", + "07ee9355": "l2BridgeAddress", + "189a5a17": "nodes", + "9a8a0592": "chainId", + "1d9e269e": "makePersistent", + "3d5bc8bc": "assertApproxEqAbsDecimal", + "701f58c5": "commitBatches", + "7877a797": "blockGasLimit", + "e6962cdb": "broadcast", + "5aa9b6b5": "getRawNonce", + "876e24e6": "getMappingKeyAndParentOf", + "b67187f3": "assertNotEq", + "933999fb": "deposit", + "4074e0a8": "makePersistent", + "0dbad27e": "upgradeChainFromVersion", + "3601e63e": "bridgeRecoverFailedTransfer", + "f710b062": "assertApproxEqAbs", + "e516761e": "markFactoryDeps", + "57e6246b": "initialCutHash", + "c0406226": "run", + "7b30c8da": "getL2SystemContractsUpgradeTxHash", + "74637a7a": "computeCreateAddress", + "ba334825": "hyperchain", + "ca408c23": "bridgehubDeposit", + "6352211e": "ownerOf", + "9f86dc91": "parseJsonBool", + "ba75bbd8": "front", + "b473318e": "l2TransactionBaseCost", + "c304aab7": "assertLeDecimal", + "64bf8d66": "changeFeeParams", + "f4844814": "expectRevert", + "2878fe74": "genesisUpgrade", + "89160467": "ffi", + "27af7d9c": "assertEqDecimal", + "ebe4a3d7": "getTransactionHashes", + "7fec2a8d": "startBroadcast", + "d505accf": "permit", + "09824a80": "registerToken", + "c21a38e2": "proveL2MessageInclusion", + "c31eb0e0": "expectRevert", + "561fe540": "envOr", + "f8d33b9b": "assertGt", + "ea6c029c": "baseTokenGasPriceMultiplierNominator", + "ef277d72": "assertApproxEqRel", + "c846f6df": "transferFundsFromLegacy", + "7ca29682": "createFork", + "5ca1e165": "getRoot", + "37736e08": "parseToml", + "29b98c67": "isDiamondStorageFrozen", + "f0e9da23": "readAddress", + "1206c8a8": "rpc", + "f8ccbf47": "IS_SCRIPT", + "dd62ed3e": "allowance", + "56ca623e": "toString", + "f6370c7b": "setChainCreationParams", + "06447d56": "startPrank", + "05838bf4": "expectSafeMemoryCall", + "e1ad1162": "transfer", + "c7ca373c": "initFromCommitment", + "5a362d45": "assertGt", + "14b02bc9": "envString", + 
"f7d39a8d": "breakpoint", + "b2dad155": "trim", + "4ad0bac9": "readCallers", + "53ce2061": "revertBatches", + "bd7c5412": "isEthWithdrawalFinalized", + "a6ae0aac": "coinbase", + "ab07b2e9": "getL2GasPrice", + "e02e1bfd": "chainCount", + "d92d8efd": "isPersistent", + "79823c9a": "getFirstUnprocessedPriorityTx", + "78bdcea7": "assertNotEq", + "699b0fb9": "bridgeBurn", + "18e3a941": "getVerifierParams", + "ee7fb38b": "calculateRoot", + "f851a440": "admin", + "95f11a40": "bridgeInitialize", + "3f4ba83a": "unpause", + "33ce93fe": "getProtocolVersion", + "d9caed12": "withdraw", + "c37533bb": "proveBatchesSharedBridge", + "07168226": "deployBeaconProxy", + "9cd939e4": "l2LogsRootHash", + "af500fb7": "readBytes32", + "db4235f6": "keyExistsJson", + "8c5a3445": "general", + "607e2cb2": "setRevertReceive", + "27ae4c16": "freezeDiamond", + "e0bf0850": "setShouldRevertOnProveBatches", + "8cb7f3d0": "forceDeployOnAddresses", + "47fcedb8": "setFeeParams", + "5d18c73a": "assertEq", + "efb77a75": "makePersistent", + "421ae469": "deleteSnapshots", + "39607382": "getTotalBlocksExecuted", + "d145736c": "envOr", + "76eadd36": "stopBroadcast", + "2f9c8f0d": "add", + "84bc3eb0": "withdrawWithMessage", + "a28c1aee": "prepareForPaymaster", + "150b7a02": "onERC721Received", + "45c62011": "removeDir", + "8cf2b2f0": "uncheckedInc", + "74da756b": "execute", + "1c50cfea": "addTokenAssetId", + "c2e4ff97": "markAccountCodeHashAsConstructed", + "e91659ae": "addNewChainIfNeeded", + "59ec65a2": "baseToken", + "350d56bf": "envAddress", + "4d8abc4b": "transact", + "2077337e": "assertLtDecimal", + "938b5f32": "origin", + "f2fde38b": "transferOwnership", + "0d4651aa": "storeAccountConstructedCodeHash", + "b993549e": "getCommittedBatchTimestamp", + "7c84c69b": "assertEq", + "70f5c679": "setMessageRoot", + "e1239cd8": "incrementMinNonceIfEquals", + "04a5c7ab": "assertGtDecimal", + "f20265d2": "setRevertTransfer", + "95d89b41": "symbol", + "11a2ccc1": "finalizeWithdrawal", + "65b7b7cc": "expectCall", + 
"1f21fc80": "writeFileBinary", + "a457c2d7": "decreaseAllowance", + "7fb5297f": "startBroadcast", + "9ff531e3": "assertLt", + "127cfe9a": "parseTomlBoolArray", + "2b805192": "setNewVersionUpgrade", + "d241f618": "genesisUpgrade", + "a3bd0112": "genesisUpgrade", + "39b34c6e": "requestBytecodeL1Publication", + "3f33db60": "serializeInt", + "d3977322": "assertNotEq", + "bdfacbe8": "assertNotEq", + "8bb75533": "split", + "7ed1ec7d": "envBool", + "d2ef1b0e": "storedBatchZero", + "edecd035": "assertNotEq", + "7ebba672": "setTokenMultiplier", + "9d2ad72a": "rpcUrlStructs", + "a6f2c076": "setDataToBeReturnedInFinalizeWithdrawal", + "f8f7cd76": "validateTransaction", + "4777f3cf": "envOr", + "505e6d47": "updateAllLeaves", + "8eb7db57": "bridgehubConfirmL2Transaction", + "2f2769d1": "assertEq", + "548a5a33": "setAssetHandlerAddressThisChain", + "8c2a993e": "bridgeMint", + "3e9705c0": "startMappingRecording", + "d1a5b36f": "pauseGasMetering", + "84d52b7a": "createSelectFork", + "6dde7209": "l2TokenBeacon", + "dead6f7f": "getHyperchain", + "bbcb713e": "envOr", + "c0a16dda": "setAssetDeploymentTracker", + "ca65fe79": "finalizeDeposit", + "579952fc": "transferFromTo", + "805b5b74": "tokenIsRegistered", + "709ecd3f": "dumpState", + "c657c718": "label", + "fb4baf17": "changeFeeParams", + "e8295588": "zeros", + "85940ef1": "parseJson", + "213e4198": "parseJsonKeys", + "3f8be2c8": "toBase64", + "16d207c6": "assertApproxEqAbs", + "f26f3c8f": "proveL2MessageInclusion", + "b88d4fde": "safeTransferFrom", + "1e4fba05": "getChainRoot", + "facd743b": "isValidator", + "2c4f2a58": "bridgehubDepositBaseToken", + "82d6c8fd": "assertApproxEqRelDecimal", + "85df51fd": "blockHash", + "897e0a97": "writeFile", + "263b7f8e": "proveL2LogInclusion", + "ab93d6f3": "requestL2TransactionToGatewayMailbox", + "f5ba4232": "removeStateTransitionManager", + "f85894c5": "forwardedBridgeBurn", + "584b153e": "isOperationPending", + "95570d12": "getValidatorCommittee", + "70ca10bb": "store", + "aaaddeaf": 
"envBool", + "d0707b67": "aggregate", + "ec3d5f88": "setPriorityTxMaxGasLimit", + "8456cb59": "pause", + "6cd8c355": "reinitializeChainGovernance", + "48f50c0f": "txGasPrice", + "d930a0e6": "projectRoot", + "628b636e": "publishPubdataAndClearState", + "b5a85e9d": "forceDeploy", + "6a82600a": "parseJson", + "a85a8418": "rpcUrls", + "e5355c75": "getL2SystemContractsUpgradeBatchNumber", + "9cb1c0d4": "prevrandao", + "6a8237b3": "assertNotEq", + "26782247": "pendingAdmin", + "66869d49": "changeFeeParams", + "762008c2": "executeBatchesSharedBridge", + "426cb766": "attestersCommit", + "88da6d35": "serializeString", + "1e356e1a": "serializeAddress", + "8a0807b7": "indexOf", + "566338a9": "getL1TokenAddress", + "2555d2c1": "chunkAndPublishPubdata", + "e2f318e3": "payForTransaction", + "72d74cd7": "reinitializeToken", + "3997d064": "tryAggregate", + "57180981": "updateAccountVersion", + "f28dceb3": "expectRevert", + "db1f0bf9": "getTotalBatchesCommitted", + "51b3c157": "hyperbridgingEnabled", + "4be99e1d": "getCurrentPubdataCost", + "e34a329a": "executeUpgrade", + "546b6d2a": "SHARED_BRIDGE", + "7ab08472": "finalizeWithdrawalLegacyErc20Bridge", + "6a27e8b5": "getSettlementLayer", + "17338945": "unfreezeDiamond", + "df9c1589": "executeTransaction", + "99c16d1a": "proveL2MessageInclusion", + "98acd7a6": "getBaseToken", + "f4c004e3": "assertNotEq", + "a54a87d8": "copyFile", + "c1fa1ed0": "assertEq", + "47b4a7a6": "changeAttesterWeight", + "be646da1": "transact", + "d1132332": "attesterPubKeyHashes", + "30e5ccbd": "incrementTxNumberInBatch", + "7958004c": "getOperationState", + "045c55ce": "assertApproxEqAbsDecimal", + "56079ac8": "sendL2ToL1Log", + "d86970d8": "getL2BootloaderBytecodeHash", + "e8a71ca9": "forwardedBridgeMint", + "9a42c2c2": "zeroPointerTest", + "06fdde03": "name", + "086a56f8": "getBaseTokenBridge", + "18b1771f": "getAssetId", + "306395c6": "incrementDeploymentNonce", + "63dc94b1": "forceDeploy", + "1c9f0149": "updateChainBalancesFromSharedBridge", + 
"3591c1a0": "getBridgehub", + "d6abe642": "getAssetId", + "95fd154e": "assertLe", + "72425d9d": "getCurrentBlockDifficulty", + "b19f0ade": "executeUpgradeNoOverlap", + "44d7f0a4": "revertTo", + "51cff8d9": "withdraw", + "817b17f0": "postTransaction", + "33949f0b": "assertNotEqDecimal", + "83eddd19": "governanceAcceptOwner", + "301e7765": "getChainAdmin", + "a8b0574e": "getCurrentBlockCoinbase", + "52c9eacb": "upgradeCutHash", + "9c4d535b": "create", + "e8de12df": "validatorsCommit", + "b11a19e8": "toString", + "6d9860e1": "l1AssetRouter", + "9ec3f927": "changeValidatorWeight", + "9507540e": "assertNotEq", + "c0865ba7": "writeToml", + "e81e0ba1": "isFunctionFreezable", + "49c4fac8": "parseJsonString", + "0b72f4ef": "assertNotEq", + "235d9eb5": "setTokenMultiplier", + "c3d93e7c": "executeBatches", + "4af63f02": "deploy", + "a0ed82fa": "governanceAcceptAdmin", + "60144197": "setTokenMultiplierSetter", + "7ba8be34": "decodeUint8", + "f5a55558": "assertNotEqDecimal", + "6631aa99": "parseJsonBytesArray", + "b2332f51": "assertNotEq", + "08e4e116": "expectCallMinGas", + "d0e30db0": "deposit", + "fd791f3c": "getL2DefaultAccountBytecodeHash", + "b197c247": "parseTomlBytesArray", + "3e716f81": "parseTomlBytes32Array", + "74f4d30d": "storedBlockHash", + "2a72b707": "bridgehubRequestL2Transaction", + "7e44bc5e": "setImmutables", + "927c4bf7": "upgradeExternal", + "440ed10d": "expectEmit", + "b7b080ab": "transferTokenToSharedBridge", + "db541184": "setShouldRevertOnExecuteBatches", + "eff6b27d": "assertEq", + "fdbb0301": "__DEPRECATED_l2BridgeAddress", + "b873634c": "assertNotEq", + "7a675bb6": "createWallet", + "399542e9": "tryBlockAndAggregate", + "81bad6f3": "expectEmit", + "ae1f6aaf": "l2Bridge", + "46cc92d9": "difficulty", + "498fdcf4": "parseJsonStringArray", + "d3522ae6": "parseTomlIntArray", + "ae5a2ae8": "serializeUintToHex", + "23dc4a09": "keccakPerformUpgrade", + "07f8c636": "multicall", + "1dd93b33": "keccakValidationTest", + "fd3c6b55": "processCalldataDA", + 
"e6d9923b": "proveL2LogInclusion", + "c3077fa9": "blockAndAggregate", + "c6ce059d": "parseAddress", + "cf347e17": "setValidator", + "b22dd78e": "storedBatchHash", + "f21d52c7": "serializeBytes", + "975d5a12": "assertEq", + "7ac3a553": "withdrawLegacyBridge", + "c4d66de8": "initialize", + "d77bfdb9": "parseTomlBytes", + "7b315630": "upgradeChainFromVersion", + "168b64d3": "createDir", + "cdffacc6": "facetAddress", + "e0ab6368": "assetIdIsRegistered", + "e25242c0": "assertGe", + "d566afd3": "createBatchCommitment", + "afc98040": "broadcast", + "cc7b0487": "parseTomlUint", + "3fdf4e15": "clearMockedCalls", + "ee82ac5e": "getBlockHash", + "79c4f929": "markBytecodeAsPublished", + "e5d6bf02": "warp", + "03e0aca9": "revertToAndDelete", + "d83e4e03": "genesisUpgrade", + "4a2e35ba": "withdraw", + "ba238947": "getProtocolVersion", + "88b44c85": "assertEq", + "fee9a469": "serializeUint", + "09e14277": "setStateTransitionManager", + "c63c4e9b": "minDelay", + "95218ecd": "executeInstant", + "abbf21cc": "assertApproxEqRelDecimal", + "addde2b6": "parseJsonUint", + "a1a7cddb": "runDeploySharedBridge", + "3644e515": "DOMAIN_SEPARATOR", + "6a5066d4": "assertApproxEqAbsDecimal", + "6edd4f12": "commitBatchesSharedBridge", + "78611f0e": "assertGtDecimal", + "f113c88b": "createNewChain", + "f3dec099": "envUint", + "4700d74b": "envOr", + "ed7c5462": "createWallet", + "7676e127": "serializeInt", + "b6ea1757": "pushNewLeaf", + "b2ded522": "initialize", + "b298e36b": "push", + "c987336c": "upgrade", + "3cda3351": "create2", + "dd85df2d": "setEraLegacyBridgeLastDepositTime", + "32c8176d": "deriveKey", + "e9420f8c": "whitelistedSettlementLayers", + "e24fed00": "assertEq", + "d323826a": "computeCreate2Address", + "6006d8b5": "verifyCompressedStateDiffs", + "1e279d41": "promptSecret", + "0d14edf7": "registerAlreadyDeployedHyperchain", + "5d382700": "create2Account", + "38720778": "sharedBridge", + "4d2301cc": "getEthBalance", + "11d1364a": "assertLeDecimal", + "1ff5a783": "execute", + 
"5c975abb": "paused", + "cf22e3c9": "startStateDiffRecording", + "64bc3e64": "envOr", + "55d35d18": "getValueUnderNonce", + "592151f0": "parseToml", + "8fbb3711": "claimFailedDepositLegacyErc20Bridge", + "53e61bdc": "processL2RollupDAValidatorOutputHash", + "41cf49bb": "prepareChainCommitment", + "86d516e8": "getCurrentBlockGasLimit", + "cfe7af7c": "finalizeDeposit", + "3ce695e7": "registerSTMAssetOnL1", + "b71bcf90": "reinitializeToken", + "db07fcd2": "assertGt", + "969b53da": "l1Bridge", + "e717bab7": "proveL1ToL2TransactionStatusViaGateway", + "1f067457": "revertTransfer", + "eb85e83b": "envOr", + "19cae462": "difficulty", + "7a1d8d3a": "safeTransferFundsFromLegacy", + "619d897f": "writeLine", + "5518c73b": "getStateTransitionManager", + "ca8f93f1": "setLegacyBaseTokenAssetId", + "eced0bf0": "__DEPRECATED_tokenIsRegistered", + "51d218f7": "unfreezeChain", + "4c6314f0": "getMarker", + "b760faf9": "depositTo", + "a1954fc5": "getTotalPriorityTxs", + "f120e6c4": "encodeTxDataHash", + "a2d5a0cc": "proveBatchesSharedBridge", + "8cf25ef4": "assertApproxEqRel", + "b8776d4d": "chainRegistered", + "7528c2c6": "applyL1ToL2Alias", + "91c75bc3": "parseJsonBytes32Array", + "22100064": "rememberKey", + "46657fe9": "getVerifier", + "e8b99b1b": "deposit", + "f1a78aa6": "postTransaction", + "a888cc3a": "bridgehubRequestL2TransactionOnGateway", + "2bcd50e0": "resumeGasMetering", + "4db19e7e": "assertEq", + "af6ed122": "executeUpgrade", + "47e50cce": "prank", + "03c5d8af": "forwardTransactionOnGateway", + "d23cd037": "mockCallRevert", + "16ed7bc4": "readFileBinary", + "28e439f3": "tryBlockAndAggregate", + "ced531eb": "setHashes", + "038a24bc": "validateAndPayForPaymasterTransaction", + "4cc5b15e": "diamondCut", + "27e86d6e": "getLastBlockHash", + "ae3165b3": "toBase64URL", + "ce817d47": "startBroadcast", + "823f1d96": "l2TokenProxyBytecodeHash", + "4b561753": "addValidator", + "d9bbf3a1": "rollFork", + "6fadcf72": "forward", + "bd6af434": "expectCall", + "4dd18bf5": 
"setPendingAdmin", + "84c2ff75": "stmAssetId", + "681fe70c": "isEmpty", + "fe26699e": "getTotalBlocksCommitted", + "a75b496d": "getAllHyperchainChainIDs", + "35d6ad46": "writeJson", + "40c10f19": "mint", + "06e7517b": "appendTransactionToCurrentL2Block", + "b852ad36": "l1SharedBridge", + "3ea053eb": "deactivate", + "e02da327": "readUint256", + "3ebf73b4": "getDeployedCode", + "86b7f856": "publishPubdataAndClearState", + "4145ca27": "removePriorityQueueFront", + "ef0e2ff4": "setChainId", + "68c09202": "executeUpgradeNoOverlap", + "402efc91": "stateTransitionManager", + "c9d1c097": "stmAssetIdFromChainId", + "315fff4e": "THIS_ADDRESS", + "3558c188": "executeBatches", + "c0d5b949": "getCurrentPubdataSpent", + "3d5923ee": "setEnv", + "57e22dde": "makePersistent", + "2b589b28": "lastCallGas", + "896909dc": "getMinNonce", + "dbaad147": "mockCallRevert", + "3437949a": "l1GenesisUpgrade", + "c4d252f5": "cancel", + "dd82d13e": "skip", + "960dcf24": "getBaseTokenAssetId", + "1777e59d": "parseJsonBytes32", + "2e522851": "setNewVersionUpgrade", + "d4ce08c2": "addNewChain", + "c74e9deb": "envOr", + "f320d963": "assertEq", + "a5277a02": "initialize", + "9366518b": "createNewChain", + "7e77b0c5": "assertEqDecimal", + "29233b1f": "deriveKey", + "dc28c0f1": "assertGeDecimal", + "975a6ce9": "rpcUrl", + "65e7c844": "parseTomlAddress", + "e13a1834": "expectCallMinGas", + "4d7baf06": "envBytes", + "5c60da1b": "implementation", + "0f3fa211": "setNativeTokenVault", + "46746c7d": "commitBatchesSharedBridge", + "cbcf2e3c": "isTransactionAllowed", + "bcf284e5": "executeTransaction", + "a8d4d1d9": "assertGe", + "7a28adb2": "proveL2LogInclusion", + "972c6062": "serializeAddress", + "b4866c43": "setFeeParams", + "08dc3360": "validatorPubKeyHashes", + "e66c8c44": "validatorTimelock", + "d74c83a4": "rollFork", + "2ab0f529": "isOperationDone", + "187598a5": "getNewAddressCreate", + "f3b7dead": "getProxyAdmin", + "952a3ee7": "getERC20Getters", + "0f23da43": "revertBatchesSharedBridge", + 
"87d9d023": "verify", + "0ec6b0b7": "getPriorityTxMaxGasLimit", + "b25c5a25": "sign", + "3408e470": "getChainId", + "707df785": "assertEq", + "7a0ed627": "facets", + "85e4e16a": "assetDeploymentTracker", + "d0f2c663": "getBatchNumberAndTimestamp", + "01d23d4b": "diamondCut", + "3a3f36f9": "codeOracleTest", + "42842e0e": "safeTransferFrom", + "28a249b0": "getLabel", + "625387dc": "unixTime", + "997a0222": "revokePersistent", + "6478d8ed": "chainAdmin", + "823447c8": "setResult", + "47eaf474": "prompt", + "d4b9f4fa": "messageRoot", + "e76db865": "setPubdataPricingMode", + "f8e18b57": "setNonce", + "d4a4ca0d": "getBlockNumberAndTimestamp", + "6bcb2c1b": "deriveKey", + "89f9a072": "validatePubdata", + "6a0cd1f5": "removeValidator", + "fe173b97": "gasPrice", + "2fce7883": "parseJsonAddressArray", + "1f98fa08": "createNewChain", + "796b89b9": "getBlockTimestamp", + "9cd45184": "chainBalance", + "363bf964": "setAddresses", + "8e8acf87": "getL2BlockNumberAndTimestamp", + "191553a4": "getRecordedLogs", + "7b510fe8": "getAccountInfo", + "9caf9bac": "setX", + "fe74f05b": "assertEq", + "e00ad03e": "replace", + "97624631": "assertEq", + "6e9d7899": "legacyBridge", + "5e97348f": "envOr", + "7ba04809": "assertFalse", + "fc57565f": "upgradeChainFromVersion", + "3635f3e6": "resetTxNumberInBatch", + "522074ab": "parseJsonUintArray", + "97c09d34": "revertBatches", + "c87325f1": "finalizeWithdrawal", + "64af255d": "isContext", + "5af231c1": "envBytes32", + "b96213e4": "mockCall", + "f877cb19": "envString", + "6900a3ae": "toString", + "e52db4ca": "baseTokenAssetId", + "60429eb2": "assertApproxEqAbsDecimal", + "1dcd1f68": "assertNotEq", + "73c58a2d": "publishBlobs", + "7f61885c": "proveBatches", + "7ecebe00": "nonces", + "26e4ae25": "initialize", + "bb7044b6": "stateTransitionManagerIsRegistered", + "91b19874": "validators", + "c1adbbff": "expectCall", + "06bed036": "setL2Block", + "02fa5779": "setNewBatch", + "0f29772b": "rollFork", + "4f1e1be0": "storeAccountConstructingCodeHash", + 
"7c9bd1f3": "publishTimestampDataToL1", + "a3912ec8": "receiveEther", + "81d100a3": "scheduleTransparent", + "2ae9c600": "protocolVersion", + "ed1d7d97": "chainIndexToId", + "c4bc59e0": "readDir", + "b277f199": "uncheckedAdd", + "a5982885": "assertFalse", + "98680034": "createSelectFork", + "aa5cf90e": "stopAndReturnStateDiff", + "3b925549": "prevrandao", + "fcc73360": "updateLeaf", + "cab7e8eb": "isNonceUsed", + "7404f1d2": "createWallet", + "7321c485": "dummySetValidator", + "501e60d5": "setUpgradeDiamondCut", + "fa9d8713": "sleep", + "310ab089": "getImmutable", + "2e1a7d4d": "withdraw", + "2986c0e5": "index", + "2f745c59": "tokenOfOwnerByIndex", + "52d1902d": "proxiableUUID", + "898e83fc": "assertNotEq", + "4f6ccce7": "tokenByIndex", + "8e214810": "parseTomlBytes32", + "15f9a2fe": "prepareForPaymaster", + "91f3b94f": "parseJsonBoolArray", + "be65940a": "setEraPostLegacyBridgeUpgradeFirstBatch", + "40a434d5": "transferTokenToNTV", + "ef011dff": "ERA_CHAIN_ID", + "c2aaf9c4": "receiveEth", + "84b0196e": "eip712Domain", + "074ae3d7": "toUppercase", + "313ce567": "decimals", + "a9f6d941": "executeUpgrade", + "d0cbbdef": "assertEqDecimal", + "4049ddd2": "chainId", + "8466d8d1": "getBridgeHubAddress", + "bcd1b23d": "updateFullTree", + "fccc11c4": "assertApproxEqRelDecimal", + "f34d1868": "setExecutionDelay", + "892a0c61": "envInt", + "41af2f52": "recordLogs", + "b4a85892": "envOr", + "ad7e232e": "setImmutables", + "74f4f547": "bridgeBurn", + "f5407abe": "setValues", + "b1fde1a8": "sharedTree", + "a972d037": "assertLtDecimal", + "a225efcb": "setPubdataInfo", + "b0f40a17": "processBatch", + "d1ba7e97": "hyperchainAddress", + "4dfe692c": "assertLe", + "7fb67816": "setValidatorTimelock", + "36f656d8": "assertEq", + "890c283b": "computeCreate2Address", + "83211b40": "signP256", + "98461504": "setUpgradeDiamondCut", + "18876a04": "chunkPubdataToBlobs", + "f30c7ba3": "expectCall", + "eb672419": "requestL2Transaction", + "7069d0c0": "executeInstant", + "6229498b": "deriveKey", 
+ "1ecb7d33": "assertApproxEqRel", + "bf1fe420": "setGasPrice", + "72c84445": "callKeccak", + "1c72346d": "resetNonce", + "4cd88b76": "initialize", + "8c374c65": "ensNamehash", + "b12e1694": "assertNotEq", + "e5fb9b4a": "assertEq", + "bf54096e": "MAX_NUMBER_OF_HYPERCHAINS", + "24a55db9": "markBytecodeAsPublished", + "fa91454d": "parseUint", + "62ee05f4": "promptAddress", + "b3160bad": "executeBatchesSharedBridge", + "08284e57": "upgrade", + "48ceb85e": "chainIndex", + "70f55728": "readLine", + "e4441b98": "initialize", + "65d5c135": "assertLt", + "8bff9133": "assertGeDecimal", + "191f1b30": "assertEq", + "ff483c54": "coinbase", + "3f704d2a": "setAssetHandlerAddress", + "c0991525": "claimFailedDeposit", + "86b9620d": "expectEmit", + "9cc7f708": "balanceOf", + "56f29cba": "assertNotEq", + "eb39e6d5": "stateTransitionManager", + "9cc395d0": "bridgeCheckCounterpartAddress", + "51ac6a33": "writeToml", + "0f28c97d": "getCurrentBlockTimestamp", + "99a88ec4": "upgrade", + "beda594a": "setHyperchain", + "60f78733": "chainSetTokenMultiplierSetter", + "9e8945d2": "verificationKeyHash", + "61f91b2e": "initialForceDeploymentHash", + "fa8f7ea6": "getAllHyperchains", + "205c2878": "withdrawTo", + "36ba0355": "bridgeMint", + "4c63e562": "assume", + "62f84b24": "sendToL1", + "1f6d6ef7": "getBlobBaseFee", + "b5b18fe5": "processL2Logs", + "9e6ea417": "depositLegacyErc20Bridge", + "c8bd0e4a": "toBase64URL", + "2281f367": "envOr", + "75fe6a99": "pushBack", + "ecf95b8a": "createAccount", + "ebf0c717": "root", + "81409b91": "mockCall", + "715018a6": "renounceOwnership", + "56142d7a": "priorityQueueFrontOperation", + "f5c1182c": "getSemverProtocolVersion", + "13bc9f20": "isOperationReady", + "a9b0d128": "setPriorityTreeStartIndex", + "95ce3e93": "decodeString", + "042901c7": "proveL1ToL2TransactionStatus", + "f7fe3477": "assertEq", + "98296c54": "assertEq", + "ac22e971": "serializeBool", + "92925aa1": "serializeBool", + "3868ac34": "assertEq", + "74044673": "addStateTransitionManager", + 
"1e19e657": "parseJsonAddress", + "9b3358b0": "serializeJson", + "671a7131": "settlementLayer", + "ddeaa8e6": "getBatchHash", + "7da01cd6": "executeUpgrade", + "0c9fd581": "assertTrue", + "39d7d4aa": "getPriorityTreeRoot", + "53b9e632": "assetHandlerAddress", + "9884b232": "serializeBytes", + "8d1cc925": "getCode", + "23361207": "expectCall", + "77421056": "setFunctionToCall", + "af368a08": "fsMetadata", + "689992b3": "undoL1ToL2Alias", + "5a590335": "getDAValidatorPair", + "19fa7f62": "claimFailedDeposit", + "2a79c611": "getCommitment", + "202bcce7": "validateTransaction", + "155fd27a": "setValueUnderNonce", + "bb0fd610": "extendedAccountVersion", + "46d0b252": "assertNotEq", + "f5f15168": "l2TokenAddress", + "fb644fc5": "addChainBatchRoot", + "3a9d7f8d": "stmDeployer", + "8b257989": "executionDelay", + "3e914080": "assertLt", + "c2eeeebd": "l1Address", + "71ee464d": "createSelectFork", + "a34edc03": "assertTrue", + "8bb8dd43": "parseTomlString", + "236e4d66": "assertNotEq", + "64e130cf": "nativeTokenVault", + "aad74262": "setProtocolVersionDeadline", + "9711715a": "snapshot", + "64949a8d": "assertGtDecimal", + "4d4a1eca": "setTokenMultiplier", + "79ba5097": "acceptOwnership", + "e2a9d554": "setUpgradeTimestamp", + "27eb6c0f": "securityCouncil", + "45b56078": "startPrank", + "652fd489": "promptUint", + "4623c91d": "setValidator", + "82b57749": "forwardedBridgeMint", + "f1afe04d": "removeFile", + "8ffe1b81": "setBridgeHubAddress", + "accdd16c": "freezeChain", + "12f43dab": "bridgehubRequestL2Transaction", + "c9f5c932": "requestL2TransactionTwoBridges", + "98f9bdbd": "assertNotEq", + "42346c5e": "parseInt", + "9b67b21c": "setNonceUnsafe", + "9f629281": "parseTomlStringArray", + "3659cfe6": "upgradeTo", + "631f4bac": "getPriorityQueueSize", + "bce38bd7": "tryAggregate", + "fea2d14f": "assertApproxEqRel", + "2d812b44": "serializeBytes32", + "94ca304b": "numNodes", + "799cd333": "sign", + "c2e90293": "bridgeRecoverFailedTransfer", + "69340beb": "multicall", + 
"70a08231": "balanceOf", + "4cd40a02": "setLegacyTokenAssetId", + "4bed8212": "isWithdrawalFinalized", + "84da1fb4": "getNewAddressCreate2", + "ffa18649": "addr", + "9f3f89dc": "getZero", + "081812fc": "getApproved", + "6ef25c3a": "baseFee", + "29092d0e": "remove", + "201e43e2": "serializeBytes32", + "74318528": "envOr", + "ec8067c7": "updateNonceOrdering", + "841a9d42": "aggregate3Value", + "b5df27c8": "parseTomlUintArray", + "f1d357e5": "L1_SHARED_BRIDGE", + "1de72e34": "baseTokenGasPriceMultiplierDenominator", + "a5cbfe65": "toBase64", + "528a683c": "keyExists", + "71abd109": "upgrade", + "381c3f13": "checkDA", + "0ef26743": "height", + "7fefbbe0": "assertLeDecimal", + "ef939455": "keccakUpgradeTest", + "1806aa18": "getCodeSize", + "f4943a20": "protocolVersionDeadline", + "de8fa431": "getSize", + "a851ae78": "setTxOrigin", + "f45c1ce7": "tryFfi", + "39b37ab0": "fee", + "261a323e": "exists", + "7cb9357e": "gasPerPubdataByte", + "8f283970": "changeAdmin", + "266cf109": "record", + "7efda2ae": "proveL2LeafInclusion", + "246a61de": "ERA_DIAMOND_PROXY", + "b4d6c782": "etch", + "c05afaa6": "initializeDevBridge", + "52ef6b2c": "facetAddresses", + "8c1aa205": "sign", + "129e9002": "serializeUint", + "d52471c1": "requestL2TransactionDirect", + "c2e047ff": "aggregate3", + "e48a8f8d": "assertEq", + "027f12e1": "changeFeeParams", + "095ea7b3": "approve", + "8f31f052": "isWithdrawalFinalized", + "7a592065": "calculateRoot", + "1c5a9d9c": "activate", + "dd354a06": "calculateCreate2TokenAddress", + "9d1b5a81": "getL2SystemContractsUpgradeBlockNumber", + "fb1a9a57": "getDeploymentNonce", + "6d1d8363": "scheduleShadow", + "42cbb15c": "getBlockNumber", + "18160ddd": "totalSupply", + "c1350739": "parseTomlInt", + "859216bc": "envOr", + "d92f86a2": "setLegacyChainAddress", + "f280efbe": "initializeChainGovernance", + "6e9960c3": "getAdmin", + "af6a2dcd": "getTotalBlocksVerified", + "50bb0884": "toLowercase", + "9623609d": "upgradeAndCall", + "8ac84c0e": "txNumberInBlock", + 
"9fa8826b": "depositHappened", + "b5872958": "timestamps", + "49eb3b50": "getTransactionHashes", + "6c0960f9": "finalizeEthWithdrawal", + "c1978d1f": "envUint", + "c4879440": "bridgehubDepositBaseToken", + "0603ea68": "assertNotEq", + "c924de35": "transferEthToSharedBridge", + "f8c1f0d2": "upgradeChainFromVersion", + "7d15d019": "isDir", + "ea060291": "allowCheatcodes", + "82ad56cb": "aggregate3", + "805b9869": "executeTransactionFromOutside", + "1624f6c6": "initialize", + "daa51a8c": "pushBack", + "286fafea": "assertNotEq", + "5df93c9b": "assertGeDecimal", + "974ef924": "parseBool" +} \ No newline at end of file diff --git a/etc/utils/src/index.ts b/etc/utils/src/index.ts index 28cd864a1bf6..e64439c53fcb 100644 --- a/etc/utils/src/index.ts +++ b/etc/utils/src/index.ts @@ -25,7 +25,8 @@ const IGNORED_DIRS = [ 'artifacts-zk', 'cache-zk', // Ignore directories with OZ and forge submodules. - 'contracts/l1-contracts/lib' + 'contracts/l1-contracts/lib', + 'era-observability' ]; const IGNORED_FILES = ['KeysWithPlonkVerifier.sol', 'TokenInit.sol', '.tslintrc.js', '.prettierrc.js']; @@ -33,6 +34,7 @@ const IGNORED_FILES = ['KeysWithPlonkVerifier.sol', 'TokenInit.sol', '.tslintrc. 
// spawns a new shell and can execute arbitrary commands, like "ls -la | grep .env" // returns { stdout, stderr } const promisified = promisify(_exec); + export function exec(command: string) { command = command.replace(/\n/g, ' '); return promisified(command); diff --git a/infrastructure/zk/src/docker.ts b/infrastructure/zk/src/docker.ts index 27de68d1d98d..035061a8ed0d 100644 --- a/infrastructure/zk/src/docker.ts +++ b/infrastructure/zk/src/docker.ts @@ -114,7 +114,7 @@ async function _build(image: string, tagList: string[], dockerOrg: string, platf if (platform != '') { buildArgs += `--platform=${platform} `; } - if (image === 'prover-gpu-fri') { + if (image === 'prover-gpu-fri' || image == 'proof-fri-gpu-compressor') { const cudaArch = process.env.CUDA_ARCH; buildArgs += `--build-arg CUDA_ARCH='${cudaArch}' `; } @@ -126,6 +126,8 @@ async function _build(image: string, tagList: string[], dockerOrg: string, platf } buildArgs += extraArgs; + console.log('Build args: ', buildArgs); + const buildCommand = `DOCKER_BUILDKIT=1 docker buildx build ${tagsToBuild}` + (buildArgs ? 
` ${buildArgs}` : '') + diff --git a/prover/Cargo.lock b/prover/Cargo.lock index a838c8cbc0ef..d29f0110f217 100644 --- a/prover/Cargo.lock +++ b/prover/Cargo.lock @@ -45,7 +45,7 @@ version = "0.8.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "once_cell", "version_check", "zerocopy", @@ -347,18 +347,6 @@ dependencies = [ "tower-service", ] -[[package]] -name = "backon" -version = "0.4.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d67782c3f868daa71d3533538e98a8e13713231969def7536e8039606fc46bf0" -dependencies = [ - "fastrand", - "futures-core", - "pin-project", - "tokio", -] - [[package]] name = "backtrace" version = "0.3.72" @@ -367,7 +355,7 @@ checksum = "17c6a35df3749d2e8bb1b7b21a976d82b15548788d2735b9d82f329268f71a11" dependencies = [ "addr2line", "cc", - "cfg-if 1.0.0", + "cfg-if", "libc", "miniz_oxide", "object", @@ -419,54 +407,6 @@ dependencies = [ "serde", ] -[[package]] -name = "bellman_ce" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ea340d5c1394ee4daf4415dd80e06f74e0ad9b08e21f73f6bb1fa3a9dfae80d" -dependencies = [ - "arrayvec 0.7.4", - "bit-vec", - "blake2s_const 0.7.0", - "blake2s_simd", - "byteorder", - "cfg-if 1.0.0", - "crossbeam 0.7.3", - "futures 0.3.30", - "hex", - "lazy_static", - "num_cpus", - "pairing_ce", - "rand 0.4.6", - "serde", - "smallvec", - "tiny-keccak 1.5.0", -] - -[[package]] -name = "bellman_ce" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7aab6627603565b664e6c643a1dc7ea8bbff25b776f5fecd80ac88308fc7007b" -dependencies = [ - "arrayvec 0.7.4", - "bit-vec", - "blake2s_const 0.8.0", - "blake2s_simd", - "byteorder", - "cfg-if 1.0.0", - "crossbeam 0.7.3", - "futures 0.3.30", - "hex", - "lazy_static", - "num_cpus", - 
"pairing_ce", - "rand 0.4.6", - "serde", - "smallvec", - "tiny-keccak 1.5.0", -] - [[package]] name = "bigdecimal" version = "0.4.5" @@ -512,27 +452,6 @@ dependencies = [ "which", ] -[[package]] -name = "bindgen" -version = "0.65.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cfdf7b466f9a4903edc73f95d6d2bcd5baf8ae620638762244d3f60143643cc5" -dependencies = [ - "bitflags 1.3.2", - "cexpr", - "clang-sys", - "lazy_static", - "lazycell", - "peeking_take_while", - "prettyplease", - "proc-macro2 1.0.85", - "quote 1.0.36", - "regex", - "rustc-hash", - "shlex", - "syn 2.0.66", -] - [[package]] name = "bindgen" version = "0.69.4" @@ -641,28 +560,6 @@ dependencies = [ "digest 0.10.7", ] -[[package]] -name = "blake2s_const" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f39d933cb38939f885001867874c65699c36f30f0c78aae9f4c9f01b3e4b306a" -dependencies = [ - "arrayref", - "arrayvec 0.5.2", - "constant_time_eq", -] - -[[package]] -name = "blake2s_const" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1db04f0f5f88d8c95977159949b23d2ed24d33309901cf7f7e48ed40f36de667" -dependencies = [ - "arrayref", - "arrayvec 0.5.2", - "constant_time_eq", -] - [[package]] name = "blake2s_simd" version = "0.5.11" @@ -713,18 +610,17 @@ dependencies = [ [[package]] name = "boojum" -version = "0.2.2" +version = "0.30.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df88daa33db46d683967ca09a4f04817c38950483f2501a771d497669a8a4bb1" +checksum = "68ec2f007ff8f90cc459f03e9f30ca1065440170f013c868823646e2e48d0234" dependencies = [ "arrayvec 0.7.4", "bincode", "blake2 0.10.6", "const_format", "convert_case", - "crossbeam 0.8.4", + "crossbeam", "crypto-bigint 0.5.5", - "cs_derive", "derivative", "ethereum-types", "firestorm", @@ -732,7 +628,6 @@ dependencies = [ "lazy_static", "num-modular", "num_cpus", - 
"pairing_ce", "rand 0.8.5", "rayon", "serde", @@ -741,13 +636,15 @@ dependencies = [ "smallvec", "tracing", "unroll", + "zksync_cs_derive", + "zksync_pairing", ] [[package]] name = "boojum-cuda" -version = "0.150.6" +version = "0.150.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "252c28bc729eb32a053de0cbd1c8c55b2f51d00ca0c656f30bc70d255c2d8753" +checksum = "ac7735446f2263e8d12435fc4d5a02c7727838eaffc7c518a961b3e839fb59e7" dependencies = [ "boojum", "cmake", @@ -826,12 +723,6 @@ dependencies = [ "syn 1.0.109", ] -[[package]] -name = "bytecount" -version = "0.6.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ce89b21cab1437276d2650d57e971f9d548a2d9037cc231abdc0562b97498ce" - [[package]] name = "byteorder" version = "1.5.0" @@ -844,48 +735,6 @@ version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "514de17de45fdb8dc022b1a7975556c53c86f9f0aa5f534b98977b171857c2c9" -[[package]] -name = "bzip2-sys" -version = "0.1.11+1.0.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "736a955f3fa7875102d57c82b8cac37ec45224a07fd32d58f9f7a186b6cd4cdc" -dependencies = [ - "cc", - "libc", - "pkg-config", -] - -[[package]] -name = "camino" -version = "1.1.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b96ec4966b5813e2c0507c1f86115c8c5abaadc3980879c3424042a02fd1ad3" -dependencies = [ - "serde", -] - -[[package]] -name = "cargo-platform" -version = "0.1.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24b1f0365a6c6bb4020cd05806fd0d33c44d38046b8bd7f0e40814b9763cabfc" -dependencies = [ - "serde", -] - -[[package]] -name = "cargo_metadata" -version = "0.14.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4acbb09d9ee8e23699b9634375c72795d095bf268439da88562cf9b501f181fa" -dependencies = [ - "camino", - 
"cargo-platform", - "semver", - "serde", - "serde_json", -] - [[package]] name = "cc" version = "1.1.14" @@ -912,12 +761,6 @@ dependencies = [ "nom", ] -[[package]] -name = "cfg-if" -version = "0.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822" - [[package]] name = "cfg-if" version = "1.0.0" @@ -947,12 +790,12 @@ dependencies = [ [[package]] name = "circuit_definitions" -version = "0.150.4" +version = "0.150.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fffaa17c1585fbf010b9340bb1fd7f4c4eedec2c15cb74a72162fd2d16435d55" +checksum = "9b532214f063e5e0ee5c0fc1d3afd56dec541efa68b8985f14cc55cc324f4c48" dependencies = [ - "circuit_encodings 0.150.4", - "crossbeam 0.8.4", + "circuit_encodings 0.150.5", + "crossbeam", "derivative", "seq-macro", "serde", @@ -997,82 +840,82 @@ dependencies = [ [[package]] name = "circuit_encodings" -version = "0.150.4" +version = "0.150.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2593c02ad6b4b31ba63506c3f807f666133dd36bf47422f99b1d2947cf3c8dc1" +checksum = "e67617688c66640c84f9b98ff26d48f7898dca4faeb45241a4f21ec333788e7b" dependencies = [ "derivative", "serde", - "zk_evm 0.150.4", - "zkevm_circuits 0.150.4", + "zk_evm 0.150.5", + "zkevm_circuits 0.150.5", ] [[package]] name = "circuit_sequencer_api" -version = "0.133.0" +version = "0.133.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a87dc7bee6630d4954ac7982eb77e2007476662250cf18e5c460bbc5ee435f1" +checksum = "eb959b1f8c6bbd8be711994d182e85452a26a5d2213a709290b71c8262af1331" dependencies = [ - "bellman_ce 0.7.0", "derivative", "rayon", "serde", "zk_evm 0.133.0", + "zksync_bellman", ] [[package]] name = "circuit_sequencer_api" -version = "0.140.0" +version = "0.140.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"4b5138e6524c73e6d49fc1d0822b26e62a8d78b2c07e4e1c56061a447c10bec0" +checksum = "fa5f22311ce609d852d7d9f4943535ea4610aeb785129ae6ff83d5201c4fb387" dependencies = [ - "bellman_ce 0.7.0", "circuit_encodings 0.140.1", "derivative", "rayon", "serde", "zk_evm 0.140.0", + "zksync_bellman", ] [[package]] name = "circuit_sequencer_api" -version = "0.141.1" +version = "0.141.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55a257b31a8ea1c1723cab4fb5661c6b4c0ebe022d4b73bea9eb7c9150bd3bc1" +checksum = "4c47c71d6ba83a8beb0af13af70beffd627f5497caf3d44c6f96363e788b07ea" dependencies = [ - "bellman_ce 0.8.0", "circuit_encodings 0.141.1", "derivative", "rayon", "serde", "zk_evm 0.141.0", + "zksync_bellman", ] [[package]] name = "circuit_sequencer_api" -version = "0.142.0" +version = "0.142.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1d861a7a9b8df9389c63092985fc993c46954771da86462d7cab8cbf55a6497" +checksum = "e264723359e6a1aad98110bdccf1ae3ad596e93e7d31da9e40f6adc07e4add54" dependencies = [ - "bellman_ce 0.7.0", "circuit_encodings 0.142.1", "derivative", "rayon", "serde", "zk_evm 0.141.0", + "zksync_bellman", ] [[package]] name = "circuit_sequencer_api" -version = "0.150.4" +version = "0.150.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42d1a86b9c2207f3bb2dff5f00d1af1cb95004b6d07e9bacb6519fe08f12c04b" +checksum = "21017310971d4a051e4a52ad70eed11d1ae69defeca8314f73a3a4bad16705a9" dependencies = [ - "bellman_ce 0.7.0", - "circuit_encodings 0.150.4", + "circuit_encodings 0.150.5", "derivative", "rayon", "serde", + "zksync_bellman", ] [[package]] @@ -1197,7 +1040,7 @@ version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4ca0197aee26d1ae37445ee532fefce43251d24cc7c166799f4d46817f1d3973" dependencies = [ - "crossbeam-utils 0.8.20", + "crossbeam-utils", ] [[package]] @@ -1306,21 +1149,7 @@ version = "1.4.2" source 
= "registry+https://github.com/rust-lang/crates.io-index" checksum = "a97769d94ddab943e4510d138150169a2758b5ef3eb191a9ee688de3e23ef7b3" dependencies = [ - "cfg-if 1.0.0", -] - -[[package]] -name = "crossbeam" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69323bff1fb41c635347b8ead484a5ca6c3f11914d784170b158d8449ab07f8e" -dependencies = [ - "cfg-if 0.1.10", - "crossbeam-channel 0.4.4", - "crossbeam-deque 0.7.4", - "crossbeam-epoch 0.8.2", - "crossbeam-queue 0.2.3", - "crossbeam-utils 0.7.2", + "cfg-if", ] [[package]] @@ -1329,21 +1158,11 @@ version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1137cd7e7fc0fb5d3c5a8678be38ec56e819125d8d7907411fe24ccb943faca8" dependencies = [ - "crossbeam-channel 0.5.13", - "crossbeam-deque 0.8.5", - "crossbeam-epoch 0.9.18", - "crossbeam-queue 0.3.11", - "crossbeam-utils 0.8.20", -] - -[[package]] -name = "crossbeam-channel" -version = "0.4.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b153fe7cbef478c567df0f972e02e6d736db11affe43dfc9c56a9374d1adfb87" -dependencies = [ - "crossbeam-utils 0.7.2", - "maybe-uninit", + "crossbeam-channel", + "crossbeam-deque", + "crossbeam-epoch", + "crossbeam-queue", + "crossbeam-utils", ] [[package]] @@ -1352,18 +1171,7 @@ version = "0.5.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "33480d6946193aa8033910124896ca395333cae7e2d1113d1fef6c3272217df2" dependencies = [ - "crossbeam-utils 0.8.20", -] - -[[package]] -name = "crossbeam-deque" -version = "0.7.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c20ff29ded3204c5106278a81a38f4b482636ed4fa1e6cfbeef193291beb29ed" -dependencies = [ - "crossbeam-epoch 0.8.2", - "crossbeam-utils 0.7.2", - "maybe-uninit", + "crossbeam-utils", ] [[package]] @@ -1372,23 +1180,8 @@ version = "0.8.5" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "613f8cc01fe9cf1a3eb3d7f488fd2fa8388403e97039e2f73692932e291a770d" dependencies = [ - "crossbeam-epoch 0.9.18", - "crossbeam-utils 0.8.20", -] - -[[package]] -name = "crossbeam-epoch" -version = "0.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "058ed274caafc1f60c4997b5fc07bf7dc7cca454af7c6e81edffe5f33f70dace" -dependencies = [ - "autocfg", - "cfg-if 0.1.10", - "crossbeam-utils 0.7.2", - "lazy_static", - "maybe-uninit", - "memoffset", - "scopeguard", + "crossbeam-epoch", + "crossbeam-utils", ] [[package]] @@ -1397,18 +1190,7 @@ version = "0.9.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e" dependencies = [ - "crossbeam-utils 0.8.20", -] - -[[package]] -name = "crossbeam-queue" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "774ba60a54c213d409d5353bda12d49cd68d14e45036a285234c8d6f91f92570" -dependencies = [ - "cfg-if 0.1.10", - "crossbeam-utils 0.7.2", - "maybe-uninit", + "crossbeam-utils", ] [[package]] @@ -1417,18 +1199,7 @@ version = "0.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "df0346b5d5e76ac2fe4e327c5fd1118d6be7c51dfb18f9b7922923f287471e35" dependencies = [ - "crossbeam-utils 0.8.20", -] - -[[package]] -name = "crossbeam-utils" -version = "0.7.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3c7c73a2d1e9fc0886a08b93e98eb643461230d5f1925e4036204d5f2e261a8" -dependencies = [ - "autocfg", - "cfg-if 0.1.10", - "lazy_static", + "crossbeam-utils", ] [[package]] @@ -1525,7 +1296,7 @@ version = "4.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "97fb8b7c4503de7d6ae7b42ab72a5a59857b4c937ec27a3d4539dba95b5ab2be" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", 
"cpufeatures", "curve25519-dalek-derive", "digest 0.10.7", @@ -1581,19 +1352,6 @@ dependencies = [ "syn 1.0.109", ] -[[package]] -name = "dashmap" -version = "5.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "978747c1d849a7d2ee5e8adc0159961c48fb7e5db2f06af6723b80123bb53856" -dependencies = [ - "cfg-if 1.0.0", - "hashbrown 0.14.5", - "lock_api", - "once_cell", - "parking_lot_core", -] - [[package]] name = "debugid" version = "0.8.0" @@ -1858,7 +1616,7 @@ version = "0.8.34" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b45de904aa0b010bce2ab45264d0631681847fa7b6f2eaa7dab7619943bc4f59" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", ] [[package]] @@ -1924,9 +1682,9 @@ checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" [[package]] name = "era_cudart" -version = "0.150.6" +version = "0.150.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "803be147b389086e33254a6c9fe26a0d1d21a11f9f73181cad06cf5b1beb7d16" +checksum = "f76aa50bd291b43ad56fb7da3e63c4c3cecb3c7e19db76c8097856371bc0d84a" dependencies = [ "bitflags 2.6.0", "era_cudart_sys", @@ -1935,21 +1693,13 @@ dependencies = [ [[package]] name = "era_cudart_sys" -version = "0.150.6" +version = "0.150.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49f9a3d87f3d45d11bc835e5fc78fe6e3fe243355d435f6b3e794b98df7d3323" +checksum = "e7d2db304df6b72141d45b140ec6df68ecd2300a7ab27de18b3e0e3af38c9776" dependencies = [ "serde_json", ] -[[package]] -name = "eravm-stable-interface" -version = "0.1.0" -source = "git+https://github.com/matter-labs/vm2.git?rev=4ef15d46410ffc11744771a3a6c7c09dd9470c90#4ef15d46410ffc11744771a3a6c7c09dd9470c90" -dependencies = [ - "primitive-types", -] - [[package]] name = "errno" version = "0.3.9" @@ -1960,22 +1710,13 @@ dependencies = [ "windows-sys 0.52.0", ] -[[package]] -name = "error-chain" -version = "0.12.4" 
-source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d2f06b9cac1506ece98fe3231e3cc9c4410ec3d5b1f24ae1c8946f0742cdefc" -dependencies = [ - "version_check", -] - [[package]] name = "etcetera" version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "136d1b5283a1ab77bd9257427ffd09d8667ced0570b6f938942bc7568ed5b943" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "home", "windows-sys 0.48.0", ] @@ -2068,27 +1809,11 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5b538e4231443a5b9c507caee3356f016d832cf7393d2d90f03ea3180d4e3fbc" dependencies = [ "byteorder", - "ff_derive_ce", "hex", "rand 0.4.6", "serde", ] -[[package]] -name = "ff_derive_ce" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b96fbccd88dbb1fac4ee4a07c2fcc4ca719a74ffbd9d2b9d41d8c8eb073d8b20" -dependencies = [ - "num-bigint 0.4.5", - "num-integer", - "num-traits", - "proc-macro2 1.0.85", - "quote 1.0.36", - "serde", - "syn 1.0.109", -] - [[package]] name = "fiat-crypto" version = "0.2.9" @@ -2184,12 +1909,11 @@ dependencies = [ [[package]] name = "franklin-crypto" -version = "0.2.2" +version = "0.30.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05eab544ba915269919b5f158a061b540a4e3a04150c1346481f4f7b80eb6311" +checksum = "971289216ea5c91872e5e0bb6989214b537bbce375d09fabea5c3ccfe031b204" dependencies = [ "arr_macro", - "bellman_ce 0.8.0", "bit-vec", "blake2 0.9.2", "blake2-rfc_bellman_edition", @@ -2213,6 +1937,7 @@ dependencies = [ "smallvec", "splitmut", "tiny-keccak 1.5.0", + "zksync_bellman", ] [[package]] @@ -2379,7 +2104,7 @@ version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "js-sys", "libc", "wasi", @@ -3265,7 +2990,7 @@ 
version = "0.11.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72c1e0b51e7ec0a97369623508396067a486bd0cbed95a2659a4b863d28cfc8b" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "ecdsa 0.14.8", "elliptic-curve 0.12.3", "sha2 0.10.8", @@ -3277,7 +3002,7 @@ version = "0.13.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "956ff9b67e26e1a6a866cb758f12c6f8746208489e3e4a4b5580802f2f0a587b" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "ecdsa 0.16.9", "elliptic-curve 0.13.8", "once_cell", @@ -3321,7 +3046,7 @@ version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0c2a198fb6b0eada2a8df47933734e6d35d350665a33a3593d7164fa52c75c19" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "windows-targets 0.52.5", ] @@ -3331,22 +3056,6 @@ version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058" -[[package]] -name = "librocksdb-sys" -version = "0.11.0+8.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3386f101bcb4bd252d8e9d2fb41ec3b0862a15a62b478c355b2982efa469e3e" -dependencies = [ - "bindgen 0.65.1", - "bzip2-sys", - "cc", - "glob", - "libc", - "libz-sys", - "lz4-sys", - "zstd-sys", -] - [[package]] name = "libsqlite3-sys" version = "0.30.1" @@ -3358,17 +3067,6 @@ dependencies = [ "vcpkg", ] -[[package]] -name = "libz-sys" -version = "1.1.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2d16453e800a8cf6dd2fc3eb4bc99b786a9b90c663b8559a5b1a041bf89e472" -dependencies = [ - "cc", - "pkg-config", - "vcpkg", -] - [[package]] name = "linux-raw-sys" version = "0.4.14" @@ -3435,16 +3133,6 @@ dependencies = [ "logos-codegen", ] -[[package]] -name = "lz4-sys" -version = "1.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"109de74d5d2353660401699a4174a4ff23fcc649caf553df71933c7fb45ad868" -dependencies = [ - "cc", - "libc", -] - [[package]] name = "match_cfg" version = "0.1.0" @@ -3466,19 +3154,13 @@ version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94" -[[package]] -name = "maybe-uninit" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60302e4db3a61da70c0cb7991976248362f30319e88850c487b9b95bbf059e00" - [[package]] name = "md-5" version = "0.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d89e7ee0cfbedfc4da3340218492196241d89eefb6dab27de5df917a6d2e78cf" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "digest 0.10.7", ] @@ -3494,15 +3176,6 @@ version = "2.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6c8640c5d730cb13ebd907d8d04b52f55ac9a2eec55b440c8892f40d56c76c1d" -[[package]] -name = "memoffset" -version = "0.5.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "043175f069eda7b85febe4a74abbaeff828d9f8b448515d3151a14a3542811aa" -dependencies = [ - "autocfg", -] - [[package]] name = "miette" version = "5.10.0" @@ -3542,21 +3215,6 @@ dependencies = [ "unicase", ] -[[package]] -name = "mini-moka" -version = "0.10.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c325dfab65f261f386debee8b0969da215b3fa0037e74c8a1234db7ba986d803" -dependencies = [ - "crossbeam-channel 0.5.13", - "crossbeam-utils 0.8.20", - "dashmap", - "skeptic", - "smallvec", - "tagptr", - "triomphe", -] - [[package]] name = "minimal-lexical" version = "0.2.1" @@ -3644,7 +3302,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ab2156c4fce2f8df6c499cc1c763e4394b7482525bf2a9701c9d79d215f519e4" dependencies = [ "bitflags 2.6.0", - "cfg-if 1.0.0", + 
"cfg-if", "cfg_aliases", "libc", ] @@ -3893,7 +3551,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9529f4786b70a3e8c61e11179af17ab6188ad8d0ded78c5529441ed39d4bd9c1" dependencies = [ "bitflags 2.6.0", - "cfg-if 1.0.0", + "cfg-if", "foreign-types", "libc", "once_cell", @@ -4066,19 +3724,6 @@ dependencies = [ "sha2 0.10.8", ] -[[package]] -name = "pairing_ce" -version = "0.28.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "843b5b6fb63f00460f611dbc87a50bbbb745f0dfe5cbf67ca89299c79098640e" -dependencies = [ - "byteorder", - "cfg-if 1.0.0", - "ff_ce", - "rand 0.4.6", - "serde", -] - [[package]] name = "parity-scale-codec" version = "3.6.11" @@ -4127,7 +3772,7 @@ version = "0.9.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "libc", "redox_syscall 0.5.1", "smallvec", @@ -4629,17 +4274,6 @@ dependencies = [ "syn 1.0.109", ] -[[package]] -name = "pulldown-cmark" -version = "0.9.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57206b407293d2bcd3af849ce869d52068623f19e1b5ff8e8778e3309439682b" -dependencies = [ - "bitflags 2.6.0", - "memchr", - "unicase", -] - [[package]] name = "quick-error" version = "1.2.3" @@ -4762,8 +4396,8 @@ version = "1.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1465873a3dfdaa8ae7cb14b4383657caab0b3e8a0aa9ae8e04b044854c8dfce2" dependencies = [ - "crossbeam-deque 0.8.5", - "crossbeam-utils 0.8.20", + "crossbeam-deque", + "crossbeam-utils", ] [[package]] @@ -4950,9 +4584,9 @@ dependencies = [ [[package]] name = "rescue_poseidon" -version = "0.5.2" +version = "0.30.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f27fbc6ba44baf99a0ca8387b1fa1cf90d3d7062860c1afedbbb64454829acc5" +checksum = 
"82900c877a0ba5362ac5756efbd82c5b795dc509011c1253e2389d8708f1389d" dependencies = [ "addchain", "arrayvec 0.7.4", @@ -5001,7 +4635,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d" dependencies = [ "cc", - "cfg-if 1.0.0", + "cfg-if", "getrandom", "libc", "spin", @@ -5048,16 +4682,6 @@ dependencies = [ "rustc-hex", ] -[[package]] -name = "rocksdb" -version = "0.21.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb6f170a4041d50a0ce04b0d2e14916d6ca863ea2e422689a5b694395d299ffe" -dependencies = [ - "libc", - "librocksdb-sys", -] - [[package]] name = "rsa" version = "0.9.6" @@ -5365,9 +4989,6 @@ name = "semver" version = "1.0.23" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "61697e0a1c7e512e84a621326239844a24d8207b4669b41bc18b32ea5cbf988b" -dependencies = [ - "serde", -] [[package]] name = "send_wrapper" @@ -5585,7 +5206,7 @@ version = "0.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "cpufeatures", "digest 0.10.7", ] @@ -5597,7 +5218,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4d58a1e1bf39749807d89cf2d98ac2dfa0ff1cb3faa38fbb64dd88ac8013d800" dependencies = [ "block-buffer 0.9.0", - "cfg-if 1.0.0", + "cfg-if", "cpufeatures", "digest 0.9.0", "opaque-debug", @@ -5609,7 +5230,7 @@ version = "0.10.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "cpufeatures", "digest 0.10.7", ] @@ -5620,7 +5241,7 @@ version = "0.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"eca2daa77078f4ddff27e75c4bf59e4c2697525f56dbb3c842d34a5d1f2b04a2" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "cpufeatures", "digest 0.10.7", ] @@ -5674,9 +5295,9 @@ checksum = "24188a676b6ae68c3b2cb3a01be17fbf7240ce009799bb56d5b1409051e78fde" [[package]] name = "shivini" -version = "0.150.6" +version = "0.150.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "331868b8d92ffec8887c17e786632cf0c9bd4750986fc1400a6d1fbf3739cba4" +checksum = "3f11e6942c89861aecb72261f8220800a1b69b8a5463c07c24df75b81fd809b0" dependencies = [ "bincode", "blake2 0.10.6", @@ -5746,21 +5367,6 @@ dependencies = [ "time", ] -[[package]] -name = "skeptic" -version = "0.13.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16d23b015676c90a0f01c197bfdc786c20342c73a0afdda9025adb0bc42940a8" -dependencies = [ - "bytecount", - "cargo_metadata", - "error-chain", - "glob", - "pulldown-cmark", - "tempfile", - "walkdir", -] - [[package]] name = "slab" version = "0.4.9" @@ -5781,9 +5387,9 @@ dependencies = [ [[package]] name = "snark_wrapper" -version = "0.1.2" +version = "0.30.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "71aa5bffe5e7daca634bf2fedf0bf566273cb7eae01711d1aa6e5223d36d987d" +checksum = "0b5dfdc3eed51d79541adff827593743750fe6626a65006814f8cfa4273371de" dependencies = [ "derivative", "rand 0.4.6", @@ -5886,7 +5492,7 @@ dependencies = [ "bytes", "chrono", "crc", - "crossbeam-queue 0.3.11", + "crossbeam-queue", "either", "event-listener", "futures-channel", @@ -6240,12 +5846,6 @@ dependencies = [ "libc", ] -[[package]] -name = "tagptr" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b2093cf4c8eb1e67749a6762251bc9cd836b6fc171623bd0a9d324d37af2417" - [[package]] name = "tap" version = "1.0.1" @@ -6258,7 +5858,7 @@ version = "3.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"85b77fafb263dd9d05cbeac119526425676db3784113aa9295c88498cbf8bff1" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "fastrand", "rustix", "windows-sys 0.52.0", @@ -6336,7 +5936,7 @@ version = "1.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b9ef9bad013ada3808854ceac7b46812a6465ba368859a37e2100283d2d719c" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "once_cell", ] @@ -6695,12 +6295,6 @@ dependencies = [ "tracing-serde", ] -[[package]] -name = "triomphe" -version = "0.1.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6631e42e10b40c0690bf92f404ebcfe6e1fdb480391d15f17cc8e96eeed5369" - [[package]] name = "try-lock" version = "0.2.5" @@ -6942,18 +6536,6 @@ dependencies = [ "syn 2.0.66", ] -[[package]] -name = "vm2" -version = "0.1.0" -source = "git+https://github.com/matter-labs/vm2.git?rev=4ef15d46410ffc11744771a3a6c7c09dd9470c90#4ef15d46410ffc11744771a3a6c7c09dd9470c90" -dependencies = [ - "enum_dispatch", - "eravm-stable-interface", - "primitive-types", - "zk_evm_abstractions 0.150.4", - "zkevm_opcode_defs 0.150.4", -] - [[package]] name = "wait-timeout" version = "0.2.0" @@ -7000,7 +6582,7 @@ version = "0.2.92" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4be2531df63900aeb2bca0daaaddec08491ee64ceecbee5076636a3b026795a8" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "wasm-bindgen-macro", ] @@ -7025,7 +6607,7 @@ version = "0.4.42" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "76bc14366121efc8dbb487ab05bcc9d346b3b5ec0eaa76e46594cabbe51762c0" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "js-sys", "wasm-bindgen", "web-sys", @@ -7318,7 +6900,7 @@ version = "0.50.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "windows-sys 0.48.0", ] @@ -7328,7 
+6910,7 @@ version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a277a57398d4bfa075df44f501a17cfdf8542d224f0d36095a2adc7aee4ef0a5" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "windows-sys 0.48.0", ] @@ -7455,9 +7037,9 @@ dependencies = [ [[package]] name = "zk_evm" -version = "0.150.4" +version = "0.150.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2dbb0ed38d61fbd04bd7575755924d1303e129c04c909abba7f5bfcc6260bcf" +checksum = "5a6e69931f24db5cf333b714721e8d80ff88bfdb7da8c3dc7882612ffddb8d27" dependencies = [ "anyhow", "lazy_static", @@ -7465,7 +7047,7 @@ dependencies = [ "serde", "serde_json", "static_assertions", - "zk_evm_abstractions 0.150.4", + "zk_evm_abstractions 0.150.5", ] [[package]] @@ -7496,22 +7078,22 @@ dependencies = [ [[package]] name = "zk_evm_abstractions" -version = "0.150.4" +version = "0.150.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31460aacfe65b39ac484a2a2e0bbb02baf141f65264bf48e1e4f59ab375fe933" +checksum = "93d6b0720261ab55490fe3a96e96de30d5d7b277940b52ea7f52dbf564eb1748" dependencies = [ "anyhow", "num_enum 0.6.1", "serde", "static_assertions", - "zkevm_opcode_defs 0.150.4", + "zkevm_opcode_defs 0.150.5", ] [[package]] name = "zkevm-assembly" -version = "0.150.4" +version = "0.150.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b69d09d125b94767847c4cdc4ae399654b9e2a2f9304bd8935a7033bef4b07c" +checksum = "e99106038062537c05b4e6e7754d1bbba28ba16185a3e5ee5ad22e2f8be883bb" dependencies = [ "env_logger 0.9.3", "hex", @@ -7524,7 +7106,7 @@ dependencies = [ "smallvec", "structopt", "thiserror", - "zkevm_opcode_defs 0.150.4", + "zkevm_opcode_defs 0.150.5", ] [[package]] @@ -7573,13 +7155,12 @@ dependencies = [ [[package]] name = "zkevm_circuits" -version = "0.150.4" +version = "0.150.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum 
= "abdfaa95dfe0878fda219dd17a6cc8c28711e2067785910c0e06d3ffdca78629" +checksum = "784fa7cfb51e17c5ced112bca43da30b3468b2347b7af0427ad9638759fb140e" dependencies = [ "arrayvec 0.7.4", "boojum", - "cs_derive", "derivative", "hex", "itertools 0.10.5", @@ -7588,7 +7169,8 @@ dependencies = [ "seq-macro", "serde", "smallvec", - "zkevm_opcode_defs 0.150.4", + "zkevm_opcode_defs 0.150.5", + "zksync_cs_derive", ] [[package]] @@ -7635,9 +7217,9 @@ dependencies = [ [[package]] name = "zkevm_opcode_defs" -version = "0.150.4" +version = "0.150.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb7c5c7b4481a646f8696b08cee64a8dec097509a6378d18242f81022f327f1e" +checksum = "79055eae1b6c1ab80793ed9d77d2964c9c896afa4b5dfed278cf58cd10acfe8f" dependencies = [ "bitflags 2.6.0", "blake2 0.10.6", @@ -7652,15 +7234,15 @@ dependencies = [ [[package]] name = "zkevm_test_harness" -version = "0.150.4" +version = "0.150.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9416dc5fcf7bc403d4c24d37f0e9a492a81926ff0e89a7792dc8a29de69aec1b" +checksum = "550f82d3b7448c35168dc13bfadbccd5fd306097b6e1ea01793151c1c9137a36" dependencies = [ "bincode", "circuit_definitions", - "circuit_sequencer_api 0.150.4", + "circuit_sequencer_api 0.150.5", "codegen", - "crossbeam 0.8.4", + "crossbeam", "derivative", "env_logger 0.9.3", "hex", @@ -7679,13 +7261,13 @@ dependencies = [ [[package]] name = "zksync-gpu-ffi" -version = "0.150.6" +version = "0.150.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae694dc0ad818e4d45af70b2cf579ff46f1ac938b42ee55543529beb45ba1464" +checksum = "aecd7f624185b785e9d8457986ac34685d478e2baa78417d51b102b7d0fa27fd" dependencies = [ "bindgen 0.59.2", "cmake", - "crossbeam 0.8.4", + "crossbeam", "derivative", "era_cudart_sys", "futures 0.3.30", @@ -7695,13 +7277,13 @@ dependencies = [ [[package]] name = "zksync-gpu-prover" -version = "0.150.6" +version = "0.150.7" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8156dbaf36764409cc93424d43dc86c993601d73f5aa9a5938e6552a14dc2df" +checksum = "a089b11fcdbd37065acaf427545cb50b87e6712951a10f3761b3d370e4b8f9bc" dependencies = [ "bit-vec", - "cfg-if 1.0.0", - "crossbeam 0.8.4", + "cfg-if", + "crossbeam", "franklin-crypto", "itertools 0.10.5", "num_cpus", @@ -7712,9 +7294,9 @@ dependencies = [ [[package]] name = "zksync-wrapper-prover" -version = "0.150.6" +version = "0.150.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "83975189451bfacfa97dbcce899fde9db15a0c072196a9b92ddfabbe756bab9d" +checksum = "dc764c21d4ae15c5bc2c07c14c814c5e3ba8d194ddcca543b8cec95456031832" dependencies = [ "circuit_definitions", "zkevm_test_harness", @@ -7739,11 +7321,34 @@ dependencies = [ "url", ] +[[package]] +name = "zksync_bellman" +version = "0.30.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ffa03efe9bdb137a4b36b97d1a74237e18c9ae42b755163d903a9d48c1a5d80" +dependencies = [ + "arrayvec 0.7.4", + "bit-vec", + "blake2s_simd", + "byteorder", + "cfg-if", + "crossbeam", + "futures 0.3.30", + "hex", + "lazy_static", + "num_cpus", + "rand 0.4.6", + "serde", + "smallvec", + "tiny-keccak 1.5.0", + "zksync_pairing", +] + [[package]] name = "zksync_concurrency" -version = "0.1.0-rc.12" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a49ad68bfaf6fb8542c68894b68b28be31514786549855aaa8a46b36defbb100" +checksum = "c1c8cf6c689ab5922b52d81b775cd2d9cffbfc8fb8da65985e11b06546dfb3bf" dependencies = [ "anyhow", "once_cell", @@ -7777,9 +7382,9 @@ dependencies = [ [[package]] name = "zksync_consensus_crypto" -version = "0.1.0-rc.12" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ace39bdf50b8421c4d546381fe1ecc5212f953ce61cf93d4fa69172078dbe4af" +checksum = 
"cc7baced4e811015038322dad10239f2d631d9e339e8d6b7b6e6b146bee30f41" dependencies = [ "anyhow", "blst", @@ -7790,7 +7395,6 @@ dependencies = [ "k256 0.13.3", "num-bigint 0.4.5", "num-traits", - "pairing_ce", "rand 0.4.6", "rand 0.8.5", "sha3 0.10.8", @@ -7801,9 +7405,9 @@ dependencies = [ [[package]] name = "zksync_consensus_roles" -version = "0.1.0-rc.12" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06277266e31efdc1465f6a27ce96c7435392a270978a91956b8a848732df2cfa" +checksum = "0aab4ddf62f6001903c5fe9f65afb1bdc42464928c9d1c6ce52e4d7e9944f5dc" dependencies = [ "anyhow", "bit-vec", @@ -7823,9 +7427,9 @@ dependencies = [ [[package]] name = "zksync_consensus_storage" -version = "0.1.0-rc.12" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9099b2295f550176d824b5287f2f31b7739c4d10247faec1132f1c6e9d18059c" +checksum = "7b9dbcb923fa201af03f49f70c11a923b416915d2ddf8b2de3a2e861f22898a4" dependencies = [ "anyhow", "async-trait", @@ -7843,9 +7447,9 @@ dependencies = [ [[package]] name = "zksync_consensus_utils" -version = "0.1.0-rc.12" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d624f55e2449f43b2c85588b5dd2a28b3c5ea629effc89df76e3254f8d9d2fb" +checksum = "29e69dffc0fbc7c096548c997f5ca157a490b34b3d49fd524fa3d51840f7fb22" dependencies = [ "anyhow", "rand 0.8.5", @@ -7863,8 +7467,6 @@ dependencies = [ "once_cell", "serde", "serde_json", - "zksync_config", - "zksync_env_config", "zksync_utils", ] @@ -7899,6 +7501,18 @@ dependencies = [ "zksync_utils", ] +[[package]] +name = "zksync_cs_derive" +version = "0.30.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5939e2df4288c263c706ff18ac718e984149223ad4289d6d957d767dcfc04c81" +dependencies = [ + "proc-macro-error", + "proc-macro2 1.0.85", + "quote 1.0.36", + "syn 1.0.109", +] + [[package]] name = "zksync_dal" version = 
"0.1.0" @@ -7985,11 +7599,39 @@ dependencies = [ "zksync_types", ] +[[package]] +name = "zksync_ff" +version = "0.30.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9524b06780b5e164e84b38840c7c428c739f051f35af6efc4d1285f629ceb88e" +dependencies = [ + "byteorder", + "hex", + "rand 0.4.6", + "serde", + "zksync_ff_derive", +] + +[[package]] +name = "zksync_ff_derive" +version = "0.30.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f91e58e75d65877f09f83bc3dca8f054847ae7ec4f3e64bfa610a557edd8e8e" +dependencies = [ + "num-bigint 0.4.5", + "num-integer", + "num-traits", + "proc-macro2 1.0.85", + "quote 1.0.36", + "serde", + "syn 1.0.109", +] + [[package]] name = "zksync_kzg" -version = "0.150.4" +version = "0.150.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9949f48ea1a9f9a0e73242d4d1e87e681095181827486b3fcc2cf93e5aa03280" +checksum = "edb8a9c76c172a6d639855ee342b9a670e3ba472f5ae302f771b1c3ee777dc88" dependencies = [ "boojum", "derivative", @@ -7999,7 +7641,7 @@ dependencies = [ "serde", "serde_json", "serde_with", - "zkevm_circuits 0.150.4", + "zkevm_circuits 0.150.5", ] [[package]] @@ -8016,12 +7658,11 @@ name = "zksync_multivm" version = "0.1.0" dependencies = [ "anyhow", - "circuit_sequencer_api 0.133.0", - "circuit_sequencer_api 0.140.0", - "circuit_sequencer_api 0.141.1", - "circuit_sequencer_api 0.142.0", - "circuit_sequencer_api 0.150.4", - "ethabi", + "circuit_sequencer_api 0.133.1", + "circuit_sequencer_api 0.140.3", + "circuit_sequencer_api 0.141.2", + "circuit_sequencer_api 0.142.2", + "circuit_sequencer_api 0.150.5", "hex", "itertools 0.10.5", "once_cell", @@ -8029,17 +7670,16 @@ dependencies = [ "thiserror", "tracing", "vise", - "vm2", "zk_evm 0.131.0-rc.2", "zk_evm 0.133.0", "zk_evm 0.140.0", "zk_evm 0.141.0", - "zk_evm 0.150.4", + "zk_evm 0.150.5", "zksync_contracts", - "zksync_state", "zksync_system_constants", "zksync_types", 
"zksync_utils", + "zksync_vm2", "zksync_vm_interface", ] @@ -8066,6 +7706,19 @@ dependencies = [ "zksync_types", ] +[[package]] +name = "zksync_pairing" +version = "0.30.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8412ae5574472fa567a097e183f9a01974b99dd0b5da3bfa1bbe6c57c579aa2" +dependencies = [ + "byteorder", + "cfg-if", + "rand 0.4.6", + "serde", + "zksync_ff", +] + [[package]] name = "zksync_proof_fri_compressor" version = "0.1.0" @@ -8073,7 +7726,7 @@ dependencies = [ "anyhow", "async-trait", "bincode", - "circuit_sequencer_api 0.150.4", + "circuit_sequencer_api 0.150.5", "clap 4.5.4", "ctrlc", "futures 0.3.30", @@ -8102,9 +7755,9 @@ dependencies = [ [[package]] name = "zksync_protobuf" -version = "0.1.0-rc.12" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d26fb2beb3aeafb5e9babf1acf6494662cc7157b893fa248bd151494f931d07f" +checksum = "df5467dfe2f845ca1fd6ceec623bbd32187589793d3c4023dcd2f5172369d198" dependencies = [ "anyhow", "bit-vec", @@ -8123,9 +7776,9 @@ dependencies = [ [[package]] name = "zksync_protobuf_build" -version = "0.1.0-rc.12" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "58e86c198e056d921b4f3f1d2755c23d090e942b5a70b03bcb7e7c02445aa491" +checksum = "33d35280660b11be2a4ebdf531184eb729acebfdc3368d27176ec104f8bf9c5f" dependencies = [ "anyhow", "heck 0.5.0", @@ -8184,6 +7837,7 @@ dependencies = [ "shivini", "tokio", "tracing", + "tracing-subscriber", "vise", "zkevm_test_harness", "zksync_config", @@ -8259,7 +7913,7 @@ name = "zksync_prover_interface" version = "0.1.0" dependencies = [ "chrono", - "circuit_sequencer_api 0.150.4", + "circuit_sequencer_api 0.150.5", "serde", "serde_with", "strum", @@ -8320,51 +7974,6 @@ dependencies = [ "zksync_utils", ] -[[package]] -name = "zksync_shared_metrics" -version = "0.1.0" -dependencies = [ - "rustc_version", - "tracing", - "vise", - "zksync_dal", 
- "zksync_types", -] - -[[package]] -name = "zksync_state" -version = "0.1.0" -dependencies = [ - "anyhow", - "async-trait", - "backon", - "chrono", - "itertools 0.10.5", - "mini-moka", - "once_cell", - "tokio", - "tracing", - "vise", - "zksync_dal", - "zksync_shared_metrics", - "zksync_storage", - "zksync_types", - "zksync_utils", - "zksync_vm_interface", -] - -[[package]] -name = "zksync_storage" -version = "0.1.0" -dependencies = [ - "num_cpus", - "once_cell", - "rocksdb", - "thread_local", - "tracing", - "vise", -] - [[package]] name = "zksync_system_constants" version = "0.1.0" @@ -8473,6 +8082,26 @@ dependencies = [ "vise-exporter", ] +[[package]] +name = "zksync_vm2" +version = "0.1.0" +source = "git+https://github.com/matter-labs/vm2.git?rev=cd6136c42ec56856e0abcf2a98d1a9e120161482#cd6136c42ec56856e0abcf2a98d1a9e120161482" +dependencies = [ + "enum_dispatch", + "primitive-types", + "zk_evm_abstractions 0.150.5", + "zkevm_opcode_defs 0.150.5", + "zksync_vm2_interface", +] + +[[package]] +name = "zksync_vm2_interface" +version = "0.1.0" +source = "git+https://github.com/matter-labs/vm2.git?rev=cd6136c42ec56856e0abcf2a98d1a9e120161482#cd6136c42ec56856e0abcf2a98d1a9e120161482" +dependencies = [ + "primitive-types", +] + [[package]] name = "zksync_vm_interface" version = "0.1.0" @@ -8486,7 +8115,6 @@ dependencies = [ "zksync_contracts", "zksync_system_constants", "zksync_types", - "zksync_utils", ] [[package]] @@ -8573,13 +8201,3 @@ dependencies = [ "zksync_utils", "zksync_vlog", ] - -[[package]] -name = "zstd-sys" -version = "2.0.13+zstd.1.5.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38ff0f21cfee8f97d94cef41359e0c89aa6113028ab0291aa8ca0038995a95aa" -dependencies = [ - "cc", - "pkg-config", -] diff --git a/prover/Cargo.toml b/prover/Cargo.toml index 403314cc13ca..624661adc8dc 100644 --- a/prover/Cargo.toml +++ b/prover/Cargo.toml @@ -52,17 +52,17 @@ tempfile = "3" tokio = "1" toml_edit = "0.14.4" 
tracing = "0.1" -tracing-subscriber = { version = "0.3" } +tracing-subscriber = "0.3" vise = "0.2.0" # Proving dependencies -circuit_definitions = "=0.150.4" -circuit_sequencer_api = "=0.150.4" -zkevm_test_harness = "=0.150.4" +circuit_definitions = "=0.150.5" +circuit_sequencer_api = "=0.150.5" +zkevm_test_harness = "=0.150.5" # GPU proving dependencies -wrapper_prover = { package = "zksync-wrapper-prover", version = "=0.150.6" } -shivini = "=0.150.6" +wrapper_prover = { package = "zksync-wrapper-prover", version = "=0.150.7" } +shivini = "=0.150.7" # Core workspace dependencies zksync_multivm = { path = "../core/lib/multivm", version = "0.1.0" } diff --git a/prover/crates/bin/proof_fri_compressor/src/compressor.rs b/prover/crates/bin/proof_fri_compressor/src/compressor.rs index 077347bce9be..e462097e38d0 100644 --- a/prover/crates/bin/proof_fri_compressor/src/compressor.rs +++ b/prover/crates/bin/proof_fri_compressor/src/compressor.rs @@ -59,7 +59,6 @@ impl ProofCompressor { #[tracing::instrument(skip(proof, _compression_mode))] pub fn compress_proof( - l1_batch: L1BatchNumber, proof: ZkSyncRecursionLayerProof, _compression_mode: u8, keystore: Keystore, @@ -171,16 +170,13 @@ impl JobProcessor for ProofCompressor { async fn process_job( &self, - job_id: &L1BatchNumber, + _job_id: &L1BatchNumber, job: ZkSyncRecursionLayerProof, _started_at: Instant, ) -> JoinHandle> { let compression_mode = self.compression_mode; - let block_number = *job_id; let keystore = self.keystore.clone(); - tokio::task::spawn_blocking(move || { - Self::compress_proof(block_number, job, compression_mode, keystore) - }) + tokio::task::spawn_blocking(move || Self::compress_proof(job, compression_mode, keystore)) } async fn save_result( diff --git a/prover/crates/bin/prover_cli/src/cli.rs b/prover/crates/bin/prover_cli/src/cli.rs index 41ef94980056..de5d2d2525b4 100644 --- a/prover/crates/bin/prover_cli/src/cli.rs +++ b/prover/crates/bin/prover_cli/src/cli.rs @@ -44,6 +44,8 @@ pub struct 
ProverCLIConfig { env("PLI__DB_URL") )] pub db_url: SensitiveUrl, + #[clap(default_value = "10")] + pub max_failure_attempts: u32, } #[derive(Subcommand)] diff --git a/prover/crates/bin/prover_cli/src/commands/status/batch.rs b/prover/crates/bin/prover_cli/src/commands/status/batch.rs index 797695b02278..dc63f6bf837c 100644 --- a/prover/crates/bin/prover_cli/src/commands/status/batch.rs +++ b/prover/crates/bin/prover_cli/src/commands/status/batch.rs @@ -4,8 +4,6 @@ use anyhow::Context as _; use circuit_definitions::zkevm_circuits::scheduler::aux::BaseLayerCircuitType; use clap::Args as ClapArgs; use colored::*; -use zksync_config::configs::FriProverConfig; -use zksync_env_config::FromEnv; use zksync_prover_dal::{Connection, ConnectionPool, Prover, ProverDal}; use zksync_types::{ basic_fri_types::AggregationRound, @@ -57,9 +55,9 @@ pub(crate) async fn run(args: Args, config: ProverCLIConfig) -> anyhow::Result<( } if !args.verbose { - display_batch_status(batch_data); + display_batch_status(batch_data, config.max_failure_attempts); } else { - display_batch_info(batch_data); + display_batch_info(batch_data, config.max_failure_attempts); } } @@ -200,19 +198,19 @@ async fn get_proof_compression_job_info_for_batch<'a>( .await } -fn display_batch_status(batch_data: BatchData) { - display_status_for_stage(batch_data.basic_witness_generator); - display_status_for_stage(batch_data.leaf_witness_generator); - display_status_for_stage(batch_data.node_witness_generator); - display_status_for_stage(batch_data.recursion_tip_witness_generator); - display_status_for_stage(batch_data.scheduler_witness_generator); - display_status_for_stage(batch_data.compressor); +fn display_batch_status(batch_data: BatchData, max_failure_attempts: u32) { + display_status_for_stage(batch_data.basic_witness_generator, max_failure_attempts); + display_status_for_stage(batch_data.leaf_witness_generator, max_failure_attempts); + display_status_for_stage(batch_data.node_witness_generator, 
max_failure_attempts); + display_status_for_stage( + batch_data.recursion_tip_witness_generator, + max_failure_attempts, + ); + display_status_for_stage(batch_data.scheduler_witness_generator, max_failure_attempts); + display_status_for_stage(batch_data.compressor, max_failure_attempts); } -fn display_status_for_stage(stage_info: StageInfo) { - let max_attempts = FriProverConfig::from_env() - .expect("Fail to read prover config.") - .max_attempts; +fn display_status_for_stage(stage_info: StageInfo, max_attempts: u32) { display_aggregation_round(&stage_info); let status = stage_info.witness_generator_jobs_status(max_attempts); match status { @@ -231,19 +229,19 @@ fn display_status_for_stage(stage_info: StageInfo) { } } -fn display_batch_info(batch_data: BatchData) { - display_info_for_stage(batch_data.basic_witness_generator); - display_info_for_stage(batch_data.leaf_witness_generator); - display_info_for_stage(batch_data.node_witness_generator); - display_info_for_stage(batch_data.recursion_tip_witness_generator); - display_info_for_stage(batch_data.scheduler_witness_generator); - display_info_for_stage(batch_data.compressor); +fn display_batch_info(batch_data: BatchData, max_failure_attempts: u32) { + display_info_for_stage(batch_data.basic_witness_generator, max_failure_attempts); + display_info_for_stage(batch_data.leaf_witness_generator, max_failure_attempts); + display_info_for_stage(batch_data.node_witness_generator, max_failure_attempts); + display_info_for_stage( + batch_data.recursion_tip_witness_generator, + max_failure_attempts, + ); + display_info_for_stage(batch_data.scheduler_witness_generator, max_failure_attempts); + display_info_for_stage(batch_data.compressor, max_failure_attempts); } -fn display_info_for_stage(stage_info: StageInfo) { - let max_attempts = FriProverConfig::from_env() - .expect("Fail to read prover config.") - .max_attempts; +fn display_info_for_stage(stage_info: StageInfo, max_attempts: u32) { 
display_aggregation_round(&stage_info); let status = stage_info.witness_generator_jobs_status(max_attempts); match status { diff --git a/prover/crates/bin/prover_fri/Cargo.toml b/prover/crates/bin/prover_fri/Cargo.toml index ae7853427e96..e41244cecbf7 100644 --- a/prover/crates/bin/prover_fri/Cargo.toml +++ b/prover/crates/bin/prover_fri/Cargo.toml @@ -43,6 +43,9 @@ reqwest = { workspace = true, features = ["blocking"] } regex.workspace = true clap = { workspace = true, features = ["derive"] } +[dev-dependencies] +tracing-subscriber.workspace = true + [features] default = [] gpu = ["shivini", "zksync_prover_keystore/gpu"] diff --git a/prover/crates/bin/prover_fri/src/gpu_prover_job_processor.rs b/prover/crates/bin/prover_fri/src/gpu_prover_job_processor.rs index 240251df15bf..cfd588c26662 100644 --- a/prover/crates/bin/prover_fri/src/gpu_prover_job_processor.rs +++ b/prover/crates/bin/prover_fri/src/gpu_prover_job_processor.rs @@ -8,8 +8,9 @@ pub mod gpu_prover { ProverContextConfig, }; use tokio::task::JoinHandle; - use zksync_config::configs::{fri_prover_group::FriProverGroupConfig, FriProverConfig}; - use zksync_env_config::FromEnv; + use zksync_config::configs::{ + fri_prover::SetupLoadMode as SetupLoadModeConfig, FriProverConfig, + }; use zksync_object_store::ObjectStore; use zksync_prover_dal::{ConnectionPool, ProverDal}; use zksync_prover_fri_types::{ @@ -341,38 +342,84 @@ pub mod gpu_prover { } } - pub fn load_setup_data_cache( + #[tracing::instrument(skip_all, fields(setup_load_mode = ?setup_load_mode, specialized_group_id = %specialized_group_id))] + pub async fn load_setup_data_cache( keystore: &Keystore, - config: &FriProverConfig, + setup_load_mode: SetupLoadModeConfig, + specialized_group_id: u8, + circuit_ids: &[CircuitIdRoundTuple], ) -> anyhow::Result { - Ok(match config.setup_load_mode { - zksync_config::configs::fri_prover::SetupLoadMode::FromDisk => SetupLoadMode::FromDisk, - zksync_config::configs::fri_prover::SetupLoadMode::FromMemory => { + 
Ok(match setup_load_mode { + SetupLoadModeConfig::FromDisk => SetupLoadMode::FromDisk, + SetupLoadModeConfig::FromMemory => { + anyhow::ensure!( + !circuit_ids.is_empty(), + "Circuit IDs must be provided when using FromMemory mode" + ); let mut cache = HashMap::new(); tracing::info!( "Loading setup data cache for group {}", - &config.specialized_group_id + &specialized_group_id ); - let prover_setup_metadata_list = FriProverGroupConfig::from_env() - .context("FriProverGroupConfig::from_env()")? - .get_circuit_ids_for_group_id(config.specialized_group_id) - .context( - "At least one circuit should be configured for group when running in FromMemory mode", - )?; tracing::info!( "for group {} configured setup metadata are {:?}", - &config.specialized_group_id, - prover_setup_metadata_list + &specialized_group_id, + circuit_ids ); - for prover_setup_metadata in prover_setup_metadata_list { - let key = setup_metadata_to_setup_data_key(&prover_setup_metadata); - let setup_data = keystore - .load_gpu_setup_data_for_circuit_type(key.clone()) - .context("load_gpu_setup_data_for_circuit_type()")?; - cache.insert(key, Arc::new(setup_data)); + // Load each file in parallel. Note that FS access is not necessarily parallel, but + // deserialization is (and it's not insignificant, as setup keys are large). + // Note: `collect` is important, because iterators are lazy and otherwise we won't actually + // spawn threads. 
+ let handles: Vec<_> = circuit_ids + .into_iter() + .map(|prover_setup_metadata| { + let keystore = keystore.clone(); + let prover_setup_metadata = prover_setup_metadata.clone(); + tokio::task::spawn_blocking(move || { + let key = setup_metadata_to_setup_data_key(&prover_setup_metadata); + let setup_data = keystore + .load_gpu_setup_data_for_circuit_type(key.clone()) + .context("load_gpu_setup_data_for_circuit_type()")?; + anyhow::Ok((key, Arc::new(setup_data))) + }) + }) + .collect(); + for handle in futures::future::join_all(handles).await { + let (key, setup_data) = handle.context("Key loading future panicked")??; + cache.insert(key, setup_data); } SetupLoadMode::FromMemory(cache) } }) } + + #[cfg(test)] + mod tests { + use zksync_types::basic_fri_types::AggregationRound; + + use super::*; + + #[tokio::test] + async fn test_load_setup_data_cache() { + tracing_subscriber::fmt::try_init().ok(); + + let keystore = Keystore::locate(); + let mode = SetupLoadModeConfig::FromMemory; + let specialized_group_id = 0; + let ids: Vec<_> = AggregationRound::ALL_ROUNDS + .into_iter() + .flat_map(|r| r.circuit_ids()) + .collect(); + if !keystore.is_setup_data_present(&setup_metadata_to_setup_data_key(&ids[0])) { + // We don't want this test to fail on envs where setup keys are not present. 
+ return; + } + + let start = Instant::now(); + let _cache = load_setup_data_cache(&keystore, mode, specialized_group_id, &ids) + .await + .expect("Unable to load keys"); + tracing::info!("Cache load time: {:?}", start.elapsed()); + } + } } diff --git a/prover/crates/bin/prover_fri/src/main.rs b/prover/crates/bin/prover_fri/src/main.rs index 8191653efec6..cbba8d0ddb4f 100644 --- a/prover/crates/bin/prover_fri/src/main.rs +++ b/prover/crates/bin/prover_fri/src/main.rs @@ -231,8 +231,14 @@ async fn get_prover_tasks( let keystore = Keystore::locate().with_setup_path(Some(prover_config.setup_data_path.clone().into())); - let setup_load_mode = gpu_prover::load_setup_data_cache(&keystore, &prover_config) - .context("load_setup_data_cache()")?; + let setup_load_mode = gpu_prover::load_setup_data_cache( + &keystore, + prover_config.setup_load_mode, + prover_config.specialized_group_id, + &circuit_ids_for_round_to_be_proven, + ) + .await + .context("load_setup_data_cache()")?; let witness_vector_queue = FixedSizeQueue::new(prover_config.queue_capacity); let shared_witness_vector_queue = Arc::new(Mutex::new(witness_vector_queue)); let consumer = shared_witness_vector_queue.clone(); diff --git a/prover/crates/bin/prover_fri/src/utils.rs b/prover/crates/bin/prover_fri/src/utils.rs index 2941c15439a9..181dc857c364 100644 --- a/prover/crates/bin/prover_fri/src/utils.rs +++ b/prover/crates/bin/prover_fri/src/utils.rs @@ -143,9 +143,19 @@ pub fn verify_proof( pub fn setup_metadata_to_setup_data_key( setup_metadata: &CircuitIdRoundTuple, ) -> ProverServiceDataKey { - ProverServiceDataKey { - circuit_id: setup_metadata.circuit_id, - round: setup_metadata.aggregation_round.into(), + let round = setup_metadata.aggregation_round.into(); + match round { + AggregationRound::NodeAggregation => { + // For node aggregation only one key exists for all circuit types + ProverServiceDataKey { + circuit_id: ZkSyncRecursionLayerStorageType::NodeLayerCircuit as u8, + round, + } + } + _ => 
ProverServiceDataKey { + circuit_id: setup_metadata.circuit_id, + round, + }, } } diff --git a/prover/crates/bin/witness_generator/src/artifacts.rs b/prover/crates/bin/witness_generator/src/artifacts.rs new file mode 100644 index 000000000000..f509d3b2f64a --- /dev/null +++ b/prover/crates/bin/witness_generator/src/artifacts.rs @@ -0,0 +1,50 @@ +use std::time::Instant; + +use async_trait::async_trait; +use zksync_object_store::ObjectStore; +use zksync_prover_dal::{ConnectionPool, Prover}; + +#[derive(Debug)] +pub(crate) struct AggregationBlobUrls { + pub aggregations_urls: String, + pub circuit_ids_and_urls: Vec<(u8, String)>, +} + +#[derive(Debug)] +pub(crate) struct SchedulerBlobUrls { + pub circuit_ids_and_urls: Vec<(u8, String)>, + pub closed_form_inputs_and_urls: Vec<(u8, String, usize)>, + pub scheduler_witness_url: String, +} + +pub(crate) enum BlobUrls { + Url(String), + Aggregation(AggregationBlobUrls), + Scheduler(SchedulerBlobUrls), +} + +#[async_trait] +pub(crate) trait ArtifactsManager { + type InputMetadata; + type InputArtifacts; + type OutputArtifacts; + + async fn get_artifacts( + metadata: &Self::InputMetadata, + object_store: &dyn ObjectStore, + ) -> anyhow::Result; + + async fn save_artifacts( + job_id: u32, + artifacts: Self::OutputArtifacts, + object_store: &dyn ObjectStore, + ) -> BlobUrls; + + async fn update_database( + connection_pool: &ConnectionPool, + job_id: u32, + started_at: Instant, + blob_urls: BlobUrls, + artifacts: Self::OutputArtifacts, + ) -> anyhow::Result<()>; +} diff --git a/prover/crates/bin/witness_generator/src/basic_circuits/artifacts.rs b/prover/crates/bin/witness_generator/src/basic_circuits/artifacts.rs new file mode 100644 index 000000000000..3447659f8296 --- /dev/null +++ b/prover/crates/bin/witness_generator/src/basic_circuits/artifacts.rs @@ -0,0 +1,108 @@ +use std::time::Instant; + +use async_trait::async_trait; +use zksync_object_store::ObjectStore; +use zksync_prover_dal::{ConnectionPool, Prover, ProverDal}; 
+use zksync_prover_fri_types::AuxOutputWitnessWrapper; +use zksync_prover_fri_utils::get_recursive_layer_circuit_id_for_base_layer; +use zksync_types::{basic_fri_types::AggregationRound, L1BatchNumber}; + +use crate::{ + artifacts::{ArtifactsManager, BlobUrls}, + basic_circuits::{BasicCircuitArtifacts, BasicWitnessGenerator, BasicWitnessGeneratorJob}, + utils::SchedulerPartialInputWrapper, +}; + +#[async_trait] +impl ArtifactsManager for BasicWitnessGenerator { + type InputMetadata = L1BatchNumber; + type InputArtifacts = BasicWitnessGeneratorJob; + type OutputArtifacts = BasicCircuitArtifacts; + + async fn get_artifacts( + metadata: &Self::InputMetadata, + object_store: &dyn ObjectStore, + ) -> anyhow::Result { + let l1_batch_number = *metadata; + let data = object_store.get(l1_batch_number).await.unwrap(); + Ok(BasicWitnessGeneratorJob { + block_number: l1_batch_number, + data, + }) + } + + async fn save_artifacts( + job_id: u32, + artifacts: Self::OutputArtifacts, + object_store: &dyn ObjectStore, + ) -> BlobUrls { + let aux_output_witness_wrapper = AuxOutputWitnessWrapper(artifacts.aux_output_witness); + object_store + .put(L1BatchNumber(job_id), &aux_output_witness_wrapper) + .await + .unwrap(); + let wrapper = SchedulerPartialInputWrapper(artifacts.scheduler_witness); + let url = object_store + .put(L1BatchNumber(job_id), &wrapper) + .await + .unwrap(); + + BlobUrls::Url(url) + } + + #[tracing::instrument(skip_all, fields(l1_batch = %job_id))] + async fn update_database( + connection_pool: &ConnectionPool, + job_id: u32, + started_at: Instant, + blob_urls: BlobUrls, + _artifacts: Self::OutputArtifacts, + ) -> anyhow::Result<()> { + let blob_urls = match blob_urls { + BlobUrls::Scheduler(blobs) => blobs, + _ => unreachable!(), + }; + + let mut connection = connection_pool + .connection() + .await + .expect("failed to get database connection"); + let mut transaction = connection + .start_transaction() + .await + .expect("failed to get database transaction"); + 
let protocol_version_id = transaction + .fri_witness_generator_dal() + .protocol_version_for_l1_batch(L1BatchNumber(job_id)) + .await; + transaction + .fri_prover_jobs_dal() + .insert_prover_jobs( + L1BatchNumber(job_id), + blob_urls.circuit_ids_and_urls, + AggregationRound::BasicCircuits, + 0, + protocol_version_id, + ) + .await; + transaction + .fri_witness_generator_dal() + .create_aggregation_jobs( + L1BatchNumber(job_id), + &blob_urls.closed_form_inputs_and_urls, + &blob_urls.scheduler_witness_url, + get_recursive_layer_circuit_id_for_base_layer, + protocol_version_id, + ) + .await; + transaction + .fri_witness_generator_dal() + .mark_witness_job_as_successful(L1BatchNumber(job_id), started_at.elapsed()) + .await; + transaction + .commit() + .await + .expect("failed to commit database transaction"); + Ok(()) + } +} diff --git a/prover/crates/bin/witness_generator/src/basic_circuits/job_processor.rs b/prover/crates/bin/witness_generator/src/basic_circuits/job_processor.rs new file mode 100644 index 000000000000..08732689e3a6 --- /dev/null +++ b/prover/crates/bin/witness_generator/src/basic_circuits/job_processor.rs @@ -0,0 +1,153 @@ +use std::{sync::Arc, time::Instant}; + +use anyhow::Context as _; +use tracing::Instrument; +use zksync_prover_dal::ProverDal; +use zksync_prover_fri_types::{get_current_pod_name, AuxOutputWitnessWrapper}; +use zksync_queued_job_processor::{async_trait, JobProcessor}; +use zksync_types::{basic_fri_types::AggregationRound, L1BatchNumber}; + +use crate::{ + artifacts::{ArtifactsManager, BlobUrls, SchedulerBlobUrls}, + basic_circuits::{BasicCircuitArtifacts, BasicWitnessGenerator, BasicWitnessGeneratorJob}, + metrics::WITNESS_GENERATOR_METRICS, +}; + +#[async_trait] +impl JobProcessor for BasicWitnessGenerator { + type Job = BasicWitnessGeneratorJob; + type JobId = L1BatchNumber; + // The artifact is optional to support skipping blocks when sampling is enabled. 
+ type JobArtifacts = Option; + + const SERVICE_NAME: &'static str = "fri_basic_circuit_witness_generator"; + + async fn get_next_job(&self) -> anyhow::Result> { + let mut prover_connection = self.prover_connection_pool.connection().await?; + let last_l1_batch_to_process = self.config.last_l1_batch_to_process(); + let pod_name = get_current_pod_name(); + match prover_connection + .fri_witness_generator_dal() + .get_next_basic_circuit_witness_job( + last_l1_batch_to_process, + self.protocol_version, + &pod_name, + ) + .await + { + Some(block_number) => { + tracing::info!( + "Processing FRI basic witness-gen for block {}", + block_number + ); + let started_at = Instant::now(); + let job = Self::get_artifacts(&block_number, &*self.object_store).await?; + + WITNESS_GENERATOR_METRICS.blob_fetch_time[&AggregationRound::BasicCircuits.into()] + .observe(started_at.elapsed()); + + Ok(Some((block_number, job))) + } + None => Ok(None), + } + } + + async fn save_failure(&self, job_id: L1BatchNumber, _started_at: Instant, error: String) -> () { + self.prover_connection_pool + .connection() + .await + .unwrap() + .fri_witness_generator_dal() + .mark_witness_job_failed(&error, job_id) + .await; + } + + #[allow(clippy::async_yields_async)] + async fn process_job( + &self, + _job_id: &Self::JobId, + job: BasicWitnessGeneratorJob, + started_at: Instant, + ) -> tokio::task::JoinHandle>> { + let object_store = Arc::clone(&self.object_store); + let max_circuits_in_flight = self.config.max_circuits_in_flight; + tokio::spawn(async move { + let block_number = job.block_number; + Ok( + Self::process_job_impl(object_store, job, started_at, max_circuits_in_flight) + .instrument(tracing::info_span!("basic_circuit", %block_number)) + .await, + ) + }) + } + + #[tracing::instrument(skip_all, fields(l1_batch = %job_id))] + async fn save_result( + &self, + job_id: L1BatchNumber, + started_at: Instant, + optional_artifacts: Option, + ) -> anyhow::Result<()> { + match optional_artifacts { + None => 
Ok(()), + Some(artifacts) => { + let blob_started_at = Instant::now(); + let circuit_urls = artifacts.circuit_urls.clone(); + let queue_urls = artifacts.queue_urls.clone(); + + let aux_output_witness_wrapper = + AuxOutputWitnessWrapper(artifacts.aux_output_witness.clone()); + if self.config.shall_save_to_public_bucket { + self.public_blob_store.as_deref() + .expect("public_object_store shall not be empty while running with shall_save_to_public_bucket config") + .put(job_id, &aux_output_witness_wrapper) + .await + .unwrap(); + } + + let scheduler_witness_url = + match Self::save_artifacts(job_id.0, artifacts.clone(), &*self.object_store) + .await + { + BlobUrls::Url(url) => url, + _ => unreachable!(), + }; + + WITNESS_GENERATOR_METRICS.blob_save_time[&AggregationRound::BasicCircuits.into()] + .observe(blob_started_at.elapsed()); + + Self::update_database( + &self.prover_connection_pool, + job_id.0, + started_at, + BlobUrls::Scheduler(SchedulerBlobUrls { + circuit_ids_and_urls: circuit_urls, + closed_form_inputs_and_urls: queue_urls, + scheduler_witness_url, + }), + artifacts, + ) + .await?; + Ok(()) + } + } + } + + fn max_attempts(&self) -> u32 { + self.config.max_attempts + } + + async fn get_job_attempts(&self, job_id: &L1BatchNumber) -> anyhow::Result { + let mut prover_storage = self + .prover_connection_pool + .connection() + .await + .context("failed to acquire DB connection for BasicWitnessGenerator")?; + prover_storage + .fri_witness_generator_dal() + .get_basic_circuit_witness_job_attempts(*job_id) + .await + .map(|attempts| attempts.unwrap_or(0)) + .context("failed to get job attempts for BasicWitnessGenerator") + } +} diff --git a/prover/crates/bin/witness_generator/src/basic_circuits.rs b/prover/crates/bin/witness_generator/src/basic_circuits/mod.rs similarity index 63% rename from prover/crates/bin/witness_generator/src/basic_circuits.rs rename to prover/crates/bin/witness_generator/src/basic_circuits/mod.rs index a1adfed438a6..9b97ed1cc11a 100644 --- 
a/prover/crates/bin/witness_generator/src/basic_circuits.rs +++ b/prover/crates/bin/witness_generator/src/basic_circuits/mod.rs @@ -1,49 +1,43 @@ use std::{ - collections::{hash_map::DefaultHasher, HashSet}, - hash::{Hash, Hasher}, + collections::HashSet, + hash::{DefaultHasher, Hash, Hasher}, sync::Arc, time::Instant, }; -use anyhow::Context as _; -use async_trait::async_trait; use circuit_definitions::{ circuit_definitions::base_layer::{ZkSyncBaseLayerCircuit, ZkSyncBaseLayerStorage}, encodings::recursion_request::RecursionQueueSimulator, - zkevm_circuits::fsm_input_output::ClosedFormInputCompactFormWitness, + zkevm_circuits::{ + fsm_input_output::ClosedFormInputCompactFormWitness, + scheduler::{ + block_header::BlockAuxilaryOutputWitness, input::SchedulerCircuitInstanceWitness, + }, + }, }; use tokio::sync::Semaphore; use tracing::Instrument; -use zkevm_test_harness::{ - geometry_config::get_geometry_config, witness::oracle::WitnessGenerationArtifact, -}; +use zkevm_test_harness::witness::oracle::WitnessGenerationArtifact; use zksync_config::configs::FriWitnessGeneratorConfig; use zksync_multivm::{ - interface::storage::StorageView, - vm_latest::{constants::MAX_CYCLES_FOR_TX, HistoryDisabled, StorageOracle as VmStorageOracle}, -}; -use zksync_object_store::ObjectStore; -use zksync_prover_dal::{ConnectionPool, Prover, ProverDal}; -use zksync_prover_fri_types::{ - circuit_definitions::{ + circuit_sequencer_api_latest::{ boojum::{ field::goldilocks::{GoldilocksExt2, GoldilocksField}, gadgets::recursion::recursive_tree_hasher::CircuitGoldilocksPoseidon2Sponge, }, - zkevm_circuits::scheduler::{ - block_header::BlockAuxilaryOutputWitness, input::SchedulerCircuitInstanceWitness, - }, + geometry_config::get_geometry_config, }, - get_current_pod_name, - keys::ClosedFormInputKey, - AuxOutputWitnessWrapper, CircuitAuxData, + interface::storage::StorageView, + vm_latest::{constants::MAX_CYCLES_FOR_TX, HistoryDisabled, StorageOracle as VmStorageOracle}, + 
zk_evm_latest::ethereum_types::Address, }; -use zksync_prover_fri_utils::get_recursive_layer_circuit_id_for_base_layer; +use zksync_object_store::ObjectStore; +use zksync_prover_dal::{ConnectionPool, Prover}; +use zksync_prover_fri_types::{keys::ClosedFormInputKey, CircuitAuxData}; use zksync_prover_interface::inputs::WitnessInputData; -use zksync_queued_job_processor::JobProcessor; +use zksync_system_constants::BOOTLOADER_ADDRESS; use zksync_types::{ - basic_fri_types::AggregationRound, protocol_version::ProtocolSemanticVersion, Address, - L1BatchNumber, BOOTLOADER_ADDRESS, + basic_fri_types::AggregationRound, protocol_version::ProtocolSemanticVersion, L1BatchNumber, }; use crate::{ @@ -52,33 +46,30 @@ use crate::{ storage_oracle::StorageOracle, utils::{ expand_bootloader_contents, save_circuit, save_ram_premutation_queue_witness, - ClosedFormInputWrapper, SchedulerPartialInputWrapper, KZG_TRUSTED_SETUP_FILE, + ClosedFormInputWrapper, KZG_TRUSTED_SETUP_FILE, }, witness::WitnessStorage, }; +mod artifacts; +pub mod job_processor; + +#[derive(Clone)] pub struct BasicCircuitArtifacts { - circuit_urls: Vec<(u8, String)>, - queue_urls: Vec<(u8, String, usize)>, - scheduler_witness: SchedulerCircuitInstanceWitness< + pub(super) circuit_urls: Vec<(u8, String)>, + pub(super) queue_urls: Vec<(u8, String, usize)>, + pub(super) scheduler_witness: SchedulerCircuitInstanceWitness< GoldilocksField, CircuitGoldilocksPoseidon2Sponge, GoldilocksExt2, >, - aux_output_witness: BlockAuxilaryOutputWitness, -} - -#[derive(Debug)] -struct BlobUrls { - circuit_ids_and_urls: Vec<(u8, String)>, - closed_form_inputs_and_urls: Vec<(u8, String, usize)>, - scheduler_witness_url: String, + pub(super) aux_output_witness: BlockAuxilaryOutputWitness, } #[derive(Clone)] pub struct BasicWitnessGeneratorJob { - block_number: L1BatchNumber, - job: WitnessInputData, + pub(super) block_number: L1BatchNumber, + pub(super) data: WitnessInputData, } #[derive(Debug)] @@ -90,6 +81,17 @@ pub struct 
BasicWitnessGenerator { protocol_version: ProtocolSemanticVersion, } +type Witness = ( + Vec<(u8, String)>, + Vec<(u8, String, usize)>, + SchedulerCircuitInstanceWitness< + GoldilocksField, + CircuitGoldilocksPoseidon2Sponge, + GoldilocksExt2, + >, + BlockAuxilaryOutputWitness, +); + impl BasicWitnessGenerator { pub fn new( config: FriWitnessGeneratorConfig, @@ -113,7 +115,10 @@ impl BasicWitnessGenerator { started_at: Instant, max_circuits_in_flight: usize, ) -> Option { - let BasicWitnessGeneratorJob { block_number, job } = basic_job; + let BasicWitnessGeneratorJob { + block_number, + data: job, + } = basic_job; tracing::info!( "Starting witness generation of type {:?} for block {}", @@ -134,135 +139,8 @@ impl BasicWitnessGenerator { } } -#[async_trait] -impl JobProcessor for BasicWitnessGenerator { - type Job = BasicWitnessGeneratorJob; - type JobId = L1BatchNumber; - // The artifact is optional to support skipping blocks when sampling is enabled. - type JobArtifacts = Option; - - const SERVICE_NAME: &'static str = "fri_basic_circuit_witness_generator"; - - async fn get_next_job(&self) -> anyhow::Result> { - let mut prover_connection = self.prover_connection_pool.connection().await?; - let last_l1_batch_to_process = self.config.last_l1_batch_to_process(); - let pod_name = get_current_pod_name(); - match prover_connection - .fri_witness_generator_dal() - .get_next_basic_circuit_witness_job( - last_l1_batch_to_process, - self.protocol_version, - &pod_name, - ) - .await - { - Some(block_number) => { - tracing::info!( - "Processing FRI basic witness-gen for block {}", - block_number - ); - let started_at = Instant::now(); - let job = get_artifacts(block_number, &*self.object_store).await; - - WITNESS_GENERATOR_METRICS.blob_fetch_time[&AggregationRound::BasicCircuits.into()] - .observe(started_at.elapsed()); - - Ok(Some((block_number, job))) - } - None => Ok(None), - } - } - - async fn save_failure(&self, job_id: L1BatchNumber, _started_at: Instant, error: String) -> 
() { - self.prover_connection_pool - .connection() - .await - .unwrap() - .fri_witness_generator_dal() - .mark_witness_job_failed(&error, job_id) - .await; - } - - #[allow(clippy::async_yields_async)] - async fn process_job( - &self, - _job_id: &Self::JobId, - job: BasicWitnessGeneratorJob, - started_at: Instant, - ) -> tokio::task::JoinHandle>> { - let object_store = Arc::clone(&self.object_store); - let max_circuits_in_flight = self.config.max_circuits_in_flight; - tokio::spawn(async move { - let block_number = job.block_number; - Ok( - Self::process_job_impl(object_store, job, started_at, max_circuits_in_flight) - .instrument(tracing::info_span!("basic_circuit", %block_number)) - .await, - ) - }) - } - - #[tracing::instrument(skip_all, fields(l1_batch = %job_id))] - async fn save_result( - &self, - job_id: L1BatchNumber, - started_at: Instant, - optional_artifacts: Option, - ) -> anyhow::Result<()> { - match optional_artifacts { - None => Ok(()), - Some(artifacts) => { - let blob_started_at = Instant::now(); - let scheduler_witness_url = save_scheduler_artifacts( - job_id, - artifacts.scheduler_witness, - artifacts.aux_output_witness, - &*self.object_store, - self.public_blob_store.as_deref(), - self.config.shall_save_to_public_bucket, - ) - .await; - - WITNESS_GENERATOR_METRICS.blob_save_time[&AggregationRound::BasicCircuits.into()] - .observe(blob_started_at.elapsed()); - - update_database( - &self.prover_connection_pool, - started_at, - job_id, - BlobUrls { - circuit_ids_and_urls: artifacts.circuit_urls, - closed_form_inputs_and_urls: artifacts.queue_urls, - scheduler_witness_url, - }, - ) - .await; - Ok(()) - } - } - } - - fn max_attempts(&self) -> u32 { - self.config.max_attempts - } - - async fn get_job_attempts(&self, job_id: &L1BatchNumber) -> anyhow::Result { - let mut prover_storage = self - .prover_connection_pool - .connection() - .await - .context("failed to acquire DB connection for BasicWitnessGenerator")?; - prover_storage - 
.fri_witness_generator_dal() - .get_basic_circuit_witness_job_attempts(*job_id) - .await - .map(|attempts| attempts.unwrap_or(0)) - .context("failed to get job attempts for BasicWitnessGenerator") - } -} - #[tracing::instrument(skip_all, fields(l1_batch = %block_number))] -async fn process_basic_circuits_job( +pub(super) async fn process_basic_circuits_job( object_store: Arc, started_at: Instant, block_number: L1BatchNumber, @@ -287,93 +165,6 @@ async fn process_basic_circuits_job( } } -#[tracing::instrument(skip_all, fields(l1_batch = %block_number))] -async fn update_database( - prover_connection_pool: &ConnectionPool, - started_at: Instant, - block_number: L1BatchNumber, - blob_urls: BlobUrls, -) { - let mut connection = prover_connection_pool - .connection() - .await - .expect("failed to get database connection"); - let mut transaction = connection - .start_transaction() - .await - .expect("failed to get database transaction"); - let protocol_version_id = transaction - .fri_witness_generator_dal() - .protocol_version_for_l1_batch(block_number) - .await; - transaction - .fri_prover_jobs_dal() - .insert_prover_jobs( - block_number, - blob_urls.circuit_ids_and_urls, - AggregationRound::BasicCircuits, - 0, - protocol_version_id, - ) - .await; - transaction - .fri_witness_generator_dal() - .create_aggregation_jobs( - block_number, - &blob_urls.closed_form_inputs_and_urls, - &blob_urls.scheduler_witness_url, - get_recursive_layer_circuit_id_for_base_layer, - protocol_version_id, - ) - .await; - transaction - .fri_witness_generator_dal() - .mark_witness_job_as_successful(block_number, started_at.elapsed()) - .await; - transaction - .commit() - .await - .expect("failed to commit database transaction"); -} - -#[tracing::instrument(skip_all, fields(l1_batch = %block_number))] -async fn get_artifacts( - block_number: L1BatchNumber, - object_store: &dyn ObjectStore, -) -> BasicWitnessGeneratorJob { - let job = object_store.get(block_number).await.unwrap(); - 
BasicWitnessGeneratorJob { block_number, job } -} - -#[tracing::instrument(skip_all, fields(l1_batch = %block_number))] -async fn save_scheduler_artifacts( - block_number: L1BatchNumber, - scheduler_partial_input: SchedulerCircuitInstanceWitness< - GoldilocksField, - CircuitGoldilocksPoseidon2Sponge, - GoldilocksExt2, - >, - aux_output_witness: BlockAuxilaryOutputWitness, - object_store: &dyn ObjectStore, - public_object_store: Option<&dyn ObjectStore>, - shall_save_to_public_bucket: bool, -) -> String { - let aux_output_witness_wrapper = AuxOutputWitnessWrapper(aux_output_witness); - if shall_save_to_public_bucket { - public_object_store - .expect("public_object_store shall not be empty while running with shall_save_to_public_bucket config") - .put(block_number, &aux_output_witness_wrapper) - .await - .unwrap(); - } - object_store - .put(block_number, &aux_output_witness_wrapper) - .await - .unwrap(); - let wrapper = SchedulerPartialInputWrapper(scheduler_partial_input); - object_store.put(block_number, &wrapper).await.unwrap() -} - #[tracing::instrument(skip_all, fields(l1_batch = %block_number, circuit_id = %circuit_id))] async fn save_recursion_queue( block_number: L1BatchNumber, @@ -396,17 +187,6 @@ async fn save_recursion_queue( (circuit_id, blob_url, basic_circuit_count) } -type Witness = ( - Vec<(u8, String)>, - Vec<(u8, String, usize)>, - SchedulerCircuitInstanceWitness< - GoldilocksField, - CircuitGoldilocksPoseidon2Sponge, - GoldilocksExt2, - >, - BlockAuxilaryOutputWitness, -); - #[tracing::instrument(skip_all, fields(l1_batch = %block_number))] async fn generate_witness( block_number: L1BatchNumber, diff --git a/prover/crates/bin/witness_generator/src/leaf_aggregation/artifacts.rs b/prover/crates/bin/witness_generator/src/leaf_aggregation/artifacts.rs new file mode 100644 index 000000000000..a94587d00ec6 --- /dev/null +++ b/prover/crates/bin/witness_generator/src/leaf_aggregation/artifacts.rs @@ -0,0 +1,150 @@ +use std::time::Instant; + +use 
async_trait::async_trait; +use zksync_object_store::ObjectStore; +use zksync_prover_dal::{ConnectionPool, Prover, ProverDal}; +use zksync_prover_fri_types::keys::ClosedFormInputKey; +use zksync_prover_fri_utils::get_recursive_layer_circuit_id_for_base_layer; +use zksync_types::{basic_fri_types::AggregationRound, prover_dal::LeafAggregationJobMetadata}; + +use crate::{ + artifacts::{AggregationBlobUrls, ArtifactsManager, BlobUrls}, + leaf_aggregation::{LeafAggregationArtifacts, LeafAggregationWitnessGenerator}, + metrics::WITNESS_GENERATOR_METRICS, + utils::{save_node_aggregations_artifacts, ClosedFormInputWrapper}, +}; + +#[async_trait] +impl ArtifactsManager for LeafAggregationWitnessGenerator { + type InputMetadata = LeafAggregationJobMetadata; + type InputArtifacts = ClosedFormInputWrapper; + type OutputArtifacts = LeafAggregationArtifacts; + + async fn get_artifacts( + metadata: &Self::InputMetadata, + object_store: &dyn ObjectStore, + ) -> anyhow::Result { + let key = ClosedFormInputKey { + block_number: metadata.block_number, + circuit_id: metadata.circuit_id, + }; + + let artifacts = object_store + .get(key) + .await + .unwrap_or_else(|_| panic!("leaf aggregation job artifacts missing: {:?}", key)); + + Ok(artifacts) + } + + #[tracing::instrument( + skip_all, + fields(l1_batch = %artifacts.block_number, circuit_id = %artifacts.circuit_id) + )] + async fn save_artifacts( + _job_id: u32, + artifacts: Self::OutputArtifacts, + object_store: &dyn ObjectStore, + ) -> BlobUrls { + let started_at = Instant::now(); + let aggregations_urls = save_node_aggregations_artifacts( + artifacts.block_number, + get_recursive_layer_circuit_id_for_base_layer(artifacts.circuit_id), + 0, + artifacts.aggregations, + object_store, + ) + .await; + WITNESS_GENERATOR_METRICS.blob_save_time[&AggregationRound::LeafAggregation.into()] + .observe(started_at.elapsed()); + + BlobUrls::Aggregation(AggregationBlobUrls { + aggregations_urls, + circuit_ids_and_urls: 
artifacts.circuit_ids_and_urls, + }) + } + + #[tracing::instrument( + skip_all, + fields(l1_batch = %job_id) + )] + async fn update_database( + connection_pool: &ConnectionPool, + job_id: u32, + started_at: Instant, + blob_urls: BlobUrls, + artifacts: Self::OutputArtifacts, + ) -> anyhow::Result<()> { + tracing::info!( + "Updating database for job_id {}, block {} with circuit id {}", + job_id, + artifacts.block_number.0, + artifacts.circuit_id, + ); + + let blob_urls = match blob_urls { + BlobUrls::Aggregation(blob_urls) => blob_urls, + _ => panic!("Unexpected blob urls type"), + }; + + let mut prover_connection = connection_pool.connection().await.unwrap(); + let mut transaction = prover_connection.start_transaction().await.unwrap(); + let number_of_dependent_jobs = blob_urls.circuit_ids_and_urls.len(); + let protocol_version_id = transaction + .fri_witness_generator_dal() + .protocol_version_for_l1_batch(artifacts.block_number) + .await; + tracing::info!( + "Inserting {} prover jobs for job_id {}, block {} with circuit id {}", + blob_urls.circuit_ids_and_urls.len(), + job_id, + artifacts.block_number.0, + artifacts.circuit_id, + ); + transaction + .fri_prover_jobs_dal() + .insert_prover_jobs( + artifacts.block_number, + blob_urls.circuit_ids_and_urls, + AggregationRound::LeafAggregation, + 0, + protocol_version_id, + ) + .await; + tracing::info!( + "Updating node aggregation jobs url for job_id {}, block {} with circuit id {}", + job_id, + artifacts.block_number.0, + artifacts.circuit_id, + ); + transaction + .fri_witness_generator_dal() + .update_node_aggregation_jobs_url( + artifacts.block_number, + get_recursive_layer_circuit_id_for_base_layer(artifacts.circuit_id), + number_of_dependent_jobs, + 0, + blob_urls.aggregations_urls, + ) + .await; + tracing::info!( + "Marking leaf aggregation job as successful for job id {}, block {} with circuit id {}", + job_id, + artifacts.block_number.0, + artifacts.circuit_id, + ); + transaction + .fri_witness_generator_dal() 
+ .mark_leaf_aggregation_as_successful(job_id, started_at.elapsed()) + .await; + + tracing::info!( + "Committing transaction for job_id {}, block {} with circuit id {}", + job_id, + artifacts.block_number.0, + artifacts.circuit_id, + ); + transaction.commit().await?; + Ok(()) + } +} diff --git a/prover/crates/bin/witness_generator/src/leaf_aggregation/job_processor.rs b/prover/crates/bin/witness_generator/src/leaf_aggregation/job_processor.rs new file mode 100644 index 000000000000..e032084151eb --- /dev/null +++ b/prover/crates/bin/witness_generator/src/leaf_aggregation/job_processor.rs @@ -0,0 +1,124 @@ +use std::time::Instant; + +use anyhow::Context as _; +use async_trait::async_trait; +use zksync_prover_dal::ProverDal; +use zksync_prover_fri_types::get_current_pod_name; +use zksync_queued_job_processor::JobProcessor; +use zksync_types::basic_fri_types::AggregationRound; + +use crate::{ + artifacts::ArtifactsManager, + leaf_aggregation::{ + prepare_leaf_aggregation_job, LeafAggregationArtifacts, LeafAggregationWitnessGenerator, + LeafAggregationWitnessGeneratorJob, + }, + metrics::WITNESS_GENERATOR_METRICS, +}; + +#[async_trait] +impl JobProcessor for LeafAggregationWitnessGenerator { + type Job = LeafAggregationWitnessGeneratorJob; + type JobId = u32; + type JobArtifacts = LeafAggregationArtifacts; + + const SERVICE_NAME: &'static str = "fri_leaf_aggregation_witness_generator"; + + async fn get_next_job(&self) -> anyhow::Result> { + let mut prover_connection = self.prover_connection_pool.connection().await?; + let pod_name = get_current_pod_name(); + let Some(metadata) = prover_connection + .fri_witness_generator_dal() + .get_next_leaf_aggregation_job(self.protocol_version, &pod_name) + .await + else { + return Ok(None); + }; + tracing::info!("Processing leaf aggregation job {:?}", metadata.id); + Ok(Some(( + metadata.id, + prepare_leaf_aggregation_job(metadata, &*self.object_store, self.keystore.clone()) + .await + .context("prepare_leaf_aggregation_job()")?, 
+ ))) + } + + async fn save_failure(&self, job_id: u32, _started_at: Instant, error: String) -> () { + self.prover_connection_pool + .connection() + .await + .unwrap() + .fri_witness_generator_dal() + .mark_leaf_aggregation_job_failed(&error, job_id) + .await; + } + + #[allow(clippy::async_yields_async)] + async fn process_job( + &self, + _job_id: &Self::JobId, + job: LeafAggregationWitnessGeneratorJob, + started_at: Instant, + ) -> tokio::task::JoinHandle> { + let object_store = self.object_store.clone(); + let max_circuits_in_flight = self.config.max_circuits_in_flight; + tokio::spawn(async move { + Ok(Self::process_job_impl(job, started_at, object_store, max_circuits_in_flight).await) + }) + } + + async fn save_result( + &self, + job_id: u32, + started_at: Instant, + artifacts: LeafAggregationArtifacts, + ) -> anyhow::Result<()> { + let block_number = artifacts.block_number; + let circuit_id = artifacts.circuit_id; + tracing::info!( + "Saving leaf aggregation artifacts for block {} with circuit {}", + block_number.0, + circuit_id, + ); + + let blob_save_started_at = Instant::now(); + + let blob_urls = Self::save_artifacts(job_id, artifacts.clone(), &*self.object_store).await; + + WITNESS_GENERATOR_METRICS.blob_save_time[&AggregationRound::LeafAggregation.into()] + .observe(blob_save_started_at.elapsed()); + + tracing::info!( + "Saved leaf aggregation artifacts for block {} with circuit {}", + block_number.0, + circuit_id, + ); + Self::update_database( + &self.prover_connection_pool, + job_id, + started_at, + blob_urls, + artifacts, + ) + .await?; + Ok(()) + } + + fn max_attempts(&self) -> u32 { + self.config.max_attempts + } + + async fn get_job_attempts(&self, job_id: &u32) -> anyhow::Result { + let mut prover_storage = self + .prover_connection_pool + .connection() + .await + .context("failed to acquire DB connection for LeafAggregationWitnessGenerator")?; + prover_storage + .fri_witness_generator_dal() + .get_leaf_aggregation_job_attempts(*job_id) + .await + 
.map(|attempts| attempts.unwrap_or(0)) + .context("failed to get job attempts for LeafAggregationWitnessGenerator") + } +} diff --git a/prover/crates/bin/witness_generator/src/leaf_aggregation.rs b/prover/crates/bin/witness_generator/src/leaf_aggregation/mod.rs similarity index 52% rename from prover/crates/bin/witness_generator/src/leaf_aggregation.rs rename to prover/crates/bin/witness_generator/src/leaf_aggregation/mod.rs index 503c46e41bbd..d669a4cc97e3 100644 --- a/prover/crates/bin/witness_generator/src/leaf_aggregation.rs +++ b/prover/crates/bin/witness_generator/src/leaf_aggregation/mod.rs @@ -1,7 +1,6 @@ use std::{sync::Arc, time::Instant}; use anyhow::Context as _; -use async_trait::async_trait; use circuit_definitions::circuit_definitions::recursion_layer::base_circuit_type_into_recursive_leaf_circuit_type; use tokio::sync::Semaphore; use zkevm_test_harness::{ @@ -12,7 +11,7 @@ use zkevm_test_harness::{ }; use zksync_config::configs::FriWitnessGeneratorConfig; use zksync_object_store::ObjectStore; -use zksync_prover_dal::{ConnectionPool, Prover, ProverDal}; +use zksync_prover_dal::{ConnectionPool, Prover}; use zksync_prover_fri_types::{ circuit_definitions::{ boojum::field::goldilocks::GoldilocksField, @@ -22,40 +21,25 @@ use zksync_prover_fri_types::{ encodings::recursion_request::RecursionQueueSimulator, zkevm_circuits::recursion::leaf_layer::input::RecursionLeafParametersWitness, }, - get_current_pod_name, - keys::ClosedFormInputKey, FriProofWrapper, }; -use zksync_prover_fri_utils::get_recursive_layer_circuit_id_for_base_layer; use zksync_prover_keystore::keystore::Keystore; -use zksync_queued_job_processor::JobProcessor; use zksync_types::{ basic_fri_types::AggregationRound, protocol_version::ProtocolSemanticVersion, prover_dal::LeafAggregationJobMetadata, L1BatchNumber, }; use crate::{ + artifacts::ArtifactsManager, metrics::WITNESS_GENERATOR_METRICS, utils::{ - load_proofs_for_job_ids, save_node_aggregations_artifacts, - 
save_recursive_layer_prover_input_artifacts, ClosedFormInputWrapper, + load_proofs_for_job_ids, save_recursive_layer_prover_input_artifacts, + ClosedFormInputWrapper, }, }; -pub struct LeafAggregationArtifacts { - circuit_id: u8, - block_number: L1BatchNumber, - pub aggregations: Vec<(u64, RecursionQueueSimulator)>, - pub circuit_ids_and_urls: Vec<(u8, String)>, - #[allow(dead_code)] - closed_form_inputs: Vec>, -} - -#[derive(Debug)] -struct BlobUrls { - circuit_ids_and_urls: Vec<(u8, String)>, - aggregations_urls: String, -} +mod artifacts; +mod job_processor; pub struct LeafAggregationWitnessGeneratorJob { pub(crate) circuit_id: u8, @@ -75,6 +59,16 @@ pub struct LeafAggregationWitnessGenerator { keystore: Keystore, } +#[derive(Clone)] +pub struct LeafAggregationArtifacts { + circuit_id: u8, + block_number: L1BatchNumber, + pub aggregations: Vec<(u64, RecursionQueueSimulator)>, + pub circuit_ids_and_urls: Vec<(u8, String)>, + #[allow(dead_code)] + closed_form_inputs: Vec>, +} + impl LeafAggregationWitnessGenerator { pub fn new( config: FriWitnessGeneratorConfig, @@ -113,108 +107,6 @@ impl LeafAggregationWitnessGenerator { } } -#[async_trait] -impl JobProcessor for LeafAggregationWitnessGenerator { - type Job = LeafAggregationWitnessGeneratorJob; - type JobId = u32; - type JobArtifacts = LeafAggregationArtifacts; - - const SERVICE_NAME: &'static str = "fri_leaf_aggregation_witness_generator"; - - async fn get_next_job(&self) -> anyhow::Result> { - let mut prover_connection = self.prover_connection_pool.connection().await?; - let pod_name = get_current_pod_name(); - let Some(metadata) = prover_connection - .fri_witness_generator_dal() - .get_next_leaf_aggregation_job(self.protocol_version, &pod_name) - .await - else { - return Ok(None); - }; - tracing::info!("Processing leaf aggregation job {:?}", metadata.id); - Ok(Some(( - metadata.id, - prepare_leaf_aggregation_job(metadata, &*self.object_store, self.keystore.clone()) - .await - 
.context("prepare_leaf_aggregation_job()")?, - ))) - } - - async fn save_failure(&self, job_id: u32, _started_at: Instant, error: String) -> () { - self.prover_connection_pool - .connection() - .await - .unwrap() - .fri_witness_generator_dal() - .mark_leaf_aggregation_job_failed(&error, job_id) - .await; - } - - #[allow(clippy::async_yields_async)] - async fn process_job( - &self, - _job_id: &Self::JobId, - job: LeafAggregationWitnessGeneratorJob, - started_at: Instant, - ) -> tokio::task::JoinHandle> { - let object_store = self.object_store.clone(); - let max_circuits_in_flight = self.config.max_circuits_in_flight; - tokio::spawn(async move { - Ok(Self::process_job_impl(job, started_at, object_store, max_circuits_in_flight).await) - }) - } - - async fn save_result( - &self, - job_id: u32, - started_at: Instant, - artifacts: LeafAggregationArtifacts, - ) -> anyhow::Result<()> { - let block_number = artifacts.block_number; - let circuit_id = artifacts.circuit_id; - tracing::info!( - "Saving leaf aggregation artifacts for block {} with circuit {}", - block_number.0, - circuit_id, - ); - let blob_urls = save_artifacts(artifacts, &*self.object_store).await; - tracing::info!( - "Saved leaf aggregation artifacts for block {} with circuit {} (count: {})", - block_number.0, - circuit_id, - blob_urls.circuit_ids_and_urls.len(), - ); - update_database( - &self.prover_connection_pool, - started_at, - block_number, - job_id, - blob_urls, - circuit_id, - ) - .await; - Ok(()) - } - - fn max_attempts(&self) -> u32 { - self.config.max_attempts - } - - async fn get_job_attempts(&self, job_id: &u32) -> anyhow::Result { - let mut prover_storage = self - .prover_connection_pool - .connection() - .await - .context("failed to acquire DB connection for LeafAggregationWitnessGenerator")?; - prover_storage - .fri_witness_generator_dal() - .get_leaf_aggregation_job_attempts(*job_id) - .await - .map(|attempts| attempts.unwrap_or(0)) - .context("failed to get job attempts for 
LeafAggregationWitnessGenerator") - } -} - #[tracing::instrument( skip_all, fields(l1_batch = %metadata.block_number, circuit_id = %metadata.circuit_id) @@ -225,7 +117,8 @@ pub async fn prepare_leaf_aggregation_job( keystore: Keystore, ) -> anyhow::Result { let started_at = Instant::now(); - let closed_form_input = get_artifacts(&metadata, object_store).await; + let closed_form_input = + LeafAggregationWitnessGenerator::get_artifacts(&metadata, object_store).await?; WITNESS_GENERATOR_METRICS.blob_fetch_time[&AggregationRound::LeafAggregation.into()] .observe(started_at.elapsed()); @@ -368,125 +261,3 @@ pub async fn process_leaf_aggregation_job( closed_form_inputs: job.closed_form_inputs.0, } } - -#[tracing::instrument( - skip_all, - fields(l1_batch = %block_number, circuit_id = %circuit_id) -)] -async fn update_database( - prover_connection_pool: &ConnectionPool, - started_at: Instant, - block_number: L1BatchNumber, - job_id: u32, - blob_urls: BlobUrls, - circuit_id: u8, -) { - tracing::info!( - "Updating database for job_id {}, block {} with circuit id {}", - job_id, - block_number.0, - circuit_id, - ); - let mut prover_connection = prover_connection_pool.connection().await.unwrap(); - let mut transaction = prover_connection.start_transaction().await.unwrap(); - let number_of_dependent_jobs = blob_urls.circuit_ids_and_urls.len(); - let protocol_version_id = transaction - .fri_witness_generator_dal() - .protocol_version_for_l1_batch(block_number) - .await; - tracing::info!( - "Inserting {} prover jobs for job_id {}, block {} with circuit id {}", - blob_urls.circuit_ids_and_urls.len(), - job_id, - block_number.0, - circuit_id, - ); - transaction - .fri_prover_jobs_dal() - .insert_prover_jobs( - block_number, - blob_urls.circuit_ids_and_urls, - AggregationRound::LeafAggregation, - 0, - protocol_version_id, - ) - .await; - tracing::info!( - "Updating node aggregation jobs url for job_id {}, block {} with circuit id {}", - job_id, - block_number.0, - circuit_id, - ); - 
transaction - .fri_witness_generator_dal() - .update_node_aggregation_jobs_url( - block_number, - get_recursive_layer_circuit_id_for_base_layer(circuit_id), - number_of_dependent_jobs, - 0, - blob_urls.aggregations_urls, - ) - .await; - tracing::info!( - "Marking leaf aggregation job as successful for job id {}, block {} with circuit id {}", - job_id, - block_number.0, - circuit_id, - ); - transaction - .fri_witness_generator_dal() - .mark_leaf_aggregation_as_successful(job_id, started_at.elapsed()) - .await; - - tracing::info!( - "Committing transaction for job_id {}, block {} with circuit id {}", - job_id, - block_number.0, - circuit_id, - ); - transaction.commit().await.unwrap(); -} - -#[tracing::instrument( - skip_all, - fields(l1_batch = %metadata.block_number, circuit_id = %metadata.circuit_id) -)] -async fn get_artifacts( - metadata: &LeafAggregationJobMetadata, - object_store: &dyn ObjectStore, -) -> ClosedFormInputWrapper { - let key = ClosedFormInputKey { - block_number: metadata.block_number, - circuit_id: metadata.circuit_id, - }; - object_store - .get(key) - .await - .unwrap_or_else(|_| panic!("leaf aggregation job artifacts missing: {:?}", key)) -} - -#[tracing::instrument( - skip_all, - fields(l1_batch = %artifacts.block_number, circuit_id = %artifacts.circuit_id) -)] -async fn save_artifacts( - artifacts: LeafAggregationArtifacts, - object_store: &dyn ObjectStore, -) -> BlobUrls { - let started_at = Instant::now(); - let aggregations_urls = save_node_aggregations_artifacts( - artifacts.block_number, - get_recursive_layer_circuit_id_for_base_layer(artifacts.circuit_id), - 0, - artifacts.aggregations, - object_store, - ) - .await; - WITNESS_GENERATOR_METRICS.blob_save_time[&AggregationRound::LeafAggregation.into()] - .observe(started_at.elapsed()); - - BlobUrls { - circuit_ids_and_urls: artifacts.circuit_ids_and_urls, - aggregations_urls, - } -} diff --git a/prover/crates/bin/witness_generator/src/lib.rs 
b/prover/crates/bin/witness_generator/src/lib.rs index 00d2ebf2bb3d..c0ac9718c6ee 100644 --- a/prover/crates/bin/witness_generator/src/lib.rs +++ b/prover/crates/bin/witness_generator/src/lib.rs @@ -1,6 +1,7 @@ #![allow(incomplete_features)] // We have to use generic const exprs. #![feature(generic_const_exprs)] +pub mod artifacts; pub mod basic_circuits; pub mod leaf_aggregation; pub mod metrics; diff --git a/prover/crates/bin/witness_generator/src/node_aggregation/artifacts.rs b/prover/crates/bin/witness_generator/src/node_aggregation/artifacts.rs new file mode 100644 index 000000000000..245027f0d677 --- /dev/null +++ b/prover/crates/bin/witness_generator/src/node_aggregation/artifacts.rs @@ -0,0 +1,146 @@ +use std::time::Instant; + +use async_trait::async_trait; +use zksync_object_store::ObjectStore; +use zksync_prover_dal::{ConnectionPool, Prover, ProverDal}; +use zksync_prover_fri_types::keys::AggregationsKey; +use zksync_types::{basic_fri_types::AggregationRound, prover_dal::NodeAggregationJobMetadata}; + +use crate::{ + artifacts::{AggregationBlobUrls, ArtifactsManager, BlobUrls}, + metrics::WITNESS_GENERATOR_METRICS, + node_aggregation::{NodeAggregationArtifacts, NodeAggregationWitnessGenerator}, + utils::{save_node_aggregations_artifacts, AggregationWrapper}, +}; + +#[async_trait] +impl ArtifactsManager for NodeAggregationWitnessGenerator { + type InputMetadata = NodeAggregationJobMetadata; + type InputArtifacts = AggregationWrapper; + type OutputArtifacts = NodeAggregationArtifacts; + + #[tracing::instrument( + skip_all, + fields(l1_batch = % metadata.block_number, circuit_id = % metadata.circuit_id) + )] + async fn get_artifacts( + metadata: &Self::InputMetadata, + object_store: &dyn ObjectStore, + ) -> anyhow::Result { + let key = AggregationsKey { + block_number: metadata.block_number, + circuit_id: metadata.circuit_id, + depth: metadata.depth, + }; + let artifacts = object_store.get(key).await.unwrap_or_else(|error| { + panic!( + "node aggregation job 
artifacts getting error. Key: {:?}, error: {:?}", + key, error + ) + }); + + Ok(artifacts) + } + + #[tracing::instrument( + skip_all, + fields(l1_batch = %artifacts.block_number, circuit_id = %artifacts.circuit_id) + )] + async fn save_artifacts( + _job_id: u32, + artifacts: Self::OutputArtifacts, + object_store: &dyn ObjectStore, + ) -> BlobUrls { + let started_at = Instant::now(); + let aggregations_urls = save_node_aggregations_artifacts( + artifacts.block_number, + artifacts.circuit_id, + artifacts.depth, + artifacts.next_aggregations, + object_store, + ) + .await; + + WITNESS_GENERATOR_METRICS.blob_save_time[&AggregationRound::NodeAggregation.into()] + .observe(started_at.elapsed()); + + BlobUrls::Aggregation(AggregationBlobUrls { + aggregations_urls, + circuit_ids_and_urls: artifacts.recursive_circuit_ids_and_urls, + }) + } + + #[tracing::instrument( + skip_all, + fields(l1_batch = % job_id) + )] + async fn update_database( + connection_pool: &ConnectionPool, + job_id: u32, + started_at: Instant, + blob_urls: BlobUrls, + artifacts: Self::OutputArtifacts, + ) -> anyhow::Result<()> { + let mut prover_connection = connection_pool.connection().await.unwrap(); + let blob_urls = match blob_urls { + BlobUrls::Aggregation(blobs) => blobs, + _ => unreachable!(), + }; + let mut transaction = prover_connection.start_transaction().await.unwrap(); + let dependent_jobs = blob_urls.circuit_ids_and_urls.len(); + let protocol_version_id = transaction + .fri_witness_generator_dal() + .protocol_version_for_l1_batch(artifacts.block_number) + .await; + match artifacts.next_aggregations.len() > 1 { + true => { + transaction + .fri_prover_jobs_dal() + .insert_prover_jobs( + artifacts.block_number, + blob_urls.circuit_ids_and_urls, + AggregationRound::NodeAggregation, + artifacts.depth, + protocol_version_id, + ) + .await; + transaction + .fri_witness_generator_dal() + .insert_node_aggregation_jobs( + artifacts.block_number, + artifacts.circuit_id, + Some(dependent_jobs as i32), + 
artifacts.depth, + &blob_urls.aggregations_urls, + protocol_version_id, + ) + .await; + } + false => { + let (_, blob_url) = blob_urls.circuit_ids_and_urls[0].clone(); + transaction + .fri_prover_jobs_dal() + .insert_prover_job( + artifacts.block_number, + artifacts.circuit_id, + artifacts.depth, + 0, + AggregationRound::NodeAggregation, + &blob_url, + true, + protocol_version_id, + ) + .await + } + } + + transaction + .fri_witness_generator_dal() + .mark_node_aggregation_as_successful(job_id, started_at.elapsed()) + .await; + + transaction.commit().await?; + + Ok(()) + } +} diff --git a/prover/crates/bin/witness_generator/src/node_aggregation/job_processor.rs b/prover/crates/bin/witness_generator/src/node_aggregation/job_processor.rs new file mode 100644 index 000000000000..a015462cd6fe --- /dev/null +++ b/prover/crates/bin/witness_generator/src/node_aggregation/job_processor.rs @@ -0,0 +1,115 @@ +use std::time::Instant; + +use anyhow::Context as _; +use async_trait::async_trait; +use zksync_prover_dal::ProverDal; +use zksync_prover_fri_types::get_current_pod_name; +use zksync_queued_job_processor::JobProcessor; +use zksync_types::basic_fri_types::AggregationRound; + +use crate::{ + artifacts::ArtifactsManager, + metrics::WITNESS_GENERATOR_METRICS, + node_aggregation::{ + prepare_job, NodeAggregationArtifacts, NodeAggregationWitnessGenerator, + NodeAggregationWitnessGeneratorJob, + }, +}; + +#[async_trait] +impl JobProcessor for NodeAggregationWitnessGenerator { + type Job = NodeAggregationWitnessGeneratorJob; + type JobId = u32; + type JobArtifacts = NodeAggregationArtifacts; + + const SERVICE_NAME: &'static str = "fri_node_aggregation_witness_generator"; + + async fn get_next_job(&self) -> anyhow::Result> { + let mut prover_connection = self.prover_connection_pool.connection().await?; + let pod_name = get_current_pod_name(); + let Some(metadata) = prover_connection + .fri_witness_generator_dal() + .get_next_node_aggregation_job(self.protocol_version, &pod_name) 
+ .await + else { + return Ok(None); + }; + tracing::info!("Processing node aggregation job {:?}", metadata.id); + Ok(Some(( + metadata.id, + prepare_job(metadata, &*self.object_store, self.keystore.clone()) + .await + .context("prepare_job()")?, + ))) + } + + async fn save_failure(&self, job_id: u32, _started_at: Instant, error: String) -> () { + self.prover_connection_pool + .connection() + .await + .unwrap() + .fri_witness_generator_dal() + .mark_node_aggregation_job_failed(&error, job_id) + .await; + } + + #[allow(clippy::async_yields_async)] + async fn process_job( + &self, + _job_id: &Self::JobId, + job: NodeAggregationWitnessGeneratorJob, + started_at: Instant, + ) -> tokio::task::JoinHandle> { + let object_store = self.object_store.clone(); + let max_circuits_in_flight = self.config.max_circuits_in_flight; + tokio::spawn(async move { + Ok(Self::process_job_impl(job, started_at, object_store, max_circuits_in_flight).await) + }) + } + + #[tracing::instrument( + skip_all, + fields(l1_batch = % artifacts.block_number, circuit_id = % artifacts.circuit_id) + )] + async fn save_result( + &self, + job_id: u32, + started_at: Instant, + artifacts: NodeAggregationArtifacts, + ) -> anyhow::Result<()> { + let blob_save_started_at = Instant::now(); + + let blob_urls = Self::save_artifacts(job_id, artifacts.clone(), &*self.object_store).await; + + WITNESS_GENERATOR_METRICS.blob_save_time[&AggregationRound::NodeAggregation.into()] + .observe(blob_save_started_at.elapsed()); + + Self::update_database( + &self.prover_connection_pool, + job_id, + started_at, + blob_urls, + artifacts, + ) + .await?; + Ok(()) + } + + fn max_attempts(&self) -> u32 { + self.config.max_attempts + } + + async fn get_job_attempts(&self, job_id: &u32) -> anyhow::Result { + let mut prover_storage = self + .prover_connection_pool + .connection() + .await + .context("failed to acquire DB connection for NodeAggregationWitnessGenerator")?; + prover_storage + .fri_witness_generator_dal() + 
.get_node_aggregation_job_attempts(*job_id) + .await + .map(|attempts| attempts.unwrap_or(0)) + .context("failed to get job attempts for NodeAggregationWitnessGenerator") + } +} diff --git a/prover/crates/bin/witness_generator/src/node_aggregation.rs b/prover/crates/bin/witness_generator/src/node_aggregation/mod.rs similarity index 52% rename from prover/crates/bin/witness_generator/src/node_aggregation.rs rename to prover/crates/bin/witness_generator/src/node_aggregation/mod.rs index 72bdebde572a..047caa363a89 100644 --- a/prover/crates/bin/witness_generator/src/node_aggregation.rs +++ b/prover/crates/bin/witness_generator/src/node_aggregation/mod.rs @@ -1,7 +1,6 @@ use std::{sync::Arc, time::Instant}; use anyhow::Context as _; -use async_trait::async_trait; use circuit_definitions::circuit_definitions::recursion_layer::RECURSION_ARITY; use tokio::sync::Semaphore; use zkevm_test_harness::witness::recursive_aggregation::{ @@ -9,7 +8,7 @@ use zkevm_test_harness::witness::recursive_aggregation::{ }; use zksync_config::configs::FriWitnessGeneratorConfig; use zksync_object_store::ObjectStore; -use zksync_prover_dal::{ConnectionPool, Prover, ProverDal}; +use zksync_prover_dal::{ConnectionPool, Prover}; use zksync_prover_fri_types::{ circuit_definitions::{ boojum::field::goldilocks::GoldilocksField, @@ -19,25 +18,24 @@ use zksync_prover_fri_types::{ encodings::recursion_request::RecursionQueueSimulator, zkevm_circuits::recursion::leaf_layer::input::RecursionLeafParametersWitness, }, - get_current_pod_name, - keys::AggregationsKey, FriProofWrapper, }; use zksync_prover_keystore::{keystore::Keystore, utils::get_leaf_vk_params}; -use zksync_queued_job_processor::JobProcessor; use zksync_types::{ basic_fri_types::AggregationRound, protocol_version::ProtocolSemanticVersion, prover_dal::NodeAggregationJobMetadata, L1BatchNumber, }; use crate::{ + artifacts::ArtifactsManager, metrics::WITNESS_GENERATOR_METRICS, - utils::{ - load_proofs_for_job_ids, 
save_node_aggregations_artifacts, - save_recursive_layer_prover_input_artifacts, AggregationWrapper, - }, + utils::{load_proofs_for_job_ids, save_recursive_layer_prover_input_artifacts}, }; +mod artifacts; +mod job_processor; + +#[derive(Clone)] pub struct NodeAggregationArtifacts { circuit_id: u8, block_number: L1BatchNumber, @@ -46,12 +44,6 @@ pub struct NodeAggregationArtifacts { pub recursive_circuit_ids_and_urls: Vec<(u8, String)>, } -#[derive(Debug)] -struct BlobUrls { - node_aggregations_url: String, - circuit_ids_and_urls: Vec<(u8, String)>, -} - #[derive(Clone)] pub struct NodeAggregationWitnessGeneratorJob { circuit_id: u8, @@ -92,7 +84,7 @@ impl NodeAggregationWitnessGenerator { #[tracing::instrument( skip_all, - fields(l1_batch = %job.block_number, circuit_id = %job.circuit_id) + fields(l1_batch = % job.block_number, circuit_id = % job.circuit_id) )] pub async fn process_job_impl( job: NodeAggregationWitnessGeneratorJob, @@ -223,108 +215,9 @@ impl NodeAggregationWitnessGenerator { } } -#[async_trait] -impl JobProcessor for NodeAggregationWitnessGenerator { - type Job = NodeAggregationWitnessGeneratorJob; - type JobId = u32; - type JobArtifacts = NodeAggregationArtifacts; - - const SERVICE_NAME: &'static str = "fri_node_aggregation_witness_generator"; - - async fn get_next_job(&self) -> anyhow::Result> { - let mut prover_connection = self.prover_connection_pool.connection().await?; - let pod_name = get_current_pod_name(); - let Some(metadata) = prover_connection - .fri_witness_generator_dal() - .get_next_node_aggregation_job(self.protocol_version, &pod_name) - .await - else { - return Ok(None); - }; - tracing::info!("Processing node aggregation job {:?}", metadata.id); - Ok(Some(( - metadata.id, - prepare_job(metadata, &*self.object_store, self.keystore.clone()) - .await - .context("prepare_job()")?, - ))) - } - - async fn save_failure(&self, job_id: u32, _started_at: Instant, error: String) -> () { - self.prover_connection_pool - .connection() - .await 
- .unwrap() - .fri_witness_generator_dal() - .mark_node_aggregation_job_failed(&error, job_id) - .await; - } - - #[allow(clippy::async_yields_async)] - async fn process_job( - &self, - _job_id: &Self::JobId, - job: NodeAggregationWitnessGeneratorJob, - started_at: Instant, - ) -> tokio::task::JoinHandle> { - let object_store = self.object_store.clone(); - let max_circuits_in_flight = self.config.max_circuits_in_flight; - tokio::spawn(async move { - Ok(Self::process_job_impl(job, started_at, object_store, max_circuits_in_flight).await) - }) - } - - #[tracing::instrument( - skip_all, - fields(l1_batch = %artifacts.block_number, circuit_id = %artifacts.circuit_id) - )] - async fn save_result( - &self, - job_id: u32, - started_at: Instant, - artifacts: NodeAggregationArtifacts, - ) -> anyhow::Result<()> { - let block_number = artifacts.block_number; - let circuit_id = artifacts.circuit_id; - let depth = artifacts.depth; - let shall_continue_node_aggregations = artifacts.next_aggregations.len() > 1; - let blob_urls = save_artifacts(artifacts, &*self.object_store).await; - update_database( - &self.prover_connection_pool, - started_at, - job_id, - block_number, - depth, - circuit_id, - blob_urls, - shall_continue_node_aggregations, - ) - .await; - Ok(()) - } - - fn max_attempts(&self) -> u32 { - self.config.max_attempts - } - - async fn get_job_attempts(&self, job_id: &u32) -> anyhow::Result { - let mut prover_storage = self - .prover_connection_pool - .connection() - .await - .context("failed to acquire DB connection for NodeAggregationWitnessGenerator")?; - prover_storage - .fri_witness_generator_dal() - .get_node_aggregation_job_attempts(*job_id) - .await - .map(|attempts| attempts.unwrap_or(0)) - .context("failed to get job attempts for NodeAggregationWitnessGenerator") - } -} - #[tracing::instrument( skip_all, - fields(l1_batch = %metadata.block_number, circuit_id = %metadata.circuit_id) + fields(l1_batch = % metadata.block_number, circuit_id = % metadata.circuit_id) 
)] pub async fn prepare_job( metadata: NodeAggregationJobMetadata, @@ -332,7 +225,7 @@ pub async fn prepare_job( keystore: Keystore, ) -> anyhow::Result { let started_at = Instant::now(); - let artifacts = get_artifacts(&metadata, object_store).await; + let artifacts = NodeAggregationWitnessGenerator::get_artifacts(&metadata, object_store).await?; WITNESS_GENERATOR_METRICS.blob_fetch_time[&AggregationRound::NodeAggregation.into()] .observe(started_at.elapsed()); @@ -361,123 +254,3 @@ pub async fn prepare_job( all_leafs_layer_params: get_leaf_vk_params(&keystore).context("get_leaf_vk_params()")?, }) } - -#[allow(clippy::too_many_arguments)] -#[tracing::instrument( - skip_all, - fields(l1_batch = %block_number, circuit_id = %circuit_id) -)] -async fn update_database( - prover_connection_pool: &ConnectionPool, - started_at: Instant, - id: u32, - block_number: L1BatchNumber, - depth: u16, - circuit_id: u8, - blob_urls: BlobUrls, - shall_continue_node_aggregations: bool, -) { - let mut prover_connection = prover_connection_pool.connection().await.unwrap(); - let mut transaction = prover_connection.start_transaction().await.unwrap(); - let dependent_jobs = blob_urls.circuit_ids_and_urls.len(); - let protocol_version_id = transaction - .fri_witness_generator_dal() - .protocol_version_for_l1_batch(block_number) - .await; - match shall_continue_node_aggregations { - true => { - transaction - .fri_prover_jobs_dal() - .insert_prover_jobs( - block_number, - blob_urls.circuit_ids_and_urls, - AggregationRound::NodeAggregation, - depth, - protocol_version_id, - ) - .await; - transaction - .fri_witness_generator_dal() - .insert_node_aggregation_jobs( - block_number, - circuit_id, - Some(dependent_jobs as i32), - depth, - &blob_urls.node_aggregations_url, - protocol_version_id, - ) - .await; - } - false => { - let (_, blob_url) = blob_urls.circuit_ids_and_urls[0].clone(); - transaction - .fri_prover_jobs_dal() - .insert_prover_job( - block_number, - circuit_id, - depth, - 0, - 
AggregationRound::NodeAggregation, - &blob_url, - true, - protocol_version_id, - ) - .await - } - } - - transaction - .fri_witness_generator_dal() - .mark_node_aggregation_as_successful(id, started_at.elapsed()) - .await; - - transaction.commit().await.unwrap(); -} - -#[tracing::instrument( - skip_all, - fields(l1_batch = %metadata.block_number, circuit_id = %metadata.circuit_id) -)] -async fn get_artifacts( - metadata: &NodeAggregationJobMetadata, - object_store: &dyn ObjectStore, -) -> AggregationWrapper { - let key = AggregationsKey { - block_number: metadata.block_number, - circuit_id: metadata.circuit_id, - depth: metadata.depth, - }; - object_store.get(key).await.unwrap_or_else(|error| { - panic!( - "node aggregation job artifacts getting error. Key: {:?}, error: {:?}", - key, error - ) - }) -} - -#[tracing::instrument( - skip_all, - fields(l1_batch = %artifacts.block_number, circuit_id = %artifacts.circuit_id) -)] -async fn save_artifacts( - artifacts: NodeAggregationArtifacts, - object_store: &dyn ObjectStore, -) -> BlobUrls { - let started_at = Instant::now(); - let aggregations_urls = save_node_aggregations_artifacts( - artifacts.block_number, - artifacts.circuit_id, - artifacts.depth, - artifacts.next_aggregations, - object_store, - ) - .await; - - WITNESS_GENERATOR_METRICS.blob_save_time[&AggregationRound::NodeAggregation.into()] - .observe(started_at.elapsed()); - - BlobUrls { - node_aggregations_url: aggregations_urls, - circuit_ids_and_urls: artifacts.recursive_circuit_ids_and_urls, - } -} diff --git a/prover/crates/bin/witness_generator/src/recursion_tip/artifacts.rs b/prover/crates/bin/witness_generator/src/recursion_tip/artifacts.rs new file mode 100644 index 000000000000..8379fcf9f933 --- /dev/null +++ b/prover/crates/bin/witness_generator/src/recursion_tip/artifacts.rs @@ -0,0 +1,141 @@ +use std::{collections::HashMap, time::Instant}; + +use async_trait::async_trait; +use circuit_definitions::{ + 
circuit_definitions::recursion_layer::{ZkSyncRecursionLayerStorageType, ZkSyncRecursionProof}, + zkevm_circuits::scheduler::aux::BaseLayerCircuitType, +}; +use zkevm_test_harness::empty_node_proof; +use zksync_object_store::ObjectStore; +use zksync_prover_dal::{ConnectionPool, Prover, ProverDal}; +use zksync_prover_fri_types::{keys::FriCircuitKey, CircuitWrapper, FriProofWrapper}; +use zksync_types::{basic_fri_types::AggregationRound, L1BatchNumber}; + +use crate::{ + artifacts::{ArtifactsManager, BlobUrls}, + recursion_tip::{RecursionTipArtifacts, RecursionTipWitnessGenerator}, +}; + +#[async_trait] +impl ArtifactsManager for RecursionTipWitnessGenerator { + type InputMetadata = Vec<(u8, u32)>; + type InputArtifacts = Vec; + type OutputArtifacts = RecursionTipArtifacts; + + /// Loads all proofs for a given recursion tip's job ids. + /// Note that recursion tip may not have proofs for some specific circuits (because the batch didn't contain them). + /// In this scenario, we still need to pass a proof, but it won't be taken into account during proving. + /// For this scenario, we use an empty_proof, but any proof would suffice. 
+ async fn get_artifacts( + metadata: &Vec<(u8, u32)>, + object_store: &dyn ObjectStore, + ) -> anyhow::Result> { + let job_mapping: HashMap = metadata + .clone() + .into_iter() + .map(|(leaf_circuit_id, job_id)| { + ( + ZkSyncRecursionLayerStorageType::from_leaf_u8_to_basic_u8(leaf_circuit_id), + job_id, + ) + }) + .collect(); + + let empty_proof = empty_node_proof().into_inner(); + + let mut proofs = Vec::new(); + for circuit_id in BaseLayerCircuitType::as_iter_u8() { + if job_mapping.contains_key(&circuit_id) { + let fri_proof_wrapper = object_store + .get(*job_mapping.get(&circuit_id).unwrap()) + .await + .unwrap_or_else(|_| { + panic!( + "Failed to load proof with circuit_id {} for recursion tip", + circuit_id + ) + }); + match fri_proof_wrapper { + FriProofWrapper::Base(_) => { + return Err(anyhow::anyhow!( + "Expected only recursive proofs for recursion tip, got Base for circuit {}", + circuit_id + )); + } + FriProofWrapper::Recursive(recursive_proof) => { + proofs.push(recursive_proof.into_inner()); + } + } + } else { + proofs.push(empty_proof.clone()); + } + } + Ok(proofs) + } + + async fn save_artifacts( + job_id: u32, + artifacts: Self::OutputArtifacts, + object_store: &dyn ObjectStore, + ) -> BlobUrls { + let key = FriCircuitKey { + block_number: L1BatchNumber(job_id), + circuit_id: 255, + sequence_number: 0, + depth: 0, + aggregation_round: AggregationRound::RecursionTip, + }; + + let blob_url = object_store + .put( + key, + &CircuitWrapper::Recursive(artifacts.recursion_tip_circuit.clone()), + ) + .await + .unwrap(); + + BlobUrls::Url(blob_url) + } + + async fn update_database( + connection_pool: &ConnectionPool, + job_id: u32, + started_at: Instant, + blob_urls: BlobUrls, + _artifacts: Self::OutputArtifacts, + ) -> anyhow::Result<()> { + let blob_url = match blob_urls { + BlobUrls::Url(url) => url, + _ => panic!("Unexpected blob urls type"), + }; + + let mut prover_connection = connection_pool.connection().await?; + let mut transaction = 
prover_connection.start_transaction().await?; + let protocol_version_id = transaction + .fri_witness_generator_dal() + .protocol_version_for_l1_batch(L1BatchNumber(job_id)) + .await; + transaction + .fri_prover_jobs_dal() + .insert_prover_job( + L1BatchNumber(job_id), + ZkSyncRecursionLayerStorageType::RecursionTipCircuit as u8, + 0, + 0, + AggregationRound::RecursionTip, + &blob_url, + false, + protocol_version_id, + ) + .await; + + transaction + .fri_witness_generator_dal() + .mark_recursion_tip_job_as_successful(L1BatchNumber(job_id), started_at.elapsed()) + .await; + + transaction.commit().await?; + + Ok(()) + } +} diff --git a/prover/crates/bin/witness_generator/src/recursion_tip/job_processor.rs b/prover/crates/bin/witness_generator/src/recursion_tip/job_processor.rs new file mode 100644 index 000000000000..f114724cfec4 --- /dev/null +++ b/prover/crates/bin/witness_generator/src/recursion_tip/job_processor.rs @@ -0,0 +1,130 @@ +use std::time::Instant; + +use anyhow::Context as _; +use async_trait::async_trait; +use zksync_prover_dal::ProverDal; +use zksync_prover_fri_types::get_current_pod_name; +use zksync_queued_job_processor::JobProcessor; +use zksync_types::{basic_fri_types::AggregationRound, L1BatchNumber}; + +use crate::{ + artifacts::ArtifactsManager, + metrics::WITNESS_GENERATOR_METRICS, + recursion_tip::{ + prepare_job, RecursionTipArtifacts, RecursionTipWitnessGenerator, + RecursionTipWitnessGeneratorJob, + }, +}; + +#[async_trait] +impl JobProcessor for RecursionTipWitnessGenerator { + type Job = RecursionTipWitnessGeneratorJob; + type JobId = L1BatchNumber; + type JobArtifacts = RecursionTipArtifacts; + + const SERVICE_NAME: &'static str = "recursion_tip_witness_generator"; + + async fn get_next_job(&self) -> anyhow::Result> { + let mut prover_connection = self.prover_connection_pool.connection().await?; + let pod_name = get_current_pod_name(); + let Some((l1_batch_number, number_of_final_node_jobs)) = prover_connection + 
.fri_witness_generator_dal() + .get_next_recursion_tip_witness_job(self.protocol_version, &pod_name) + .await + else { + return Ok(None); + }; + + let final_node_proof_job_ids = prover_connection + .fri_prover_jobs_dal() + .get_final_node_proof_job_ids_for(l1_batch_number) + .await; + + assert_eq!( + final_node_proof_job_ids.len(), + number_of_final_node_jobs as usize, + "recursion tip witness job was scheduled without all final node jobs being completed; expected {}, got {}", + number_of_final_node_jobs, final_node_proof_job_ids.len() + ); + + Ok(Some(( + l1_batch_number, + prepare_job( + l1_batch_number, + final_node_proof_job_ids, + &*self.object_store, + self.keystore.clone(), + ) + .await + .context("prepare_job()")?, + ))) + } + + async fn save_failure(&self, job_id: L1BatchNumber, _started_at: Instant, error: String) -> () { + self.prover_connection_pool + .connection() + .await + .unwrap() + .fri_witness_generator_dal() + .mark_recursion_tip_job_failed(&error, job_id) + .await; + } + + #[allow(clippy::async_yields_async)] + async fn process_job( + &self, + _job_id: &Self::JobId, + job: RecursionTipWitnessGeneratorJob, + started_at: Instant, + ) -> tokio::task::JoinHandle> { + tokio::task::spawn_blocking(move || Ok(Self::process_job_sync(job, started_at))) + } + + #[tracing::instrument( + skip_all, + fields(l1_batch = %job_id) + )] + async fn save_result( + &self, + job_id: L1BatchNumber, + started_at: Instant, + artifacts: RecursionTipArtifacts, + ) -> anyhow::Result<()> { + let blob_save_started_at = Instant::now(); + + let blob_urls = + Self::save_artifacts(job_id.0, artifacts.clone(), &*self.object_store).await; + + WITNESS_GENERATOR_METRICS.blob_save_time[&AggregationRound::RecursionTip.into()] + .observe(blob_save_started_at.elapsed()); + + Self::update_database( + &self.prover_connection_pool, + job_id.0, + started_at, + blob_urls, + artifacts, + ) + .await?; + + Ok(()) + } + + fn max_attempts(&self) -> u32 { + self.config.max_attempts + } + + async 
fn get_job_attempts(&self, job_id: &L1BatchNumber) -> anyhow::Result { + let mut prover_storage = self + .prover_connection_pool + .connection() + .await + .context("failed to acquire DB connection for RecursionTipWitnessGenerator")?; + prover_storage + .fri_witness_generator_dal() + .get_recursion_tip_witness_job_attempts(*job_id) + .await + .map(|attempts| attempts.unwrap_or(0)) + .context("failed to get job attempts for RecursionTipWitnessGenerator") + } +} diff --git a/prover/crates/bin/witness_generator/src/recursion_tip.rs b/prover/crates/bin/witness_generator/src/recursion_tip/mod.rs similarity index 58% rename from prover/crates/bin/witness_generator/src/recursion_tip.rs rename to prover/crates/bin/witness_generator/src/recursion_tip/mod.rs index 5e97631babb9..4abb56a7d788 100644 --- a/prover/crates/bin/witness_generator/src/recursion_tip.rs +++ b/prover/crates/bin/witness_generator/src/recursion_tip/mod.rs @@ -1,7 +1,6 @@ use std::{sync::Arc, time::Instant}; use anyhow::Context; -use async_trait::async_trait; use circuit_definitions::{ circuit_definitions::recursion_layer::{ recursion_tip::RecursionTipCircuit, ZkSyncRecursionLayerStorageType, @@ -37,23 +36,20 @@ use zkevm_test_harness::{ }; use zksync_config::configs::FriWitnessGeneratorConfig; use zksync_object_store::ObjectStore; -use zksync_prover_dal::{ConnectionPool, Prover, ProverDal}; -use zksync_prover_fri_types::{ - get_current_pod_name, - keys::{ClosedFormInputKey, FriCircuitKey}, - CircuitWrapper, -}; +use zksync_prover_dal::{ConnectionPool, Prover}; +use zksync_prover_fri_types::keys::ClosedFormInputKey; use zksync_prover_keystore::{keystore::Keystore, utils::get_leaf_vk_params}; -use zksync_queued_job_processor::JobProcessor; use zksync_types::{ basic_fri_types::AggregationRound, protocol_version::ProtocolSemanticVersion, L1BatchNumber, }; use crate::{ - metrics::WITNESS_GENERATOR_METRICS, - utils::{load_proofs_for_recursion_tip, ClosedFormInputWrapper}, + artifacts::ArtifactsManager, 
metrics::WITNESS_GENERATOR_METRICS, utils::ClosedFormInputWrapper, }; +mod artifacts; +mod job_processor; + #[derive(Clone)] pub struct RecursionTipWitnessGeneratorJob { block_number: L1BatchNumber, @@ -65,6 +61,7 @@ pub struct RecursionTipWitnessGeneratorJob { node_vk: ZkSyncRecursionLayerVerificationKey, } +#[derive(Clone)] pub struct RecursionTipArtifacts { pub recursion_tip_circuit: ZkSyncRecursiveLayerCircuit, } @@ -138,148 +135,6 @@ impl RecursionTipWitnessGenerator { } } -#[async_trait] -impl JobProcessor for RecursionTipWitnessGenerator { - type Job = RecursionTipWitnessGeneratorJob; - type JobId = L1BatchNumber; - type JobArtifacts = RecursionTipArtifacts; - - const SERVICE_NAME: &'static str = "recursion_tip_witness_generator"; - - async fn get_next_job(&self) -> anyhow::Result> { - let mut prover_connection = self.prover_connection_pool.connection().await?; - let pod_name = get_current_pod_name(); - let Some((l1_batch_number, number_of_final_node_jobs)) = prover_connection - .fri_witness_generator_dal() - .get_next_recursion_tip_witness_job(self.protocol_version, &pod_name) - .await - else { - return Ok(None); - }; - - let final_node_proof_job_ids = prover_connection - .fri_prover_jobs_dal() - .get_final_node_proof_job_ids_for(l1_batch_number) - .await; - - assert_eq!( - final_node_proof_job_ids.len(), - number_of_final_node_jobs as usize, - "recursion tip witness job was scheduled without all final node jobs being completed; expected {}, got {}", - number_of_final_node_jobs, final_node_proof_job_ids.len() - ); - - Ok(Some(( - l1_batch_number, - prepare_job( - l1_batch_number, - final_node_proof_job_ids, - &*self.object_store, - self.keystore.clone(), - ) - .await - .context("prepare_job()")?, - ))) - } - - async fn save_failure(&self, job_id: L1BatchNumber, _started_at: Instant, error: String) -> () { - self.prover_connection_pool - .connection() - .await - .unwrap() - .fri_witness_generator_dal() - .mark_recursion_tip_job_failed(&error, job_id) - 
.await; - } - - #[allow(clippy::async_yields_async)] - async fn process_job( - &self, - _job_id: &Self::JobId, - job: RecursionTipWitnessGeneratorJob, - started_at: Instant, - ) -> tokio::task::JoinHandle> { - tokio::task::spawn_blocking(move || Ok(Self::process_job_sync(job, started_at))) - } - - #[tracing::instrument( - skip_all, - fields(l1_batch = %job_id) - )] - async fn save_result( - &self, - job_id: L1BatchNumber, - started_at: Instant, - artifacts: RecursionTipArtifacts, - ) -> anyhow::Result<()> { - let key = FriCircuitKey { - block_number: job_id, - circuit_id: 255, - sequence_number: 0, - depth: 0, - aggregation_round: AggregationRound::RecursionTip, - }; - let blob_save_started_at = Instant::now(); - - let recursion_tip_circuit_blob_url = self - .object_store - .put( - key, - &CircuitWrapper::Recursive(artifacts.recursion_tip_circuit), - ) - .await?; - - WITNESS_GENERATOR_METRICS.blob_save_time[&AggregationRound::RecursionTip.into()] - .observe(blob_save_started_at.elapsed()); - - let mut prover_connection = self.prover_connection_pool.connection().await?; - let mut transaction = prover_connection.start_transaction().await?; - let protocol_version_id = transaction - .fri_witness_generator_dal() - .protocol_version_for_l1_batch(job_id) - .await; - transaction - .fri_prover_jobs_dal() - .insert_prover_job( - job_id, - ZkSyncRecursionLayerStorageType::RecursionTipCircuit as u8, - 0, - 0, - AggregationRound::RecursionTip, - &recursion_tip_circuit_blob_url, - false, - protocol_version_id, - ) - .await; - - transaction - .fri_witness_generator_dal() - .mark_recursion_tip_job_as_successful(job_id, started_at.elapsed()) - .await; - - transaction.commit().await?; - Ok(()) - } - - fn max_attempts(&self) -> u32 { - self.config.max_attempts - } - - async fn get_job_attempts(&self, job_id: &L1BatchNumber) -> anyhow::Result { - let mut prover_storage = self - .prover_connection_pool - .connection() - .await - .context("failed to acquire DB connection for 
RecursionTipWitnessGenerator")?; - prover_storage - .fri_witness_generator_dal() - .get_recursion_tip_witness_job_attempts(*job_id) - .await - .map(|attempts| attempts.unwrap_or(0)) - .context("failed to get job attempts for RecursionTipWitnessGenerator") - } -} - #[tracing::instrument( skip_all, fields(l1_batch = %l1_batch_number) @@ -292,7 +147,8 @@ pub async fn prepare_job( ) -> anyhow::Result { let started_at = Instant::now(); let recursion_tip_proofs = - load_proofs_for_recursion_tip(final_node_proof_job_ids, object_store).await?; + RecursionTipWitnessGenerator::get_artifacts(&final_node_proof_job_ids, object_store) + .await?; WITNESS_GENERATOR_METRICS.blob_fetch_time[&AggregationRound::RecursionTip.into()] .observe(started_at.elapsed()); diff --git a/prover/crates/bin/witness_generator/src/scheduler/artifacts.rs b/prover/crates/bin/witness_generator/src/scheduler/artifacts.rs new file mode 100644 index 000000000000..b20a97641887 --- /dev/null +++ b/prover/crates/bin/witness_generator/src/scheduler/artifacts.rs @@ -0,0 +1,94 @@ +use std::time::Instant; + +use async_trait::async_trait; +use circuit_definitions::circuit_definitions::recursion_layer::ZkSyncRecursionLayerStorageType; +use zksync_object_store::ObjectStore; +use zksync_prover_dal::{ConnectionPool, Prover, ProverDal}; +use zksync_prover_fri_types::{keys::FriCircuitKey, CircuitWrapper, FriProofWrapper}; +use zksync_types::{basic_fri_types::AggregationRound, L1BatchNumber}; + +use crate::{ + artifacts::{ArtifactsManager, BlobUrls}, + scheduler::{SchedulerArtifacts, SchedulerWitnessGenerator}, +}; + +#[async_trait] +impl ArtifactsManager for SchedulerWitnessGenerator { + type InputMetadata = u32; + type InputArtifacts = FriProofWrapper; + type OutputArtifacts = SchedulerArtifacts; + + async fn get_artifacts( + metadata: &Self::InputMetadata, + object_store: &dyn ObjectStore, + ) -> anyhow::Result { + let artifacts = object_store.get(*metadata).await?; + + Ok(artifacts) + } + + async fn save_artifacts( + 
job_id: u32, + artifacts: Self::OutputArtifacts, + object_store: &dyn ObjectStore, + ) -> BlobUrls { + let key = FriCircuitKey { + block_number: L1BatchNumber(job_id), + circuit_id: 1, + sequence_number: 0, + depth: 0, + aggregation_round: AggregationRound::Scheduler, + }; + + let blob_url = object_store + .put( + key, + &CircuitWrapper::Recursive(artifacts.scheduler_circuit.clone()), + ) + .await + .unwrap(); + + BlobUrls::Url(blob_url) + } + + async fn update_database( + connection_pool: &ConnectionPool, + job_id: u32, + started_at: Instant, + blob_urls: BlobUrls, + _artifacts: Self::OutputArtifacts, + ) -> anyhow::Result<()> { + let blob_url = match blob_urls { + BlobUrls::Url(url) => url, + _ => panic!("Unexpected blob urls type"), + }; + + let mut prover_connection = connection_pool.connection().await?; + let mut transaction = prover_connection.start_transaction().await?; + let protocol_version_id = transaction + .fri_witness_generator_dal() + .protocol_version_for_l1_batch(L1BatchNumber(job_id)) + .await; + transaction + .fri_prover_jobs_dal() + .insert_prover_job( + L1BatchNumber(job_id), + ZkSyncRecursionLayerStorageType::SchedulerCircuit as u8, + 0, + 0, + AggregationRound::Scheduler, + &blob_url, + false, + protocol_version_id, + ) + .await; + + transaction + .fri_witness_generator_dal() + .mark_scheduler_job_as_successful(L1BatchNumber(job_id), started_at.elapsed()) + .await; + + transaction.commit().await?; + Ok(()) + } +} diff --git a/prover/crates/bin/witness_generator/src/scheduler/job_processor.rs b/prover/crates/bin/witness_generator/src/scheduler/job_processor.rs new file mode 100644 index 000000000000..fe4f2db4090a --- /dev/null +++ b/prover/crates/bin/witness_generator/src/scheduler/job_processor.rs @@ -0,0 +1,129 @@ +use std::time::Instant; + +use anyhow::Context as _; +use async_trait::async_trait; +use zksync_prover_dal::ProverDal; +use zksync_prover_fri_types::get_current_pod_name; +use zksync_queued_job_processor::JobProcessor; +use 
zksync_types::{basic_fri_types::AggregationRound, L1BatchNumber}; + +use crate::{ + artifacts::ArtifactsManager, + metrics::WITNESS_GENERATOR_METRICS, + scheduler::{ + prepare_job, SchedulerArtifacts, SchedulerWitnessGenerator, SchedulerWitnessGeneratorJob, + }, +}; + +#[async_trait] +impl JobProcessor for SchedulerWitnessGenerator { + type Job = SchedulerWitnessGeneratorJob; + type JobId = L1BatchNumber; + type JobArtifacts = SchedulerArtifacts; + + const SERVICE_NAME: &'static str = "fri_scheduler_witness_generator"; + + async fn get_next_job(&self) -> anyhow::Result> { + let mut prover_connection = self.prover_connection_pool.connection().await?; + let pod_name = get_current_pod_name(); + let Some(l1_batch_number) = prover_connection + .fri_witness_generator_dal() + .get_next_scheduler_witness_job(self.protocol_version, &pod_name) + .await + else { + return Ok(None); + }; + let recursion_tip_job_id = prover_connection + .fri_prover_jobs_dal() + .get_recursion_tip_proof_job_id(l1_batch_number) + .await + .context(format!( + "could not find recursion tip proof for l1 batch {}", + l1_batch_number + ))?; + + Ok(Some(( + l1_batch_number, + prepare_job( + l1_batch_number, + recursion_tip_job_id, + &*self.object_store, + self.keystore.clone(), + ) + .await + .context("prepare_job()")?, + ))) + } + + async fn save_failure(&self, job_id: L1BatchNumber, _started_at: Instant, error: String) -> () { + self.prover_connection_pool + .connection() + .await + .unwrap() + .fri_witness_generator_dal() + .mark_scheduler_job_failed(&error, job_id) + .await; + } + + #[allow(clippy::async_yields_async)] + async fn process_job( + &self, + _job_id: &Self::JobId, + job: SchedulerWitnessGeneratorJob, + started_at: Instant, + ) -> tokio::task::JoinHandle> { + tokio::task::spawn_blocking(move || { + let block_number = job.block_number; + let _span = tracing::info_span!("scheduler", %block_number).entered(); + Ok(Self::process_job_sync(job, started_at)) + }) + } + + #[tracing::instrument( + 
skip_all, + fields(l1_batch = %job_id) + )] + async fn save_result( + &self, + job_id: L1BatchNumber, + started_at: Instant, + artifacts: SchedulerArtifacts, + ) -> anyhow::Result<()> { + let blob_save_started_at = Instant::now(); + + let blob_urls = + Self::save_artifacts(job_id.0, artifacts.clone(), &*self.object_store).await; + + WITNESS_GENERATOR_METRICS.blob_save_time[&AggregationRound::Scheduler.into()] + .observe(blob_save_started_at.elapsed()); + + Self::update_database( + &self.prover_connection_pool, + job_id.0, + started_at, + blob_urls, + artifacts, + ) + .await?; + + Ok(()) + } + + fn max_attempts(&self) -> u32 { + self.config.max_attempts + } + + async fn get_job_attempts(&self, job_id: &L1BatchNumber) -> anyhow::Result { + let mut prover_storage = self + .prover_connection_pool + .connection() + .await + .context("failed to acquire DB connection for SchedulerWitnessGenerator")?; + prover_storage + .fri_witness_generator_dal() + .get_scheduler_witness_job_attempts(*job_id) + .await + .map(|attempts| attempts.unwrap_or(0)) + .context("failed to get job attempts for SchedulerWitnessGenerator") + } +} diff --git a/prover/crates/bin/witness_generator/src/scheduler.rs b/prover/crates/bin/witness_generator/src/scheduler/mod.rs similarity index 54% rename from prover/crates/bin/witness_generator/src/scheduler.rs rename to prover/crates/bin/witness_generator/src/scheduler/mod.rs index c6e43582bbdb..10230b35c4f6 100644 --- a/prover/crates/bin/witness_generator/src/scheduler.rs +++ b/prover/crates/bin/witness_generator/src/scheduler/mod.rs @@ -1,13 +1,12 @@ use std::{convert::TryInto, sync::Arc, time::Instant}; use anyhow::Context as _; -use async_trait::async_trait; use zkevm_test_harness::zkevm_circuits::recursion::{ leaf_layer::input::RecursionLeafParametersWitness, NUM_BASE_LAYER_CIRCUITS, }; use zksync_config::configs::FriWitnessGeneratorConfig; use zksync_object_store::ObjectStore; -use zksync_prover_dal::{ConnectionPool, Prover, ProverDal}; +use 
zksync_prover_dal::{ConnectionPool, Prover}; use zksync_prover_fri_types::{ circuit_definitions::{ boojum::{ @@ -21,18 +20,22 @@ use zksync_prover_fri_types::{ recursion_layer_proof_config, zkevm_circuits::scheduler::{input::SchedulerCircuitInstanceWitness, SchedulerConfig}, }, - get_current_pod_name, - keys::FriCircuitKey, - CircuitWrapper, FriProofWrapper, + FriProofWrapper, }; use zksync_prover_keystore::{keystore::Keystore, utils::get_leaf_vk_params}; -use zksync_queued_job_processor::JobProcessor; use zksync_types::{ basic_fri_types::AggregationRound, protocol_version::ProtocolSemanticVersion, L1BatchNumber, }; -use crate::{metrics::WITNESS_GENERATOR_METRICS, utils::SchedulerPartialInputWrapper}; +use crate::{ + artifacts::ArtifactsManager, metrics::WITNESS_GENERATOR_METRICS, + utils::SchedulerPartialInputWrapper, +}; + +mod artifacts; +mod job_processor; +#[derive(Clone)] pub struct SchedulerArtifacts { pub scheduler_circuit: ZkSyncRecursiveLayerCircuit, } @@ -121,143 +124,6 @@ impl SchedulerWitnessGenerator { } } -#[async_trait] -impl JobProcessor for SchedulerWitnessGenerator { - type Job = SchedulerWitnessGeneratorJob; - type JobId = L1BatchNumber; - type JobArtifacts = SchedulerArtifacts; - - const SERVICE_NAME: &'static str = "fri_scheduler_witness_generator"; - - async fn get_next_job(&self) -> anyhow::Result> { - let mut prover_connection = self.prover_connection_pool.connection().await?; - let pod_name = get_current_pod_name(); - let Some(l1_batch_number) = prover_connection - .fri_witness_generator_dal() - .get_next_scheduler_witness_job(self.protocol_version, &pod_name) - .await - else { - return Ok(None); - }; - let recursion_tip_job_id = prover_connection - .fri_prover_jobs_dal() - .get_recursion_tip_proof_job_id(l1_batch_number) - .await - .context(format!( - "could not find recursion tip proof for l1 batch {}", - l1_batch_number - ))?; - - Ok(Some(( - l1_batch_number, - prepare_job( - l1_batch_number, - recursion_tip_job_id, - 
&*self.object_store, - self.keystore.clone(), - ) - .await - .context("prepare_job()")?, - ))) - } - - async fn save_failure(&self, job_id: L1BatchNumber, _started_at: Instant, error: String) -> () { - self.prover_connection_pool - .connection() - .await - .unwrap() - .fri_witness_generator_dal() - .mark_scheduler_job_failed(&error, job_id) - .await; - } - - #[allow(clippy::async_yields_async)] - async fn process_job( - &self, - _job_id: &Self::JobId, - job: SchedulerWitnessGeneratorJob, - started_at: Instant, - ) -> tokio::task::JoinHandle> { - tokio::task::spawn_blocking(move || { - let block_number = job.block_number; - let _span = tracing::info_span!("scheduler", %block_number).entered(); - Ok(Self::process_job_sync(job, started_at)) - }) - } - - #[tracing::instrument( - skip_all, - fields(l1_batch = %job_id) - )] - async fn save_result( - &self, - job_id: L1BatchNumber, - started_at: Instant, - artifacts: SchedulerArtifacts, - ) -> anyhow::Result<()> { - let key = FriCircuitKey { - block_number: job_id, - circuit_id: 1, - sequence_number: 0, - depth: 0, - aggregation_round: AggregationRound::Scheduler, - }; - let blob_save_started_at = Instant::now(); - let scheduler_circuit_blob_url = self - .object_store - .put(key, &CircuitWrapper::Recursive(artifacts.scheduler_circuit)) - .await?; - WITNESS_GENERATOR_METRICS.blob_save_time[&AggregationRound::Scheduler.into()] - .observe(blob_save_started_at.elapsed()); - - let mut prover_connection = self.prover_connection_pool.connection().await?; - let mut transaction = prover_connection.start_transaction().await?; - let protocol_version_id = transaction - .fri_witness_generator_dal() - .protocol_version_for_l1_batch(job_id) - .await; - transaction - .fri_prover_jobs_dal() - .insert_prover_job( - job_id, - ZkSyncRecursionLayerStorageType::SchedulerCircuit as u8, - 0, - 0, - AggregationRound::Scheduler, - &scheduler_circuit_blob_url, - false, - protocol_version_id, - ) - .await; - - transaction - 
.fri_witness_generator_dal() - .mark_scheduler_job_as_successful(job_id, started_at.elapsed()) - .await; - - transaction.commit().await?; - Ok(()) - } - - fn max_attempts(&self) -> u32 { - self.config.max_attempts - } - - async fn get_job_attempts(&self, job_id: &L1BatchNumber) -> anyhow::Result { - let mut prover_storage = self - .prover_connection_pool - .connection() - .await - .context("failed to acquire DB connection for SchedulerWitnessGenerator")?; - prover_storage - .fri_witness_generator_dal() - .get_scheduler_witness_job_attempts(*job_id) - .await - .map(|attempts| attempts.unwrap_or(0)) - .context("failed to get job attempts for SchedulerWitnessGenerator") - } -} - #[tracing::instrument( skip_all, fields(l1_batch = %l1_batch_number) @@ -269,7 +135,8 @@ pub async fn prepare_job( keystore: Keystore, ) -> anyhow::Result { let started_at = Instant::now(); - let wrapper = object_store.get(recursion_tip_job_id).await?; + let wrapper = + SchedulerWitnessGenerator::get_artifacts(&recursion_tip_job_id, object_store).await?; let recursion_tip_proof = match wrapper { FriProofWrapper::Base(_) => Err(anyhow::anyhow!( "Expected only recursive proofs for scheduler l1 batch {l1_batch_number}, got Base" diff --git a/prover/crates/bin/witness_generator/src/utils.rs b/prover/crates/bin/witness_generator/src/utils.rs index f8656ac90f44..3ea2b539773f 100644 --- a/prover/crates/bin/witness_generator/src/utils.rs +++ b/prover/crates/bin/witness_generator/src/utils.rs @@ -1,21 +1,14 @@ use std::{ - collections::HashMap, io::{BufWriter, Write as _}, sync::Arc, }; use circuit_definitions::{ - circuit_definitions::{ - base_layer::ZkSyncBaseLayerCircuit, - recursion_layer::{ZkSyncRecursionLayerStorageType, ZkSyncRecursionProof}, - }, + circuit_definitions::base_layer::ZkSyncBaseLayerCircuit, encodings::memory_query::MemoryQueueStateWitnesses, }; use once_cell::sync::Lazy; -use zkevm_test_harness::{ - boojum::field::goldilocks::GoldilocksField, empty_node_proof, - 
zkevm_circuits::scheduler::aux::BaseLayerCircuitType, -}; +use zkevm_test_harness::boojum::field::goldilocks::GoldilocksField; use zksync_multivm::utils::get_used_bootloader_memory_bytes; use zksync_object_store::{serialize_using_bincode, Bucket, ObjectStore, StoredObject}; use zksync_prover_fri_types::{ @@ -248,54 +241,3 @@ pub async fn load_proofs_for_job_ids( .map(|x| x.unwrap()) .collect() } - -/// Loads all proofs for a given recursion tip's job ids. -/// Note that recursion tip may not have proofs for some specific circuits (because the batch didn't contain them). -/// In this scenario, we still need to pass a proof, but it won't be taken into account during proving. -/// For this scenario, we use an empty_proof, but any proof would suffice. -#[tracing::instrument(skip_all)] -pub async fn load_proofs_for_recursion_tip( - job_ids: Vec<(u8, u32)>, - object_store: &dyn ObjectStore, -) -> anyhow::Result> { - let job_mapping: HashMap = job_ids - .into_iter() - .map(|(leaf_circuit_id, job_id)| { - ( - ZkSyncRecursionLayerStorageType::from_leaf_u8_to_basic_u8(leaf_circuit_id), - job_id, - ) - }) - .collect(); - - let empty_proof = empty_node_proof().into_inner(); - - let mut proofs = Vec::new(); - for circuit_id in BaseLayerCircuitType::as_iter_u8() { - if job_mapping.contains_key(&circuit_id) { - let fri_proof_wrapper = object_store - .get(*job_mapping.get(&circuit_id).unwrap()) - .await - .unwrap_or_else(|_| { - panic!( - "Failed to load proof with circuit_id {} for recursion tip", - circuit_id - ) - }); - match fri_proof_wrapper { - FriProofWrapper::Base(_) => { - return Err(anyhow::anyhow!( - "Expected only recursive proofs for recursion tip, got Base for circuit {}", - circuit_id - )); - } - FriProofWrapper::Recursive(recursive_proof) => { - proofs.push(recursive_proof.into_inner()); - } - } - } else { - proofs.push(empty_proof.clone()); - } - } - Ok(proofs) -} diff --git a/yarn.lock b/yarn.lock index f400104b9c20..b70e64f148a1 100644 --- a/yarn.lock +++ 
b/yarn.lock @@ -1703,20 +1703,20 @@ chalk "4.1.2" ts-morph "^19.0.0" -"@matterlabs/hardhat-zksync-deploy@^1.3.0": - version "1.3.0" - resolved "https://registry.yarnpkg.com/@matterlabs/hardhat-zksync-deploy/-/hardhat-zksync-deploy-1.3.0.tgz#5c2b723318ddf6c4d3929ec225401864ff54557a" - integrity sha512-4UHOgOwIBC4JA3W8DE9GHqbAuBhCPAjtM+Oew1aiYYGkIsPUAMYsH35+4I2FzJsYyE6mD6ATmoS/HfZweQHTlQ== +"@matterlabs/hardhat-zksync-deploy@^1.5.0": + version "1.5.0" + resolved "https://registry.yarnpkg.com/@matterlabs/hardhat-zksync-deploy/-/hardhat-zksync-deploy-1.5.0.tgz#40cb454fb187da4bb354f3acb48762a6657fcb36" + integrity sha512-7LAgYYwoKWHeR+3CyWEvA3NKBKtt7ktcr7SX6ZPgbEYqHAdXH02vxJZGwNADtMWpyYm8h+fEQkpPIgErD4NhmA== dependencies: - "@matterlabs/hardhat-zksync-solc" "^1.0.4" - chai "^4.3.6" - chalk "4.1.2" + "@matterlabs/hardhat-zksync-solc" "^1.2.0" + chai "^4.3.4" + chalk "^4.1.2" fs-extra "^11.2.0" - glob "^10.3.10" + glob "^10.4.1" lodash "^4.17.21" - sinon "^17.0.1" + sinon "^18.0.0" sinon-chai "^3.7.0" - ts-morph "^21.0.1" + ts-morph "^22.0.0" "@matterlabs/hardhat-zksync-node@^0.0.1-beta.7": version "0.0.1" @@ -1760,7 +1760,7 @@ chalk "4.1.2" dockerode "^3.3.4" -"@matterlabs/hardhat-zksync-solc@^1.0.4", "@matterlabs/hardhat-zksync-solc@^1.0.5", "@matterlabs/hardhat-zksync-solc@^1.1.4": +"@matterlabs/hardhat-zksync-solc@^1.0.5", "@matterlabs/hardhat-zksync-solc@^1.1.4": version "1.1.4" resolved "https://registry.yarnpkg.com/@matterlabs/hardhat-zksync-solc/-/hardhat-zksync-solc-1.1.4.tgz#04a2fad6fb6b6944c64ad969080ee65b9af3f617" integrity sha512-4/usbogh9neewR2/v8Dn2OzqVblZMUuT/iH2MyPZgPRZYQlL4SlZtMvokU9UQjZT6iSoaKCbbdWESHDHSzfUjA== @@ -1794,6 +1794,23 @@ sinon-chai "^3.7.0" undici "^6.18.2" +"@matterlabs/hardhat-zksync-solc@^1.2.4": + version "1.2.4" + resolved "https://registry.yarnpkg.com/@matterlabs/hardhat-zksync-solc/-/hardhat-zksync-solc-1.2.4.tgz#b14a1dbfe751058bf2d79eab747b87c7ca7d2361" + integrity 
sha512-9Nk95kxOZ9rl26trP/pXDLw5MqFAd0CD8FMTGDvA5HBGk6CL2wg4tS0gmucYz5R4qj09KUYOO4FW4rgd/atcGg== + dependencies: + "@nomiclabs/hardhat-docker" "^2.0.2" + chai "^4.3.4" + chalk "^4.1.2" + debug "^4.3.5" + dockerode "^4.0.2" + fs-extra "^11.2.0" + proper-lockfile "^4.1.2" + semver "^7.6.2" + sinon "^18.0.0" + sinon-chai "^3.7.0" + undici "^6.18.2" + "@matterlabs/hardhat-zksync-verify@^0.2.0": version "0.2.2" resolved "https://registry.yarnpkg.com/@matterlabs/hardhat-zksync-verify/-/hardhat-zksync-verify-0.2.2.tgz#daa34bc4404096ed0f44461ee366c1cb0e5a4f2f" @@ -1824,20 +1841,20 @@ sinon "^18.0.0" sinon-chai "^3.7.0" -"@matterlabs/hardhat-zksync-vyper@^1.0.8": - version "1.0.8" - resolved "https://registry.yarnpkg.com/@matterlabs/hardhat-zksync-vyper/-/hardhat-zksync-vyper-1.0.8.tgz#d5bd496715a1e322b0bf3926b4146b4e18ab64ff" - integrity sha512-XR7rbfDuBG5/LZWYfhQTP9gD+U24hSJHDuZ9U55wgIfiQTOxPoztFwEbQNiC39vjT5MjP/Nv8/IDrlEBkaVCgw== +"@matterlabs/hardhat-zksync-vyper@^1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@matterlabs/hardhat-zksync-vyper/-/hardhat-zksync-vyper-1.1.0.tgz#b3fb304429e88a84b4abc3fe4e5a83b2f5e907bd" + integrity sha512-zDjHPeIuHRpumXiWZUbhoji4UJe09jTDRn4xnxsuVkLH7qLAm0VDFzCXYNMvEuySZSdhbSbekxJsH9Kunc5ycA== dependencies: - "@nomiclabs/hardhat-docker" "^2.0.0" - chai "^4.3.6" - chalk "4.1.2" + "@nomiclabs/hardhat-docker" "^2.0.2" + chai "^4.3.4" + chalk "^4.1.2" dockerode "^4.0.2" - fs-extra "^11.1.1" - semver "^7.5.4" - sinon "^17.0.1" + fs-extra "^11.2.0" + semver "^7.6.2" + sinon "^18.0.0" sinon-chai "^3.7.0" - undici "^5.14.0" + undici "^6.18.2" "@matterlabs/prettier-config@^1.0.3": version "1.0.3" @@ -2531,10 +2548,10 @@ mkdirp "^2.1.6" path-browserify "^1.0.1" -"@ts-morph/common@~0.22.0": - version "0.22.0" - resolved "https://registry.yarnpkg.com/@ts-morph/common/-/common-0.22.0.tgz#8951d451622a26472fbc3a227d6c3a90e687a683" - integrity sha512-HqNBuV/oIlMKdkLshXd1zKBqNQCsuPEsgQOkfFQ/eUKjRlwndXW1AjN9LVkBEIukm00gGXSRmfkl0Wv5VXLnlw== 
+"@ts-morph/common@~0.23.0": + version "0.23.0" + resolved "https://registry.yarnpkg.com/@ts-morph/common/-/common-0.23.0.tgz#bd4ddbd3f484f29476c8bd985491592ae5fc147e" + integrity sha512-m7Lllj9n/S6sOkCkRftpM7L24uvmfXQFedlW/4hENcuJH1HHm9u5EgxZb9uVjQSCGrbBWBkOGgcTxNg36r6ywA== dependencies: fast-glob "^3.3.2" minimatch "^9.0.3" @@ -4053,6 +4070,11 @@ code-block-writer@^12.0.0: resolved "https://registry.yarnpkg.com/code-block-writer/-/code-block-writer-12.0.0.tgz#4dd58946eb4234105aff7f0035977b2afdc2a770" integrity sha512-q4dMFMlXtKR3XNBHyMHt/3pwYNA69EDk00lloMOaaUMKPUXBw6lpXtbu3MMVG6/uOihGnRDOlkyqsONEUj60+w== +code-block-writer@^13.0.1: + version "13.0.2" + resolved "https://registry.yarnpkg.com/code-block-writer/-/code-block-writer-13.0.2.tgz#e1c6c3dbe5d38b4ac76fb62c4d4b2fc4bf04c9c1" + integrity sha512-XfXzAGiStXSmCIwrkdfvc7FS5Dtj8yelCtyOf2p2skCAfvLd6zu0rGzuS9NSCO3bq1JKpFZ7tbKdKlcd5occQA== + collect-v8-coverage@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/collect-v8-coverage/-/collect-v8-coverage-1.0.2.tgz#c0b29bcd33bcd0779a1344c2136051e6afd3d9e9" @@ -5925,16 +5947,17 @@ glob@8.1.0, glob@^8.0.3: minimatch "^5.0.1" once "^1.3.0" -glob@^10.3.10: - version "10.3.16" - resolved "https://registry.yarnpkg.com/glob/-/glob-10.3.16.tgz#bf6679d5d51279c8cfae4febe0d051d2a4bf4c6f" - integrity sha512-JDKXl1DiuuHJ6fVS2FXjownaavciiHNUU4mOvV/B793RLh05vZL1rcPnCSaOgv1hDT6RDlY7AB7ZUvFYAtPgAw== +glob@^10.4.1: + version "10.4.5" + resolved "https://registry.yarnpkg.com/glob/-/glob-10.4.5.tgz#f4d9f0b90ffdbab09c9d77f5f29b4262517b0956" + integrity sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg== dependencies: foreground-child "^3.1.0" jackspeak "^3.1.2" - minimatch "^9.0.1" - minipass "^7.0.4" - path-scurry "^1.11.0" + minimatch "^9.0.4" + minipass "^7.1.2" + package-json-from-dist "^1.0.0" + path-scurry "^1.11.1" glob@^5.0.15: version "5.0.15" @@ -7951,13 +7974,20 @@ minimatch@^7.4.3: dependencies: brace-expansion "^2.0.1" 
-minimatch@^9.0.1, minimatch@^9.0.3: +minimatch@^9.0.3: version "9.0.4" resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-9.0.4.tgz#8e49c731d1749cbec05050ee5145147b32496a51" integrity sha512-KqWh+VchfxcMNRAJjj2tnsSJdNbHsVgnkBhTNrW7AjVo6OvLtxw8zfT9oLw1JSohlFzJ8jCoTgaoXvJ+kHt6fw== dependencies: brace-expansion "^2.0.1" +minimatch@^9.0.4: + version "9.0.5" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-9.0.5.tgz#d74f9dd6b57d83d8e98cfb82133b03978bc929e5" + integrity sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow== + dependencies: + brace-expansion "^2.0.1" + minimatch@~3.0.4: version "3.0.8" resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.8.tgz#5e6a59bd11e2ab0de1cfb843eb2d82e546c321c1" @@ -7970,11 +8000,16 @@ minimist@^1.2.0, minimist@^1.2.5, minimist@^1.2.6, minimist@^1.2.8, minimist@~1. resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.8.tgz#c1a464e7693302e082a075cee0c057741ac4772c" integrity sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA== -"minipass@^5.0.0 || ^6.0.2 || ^7.0.0", minipass@^7.0.4: +"minipass@^5.0.0 || ^6.0.2 || ^7.0.0": version "7.1.1" resolved "https://registry.yarnpkg.com/minipass/-/minipass-7.1.1.tgz#f7f85aff59aa22f110b20e27692465cf3bf89481" integrity sha512-UZ7eQ+h8ywIRAW1hIEl2AqdwzJucU/Kp59+8kkZeSvafXhZjul247BvIJjEVFVeON6d7lM46XX1HXCduKAS8VA== +minipass@^7.1.2: + version "7.1.2" + resolved "https://registry.yarnpkg.com/minipass/-/minipass-7.1.2.tgz#93a9626ce5e5e66bd4db86849e7515e92340a707" + integrity sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw== + mkdirp-classic@^0.5.2: version "0.5.3" resolved "https://registry.yarnpkg.com/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz#fa10c9115cc6d8865be221ba47ee9bed78601113" @@ -8447,6 +8482,11 @@ p-try@^2.0.0: resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6" 
integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ== +package-json-from-dist@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/package-json-from-dist/-/package-json-from-dist-1.0.0.tgz#e501cd3094b278495eb4258d4c9f6d5ac3019f00" + integrity sha512-dATvCeZN/8wQsGywez1mzHtTlP22H8OEfPrVMLNr4/eGa+ijtLn/6M5f0dY8UKNrC2O9UCU6SSoG3qRKnt7STw== + parent-module@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/parent-module/-/parent-module-1.0.1.tgz#691d2709e78c79fae3a156622452d00762caaaa2" @@ -8512,7 +8552,7 @@ path-parse@^1.0.6, path-parse@^1.0.7: resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== -path-scurry@^1.11.0: +path-scurry@^1.11.1: version "1.11.1" resolved "https://registry.yarnpkg.com/path-scurry/-/path-scurry-1.11.1.tgz#7960a668888594a0720b12a911d1a742ab9f11d2" integrity sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA== @@ -10258,13 +10298,13 @@ ts-morph@^19.0.0: "@ts-morph/common" "~0.20.0" code-block-writer "^12.0.0" -ts-morph@^21.0.1: - version "21.0.1" - resolved "https://registry.yarnpkg.com/ts-morph/-/ts-morph-21.0.1.tgz#712302a0f6e9dbf1aa8d9cf33a4386c4b18c2006" - integrity sha512-dbDtVdEAncKctzrVZ+Nr7kHpHkv+0JDJb2MjjpBaj8bFeCkePU9rHfMklmhuLFnpeq/EJZk2IhStY6NzqgjOkg== +ts-morph@^22.0.0: + version "22.0.0" + resolved "https://registry.yarnpkg.com/ts-morph/-/ts-morph-22.0.0.tgz#5532c592fb6dddae08846f12c9ab0fc590b1d42e" + integrity sha512-M9MqFGZREyeb5fTl6gNHKZLqBQA0TjA1lea+CR48R8EBTDuWrNqW6ccC5QvjNR4s6wDumD3LTCjOFSp9iwlzaw== dependencies: - "@ts-morph/common" "~0.22.0" - code-block-writer "^12.0.0" + "@ts-morph/common" "~0.23.0" + code-block-writer "^13.0.1" ts-node@^10.1.0, ts-node@^10.7.0: version "10.9.2" diff --git a/zk_toolbox/Cargo.lock b/zk_toolbox/Cargo.lock 
index 75859021979f..fd524865d567 100644 --- a/zk_toolbox/Cargo.lock +++ b/zk_toolbox/Cargo.lock @@ -409,6 +409,18 @@ dependencies = [ "generic-array", ] +[[package]] +name = "blst" +version = "0.3.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4378725facc195f1a538864863f6de233b500a8862747e7f165078a419d5e874" +dependencies = [ + "cc", + "glob", + "threadpool", + "zeroize", +] + [[package]] name = "bs58" version = "0.5.1" @@ -631,7 +643,7 @@ dependencies = [ "hmac", "once_cell", "pbkdf2 0.12.2", - "rand", + "rand 0.8.5", "sha2", "thiserror", ] @@ -672,6 +684,7 @@ dependencies = [ "console", "ethers", "futures", + "git_version_macro", "once_cell", "serde", "serde_json", @@ -709,9 +722,10 @@ dependencies = [ "clap", "common", "ethers", - "rand", + "rand 0.8.5", "serde", "serde_json", + "serde_yaml", "strum", "thiserror", "types", @@ -857,7 +871,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ef2b4b23cddf68b89b8f8069890e8c270d54e2d5fe1b143820234805e4cb17ef" dependencies = [ "generic-array", - "rand_core", + "rand_core 0.6.4", "subtle", "zeroize", ] @@ -869,7 +883,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0dc92fb57ca44df6db8059111ab3af99a63d5d0f8375d9972e319a379c6bab76" dependencies = [ "generic-array", - "rand_core", + "rand_core 0.6.4", "subtle", "zeroize", ] @@ -903,6 +917,33 @@ dependencies = [ "cipher", ] +[[package]] +name = "curve25519-dalek" +version = "4.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97fb8b7c4503de7d6ae7b42ab72a5a59857b4c937ec27a3d4539dba95b5ab2be" +dependencies = [ + "cfg-if", + "cpufeatures", + "curve25519-dalek-derive", + "digest", + "fiat-crypto", + "rustc_version", + "subtle", + "zeroize", +] + +[[package]] +name = "curve25519-dalek-derive" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.68", +] + [[package]] name = "darling" version = "0.13.4" @@ -1120,6 +1161,31 @@ dependencies = [ "spki 0.7.3", ] +[[package]] +name = "ed25519" +version = "2.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "115531babc129696a58c64a4fef0a8bf9e9698629fb97e9e40767d235cfbcd53" +dependencies = [ + "pkcs8 0.10.2", + "signature 2.2.0", +] + +[[package]] +name = "ed25519-dalek" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4a3daa8e81a3963a60642bcc1f90a670680bd4a77535faa384e9d1c79d620871" +dependencies = [ + "curve25519-dalek", + "ed25519", + "rand_core 0.6.4", + "serde", + "sha2", + "subtle", + "zeroize", +] + [[package]] name = "either" version = "1.13.0" @@ -1143,7 +1209,7 @@ dependencies = [ "generic-array", "group 0.12.1", "pkcs8 0.9.0", - "rand_core", + "rand_core 0.6.4", "sec1 0.3.0", "subtle", "zeroize", @@ -1162,7 +1228,7 @@ dependencies = [ "generic-array", "group 0.13.0", "pkcs8 0.10.2", - "rand_core", + "rand_core 0.6.4", "sec1 0.7.3", "subtle", "zeroize", @@ -1212,7 +1278,7 @@ dependencies = [ "hex", "k256 0.13.3", "log", - "rand", + "rand 0.8.5", "rlp", "serde", "sha3", @@ -1267,7 +1333,7 @@ dependencies = [ "hex", "hmac", "pbkdf2 0.11.0", - "rand", + "rand 0.8.5", "scrypt", "serde", "serde_json", @@ -1430,7 +1496,7 @@ dependencies = [ "num_enum 0.7.2", "once_cell", "open-fastrlp", - "rand", + "rand 0.8.5", "rlp", "serde", "serde_json", @@ -1535,7 +1601,7 @@ dependencies = [ "elliptic-curve 0.13.8", "eth-keystore", "ethers-core", - "rand", + "rand 0.8.5", "sha2", "thiserror", "tracing", @@ -1606,7 +1672,7 @@ version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d013fc25338cc558c5c2cfbad646908fb23591e2404481826742b651c9af7160" dependencies = [ - "rand_core", + "rand_core 0.6.4", "subtle", 
] @@ -1616,10 +1682,28 @@ version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ded41244b729663b1e574f1b4fb731469f69f79c17667b5d776b16cda0479449" dependencies = [ - "rand_core", + "rand_core 0.6.4", "subtle", ] +[[package]] +name = "ff_ce" +version = "0.14.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b538e4231443a5b9c507caee3356f016d832cf7393d2d90f03ea3180d4e3fbc" +dependencies = [ + "byteorder", + "hex", + "rand 0.4.6", + "serde", +] + +[[package]] +name = "fiat-crypto" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "28dea519a9695b9977216879a3ebfddf92f1c08c05d984f8996aecd6ecdc811d" + [[package]] name = "findshlibs" version = "0.10.2" @@ -1639,7 +1723,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "835c052cb0c08c1acf6ffd71c022172e18723949c8282f2b9f27efbc51e64534" dependencies = [ "byteorder", - "rand", + "rand 0.8.5", "rustc-hex", "static_assertions", ] @@ -1711,6 +1795,12 @@ dependencies = [ "winapi", ] +[[package]] +name = "fuchsia-cprng" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba" + [[package]] name = "funty" version = "2.0.0" @@ -1874,6 +1964,13 @@ version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "40ecd4077b5ae9fd2e9e169b102c6c330d0605168eb0e8bf79952b256dbefffd" +[[package]] +name = "git_version_macro" +version = "0.1.0" +dependencies = [ + "chrono", +] + [[package]] name = "glob" version = "0.3.1" @@ -1899,7 +1996,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5dfbfb3a6cfbd390d5c9564ab283a0349b9b9fcd46a706c1eb10e0db70bfbac7" dependencies = [ "ff 0.12.1", - "rand_core", + "rand_core 0.6.4", "subtle", ] @@ -1910,7 +2007,7 @@ source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "f0f9ef7462f7c099f518d754361858f86d8a07af53ba9af0fe635bbccb151a63" dependencies = [ "ff 0.13.0", - "rand_core", + "rand_core 0.6.4", "subtle", ] @@ -2740,7 +2837,7 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3ffa00dec017b5b1a8b7cf5e2c008bfda1aa7e0697ac1508b491fdf2622fb4d8" dependencies = [ - "rand", + "rand 0.8.5", ] [[package]] @@ -2823,7 +2920,7 @@ dependencies = [ "num-integer", "num-iter", "num-traits", - "rand", + "rand 0.8.5", "smallvec", "zeroize", ] @@ -3119,7 +3216,7 @@ dependencies = [ "once_cell", "opentelemetry", "percent-encoding", - "rand", + "rand 0.8.5", "serde_json", "thiserror", "tokio", @@ -3220,7 +3317,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7676374caaee8a325c9e7a2ae557f216c5563a171d6997b0ef8a65af35147700" dependencies = [ "base64ct", - "rand_core", + "rand_core 0.6.4", "subtle", ] @@ -3319,7 +3416,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "48e4cc64c2ad9ebe670cb8fd69dd50ae301650392e81c05f9bfcb2d5bdbc24b0" dependencies = [ "phf_shared 0.11.2", - "rand", + "rand 0.8.5", ] [[package]] @@ -3530,7 +3627,7 @@ dependencies = [ "bitflags 2.6.0", "lazy_static", "num-traits", - "rand", + "rand 0.8.5", "rand_chacha", "rand_xorshift", "regex-syntax 0.8.4", @@ -3680,6 +3777,19 @@ version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09" +[[package]] +name = "rand" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "552840b97013b1a26992c11eac34bdd778e464601a4c2054b5f0bff7c6761293" +dependencies = [ + "fuchsia-cprng", + "libc", + "rand_core 0.3.1", + "rdrand", + "winapi", +] + [[package]] name = "rand" version = "0.8.5" @@ -3688,7 +3798,7 @@ checksum = 
"34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" dependencies = [ "libc", "rand_chacha", - "rand_core", + "rand_core 0.6.4", ] [[package]] @@ -3698,9 +3808,24 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" dependencies = [ "ppv-lite86", - "rand_core", + "rand_core 0.6.4", +] + +[[package]] +name = "rand_core" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b" +dependencies = [ + "rand_core 0.4.2", ] +[[package]] +name = "rand_core" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc" + [[package]] name = "rand_core" version = "0.6.4" @@ -3716,7 +3841,7 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d25bf25ec5ae4a3f1b92f929810509a2f53d7dca2f50b794ff57e3face536c8f" dependencies = [ - "rand_core", + "rand_core 0.6.4", ] [[package]] @@ -3739,6 +3864,15 @@ dependencies = [ "crossbeam-utils", ] +[[package]] +name = "rdrand" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2" +dependencies = [ + "rand_core 0.3.1", +] + [[package]] name = "redox_syscall" version = "0.4.1" @@ -3995,7 +4129,7 @@ dependencies = [ "num-traits", "pkcs1", "pkcs8 0.10.2", - "rand_core", + "rand_core 0.6.4", "signature 2.2.0", "spki 0.7.3", "subtle", @@ -4349,7 +4483,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "901f761681f97db3db836ef9e094acdd8756c40215326c194201941947164ef1" dependencies = [ "once_cell", - "rand", + "rand 0.8.5", "sentry-types", "serde", "serde_json", @@ -4396,7 +4530,7 @@ checksum = 
"da956cca56e0101998c8688bc65ce1a96f00673a0e58e663664023d4c7911e82" dependencies = [ "debugid", "hex", - "rand", + "rand 0.8.5", "serde", "serde_json", "thiserror", @@ -4586,7 +4720,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "74233d3b3b2f6d4b006dc19dee745e73e2a6bfb6f93607cd3b02bd5b00797d7c" dependencies = [ "digest", - "rand_core", + "rand_core 0.6.4", ] [[package]] @@ -4596,7 +4730,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" dependencies = [ "digest", - "rand_core", + "rand_core 0.6.4", ] [[package]] @@ -4840,7 +4974,7 @@ dependencies = [ "memchr", "once_cell", "percent-encoding", - "rand", + "rand 0.8.5", "rsa", "serde", "sha1", @@ -4879,7 +5013,7 @@ dependencies = [ "md-5", "memchr", "once_cell", - "rand", + "rand 0.8.5", "serde", "serde_json", "sha2", @@ -5145,6 +5279,15 @@ dependencies = [ "once_cell", ] +[[package]] +name = "threadpool" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d050e60b33d41c19108b32cea32164033a9013fe3b46cbd4457559bfbf77afaa" +dependencies = [ + "num_cpus", +] + [[package]] name = "time" version = "0.3.36" @@ -5397,7 +5540,7 @@ dependencies = [ "indexmap 1.9.3", "pin-project", "pin-project-lite", - "rand", + "rand 0.8.5", "slab", "tokio", "tokio-util", @@ -5540,7 +5683,7 @@ dependencies = [ "http 0.2.12", "httparse", "log", - "rand", + "rand 0.8.5", "rustls 0.21.12", "sha1", "thiserror", @@ -6277,6 +6420,7 @@ dependencies = [ "ethers", "human-panic", "lazy_static", + "secrecy", "serde", "serde_json", "serde_yaml", @@ -6290,6 +6434,8 @@ dependencies = [ "xshell", "zksync_basic_types", "zksync_config", + "zksync_consensus_crypto", + "zksync_consensus_roles", ] [[package]] @@ -6297,6 +6443,7 @@ name = "zk_supervisor" version = "0.1.0" dependencies = [ "anyhow", + "chrono", "clap", "clap-markdown", "common", @@ -6312,6 +6459,7 @@ 
dependencies = [ "types", "url", "xshell", + "zksync_basic_types", ] [[package]] @@ -6349,14 +6497,14 @@ dependencies = [ [[package]] name = "zksync_concurrency" -version = "0.1.0-rc.12" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a49ad68bfaf6fb8542c68894b68b28be31514786549855aaa8a46b36defbb100" +checksum = "c1c8cf6c689ab5922b52d81b775cd2d9cffbfc8fb8da65985e11b06546dfb3bf" dependencies = [ "anyhow", "once_cell", "pin-project", - "rand", + "rand 0.8.5", "sha3", "thiserror", "time", @@ -6371,7 +6519,7 @@ name = "zksync_config" version = "0.1.0" dependencies = [ "anyhow", - "rand", + "rand 0.8.5", "secrecy", "serde", "url", @@ -6381,14 +6529,59 @@ dependencies = [ "zksync_crypto_primitives", ] +[[package]] +name = "zksync_consensus_crypto" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc7baced4e811015038322dad10239f2d631d9e339e8d6b7b6e6b146bee30f41" +dependencies = [ + "anyhow", + "blst", + "ed25519-dalek", + "elliptic-curve 0.13.8", + "ff_ce", + "hex", + "k256 0.13.3", + "num-bigint", + "num-traits", + "rand 0.4.6", + "rand 0.8.5", + "sha3", + "thiserror", + "tracing", + "zeroize", +] + +[[package]] +name = "zksync_consensus_roles" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0aab4ddf62f6001903c5fe9f65afb1bdc42464928c9d1c6ce52e4d7e9944f5dc" +dependencies = [ + "anyhow", + "bit-vec", + "hex", + "num-bigint", + "prost 0.12.6", + "rand 0.8.5", + "serde", + "thiserror", + "tracing", + "zksync_concurrency", + "zksync_consensus_crypto", + "zksync_consensus_utils", + "zksync_protobuf", + "zksync_protobuf_build", +] + [[package]] name = "zksync_consensus_utils" -version = "0.1.0-rc.12" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d624f55e2449f43b2c85588b5dd2a28b3c5ea629effc89df76e3254f8d9d2fb" +checksum = 
"29e69dffc0fbc7c096548c997f5ca157a490b34b3d49fd524fa3d51840f7fb22" dependencies = [ "anyhow", - "rand", + "rand 0.8.5", "thiserror", "zksync_concurrency", ] @@ -6413,7 +6606,7 @@ dependencies = [ "anyhow", "blake2", "hex", - "rand", + "rand 0.8.5", "secp256k1", "serde", "serde_json", @@ -6434,9 +6627,9 @@ dependencies = [ [[package]] name = "zksync_protobuf" -version = "0.1.0-rc.12" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d26fb2beb3aeafb5e9babf1acf6494662cc7157b893fa248bd151494f931d07f" +checksum = "df5467dfe2f845ca1fd6ceec623bbd32187589793d3c4023dcd2f5172369d198" dependencies = [ "anyhow", "bit-vec", @@ -6444,7 +6637,7 @@ dependencies = [ "prost 0.12.6", "prost-reflect", "quick-protobuf", - "rand", + "rand 0.8.5", "serde", "serde_json", "serde_yaml", @@ -6455,9 +6648,9 @@ dependencies = [ [[package]] name = "zksync_protobuf_build" -version = "0.1.0-rc.12" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "58e86c198e056d921b4f3f1d2755c23d090e942b5a70b03bcb7e7c02445aa491" +checksum = "33d35280660b11be2a4ebdf531184eb729acebfdc3368d27176ec104f8bf9c5f" dependencies = [ "anyhow", "heck", @@ -6477,7 +6670,7 @@ dependencies = [ "anyhow", "hex", "prost 0.12.6", - "rand", + "rand 0.8.5", "secrecy", "serde_json", "serde_yaml", @@ -6539,7 +6732,6 @@ dependencies = [ "bigdecimal", "futures", "hex", - "itertools 0.10.5", "num", "once_cell", "reqwest 0.12.5", diff --git a/zk_toolbox/Cargo.toml b/zk_toolbox/Cargo.toml index e1ad63136af1..d8b84f93adde 100644 --- a/zk_toolbox/Cargo.toml +++ b/zk_toolbox/Cargo.toml @@ -5,6 +5,7 @@ members = [ "crates/types", "crates/zk_inception", "crates/zk_supervisor", + "crates/git_version_macro", ] resolver = "2" @@ -25,12 +26,15 @@ keywords = ["zk", "cryptography", "blockchain", "ZKStack", "ZKsync"] common = { path = "crates/common" } config = { path = "crates/config" } types = { path = "crates/types" } +git_version_macro = { 
path = "crates/git_version_macro" } # ZkSync deps zksync_config = { path = "../core/lib/config" } zksync_protobuf_config = { path = "../core/lib/protobuf_config" } zksync_basic_types = { path = "../core/lib/basic_types" } -zksync_protobuf = "=0.1.0-rc.12" +zksync_consensus_roles = "=0.1.1" +zksync_consensus_crypto = "=0.1.1" +zksync_protobuf = "=0.1.1" # External dependencies anyhow = "1.0.82" @@ -38,6 +42,7 @@ clap = { version = "4.4", features = ["derive", "wrap_help", "string"] } slugify-rs = "0.0.3" cliclack = "0.2.5" console = "0.15.8" +chrono = "0.4.38" ethers = "2.0" futures = "0.3.30" human-panic = "2.0" @@ -59,3 +64,4 @@ toml = "0.8.12" url = { version = "2.5.0", features = ["serde"] } xshell = "0.2.6" clap-markdown = "0.1.4" +secrecy = "0.8.0" diff --git a/zk_toolbox/crates/common/Cargo.toml b/zk_toolbox/crates/common/Cargo.toml index 1f6fb6fd9fe1..5fdf481bea6f 100644 --- a/zk_toolbox/crates/common/Cargo.toml +++ b/zk_toolbox/crates/common/Cargo.toml @@ -29,3 +29,4 @@ url.workspace = true xshell.workspace = true thiserror.workspace = true strum.workspace = true +git_version_macro.workspace = true diff --git a/zk_toolbox/crates/common/src/ethereum.rs b/zk_toolbox/crates/common/src/ethereum.rs index 93cc524568c3..4f000ed0fd53 100644 --- a/zk_toolbox/crates/common/src/ethereum.rs +++ b/zk_toolbox/crates/common/src/ethereum.rs @@ -10,7 +10,7 @@ use ethers::{ }; use types::TokenInfo; -use crate::{logger, wallets::Wallet}; +use crate::wallets::Wallet; pub fn create_ethers_client( private_key: H256, @@ -89,35 +89,30 @@ pub async fn mint_token( chain_id: u64, amount: u128, ) -> anyhow::Result<()> { - let client = Arc::new(create_ethers_client( - main_wallet.private_key.unwrap(), - l1_rpc, - Some(chain_id), - )?); + let client = Arc::new( + create_ethers_client(main_wallet.private_key.unwrap(), l1_rpc, Some(chain_id))? 
+ .nonce_manager(main_wallet.address), + ); let contract = TokenContract::new(token_address, client); - // contract + + let mut pending_calls = vec![]; for address in addresses { - if let Err(err) = mint(&contract, address, amount).await { - logger::warn(format!("Failed to mint {err}")) - } + pending_calls.push(contract.mint(address, amount.into())); } - Ok(()) -} + let mut pending_txs = vec![]; + for call in &pending_calls { + pending_txs.push( + call.send() + .await? + // It's safe to set such low number of confirmations and low interval for localhost + .confirmations(3) + .interval(Duration::from_millis(30)), + ); + } + + futures::future::join_all(pending_txs).await; -async fn mint( - contract: &TokenContract, - address: Address, - amount: u128, -) -> anyhow::Result<()> { - contract - .mint(address, amount.into()) - .send() - .await? - // It's safe to set such low number of confirmations and low interval for localhost - .confirmations(1) - .interval(Duration::from_millis(30)) - .await?; Ok(()) } diff --git a/zk_toolbox/crates/common/src/external_node.rs b/zk_toolbox/crates/common/src/external_node.rs index 09115f92d5fb..8a5cbc3cd14c 100644 --- a/zk_toolbox/crates/common/src/external_node.rs +++ b/zk_toolbox/crates/common/src/external_node.rs @@ -9,6 +9,7 @@ pub fn run( config_path: &str, secrets_path: &str, en_config_path: &str, + consensus_args: Vec, additional_args: Vec, ) -> anyhow::Result<()> { let _dir = shell.push_dir(code_path); @@ -22,6 +23,7 @@ pub fn run( --external-node-config-path {en_config_path} " ) + .args(consensus_args) .args(additional_args) .env_remove("RUSTUP_TOOLCHAIN"), ) diff --git a/zk_toolbox/crates/common/src/forge.rs b/zk_toolbox/crates/common/src/forge.rs index f00921a0bf20..7fd5399cc66b 100644 --- a/zk_toolbox/crates/common/src/forge.rs +++ b/zk_toolbox/crates/common/src/forge.rs @@ -96,6 +96,12 @@ impl ForgeScript { self } + /// Add the sender address to the forge script command. 
+ pub fn with_sender(mut self, address: String) -> Self { + self.args.add_arg(ForgeScriptArg::Sender { address }); + self + } + /// Add the rpc-url flag to the forge script command. pub fn with_rpc_url(mut self, rpc_url: String) -> Self { self.args.add_arg(ForgeScriptArg::RpcUrl { url: rpc_url }); @@ -135,6 +141,7 @@ impl ForgeScript { }); self } + // Do not start the script if balance is not enough pub fn private_key(&self) -> Option { self.args.args.iter().find_map(|a| { @@ -244,6 +251,10 @@ pub enum ForgeScriptArg { }, Verify, Resume, + #[strum(to_string = "sender={address}")] + Sender { + address: String, + }, } /// ForgeScriptArgs is a set of arguments that can be passed to the forge script command. diff --git a/zk_toolbox/crates/common/src/lib.rs b/zk_toolbox/crates/common/src/lib.rs index 7be4af740700..c23ef9202261 100644 --- a/zk_toolbox/crates/common/src/lib.rs +++ b/zk_toolbox/crates/common/src/lib.rs @@ -12,7 +12,9 @@ pub mod files; pub mod forge; pub mod git; pub mod server; +pub mod version; pub mod wallets; +pub mod yaml; pub use prerequisites::{ check_general_prerequisites, check_prerequisites, GCLOUD_PREREQUISITE, GPU_PREREQUISITES, diff --git a/zk_toolbox/crates/common/src/server.rs b/zk_toolbox/crates/common/src/server.rs index c65c8d4c13e2..40da1cf80325 100644 --- a/zk_toolbox/crates/common/src/server.rs +++ b/zk_toolbox/crates/common/src/server.rs @@ -9,6 +9,7 @@ use crate::cmd::Cmd; pub struct Server { components: Option>, code_path: PathBuf, + uring: bool, } /// Possible server modes. @@ -20,10 +21,11 @@ pub enum ServerMode { impl Server { /// Creates a new instance of the server. 
- pub fn new(components: Option>, code_path: PathBuf) -> Self { + pub fn new(components: Option>, code_path: PathBuf, uring: bool) -> Self { Self { components, code_path, + uring, } } @@ -52,10 +54,12 @@ impl Server { additional_args.push("--genesis".to_string()); } + let uring = self.uring.then_some("--features=rocksdb/io-uring"); + let mut cmd = Cmd::new( cmd!( shell, - "cargo run --release --bin zksync_server -- + "cargo run --release --bin zksync_server {uring...} -- --genesis-path {genesis_path} --wallets-path {wallets_path} --config-path {general_path} diff --git a/zk_toolbox/crates/common/src/version.rs b/zk_toolbox/crates/common/src/version.rs new file mode 100644 index 000000000000..43be7a07b7ee --- /dev/null +++ b/zk_toolbox/crates/common/src/version.rs @@ -0,0 +1,24 @@ +const GIT_VERSION: &str = git_version_macro::build_git_revision!(); +const GIT_BRANCH: &str = git_version_macro::build_git_branch!(); +const GIT_SUBMODULES: &[(&str, &str)] = git_version_macro::build_git_submodules!(); +const BUILD_TIMESTAMP: &str = git_version_macro::build_timestamp!(); + +/// Returns a multi-line version message that includes: +/// - provided crate version +/// - git revision +/// - git branch +/// - git submodules +/// - build timestamp +pub fn version_message(crate_version: &str) -> String { + let mut version = format!("v{}-{}\n", crate_version, GIT_VERSION); + version.push_str(&format!("Branch: {}\n", GIT_BRANCH)); + #[allow(clippy::const_is_empty)] // Proc-macro generated. 
+ if !GIT_SUBMODULES.is_empty() { + version.push_str("Submodules:\n"); + for (name, rev) in GIT_SUBMODULES { + version.push_str(&format!(" - {}: {}\n", name, rev)); + } + } + version.push_str(&format!("Build timestamp: {}\n", BUILD_TIMESTAMP)); + version +} diff --git a/zk_toolbox/crates/common/src/yaml.rs b/zk_toolbox/crates/common/src/yaml.rs new file mode 100644 index 000000000000..83b59ad67642 --- /dev/null +++ b/zk_toolbox/crates/common/src/yaml.rs @@ -0,0 +1,475 @@ +use anyhow::Context; + +use crate::logger; + +pub(super) const MSG_INVALID_KEY_TYPE_ERR: &str = "Invalid key type"; + +/// Holds the differences between two YAML configurations. +#[derive(Default)] +pub struct ConfigDiff { + /// Fields that have different values between the two configurations + /// This contains the new values + pub differing_values: serde_yaml::Mapping, + + /// Fields that are present in the new configuration but not in the old one. + pub new_fields: serde_yaml::Mapping, +} + +impl ConfigDiff { + pub fn print(&self, msg: &str, is_warning: bool) { + if self.new_fields.is_empty() { + return; + } + + if is_warning { + logger::warn(msg); + logger::warn(logger::object_to_string(&self.new_fields)); + } else { + logger::info(msg); + logger::info(logger::object_to_string(&self.new_fields)); + } + } +} + +fn merge_yaml_internal( + a: &mut serde_yaml::Value, + b: serde_yaml::Value, + current_key: String, + diff: &mut ConfigDiff, + override_values: bool, +) -> anyhow::Result<()> { + match (a, b) { + (serde_yaml::Value::Mapping(a), serde_yaml::Value::Mapping(b)) => { + for (key, value) in b { + let k = key.as_str().context(MSG_INVALID_KEY_TYPE_ERR)?.to_string(); + let current_key = if current_key.is_empty() { + k.clone() + } else { + format!("{}.{}", current_key, k) + }; + + if a.contains_key(&key) { + let a_value = a.get_mut(&key).unwrap(); + if value.is_null() && override_values { + a.remove(&key); + diff.differing_values + .insert(current_key.into(), serde_yaml::Value::Null); + } else { + 
merge_yaml_internal(a_value, value, current_key, diff, override_values)?; + } + } else if !value.is_null() { + a.insert(key.clone(), value.clone()); + diff.new_fields.insert(current_key.into(), value); + } else if override_values { + diff.differing_values + .insert(current_key.into(), serde_yaml::Value::Null); + } + } + } + (a, b) => { + if a != &b { + diff.differing_values.insert(current_key.into(), b.clone()); + if override_values { + *a = b; + } + } + } + } + Ok(()) +} + +pub fn merge_yaml( + a: &mut serde_yaml::Value, + b: serde_yaml::Value, + override_values: bool, +) -> anyhow::Result { + let mut diff = ConfigDiff::default(); + merge_yaml_internal(a, b, "".into(), &mut diff, override_values)?; + Ok(diff) +} + +#[cfg(test)] +mod tests { + #[test] + fn test_merge_yaml_both_are_equal_returns_no_diff() { + let mut a = serde_yaml::from_str( + r#" + key1: value1 + key2: value2 + key3: + key4: value4 + "#, + ) + .unwrap(); + let b: serde_yaml::Value = serde_yaml::from_str( + r#" + key1: value1 + key2: value2 + key3: + key4: value4 + "#, + ) + .unwrap(); + let expected: serde_yaml::Value = serde_yaml::from_str( + r#" + key1: value1 + key2: value2 + key3: + key4: value4 + "#, + ) + .unwrap(); + let diff = super::merge_yaml(&mut a, b, false).unwrap(); + assert!(diff.differing_values.is_empty()); + assert!(diff.new_fields.is_empty()); + assert_eq!(a, expected); + } + + #[test] + fn test_merge_yaml_b_has_extra_field_returns_diff() { + let mut a = serde_yaml::from_str( + r#" + key1: value1 + key2: value2 + key3: + key4: value4 + "#, + ) + .unwrap(); + let b: serde_yaml::Value = serde_yaml::from_str( + r#" + key1: value1 + key2: value2 + key3: + key4: value4 + key5: value5 + "#, + ) + .unwrap(); + + let expected: serde_yaml::Value = serde_yaml::from_str( + r#" + key1: value1 + key2: value2 + key3: + key4: value4 + key5: value5 + "#, + ) + .unwrap(); + + let diff = super::merge_yaml(&mut a, b.clone(), false).unwrap(); + assert!(diff.differing_values.is_empty()); + 
assert_eq!(diff.new_fields.len(), 1); + assert_eq!( + diff.new_fields.get::("key5".into()).unwrap(), + b.clone().get("key5").unwrap() + ); + assert_eq!(a, expected); + } + + #[test] + fn test_merge_yaml_a_has_extra_field_no_diff() { + let mut a = serde_yaml::from_str( + r#" + key1: value1 + key2: value2 + key3: + key4: value4 + key5: value5 + "#, + ) + .unwrap(); + let b: serde_yaml::Value = serde_yaml::from_str( + r#" + key1: value1 + key2: value2 + key3: + key4: value4 + "#, + ) + .unwrap(); + + let expected: serde_yaml::Value = serde_yaml::from_str( + r#" + key1: value1 + key2: value2 + key3: + key4: value4 + key5: value5 + "#, + ) + .unwrap(); + + let diff = super::merge_yaml(&mut a, b, false).unwrap(); + assert!(diff.differing_values.is_empty()); + assert!(diff.new_fields.is_empty()); + assert_eq!(a, expected); + } + + #[test] + fn test_merge_yaml_a_has_extra_field_and_b_has_extra_field_returns_diff() { + let mut a = serde_yaml::from_str( + r#" + key1: value1 + key2: value2 + key3: + key4: value4 + key5: value5 + "#, + ) + .unwrap(); + let b: serde_yaml::Value = serde_yaml::from_str( + r#" + key1: value1 + key2: value2 + key3: + key4: value4 + key6: value6 + "#, + ) + .unwrap(); + + let expected: serde_yaml::Value = serde_yaml::from_str( + r#" + key1: value1 + key2: value2 + key3: + key4: value4 + key5: value5 + key6: value6 + "#, + ) + .unwrap(); + + let diff = super::merge_yaml(&mut a, b.clone(), false).unwrap(); + assert_eq!(diff.differing_values.len(), 0); + assert_eq!(diff.new_fields.len(), 1); + assert_eq!( + diff.new_fields.get::("key6".into()).unwrap(), + b.clone().get("key6").unwrap() + ); + assert_eq!(a, expected); + } + + #[test] + fn test_merge_yaml_a_has_different_value_returns_diff() { + let mut a = serde_yaml::from_str( + r#" + key1: value1 + key2: value2 + key3: + key4: value4 + "#, + ) + .unwrap(); + let b: serde_yaml::Value = serde_yaml::from_str( + r#" + key1: value1 + key2: value2 + key3: + key4: value5 + "#, + ) + .unwrap(); + + let 
expected: serde_yaml::Value = serde_yaml::from_str( + r#" + key1: value1 + key2: value2 + key3: + key4: value4 + "#, + ) + .unwrap(); + + let diff = super::merge_yaml(&mut a, b.clone(), false).unwrap(); + assert_eq!(diff.differing_values.len(), 1); + assert_eq!( + diff.differing_values + .get::("key3.key4".into()) + .unwrap(), + b.get("key3").unwrap().get("key4").unwrap() + ); + assert_eq!(a, expected); + } + + #[test] + fn test_merge_yaml_a_has_different_value_and_b_has_extra_field_returns_diff() { + let mut a = serde_yaml::from_str( + r#" + key1: value1 + key2: value2 + key3: + key4: value4 + "#, + ) + .unwrap(); + let b: serde_yaml::Value = serde_yaml::from_str( + r#" + key1: value1 + key2: value2 + key3: + key4: value5 + key5: value5 + "#, + ) + .unwrap(); + + let expected: serde_yaml::Value = serde_yaml::from_str( + r#" + key1: value1 + key2: value2 + key3: + key4: value4 + key5: value5 + "#, + ) + .unwrap(); + + let diff = super::merge_yaml(&mut a, b.clone(), false).unwrap(); + assert_eq!(diff.differing_values.len(), 1); + assert_eq!( + diff.differing_values + .get::("key3.key4".into()) + .unwrap(), + b.get("key3").unwrap().get("key4").unwrap() + ); + assert_eq!(diff.new_fields.len(), 1); + assert_eq!( + diff.new_fields.get::("key5".into()).unwrap(), + b.get("key5").unwrap() + ); + assert_eq!(a, expected); + } + + #[test] + fn test_merge_yaml_override_values() { + let mut a = serde_yaml::from_str( + r#" + key1: value1 + key2: value2 + key3: + key4: value4 + "#, + ) + .unwrap(); + let b: serde_yaml::Value = serde_yaml::from_str( + r#" + key1: value1 + key2: value2 + key3: + key4: value5 + "#, + ) + .unwrap(); + + let expected: serde_yaml::Value = serde_yaml::from_str( + r#" + key1: value1 + key2: value2 + key3: + key4: value5 + "#, + ) + .unwrap(); + + let diff = super::merge_yaml(&mut a, b.clone(), true).unwrap(); + assert_eq!(diff.differing_values.len(), 1); + assert_eq!( + diff.differing_values + .get::("key3.key4".into()) + .unwrap(), + 
b.get("key3").unwrap().get("key4").unwrap() + ); + assert_eq!(a, expected); + } + + #[test] + fn test_merge_yaml_override_values_with_extra_field() { + let mut a = serde_yaml::from_str( + r#" + key1: value1 + key2: value2 + key3: + key4: value4 + "#, + ) + .unwrap(); + let b: serde_yaml::Value = serde_yaml::from_str( + r#" + key1: value1 + key2: value2 + key3: + key4: value5 + key5: value5 + "#, + ) + .unwrap(); + + let expected: serde_yaml::Value = serde_yaml::from_str( + r#" + key1: value1 + key2: value2 + key3: + key4: value5 + key5: value5 + "#, + ) + .unwrap(); + + let diff = super::merge_yaml(&mut a, b.clone(), true).unwrap(); + assert_eq!(diff.differing_values.len(), 1); + assert_eq!( + diff.differing_values + .get::("key3.key4".into()) + .unwrap(), + b.get("key3").unwrap().get("key4").unwrap() + ); + assert_eq!(diff.new_fields.len(), 1); + assert_eq!( + diff.new_fields.get::("key5".into()).unwrap(), + b.get("key5").unwrap() + ); + assert_eq!(a, expected); + } + + #[test] + fn test_override_values_with_null() { + let mut a = serde_yaml::from_str( + r#" + key1: value1 + key2: value2 + key3: + key4: value4 + "#, + ) + .unwrap(); + let b: serde_yaml::Value = serde_yaml::from_str( + r#" + key1: value1 + key2: value2 + key3: null + "#, + ) + .unwrap(); + + let expected: serde_yaml::Value = serde_yaml::from_str( + r#" + key1: value1 + key2: value2 + "#, + ) + .unwrap(); + + let diff = super::merge_yaml(&mut a, b.clone(), true).unwrap(); + assert_eq!(diff.differing_values.len(), 1); + assert_eq!( + diff.differing_values + .get::("key3".into()) + .unwrap(), + b.get("key3").unwrap() + ); + assert_eq!(a, expected); + } +} diff --git a/zk_toolbox/crates/config/Cargo.toml b/zk_toolbox/crates/config/Cargo.toml index 5f1419c7ce97..9320beffef22 100644 --- a/zk_toolbox/crates/config/Cargo.toml +++ b/zk_toolbox/crates/config/Cargo.toml @@ -18,6 +18,7 @@ ethers.workspace = true rand.workspace = true serde.workspace = true serde_json.workspace = true +serde_yaml.workspace = 
true strum.workspace = true thiserror.workspace = true types.workspace = true diff --git a/zk_toolbox/crates/config/src/chain.rs b/zk_toolbox/crates/config/src/chain.rs index 54ed1f7d3f35..affc8ccc770c 100644 --- a/zk_toolbox/crates/config/src/chain.rs +++ b/zk_toolbox/crates/config/src/chain.rs @@ -38,6 +38,8 @@ pub struct ChainConfigInternal { pub l1_batch_commit_data_generator_mode: L1BatchCommitmentMode, pub base_token: BaseToken, pub wallet_creation: WalletCreation, + #[serde(skip_serializing_if = "Option::is_none")] + pub legacy_bridge: Option, } /// Chain configuration file. This file is created in the chain @@ -58,6 +60,7 @@ pub struct ChainConfig { pub base_token: BaseToken, pub wallet_creation: WalletCreation, pub shell: OnceCell, + pub legacy_bridge: Option, } impl Serialize for ChainConfig { @@ -153,6 +156,7 @@ impl ChainConfig { l1_batch_commit_data_generator_mode: self.l1_batch_commit_data_generator_mode, base_token: self.base_token.clone(), wallet_creation: self.wallet_creation, + legacy_bridge: self.legacy_bridge, } } } diff --git a/zk_toolbox/crates/config/src/consensus_config.rs b/zk_toolbox/crates/config/src/consensus_config.rs new file mode 100644 index 000000000000..0bb4750d1fc0 --- /dev/null +++ b/zk_toolbox/crates/config/src/consensus_config.rs @@ -0,0 +1,18 @@ +use zksync_config::configs::consensus::ConsensusConfig; +use zksync_protobuf_config::encode_yaml_repr; + +use crate::{ + traits::{FileConfigWithDefaultName, SaveConfig}, + CONSENSUS_CONFIG_FILE, +}; + +impl FileConfigWithDefaultName for ConsensusConfig { + const FILE_NAME: &'static str = CONSENSUS_CONFIG_FILE; +} + +impl SaveConfig for ConsensusConfig { + fn save(&self, shell: &xshell::Shell, path: impl AsRef) -> anyhow::Result<()> { + let bytes = encode_yaml_repr::(self)?; + Ok(shell.write_file(path.as_ref(), bytes)?) 
+ } +} diff --git a/zk_toolbox/crates/config/src/consensus_secrets.rs b/zk_toolbox/crates/config/src/consensus_secrets.rs new file mode 100644 index 000000000000..0e5c4592d2fc --- /dev/null +++ b/zk_toolbox/crates/config/src/consensus_secrets.rs @@ -0,0 +1,14 @@ +use std::path::Path; + +use xshell::Shell; +use zksync_config::configs::consensus::ConsensusSecrets; +use zksync_protobuf_config::decode_yaml_repr; + +use crate::traits::ReadConfig; + +impl ReadConfig for ConsensusSecrets { + fn read(shell: &Shell, path: impl AsRef) -> anyhow::Result { + let path = shell.current_dir().join(path); + decode_yaml_repr::(&path, false) + } +} diff --git a/zk_toolbox/crates/config/src/consts.rs b/zk_toolbox/crates/config/src/consts.rs index 1e1c0998f00e..80b204cc6191 100644 --- a/zk_toolbox/crates/config/src/consts.rs +++ b/zk_toolbox/crates/config/src/consts.rs @@ -11,6 +11,8 @@ pub const GENESIS_FILE: &str = "genesis.yaml"; // Name of external node specific config pub const EN_CONFIG_FILE: &str = "external_node.yaml"; +// Name of consensus config +pub const CONSENSUS_CONFIG_FILE: &str = "consensus_config.yaml"; pub(crate) const ERC20_CONFIGS_FILE: &str = "erc20.yaml"; /// Name of the initial deployments config file pub(crate) const INITIAL_DEPLOYMENT_FILE: &str = "initial_deployments.yaml"; @@ -60,6 +62,8 @@ pub const DEFAULT_EXPLORER_WORKER_PORT: u16 = 3001; pub const DEFAULT_EXPLORER_API_PORT: u16 = 3002; /// Default port for the explorer data fetcher service pub const DEFAULT_EXPLORER_DATA_FETCHER_PORT: u16 = 3040; +/// Default port for consensus service +pub const DEFAULT_CONSENSUS_PORT: u16 = 3054; pub const EXPLORER_API_DOCKER_IMAGE: &str = "matterlabs/block-explorer-api"; pub const EXPLORER_DATA_FETCHER_DOCKER_IMAGE: &str = "matterlabs/block-explorer-data-fetcher"; diff --git a/zk_toolbox/crates/config/src/contracts.rs b/zk_toolbox/crates/config/src/contracts.rs index 19d432909487..8296aa188527 100644 --- a/zk_toolbox/crates/config/src/contracts.rs +++ 
b/zk_toolbox/crates/config/src/contracts.rs @@ -7,6 +7,7 @@ use crate::{ deploy_ecosystem::output::DeployL1Output, deploy_l2_contracts::output::{ ConsensusRegistryOutput, DefaultL2UpgradeOutput, InitializeBridgeOutput, + Multicall3Output, }, register_chain::output::RegisterChainOutput, }, @@ -69,6 +70,7 @@ impl ContractsConfig { self.ecosystem_contracts .diamond_cut_data .clone_from(&deploy_l1_output.contracts_config.diamond_cut_data); + self.l1.chain_admin_addr = deploy_l1_output.deployed_addresses.chain_admin; } pub fn set_chain_contracts(&mut self, register_chain_output: &RegisterChainOutput) { @@ -101,6 +103,11 @@ impl ContractsConfig { self.l2.default_l2_upgrader = default_upgrade_output.l2_default_upgrader; Ok(()) } + + pub fn set_multicall3(&mut self, multicall3_output: &Multicall3Output) -> anyhow::Result<()> { + self.l2.multicall3 = Some(multicall3_output.multicall3); + Ok(()) + } } impl FileConfigWithDefaultName for ContractsConfig { @@ -151,4 +158,5 @@ pub struct L2Contracts { pub testnet_paymaster_addr: Address, pub default_l2_upgrader: Address, pub consensus_registry: Option
, + pub multicall3: Option
, } diff --git a/zk_toolbox/crates/config/src/ecosystem.rs b/zk_toolbox/crates/config/src/ecosystem.rs index a0412fbc4733..7ff65d4612df 100644 --- a/zk_toolbox/crates/config/src/ecosystem.rs +++ b/zk_toolbox/crates/config/src/ecosystem.rs @@ -173,6 +173,7 @@ impl EcosystemConfig { artifacts: config .artifacts_path .unwrap_or_else(|| self.get_chain_artifacts_path(name)), + legacy_bridge: config.legacy_bridge, }) } diff --git a/zk_toolbox/crates/config/src/forge_interface/deploy_ecosystem/output.rs b/zk_toolbox/crates/config/src/forge_interface/deploy_ecosystem/output.rs index bf9292e9ba30..7f35cf0357c2 100644 --- a/zk_toolbox/crates/config/src/forge_interface/deploy_ecosystem/output.rs +++ b/zk_toolbox/crates/config/src/forge_interface/deploy_ecosystem/output.rs @@ -44,6 +44,7 @@ pub struct DeployL1DeployedAddressesOutput { pub governance_addr: Address, pub transparent_proxy_admin_addr: Address, pub validator_timelock_addr: Address, + pub chain_admin: Address, pub bridgehub: L1BridgehubOutput, pub bridges: L1BridgesOutput, pub state_transition: L1StateTransitionOutput, diff --git a/zk_toolbox/crates/config/src/forge_interface/deploy_l2_contracts/output.rs b/zk_toolbox/crates/config/src/forge_interface/deploy_l2_contracts/output.rs index 860e7e293f99..ca5cac12c02d 100644 --- a/zk_toolbox/crates/config/src/forge_interface/deploy_l2_contracts/output.rs +++ b/zk_toolbox/crates/config/src/forge_interface/deploy_l2_contracts/output.rs @@ -6,6 +6,7 @@ use crate::traits::ZkToolboxConfig; impl ZkToolboxConfig for InitializeBridgeOutput {} impl ZkToolboxConfig for DefaultL2UpgradeOutput {} impl ZkToolboxConfig for ConsensusRegistryOutput {} +impl ZkToolboxConfig for Multicall3Output {} #[derive(Debug, Clone, Serialize, Deserialize)] pub struct InitializeBridgeOutput { @@ -23,3 +24,8 @@ pub struct ConsensusRegistryOutput { pub consensus_registry_implementation: Address, pub consensus_registry_proxy: Address, } + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct 
Multicall3Output { + pub multicall3: Address, +} diff --git a/zk_toolbox/crates/config/src/forge_interface/mod.rs b/zk_toolbox/crates/config/src/forge_interface/mod.rs index ea3d49c67ecb..c7033c45ed22 100644 --- a/zk_toolbox/crates/config/src/forge_interface/mod.rs +++ b/zk_toolbox/crates/config/src/forge_interface/mod.rs @@ -4,3 +4,4 @@ pub mod deploy_l2_contracts; pub mod paymaster; pub mod register_chain; pub mod script_params; +pub mod setup_legacy_bridge; diff --git a/zk_toolbox/crates/config/src/forge_interface/register_chain/input.rs b/zk_toolbox/crates/config/src/forge_interface/register_chain/input.rs index 29494ba5d8f5..e2e60294e867 100644 --- a/zk_toolbox/crates/config/src/forge_interface/register_chain/input.rs +++ b/zk_toolbox/crates/config/src/forge_interface/register_chain/input.rs @@ -54,7 +54,6 @@ impl ZkToolboxConfig for RegisterChainL1Config {} impl RegisterChainL1Config { pub fn new(chain_config: &ChainConfig, contracts: &ContractsConfig) -> anyhow::Result { - let genesis_config = chain_config.get_genesis_config()?; let wallets_config = chain_config.get_wallets_config()?; Ok(Self { contracts_config: Contracts { @@ -72,7 +71,7 @@ impl RegisterChainL1Config { validator_timelock_addr: contracts.ecosystem_contracts.validator_timelock_addr, }, chain: ChainL1Config { - chain_chain_id: genesis_config.l2_chain_id, + chain_chain_id: chain_config.chain_id, base_token_gas_price_multiplier_nominator: chain_config.base_token.nominator, base_token_gas_price_multiplier_denominator: chain_config.base_token.denominator, base_token_addr: chain_config.base_token.address, diff --git a/zk_toolbox/crates/config/src/forge_interface/script_params.rs b/zk_toolbox/crates/config/src/forge_interface/script_params.rs index fb16aa97e6a8..e7e21ad132b8 100644 --- a/zk_toolbox/crates/config/src/forge_interface/script_params.rs +++ b/zk_toolbox/crates/config/src/forge_interface/script_params.rs @@ -61,3 +61,9 @@ pub const ACCEPT_GOVERNANCE_SCRIPT_PARAMS: ForgeScriptParams = 
ForgeScriptParams output: "script-out/output-accept-admin.toml", script_path: "deploy-scripts/AcceptAdmin.s.sol", }; + +pub const SETUP_LEGACY_BRIDGE: ForgeScriptParams = ForgeScriptParams { + input: "script-config/setup-legacy-bridge.toml", + output: "script-out/setup-legacy-bridge.toml", + script_path: "deploy-scripts/dev/SetupLegacyBridge.s.sol", +}; diff --git a/zk_toolbox/crates/config/src/forge_interface/setup_legacy_bridge/mod.rs b/zk_toolbox/crates/config/src/forge_interface/setup_legacy_bridge/mod.rs new file mode 100644 index 000000000000..e8189c521fb3 --- /dev/null +++ b/zk_toolbox/crates/config/src/forge_interface/setup_legacy_bridge/mod.rs @@ -0,0 +1,20 @@ +use serde::{Deserialize, Serialize}; +use zksync_basic_types::{Address, L2ChainId, H256}; + +use crate::traits::ZkToolboxConfig; + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct SetupLegacyBridgeInput { + pub bridgehub: Address, + pub diamond_proxy: Address, + pub shared_bridge_proxy: Address, + pub transparent_proxy_admin: Address, + pub erc20bridge_proxy: Address, + pub token_weth_address: Address, + pub chain_id: L2ChainId, + pub l2shared_bridge_address: Address, + pub create2factory_salt: H256, + pub create2factory_addr: Address, +} + +impl ZkToolboxConfig for SetupLegacyBridgeInput {} diff --git a/zk_toolbox/crates/config/src/general.rs b/zk_toolbox/crates/config/src/general.rs index 41c2e4c33cfd..87eb3a7eb19b 100644 --- a/zk_toolbox/crates/config/src/general.rs +++ b/zk_toolbox/crates/config/src/general.rs @@ -1,21 +1,24 @@ use std::path::{Path, PathBuf}; use anyhow::Context; +use common::yaml::merge_yaml; use url::Url; use xshell::Shell; -use zksync_config::configs::object_store::ObjectStoreMode; pub use zksync_config::configs::GeneralConfig; +use zksync_config::configs::{consensus::Host, object_store::ObjectStoreMode}; use zksync_protobuf_config::{decode_yaml_repr, encode_yaml_repr}; use crate::{ consts::GENERAL_FILE, traits::{ConfigWithL2RpcUrl, FileConfigWithDefaultName, 
ReadConfig, SaveConfig}, + ChainConfig, DEFAULT_CONSENSUS_PORT, }; pub struct RocksDbs { pub state_keeper: PathBuf, pub merkle_tree: PathBuf, pub protective_reads: PathBuf, + pub basic_witness_input_producer: PathBuf, } pub struct FileArtifacts { @@ -54,6 +57,15 @@ pub fn set_rocks_db_config(config: &mut GeneralConfig, rocks_dbs: RocksDbs) -> a .as_mut() .context("Protective reads config is not presented")? .db_path = rocks_dbs.protective_reads.to_str().unwrap().to_string(); + config + .basic_witness_input_producer_config + .as_mut() + .context("Basic witness input producer config is not presented")? + .db_path = rocks_dbs + .basic_witness_input_producer + .to_str() + .unwrap() + .to_string(); Ok(()) } @@ -104,6 +116,11 @@ pub fn set_file_artifacts(config: &mut GeneralConfig, file_artifacts: FileArtifa pub fn ports_config(config: &GeneralConfig) -> Option { let api = config.api_config.as_ref()?; let contract_verifier = config.contract_verifier.as_ref()?; + let consensus_port = if let Some(consensus_config) = config.clone().consensus_config { + consensus_config.server_addr.port() + } else { + DEFAULT_CONSENSUS_PORT + }; Some(PortsConfig { web3_json_rpc_http_port: api.web3_json_rpc.http_port, @@ -112,6 +129,7 @@ pub fn ports_config(config: &GeneralConfig) -> Option { merkle_tree_port: api.merkle_tree.port, prometheus_listener_port: api.prometheus.listener_port, contract_verifier_port: contract_verifier.port, + consensus_port, }) } @@ -128,6 +146,10 @@ pub fn update_ports(config: &mut GeneralConfig, ports_config: &PortsConfig) -> a .prometheus_config .as_mut() .context("Prometheus config is not presented")?; + if let Some(consensus) = config.consensus_config.as_mut() { + consensus.server_addr.set_port(ports_config.consensus_port); + update_port_in_host(&mut consensus.public_addr, ports_config.consensus_port)?; + } api.web3_json_rpc.http_port = ports_config.web3_json_rpc_http_port; update_port_in_url( @@ -153,6 +175,15 @@ pub fn update_ports(config: &mut GeneralConfig, 
ports_config: &PortsConfig) -> a Ok(()) } +pub fn override_config(shell: &Shell, path: PathBuf, chain: &ChainConfig) -> anyhow::Result<()> { + let chain_config_path = chain.path_to_general_config(); + let override_config = serde_yaml::from_str(&shell.read_file(path)?)?; + let mut chain_config = serde_yaml::from_str(&shell.read_file(chain_config_path.clone())?)?; + merge_yaml(&mut chain_config, override_config, true)?; + shell.write_file(chain_config_path, serde_yaml::to_string(&chain_config)?)?; + Ok(()) +} + fn update_port_in_url(http_url: &mut String, port: u16) -> anyhow::Result<()> { let mut http_url_url = Url::parse(http_url)?; if let Err(()) = http_url_url.set_port(Some(port)) { @@ -162,6 +193,13 @@ fn update_port_in_url(http_url: &mut String, port: u16) -> anyhow::Result<()> { Ok(()) } +fn update_port_in_host(host: &mut Host, port: u16) -> anyhow::Result<()> { + let url = Url::parse(&format!("http://{}", host.0))?; + let host_str = url.host_str().context("Failed to get host")?; + host.0 = format!("{host_str}:{port}"); + Ok(()) +} + impl FileConfigWithDefaultName for GeneralConfig { const FILE_NAME: &'static str = GENERAL_FILE; } @@ -173,6 +211,7 @@ pub struct PortsConfig { pub merkle_tree_port: u16, pub prometheus_listener_port: u16, pub contract_verifier_port: u16, + pub consensus_port: u16, } impl PortsConfig { @@ -183,6 +222,7 @@ impl PortsConfig { self.merkle_tree_port += offset; self.prometheus_listener_port += offset; self.contract_verifier_port += offset; + self.consensus_port += offset; } pub fn next_empty_ports_config(&self) -> PortsConfig { @@ -193,6 +233,7 @@ impl PortsConfig { merkle_tree_port: self.merkle_tree_port + 100, prometheus_listener_port: self.prometheus_listener_port + 100, contract_verifier_port: self.contract_verifier_port + 100, + consensus_port: self.consensus_port + 100, } } } diff --git a/zk_toolbox/crates/config/src/lib.rs b/zk_toolbox/crates/config/src/lib.rs index 3c7443f24490..1a7c5bf1d7e2 100644 --- 
a/zk_toolbox/crates/config/src/lib.rs +++ b/zk_toolbox/crates/config/src/lib.rs @@ -25,6 +25,8 @@ mod secrets; mod wallet_creation; mod wallets; +pub mod consensus_config; +pub mod consensus_secrets; pub mod docker_compose; pub mod explorer; pub mod explorer_compose; diff --git a/zk_toolbox/crates/git_version_macro/Cargo.toml b/zk_toolbox/crates/git_version_macro/Cargo.toml new file mode 100644 index 000000000000..eb70b450a4cf --- /dev/null +++ b/zk_toolbox/crates/git_version_macro/Cargo.toml @@ -0,0 +1,17 @@ +[package] +name = "git_version_macro" +edition = "2021" +description = "Procedural macro to generate metainformation about build in compile time" +version.workspace = true +homepage.workspace = true +license.workspace = true +authors.workspace = true +exclude.workspace = true +repository.workspace = true +keywords.workspace = true + +[lib] +proc-macro = true + +[dependencies] +chrono.workspace = true diff --git a/zk_toolbox/crates/git_version_macro/src/lib.rs b/zk_toolbox/crates/git_version_macro/src/lib.rs new file mode 100644 index 000000000000..34b83efce195 --- /dev/null +++ b/zk_toolbox/crates/git_version_macro/src/lib.rs @@ -0,0 +1,81 @@ +extern crate proc_macro; +use std::{process::Command, str::FromStr}; + +use proc_macro::TokenStream; + +/// Outputs the current date and time as a string literal. +/// Can be used to include the build timestamp in the binary. +#[proc_macro] +pub fn build_timestamp(_item: TokenStream) -> TokenStream { + let now = chrono::Local::now().format("%Y-%m-%d %H:%M:%S").to_string(); + encode_as_str(&now) +} + +/// Outputs the current git branch as a string literal. +#[proc_macro] +pub fn build_git_branch(_item: TokenStream) -> TokenStream { + let out = run_cmd("git", &["rev-parse", "--abbrev-ref", "HEAD"]); + encode_as_str(&out) +} + +/// Outputs the current git commit hash as a string literal. 
+#[proc_macro] +pub fn build_git_revision(_item: TokenStream) -> TokenStream { + let out = run_cmd("git", &["rev-parse", "--short", "HEAD"]); + encode_as_str(&out) +} + +/// Creates a slice of `&[(&str, &str)]` tuples that correspond to +/// the submodule name -> revision. +/// Results in an empty list if there are no submodules or if +/// the command fails. +#[proc_macro] +pub fn build_git_submodules(_item: TokenStream) -> TokenStream { + let Some(out) = run_cmd_opt("git", &["submodule", "status"]) else { + return TokenStream::from_str("&[]").unwrap(); + }; + let submodules = out + .lines() + .filter_map(|line| { + let parts: Vec<&str> = line.split_whitespace().collect(); + // Index 0 is commit hash, index 1 is the path to the folder, and there + // may be some metainformation after that. + if parts.len() >= 2 { + let folder_name = parts[1].split('/').last().unwrap_or(parts[1]); + Some((folder_name, parts[0])) + } else { + None + } + }) + .collect::>(); + let submodules = submodules + .iter() + .map(|(name, rev)| format!("(\"{}\", \"{}\")", name, rev)) + .collect::>() + .join(", "); + TokenStream::from_str(format!("&[{}]", submodules).as_str()) + .unwrap_or_else(|_| panic!("Unable to encode submodules: {}", submodules)) +} + +/// Tries to run the command, only returns `Some` if the command +/// succeeded and the output was valid utf8. +fn run_cmd(cmd: &str, args: &[&str]) -> String { + run_cmd_opt(cmd, args).unwrap_or("unknown".to_string()) +} + +fn run_cmd_opt(cmd: &str, args: &[&str]) -> Option { + let output = Command::new(cmd).args(args).output().ok()?; + if output.status.success() { + String::from_utf8(output.stdout) + .ok() + .map(|s| s.trim().to_string()) + } else { + None + } +} + +/// Encodes string as a literal. 
+fn encode_as_str(s: &str) -> TokenStream { + TokenStream::from_str(format!("\"{}\"", s).as_str()) + .unwrap_or_else(|_| panic!("Unable to encode string: {}", s)) +} diff --git a/zk_toolbox/crates/zk_inception/Cargo.toml b/zk_toolbox/crates/zk_inception/Cargo.toml index 01d0697d6b6c..61983d59e6e9 100644 --- a/zk_toolbox/crates/zk_inception/Cargo.toml +++ b/zk_toolbox/crates/zk_inception/Cargo.toml @@ -34,3 +34,6 @@ zksync_config.workspace = true slugify-rs.workspace = true zksync_basic_types.workspace = true clap-markdown.workspace = true +zksync_consensus_roles.workspace = true +zksync_consensus_crypto.workspace = true +secrecy.workspace = true diff --git a/zk_toolbox/crates/zk_inception/src/commands/args/run_server.rs b/zk_toolbox/crates/zk_inception/src/commands/args/run_server.rs index 1e373319ec73..ebe407d4822d 100644 --- a/zk_toolbox/crates/zk_inception/src/commands/args/run_server.rs +++ b/zk_toolbox/crates/zk_inception/src/commands/args/run_server.rs @@ -3,7 +3,7 @@ use serde::{Deserialize, Serialize}; use crate::messages::{ MSG_SERVER_ADDITIONAL_ARGS_HELP, MSG_SERVER_BUILD_HELP, MSG_SERVER_COMPONENTS_HELP, - MSG_SERVER_GENESIS_HELP, + MSG_SERVER_GENESIS_HELP, MSG_SERVER_URING_HELP, }; #[derive(Debug, Serialize, Deserialize, Parser)] @@ -17,4 +17,6 @@ pub struct RunServerArgs { additional_args: Vec, #[clap(long, help = MSG_SERVER_BUILD_HELP)] pub build: bool, + #[clap(help=MSG_SERVER_URING_HELP, long, default_missing_value = "true", num_args = 0..=1)] + pub uring: bool, } diff --git a/zk_toolbox/crates/zk_inception/src/commands/chain/args/build_transactions.rs b/zk_toolbox/crates/zk_inception/src/commands/chain/args/build_transactions.rs new file mode 100644 index 000000000000..793bea487f7e --- /dev/null +++ b/zk_toolbox/crates/zk_inception/src/commands/chain/args/build_transactions.rs @@ -0,0 +1,60 @@ +use std::path::PathBuf; + +use clap::Parser; +use common::{config::global_config, forge::ForgeScriptArgs, Prompt}; +use serde::{Deserialize, Serialize}; 
+use url::Url; + +use crate::{ + consts::DEFAULT_UNSIGNED_TRANSACTIONS_DIR, + defaults::LOCAL_RPC_URL, + messages::{MSG_L1_RPC_URL_HELP, MSG_L1_RPC_URL_INVALID_ERR, MSG_L1_RPC_URL_PROMPT}, +}; + +const CHAIN_SUBDIR: &str = "chain"; + +#[derive(Debug, Clone, Serialize, Deserialize, Parser)] +pub struct BuildTransactionsArgs { + /// Output directory for the generated files. + #[arg(long, short)] + pub out: Option, + /// All ethereum environment related arguments + #[clap(flatten)] + #[serde(flatten)] + pub forge_args: ForgeScriptArgs, + #[clap(long, help = MSG_L1_RPC_URL_HELP)] + pub l1_rpc_url: Option, +} + +impl BuildTransactionsArgs { + pub fn fill_values_with_prompt(self, default_chain: String) -> BuildTransactionsArgsFinal { + let chain_name = global_config().chain_name.clone(); + + let l1_rpc_url = self.l1_rpc_url.unwrap_or_else(|| { + Prompt::new(MSG_L1_RPC_URL_PROMPT) + .default(LOCAL_RPC_URL) + .validate_with(|val: &String| -> Result<(), String> { + Url::parse(val) + .map(|_| ()) + .map_err(|_| MSG_L1_RPC_URL_INVALID_ERR.to_string()) + }) + .ask() + }); + + BuildTransactionsArgsFinal { + out: self + .out + .unwrap_or(PathBuf::from(DEFAULT_UNSIGNED_TRANSACTIONS_DIR).join(CHAIN_SUBDIR)) + .join(chain_name.unwrap_or(default_chain)), + forge_args: self.forge_args, + l1_rpc_url, + } + } +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct BuildTransactionsArgsFinal { + pub out: PathBuf, + pub forge_args: ForgeScriptArgs, + pub l1_rpc_url: String, +} diff --git a/zk_toolbox/crates/zk_inception/src/commands/chain/args/create.rs b/zk_toolbox/crates/zk_inception/src/commands/chain/args/create.rs index 65f809287890..3ea15d10f8be 100644 --- a/zk_toolbox/crates/zk_inception/src/commands/chain/args/create.rs +++ b/zk_toolbox/crates/zk_inception/src/commands/chain/args/create.rs @@ -65,6 +65,8 @@ pub struct ChainCreateArgs { base_token_price_denominator: Option, #[clap(long, help = MSG_SET_AS_DEFAULT_HELP, default_missing_value = "true", num_args = 0..=1)] 
pub(crate) set_as_default: Option, + #[clap(long, default_value = "false")] + pub(crate) legacy_bridge: bool, } impl ChainCreateArgs { @@ -224,6 +226,7 @@ impl ChainCreateArgs { wallet_path, base_token, set_as_default, + legacy_bridge: self.legacy_bridge, }) } } @@ -238,6 +241,7 @@ pub struct ChainCreateArgsFinal { pub wallet_path: Option, pub base_token: BaseToken, pub set_as_default: bool, + pub legacy_bridge: bool, } #[derive(Debug, Clone, EnumIter, Display, PartialEq, Eq)] diff --git a/zk_toolbox/crates/zk_inception/src/commands/chain/args/mod.rs b/zk_toolbox/crates/zk_inception/src/commands/chain/args/mod.rs index 08f39a90a843..f2a5f6b8be1f 100644 --- a/zk_toolbox/crates/zk_inception/src/commands/chain/args/mod.rs +++ b/zk_toolbox/crates/zk_inception/src/commands/chain/args/mod.rs @@ -1,3 +1,4 @@ +pub mod build_transactions; pub mod create; pub mod genesis; pub mod init; diff --git a/zk_toolbox/crates/zk_inception/src/commands/chain/build_transactions.rs b/zk_toolbox/crates/zk_inception/src/commands/chain/build_transactions.rs new file mode 100644 index 000000000000..68cb7a9a0742 --- /dev/null +++ b/zk_toolbox/crates/zk_inception/src/commands/chain/build_transactions.rs @@ -0,0 +1,90 @@ +use anyhow::Context; +use common::{config::global_config, git, logger, spinner::Spinner}; +use config::{ + copy_configs, traits::SaveConfigWithBasePath, update_from_chain_config, EcosystemConfig, +}; +use ethers::utils::hex::ToHex; +use xshell::Shell; + +use super::common::register_chain; +use crate::{ + commands::chain::args::build_transactions::BuildTransactionsArgs, + messages::{ + MSG_BUILDING_CHAIN_REGISTRATION_TXNS_SPINNER, MSG_CHAIN_NOT_FOUND_ERR, + MSG_CHAIN_TRANSACTIONS_BUILT, MSG_CHAIN_TXN_MISSING_CONTRACT_CONFIG, + MSG_CHAIN_TXN_OUT_PATH_INVALID_ERR, MSG_PREPARING_CONFIG_SPINNER, MSG_SELECTED_CONFIG, + MSG_WRITING_OUTPUT_FILES_SPINNER, + }, +}; + +const REGISTER_CHAIN_TXNS_FILE_SRC: &str = + 
"contracts/l1-contracts/broadcast/RegisterHyperchain.s.sol/9/dry-run/run-latest.json"; +const REGISTER_CHAIN_TXNS_FILE_DST: &str = "register-hyperchain-txns.json"; + +const SCRIPT_CONFIG_FILE_SRC: &str = + "contracts/l1-contracts/script-config/register-hyperchain.toml"; +const SCRIPT_CONFIG_FILE_DST: &str = "register-hyperchain.toml"; + +pub(crate) async fn run(args: BuildTransactionsArgs, shell: &Shell) -> anyhow::Result<()> { + let config = EcosystemConfig::from_file(shell)?; + let chain_name = global_config().chain_name.clone(); + let chain_config = config + .load_chain(chain_name) + .context(MSG_CHAIN_NOT_FOUND_ERR)?; + + let args = args.fill_values_with_prompt(config.default_chain.clone()); + + git::submodule_update(shell, config.link_to_code.clone())?; + + let spinner = Spinner::new(MSG_PREPARING_CONFIG_SPINNER); + copy_configs(shell, &config.link_to_code, &chain_config.configs)?; + + logger::note(MSG_SELECTED_CONFIG, logger::object_to_string(&chain_config)); + + let mut genesis_config = chain_config.get_genesis_config()?; + update_from_chain_config(&mut genesis_config, &chain_config); + + // Copy ecosystem contracts + let mut contracts_config = config + .get_contracts_config() + .context(MSG_CHAIN_TXN_MISSING_CONTRACT_CONFIG)?; + contracts_config.l1.base_token_addr = chain_config.base_token.address; + spinner.finish(); + + let spinner = Spinner::new(MSG_BUILDING_CHAIN_REGISTRATION_TXNS_SPINNER); + let governor: String = config.get_wallets()?.governor.address.encode_hex_upper(); + + register_chain( + shell, + args.forge_args.clone(), + &config, + &chain_config, + &mut contracts_config, + args.l1_rpc_url.clone(), + Some(governor), + false, + ) + .await?; + + contracts_config.save_with_base_path(shell, &args.out)?; + spinner.finish(); + + let spinner = Spinner::new(MSG_WRITING_OUTPUT_FILES_SPINNER); + shell + .create_dir(&args.out) + .context(MSG_CHAIN_TXN_OUT_PATH_INVALID_ERR)?; + + shell.copy_file( + config.link_to_code.join(REGISTER_CHAIN_TXNS_FILE_SRC), + 
args.out.join(REGISTER_CHAIN_TXNS_FILE_DST), + )?; + + shell.copy_file( + config.link_to_code.join(SCRIPT_CONFIG_FILE_SRC), + args.out.join(SCRIPT_CONFIG_FILE_DST), + )?; + spinner.finish(); + + logger::success(MSG_CHAIN_TRANSACTIONS_BUILT); + Ok(()) +} diff --git a/zk_toolbox/crates/zk_inception/src/commands/chain/common.rs b/zk_toolbox/crates/zk_inception/src/commands/chain/common.rs new file mode 100644 index 000000000000..ec70d6122d23 --- /dev/null +++ b/zk_toolbox/crates/zk_inception/src/commands/chain/common.rs @@ -0,0 +1,125 @@ +use common::{ + forge::{Forge, ForgeScriptArgs}, + spinner::Spinner, +}; +use config::{ + forge_interface::{ + register_chain::{input::RegisterChainL1Config, output::RegisterChainOutput}, + script_params::REGISTER_CHAIN_SCRIPT_PARAMS, + }, + traits::{ReadConfig, SaveConfig}, + ChainConfig, ContractsConfig, EcosystemConfig, +}; +use types::{BaseToken, L1Network, WalletCreation}; +use xshell::Shell; + +use crate::{ + consts::AMOUNT_FOR_DISTRIBUTION_TO_WALLETS, + messages::{MSG_DISTRIBUTING_ETH_SPINNER, MSG_MINT_BASE_TOKEN_SPINNER}, + utils::forge::{check_the_balance, fill_forge_private_key}, +}; + +#[allow(clippy::too_many_arguments)] +pub async fn register_chain( + shell: &Shell, + forge_args: ForgeScriptArgs, + config: &EcosystemConfig, + chain_config: &ChainConfig, + contracts: &mut ContractsConfig, + l1_rpc_url: String, + sender: Option, + broadcast: bool, +) -> anyhow::Result<()> { + let deploy_config_path = REGISTER_CHAIN_SCRIPT_PARAMS.input(&config.link_to_code); + + let deploy_config = RegisterChainL1Config::new(chain_config, contracts)?; + deploy_config.save(shell, deploy_config_path)?; + + let mut forge = Forge::new(&config.path_to_foundry()) + .script(®ISTER_CHAIN_SCRIPT_PARAMS.script(), forge_args.clone()) + .with_ffi() + .with_rpc_url(l1_rpc_url); + + if broadcast { + forge = forge.with_broadcast(); + } + + if let Some(address) = sender { + forge = forge.with_sender(address); + } else { + forge = 
fill_forge_private_key(forge, config.get_wallets()?.governor_private_key())?; + check_the_balance(&forge).await?; + } + + forge.run(shell)?; + + let register_chain_output = RegisterChainOutput::read( + shell, + REGISTER_CHAIN_SCRIPT_PARAMS.output(&chain_config.link_to_code), + )?; + contracts.set_chain_contracts(®ister_chain_output); + Ok(()) +} + +// Distribute eth to the chain wallets for localhost environment +pub async fn distribute_eth( + ecosystem_config: &EcosystemConfig, + chain_config: &ChainConfig, + l1_rpc_url: String, +) -> anyhow::Result<()> { + if chain_config.wallet_creation == WalletCreation::Localhost + && ecosystem_config.l1_network == L1Network::Localhost + { + let spinner = Spinner::new(MSG_DISTRIBUTING_ETH_SPINNER); + let wallets = ecosystem_config.get_wallets()?; + let chain_wallets = chain_config.get_wallets_config()?; + let mut addresses = vec![ + chain_wallets.operator.address, + chain_wallets.blob_operator.address, + chain_wallets.governor.address, + ]; + if let Some(deployer) = chain_wallets.deployer { + addresses.push(deployer.address) + } + common::ethereum::distribute_eth( + wallets.operator, + addresses, + l1_rpc_url, + ecosystem_config.l1_network.chain_id(), + AMOUNT_FOR_DISTRIBUTION_TO_WALLETS, + ) + .await?; + spinner.finish(); + } + Ok(()) +} + +pub async fn mint_base_token( + ecosystem_config: &EcosystemConfig, + chain_config: &ChainConfig, + l1_rpc_url: String, +) -> anyhow::Result<()> { + if chain_config.wallet_creation == WalletCreation::Localhost + && ecosystem_config.l1_network == L1Network::Localhost + && chain_config.base_token != BaseToken::eth() + { + let spinner = Spinner::new(MSG_MINT_BASE_TOKEN_SPINNER); + let wallets = ecosystem_config.get_wallets()?; + let chain_wallets = chain_config.get_wallets_config()?; + let base_token = &chain_config.base_token; + let addresses = vec![wallets.governor.address, chain_wallets.governor.address]; + let amount = AMOUNT_FOR_DISTRIBUTION_TO_WALLETS * base_token.nominator as u128 + / 
base_token.denominator as u128; + common::ethereum::mint_token( + wallets.operator, + base_token.address, + addresses, + l1_rpc_url, + ecosystem_config.l1_network.chain_id(), + amount, + ) + .await?; + spinner.finish(); + } + Ok(()) +} diff --git a/zk_toolbox/crates/zk_inception/src/commands/chain/create.rs b/zk_toolbox/crates/zk_inception/src/commands/chain/create.rs index abdea482db4c..48a320ec27e0 100644 --- a/zk_toolbox/crates/zk_inception/src/commands/chain/create.rs +++ b/zk_toolbox/crates/zk_inception/src/commands/chain/create.rs @@ -59,15 +59,24 @@ pub(crate) fn create_chain_inner( ecosystem_config: &EcosystemConfig, shell: &Shell, ) -> anyhow::Result<()> { + if args.legacy_bridge { + logger::warn("WARNING!!! You are creating a chain with legacy bridge, use it only for testing compatibility") + } let default_chain_name = args.chain_name.clone(); let chain_path = ecosystem_config.chains.join(&default_chain_name); let chain_configs_path = create_local_configs_dir(shell, &chain_path)?; - let chain_id = ecosystem_config.list_of_chains().len() as u32; + let (chain_id, legacy_bridge) = if args.legacy_bridge { + // Legacy bridge is distinguished by using the same chain id as ecosystem + (ecosystem_config.era_chain_id, Some(true)) + } else { + (L2ChainId::from(args.chain_id), None) + }; + let internal_id = ecosystem_config.list_of_chains().len() as u32; let chain_config = ChainConfig { - id: chain_id, + id: internal_id, name: default_chain_name.clone(), - chain_id: L2ChainId::from(args.chain_id), + chain_id, prover_version: args.prover_version, l1_network: ecosystem_config.l1_network, link_to_code: ecosystem_config.link_to_code.clone(), @@ -79,13 +88,14 @@ pub(crate) fn create_chain_inner( base_token: args.base_token, wallet_creation: args.wallet_creation, shell: OnceCell::from(shell.clone()), + legacy_bridge, }; create_wallets( shell, &chain_config.configs, &ecosystem_config.link_to_code, - chain_id, + internal_id, args.wallet_creation, args.wallet_path, )?; diff 
--git a/zk_toolbox/crates/zk_inception/src/commands/chain/deploy_l2_contracts.rs b/zk_toolbox/crates/zk_inception/src/commands/chain/deploy_l2_contracts.rs index 3625abfb15a9..5bfc0a623488 100644 --- a/zk_toolbox/crates/zk_inception/src/commands/chain/deploy_l2_contracts.rs +++ b/zk_toolbox/crates/zk_inception/src/commands/chain/deploy_l2_contracts.rs @@ -11,7 +11,10 @@ use config::{ forge_interface::{ deploy_l2_contracts::{ input::DeployL2ContractsInput, - output::{ConsensusRegistryOutput, DefaultL2UpgradeOutput, InitializeBridgeOutput}, + output::{ + ConsensusRegistryOutput, DefaultL2UpgradeOutput, InitializeBridgeOutput, + Multicall3Output, + }, }, script_params::DEPLOY_L2_CONTRACTS_SCRIPT_PARAMS, }, @@ -125,12 +128,17 @@ pub async fn initialize_bridges( contracts_config: &mut ContractsConfig, forge_args: ForgeScriptArgs, ) -> anyhow::Result<()> { + let signature = if let Some(true) = chain_config.legacy_bridge { + Some("runDeployLegacySharedBridge") + } else { + Some("runDeploySharedBridge") + }; build_and_deploy( shell, chain_config, ecosystem_config, forge_args, - Some("runDeploySharedBridge"), + signature, |shell, out| { contracts_config.set_l2_shared_bridge(&InitializeBridgeOutput::read(shell, out)?) 
}, @@ -185,16 +193,22 @@ pub async fn deploy_l2_contracts( contracts_config: &mut ContractsConfig, forge_args: ForgeScriptArgs, ) -> anyhow::Result<()> { + let signature = if let Some(true) = chain_config.legacy_bridge { + Some("runWithLegacyBridge") + } else { + None + }; build_and_deploy( shell, chain_config, ecosystem_config, forge_args, - None, + signature, |shell, out| { contracts_config.set_l2_shared_bridge(&InitializeBridgeOutput::read(shell, out)?)?; contracts_config.set_default_l2_upgrade(&DefaultL2UpgradeOutput::read(shell, out)?)?; contracts_config.set_consensus_registry(&ConsensusRegistryOutput::read(shell, out)?)?; + contracts_config.set_multicall3(&Multicall3Output::read(shell, out)?)?; Ok(()) }, ) diff --git a/zk_toolbox/crates/zk_inception/src/commands/chain/deploy_paymaster.rs b/zk_toolbox/crates/zk_inception/src/commands/chain/deploy_paymaster.rs index 81ac457cd884..58c199189bd7 100644 --- a/zk_toolbox/crates/zk_inception/src/commands/chain/deploy_paymaster.rs +++ b/zk_toolbox/crates/zk_inception/src/commands/chain/deploy_paymaster.rs @@ -2,7 +2,6 @@ use anyhow::Context; use common::{ config::global_config, forge::{Forge, ForgeScriptArgs}, - spinner::Spinner, }; use config::{ forge_interface::{ @@ -15,9 +14,7 @@ use config::{ use xshell::Shell; use crate::{ - messages::{ - MSG_CHAIN_NOT_INITIALIZED, MSG_DEPLOYING_PAYMASTER, MSG_L1_SECRETS_MUST_BE_PRESENTED, - }, + messages::{MSG_CHAIN_NOT_INITIALIZED, MSG_L1_SECRETS_MUST_BE_PRESENTED}, utils::forge::{check_the_balance, fill_forge_private_key}, }; @@ -28,7 +25,7 @@ pub async fn run(args: ForgeScriptArgs, shell: &Shell) -> anyhow::Result<()> { .load_chain(chain_name) .context(MSG_CHAIN_NOT_INITIALIZED)?; let mut contracts = chain_config.get_contracts_config()?; - deploy_paymaster(shell, &chain_config, &mut contracts, args).await?; + deploy_paymaster(shell, &chain_config, &mut contracts, args, None, true).await?; contracts.save_with_base_path(shell, chain_config.configs) } @@ -37,6 +34,8 @@ pub async 
fn deploy_paymaster( chain_config: &ChainConfig, contracts_config: &mut ContractsConfig, forge_args: ForgeScriptArgs, + sender: Option, + broadcast: bool, ) -> anyhow::Result<()> { let input = DeployPaymasterInput::new(chain_config)?; let foundry_contracts_path = chain_config.path_to_foundry(); @@ -56,18 +55,23 @@ pub async fn deploy_paymaster( .l1_rpc_url .expose_str() .to_string(), - ) - .with_broadcast(); + ); - forge = fill_forge_private_key( - forge, - chain_config.get_wallets_config()?.governor_private_key(), - )?; + if let Some(address) = sender { + forge = forge.with_sender(address); + } else { + forge = fill_forge_private_key( + forge, + chain_config.get_wallets_config()?.governor_private_key(), + )?; + } + + if broadcast { + forge = forge.with_broadcast(); + check_the_balance(&forge).await?; + } - let spinner = Spinner::new(MSG_DEPLOYING_PAYMASTER); - check_the_balance(&forge).await?; forge.run(shell)?; - spinner.finish(); let output = DeployPaymasterOutput::read( shell, diff --git a/zk_toolbox/crates/zk_inception/src/commands/chain/genesis.rs b/zk_toolbox/crates/zk_inception/src/commands/chain/genesis.rs index 0eb40d630ae9..187af41489d9 100644 --- a/zk_toolbox/crates/zk_inception/src/commands/chain/genesis.rs +++ b/zk_toolbox/crates/zk_inception/src/commands/chain/genesis.rs @@ -9,7 +9,7 @@ use common::{ spinner::Spinner, }; use config::{ - set_databases, set_file_artifacts, set_rocks_db_config, + override_config, set_databases, set_file_artifacts, set_rocks_db_config, traits::{FileConfigWithDefaultName, SaveConfigWithBasePath}, ChainConfig, ContractsConfig, EcosystemConfig, FileArtifacts, GeneralConfig, GenesisConfig, SecretsConfig, WalletsConfig, @@ -17,12 +17,14 @@ use config::{ use types::ProverMode; use xshell::Shell; use zksync_basic_types::commitment::L1BatchCommitmentMode; -use zksync_config::configs::eth_sender::{ProofSendingMode, PubdataSendingMode}; use super::args::genesis::GenesisArgsFinal; use crate::{ 
commands::chain::args::genesis::GenesisArgs, - consts::{PROVER_MIGRATIONS, SERVER_MIGRATIONS}, + consts::{ + PATH_TO_ONLY_REAL_PROOFS_OVERRIDE_CONFIG, PATH_TO_VALIDIUM_OVERRIDE_CONFIG, + PROVER_MIGRATIONS, SERVER_MIGRATIONS, + }, messages::{ MSG_CHAIN_NOT_INITIALIZED, MSG_FAILED_TO_DROP_PROVER_DATABASE_ERR, MSG_FAILED_TO_DROP_SERVER_DATABASE_ERR, MSG_FAILED_TO_RUN_SERVER_ERR, @@ -55,41 +57,31 @@ pub async fn genesis( ) -> anyhow::Result<()> { shell.create_dir(&config.rocks_db_path)?; + let link_to_code = config.link_to_code.clone(); let rocks_db = recreate_rocksdb_dirs(shell, &config.rocks_db_path, RocksDBDirOption::Main) .context(MSG_RECREATE_ROCKS_DB_ERRROR)?; let mut general = config.get_general_config()?; let file_artifacts = FileArtifacts::new(config.artifacts.clone()); set_rocks_db_config(&mut general, rocks_db)?; set_file_artifacts(&mut general, file_artifacts); + general.save_with_base_path(shell, &config.configs)?; + if config.prover_version != ProverMode::NoProofs { - general - .eth - .as_mut() - .context("eth")? - .sender - .as_mut() - .context("sender")? - .proof_sending_mode = ProofSendingMode::OnlyRealProofs; + override_config( + shell, + link_to_code.join(PATH_TO_ONLY_REAL_PROOFS_OVERRIDE_CONFIG), + config, + )?; } if config.l1_batch_commit_data_generator_mode == L1BatchCommitmentMode::Validium { - general - .eth - .as_mut() - .context("eth")? - .sender - .as_mut() - .context("sender")? - .pubdata_sending_mode = PubdataSendingMode::Custom; - general - .state_keeper_config - .as_mut() - .context("state_keeper_config")? 
- .pubdata_overhead_part = 0.0; + override_config( + shell, + link_to_code.join(PATH_TO_VALIDIUM_OVERRIDE_CONFIG), + config, + )?; } - general.save_with_base_path(shell, &config.configs)?; - let mut secrets = config.get_secrets_config()?; set_databases(&mut secrets, &args.server_db, &args.prover_db)?; secrets.save_with_base_path(shell, &config.configs)?; @@ -168,7 +160,7 @@ async fn initialize_databases( } fn run_server_genesis(chain_config: &ChainConfig, shell: &Shell) -> anyhow::Result<()> { - let server = Server::new(None, chain_config.link_to_code.clone()); + let server = Server::new(None, chain_config.link_to_code.clone(), false); server .run( shell, diff --git a/zk_toolbox/crates/zk_inception/src/commands/chain/init.rs b/zk_toolbox/crates/zk_inception/src/commands/chain/init.rs index a5f57981d583..fa2388a69be8 100644 --- a/zk_toolbox/crates/zk_inception/src/commands/chain/init.rs +++ b/zk_toolbox/crates/zk_inception/src/commands/chain/init.rs @@ -1,24 +1,13 @@ use anyhow::{bail, Context}; -use common::{ - config::global_config, - forge::{Forge, ForgeScriptArgs}, - git, logger, - spinner::Spinner, -}; +use common::{config::global_config, git, logger, spinner::Spinner}; use config::{ - copy_configs, - forge_interface::{ - register_chain::{input::RegisterChainL1Config, output::RegisterChainOutput}, - script_params::REGISTER_CHAIN_SCRIPT_PARAMS, - }, - ports_config, set_l1_rpc_url, - traits::{ReadConfig, SaveConfig, SaveConfigWithBasePath}, - update_from_chain_config, update_ports, ChainConfig, ContractsConfig, EcosystemConfig, - GeneralConfig, + copy_configs, ports_config, set_l1_rpc_url, traits::SaveConfigWithBasePath, + update_from_chain_config, update_ports, ChainConfig, EcosystemConfig, GeneralConfig, }; -use types::{BaseToken, L1Network, WalletCreation}; +use types::BaseToken; use xshell::Shell; +use super::common::{distribute_eth, mint_base_token, register_chain}; use crate::{ accept_ownership::accept_admin, commands::{ @@ -27,18 +16,18 @@ use crate::{ 
deploy_l2_contracts, deploy_paymaster, genesis::genesis, set_token_multiplier_setter::set_token_multiplier_setter, + setup_legacy_bridge::setup_legacy_bridge, }, portal::update_portal_config, }, - consts::AMOUNT_FOR_DISTRIBUTION_TO_WALLETS, messages::{ msg_initializing_chain, MSG_ACCEPTING_ADMIN_SPINNER, MSG_CHAIN_INITIALIZED, - MSG_CHAIN_NOT_FOUND_ERR, MSG_DISTRIBUTING_ETH_SPINNER, MSG_GENESIS_DATABASE_ERR, - MSG_MINT_BASE_TOKEN_SPINNER, MSG_PORTAL_FAILED_TO_CREATE_CONFIG_ERR, + MSG_CHAIN_NOT_FOUND_ERR, MSG_DEPLOYING_PAYMASTER, MSG_GENESIS_DATABASE_ERR, + MSG_PORTAL_FAILED_TO_CREATE_CONFIG_ERR, MSG_PORTS_CONFIG_ERR, MSG_REGISTERING_CHAIN_SPINNER, MSG_SELECTED_CONFIG, MSG_UPDATING_TOKEN_MULTIPLIER_SETTER_SPINNER, MSG_WALLET_TOKEN_MULTIPLIER_SETTER_NOT_FOUND, }, - utils::forge::{check_the_balance, fill_forge_private_key}, + utils::consensus::{generate_consensus_keys, get_consensus_config, get_consensus_secrets}, }; pub(crate) async fn run(args: InitArgs, shell: &Shell) -> anyhow::Result<()> { @@ -69,6 +58,12 @@ pub async fn init( let mut general_config = chain_config.get_general_config()?; apply_port_offset(init_args.port_offset, &mut general_config)?; + let ports = ports_config(&general_config).context(MSG_PORTS_CONFIG_ERR)?; + + let consensus_keys = generate_consensus_keys(); + let consensus_config = + get_consensus_config(chain_config, ports, Some(consensus_keys.clone()), None)?; + general_config.consensus_config = Some(consensus_config); general_config.save_with_base_path(shell, &chain_config.configs)?; let mut genesis_config = chain_config.get_genesis_config()?; @@ -85,6 +80,7 @@ pub async fn init( let mut secrets = chain_config.get_secrets_config()?; set_l1_rpc_url(&mut secrets, init_args.l1_rpc_url.clone())?; + secrets.consensus = Some(get_consensus_secrets(&consensus_keys)); secrets.save_with_base_path(shell, &chain_config.configs)?; let spinner = Spinner::new(MSG_REGISTERING_CHAIN_SPINNER); @@ -95,6 +91,8 @@ pub async fn init( chain_config, &mut 
contracts_config, init_args.l1_rpc_url.clone(), + None, + true, ) .await?; contracts_config.save_with_base_path(shell, &chain_config.configs)?; @@ -142,15 +140,30 @@ pub async fn init( .await?; contracts_config.save_with_base_path(shell, &chain_config.configs)?; + if let Some(true) = chain_config.legacy_bridge { + setup_legacy_bridge( + shell, + chain_config, + ecosystem_config, + &contracts_config, + init_args.forge_args.clone(), + ) + .await?; + } + if init_args.deploy_paymaster { + let spinner = Spinner::new(MSG_DEPLOYING_PAYMASTER); deploy_paymaster::deploy_paymaster( shell, chain_config, &mut contracts_config, init_args.forge_args.clone(), + None, + true, ) .await?; contracts_config.save_with_base_path(shell, &chain_config.configs)?; + spinner.finish(); } genesis(init_args.genesis_args.clone(), shell, chain_config) @@ -164,100 +177,6 @@ pub async fn init( Ok(()) } -async fn register_chain( - shell: &Shell, - forge_args: ForgeScriptArgs, - config: &EcosystemConfig, - chain_config: &ChainConfig, - contracts: &mut ContractsConfig, - l1_rpc_url: String, -) -> anyhow::Result<()> { - let deploy_config_path = REGISTER_CHAIN_SCRIPT_PARAMS.input(&config.link_to_code); - - let deploy_config = RegisterChainL1Config::new(chain_config, contracts)?; - deploy_config.save(shell, deploy_config_path)?; - - let mut forge = Forge::new(&config.path_to_foundry()) - .script(®ISTER_CHAIN_SCRIPT_PARAMS.script(), forge_args.clone()) - .with_ffi() - .with_rpc_url(l1_rpc_url) - .with_broadcast(); - - forge = fill_forge_private_key(forge, config.get_wallets()?.governor_private_key())?; - check_the_balance(&forge).await?; - forge.run(shell)?; - - let register_chain_output = RegisterChainOutput::read( - shell, - REGISTER_CHAIN_SCRIPT_PARAMS.output(&chain_config.link_to_code), - )?; - contracts.set_chain_contracts(®ister_chain_output); - Ok(()) -} - -// Distribute eth to the chain wallets for localhost environment -pub async fn distribute_eth( - ecosystem_config: &EcosystemConfig, - 
chain_config: &ChainConfig, - l1_rpc_url: String, -) -> anyhow::Result<()> { - if chain_config.wallet_creation == WalletCreation::Localhost - && ecosystem_config.l1_network == L1Network::Localhost - { - let spinner = Spinner::new(MSG_DISTRIBUTING_ETH_SPINNER); - let wallets = ecosystem_config.get_wallets()?; - let chain_wallets = chain_config.get_wallets_config()?; - let mut addresses = vec![ - chain_wallets.operator.address, - chain_wallets.blob_operator.address, - chain_wallets.governor.address, - ]; - if let Some(deployer) = chain_wallets.deployer { - addresses.push(deployer.address) - } - common::ethereum::distribute_eth( - wallets.operator, - addresses, - l1_rpc_url, - ecosystem_config.l1_network.chain_id(), - AMOUNT_FOR_DISTRIBUTION_TO_WALLETS, - ) - .await?; - spinner.finish(); - } - Ok(()) -} - -pub async fn mint_base_token( - ecosystem_config: &EcosystemConfig, - chain_config: &ChainConfig, - l1_rpc_url: String, -) -> anyhow::Result<()> { - if chain_config.wallet_creation == WalletCreation::Localhost - && ecosystem_config.l1_network == L1Network::Localhost - && chain_config.base_token != BaseToken::eth() - { - let spinner = Spinner::new(MSG_MINT_BASE_TOKEN_SPINNER); - let wallets = ecosystem_config.get_wallets()?; - let chain_wallets = chain_config.get_wallets_config()?; - let base_token = &chain_config.base_token; - let addresses = vec![wallets.governor.address, chain_wallets.governor.address]; - let amount = AMOUNT_FOR_DISTRIBUTION_TO_WALLETS * base_token.nominator as u128 - / base_token.denominator as u128; - common::ethereum::mint_token( - wallets.operator, - base_token.address, - addresses, - l1_rpc_url, - ecosystem_config.l1_network.chain_id(), - amount, - ) - .await?; - spinner.finish(); - } - Ok(()) -} - fn apply_port_offset(port_offset: u16, general_config: &mut GeneralConfig) -> anyhow::Result<()> { let Some(mut ports_config) = ports_config(general_config) else { bail!("Missing ports config"); diff --git 
a/zk_toolbox/crates/zk_inception/src/commands/chain/mod.rs b/zk_toolbox/crates/zk_inception/src/commands/chain/mod.rs index afc92d2288bf..4ddc4bf58569 100644 --- a/zk_toolbox/crates/zk_inception/src/commands/chain/mod.rs +++ b/zk_toolbox/crates/zk_inception/src/commands/chain/mod.rs @@ -1,6 +1,7 @@ +use ::common::forge::ForgeScriptArgs; +use args::build_transactions::BuildTransactionsArgs; pub(crate) use args::create::ChainCreateArgsFinal; use clap::Subcommand; -use common::forge::ForgeScriptArgs; pub(crate) use create::create_chain_inner; use xshell::Shell; @@ -10,17 +11,22 @@ use crate::commands::chain::{ }; pub(crate) mod args; +mod build_transactions; +mod common; mod create; pub mod deploy_l2_contracts; pub mod deploy_paymaster; pub mod genesis; pub(crate) mod init; mod set_token_multiplier_setter; +mod setup_legacy_bridge; #[derive(Subcommand, Debug)] pub enum ChainCommands { /// Create a new chain, setting the necessary configurations for later initialization Create(ChainCreateArgs), + /// Create unsigned transactions for chain deployment + BuildTransactions(BuildTransactionsArgs), /// Initialize chain, deploying necessary contracts and performing on-chain operations Init(InitArgs), /// Run server genesis @@ -47,6 +53,7 @@ pub(crate) async fn run(shell: &Shell, args: ChainCommands) -> anyhow::Result<() match args { ChainCommands::Create(args) => create::run(args, shell), ChainCommands::Init(args) => init::run(args, shell).await, + ChainCommands::BuildTransactions(args) => build_transactions::run(args, shell).await, ChainCommands::Genesis(args) => genesis::run(args, shell).await, ChainCommands::DeployL2Contracts(args) => { deploy_l2_contracts::run(args, shell, Deploy2ContractsOption::All).await diff --git a/zk_toolbox/crates/zk_inception/src/commands/chain/set_token_multiplier_setter.rs b/zk_toolbox/crates/zk_inception/src/commands/chain/set_token_multiplier_setter.rs index f92391c22f47..15f7de4c277c 100644 --- 
a/zk_toolbox/crates/zk_inception/src/commands/chain/set_token_multiplier_setter.rs +++ b/zk_toolbox/crates/zk_inception/src/commands/chain/set_token_multiplier_setter.rs @@ -43,8 +43,8 @@ pub async fn run(args: ForgeScriptArgs, shell: &Shell) -> anyhow::Result<()> { .l1_rpc_url .expose_str() .to_string(); - let token_multiplier_setter_address = ecosystem_config - .get_wallets() + let token_multiplier_setter_address = chain_config + .get_wallets_config() .context(MSG_WALLETS_CONFIG_MUST_BE_PRESENT)? .token_multiplier_setter .context(MSG_WALLET_TOKEN_MULTIPLIER_SETTER_NOT_FOUND)? diff --git a/zk_toolbox/crates/zk_inception/src/commands/chain/setup_legacy_bridge.rs b/zk_toolbox/crates/zk_inception/src/commands/chain/setup_legacy_bridge.rs new file mode 100644 index 000000000000..925014fe4e61 --- /dev/null +++ b/zk_toolbox/crates/zk_inception/src/commands/chain/setup_legacy_bridge.rs @@ -0,0 +1,73 @@ +use anyhow::Context; +use common::{ + forge::{Forge, ForgeScriptArgs}, + spinner::Spinner, +}; +use config::{ + forge_interface::{ + script_params::SETUP_LEGACY_BRIDGE, setup_legacy_bridge::SetupLegacyBridgeInput, + }, + traits::SaveConfig, + ChainConfig, ContractsConfig, EcosystemConfig, +}; +use xshell::Shell; + +use crate::{ + messages::{MSG_DEPLOYING_PAYMASTER, MSG_L1_SECRETS_MUST_BE_PRESENTED}, + utils::forge::{check_the_balance, fill_forge_private_key}, +}; + +pub async fn setup_legacy_bridge( + shell: &Shell, + chain_config: &ChainConfig, + ecosystem_config: &EcosystemConfig, + contracts_config: &ContractsConfig, + forge_args: ForgeScriptArgs, +) -> anyhow::Result<()> { + let input = SetupLegacyBridgeInput { + bridgehub: contracts_config.ecosystem_contracts.bridgehub_proxy_addr, + diamond_proxy: contracts_config.l1.diamond_proxy_addr, + shared_bridge_proxy: contracts_config.bridges.shared.l1_address, + transparent_proxy_admin: contracts_config + .ecosystem_contracts + .transparent_proxy_admin_addr, + erc20bridge_proxy: contracts_config.bridges.erc20.l1_address, + 
token_weth_address: Default::default(), + chain_id: chain_config.chain_id, + l2shared_bridge_address: contracts_config + .bridges + .shared + .l2_address + .expect("Not fully initialized"), + create2factory_salt: contracts_config.create2_factory_salt, + create2factory_addr: contracts_config.create2_factory_addr, + }; + let foundry_contracts_path = chain_config.path_to_foundry(); + input.save(shell, SETUP_LEGACY_BRIDGE.input(&chain_config.link_to_code))?; + let secrets = chain_config.get_secrets_config()?; + + let mut forge = Forge::new(&foundry_contracts_path) + .script(&SETUP_LEGACY_BRIDGE.script(), forge_args.clone()) + .with_ffi() + .with_rpc_url( + secrets + .l1 + .context(MSG_L1_SECRETS_MUST_BE_PRESENTED)? + .l1_rpc_url + .expose_str() + .to_string(), + ) + .with_broadcast(); + + forge = fill_forge_private_key( + forge, + ecosystem_config.get_wallets()?.governor_private_key(), + )?; + + let spinner = Spinner::new(MSG_DEPLOYING_PAYMASTER); + check_the_balance(&forge).await?; + forge.run(shell)?; + spinner.finish(); + + Ok(()) +} diff --git a/zk_toolbox/crates/zk_inception/src/commands/containers.rs b/zk_toolbox/crates/zk_inception/src/commands/containers.rs index 81d7970df839..9c11cc2e3efc 100644 --- a/zk_toolbox/crates/zk_inception/src/commands/containers.rs +++ b/zk_toolbox/crates/zk_inception/src/commands/containers.rs @@ -6,9 +6,13 @@ use config::{EcosystemConfig, DOCKER_COMPOSE_FILE, ERA_OBSERVABILITY_COMPOSE_FIL use xshell::Shell; use super::args::ContainersArgs; -use crate::messages::{ - MSG_CONTAINERS_STARTED, MSG_FAILED_TO_FIND_ECOSYSTEM_ERR, MSG_RETRY_START_CONTAINERS_PROMPT, - MSG_STARTING_CONTAINERS, MSG_STARTING_DOCKER_CONTAINERS_SPINNER, +use crate::{ + commands::ecosystem::setup_observability, + messages::{ + MSG_CONTAINERS_STARTED, MSG_FAILED_TO_FIND_ECOSYSTEM_ERR, + MSG_RETRY_START_CONTAINERS_PROMPT, MSG_STARTING_CONTAINERS, + MSG_STARTING_DOCKER_CONTAINERS_SPINNER, + }, }; pub fn run(shell: &Shell, args: ContainersArgs) -> anyhow::Result<()> { 
@@ -20,6 +24,10 @@ pub fn run(shell: &Shell, args: ContainersArgs) -> anyhow::Result<()> { logger::info(MSG_STARTING_CONTAINERS); let spinner = Spinner::new(MSG_STARTING_DOCKER_CONTAINERS_SPINNER); + if args.observability { + setup_observability::run(shell)?; + } + start_containers(shell, args.observability)?; spinner.finish(); diff --git a/zk_toolbox/crates/zk_inception/src/commands/contract_verifier/args/init.rs b/zk_toolbox/crates/zk_inception/src/commands/contract_verifier/args/init.rs index c74e4a4f765e..7ba7d3cb40cf 100644 --- a/zk_toolbox/crates/zk_inception/src/commands/contract_verifier/args/init.rs +++ b/zk_toolbox/crates/zk_inception/src/commands/contract_verifier/args/init.rs @@ -5,12 +5,13 @@ use xshell::Shell; use super::releases::{get_releases_with_arch, Arch, Version}; use crate::messages::{ - MSG_ARCH_NOT_SUPPORTED_ERR, MSG_FETCHING_VYPER_RELEASES_SPINNER, - MSG_FETCHING_ZKSOLC_RELEASES_SPINNER, MSG_FETCHING_ZKVYPER_RELEASES_SPINNER, - MSG_FETCH_SOLC_RELEASES_SPINNER, MSG_GET_SOLC_RELEASES_ERR, MSG_GET_VYPER_RELEASES_ERR, - MSG_GET_ZKSOLC_RELEASES_ERR, MSG_GET_ZKVYPER_RELEASES_ERR, MSG_NO_VERSION_FOUND_ERR, - MSG_OS_NOT_SUPPORTED_ERR, MSG_SOLC_VERSION_PROMPT, MSG_VYPER_VERSION_PROMPT, - MSG_ZKSOLC_VERSION_PROMPT, MSG_ZKVYPER_VERSION_PROMPT, + MSG_ARCH_NOT_SUPPORTED_ERR, MSG_ERA_VM_SOLC_VERSION_PROMPT, + MSG_FETCHING_VYPER_RELEASES_SPINNER, MSG_FETCHING_ZKSOLC_RELEASES_SPINNER, + MSG_FETCHING_ZKVYPER_RELEASES_SPINNER, MSG_FETCH_ERA_VM_SOLC_RELEASES_SPINNER, + MSG_FETCH_SOLC_RELEASES_SPINNER, MSG_GET_ERA_VM_SOLC_RELEASES_ERR, MSG_GET_SOLC_RELEASES_ERR, + MSG_GET_VYPER_RELEASES_ERR, MSG_GET_ZKSOLC_RELEASES_ERR, MSG_GET_ZKVYPER_RELEASES_ERR, + MSG_NO_VERSION_FOUND_ERR, MSG_OS_NOT_SUPPORTED_ERR, MSG_SOLC_VERSION_PROMPT, + MSG_VYPER_VERSION_PROMPT, MSG_ZKSOLC_VERSION_PROMPT, MSG_ZKVYPER_VERSION_PROMPT, }; #[derive(Debug, Clone, Parser, Default)] @@ -24,9 +25,15 @@ pub struct InitContractVerifierArgs { /// Version of solc to install #[clap(long)] pub 
solc_version: Option, + /// Version of era vm solc to install + #[clap(long)] + pub era_vm_solc_version: Option, /// Version of vyper to install #[clap(long)] pub vyper_version: Option, + /// Install only provided compilers + #[clap(long, default_missing_value = "true")] + pub only: bool, } #[derive(Debug, Clone)] @@ -34,6 +41,7 @@ pub struct InitContractVerifierArgsFinal { pub zksolc_releases: Vec, pub zkvyper_releases: Vec, pub solc_releases: Vec, + pub era_vm_solc_releases: Vec, pub vyper_releases: Vec, } @@ -68,6 +76,14 @@ impl InitContractVerifierArgs { ) .context(MSG_GET_SOLC_RELEASES_ERR)?; + let era_vm_solc_releases = get_releases_with_arch( + shell, + "matter-labs/era-solidity", + arch, + MSG_FETCH_ERA_VM_SOLC_RELEASES_SPINNER, + ) + .context(MSG_GET_ERA_VM_SOLC_RELEASES_ERR)?; + let vyper_releases = get_releases_with_arch( shell, "vyperlang/vyper", @@ -81,33 +97,42 @@ impl InitContractVerifierArgs { zksolc_releases.clone(), MSG_ZKSOLC_VERSION_PROMPT, )?; - let zksolc_releases = get_releases_above_version(zksolc_releases, zksolc_version)?; + let zksolc_releases = get_final_releases(zksolc_releases, zksolc_version, self.only)?; let zkvyper_version = select_min_version( self.zkvyper_version, zkvyper_releases.clone(), MSG_ZKVYPER_VERSION_PROMPT, )?; - let zkvyper_releases = get_releases_above_version(zkvyper_releases, zkvyper_version)?; + let zkvyper_releases = get_final_releases(zkvyper_releases, zkvyper_version, self.only)?; let solc_version = select_min_version( self.solc_version, solc_releases.clone(), MSG_SOLC_VERSION_PROMPT, )?; - let solc_releases = get_releases_above_version(solc_releases, solc_version)?; + let solc_releases = get_final_releases(solc_releases, solc_version, self.only)?; + + let era_vm_solc_version = select_min_version( + self.era_vm_solc_version, + era_vm_solc_releases.clone(), + MSG_ERA_VM_SOLC_VERSION_PROMPT, + )?; + let era_vm_solc_releases = + get_final_releases(era_vm_solc_releases, era_vm_solc_version, self.only)?; let 
vyper_version = select_min_version( self.vyper_version, vyper_releases.clone(), MSG_VYPER_VERSION_PROMPT, )?; - let vyper_releases = get_releases_above_version(vyper_releases, vyper_version)?; + let vyper_releases = get_final_releases(vyper_releases, vyper_version, self.only)?; Ok(InitContractVerifierArgsFinal { zksolc_releases, zkvyper_releases, solc_releases, + era_vm_solc_releases, vyper_releases, }) } @@ -156,14 +181,20 @@ fn select_min_version( Ok(selected) } -fn get_releases_above_version( +fn get_final_releases( releases: Vec, version: Version, + only: bool, ) -> anyhow::Result> { let pos = releases .iter() .position(|r| r.version == version.version) .context(MSG_NO_VERSION_FOUND_ERR)?; - Ok(releases[..=pos].to_vec()) + let result = if only { + vec![releases[pos].clone()] + } else { + releases[..=pos].to_vec() + }; + Ok(result) } diff --git a/zk_toolbox/crates/zk_inception/src/commands/contract_verifier/init.rs b/zk_toolbox/crates/zk_inception/src/commands/contract_verifier/init.rs index 5fd482ae5fff..f376a0d36eca 100644 --- a/zk_toolbox/crates/zk_inception/src/commands/contract_verifier/init.rs +++ b/zk_toolbox/crates/zk_inception/src/commands/contract_verifier/init.rs @@ -36,6 +36,14 @@ pub(crate) async fn run(shell: &Shell, args: InitContractVerifierArgs) -> anyhow "solc", )?; + download_binaries( + shell, + args.era_vm_solc_releases, + get_era_vm_solc_path, + &link_to_code, + "solc", + )?; + download_binaries( shell, args.vyper_releases, @@ -105,3 +113,9 @@ fn get_vyper_path(link_to_code: &Path, version: &str) -> PathBuf { fn get_solc_path(link_to_code: &Path, version: &str) -> PathBuf { link_to_code.join("etc/solc-bin/").join(version) } + +fn get_era_vm_solc_path(link_to_code: &Path, version: &str) -> PathBuf { + link_to_code + .join("etc/solc-bin/") + .join(format!("zkVM-{version}")) +} diff --git a/zk_toolbox/crates/zk_inception/src/commands/ecosystem/args/build_transactions.rs 
b/zk_toolbox/crates/zk_inception/src/commands/ecosystem/args/build_transactions.rs new file mode 100644 index 000000000000..697fa518b6e4 --- /dev/null +++ b/zk_toolbox/crates/zk_inception/src/commands/ecosystem/args/build_transactions.rs @@ -0,0 +1,68 @@ +use std::{path::PathBuf, str::FromStr}; + +use clap::Parser; +use common::{forge::ForgeScriptArgs, Prompt}; +use serde::{Deserialize, Serialize}; +use url::Url; +use zksync_basic_types::H160; + +use crate::{ + consts::DEFAULT_UNSIGNED_TRANSACTIONS_DIR, + defaults::LOCAL_RPC_URL, + messages::{ + MSG_L1_RPC_URL_HELP, MSG_L1_RPC_URL_INVALID_ERR, MSG_L1_RPC_URL_PROMPT, + MSG_SENDER_ADDRESS_PROMPT, + }, +}; + +#[derive(Debug, Clone, Serialize, Deserialize, Parser)] +pub struct BuildTransactionsArgs { + /// Address of the transaction sender. + #[clap(long)] + pub sender: Option, + #[clap(long, help = MSG_L1_RPC_URL_HELP)] + pub l1_rpc_url: Option, + /// Output directory for the generated files. + #[arg(long, short)] + pub out: Option, + #[clap(flatten)] + #[serde(flatten)] + pub forge_args: ForgeScriptArgs, +} + +impl BuildTransactionsArgs { + pub fn fill_values_with_prompt(self) -> BuildTransactionsFinal { + let sender = self.sender.unwrap_or_else(|| { + Prompt::new(MSG_SENDER_ADDRESS_PROMPT) + .validate_with(|val: &String| -> Result<(), String> { + H160::from_str(val).map_or_else(|err| Err(err.to_string()), |_| Ok(())) + }) + .ask() + }); + + let l1_rpc_url = self.l1_rpc_url.unwrap_or_else(|| { + Prompt::new(MSG_L1_RPC_URL_PROMPT) + .default(LOCAL_RPC_URL) + .validate_with(|val: &String| -> Result<(), String> { + Url::parse(val) + .map(|_| ()) + .map_err(|_| MSG_L1_RPC_URL_INVALID_ERR.to_string()) + }) + .ask() + }); + BuildTransactionsFinal { + sender, + out: self.out.unwrap_or(DEFAULT_UNSIGNED_TRANSACTIONS_DIR.into()), + forge_args: self.forge_args.clone(), + l1_rpc_url, + } + } +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct BuildTransactionsFinal { + pub sender: String, + pub out: PathBuf, + pub 
forge_args: ForgeScriptArgs, + pub l1_rpc_url: String, +} diff --git a/zk_toolbox/crates/zk_inception/src/commands/ecosystem/args/mod.rs b/zk_toolbox/crates/zk_inception/src/commands/ecosystem/args/mod.rs index 8a6048a8643b..c25eebda3d6d 100644 --- a/zk_toolbox/crates/zk_inception/src/commands/ecosystem/args/mod.rs +++ b/zk_toolbox/crates/zk_inception/src/commands/ecosystem/args/mod.rs @@ -1,3 +1,4 @@ +pub mod build_transactions; pub mod change_default; pub mod create; pub mod init; diff --git a/zk_toolbox/crates/zk_inception/src/commands/ecosystem/build_transactions.rs b/zk_toolbox/crates/zk_inception/src/commands/ecosystem/build_transactions.rs new file mode 100644 index 000000000000..ff7132360972 --- /dev/null +++ b/zk_toolbox/crates/zk_inception/src/commands/ecosystem/build_transactions.rs @@ -0,0 +1,79 @@ +use anyhow::Context; +use common::{git, logger, spinner::Spinner}; +use config::{traits::SaveConfigWithBasePath, EcosystemConfig}; +use xshell::Shell; + +use super::{ + args::build_transactions::BuildTransactionsArgs, + common::deploy_l1, + create_configs::create_initial_deployments_config, + utils::{build_system_contracts, install_yarn_dependencies}, +}; +use crate::messages::{ + MSG_BUILDING_ECOSYSTEM, MSG_BUILDING_ECOSYSTEM_CONTRACTS_SPINNER, MSG_ECOSYSTEM_TXN_OUTRO, + MSG_ECOSYSTEM_TXN_OUT_PATH_INVALID_ERR, MSG_INTALLING_DEPS_SPINNER, + MSG_WRITING_OUTPUT_FILES_SPINNER, +}; + +const DEPLOY_TRANSACTIONS_FILE_SRC: &str = + "contracts/l1-contracts/broadcast/DeployL1.s.sol/9/dry-run/run-latest.json"; +const DEPLOY_TRANSACTIONS_FILE_DST: &str = "deploy-l1-txns.json"; + +const SCRIPT_CONFIG_FILE_SRC: &str = "contracts/l1-contracts/script-config/config-deploy-l1.toml"; +const SCRIPT_CONFIG_FILE_DST: &str = "config-deploy-l1.toml"; + +pub async fn run(args: BuildTransactionsArgs, shell: &Shell) -> anyhow::Result<()> { + let args = args.fill_values_with_prompt(); + let ecosystem_config = EcosystemConfig::from_file(shell)?; + + git::submodule_update(shell, 
ecosystem_config.link_to_code.clone())?; + + let initial_deployment_config = match ecosystem_config.get_initial_deployment_config() { + Ok(config) => config, + Err(_) => create_initial_deployments_config(shell, &ecosystem_config.config)?, + }; + + logger::info(MSG_BUILDING_ECOSYSTEM); + + let spinner = Spinner::new(MSG_INTALLING_DEPS_SPINNER); + install_yarn_dependencies(shell, &ecosystem_config.link_to_code)?; + build_system_contracts(shell, &ecosystem_config.link_to_code)?; + spinner.finish(); + + let spinner = Spinner::new(MSG_BUILDING_ECOSYSTEM_CONTRACTS_SPINNER); + let contracts_config = deploy_l1( + shell, + &args.forge_args, + &ecosystem_config, + &initial_deployment_config, + &args.l1_rpc_url, + Some(args.sender), + false, + ) + .await?; + + contracts_config.save_with_base_path(shell, &args.out)?; + spinner.finish(); + + let spinner = Spinner::new(MSG_WRITING_OUTPUT_FILES_SPINNER); + shell + .create_dir(&args.out) + .context(MSG_ECOSYSTEM_TXN_OUT_PATH_INVALID_ERR)?; + + shell.copy_file( + ecosystem_config + .link_to_code + .join(DEPLOY_TRANSACTIONS_FILE_SRC), + args.out.join(DEPLOY_TRANSACTIONS_FILE_DST), + )?; + + shell.copy_file( + ecosystem_config.link_to_code.join(SCRIPT_CONFIG_FILE_SRC), + args.out.join(SCRIPT_CONFIG_FILE_DST), + )?; + spinner.finish(); + + logger::outro(MSG_ECOSYSTEM_TXN_OUTRO); + + Ok(()) +} diff --git a/zk_toolbox/crates/zk_inception/src/commands/ecosystem/common.rs b/zk_toolbox/crates/zk_inception/src/commands/ecosystem/common.rs new file mode 100644 index 000000000000..950d39876b09 --- /dev/null +++ b/zk_toolbox/crates/zk_inception/src/commands/ecosystem/common.rs @@ -0,0 +1,75 @@ +use anyhow::Context; +use common::forge::{Forge, ForgeScriptArgs}; +use config::{ + forge_interface::{ + deploy_ecosystem::{ + input::{DeployL1Config, InitialDeploymentConfig}, + output::DeployL1Output, + }, + script_params::DEPLOY_ECOSYSTEM_SCRIPT_PARAMS, + }, + traits::{ReadConfig, ReadConfigWithBasePath, SaveConfig}, + ContractsConfig, 
EcosystemConfig, GenesisConfig, +}; +use types::{L1Network, ProverMode}; +use xshell::Shell; + +use crate::utils::forge::{check_the_balance, fill_forge_private_key}; + +pub async fn deploy_l1( + shell: &Shell, + forge_args: &ForgeScriptArgs, + config: &EcosystemConfig, + initial_deployment_config: &InitialDeploymentConfig, + l1_rpc_url: &str, + sender: Option, + broadcast: bool, +) -> anyhow::Result { + let deploy_config_path = DEPLOY_ECOSYSTEM_SCRIPT_PARAMS.input(&config.link_to_code); + let default_genesis_config = + GenesisConfig::read_with_base_path(shell, config.get_default_configs_path()) + .context("Context")?; + + let wallets_config = config.get_wallets()?; + // For deploying ecosystem we only need genesis batch params + let deploy_config = DeployL1Config::new( + &default_genesis_config, + &wallets_config, + initial_deployment_config, + config.era_chain_id, + config.prover_version == ProverMode::NoProofs, + ); + deploy_config.save(shell, deploy_config_path)?; + + let mut forge = Forge::new(&config.path_to_foundry()) + .script(&DEPLOY_ECOSYSTEM_SCRIPT_PARAMS.script(), forge_args.clone()) + .with_ffi() + .with_rpc_url(l1_rpc_url.to_string()); + + if config.l1_network == L1Network::Localhost { + // It's a kludge for reth, just because it doesn't behave properly with large amount of txs + forge = forge.with_slow(); + } + + if let Some(address) = sender { + forge = forge.with_sender(address); + } else { + forge = fill_forge_private_key(forge, wallets_config.deployer_private_key())?; + } + + if broadcast { + forge = forge.with_broadcast(); + check_the_balance(&forge).await?; + } + + forge.run(shell)?; + + let script_output = DeployL1Output::read( + shell, + DEPLOY_ECOSYSTEM_SCRIPT_PARAMS.output(&config.link_to_code), + )?; + let mut contracts_config = ContractsConfig::default(); + contracts_config.update_from_l1_output(&script_output); + + Ok(contracts_config) +} diff --git a/zk_toolbox/crates/zk_inception/src/commands/ecosystem/init.rs 
b/zk_toolbox/crates/zk_inception/src/commands/ecosystem/init.rs index 0862d1018d89..2d31aad10336 100644 --- a/zk_toolbox/crates/zk_inception/src/commands/ecosystem/init.rs +++ b/zk_toolbox/crates/zk_inception/src/commands/ecosystem/init.rs @@ -1,11 +1,7 @@ -use std::{ - path::{Path, PathBuf}, - str::FromStr, -}; +use std::{path::PathBuf, str::FromStr}; use anyhow::Context; use common::{ - cmd::Cmd, config::global_config, forge::{Forge, ForgeScriptArgs}, git, logger, @@ -15,28 +11,25 @@ use common::{ use config::{ forge_interface::{ deploy_ecosystem::{ - input::{ - DeployErc20Config, DeployL1Config, Erc20DeploymentConfig, InitialDeploymentConfig, - }, - output::{DeployL1Output, ERC20Tokens}, + input::{DeployErc20Config, Erc20DeploymentConfig, InitialDeploymentConfig}, + output::ERC20Tokens, }, - script_params::{DEPLOY_ECOSYSTEM_SCRIPT_PARAMS, DEPLOY_ERC20_SCRIPT_PARAMS}, - }, - traits::{ - FileConfigWithDefaultName, ReadConfig, ReadConfigWithBasePath, SaveConfig, - SaveConfigWithBasePath, + script_params::DEPLOY_ERC20_SCRIPT_PARAMS, }, - ContractsConfig, EcosystemConfig, GenesisConfig, + traits::{FileConfigWithDefaultName, ReadConfig, SaveConfig, SaveConfigWithBasePath}, + ContractsConfig, EcosystemConfig, }; -use types::{L1Network, ProverMode}; -use xshell::{cmd, Shell}; +use types::L1Network; +use xshell::Shell; use super::{ args::init::{EcosystemArgsFinal, EcosystemInitArgs, EcosystemInitArgsFinal}, + common::deploy_l1, setup_observability, + utils::{build_system_contracts, install_yarn_dependencies}, }; use crate::{ - accept_ownership::accept_owner, + accept_ownership::{accept_admin, accept_owner}, commands::{ chain::{self, args::init::PortOffset}, ecosystem::create_configs::{ @@ -280,47 +273,19 @@ async fn deploy_ecosystem_inner( initial_deployment_config: &InitialDeploymentConfig, l1_rpc_url: String, ) -> anyhow::Result { - let deploy_config_path = DEPLOY_ECOSYSTEM_SCRIPT_PARAMS.input(&config.link_to_code); - - let default_genesis_config = - 
GenesisConfig::read_with_base_path(shell, config.get_default_configs_path()) - .context("Context")?; - - let wallets_config = config.get_wallets()?; - // For deploying ecosystem we only need genesis batch params - let deploy_config = DeployL1Config::new( - &default_genesis_config, - &wallets_config, - initial_deployment_config, - config.era_chain_id, - config.prover_version == ProverMode::NoProofs, - ); - deploy_config.save(shell, deploy_config_path)?; - - let mut forge = Forge::new(&config.path_to_foundry()) - .script(&DEPLOY_ECOSYSTEM_SCRIPT_PARAMS.script(), forge_args.clone()) - .with_ffi() - .with_rpc_url(l1_rpc_url.clone()) - .with_broadcast(); - - if config.l1_network == L1Network::Localhost { - // It's a kludge for reth, just because it doesn't behave properly with large amount of txs - forge = forge.with_slow(); - } - - forge = fill_forge_private_key(forge, wallets_config.deployer_private_key())?; - let spinner = Spinner::new(MSG_DEPLOYING_ECOSYSTEM_CONTRACTS_SPINNER); - check_the_balance(&forge).await?; - forge.run(shell)?; + let contracts_config = deploy_l1( + shell, + &forge_args, + config, + initial_deployment_config, + &l1_rpc_url, + None, + true, + ) + .await?; spinner.finish(); - let script_output = DeployL1Output::read( - shell, - DEPLOY_ECOSYSTEM_SCRIPT_PARAMS.output(&config.link_to_code), - )?; - let mut contracts_config = ContractsConfig::default(); - contracts_config.update_from_l1_output(&script_output); accept_owner( shell, config, @@ -332,6 +297,17 @@ async fn deploy_ecosystem_inner( ) .await?; + accept_admin( + shell, + config, + contracts_config.l1.chain_admin_addr, + config.get_wallets()?.governor_private_key(), + contracts_config.ecosystem_contracts.bridgehub_proxy_addr, + &forge_args, + l1_rpc_url.clone(), + ) + .await?; + accept_owner( shell, config, @@ -343,6 +319,17 @@ async fn deploy_ecosystem_inner( ) .await?; + accept_admin( + shell, + config, + contracts_config.l1.chain_admin_addr, + config.get_wallets()?.governor_private_key(), + 
contracts_config.bridges.shared.l1_address, + &forge_args, + l1_rpc_url.clone(), + ) + .await?; + accept_owner( shell, config, @@ -356,15 +343,18 @@ async fn deploy_ecosystem_inner( ) .await?; - Ok(contracts_config) -} - -fn install_yarn_dependencies(shell: &Shell, link_to_code: &Path) -> anyhow::Result<()> { - let _dir_guard = shell.push_dir(link_to_code); - Ok(Cmd::new(cmd!(shell, "yarn install")).run()?) -} + accept_admin( + shell, + config, + contracts_config.l1.chain_admin_addr, + config.get_wallets()?.governor_private_key(), + contracts_config + .ecosystem_contracts + .state_transition_proxy_addr, + &forge_args, + l1_rpc_url.clone(), + ) + .await?; -fn build_system_contracts(shell: &Shell, link_to_code: &Path) -> anyhow::Result<()> { - let _dir_guard = shell.push_dir(link_to_code.join("contracts")); - Ok(Cmd::new(cmd!(shell, "yarn sc build")).run()?) + Ok(contracts_config) } diff --git a/zk_toolbox/crates/zk_inception/src/commands/ecosystem/mod.rs b/zk_toolbox/crates/zk_inception/src/commands/ecosystem/mod.rs index cb5195ccf937..3f4aa7565e19 100644 --- a/zk_toolbox/crates/zk_inception/src/commands/ecosystem/mod.rs +++ b/zk_toolbox/crates/zk_inception/src/commands/ecosystem/mod.rs @@ -1,3 +1,4 @@ +use args::build_transactions::BuildTransactionsArgs; use clap::Subcommand; use xshell::Shell; @@ -6,11 +7,14 @@ use crate::commands::ecosystem::args::{ }; mod args; +pub(crate) mod build_transactions; mod change_default; +mod common; mod create; pub mod create_configs; pub(crate) mod init; -mod setup_observability; +pub(crate) mod setup_observability; +mod utils; #[derive(Subcommand, Debug)] #[allow(clippy::large_enum_variant)] @@ -18,6 +22,8 @@ pub enum EcosystemCommands { /// Create a new ecosystem and chain, /// setting necessary configurations for later initialization Create(EcosystemCreateArgs), + /// Create transactions to build ecosystem contracts + BuildTransactions(BuildTransactionsArgs), /// Initialize ecosystem and chain, /// deploying necessary contracts 
and performing on-chain operations Init(EcosystemInitArgs), @@ -33,6 +39,7 @@ pub enum EcosystemCommands { pub(crate) async fn run(shell: &Shell, args: EcosystemCommands) -> anyhow::Result<()> { match args { EcosystemCommands::Create(args) => create::run(args, shell), + EcosystemCommands::BuildTransactions(args) => build_transactions::run(args, shell).await, EcosystemCommands::Init(args) => init::run(args, shell).await, EcosystemCommands::ChangeDefaultChain(args) => change_default::run(args, shell), EcosystemCommands::SetupObservability => setup_observability::run(shell), diff --git a/zk_toolbox/crates/zk_inception/src/commands/ecosystem/utils.rs b/zk_toolbox/crates/zk_inception/src/commands/ecosystem/utils.rs new file mode 100644 index 000000000000..a51adc75fb42 --- /dev/null +++ b/zk_toolbox/crates/zk_inception/src/commands/ecosystem/utils.rs @@ -0,0 +1,14 @@ +use std::path::Path; + +use common::cmd::Cmd; +use xshell::{cmd, Shell}; + +pub(super) fn install_yarn_dependencies(shell: &Shell, link_to_code: &Path) -> anyhow::Result<()> { + let _dir_guard = shell.push_dir(link_to_code); + Ok(Cmd::new(cmd!(shell, "yarn install")).run()?) +} + +pub(super) fn build_system_contracts(shell: &Shell, link_to_code: &Path) -> anyhow::Result<()> { + let _dir_guard = shell.push_dir(link_to_code.join("contracts")); + Ok(Cmd::new(cmd!(shell, "yarn sc build")).run()?) 
+} diff --git a/zk_toolbox/crates/zk_inception/src/commands/external_node/args/run.rs b/zk_toolbox/crates/zk_inception/src/commands/external_node/args/run.rs index 1bc0c06728d7..cd6ff4c71534 100644 --- a/zk_toolbox/crates/zk_inception/src/commands/external_node/args/run.rs +++ b/zk_toolbox/crates/zk_inception/src/commands/external_node/args/run.rs @@ -1,7 +1,9 @@ use clap::Parser; use serde::{Deserialize, Serialize}; -use crate::messages::{MSG_SERVER_ADDITIONAL_ARGS_HELP, MSG_SERVER_COMPONENTS_HELP}; +use crate::messages::{ + MSG_ENABLE_CONSENSUS_HELP, MSG_SERVER_ADDITIONAL_ARGS_HELP, MSG_SERVER_COMPONENTS_HELP, +}; #[derive(Debug, Serialize, Deserialize, Parser)] pub struct RunExternalNodeArgs { @@ -9,6 +11,8 @@ pub struct RunExternalNodeArgs { pub reinit: bool, #[clap(long, help = MSG_SERVER_COMPONENTS_HELP)] pub components: Option>, + #[clap(long, help = MSG_ENABLE_CONSENSUS_HELP, default_missing_value = "true", num_args = 0..=1)] + pub enable_consensus: Option, #[clap(long, short)] #[arg(trailing_var_arg = true, allow_hyphen_values = true, hide = false, help = MSG_SERVER_ADDITIONAL_ARGS_HELP)] pub additional_args: Vec, diff --git a/zk_toolbox/crates/zk_inception/src/commands/external_node/prepare_configs.rs b/zk_toolbox/crates/zk_inception/src/commands/external_node/prepare_configs.rs index 51101c228878..89e08418c6e5 100644 --- a/zk_toolbox/crates/zk_inception/src/commands/external_node/prepare_configs.rs +++ b/zk_toolbox/crates/zk_inception/src/commands/external_node/prepare_configs.rs @@ -1,4 +1,4 @@ -use std::{path::Path, str::FromStr}; +use std::{collections::BTreeMap, path::Path, str::FromStr}; use anyhow::Context; use common::{config::global_config, logger}; @@ -8,14 +8,24 @@ use config::{ }; use xshell::Shell; use zksync_basic_types::url::SensitiveUrl; -use zksync_config::configs::{DatabaseSecrets, L1Secrets}; +use zksync_config::configs::{ + consensus::{ConsensusSecrets, NodeSecretKey, Secret}, + DatabaseSecrets, L1Secrets, +}; +use 
zksync_consensus_crypto::TextFmt; +use zksync_consensus_roles as roles; use crate::{ commands::external_node::args::prepare_configs::{PrepareConfigArgs, PrepareConfigFinal}, messages::{ - msg_preparing_en_config_is_done, MSG_CHAIN_NOT_INITIALIZED, MSG_PREPARING_EN_CONFIGS, + msg_preparing_en_config_is_done, MSG_CHAIN_NOT_INITIALIZED, + MSG_CONSENSUS_CONFIG_MISSING_ERR, MSG_CONSENSUS_SECRETS_MISSING_ERR, + MSG_CONSENSUS_SECRETS_NODE_KEY_MISSING_ERR, MSG_PORTS_CONFIG_ERR, MSG_PREPARING_EN_CONFIGS, + }, + utils::{ + consensus::{get_consensus_config, node_public_key}, + rocks_db::{recreate_rocksdb_dirs, RocksDBDirOption}, }, - utils::rocks_db::{recreate_rocksdb_dirs, RocksDBDirOption}, }; pub fn run(shell: &Shell, args: PrepareConfigArgs) -> anyhow::Result<()> { @@ -64,15 +74,45 @@ fn prepare_configs( gateway_url: None, }; let mut general_en = general.clone(); + let next_empty_ports_config = ports_config(&general) + .context(MSG_PORTS_CONFIG_ERR)? + .next_empty_ports_config(); + update_ports(&mut general_en, &next_empty_ports_config)?; + + // Set consensus config + let main_node_consensus_config = general + .consensus_config + .context(MSG_CONSENSUS_CONFIG_MISSING_ERR)?; + + let mut gossip_static_outbound = BTreeMap::new(); + let main_node_public_key = node_public_key( + &config + .get_secrets_config()? + .consensus + .context(MSG_CONSENSUS_SECRETS_MISSING_ERR)?, + )? + .context(MSG_CONSENSUS_SECRETS_NODE_KEY_MISSING_ERR)?; - update_ports( - &mut general_en, - &ports_config(&general) - .context("da")? 
- .next_empty_ports_config(), + gossip_static_outbound.insert(main_node_public_key, main_node_consensus_config.public_addr); + + let en_consensus_config = get_consensus_config( + config, + next_empty_ports_config, + None, + Some(gossip_static_outbound), )?; + general_en.consensus_config = Some(en_consensus_config.clone()); + en_consensus_config.save_with_base_path(shell, en_configs_path)?; + + // Set secrets config + let node_key = roles::node::SecretKey::generate().encode(); + let consensus_secrets = ConsensusSecrets { + validator_key: None, + attester_key: None, + node_key: Some(NodeSecretKey(Secret::new(node_key))), + }; let secrets = SecretsConfig { - consensus: None, + consensus: Some(consensus_secrets), database: Some(DatabaseSecrets { server_url: Some(args.db.full_url().into()), prover_url: None, diff --git a/zk_toolbox/crates/zk_inception/src/commands/external_node/run.rs b/zk_toolbox/crates/zk_inception/src/commands/external_node/run.rs index 9d3da4663859..46328699e6d4 100644 --- a/zk_toolbox/crates/zk_inception/src/commands/external_node/run.rs +++ b/zk_toolbox/crates/zk_inception/src/commands/external_node/run.rs @@ -32,6 +32,7 @@ async fn run_external_node( if args.reinit { init::init(shell, chain_config).await? 
} + let enable_consensus = args.enable_consensus.unwrap_or(false); let server = RunExternalNode::new(args.components.clone(), chain_config)?; - server.run(shell, args.additional_args.clone()) + server.run(shell, enable_consensus, args.additional_args.clone()) } diff --git a/zk_toolbox/crates/zk_inception/src/commands/prover/init.rs b/zk_toolbox/crates/zk_inception/src/commands/prover/init.rs index 1d92357635c5..20e682745870 100644 --- a/zk_toolbox/crates/zk_inception/src/commands/prover/init.rs +++ b/zk_toolbox/crates/zk_inception/src/commands/prover/init.rs @@ -163,7 +163,7 @@ fn init_file_backed_proof_storage( ecosystem_config: &EcosystemConfig, config: ProofStorageFileBacked, ) -> anyhow::Result { - let proof_store_dir = config.proof_store_dir; + let proof_store_dir = config.proof_store_dir.clone(); let prover_path = get_link_to_prover(ecosystem_config); let proof_store_dir = prover_path.join(proof_store_dir).join("witness_inputs"); @@ -173,7 +173,7 @@ fn init_file_backed_proof_storage( let object_store_config = ObjectStoreConfig { mode: ObjectStoreMode::FileBacked { - file_backed_base_path: proof_store_dir.into_os_string().into_string().unwrap(), + file_backed_base_path: config.proof_store_dir, }, max_retries: PROVER_STORE_MAX_RETRIES, local_mirror_path: None, diff --git a/zk_toolbox/crates/zk_inception/src/commands/prover/run.rs b/zk_toolbox/crates/zk_inception/src/commands/prover/run.rs index 8f72da03f3b3..5f4bf2f4a671 100644 --- a/zk_toolbox/crates/zk_inception/src/commands/prover/run.rs +++ b/zk_toolbox/crates/zk_inception/src/commands/prover/run.rs @@ -2,7 +2,7 @@ use std::path::PathBuf; use anyhow::{anyhow, Context}; use common::{check_prerequisites, cmd::Cmd, config::global_config, logger, GPU_PREREQUISITES}; -use config::{get_link_to_prover, EcosystemConfig}; +use config::{get_link_to_prover, ChainConfig, EcosystemConfig}; use xshell::{cmd, Shell}; use super::args::run::{ProverComponent, ProverRunArgs}; @@ -69,6 +69,7 @@ pub(crate) async fn run(args: 
ProverRunArgs, shell: &Shell) -> anyhow::Result<() if in_docker { let path_to_configs = chain.configs.clone(); let path_to_prover = get_link_to_prover(&ecosystem_config); + update_setup_data_path(&chain, "prover/data/keys".to_string())?; run_dockerized_component( shell, component.image_name(), @@ -80,6 +81,7 @@ pub(crate) async fn run(args: ProverRunArgs, shell: &Shell) -> anyhow::Result<() &path_to_prover, )? } else { + update_setup_data_path(&chain, "data/keys".to_string())?; run_binary_component( shell, component.binary_name(), @@ -132,3 +134,14 @@ fn run_binary_component( cmd = cmd.with_force_run(); cmd.run().context(error) } + +fn update_setup_data_path(chain: &ChainConfig, path: String) -> anyhow::Result<()> { + let mut general_config = chain.get_general_config()?; + general_config + .prover_config + .as_mut() + .expect("Prover config not found") + .setup_data_path = path; + chain.save_general_config(&general_config)?; + Ok(()) +} diff --git a/zk_toolbox/crates/zk_inception/src/commands/server.rs b/zk_toolbox/crates/zk_inception/src/commands/server.rs index b5a09ed04370..f96bc3aeebc9 100644 --- a/zk_toolbox/crates/zk_inception/src/commands/server.rs +++ b/zk_toolbox/crates/zk_inception/src/commands/server.rs @@ -35,7 +35,11 @@ fn run_server( chain_config: &ChainConfig, shell: &Shell, ) -> anyhow::Result<()> { - let server = Server::new(args.components.clone(), chain_config.link_to_code.clone()); + let server = Server::new( + args.components.clone(), + chain_config.link_to_code.clone(), + args.uring, + ); if args.build { server.build(shell)?; diff --git a/zk_toolbox/crates/zk_inception/src/commands/update.rs b/zk_toolbox/crates/zk_inception/src/commands/update.rs index a05ecbe62e0f..c140c3a4e9c8 100644 --- a/zk_toolbox/crates/zk_inception/src/commands/update.rs +++ b/zk_toolbox/crates/zk_inception/src/commands/update.rs @@ -1,7 +1,11 @@ use std::path::Path; use anyhow::{Context, Ok}; -use common::{git, logger, spinner::Spinner}; +use common::{ + git, logger, + 
spinner::Spinner, + yaml::{merge_yaml, ConfigDiff}, +}; use config::{ ChainConfig, EcosystemConfig, CONTRACTS_FILE, EN_CONFIG_FILE, ERA_OBSERBAVILITY_DIR, GENERAL_FILE, GENESIS_FILE, SECRETS_FILE, @@ -12,38 +16,11 @@ use super::args::UpdateArgs; use crate::messages::{ msg_diff_contracts_config, msg_diff_genesis_config, msg_diff_secrets, msg_updating_chain, MSG_CHAIN_NOT_FOUND_ERR, MSG_DIFF_EN_CONFIG, MSG_DIFF_EN_GENERAL_CONFIG, - MSG_DIFF_GENERAL_CONFIG, MSG_INVALID_KEY_TYPE_ERR, MSG_PULLING_ZKSYNC_CODE_SPINNER, + MSG_DIFF_GENERAL_CONFIG, MSG_PULLING_ZKSYNC_CODE_SPINNER, MSG_UPDATING_ERA_OBSERVABILITY_SPINNER, MSG_UPDATING_SUBMODULES_SPINNER, MSG_UPDATING_ZKSYNC, MSG_ZKSYNC_UPDATED, }; -/// Holds the differences between two YAML configurations. -#[derive(Default)] -struct ConfigDiff { - /// Fields that have different values between the two configurations - /// This contains the new values - pub differing_values: serde_yaml::Mapping, - - /// Fields that are present in the new configuration but not in the old one. 
- pub new_fields: serde_yaml::Mapping, -} - -impl ConfigDiff { - fn print(&self, msg: &str, is_warning: bool) { - if self.new_fields.is_empty() { - return; - } - - if is_warning { - logger::warn(msg); - logger::warn(logger::object_to_string(&self.new_fields)); - } else { - logger::info(msg); - logger::info(logger::object_to_string(&self.new_fields)); - } - } -} - pub fn run(shell: &Shell, args: UpdateArgs) -> anyhow::Result<()> { logger::info(MSG_UPDATING_ZKSYNC); let ecosystem = EcosystemConfig::from_file(shell)?; @@ -127,7 +104,7 @@ fn update_config( ) -> anyhow::Result<()> { let original_config = serde_yaml::from_str(&shell.read_file(original_config_path)?)?; let mut chain_config = serde_yaml::from_str(&shell.read_file(chain_config_path)?)?; - let diff = merge_yaml(&mut chain_config, original_config)?; + let diff = merge_yaml(&mut chain_config, original_config, false)?; if save_config { save_updated_config(&shell, chain_config, chain_config_path, diff, msg)?; } else { @@ -202,298 +179,3 @@ fn update_chain( Ok(()) } - -fn merge_yaml_internal( - a: &mut serde_yaml::Value, - b: serde_yaml::Value, - current_key: String, - diff: &mut ConfigDiff, -) -> anyhow::Result<()> { - match (a, b) { - (serde_yaml::Value::Mapping(a), serde_yaml::Value::Mapping(b)) => { - for (key, value) in b { - let k = key.as_str().context(MSG_INVALID_KEY_TYPE_ERR)?.to_string(); - let current_key = if current_key.is_empty() { - k.clone() - } else { - format!("{}.{}", current_key, k) - }; - - if a.contains_key(&key) { - merge_yaml_internal(a.get_mut(&key).unwrap(), value, current_key, diff)?; - } else { - a.insert(key.clone(), value.clone()); - diff.new_fields.insert(current_key.into(), value); - } - } - } - (a, b) => { - if a != &b { - diff.differing_values.insert(current_key.into(), b); - } - } - } - Ok(()) -} - -fn merge_yaml(a: &mut serde_yaml::Value, b: serde_yaml::Value) -> anyhow::Result { - let mut diff = ConfigDiff::default(); - merge_yaml_internal(a, b, "".into(), &mut diff)?; - 
Ok(diff) -} - -#[cfg(test)] -mod tests { - #[test] - fn test_merge_yaml_both_are_equal_returns_no_diff() { - let mut a = serde_yaml::from_str( - r#" - key1: value1 - key2: value2 - key3: - key4: value4 - "#, - ) - .unwrap(); - let b: serde_yaml::Value = serde_yaml::from_str( - r#" - key1: value1 - key2: value2 - key3: - key4: value4 - "#, - ) - .unwrap(); - let expected: serde_yaml::Value = serde_yaml::from_str( - r#" - key1: value1 - key2: value2 - key3: - key4: value4 - "#, - ) - .unwrap(); - let diff = super::merge_yaml(&mut a, b).unwrap(); - assert!(diff.differing_values.is_empty()); - assert!(diff.new_fields.is_empty()); - assert_eq!(a, expected); - } - - #[test] - fn test_merge_yaml_b_has_extra_field_returns_diff() { - let mut a = serde_yaml::from_str( - r#" - key1: value1 - key2: value2 - key3: - key4: value4 - "#, - ) - .unwrap(); - let b: serde_yaml::Value = serde_yaml::from_str( - r#" - key1: value1 - key2: value2 - key3: - key4: value4 - key5: value5 - "#, - ) - .unwrap(); - - let expected: serde_yaml::Value = serde_yaml::from_str( - r#" - key1: value1 - key2: value2 - key3: - key4: value4 - key5: value5 - "#, - ) - .unwrap(); - - let diff = super::merge_yaml(&mut a, b.clone()).unwrap(); - assert!(diff.differing_values.is_empty()); - assert_eq!(diff.new_fields.len(), 1); - assert_eq!( - diff.new_fields.get::("key5".into()).unwrap(), - b.clone().get("key5").unwrap() - ); - assert_eq!(a, expected); - } - - #[test] - fn test_merge_yaml_a_has_extra_field_no_diff() { - let mut a = serde_yaml::from_str( - r#" - key1: value1 - key2: value2 - key3: - key4: value4 - key5: value5 - "#, - ) - .unwrap(); - let b: serde_yaml::Value = serde_yaml::from_str( - r#" - key1: value1 - key2: value2 - key3: - key4: value4 - "#, - ) - .unwrap(); - - let expected: serde_yaml::Value = serde_yaml::from_str( - r#" - key1: value1 - key2: value2 - key3: - key4: value4 - key5: value5 - "#, - ) - .unwrap(); - - let diff = super::merge_yaml(&mut a, b).unwrap(); - 
assert!(diff.differing_values.is_empty()); - assert!(diff.new_fields.is_empty()); - assert_eq!(a, expected); - } - - #[test] - fn test_merge_yaml_a_has_extra_field_and_b_has_extra_field_returns_diff() { - let mut a = serde_yaml::from_str( - r#" - key1: value1 - key2: value2 - key3: - key4: value4 - key5: value5 - "#, - ) - .unwrap(); - let b: serde_yaml::Value = serde_yaml::from_str( - r#" - key1: value1 - key2: value2 - key3: - key4: value4 - key6: value6 - "#, - ) - .unwrap(); - - let expected: serde_yaml::Value = serde_yaml::from_str( - r#" - key1: value1 - key2: value2 - key3: - key4: value4 - key5: value5 - key6: value6 - "#, - ) - .unwrap(); - - let diff = super::merge_yaml(&mut a, b.clone()).unwrap(); - assert_eq!(diff.differing_values.len(), 0); - assert_eq!(diff.new_fields.len(), 1); - assert_eq!( - diff.new_fields.get::("key6".into()).unwrap(), - b.clone().get("key6").unwrap() - ); - assert_eq!(a, expected); - } - - #[test] - fn test_merge_yaml_a_has_different_value_returns_diff() { - let mut a = serde_yaml::from_str( - r#" - key1: value1 - key2: value2 - key3: - key4: value4 - "#, - ) - .unwrap(); - let b: serde_yaml::Value = serde_yaml::from_str( - r#" - key1: value1 - key2: value2 - key3: - key4: value5 - "#, - ) - .unwrap(); - - let expected: serde_yaml::Value = serde_yaml::from_str( - r#" - key1: value1 - key2: value2 - key3: - key4: value4 - "#, - ) - .unwrap(); - - let diff = super::merge_yaml(&mut a, b.clone()).unwrap(); - assert_eq!(diff.differing_values.len(), 1); - assert_eq!( - diff.differing_values - .get::("key3.key4".into()) - .unwrap(), - b.get("key3").unwrap().get("key4").unwrap() - ); - assert_eq!(a, expected); - } - - #[test] - fn test_merge_yaml_a_has_different_value_and_b_has_extra_field_returns_diff() { - let mut a = serde_yaml::from_str( - r#" - key1: value1 - key2: value2 - key3: - key4: value4 - "#, - ) - .unwrap(); - let b: serde_yaml::Value = serde_yaml::from_str( - r#" - key1: value1 - key2: value2 - key3: - key4: value5 - 
key5: value5 - "#, - ) - .unwrap(); - - let expected: serde_yaml::Value = serde_yaml::from_str( - r#" - key1: value1 - key2: value2 - key3: - key4: value4 - key5: value5 - "#, - ) - .unwrap(); - - let diff = super::merge_yaml(&mut a, b.clone()).unwrap(); - assert_eq!(diff.differing_values.len(), 1); - assert_eq!( - diff.differing_values - .get::("key3.key4".into()) - .unwrap(), - b.get("key3").unwrap().get("key4").unwrap() - ); - assert_eq!(diff.new_fields.len(), 1); - assert_eq!( - diff.new_fields.get::("key5".into()).unwrap(), - b.get("key5").unwrap() - ); - assert_eq!(a, expected); - } -} diff --git a/zk_toolbox/crates/zk_inception/src/consts.rs b/zk_toolbox/crates/zk_inception/src/consts.rs index 72c8948a65d1..9f81847e3336 100644 --- a/zk_toolbox/crates/zk_inception/src/consts.rs +++ b/zk_toolbox/crates/zk_inception/src/consts.rs @@ -1,3 +1,5 @@ +use std::net::{IpAddr, Ipv4Addr}; + pub const AMOUNT_FOR_DISTRIBUTION_TO_WALLETS: u128 = 1000000000000000000000; pub const MINIMUM_BALANCE_FOR_WALLET: u128 = 5000000000000000000; @@ -6,9 +8,31 @@ pub const PROVER_MIGRATIONS: &str = "prover/crates/lib/prover_dal/migrations"; pub const PROVER_STORE_MAX_RETRIES: u16 = 10; pub const DEFAULT_CREDENTIALS_FILE: &str = "~/.config/gcloud/application_default_credentials.json"; pub const DEFAULT_PROOF_STORE_DIR: &str = "artifacts"; +pub const DEFAULT_UNSIGNED_TRANSACTIONS_DIR: &str = "transactions"; pub const BELLMAN_CUDA_DIR: &str = "era-bellman-cuda"; pub const L2_BASE_TOKEN_ADDRESS: &str = "0x000000000000000000000000000000000000800A"; +#[allow(non_upper_case_globals)] +const kB: usize = 1024; + +/// Max payload size for consensus in bytes +pub const MAX_PAYLOAD_SIZE: usize = 2_500_000; +/// Max batch size for consensus in bytes +/// Compute a default batch size, so operators are not caught out by the missing setting +/// while we're still working on batch syncing. 
The batch interval is ~1 minute, +/// so there will be ~60 blocks, and an Ethereum Merkle proof is ~1kB, but under high +/// traffic there can be thousands of huge transactions that quickly fill up blocks +/// and there could be more blocks in a batch than expected. We chose a generous +/// limit so as not to prevent any legitimate batch from being transmitted. +pub const MAX_BATCH_SIZE: usize = MAX_PAYLOAD_SIZE * 5000 + kB; +/// Gossip dynamic inbound limit for consensus +pub const GOSSIP_DYNAMIC_INBOUND_LIMIT: usize = 100; + +/// Public address for consensus +pub const CONSENSUS_PUBLIC_ADDRESS_HOST: IpAddr = IpAddr::V4(Ipv4Addr::UNSPECIFIED); +/// Server address for consensus +pub const CONSENSUS_SERVER_ADDRESS_HOST: IpAddr = IpAddr::V4(Ipv4Addr::LOCALHOST); + /// Path to the JS runtime config for the block-explorer-app docker container to be mounted to pub const EXPLORER_APP_DOCKER_CONFIG_PATH: &str = "/usr/src/app/packages/app/dist/config.js"; pub const EXPLORER_APP_DOCKER_IMAGE: &str = "matterlabs/block-explorer-app"; @@ -30,3 +54,7 @@ pub const WITNESS_VECTOR_GENERATOR_BINARY_NAME: &str = "zksync_witness_vector_ge pub const PROVER_BINARY_NAME: &str = "zksync_prover_fri"; pub const COMPRESSOR_BINARY_NAME: &str = "zksync_proof_fri_compressor"; pub const PROVER_JOB_MONITOR_BINARY_NAME: &str = "zksync_prover_job_monitor"; + +pub const PATH_TO_ONLY_REAL_PROOFS_OVERRIDE_CONFIG: &str = + "etc/env/file_based/overrides/only_real_proofs.yaml"; +pub const PATH_TO_VALIDIUM_OVERRIDE_CONFIG: &str = "etc/env/file_based/overrides/validium.yaml"; diff --git a/zk_toolbox/crates/zk_inception/src/defaults.rs b/zk_toolbox/crates/zk_inception/src/defaults.rs index 544e28377403..6c3821eed856 100644 --- a/zk_toolbox/crates/zk_inception/src/defaults.rs +++ b/zk_toolbox/crates/zk_inception/src/defaults.rs @@ -14,6 +14,7 @@ lazy_static!
{ pub const ROCKS_DB_STATE_KEEPER: &str = "state_keeper"; pub const ROCKS_DB_TREE: &str = "tree"; pub const ROCKS_DB_PROTECTIVE_READS: &str = "protective_reads"; +pub const ROCKS_DB_BASIC_WITNESS_INPUT_PRODUCER: &str = "basic_witness_input_producer"; pub const EN_ROCKS_DB_PREFIX: &str = "en"; pub const MAIN_ROCKS_DB_PREFIX: &str = "main"; diff --git a/zk_toolbox/crates/zk_inception/src/external_node.rs b/zk_toolbox/crates/zk_inception/src/external_node.rs index ef62738a7d2a..5ff4ce070250 100644 --- a/zk_toolbox/crates/zk_inception/src/external_node.rs +++ b/zk_toolbox/crates/zk_inception/src/external_node.rs @@ -6,6 +6,7 @@ use config::{ SecretsConfig, }; use xshell::Shell; +use zksync_config::configs::consensus::ConsensusConfig; use crate::messages::MSG_FAILED_TO_RUN_SERVER_ERR; @@ -15,6 +16,7 @@ pub struct RunExternalNode { general_config: PathBuf, secrets: PathBuf, en_config: PathBuf, + consensus_config: PathBuf, } impl RunExternalNode { @@ -29,6 +31,7 @@ impl RunExternalNode { let general_config = GeneralConfig::get_path_with_base_path(&en_path); let secrets = SecretsConfig::get_path_with_base_path(&en_path); let enconfig = ENConfig::get_path_with_base_path(&en_path); + let consensus_config = ConsensusConfig::get_path_with_base_path(&en_path); Ok(Self { components, @@ -36,17 +39,29 @@ impl RunExternalNode { general_config, secrets, en_config: enconfig, + consensus_config, }) } - pub fn run(&self, shell: &Shell, mut additional_args: Vec) -> anyhow::Result<()> { + pub fn run( + &self, + shell: &Shell, + enable_consensus: bool, + mut additional_args: Vec, + ) -> anyhow::Result<()> { let code_path = self.code_path.to_str().unwrap(); let config_general_config = &self.general_config.to_str().unwrap(); let en_config = &self.en_config.to_str().unwrap(); let secrets = &self.secrets.to_str().unwrap(); + let consensus_config = &self.consensus_config.to_str().unwrap(); if let Some(components) = self.components() { additional_args.push(format!("--components={}", 
components)) } + let mut consensus_args = vec![]; + if enable_consensus { + consensus_args.push("--enable-consensus".to_string()); + consensus_args.push(format!("--consensus-path={}", consensus_config)) + } common::external_node::run( shell, @@ -54,6 +69,7 @@ impl RunExternalNode { config_general_config, secrets, en_config, + consensus_args, additional_args, ) .context(MSG_FAILED_TO_RUN_SERVER_ERR) diff --git a/zk_toolbox/crates/zk_inception/src/main.rs b/zk_toolbox/crates/zk_inception/src/main.rs index f6f7d83dede6..f1ca348df386 100644 --- a/zk_toolbox/crates/zk_inception/src/main.rs +++ b/zk_toolbox/crates/zk_inception/src/main.rs @@ -8,6 +8,7 @@ use common::{ config::{global_config, init_global_config, GlobalConfig}, error::log_error, init_prompt_theme, logger, + version::version_message, }; use config::EcosystemConfig; use xshell::Shell; @@ -26,7 +27,10 @@ mod messages; mod utils; #[derive(Parser, Debug)] -#[command(version, about)] +#[command( + version = version_message(env!("CARGO_PKG_VERSION")), + about +)] struct Inception { #[command(subcommand)] command: InceptionSubcommands, @@ -86,13 +90,16 @@ struct InceptionGlobalArgs { async fn main() -> anyhow::Result<()> { human_panic::setup_panic!(); + // We must parse arguments before printing the intro, because some autogenerated + // Clap commands (like `--version`) would look odd otherwise.
+ let inception_args = Inception::parse(); + init_prompt_theme(); logger::new_empty_line(); logger::intro(); let shell = Shell::new().unwrap(); - let inception_args = Inception::parse(); init_global_config_inner(&shell, &inception_args.global)?; diff --git a/zk_toolbox/crates/zk_inception/src/messages.rs b/zk_toolbox/crates/zk_inception/src/messages.rs index 827aa03d7ba8..3bbac066dbb6 100644 --- a/zk_toolbox/crates/zk_inception/src/messages.rs +++ b/zk_toolbox/crates/zk_inception/src/messages.rs @@ -75,6 +75,7 @@ pub(super) const MSG_DISTRIBUTING_ETH_SPINNER: &str = "Distributing eth..."; pub(super) const MSG_MINT_BASE_TOKEN_SPINNER: &str = "Minting base token to the governance addresses..."; pub(super) const MSG_INTALLING_DEPS_SPINNER: &str = "Installing and building dependencies..."; +pub(super) const MSG_PREPARING_CONFIG_SPINNER: &str = "Preparing config files..."; pub(super) const MSG_DEPLOYING_ERC20_SPINNER: &str = "Deploying ERC20 contracts..."; pub(super) const MSG_DEPLOYING_ECOSYSTEM_CONTRACTS_SPINNER: &str = "Deploying ecosystem contracts..."; @@ -121,6 +122,14 @@ pub(super) fn msg_chain_load_err(chain_name: &str) -> String { format!("Failed to load chain config for {chain_name}") } +/// Build ecosystem transactions related messages +pub(super) const MSG_SENDER_ADDRESS_PROMPT: &str = "What is the address of the transaction sender?"; +pub(super) const MSG_BUILDING_ECOSYSTEM: &str = "Building ecosystem transactions"; +pub(super) const MSG_BUILDING_ECOSYSTEM_CONTRACTS_SPINNER: &str = "Building ecosystem contracts..."; +pub(super) const MSG_WRITING_OUTPUT_FILES_SPINNER: &str = "Writing output files..."; +pub(super) const MSG_ECOSYSTEM_TXN_OUTRO: &str = "Transactions successfully built"; +pub(super) const MSG_ECOSYSTEM_TXN_OUT_PATH_INVALID_ERR: &str = "Invalid path"; + /// Chain create related messages pub(super) const MSG_PROVER_MODE_HELP: &str = "Prover options"; pub(super) const MSG_CHAIN_ID_HELP: &str = "Chain ID"; @@ -177,6 +186,7 @@ pub(super) const 
MSG_INITIALIZING_SERVER_DATABASE: &str = "Initializing server d pub(super) const MSG_FAILED_TO_DROP_SERVER_DATABASE_ERR: &str = "Failed to drop server database"; pub(super) const MSG_INITIALIZING_PROVER_DATABASE: &str = "Initializing prover database"; pub(super) const MSG_FAILED_TO_DROP_PROVER_DATABASE_ERR: &str = "Failed to drop prover database"; + /// Chain update related messages pub(super) const MSG_WALLETS_CONFIG_MUST_BE_PRESENT: &str = "Wallets configuration must be present"; @@ -218,12 +228,22 @@ pub(super) const MSG_DEPLOYING_L2_CONTRACT_SPINNER: &str = "Deploying l2 contrac /// Chain deploy paymaster related messages pub(super) const MSG_DEPLOYING_PAYMASTER: &str = "Deploying paymaster"; +/// Chain build related messages +pub(super) const MSG_BUILDING_CHAIN_REGISTRATION_TXNS_SPINNER: &str = + "Building chain registration transactions..."; +pub(super) const MSG_CHAIN_TXN_OUT_PATH_INVALID_ERR: &str = "Invalid path"; +pub(super) const MSG_CHAIN_TXN_MISSING_CONTRACT_CONFIG: &str = + "Missing contract.yaml, please be sure to run this command within initialized ecosystem"; +pub(super) const MSG_CHAIN_TRANSACTIONS_BUILT: &str = "Chain transactions successfully built"; + /// Run server related messages pub(super) const MSG_SERVER_COMPONENTS_HELP: &str = "Components of server to run"; +pub(super) const MSG_ENABLE_CONSENSUS_HELP: &str = "Enable consensus"; pub(super) const MSG_SERVER_GENESIS_HELP: &str = "Run server in genesis mode"; pub(super) const MSG_SERVER_ADDITIONAL_ARGS_HELP: &str = "Additional arguments that can be passed through the CLI"; pub(super) const MSG_SERVER_BUILD_HELP: &str = "Build server but don't run it"; +pub(super) const MSG_SERVER_URING_HELP: &str = "Enables uring support for RocksDB"; /// Accept ownership related messages pub(super) const MSG_ACCEPTING_GOVERNANCE_SPINNER: &str = "Accepting governance..."; @@ -305,6 +325,11 @@ pub(super) fn msg_preparing_en_config_is_done(path: &Path) -> String { pub(super) const 
MSG_EXTERNAL_NODE_CONFIG_NOT_INITIALIZED: &str = "External node is not initialized"; +pub(super) const MSG_CONSENSUS_CONFIG_MISSING_ERR: &str = "Consensus config is missing"; +pub(super) const MSG_CONSENSUS_SECRETS_MISSING_ERR: &str = "Consensus secrets config is missing"; +pub(super) const MSG_CONSENSUS_SECRETS_NODE_KEY_MISSING_ERR: &str = "Consensus node key is missing"; + +pub(super) const MSG_PORTS_CONFIG_ERR: &str = "Failed to get ports config"; pub(super) const MSG_STARTING_EN: &str = "Starting external node"; @@ -389,10 +414,12 @@ pub(super) const MSG_GET_ZKSOLC_RELEASES_ERR: &str = "Failed to get zksolc relea pub(super) const MSG_FETCHING_ZKSOLC_RELEASES_SPINNER: &str = "Fetching zksolc releases..."; pub(super) const MSG_FETCHING_ZKVYPER_RELEASES_SPINNER: &str = "Fetching zkvyper releases..."; pub(super) const MSG_FETCH_SOLC_RELEASES_SPINNER: &str = "Fetching solc releases..."; +pub(super) const MSG_FETCH_ERA_VM_SOLC_RELEASES_SPINNER: &str = "Fetching era vm solc releases..."; pub(super) const MSG_FETCHING_VYPER_RELEASES_SPINNER: &str = "Fetching vyper releases..."; pub(super) const MSG_ZKSOLC_VERSION_PROMPT: &str = "Select the minimal zksolc version:"; pub(super) const MSG_ZKVYPER_VERSION_PROMPT: &str = "Select the minimal zkvyper version:"; pub(super) const MSG_SOLC_VERSION_PROMPT: &str = "Select the minimal solc version:"; +pub(super) const MSG_ERA_VM_SOLC_VERSION_PROMPT: &str = "Select the minimal era vm solc version:"; pub(super) const MSG_VYPER_VERSION_PROMPT: &str = "Select the minimal vyper version:"; pub(super) const MSG_NO_RELEASES_FOUND_ERR: &str = "No releases found for current architecture"; pub(super) const MSG_NO_VERSION_FOUND_ERR: &str = "No version found"; @@ -400,6 +427,7 @@ pub(super) const MSG_ARCH_NOT_SUPPORTED_ERR: &str = "Architecture not supported" pub(super) const MSG_OS_NOT_SUPPORTED_ERR: &str = "OS not supported"; pub(super) const MSG_GET_VYPER_RELEASES_ERR: &str = "Failed to get vyper releases"; pub(super) const 
MSG_GET_SOLC_RELEASES_ERR: &str = "Failed to get solc releases"; +pub(super) const MSG_GET_ERA_VM_SOLC_RELEASES_ERR: &str = "Failed to get era vm solc releases"; pub(super) const MSG_GET_ZKVYPER_RELEASES_ERR: &str = "Failed to get zkvyper releases"; pub(super) fn msg_binary_already_exists(name: &str, version: &str) -> String { @@ -426,7 +454,6 @@ pub(super) const MSG_DIFF_EN_CONFIG: &str = "Added the following fields to the external node config:"; pub(super) const MSG_DIFF_EN_GENERAL_CONFIG: &str = "Added the following fields to the external node generalconfig:"; -pub(super) const MSG_INVALID_KEY_TYPE_ERR: &str = "Invalid key type"; pub(super) const MSG_UPDATING_ERA_OBSERVABILITY_SPINNER: &str = "Updating era observability..."; pub(super) fn msg_diff_genesis_config(chain: &str) -> String { diff --git a/zk_toolbox/crates/zk_inception/src/utils/consensus.rs b/zk_toolbox/crates/zk_inception/src/utils/consensus.rs new file mode 100644 index 000000000000..06848334a6e1 --- /dev/null +++ b/zk_toolbox/crates/zk_inception/src/utils/consensus.rs @@ -0,0 +1,124 @@ +use std::{ + collections::{BTreeMap, BTreeSet}, + net::SocketAddr, +}; + +use config::{ChainConfig, PortsConfig}; +use secrecy::{ExposeSecret, Secret}; +use zksync_config::configs::consensus::{ + AttesterPublicKey, AttesterSecretKey, ConsensusConfig, ConsensusSecrets, GenesisSpec, Host, + NodePublicKey, NodeSecretKey, ProtocolVersion, ValidatorPublicKey, ValidatorSecretKey, + WeightedAttester, WeightedValidator, +}; +use zksync_consensus_crypto::{Text, TextFmt}; +use zksync_consensus_roles as roles; + +use crate::consts::{ + CONSENSUS_PUBLIC_ADDRESS_HOST, CONSENSUS_SERVER_ADDRESS_HOST, GOSSIP_DYNAMIC_INBOUND_LIMIT, + MAX_BATCH_SIZE, MAX_PAYLOAD_SIZE, +}; + +#[derive(Debug, Clone)] +pub struct ConsensusSecretKeys { + validator_key: roles::validator::SecretKey, + attester_key: roles::attester::SecretKey, + node_key: roles::node::SecretKey, +} + +pub struct ConsensusPublicKeys { + validator_key: 
roles::validator::PublicKey, + attester_key: roles::attester::PublicKey, +} + +pub fn get_consensus_config( + chain_config: &ChainConfig, + ports: PortsConfig, + consensus_keys: Option, + gossip_static_outbound: Option>, +) -> anyhow::Result { + let genesis_spec = + consensus_keys.map(|consensus_keys| get_genesis_specs(chain_config, &consensus_keys)); + + let public_addr = SocketAddr::new(CONSENSUS_PUBLIC_ADDRESS_HOST, ports.consensus_port); + let server_addr = SocketAddr::new(CONSENSUS_SERVER_ADDRESS_HOST, ports.consensus_port); + + Ok(ConsensusConfig { + server_addr, + public_addr: Host(public_addr.encode()), + genesis_spec, + max_payload_size: MAX_PAYLOAD_SIZE, + gossip_dynamic_inbound_limit: GOSSIP_DYNAMIC_INBOUND_LIMIT, + max_batch_size: MAX_BATCH_SIZE, + gossip_static_inbound: BTreeSet::new(), + gossip_static_outbound: gossip_static_outbound.unwrap_or_default(), + rpc: None, + }) +} + +pub fn generate_consensus_keys() -> ConsensusSecretKeys { + ConsensusSecretKeys { + validator_key: roles::validator::SecretKey::generate(), + attester_key: roles::attester::SecretKey::generate(), + node_key: roles::node::SecretKey::generate(), + } +} + +fn get_consensus_public_keys(consensus_keys: &ConsensusSecretKeys) -> ConsensusPublicKeys { + ConsensusPublicKeys { + validator_key: consensus_keys.validator_key.public(), + attester_key: consensus_keys.attester_key.public(), + } +} + +pub fn get_genesis_specs( + chain_config: &ChainConfig, + consensus_keys: &ConsensusSecretKeys, +) -> GenesisSpec { + let public_keys = get_consensus_public_keys(consensus_keys); + let validator_key = public_keys.validator_key.encode(); + let attester_key = public_keys.attester_key.encode(); + + let validator = WeightedValidator { + key: ValidatorPublicKey(validator_key.clone()), + weight: 1, + }; + let attester = WeightedAttester { + key: AttesterPublicKey(attester_key), + weight: 1, + }; + let leader = ValidatorPublicKey(validator_key); + + GenesisSpec { + chain_id: chain_config.chain_id, + 
protocol_version: ProtocolVersion(1), + validators: vec![validator], + attesters: vec![attester], + leader, + registry_address: None, + } +} + +pub fn get_consensus_secrets(consensus_keys: &ConsensusSecretKeys) -> ConsensusSecrets { + let validator_key = consensus_keys.validator_key.encode(); + let attester_key = consensus_keys.attester_key.encode(); + let node_key = consensus_keys.node_key.encode(); + + ConsensusSecrets { + validator_key: Some(ValidatorSecretKey(Secret::new(validator_key))), + attester_key: Some(AttesterSecretKey(Secret::new(attester_key))), + node_key: Some(NodeSecretKey(Secret::new(node_key))), + } +} + +pub fn node_public_key(secrets: &ConsensusSecrets) -> anyhow::Result> { + Ok(node_key(secrets)?.map(|node_secret_key| NodePublicKey(node_secret_key.public().encode()))) +} +fn node_key(secrets: &ConsensusSecrets) -> anyhow::Result> { + read_secret_text(secrets.node_key.as_ref().map(|x| &x.0)) +} + +fn read_secret_text(text: Option<&Secret>) -> anyhow::Result> { + text.map(|text| Text::new(text.expose_secret()).decode()) + .transpose() + .map_err(|_| anyhow::format_err!("invalid format")) +} diff --git a/zk_toolbox/crates/zk_inception/src/utils/mod.rs b/zk_toolbox/crates/zk_inception/src/utils/mod.rs index a84f0a336de5..229d3908dc3a 100644 --- a/zk_toolbox/crates/zk_inception/src/utils/mod.rs +++ b/zk_toolbox/crates/zk_inception/src/utils/mod.rs @@ -1,2 +1,3 @@ +pub mod consensus; pub mod forge; pub mod rocks_db; diff --git a/zk_toolbox/crates/zk_inception/src/utils/rocks_db.rs b/zk_toolbox/crates/zk_inception/src/utils/rocks_db.rs index 17cffa66e39d..1b7e29dd9722 100644 --- a/zk_toolbox/crates/zk_inception/src/utils/rocks_db.rs +++ b/zk_toolbox/crates/zk_inception/src/utils/rocks_db.rs @@ -4,8 +4,8 @@ use config::RocksDbs; use xshell::Shell; use crate::defaults::{ - EN_ROCKS_DB_PREFIX, MAIN_ROCKS_DB_PREFIX, ROCKS_DB_PROTECTIVE_READS, ROCKS_DB_STATE_KEEPER, - ROCKS_DB_TREE, + EN_ROCKS_DB_PREFIX, MAIN_ROCKS_DB_PREFIX, 
ROCKS_DB_BASIC_WITNESS_INPUT_PRODUCER, + ROCKS_DB_PROTECTIVE_READS, ROCKS_DB_STATE_KEEPER, ROCKS_DB_TREE, }; pub enum RocksDBDirOption { @@ -37,9 +37,13 @@ pub fn recreate_rocksdb_dirs( .join(option.prefix()) .join(ROCKS_DB_PROTECTIVE_READS); shell.remove_path(&protective_reads)?; + let basic_witness_input_producer = rocks_db_path + .join(option.prefix()) + .join(ROCKS_DB_BASIC_WITNESS_INPUT_PRODUCER); Ok(RocksDbs { state_keeper: shell.create_dir(state_keeper)?, merkle_tree: shell.create_dir(merkle_tree)?, protective_reads: shell.create_dir(protective_reads)?, + basic_witness_input_producer: shell.create_dir(basic_witness_input_producer)?, }) } diff --git a/zk_toolbox/crates/zk_supervisor/Cargo.toml b/zk_toolbox/crates/zk_supervisor/Cargo.toml index d9c5c2196fae..4c450a736393 100644 --- a/zk_toolbox/crates/zk_supervisor/Cargo.toml +++ b/zk_toolbox/crates/zk_supervisor/Cargo.toml @@ -15,6 +15,7 @@ anyhow.workspace = true clap.workspace = true common.workspace = true config.workspace = true +chrono.workspace = true ethers.workspace = true human-panic.workspace = true strum.workspace = true @@ -27,3 +28,4 @@ clap-markdown.workspace = true futures.workspace = true types.workspace = true serde_yaml.workspace = true +zksync_basic_types.workspace = true diff --git a/zk_toolbox/crates/zk_supervisor/src/commands/clean/mod.rs b/zk_toolbox/crates/zk_supervisor/src/commands/clean/mod.rs index 92c8a0f1086e..803e962c0ff8 100644 --- a/zk_toolbox/crates/zk_supervisor/src/commands/clean/mod.rs +++ b/zk_toolbox/crates/zk_supervisor/src/commands/clean/mod.rs @@ -43,6 +43,7 @@ pub fn containers(shell: &Shell) -> anyhow::Result<()> { pub fn contracts(shell: &Shell, ecosystem_config: &EcosystemConfig) -> anyhow::Result<()> { let path_to_foundry = ecosystem_config.path_to_foundry(); + let contracts_path = ecosystem_config.link_to_code.join("contracts"); logger::info(MSG_CONTRACTS_CLEANING); shell .remove_path(path_to_foundry.join("broadcast")) @@ -62,18 +63,35 @@ pub fn contracts(shell: 
&Shell, ecosystem_config: &EcosystemConfig) -> anyhow::R shell .remove_path(path_to_foundry.join("typechain")) .context("typechain")?; + shell + .remove_path(contracts_path.join("l2-contracts/cache-forge")) + .context("l2-contracts/cache-forge")?; + shell + .remove_path(contracts_path.join("l2-contracts/zkout")) + .context("l2-contracts/zkout")?; + shell + .remove_path(contracts_path.join("system-contracts/cache-forge")) + .context("system-contracts/cache-forge")?; + shell + .remove_path(contracts_path.join("system-contracts/zkout")) + .context("system-contracts/zkout")?; + shell + .remove_path(contracts_path.join("system-contracts/contracts-preprocessed")) + .context("system-contracts/contracts-preprocessed")?; shell .remove_path(path_to_foundry.join("script-config")) .context("remove script-config")?; shell .create_dir(path_to_foundry.join("script-config")) .context("create script-config")?; + shell.write_file(path_to_foundry.join("script-config/.gitkeep"), "")?; shell .remove_path(path_to_foundry.join("script-out")) .context("remove script-out")?; shell .create_dir(path_to_foundry.join("script-out")) .context("create script-out")?; + shell.write_file(path_to_foundry.join("script-out/.gitkeep"), "")?; logger::info(MSG_CONTRACTS_CLEANING_FINISHED); Ok(()) } diff --git a/zk_toolbox/crates/zk_supervisor/src/commands/config_writer.rs b/zk_toolbox/crates/zk_supervisor/src/commands/config_writer.rs new file mode 100644 index 000000000000..3adecb36d069 --- /dev/null +++ b/zk_toolbox/crates/zk_supervisor/src/commands/config_writer.rs @@ -0,0 +1,35 @@ +use anyhow::Context; +use clap::Parser; +use common::{config::global_config, logger, Prompt}; +use config::{override_config, EcosystemConfig}; +use xshell::Shell; + +use crate::messages::{ + msg_overriding_config, MSG_CHAIN_NOT_FOUND_ERR, MSG_OVERRIDE_CONFIG_PATH_HELP, + MSG_OVERRIDE_SUCCESS, MSG_OVERRRIDE_CONFIG_PATH_PROMPT, +}; + +#[derive(Debug, Parser)] +pub struct ConfigWriterArgs { + #[clap(long, short, help = 
MSG_OVERRIDE_CONFIG_PATH_HELP)] + pub path: Option, +} + +impl ConfigWriterArgs { + pub fn get_config_path(self) -> String { + self.path + .unwrap_or_else(|| Prompt::new(MSG_OVERRRIDE_CONFIG_PATH_PROMPT).ask()) + } +} + +pub fn run(shell: &Shell, args: ConfigWriterArgs) -> anyhow::Result<()> { + let path = args.get_config_path().into(); + let ecosystem = EcosystemConfig::from_file(shell)?; + let chain = ecosystem + .load_chain(global_config().chain_name.clone()) + .context(MSG_CHAIN_NOT_FOUND_ERR)?; + logger::step(msg_overriding_config(chain.name.clone())); + override_config(shell, path, &chain)?; + logger::outro(MSG_OVERRIDE_SUCCESS); + Ok(()) +} diff --git a/zk_toolbox/crates/zk_supervisor/src/commands/mod.rs b/zk_toolbox/crates/zk_supervisor/src/commands/mod.rs index 875f2982c959..d3cb99f1e342 100644 --- a/zk_toolbox/crates/zk_supervisor/src/commands/mod.rs +++ b/zk_toolbox/crates/zk_supervisor/src/commands/mod.rs @@ -1,9 +1,11 @@ pub mod clean; +pub mod config_writer; pub mod contracts; pub mod database; pub mod fmt; pub mod lint; pub(crate) mod lint_utils; pub mod prover; +pub mod send_transactions; pub mod snapshot; pub mod test; diff --git a/zk_toolbox/crates/zk_supervisor/src/commands/prover/info.rs b/zk_toolbox/crates/zk_supervisor/src/commands/prover/info.rs index 05964cf689fd..6a7d7ddeda8a 100644 --- a/zk_toolbox/crates/zk_supervisor/src/commands/prover/info.rs +++ b/zk_toolbox/crates/zk_supervisor/src/commands/prover/info.rs @@ -5,18 +5,23 @@ use std::{ use anyhow::Context as _; use common::{config::global_config, logger}; -use config::EcosystemConfig; +use config::{ChainConfig, EcosystemConfig}; use xshell::{cmd, Shell}; use crate::messages::MSG_CHAIN_NOT_FOUND_ERR; pub async fn run(shell: &Shell) -> anyhow::Result<()> { - let link_to_code = EcosystemConfig::from_file(shell)?.link_to_code; + let ecosystem_config = EcosystemConfig::from_file(shell)?; + let chain_config = ecosystem_config + .load_chain(global_config().chain_name.clone()) + 
.expect(MSG_CHAIN_NOT_FOUND_ERR); + + let link_to_code = ecosystem_config.link_to_code; let link_to_prover = link_to_code.join("prover"); let protocol_version = get_protocol_version(shell, &link_to_prover).await?; let snark_wrapper = get_snark_wrapper(&link_to_prover).await?; - let prover_url = get_database_url(shell).await?; + let prover_url = get_database_url(&chain_config).await?; logger::info(format!( " @@ -59,13 +64,8 @@ pub(crate) async fn get_snark_wrapper(link_to_prover: &Path) -> anyhow::Result anyhow::Result { - let ecosystem = EcosystemConfig::from_file(shell)?; - let chain_config = ecosystem - .load_chain(global_config().chain_name.clone()) - .context(MSG_CHAIN_NOT_FOUND_ERR)?; - - let prover_url = chain_config +pub(crate) async fn get_database_url(chain: &ChainConfig) -> anyhow::Result { + let prover_url = chain .get_secrets_config()? .database .context("Database secrets not found")? diff --git a/zk_toolbox/crates/zk_supervisor/src/commands/prover/insert_batch.rs b/zk_toolbox/crates/zk_supervisor/src/commands/prover/insert_batch.rs index 2c4a1cf97513..b1c02c9a9fea 100644 --- a/zk_toolbox/crates/zk_supervisor/src/commands/prover/insert_batch.rs +++ b/zk_toolbox/crates/zk_supervisor/src/commands/prover/insert_batch.rs @@ -1,19 +1,27 @@ -use common::{check_prerequisites, cmd::Cmd, logger, PROVER_CLI_PREREQUISITE}; +use common::{ + check_prerequisites, cmd::Cmd, config::global_config, logger, PROVER_CLI_PREREQUISITE, +}; use config::{get_link_to_prover, EcosystemConfig}; use xshell::{cmd, Shell}; -use crate::commands::prover::{ - args::insert_batch::{InsertBatchArgs, InsertBatchArgsFinal}, - info, +use crate::{ + commands::prover::{ + args::insert_batch::{InsertBatchArgs, InsertBatchArgsFinal}, + info, + }, + messages::MSG_CHAIN_NOT_FOUND_ERR, }; pub async fn run(shell: &Shell, args: InsertBatchArgs) -> anyhow::Result<()> { check_prerequisites(shell, &PROVER_CLI_PREREQUISITE, false); let ecosystem_config = EcosystemConfig::from_file(shell)?; + let 
chain_config = ecosystem_config + .load_chain(global_config().chain_name.clone()) + .expect(MSG_CHAIN_NOT_FOUND_ERR); let version = info::get_protocol_version(shell, &get_link_to_prover(&ecosystem_config)).await?; - let prover_url = info::get_database_url(shell).await?; + let prover_url = info::get_database_url(&chain_config).await?; let InsertBatchArgsFinal { number, version } = args.fill_values_with_prompts(version); diff --git a/zk_toolbox/crates/zk_supervisor/src/commands/prover/insert_version.rs b/zk_toolbox/crates/zk_supervisor/src/commands/prover/insert_version.rs index ab28efca9446..16bbdf13df4f 100644 --- a/zk_toolbox/crates/zk_supervisor/src/commands/prover/insert_version.rs +++ b/zk_toolbox/crates/zk_supervisor/src/commands/prover/insert_version.rs @@ -1,21 +1,29 @@ -use common::{check_prerequisites, cmd::Cmd, logger, PROVER_CLI_PREREQUISITE}; +use common::{ + check_prerequisites, cmd::Cmd, config::global_config, logger, PROVER_CLI_PREREQUISITE, +}; use config::{get_link_to_prover, EcosystemConfig}; use xshell::{cmd, Shell}; -use crate::commands::prover::{ - args::insert_version::{InsertVersionArgs, InsertVersionArgsFinal}, - info, +use crate::{ + commands::prover::{ + args::insert_version::{InsertVersionArgs, InsertVersionArgsFinal}, + info, + }, + messages::MSG_CHAIN_NOT_FOUND_ERR, }; pub async fn run(shell: &Shell, args: InsertVersionArgs) -> anyhow::Result<()> { check_prerequisites(shell, &PROVER_CLI_PREREQUISITE, false); let ecosystem_config = EcosystemConfig::from_file(shell)?; + let chain_config = ecosystem_config + .load_chain(global_config().chain_name.clone()) + .expect(MSG_CHAIN_NOT_FOUND_ERR); let version = info::get_protocol_version(shell, &get_link_to_prover(&ecosystem_config)).await?; let snark_wrapper = info::get_snark_wrapper(&get_link_to_prover(&ecosystem_config)).await?; - let prover_url = info::get_database_url(shell).await?; + let prover_url = info::get_database_url(&chain_config).await?; let InsertVersionArgsFinal { version, diff 
--git a/zk_toolbox/crates/zk_supervisor/src/commands/send_transactions/args/mod.rs b/zk_toolbox/crates/zk_supervisor/src/commands/send_transactions/args/mod.rs new file mode 100644 index 000000000000..e3d4f220ff28 --- /dev/null +++ b/zk_toolbox/crates/zk_supervisor/src/commands/send_transactions/args/mod.rs @@ -0,0 +1,69 @@ +use std::path::PathBuf; + +use clap::Parser; +use common::Prompt; +use url::Url; + +use crate::{ + defaults::LOCAL_RPC_URL, + messages::{ + MSG_INVALID_L1_RPC_URL_ERR, MSG_PROMPT_L1_RPC_URL, MSG_PROMPT_SECRET_KEY, + MSG_PROMPT_TRANSACTION_FILE, + }, +}; + +const DEFAULT_TRANSACTION_CONFIRMATIONS: usize = 2; + +#[derive(Debug, Parser)] +pub struct SendTransactionsArgs { + #[clap(long)] + pub file: Option, + #[clap(long)] + pub private_key: Option, + #[clap(long)] + pub l1_rpc_url: Option, + #[clap(long)] + pub confirmations: Option, +} + +#[derive(Debug)] +pub struct SendTransactionsArgsFinal { + pub file: PathBuf, + pub private_key: String, + pub l1_rpc_url: String, + pub confirmations: usize, +} + +impl SendTransactionsArgs { + pub fn fill_values_with_prompt(self) -> SendTransactionsArgsFinal { + let file = self + .file + .unwrap_or_else(|| Prompt::new(MSG_PROMPT_TRANSACTION_FILE).ask()); + + let private_key = self + .private_key + .unwrap_or_else(|| Prompt::new(MSG_PROMPT_SECRET_KEY).ask()); + + let l1_rpc_url = self.l1_rpc_url.unwrap_or_else(|| { + Prompt::new(MSG_PROMPT_L1_RPC_URL) + .default(LOCAL_RPC_URL) + .validate_with(|val: &String| -> Result<(), String> { + Url::parse(val) + .map(|_| ()) + .map_err(|_| MSG_INVALID_L1_RPC_URL_ERR.to_string()) + }) + .ask() + }); + + let confirmations = self + .confirmations + .unwrap_or(DEFAULT_TRANSACTION_CONFIRMATIONS); + + SendTransactionsArgsFinal { + file, + private_key, + l1_rpc_url, + confirmations, + } + } +} diff --git a/zk_toolbox/crates/zk_supervisor/src/commands/send_transactions/mod.rs b/zk_toolbox/crates/zk_supervisor/src/commands/send_transactions/mod.rs new file mode 100644 index 
000000000000..79d8efc600e8 --- /dev/null +++ b/zk_toolbox/crates/zk_supervisor/src/commands/send_transactions/mod.rs @@ -0,0 +1,132 @@ +use std::{ + fs::{File, OpenOptions}, + io::{Read, Write}, + ops::Add, + path::PathBuf, + time::Duration, +}; + +use anyhow::Context; +use args::SendTransactionsArgs; +use chrono::Local; +use common::{ethereum::create_ethers_client, logger}; +use config::EcosystemConfig; +use ethers::{abi::Bytes, providers::Middleware, types::TransactionRequest, utils::hex}; +use serde::Deserialize; +use tokio::time::sleep; +use xshell::Shell; +use zksync_basic_types::{H160, U256}; + +use crate::{ + consts::DEFAULT_UNSIGNED_TRANSACTIONS_DIR, + messages::{ + msg_send_txns_outro, MSG_FAILED_TO_SEND_TXN_ERR, MSG_UNABLE_TO_OPEN_FILE_ERR, + MSG_UNABLE_TO_READ_FILE_ERR, MSG_UNABLE_TO_READ_PARSE_JSON_ERR, + MSG_UNABLE_TO_WRITE_FILE_ERR, + }, +}; + +pub mod args; + +const MAX_ATTEMPTS: u32 = 3; + +#[derive(Deserialize)] +struct Transaction { + from: String, + gas: String, + input: String, +} + +#[derive(Deserialize)] +struct Txn { + #[serde(rename = "contractAddress")] + contract_address: String, + transaction: Transaction, +} + +#[derive(Deserialize)] +struct Txns { + transactions: Vec, +} + +pub async fn run(shell: &Shell, args: SendTransactionsArgs) -> anyhow::Result<()> { + let args = args.fill_values_with_prompt(); + + let ecosystem_config = EcosystemConfig::from_file(shell)?; + let chain_id = ecosystem_config.l1_network.chain_id(); + + // Read the JSON file + let mut file = File::open(args.file).context(MSG_UNABLE_TO_OPEN_FILE_ERR)?; + let mut data = String::new(); + file.read_to_string(&mut data) + .context(MSG_UNABLE_TO_READ_FILE_ERR)?; + + // Parse the JSON file + let txns: Txns = serde_json::from_str(&data).context(MSG_UNABLE_TO_READ_PARSE_JSON_ERR)?; + + let timestamp = Local::now().format("%Y%m%d_%H%M%S").to_string(); + let log_file = ecosystem_config + .link_to_code + .join(DEFAULT_UNSIGNED_TRANSACTIONS_DIR) + .join(format!("{}_receipt.log", 
timestamp)); + + let client = create_ethers_client(args.private_key.parse()?, args.l1_rpc_url, Some(chain_id))?; + let mut nonce = client.get_transaction_count(client.address(), None).await?; + let gas_price = client.get_gas_price().await?; + + for txn in txns.transactions { + let to: H160 = txn.contract_address.parse()?; + let from: H160 = txn.transaction.from.parse()?; + let gas_limit: U256 = txn.transaction.gas.parse()?; + let input_data: Bytes = hex::decode(txn.transaction.input)?; + + let tx = TransactionRequest::new() + .to(to) + .from(from) + .gas(gas_limit) + .gas_price(gas_price) + .nonce(nonce) + .data(input_data) + .chain_id(chain_id); + + nonce = nonce.add(1); + + let mut attempts = 0; + let receipt = loop { + attempts += 1; + match client + .send_transaction(tx.clone(), None) + .await? + .confirmations(args.confirmations) + .interval(Duration::from_millis(30)) + .await + { + Ok(receipt) => break receipt, + Err(e) if attempts < MAX_ATTEMPTS => { + logger::info(format!("Attempt {} failed: {:?}", attempts, e).as_str()); + sleep(Duration::from_secs(1)).await; + continue; + } + Err(e) => return Err(e).context(MSG_FAILED_TO_SEND_TXN_ERR)?, + } + }; + + log_receipt(&log_file, format!("{:?}", receipt).as_str())?; + } + + logger::outro(msg_send_txns_outro(log_file.to_string_lossy().as_ref())); + + Ok(()) +} + +fn log_receipt(path: &PathBuf, receipt: &str) -> anyhow::Result<()> { + let mut file = OpenOptions::new() + .append(true) + .create(true) + .open(path) + .context(MSG_UNABLE_TO_OPEN_FILE_ERR)?; + + writeln!(file, "{}", receipt).context(MSG_UNABLE_TO_WRITE_FILE_ERR)?; + + Ok(()) +} diff --git a/zk_toolbox/crates/zk_supervisor/src/commands/test/db.rs b/zk_toolbox/crates/zk_supervisor/src/commands/test/db.rs new file mode 100644 index 000000000000..a08b0404605e --- /dev/null +++ b/zk_toolbox/crates/zk_supervisor/src/commands/test/db.rs @@ -0,0 +1,34 @@ +use std::path::Path; + +use common::{cmd::Cmd, db::wait_for_db, logger}; +use xshell::{cmd, Shell}; + +use 
crate::{commands::database, dals::Dal, messages::MSG_RESETTING_TEST_DATABASES}; + +pub async fn reset_test_databases( + shell: &Shell, + link_to_code: &Path, + dals: Vec, +) -> anyhow::Result<()> { + logger::info(MSG_RESETTING_TEST_DATABASES); + + Cmd::new(cmd!( + shell, + "docker compose -f docker-compose-unit-tests.yml down" + )) + .run()?; + Cmd::new(cmd!( + shell, + "docker compose -f docker-compose-unit-tests.yml up -d" + )) + .run()?; + + for dal in dals { + let mut url = dal.url.clone(); + url.set_path(""); + wait_for_db(&url, 3).await?; + database::reset::reset_database(shell, link_to_code, dal.clone()).await?; + } + + Ok(()) +} diff --git a/zk_toolbox/crates/zk_supervisor/src/commands/test/loadtest.rs b/zk_toolbox/crates/zk_supervisor/src/commands/test/loadtest.rs new file mode 100644 index 000000000000..5a2a87871b58 --- /dev/null +++ b/zk_toolbox/crates/zk_supervisor/src/commands/test/loadtest.rs @@ -0,0 +1,52 @@ +use anyhow::Context; +use common::{cmd::Cmd, config::global_config, logger}; +use config::EcosystemConfig; +use xshell::{cmd, Shell}; + +use crate::messages::MSG_CHAIN_NOT_FOUND_ERR; + +pub fn run(shell: &Shell) -> anyhow::Result<()> { + let ecosystem_config = EcosystemConfig::from_file(shell)?; + + let chain_config = ecosystem_config + .load_chain(global_config().chain_name.clone()) + .context(MSG_CHAIN_NOT_FOUND_ERR)?; + + let general_api = chain_config + .get_general_config()? + .api_config + .context("API config is not found")?; + + let mut command = cmd!(shell, "cargo run --release --bin loadnext") + .env( + "L2_CHAIN_ID", + chain_config + .get_genesis_config()? + .l2_chain_id + .as_u64() + .to_string(), + ) + .env( + "MAIN_TOKEN", + format!( + "{:?}", + ecosystem_config + .get_erc20_tokens() + .first() + .context("NO Erc20 tokens were deployed")? 
+ .address + ), + ) + .env("L2_RPC_ADDRESS", general_api.web3_json_rpc.http_url) + .env("L2_WS_RPC_ADDRESS", general_api.web3_json_rpc.ws_url); + + if global_config().verbose { + command = command.env("RUST_LOG", "loadnext=info") + } + + Cmd::new(command).with_force_run().run()?; + + logger::outro("Loadtest success"); + + Ok(()) +} diff --git a/zk_toolbox/crates/zk_supervisor/src/commands/test/mod.rs b/zk_toolbox/crates/zk_supervisor/src/commands/test/mod.rs index a536302afc15..7d2af71ae9ce 100644 --- a/zk_toolbox/crates/zk_supervisor/src/commands/test/mod.rs +++ b/zk_toolbox/crates/zk_supervisor/src/commands/test/mod.rs @@ -6,15 +6,17 @@ use clap::Subcommand; use xshell::Shell; use crate::messages::{ - MSG_BUILD_ABOUT, MSG_INTEGRATION_TESTS_ABOUT, MSG_L1_CONTRACTS_ABOUT, MSG_PROVER_TEST_ABOUT, - MSG_RECOVERY_TEST_ABOUT, MSG_REVERT_TEST_ABOUT, MSG_RUST_TEST_ABOUT, MSG_TEST_WALLETS_INFO, - MSG_UPGRADE_TEST_ABOUT, + MSG_BUILD_ABOUT, MSG_INTEGRATION_TESTS_ABOUT, MSG_L1_CONTRACTS_ABOUT, MSG_LOADTEST_ABOUT, + MSG_PROVER_TEST_ABOUT, MSG_RECOVERY_TEST_ABOUT, MSG_REVERT_TEST_ABOUT, MSG_RUST_TEST_ABOUT, + MSG_TEST_WALLETS_INFO, MSG_UPGRADE_TEST_ABOUT, }; mod args; mod build; +mod db; mod integration; mod l1_contracts; +mod loadtest; mod prover; mod recovery; mod revert; @@ -43,6 +45,8 @@ pub enum TestCommands { Prover, #[clap(about = MSG_TEST_WALLETS_INFO)] Wallet, + #[clap(about = MSG_LOADTEST_ABOUT)] + Loadtest, } pub async fn run(shell: &Shell, args: TestCommands) -> anyhow::Result<()> { @@ -54,7 +58,8 @@ pub async fn run(shell: &Shell, args: TestCommands) -> anyhow::Result<()> { TestCommands::Build => build::run(shell), TestCommands::Rust(args) => rust::run(shell, args).await, TestCommands::L1Contracts => l1_contracts::run(shell), - TestCommands::Prover => prover::run(shell), + TestCommands::Prover => prover::run(shell).await, TestCommands::Wallet => wallet::run(shell), + TestCommands::Loadtest => loadtest::run(shell), } } diff --git 
a/zk_toolbox/crates/zk_supervisor/src/commands/test/prover.rs b/zk_toolbox/crates/zk_supervisor/src/commands/test/prover.rs index 4e9c4fc25283..f48b359a9357 100644 --- a/zk_toolbox/crates/zk_supervisor/src/commands/test/prover.rs +++ b/zk_toolbox/crates/zk_supervisor/src/commands/test/prover.rs @@ -1,15 +1,29 @@ +use std::str::FromStr; + use common::{cmd::Cmd, logger}; use config::EcosystemConfig; +use url::Url; use xshell::{cmd, Shell}; -use crate::messages::MSG_PROVER_TEST_SUCCESS; +use crate::{ + commands::test::db::reset_test_databases, + dals::{Dal, PROVER_DAL_PATH}, + defaults::TEST_DATABASE_PROVER_URL, + messages::MSG_PROVER_TEST_SUCCESS, +}; -pub fn run(shell: &Shell) -> anyhow::Result<()> { +pub async fn run(shell: &Shell) -> anyhow::Result<()> { let ecosystem = EcosystemConfig::from_file(shell)?; - let _dir_guard = shell.push_dir(ecosystem.link_to_code.join("prover")); + let dals = vec![Dal { + url: Url::from_str(TEST_DATABASE_PROVER_URL)?, + path: PROVER_DAL_PATH.to_string(), + }]; + reset_test_databases(shell, &ecosystem.link_to_code, dals).await?; + let _dir_guard = shell.push_dir(ecosystem.link_to_code.join("prover")); Cmd::new(cmd!(shell, "cargo test --release --workspace --locked")) .with_force_run() + .env("TEST_DATABASE_PROVER_URL", TEST_DATABASE_PROVER_URL) .run()?; logger::outro(MSG_PROVER_TEST_SUCCESS); diff --git a/zk_toolbox/crates/zk_supervisor/src/commands/test/recovery.rs b/zk_toolbox/crates/zk_supervisor/src/commands/test/recovery.rs index 030d28966031..c69a9826d56c 100644 --- a/zk_toolbox/crates/zk_supervisor/src/commands/test/recovery.rs +++ b/zk_toolbox/crates/zk_supervisor/src/commands/test/recovery.rs @@ -21,7 +21,7 @@ pub async fn run(shell: &Shell, args: RecoveryArgs) -> anyhow::Result<()> { shell.change_dir(ecosystem_config.link_to_code.join(RECOVERY_TESTS_PATH)); logger::info(MSG_RECOVERY_TEST_RUN_INFO); - Server::new(None, ecosystem_config.link_to_code.clone()).build(shell)?; + Server::new(None, 
ecosystem_config.link_to_code.clone(), false).build(shell)?; if !args.no_deps { install_and_build_dependencies(shell, &ecosystem_config)?; diff --git a/zk_toolbox/crates/zk_supervisor/src/commands/test/rust.rs b/zk_toolbox/crates/zk_supervisor/src/commands/test/rust.rs index ad1318cfa768..fdee03fe63ea 100644 --- a/zk_toolbox/crates/zk_supervisor/src/commands/test/rust.rs +++ b/zk_toolbox/crates/zk_supervisor/src/commands/test/rust.rs @@ -1,19 +1,19 @@ -use std::{path::Path, str::FromStr}; +use std::str::FromStr; use anyhow::Context; -use common::{cmd::Cmd, db::wait_for_db, logger}; +use common::{cmd::Cmd, logger}; use config::EcosystemConfig; use url::Url; use xshell::{cmd, Shell}; use super::args::rust::RustArgs; use crate::{ - commands::database, + commands::test::db::reset_test_databases, dals::{Dal, CORE_DAL_PATH, PROVER_DAL_PATH}, defaults::{TEST_DATABASE_PROVER_URL, TEST_DATABASE_SERVER_URL}, messages::{ - MSG_CARGO_NEXTEST_MISSING_ERR, MSG_CHAIN_NOT_FOUND_ERR, MSG_POSTGRES_CONFIG_NOT_FOUND_ERR, - MSG_RESETTING_TEST_DATABASES, MSG_UNIT_TESTS_RUN_SUCCESS, MSG_USING_CARGO_NEXTEST, + MSG_CHAIN_NOT_FOUND_ERR, MSG_POSTGRES_CONFIG_NOT_FOUND_ERR, MSG_UNIT_TESTS_RUN_SUCCESS, + MSG_USING_CARGO_NEXTEST, }, }; @@ -61,13 +61,8 @@ pub async fn run(shell: &Shell, args: RustArgs) -> anyhow::Result<()> { let _dir_guard = shell.push_dir(&link_to_code); - let cmd = if nextest_is_installed(shell)? 
{ - logger::info(MSG_USING_CARGO_NEXTEST); - cmd!(shell, "cargo nextest run --release") - } else { - logger::error(MSG_CARGO_NEXTEST_MISSING_ERR); - cmd!(shell, "cargo test --release") - }; + logger::info(MSG_USING_CARGO_NEXTEST); + let cmd = cmd!(shell, "cargo nextest run --release"); let cmd = if let Some(options) = args.options { Cmd::new(cmd.args(options.split_whitespace())).with_force_run() @@ -83,40 +78,3 @@ pub async fn run(shell: &Shell, args: RustArgs) -> anyhow::Result<()> { logger::outro(MSG_UNIT_TESTS_RUN_SUCCESS); Ok(()) } - -fn nextest_is_installed(shell: &Shell) -> anyhow::Result { - let out = String::from_utf8( - Cmd::new(cmd!(shell, "cargo install --list")) - .run_with_output()? - .stdout, - )?; - Ok(out.contains("cargo-nextest")) -} - -async fn reset_test_databases( - shell: &Shell, - link_to_code: &Path, - dals: Vec, -) -> anyhow::Result<()> { - logger::info(MSG_RESETTING_TEST_DATABASES); - - Cmd::new(cmd!( - shell, - "docker compose -f docker-compose-unit-tests.yml down" - )) - .run()?; - Cmd::new(cmd!( - shell, - "docker compose -f docker-compose-unit-tests.yml up -d" - )) - .run()?; - - for dal in dals { - let mut url = dal.url.clone(); - url.set_path(""); - wait_for_db(&url, 3).await?; - database::reset::reset_database(shell, link_to_code, dal.clone()).await?; - } - - Ok(()) -} diff --git a/zk_toolbox/crates/zk_supervisor/src/consts.rs b/zk_toolbox/crates/zk_supervisor/src/consts.rs new file mode 100644 index 000000000000..66f00c7553b5 --- /dev/null +++ b/zk_toolbox/crates/zk_supervisor/src/consts.rs @@ -0,0 +1 @@ +pub const DEFAULT_UNSIGNED_TRANSACTIONS_DIR: &str = "transactions"; diff --git a/zk_toolbox/crates/zk_supervisor/src/defaults.rs b/zk_toolbox/crates/zk_supervisor/src/defaults.rs index f4bae739c2d1..d9325402f533 100644 --- a/zk_toolbox/crates/zk_supervisor/src/defaults.rs +++ b/zk_toolbox/crates/zk_supervisor/src/defaults.rs @@ -2,3 +2,4 @@ pub const TEST_DATABASE_SERVER_URL: &str = 
"postgres://postgres:notsecurepassword@localhost:5433/zksync_local_test"; pub const TEST_DATABASE_PROVER_URL: &str = "postgres://postgres:notsecurepassword@localhost:5433/prover_local_test"; +pub const LOCAL_RPC_URL: &str = "http://127.0.0.1:8545"; diff --git a/zk_toolbox/crates/zk_supervisor/src/main.rs b/zk_toolbox/crates/zk_supervisor/src/main.rs index 32aefa7fcad9..242affd8a71b 100644 --- a/zk_toolbox/crates/zk_supervisor/src/main.rs +++ b/zk_toolbox/crates/zk_supervisor/src/main.rs @@ -1,6 +1,7 @@ use clap::{Parser, Subcommand}; use commands::{ - contracts::ContractsArgs, database::DatabaseCommands, lint::LintArgs, prover::ProverCommands, + config_writer::ConfigWriterArgs, contracts::ContractsArgs, database::DatabaseCommands, + lint::LintArgs, prover::ProverCommands, send_transactions::args::SendTransactionsArgs, snapshot::SnapshotCommands, test::TestCommands, }; use common::{ @@ -8,24 +9,30 @@ use common::{ config::{global_config, init_global_config, GlobalConfig}, error::log_error, init_prompt_theme, logger, + version::version_message, }; use config::EcosystemConfig; use messages::{ - msg_global_chain_does_not_exist, MSG_CONTRACTS_ABOUT, MSG_PROVER_VERSION_ABOUT, - MSG_SUBCOMMAND_CLEAN, MSG_SUBCOMMAND_DATABASE_ABOUT, MSG_SUBCOMMAND_FMT_ABOUT, - MSG_SUBCOMMAND_LINT_ABOUT, MSG_SUBCOMMAND_SNAPSHOTS_CREATOR_ABOUT, MSG_SUBCOMMAND_TESTS_ABOUT, + msg_global_chain_does_not_exist, MSG_CONFIG_WRITER_ABOUT, MSG_CONTRACTS_ABOUT, + MSG_PROVER_VERSION_ABOUT, MSG_SEND_TXNS_ABOUT, MSG_SUBCOMMAND_CLEAN, + MSG_SUBCOMMAND_DATABASE_ABOUT, MSG_SUBCOMMAND_FMT_ABOUT, MSG_SUBCOMMAND_LINT_ABOUT, + MSG_SUBCOMMAND_SNAPSHOTS_CREATOR_ABOUT, MSG_SUBCOMMAND_TESTS_ABOUT, }; use xshell::Shell; use crate::commands::{clean::CleanCommands, fmt::FmtArgs}; mod commands; +mod consts; mod dals; mod defaults; mod messages; #[derive(Parser, Debug)] -#[command(version, about)] +#[command( + version = version_message(env!("CARGO_PKG_VERSION")), + about +)] struct Supervisor { #[command(subcommand)] 
command: SupervisorSubcommands, @@ -53,6 +60,10 @@ enum SupervisorSubcommands { Prover(ProverCommands), #[command(about = MSG_CONTRACTS_ABOUT)] Contracts(ContractsArgs), + #[command(about = MSG_CONFIG_WRITER_ABOUT, alias = "o")] + ConfigWriter(ConfigWriterArgs), + #[command(about = MSG_SEND_TXNS_ABOUT)] + SendTransactions(SendTransactionsArgs), } #[derive(Parser, Debug)] @@ -73,14 +84,16 @@ struct SupervisorGlobalArgs { async fn main() -> anyhow::Result<()> { human_panic::setup_panic!(); + // We must parse arguments before printing the intro, because some autogenerated + // Clap commands (like `--version`) would look odd otherwise. + let args = Supervisor::parse(); + init_prompt_theme(); logger::new_empty_line(); logger::intro(); let shell = Shell::new().unwrap(); - let args = Supervisor::parse(); - init_global_config_inner(&shell, &args.global)?; if !global_config().ignore_prerequisites { @@ -111,6 +124,10 @@ async fn run_subcommand(args: Supervisor, shell: &Shell) -> anyhow::Result<()> { SupervisorSubcommands::Fmt(args) => commands::fmt::run(shell.clone(), args).await?, SupervisorSubcommands::Prover(command) => commands::prover::run(shell, command).await?, SupervisorSubcommands::Contracts(args) => commands::contracts::run(shell, args)?, + SupervisorSubcommands::ConfigWriter(args) => commands::config_writer::run(shell, args)?, + SupervisorSubcommands::SendTransactions(args) => { + commands::send_transactions::run(shell, args).await?
+ } } Ok(()) } diff --git a/zk_toolbox/crates/zk_supervisor/src/messages.rs b/zk_toolbox/crates/zk_supervisor/src/messages.rs index d64e87cd0eb4..5f68630f7562 100644 --- a/zk_toolbox/crates/zk_supervisor/src/messages.rs +++ b/zk_toolbox/crates/zk_supervisor/src/messages.rs @@ -14,6 +14,7 @@ pub(super) const MSG_SUBCOMMAND_TESTS_ABOUT: &str = "Run tests"; pub(super) const MSG_SUBCOMMAND_CLEAN: &str = "Clean artifacts"; pub(super) const MSG_SUBCOMMAND_LINT_ABOUT: &str = "Lint code"; pub(super) const MSG_CONTRACTS_ABOUT: &str = "Build contracts"; +pub(super) const MSG_CONFIG_WRITER_ABOUT: &str = "Overwrite general config"; pub(super) const MSG_SUBCOMMAND_FMT_ABOUT: &str = "Format code"; @@ -99,7 +100,6 @@ pub(super) const MSG_TESTS_RECOVERY_SNAPSHOT_HELP: &str = "Run recovery from a snapshot instead of genesis"; pub(super) const MSG_UNIT_TESTS_RUN_SUCCESS: &str = "Unit tests ran successfully"; pub(super) const MSG_USING_CARGO_NEXTEST: &str = "Using cargo-nextest for running tests"; -pub(super) const MSG_CARGO_NEXTEST_MISSING_ERR: &str = "cargo-nextest is missing, please run 'cargo install cargo-nextest'. 
Falling back to 'cargo test'"; pub(super) const MSG_L1_CONTRACTS_ABOUT: &str = "Run L1 contracts tests"; pub(super) const MSG_L1_CONTRACTS_TEST_SUCCESS: &str = "L1 contracts tests ran successfully"; pub(super) const MSG_PROVER_TEST_ABOUT: &str = "Run prover tests"; @@ -201,3 +201,30 @@ pub(super) const MSG_RUNNING_CONTRACTS_FMT_SPINNER: &str = "Running prettier for pub(super) const MSG_TEST_WALLETS_INFO: &str = "Print test wallets information"; pub(super) const MSG_DESERIALIZE_TEST_WALLETS_ERR: &str = "Impossible to deserialize test wallets"; pub(super) const MSG_WALLETS_TEST_SUCCESS: &str = "Wallets test success"; + +pub(super) const MSG_LOADTEST_ABOUT: &str = "Run loadtest"; + +pub(super) const MSG_OVERRIDE_CONFIG_PATH_HELP: &str = "Path to the config file to override"; +pub(super) const MSG_OVERRRIDE_CONFIG_PATH_PROMPT: &str = + "Provide path to the config file to override"; +pub(super) const MSG_OVERRIDE_SUCCESS: &str = "Config was overridden successfully"; + +pub(super) fn msg_overriding_config(chain: String) -> String { + format!("Overriding general config for chain {}", chain) +} + +// Send transactions related messages +pub(super) const MSG_SEND_TXNS_ABOUT: &str = "Send transactions from file"; +pub(super) const MSG_PROMPT_TRANSACTION_FILE: &str = "Path to transactions file"; +pub(super) const MSG_PROMPT_SECRET_KEY: &str = "Secret key of the sender"; +pub(super) const MSG_PROMPT_L1_RPC_URL: &str = "L1 RPC URL"; +pub(super) fn msg_send_txns_outro(log_file: &str) -> String { + format!("Transaction receipts logged to: {}", log_file) +} + +pub(super) const MSG_UNABLE_TO_OPEN_FILE_ERR: &str = "Unable to open file"; +pub(super) const MSG_UNABLE_TO_READ_FILE_ERR: &str = "Unable to read file"; +pub(super) const MSG_UNABLE_TO_WRITE_FILE_ERR: &str = "Unable to write data to file"; +pub(super) const MSG_UNABLE_TO_READ_PARSE_JSON_ERR: &str = "Unable to parse JSON"; +pub(super) const MSG_FAILED_TO_SEND_TXN_ERR: &str = "Failed to send transaction"; +pub(super) const 
MSG_INVALID_L1_RPC_URL_ERR: &str = "Invalid L1 RPC URL";