diff --git a/.circleci/config.yml b/.circleci/config.yml index b05b90cf172d..96343056735e 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -530,6 +530,7 @@ jobs: - run: name: "Test" command: cond_run_script end-to-end ./scripts/run_tests_local e2e_2_pxes.test.ts + environment: { DEBUG: 'aztec:*' } e2e-multiple-accounts-1-enc-key: machine: @@ -541,6 +542,7 @@ jobs: - run: name: "Test" command: cond_run_script end-to-end ./scripts/run_tests_local e2e_multiple_accounts_1_enc_key.test.ts + environment: { DEBUG: 'aztec:*' } e2e-deploy-contract: machine: @@ -552,6 +554,7 @@ jobs: - run: name: "Test" command: cond_run_script end-to-end ./scripts/run_tests_local e2e_deploy_contract.test.ts + environment: { DEBUG: 'aztec:*' } e2e-lending-contract: machine: @@ -563,6 +566,7 @@ jobs: - run: name: "Test" command: cond_run_script end-to-end ./scripts/run_tests_local e2e_lending_contract.test.ts + environment: { DEBUG: 'aztec:*' } e2e-token-contract: machine: @@ -574,6 +578,7 @@ jobs: - run: name: "Test" command: cond_run_script end-to-end ./scripts/run_tests_local e2e_token_contract.test.ts + environment: { DEBUG: 'aztec:*' } e2e-private-airdrop: machine: @@ -585,6 +590,7 @@ jobs: - run: name: "Test" command: cond_run_script end-to-end ./scripts/run_tests_local e2e_private_airdrop.test.ts + environment: { DEBUG: 'aztec:*' } e2e-private-token-contract: machine: @@ -596,6 +602,7 @@ jobs: - run: name: "Test" command: cond_run_script end-to-end ./scripts/run_tests_local e2e_private_token_contract.test.ts + environment: { DEBUG: 'aztec:*' } e2e-sandbox-example: machine: @@ -607,6 +614,7 @@ jobs: - run: name: "Test" command: cond_run_script end-to-end ./scripts/run_tests_local e2e_sandbox_example.test.ts ./scripts/docker-compose-e2e-sandbox.yml + environment: { DEBUG: 'aztec:*' } e2e-multi-transfer-contract: machine: @@ -618,6 +626,7 @@ jobs: - run: name: "Test" command: cond_run_script end-to-end ./scripts/run_tests_local e2e_multi_transfer.test.ts + environment: { 
DEBUG: 'aztec:*' } e2e-block-building: machine: @@ -629,6 +638,7 @@ jobs: - run: name: "Test" command: cond_run_script end-to-end ./scripts/run_tests_local e2e_block_building.test.ts + environment: { DEBUG: 'aztec:*' } e2e-nested-contract: machine: @@ -640,6 +650,7 @@ jobs: - run: name: "Test" command: cond_run_script end-to-end ./scripts/run_tests_local e2e_nested_contract.test.ts + environment: { DEBUG: 'aztec:*' } e2e-non-contract-account: machine: @@ -651,6 +662,7 @@ jobs: - run: name: "Test" command: cond_run_script end-to-end ./scripts/run_tests_local e2e_non_contract_account.test.ts + environment: { DEBUG: 'aztec:*' } e2e-cross-chain-messaging: machine: @@ -662,6 +674,7 @@ jobs: - run: name: "Test" command: cond_run_script end-to-end ./scripts/run_tests_local e2e_cross_chain_messaging.test.ts + environment: { DEBUG: 'aztec:*' } e2e-public-cross-chain-messaging: machine: @@ -673,6 +686,7 @@ jobs: - run: name: "Test" command: cond_run_script end-to-end ./scripts/run_tests_local e2e_public_cross_chain_messaging.test.ts + environment: { DEBUG: 'aztec:*' } e2e-public-to-private-messaging: machine: @@ -684,6 +698,7 @@ jobs: - run: name: "Test" command: cond_run_script end-to-end ./scripts/run_tests_local e2e_public_to_private_messaging.test.ts + environment: { DEBUG: 'aztec:*' } e2e-account-contracts: machine: @@ -695,6 +710,7 @@ jobs: - run: name: "Test" command: cond_run_script end-to-end ./scripts/run_tests_local e2e_account_contracts.test.ts + environment: { DEBUG: 'aztec:*' } e2e-escrow-contract: machine: @@ -706,6 +722,7 @@ jobs: - run: name: "Test" command: cond_run_script end-to-end ./scripts/run_tests_local e2e_escrow_contract.test.ts + environment: { DEBUG: 'aztec:*' } e2e-pending-commitments-contract: machine: @@ -717,6 +734,7 @@ jobs: - run: name: "Test" command: cond_run_script end-to-end ./scripts/run_tests_local e2e_pending_commitments_contract.test.ts + environment: { DEBUG: 'aztec:*' } e2e-ordering: machine: @@ -728,6 +746,7 @@ jobs: - run: name: 
"Test" command: cond_run_script end-to-end ./scripts/run_tests_local e2e_ordering.test.ts + environment: { DEBUG: 'aztec:*' } uniswap-trade-on-l1-from-l2: machine: @@ -739,6 +758,7 @@ jobs: - run: name: "Test" command: cond_run_script end-to-end ./scripts/run_tests_local uniswap_trade_on_l1_from_l2.test.ts + environment: { DEBUG: 'aztec:*' } integration-archiver-l1-to-l2: machine: @@ -750,6 +770,7 @@ jobs: - run: name: "Test" command: cond_run_script end-to-end ./scripts/run_tests_local integration_archiver_l1_to_l2.test.ts + environment: { DEBUG: 'aztec:*' } integration-l1-publisher: machine: @@ -761,6 +782,7 @@ jobs: - run: name: "Test" command: cond_run_script end-to-end ./scripts/run_tests_local integration_l1_publisher.test.ts + environment: { DEBUG: 'aztec:*' } e2e-public-token-contract: machine: @@ -772,6 +794,7 @@ jobs: - run: name: "Test" command: cond_run_script end-to-end ./scripts/run_tests_local e2e_public_token_contract.test.ts + environment: { DEBUG: 'aztec:*' } e2e-cli: machine: @@ -783,6 +806,7 @@ jobs: - run: name: "Test" command: cond_run_script end-to-end ./scripts/run_tests_local e2e_cli.test.ts + environment: { DEBUG: 'aztec:*' } e2e-p2p: machine: @@ -794,6 +818,7 @@ jobs: - run: name: "Test" command: cond_run_script end-to-end ./scripts/run_tests_local e2e_p2p_network.test.ts + environment: { DEBUG: 'aztec:*' } e2e-browser-sandbox: machine: @@ -805,6 +830,7 @@ jobs: - run: name: "Test" command: cond_run_script end-to-end ./scripts/run_tests_local e2e_aztec_js_browser.test.ts ./scripts/docker-compose-e2e-sandbox-browser.yml + environment: { DEBUG: 'aztec:*' } e2e-card-game: machine: @@ -816,6 +842,7 @@ jobs: - run: name: "Test" command: cond_run_script end-to-end ./scripts/run_tests_local e2e_card_game.test.ts + environment: { DEBUG: 'aztec:*' } pxe-sandbox: machine: @@ -827,6 +854,7 @@ jobs: - run: name: "Test" command: cond_run_script end-to-end ./scripts/run_tests_local pxe_sandbox.test.ts ./scripts/docker-compose-e2e-sandbox.yml + 
environment: { DEBUG: 'aztec:*' } cli-docs-sandbox: machine: @@ -838,6 +866,7 @@ jobs: - run: name: "Test" command: cond_run_script end-to-end ./scripts/run_tests_local cli_docs_sandbox.test.ts ./scripts/docker-compose-e2e-sandbox.yml + environment: { DEBUG: 'aztec:*' } guides-writing-an-account-contract: machine: @@ -849,6 +878,7 @@ jobs: - run: name: "Test" command: cond_run_script end-to-end ./scripts/run_tests_local guides/writing_an_account_contract.test.ts + environment: { DEBUG: 'aztec:*' } guides-dapp-testing: machine: @@ -860,6 +890,7 @@ jobs: - run: name: "Test" command: cond_run_script end-to-end ./scripts/run_tests_local guides/dapp_testing.test.ts ./scripts/docker-compose-e2e-sandbox.yml + environment: { DEBUG: 'aztec:*' } guides-sample-dapp: machine: @@ -871,6 +902,7 @@ jobs: - run: name: "Test" command: cond_run_script end-to-end ./scripts/run_tests_local sample-dapp ./scripts/docker-compose-e2e-sandbox.yml + environment: { DEBUG: 'aztec:*' } guides-up-quick-start: machine: @@ -882,6 +914,7 @@ jobs: - run: name: "Test" command: cond_run_script end-to-end ./scripts/run_tests_local guides/up_quick_start.test.ts ./scripts/docker-compose-e2e-sandbox.yml + environment: { DEBUG: 'aztec:*' } e2e-canary-test: machine: @@ -894,6 +927,18 @@ jobs: name: "Test" command: cond_run_script canary-build ./scripts/run_tests uniswap_trade_on_l1_from_l2.test.ts canary-build ./scripts/docker-compose-e2e-sandbox.yml + bench-publish-rollup: + machine: + image: ubuntu-2204:2023.07.2 + resource_class: large + steps: + - *checkout + - *setup_env + - run: + name: "Benchmark" + command: cond_run_script end-to-end ./scripts/run_tests_local benchmarks/bench_publish_rollup.test.ts + environment: { DEBUG: 'aztec:benchmarks:*,aztec:sequencer' } + build-docs: machine: image: ubuntu-2204:2023.07.2 @@ -926,6 +971,16 @@ jobs: name: "Noop" command: echo Noop + bench-summary: + machine: + image: ubuntu-2204:2023.07.2 + steps: + - *checkout + - *setup_env + - run: + name: "Assemble 
benchmark summary from uploaded logs" + command: ./scripts/ci/assemble_e2e_benchmark.sh + deploy-npm: machine: image: ubuntu-2204:2023.07.2 @@ -1220,6 +1275,7 @@ workflows: - guides-dapp-testing: *e2e_test - guides-sample-dapp: *e2e_test - guides-up-quick-start: *e2e_test + - bench-publish-rollup: *e2e_test - e2e-end: requires: @@ -1257,6 +1313,12 @@ workflows: - guides-dapp-testing - guides-sample-dapp - guides-up-quick-start + - bench-publish-rollup + <<: *defaults + + - bench-summary: + requires: + - e2e-end <<: *defaults # Deployment and Canary tests diff --git a/scripts/ci/aggregate_e2e_benchmark.js b/scripts/ci/aggregate_e2e_benchmark.js new file mode 100644 index 000000000000..faa12fb47f7a --- /dev/null +++ b/scripts/ci/aggregate_e2e_benchmark.js @@ -0,0 +1,94 @@ +// Given a local folder with the e2e benchmark files, generates a single file +// output with the grouped metrics to be published. This script can probably +// be replaced by a single call to jq, but I found this easier to write, +// and pretty much every CI comes with a working version of node. + +const fs = require("fs"); +const path = require("path"); +const readline = require("readline"); + +const { + L1_ROLLUP_CALLDATA_SIZE_IN_BYTES, + L1_ROLLUP_CALLDATA_GAS, + L1_ROLLUP_EXECUTION_GAS, + ROLLUP_PUBLISHED_TO_L1, + ROLLUP_SIZES, + BENCHMARK_FILE_JSON, +} = require("./benchmark_shared.js"); + +// Folder where to load logs from +const logsDir = process.env.LOGS_DIR ?? 
`log`; + +// Appends a datapoint to the final results for the given metric in the given bucket +function append(results, metric, bucket, value) { + if (value === undefined) { + console.error(`Undefined value for ${metric} in bucket ${bucket}`); + return; + } + const numeric = Number(value); + if (Number.isNaN(numeric)) { + console.error(`Value ${value} for ${metric} in ${bucket} is not a number`); + return; + } + if (!results[metric]) results[metric] = {}; + if (!results[metric][bucket]) results[metric][bucket] = []; + results[metric][bucket].push(numeric); +} + +// Processes an entry with event name 'rollup-published-to-l1' and updates results +function processRollupPublished(entry, results) { + const bucket = entry.txCount; + if (!ROLLUP_SIZES.includes(bucket)) return; + append(results, L1_ROLLUP_CALLDATA_GAS, bucket, entry.calldataGas); + append(results, L1_ROLLUP_CALLDATA_SIZE_IN_BYTES, bucket, entry.calldataSize); + append(results, L1_ROLLUP_EXECUTION_GAS, bucket, entry.gasUsed); +} + +// Processes a parsed entry from a logfile and updates results +function processEntry(entry, results) { + switch (entry.eventName) { + case ROLLUP_PUBLISHED_TO_L1: + return processRollupPublished(entry, results); + default: + return; + } +} + +// Parses all jsonl files downloaded and aggregates them into a single results object +async function main() { + const results = {}; + + // Get all jsonl files in the logs dir + const files = fs.readdirSync(logsDir).filter((f) => f.endsWith(".jsonl")); + + // Iterate over each .jsonl file + for (const file of files) { + const filePath = path.join(logsDir, file); + const fileStream = fs.createReadStream(filePath); + const rl = readline.createInterface({ input: fileStream }); + + for await (const line of rl) { + const entry = JSON.parse(line); + processEntry(entry, results); + } + } + + // For each bucket of each metric compute the average all collected datapoints + for (const metricName in results) { + const metric = results[metricName]; + 
for (const bucketName in metric) { + const bucket = metric[bucketName]; + let avg = bucket.reduce((acc, val) => acc + val, 0) / bucket.length; + if (avg > 100) avg = Math.floor(avg); + metric[bucketName] = avg; + } + } + + // Throw in a timestamp + results.timestamp = new Date().toISOString(); + + // Write results to disk + fs.writeFileSync(BENCHMARK_FILE_JSON, JSON.stringify(results, null, 2)); +} + +main(); diff --git a/scripts/ci/assemble_e2e_benchmark.sh b/scripts/ci/assemble_e2e_benchmark.sh new file mode 100755 index 000000000000..53173ea2b2e1 --- /dev/null +++ b/scripts/ci/assemble_e2e_benchmark.sh @@ -0,0 +1,61 @@ +#!/bin/bash +# Grabs the log files uploaded in yarn-project/end-to-end/scripts/upload_logs_to_s3.sh +# that contain representative benchmarks, extracts whatever metrics are interesting, +# and assembles a single file that shows the current state of the repository. + +[ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace +set -eu + +BUCKET_NAME="aztec-ci-artifacts" +LOG_FOLDER="${LOG_FOLDER:-log}" +COMMIT_HASH="${COMMIT_HASH:-$(git rev-parse HEAD)}" +BENCHMARK_FILE_JSON="benchmark.json" + +# Adapted from yarn-project/end-to-end/scripts/upload_logs_to_s3.sh +if [ "${CIRCLE_BRANCH:-}" = "master" ]; then + LOG_SOURCE_FOLDER="logs-v1/master/$COMMIT_HASH" + BENCHMARK_TARGET_FILE="benchmarks-v1/master/$COMMIT_HASH.json" + BENCHMARK_LATEST_FILE="benchmarks-v1/latest.json" +elif [ -n "${CIRCLE_PULL_REQUEST:-}" ]; then + LOG_SOURCE_FOLDER="logs-v1/pulls/${CIRCLE_PULL_REQUEST##*/}" + BENCHMARK_TARGET_FILE="benchmarks-v1/pulls/${CIRCLE_PULL_REQUEST##*/}.json" +else + echo "Can only run benchmark aggregation on master or on a PR. Ensure CIRCLE_BRANCH or CIRCLE_PULL_REQUEST are set." 
+ exit 1 +fi + +# Download benchmark log files from S3 LOG_SOURCE_FOLDER into local LOG_FOLDER +mkdir -p $LOG_FOLDER +aws s3 cp "s3://${BUCKET_NAME}/${LOG_SOURCE_FOLDER}/" $LOG_FOLDER --exclude '*' --include 'bench*.jsonl' --recursive + +# Only generate the aggregated benchmark if we've managed to retrieve all the needed log files +# If these runs were skipped due to no changes in their rebuild-patterns, then there's no need +# to recompute the aggregated benchmark. Note that if one benchmark did run but others didn't, +# this skips the whole aggregation. For now, that's fine because all benchmark files have the +# same rebuild pattern rules. But if that changes, then we'd need to go up in the commit history +# to find the latest log files for the unchanged benchmarks. +EXPECTED_BENCHMARK_COUNT=$(find yarn-project/end-to-end/src -type f -name "bench*.test.ts" | wc -l) +DOWNLOADED_BENCHMARK_COUNT=$(find $LOG_FOLDER -type f -name "*.jsonl" | wc -l) +if [ "$DOWNLOADED_BENCHMARK_COUNT" -lt "$EXPECTED_BENCHMARK_COUNT" ]; then + echo Found $DOWNLOADED_BENCHMARK_COUNT out of $EXPECTED_BENCHMARK_COUNT benchmark log files. Exiting. 
+ exit 0 +fi + +# Generate the aggregated benchmark file +node scripts/ci/aggregate_e2e_benchmark.js +echo "generated: $BENCHMARK_FILE_JSON" + +# Upload it to master or pulls +aws s3 cp $BENCHMARK_FILE_JSON "s3://${BUCKET_NAME}/${BENCHMARK_TARGET_FILE}" + +# If on master, also update the "latest" benchmark file +if [ -n "${BENCHMARK_LATEST_FILE:-}" ]; then + aws s3 cp $BENCHMARK_FILE_JSON "s3://${BUCKET_NAME}/${BENCHMARK_LATEST_FILE}" +fi + +# If on a pull request, comment on it +if [ -n "${CIRCLE_PULL_REQUEST:-}" ]; then + (node scripts/ci/comment_e2e_benchmark.js && echo "commented on pr $CIRCLE_PULL_REQUEST") || echo "failed commenting on pr" +fi + + diff --git a/scripts/ci/benchmark_shared.js b/scripts/ci/benchmark_shared.js new file mode 100644 index 000000000000..6554b15c6e25 --- /dev/null +++ b/scripts/ci/benchmark_shared.js @@ -0,0 +1,24 @@ +// Metrics to capture +const L1_ROLLUP_CALLDATA_SIZE_IN_BYTES = "l1_rollup_calldata_size_in_bytes"; +const L1_ROLLUP_CALLDATA_GAS = "l1_rollup_calldata_gas"; +const L1_ROLLUP_EXECUTION_GAS = "l1_rollup_execution_gas"; + +// Events to track +const ROLLUP_PUBLISHED_TO_L1 = "rollup-published-to-l1"; + +// Rollup sizes to track (duplicated from yarn-project/end-to-end/src/benchmarks/bench_publish_rollup.test.ts) +const ROLLUP_SIZES = process.env.ROLLUP_SIZES + ? process.env.ROLLUP_SIZES.split(",").map(Number) + : [8, 32, 128]; + +// Output files +const BENCHMARK_FILE_JSON = process.env.BENCHMARK_FILE_JSON ?? 
"benchmark.json"; + +module.exports = { + L1_ROLLUP_CALLDATA_SIZE_IN_BYTES, + L1_ROLLUP_CALLDATA_GAS, + L1_ROLLUP_EXECUTION_GAS, + ROLLUP_PUBLISHED_TO_L1, + ROLLUP_SIZES, + BENCHMARK_FILE_JSON, +}; diff --git a/scripts/ci/comment_e2e_benchmark.js b/scripts/ci/comment_e2e_benchmark.js new file mode 100644 index 000000000000..16f6541a048b --- /dev/null +++ b/scripts/ci/comment_e2e_benchmark.js @@ -0,0 +1,141 @@ +// Given a local benchmark json aggregated file, reformats it in markdown +// and comments on the PR that prompted it. If the CI is rerun, the comment +// is updated. + +const https = require("https"); +const fs = require("fs"); + +const GITHUB_TOKEN = process.env.GITHUB_TOKEN; +const OWNER = "AztecProtocol"; +const REPO = "aztec3-packages"; +const COMMENT_MARK = "<!-- AUTOGENERATED BENCHMARK COMMENT -->"; + +const { ROLLUP_SIZES, BENCHMARK_FILE_JSON } = require("./benchmark_shared.js"); + +// Returns the md content to post +function getContent() { + const benchmark = JSON.parse(fs.readFileSync(BENCHMARK_FILE_JSON, "utf-8")); + delete benchmark.timestamp; + + const sizes = ROLLUP_SIZES; + const header = `| Metric | ${sizes.map((i) => `${i} txs`).join(" | ")} |`; + const separator = `| - | ${sizes.map(() => "-").join(" | ")} |`; + const rows = Object.keys(benchmark).map((key) => { + const metric = benchmark[key]; + return `${key} | ${sizes.map((i) => metric[i]).join(" | ")} |`; + }); + + return ` +## Benchmark results + +### Rollup published to L1 + +${header} +${separator} +${rows.join("\n")} + +${COMMENT_MARK} +`; +} + +// Returns the number of the current PR +function getPrNumber() { + if (!process.env.CIRCLE_PULL_REQUEST) throw new Error(`Not in Circle PR`); + const fragments = process.env.CIRCLE_PULL_REQUEST.split("/"); + return fragments[fragments.length - 1]; +} + +// Function to check if a bench comment already exists +async function getExistingComment() { + try { + const response = await sendGitHubRequest( + `/repos/${OWNER}/${REPO}/issues/${getPrNumber()}/comments` + ); + const comments = 
JSON.parse(response); + return comments.find((comment) => comment.body.includes(COMMENT_MARK)); + } catch (error) { + throw new Error("Error checking for existing comments: " + error.message); + } +} + +// Function to create or update a comment +async function upsertComment(existingCommentId) { + try { + const commentContent = getContent(); + const commentData = { body: commentContent }; + + const requestMethod = existingCommentId ? "PATCH" : "POST"; + const requestUrl = existingCommentId + ? `/repos/${OWNER}/${REPO}/issues/comments/${existingCommentId}` + : `/repos/${OWNER}/${REPO}/issues/${getPrNumber()}/comments`; + + await sendGitHubRequest(requestUrl, requestMethod, commentData); + console.log("Comment added or updated successfully."); + } catch (error) { + throw new Error("Error adding or updating comment: " + error.message); + } +} + +// Function to send a request to the GitHub API +async function sendGitHubRequest(url, method = "GET", data = null) { + const apiUrl = url.startsWith("http") ? 
url : `https://api.github.com${url}`; + const headers = { + Authorization: `Bearer ${GITHUB_TOKEN}`, + Accept: "application/vnd.github+json", + "X-GitHub-Api-Version": "2022-11-28", + "User-Agent": OWNER, + }; + if (data) headers["Content-Type"] = "application/json"; + const requestOptions = { method, headers }; + + return new Promise((resolve, reject) => { + const req = https.request(apiUrl, requestOptions, (res) => { + if ( + res.statusCode === 301 || + res.statusCode === 302 || + res.statusCode === 307 + ) { + sendGitHubRequest(res.headers.location, method, data) + .then(resolve) + .catch(reject); + return; + } else { + let data = ""; + res.on("data", (chunk) => { + data += chunk; + }); + + res.on("end", () => { + if (res.statusCode >= 200 && res.statusCode < 300) { + resolve(data); + } else { + reject( + new Error( + `GitHub API request failed with ${res.statusCode}: ${data}` + ) + ); + } + }); + } + }); + + req.on("error", (error) => { + reject(error); + }); + + if (data) req.write(JSON.stringify(data)); + req.end(); + }); +} + +async function main() { + try { + const existingComment = await getExistingComment(); + await upsertComment(existingComment?.id); + } catch (err) { + console.error(`error while commenting on pull request:`, err); + process.exit(1); + } +} + +main(); diff --git a/scripts/ci/store_test_benchmark_logs b/scripts/ci/store_test_benchmark_logs index 403d3ed93604..3554aa746d2d 100755 --- a/scripts/ci/store_test_benchmark_logs +++ b/scripts/ci/store_test_benchmark_logs @@ -1,4 +1,7 @@ #!/bin/bash +# Script for storing barretenberg benchmark results. +# Uploads results to the AztecProtocol/benchmark-archive repository. 
+ [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -e diff --git a/yarn-project/aztec-node/src/aztec-node/server.ts b/yarn-project/aztec-node/src/aztec-node/server.ts index f1e3f3ad6078..f485f3917103 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.ts @@ -55,19 +55,19 @@ import { openDb } from './db.js'; */ export class AztecNodeService implements AztecNode { constructor( - protected config: AztecNodeConfig, - protected p2pClient: P2P, - protected blockSource: L2BlockSource, - protected encryptedLogsSource: L2LogsSource, - protected unencryptedLogsSource: L2LogsSource, - protected contractDataSource: ContractDataSource, - protected l1ToL2MessageSource: L1ToL2MessageSource, - protected worldStateSynchronizer: WorldStateSynchronizer, - protected sequencer: SequencerClient, - protected chainId: number, - protected version: number, - protected globalVariableBuilder: GlobalVariableBuilder, - protected merkleTreesDb: levelup.LevelUp, + protected readonly config: AztecNodeConfig, + protected readonly p2pClient: P2P, + protected readonly blockSource: L2BlockSource, + protected readonly encryptedLogsSource: L2LogsSource, + protected readonly unencryptedLogsSource: L2LogsSource, + protected readonly contractDataSource: ContractDataSource, + protected readonly l1ToL2MessageSource: L1ToL2MessageSource, + protected readonly worldStateSynchronizer: WorldStateSynchronizer, + protected readonly sequencer: SequencerClient, + protected readonly chainId: number, + protected readonly version: number, + protected readonly globalVariableBuilder: GlobalVariableBuilder, + protected readonly merkleTreesDb: levelup.LevelUp, private log = createDebugLogger('aztec:node'), ) {} @@ -122,6 +122,14 @@ export class AztecNodeService implements AztecNode { ); } + /** + * Returns the sequencer client instance. + * @returns The sequencer client instance. 
+ */ + public getSequencer(): SequencerClient { + return this.sequencer; + } + /** * Method to return the currently deployed L1 contract addresses. * @returns - The currently deployed L1 contract addresses. diff --git a/yarn-project/aztec-sandbox/src/logging.ts b/yarn-project/aztec-sandbox/src/logging.ts index 3c270417e61f..5a592496a491 100644 --- a/yarn-project/aztec-sandbox/src/logging.ts +++ b/yarn-project/aztec-sandbox/src/logging.ts @@ -2,7 +2,6 @@ import { onLog } from '@aztec/foundation/log'; import * as path from 'path'; import * as process from 'process'; -import * as util from 'util'; import * as winston from 'winston'; import DailyRotateFile from 'winston-daily-rotate-file'; @@ -37,8 +36,8 @@ function createWinstonLogger() { */ export function setupFileDebugLog() { const logger = createWinstonLogger(); - onLog((level, namespace, args) => { - logger.log({ level, namespace, message: util.format(...args) }); + onLog((level, namespace, message, data) => { + logger.log({ ...data, level, namespace, message }); }); const workdir = process.env.HOST_WORKDIR ?? 
process.cwd(); return path.join(workdir, LOG_DIR, CURRENT_LOG_FILE_NAME); diff --git a/yarn-project/canary/src/uniswap_trade_on_l1_from_l2.test.ts b/yarn-project/canary/src/uniswap_trade_on_l1_from_l2.test.ts index 8ae683ebfee1..58fcb9d6797d 100644 --- a/yarn-project/canary/src/uniswap_trade_on_l1_from_l2.test.ts +++ b/yarn-project/canary/src/uniswap_trade_on_l1_from_l2.test.ts @@ -434,10 +434,10 @@ describe('uniswap_trade_on_l1_from_l2', () => { const wethL2BalanceAfterSwap = await getL2PrivateBalanceOf(ownerAddress, wethL2Contract); const daiL2BalanceAfterSwap = await getL2PrivateBalanceOf(ownerAddress, daiL2Contract); - logger('WETH balance before swap: ', wethL2BalanceBeforeSwap.toString()); - logger('DAI balance before swap : ', daiL2BalanceBeforeSwap.toString()); + logger('WETH balance before swap: ' + wethL2BalanceBeforeSwap.toString()); + logger('DAI balance before swap : ' + daiL2BalanceBeforeSwap.toString()); logger('***** 🧚‍♀️ SWAP L2 assets on L1 Uniswap 🧚‍♀️ *****'); - logger('WETH balance after swap : ', wethL2BalanceAfterSwap.toString()); - logger('DAI balance after swap : ', daiL2BalanceAfterSwap.toString()); + logger('WETH balance after swap : ' + wethL2BalanceAfterSwap.toString()); + logger('DAI balance after swap : ' + daiL2BalanceAfterSwap.toString()); }, 140_000); }); diff --git a/yarn-project/cli/src/index.ts b/yarn-project/cli/src/index.ts index 962acb0aa617..e0e190af6bf5 100644 --- a/yarn-project/cli/src/index.ts +++ b/yarn-project/cli/src/index.ts @@ -19,6 +19,7 @@ import { createSecp256k1PeerId } from '@libp2p/peer-id-factory'; import { Command, Option } from 'commander'; import { readFileSync } from 'fs'; import { dirname, resolve } from 'path'; +import { format } from 'util'; import { mnemonicToAccount } from 'viem/accounts'; import { createCompatibleClient } from './client.js'; @@ -440,7 +441,7 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command { const client = await createCompatibleClient(options.rpcUrl, 
debugLogger); const from = await getTxSender(client, options.from); const result = await client.viewTx(functionName, functionArgs, options.contractAddress, from); - log('\nView result: ', result, '\n'); + log(format('\nView result: ', result, '\n')); }); program diff --git a/yarn-project/end-to-end/package.json b/yarn-project/end-to-end/package.json index 73f29b7ef3c5..6d3d3426817c 100644 --- a/yarn-project/end-to-end/package.json +++ b/yarn-project/end-to-end/package.json @@ -9,7 +9,7 @@ "clean": "rm -rf ./dest .tsbuildinfo", "formatting": "run -T prettier --check ./src \"!src/web/main.js\" && run -T eslint ./src", "formatting:fix": "run -T prettier -w ./src", - "test": "DEBUG='aztec:*' NODE_NO_WARNINGS=1 node --experimental-vm-modules $(yarn bin jest) --runInBand --passWithNoTests --testTimeout=15000", + "test": "NODE_NO_WARNINGS=1 node --experimental-vm-modules $(yarn bin jest) --runInBand --passWithNoTests --testTimeout=15000", "test:integration": "concurrently -k -s first -c reset,dim -n test,anvil \"yarn test:integration:run\" \"anvil\"", "test:integration:run": "NODE_NO_WARNINGS=1 node --experimental-vm-modules $(yarn bin jest) --no-cache --runInBand --config jest.integration.config.json" }, @@ -39,6 +39,7 @@ "@aztec/world-state": "workspace:^", "@jest/globals": "^29.5.0", "@noble/curves": "^1.0.0", + "@types/fs-extra": "^11.0.2", "@types/jest": "^29.5.0", "@types/koa": "^2.13.9", "@types/koa-static": "^4.0.2", @@ -65,11 +66,11 @@ "ts-node": "^10.9.1", "tslib": "^2.4.0", "typescript": "^5.0.4", - "viem": "^1.2.5" + "viem": "^1.2.5", + "winston": "^3.10.0" }, "devDependencies": { "@rushstack/eslint-patch": "^1.1.4", - "@types/lodash.compact": "^3.0.7", "concurrently": "^7.6.0" }, "files": [ diff --git a/yarn-project/end-to-end/scripts/docker-compose-e2e-sandbox-browser.yml b/yarn-project/end-to-end/scripts/docker-compose-e2e-sandbox-browser.yml index 54424e6c93e6..7436b41d6fb1 100644 --- a/yarn-project/end-to-end/scripts/docker-compose-e2e-sandbox-browser.yml 
+++ b/yarn-project/end-to-end/scripts/docker-compose-e2e-sandbox-browser.yml @@ -31,8 +31,12 @@ services: end-to-end: image: aztecprotocol/end-to-end:latest environment: + BENCHMARK: true + DEBUG: ${DEBUG:-'aztec:*'} ETHEREUM_HOST: http://fork:8545 CHAIN_ID: 31337 SANDBOX_URL: http://sandbox:8080 SEARCH_START_BLOCK: ${FORK_BLOCK_NUMBER:-0} entrypoint: ['./scripts/start_e2e_ci_browser.sh', './src/e2e_aztec_js_browser.test.ts'] + volumes: + - ../log:/usr/src/yarn-project/end-to-end/log:rw diff --git a/yarn-project/end-to-end/scripts/docker-compose-e2e-sandbox.yml b/yarn-project/end-to-end/scripts/docker-compose-e2e-sandbox.yml index 53eb4fde6d22..e59bb5c062f9 100644 --- a/yarn-project/end-to-end/scripts/docker-compose-e2e-sandbox.yml +++ b/yarn-project/end-to-end/scripts/docker-compose-e2e-sandbox.yml @@ -31,8 +31,12 @@ services: end-to-end: image: aztecprotocol/end-to-end:latest environment: + BENCHMARK: true + DEBUG: ${DEBUG:-'aztec:*'} ETHEREUM_HOST: http://fork:8545 CHAIN_ID: 31337 SANDBOX_URL: http://sandbox:8080 SEARCH_START_BLOCK: ${FORK_BLOCK_NUMBER:-0} command: ${TEST:-./src/e2e_deploy_contract.test.ts} + volumes: + - ../log:/usr/src/yarn-project/end-to-end/log:rw diff --git a/yarn-project/end-to-end/scripts/docker-compose-p2p.yml b/yarn-project/end-to-end/scripts/docker-compose-p2p.yml index 851641e664b0..c6d607a79bbe 100644 --- a/yarn-project/end-to-end/scripts/docker-compose-p2p.yml +++ b/yarn-project/end-to-end/scripts/docker-compose-p2p.yml @@ -21,7 +21,8 @@ services: end-to-end: image: aztecprotocol/end-to-end:latest environment: - DEBUG: 'aztec:*' + BENCHMARK: true + DEBUG: ${DEBUG:-'aztec:*'} ETHEREUM_HOST: http://fork:8545 CHAIN_ID: 31337 ARCHIVER_POLLING_INTERVAL: 500 @@ -36,3 +37,5 @@ services: P2P_ENABLED: 'true' BOOTSTRAP_NODES: '/ip4/p2p-bootstrap/tcp/40400/p2p/12D3KooWGBpbC6qQFkaCYphjNeY6sV99o4SnEWyTeBigoVriDn4D' command: ${TEST:-./src/e2e_p2p_network.test.ts} + volumes: + - ../log:/usr/src/yarn-project/end-to-end/log:rw diff --git 
a/yarn-project/end-to-end/scripts/docker-compose.yml b/yarn-project/end-to-end/scripts/docker-compose.yml index a20f4ffb02f5..69db7b71b145 100644 --- a/yarn-project/end-to-end/scripts/docker-compose.yml +++ b/yarn-project/end-to-end/scripts/docker-compose.yml @@ -15,7 +15,8 @@ services: end-to-end: image: aztecprotocol/end-to-end:latest environment: - DEBUG: 'aztec:*' + BENCHMARK: true + DEBUG: ${DEBUG:-'aztec:*'} ETHEREUM_HOST: http://fork:8545 CHAIN_ID: 31337 ARCHIVER_POLLING_INTERVAL_MS: 50 @@ -26,3 +27,5 @@ services: ARCHIVER_VIEM_POLLING_INTERVAL_MS: 500 SEARCH_START_BLOCK: ${FORK_BLOCK_NUMBER:-0} command: ${TEST:-./src/e2e_deploy_contract.test.ts} + volumes: + - ../log:/usr/src/yarn-project/end-to-end/log:rw diff --git a/yarn-project/end-to-end/scripts/run_tests_local b/yarn-project/end-to-end/scripts/run_tests_local index c17d582371d9..3df64ff40616 100755 --- a/yarn-project/end-to-end/scripts/run_tests_local +++ b/yarn-project/end-to-end/scripts/run_tests_local @@ -19,3 +19,6 @@ done docker-compose -f $COMPOSE_FILE rm -f docker-compose -f $COMPOSE_FILE up --exit-code-from end-to-end + +# Upload the logs with metrics to S3 +./scripts/upload_logs_to_s3.sh \ No newline at end of file diff --git a/yarn-project/end-to-end/scripts/upload_logs_to_s3.sh b/yarn-project/end-to-end/scripts/upload_logs_to_s3.sh new file mode 100755 index 000000000000..eb81df8bfac2 --- /dev/null +++ b/yarn-project/end-to-end/scripts/upload_logs_to_s3.sh @@ -0,0 +1,31 @@ +#!/bin/bash + +# Uploads to S3 the contents of the log file mounted on the end-to-end container, +# which contains log entries with an associated event and metrics for it. 
+# Logs are uploaded to aztec-ci-artifacts/logs-v1/master/$COMMIT/$JOB.jsonl +# or to aztec-ci-artifacts/logs-v1/pulls/$PRNUMBER/$JOB.jsonl if on a PR + +[ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace +set -eu + +BUCKET_NAME="aztec-ci-artifacts" +LOG_FOLDER="${LOG_FOLDER:-log}" +COMMIT_HASH="${COMMIT_HASH:-$(git rev-parse HEAD)}" + +if [ ! -d "$LOG_FOLDER" ] || [ -z "$(ls -A "$LOG_FOLDER")" ]; then + echo "No logs in folder $LOG_FOLDER to upload" + exit 0 +fi + +# Duplicated in scripts/ci/assemble_e2e_benchmark.sh +if [ "${CIRCLE_BRANCH:-}" = "master" ]; then + TARGET_FOLDER="logs-v1/master/$COMMIT_HASH/" +elif [ -n "${CIRCLE_PULL_REQUEST:-}" ]; then + TARGET_FOLDER="logs-v1/pulls/${CIRCLE_PULL_REQUEST##*/}" +fi + +if [ -n "${TARGET_FOLDER:-}" ]; then + aws s3 cp $LOG_FOLDER "s3://${BUCKET_NAME}/${TARGET_FOLDER}" --exclude '*' --include "*.jsonl" --recursive +else + echo Skipping upload since no target folder was defined +fi \ No newline at end of file diff --git a/yarn-project/end-to-end/src/benchmarks/bench_publish_rollup.test.ts b/yarn-project/end-to-end/src/benchmarks/bench_publish_rollup.test.ts new file mode 100644 index 000000000000..8c42bafb5770 --- /dev/null +++ b/yarn-project/end-to-end/src/benchmarks/bench_publish_rollup.test.ts @@ -0,0 +1,49 @@ +/* eslint-disable camelcase */ +import { AztecAddress } from '@aztec/aztec.js'; +import { sleep } from '@aztec/foundation/sleep'; +import { TokenContract } from '@aztec/noir-contracts/types'; + +import times from 'lodash.times'; + +import { setup } from '../fixtures/utils.js'; + +const ROLLUP_SIZES = process.env.ROLLUP_SIZES ? 
process.env.ROLLUP_SIZES.split(',').map(Number) : [8, 32, 128]; + +describe('benchmarks/publish_rollup', () => { + let context: Awaited>; + let token: TokenContract; + let owner: AztecAddress; + let recipient: AztecAddress; + + beforeEach(async () => { + context = await setup(2, { maxTxsPerBlock: 1024 }); + [owner, recipient] = context.accounts.map(a => a.address); + token = await TokenContract.deploy(context.wallet, owner).send().deployed(); + await token.methods.mint_public(owner, 10000n).send().wait(); + await context.aztecNode?.getSequencer().stop(); + }, 60_000); + + it.each(ROLLUP_SIZES)( + `publishes a rollup with %d txs`, + async (txCount: number) => { + context.logger(`Assembling rollup with ${txCount} txs`); + // Simulate and simultaneously send %d txs. These should not yet be processed since sequencer is stopped. + const calls = times(txCount, () => token.methods.transfer_public(owner, recipient, 1, 0)); + calls.forEach(call => call.simulate({ skipPublicSimulation: true })); + const sentTxs = calls.map(call => call.send()); + // Awaiting txHash waits until the aztec node has received the tx into its p2p pool + await Promise.all(sentTxs.map(tx => tx.getTxHash())); + // And then wait a bit more just in case + await sleep(100); + // Restart sequencer to process all txs together + context.aztecNode?.getSequencer().restart(); + // Wait for the last tx to be processed + await sentTxs[sentTxs.length - 1].wait({ timeout: 600_00 }); + }, + 10 * 60_000, + ); + + afterEach(async () => { + await context.teardown(); + }, 60_000); +}); diff --git a/yarn-project/end-to-end/src/canary/cli.ts b/yarn-project/end-to-end/src/canary/cli.ts index 2bac457365dd..79bbbfb766b1 100644 --- a/yarn-project/end-to-end/src/canary/cli.ts +++ b/yarn-project/end-to-end/src/canary/cli.ts @@ -2,7 +2,6 @@ import { AztecAddress, CompleteAddress, DebugLogger, Fr, PXE, computeMessageSecr import { getProgram } from '@aztec/cli'; import stringArgv from 'string-argv'; -import { format } from 
'util'; const INITIAL_BALANCE = 33000; const TRANSFER_BALANCE = 3000; @@ -19,16 +18,16 @@ export const cliTestSuite = ( let pxe: PXE; let existingAccounts: CompleteAddress[]; let contractAddress: AztecAddress; - let log: (...args: any[]) => void; + let log: (msg: string) => void; // All logs emitted by the cli will be collected here, and reset between tests const logs: string[] = []; beforeAll(async () => { pxe = await setup(); - log = (...args: any[]) => { - logs.push(format(...args)); - debug(...args); + log = (msg: string) => { + logs.push(msg); + debug(msg); }; }, 30_000); diff --git a/yarn-project/end-to-end/src/cli_docs_sandbox.test.ts b/yarn-project/end-to-end/src/cli_docs_sandbox.test.ts index bedab689aef2..a063df297393 100644 --- a/yarn-project/end-to-end/src/cli_docs_sandbox.test.ts +++ b/yarn-project/end-to-end/src/cli_docs_sandbox.test.ts @@ -3,7 +3,6 @@ import { getProgram } from '@aztec/cli'; import { TxHash } from '@aztec/types'; import stringArgv from 'string-argv'; -import { format } from 'util'; const debug = createDebugLogger('aztec:e2e_cli'); @@ -11,15 +10,15 @@ const { SANDBOX_URL = 'http://localhost:8080' } = process.env; describe('CLI docs sandbox', () => { let cli: ReturnType; - let log: (...args: any[]) => void; + let log: (msg: string) => void; // All logs emitted by the cli will be collected here, and reset between tests const logs: string[] = []; beforeAll(async () => { - log = (...args: any[]) => { - logs.push(format(...args)); - debug(...args); + log = (msg: string) => { + logs.push(msg); + debug(msg); }; await waitForSandboxWithCli(); diff --git a/yarn-project/end-to-end/src/e2e_sandbox_example.test.ts b/yarn-project/end-to-end/src/e2e_sandbox_example.test.ts index 665dc3714add..3de8d16843f5 100644 --- a/yarn-project/end-to-end/src/e2e_sandbox_example.test.ts +++ b/yarn-project/end-to-end/src/e2e_sandbox_example.test.ts @@ -13,6 +13,8 @@ import { import { GrumpkinScalar } from '@aztec/circuits.js'; import { TokenContract } from 
'@aztec/noir-contracts/types'; +import { format } from 'util'; + const { SANDBOX_URL = 'http://localhost:8080' } = process.env; // docs:end:imports @@ -29,7 +31,7 @@ describe('e2e_sandbox_example', () => { const nodeInfo = await pxe.getNodeInfo(); - logger('Aztec Sandbox Info ', nodeInfo); + logger(format('Aztec Sandbox Info ', nodeInfo)); // docs:end:setup expect(typeof nodeInfo.protocolVersion).toBe('number'); diff --git a/yarn-project/end-to-end/src/fixtures/cross_chain_test_harness.ts b/yarn-project/end-to-end/src/fixtures/cross_chain_test_harness.ts index 59aa17353318..aa46c47521a6 100644 --- a/yarn-project/end-to-end/src/fixtures/cross_chain_test_harness.ts +++ b/yarn-project/end-to-end/src/fixtures/cross_chain_test_harness.ts @@ -132,7 +132,7 @@ export class CrossChainTestHarness { this.logger("Generating a claim secret using pedersen's hash function"); const secret = Fr.random(); const secretHash = await computeMessageSecretHash(secret); - this.logger('Generated claim secret: ', secretHash.toString(true)); + this.logger('Generated claim secret: ' + secretHash.toString(true)); return [secret, secretHash]; } diff --git a/yarn-project/end-to-end/src/fixtures/logging.ts b/yarn-project/end-to-end/src/fixtures/logging.ts new file mode 100644 index 000000000000..54adc31866ae --- /dev/null +++ b/yarn-project/end-to-end/src/fixtures/logging.ts @@ -0,0 +1,34 @@ +import { onLog } from '@aztec/foundation/log'; + +import { mkdirpSync } from 'fs-extra'; +import { dirname } from 'path'; +import * as winston from 'winston'; + +const { format, transports } = winston; + +let metricsLoggerSet = false; + +/** Returns whether metrics logging should be enabled by default, checking env vars CI and BENCHMARK. */ +export function isMetricsLoggingRequested() { + return !!(process.env.CI || process.env.BENCHMARK); +} + +/** + * Configures an NDJSON logger to output entries to a local file that have an `eventName` associated. 
+ * Idempotent and automatically called by `setup` if CI or BENCHMARK env vars are set. + */ +export function setupMetricsLogger(filename: string) { + if (metricsLoggerSet) return; + mkdirpSync(dirname(filename)); + const logger = winston.createLogger({ + level: 'debug', + format: format.combine(format.timestamp(), format.json()), + transports: [new transports.File({ filename })], + }); + onLog((level, namespace, message, data) => { + if (data && data['eventName']) { + logger.log({ ...data, level, namespace, message }); + } + }); + metricsLoggerSet = true; +} diff --git a/yarn-project/end-to-end/src/fixtures/utils.ts b/yarn-project/end-to-end/src/fixtures/utils.ts index 2955f2b2554b..596d50ed8a29 100644 --- a/yarn-project/end-to-end/src/fixtures/utils.ts +++ b/yarn-project/end-to-end/src/fixtures/utils.ts @@ -44,6 +44,7 @@ import { NonNativeTokenContract, TokenBridgeContract, TokenContract } from '@azt import { PXEService, createPXEService, getPXEServiceConfig } from '@aztec/pxe'; import { L2BlockL2Logs, LogType, PXE, TxStatus, UnencryptedL2Log } from '@aztec/types'; +import * as path from 'path'; import { Account, Chain, @@ -60,6 +61,7 @@ import { import { mnemonicToAccount } from 'viem/accounts'; import { MNEMONIC, localAnvil } from './fixtures.js'; +import { isMetricsLoggingRequested, setupMetricsLogger } from './logging.js'; const { SANDBOX_URL = '' } = process.env; @@ -218,60 +220,50 @@ async function setupWithSandbox(account: Account, config: AztecNodeConfig, logge }; } -/** - * Sets up the environment for the end-to-end tests. - * @param numberOfAccounts - The number of new accounts to be created once the PXE is initiated. - */ -export async function setup( - numberOfAccounts = 1, - stateLoad: string | undefined = undefined, -): Promise<{ - /** - * The Aztec Node service. 
- */ +/** Options for the e2e tests setup */ +type SetupOptions = { /** State load */ stateLoad?: string } & Partial; + +/** Context for an end-to-end test as returned by the `setup` function */ +export type EndToEndContext = { + /** The Aztec Node service. */ aztecNode: AztecNodeService | undefined; - /** - * The Private eXecution Environment (PXE). - */ + /** The Private eXecution Environment (PXE). */ pxe: PXE; - /** - * Return values from deployL1Contracts function. - */ + /** Return values from deployL1Contracts function. */ deployL1ContractsValues: DeployL1Contracts; - /** - * The accounts created by the PXE. - */ + /** The accounts created by the PXE. */ accounts: CompleteAddress[]; - /** - * The Aztec Node configuration. - */ + /** The Aztec Node configuration. */ config: AztecNodeConfig; - /** - * The first wallet to be used. - */ + /** The first wallet to be used. */ wallet: AccountWallet; - /** - * The wallets to be used. - */ + /** The wallets to be used. */ wallets: AccountWallet[]; - /** - * Logger instance named as the current test. - */ + /** Logger instance named as the current test. */ logger: DebugLogger; - /** - * The cheat codes. - */ + /** The cheat codes. */ cheatCodes: CheatCodes; - /** - * Function to stop the started services. - */ + /** Function to stop the started services. */ teardown: () => Promise; -}> { - const config = getConfigEnvVars(); +}; + +/** + * Sets up the environment for the end-to-end tests. + * @param numberOfAccounts - The number of new accounts to be created once the PXE is initiated. + * @param opts - Options to pass to the node initialisation and to the setup script. 
+ */ +export async function setup(numberOfAccounts = 1, opts: SetupOptions = {}): Promise { + const config = { ...getConfigEnvVars(), ...opts }; + + // Enable logging metrics to a local file named after the test suite + if (isMetricsLoggingRequested()) { + const filename = path.join('log', getJobName() + '.jsonl'); + setupMetricsLogger(filename); + } - if (stateLoad) { + if (opts.stateLoad) { const ethCheatCodes = new EthCheatCodes(config.rpcUrl); - await ethCheatCodes.loadChainState(stateLoad); + await ethCheatCodes.loadChainState(opts.stateLoad); } const logger = getLogger(); @@ -332,12 +324,17 @@ export async function setNextBlockTimestamp(rpcUrl: string, timestamp: number) { }); } +/** Returns the job name for the current test. */ +function getJobName() { + return process.env.JOB_NAME ?? expect.getState().currentTestName?.split(' ')[0].replaceAll('/', '_') ?? 'unknown'; +} + /** * Returns a logger instance for the current test. * @returns a logger instance for the current test. */ export function getLogger() { - const describeBlockName = expect.getState().currentTestName?.split(' ')[0]; + const describeBlockName = expect.getState().currentTestName?.split(' ')[0].replaceAll('/', ':'); return createDebugLogger('aztec:' + describeBlockName); } diff --git a/yarn-project/end-to-end/src/integration_archiver_l1_to_l2.test.ts b/yarn-project/end-to-end/src/integration_archiver_l1_to_l2.test.ts index 7c0878ae32cd..f78160612883 100644 --- a/yarn-project/end-to-end/src/integration_archiver_l1_to_l2.test.ts +++ b/yarn-project/end-to-end/src/integration_archiver_l1_to_l2.test.ts @@ -81,7 +81,7 @@ describe('archiver integration with l1 to l2 messages', () => { const secret = Fr.random(); const secretHash = await computeMessageSecretHash(secret); const secretString = `0x${secretHash.toBuffer().toString('hex')}` as `0x${string}`; - logger('Generated claim secret: ', secretString); + logger('Generated claim secret: ' + secretString); logger('Minting tokens on L1'); await 
underlyingERC20.write.mint([ethAccount.toString(), 1000000n], {} as any); diff --git a/yarn-project/end-to-end/src/uniswap_trade_on_l1_from_l2.test.ts b/yarn-project/end-to-end/src/uniswap_trade_on_l1_from_l2.test.ts index 583a692b84e0..407e223239e0 100644 --- a/yarn-project/end-to-end/src/uniswap_trade_on_l1_from_l2.test.ts +++ b/yarn-project/end-to-end/src/uniswap_trade_on_l1_from_l2.test.ts @@ -64,7 +64,7 @@ describe('uniswap_trade_on_l1_from_l2', () => { wallets, logger: logger_, cheatCodes, - } = await setup(2, dumpedState); + } = await setup(2, { stateLoad: dumpedState }); const walletClient = deployL1ContractsValues.walletClient; const publicClient = deployL1ContractsValues.publicClient; @@ -272,11 +272,11 @@ describe('uniswap_trade_on_l1_from_l2', () => { const wethL2BalanceAfterSwap = await wethCrossChainHarness.getL2PrivateBalanceOf(ownerAddress); const daiL2BalanceAfterSwap = await daiCrossChainHarness.getL2PrivateBalanceOf(ownerAddress); - logger('WETH balance before swap: ', wethL2BalanceBeforeSwap.toString()); - logger('DAI balance before swap : ', daiL2BalanceBeforeSwap.toString()); + logger('WETH balance before swap: ' + wethL2BalanceBeforeSwap.toString()); + logger('DAI balance before swap : ' + daiL2BalanceBeforeSwap.toString()); logger('***** 🧚‍♀️ SWAP L2 assets on L1 Uniswap 🧚‍♀️ *****'); - logger('WETH balance after swap : ', wethL2BalanceAfterSwap.toString()); - logger('DAI balance after swap : ', daiL2BalanceAfterSwap.toString()); + logger('WETH balance after swap : ' + wethL2BalanceAfterSwap.toString()); + logger('DAI balance after swap : ' + daiL2BalanceAfterSwap.toString()); }, 140_000); it('should uniswap trade on L1 from L2 funds publicly (swaps WETH -> DAI)', async () => { diff --git a/yarn-project/foundation/src/fifo/memory_fifo.ts b/yarn-project/foundation/src/fifo/memory_fifo.ts index c6233075daeb..95f1b95fa776 100644 --- a/yarn-project/foundation/src/fifo/memory_fifo.ts +++ b/yarn-project/foundation/src/fifo/memory_fifo.ts @@ -108,7 
+108,7 @@ export class MemoryFifo { await handler(item); } } catch (err) { - this.log.error('Queue handler exception:', err); + this.log.error('Queue handler exception', err); } } } diff --git a/yarn-project/foundation/src/json-rpc/client/json_rpc_client.ts b/yarn-project/foundation/src/json-rpc/client/json_rpc_client.ts index c8f118bdb6cd..f504121cd28c 100644 --- a/yarn-project/foundation/src/json-rpc/client/json_rpc_client.ts +++ b/yarn-project/foundation/src/json-rpc/client/json_rpc_client.ts @@ -3,6 +3,7 @@ // This takes a {foo(): T} and makes {foo(): Promise} // while avoiding Promise of Promise. import { RemoteObject } from 'comlink'; +import { format } from 'util'; import { DebugLogger, createDebugLogger } from '../../log/index.js'; import { NoRetryError, makeBackoff, retry } from '../../retry/index.js'; @@ -29,7 +30,7 @@ export async function defaultFetch( useApiEndpoints: boolean, noRetry = false, ) { - debug(`JsonRpcClient.fetch`, host, rpcMethod, '->', body); + debug(format(`JsonRpcClient.fetch`, host, rpcMethod, '->', body)); let resp: Response; if (useApiEndpoints) { resp = await fetch(`${host}/${rpcMethod}`, { @@ -104,9 +105,9 @@ export function createJsonRpcClient( method, params: params.map(param => convertToJsonObj(classConverter, param)), }; - debug(`JsonRpcClient.request`, method, '<-', params); + debug(format(`JsonRpcClient.request`, method, '<-', params)); const res = await fetch(host, method, body, useApiEndpoints); - debug(`JsonRpcClient.result`, method, '->', res); + debug(format(`JsonRpcClient.result`, method, '->', res)); if (res.error) { throw res.error; } @@ -124,7 +125,7 @@ export function createJsonRpcClient( get: (target, rpcMethod: string) => { if (['then', 'catch'].includes(rpcMethod)) return Reflect.get(target, rpcMethod); return (...params: any[]) => { - debug(`JsonRpcClient.constructor`, 'proxy', rpcMethod, '<-', params); + debug(format(`JsonRpcClient.constructor`, 'proxy', rpcMethod, '<-', params)); return request(rpcMethod, 
params); }; }, diff --git a/yarn-project/foundation/src/json-rpc/server/json_proxy.ts b/yarn-project/foundation/src/json-rpc/server/json_proxy.ts index 65134740e056..e7630d7f5e84 100644 --- a/yarn-project/foundation/src/json-rpc/server/json_proxy.ts +++ b/yarn-project/foundation/src/json-rpc/server/json_proxy.ts @@ -1,3 +1,5 @@ +import { format } from 'util'; + import { createDebugLogger } from '../../log/index.js'; import { ClassConverter, JsonClassConverterInput, StringClassConverterInput } from '../class_converter.js'; import { convertFromJsonObj, convertToJsonObj } from '../convert.js'; @@ -25,17 +27,17 @@ export class JsonProxy { * @returns The remote result. */ public async call(methodName: string, jsonParams: any[] = []) { - debug(`JsonProxy:call`, methodName, jsonParams); + debug(format(`JsonProxy:call`, methodName, jsonParams)); // Get access to our class members const proto = Object.getPrototypeOf(this.handler); assert(hasOwnProperty(proto, methodName), `JsonProxy: Method ${methodName} not found!`); assert(Array.isArray(jsonParams), 'JsonProxy: Params not an array!'); // convert the params from json representation to classes const convertedParams = jsonParams.map(param => convertFromJsonObj(this.classConverter, param)); - debug('JsonProxy:call', methodName, '<-', convertedParams); + debug(format('JsonProxy:call', methodName, '<-', convertedParams)); const rawRet = await (this.handler as any)[methodName](...convertedParams); const ret = convertToJsonObj(this.classConverter, rawRet); - debug('JsonProxy:call', methodName, '->', ret); + debug(format('JsonProxy:call', methodName, '->', ret)); return ret; } } diff --git a/yarn-project/foundation/src/log/index.ts b/yarn-project/foundation/src/log/index.ts index c9c747a50b10..f3acad0ae583 100644 --- a/yarn-project/foundation/src/log/index.ts +++ b/yarn-project/foundation/src/log/index.ts @@ -1,7 +1,8 @@ -/** - * A callable logger instance. 
- */ -export type LogFn = (...args: any[]) => void; +/** Structured log data to include with the message. */ +export type LogData = Record; + +/** A callable logger instance. */ +export type LogFn = (msg: string, data?: LogData) => void; export * from './console.js'; export * from './debug.js'; diff --git a/yarn-project/foundation/src/log/log_history.test.ts b/yarn-project/foundation/src/log/log_history.test.ts index 5c63351fafae..4db9e5276f45 100644 --- a/yarn-project/foundation/src/log/log_history.test.ts +++ b/yarn-project/foundation/src/log/log_history.test.ts @@ -6,7 +6,7 @@ import { LogHistory } from './log_history.js'; jest.useFakeTimers({ doNotFake: ['performance'] }); describe('log history', () => { - let debug: (...any: any) => void; + let debug: (msg: string) => void; let logHistory: LogHistory; const timestemp = new Date().toISOString(); const name = 'test:a'; @@ -21,18 +21,18 @@ describe('log history', () => { logHistory.enable(); expect(logHistory.getLogs()).toEqual([]); debug('0'); - debug('1', 2); - debug('2', { key: ['value'] }, Buffer.alloc(2)); + debug('1'); + debug('2'); expect(logHistory.getLogs()).toEqual([ [timestemp, name, '0'], - [timestemp, name, '1', 2], - [timestemp, name, '2', { key: ['value'] }, Buffer.alloc(2)], + [timestemp, name, '1'], + [timestemp, name, '2'], ]); }); it('does not keep logs if not enabled', () => { debug('0'); - debug('1', 2); + debug('1'); expect(logHistory.getLogs()).toEqual([]); }); @@ -59,12 +59,12 @@ describe('log history', () => { expect(logHistory.getLogs()).toEqual([[timestemp, name, '0']]); enableLogs(`${name},${name2}`); - debug('1', 2); - debug2('one', 3); + debug('1'); + debug2('one'); expect(logHistory.getLogs()).toEqual([ [timestemp, name, '0'], - [timestemp, name, '1', 2], - [timestemp, name2, 'one', 3], + [timestemp, name, '1'], + [timestemp, name2, 'one'], ]); }); diff --git a/yarn-project/foundation/src/log/logger.ts b/yarn-project/foundation/src/log/logger.ts index 4e1d0c0f06d1..c65ba3f077ef 
100644 --- a/yarn-project/foundation/src/log/logger.ts +++ b/yarn-project/foundation/src/log/logger.ts @@ -2,24 +2,27 @@ import debug from 'debug'; import isNode from 'detect-node'; import { isatty } from 'tty'; -import { LogFn } from './index.js'; +import { LogData, LogFn } from './index.js'; // Matches a subset of Winston log levels const LogLevels = ['silent', 'error', 'warn', 'info', 'verbose', 'debug'] as const; -const DefaultLogLevel = 'info' as const; +const DefaultLogLevel = process.env.NODE_ENV === 'test' ? ('silent' as const) : ('info' as const); /** * A valid log severity level. */ -type LogLevel = (typeof LogLevels)[number]; +export type LogLevel = (typeof LogLevels)[number]; const envLogLevel = process.env.LOG_LEVEL?.toLowerCase() as LogLevel; const currentLevel = LogLevels.includes(envLogLevel) ? envLogLevel : DefaultLogLevel; +/** Log function that accepts an exception object */ +type ErrorLogFn = (msg: string, err?: Error | unknown, data?: LogData) => void; + /** * Logger that supports multiple severity levels. */ -export type Logger = { [K in LogLevel]: LogFn }; +export type Logger = { [K in LogLevel]: LogFn } & { /** Error log function */ error: ErrorLogFn }; /** * Logger that supports multiple severity levels and can be called directly to issue a debug statement. 
@@ -40,17 +43,17 @@ export function createDebugLogger(name: string): DebugLogger { const logger = { silent: () => {}, - error: (...args: any[]) => logWithDebug(debugLogger, 'error', args), - warn: (...args: any[]) => logWithDebug(debugLogger, 'warn', args), - info: (...args: any[]) => logWithDebug(debugLogger, 'info', args), - verbose: (...args: any[]) => logWithDebug(debugLogger, 'verbose', args), - debug: (...args: any[]) => logWithDebug(debugLogger, 'debug', args), + error: (msg: string, err?: unknown, data?: LogData) => logWithDebug(debugLogger, 'error', fmtErr(msg, err), data), + warn: (msg: string, data?: LogData) => logWithDebug(debugLogger, 'warn', msg, data), + info: (msg: string, data?: LogData) => logWithDebug(debugLogger, 'info', msg, data), + verbose: (msg: string, data?: LogData) => logWithDebug(debugLogger, 'verbose', msg, data), + debug: (msg: string, data?: LogData) => logWithDebug(debugLogger, 'debug', msg, data), }; - return Object.assign((...args: any[]) => logWithDebug(debugLogger, 'debug', args), logger); + return Object.assign((msg: string, data?: LogData) => logWithDebug(debugLogger, 'debug', msg, data), logger); } /** A callback to capture all logs. */ -export type LogHandler = (level: LogLevel, namespace: string, args: any[]) => void; +export type LogHandler = (level: LogLevel, namespace: string, msg: string, data?: LogData) => void; const logHandlers: LogHandler[] = []; @@ -68,14 +71,16 @@ export function onLog(handler: LogHandler) { * @param level - Intended log level. * @param args - Args to log. */ -function logWithDebug(debug: debug.Debugger, level: LogLevel, args: any[]) { +function logWithDebug(debug: debug.Debugger, level: LogLevel, msg: string, data?: LogData) { for (const handler of logHandlers) { - handler(level, debug.namespace, args); + handler(level, debug.namespace, msg, data); } + + const msgWithData = data ? 
`${msg} ${fmtLogData(data)}` : msg; if (debug.enabled) { - debug(args[0], ...args.slice(1)); - } else if (LogLevels.indexOf(level) <= LogLevels.indexOf(currentLevel) && process.env.NODE_ENV !== 'test') { - printLog([getPrefix(debug, level), ...args]); + debug(msgWithData); + } else if (LogLevels.indexOf(level) <= LogLevels.indexOf(currentLevel)) { + printLog(`${getPrefix(debug, level)} ${msgWithData}`); } } @@ -96,9 +101,30 @@ function getPrefix(debugLogger: debug.Debugger, level: LogLevel) { /** * Outputs to console error. - * @param args - Args to log. + * @param msg - What to log. */ -function printLog(args: any[]) { +function printLog(msg: string) { // eslint-disable-next-line no-console - console.error(...args); + console.error(msg); +} + +/** + * Concatenates a log message and an exception. + * @param msg - Log message + * @param err - Error to log + * @returns A string with both the log message and the error message. + */ +function fmtErr(msg: string, err?: Error | unknown): string { + const errStr = err && [(err as Error).name, (err as Error).message].filter(x => !!x).join(' '); + return err ? `${msg}: ${errStr || err}` : msg; +} + +/** + * Formats structured log data as a string for console output. + * @param data - Optional log data. + */ +function fmtLogData(data?: LogData): string { + return Object.entries(data ?? 
{}) + .map(([key, value]) => `${key}=${value}`) + .join(' '); } diff --git a/yarn-project/foundation/src/transport/dispatch/create_dispatch_fn.ts b/yarn-project/foundation/src/transport/dispatch/create_dispatch_fn.ts index fef2b98bb2e4..43276da87c4b 100644 --- a/yarn-project/foundation/src/transport/dispatch/create_dispatch_fn.ts +++ b/yarn-project/foundation/src/transport/dispatch/create_dispatch_fn.ts @@ -1,3 +1,5 @@ +import { format } from 'util'; + import { createDebugLogger } from '../../log/index.js'; /** @@ -27,7 +29,7 @@ export interface DispatchMsg { export function createDispatchFn(targetFn: () => any, debug = createDebugLogger('aztec:foundation:dispatch')) { return async ({ fn, args }: DispatchMsg) => { const target = targetFn(); - debug(`dispatching to ${target}: ${fn}`, args); + debug(format(`dispatching to ${target}: ${fn}`, args)); return await target[fn](...args); }; } diff --git a/yarn-project/foundation/src/transport/transport_client.ts b/yarn-project/foundation/src/transport/transport_client.ts index 5fc5b631da24..3fb2fd836d53 100644 --- a/yarn-project/foundation/src/transport/transport_client.ts +++ b/yarn-project/foundation/src/transport/transport_client.ts @@ -1,4 +1,5 @@ import EventEmitter from 'events'; +import { format } from 'util'; import { createDebugLogger } from '../log/index.js'; import { EventMessage, ResponseMessage, isEventMessage } from './dispatch/messages.js'; @@ -91,7 +92,7 @@ export class TransportClient extends EventEmitter { } const msgId = this.msgId++; const msg = { msgId, payload }; - debug(`->`, msg); + debug(format(`->`, msg)); return new Promise((resolve, reject) => { this.pendingRequests.push({ resolve, reject, msgId }); this.socket!.send(msg, transfer).catch(reject); @@ -111,7 +112,7 @@ export class TransportClient extends EventEmitter { this.close(); return; } - debug(`<-`, msg); + debug(format(`<-`, msg)); if (isEventMessage(msg)) { this.emit('event_msg', msg.payload); return; diff --git 
a/yarn-project/foundation/src/wasm/wasm_module.ts b/yarn-project/foundation/src/wasm/wasm_module.ts index c673cd786c59..3a3490a90e8e 100644 --- a/yarn-project/foundation/src/wasm/wasm_module.ts +++ b/yarn-project/foundation/src/wasm/wasm_module.ts @@ -149,9 +149,9 @@ export class WasmModule implements IWasmModule { */ public addLogger(logger: LogFn) { const oldDebug = this.debug; - this.debug = (...args: any[]) => { - logger(...args); - oldDebug(...args); + this.debug = (msg: string) => { + logger(msg); + oldDebug(msg); }; } diff --git a/yarn-project/noir-contracts/src/scripts/copy_output.ts b/yarn-project/noir-contracts/src/scripts/copy_output.ts index 53cf474a8dfa..15b5dede4461 100644 --- a/yarn-project/noir-contracts/src/scripts/copy_output.ts +++ b/yarn-project/noir-contracts/src/scripts/copy_output.ts @@ -12,6 +12,7 @@ import omit from 'lodash.omit'; import snakeCase from 'lodash.snakecase'; import upperFirst from 'lodash.upperfirst'; import { join as pathJoin } from 'path'; +import { format } from 'util'; // const STATEMENT_TYPES = ['type', 'params', 'return'] as const; const log = createConsoleLogger('aztec:noir-contracts'); @@ -103,6 +104,6 @@ const main = () => { try { main(); } catch (err: unknown) { - log(err); + log(format(`Error copying build output`, err)); process.exit(1); } diff --git a/yarn-project/p2p/src/bootstrap/bootstrap.ts b/yarn-project/p2p/src/bootstrap/bootstrap.ts index e57e95962634..5c3dc673f062 100644 --- a/yarn-project/p2p/src/bootstrap/bootstrap.ts +++ b/yarn-project/p2p/src/bootstrap/bootstrap.ts @@ -8,6 +8,7 @@ import { mplex } from '@libp2p/mplex'; import { tcp } from '@libp2p/tcp'; import { Libp2p, Libp2pOptions, ServiceFactoryMap, createLibp2p } from 'libp2p'; import { identifyService } from 'libp2p/identify'; +import { format } from 'util'; import { P2PConfig } from '../config.js'; import { createLibP2PPeerId } from '../index.js'; @@ -74,15 +75,15 @@ export class BootstrapNode { }); this.node.addEventListener('peer:discovery', 
evt => { - this.logger('Discovered %s', evt.detail.id.toString()); // Log discovered peer + this.logger(format('Discovered %s', evt.detail.id.toString())); // Log discovered peer }); this.node.addEventListener('peer:connect', evt => { - this.logger('Connected to %s', evt.detail.toString()); // Log connected peer + this.logger(format('Connected to %s', evt.detail.toString())); // Log connected peer }); this.node.addEventListener('peer:disconnect', evt => { - this.logger('Disconnected from %s', evt.detail.toString()); // Log connected peer + this.logger(format('Disconnected from %s', evt.detail.toString())); // Log connected peer }); } diff --git a/yarn-project/pxe/src/synchronizer/synchronizer.ts b/yarn-project/pxe/src/synchronizer/synchronizer.ts index 8a6796de435a..d3deb73eab90 100644 --- a/yarn-project/pxe/src/synchronizer/synchronizer.ts +++ b/yarn-project/pxe/src/synchronizer/synchronizer.ts @@ -128,7 +128,7 @@ export class Synchronizer { this.synchedToBlock = latestBlock.block.number; } catch (err) { - this.log.error(err); + this.log.error(`Error in synchronizer work`, err); await this.interruptableSleep.sleep(retryInterval); } } @@ -182,7 +182,7 @@ export class Synchronizer { this.noteProcessors.push(noteProcessor); } } catch (err) { - this.log.error(err); + this.log.error(`Error in synchronizer workNoteProcessorCatchUp`, err); await this.interruptableSleep.sleep(retryInterval); } } diff --git a/yarn-project/sequencer-client/src/publisher/index.ts b/yarn-project/sequencer-client/src/publisher/index.ts index 9fd03d258802..8f4a3a3b3e13 100644 --- a/yarn-project/sequencer-client/src/publisher/index.ts +++ b/yarn-project/sequencer-client/src/publisher/index.ts @@ -5,6 +5,34 @@ import { ViemTxSender } from './viem-tx-sender.js'; export { L1Publisher } from './l1-publisher.js'; export { PublisherConfig } from './config.js'; +/** Stats logged for each L1 rollup publish tx.*/ +export type L1PublishStats = { + /** Name of the event for metrics purposes */ + eventName: 
'rollup-published-to-l1'; + /** Effective gas price of the tx. */ + gasPrice: bigint; + /** Effective gas used in the tx. */ + gasUsed: bigint; + /** Hash of the L1 tx. */ + transactionHash: string; + /** Gas cost of the calldata. */ + calldataGas: number; + /** Size in bytes of the calldata. */ + calldataSize: number; + /** Number of txs in the L2 block. */ + txCount: number; + /** Number of the L2 block. */ + blockNumber: number; + /** Number of encrypted logs. */ + encryptedLogCount: number; + /** Number of unencrypted logs. */ + unencryptedLogCount: number; + /** Serialised size of encrypted logs. */ + encryptedLogSize: number; + /** Serialised size of unencrypted logs. */ + unencryptedLogSize: number; +}; + /** * Returns a new instance of the L1Publisher. * @param config - Configuration to initialize the new instance. diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts index 63b34ebc6246..df3c48e550c5 100644 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts +++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts @@ -21,7 +21,7 @@ describe('L1Publisher', () => { txSender = mock(); txHash = `0x${Buffer.from('txHash').toString('hex')}`; // random tx hash - txReceipt = { transactionHash: txHash, status: true }; + txReceipt = { transactionHash: txHash, status: true } as MinimalTransactionReceipt; txSender.sendProcessTx.mockResolvedValueOnce(txHash); txSender.getTransactionReceipt.mockResolvedValueOnce(txReceipt); diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts index e22880c6cc6c..62b566b80376 100644 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts +++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts @@ -2,21 +2,36 @@ import { createDebugLogger } from '@aztec/foundation/log'; import { InterruptableSleep } from 
'@aztec/foundation/sleep'; import { ExtendedContractData, L2Block } from '@aztec/types'; +import pick from 'lodash.pick'; + import { L2BlockReceiver } from '../receiver.js'; import { PublisherConfig } from './config.js'; +import { L1PublishStats } from './index.js'; + +/** + * Stats for a sent transaction. + */ +export type TransactionStats = { + /** Hash of the transaction. */ + transactionHash: string; + /** Size in bytes of the tx calldata */ + calldataSize: number; + /** Gas required to pay for the calldata inclusion (depends on size and number of zeros) */ + calldataGas: number; +}; /** * Minimal information from a tx receipt returned by an L1PublisherTxSender. */ export type MinimalTransactionReceipt = { - /** - * True if the tx was successful, false if reverted. - */ + /** True if the tx was successful, false if reverted. */ status: boolean; - /** - * Hash of the transaction. - */ + /** Hash of the transaction. */ transactionHash: string; + /** Effective gas used by the tx */ + gasUsed: bigint; + /** Effective gas price paid by the tx */ + gasPrice: bigint; }; /** @@ -38,10 +53,7 @@ export interface L1PublisherTxSender { * @param publicKeys - The public keys of the deployed contract * @param newExtendedContractData - Data to publish. * @returns The hash of the mined tx. - * @remarks Partial addresses, public keys and contract data has to be in the same order. - * @remarks See the link bellow for more info on partial address and public key: - * https://github.com/AztecProtocol/aztec-packages/blob/master/docs/docs/concepts/foundation/accounts/keys.md#addresses-partial-addresses-and-public-keys - * TODO: replace the link above with the link to deployed docs + * @remarks Partial addresses, public keys and contract data has to be in the same order. Read more {@link https://docs.aztec.network/concepts/foundation/accounts/keys#addresses-partial-addresses-and-public-keys | here}. 
*/ sendEmitContractDeploymentTx( l2BlockNum: number, @@ -55,6 +67,12 @@ export interface L1PublisherTxSender { * @returns Undefined if the tx hasn't been mined yet, the receipt otherwise. */ getTransactionReceipt(txHash: string): Promise; + + /** + * Returns info on a tx by calling eth_getTransaction. + * @param txHash - Hash of the tx to look for. + */ + getTransactionStats(txHash: string): Promise; } /** @@ -122,7 +140,22 @@ export class L1Publisher implements L2BlockReceiver { if (!receipt) break; // Tx was mined successfully - if (receipt.status) return true; + if (receipt.status) { + const tx = await this.txSender.getTransactionStats(txHash); + const stats: L1PublishStats = { + ...pick(receipt, 'gasPrice', 'gasUsed', 'transactionHash'), + ...pick(tx!, 'calldataGas', 'calldataSize'), + txCount: l2BlockData.numberOfTxs, + blockNumber: l2BlockData.number, + encryptedLogCount: l2BlockData.newEncryptedLogs?.getTotalLogCount() ?? 0, + unencryptedLogCount: l2BlockData.newUnencryptedLogs?.getTotalLogCount() ?? 0, + encryptedLogSize: l2BlockData.newEncryptedLogs?.getSerializedLength() ?? 0, + unencryptedLogSize: l2BlockData.newUnencryptedLogs?.getSerializedLength() ?? 0, + eventName: 'rollup-published-to-l1', + }; + this.log.info(`Published L2 block to L1 rollup contract`, stats); + return true; + } // Check if someone else incremented the block number if (!(await this.checkNextL2BlockNum(l2BlockData.number))) { @@ -185,13 +218,18 @@ export class L1Publisher implements L2BlockReceiver { * Calling `interrupt` will cause any in progress call to `publishRollup` to return `false` asap. * Be warned, the call may return false even if the tx subsequently gets successfully mined. * In practice this shouldn't matter, as we'll only ever be calling `interrupt` when we know it's going to fail. - * A call to `clearInterrupt` is required before you can continue publishing. + * A call to `restart` is required before you can continue publishing. 
*/ public interrupt() { this.interrupted = true; this.interruptableSleep.interrupt(); } + /** Restarts the publisher after calling `interrupt`. */ + public restart() { + this.interrupted = false; + } + // TODO: Check fee distributor has at least 0.5 ETH. // Related to https://github.com/AztecProtocol/aztec-packages/issues/1588 // eslint-disable-next-line require-await @@ -210,7 +248,7 @@ export class L1Publisher implements L2BlockReceiver { try { return await this.txSender.sendProcessTx(encodedData); } catch (err) { - this.log(`ROLLUP PUBLISH FAILED`, err); + this.log.error(`Rollup publish failed`, err); return undefined; } } diff --git a/yarn-project/sequencer-client/src/publisher/viem-tx-sender.ts b/yarn-project/sequencer-client/src/publisher/viem-tx-sender.ts index 2e087b723fe5..f8df1b805b0b 100644 --- a/yarn-project/sequencer-client/src/publisher/viem-tx-sender.ts +++ b/yarn-project/sequencer-client/src/publisher/viem-tx-sender.ts @@ -13,13 +13,19 @@ import { createWalletClient, getAddress, getContract, + hexToBytes, http, } from 'viem'; import { PrivateKeyAccount, privateKeyToAccount } from 'viem/accounts'; import * as chains from 'viem/chains'; import { TxSenderConfig } from './config.js'; -import { L1PublisherTxSender, MinimalTransactionReceipt, L1ProcessArgs as ProcessTxArgs } from './l1-publisher.js'; +import { + L1PublisherTxSender, + MinimalTransactionReceipt, + L1ProcessArgs as ProcessTxArgs, + TransactionStats, +} from './l1-publisher.js'; /** * Pushes transactions to the L1 rollup contract using viem. 
@@ -69,6 +75,17 @@ export class ViemTxSender implements L1PublisherTxSender { }); } + async getTransactionStats(txHash: string): Promise { + const tx = await this.publicClient.getTransaction({ hash: txHash as Hex }); + if (!tx) return undefined; + const calldata = hexToBytes(tx.input); + return { + transactionHash: tx.hash, + calldataSize: calldata.length, + calldataGas: getCalldataGasUsage(calldata), + }; + } + /** * Returns a tx receipt if the tx has been mined. * @param txHash - Hash of the tx to look for. @@ -79,16 +96,16 @@ export class ViemTxSender implements L1PublisherTxSender { hash: txHash as Hex, }); - // TODO: check for confirmations - if (receipt) { return { status: receipt.status === 'success', transactionHash: txHash, + gasUsed: receipt.gasUsed, + gasPrice: receipt.effectiveGasPrice, }; } - this.log('Receipt not found for tx hash', txHash); + this.log(`Receipt not found for tx hash ${txHash}`); return undefined; } @@ -165,3 +182,12 @@ export class ViemTxSender implements L1PublisherTxSender { throw new Error(`Chain with id ${chainId} not found`); } } + +/** + * Returns cost of calldata usage in Ethereum. + * @param data - Calldata. + * @returns 4 for each zero byte, 16 for each nonzero. 
+ */ +function getCalldataGasUsage(data: Uint8Array) { + return data.filter(byte => byte === 0).length * 4 + data.filter(byte => byte !== 0).length * 16; +} diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.ts index 487b227f5d16..4398545fc30c 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.ts @@ -53,6 +53,7 @@ export class Sequencer { if (config.minTxsPerBlock) { this.minTxsPerBLock = config.minTxsPerBlock; } + this.log(`Initialized sequencer with ${this.minTxsPerBLock}-${this.maxTxsPerBlock} txs per block.`); } /** @@ -82,6 +83,7 @@ export class Sequencer { */ public restart() { this.log('Restarting sequencer'); + this.publisher.restart(); this.runningPromise!.start(); this.state = SequencerState.IDLE; } @@ -119,17 +121,16 @@ export class Sequencer { // Get txs to build the new block const pendingTxs = await this.p2pClient.getTxs(); if (pendingTxs.length < this.minTxsPerBLock) return; + this.log.info(`Retrieved ${pendingTxs.length} txs from P2P pool`); // Filter out invalid txs // TODO: It should be responsibility of the P2P layer to validate txs before passing them on here const validTxs = await this.takeValidTxs(pendingTxs); - if (validTxs.length < this.minTxsPerBLock) { - return; - } + if (validTxs.length < this.minTxsPerBLock) return; const blockNumber = (await this.l2BlockSource.getBlockNumber()) + 1; - this.log.info(`Building block ${blockNumber} with ${validTxs.length} transactions...`); + this.log.info(`Building block ${blockNumber} with ${validTxs.length} transactions`); this.state = SequencerState.CREATING_BLOCK; const newGlobalVariables = await this.globalsBuilder.buildGlobalVariables(new Fr(blockNumber)); @@ -173,8 +174,7 @@ export class Sequencer { await this.publishL2Block(block); this.log.info(`Submitted rollup block ${block.number} with ${processedValidTxs.length} transactions`); } catch (err) 
{ - this.log.error(err); - this.log.error(`Rolling back world state DB`); + this.log.error(`Rolling back world state DB due to error assembling block`, err); await this.worldState.getLatest().rollback(); } } diff --git a/yarn-project/types/src/l2_block_downloader/l2_block_downloader.ts b/yarn-project/types/src/l2_block_downloader/l2_block_downloader.ts index ccb051cd70ef..5b0f60d6049f 100644 --- a/yarn-project/types/src/l2_block_downloader/l2_block_downloader.ts +++ b/yarn-project/types/src/l2_block_downloader/l2_block_downloader.ts @@ -43,7 +43,7 @@ export class L2BlockDownloader { await this.jobQueue.put(() => this.collectBlocks()); await this.interruptableSleep.sleep(this.pollIntervalMS); } catch (err) { - log.error(err); + log.error(`Error downloading L2 block`, err); await this.interruptableSleep.sleep(this.pollIntervalMS); } } diff --git a/yarn-project/types/src/logs/l2_block_l2_logs.ts b/yarn-project/types/src/logs/l2_block_l2_logs.ts index c37e61178ade..808062964ee5 100644 --- a/yarn-project/types/src/logs/l2_block_l2_logs.ts +++ b/yarn-project/types/src/logs/l2_block_l2_logs.ts @@ -31,6 +31,19 @@ export class L2BlockL2Logs { return this.txLogs.reduce((acc, logs) => acc + logs.getSerializedLength(), 0) + 4; } + /** + * Gets the total number of logs emitted from all the TxL2Logs. + */ + public getTotalLogCount(): number { + let count = 0; + for (const txLog of this.txLogs) { + for (const functionLog of txLog.functionLogs) { + count += functionLog.logs.length; + } + } + return count; + } + /** * Deserializes logs from a buffer. * @param buffer - The buffer containing the serialized logs. 
diff --git a/yarn-project/yarn.lock b/yarn-project/yarn.lock index ebc867c97db0..cf21ff9d41ac 100644 --- a/yarn-project/yarn.lock +++ b/yarn-project/yarn.lock @@ -379,11 +379,11 @@ __metadata: "@jest/globals": ^29.5.0 "@noble/curves": ^1.0.0 "@rushstack/eslint-patch": ^1.1.4 + "@types/fs-extra": ^11.0.2 "@types/jest": ^29.5.0 "@types/koa": ^2.13.9 "@types/koa-static": ^4.0.2 "@types/levelup": ^5.1.2 - "@types/lodash.compact": ^3.0.7 "@types/lodash.every": ^4.6.7 "@types/lodash.times": ^4.3.7 "@types/lodash.zip": ^4.2.7 @@ -408,6 +408,7 @@ __metadata: tslib: ^2.4.0 typescript: ^5.0.4 viem: ^1.2.5 + winston: ^3.10.0 languageName: unknown linkType: soft @@ -4273,6 +4274,16 @@ __metadata: languageName: node linkType: hard +"@types/fs-extra@npm:^11.0.2": + version: 11.0.2 + resolution: "@types/fs-extra@npm:11.0.2" + dependencies: + "@types/jsonfile": "*" + "@types/node": "*" + checksum: 5b3e30343ee62d2e393e1029355f13f64bab6f3416226e22492483f99da840e2e53ca22cbfa4ac3749f2f83f7086d19c009005c8fa175da01df0fae59c2d73e1 + languageName: node + linkType: hard + "@types/graceful-fs@npm:^4.1.3": version: 4.1.6 resolution: "@types/graceful-fs@npm:4.1.6"