Skip to content

Commit

Permalink
Measure history processing time
Browse files Browse the repository at this point in the history
  • Loading branch information
spalladino committed Oct 6, 2023
1 parent 23e5229 commit abd042e
Show file tree
Hide file tree
Showing 12 changed files with 302 additions and 84 deletions.
23 changes: 18 additions & 5 deletions .circleci/config.yml
Original file line number Diff line number Diff line change
Expand Up @@ -955,9 +955,20 @@ jobs:
name: "Benchmark"
command: cond_run_script end-to-end ./scripts/run_tests_local benchmarks/bench_publish_rollup.test.ts
environment:
{
DEBUG: "aztec:benchmarks:*,aztec:sequencer,aztec:world_state,aztec:merkle_trees",
}
DEBUG: "aztec:benchmarks:*,aztec:sequencer,aztec:sequencer:*,aztec:world_state,aztec:merkle_trees"

bench-process-history:
machine:
image: ubuntu-2204:2023.07.2
resource_class: large
steps:
- *checkout
- *setup_env
- run:
name: "Benchmark"
command: cond_run_script end-to-end ./scripts/run_tests_local benchmarks/bench_process_history.test.ts
environment:
DEBUG: "aztec:benchmarks:*,aztec:sequencer,aztec:sequencer:*,aztec:world_state,aztec:merkle_trees"

build-docs:
machine:
Expand Down Expand Up @@ -1307,6 +1318,7 @@ workflows:
- guides-sample-dapp: *e2e_test
- guides-up-quick-start: *e2e_test
- bench-publish-rollup: *e2e_test
- bench-process-history: *e2e_test

- e2e-end:
requires:
Expand Down Expand Up @@ -1344,15 +1356,16 @@ workflows:
- guides-dapp-testing
- guides-sample-dapp
- guides-up-quick-start
- bench-publish-rollup
<<: *defaults

- bench-summary:
requires:
- e2e-end
- bench-publish-rollup
- bench-process-history
<<: *defaults

# Deployment and Canary tests
# Deployment and Canary tests
- deploy-dockerhub:
requires:
- e2e-end
Expand Down
29 changes: 25 additions & 4 deletions scripts/ci/aggregate_e2e_benchmark.js
Original file line number Diff line number Diff line change
Expand Up @@ -31,8 +31,14 @@ const {
L2_BLOCK_BUILD_TIME,
L2_BLOCK_ROLLUP_SIMULATION_TIME,
L2_BLOCK_PUBLIC_TX_PROCESS_TIME,
NODE_HISTORY_SYNC_TIME,
NODE_SYNCED_CHAIN,
NOTE_HISTORY_TRIAL_DECRYPTING_TIME,
NOTE_HISTORY_SUCCESSFUL_DECRYPTING_TIME,
ROLLUP_SIZES,
CHAIN_LENGTHS,
BENCHMARK_FILE_JSON,
BLOCK_SIZE,
} = require("./benchmark_shared.js");

// Folder where to load logs from
Expand Down Expand Up @@ -83,13 +89,17 @@ function processCircuitSimulation(entry, results) {
}

// Processes an entry with event name 'note-processor-caught-up' and updates results
// Processes an entry with event name 'note-processor-caught-up' and updates results.
// Buckets are rollup sizes for the NOTE_*_DECRYPTING_TIME metrics, or chain lengths
// for the NOTE_HISTORY_*_DECRYPTING_TIME metrics. An entry with zero decrypted notes
// counts as a trial decryption (account saw blocks but owned no notes in them).
function processNoteProcessorCaughtUp(entry, results) {
  const { seen, decrypted, blocks, duration } = entry;
  if (ROLLUP_SIZES.includes(decrypted))
    append(results, NOTE_SUCCESSFUL_DECRYPTING_TIME, decrypted, duration);
  if (ROLLUP_SIZES.includes(seen) && decrypted === 0)
    append(results, NOTE_TRIAL_DECRYPTING_TIME, seen, duration);
  if (CHAIN_LENGTHS.includes(blocks) && decrypted > 0)
    append(results, NOTE_HISTORY_SUCCESSFUL_DECRYPTING_TIME, blocks, duration);
  if (CHAIN_LENGTHS.includes(blocks) && decrypted === 0)
    append(results, NOTE_HISTORY_TRIAL_DECRYPTING_TIME, blocks, duration);
}

// Processes an entry with event name 'l2-block-built' and updates results
Expand All @@ -112,6 +122,15 @@ function processL2BlockBuilt(entry, results) {
);
}

// Processes entries with event name node-synced-chain-history emitted by benchmark tests.
// Buckets are chain lengths; entries are only recorded when the block size matches the
// standard BLOCK_SIZE so that samples across runs are comparable.
function processNodeSyncedChain(entry, results) {
  const { blockCount, txsPerBlock, duration } = entry;
  const isTrackedLength = CHAIN_LENGTHS.includes(blockCount);
  if (isTrackedLength && txsPerBlock === BLOCK_SIZE) {
    append(results, NODE_HISTORY_SYNC_TIME, blockCount, duration);
  }
}

// Processes a parsed entry from a logfile and updates results
function processEntry(entry, results) {
switch (entry.eventName) {
Expand All @@ -125,6 +144,8 @@ function processEntry(entry, results) {
return processNoteProcessorCaughtUp(entry, results);
case L2_BLOCK_BUILT:
return processL2BlockBuilt(entry, results);
case NODE_SYNCED_CHAIN:
return processNodeSyncedChain(entry, results);
default:
return;
}
Expand Down
20 changes: 18 additions & 2 deletions scripts/ci/benchmark_shared.js
Original file line number Diff line number Diff line change
@@ -1,8 +1,16 @@
// Block sizes to track (duplicated from yarn-project/end-to-end/src/benchmarks/bench_publish_rollup.test.ts)
const ROLLUP_SIZES = process.env.ROLLUP_SIZES
  ? process.env.ROLLUP_SIZES.split(",").map(Number)
  : [8, 32, 128];

// Block size to use for building chains of multiple length (duplicated from yarn-project/end-to-end/src/benchmarks/bench_process_history.test.ts)
const BLOCK_SIZE = process.env.BLOCK_SIZE ? +process.env.BLOCK_SIZE : 32;

// Chain lengths to test (duplicated from yarn-project/end-to-end/src/benchmarks/bench_process_history.test.ts)
const CHAIN_LENGTHS = process.env.CHAIN_LENGTHS
  ? process.env.CHAIN_LENGTHS.split(",").map(Number)
  : [10, 20, 50];

// Output file for the aggregated benchmark results (JSON)
const BENCHMARK_FILE_JSON = process.env.BENCHMARK_FILE_JSON ?? "benchmark.json";

Expand All @@ -16,17 +24,25 @@ module.exports = {
CIRCUIT_INPUT_SIZE: "circuit_input_size_in_bytes",
CIRCUIT_OUTPUT_SIZE: "circuit_output_size_in_bytes",
NOTE_SUCCESSFUL_DECRYPTING_TIME: "note_successful_decrypting_time_in_ms",
NOTE_TRIAL_DECRYPTING_TIME: "note_unsuccessful_decrypting_time_in_ms",
NOTE_TRIAL_DECRYPTING_TIME: "note_trial_decrypting_time_in_ms",
L2_BLOCK_BUILD_TIME: "l2_block_building_time_in_ms",
L2_BLOCK_ROLLUP_SIMULATION_TIME: "l2_block_rollup_simulation_time_in_ms",
L2_BLOCK_PUBLIC_TX_PROCESS_TIME: "l2_block_public_tx_process_time_in_ms",
NODE_HISTORY_SYNC_TIME: "node_history_sync_time_in_ms",
NOTE_HISTORY_SUCCESSFUL_DECRYPTING_TIME:
"note_history_successful_decrypting_time_in_ms",
NOTE_HISTORY_TRIAL_DECRYPTING_TIME:
"note_history_trial_decrypting_time_in_ms",
// Events to track
L2_BLOCK_PUBLISHED_TO_L1: "rollup-published-to-l1",
L2_BLOCK_SYNCED: "l2-block-handled",
L2_BLOCK_BUILT: "l2-block-built",
CIRCUIT_SIMULATED: "circuit-simulation",
NOTE_PROCESSOR_CAUGHT_UP: "note-processor-caught-up",
NODE_SYNCED_CHAIN: "node-synced-chain-history",
// Other
ROLLUP_SIZES,
BLOCK_SIZE,
CHAIN_LENGTHS,
BENCHMARK_FILE_JSON,
};
Original file line number Diff line number Diff line change
@@ -0,0 +1,85 @@
// Benchmarks how long a brand-new node and a brand-new PXE take to sync an
// existing chain. For each checkpoint in CHAIN_LENGTHS, the test extends the
// chain with BLOCK_SIZE-tx blocks, then spins up a fresh node and PXE from
// scratch and reports their sync times via structured log events.
import { AztecNodeService } from '@aztec/aztec-node';
import { Fr, GrumpkinScalar } from '@aztec/circuits.js';
import { sleep } from '@aztec/foundation/sleep';
import { elapsed } from '@aztec/foundation/timer';
import { BenchmarkingContract } from '@aztec/noir-contracts/types';
import { SequencerClient } from '@aztec/sequencer-client';
import { INITIAL_L2_BLOCK_NUM } from '@aztec/types';

import { EndToEndContext } from '../fixtures/utils.js';
import { benchmarkSetup, sendTxs, waitNewPXESynced, waitRegisteredAccountSynced } from './utils.js';

// Txs per block (keep in sync with scripts/ci/benchmark_shared.js).
const BLOCK_SIZE = process.env.BLOCK_SIZE ? +process.env.BLOCK_SIZE : 32;
// Chain-length checkpoints at which sync time is measured — assumed ascending.
const CHAIN_LENGTHS = process.env.CHAIN_LENGTHS ? process.env.CHAIN_LENGTHS.split(',').map(Number) : [10, 20, 50];
const MAX_CHAIN_LENGTH = CHAIN_LENGTHS[CHAIN_LENGTHS.length - 1];
const SETUP_BLOCK_COUNT = 2; // deploy account + deploy contract

describe('benchmarks/process_history', () => {
  let context: EndToEndContext;
  let contract: BenchmarkingContract;
  let sequencer: SequencerClient;

  beforeEach(async () => {
    // benchmarkSetup deploys the account and benchmarking contract (the two
    // SETUP_BLOCK_COUNT blocks) and hands back the sequencer client.
    ({ context, contract, sequencer } = await benchmarkSetup({ maxTxsPerBlock: BLOCK_SIZE }));
  }, 60_000);

  it(
    `processes chain history of ${MAX_CHAIN_LENGTH} with ${BLOCK_SIZE}-tx blocks`,
    async () => {
      // Ensure each block has exactly BLOCK_SIZE txs
      sequencer.updateSequencerConfig({ minTxsPerBlock: BLOCK_SIZE });
      let lastBlock = 0;

      // The chain is grown incrementally: each iteration only mines the delta
      // between the previous checkpoint and the next one.
      for (const chainLength of CHAIN_LENGTHS) {
        // Send enough txs to move the chain to the next block number checkpoint
        const txCount = (chainLength - lastBlock) * BLOCK_SIZE;
        const sentTxs = await sendTxs(txCount, context, contract);
        // Waiting on the last tx implies all earlier ones were mined too.
        await sentTxs[sentTxs.length - 1].wait({ timeout: 5 * 60_000 });
        await sleep(100);

        // Create a new node and measure how much time it takes it to sync
        const [nodeSyncTime, node] = await elapsed(async () => {
          const node = await AztecNodeService.createAndSync({ ...context.config, disableSequencer: true });
          await node.getTreeRoots();
          return node;
        });

        const blockNumber = await node.getBlockNumber();
        expect(blockNumber).toEqual(chainLength + SETUP_BLOCK_COUNT);

        // Structured event consumed by scripts/ci/aggregate_e2e_benchmark.js
        // (processNodeSyncedChain buckets by blockCount).
        context.logger(`Node synced chain up to block ${chainLength}`, {
          eventName: 'node-synced-chain-history',
          txCount: BLOCK_SIZE * chainLength,
          txsPerBlock: BLOCK_SIZE,
          duration: nodeSyncTime,
          blockNumber,
          blockCount: chainLength,
        });

        // Create a new pxe and measure how much time it takes it to sync with failed and successful decryption
        // Skip the first two blocks used for setup (create account contract and deploy benchmarking contract)
        context.logger(`Starting new pxe`);
        const pxe = await waitNewPXESynced(node, contract, INITIAL_L2_BLOCK_NUM + SETUP_BLOCK_COUNT);

        // Register the owner account and wait until it's synced so we measure how much time it took
        context.logger(`Registering owner account on new pxe`);
        const partialAddress = context.wallet.getCompleteAddress().partialAddress;
        const privateKey = context.wallet.getEncryptionPrivateKey();
        await waitRegisteredAccountSynced(pxe, privateKey, partialAddress);

        // Repeat for another account that didn't receive any notes for them, so we measure trial-decrypts
        context.logger(`Registering fresh account on new pxe`);
        await waitRegisteredAccountSynced(pxe, GrumpkinScalar.random(), Fr.random());

        // Stop the external node and pxe
        await pxe.stop();
        await node.stop();

        lastBlock = chainLength;
      }

      await context.teardown();
    },
    60 * 60_000, // 1-hour jest timeout: syncing long chains is slow
  );
});
61 changes: 14 additions & 47 deletions yarn-project/end-to-end/src/benchmarks/bench_publish_rollup.test.ts
Original file line number Diff line number Diff line change
@@ -1,55 +1,30 @@
/* eslint-disable camelcase */
import { AztecNodeService } from '@aztec/aztec-node';
import { AztecAddress, BatchCall } from '@aztec/aztec.js';
import { EthAddress, Fr, GrumpkinScalar } from '@aztec/circuits.js';
import { retryUntil } from '@aztec/foundation/retry';
import { sleep } from '@aztec/foundation/sleep';
import { Fr, GrumpkinScalar } from '@aztec/circuits.js';
import { BenchmarkingContract } from '@aztec/noir-contracts/types';
import { createPXEService } from '@aztec/pxe';
import { SequencerClient } from '@aztec/sequencer-client';

import times from 'lodash.times';

import { setup } from '../fixtures/utils.js';
import { EndToEndContext } from '../fixtures/utils.js';
import { benchmarkSetup, sendTxs, waitNewPXESynced, waitRegisteredAccountSynced } from './utils.js';

const ROLLUP_SIZES = process.env.ROLLUP_SIZES ? process.env.ROLLUP_SIZES.split(',').map(Number) : [8, 32, 128];

describe('benchmarks/publish_rollup', () => {
let context: Awaited<ReturnType<typeof setup>>;
let context: EndToEndContext;
let contract: BenchmarkingContract;
let owner: AztecAddress;
let sequencer: SequencerClient;

beforeEach(async () => {
context = await setup(2, { maxTxsPerBlock: 1024 });
[owner] = context.accounts.map(a => a.address);
contract = await BenchmarkingContract.deploy(context.wallet).send().deployed();
context.logger(`Deployed benchmarking contract at ${contract.address}`);
sequencer = (context.aztecNode as AztecNodeService).getSequencer()!;
await sequencer.stop();
({ context, contract, sequencer } = await benchmarkSetup({ maxTxsPerBlock: 1024 }));
}, 60_000);

// Each tx has a private execution (account entrypoint), a nested private call (create_note),
// a public call (increment_balance), and a nested public call (broadcast). These include
// emitting one private note and one unencrypted log, two storage reads and one write.
const makeBatchCall = (i: number) =>
new BatchCall(context.wallet, [
contract.methods.create_note(owner, i + 1).request(),
contract.methods.increment_balance(owner, i + 1).request(),
]);

it.each(ROLLUP_SIZES)(
`publishes a rollup with %d txs`,
async (txCount: number) => {
context.logger(`Assembling rollup with ${txCount} txs`);
// Simulate and simultaneously send ROLLUP_SIZE txs. These should not yet be processed since sequencer is stopped.
const calls = times(txCount, makeBatchCall);
calls.forEach(call => call.simulate({ skipPublicSimulation: true }));
const sentTxs = calls.map(call => call.send());
await sequencer.stop();

// Awaiting txHash waits until the aztec node has received the tx into its p2p pool
await Promise.all(sentTxs.map(tx => tx.getTxHash()));
await sleep(100);
// Simulate and simultaneously send ROLLUP_SIZE txs. These should not yet be processed since sequencer is stopped.
context.logger(`Assembling rollup with ${txCount} txs`);
const sentTxs = await sendTxs(txCount, context, contract);

// Restart sequencer to process all txs together
sequencer.restart();
Expand All @@ -66,25 +41,17 @@ describe('benchmarks/publish_rollup', () => {

// Spin up a new pxe and sync it, we'll use it to test sync times of new accounts for the last block
context.logger(`Starting new pxe`);
const pxe = await createPXEService(node, { l2BlockPollingIntervalMS: 100, l2StartingBlock: blockNumber! - 1 });
await pxe.addContracts([{ ...contract, portalContract: EthAddress.ZERO }]);
await retryUntil(() => pxe.isGlobalStateSynchronized(), 'pxe-global-sync');
const { publicKey, partialAddress } = context.wallet.getCompleteAddress();
const privateKey = context.wallet.getEncryptionPrivateKey();
const l2Block = await node.getBlockNumber();
const pxe = await waitNewPXESynced(node, contract, blockNumber! - 1);

// Register the owner account and wait until it's synced so we measure how much time it took
context.logger(`Registering owner account on new pxe`);
await pxe.registerAccount(privateKey, partialAddress);
const isOwnerSynced = async () => (await pxe.getSyncStatus()).notes[publicKey.toString()] === l2Block;
await retryUntil(isOwnerSynced, 'pxe-owner-sync');
const partialAddress = context.wallet.getCompleteAddress().partialAddress;
const privateKey = context.wallet.getEncryptionPrivateKey();
await waitRegisteredAccountSynced(pxe, privateKey, partialAddress);

// Repeat for another account that didn't receive any notes for them, so we measure trial-decrypts
context.logger(`Registering fresh account on new pxe`);
const newAccount = await pxe.registerAccount(GrumpkinScalar.random(), Fr.random());
const isNewAccountSynced = async () =>
(await pxe.getSyncStatus()).notes[newAccount.publicKey.toString()] === l2Block;
await retryUntil(isNewAccountSynced, 'pxe-new-account-sync');
await waitRegisteredAccountSynced(pxe, GrumpkinScalar.random(), Fr.random());

// Stop the external node and pxe
await pxe.stop();
Expand Down
Loading

0 comments on commit abd042e

Please sign in to comment.