From ae26474709e28116a38fd2c2773de39dfb6816ad Mon Sep 17 00:00:00 2001 From: Lasse Herskind <16536249+LHerskind@users.noreply.github.com> Date: Tue, 17 Sep 2024 18:10:29 +0100 Subject: [PATCH] feat: only download non-pruned blocks (#8578) Fixes #8562. - Extends the `L2BlockProposed` event to also include the archive - Changes `archiveAt` to only return the archive if the block is in the pending chain, e.g., returns `bytes32(0)` for pruned blocks - Removes the expected L2 block number check and instead checks that the block is in the `blocks` mapping on the contract - This supports multiple blocks with the same block number - Adds a test that will prune part of the chain, then sync a fresh node and then build a new block. - Fixes a bug in the `eth_log_handler::getBlockFromRollupTx` as it was loading an incorrect archive `nextAvailableLeafIndex` value, but since it seemed to not be used it did not cause any issues. --- l1-contracts/src/core/Rollup.sol | 7 +- l1-contracts/src/core/interfaces/IRollup.sol | 2 +- .../archiver/src/archiver/archiver.test.ts | 99 ++++++++++++++----- .../archiver/src/archiver/archiver.ts | 27 +++-- .../archiver/src/archiver/data_retrieval.ts | 28 +++--- .../archiver/src/archiver/eth_log_handlers.ts | 55 +++++++---- .../end-to-end/src/e2e_synching.test.ts | 99 +++++++++++++++---- 7 files changed, 235 insertions(+), 82 deletions(-) diff --git a/l1-contracts/src/core/Rollup.sol b/l1-contracts/src/core/Rollup.sol index cdda726092b..a5ded45c052 100644 --- a/l1-contracts/src/core/Rollup.sol +++ b/l1-contracts/src/core/Rollup.sol @@ -225,7 +225,7 @@ contract Rollup is Leonidas, IRollup, ITestRollup { uint256 l2ToL1TreeMinHeight = min + 1; OUTBOX.insert(blockNumber, header.contentCommitment.outHash, l2ToL1TreeMinHeight); - emit L2BlockProposed(blockNumber); + emit L2BlockProposed(blockNumber, _archive); // Automatically flag the block as proven if we have cheated and set assumeProvenThroughBlockNumber. 
if (blockNumber <= assumeProvenThroughBlockNumber) { @@ -390,7 +390,10 @@ contract Rollup is Leonidas, IRollup, ITestRollup { * @return bytes32 - The archive root of the block */ function archiveAt(uint256 _blockNumber) external view override(IRollup) returns (bytes32) { - return blocks[_blockNumber].archive; + if (_blockNumber <= tips.pendingBlockNumber) { + return blocks[_blockNumber].archive; + } + return bytes32(0); } /** diff --git a/l1-contracts/src/core/interfaces/IRollup.sol b/l1-contracts/src/core/interfaces/IRollup.sol index 131a71289c6..cf9d22019dc 100644 --- a/l1-contracts/src/core/interfaces/IRollup.sol +++ b/l1-contracts/src/core/interfaces/IRollup.sol @@ -15,7 +15,7 @@ interface ITestRollup { } interface IRollup { - event L2BlockProposed(uint256 indexed blockNumber); + event L2BlockProposed(uint256 indexed blockNumber, bytes32 indexed archive); event L2ProofVerified(uint256 indexed blockNumber, bytes32 indexed proverId); event PrunedPending(uint256 provenBlockNumber, uint256 pendingBlockNumber); diff --git a/yarn-project/archiver/src/archiver/archiver.test.ts b/yarn-project/archiver/src/archiver/archiver.test.ts index 9b9232b0ddb..fb18418ca0a 100644 --- a/yarn-project/archiver/src/archiver/archiver.test.ts +++ b/yarn-project/archiver/src/archiver/archiver.test.ts @@ -10,6 +10,7 @@ import { Fr } from '@aztec/foundation/fields'; import { sleep } from '@aztec/foundation/sleep'; import { type InboxAbi, RollupAbi } from '@aztec/l1-artifacts'; +import { jest } from '@jest/globals'; import { type MockProxy, mock } from 'jest-mock-extended'; import { type Chain, @@ -26,6 +27,14 @@ import { type ArchiverDataStore } from './archiver_store.js'; import { type ArchiverInstrumentation } from './instrumentation.js'; import { MemoryArchiverStore } from './memory_archiver_store/memory_archiver_store.js'; +interface MockRollupContractRead { + archiveAt: (args: readonly [bigint]) => Promise<`0x${string}`>; +} + +class MockRollupContract { + constructor(public read: 
MockRollupContractRead, public address: `0x${string}`) {} +} + describe('Archiver', () => { const rollupAddress = EthAddress.ZERO; const inboxAddress = EthAddress.ZERO; @@ -39,6 +48,7 @@ describe('Archiver', () => { let now: number; let archiver: Archiver; + let blocks: L2Block[]; beforeEach(() => { now = +new Date(); @@ -47,16 +57,11 @@ describe('Archiver', () => { timestamp: args.blockNumber * 1000n + BigInt(now), })) as any, }); + instrumentation = mock({ isEnabled: () => true }); archiverStore = new MemoryArchiverStore(1000); proverId = Fr.random(); - }); - - afterEach(async () => { - await archiver?.stop(); - }); - it('can start, sync and stop and handle l1 to l2 messages and logs', async () => { archiver = new Archiver( publicClient, rollupAddress, @@ -67,10 +72,22 @@ describe('Archiver', () => { instrumentation, ); + blocks = blockNumbers.map(x => L2Block.random(x, 4, x, x + 1, 2, 2)); + + const mockRollupRead = mock({ + archiveAt: (args: readonly [bigint]) => Promise.resolve(blocks[Number(args[0] - 1n)].archive.root.toString()), + }); + (archiver as any).rollup = new MockRollupContract(mockRollupRead, rollupAddress.toString()); + }); + + afterEach(async () => { + await archiver?.stop(); + }); + + it('can start, sync and stop and handle l1 to l2 messages and logs', async () => { let latestBlockNum = await archiver.getBlockNumber(); expect(latestBlockNum).toEqual(0); - const blocks = blockNumbers.map(x => L2Block.random(x, 4, x, x + 1, 2, 2)); blocks.forEach((b, i) => (b.header.globalVariables.timestamp = new Fr(now + 1000 * (i + 1)))); const rollupTxs = blocks.map(makeRollupTx); @@ -78,7 +95,7 @@ describe('Archiver', () => { mockGetLogs({ messageSent: [makeMessageSentEvent(98n, 1n, 0n), makeMessageSentEvent(99n, 1n, 1n)], - L2BlockProposed: [makeL2BlockProposedEvent(101n, 1n)], + L2BlockProposed: [makeL2BlockProposedEvent(101n, 1n, blocks[0].archive.root.toString())], proofVerified: [makeProofVerifiedEvent(102n, 1n, proverId)], }); @@ -89,7 +106,10 @@ 
describe('Archiver', () => { makeMessageSentEvent(2505n, 2n, 2n), makeMessageSentEvent(2506n, 3n, 1n), ], - L2BlockProposed: [makeL2BlockProposedEvent(2510n, 2n), makeL2BlockProposedEvent(2520n, 3n)], + L2BlockProposed: [ + makeL2BlockProposedEvent(2510n, 2n, blocks[1].archive.root.toString()), + makeL2BlockProposedEvent(2520n, 3n, blocks[2].archive.root.toString()), + ], }); publicClient.getTransaction.mockResolvedValueOnce(rollupTxs[0]); @@ -168,45 +188,76 @@ describe('Archiver', () => { }, 10_000); it('does not sync past current block number', async () => { + let latestBlockNum = await archiver.getBlockNumber(); + expect(latestBlockNum).toEqual(0); + const numL2BlocksInTest = 2; - archiver = new Archiver( - publicClient, - rollupAddress, - inboxAddress, - registryAddress, - archiverStore, - 1000, - instrumentation, - ); + + const rollupTxs = blocks.map(makeRollupTx); + + // Here we set the current L1 block number to 102. L1 to L2 messages after this should not be read. + publicClient.getBlockNumber.mockResolvedValue(102n); + + mockGetLogs({ + messageSent: [makeMessageSentEvent(66n, 1n, 0n), makeMessageSentEvent(68n, 1n, 1n)], + L2BlockProposed: [ + makeL2BlockProposedEvent(70n, 1n, blocks[0].archive.root.toString()), + makeL2BlockProposedEvent(80n, 2n, blocks[1].archive.root.toString()), + ], + }); + + mockGetLogs({}); + + rollupTxs.slice(0, numL2BlocksInTest).forEach(tx => publicClient.getTransaction.mockResolvedValueOnce(tx)); + + await archiver.start(false); + + while ((await archiver.getBlockNumber()) !== numL2BlocksInTest) { + await sleep(100); + } + + latestBlockNum = await archiver.getBlockNumber(); + expect(latestBlockNum).toEqual(numL2BlocksInTest); + }, 10_000); + + it('ignores block 3 because it have been pruned (simulate pruning)', async () => { + const loggerSpy = jest.spyOn((archiver as any).log, 'warn'); let latestBlockNum = await archiver.getBlockNumber(); expect(latestBlockNum).toEqual(0); - const blocks = blockNumbers.map(x => L2Block.random(x, 
4, x, x + 1, 2, 2)); + const numL2BlocksInTest = 2; const rollupTxs = blocks.map(makeRollupTx); // Here we set the current L1 block number to 102. L1 to L2 messages after this should not be read. publicClient.getBlockNumber.mockResolvedValue(102n); + const badArchive = Fr.random().toString(); + mockGetLogs({ messageSent: [makeMessageSentEvent(66n, 1n, 0n), makeMessageSentEvent(68n, 1n, 1n)], - L2BlockProposed: [makeL2BlockProposedEvent(70n, 1n), makeL2BlockProposedEvent(80n, 2n)], + L2BlockProposed: [ + makeL2BlockProposedEvent(70n, 1n, blocks[0].archive.root.toString()), + makeL2BlockProposedEvent(80n, 2n, blocks[1].archive.root.toString()), + makeL2BlockProposedEvent(90n, 3n, badArchive), + ], }); mockGetLogs({}); - rollupTxs.slice(0, numL2BlocksInTest).forEach(tx => publicClient.getTransaction.mockResolvedValueOnce(tx)); + rollupTxs.forEach(tx => publicClient.getTransaction.mockResolvedValueOnce(tx)); await archiver.start(false); - // Wait until block 3 is processed. If this won't happen the test will fail with timeout. while ((await archiver.getBlockNumber()) !== numL2BlocksInTest) { await sleep(100); } latestBlockNum = await archiver.getBlockNumber(); expect(latestBlockNum).toEqual(numL2BlocksInTest); + const errorMessage = `Archive mismatch matching, ignoring block ${3} with archive: ${badArchive}, expected ${blocks[2].archive.root.toString()}`; + expect(loggerSpy).toHaveBeenCalledWith(errorMessage); }, 10_000); // logs should be created in order of how archiver syncs. @@ -228,10 +279,10 @@ describe('Archiver', () => { * @param l2BlockNum - L2 Block number. * @returns An L2BlockProposed event log. 
*/ -function makeL2BlockProposedEvent(l1BlockNum: bigint, l2BlockNum: bigint) { +function makeL2BlockProposedEvent(l1BlockNum: bigint, l2BlockNum: bigint, archive: `0x${string}`) { return { blockNumber: l1BlockNum, - args: { blockNumber: l2BlockNum }, + args: { blockNumber: l2BlockNum, archive }, transactionHash: `0x${l2BlockNum}`, } as Log; } diff --git a/yarn-project/archiver/src/archiver/archiver.ts b/yarn-project/archiver/src/archiver/archiver.ts index 7b993f2bb51..7744d02d3f1 100644 --- a/yarn-project/archiver/src/archiver/archiver.ts +++ b/yarn-project/archiver/src/archiver/archiver.ts @@ -43,7 +43,15 @@ import { } from '@aztec/types/contracts'; import groupBy from 'lodash.groupby'; -import { type Chain, type HttpTransport, type PublicClient, createPublicClient, getContract, http } from 'viem'; +import { + type Chain, + type GetContractReturnType, + type HttpTransport, + type PublicClient, + createPublicClient, + getContract, + http, +} from 'viem'; import { type ArchiverDataStore } from './archiver_store.js'; import { type ArchiverConfig } from './config.js'; @@ -68,6 +76,8 @@ export class Archiver implements ArchiveSource { */ private runningPromise?: RunningPromise; + private rollup: GetContractReturnType>; + /** * Creates a new instance of the Archiver. * @param publicClient - A client for interacting with the Ethereum node. @@ -88,7 +98,13 @@ export class Archiver implements ArchiveSource { private readonly instrumentation: ArchiverInstrumentation, private readonly l1StartBlock: bigint = 0n, private readonly log: DebugLogger = createDebugLogger('aztec:archiver'), - ) {} + ) { + this.rollup = getContract({ + address: rollupAddress.toString(), + abi: RollupAbi, + client: publicClient, + }); + } /** * Creates a new instance of the Archiver and blocks until it syncs from chain. 
@@ -245,17 +261,14 @@ export class Archiver implements ArchiveSource { await this.store.addL1ToL2Messages(retrievedL1ToL2Messages); - // Read all data from chain and then write to our stores at the end - const nextExpectedL2BlockNum = BigInt((await this.store.getSynchedL2BlockNumber()) + 1); - this.log.debug(`Retrieving blocks from ${blocksSynchedTo + 1n} to ${currentL1BlockNumber}`); const retrievedBlocks = await retrieveBlockFromRollup( + this.rollup, this.publicClient, - this.rollupAddress, blockUntilSynced, blocksSynchedTo + 1n, currentL1BlockNumber, - nextExpectedL2BlockNum, + this.log, ); // Add the body diff --git a/yarn-project/archiver/src/archiver/data_retrieval.ts b/yarn-project/archiver/src/archiver/data_retrieval.ts index 5827296d3f1..f9c7fa3f865 100644 --- a/yarn-project/archiver/src/archiver/data_retrieval.ts +++ b/yarn-project/archiver/src/archiver/data_retrieval.ts @@ -1,10 +1,17 @@ import { type InboxLeaf, type L2Block } from '@aztec/circuit-types'; import { Fr, type Proof } from '@aztec/circuits.js'; -import { type EthAddress } from '@aztec/foundation/eth-address'; +import { EthAddress } from '@aztec/foundation/eth-address'; import { type DebugLogger, createDebugLogger } from '@aztec/foundation/log'; import { RollupAbi } from '@aztec/l1-artifacts'; -import { type Hex, type PublicClient, getAbiItem } from 'viem'; +import { + type Chain, + type GetContractReturnType, + type Hex, + type HttpTransport, + type PublicClient, + getAbiItem, +} from 'viem'; import { getBlockProofFromSubmitProofTx, @@ -27,12 +34,11 @@ import { type L1Published } from './structs/published.js'; * @returns An array of block; as well as the next eth block to search from. 
*/ export async function retrieveBlockFromRollup( + rollup: GetContractReturnType>, publicClient: PublicClient, - rollupAddress: EthAddress, blockUntilSynced: boolean, searchStartBlock: bigint, searchEndBlock: bigint, - expectedNextL2BlockNum: bigint, logger: DebugLogger = createDebugLogger('aztec:archiver'), ): Promise[]> { const retrievedBlocks: L1Published[] = []; @@ -40,25 +46,25 @@ export async function retrieveBlockFromRollup( if (searchStartBlock > searchEndBlock) { break; } - const L2BlockProposedLogs = await getL2BlockProposedLogs( + const l2BlockProposedLogs = await getL2BlockProposedLogs( publicClient, - rollupAddress, + EthAddress.fromString(rollup.address), searchStartBlock, searchEndBlock, ); - if (L2BlockProposedLogs.length === 0) { + + if (l2BlockProposedLogs.length === 0) { break; } - const lastLog = L2BlockProposedLogs[L2BlockProposedLogs.length - 1]; + const lastLog = l2BlockProposedLogs[l2BlockProposedLogs.length - 1]; logger.debug( - `Got L2 block processed logs for ${L2BlockProposedLogs[0].blockNumber}-${lastLog.blockNumber} between ${searchStartBlock}-${searchEndBlock} L1 blocks`, + `Got L2 block processed logs for ${l2BlockProposedLogs[0].blockNumber}-${lastLog.blockNumber} between ${searchStartBlock}-${searchEndBlock} L1 blocks`, ); - const newBlocks = await processL2BlockProposedLogs(publicClient, expectedNextL2BlockNum, L2BlockProposedLogs); + const newBlocks = await processL2BlockProposedLogs(rollup, publicClient, l2BlockProposedLogs, logger); retrievedBlocks.push(...newBlocks); searchStartBlock = lastLog.blockNumber! 
+ 1n; - expectedNextL2BlockNum += BigInt(newBlocks.length); } while (blockUntilSynced && searchStartBlock <= searchEndBlock); return retrievedBlocks; } diff --git a/yarn-project/archiver/src/archiver/eth_log_handlers.ts b/yarn-project/archiver/src/archiver/eth_log_handlers.ts index fa218fb2086..48a471d592c 100644 --- a/yarn-project/archiver/src/archiver/eth_log_handlers.ts +++ b/yarn-project/archiver/src/archiver/eth_log_handlers.ts @@ -2,10 +2,22 @@ import { Body, InboxLeaf, L2Block, type ViemSignature } from '@aztec/circuit-typ import { AppendOnlyTreeSnapshot, Header, Proof } from '@aztec/circuits.js'; import { type EthAddress } from '@aztec/foundation/eth-address'; import { Fr } from '@aztec/foundation/fields'; +import { type DebugLogger } from '@aztec/foundation/log'; import { numToUInt32BE } from '@aztec/foundation/serialize'; import { InboxAbi, RollupAbi } from '@aztec/l1-artifacts'; -import { type Hex, type Log, type PublicClient, decodeFunctionData, getAbiItem, getAddress, hexToBytes } from 'viem'; +import { + type Chain, + type GetContractReturnType, + type Hex, + type HttpTransport, + type Log, + type PublicClient, + decodeFunctionData, + getAbiItem, + getAddress, + hexToBytes, +} from 'viem'; import { type L1Published, type L1PublishedData } from './structs/published.js'; @@ -27,33 +39,40 @@ export function processMessageSentLogs( /** * Processes newly received L2BlockProposed logs. + * @param rollup - The rollup contract * @param publicClient - The viem public client to use for transaction retrieval. - * @param expectedL2BlockNumber - The next expected L2 block number. * @param logs - L2BlockProposed logs. * @returns - An array blocks. 
*/ export async function processL2BlockProposedLogs( + rollup: GetContractReturnType>, publicClient: PublicClient, - expectedL2BlockNumber: bigint, logs: Log[], + logger: DebugLogger, ): Promise[]> { const retrievedBlocks: L1Published[] = []; for (const log of logs) { - const blockNum = log.args.blockNumber; - if (blockNum !== expectedL2BlockNumber) { - throw new Error('Block number mismatch. Expected: ' + expectedL2BlockNumber + ' but got: ' + blockNum + '.'); + const blockNum = log.args.blockNumber!; + const archive = log.args.archive!; + const archiveFromChain = await rollup.read.archiveAt([blockNum]); + + // The value from the event and contract will match only if the block is in the chain. + if (archive === archiveFromChain) { + // TODO: Fetch blocks from calldata in parallel + const block = await getBlockFromRollupTx(publicClient, log.transactionHash!, blockNum); + + const l1: L1PublishedData = { + blockNumber: log.blockNumber, + blockHash: log.blockHash, + timestamp: await getL1BlockTime(publicClient, log.blockNumber), + }; + + retrievedBlocks.push({ data: block, l1 }); + } else { + logger.warn( + `Archive mismatch matching, ignoring block ${blockNum} with archive: ${archive}, expected ${archiveFromChain}`, + ); } - // TODO: Fetch blocks from calldata in parallel - const block = await getBlockFromRollupTx(publicClient, log.transactionHash!, log.args.blockNumber); - - const l1: L1PublishedData = { - blockNumber: log.blockNumber, - blockHash: log.blockHash, - timestamp: await getL1BlockTime(publicClient, log.blockNumber), - }; - - retrievedBlocks.push({ data: block, l1 }); - expectedL2BlockNumber++; } return retrievedBlocks; @@ -101,7 +120,7 @@ async function getBlockFromRollupTx( const archive = AppendOnlyTreeSnapshot.fromBuffer( Buffer.concat([ Buffer.from(hexToBytes(archiveRootHex)), // L2Block.archive.root - numToUInt32BE(Number(l2BlockNum)), // L2Block.archive.nextAvailableLeafIndex + numToUInt32BE(Number(l2BlockNum + 1n)), // 
L2Block.archive.nextAvailableLeafIndex ]), ); diff --git a/yarn-project/end-to-end/src/e2e_synching.test.ts b/yarn-project/end-to-end/src/e2e_synching.test.ts index e01529836d8..0ec53ed95ce 100644 --- a/yarn-project/end-to-end/src/e2e_synching.test.ts +++ b/yarn-project/end-to-end/src/e2e_synching.test.ts @@ -26,18 +26,18 @@ * Previous results. The `blockCount` is the number of blocks we will construct with `txCount` * transactions of the `complexity` provided. * The `numberOfBlocks` is the total number of blocks, including deployments of canonical contracts - * and setup before we start the "actual" test. Similar, `numberOfTransactions` is the total number - * of transactions across these blocks. - * blockCount: 10, txCount: 36, complexity: Deployment: {"numberOfBlocks":16, "syncTime":17.490706521987914, "numberOfTransactions":366} - * blockCount: 10, txCount: 36, complexity: PrivateTransfer: {"numberOfBlocks":19, "syncTime":20.846745924949644, "numberOfTransactions":474} - * blockCount: 10, txCount: 36, complexity: PublicTransfer: {"numberOfBlocks":18, "syncTime":21.340179460525512, "numberOfTransactions":438} - * blockCount: 10, txCount: 9, complexity: Spam: {"numberOfBlocks":17, "syncTime":49.40888188171387, "numberOfTransactions":105} + * and setup before we start the "actual" test. 
+ * blockCount: 10, txCount: 36, complexity: Deployment: {"numberOfBlocks":16, "syncTime":17.490706521987914} + * blockCount: 10, txCount: 36, complexity: PrivateTransfer: {"numberOfBlocks":19, "syncTime":20.846745924949644} + * blockCount: 10, txCount: 36, complexity: PublicTransfer: {"numberOfBlocks":18, "syncTime":21.340179460525512} + * blockCount: 10, txCount: 9, complexity: Spam: {"numberOfBlocks":17, "syncTime":49.40888188171387} */ import { getSchnorrAccount } from '@aztec/accounts/schnorr'; import { AztecNodeService } from '@aztec/aztec-node'; import { type AccountWallet, type AccountWalletWithSecretKey, + AnvilTestWatcher, BatchCall, type DebugLogger, Fr, @@ -49,15 +49,17 @@ import { import { ExtendedNote, L2Block, Note, type TxHash } from '@aztec/circuit-types'; import { type AztecAddress, ETHEREUM_SLOT_DURATION } from '@aztec/circuits.js'; import { Timer } from '@aztec/foundation/timer'; +import { RollupAbi } from '@aztec/l1-artifacts'; import { SpamContract, TokenContract } from '@aztec/noir-contracts.js'; import { type PXEService } from '@aztec/pxe'; import { L1Publisher } from '@aztec/sequencer-client'; import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; import * as fs from 'fs'; +import { getContract } from 'viem'; import { addAccounts } from './fixtures/snapshot_manager.js'; -import { getPrivateKeyFromIndex, setup } from './fixtures/utils.js'; +import { type EndToEndContext, getPrivateKeyFromIndex, setup, setupPXEService } from './fixtures/utils.js'; const SALT = 420; const AZTEC_GENERATE_TEST_DATA = !!process.env.AZTEC_GENERATE_TEST_DATA; @@ -276,13 +278,6 @@ class TestVariant { return (json['blocks'] as string[]).map(b => L2Block.fromString(b)); } - async writeStats(content: Record) { - await this.writeJson(`stats`, { - description: this.description(), - ...content, - }); - } - numberOfBlocksStored() { const files = fs.readdirSync(this.dir()); return files.filter(file => file.startsWith('block_')).length; @@ -365,7 +360,11 @@ 
describe('e2e_l1_with_wall_time', () => { 1_200_000, ); - it.each(variants)('replay and then sync - %s', async (variant: TestVariant) => { + const testTheVariant = async ( + variant: TestVariant, + beforeSync: (opts: Partial) => Promise = () => Promise.resolve(), + afterSync: (opts: Partial) => Promise = () => Promise.resolve(), + ) => { if (AZTEC_GENERATE_TEST_DATA) { return; } @@ -407,6 +406,8 @@ describe('e2e_l1_with_wall_time', () => { await publisher.proposeL2Block(block); } + await beforeSync({ deployL1ContractsValues, cheatCodes, config, logger }); + // All the blocks have been "re-played" and we are now to simply get a new node up to speed const timer = new Timer(); const freshNode = await AztecNodeService.createAndSync( @@ -415,19 +416,79 @@ describe('e2e_l1_with_wall_time', () => { ); const syncTime = timer.s(); - const txCount = blocks.map(b => b.getStats().txCount).reduce((acc, curr) => acc + curr, 0); const blockNumber = await freshNode.getBlockNumber(); - // @note We should consider storing these stats to see changes over time etc. 
- // await variant.writeStats({ numberOfBlocks: blockNumber, syncTime, numberOfTransactions: txCount }); logger.info( `Stats: ${variant.description()}: ${JSON.stringify({ numberOfBlocks: blockNumber, syncTime, - numberOfTransactions: txCount, })}`, ); + await afterSync({ deployL1ContractsValues, cheatCodes, config, logger }); + await teardown(); + }; + + it.each(variants)('replay and then sync - %s', async (variant: TestVariant) => { + await testTheVariant(variant); + }); + + it('replay, then prune and only then perform an initial sync', async () => { + if (AZTEC_GENERATE_TEST_DATA) { + return; + } + + const variant = variants[0]; + + const beforeSync = async (opts: Partial) => { + const rollup = getContract({ + address: opts.deployL1ContractsValues!.l1ContractAddresses.rollupAddress.toString(), + abi: RollupAbi, + client: opts.deployL1ContractsValues!.walletClient, + }); + + const pendingBlockNumber = await rollup.read.getPendingBlockNumber(); + await rollup.write.setAssumeProvenThroughBlockNumber([pendingBlockNumber - BigInt(variant.blockCount) / 2n]); + + const timeliness = await rollup.read.TIMELINESS_PROVING_IN_SLOTS(); + const [, , slot] = await rollup.read.blocks([(await rollup.read.getProvenBlockNumber()) + 1n]); + const timeJumpTo = await rollup.read.getTimestampForSlot([slot + timeliness]); + + await opts.cheatCodes!.eth.warp(Number(timeJumpTo)); + + await rollup.write.prune(); + }; + + // After we have synched the chain, we will publish a block. Here we are VERY interested in seeing the block number. + const afterSync = async (opts: Partial) => { + const watcher = new AnvilTestWatcher( + opts.cheatCodes!.eth, + opts.deployL1ContractsValues!.l1ContractAddresses.rollupAddress, + opts.deployL1ContractsValues!.publicClient, + ); + await watcher.start(); + + // The sync here could likely be avoided by using the node we just synched. 
+ const aztecNode = await AztecNodeService.createAndSync(opts.config!, new NoopTelemetryClient()); + const sequencer = aztecNode.getSequencer(); + + const { pxe } = await setupPXEService(aztecNode!); + + variant.setPXE(pxe); + + const blockBefore = await aztecNode.getBlock(await aztecNode.getBlockNumber()); + + sequencer?.updateSequencerConfig({ minTxsPerBlock: variant.txCount, maxTxsPerBlock: variant.txCount }); + const txs = await variant.createAndSendTxs(); + await Promise.all(txs.map(tx => tx.wait({ timeout: 1200 }))); + + const blockAfter = await aztecNode.getBlock(await aztecNode.getBlockNumber()); + + expect(blockAfter!.number).toEqual(blockBefore!.number + 1); + expect(blockAfter!.header.lastArchive).toEqual(blockBefore!.archive); + }; + + await testTheVariant(variant, beforeSync, afterSync); }); });