diff --git a/l1-contracts/src/core/Rollup.sol b/l1-contracts/src/core/Rollup.sol index a5ded45c052..283aafc4828 100644 --- a/l1-contracts/src/core/Rollup.sol +++ b/l1-contracts/src/core/Rollup.sol @@ -102,6 +102,27 @@ contract Rollup is Leonidas, IRollup, ITestRollup { setupEpoch(); } + function status(uint256 myHeaderBlockNumber) + external + view + override(IRollup) + returns ( + uint256 provenBlockNumber, + bytes32 provenArchive, + uint256 pendingBlockNumber, + bytes32 pendingArchive, + bytes32 archiveOfMyBlock + ) + { + return ( + tips.provenBlockNumber, + blocks[tips.provenBlockNumber].archive, + tips.pendingBlockNumber, + blocks[tips.pendingBlockNumber].archive, + archiveAt(myHeaderBlockNumber) + ); + } + /** * @notice Prune the pending chain up to the last proven block * @@ -382,20 +403,6 @@ contract Rollup is Leonidas, IRollup, ITestRollup { emit L2ProofVerified(header.globalVariables.blockNumber, _proverId); } - /** - * @notice Get the archive root of a specific block - * - * @param _blockNumber - The block number to get the archive root of - * - * @return bytes32 - The archive root of the block - */ - function archiveAt(uint256 _blockNumber) external view override(IRollup) returns (bytes32) { - if (_blockNumber <= tips.pendingBlockNumber) { - return blocks[_blockNumber].archive; - } - return bytes32(0); - } - /** * @notice Check if msg.sender can propose at a given time * @@ -482,6 +489,20 @@ contract Rollup is Leonidas, IRollup, ITestRollup { return tips.pendingBlockNumber; } + /** + * @notice Get the archive root of a specific block + * + * @param _blockNumber - The block number to get the archive root of + * + * @return bytes32 - The archive root of the block + */ + function archiveAt(uint256 _blockNumber) public view override(IRollup) returns (bytes32) { + if (_blockNumber <= tips.pendingBlockNumber) { + return blocks[_blockNumber].archive; + } + return bytes32(0); + } + /** * @notice Validates the header for submission * diff --git 
a/l1-contracts/src/core/interfaces/IRollup.sol b/l1-contracts/src/core/interfaces/IRollup.sol index cf9d22019dc..1f02808bf96 100644 --- a/l1-contracts/src/core/interfaces/IRollup.sol +++ b/l1-contracts/src/core/interfaces/IRollup.sol @@ -54,6 +54,17 @@ interface IRollup { function L1_BLOCK_AT_GENESIS() external view returns (uint256); + function status(uint256 myHeaderBlockNumber) + external + view + returns ( + uint256 provenBlockCount, + bytes32 provenArchive, + uint256 pendingBlockCount, + bytes32 pendingArchive, + bytes32 archiveOfMyBlock + ); + // TODO(#7346): Integrate batch rollups // function submitRootProof( // bytes32 _previousArchive, diff --git a/yarn-project/archiver/src/archiver/archiver.test.ts b/yarn-project/archiver/src/archiver/archiver.test.ts index b57d2fcdfce..0ec87a3b780 100644 --- a/yarn-project/archiver/src/archiver/archiver.test.ts +++ b/yarn-project/archiver/src/archiver/archiver.test.ts @@ -5,6 +5,7 @@ import { LogType, UnencryptedL2BlockL2Logs, } from '@aztec/circuit-types'; +import { GENESIS_ARCHIVE_ROOT } from '@aztec/circuits.js'; import { EthAddress } from '@aztec/foundation/eth-address'; import { Fr } from '@aztec/foundation/fields'; import { sleep } from '@aztec/foundation/sleep'; @@ -30,6 +31,7 @@ import { MemoryArchiverStore } from './memory_archiver_store/memory_archiver_sto interface MockRollupContractRead { archiveAt: (args: readonly [bigint]) => Promise<`0x${string}`>; getProvenBlockNumber: () => Promise; + status: (args: readonly [bigint]) => Promise<[bigint, `0x${string}`, bigint, `0x${string}`, `0x${string}`]>; } describe('Archiver', () => { @@ -47,6 +49,8 @@ describe('Archiver', () => { let archiver: Archiver; let blocks: L2Block[]; + const GENESIS_ROOT = new Fr(GENESIS_ARCHIVE_ROOT).toString(); + beforeEach(() => { now = +new Date(); publicClient = mock>({ @@ -90,6 +94,10 @@ describe('Archiver', () => { publicClient.getBlockNumber.mockResolvedValueOnce(2500n).mockResolvedValueOnce(2600n).mockResolvedValueOnce(2700n); + 
rollupRead.status + .mockResolvedValueOnce([0n, GENESIS_ROOT, 1n, blocks[0].archive.root.toString(), GENESIS_ROOT]) + .mockResolvedValue([0n, GENESIS_ROOT, 3n, blocks[2].archive.root.toString(), blocks[0].archive.root.toString()]); + mockGetLogs({ messageSent: [makeMessageSentEvent(98n, 1n, 0n), makeMessageSentEvent(99n, 1n, 1n)], L2BlockProposed: [makeL2BlockProposedEvent(101n, 1n, blocks[0].archive.root.toString())], @@ -180,7 +188,9 @@ describe('Archiver', () => { expect((await archiver.getBlocks(1, 100, true)).map(b => b.number)).toEqual([1]); }, 10_000); - it('does not sync past current block number', async () => { + it('ignores block 3 because it have been pruned (simulate pruning)', async () => { + const loggerSpy = jest.spyOn((archiver as any).log, 'warn'); + let latestBlockNum = await archiver.getBlockNumber(); expect(latestBlockNum).toEqual(0); @@ -191,17 +201,20 @@ describe('Archiver', () => { // Here we set the current L1 block number to 102. L1 to L2 messages after this should not be read. 
publicClient.getBlockNumber.mockResolvedValue(102n); + const badArchive = Fr.random().toString(); + + rollupRead.status.mockResolvedValue([0n, GENESIS_ROOT, 2n, blocks[1].archive.root.toString(), GENESIS_ROOT]); + mockGetLogs({ messageSent: [makeMessageSentEvent(66n, 1n, 0n), makeMessageSentEvent(68n, 1n, 1n)], L2BlockProposed: [ makeL2BlockProposedEvent(70n, 1n, blocks[0].archive.root.toString()), makeL2BlockProposedEvent(80n, 2n, blocks[1].archive.root.toString()), + makeL2BlockProposedEvent(90n, 3n, badArchive), ], }); - mockGetLogs({}); - - rollupTxs.slice(0, numL2BlocksInTest).forEach(tx => publicClient.getTransaction.mockResolvedValueOnce(tx)); + rollupTxs.forEach(tx => publicClient.getTransaction.mockResolvedValueOnce(tx)); await archiver.start(false); @@ -211,10 +224,12 @@ describe('Archiver', () => { latestBlockNum = await archiver.getBlockNumber(); expect(latestBlockNum).toEqual(numL2BlocksInTest); + const errorMessage = `Archive mismatch matching, ignoring block ${3} with archive: ${badArchive}, expected ${blocks[2].archive.root.toString()}`; + expect(loggerSpy).toHaveBeenCalledWith(errorMessage); }, 10_000); - it('ignores block 3 because it have been pruned (simulate pruning)', async () => { - const loggerSpy = jest.spyOn((archiver as any).log, 'warn'); + it('skip event search if not blocks found', async () => { + const loggerSpy = jest.spyOn((archiver as any).log, 'verbose'); let latestBlockNum = await archiver.getBlockNumber(); expect(latestBlockNum).toEqual(0); @@ -223,22 +238,24 @@ describe('Archiver', () => { const rollupTxs = blocks.map(makeRollupTx); - // Here we set the current L1 block number to 102. L1 to L2 messages after this should not be read. 
- publicClient.getBlockNumber.mockResolvedValue(102n); - - const badArchive = Fr.random().toString(); + publicClient.getBlockNumber.mockResolvedValueOnce(50n).mockResolvedValueOnce(100n); + rollupRead.status + .mockResolvedValueOnce([0n, GENESIS_ROOT, 0n, GENESIS_ROOT, GENESIS_ROOT]) + .mockResolvedValueOnce([0n, GENESIS_ROOT, 2n, blocks[1].archive.root.toString(), GENESIS_ROOT]); + // This can look slightly odd, but we will need to do an empty request for the messages, and will entirely skip + // a call to the proposed blocks because of changes with status. + mockGetLogs({ + messageSent: [], + }); mockGetLogs({ messageSent: [makeMessageSentEvent(66n, 1n, 0n), makeMessageSentEvent(68n, 1n, 1n)], L2BlockProposed: [ makeL2BlockProposedEvent(70n, 1n, blocks[0].archive.root.toString()), makeL2BlockProposedEvent(80n, 2n, blocks[1].archive.root.toString()), - makeL2BlockProposedEvent(90n, 3n, badArchive), ], }); - mockGetLogs({}); - rollupTxs.forEach(tx => publicClient.getTransaction.mockResolvedValueOnce(tx)); await archiver.start(false); @@ -249,8 +266,13 @@ describe('Archiver', () => { latestBlockNum = await archiver.getBlockNumber(); expect(latestBlockNum).toEqual(numL2BlocksInTest); - const errorMessage = `Archive mismatch matching, ignoring block ${3} with archive: ${badArchive}, expected ${blocks[2].archive.root.toString()}`; - expect(loggerSpy).toHaveBeenCalledWith(errorMessage); + + // For some reason, this is 1-indexed. + expect(loggerSpy).toHaveBeenNthCalledWith( + 1, + `Retrieved no new L1 -> L2 messages between L1 blocks ${1n} and ${50}.`, + ); + expect(loggerSpy).toHaveBeenNthCalledWith(2, `No blocks to retrieve from ${1n} to ${50n}`); }, 10_000); // logs should be created in order of how archiver syncs. @@ -258,9 +280,12 @@ describe('Archiver', () => { messageSent?: ReturnType[]; L2BlockProposed?: ReturnType[]; }) => { - publicClient.getLogs - .mockResolvedValueOnce(logs.messageSent ?? []) - .mockResolvedValueOnce(logs.L2BlockProposed ?? 
[]); + if (logs.messageSent) { + publicClient.getLogs.mockResolvedValueOnce(logs.messageSent); + } + if (logs.L2BlockProposed) { + publicClient.getLogs.mockResolvedValueOnce(logs.L2BlockProposed); + } }; }); diff --git a/yarn-project/archiver/src/archiver/archiver.ts b/yarn-project/archiver/src/archiver/archiver.ts index 62797c70a13..a74ec853ff6 100644 --- a/yarn-project/archiver/src/archiver/archiver.ts +++ b/yarn-project/archiver/src/archiver/archiver.ts @@ -251,15 +251,21 @@ export class Archiver implements ArchiveSource { currentL1BlockNumber, ); - if (retrievedL1ToL2Messages.retrievedData.length !== 0) { - await this.store.addL1ToL2Messages(retrievedL1ToL2Messages); - + if (retrievedL1ToL2Messages.retrievedData.length === 0) { + await this.store.setMessageSynchedL1BlockNumber(currentL1BlockNumber); this.log.verbose( - `Retrieved ${retrievedL1ToL2Messages.retrievedData.length} new L1 -> L2 messages between L1 blocks ${ - messagesSynchedTo + 1n - } and ${currentL1BlockNumber}.`, + `Retrieved no new L1 -> L2 messages between L1 blocks ${messagesSynchedTo + 1n} and ${currentL1BlockNumber}.`, ); + return; } + + await this.store.addL1ToL2Messages(retrievedL1ToL2Messages); + + this.log.verbose( + `Retrieved ${retrievedL1ToL2Messages.retrievedData.length} new L1 -> L2 messages between L1 blocks ${ + messagesSynchedTo + 1n + } and ${currentL1BlockNumber}.`, + ); } private async updateLastProvenL2Block(provenSynchedTo: bigint, currentL1BlockNumber: bigint) { @@ -281,6 +287,29 @@ export class Archiver implements ArchiveSource { return; } + const lastBlock = await this.getBlock(-1); + + const [, , pendingBlockNumber, pendingArchive, archiveOfMyBlock] = await this.rollup.read.status([ + BigInt(lastBlock?.number ?? 
0), + ]); + + const noBlocksButInitial = lastBlock === undefined && pendingBlockNumber == 0n; + const noBlockSinceLast = + lastBlock && + pendingBlockNumber === BigInt(lastBlock.number) && + pendingArchive === lastBlock.archive.root.toString(); + + if (noBlocksButInitial || noBlockSinceLast) { + await this.store.setBlockSynchedL1BlockNumber(currentL1BlockNumber); + this.log.verbose(`No blocks to retrieve from ${blocksSynchedTo + 1n} to ${currentL1BlockNumber}`); + return; + } + + if (lastBlock && archiveOfMyBlock !== lastBlock.archive.root.toString()) { + // @todo Either `prune` have been called, or L1 have re-orged deep enough to remove a block. + // Issue#8620 and Issue#8621 + } + this.log.debug(`Retrieving blocks from ${blocksSynchedTo + 1n} to ${currentL1BlockNumber}`); const retrievedBlocks = await retrieveBlockFromRollup( this.rollup, @@ -291,14 +320,19 @@ export class Archiver implements ArchiveSource { this.log, ); - (retrievedBlocks.length ? this.log.verbose : this.log.debug)( - `Retrieved ${retrievedBlocks.length || 'no'} new L2 blocks between L1 blocks ${ + if (retrievedBlocks.length === 0) { + await this.store.setBlockSynchedL1BlockNumber(currentL1BlockNumber); + this.log.verbose(`Retrieved no new blocks from ${blocksSynchedTo + 1n} to ${currentL1BlockNumber}`); + return; + } + + this.log.debug( + `Retrieved ${retrievedBlocks.length} new L2 blocks between L1 blocks ${ blocksSynchedTo + 1n } and ${currentL1BlockNumber}.`, ); - const lastProcessedL1BlockNumber = - retrievedBlocks.length > 0 ? retrievedBlocks[retrievedBlocks.length - 1].l1.blockNumber : blocksSynchedTo; + const lastProcessedL1BlockNumber = retrievedBlocks[retrievedBlocks.length - 1].l1.blockNumber; this.log.debug( `Processing retrieved blocks ${retrievedBlocks @@ -306,44 +340,37 @@ export class Archiver implements ArchiveSource { .join(',')} with last processed L1 block ${lastProcessedL1BlockNumber}`, ); - // If we actually received something, we will use it. 
- if (retrievedBlocks.length > 0) { - await Promise.all( - retrievedBlocks.map(block => { - return this.store.addLogs( - block.data.body.noteEncryptedLogs, - block.data.body.encryptedLogs, - block.data.body.unencryptedLogs, - block.data.number, - ); - }), - ); - - // Unroll all logs emitted during the retrieved blocks and extract any contract classes and instances from them - await Promise.all( - retrievedBlocks.map(async block => { - const blockLogs = block.data.body.txEffects - .flatMap(txEffect => (txEffect ? [txEffect.unencryptedLogs] : [])) - .flatMap(txLog => txLog.unrollLogs()); - await this.storeRegisteredContractClasses(blockLogs, block.data.number); - await this.storeDeployedContractInstances(blockLogs, block.data.number); - await this.storeBroadcastedIndividualFunctions(blockLogs, block.data.number); - }), - ); + await Promise.all( + retrievedBlocks.map(block => { + return this.store.addLogs( + block.data.body.noteEncryptedLogs, + block.data.body.encryptedLogs, + block.data.body.unencryptedLogs, + block.data.number, + ); + }), + ); - const timer = new Timer(); - await this.store.addBlocks(retrievedBlocks); - this.instrumentation.processNewBlocks( - timer.ms() / retrievedBlocks.length, - retrievedBlocks.map(b => b.data), - ); - const lastL2BlockNumber = retrievedBlocks[retrievedBlocks.length - 1].data.number; - this.log.verbose(`Processed ${retrievedBlocks.length} new L2 blocks up to ${lastL2BlockNumber}`); - } + // Unroll all logs emitted during the retrieved blocks and extract any contract classes and instances from them + await Promise.all( + retrievedBlocks.map(async block => { + const blockLogs = block.data.body.txEffects + .flatMap(txEffect => (txEffect ? 
[txEffect.unencryptedLogs] : [])) + .flatMap(txLog => txLog.unrollLogs()); + await this.storeRegisteredContractClasses(blockLogs, block.data.number); + await this.storeDeployedContractInstances(blockLogs, block.data.number); + await this.storeBroadcastedIndividualFunctions(blockLogs, block.data.number); + }), + ); - if (retrievedBlocks.length > 0 || blockUntilSynced) { - (blockUntilSynced ? this.log.info : this.log.verbose)(`Synced to L1 block ${currentL1BlockNumber}`); - } + const timer = new Timer(); + await this.store.addBlocks(retrievedBlocks); + this.instrumentation.processNewBlocks( + timer.ms() / retrievedBlocks.length, + retrievedBlocks.map(b => b.data), + ); + const lastL2BlockNumber = retrievedBlocks[retrievedBlocks.length - 1].data.number; + this.log.verbose(`Processed ${retrievedBlocks.length} new L2 blocks up to ${lastL2BlockNumber}`); } /** diff --git a/yarn-project/archiver/src/archiver/archiver_store.ts b/yarn-project/archiver/src/archiver/archiver_store.ts index b6da328fc64..9ddbb8e1050 100644 --- a/yarn-project/archiver/src/archiver/archiver_store.ts +++ b/yarn-project/archiver/src/archiver/archiver_store.ts @@ -147,6 +147,18 @@ export interface ArchiverDataStore { */ setProvenL2BlockNumber(l2BlockNumber: SingletonDataRetrieval): Promise; + /** + * Stores the l1 block number that blocks have been synched until + * @param l1BlockNumber - The l1 block number + */ + setBlockSynchedL1BlockNumber(l1BlockNumber: bigint): Promise; + + /** + * Stores the l1 block number that messages have been synched until + * @param l1BlockNumber - The l1 block number + */ + setMessageSynchedL1BlockNumber(l1BlockNumber: bigint): Promise; + /** * Gets the synch point of the archiver */ diff --git a/yarn-project/archiver/src/archiver/data_retrieval.ts b/yarn-project/archiver/src/archiver/data_retrieval.ts index 89400c6a3f2..1e79352fc81 100644 --- a/yarn-project/archiver/src/archiver/data_retrieval.ts +++ b/yarn-project/archiver/src/archiver/data_retrieval.ts @@ -83,14 
+83,14 @@ export async function processL2BlockProposedLogs( ): Promise[]> { const retrievedBlocks: L1Published[] = []; for (const log of logs) { - const blockNum = log.args.blockNumber!; + const l2BlockNumber = log.args.blockNumber!; const archive = log.args.archive!; - const archiveFromChain = await rollup.read.archiveAt([blockNum]); + const archiveFromChain = await rollup.read.archiveAt([l2BlockNumber]); // The value from the event and contract will match only if the block is in the chain. if (archive === archiveFromChain) { // TODO: Fetch blocks from calldata in parallel - const block = await getBlockFromRollupTx(publicClient, log.transactionHash!, blockNum); + const block = await getBlockFromRollupTx(publicClient, log.transactionHash!, l2BlockNumber); const l1: L1PublishedData = { blockNumber: log.blockNumber, @@ -101,7 +101,7 @@ export async function processL2BlockProposedLogs( retrievedBlocks.push({ data: block, l1 }); } else { logger.warn( - `Archive mismatch matching, ignoring block ${blockNum} with archive: ${archive}, expected ${archiveFromChain}`, + `Archive mismatch matching, ignoring block ${l2BlockNumber} with archive: ${archive}, expected ${archiveFromChain}`, ); } } diff --git a/yarn-project/archiver/src/archiver/kv_archiver_store/block_store.ts b/yarn-project/archiver/src/archiver/kv_archiver_store/block_store.ts index 84d8455b422..024df9f1065 100644 --- a/yarn-project/archiver/src/archiver/kv_archiver_store/block_store.ts +++ b/yarn-project/archiver/src/archiver/kv_archiver_store/block_store.ts @@ -187,6 +187,10 @@ export class BlockStore { return this.#lastSynchedL1Block.get(); } + setSynchedL1BlockNumber(l1BlockNumber: bigint) { + void this.#lastSynchedL1Block.set(l1BlockNumber); + } + #computeBlockRange(start: number, limit: number): Required, 'start' | 'end'>> { if (limit < 1) { throw new Error(`Invalid limit: ${limit}`); diff --git a/yarn-project/archiver/src/archiver/kv_archiver_store/kv_archiver_store.ts 
b/yarn-project/archiver/src/archiver/kv_archiver_store/kv_archiver_store.ts index 2677c55e3f0..0c01d390724 100644 --- a/yarn-project/archiver/src/archiver/kv_archiver_store/kv_archiver_store.ts +++ b/yarn-project/archiver/src/archiver/kv_archiver_store/kv_archiver_store.ts @@ -235,6 +235,16 @@ export class KVArchiverDataStore implements ArchiverDataStore { await this.#provenStore.setProvenL2BlockNumber(blockNumber); } + setBlockSynchedL1BlockNumber(l1BlockNumber: bigint) { + this.#blockStore.setSynchedL1BlockNumber(l1BlockNumber); + return Promise.resolve(); + } + + setMessageSynchedL1BlockNumber(l1BlockNumber: bigint) { + this.#messageStore.setSynchedL1BlockNumber(l1BlockNumber); + return Promise.resolve(); + } + /** * Gets the last L1 block number processed by the archiver */ diff --git a/yarn-project/archiver/src/archiver/kv_archiver_store/message_store.ts b/yarn-project/archiver/src/archiver/kv_archiver_store/message_store.ts index 38bfb1fc6ee..23a13a9ca32 100644 --- a/yarn-project/archiver/src/archiver/kv_archiver_store/message_store.ts +++ b/yarn-project/archiver/src/archiver/kv_archiver_store/message_store.ts @@ -16,7 +16,7 @@ import { type DataRetrieval } from '../structs/data_retrieval.js'; export class MessageStore { #l1ToL2Messages: AztecMap; #l1ToL2MessageIndices: AztecMap; // We store array of bigints here because there can be duplicate messages - #lastL1BlockMessages: AztecSingleton; + #lastSynchedL1Block: AztecSingleton; #log = createDebugLogger('aztec:archiver:message_store'); @@ -25,7 +25,7 @@ export class MessageStore { constructor(private db: AztecKVStore) { this.#l1ToL2Messages = db.openMap('archiver_l1_to_l2_messages'); this.#l1ToL2MessageIndices = db.openMap('archiver_l1_to_l2_message_indices'); - this.#lastL1BlockMessages = db.openSingleton('archiver_last_l1_block_new_messages'); + this.#lastSynchedL1Block = db.openSingleton('archiver_last_l1_block_new_messages'); } /** @@ -33,7 +33,11 @@ export class MessageStore { * @returns The last L1 
block number processed */ getSynchedL1BlockNumber(): bigint | undefined { - return this.#lastL1BlockMessages.get(); + return this.#lastSynchedL1Block.get(); + } + + setSynchedL1BlockNumber(l1BlockNumber: bigint) { + void this.#lastSynchedL1Block.set(l1BlockNumber); } /** @@ -43,12 +47,12 @@ export class MessageStore { */ addL1ToL2Messages(messages: DataRetrieval): Promise { return this.db.transaction(() => { - const lastL1BlockNumber = this.#lastL1BlockMessages.get() ?? 0n; + const lastL1BlockNumber = this.#lastSynchedL1Block.get() ?? 0n; if (lastL1BlockNumber >= messages.lastProcessedL1BlockNumber) { return false; } - void this.#lastL1BlockMessages.set(messages.lastProcessedL1BlockNumber); + void this.#lastSynchedL1Block.set(messages.lastProcessedL1BlockNumber); for (const message of messages.retrievedData) { if (message.index >= this.#l1ToL2MessagesSubtreeSize) { diff --git a/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts b/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts index 9ea02e71f21..ee17a6ed9ca 100644 --- a/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts +++ b/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts @@ -420,6 +420,16 @@ export class MemoryArchiverStore implements ArchiverDataStore { return Promise.resolve(); } + setBlockSynchedL1BlockNumber(l1BlockNumber: bigint) { + this.lastL1BlockNewBlocks = l1BlockNumber; + return Promise.resolve(); + } + + setMessageSynchedL1BlockNumber(l1BlockNumber: bigint) { + this.lastL1BlockNewMessages = l1BlockNumber; + return Promise.resolve(); + } + public getSynchPoint(): Promise { return Promise.resolve({ blocksSynchedTo: this.lastL1BlockNewBlocks, diff --git a/yarn-project/end-to-end/src/e2e_synching.test.ts b/yarn-project/end-to-end/src/e2e_synching.test.ts index 0ec53ed95ce..2cbb21530fa 100644 --- a/yarn-project/end-to-end/src/e2e_synching.test.ts +++ 
b/yarn-project/end-to-end/src/e2e_synching.test.ts @@ -306,7 +306,7 @@ class TestVariant { /** * Setting up the different variants we will be testing with. * - * @note The `MaxDiff` test have much fewer transactions than all others, this is + * @note The `Spam` test has much fewer transactions than all others, this is * because each transaction is LARGE, so the block size in kb is hit. * I decided that 1/4 should be acceptable, and still small enough to work. */ @@ -317,7 +317,7 @@ const variants: TestVariant[] = [ new TestVariant(10, 9, TxComplexity.Spam), ]; -describe('e2e_l1_with_wall_time', () => { +describe('e2e_synching', () => { // WARNING: Running this with AZTEC_GENERATE_TEST_DATA is VERY slow, and will build a whole slew // of fixtures including multiple blocks with many transaction in. it.each(variants)(