diff --git a/cspell.json b/cspell.json index 08dcf839cb1..35d7779e39d 100644 --- a/cspell.json +++ b/cspell.json @@ -226,7 +226,8 @@ "auditability", "hardfork", "composablity", - "counterparty" + "counterparty", + "lmdb" ], "ignorePaths": [ "node_modules/", @@ -253,7 +254,5 @@ "lib", "*.cmake" ], - "flagWords": [ - "anonymous" - ] + "flagWords": ["anonymous"] } diff --git a/yarn-project/archiver/package.json b/yarn-project/archiver/package.json index 86b50d71c57..3af3e147e2c 100644 --- a/yarn-project/archiver/package.json +++ b/yarn-project/archiver/package.json @@ -41,6 +41,7 @@ "@aztec/types": "workspace:^", "@types/lodash.omit": "^4.5.7", "debug": "^4.3.4", + "lmdb": "^2.9.1", "lodash.omit": "^4.5.0", "tsc-watch": "^6.0.0", "tslib": "^2.5.0", diff --git a/yarn-project/archiver/src/archiver/archiver.test.ts b/yarn-project/archiver/src/archiver/archiver.test.ts index c9d8790f2ea..4f2d720d224 100644 --- a/yarn-project/archiver/src/archiver/archiver.test.ts +++ b/yarn-project/archiver/src/archiver/archiver.test.ts @@ -11,7 +11,8 @@ import times from 'lodash.times'; import { Chain, HttpTransport, Log, PublicClient, Transaction, encodeFunctionData, toHex } from 'viem'; import { Archiver } from './archiver.js'; -import { ArchiverDataStore, MemoryArchiverStore } from './archiver_store.js'; +import { ArchiverDataStore } from './archiver_store.js'; +import { MemoryArchiverStore } from './memory_archiver_store/memory_archiver_store.js'; describe('Archiver', () => { const rollupAddress = EthAddress.ZERO.toString(); @@ -34,7 +35,6 @@ describe('Archiver', () => { EthAddress.fromString(inboxAddress), EthAddress.fromString(registryAddress), EthAddress.fromString(contractDeploymentEmitterAddress), - 0, archiverStore, 1000, ); @@ -138,7 +138,6 @@ describe('Archiver', () => { EthAddress.fromString(inboxAddress), EthAddress.fromString(registryAddress), EthAddress.fromString(contractDeploymentEmitterAddress), - 0, archiverStore, 1000, ); @@ -216,7 +215,6 @@ describe('Archiver', () 
=> { EthAddress.fromString(inboxAddress), EthAddress.fromString(registryAddress), EthAddress.fromString(contractDeploymentEmitterAddress), - 0, archiverStore, 1000, ); diff --git a/yarn-project/archiver/src/archiver/archiver.ts b/yarn-project/archiver/src/archiver/archiver.ts index 845b2b0a5c8..5f3c855d224 100644 --- a/yarn-project/archiver/src/archiver/archiver.ts +++ b/yarn-project/archiver/src/archiver/archiver.ts @@ -6,7 +6,6 @@ import { EthAddress } from '@aztec/foundation/eth-address'; import { Fr } from '@aztec/foundation/fields'; import { DebugLogger, createDebugLogger } from '@aztec/foundation/log'; import { RunningPromise } from '@aztec/foundation/running-promise'; -import { RegistryAbi } from '@aztec/l1-artifacts'; import { ContractData, ContractDataSource, @@ -26,9 +25,9 @@ import { } from '@aztec/types'; import omit from 'lodash.omit'; -import { Chain, HttpTransport, PublicClient, createPublicClient, getContract, http } from 'viem'; +import { Chain, HttpTransport, PublicClient, createPublicClient, http } from 'viem'; -import { ArchiverDataStore, MemoryArchiverStore } from './archiver_store.js'; +import { ArchiverDataStore } from './archiver_store.js'; import { ArchiverConfig } from './config.js'; import { retrieveBlocks, @@ -53,11 +52,6 @@ export class Archiver implements L2BlockSource, L2LogsSource, ContractDataSource */ private nextL2BlockFromL1Block = 0n; - /** - * Last Processed Block Number - */ - private lastProcessedL1BlockNumber = 0n; - /** * Use this to track logged block in order to avoid repeating the same message. 
*/ @@ -81,22 +75,23 @@ export class Archiver implements L2BlockSource, L2LogsSource, ContractDataSource private readonly inboxAddress: EthAddress, private readonly registryAddress: EthAddress, private readonly contractDeploymentEmitterAddress: EthAddress, - searchStartBlock: number, private readonly store: ArchiverDataStore, private readonly pollingIntervalMs = 10_000, private readonly log: DebugLogger = createDebugLogger('aztec:archiver'), - ) { - this.nextL2BlockFromL1Block = BigInt(searchStartBlock); - this.lastProcessedL1BlockNumber = BigInt(searchStartBlock); - } + ) {} /** * Creates a new instance of the Archiver and blocks until it syncs from chain. * @param config - The archiver's desired configuration. + * @param archiverStore - The backing store for the archiver. * @param blockUntilSynced - If true, blocks until the archiver has fully synced. * @returns - An instance of the archiver. */ - public static async createAndSync(config: ArchiverConfig, blockUntilSynced = true): Promise { + public static async createAndSync( + config: ArchiverConfig, + archiverStore: ArchiverDataStore, + blockUntilSynced = true, + ): Promise { const chain = createEthereumChain(config.rpcUrl, config.apiKey); const publicClient = createPublicClient({ chain: chain.chainInfo, @@ -104,23 +99,12 @@ export class Archiver implements L2BlockSource, L2LogsSource, ContractDataSource pollingInterval: config.viemPollingIntervalMS, }); - // ask the registry for the block number when the rollup was deployed - // this is the block from which archiver has to search from - const registryContract = getContract({ - address: config.l1Contracts.registryAddress.toString(), - abi: RegistryAbi, - publicClient, - }); - const searchStartBlock = Number((await registryContract.read.getCurrentSnapshot()).blockNumber); - - const archiverStore = new MemoryArchiverStore(config.maxLogs ?? 
1000); const archiver = new Archiver( publicClient, config.l1Contracts.rollupAddress, config.l1Contracts.inboxAddress, config.l1Contracts.registryAddress, config.l1Contracts.contractDeploymentEmitterAddress, - searchStartBlock, archiverStore, config.archiverPollingIntervalMS, ); @@ -152,7 +136,12 @@ export class Archiver implements L2BlockSource, L2LogsSource, ContractDataSource */ private async sync(blockUntilSynced: boolean) { const currentL1BlockNumber = await this.publicClient.getBlockNumber(); - if (currentL1BlockNumber <= this.lastProcessedL1BlockNumber) { + // this makes the archiver more resilient to eventually-consistent eth providers like Infura + // it _will_ process the same L1 blocks over and over again until the L2 chain advances + // one thing to handle now is that we will process the same L1 to L2 messages over and over again + // so the store needs to account for that. + const lastProcessedL1BlockNumber = await this.store.getL1BlockNumber(); + if (currentL1BlockNumber <= lastProcessedL1BlockNumber) { // reducing logs, otherwise this gets triggered on every loop (1s) if (currentL1BlockNumber !== this.lastLoggedL1BlockNumber) { this.log(`No new blocks to process, current block number: ${currentL1BlockNumber}`); @@ -169,14 +158,14 @@ export class Archiver implements L2BlockSource, L2LogsSource, ContractDataSource * to ensure that data is read exactly once. * * The first is the problem of eventually consistent ETH service providers like Infura. - * We are not currently handling this correctly in the case of L1 to L2 messages and we will - * want to re-visit L2 Block and contract data retrieval at a later stage. This is not - * currently a problem but will need to be addressed before a mainnet release. + * We currently read from the last L1 block that we saw emit an L2 block. This could mean + * that the archiver ends up looking at the same L1 block multiple times (e.g. 
if we last saw + * an L2 block emitted at L1 block 10, we'd constantly ask for L1 blocks from 11 onwards until + * we see another L2 block). For this to work message and block processing need to be idempotent. + * We should re-visit this before mainnet launch. * * The second is that in between the various calls to L1, the block number can move meaning some * of the following calls will return data for blocks that were not present during earlier calls. - * This is a problem for example when setting the last block number marker for L1 to L2 messages - - * this.lastProcessedBlockNumber = currentBlockNumber; * It's possible that we actually received messages in block currentBlockNumber + 1 meaning the next time * we do this sync we get the same message again. Additionally, the call to get cancelled L1 to L2 messages * could read from a block not present when retrieving pending messages. If a message was added and cancelled @@ -195,14 +184,14 @@ export class Archiver implements L2BlockSource, L2LogsSource, ContractDataSource this.publicClient, this.inboxAddress, blockUntilSynced, - this.lastProcessedL1BlockNumber + 1n, // + 1 to prevent re-including messages from the last processed block + lastProcessedL1BlockNumber + 1n, // + 1 to prevent re-including messages from the last processed block currentL1BlockNumber, ); const retrievedCancelledL1ToL2Messages = await retrieveNewCancelledL1ToL2Messages( this.publicClient, this.inboxAddress, blockUntilSynced, - this.lastProcessedL1BlockNumber + 1n, + lastProcessedL1BlockNumber + 1n, currentL1BlockNumber, ); @@ -215,8 +204,6 @@ export class Archiver implements L2BlockSource, L2LogsSource, ContractDataSource this.log('Removing pending l1 to l2 messages from store where messages were cancelled'); await this.store.cancelPendingL1ToL2Messages(retrievedCancelledL1ToL2Messages.retrievedData); - this.lastProcessedL1BlockNumber = currentL1BlockNumber; - // ********** Events that are processed per block ********** // Read all data from 
chain and then write to our stores at the end @@ -252,26 +239,22 @@ export class Archiver implements L2BlockSource, L2LogsSource, ContractDataSource this.log(`Retrieved ${retrievedBlocks.retrievedData.length} block(s) from chain`); - // store encrypted logs from L2 Blocks that we have retrieved - const encryptedLogs = retrievedBlocks.retrievedData.map(block => { - return block.newEncryptedLogs!; - }); - await this.store.addLogs(encryptedLogs, LogType.ENCRYPTED); - - // store unencrypted logs from L2 Blocks that we have retrieved - const unencryptedLogs = retrievedBlocks.retrievedData.map(block => { - return block.newUnencryptedLogs!; - }); - await this.store.addLogs(unencryptedLogs, LogType.UNENCRYPTED); + await Promise.all( + retrievedBlocks.retrievedData.map(block => + this.store.addLogs(block.newEncryptedLogs, block.newUnencryptedLogs, block.number), + ), + ); // store contracts for which we have retrieved L2 blocks const lastKnownL2BlockNum = retrievedBlocks.retrievedData[retrievedBlocks.retrievedData.length - 1].number; - retrievedContracts.retrievedData.forEach(async ([contracts, l2BlockNum], index) => { - this.log(`Retrieved extended contract data for l2 block number: ${index}`); - if (l2BlockNum <= lastKnownL2BlockNum) { - await this.store.addExtendedContractData(contracts, l2BlockNum); - } - }); + await Promise.all( + retrievedContracts.retrievedData.map(async ([contracts, l2BlockNum]) => { + this.log(`Retrieved extended contract data for l2 block number: ${l2BlockNum}`); + if (l2BlockNum <= lastKnownL2BlockNum) { + await this.store.addExtendedContractData(contracts, l2BlockNum); + } + }), + ); // from retrieved L2Blocks, confirm L1 to L2 messages that have been published // from each l2block fetch all messageKeys in a flattened array: @@ -285,7 +268,11 @@ export class Archiver implements L2BlockSource, L2LogsSource, ContractDataSource retrievedBlocks.retrievedData.map(block => { // Ensure we pad the L1 to L2 message array to the full size before storing. 
block.newL1ToL2Messages = padArrayEnd(block.newL1ToL2Messages, Fr.ZERO, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP); - return L2Block.fromFields(omit(block, ['newEncryptedLogs', 'newUnencryptedLogs']), block.getBlockHash()); + return L2Block.fromFields( + omit(block, ['newEncryptedLogs', 'newUnencryptedLogs']), + block.getBlockHash(), + block.getL1BlockNumber(), + ); }), ); diff --git a/yarn-project/archiver/src/archiver/archiver_store.test.ts b/yarn-project/archiver/src/archiver/archiver_store.test.ts deleted file mode 100644 index f4e6aae5a13..00000000000 --- a/yarn-project/archiver/src/archiver/archiver_store.test.ts +++ /dev/null @@ -1,312 +0,0 @@ -import { - INITIAL_L2_BLOCK_NUM, - L2Block, - L2BlockContext, - L2BlockL2Logs, - LogId, - LogType, - TxHash, - UnencryptedL2Log, -} from '@aztec/types'; - -import { randomBytes } from 'crypto'; - -import { ArchiverDataStore, MemoryArchiverStore } from './archiver_store.js'; - -describe('Archiver Memory Store', () => { - let archiverStore: ArchiverDataStore; - - beforeEach(() => { - archiverStore = new MemoryArchiverStore(1000); - }); - - it('can store and retrieve blocks', async () => { - const blocks = Array(10) - .fill(0) - .map((_, index) => L2Block.random(index)); - await archiverStore.addBlocks(blocks); - // Offset indices by INITIAL_L2_BLOCK_NUM to ensure we are correctly aligned - for (const [from, limit] of [ - [0 + INITIAL_L2_BLOCK_NUM, 10], - [3 + INITIAL_L2_BLOCK_NUM, 3], - [1 + INITIAL_L2_BLOCK_NUM, 7], - [5 + INITIAL_L2_BLOCK_NUM, 8], - [10 + INITIAL_L2_BLOCK_NUM, 1], - [11 + INITIAL_L2_BLOCK_NUM, 1], - ]) { - const expected = blocks.slice(from - INITIAL_L2_BLOCK_NUM, from - INITIAL_L2_BLOCK_NUM + limit); - const actual = await archiverStore.getBlocks(from, limit); - expect(expected).toEqual(actual); - } - }); - - test.each([LogType.ENCRYPTED, LogType.UNENCRYPTED])('can store and retrieve logs', async (logType: LogType) => { - const logs = Array(10) - .fill(0) - .map(_ => L2BlockL2Logs.random(6, 3, 2)); - await 
archiverStore.addLogs(logs, logType); - // Offset indices by INITIAL_L2_BLOCK_NUM to ensure we are correctly aligned - for (const [from, limit] of [ - [0 + INITIAL_L2_BLOCK_NUM, 10], - [3 + INITIAL_L2_BLOCK_NUM, 3], - [1 + INITIAL_L2_BLOCK_NUM, 7], - [5 + INITIAL_L2_BLOCK_NUM, 8], - [10 + INITIAL_L2_BLOCK_NUM, 1], - [11 + INITIAL_L2_BLOCK_NUM, 1], - ]) { - const expected = logs.slice(from - INITIAL_L2_BLOCK_NUM, from - INITIAL_L2_BLOCK_NUM + limit); - const actual = await archiverStore.getLogs(from, limit, logType); - expect(expected).toEqual(actual); - } - }); - - it('throws if we try and request less than 1 block', async () => { - const blocks = Array(10) - .fill(0) - .map((_, index) => L2Block.random(index)); - await archiverStore.addBlocks(blocks); - await expect(async () => await archiverStore.getBlocks(1, 0)).rejects.toThrow(`Invalid limit: 0`); - }); - - it('returns from the beginning when "from" < genesis block', async () => { - const blocks = Array(10) - .fill(0) - .map((_, index) => L2Block.random(index)); - await archiverStore.addBlocks(blocks); - const from = -5; - const limit = 1; - const retrievedBlocks = await archiverStore.getBlocks(from, limit); - expect(retrievedBlocks.length).toEqual(1); - expect(retrievedBlocks[0]).toEqual(blocks[0]); - }); - - test.each([LogType.ENCRYPTED, LogType.UNENCRYPTED])( - 'throws if we try and request less than 1 log', - async (logType: LogType) => { - const logs = Array(10) - .fill(0) - .map(_ => L2BlockL2Logs.random(6, 3, 2)); - await archiverStore.addLogs(logs, logType); - await expect(async () => await archiverStore.getLogs(1, 0, logType)).rejects.toThrow(`Invalid limit: 0`); - }, - ); - - describe('getUnencryptedLogs config', () => { - it('does not return more than "maxLogs" logs', async () => { - const maxLogs = 5; - archiverStore = new MemoryArchiverStore(maxLogs); - const blocks = Array(10) - .fill(0) - .map((_, index: number) => L2Block.random(index + 1, 4, 2, 3, 2, 2)); - - await 
archiverStore.addBlocks(blocks); - await archiverStore.addLogs( - blocks.map(block => block.newUnencryptedLogs!), - LogType.UNENCRYPTED, - ); - - const response = await archiverStore.getUnencryptedLogs({}); - - expect(response.maxLogsHit).toBeTruthy(); - expect(response.logs.length).toEqual(maxLogs); - }); - }); - - describe('getUnencryptedLogs filtering', () => { - const txsPerBlock = 4; - const numPublicFunctionCalls = 3; - const numUnencryptedLogs = 4; - const numBlocks = 10; - let blocks: L2Block[]; - - beforeEach(async () => { - blocks = Array(numBlocks) - .fill(0) - .map((_, index: number) => - L2Block.random(index + 1, txsPerBlock, 2, numPublicFunctionCalls, 2, numUnencryptedLogs), - ); - - await archiverStore.addBlocks(blocks); - await archiverStore.addLogs( - blocks.map(block => block.newUnencryptedLogs!), - LogType.UNENCRYPTED, - ); - }); - - it('"txHash" filter param is respected', async () => { - // get random tx - const targetBlockIndex = Math.floor(Math.random() * numBlocks); - const targetTxIndex = Math.floor(Math.random() * txsPerBlock); - const targetTxHash = new L2BlockContext(blocks[targetBlockIndex]).getTxHash(targetTxIndex); - - const response = await archiverStore.getUnencryptedLogs({ txHash: targetTxHash }); - const logs = response.logs; - - expect(response.maxLogsHit).toBeFalsy(); - - const expectedNumLogs = numPublicFunctionCalls * numUnencryptedLogs; - expect(logs.length).toEqual(expectedNumLogs); - - const targeBlockNumber = targetBlockIndex + INITIAL_L2_BLOCK_NUM; - for (const log of logs) { - expect(log.id.blockNumber).toEqual(targeBlockNumber); - expect(log.id.txIndex).toEqual(targetTxIndex); - } - }); - - it('"fromBlock" and "toBlock" filter params are respected', async () => { - // Set "fromBlock" and "toBlock" - const fromBlock = 3; - const toBlock = 7; - - const response = await archiverStore.getUnencryptedLogs({ fromBlock, toBlock }); - const logs = response.logs; - - expect(response.maxLogsHit).toBeFalsy(); - - const 
expectedNumLogs = txsPerBlock * numPublicFunctionCalls * numUnencryptedLogs * (toBlock - fromBlock); - expect(logs.length).toEqual(expectedNumLogs); - - for (const log of logs) { - const blockNumber = log.id.blockNumber; - expect(blockNumber).toBeGreaterThanOrEqual(fromBlock); - expect(blockNumber).toBeLessThan(toBlock); - } - }); - - it('"afterLog" filter param is respected', async () => { - // Get a random log as reference - const targetBlockIndex = Math.floor(Math.random() * numBlocks); - const targetTxIndex = Math.floor(Math.random() * txsPerBlock); - const targetLogIndex = Math.floor(Math.random() * numUnencryptedLogs); - - const afterLog = new LogId(targetBlockIndex + INITIAL_L2_BLOCK_NUM, targetTxIndex, targetLogIndex); - - const response = await archiverStore.getUnencryptedLogs({ afterLog }); - const logs = response.logs; - - expect(response.maxLogsHit).toBeFalsy(); - - for (const log of logs) { - const logId = log.id; - expect(logId.blockNumber).toBeGreaterThanOrEqual(afterLog.blockNumber); - if (logId.blockNumber === afterLog.blockNumber) { - expect(logId.txIndex).toBeGreaterThanOrEqual(afterLog.txIndex); - if (logId.txIndex === afterLog.txIndex) { - expect(logId.logIndex).toBeGreaterThan(afterLog.logIndex); - } - } - } - }); - - it('"contractAddress" filter param is respected', async () => { - // Get a random contract address from the logs - const targetBlockIndex = Math.floor(Math.random() * numBlocks); - const targetTxIndex = Math.floor(Math.random() * txsPerBlock); - const targetFunctionLogIndex = Math.floor(Math.random() * numPublicFunctionCalls); - const targetLogIndex = Math.floor(Math.random() * numUnencryptedLogs); - const targetContractAddress = UnencryptedL2Log.fromBuffer( - blocks[targetBlockIndex].newUnencryptedLogs!.txLogs[targetTxIndex].functionLogs[targetFunctionLogIndex].logs[ - targetLogIndex - ], - ).contractAddress; - - const response = await archiverStore.getUnencryptedLogs({ contractAddress: targetContractAddress }); - - 
expect(response.maxLogsHit).toBeFalsy(); - - for (const extendedLog of response.logs) { - expect(extendedLog.log.contractAddress.equals(targetContractAddress)).toBeTruthy(); - } - }); - - it('"selector" filter param is respected', async () => { - // Get a random selector from the logs - const targetBlockIndex = Math.floor(Math.random() * numBlocks); - const targetTxIndex = Math.floor(Math.random() * txsPerBlock); - const targetFunctionLogIndex = Math.floor(Math.random() * numPublicFunctionCalls); - const targetLogIndex = Math.floor(Math.random() * numUnencryptedLogs); - const targetSelector = UnencryptedL2Log.fromBuffer( - blocks[targetBlockIndex].newUnencryptedLogs!.txLogs[targetTxIndex].functionLogs[targetFunctionLogIndex].logs[ - targetLogIndex - ], - ).selector; - - const response = await archiverStore.getUnencryptedLogs({ selector: targetSelector }); - - expect(response.maxLogsHit).toBeFalsy(); - - for (const extendedLog of response.logs) { - expect(extendedLog.log.selector.equals(targetSelector)).toBeTruthy(); - } - }); - - it('"txHash" filter param is ignored when "afterLog" is set', async () => { - // Get random txHash - const txHash = new TxHash(randomBytes(TxHash.SIZE)); - const afterLog = new LogId(1, 0, 0); - - const response = await archiverStore.getUnencryptedLogs({ txHash, afterLog }); - expect(response.logs.length).toBeGreaterThan(1); - }); - - it('intersecting works', async () => { - let logs = (await archiverStore.getUnencryptedLogs({ fromBlock: -10, toBlock: -5 })).logs; - expect(logs.length).toBe(0); - - // "fromBlock" gets correctly trimmed to range and "toBlock" is exclusive - logs = (await archiverStore.getUnencryptedLogs({ fromBlock: -10, toBlock: 5 })).logs; - let blockNumbers = new Set(logs.map(log => log.id.blockNumber)); - expect(blockNumbers).toEqual(new Set([1, 2, 3, 4])); - - // "toBlock" should be exclusive - logs = (await archiverStore.getUnencryptedLogs({ fromBlock: 1, toBlock: 1 })).logs; - expect(logs.length).toBe(0); - - logs = 
(await archiverStore.getUnencryptedLogs({ fromBlock: 10, toBlock: 5 })).logs; - expect(logs.length).toBe(0); - - // both "fromBlock" and "toBlock" get correctly capped to range and logs from all blocks are returned - logs = (await archiverStore.getUnencryptedLogs({ fromBlock: -100, toBlock: +100 })).logs; - blockNumbers = new Set(logs.map(log => log.id.blockNumber)); - expect(blockNumbers.size).toBe(numBlocks); - - // intersecting with "afterLog" works - logs = (await archiverStore.getUnencryptedLogs({ fromBlock: 2, toBlock: 5, afterLog: new LogId(4, 0, 0) })).logs; - blockNumbers = new Set(logs.map(log => log.id.blockNumber)); - expect(blockNumbers).toEqual(new Set([4])); - - logs = (await archiverStore.getUnencryptedLogs({ toBlock: 5, afterLog: new LogId(5, 1, 0) })).logs; - expect(logs.length).toBe(0); - - logs = (await archiverStore.getUnencryptedLogs({ fromBlock: 2, toBlock: 5, afterLog: new LogId(100, 0, 0) })) - .logs; - expect(logs.length).toBe(0); - }); - - it('"txIndex" and "logIndex" are respected when "afterLog.blockNumber" is equal to "fromBlock"', async () => { - // Get a random log as reference - const targetBlockIndex = Math.floor(Math.random() * numBlocks); - const targetTxIndex = Math.floor(Math.random() * txsPerBlock); - const targetLogIndex = Math.floor(Math.random() * numUnencryptedLogs); - - const afterLog = new LogId(targetBlockIndex + INITIAL_L2_BLOCK_NUM, targetTxIndex, targetLogIndex); - - const response = await archiverStore.getUnencryptedLogs({ afterLog, fromBlock: afterLog.blockNumber }); - const logs = response.logs; - - expect(response.maxLogsHit).toBeFalsy(); - - for (const log of logs) { - const logId = log.id; - expect(logId.blockNumber).toBeGreaterThanOrEqual(afterLog.blockNumber); - if (logId.blockNumber === afterLog.blockNumber) { - expect(logId.txIndex).toBeGreaterThanOrEqual(afterLog.txIndex); - if (logId.txIndex === afterLog.txIndex) { - expect(logId.logIndex).toBeGreaterThan(afterLog.logIndex); - } - } - } - }); - }); -}); 
diff --git a/yarn-project/archiver/src/archiver/archiver_store.ts b/yarn-project/archiver/src/archiver/archiver_store.ts index 266c42aa131..9abfe82686b 100644 --- a/yarn-project/archiver/src/archiver/archiver_store.ts +++ b/yarn-project/archiver/src/archiver/archiver_store.ts @@ -1,25 +1,20 @@ -import { Fr, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP } from '@aztec/circuits.js'; +import { Fr } from '@aztec/circuits.js'; import { AztecAddress } from '@aztec/foundation/aztec-address'; import { + CancelledL1ToL2Message, ContractData, ExtendedContractData, - ExtendedUnencryptedL2Log, GetUnencryptedLogsResponse, - INITIAL_L2_BLOCK_NUM, L1ToL2Message, L2Block, - L2BlockContext, L2BlockL2Logs, L2Tx, LogFilter, - LogId, LogType, + PendingL1ToL2Message, TxHash, - UnencryptedL2Log, } from '@aztec/types'; -import { L1ToL2MessageStore, PendingL1ToL2MessageStore } from './l1_to_l2_message_store.js'; - /** * Interface describing a data store to be used by the archiver to store all its relevant data * (blocks, encrypted logs, aztec contract data extended contract data). @@ -49,25 +44,30 @@ export interface ArchiverDataStore { /** * Append new logs to the store's list. - * @param data - The logs to be added to the store. - * @param logType - The type of the logs to be added to the store. + * @param encryptedLogs - The encrypted logs to be added to the store. + * @param unencryptedLogs - The unencrypted logs to be added to the store. + * @param blockNumber - The block for which to add the logs. * @returns True if the operation is successful. */ - addLogs(data: L2BlockL2Logs[], logType: LogType): Promise; + addLogs( + encryptedLogs: L2BlockL2Logs | undefined, + unencryptedLogs: L2BlockL2Logs | undefined, + blockNumber: number, + ): Promise; /** * Append new pending L1 to L2 messages to the store. * @param messages - The L1 to L2 messages to be added to the store. * @returns True if the operation is successful. 
*/ - addPendingL1ToL2Messages(messages: L1ToL2Message[]): Promise; + addPendingL1ToL2Messages(messages: PendingL1ToL2Message[]): Promise; /** * Remove pending L1 to L2 messages from the store (if they were cancelled). - * @param messageKeys - The message keys to be removed from the store. + * @param message - The message keys to be removed from the store. * @returns True if the operation is successful. */ - cancelPendingL1ToL2Messages(messageKeys: Fr[]): Promise; + cancelPendingL1ToL2Messages(message: CancelledL1ToL2Message[]): Promise; /** * Messages that have been published in an L2 block are confirmed. @@ -150,366 +150,9 @@ export interface ArchiverDataStore { * @returns The number of the latest L2 block processed. */ getBlockNumber(): Promise; -} - -/** - * Simple, in-memory implementation of an archiver data store. - */ -export class MemoryArchiverStore implements ArchiverDataStore { - /** - * An array containing all the L2 blocks that have been fetched so far. - */ - private l2BlockContexts: L2BlockContext[] = []; - - /** - * An array containing all the L2 Txs in the L2 blocks that have been fetched so far. - */ - private l2Txs: L2Tx[] = []; - - /** - * An array containing all the encrypted logs that have been fetched so far. - * Note: Index in the "outer" array equals to (corresponding L2 block's number - INITIAL_L2_BLOCK_NUM). - */ - private encryptedLogsPerBlock: L2BlockL2Logs[] = []; - - /** - * An array containing all the unencrypted logs that have been fetched so far. - * Note: Index in the "outer" array equals to (corresponding L2 block's number - INITIAL_L2_BLOCK_NUM). - */ - private unencryptedLogsPerBlock: L2BlockL2Logs[] = []; - - /** - * A sparse array containing all the extended contract data that have been fetched so far. - */ - private extendedContractDataByBlock: (ExtendedContractData[] | undefined)[] = []; - - /** - * A mapping of contract address to extended contract data. 
- */ - private extendedContractData: Map = new Map(); - - /** - * Contains all the confirmed L1 to L2 messages (i.e. messages that were consumed in an L2 block) - * It is a map of entryKey to the corresponding L1 to L2 message and the number of times it has appeared - */ - private confirmedL1ToL2Messages: L1ToL2MessageStore = new L1ToL2MessageStore(); - - /** - * Contains all the pending L1 to L2 messages (accounts for duplication of messages) - */ - private pendingL1ToL2Messages: PendingL1ToL2MessageStore = new PendingL1ToL2MessageStore(); - - constructor( - /** The max number of logs that can be obtained in 1 "getUnencryptedLogs" call. */ - public readonly maxLogs: number, - ) {} - - /** - * Append new blocks to the store's list. - * @param blocks - The L2 blocks to be added to the store. - * @returns True if the operation is successful (always in this implementation). - */ - public addBlocks(blocks: L2Block[]): Promise { - this.l2BlockContexts.push(...blocks.map(block => new L2BlockContext(block))); - this.l2Txs.push(...blocks.flatMap(b => b.getTxs())); - return Promise.resolve(true); - } /** - * Append new logs to the store's list. - * @param data - The logs to be added to the store. - * @param logType - The type of the logs to be added to the store. - * @returns True if the operation is successful. - */ - addLogs(data: L2BlockL2Logs[], logType: LogType): Promise { - logType === LogType.ENCRYPTED - ? this.encryptedLogsPerBlock.push(...data) - : this.unencryptedLogsPerBlock.push(...data); - return Promise.resolve(true); - } - - /** - * Append new pending L1 to L2 messages to the store. - * @param messages - The L1 to L2 messages to be added to the store. - * @returns True if the operation is successful (always in this implementation). 
- */ - public addPendingL1ToL2Messages(messages: L1ToL2Message[]): Promise { - for (const msg of messages) { - this.pendingL1ToL2Messages.addMessage(msg.entryKey!, msg); - } - return Promise.resolve(true); - } - - /** - * Remove pending L1 to L2 messages from the store (if they were cancelled). - * @param messageKeys - The message keys to be removed from the store. - * @returns True if the operation is successful (always in this implementation). - */ - public cancelPendingL1ToL2Messages(messageKeys: Fr[]): Promise { - messageKeys.forEach(messageKey => { - this.pendingL1ToL2Messages.removeMessage(messageKey); - }); - return Promise.resolve(true); - } - - /** - * Messages that have been published in an L2 block are confirmed. - * Add them to the confirmed store, also remove them from the pending store. - * @param messageKeys - The message keys to be removed from the store. - * @returns True if the operation is successful (always in this implementation). - */ - public confirmL1ToL2Messages(messageKeys: Fr[]): Promise { - messageKeys.forEach(messageKey => { - this.confirmedL1ToL2Messages.addMessage(messageKey, this.pendingL1ToL2Messages.getMessage(messageKey)!); - this.pendingL1ToL2Messages.removeMessage(messageKey); - }); - return Promise.resolve(true); - } - - /** - * Store new extended contract data from an L2 block to the store's list. - * @param data - List of contracts' data to be added. - * @param blockNum - Number of the L2 block the contract data was deployed in. - * @returns True if the operation is successful (always in this implementation). 
- */ - public addExtendedContractData(data: ExtendedContractData[], blockNum: number): Promise { - // Add to the contracts mapping - for (const contractData of data) { - const key = contractData.contractData.contractAddress.toString(); - this.extendedContractData.set(key, contractData); - } - - // Add the index per block - if (this.extendedContractDataByBlock[blockNum]?.length) { - this.extendedContractDataByBlock[blockNum]?.push(...data); - } else { - this.extendedContractDataByBlock[blockNum] = [...data]; - } - return Promise.resolve(true); - } - - /** - * Gets up to `limit` amount of L2 blocks starting from `from`. - * @param from - Number of the first block to return (inclusive). - * @param limit - The number of blocks to return. - * @returns The requested L2 blocks. - * @remarks When "from" is smaller than genesis block number, blocks from the beginning are returned. - */ - public getBlocks(from: number, limit: number): Promise { - // Return an empty array if we are outside of range - if (limit < 1) { - throw new Error(`Invalid limit: ${limit}`); - } - - const fromIndex = Math.max(from - INITIAL_L2_BLOCK_NUM, 0); - if (fromIndex >= this.l2BlockContexts.length) { - return Promise.resolve([]); - } - - const toIndex = fromIndex + limit; - return Promise.resolve(this.l2BlockContexts.slice(fromIndex, toIndex).map(blockContext => blockContext.block)); - } - - /** - * Gets an l2 tx. - * @param txHash - The txHash of the l2 tx. - * @returns The requested L2 tx. - */ - public getL2Tx(txHash: TxHash): Promise { - const l2Tx = this.l2Txs.find(tx => tx.txHash.equals(txHash)); - return Promise.resolve(l2Tx); - } - - /** - * Gets up to `limit` amount of pending L1 to L2 messages, sorted by fee - * @param limit - The number of messages to return (by default NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP). - * @returns The requested L1 to L2 message keys. 
- */ - public getPendingL1ToL2MessageKeys(limit: number = NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP): Promise { - return Promise.resolve(this.pendingL1ToL2Messages.getMessageKeys(limit)); - } - - /** - * Gets the confirmed L1 to L2 message corresponding to the given message key. - * @param messageKey - The message key to look up. - * @returns The requested L1 to L2 message or throws if not found. - */ - public getConfirmedL1ToL2Message(messageKey: Fr): Promise { - const message = this.confirmedL1ToL2Messages.getMessage(messageKey); - if (!message) { - throw new Error(`L1 to L2 Message with key ${messageKey.toString()} not found in the confirmed messages store`); - } - return Promise.resolve(message); - } - - /** - * Gets up to `limit` amount of logs starting from `from`. - * @param from - Number of the L2 block to which corresponds the first logs to be returned. - * @param limit - The number of logs to return. - * @param logType - Specifies whether to return encrypted or unencrypted logs. - * @returns The requested logs. - */ - getLogs(from: number, limit: number, logType: LogType): Promise { - if (from < INITIAL_L2_BLOCK_NUM || limit < 1) { - throw new Error(`Invalid limit: ${limit}`); - } - const logs = logType === LogType.ENCRYPTED ? this.encryptedLogsPerBlock : this.unencryptedLogsPerBlock; - if (from > logs.length) { - return Promise.resolve([]); - } - const startIndex = from - INITIAL_L2_BLOCK_NUM; - const endIndex = startIndex + limit; - return Promise.resolve(logs.slice(startIndex, endIndex)); - } - - /** - * Gets unencrypted logs based on the provided filter. - * @param filter - The filter to apply to the logs. - * @returns The requested logs. - * @remarks Works by doing an intersection of all params in the filter. 
- */ - getUnencryptedLogs(filter: LogFilter): Promise { - let txHash: TxHash | undefined; - let fromBlockIndex = 0; - let toBlockIndex = this.unencryptedLogsPerBlock.length; - let txIndexInBlock = 0; - let logIndexInTx = 0; - - if (filter.afterLog) { - // Continuation parameter is set --> tx hash is ignored - if (filter.fromBlock == undefined || filter.fromBlock <= filter.afterLog.blockNumber) { - fromBlockIndex = filter.afterLog.blockNumber - INITIAL_L2_BLOCK_NUM; - txIndexInBlock = filter.afterLog.txIndex; - logIndexInTx = filter.afterLog.logIndex + 1; // We want to start from the next log - } else { - fromBlockIndex = filter.fromBlock - INITIAL_L2_BLOCK_NUM; - } - } else { - txHash = filter.txHash; - - if (filter.fromBlock !== undefined) { - fromBlockIndex = filter.fromBlock - INITIAL_L2_BLOCK_NUM; - } - } - - if (filter.toBlock !== undefined) { - toBlockIndex = filter.toBlock - INITIAL_L2_BLOCK_NUM; - } - - // Ensure the indices are within block array bounds - fromBlockIndex = Math.max(fromBlockIndex, 0); - toBlockIndex = Math.min(toBlockIndex, this.unencryptedLogsPerBlock.length); - - if (fromBlockIndex > this.unencryptedLogsPerBlock.length || toBlockIndex < fromBlockIndex || toBlockIndex <= 0) { - return Promise.resolve({ - logs: [], - maxLogsHit: false, - }); - } - - const contractAddress = filter.contractAddress; - const selector = filter.selector; - - const logs: ExtendedUnencryptedL2Log[] = []; - - for (; fromBlockIndex < toBlockIndex; fromBlockIndex++) { - const blockContext = this.l2BlockContexts[fromBlockIndex]; - const blockLogs = this.unencryptedLogsPerBlock[fromBlockIndex]; - for (; txIndexInBlock < blockLogs.txLogs.length; txIndexInBlock++) { - const txLogs = blockLogs.txLogs[txIndexInBlock].unrollLogs().map(log => UnencryptedL2Log.fromBuffer(log)); - for (; logIndexInTx < txLogs.length; logIndexInTx++) { - const log = txLogs[logIndexInTx]; - if ( - (!txHash || blockContext.getTxHash(txIndexInBlock).equals(txHash)) && - (!contractAddress || 
log.contractAddress.equals(contractAddress)) && - (!selector || log.selector.equals(selector)) - ) { - logs.push( - new ExtendedUnencryptedL2Log(new LogId(blockContext.block.number, txIndexInBlock, logIndexInTx), log), - ); - if (logs.length === this.maxLogs) { - return Promise.resolve({ - logs, - maxLogsHit: true, - }); - } - } - } - logIndexInTx = 0; - } - txIndexInBlock = 0; - } - - return Promise.resolve({ - logs, - maxLogsHit: false, - }); - } - - /** - * Get the extended contract data for this contract. - * @param contractAddress - The contract data address. - * @returns The extended contract data or undefined if not found. - */ - getExtendedContractData(contractAddress: AztecAddress): Promise { - const result = this.extendedContractData.get(contractAddress.toString()); - return Promise.resolve(result); - } - - /** - * Lookup all contract data in an L2 block. - * @param blockNum - The block number to get all contract data from. - * @returns All extended contract data in the block (if found). - */ - public getExtendedContractDataInBlock(blockNum: number): Promise { - if (blockNum > this.l2BlockContexts.length) { - return Promise.resolve([]); - } - return Promise.resolve(this.extendedContractDataByBlock[blockNum] || []); - } - - /** - * Get basic info for an L2 contract. - * Contains contract address & the ethereum portal address. - * @param contractAddress - The contract data address. - * @returns ContractData with the portal address (if we didn't throw an error). - */ - public getContractData(contractAddress: AztecAddress): Promise { - if (contractAddress.isZero()) { - return Promise.resolve(undefined); - } - for (const blockContext of this.l2BlockContexts) { - for (const contractData of blockContext.block.newContractData) { - if (contractData.contractAddress.equals(contractAddress)) { - return Promise.resolve(contractData); - } - } - } - return Promise.resolve(undefined); - } - - /** - * Get basic info for an all L2 contracts deployed in a block. 
- * Contains contract address & the ethereum portal address. - * @param l2BlockNum - Number of the L2 block where contracts were deployed. - * @returns ContractData with the portal address (if we didn't throw an error). - */ - public getContractDataInBlock(l2BlockNum: number): Promise { - if (l2BlockNum > this.l2BlockContexts.length) { - return Promise.resolve([]); - } - const block = this.l2BlockContexts[l2BlockNum].block; - return Promise.resolve(block.newContractData); - } - - /** - * Gets the number of the latest L2 block processed. - * @returns The number of the latest L2 block processed. + * Gets the number of the latest L1 block processed. */ - public getBlockNumber(): Promise { - if (this.l2BlockContexts.length === 0) { - return Promise.resolve(INITIAL_L2_BLOCK_NUM - 1); - } - return Promise.resolve(this.l2BlockContexts[this.l2BlockContexts.length - 1].block.number); - } + getL1BlockNumber(): Promise; } diff --git a/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts b/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts new file mode 100644 index 00000000000..9686eab0ea2 --- /dev/null +++ b/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts @@ -0,0 +1,638 @@ +import { AztecAddress, Fr } from '@aztec/circuits.js'; +import { randomBytes } from '@aztec/foundation/crypto'; +import { + CancelledL1ToL2Message, + ExtendedContractData, + INITIAL_L2_BLOCK_NUM, + L1ToL2Message, + L2Block, + L2BlockContext, + LogId, + LogType, + PendingL1ToL2Message, + TxHash, + UnencryptedL2Log, +} from '@aztec/types'; +import '@aztec/types/jest'; + +import { ArchiverDataStore } from './archiver_store.js'; + +/** + * @param testName - The name of the test suite. + * @param getStore - Returns an instance of a store that's already been initialized. 
+ */ +export function describeArchiverDataStore(testName: string, getStore: () => ArchiverDataStore) { + describe(testName, () => { + let store: ArchiverDataStore; + let blocks: L2Block[]; + const blockTests: [number, number, () => L2Block[]][] = [ + [1, 1, () => blocks.slice(0, 1)], + [10, 1, () => blocks.slice(9, 10)], + [1, 10, () => blocks.slice(0, 10)], + [2, 5, () => blocks.slice(1, 6)], + [5, 2, () => blocks.slice(4, 6)], + ]; + + beforeEach(() => { + store = getStore(); + blocks = Array.from({ length: 10 }).map((_, i) => { + const block = L2Block.random(i + 1); + block.setL1BlockNumber(BigInt(i + 1)); + return block; + }); + }); + + describe('addBlocks', () => { + it('returns success when adding blocks', async () => { + await expect(store.addBlocks(blocks)).resolves.toBe(true); + }); + + it('allows duplicate blocks', async () => { + await store.addBlocks(blocks); + await expect(store.addBlocks(blocks)).resolves.toBe(true); + }); + }); + + describe('getBlocks', () => { + beforeEach(async () => { + await store.addBlocks(blocks); + }); + + it.each(blockTests)('retrieves previously stored blocks', async (start, limit, getExpectedBlocks) => { + await expect(store.getBlocks(start, limit)).resolves.toEqual(getExpectedBlocks()); + }); + + it('returns an empty array if no blocks are found', async () => { + await expect(store.getBlocks(12, 1)).resolves.toEqual([]); + }); + + it('throws an error if limit is invalid', async () => { + await expect(store.getBlocks(1, 0)).rejects.toThrowError('Invalid limit: 0'); + }); + + it('resets `from` to the first block if it is out of range', async () => { + await expect(store.getBlocks(INITIAL_L2_BLOCK_NUM - 100, 1)).resolves.toEqual(blocks.slice(0, 1)); + }); + }); + + describe('getBlockNumber', () => { + it('returns the block number before INITIAL_L2_BLOCK_NUM if no blocks have been added', async () => { + await expect(store.getBlockNumber()).resolves.toEqual(INITIAL_L2_BLOCK_NUM - 1); + }); + + it("returns the most recently 
added block's number", async () => { + await store.addBlocks(blocks); + await expect(store.getBlockNumber()).resolves.toEqual(blocks.at(-1)!.number); + }); + }); + + describe('getL1BlockNumber', () => { + it('returns 0n if no blocks have been added', async () => { + await expect(store.getL1BlockNumber()).resolves.toEqual(0n); + }); + + it('returns the L1 block number in which the most recent L2 block was published', async () => { + await store.addBlocks(blocks); + await expect(store.getL1BlockNumber()).resolves.toEqual(blocks.at(-1)!.getL1BlockNumber()); + }); + }); + + describe('addLogs', () => { + it('adds encrypted & unencrypted logs', async () => { + await expect( + store.addLogs(blocks[0].newEncryptedLogs, blocks[0].newUnencryptedLogs, blocks[0].number), + ).resolves.toEqual(true); + }); + }); + + describe.each([ + ['encrypted', LogType.ENCRYPTED], + ['unencrypted', LogType.UNENCRYPTED], + ])('getLogs (%s)', (_, logType) => { + beforeEach(async () => { + await Promise.all( + blocks.map(block => store.addLogs(block.newEncryptedLogs, block.newUnencryptedLogs, block.number)), + ); + }); + + it.each(blockTests)('retrieves previously stored logs', async (from, limit, getExpectedBlocks) => { + const expectedLogs = getExpectedBlocks().map(block => + logType === LogType.ENCRYPTED ? 
block.newEncryptedLogs : block.newUnencryptedLogs, + ); + const actualLogs = await store.getLogs(from, limit, logType); + expect(actualLogs).toEqual(expectedLogs); + }); + }); + + describe('getL2Tx', () => { + beforeEach(async () => { + await Promise.all( + blocks.map(block => store.addLogs(block.newEncryptedLogs, block.newUnencryptedLogs, block.number)), + ); + await store.addBlocks(blocks); + }); + + it.each([ + () => blocks[0].getTx(0), + () => blocks[9].getTx(3), + () => blocks[3].getTx(1), + () => blocks[5].getTx(2), + () => blocks[1].getTx(0), + ])('retrieves a previously stored transaction', async getExpectedTx => { + const expectedTx = getExpectedTx(); + const actualTx = await store.getL2Tx(expectedTx.txHash); + expect(actualTx).toEqual(expectedTx); + }); + + it('returns undefined if tx is not found', async () => { + await expect(store.getL2Tx(new TxHash(Fr.random().toBuffer()))).resolves.toBeUndefined(); + }); + }); + + describe('addPendingL1ToL2Messages', () => { + it('stores pending L1 to L2 messages', async () => { + await expect( + store.addPendingL1ToL2Messages([new PendingL1ToL2Message(L1ToL2Message.random(Fr.random()), 1n, 0)]), + ).resolves.toEqual(true); + }); + + it('allows duplicate pending messages in different positions in the same block', async () => { + const message = L1ToL2Message.random(Fr.random()); + await expect( + store.addPendingL1ToL2Messages([ + new PendingL1ToL2Message(message, 1n, 0), + new PendingL1ToL2Message(message, 1n, 1), + ]), + ).resolves.toEqual(true); + + await expect(store.getPendingL1ToL2MessageKeys(2)).resolves.toEqual([message.entryKey!, message.entryKey!]); + }); + + it('allows duplicate pending messages in different blocks', async () => { + const message = L1ToL2Message.random(Fr.random()); + await expect( + store.addPendingL1ToL2Messages([ + new PendingL1ToL2Message(message, 1n, 0), + new PendingL1ToL2Message(message, 2n, 0), + ]), + ).resolves.toEqual(true); + + await 
expect(store.getPendingL1ToL2MessageKeys(2)).resolves.toEqual([message.entryKey!, message.entryKey!]); + }); + + it('is idempotent', async () => { + const message = L1ToL2Message.random(Fr.random()); + await expect( + store.addPendingL1ToL2Messages([ + new PendingL1ToL2Message(message, 1n, 0), + new PendingL1ToL2Message(message, 1n, 0), + ]), + ).resolves.toEqual(true); + await expect(store.getPendingL1ToL2MessageKeys(2)).resolves.toEqual([message.entryKey!]); + }); + }); + + describe('getPendingL1ToL2Messages', () => { + it('returns previously stored pending L1 to L2 messages', async () => { + const messageCtx = new PendingL1ToL2Message(L1ToL2Message.random(Fr.random()), 1n, 0); + await store.addPendingL1ToL2Messages([messageCtx]); + await expect(store.getPendingL1ToL2MessageKeys(1)).resolves.toEqual([messageCtx.message.entryKey!]); + }); + + it('returns messages ordered by fee', async () => { + const messageCtxs = Array.from({ length: 3 }).map( + (_, i) => new PendingL1ToL2Message(L1ToL2Message.random(Fr.random()), 1n, i), + ); + // add a duplicate message + messageCtxs.push(new PendingL1ToL2Message(messageCtxs[0].message, 1n, 3)); + + await store.addPendingL1ToL2Messages(messageCtxs); + + messageCtxs.sort((a, b) => b.message.fee - a.message.fee); + await expect(store.getPendingL1ToL2MessageKeys(messageCtxs.length)).resolves.toEqual( + messageCtxs.map(({ message }) => message.entryKey!), + ); + }); + + it('returns an empty array if no messages are found', async () => { + await expect(store.getPendingL1ToL2MessageKeys(1)).resolves.toEqual([]); + }); + }); + + describe('confirmL1ToL2Messages', () => { + it('updates a message from pending to confirmed', async () => { + const messageCtx = new PendingL1ToL2Message(L1ToL2Message.random(Fr.random()), 1n, 0); + await store.addPendingL1ToL2Messages([messageCtx]); + await expect(store.confirmL1ToL2Messages([messageCtx.message.entryKey!])).resolves.toEqual(true); + }); + + it('once confirmed, a message is no longer 
pending', async () => { + const pendingMessage = new PendingL1ToL2Message(L1ToL2Message.random(Fr.random()), 1n, 0); + await store.addPendingL1ToL2Messages([pendingMessage]); + await store.confirmL1ToL2Messages([pendingMessage.message.entryKey!]); + await expect(store.getPendingL1ToL2MessageKeys(1)).resolves.toEqual([]); + }); + + it('once confirmed a message can also be pending if added again', async () => { + const message = L1ToL2Message.random(Fr.random()); + await store.addPendingL1ToL2Messages([new PendingL1ToL2Message(message, 1n, 0)]); + await store.confirmL1ToL2Messages([message.entryKey!]); + await store.addPendingL1ToL2Messages([new PendingL1ToL2Message(message, 2n, 0)]); + await expect(store.getPendingL1ToL2MessageKeys(2)).resolves.toEqual([message.entryKey!]); + }); + + it('once confirmed a message can remain pending if more of it were pending', async () => { + const message = L1ToL2Message.random(Fr.random()); + await store.addPendingL1ToL2Messages([ + new PendingL1ToL2Message(message, 1n, 0), + new PendingL1ToL2Message(message, 1n, 1), + ]); + + await store.confirmL1ToL2Messages([message.entryKey!]); + await expect(store.getPendingL1ToL2MessageKeys(1)).resolves.toEqual([message.entryKey!]); + }); + }); + + describe('cancelL1ToL2Messages', () => { + it('cancels a pending message', async () => { + const message = L1ToL2Message.random(Fr.random()); + await store.addPendingL1ToL2Messages([new PendingL1ToL2Message(message, 1n, 0)]); + await store.cancelPendingL1ToL2Messages([new CancelledL1ToL2Message(message.entryKey!, 1n, 0)]); + await expect(store.getPendingL1ToL2MessageKeys(1)).resolves.toEqual([]); + }); + + it('cancels only one of the pending messages if duplicates exist', async () => { + const message = L1ToL2Message.random(Fr.random()); + await store.addPendingL1ToL2Messages([ + new PendingL1ToL2Message(message, 1n, 0), + new PendingL1ToL2Message(message, 1n, 1), + ]); + await store.cancelPendingL1ToL2Messages([new 
CancelledL1ToL2Message(message.entryKey!, 2n, 0)]); + await expect(store.getPendingL1ToL2MessageKeys(2)).resolves.toEqual([message.entryKey]); + }); + + it('once canceled a message can also be pending if added again', async () => { + const message = L1ToL2Message.random(Fr.random()); + await store.addPendingL1ToL2Messages([new PendingL1ToL2Message(message, 1n, 0)]); + + await store.cancelPendingL1ToL2Messages([new CancelledL1ToL2Message(message.entryKey!, 1n, 0)]); + await expect(store.getPendingL1ToL2MessageKeys(1)).resolves.toEqual([]); + + await store.addPendingL1ToL2Messages([new PendingL1ToL2Message(message, 2n, 0)]); + await expect(store.getPendingL1ToL2MessageKeys(1)).resolves.toEqual([message.entryKey!]); + }); + + it('allows adding and cancelling in the same block', async () => { + const message = L1ToL2Message.random(Fr.random()); + await store.addPendingL1ToL2Messages([new PendingL1ToL2Message(message, 1n, 0)]); + await store.cancelPendingL1ToL2Messages([new CancelledL1ToL2Message(message.entryKey!, 1n, 0)]); + await expect(store.getPendingL1ToL2MessageKeys(1)).resolves.toEqual([]); + }); + + it('allows duplicate cancellations in different positions in the same block', async () => { + const message = L1ToL2Message.random(Fr.random()); + await store.addPendingL1ToL2Messages([ + new PendingL1ToL2Message(message, 1n, 0), + new PendingL1ToL2Message(message, 1n, 1), + ]); + + await store.cancelPendingL1ToL2Messages([ + new CancelledL1ToL2Message(message.entryKey!, 2n, 0), + new CancelledL1ToL2Message(message.entryKey!, 2n, 1), + ]); + + await expect(store.getPendingL1ToL2MessageKeys(2)).resolves.toEqual([]); + }); + + it('allows duplicate cancellations in different blocks', async () => { + const message = L1ToL2Message.random(Fr.random()); + await store.addPendingL1ToL2Messages([ + new PendingL1ToL2Message(message, 1n, 0), + new PendingL1ToL2Message(message, 1n, 1), + ]); + + await store.cancelPendingL1ToL2Messages([ + new
CancelledL1ToL2Message(message.entryKey!, 2n, 0), + new CancelledL1ToL2Message(message.entryKey!, 3n, 0), + ]); + + await expect(store.getPendingL1ToL2MessageKeys(2)).resolves.toEqual([]); + }); + + it('is idempotent', async () => { + const message = L1ToL2Message.random(Fr.random()); + await store.addPendingL1ToL2Messages([ + new PendingL1ToL2Message(message, 1n, 0), + new PendingL1ToL2Message(message, 1n, 1), + ]); + + await store.cancelPendingL1ToL2Messages([ + new CancelledL1ToL2Message(message.entryKey!, 2n, 0), + new CancelledL1ToL2Message(message.entryKey!, 2n, 0), + ]); + + await expect(store.getPendingL1ToL2MessageKeys(2)).resolves.toEqual([message.entryKey!]); + }); + }); + + describe('getContractData', () => { + let block: L2Block; + beforeEach(async () => { + block = L2Block.random(1); + await store.addBlocks([block]); + }); + + it('returns previously stored contract data', async () => { + await expect(store.getContractData(block.newContractData[0].contractAddress)).resolves.toEqual( + block.newContractData[0], + ); + }); + + it('returns undefined if contract data is not found', async () => { + await expect(store.getContractData(AztecAddress.random())).resolves.toBeUndefined(); + }); + }); + + describe('getContractDataInBlock', () => { + let block: L2Block; + beforeEach(async () => { + block = L2Block.random(1); + await store.addBlocks([block]); + }); + + it('returns the contract data for a known block', async () => { + await expect(store.getContractDataInBlock(block.number)).resolves.toEqual(block.newContractData); + }); + + it('returns an empty array if contract data is not found', async () => { + await expect(store.getContractDataInBlock(block.number + 1)).resolves.toEqual([]); + }); + }); + + describe('addExtendedContractData', () => { + it('stores extended contract data', async () => { + const block = L2Block.random(1); + await store.addBlocks([block]); + await expect(store.addExtendedContractData([ExtendedContractData.random()], 
block.number)).resolves.toEqual( + true, + ); + }); + + it('stores extended contract data for an unknown block', async () => { + await expect(store.addExtendedContractData([ExtendedContractData.random()], 1)).resolves.toEqual(true); + }); + + it('"pushes" extended contract data and does not overwrite', async () => { + const block = L2Block.random(1); + await store.addBlocks([block]); + + const firstContract = ExtendedContractData.random(block.newContractData[0]); + await store.addExtendedContractData([firstContract], block.number); + + const secondContract = ExtendedContractData.random(block.newContractData[1]); + await store.addExtendedContractData([secondContract], block.number); + + await expect(store.getExtendedContractDataInBlock(block.number)).resolves.toEqual([ + firstContract, + secondContract, + ]); + }); + }); + + describe('getExtendedContractData', () => { + let block: L2Block; + let extendedContractData: ExtendedContractData; + beforeEach(async () => { + block = L2Block.random(1); + extendedContractData = ExtendedContractData.random(block.newContractData[0]); + await store.addBlocks([block]); + await store.addExtendedContractData([extendedContractData], block.number); + }); + + it('returns previously stored extended contract data', async () => { + await expect(store.getExtendedContractData(extendedContractData.contractData.contractAddress)).resolves.toEqual( + extendedContractData, + ); + }); + + it('returns undefined if extended contract data is not found', async () => { + await expect(store.getExtendedContractData(AztecAddress.random())).resolves.toBeUndefined(); + }); + }); + + describe('getExtendedContractDataInBlock', () => { + let block: L2Block; + let extendedContractData: ExtendedContractData; + beforeEach(async () => { + block = L2Block.random(1); + extendedContractData = ExtendedContractData.random(block.newContractData[0]); + await store.addBlocks([block]); + await store.addExtendedContractData([extendedContractData], block.number); + }); + + 
it('returns previously stored extended contract data', async () => { + await expect(store.getExtendedContractDataInBlock(block.number)).resolves.toEqual([extendedContractData]); + }); + + it('returns an empty array if extended contract data is not found for the block', async () => { + await expect(store.getExtendedContractDataInBlock(block.number + 1)).resolves.toEqual([]); + }); + }); + + describe('getUnencryptedLogs', () => { + const txsPerBlock = 4; + const numPublicFunctionCalls = 3; + const numUnencryptedLogs = 4; + const numBlocks = 10; + let blocks: L2Block[]; + + beforeEach(async () => { + blocks = Array(numBlocks) + .fill(0) + .map((_, index: number) => + L2Block.random(index + 1, txsPerBlock, 2, numPublicFunctionCalls, 2, numUnencryptedLogs), + ); + + await store.addBlocks(blocks); + await Promise.all( + blocks.map(block => store.addLogs(block.newEncryptedLogs, block.newUnencryptedLogs, block.number)), + ); + }); + + it('"txHash" filter param is respected', async () => { + // get random tx + const targetBlockIndex = Math.floor(Math.random() * numBlocks); + const targetTxIndex = Math.floor(Math.random() * txsPerBlock); + const targetTxHash = new L2BlockContext(blocks[targetBlockIndex]).getTxHash(targetTxIndex); + + const response = await store.getUnencryptedLogs({ txHash: targetTxHash }); + const logs = response.logs; + + expect(response.maxLogsHit).toBeFalsy(); + + const expectedNumLogs = numPublicFunctionCalls * numUnencryptedLogs; + expect(logs.length).toEqual(expectedNumLogs); + + const targetBlockNumber = targetBlockIndex + INITIAL_L2_BLOCK_NUM; + for (const log of logs) { + expect(log.id.blockNumber).toEqual(targetBlockNumber); + expect(log.id.txIndex).toEqual(targetTxIndex); + } + }); + + it('"fromBlock" and "toBlock" filter params are respected', async () => { + // Set "fromBlock" and "toBlock" + const fromBlock = 3; + const toBlock = 7; + + const response = await store.getUnencryptedLogs({ fromBlock, toBlock }); + const logs = response.logs; + 
expect(response.maxLogsHit).toBeFalsy(); + + const expectedNumLogs = txsPerBlock * numPublicFunctionCalls * numUnencryptedLogs * (toBlock - fromBlock); + expect(logs.length).toEqual(expectedNumLogs); + + for (const log of logs) { + const blockNumber = log.id.blockNumber; + expect(blockNumber).toBeGreaterThanOrEqual(fromBlock); + expect(blockNumber).toBeLessThan(toBlock); + } + }); + + it('"contractAddress" filter param is respected', async () => { + // Get a random contract address from the logs + const targetBlockIndex = Math.floor(Math.random() * numBlocks); + const targetTxIndex = Math.floor(Math.random() * txsPerBlock); + const targetFunctionLogIndex = Math.floor(Math.random() * numPublicFunctionCalls); + const targetLogIndex = Math.floor(Math.random() * numUnencryptedLogs); + const targetContractAddress = UnencryptedL2Log.fromBuffer( + blocks[targetBlockIndex].newUnencryptedLogs!.txLogs[targetTxIndex].functionLogs[targetFunctionLogIndex].logs[ + targetLogIndex + ], + ).contractAddress; + + const response = await store.getUnencryptedLogs({ contractAddress: targetContractAddress }); + + expect(response.maxLogsHit).toBeFalsy(); + + for (const extendedLog of response.logs) { + expect(extendedLog.log.contractAddress.equals(targetContractAddress)).toBeTruthy(); + } + }); + + it('"selector" filter param is respected', async () => { + // Get a random selector from the logs + const targetBlockIndex = Math.floor(Math.random() * numBlocks); + const targetTxIndex = Math.floor(Math.random() * txsPerBlock); + const targetFunctionLogIndex = Math.floor(Math.random() * numPublicFunctionCalls); + const targetLogIndex = Math.floor(Math.random() * numUnencryptedLogs); + const targetSelector = UnencryptedL2Log.fromBuffer( + blocks[targetBlockIndex].newUnencryptedLogs!.txLogs[targetTxIndex].functionLogs[targetFunctionLogIndex].logs[ + targetLogIndex + ], + ).selector; + + const response = await store.getUnencryptedLogs({ selector: targetSelector }); + + 
expect(response.maxLogsHit).toBeFalsy(); + + for (const extendedLog of response.logs) { + expect(extendedLog.log.selector.equals(targetSelector)).toBeTruthy(); + } + }); + + it('"afterLog" filter param is respected', async () => { + // Get a random log as reference + const targetBlockIndex = Math.floor(Math.random() * numBlocks); + const targetTxIndex = Math.floor(Math.random() * txsPerBlock); + const targetLogIndex = Math.floor(Math.random() * numUnencryptedLogs); + + const afterLog = new LogId(targetBlockIndex + INITIAL_L2_BLOCK_NUM, targetTxIndex, targetLogIndex); + + const response = await store.getUnencryptedLogs({ afterLog }); + const logs = response.logs; + + expect(response.maxLogsHit).toBeFalsy(); + + for (const log of logs) { + const logId = log.id; + expect(logId.blockNumber).toBeGreaterThanOrEqual(afterLog.blockNumber); + if (logId.blockNumber === afterLog.blockNumber) { + expect(logId.txIndex).toBeGreaterThanOrEqual(afterLog.txIndex); + if (logId.txIndex === afterLog.txIndex) { + expect(logId.logIndex).toBeGreaterThan(afterLog.logIndex); + } + } + } + }); + + it('"txHash" filter param is ignored when "afterLog" is set', async () => { + // Get random txHash + const txHash = new TxHash(randomBytes(TxHash.SIZE)); + const afterLog = new LogId(1, 0, 0); + + const response = await store.getUnencryptedLogs({ txHash, afterLog }); + expect(response.logs.length).toBeGreaterThan(1); + }); + + it('intersecting works', async () => { + let logs = (await store.getUnencryptedLogs({ fromBlock: -10, toBlock: -5 })).logs; + expect(logs.length).toBe(0); + + // "fromBlock" gets correctly trimmed to range and "toBlock" is exclusive + logs = (await store.getUnencryptedLogs({ fromBlock: -10, toBlock: 5 })).logs; + let blockNumbers = new Set(logs.map(log => log.id.blockNumber)); + expect(blockNumbers).toEqual(new Set([1, 2, 3, 4])); + + // "toBlock" should be exclusive + logs = (await store.getUnencryptedLogs({ fromBlock: 1, toBlock: 1 })).logs; + expect(logs.length).toBe(0); 
+ + logs = (await store.getUnencryptedLogs({ fromBlock: 10, toBlock: 5 })).logs; + expect(logs.length).toBe(0); + + // both "fromBlock" and "toBlock" get correctly capped to range and logs from all blocks are returned + logs = (await store.getUnencryptedLogs({ fromBlock: -100, toBlock: +100 })).logs; + blockNumbers = new Set(logs.map(log => log.id.blockNumber)); + expect(blockNumbers.size).toBe(numBlocks); + + // intersecting with "afterLog" works + logs = (await store.getUnencryptedLogs({ fromBlock: 2, toBlock: 5, afterLog: new LogId(4, 0, 0) })).logs; + blockNumbers = new Set(logs.map(log => log.id.blockNumber)); + expect(blockNumbers).toEqual(new Set([4])); + + logs = (await store.getUnencryptedLogs({ toBlock: 5, afterLog: new LogId(5, 1, 0) })).logs; + expect(logs.length).toBe(0); + + logs = (await store.getUnencryptedLogs({ fromBlock: 2, toBlock: 5, afterLog: new LogId(100, 0, 0) })).logs; + expect(logs.length).toBe(0); + }); + + it('"txIndex" and "logIndex" are respected when "afterLog.blockNumber" is equal to "fromBlock"', async () => { + // Get a random log as reference + const targetBlockIndex = Math.floor(Math.random() * numBlocks); + const targetTxIndex = Math.floor(Math.random() * txsPerBlock); + const targetLogIndex = Math.floor(Math.random() * numUnencryptedLogs); + + const afterLog = new LogId(targetBlockIndex + INITIAL_L2_BLOCK_NUM, targetTxIndex, targetLogIndex); + + const response = await store.getUnencryptedLogs({ afterLog, fromBlock: afterLog.blockNumber }); + const logs = response.logs; + + expect(response.maxLogsHit).toBeFalsy(); + + for (const log of logs) { + const logId = log.id; + expect(logId.blockNumber).toBeGreaterThanOrEqual(afterLog.blockNumber); + if (logId.blockNumber === afterLog.blockNumber) { + expect(logId.txIndex).toBeGreaterThanOrEqual(afterLog.txIndex); + if (logId.txIndex === afterLog.txIndex) { + expect(logId.logIndex).toBeGreaterThan(afterLog.logIndex); + } + } + } + }); + }); + }); +} diff --git 
a/yarn-project/archiver/src/archiver/data_retrieval.ts b/yarn-project/archiver/src/archiver/data_retrieval.ts index 67d391cc336..53b22b1a0f4 100644 --- a/yarn-project/archiver/src/archiver/data_retrieval.ts +++ b/yarn-project/archiver/src/archiver/data_retrieval.ts @@ -1,6 +1,5 @@ import { EthAddress } from '@aztec/foundation/eth-address'; -import { Fr } from '@aztec/foundation/fields'; -import { ExtendedContractData, L1ToL2Message, L2Block } from '@aztec/types'; +import { CancelledL1ToL2Message, ExtendedContractData, L2Block, PendingL1ToL2Message } from '@aztec/types'; import { PublicClient } from 'viem'; @@ -124,8 +123,8 @@ export async function retrieveNewPendingL1ToL2Messages( blockUntilSynced: boolean, searchStartBlock: bigint, searchEndBlock: bigint, -): Promise> { - const retrievedNewL1ToL2Messages: L1ToL2Message[] = []; +): Promise> { + const retrievedNewL1ToL2Messages: PendingL1ToL2Message[] = []; do { if (searchStartBlock > searchEndBlock) { break; @@ -162,8 +161,8 @@ export async function retrieveNewCancelledL1ToL2Messages( blockUntilSynced: boolean, searchStartBlock: bigint, searchEndBlock: bigint, -): Promise> { - const retrievedNewCancelledL1ToL2Messages: Fr[] = []; +): Promise> { + const retrievedNewCancelledL1ToL2Messages: CancelledL1ToL2Message[] = []; do { if (searchStartBlock > searchEndBlock) { break; diff --git a/yarn-project/archiver/src/archiver/eth_log_handlers.ts b/yarn-project/archiver/src/archiver/eth_log_handlers.ts index 9a8e82ee228..99ed516b80b 100644 --- a/yarn-project/archiver/src/archiver/eth_log_handlers.ts +++ b/yarn-project/archiver/src/archiver/eth_log_handlers.ts @@ -4,6 +4,7 @@ import { Fr, Point } from '@aztec/foundation/fields'; import { ContractDeploymentEmitterAbi, InboxAbi, RollupAbi } from '@aztec/l1-artifacts'; import { BufferReader, + CancelledL1ToL2Message, ContractData, EncodedContractFunction, ExtendedContractData, @@ -11,6 +12,7 @@ import { L1ToL2Message, L2Actor, L2Block, + PendingL1ToL2Message, } from 
'@aztec/types'; import { Hex, Log, PublicClient, decodeFunctionData, getAbiItem, getAddress, hexToBytes } from 'viem'; @@ -22,20 +24,24 @@ import { Hex, Log, PublicClient, decodeFunctionData, getAbiItem, getAddress, hex */ export function processPendingL1ToL2MessageAddedLogs( logs: Log[], -): L1ToL2Message[] { - const l1ToL2Messages: L1ToL2Message[] = []; - for (const log of logs) { +): PendingL1ToL2Message[] { + const l1ToL2Messages: PendingL1ToL2Message[] = []; + for (const [index, log] of logs.entries()) { const { sender, senderChainId, recipient, recipientVersion, content, secretHash, deadline, fee, entryKey } = log.args; l1ToL2Messages.push( - new L1ToL2Message( - new L1Actor(EthAddress.fromString(sender), Number(senderChainId)), - new L2Actor(AztecAddress.fromString(recipient), Number(recipientVersion)), - Fr.fromString(content), - Fr.fromString(secretHash), - deadline, - Number(fee), - Fr.fromString(entryKey), + new PendingL1ToL2Message( + new L1ToL2Message( + new L1Actor(EthAddress.fromString(sender), Number(senderChainId)), + new L2Actor(AztecAddress.fromString(recipient), Number(recipientVersion)), + Fr.fromString(content), + Fr.fromString(secretHash), + deadline, + Number(fee), + Fr.fromString(entryKey), + ), + log.blockNumber!, + index, ), ); } @@ -49,10 +55,10 @@ export function processPendingL1ToL2MessageAddedLogs( */ export function processCancelledL1ToL2MessagesLogs( logs: Log[], -): Fr[] { - const cancelledL1ToL2Messages: Fr[] = []; - for (const log of logs) { - cancelledL1ToL2Messages.push(Fr.fromString(log.args.entryKey)); +): CancelledL1ToL2Message[] { + const cancelledL1ToL2Messages: CancelledL1ToL2Message[] = []; + for (const [index, log] of logs.entries()) { + cancelledL1ToL2Messages.push(new CancelledL1ToL2Message(Fr.fromString(log.args.entryKey), log.blockNumber!, index)); } return cancelledL1ToL2Messages; } @@ -67,7 +73,7 @@ export async function processBlockLogs( publicClient: PublicClient, expectedL2BlockNumber: bigint, logs: Log[], -) { 
+): Promise { const retrievedBlocks: L2Block[] = []; for (const log of logs) { const blockNum = log.args.blockNum; @@ -76,6 +82,7 @@ export async function processBlockLogs( } // TODO: Fetch blocks from calldata in parallel const newBlock = await getBlockFromCallData(publicClient, log.transactionHash!, log.args.blockNum); + newBlock.setL1BlockNumber(log.blockNumber!); retrievedBlocks.push(newBlock); expectedL2BlockNumber++; } diff --git a/yarn-project/archiver/src/archiver/index.ts b/yarn-project/archiver/src/archiver/index.ts index 201381aa0be..0ef2b0025c8 100644 --- a/yarn-project/archiver/src/archiver/index.ts +++ b/yarn-project/archiver/src/archiver/index.ts @@ -1,2 +1,5 @@ export * from './archiver.js'; export * from './config.js'; +export { MemoryArchiverStore } from './memory_archiver_store/memory_archiver_store.js'; +export { LMDBArchiverStore } from './lmdb_archiver_store.js'; +export { ArchiverDataStore } from './archiver_store.js'; diff --git a/yarn-project/archiver/src/archiver/lmdb_archiver_store.test.ts b/yarn-project/archiver/src/archiver/lmdb_archiver_store.test.ts new file mode 100644 index 00000000000..63014846b50 --- /dev/null +++ b/yarn-project/archiver/src/archiver/lmdb_archiver_store.test.ts @@ -0,0 +1,34 @@ +import { mkdtemp, rm } from 'fs/promises'; +import { RootDatabase, open } from 'lmdb'; +import { tmpdir } from 'os'; +import { join } from 'path'; + +import { describeArchiverDataStore } from './archiver_store_test_suite.js'; +import { LMDBArchiverStore } from './lmdb_archiver_store.js'; + +describe('LMDB Memory Store', () => { + let archiverStore: LMDBArchiverStore; + let tmpDbLocation: string; + let tmpDb: RootDatabase; + + beforeAll(async () => { + tmpDbLocation = await mkdtemp(join(tmpdir(), 'archiver-store-test-')); + tmpDb = open(tmpDbLocation, {}); + }); + + afterAll(async () => { + await tmpDb.close(); + await rm(tmpDbLocation, { recursive: true }); + }); + + beforeEach(() => { + archiverStore = new LMDBArchiverStore(tmpDb); + }); 
+ + afterEach(async () => { + await archiverStore?.close(); + await tmpDb.clearAsync(); + }); + + describeArchiverDataStore('LMDBArchiverStore', () => archiverStore); +}); diff --git a/yarn-project/archiver/src/archiver/lmdb_archiver_store.ts b/yarn-project/archiver/src/archiver/lmdb_archiver_store.ts new file mode 100644 index 00000000000..e57b34b31f3 --- /dev/null +++ b/yarn-project/archiver/src/archiver/lmdb_archiver_store.ts @@ -0,0 +1,728 @@ +import { Fr } from '@aztec/circuits.js'; +import { AztecAddress } from '@aztec/foundation/aztec-address'; +import { toBigIntBE, toBufferBE } from '@aztec/foundation/bigint-buffer'; +import { createDebugLogger } from '@aztec/foundation/log'; +import { + CancelledL1ToL2Message, + ContractData, + ExtendedContractData, + ExtendedUnencryptedL2Log, + GetUnencryptedLogsResponse, + INITIAL_L2_BLOCK_NUM, + L1ToL2Message, + L2Block, + L2BlockL2Logs, + L2Tx, + LogFilter, + LogId, + LogType, + PendingL1ToL2Message, + TxHash, + UnencryptedL2Log, +} from '@aztec/types'; + +import { Database, RangeOptions, RootDatabase } from 'lmdb'; + +import { ArchiverDataStore } from './archiver_store.js'; + +/* eslint-disable */ +type L1ToL2MessageAndCount = { + message: Buffer; + pendingCount: number; + confirmedCount: number; +}; + +type L1ToL2MessageBlockKey = `${string}:${'newMessage' | 'cancelledMessage'}:${number}`; + +function l1ToL2MessageBlockKey( + l1BlockNumber: bigint, + key: 'newMessage' | 'cancelledMessage', + indexInBlock: number, +): L1ToL2MessageBlockKey { + return `${toBufferBE(l1BlockNumber, 32).toString('hex')}:${key}:${indexInBlock}`; +} + +type BlockIndexValue = [blockNumber: number, index: number]; + +type BlockContext = { + block?: Uint8Array; + l1BlockNumber?: Uint8Array; + encryptedLogs?: Uint8Array; + unencryptedLogs?: Uint8Array; + extendedContractData?: Array; +}; +/* eslint-enable */ + +/** + * LMDB implementation of the ArchiverDataStore interface. 
+ */ +export class LMDBArchiverStore implements ArchiverDataStore { + #tables: { + /** Where block information will be stored */ + blocks: Database; + /** Transactions index */ + txIndex: Database; + /** Contracts index */ + contractIndex: Database; + /** L1 to L2 messages */ + l1ToL2Messages: Database; + /** Which blocks emitted which messages */ + l1ToL2MessagesByBlock: Database; + /** Pending L1 to L2 messages sorted by their fee, in buckets (dupSort=true) */ + pendingMessagesByFee: Database; + }; + + #logsMaxPageSize: number; + + #log = createDebugLogger('aztec:archiver:lmdb'); + + constructor(db: RootDatabase, logsMaxPageSize: number = 1000) { + this.#tables = { + blocks: db.openDB('blocks', { + keyEncoding: 'uint32', + encoding: 'msgpack', + }), + txIndex: db.openDB('tx_index', { + keyEncoding: 'binary', + encoding: 'msgpack', + }), + contractIndex: db.openDB('contract_index', { + keyEncoding: 'binary', + encoding: 'msgpack', + }), + l1ToL2Messages: db.openDB('l1_to_l2_messages', { + keyEncoding: 'binary', + encoding: 'msgpack', + }), + l1ToL2MessagesByBlock: db.openDB('l1_to_l2_message_nonces', { + keyEncoding: 'ordered-binary', + encoding: 'binary', + }), + pendingMessagesByFee: db.openDB('pending_messages_by_fee', { + keyEncoding: 'ordered-binary', + encoding: 'binary', + dupSort: true, + }), + }; + + this.#logsMaxPageSize = logsMaxPageSize; + } + + public async close() { + await Promise.all(Object.values(this.#tables).map(table => table.close())); + } + + /** + * Append new blocks to the store's list. + * @param blocks - The L2 blocks to be added to the store. + * @returns True if the operation is successful. 
+ */ + addBlocks(blocks: L2Block[]): Promise { + // LMDB transactions are shared across databases, so we can use a single transaction for all the writes + // https://github.com/kriszyp/lmdb-js/blob/67505a979ab63187953355a88747a7ad703d50b6/README.md#dbopendbdatabase-stringnamestring + return this.#tables.blocks.transaction(() => { + for (const block of blocks) { + const blockCtx = this.#tables.blocks.get(block.number) ?? {}; + blockCtx.block = block.toBuffer(); + blockCtx.l1BlockNumber = toBufferBE(block.getL1BlockNumber(), 32); + + // no need to await, all writes are enqueued in the transaction + // awaiting would interrupt the execution flow of this callback and "leak" the transaction to some other part + // of the system and any writes would then be part of our transaction here + void this.#tables.blocks.put(block.number, blockCtx); + + for (const [i, tx] of block.getTxs().entries()) { + if (tx.txHash.isZero()) { + continue; + } + void this.#tables.txIndex.put(tx.txHash.buffer, [block.number, i]); + } + + for (const [i, contractData] of block.newContractData.entries()) { + if (contractData.contractAddress.isZero()) { + continue; + } + + void this.#tables.contractIndex.put(contractData.contractAddress.toBuffer(), [block.number, i]); + } + } + + return true; + }); + } + + /** + * Gets up to `limit` amount of L2 blocks starting from `from`. + * @param start - Number of the first block to return (inclusive). + * @param limit - The number of blocks to return. + * @returns The requested L2 blocks. 
+ */ + getBlocks(start: number, limit: number): Promise { + try { + const blocks = this.#tables.blocks + .getRange(this.#computeBlockRange(start, limit)) + .filter(({ value }) => value.block) + .map(({ value }) => { + const block = L2Block.fromBuffer(asBuffer(value.block!)); + if (value.encryptedLogs) { + block.attachLogs(L2BlockL2Logs.fromBuffer(asBuffer(value.encryptedLogs)), LogType.ENCRYPTED); + } + + if (value.unencryptedLogs) { + block.attachLogs(L2BlockL2Logs.fromBuffer(asBuffer(value.unencryptedLogs)), LogType.UNENCRYPTED); + } + + return block; + }).asArray; + + return Promise.resolve(blocks); + } catch (err) { + // this function is sync so if any errors are thrown we need to make sure they're passed on as rejected Promises + return Promise.reject(err); + } + } + + /** + * Gets an l2 tx. + * @param txHash - The txHash of the l2 tx. + * @returns The requested L2 tx. + */ + getL2Tx(txHash: TxHash): Promise { + const [blockNumber, txIndex] = this.#tables.txIndex.get(txHash.buffer) ?? []; + if (typeof blockNumber !== 'number' || typeof txIndex !== 'number') { + return Promise.resolve(undefined); + } + + const block = this.#getBlock(blockNumber, true); + return Promise.resolve(block?.getTx(txIndex)); + } + + /** + * Append new logs to the store's list. + * @param encryptedLogs - The logs to be added to the store. + * @param unencryptedLogs - The type of the logs to be added to the store. + * @param blockNumber - The block for which to add the logs. + * @returns True if the operation is successful. + */ + addLogs( + encryptedLogs: L2BlockL2Logs | undefined, + unencryptedLogs: L2BlockL2Logs | undefined, + blockNumber: number, + ): Promise { + return this.#tables.blocks.transaction(() => { + const blockCtx = this.#tables.blocks.get(blockNumber) ?? 
{}; + + if (encryptedLogs) { + blockCtx.encryptedLogs = encryptedLogs.toBuffer(); + } + + if (unencryptedLogs) { + blockCtx.unencryptedLogs = unencryptedLogs.toBuffer(); + } + + void this.#tables.blocks.put(blockNumber, blockCtx); + return true; + }); + } + + /** + * Append new pending L1 to L2 messages to the store. + * @param messages - The L1 to L2 messages to be added to the store. + * @returns True if the operation is successful. + */ + addPendingL1ToL2Messages(messages: PendingL1ToL2Message[]): Promise { + return this.#tables.l1ToL2Messages.transaction(() => { + for (const { message, blockNumber, indexInBlock } of messages) { + const messageKey = message.entryKey?.toBuffer(); + if (!messageKey) { + throw new Error('Message does not have an entry key'); + } + + const dupeKey = l1ToL2MessageBlockKey(blockNumber, 'newMessage', indexInBlock); + const messageInBlock = this.#tables.l1ToL2MessagesByBlock.get(dupeKey); + + if (messageInBlock?.equals(messageKey)) { + continue; + } else { + if (messageInBlock) { + this.#log( + `Previously added pending message ${messageInBlock.toString( + 'hex', + )} at ${dupeKey.toString()}, now got ${messageKey.toString('hex')}`, + ); + } + + void this.#tables.l1ToL2MessagesByBlock.put(dupeKey, messageKey); + } + + let messageWithCount = this.#tables.l1ToL2Messages.get(messageKey); + if (!messageWithCount) { + messageWithCount = { + message: message.toBuffer(), + pendingCount: 0, + confirmedCount: 0, + }; + void this.#tables.l1ToL2Messages.put(messageKey, messageWithCount); + } + + this.#updateMessageCountInTx(messageKey, message, 1, 0); + } + return true; + }); + } + + /** + * Remove pending L1 to L2 messages from the store (if they were cancelled). + * @param messages - The message keys to be removed from the store. + * @returns True if the operation is successful. 
 */ + cancelPendingL1ToL2Messages(messages: CancelledL1ToL2Message[]): Promise { + return this.#tables.l1ToL2Messages.transaction(() => { + for (const { blockNumber, indexInBlock, entryKey } of messages) { + const messageKey = entryKey.toBuffer(); + const dupeKey = l1ToL2MessageBlockKey(blockNumber, 'cancelledMessage', indexInBlock); + const messageInBlock = this.#tables.l1ToL2MessagesByBlock.get(dupeKey); + if (messageInBlock?.equals(messageKey)) { + continue; + } else { + if (messageInBlock) { + this.#log( + `Previously added pending message ${messageInBlock.toString( + 'hex', + )} at ${dupeKey.toString()}, now got ${messageKey.toString('hex')}`, + ); + } + void this.#tables.l1ToL2MessagesByBlock.put(dupeKey, messageKey); + } + + const message = this.#getL1ToL2Message(messageKey); + this.#updateMessageCountInTx(messageKey, message, -1, 0); + } + return true; + }); + } + + /** + * Messages that have been published in an L2 block are confirmed. + * Add them to the confirmed store, also remove them from the pending store. + * @param entryKeys - The message keys to be removed from the store. + * @returns True if the operation is successful. + */ + confirmL1ToL2Messages(entryKeys: Fr[]): Promise { + return this.#tables.l1ToL2Messages.transaction(() => { + for (const entryKey of entryKeys) { + const messageKey = entryKey.toBuffer(); + const message = this.#getL1ToL2Message(messageKey); + this.#updateMessageCountInTx(messageKey, message, -1, 1); + } + return true; + }); + } + + /** + * Gets up to `limit` amount of pending L1 to L2 messages, sorted by fee + * @param limit - The number of messages to return (by default NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP). + * @returns The requested L1 to L2 message keys. 
 */ + getPendingL1ToL2MessageKeys(limit: number): Promise { + // start a read transaction in order to have a consistent view of the data + // this is all sync code, but better to be safe in case it changes in the future + // or we end up having multiple processes touching the same db + const transaction = this.#tables.pendingMessagesByFee.useReadTransaction(); + + try { + // get all the keys, in reverse order + const fees = this.#tables.pendingMessagesByFee.getKeys({ reverse: true, transaction }); + const messages: Fr[] = []; + + loopOverFees: for (const fee of fees) { + const pendingMessages = this.#tables.pendingMessagesByFee.getValues(fee, { transaction }); + this.#log(`Found pending messages for ${fee}`); + + for (const messageKey of pendingMessages) { + const messageWithCount = this.#tables.l1ToL2Messages.get(messageKey, { transaction }); + if (!messageWithCount || messageWithCount.pendingCount === 0) { + this.#log( + `Message ${messageKey.toString( + 'hex', + )} has no pending count but it got picked up by getPendingL1ToL2MessageKeys`, + ); + continue; + } + const toAdd = Array(messageWithCount.pendingCount).fill(Fr.fromBuffer(messageKey)); + this.#log(`Adding ${toAdd.length} copies of ${messageKey.toString('hex')} for ${fee}`); + messages.push(...toAdd); + + if (messages.length >= limit) { + break loopOverFees; + } + } + } + + return Promise.resolve(messages); + } catch (err) { + return Promise.reject(err); + } finally { + transaction.done(); + } + } + + /** + * Gets the confirmed L1 to L2 message corresponding to the given message key. + * @param messageKey - The message key to look up. + * @returns The requested L1 to L2 message or throws if not found. 
+ */ + getConfirmedL1ToL2Message(messageKey: Fr): Promise { + const value = this.#tables.l1ToL2Messages.get(messageKey.toBuffer()); + if (!value) { + return Promise.reject(new Error(`Message with key ${messageKey} not found`)); + } + + if (value.confirmedCount === 0) { + return Promise.reject(new Error(`Message with key ${messageKey} not confirmed`)); + } + + return Promise.resolve(L1ToL2Message.fromBuffer(value.message)); + } + + /** + * Gets up to `limit` amount of logs starting from `from`. + * @param start - Number of the L2 block to which corresponds the first logs to be returned. + * @param limit - The number of logs to return. + * @param logType - Specifies whether to return encrypted or unencrypted logs. + * @returns The requested logs. + */ + getLogs(start: number, limit: number, logType: LogType): Promise { + try { + const blockCtxKey = logType === LogType.ENCRYPTED ? 'encryptedLogs' : 'unencryptedLogs'; + const logs = this.#tables.blocks + .getRange(this.#computeBlockRange(start, limit)) + .map(({ value: { [blockCtxKey]: logs } }) => + logs ? L2BlockL2Logs.fromBuffer(asBuffer(logs)) : new L2BlockL2Logs([]), + ).asArray; + + return Promise.resolve(logs); + } catch (err) { + return Promise.reject(err); + } + } + + /** + * Gets unencrypted logs based on the provided filter. + * @param filter - The filter to apply to the logs. + * @returns The requested logs. + */ + getUnencryptedLogs(filter: LogFilter): Promise { + try { + if (filter.afterLog) { + return Promise.resolve(this.#filterLogsBetweenBlocks(filter)); + } else if (filter.txHash) { + return Promise.resolve(this.#filterLogsOfTx(filter)); + } else { + return Promise.resolve(this.#filterLogsBetweenBlocks(filter)); + } + } catch (err) { + return Promise.reject(err); + } + } + + #filterLogsOfTx(filter: LogFilter): GetUnencryptedLogsResponse { + if (!filter.txHash) { + throw new Error('Missing txHash'); + } + + const [blockNumber, txIndex] = this.#tables.txIndex.get(filter.txHash.buffer) ?? 
[]; + if (typeof blockNumber !== 'number' || typeof txIndex !== 'number') { + return { logs: [], maxLogsHit: false }; + } + + const block = this.#getBlock(blockNumber, true); + if (!block || !block.newUnencryptedLogs) { + return { logs: [], maxLogsHit: false }; + } + + const txLogs = block.newUnencryptedLogs.txLogs[txIndex].unrollLogs().map(log => UnencryptedL2Log.fromBuffer(log)); + const logs: ExtendedUnencryptedL2Log[] = []; + const maxLogsHit = this.#accumulateLogs(logs, blockNumber, txIndex, txLogs, filter); + + return { logs, maxLogsHit }; + } + + #filterLogsBetweenBlocks(filter: LogFilter): GetUnencryptedLogsResponse { + const start = + filter.afterLog?.blockNumber ?? Math.max(filter.fromBlock ?? INITIAL_L2_BLOCK_NUM, INITIAL_L2_BLOCK_NUM); + const end = filter.toBlock; + + if (typeof end === 'number' && end < start) { + return { + logs: [], + maxLogsHit: true, + }; + } + + const logs: ExtendedUnencryptedL2Log[] = []; + + const blockNumbers = this.#tables.blocks.getKeys({ start, end, snapshot: false }); + let maxLogsHit = false; + + loopOverBlocks: for (const blockNumber of blockNumbers) { + const block = this.#getBlock(blockNumber, true); + if (!block || !block.newUnencryptedLogs) { + continue; + } + + const unencryptedLogsInBlock = block.newUnencryptedLogs; + for (let txIndex = filter.afterLog?.txIndex ?? 0; txIndex < unencryptedLogsInBlock.txLogs.length; txIndex++) { + const txLogs = unencryptedLogsInBlock.txLogs[txIndex].unrollLogs().map(log => UnencryptedL2Log.fromBuffer(log)); + maxLogsHit = this.#accumulateLogs(logs, blockNumber, txIndex, txLogs, filter); + if (maxLogsHit) { + break loopOverBlocks; + } + } + } + + return { logs, maxLogsHit }; + } + + #accumulateLogs( + results: ExtendedUnencryptedL2Log[], + blockNumber: number, + txIndex: number, + txLogs: UnencryptedL2Log[], + filter: LogFilter, + ): boolean { + let maxLogsHit = false; + let logIndex = typeof filter.afterLog?.logIndex === 'number' ? 
filter.afterLog.logIndex + 1 : 0; + for (; logIndex < txLogs.length; logIndex++) { + const log = txLogs[logIndex]; + if (filter.contractAddress && !log.contractAddress.equals(filter.contractAddress)) { + continue; + } + + if (filter.selector && !log.selector.equals(filter.selector)) { + continue; + } + + results.push(new ExtendedUnencryptedL2Log(new LogId(blockNumber, txIndex, logIndex), log)); + if (results.length >= this.#logsMaxPageSize) { + maxLogsHit = true; + break; + } + } + + return maxLogsHit; + } + + /** + * Add new extended contract data from an L2 block to the store's list. + * @param data - List of contracts' data to be added. + * @param blockNum - Number of the L2 block the contract data was deployed in. + * @returns True if the operation is successful. + */ + addExtendedContractData(data: ExtendedContractData[], blockNum: number): Promise { + return this.#tables.blocks.transaction(() => { + const blockCtx = this.#tables.blocks.get(blockNum) ?? {}; + if (!blockCtx.extendedContractData) { + blockCtx.extendedContractData = []; + } + this.#log(`Adding ${data.length} extended contract data to block ${blockNum}`); + blockCtx.extendedContractData.push(...data.map(data => data.toBuffer())); + void this.#tables.blocks.put(blockNum, blockCtx); + + return true; + }); + } + + /** + * Get the extended contract data for this contract. + * @param contractAddress - The contract data address. + * @returns The extended contract data or undefined if not found. + */ + getExtendedContractData(contractAddress: AztecAddress): Promise { + const [blockNumber, _] = this.#tables.contractIndex.get(contractAddress.toBuffer()) ?? []; + + if (typeof blockNumber !== 'number') { + return Promise.resolve(undefined); + } + + const blockCtx = this.#tables.blocks.get(blockNumber); + if (!blockCtx) { + return Promise.resolve(undefined); + } + + for (const data of blockCtx.extendedContractData ?? 
[]) { + const extendedContractData = ExtendedContractData.fromBuffer(asBuffer(data)); + if (extendedContractData.contractData.contractAddress.equals(contractAddress)) { + return Promise.resolve(extendedContractData); + } + } + + return Promise.resolve(undefined); + } + + /** + * Lookup all extended contract data in an L2 block. + * @param blockNum - The block number to get all contract data from. + * @returns All extended contract data in the block (if found). + */ + getExtendedContractDataInBlock(blockNum: number): Promise { + const blockCtx = this.#tables.blocks.get(blockNum); + if (!blockCtx || !blockCtx.extendedContractData) { + return Promise.resolve([]); + } + + return Promise.resolve(blockCtx.extendedContractData.map(data => ExtendedContractData.fromBuffer(asBuffer(data)))); + } + + /** + * Get basic info for an L2 contract. + * Contains contract address & the ethereum portal address. + * @param contractAddress - The contract data address. + * @returns ContractData with the portal address (if we didn't throw an error). + */ + getContractData(contractAddress: AztecAddress): Promise { + const [blockNumber, index] = this.#tables.contractIndex.get(contractAddress.toBuffer()) ?? []; + if (typeof blockNumber !== 'number' || typeof index !== 'number') { + return Promise.resolve(undefined); + } + + const block = this.#getBlock(blockNumber); + return Promise.resolve(block?.newContractData[index]); + } + + /** + * Get basic info for an all L2 contracts deployed in a block. + * Contains contract address & the ethereum portal address. + * @param blockNumber - Number of the L2 block where contracts were deployed. + * @returns ContractData with the portal address (if we didn't throw an error). + */ + getContractDataInBlock(blockNumber: number): Promise { + const block = this.#getBlock(blockNumber); + return Promise.resolve(block?.newContractData ?? []); + } + + /** + * Gets the number of the latest L2 block processed. 
+ * @returns The number of the latest L2 block processed. + */ + getBlockNumber(): Promise { + // inverse range with no start/end will return the last key + const [lastBlockNumber] = this.#tables.blocks.getKeys({ reverse: true, limit: 1 }).asArray; + return Promise.resolve(typeof lastBlockNumber === 'number' ? lastBlockNumber : INITIAL_L2_BLOCK_NUM - 1); + } + + getL1BlockNumber(): Promise { + // inverse range with no start/end will return the last value + const [lastBlock] = this.#tables.blocks.getRange({ reverse: true, limit: 1 }).asArray; + if (!lastBlock) { + return Promise.resolve(0n); + } else { + const blockCtx = lastBlock.value; + if (!blockCtx.l1BlockNumber) { + return Promise.reject(new Error('L1 block number not found')); + } else { + return Promise.resolve(toBigIntBE(asBuffer(blockCtx.l1BlockNumber))); + } + } + } + + #getBlock(blockNumber: number, withLogs = false): L2Block | undefined { + const blockCtx = this.#tables.blocks.get(blockNumber); + if (!blockCtx || !blockCtx.block) { + return undefined; + } + + const block = L2Block.fromBuffer(asBuffer(blockCtx.block)); + + if (withLogs) { + if (blockCtx.encryptedLogs) { + block.attachLogs(L2BlockL2Logs.fromBuffer(asBuffer(blockCtx.encryptedLogs)), LogType.ENCRYPTED); + } + + if (blockCtx.unencryptedLogs) { + block.attachLogs(L2BlockL2Logs.fromBuffer(asBuffer(blockCtx.unencryptedLogs)), LogType.UNENCRYPTED); + } + } + + return block; + } + + #computeBlockRange(start: number, limit: number): Required> { + if (limit < 1) { + throw new Error(`Invalid limit: ${limit}`); + } + + if (start < INITIAL_L2_BLOCK_NUM) { + this.#log(`Clamping start block ${start} to ${INITIAL_L2_BLOCK_NUM}`); + start = INITIAL_L2_BLOCK_NUM; + } + + const end = start + limit; + return { start, end }; + } + + #getL1ToL2Message(entryKey: Buffer): L1ToL2Message { + const value = this.#tables.l1ToL2Messages.get(entryKey); + if (!value) { + throw new Error('Unknown message: ' + entryKey.toString()); + } + + return 
L1ToL2Message.fromBuffer(value.message); + } + + /** + * Atomically updates the pending and confirmed count for a message. + * If both counts are 0 after adding their respective deltas, the message is removed from the store. + * + * Only call this method from inside a _transaction_! + * + * @param messageKey - The message key to update. + * @param message - The message to update. + * @param deltaPendingCount - The amount to add to the pending count. + * @param deltaConfirmedCount - The amount to add to the confirmed count. + */ + #updateMessageCountInTx( + messageKey: Buffer, + message: L1ToL2Message, + deltaPendingCount: number, + deltaConfirmedCount: number, + ): void { + const entry = this.#tables.l1ToL2Messages.getEntry(messageKey); + if (!entry) { + return; + } + + const { value } = entry; + + value.pendingCount = Math.max(0, value.pendingCount + deltaPendingCount); + value.confirmedCount = Math.max(0, value.confirmedCount + deltaConfirmedCount); + + this.#log( + `Updating count of ${messageKey.toString('hex')} to ${value.pendingCount} pending and ${ + value.confirmedCount + } confirmed`, + ); + + if (value.pendingCount === 0) { + this.#log(`Removing message ${messageKey.toString('hex')} from pending messages group with fee ${message.fee}`); + void this.#tables.pendingMessagesByFee.remove(message.fee, messageKey); + } else if (value.pendingCount > 0) { + this.#log(`Adding message ${messageKey.toString('hex')} to pending message group with fee ${message.fee}`); + void this.#tables.pendingMessagesByFee.put(message.fee, messageKey); + } + + if (value.pendingCount === 0 && value.confirmedCount === 0) { + void this.#tables.l1ToL2Messages.remove(messageKey); + } else { + void this.#tables.l1ToL2Messages.put(messageKey, value); + } + } +} + +/** + * Creates a Buffer viewing the same memory location as the passed array. + * @param arr - A Uint8Array + */ +function asBuffer(arr: Uint8Array | Buffer): Buffer { + return Buffer.isBuffer(arr) ? 
arr : Buffer.from(arr.buffer, arr.byteOffset, arr.length / arr.BYTES_PER_ELEMENT); +} diff --git a/yarn-project/archiver/src/archiver/l1_to_l2_message_store.test.ts b/yarn-project/archiver/src/archiver/memory_archiver_store/l1_to_l2_message_store.test.ts similarity index 70% rename from yarn-project/archiver/src/archiver/l1_to_l2_message_store.test.ts rename to yarn-project/archiver/src/archiver/memory_archiver_store/l1_to_l2_message_store.test.ts index 0901d7a479f..6058cbedddc 100644 --- a/yarn-project/archiver/src/archiver/l1_to_l2_message_store.test.ts +++ b/yarn-project/archiver/src/archiver/memory_archiver_store/l1_to_l2_message_store.test.ts @@ -16,15 +16,21 @@ describe('l1_to_l2_message_store', () => { }); it('addMessage adds a message', () => { - store.addMessage(entryKey, msg); + store.addMessage(entryKey, msg, 1n, 0); expect(store.getMessage(entryKey)).toEqual(msg); }); it('addMessage increments the count if the message is already in the store', () => { - store.addMessage(entryKey, msg); - store.addMessage(entryKey, msg); + store.addMessage(entryKey, msg, 1n, 0); + store.addMessage(entryKey, msg, 1n, 1); expect(store.getMessageAndCount(entryKey)).toEqual({ message: msg, count: 2 }); }); + + it('addMessage does not increment the count if the message is already in the store at the same position', () => { + store.addMessage(entryKey, msg, 1n, 0); + store.addMessage(entryKey, msg, 1n, 0); + expect(store.getMessageAndCount(entryKey)).toEqual({ message: msg, count: 1 }); + }); }); describe('pending_l1_to_l2_message_store', () => { @@ -40,22 +46,22 @@ describe('pending_l1_to_l2_message_store', () => { }); it('removeMessage removes the message if the count is 1', () => { - store.addMessage(entryKey, msg); - store.removeMessage(entryKey); + store.addMessage(entryKey, msg, 1n, 0); + store.removeMessage(entryKey, 2n, 0); expect(store.getMessage(entryKey)).toBeUndefined(); }); it("handles case when removing a message that doesn't exist", () => { - expect(() => 
store.removeMessage(new Fr(0))).not.toThrow(); + expect(() => store.removeMessage(new Fr(0), 1n, 0)).not.toThrow(); const one = new Fr(1); - expect(() => store.removeMessage(one)).toThrow(`Message with key ${one.value} not found in store`); + expect(() => store.removeMessage(one, 1n, 0)).toThrow(`Message with key ${one.value} not found in store`); }); it('removeMessage decrements the count if the message is already in the store', () => { - store.addMessage(entryKey, msg); - store.addMessage(entryKey, msg); - store.addMessage(entryKey, msg); - store.removeMessage(entryKey); + store.addMessage(entryKey, msg, 1n, 0); + store.addMessage(entryKey, msg, 1n, 1); + store.addMessage(entryKey, msg, 1n, 2); + store.removeMessage(entryKey, 2n, 0); expect(store.getMessageAndCount(entryKey)).toEqual({ message: msg, count: 2 }); }); @@ -64,21 +70,21 @@ describe('pending_l1_to_l2_message_store', () => { }); it('getMessageKeys returns an empty array if limit is 0', () => { - store.addMessage(entryKey, msg); + store.addMessage(entryKey, msg, 1n, 0); expect(store.getMessageKeys(0)).toEqual([]); }); it('get messages for a non-empty store when limit > number of messages in store', () => { const entryKeys = [1, 2, 3, 4, 5].map(x => new Fr(x)); - entryKeys.forEach(entryKey => { - store.addMessage(entryKey, L1ToL2Message.random()); + entryKeys.forEach((entryKey, i) => { + store.addMessage(entryKey, L1ToL2Message.random(), 1n, i); }); expect(store.getMessageKeys(10).length).toEqual(5); }); it('get messages returns messages sorted by fees and also includes multiple of the same message', () => { const entryKeys = [1, 2, 3, 3, 3, 4].map(x => new Fr(x)); - entryKeys.forEach(entryKey => { + entryKeys.forEach((entryKey, i) => { // set msg.fee to entryKey to test the sort. 
const msg = new L1ToL2Message( L1Actor.random(), @@ -89,7 +95,7 @@ describe('pending_l1_to_l2_message_store', () => { Number(entryKey.value), entryKey, ); - store.addMessage(entryKey, msg); + store.addMessage(entryKey, msg, 1n, i); }); const expectedMessageFees = [4n, 3n, 3n, 3n]; // the top 4. const receivedMessageFees = store.getMessageKeys(4).map(key => key.value); diff --git a/yarn-project/archiver/src/archiver/l1_to_l2_message_store.ts b/yarn-project/archiver/src/archiver/memory_archiver_store/l1_to_l2_message_store.ts similarity index 74% rename from yarn-project/archiver/src/archiver/l1_to_l2_message_store.ts rename to yarn-project/archiver/src/archiver/memory_archiver_store/l1_to_l2_message_store.ts index 441a0a81783..fdc4bff61b6 100644 --- a/yarn-project/archiver/src/archiver/l1_to_l2_message_store.ts +++ b/yarn-project/archiver/src/archiver/memory_archiver_store/l1_to_l2_message_store.ts @@ -11,16 +11,26 @@ export class L1ToL2MessageStore { * messages (and the number of times the message has been seen). */ protected store: Map = new Map(); + private messagesByBlock = new Set(); constructor() {} - addMessage(messageKey: Fr, msg: L1ToL2Message) { + addMessage(messageKey: Fr, message: L1ToL2Message, l1BlockNumber: bigint, messageIndex: number) { + if (this.messagesByBlock.has(`${l1BlockNumber}-${messageIndex}`)) { + return; + } + this.messagesByBlock.add(`${l1BlockNumber}-${messageIndex}`); + + this.addMessageUnsafe(messageKey, message); + } + + addMessageUnsafe(messageKey: Fr, message: L1ToL2Message) { const messageKeyBigInt = messageKey.value; const msgAndCount = this.store.get(messageKeyBigInt); if (msgAndCount) { msgAndCount.count++; } else { - this.store.set(messageKeyBigInt, { message: msg, count: 1 }); + this.store.set(messageKeyBigInt, { message, count: 1 }); } } @@ -38,6 +48,7 @@ export class L1ToL2MessageStore { * for removing messages or fetching multiple messages. 
*/ export class PendingL1ToL2MessageStore extends L1ToL2MessageStore { + private cancelledMessagesByBlock = new Set(); getMessageKeys(limit: number): Fr[] { if (limit < 1) { return []; @@ -57,11 +68,20 @@ export class PendingL1ToL2MessageStore extends L1ToL2MessageStore { return messages; } - removeMessage(messageKey: Fr) { + removeMessage(messageKey: Fr, l1BlockNumber: bigint, messageIndex: number) { // ignore 0 - messageKey is a hash, so a 0 can probabilistically never occur. It is best to skip it. if (messageKey.equals(Fr.ZERO)) { return; } + + if (this.cancelledMessagesByBlock.has(`${l1BlockNumber}-${messageIndex}`)) { + return; + } + this.cancelledMessagesByBlock.add(`${l1BlockNumber}-${messageIndex}`); + this.removeMessageUnsafe(messageKey); + } + + removeMessageUnsafe(messageKey: Fr) { const messageKeyBigInt = messageKey.value; const msgAndCount = this.store.get(messageKeyBigInt); if (!msgAndCount) { diff --git a/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.test.ts b/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.test.ts new file mode 100644 index 00000000000..1fe8fc43171 --- /dev/null +++ b/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.test.ts @@ -0,0 +1,35 @@ +import { L2Block } from '@aztec/types'; + +import { ArchiverDataStore } from '../archiver_store.js'; +import { describeArchiverDataStore } from '../archiver_store_test_suite.js'; +import { MemoryArchiverStore } from './memory_archiver_store.js'; + +describe('MemoryArchiverStore', () => { + let archiverStore: ArchiverDataStore; + + beforeEach(() => { + archiverStore = new MemoryArchiverStore(1000); + }); + + describeArchiverDataStore('implements ArchiverStore', () => archiverStore); + + describe('getUnencryptedLogs config', () => { + it('does not return more than "maxLogs" logs', async () => { + const maxLogs = 5; + archiverStore = new MemoryArchiverStore(maxLogs); + const blocks = Array(10) + .fill(0) + 
.map((_, index: number) => L2Block.random(index + 1, 4, 2, 3, 2, 2)); + + await archiverStore.addBlocks(blocks); + await Promise.all( + blocks.map(block => archiverStore.addLogs(block.newEncryptedLogs, block.newUnencryptedLogs, block.number)), + ); + + const response = await archiverStore.getUnencryptedLogs({}); + + expect(response.maxLogsHit).toBeTruthy(); + expect(response.logs.length).toEqual(maxLogs); + }); + }); +}); diff --git a/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts b/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts new file mode 100644 index 00000000000..3b032c19036 --- /dev/null +++ b/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts @@ -0,0 +1,399 @@ +import { Fr, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP } from '@aztec/circuits.js'; +import { AztecAddress } from '@aztec/foundation/aztec-address'; +import { + CancelledL1ToL2Message, + ContractData, + ExtendedContractData, + ExtendedUnencryptedL2Log, + GetUnencryptedLogsResponse, + INITIAL_L2_BLOCK_NUM, + L1ToL2Message, + L2Block, + L2BlockContext, + L2BlockL2Logs, + L2Tx, + LogFilter, + LogId, + LogType, + PendingL1ToL2Message, + TxHash, + UnencryptedL2Log, +} from '@aztec/types'; + +import { ArchiverDataStore } from '../archiver_store.js'; +import { L1ToL2MessageStore, PendingL1ToL2MessageStore } from './l1_to_l2_message_store.js'; + +/** + * Simple, in-memory implementation of an archiver data store. + */ +export class MemoryArchiverStore implements ArchiverDataStore { + /** + * An array containing all the L2 blocks that have been fetched so far. + */ + private l2BlockContexts: L2BlockContext[] = []; + + /** + * An array containing all the L2 Txs in the L2 blocks that have been fetched so far. + */ + private l2Txs: L2Tx[] = []; + + /** + * An array containing all the encrypted logs that have been fetched so far. 
+ * Note: Index in the "outer" array equals to (corresponding L2 block's number - INITIAL_L2_BLOCK_NUM). + */ + private encryptedLogsPerBlock: L2BlockL2Logs[] = []; + + /** + * An array containing all the unencrypted logs that have been fetched so far. + * Note: Index in the "outer" array equals to (corresponding L2 block's number - INITIAL_L2_BLOCK_NUM). + */ + private unencryptedLogsPerBlock: L2BlockL2Logs[] = []; + + /** + * A sparse array containing all the extended contract data that have been fetched so far. + */ + private extendedContractDataByBlock: (ExtendedContractData[] | undefined)[] = []; + + /** + * A mapping of contract address to extended contract data. + */ + private extendedContractData: Map = new Map(); + + /** + * Contains all the confirmed L1 to L2 messages (i.e. messages that were consumed in an L2 block) + * It is a map of entryKey to the corresponding L1 to L2 message and the number of times it has appeared + */ + private confirmedL1ToL2Messages: L1ToL2MessageStore = new L1ToL2MessageStore(); + + /** + * Contains all the pending L1 to L2 messages (accounts for duplication of messages) + */ + private pendingL1ToL2Messages: PendingL1ToL2MessageStore = new PendingL1ToL2MessageStore(); + + constructor( + /** The max number of logs that can be obtained in 1 "getUnencryptedLogs" call. */ + public readonly maxLogs: number, + ) {} + + /** + * Append new blocks to the store's list. + * @param blocks - The L2 blocks to be added to the store. + * @returns True if the operation is successful (always in this implementation). + */ + public addBlocks(blocks: L2Block[]): Promise { + this.l2BlockContexts.push(...blocks.map(block => new L2BlockContext(block))); + this.l2Txs.push(...blocks.flatMap(b => b.getTxs())); + return Promise.resolve(true); + } + + /** + * Append new logs to the store's list. + * @param encryptedLogs - The encrypted logs to be added to the store. + * @param unencryptedLogs - The unencrypted logs to be added to the store. 
+ * @param blockNumber - The block for which to add the logs. + * @returns True if the operation is successful. + */ + addLogs(encryptedLogs: L2BlockL2Logs, unencryptedLogs: L2BlockL2Logs, blockNumber: number): Promise { + if (encryptedLogs) { + this.encryptedLogsPerBlock[blockNumber - INITIAL_L2_BLOCK_NUM] = encryptedLogs; + } + + if (unencryptedLogs) { + this.unencryptedLogsPerBlock[blockNumber - INITIAL_L2_BLOCK_NUM] = unencryptedLogs; + } + + return Promise.resolve(true); + } + + /** + * Append new pending L1 to L2 messages to the store. + * @param messages - The L1 to L2 messages to be added to the store. + * @returns True if the operation is successful (always in this implementation). + */ + public addPendingL1ToL2Messages(messages: PendingL1ToL2Message[]): Promise { + for (const { message, blockNumber, indexInBlock } of messages) { + this.pendingL1ToL2Messages.addMessage(message.entryKey!, message, blockNumber, indexInBlock); + } + return Promise.resolve(true); + } + + /** + * Remove pending L1 to L2 messages from the store (if they were cancelled). + * @param messages - The message keys to be removed from the store. + * @returns True if the operation is successful (always in this implementation). + */ + public cancelPendingL1ToL2Messages(messages: CancelledL1ToL2Message[]): Promise { + messages.forEach(({ entryKey, blockNumber, indexInBlock }) => { + this.pendingL1ToL2Messages.removeMessage(entryKey, blockNumber, indexInBlock); + }); + return Promise.resolve(true); + } + + /** + * Messages that have been published in an L2 block are confirmed. + * Add them to the confirmed store, also remove them from the pending store. + * @param messageKeys - The message keys to be removed from the store. + * @returns True if the operation is successful (always in this implementation). 
+ */ + public confirmL1ToL2Messages(messageKeys: Fr[]): Promise { + messageKeys.forEach(messageKey => { + this.confirmedL1ToL2Messages.addMessageUnsafe(messageKey, this.pendingL1ToL2Messages.getMessage(messageKey)!); + this.pendingL1ToL2Messages.removeMessageUnsafe(messageKey); + }); + return Promise.resolve(true); + } + + /** + * Store new extended contract data from an L2 block to the store's list. + * @param data - List of contracts' data to be added. + * @param blockNum - Number of the L2 block the contract data was deployed in. + * @returns True if the operation is successful (always in this implementation). + */ + public addExtendedContractData(data: ExtendedContractData[], blockNum: number): Promise { + // Add to the contracts mapping + for (const contractData of data) { + const key = contractData.contractData.contractAddress.toString(); + this.extendedContractData.set(key, contractData); + } + + // Add the index per block + if (this.extendedContractDataByBlock[blockNum]?.length) { + this.extendedContractDataByBlock[blockNum]?.push(...data); + } else { + this.extendedContractDataByBlock[blockNum] = [...data]; + } + return Promise.resolve(true); + } + + /** + * Gets up to `limit` amount of L2 blocks starting from `from`. + * @param from - Number of the first block to return (inclusive). + * @param limit - The number of blocks to return. + * @returns The requested L2 blocks. + * @remarks When "from" is smaller than genesis block number, blocks from the beginning are returned. 
+ */ + public getBlocks(from: number, limit: number): Promise { + // Return an empty array if we are outside of range + if (limit < 1) { + return Promise.reject(new Error(`Invalid limit: ${limit}`)); + } + + const fromIndex = Math.max(from - INITIAL_L2_BLOCK_NUM, 0); + if (fromIndex >= this.l2BlockContexts.length) { + return Promise.resolve([]); + } + + const toIndex = fromIndex + limit; + return Promise.resolve(this.l2BlockContexts.slice(fromIndex, toIndex).map(blockContext => blockContext.block)); + } + + /** + * Gets an l2 tx. + * @param txHash - The txHash of the l2 tx. + * @returns The requested L2 tx. + */ + public getL2Tx(txHash: TxHash): Promise { + const l2Tx = this.l2Txs.find(tx => tx.txHash.equals(txHash)); + return Promise.resolve(l2Tx); + } + + /** + * Gets up to `limit` amount of pending L1 to L2 messages, sorted by fee + * @param limit - The number of messages to return (by default NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP). + * @returns The requested L1 to L2 message keys. + */ + public getPendingL1ToL2MessageKeys(limit: number = NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP): Promise { + return Promise.resolve(this.pendingL1ToL2Messages.getMessageKeys(limit)); + } + + /** + * Gets the confirmed L1 to L2 message corresponding to the given message key. + * @param messageKey - The message key to look up. + * @returns The requested L1 to L2 message or throws if not found. + */ + public getConfirmedL1ToL2Message(messageKey: Fr): Promise { + const message = this.confirmedL1ToL2Messages.getMessage(messageKey); + if (!message) { + throw new Error(`L1 to L2 Message with key ${messageKey.toString()} not found in the confirmed messages store`); + } + return Promise.resolve(message); + } + + /** + * Gets up to `limit` amount of logs starting from `from`. + * @param from - Number of the L2 block to which corresponds the first logs to be returned. + * @param limit - The number of logs to return. + * @param logType - Specifies whether to return encrypted or unencrypted logs. 
+ * @returns The requested logs. + */ + getLogs(from: number, limit: number, logType: LogType): Promise { + if (from < INITIAL_L2_BLOCK_NUM || limit < 1) { + throw new Error(`Invalid limit: ${limit}`); + } + const logs = logType === LogType.ENCRYPTED ? this.encryptedLogsPerBlock : this.unencryptedLogsPerBlock; + if (from > logs.length) { + return Promise.resolve([]); + } + const startIndex = from - INITIAL_L2_BLOCK_NUM; + const endIndex = startIndex + limit; + return Promise.resolve(logs.slice(startIndex, endIndex)); + } + + /** + * Gets unencrypted logs based on the provided filter. + * @param filter - The filter to apply to the logs. + * @returns The requested logs. + * @remarks Works by doing an intersection of all params in the filter. + */ + getUnencryptedLogs(filter: LogFilter): Promise { + let txHash: TxHash | undefined; + let fromBlockIndex = 0; + let toBlockIndex = this.unencryptedLogsPerBlock.length; + let txIndexInBlock = 0; + let logIndexInTx = 0; + + if (filter.afterLog) { + // Continuation parameter is set --> tx hash is ignored + if (filter.fromBlock == undefined || filter.fromBlock <= filter.afterLog.blockNumber) { + fromBlockIndex = filter.afterLog.blockNumber - INITIAL_L2_BLOCK_NUM; + txIndexInBlock = filter.afterLog.txIndex; + logIndexInTx = filter.afterLog.logIndex + 1; // We want to start from the next log + } else { + fromBlockIndex = filter.fromBlock - INITIAL_L2_BLOCK_NUM; + } + } else { + txHash = filter.txHash; + + if (filter.fromBlock !== undefined) { + fromBlockIndex = filter.fromBlock - INITIAL_L2_BLOCK_NUM; + } + } + + if (filter.toBlock !== undefined) { + toBlockIndex = filter.toBlock - INITIAL_L2_BLOCK_NUM; + } + + // Ensure the indices are within block array bounds + fromBlockIndex = Math.max(fromBlockIndex, 0); + toBlockIndex = Math.min(toBlockIndex, this.unencryptedLogsPerBlock.length); + + if (fromBlockIndex > this.unencryptedLogsPerBlock.length || toBlockIndex < fromBlockIndex || toBlockIndex <= 0) { + return Promise.resolve({ + 
logs: [], + maxLogsHit: false, + }); + } + + const contractAddress = filter.contractAddress; + const selector = filter.selector; + + const logs: ExtendedUnencryptedL2Log[] = []; + + for (; fromBlockIndex < toBlockIndex; fromBlockIndex++) { + const blockContext = this.l2BlockContexts[fromBlockIndex]; + const blockLogs = this.unencryptedLogsPerBlock[fromBlockIndex]; + for (; txIndexInBlock < blockLogs.txLogs.length; txIndexInBlock++) { + const txLogs = blockLogs.txLogs[txIndexInBlock].unrollLogs().map(log => UnencryptedL2Log.fromBuffer(log)); + for (; logIndexInTx < txLogs.length; logIndexInTx++) { + const log = txLogs[logIndexInTx]; + if ( + (!txHash || blockContext.getTxHash(txIndexInBlock).equals(txHash)) && + (!contractAddress || log.contractAddress.equals(contractAddress)) && + (!selector || log.selector.equals(selector)) + ) { + logs.push( + new ExtendedUnencryptedL2Log(new LogId(blockContext.block.number, txIndexInBlock, logIndexInTx), log), + ); + if (logs.length === this.maxLogs) { + return Promise.resolve({ + logs, + maxLogsHit: true, + }); + } + } + } + logIndexInTx = 0; + } + txIndexInBlock = 0; + } + + return Promise.resolve({ + logs, + maxLogsHit: false, + }); + } + + /** + * Get the extended contract data for this contract. + * @param contractAddress - The contract data address. + * @returns The extended contract data or undefined if not found. + */ + getExtendedContractData(contractAddress: AztecAddress): Promise { + const result = this.extendedContractData.get(contractAddress.toString()); + return Promise.resolve(result); + } + + /** + * Lookup all contract data in an L2 block. + * @param blockNum - The block number to get all contract data from. + * @returns All extended contract data in the block (if found). 
*/ + public getExtendedContractDataInBlock(blockNum: number): Promise { + if (blockNum > this.l2BlockContexts.length) { + return Promise.resolve([]); + } + return Promise.resolve(this.extendedContractDataByBlock[blockNum] || []); + } + + /** + * Get basic info for an L2 contract. + * Contains contract address & the ethereum portal address. + * @param contractAddress - The contract data address. + * @returns ContractData with the portal address (if we didn't throw an error). + */ + public getContractData(contractAddress: AztecAddress): Promise { + if (contractAddress.isZero()) { + return Promise.resolve(undefined); + } + for (const blockContext of this.l2BlockContexts) { + for (const contractData of blockContext.block.newContractData) { + if (contractData.contractAddress.equals(contractAddress)) { + return Promise.resolve(contractData); + } + } + } + return Promise.resolve(undefined); + } + + /** + * Get basic info for all L2 contracts deployed in a block. + * Contains contract address & the ethereum portal address. + * @param l2BlockNum - Number of the L2 block where contracts were deployed. + * @returns ContractData with the portal address (if we didn't throw an error). + */ + public getContractDataInBlock(l2BlockNum: number): Promise { + if (l2BlockNum > this.l2BlockContexts.length) { + return Promise.resolve([]); + } + const block: L2Block | undefined = this.l2BlockContexts[l2BlockNum - INITIAL_L2_BLOCK_NUM]?.block; + return Promise.resolve(block?.newContractData); + } + + /** + * Gets the number of the latest L2 block processed. + * @returns The number of the latest L2 block processed. 
+ */ + public getBlockNumber(): Promise { + if (this.l2BlockContexts.length === 0) { + return Promise.resolve(INITIAL_L2_BLOCK_NUM - 1); + } + return Promise.resolve(this.l2BlockContexts[this.l2BlockContexts.length - 1].block.number); + } + + public getL1BlockNumber(): Promise { + if (this.l2BlockContexts.length === 0) { + return Promise.resolve(0n); + } + return Promise.resolve(this.l2BlockContexts[this.l2BlockContexts.length - 1].block.getL1BlockNumber()); + } +} diff --git a/yarn-project/archiver/src/index.ts b/yarn-project/archiver/src/index.ts index 144cd9210a7..c2f0d128be8 100644 --- a/yarn-project/archiver/src/index.ts +++ b/yarn-project/archiver/src/index.ts @@ -4,8 +4,8 @@ import { fileURLToPath } from '@aztec/foundation/url'; import { createPublicClient, http } from 'viem'; import { localhost } from 'viem/chains'; -import { MemoryArchiverStore } from './archiver/archiver_store.js'; import { Archiver, getConfigEnvVars } from './archiver/index.js'; +import { MemoryArchiverStore } from './archiver/memory_archiver_store/memory_archiver_store.js'; export * from './archiver/index.js'; @@ -32,7 +32,6 @@ async function main() { l1Contracts.inboxAddress, l1Contracts.registryAddress, l1Contracts.contractDeploymentEmitterAddress, - 0, // searchStartBlock archiverStore, ); diff --git a/yarn-project/aztec-node/package.json b/yarn-project/aztec-node/package.json index a4532c23e6f..65c618f5fe1 100644 --- a/yarn-project/aztec-node/package.json +++ b/yarn-project/aztec-node/package.json @@ -46,6 +46,7 @@ "koa": "^2.14.2", "koa-router": "^12.0.0", "levelup": "^5.1.1", + "lmdb": "^2.9.1", "memdown": "^6.1.1", "tslib": "^2.4.0" }, diff --git a/yarn-project/aztec-node/src/aztec-node/db.ts b/yarn-project/aztec-node/src/aztec-node/db.ts index 28ece5ea90e..35ebf1b86b6 100644 --- a/yarn-project/aztec-node/src/aztec-node/db.ts +++ b/yarn-project/aztec-node/src/aztec-node/db.ts @@ -1,5 +1,8 @@ +import { LogFn } from '@aztec/foundation/log'; + import { LevelDown, default as 
leveldown } from 'leveldown'; import { LevelUp, default as levelup } from 'levelup'; +import { RootDatabase, open } from 'lmdb'; import { MemDown, default as memdown } from 'memdown'; import { mkdir } from 'node:fs/promises'; import { join } from 'node:path'; @@ -10,6 +13,7 @@ export const createMemDown = () => (memdown as any)() as MemDown; export const createLevelDown = (path: string) => (leveldown as any)(path) as LevelDown; const DB_SUBDIR = 'aztec-node'; +const WORLD_STATE_SUBDIR = 'aztec-world-state'; const NODE_METADATA_KEY = '@@aztec_node_metadata'; /** @@ -28,45 +32,61 @@ type NodeMetadata = { * @throws If `config.dataDirectory` is set and the directory cannot be created. * @returns The database for the aztec node. */ -export async function openDb(config: AztecNodeConfig): Promise { +export async function openDb( + config: AztecNodeConfig, + log: LogFn, +): Promise<[nodeDb: RootDatabase, worldStateDb: LevelUp]> { const nodeMetadata: NodeMetadata = { rollupContractAddress: config.l1Contracts.rollupAddress.toString(), }; - let db: LevelUp; + let nodeDb: RootDatabase; + let worldStateDb: LevelUp; if (config.dataDirectory) { - const dbDir = join(config.dataDirectory, DB_SUBDIR); + const nodeDir = join(config.dataDirectory, DB_SUBDIR); + const worldStateDir = join(config.dataDirectory, WORLD_STATE_SUBDIR); // this throws if we don't have permissions to create the directory - await mkdir(dbDir, { recursive: true }); - db = levelup(createLevelDown(dbDir)); - } else { - db = levelup(createMemDown()); - } + await mkdir(nodeDir, { recursive: true }); + await mkdir(worldStateDir, { recursive: true }); - const prevNodeMetadata = await getNodeMetadata(db); + log(`Opening aztec-node database at ${nodeDir}`); + nodeDb = open(nodeDir, {}); - // if the rollup addresses are different, wipe the local database and start over - if (nodeMetadata.rollupContractAddress !== prevNodeMetadata.rollupContractAddress) { - await db.clear(); + log(`Opening world-state database at 
${worldStateDir}`); + worldStateDb = levelup(createLevelDown(worldStateDir)); + } else { + log('Opening temporary databases'); + // not passing a path will use a temp file that gets deleted when the process exits + nodeDb = open({}); + worldStateDb = levelup(createMemDown()); } - await db.put(NODE_METADATA_KEY, JSON.stringify(nodeMetadata)); - return db; + await checkNodeMetadataAndClear(nodeDb, worldStateDb, nodeMetadata, log); + return [nodeDb, worldStateDb]; } /** - * Gets the metadata for the aztec node. - * @param db - The database for the aztec node. - * @returns Node metadata. + * Checks the node metadata and clears the database if the rollup contract address has changed. + * @param nodeDb - The database for the aztec node. + * @param nodeMetadata - The metadata for the aztec node. */ -async function getNodeMetadata(db: LevelUp): Promise { +async function checkNodeMetadataAndClear( + nodeDb: RootDatabase, + worldStateDb: LevelUp, + nodeMetadata: NodeMetadata, + log: LogFn, +): Promise { + const metadataDB = nodeDb.openDB('metadata', {}); try { - const value: Buffer = await db.get(NODE_METADATA_KEY); - return JSON.parse(value.toString('utf-8')); - } catch { - return { - rollupContractAddress: '', - }; + const existing = metadataDB.get(NODE_METADATA_KEY); + // if the rollup addresses are different, wipe the local database and start over + if (!existing || existing.rollupContractAddress !== nodeMetadata.rollupContractAddress) { + log('Rollup contract address has changed, clearing databases'); + await Promise.all([nodeDb.clearAsync(), worldStateDb.clear()]); + } + await metadataDB.put(NODE_METADATA_KEY, nodeMetadata); + } finally { + await metadataDB.close(); } } diff --git a/yarn-project/aztec-node/src/aztec-node/server.ts b/yarn-project/aztec-node/src/aztec-node/server.ts index b528a699343..b2f83b86232 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.ts @@ -1,4 +1,4 @@ -import { Archiver } from 
'@aztec/archiver'; +import { Archiver, LMDBArchiverStore } from '@aztec/archiver'; import { CONTRACT_TREE_HEIGHT, Fr, @@ -7,7 +7,7 @@ import { L1_TO_L2_MSG_TREE_HEIGHT, NOTE_HASH_TREE_HEIGHT, } from '@aztec/circuits.js'; -import { computePublicDataTreeIndex } from '@aztec/circuits.js/abis'; +import { computeGlobalsHash, computePublicDataTreeIndex } from '@aztec/circuits.js/abis'; import { L1ContractAddresses, createEthereumChain } from '@aztec/ethereum'; import { AztecAddress } from '@aztec/foundation/aztec-address'; import { createDebugLogger } from '@aztec/foundation/log'; @@ -47,7 +47,7 @@ import { getConfigEnvVars as getWorldStateConfig, } from '@aztec/world-state'; -import levelup from 'levelup'; +import { LevelUp } from 'levelup'; import { AztecNodeConfig } from './config.js'; import { openDb } from './db.js'; @@ -69,7 +69,7 @@ export class AztecNodeService implements AztecNode { protected readonly chainId: number, protected readonly version: number, protected readonly globalVariableBuilder: GlobalVariableBuilder, - protected readonly merkleTreesDb: levelup.LevelUp, + protected readonly merkleTreesDb: LevelUp, private log = createDebugLogger('aztec:node'), ) { const message = @@ -95,8 +95,13 @@ export class AztecNodeService implements AztecNode { `RPC URL configured for chain id ${ethereumChain.chainInfo.id} but expected id ${config.chainId}`, ); } + + const log = createDebugLogger('aztec:node'); + const [nodeDb, worldStateDb] = await openDb(config, log); + // first create and sync the archiver - const archiver = await Archiver.createAndSync(config); + const archiverStore = new LMDBArchiverStore(nodeDb, config.maxLogs); + const archiver = await Archiver.createAndSync(config, archiverStore, true); // we identify the P2P transaction protocol by using the rollup contract address. 
// this may well change in future @@ -106,10 +111,14 @@ export class AztecNodeService implements AztecNode { const p2pClient = await createP2PClient(config, new InMemoryTxPool(), archiver); // now create the merkle trees and the world state synchronizer - const db = await openDb(config); - const merkleTrees = await MerkleTrees.new(db); + const merkleTrees = await MerkleTrees.new(worldStateDb); const worldStateConfig: WorldStateConfig = getWorldStateConfig(); - const worldStateSynchronizer = await ServerWorldStateSynchronizer.new(db, merkleTrees, archiver, worldStateConfig); + const worldStateSynchronizer = await ServerWorldStateSynchronizer.new( + worldStateDb, + merkleTrees, + archiver, + worldStateConfig, + ); // start both and wait for them to sync from the block source await Promise.all([p2pClient.start(), worldStateSynchronizer.start()]); @@ -132,7 +141,8 @@ export class AztecNodeService implements AztecNode { ethereumChain.chainInfo.id, config.version, getGlobalVariableBuilder(config), - db, + worldStateDb, + log, ); } @@ -418,8 +428,9 @@ export class AztecNodeService implements AztecNode { // TODO we should be able to remove this after https://github.com/AztecProtocol/aztec-packages/issues/1869 // So simulation of public functions doesn't affect the merkle trees. 
const merkleTrees = new MerkleTrees(this.merkleTreesDb, this.log); + const globalVariablesHash = computeGlobalsHash(prevGlobalVariables); await merkleTrees.init({ - globalVariables: prevGlobalVariables, + globalVariablesHash, }); const publicProcessorFactory = new PublicProcessorFactory( diff --git a/yarn-project/aztec-node/src/declaration.d.ts b/yarn-project/aztec-node/src/declaration.d.ts new file mode 100644 index 00000000000..d7367c50ba8 --- /dev/null +++ b/yarn-project/aztec-node/src/declaration.d.ts @@ -0,0 +1,16 @@ +// eslint-disable-next-line @typescript-eslint/no-unused-vars +import { RootDatabaseOptionsWithPath } from 'lmdb'; + +// The problem is this snippet `nodeDb = open({});` in src/aztec-node/db.ts +// tsc compiles this code fine, but ts-jest can't. +// This is a mixture for two bugs: +// - the first in ts-jest, it gets confused by packages with mixed CJS and ESM type exports - https://github.com/kulshekhar/ts-jest/issues/4221 +// - the second in lmdb, it outputs different CJS and ESM types - https://github.com/kriszyp/lmdb-js/issues/243#issuecomment-1823585586 + +declare module 'lmdb' { + /* eslint-disable jsdoc/require-jsdoc */ + interface RootDatabaseOptionsWithPath { + path?: string; + } + /* eslint-enable jsdoc/require-jsdoc */ +} diff --git a/yarn-project/end-to-end/package.json b/yarn-project/end-to-end/package.json index bb804afbf6b..d3059c62120 100644 --- a/yarn-project/end-to-end/package.json +++ b/yarn-project/end-to-end/package.json @@ -56,6 +56,7 @@ "koa": "^2.14.2", "koa-static": "^5.0.0", "levelup": "^5.1.1", + "lmdb": "^2.9.1", "lodash.compact": "^3.0.1", "lodash.every": "^4.6.0", "lodash.times": "^4.3.2", diff --git a/yarn-project/end-to-end/src/e2e_slow_tree.test.ts b/yarn-project/end-to-end/src/e2e_slow_tree.test.ts index 6ce7cda57a8..87c267e5e82 100644 --- a/yarn-project/end-to-end/src/e2e_slow_tree.test.ts +++ b/yarn-project/end-to-end/src/e2e_slow_tree.test.ts @@ -133,7 +133,7 @@ describe('e2e_slow_tree', () => { 
); await wallet.addCapsule(getMembershipCapsule({ ...zeroProof, value: new Fr(0) })); await expect(contract.methods.read_at(key).simulate()).rejects.toThrowError( - 'Assertion failed: Root does not match expected', + /Assertion failed: Root does not match expected/, ); logger(`"Reads" tree[${key}], expect to be 1`); diff --git a/yarn-project/end-to-end/src/integration_archiver_l1_to_l2.test.ts b/yarn-project/end-to-end/src/integration_archiver_l1_to_l2.test.ts index c33b0fe3887..9252a95e017 100644 --- a/yarn-project/end-to-end/src/integration_archiver_l1_to_l2.test.ts +++ b/yarn-project/end-to-end/src/integration_archiver_l1_to_l2.test.ts @@ -1,4 +1,4 @@ -import { Archiver } from '@aztec/archiver'; +import { Archiver, LMDBArchiverStore } from '@aztec/archiver'; import { AztecNodeConfig } from '@aztec/aztec-node'; import { AztecAddress, @@ -12,6 +12,7 @@ import { } from '@aztec/aztec.js'; import { TokenContract } from '@aztec/noir-contracts/types'; +import { open } from 'lmdb'; import { Chain, HttpTransport, PublicClient } from 'viem'; import { delay, deployAndInitializeTokenAndBridgeContracts, setNextBlockTimestamp, setup } from './fixtures/utils.js'; @@ -40,7 +41,10 @@ describe('archiver integration with l1 to l2 messages', () => { let accounts: CompleteAddress[]; ({ teardown, wallet, deployL1ContractsValues, accounts, config, logger } = await setup(2)); config.archiverPollingIntervalMS = 100; - archiver = await Archiver.createAndSync({ ...config, l1Contracts: deployL1ContractsValues.l1ContractAddresses }); + archiver = await Archiver.createAndSync( + { ...config, l1Contracts: deployL1ContractsValues.l1ContractAddresses }, + new LMDBArchiverStore(open({} as any)), + ); const walletClient = deployL1ContractsValues.walletClient; publicClient = deployL1ContractsValues.publicClient; @@ -118,6 +122,6 @@ describe('archiver integration with l1 to l2 messages', () => { await l2Token.methods.transfer_public(owner, receiver, 0n, 0n).send().wait(); expect((await 
archiver.getPendingL1ToL2Messages(10)).length).toEqual(0); - expect(() => archiver.getConfirmedL1ToL2Message(Fr.ZERO)).toThrow(); + await expect(archiver.getConfirmedL1ToL2Message(Fr.ZERO)).rejects.toThrow(); }, 30_000); }); diff --git a/yarn-project/end-to-end/src/sample-dapp/contracts.mjs b/yarn-project/end-to-end/src/sample-dapp/contracts.mjs index aa4e08b1004..80edf95737a 100644 --- a/yarn-project/end-to-end/src/sample-dapp/contracts.mjs +++ b/yarn-project/end-to-end/src/sample-dapp/contracts.mjs @@ -1,4 +1,4 @@ -import { Contract } from '@aztec/aztec.js'; +import { AztecAddress, Contract } from '@aztec/aztec.js'; import { TokenContractArtifact } from '@aztec/noir-contracts/artifacts'; import { readFileSync } from 'fs'; @@ -6,6 +6,6 @@ import { readFileSync } from 'fs'; // docs:start:get-tokens export async function getToken(client) { const addresses = JSON.parse(readFileSync('addresses.json')); - return Contract.at(addresses.token, TokenContractArtifact, client); + return Contract.at(AztecAddress.fromString(addresses.token), TokenContractArtifact, client); } // docs:end:get-tokens diff --git a/yarn-project/merkle-tree/src/standard_indexed_tree/standard_indexed_tree.ts b/yarn-project/merkle-tree/src/standard_indexed_tree/standard_indexed_tree.ts index 62a1f69cb76..29c2bac627c 100644 --- a/yarn-project/merkle-tree/src/standard_indexed_tree/standard_indexed_tree.ts +++ b/yarn-project/merkle-tree/src/standard_indexed_tree/standard_indexed_tree.ts @@ -8,7 +8,12 @@ import { TreeBase } from '../tree_base.js'; const log = createDebugLogger('aztec:standard-indexed-tree'); const indexToKeyLeaf = (name: string, index: bigint) => { - return `${name}:leaf:${index}`; + return `${name}:leaf:${toBufferBE(index, 32).toString('hex')}`; +}; + +const keyLeafToIndex = (key: string): bigint => { + const index = key.split(':')[2]; + return toBigIntBE(Buffer.from(index, 'hex')); }; const zeroLeaf: LeafData = { @@ -245,8 +250,8 @@ export class StandardIndexedTree extends TreeBase 
implements IndexedTree { lte: indexToKeyLeaf(this.getName(), 2n ** BigInt(this.getDepth())), }) .on('data', function (data) { - const index = Number(data.key); - values[index] = decodeTreeValue(data.value); + const index = keyLeafToIndex(data.key.toString('utf-8')); + values[Number(index)] = decodeTreeValue(data.value); }) .on('close', function () {}) .on('end', function () { @@ -269,7 +274,7 @@ export class StandardIndexedTree extends TreeBase implements IndexedTree { const keys = Object.getOwnPropertyNames(this.cachedLeaves); for (const key of keys) { const index = Number(key); - batch.put(key, this.cachedLeaves[index]); + batch.put(indexToKeyLeaf(this.getName(), BigInt(index)), encodeTreeValue(this.cachedLeaves[index])); this.leaves[index] = this.cachedLeaves[index]; } await batch.write(); diff --git a/yarn-project/types/package.json b/yarn-project/types/package.json index 4aa7cc04ca3..17e999a421f 100644 --- a/yarn-project/types/package.json +++ b/yarn-project/types/package.json @@ -4,7 +4,8 @@ "type": "module", "exports": { ".": "./dest/index.js", - "./stats": "./dest/stats/index.js" + "./stats": "./dest/stats/index.js", + "./jest": "./dest/jest/index.js" }, "typedocOptions": { "entryPoints": [ diff --git a/yarn-project/types/src/contract_data.ts b/yarn-project/types/src/contract_data.ts index ac1727f6452..093a14b0176 100644 --- a/yarn-project/types/src/contract_data.ts +++ b/yarn-project/types/src/contract_data.ts @@ -202,11 +202,12 @@ export class ExtendedContractData { /** * Generate ContractData with random addresses. + * @param contractData - Optional contract data to use. * @returns A random ExtendedContractData object. */ - static random(): ExtendedContractData { + static random(contractData?: ContractData): ExtendedContractData { return new ExtendedContractData( - ContractData.random(), + contractData ?? 
ContractData.random(), [EncodedContractFunction.random(), EncodedContractFunction.random()], Fr.random(), Point.random(), diff --git a/yarn-project/types/src/jest/eq_testers.ts b/yarn-project/types/src/jest/eq_testers.ts new file mode 100644 index 00000000000..bf7f7253414 --- /dev/null +++ b/yarn-project/types/src/jest/eq_testers.ts @@ -0,0 +1,29 @@ +import { L2Block } from '../l2_block.js'; + +/** + * Checks if two objects are the same L2Block. + * + * Sometimes we might be comparing two L2Block instances that represent the same block but one of them might not have + * calculated and filled its `blockHash` property (which is computed on demand). This function ensures both objects + * are really the same L2Block. + * + * @param a - An object + * @param b - Another object + * @returns True if both a and b are the same L2Block + */ +export function equalL2Blocks(a: any, b: any) { + const aAsL2Block = a && a instanceof L2Block ? a : undefined; + const bAsL2Block = b && b instanceof L2Block ? b : undefined; + + if (aAsL2Block && bAsL2Block) { + // we got two L2Block instances, so we can compare them + // use a custom comparator because the blockHash property is lazily computed and one instance might not have it + return aAsL2Block.toBuffer().equals(bAsL2Block.toBuffer()); + } else if (aAsL2Block || bAsL2Block) { + // one value is an L2block and the other isn't. Definitely not equal. 
+ return false; + } else { + // we don't know what they are, tell Jest to keep looking + return undefined; + } +} diff --git a/yarn-project/types/src/jest/index.ts b/yarn-project/types/src/jest/index.ts new file mode 100644 index 00000000000..94b59b06822 --- /dev/null +++ b/yarn-project/types/src/jest/index.ts @@ -0,0 +1,5 @@ +import { expect } from '@jest/globals'; + +import { equalL2Blocks } from './eq_testers.js'; + +expect.addEqualityTesters([equalL2Blocks]); diff --git a/yarn-project/types/src/l1_to_l2_message.ts b/yarn-project/types/src/l1_to_l2_message.ts index 0fae5414d6f..fffc5ca64c3 100644 --- a/yarn-project/types/src/l1_to_l2_message.ts +++ b/yarn-project/types/src/l1_to_l2_message.ts @@ -62,6 +62,34 @@ export class L1ToL2MessageAndIndex { } } +/** + * An L1 to L2 message emitted in a particular L1 block. + */ +export class PendingL1ToL2Message { + constructor( + /** the message */ + public readonly message: L1ToL2Message, + /** the L1 block this message was emitted in */ + public readonly blockNumber: bigint, + /** at which index in the L1 block this message was emitted */ + public readonly indexInBlock: number, + ) {} +} + +/** + * An L1 to L2 message that was cancelled. + */ +export class CancelledL1ToL2Message { + constructor( + /** the message */ + public readonly entryKey: Fr, + /** the L1 block this message was emitted in */ + public readonly blockNumber: bigint, + /** at which index in the L1 block this message was emitted */ + public readonly indexInBlock: number, + ) {} +} + /** * The format of an L1 to L2 Message. 
*/ @@ -131,7 +159,7 @@ export class L1ToL2Message { return new L1ToL2Message(L1Actor.empty(), L2Actor.empty(), Fr.ZERO, Fr.ZERO, 0, 0); } - static random(): L1ToL2Message { + static random(entryKey?: Fr): L1ToL2Message { return new L1ToL2Message( L1Actor.random(), L2Actor.random(), @@ -139,6 +167,7 @@ export class L1ToL2Message { Fr.random(), Math.floor(Math.random() * 1000), Math.floor(Math.random() * 1000), + entryKey, ); } } diff --git a/yarn-project/types/src/l2_block.ts b/yarn-project/types/src/l2_block.ts index 3b6d4ebb6a8..1ddd1347ede 100644 --- a/yarn-project/types/src/l2_block.ts +++ b/yarn-project/types/src/l2_block.ts @@ -54,6 +54,8 @@ export class L2Block { */ public newUnencryptedLogs?: L2BlockL2Logs; + #l1BlockNumber?: bigint; + constructor( /** * The number of the L2 block. @@ -142,6 +144,7 @@ export class L2Block { newEncryptedLogs?: L2BlockL2Logs, newUnencryptedLogs?: L2BlockL2Logs, private blockHash?: Buffer, + l1BlockNumber?: bigint, ) { if (newCommitments.length % MAX_NEW_COMMITMENTS_PER_TX !== 0) { throw new Error(`The number of new commitments must be a multiple of ${MAX_NEW_COMMITMENTS_PER_TX}.`); @@ -162,6 +165,8 @@ export class L2Block { this.numberOfTxs++; } } + + this.#l1BlockNumber = l1BlockNumber; } /** @@ -202,37 +207,43 @@ export class L2Block { LogType.UNENCRYPTED, ); - return L2Block.fromFields({ - number: l2BlockNum, - globalVariables: makeGlobalVariables(0, l2BlockNum), - startNoteHashTreeSnapshot: makeAppendOnlyTreeSnapshot(0), - startNullifierTreeSnapshot: makeAppendOnlyTreeSnapshot(0), - startContractTreeSnapshot: makeAppendOnlyTreeSnapshot(0), - startPublicDataTreeRoot: Fr.random(), - startL1ToL2MessagesTreeSnapshot: makeAppendOnlyTreeSnapshot(0), - startHistoricBlocksTreeSnapshot: makeAppendOnlyTreeSnapshot(0), - endNoteHashTreeSnapshot: makeAppendOnlyTreeSnapshot(newCommitments.length), - endNullifierTreeSnapshot: makeAppendOnlyTreeSnapshot(newNullifiers.length), - endContractTreeSnapshot: 
makeAppendOnlyTreeSnapshot(newContracts.length), - endPublicDataTreeRoot: Fr.random(), - endL1ToL2MessagesTreeSnapshot: makeAppendOnlyTreeSnapshot(1), - endHistoricBlocksTreeSnapshot: makeAppendOnlyTreeSnapshot(1), - newCommitments, - newNullifiers, - newContracts, - newContractData, - newPublicDataWrites, - newL1ToL2Messages, - newL2ToL1Msgs, - newEncryptedLogs, - newUnencryptedLogs, - }); + return L2Block.fromFields( + { + number: l2BlockNum, + globalVariables: makeGlobalVariables(0, l2BlockNum), + startNoteHashTreeSnapshot: makeAppendOnlyTreeSnapshot(0), + startNullifierTreeSnapshot: makeAppendOnlyTreeSnapshot(0), + startContractTreeSnapshot: makeAppendOnlyTreeSnapshot(0), + startPublicDataTreeRoot: Fr.random(), + startL1ToL2MessagesTreeSnapshot: makeAppendOnlyTreeSnapshot(0), + startHistoricBlocksTreeSnapshot: makeAppendOnlyTreeSnapshot(0), + endNoteHashTreeSnapshot: makeAppendOnlyTreeSnapshot(newCommitments.length), + endNullifierTreeSnapshot: makeAppendOnlyTreeSnapshot(newNullifiers.length), + endContractTreeSnapshot: makeAppendOnlyTreeSnapshot(newContracts.length), + endPublicDataTreeRoot: Fr.random(), + endL1ToL2MessagesTreeSnapshot: makeAppendOnlyTreeSnapshot(1), + endHistoricBlocksTreeSnapshot: makeAppendOnlyTreeSnapshot(1), + newCommitments, + newNullifiers, + newContracts, + newContractData, + newPublicDataWrites, + newL1ToL2Messages, + newL2ToL1Msgs, + newEncryptedLogs, + newUnencryptedLogs, + }, + undefined, + // just for testing purposes, each random L2 block got emitted in the equivalent L1 block + BigInt(l2BlockNum), + ); } /** * Constructs a new instance from named fields. * @param fields - Fields to pass to the constructor. * @param blockHash - Hash of the block. + * @param l1BlockNumber - The block number of the L1 block that contains this L2 block. * @returns A new instance. 
*/ static fromFields( @@ -331,6 +342,7 @@ export class L2Block { newUnencryptedLogs?: L2BlockL2Logs; }, blockHash?: Buffer, + l1BlockNumber?: bigint, ) { return new this( fields.number, @@ -357,6 +369,7 @@ export class L2Block { fields.newEncryptedLogs, fields.newUnencryptedLogs, blockHash, + l1BlockNumber, ); } @@ -533,6 +546,25 @@ export class L2Block { this[logFieldName] = logs; } + /** + * Sets the L1 block number that included this block + * @param l1BlockNumber - The block number of the L1 block that contains this L2 block. + */ + public setL1BlockNumber(l1BlockNumber: bigint) { + this.#l1BlockNumber = l1BlockNumber; + } + + /** + * Gets the L1 block number that included this block + */ + public getL1BlockNumber(): bigint { + if (typeof this.#l1BlockNumber === 'undefined') { + throw new Error('L1 block number has to be attached before calling "getL1BlockNumber"'); + } + + return this.#l1BlockNumber; + } + /** * Returns the block's hash. * @returns The block's hash. diff --git a/yarn-project/types/src/mocks.ts b/yarn-project/types/src/mocks.ts index 5752c8789f8..24d958df490 100644 --- a/yarn-project/types/src/mocks.ts +++ b/yarn-project/types/src/mocks.ts @@ -34,7 +34,7 @@ export const mockTx = (seed = 1) => { TxL2Logs.random(8, 3), // 8 priv function invocations creating 3 encrypted logs each TxL2Logs.random(11, 2), // 8 priv + 3 pub function invocations creating 2 unencrypted logs each times(MAX_PUBLIC_CALL_STACK_LENGTH_PER_TX, makePublicCallRequest), - times(MAX_NEW_CONTRACTS_PER_TX, ExtendedContractData.random) as Tuple< + times(MAX_NEW_CONTRACTS_PER_TX, () => ExtendedContractData.random()) as Tuple< ExtendedContractData, typeof MAX_NEW_CONTRACTS_PER_TX >, diff --git a/yarn-project/world-state/src/world-state-db/merkle_trees.ts b/yarn-project/world-state/src/world-state-db/merkle_trees.ts index 16275724b9b..1e8fdc7b5c1 100644 --- a/yarn-project/world-state/src/world-state-db/merkle_trees.ts +++ b/yarn-project/world-state/src/world-state-db/merkle_trees.ts 
@@ -47,11 +47,13 @@ import { */ interface FromDbOptions { /** - * The global variables from the last block. + * The global variables hash from the last block. */ - globalVariables: GlobalVariables; + globalVariablesHash: Fr; } +const LAST_GLOBAL_VARS_HASH = 'lastGlobalVarsHash'; + /** * A convenience class for managing multiple merkle trees. */ @@ -127,7 +129,9 @@ export class MerkleTrees implements MerkleTreeDb { await this._updateHistoricBlocksTree(initialGlobalVariablesHash, true); await this._commit(); } else { - await this._updateLatestGlobalVariablesHash(computeGlobalsHash(fromDbOptions.globalVariables)); + await this._updateLatestGlobalVariablesHash(fromDbOptions.globalVariablesHash); + // make the restored global variables hash and tree roots current + await this._commit(); } } @@ -138,7 +142,10 @@ export class MerkleTrees implements MerkleTreeDb { */ public static async new(db: levelup.LevelUp) { const merkleTrees = new MerkleTrees(db); - await merkleTrees.init(); + const globalVariablesHash: Buffer | undefined = await db.get(LAST_GLOBAL_VARS_HASH).catch(() => undefined); + await merkleTrees.init( + globalVariablesHash ? 
{ globalVariablesHash: Fr.fromBuffer(globalVariablesHash) } : undefined, + ); return merkleTrees; } @@ -504,6 +511,7 @@ export class MerkleTrees implements MerkleTreeDb { await tree.commit(); } this.latestGlobalVariablesHash.commit(); + await this.db.put(LAST_GLOBAL_VARS_HASH, this.latestGlobalVariablesHash.get().toBuffer()); } /** diff --git a/yarn-project/yarn.lock b/yarn-project/yarn.lock index cc4fb0dbca9..0d12832b004 100644 --- a/yarn-project/yarn.lock +++ b/yarn-project/yarn.lock @@ -103,6 +103,7 @@ __metadata: debug: ^4.3.4 jest: ^29.5.0 jest-mock-extended: ^3.0.4 + lmdb: ^2.9.1 lodash.omit: ^4.5.0 lodash.times: ^4.3.2 ts-jest: ^29.1.0 @@ -161,6 +162,7 @@ __metadata: koa: ^2.14.2 koa-router: ^12.0.0 levelup: ^5.1.1 + lmdb: ^2.9.1 memdown: ^6.1.1 ts-jest: ^29.1.0 ts-node: ^10.9.1 @@ -446,6 +448,7 @@ __metadata: koa: ^2.14.2 koa-static: ^5.0.0 levelup: ^5.1.1 + lmdb: ^2.9.1 lodash.compact: ^3.0.1 lodash.every: ^4.6.0 lodash.times: ^4.3.2 @@ -3260,6 +3263,48 @@ __metadata: languageName: node linkType: hard +"@lmdb/lmdb-darwin-arm64@npm:2.9.1": + version: 2.9.1 + resolution: "@lmdb/lmdb-darwin-arm64@npm:2.9.1" + conditions: os=darwin & cpu=arm64 + languageName: node + linkType: hard + +"@lmdb/lmdb-darwin-x64@npm:2.9.1": + version: 2.9.1 + resolution: "@lmdb/lmdb-darwin-x64@npm:2.9.1" + conditions: os=darwin & cpu=x64 + languageName: node + linkType: hard + +"@lmdb/lmdb-linux-arm64@npm:2.9.1": + version: 2.9.1 + resolution: "@lmdb/lmdb-linux-arm64@npm:2.9.1" + conditions: os=linux & cpu=arm64 + languageName: node + linkType: hard + +"@lmdb/lmdb-linux-arm@npm:2.9.1": + version: 2.9.1 + resolution: "@lmdb/lmdb-linux-arm@npm:2.9.1" + conditions: os=linux & cpu=arm + languageName: node + linkType: hard + +"@lmdb/lmdb-linux-x64@npm:2.9.1": + version: 2.9.1 + resolution: "@lmdb/lmdb-linux-x64@npm:2.9.1" + conditions: os=linux & cpu=x64 + languageName: node + linkType: hard + +"@lmdb/lmdb-win32-x64@npm:2.9.1": + version: 2.9.1 + resolution: 
"@lmdb/lmdb-win32-x64@npm:2.9.1" + conditions: os=win32 & cpu=x64 + languageName: node + linkType: hard + "@ltd/j-toml@npm:^1.38.0": version: 1.38.0 resolution: "@ltd/j-toml@npm:1.38.0" @@ -3417,6 +3462,48 @@ __metadata: languageName: node linkType: hard +"@msgpackr-extract/msgpackr-extract-darwin-arm64@npm:3.0.2": + version: 3.0.2 + resolution: "@msgpackr-extract/msgpackr-extract-darwin-arm64@npm:3.0.2" + conditions: os=darwin & cpu=arm64 + languageName: node + linkType: hard + +"@msgpackr-extract/msgpackr-extract-darwin-x64@npm:3.0.2": + version: 3.0.2 + resolution: "@msgpackr-extract/msgpackr-extract-darwin-x64@npm:3.0.2" + conditions: os=darwin & cpu=x64 + languageName: node + linkType: hard + +"@msgpackr-extract/msgpackr-extract-linux-arm64@npm:3.0.2": + version: 3.0.2 + resolution: "@msgpackr-extract/msgpackr-extract-linux-arm64@npm:3.0.2" + conditions: os=linux & cpu=arm64 + languageName: node + linkType: hard + +"@msgpackr-extract/msgpackr-extract-linux-arm@npm:3.0.2": + version: 3.0.2 + resolution: "@msgpackr-extract/msgpackr-extract-linux-arm@npm:3.0.2" + conditions: os=linux & cpu=arm + languageName: node + linkType: hard + +"@msgpackr-extract/msgpackr-extract-linux-x64@npm:3.0.2": + version: 3.0.2 + resolution: "@msgpackr-extract/msgpackr-extract-linux-x64@npm:3.0.2" + conditions: os=linux & cpu=x64 + languageName: node + linkType: hard + +"@msgpackr-extract/msgpackr-extract-win32-x64@npm:3.0.2": + version: 3.0.2 + resolution: "@msgpackr-extract/msgpackr-extract-win32-x64@npm:3.0.2" + conditions: os=win32 & cpu=x64 + languageName: node + linkType: hard + "@multiformats/mafmt@npm:^12.1.2": version: 12.1.6 resolution: "@multiformats/mafmt@npm:12.1.6" @@ -9041,6 +9128,13 @@ __metadata: languageName: node linkType: hard +"detect-libc@npm:^2.0.1": + version: 2.0.2 + resolution: "detect-libc@npm:2.0.2" + checksum: 2b2cd3649b83d576f4be7cc37eb3b1815c79969c8b1a03a40a4d55d83bc74d010753485753448eacb98784abf22f7dbd3911fd3b60e29fda28fed2d1a997944d + languageName: 
node + linkType: hard + "detect-newline@npm:^3.0.0": version: 3.1.0 resolution: "detect-newline@npm:3.1.0" @@ -14174,6 +14268,41 @@ __metadata: languageName: node linkType: hard +"lmdb@npm:^2.9.1": + version: 2.9.1 + resolution: "lmdb@npm:2.9.1" + dependencies: + "@lmdb/lmdb-darwin-arm64": 2.9.1 + "@lmdb/lmdb-darwin-x64": 2.9.1 + "@lmdb/lmdb-linux-arm": 2.9.1 + "@lmdb/lmdb-linux-arm64": 2.9.1 + "@lmdb/lmdb-linux-x64": 2.9.1 + "@lmdb/lmdb-win32-x64": 2.9.1 + msgpackr: ^1.9.9 + node-addon-api: ^6.1.0 + node-gyp: latest + node-gyp-build-optional-packages: 5.1.1 + ordered-binary: ^1.4.1 + weak-lru-cache: ^1.2.2 + dependenciesMeta: + "@lmdb/lmdb-darwin-arm64": + optional: true + "@lmdb/lmdb-darwin-x64": + optional: true + "@lmdb/lmdb-linux-arm": + optional: true + "@lmdb/lmdb-linux-arm64": + optional: true + "@lmdb/lmdb-linux-x64": + optional: true + "@lmdb/lmdb-win32-x64": + optional: true + bin: + download-lmdb-prebuilds: bin/download-prebuilds.js + checksum: 1f0a8754cc019586c8e34bd45e4ee1df99f6f5732e8dc04f951cf631895a179dfd913123773206935a580cfe80bce117800a3ccf0a2cc8187821badfdaa71cd4 + languageName: node + linkType: hard + "load-json-file@npm:^6.2.0": version: 6.2.0 resolution: "load-json-file@npm:6.2.0" @@ -15214,6 +15343,49 @@ __metadata: languageName: node linkType: hard +"msgpackr-extract@npm:^3.0.2": + version: 3.0.2 + resolution: "msgpackr-extract@npm:3.0.2" + dependencies: + "@msgpackr-extract/msgpackr-extract-darwin-arm64": 3.0.2 + "@msgpackr-extract/msgpackr-extract-darwin-x64": 3.0.2 + "@msgpackr-extract/msgpackr-extract-linux-arm": 3.0.2 + "@msgpackr-extract/msgpackr-extract-linux-arm64": 3.0.2 + "@msgpackr-extract/msgpackr-extract-linux-x64": 3.0.2 + "@msgpackr-extract/msgpackr-extract-win32-x64": 3.0.2 + node-gyp: latest + node-gyp-build-optional-packages: 5.0.7 + dependenciesMeta: + "@msgpackr-extract/msgpackr-extract-darwin-arm64": + optional: true + "@msgpackr-extract/msgpackr-extract-darwin-x64": + optional: true + 
"@msgpackr-extract/msgpackr-extract-linux-arm": + optional: true + "@msgpackr-extract/msgpackr-extract-linux-arm64": + optional: true + "@msgpackr-extract/msgpackr-extract-linux-x64": + optional: true + "@msgpackr-extract/msgpackr-extract-win32-x64": + optional: true + bin: + download-msgpackr-prebuilds: bin/download-prebuilds.js + checksum: 5adb809b965bac41c310e60373d54c955fe78e4d134ab036d0f9ee5b322cec0a739878d395e17c1ac82d840705896b2dafae6a8cc04ad34c14d2de4b06b58330 + languageName: node + linkType: hard + +"msgpackr@npm:^1.9.9": + version: 1.9.9 + resolution: "msgpackr@npm:1.9.9" + dependencies: + msgpackr-extract: ^3.0.2 + dependenciesMeta: + msgpackr-extract: + optional: true + checksum: b63182d99f479d79f0d082fd2688ce7cf699b1aee71e20f28591c30b48743bb57868fdd72656759a892891072d186d864702c756434520709e8fe7e0d350a119 + languageName: node + linkType: hard + "multicast-dns@npm:^7.2.5": version: 7.2.5 resolution: "multicast-dns@npm:7.2.5" @@ -15343,6 +15515,15 @@ __metadata: languageName: node linkType: hard +"node-addon-api@npm:^6.1.0": + version: 6.1.0 + resolution: "node-addon-api@npm:6.1.0" + dependencies: + node-gyp: latest + checksum: 3a539510e677cfa3a833aca5397300e36141aca064cdc487554f2017110709a03a95da937e98c2a14ec3c626af7b2d1b6dabe629a481f9883143d0d5bff07bf2 + languageName: node + linkType: hard + "node-cleanup@npm:^2.1.2": version: 2.1.2 resolution: "node-cleanup@npm:2.1.2" @@ -15389,6 +15570,30 @@ __metadata: languageName: node linkType: hard +"node-gyp-build-optional-packages@npm:5.0.7": + version: 5.0.7 + resolution: "node-gyp-build-optional-packages@npm:5.0.7" + bin: + node-gyp-build-optional-packages: bin.js + node-gyp-build-optional-packages-optional: optional.js + node-gyp-build-optional-packages-test: build-test.js + checksum: bcb4537af15bcb3811914ea0db8f69284ca10db1cc7543a167a4c41ae4b9b5044b133f789fdadad0b7adc6931f6ae7def3c75b0bc7b05836881aae52400163e6 + languageName: node + linkType: hard + +"node-gyp-build-optional-packages@npm:5.1.1": + version: 
5.1.1 + resolution: "node-gyp-build-optional-packages@npm:5.1.1" + dependencies: + detect-libc: ^2.0.1 + bin: + node-gyp-build-optional-packages: bin.js + node-gyp-build-optional-packages-optional: optional.js + node-gyp-build-optional-packages-test: build-test.js + checksum: f3cb197862516e6879377adaa58142ae9013ab69c86cf2645f8b008db339354145d8ebd9140a13ec7ece5ce28a372ca7e14660379d3a3dd7b908a6f2743606e9 + languageName: node + linkType: hard + "node-gyp-build@npm:^4.2.0": version: 4.6.1 resolution: "node-gyp-build@npm:4.6.1" @@ -15816,6 +16021,13 @@ __metadata: languageName: node linkType: hard +"ordered-binary@npm:^1.4.1": + version: 1.4.1 + resolution: "ordered-binary@npm:1.4.1" + checksum: 274940b4ef983562e11371c84415c265432a4e1337ab85f8e7669eeab6afee8f655c6c12ecee1cd121aaf399c32f5c781b0d50e460bd42da004eba16dcc66574 + languageName: node + linkType: hard + "outdent@npm:^0.8.0": version: 0.8.0 resolution: "outdent@npm:0.8.0" @@ -20009,6 +20221,13 @@ __metadata: languageName: node linkType: hard +"weak-lru-cache@npm:^1.2.2": + version: 1.2.2 + resolution: "weak-lru-cache@npm:1.2.2" + checksum: 0fbe16839d193ed82ddb4fe331ca8cfaee2ecbd42596aa02366c708956cf41f7258f2d5411c3bc9aa099c26058dc47afbd2593d449718a18e4ef4d870c5ace18 + languageName: node + linkType: hard + "web-streams-polyfill@npm:^3.0.3": version: 3.2.1 resolution: "web-streams-polyfill@npm:3.2.1"