diff --git a/yarn-project/archiver/src/archiver/archiver.test.ts b/yarn-project/archiver/src/archiver/archiver.test.ts index 6184eb4269b..cfffe374c53 100644 --- a/yarn-project/archiver/src/archiver/archiver.test.ts +++ b/yarn-project/archiver/src/archiver/archiver.test.ts @@ -58,15 +58,15 @@ describe('Archiver', () => { const l1ToL2MessageAddedEvents = [ makeL1ToL2MessageAddedEvents( 100n, - blocks[0].newL1ToL2Messages.map(key => key.toString()), + blocks[0].body.l1ToL2Messages.map(key => key.toString()), ), makeL1ToL2MessageAddedEvents( 100n, - blocks[1].newL1ToL2Messages.map(key => key.toString()), + blocks[1].body.l1ToL2Messages.map(key => key.toString()), ), makeL1ToL2MessageAddedEvents( 2501n, - blocks[2].newL1ToL2Messages.map(key => key.toString()), + blocks[2].body.l1ToL2Messages.map(key => key.toString()), ), makeL1ToL2MessageAddedEvents(2502n, [ messageToCancel1, @@ -162,11 +162,11 @@ describe('Archiver', () => { const l1ToL2MessageAddedEvents = [ makeL1ToL2MessageAddedEvents( 100n, - blocks[0].newL1ToL2Messages.map(key => key.toString()), + blocks[0].body.l1ToL2Messages.map(key => key.toString()), ), makeL1ToL2MessageAddedEvents( 101n, - blocks[1].newL1ToL2Messages.map(key => key.toString()), + blocks[1].body.l1ToL2Messages.map(key => key.toString()), ), makeL1ToL2MessageAddedEvents(102n, additionalL1ToL2MessagesBlock102), makeL1ToL2MessageAddedEvents(103n, additionalL1ToL2MessagesBlock103), @@ -223,7 +223,7 @@ describe('Archiver', () => { expect(latestBlockNum).toEqual(0); const block = L2Block.random(1, 4, 1, 2, 4, 6); - block.newL1ToL2Messages = times(2, Fr.random); + block.body.l1ToL2Messages = times(2, Fr.random); const rollupTx = makeRollupTx(block); publicClient.getBlockNumber.mockResolvedValueOnce(2500n); @@ -232,7 +232,7 @@ describe('Archiver', () => { .mockResolvedValueOnce( makeL1ToL2MessageAddedEvents( 100n, - block.newL1ToL2Messages.map(x => x.toString()), + block.body.l1ToL2Messages.map(x => x.toString()), ), ) .mockResolvedValueOnce([]) @@ -250,11 +250,11 @@ describe('Archiver', () => { latestBlockNum = await archiver.getBlockNumber(); expect(latestBlockNum).toEqual(1); - const expectedL1Messages = block.newL1ToL2Messages + const expectedL1Messages = block.body.l1ToL2Messages .concat(times(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP - NUM_RECEIVED_L1_MESSAGES, () => Fr.ZERO)) .map(x => x.value); const receivedBlock = await archiver.getBlock(1); - expect(receivedBlock?.newL1ToL2Messages.map(x => x.value)).toEqual(expectedL1Messages); + expect(receivedBlock?.body.l1ToL2Messages.map(x => x.value)).toEqual(expectedL1Messages); await archiver.stop(); }, 10_000); @@ -352,7 +352,7 @@ function makeRollupTx(l2Block: L2Block) { const header = toHex(l2Block.header.toBuffer()); const archive = toHex(l2Block.archive.root.toBuffer()); const txsHash = toHex(l2Block.getCalldataHash()); - const body = toHex(l2Block.bodyToBuffer()); + const body = toHex(l2Block.toBuffer(true, false)); const proof = `0x`; const input = encodeFunctionData({ abi: RollupAbi, diff --git a/yarn-project/archiver/src/archiver/archiver.ts b/yarn-project/archiver/src/archiver/archiver.ts index b0568277d7d..deb4a02071f 100644 --- a/yarn-project/archiver/src/archiver/archiver.ts +++ b/yarn-project/archiver/src/archiver/archiver.ts @@ -283,15 +283,18 @@ export class Archiver implements ArchiveSource { this.log(`Retrieved ${retrievedBlocks.retrievedData.length} block(s) from chain`); await Promise.all( - retrievedBlocks.retrievedData.map(block => - this.store.addLogs(block.newEncryptedLogs, block.newUnencryptedLogs, 
block.number),
-      ),
+      retrievedBlocks.retrievedData.map(block => {
+        const encryptedLogs = new L2BlockL2Logs(block.body.txEffects.map(txEffect => txEffect.logs!.encryptedLogs));
+        const unencryptedLogs = new L2BlockL2Logs(block.body.txEffects.map(txEffect => txEffect.logs!.unencryptedLogs));
+
+        return this.store.addLogs(encryptedLogs, unencryptedLogs, block.number);
+      }),
    );

    // Unroll all logs emitted during the retrieved blocks and extract any contract classes and instances from them
    await Promise.all(
      retrievedBlocks.retrievedData.map(async block => {
-        const blockLogs = (block.newUnencryptedLogs?.txLogs ?? [])
+        const blockLogs = block.body.txEffects
+          .flatMap(txEffect => (txEffect.logs ? [txEffect.logs.unencryptedLogs] : []))
          .flatMap(txLog => txLog.unrollLogs())
          .map(log => UnencryptedL2Log.fromBuffer(log));
        await this.storeRegisteredContractClasses(blockLogs, block.number);
@@ -315,7 +318,7 @@ export class Archiver implements ArchiveSource {
    // from each l2block fetch all messageKeys in a flattened array:
    this.log(`Confirming l1 to l2 messages in store`);
    for (const block of retrievedBlocks.retrievedData) {
-      await this.store.confirmL1ToL2Messages(block.newL1ToL2Messages);
+      await this.store.confirmL1ToL2Messages(block.body.l1ToL2Messages);
    }

    // store retrieved L2 blocks after removing new logs information.
@@ -323,8 +326,15 @@
    await this.store.addBlocks(
      retrievedBlocks.retrievedData.map(block => {
        // Ensure we pad the L1 to L2 message array to the full size before storing.
-        block.newL1ToL2Messages = padArrayEnd(block.newL1ToL2Messages, Fr.ZERO, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
-        return L2Block.fromFields(omit(block, ['newEncryptedLogs', 'newUnencryptedLogs']), block.getL1BlockNumber());
+        block.body.l1ToL2Messages = padArrayEnd(block.body.l1ToL2Messages, Fr.ZERO, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP);
+
+        block.body.txEffects.forEach(txEffect => delete txEffect.logs);
+
+        return L2Block.fromFields({
+          archive: block.archive,
+          header: block.header,
+          body: block.body,
+        }, block.getL1BlockNumber());
      }),
    );
  }
diff --git a/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts b/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts
index b590b78a762..c111121a78d 100644
--- a/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts
+++ b/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts
@@ -4,6 +4,7 @@ import {
  L1ToL2Message,
  L2Block,
  L2BlockContext,
+  L2BlockL2Logs,
  LogId,
  LogType,
  TxHash,
@@ -127,7 +128,7 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
  describe('addLogs', () => {
    it('adds encrypted & unencrypted logs', async () => {
      await expect(
-        store.addLogs(blocks[0].newEncryptedLogs, blocks[0].newUnencryptedLogs, blocks[0].number),
+        store.addLogs(
+          new L2BlockL2Logs(blocks[0].body.txEffects.map(txEffect => txEffect.logs!.encryptedLogs)),
+          new L2BlockL2Logs(blocks[0].body.txEffects.map(txEffect => txEffect.logs!.unencryptedLogs)),
+          blocks[0].number,
+        ),
      ).resolves.toEqual(true);
    });
  });
@@ -138,14 +139,18 @@
  ])('getLogs (%s)', (_, logType) => {
    beforeEach(async () => {
      await Promise.all(
-        blocks.map(block => store.addLogs(block.newEncryptedLogs, block.newUnencryptedLogs, block.number)),
+        blocks.map(block =>
+          store.addLogs(
+            new L2BlockL2Logs(block.body.txEffects.map(txEffect => txEffect.logs!.encryptedLogs)),
+            new L2BlockL2Logs(block.body.txEffects.map(txEffect => txEffect.logs!.unencryptedLogs)),
+            block.number,
+          ),
+        ),
      );
    });
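+      // Rebuilding `L2BlockL2Logs` from per-tx effects like this recurs throughout these suites and in the archiver
+      // itself; a small shared helper (hypothetical, not part of this diff) could deduplicate it, e.g.:
+      //   const toBlockLogs = (block: L2Block, type: 'encryptedLogs' | 'unencryptedLogs') =>
+      //     new L2BlockL2Logs(block.body.txEffects.map(txEffect => txEffect.logs![type]));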

    it.each(blockTests)('retrieves previously stored logs', async (from, limit, getExpectedBlocks) => {
-      const expectedLogs = getExpectedBlocks().map(block =>
-        logType === LogType.ENCRYPTED ? block.newEncryptedLogs : block.newUnencryptedLogs,
-      );
+      const expectedLogs = getExpectedBlocks().map(block =>
+        logType === LogType.ENCRYPTED
+          ? new L2BlockL2Logs(block.body.txEffects.map(txEffect => txEffect.logs!.encryptedLogs))
+          : new L2BlockL2Logs(block.body.txEffects.map(txEffect => txEffect.logs!.unencryptedLogs)),
+      );
      const actualLogs = await store.getLogs(from, limit, logType);
      expect(actualLogs).toEqual(expectedLogs);
    });
@@ -154,7 +159,13 @@
  describe('getL2Tx', () => {
    beforeEach(async () => {
      await Promise.all(
-        blocks.map(block => store.addLogs(block.newEncryptedLogs, block.newUnencryptedLogs, block.number)),
+        blocks.map(block =>
+          store.addLogs(
+            new L2BlockL2Logs(block.body.txEffects.map(txEffect => txEffect.logs!.encryptedLogs)),
+            new L2BlockL2Logs(block.body.txEffects.map(txEffect => txEffect.logs!.unencryptedLogs)),
+            block.number,
+          ),
+        ),
      );
      await store.addBlocks(blocks);
    });
@@ -366,8 +377,9 @@
    });

    it('returns previously stored contract data', async () => {
-      await expect(store.getContractData(block.newContractData[0].contractAddress)).resolves.toEqual(
-        block.newContractData[0],
+      // Assuming the first (and only) contract data in the first tx
+      await expect(store.getContractData(block.body.txEffects[0].contractData[0].contractAddress)).resolves.toEqual(
+        block.body.txEffects[0].contractData[0],
      );
    });
@@ -384,7 +396,7 @@
    });

    it('returns the contract data for a known block', async () => {
-      await expect(store.getContractDataInBlock(block.number)).resolves.toEqual(block.newContractData);
+      await expect(store.getContractDataInBlock(block.number)).resolves.toEqual(
+        block.body.txEffects.flatMap(txEffect => txEffect.contractData),
+      );
    });

    it('returns an empty array if contract data is not found', async () => {
@@ -409,10 +421,11 @@
      const block = L2Block.random(1);
      await store.addBlocks([block]);

-      const firstContract = ExtendedContractData.random(block.newContractData[0]);
+      // Assuming one contract per tx, and the first two txs
+      const firstContract = ExtendedContractData.random(block.body.txEffects[0].contractData[0]);
      await store.addExtendedContractData([firstContract], block.number);

-      const secondContract = ExtendedContractData.random(block.newContractData[1]);
+      const secondContract = ExtendedContractData.random(block.body.txEffects[1].contractData[0]);
      await store.addExtendedContractData([secondContract], block.number);

      await expect(store.getExtendedContractDataInBlock(block.number)).resolves.toEqual([
@@ -427,7 +440,7 @@
    let extendedContractData: ExtendedContractData;
    beforeEach(async () => {
      block = L2Block.random(1);
-      extendedContractData = ExtendedContractData.random(block.newContractData[0]);
+      extendedContractData = ExtendedContractData.random(block.body.txEffects[0].contractData[0]);
      await store.addBlocks([block]);
      await store.addExtendedContractData([extendedContractData], block.number);
    });
@@ -448,7
+461,7 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch let extendedContractData: ExtendedContractData; beforeEach(async () => { block = L2Block.random(1); - extendedContractData = ExtendedContractData.random(block.newContractData[0]); + extendedContractData = ExtendedContractData.random(block.body.txEffects[0].contractData[0]); await store.addBlocks([block]); await store.addExtendedContractData([extendedContractData], block.number); }); @@ -478,7 +491,13 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch await store.addBlocks(blocks); await Promise.all( - blocks.map(block => store.addLogs(block.newEncryptedLogs, block.newUnencryptedLogs, block.number)), + blocks.map(block => + store.addLogs( + new L2BlockL2Logs(block.body.txEffects.map(txEffect => txEffect.logs!.encryptedLogs)), + new L2BlockL2Logs(block.body.txEffects.map(txEffect => txEffect.logs!.unencryptedLogs)), + block.number, + ), + ), ); }); @@ -530,7 +549,7 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch const targetFunctionLogIndex = Math.floor(Math.random() * numPublicFunctionCalls); const targetLogIndex = Math.floor(Math.random() * numUnencryptedLogs); const targetContractAddress = UnencryptedL2Log.fromBuffer( - blocks[targetBlockIndex].newUnencryptedLogs!.txLogs[targetTxIndex].functionLogs[targetFunctionLogIndex].logs[ + blocks[targetBlockIndex].body.txEffects[targetTxIndex].logs!.unencryptedLogs.functionLogs[targetFunctionLogIndex].logs[ targetLogIndex ], ).contractAddress; @@ -551,7 +570,7 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch const targetFunctionLogIndex = Math.floor(Math.random() * numPublicFunctionCalls); const targetLogIndex = Math.floor(Math.random() * numUnencryptedLogs); const targetSelector = UnencryptedL2Log.fromBuffer( - blocks[targetBlockIndex].newUnencryptedLogs!.txLogs[targetTxIndex].functionLogs[targetFunctionLogIndex].logs[ + blocks[targetBlockIndex].body.txEffects[targetTxIndex].logs!.unencryptedLogs.functionLogs[targetFunctionLogIndex].logs[ targetLogIndex ], ).selector; diff --git a/yarn-project/archiver/src/archiver/kv_archiver_store/block_store.ts b/yarn-project/archiver/src/archiver/kv_archiver_store/block_store.ts index ef637162c4e..49bac68d66b 100644 --- a/yarn-project/archiver/src/archiver/kv_archiver_store/block_store.ts +++ b/yarn-project/archiver/src/archiver/kv_archiver_store/block_store.ts @@ -54,7 +54,7 @@ export class BlockStore { void this.#txIndex.set(tx.txHash.toString(), [block.number, i]); } - for (const [i, contractData] of block.newContractData.entries()) { + for (const [i, contractData] of block.body.txEffects.flatMap(txEffect => txEffect.contractData).entries()) { if (contractData.contractAddress.isZero()) { continue; } diff --git a/yarn-project/archiver/src/archiver/kv_archiver_store/contract_store.ts b/yarn-project/archiver/src/archiver/kv_archiver_store/contract_store.ts index 0c2f117ed92..a7eaf65101e 100644 --- a/yarn-project/archiver/src/archiver/kv_archiver_store/contract_store.ts +++ b/yarn-project/archiver/src/archiver/kv_archiver_store/contract_store.ts @@ -77,7 +77,8 @@ export class ContractStore { } const block = this.#blockStore.getBlock(blockNumber); - return block?.newContractData[index]; + // Assuming each txEffect only has one contract + return block?.body.txEffects[index].contractData[0]; } /** @@ -88,6 +89,6 @@ export class ContractStore { */ getContractDataInBlock(blockNumber: number): ContractData[] { const block = 
this.#blockStore.getBlock(blockNumber);
-    return block?.newContractData ?? [];
+    return block?.body.txEffects.flatMap(txEffect => txEffect.contractData) ?? [];
  }
}
diff --git a/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.test.ts b/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.test.ts
index be9318df094..fc576819633 100644
--- a/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.test.ts
+++ b/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.test.ts
@@ -1,4 +1,4 @@
-import { L2Block } from '@aztec/circuit-types';
+import { L2Block, L2BlockL2Logs } from '@aztec/circuit-types';

import { ArchiverDataStore } from '../archiver_store.js';
import { describeArchiverDataStore } from '../archiver_store_test_suite.js';
@@ -23,7 +23,13 @@ describe('MemoryArchiverStore', () => {
      await archiverStore.addBlocks(blocks);

      await Promise.all(
-        blocks.map(block => archiverStore.addLogs(block.newEncryptedLogs, block.newUnencryptedLogs, block.number)),
+        blocks.map(block =>
+          archiverStore.addLogs(
+            new L2BlockL2Logs(block.body.txEffects.map(txEffect => txEffect.logs!.encryptedLogs)),
+            new L2BlockL2Logs(block.body.txEffects.map(txEffect => txEffect.logs!.unencryptedLogs)),
+            block.number,
+          ),
+        ),
      );

      const response = await archiverStore.getUnencryptedLogs({});
diff --git a/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts b/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts
index 76ef94beb03..27a6a2b55b0 100644
--- a/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts
+++ b/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts
@@ -396,7 +396,7 @@ export class MemoryArchiverStore implements ArchiverDataStore {
      return Promise.resolve(undefined);
    }
    for (const blockContext of this.l2BlockContexts) {
-      for (const contractData of blockContext.block.newContractData) {
+      for (const contractData of blockContext.block.body.txEffects.flatMap(txEffect => txEffect.contractData)) {
        if (contractData.contractAddress.equals(contractAddress)) {
          return Promise.resolve(contractData);
        }
@@ -416,7 +416,7 @@
      return Promise.resolve([]);
    }
    const block: L2Block | undefined = this.l2BlockContexts[l2BlockNum - INITIAL_L2_BLOCK_NUM]?.block;
-    return Promise.resolve(block?.newContractData);
+    return Promise.resolve(block?.body.txEffects.flatMap(txEffect => txEffect.contractData));
  }

  /**
diff --git a/yarn-project/circuit-types/src/l2_block.test.ts b/yarn-project/circuit-types/src/l2_block.test.ts
index a55064457db..0bb6f45397f 100644
--- a/yarn-project/circuit-types/src/l2_block.test.ts
+++ b/yarn-project/circuit-types/src/l2_block.test.ts
@@ -5,8 +5,8 @@ describe('L2Block', () => {
  it('can serialize an L2 block with logs to a buffer and back', () => {
    const block = L2Block.random(42);

-    const buffer = block.toBufferWithLogs();
-    const recovered = L2Block.fromBufferWithLogs(buffer);
+    const buffer = block.toBuffer(true, true);
+    const recovered = L2Block.fromBuffer(buffer, true);

    // TODO(#3868): encoding and decoding is currently hacked and bodyHash is not recovered yet
    recovered.header.bodyHash = block.header.bodyHash;
@@ -15,9 +15,15 @@
  });

  it('can serialize an L2 block without logs to a buffer and back', () => {
-    const block = L2Block.random(42);
-    block.newEncryptedLogs = undefined;
-    block.newUnencryptedLogs = undefined;
+    const block = L2Block.random(42, 4, 2, 3, 2, 1, false);
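+    // (The trailing `false` disables log generation via the new `withLogs` flag of `L2Block.random`; the preceding
+    // arguments keep their existing meanings: txs per block, private/public calls per tx, and logs per call.)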

    const serialized = block.toString();
    const recovered = L2Block.fromString(serialized);
diff --git a/yarn-project/circuit-types/src/l2_block.ts b/yarn-project/circuit-types/src/l2_block.ts
index cb3c66e27f6..6c319f8ee1e 100644
--- a/yarn-project/circuit-types/src/l2_block.ts
+++ b/yarn-project/circuit-types/src/l2_block.ts
@@ -8,6 +8,9 @@ import {
  MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX,
  NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP,
  STRING_ENCODING,
+  PublicKernelPublicInputs,
+  SideEffect,
+  SideEffectLinkedToNoteHash,
} from '@aztec/circuits.js';
import { makeAppendOnlyTreeSnapshot, makeHeader } from '@aztec/circuits.js/factories';
import { times } from '@aztec/foundation/collection';
@@ -22,13 +25,70 @@
import { LogType, TxL2Logs } from './logs/index.js';
import { L2BlockL2Logs } from './logs/l2_block_l2_logs.js';
import { PublicDataWrite } from './public_data_write.js';
import { TxHash } from './tx/tx_hash.js';
+import { computeContractLeaf } from '@aztec/circuits.js/abis';
+
+export class TxEffectLogs {
+  constructor(
+    /**
+     * Encrypted logs emitted by this tx.
+     * @remarks Only private functions can emit encrypted logs, so the length of `encryptedLogs.functionLogs`
+     * equals the number of private function invocations in the tx.
+     */
+    public encryptedLogs: TxL2Logs,
+    /**
+     * Unencrypted logs emitted by this tx.
+     * @remarks Both private and public functions can emit unencrypted logs, so the length of
+     * `unencryptedLogs.functionLogs` equals the number of all function invocations in the tx.
+     */
+    public unencryptedLogs: TxL2Logs,
+  ) {}
+}
+
+export class TxEffect {
+  constructor(
+    /**
+     * The commitments to be inserted into the note hash tree.
+     */
+    public newNoteHashes: Fr[],
+    /**
+     * The nullifiers to be inserted into the nullifier tree.
+     */
+    public newNullifiers: Fr[],
+    /**
+     * The L2 to L1 messages to be inserted into the messagebox on L1.
+     */
+    public newL2ToL1Msgs: Fr[],
+    /**
+     * The public data writes to be inserted into the public data tree.
+     */
+    public newPublicDataWrites: PublicDataWrite[],
+    /**
+     * The leaves to be inserted into the contract tree.
+     */
+    public contractLeaves: Fr[],
+    /**
+     * The aztec address and ethereum address for the deployed contract and its portal contract.
+     */
+    public contractData: ContractData[],
+    /**
+     * The logs emitted by this tx, if attached.
+     */
+    public logs?: TxEffectLogs,
+  ) {
+    if (newNoteHashes.length % MAX_NEW_COMMITMENTS_PER_TX !== 0) {
+      throw new Error(`The number of new commitments must be a multiple of ${MAX_NEW_COMMITMENTS_PER_TX}.`);
+    }
+  }
+}
+
+export class L2BlockBody {
+  constructor(
+    public l1ToL2Messages: Fr[],
+    public txEffects: TxEffect[],
+  ) {}
+}
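+// Illustrative only (not part of this diff): a block body pairs the block's L1-to-L2 messages with one `TxEffect`
+// per tx, e.g. `new L2BlockBody(l1ToL2Messages, txEffects)`, where each effect carries that tx's note hashes,
+// nullifiers, L2-to-L1 messages, public data writes, contract leaves/data and, optionally, its logs.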

/**
 * The data that makes up the rollup proof, with encoder decoder functions.
 * TODO: Reuse data types and serialization functions from circuits package.
 */
export class L2Block {
-  /* Having logger static to avoid issues with comparing 2 block */
+  /* Having logger static to avoid issues with comparing 2 blocks */
  private static logger = createDebugLogger('aztec:l2_block');

  /**
   * The number of L2Tx in this L2Block.
   */
  public numberOfTxs: number;

-  /**
-   * Encrypted logs emitted by txs in this block.
-   * @remarks `L2BlockL2Logs.txLogs` array has to match number of txs in this block and has to be in the same order
-   * (e.g. logs from the first tx on the first place...).
-   * @remarks Only private function can emit encrypted logs and for this reason length of
-   * `newEncryptedLogs.txLogs.functionLogs` is equal to the number of private function invocations in the tx.
-   */
-  public newEncryptedLogs?: L2BlockL2Logs;
-
-  /**
-   * Unencrypted logs emitted by txs in this block.
-   * @remarks `L2BlockL2Logs.txLogs` array has to match number of txs in this block and has to be in the same order
-   * (e.g. logs from the first tx on the first place...).
-   * @remarks Both private and public functions can emit unencrypted logs and for this reason length of
-   * `newUnencryptedLogs.txLogs.functionLogs` is equal to the number of all function invocations in the tx.
-   */
-  public newUnencryptedLogs?: L2BlockL2Logs;
-
  #l1BlockNumber?: bigint;

  constructor(
    /** Snapshot of archive tree after the block is applied. */
    public archive: AppendOnlyTreeSnapshot,
    /** L2 block header. */
    public header: Header,
-    /**
-     * The commitments to be inserted into the note hash tree.
-     */
-    public newCommitments: Fr[],
-    /**
-     * The nullifiers to be inserted into the nullifier tree.
-     */
-    public newNullifiers: Fr[],
-    /**
-     * The public data writes to be inserted into the public data tree.
-     */
-    public newPublicDataWrites: PublicDataWrite[],
-    /**
-     * The L2 to L1 messages to be inserted into the messagebox on L1.
-     */
-    public newL2ToL1Msgs: Fr[],
-    /**
-     * The contracts leafs to be inserted into the contract tree.
-     */
-    public newContracts: Fr[],
-    /**
-     * The aztec address and ethereum address for the deployed contract and its portal contract.
-     */
-    public newContractData: ContractData[],
-    /**
-     * The L1 to L2 messages to be inserted into the L2 toL2 message tree.
-     */
-    public newL1ToL2Messages: Fr[] = [],
-    newEncryptedLogs?: L2BlockL2Logs,
-    newUnencryptedLogs?: L2BlockL2Logs,
+    public body: L2BlockBody,
    l1BlockNumber?: bigint,
  ) {
-    if (newCommitments.length % MAX_NEW_COMMITMENTS_PER_TX !== 0) {
-      throw new Error(`The number of new commitments must be a multiple of ${MAX_NEW_COMMITMENTS_PER_TX}.`);
-    }
-
-    if (newEncryptedLogs) {
-      this.attachLogs(newEncryptedLogs, LogType.ENCRYPTED);
-    }
-    if (newUnencryptedLogs) {
-      this.attachLogs(newUnencryptedLogs, LogType.UNENCRYPTED);
-    }
-
-    // Since the block is padded to always contain a fixed number of nullifiers we get number of txs by counting number
-    // of non-zero tx hashes --> tx hash is set to be the first nullifier in the tx.
-    this.numberOfTxs = 0;
-    for (let i = 0; i < this.newNullifiers.length; i += MAX_NEW_NULLIFIERS_PER_TX) {
-      if (!this.newNullifiers[i].equals(Fr.ZERO)) {
-        this.numberOfTxs++;
-      }
-    }
-
+    this.numberOfTxs = body.txEffects.length;
    this.#l1BlockNumber = l1BlockNumber;
+
+    // TODO: Consider deriving the header here (e.g. from `archive` and a hash of `this.body`) instead of taking it
+    // as a constructor argument.
  }

  get number(): number {
@@ -137,178 +138,106 @@
    numPublicCallsPerTx = 3,
    numEncryptedLogsPerCall = 2,
    numUnencryptedLogsPerCall = 1,
+    withLogs = true,
  ): L2Block {
-    const newNullifiers = times(MAX_NEW_NULLIFIERS_PER_TX * txsPerBlock, Fr.random);
-    const newCommitments = times(MAX_NEW_COMMITMENTS_PER_TX * txsPerBlock, Fr.random);
-    const newContracts = times(MAX_NEW_CONTRACTS_PER_TX * txsPerBlock, Fr.random);
-    const newContractData = times(MAX_NEW_CONTRACTS_PER_TX * txsPerBlock, ContractData.random);
-    const newPublicDataWrites = times(MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX * txsPerBlock, PublicDataWrite.random);
+    const txEffects = times(txsPerBlock, () =>
+      new TxEffect(
+        times(MAX_NEW_COMMITMENTS_PER_TX, Fr.random),
+        times(MAX_NEW_NULLIFIERS_PER_TX, Fr.random),
+        times(MAX_NEW_L2_TO_L1_MSGS_PER_TX, Fr.random),
+        times(MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, PublicDataWrite.random),
+        times(MAX_NEW_CONTRACTS_PER_TX, Fr.random),
+        times(MAX_NEW_CONTRACTS_PER_TX, ContractData.random),
+        withLogs
+          ? new TxEffectLogs(
+              TxL2Logs.random(numPrivateCallsPerTx, numEncryptedLogsPerCall, LogType.ENCRYPTED),
+              TxL2Logs.random(numPublicCallsPerTx, numUnencryptedLogsPerCall, LogType.UNENCRYPTED),
+            )
+          : undefined,
+      ),
+    );
+
    const newL1ToL2Messages = times(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, Fr.random);
-    const newL2ToL1Msgs = times(MAX_NEW_L2_TO_L1_MSGS_PER_TX, Fr.random);
-    const newEncryptedLogs = L2BlockL2Logs.random(
-      txsPerBlock,
-      numPrivateCallsPerTx,
-      numEncryptedLogsPerCall,
-      LogType.ENCRYPTED,
-    );
-    const newUnencryptedLogs = L2BlockL2Logs.random(
-      txsPerBlock,
-      numPublicCallsPerTx,
-      numUnencryptedLogsPerCall,
-      LogType.UNENCRYPTED,
-    );

-    return L2Block.fromFields(
-      {
+    const body = new L2BlockBody(newL1ToL2Messages, txEffects);
+
+    return L2Block.fromFields({
      archive: makeAppendOnlyTreeSnapshot(1),
      header: makeHeader(0, l2BlockNum),
-        newCommitments,
-        newNullifiers,
-        newContracts,
-        newContractData,
-        newPublicDataWrites,
-        newL1ToL2Messages,
-        newL2ToL1Msgs,
-        newEncryptedLogs,
-        newUnencryptedLogs,
-      },
+      body,
+    },
      // just for testing purposes, each random L2 block got emitted in the equivalent L1 block
      BigInt(l2BlockNum),
    );
  }

  /**
   * Constructs a new instance from named fields.
   * @param fields - Fields to pass to the constructor.
   * @param blockHash - Hash of the block.
   * @param l1BlockNumber - The block number of the L1 block that contains this L2 block.
   * @returns A new instance.
   */
  static fromFields(
    fields: {
      /** Snapshot of archive tree after the block is applied. */
      archive: AppendOnlyTreeSnapshot;
      /** L2 block header. */
      header: Header;
-      /**
-       * The commitments to be inserted into the note hash tree.
-       */
-      newCommitments: Fr[];
-      /**
-       * The nullifiers to be inserted into the nullifier tree.
-       */
-      newNullifiers: Fr[];
-      /**
-       * The public data writes to be inserted into the public data tree.
-       */
-      newPublicDataWrites: PublicDataWrite[];
-      /**
-       * The L2 to L1 messages to be inserted into the messagebox on L1.
-       */
-      newL2ToL1Msgs: Fr[];
-      /**
-       * The contracts leafs to be inserted into the contract tree.
-       */
-      newContracts: Fr[];
-      /**
-       * The aztec address and ethereum address for the deployed contract and its portal contract.
-       */
-      newContractData: ContractData[];
-      /**
-       * The L1 to L2 messages to be inserted into the L1 to L2 message tree.
-       */
-      newL1ToL2Messages: Fr[];
-      /**
-       * Encrypted logs emitted by txs in a block.
-       */
-      newEncryptedLogs?: L2BlockL2Logs;
-      /**
-       * Unencrypted logs emitted by txs in a block.
-       */
-      newUnencryptedLogs?: L2BlockL2Logs;
+      body: L2BlockBody;
    },
    l1BlockNumber?: bigint,
  ) {
    return new this(
      fields.archive,
      fields.header,
-      fields.newCommitments,
-      fields.newNullifiers,
-      fields.newPublicDataWrites,
-      fields.newL2ToL1Msgs,
-      fields.newContracts,
-      fields.newContractData,
-      fields.newL1ToL2Messages,
-      fields.newEncryptedLogs,
-      fields.newUnencryptedLogs,
+      fields.body,
      l1BlockNumber,
    );
  }
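+  // Illustrative call sites for the two serialization modes of `toBuffer` below (hypothetical, not part of this
+  // diff):
+  //   block.toBuffer()            // defaults: no logs, with header -> block served via JSON-RPC
+  //   block.toBuffer(true, false) // logs, no header -> block body published on L1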

  /**
-   * Serializes a block without logs to a buffer.
-   * @remarks This is used when the block is being served via JSON-RPC because the logs are expected to be served
-   * separately.
-   * @returns A serialized L2 block without logs.
+   * Serializes a block to a buffer.
+   * @remarks Without logs and with a header (the defaults) this is used when the block is served via JSON-RPC,
+   * because the logs are expected to be served separately. With logs and without a header it serializes the block
+   * body for publication on L1.
+   * @returns A serialized L2 block.
   */
-  toBuffer() {
-    return serializeToBuffer(
-      this.header,
-      this.archive,
-      this.newCommitments.length,
-      this.newCommitments,
-      this.newNullifiers.length,
-      this.newNullifiers,
-      this.newPublicDataWrites.length,
-      this.newPublicDataWrites,
-      this.newL2ToL1Msgs.length,
-      this.newL2ToL1Msgs,
-      this.newContracts.length,
-      this.newContracts,
-      this.newContractData,
-      this.newL1ToL2Messages.length,
-      this.newL1ToL2Messages,
-    );
-  }
+  toBuffer(includeLogs: boolean = false, includeHeader: boolean = true) {
+    const paddedTxEffects = this.getPaddedTxEffects();
+    let logs: [L2BlockL2Logs, L2BlockL2Logs] | [] = [];

-  /**
-   * Serializes a block with logs to a buffer.
-   * @remarks This is used when the block is being submitted on L1.
-   * @returns A serialized L2 block with logs.
- */ - toBufferWithLogs(): Buffer { - if (this.newEncryptedLogs === undefined || this.newUnencryptedLogs === undefined) { - throw new Error( - `newEncryptedLogs and newUnencryptedLogs must be defined when encoding L2BlockData (block ${this.header.globalVariables.blockNumber})`, - ); + if (includeLogs) { + this.assertLogsAttached(); + + const newEncryptedLogs = paddedTxEffects.flatMap(txEffect => txEffect.logs!.encryptedLogs); + const newUnencryptedLogs = paddedTxEffects.flatMap(txEffect => txEffect.logs!.unencryptedLogs); + logs = [new L2BlockL2Logs(newEncryptedLogs), new L2BlockL2Logs(newUnencryptedLogs)]; } - return serializeToBuffer(this.toBuffer(), this.newEncryptedLogs, this.newUnencryptedLogs); - } + const newCommitments = paddedTxEffects.flatMap(txEffect => txEffect.newNoteHashes); + const newNullifiers = paddedTxEffects.flatMap(txEffect => txEffect.newNullifiers); + const newPublicDataWrites = paddedTxEffects.flatMap(txEffect => txEffect.newPublicDataWrites); + const newL2ToL1Msgs = paddedTxEffects.flatMap(txEffect => txEffect.newL2ToL1Msgs); + const newContracts = paddedTxEffects.flatMap(txEffect => txEffect.contractLeaves); + const newContractData = paddedTxEffects.flatMap(txEffect => txEffect.contractData); + const newL1ToL2Messages = this.body.l1ToL2Messages; - bodyToBuffer(): Buffer { - if (this.newEncryptedLogs === undefined || this.newUnencryptedLogs === undefined) { - throw new Error( - `newEncryptedLogs and newUnencryptedLogs must be defined when encoding L2BlockData (block ${this.header.globalVariables.blockNumber})`, - ); - } + const header: [Header, AppendOnlyTreeSnapshot] | [] = includeHeader ? [this.header, this.archive] : []; return serializeToBuffer( - this.newCommitments.length, - this.newCommitments, - this.newNullifiers.length, - this.newNullifiers, - this.newPublicDataWrites.length, - this.newPublicDataWrites, - this.newL2ToL1Msgs.length, - this.newL2ToL1Msgs, - this.newContracts.length, - this.newContracts, - this.newContractData, - this.newL1ToL2Messages.length, - this.newL1ToL2Messages, - this.newEncryptedLogs, - this.newUnencryptedLogs, + ...header, + newCommitments.length, + newCommitments, + newNullifiers.length, + newNullifiers, + newPublicDataWrites.length, + newPublicDataWrites, + newL2ToL1Msgs.length, + newL2ToL1Msgs, + newContracts.length, + newContracts, + newContractData, + newL1ToL2Messages.length, + newL1ToL2Messages, + ...logs, ); } @@ -322,12 +251,24 @@ export class L2Block { return this.toBuffer().toString(STRING_ENCODING); } - /** - * Deserializes L2 block without logs from a buffer. + /** + * Deserializes L2 block with logs from a buffer. * @param buf - A serialized L2 block. * @returns Deserialized L2 block. 
*/ - static fromBuffer(buf: Buffer | BufferReader) { + static fromBufferWithLogs(buf: Buffer | BufferReader) { + const reader = BufferReader.asReader(buf); + const block = L2Block.fromBuffer(reader, false); + const newEncryptedLogs = reader.readObject(L2BlockL2Logs); + const newUnencryptedLogs = reader.readObject(L2BlockL2Logs); + + block.attachLogs(newEncryptedLogs, newUnencryptedLogs); + + return block; + } + + + static fromBuffer(buf: Buffer | BufferReader, withLogs: boolean = false) { const reader = BufferReader.asReader(buf); const header = reader.readObject(Header); const archive = reader.readObject(AppendOnlyTreeSnapshot); @@ -340,36 +281,53 @@ export class L2Block { // TODO(sean): could an optimization of this be that it is encoded such that zeros are assumed const newL1ToL2Messages = reader.readVector(Fr); + let newEncryptedLogs: L2BlockL2Logs; + let newUnencryptedLogs: L2BlockL2Logs; + + let numberOfTxs = 0; + for (let i = 0; i < newNullifiers.length; i += MAX_NEW_NULLIFIERS_PER_TX) { + if (!newNullifiers[i].equals(Fr.ZERO)) { + numberOfTxs++; + } + } + + if(withLogs) { + newEncryptedLogs = reader.readObject(L2BlockL2Logs); + newUnencryptedLogs = reader.readObject(L2BlockL2Logs); + + if (new L2BlockL2Logs(newEncryptedLogs.txLogs.slice(numberOfTxs)).getTotalLogCount() !== 0 || + new L2BlockL2Logs(newUnencryptedLogs.txLogs.slice(numberOfTxs)).getTotalLogCount() !== 0) { + throw new Error('Logs exist in the padded area'); + } + } + + const txEffects: TxEffect[] = [] + + for (let i = 0; i < numberOfTxs; i += 1) { + const logs: TxEffectLogs[] = withLogs ? [new TxEffectLogs(newEncryptedLogs!.txLogs[i], newUnencryptedLogs!.txLogs[i])] : []; + + txEffects.push( + new TxEffect( + newCommitments.slice(i * MAX_NEW_COMMITMENTS_PER_TX, (i + 1) * MAX_NEW_COMMITMENTS_PER_TX), + newNullifiers.slice(i * MAX_NEW_NULLIFIERS_PER_TX, (i + 1) * MAX_NEW_NULLIFIERS_PER_TX), + newL2ToL1Msgs.slice(i * MAX_NEW_L2_TO_L1_MSGS_PER_TX, (i + 1) * MAX_NEW_L2_TO_L1_MSGS_PER_TX), + newPublicDataWrites.slice(i * MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, (i + 1) * MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX), + newContracts.slice(i * MAX_NEW_CONTRACTS_PER_TX, (i + 1) * MAX_NEW_CONTRACTS_PER_TX), + newContractData.slice(i * MAX_NEW_CONTRACTS_PER_TX, (i + 1) * MAX_NEW_CONTRACTS_PER_TX), + ...logs, + ), + ); + } + + const body = new L2BlockBody(newL1ToL2Messages, txEffects); + return L2Block.fromFields({ archive, header, - newCommitments, - newNullifiers, - newPublicDataWrites, - newL2ToL1Msgs, - newContracts, - newContractData, - newL1ToL2Messages, + body, }); } - /** - * Deserializes L2 block with logs from a buffer. - * @param buf - A serialized L2 block. - * @returns Deserialized L2 block. - */ - static fromBufferWithLogs(buf: Buffer | BufferReader) { - const reader = BufferReader.asReader(buf); - const block = L2Block.fromBuffer(reader); - const newEncryptedLogs = reader.readObject(L2BlockL2Logs); - const newUnencryptedLogs = reader.readObject(L2BlockL2Logs); - - block.attachLogs(newEncryptedLogs, LogType.ENCRYPTED); - block.attachLogs(newUnencryptedLogs, LogType.UNENCRYPTED); - - return block; - } - /** * Deserializes L2 block without logs from a buffer. * @param str - A serialized L2 block. @@ -385,32 +343,31 @@ export class L2Block { * @param logType - The type of logs to be attached. * @remarks Here, because we can have L2 blocks without logs and those logs can be attached later. */ - attachLogs(logs: L2BlockL2Logs, logType: LogType) { - const logFieldName = logType === LogType.ENCRYPTED ? 
'newEncryptedLogs' : 'newUnencryptedLogs';
-
-    if (this[logFieldName]) {
-      if (this[logFieldName]?.equals(logs)) {
-        L2Block.logger(`${logFieldName} logs already attached`);
-        return;
-      }
-      throw new Error(
-        `Trying to attach different ${logFieldName} logs to block ${this.header.globalVariables.blockNumber}.`,
-      );
+  attachLogs(encryptedLogs: L2BlockL2Logs, unencryptedLogs: L2BlockL2Logs) {
+    if (
+      new L2BlockL2Logs(encryptedLogs.txLogs.slice(this.numberOfTxs)).getTotalLogCount() !== 0 ||
+      new L2BlockL2Logs(unencryptedLogs.txLogs.slice(this.numberOfTxs)).getTotalLogCount() !== 0
+    ) {
+      throw new Error('Logs exist in the padded area');
    }

-    L2Block.logger(
-      `Attaching ${logFieldName} ${logs.getTotalLogCount()} logs to block ${this.header.globalVariables.blockNumber}`,
-    );
-
-    const numTxs = this.newCommitments.length / MAX_NEW_COMMITMENTS_PER_TX;
+    const txEffects = this.body.txEffects;

-    if (numTxs !== logs.txLogs.length) {
-      throw new Error(
-        `Number of txLogs within ${logFieldName} does not match number of transactions. Expected: ${numTxs} Got: ${logs.txLogs.length}`,
-      );
+    if (this.areLogsAttached()) {
+      if (
+        txEffects.every(
+          (txEffect, i) =>
+            txEffect.logs?.encryptedLogs.equals(encryptedLogs.txLogs[i]) &&
+            txEffect.logs?.unencryptedLogs.equals(unencryptedLogs.txLogs[i]),
+        )
+      ) {
+        L2Block.logger(`Logs already attached`);
+        return;
+      } else {
+        throw new Error(`Trying to attach different logs to block ${this.header.globalVariables.blockNumber}.`);
+      }
    }

-    this[logFieldName] = logs;
+    txEffects.forEach((txEffect, i) => {
+      txEffect.logs = new TxEffectLogs(encryptedLogs.txLogs[i], unencryptedLogs.txLogs[i]);
+    });
  }

  /**
@@ -449,7 +406,7 @@
    const buf = serializeToBuffer(
      this.header.globalVariables,
      // TODO(#3868)
-      AppendOnlyTreeSnapshot.zero(), // this.startNoteHashTreeSnapshot,
+      AppendOnlyTreeSnapshot.zero(), // this.startNoteHashTreeSnapshot / commitments,
      AppendOnlyTreeSnapshot.zero(), // this.startNullifierTreeSnapshot,
      AppendOnlyTreeSnapshot.zero(), // this.startContractTreeSnapshot,
      AppendOnlyTreeSnapshot.zero(), // this.startPublicDataTreeSnapshot,
@@ -503,6 +460,27 @@
    return sha256(inputValue);
  }

+  private getPaddedTxEffects() {
+    // Pad the tx effects up to the next power of two (minimum 2) so they form a complete set of tree leaves.
+    const numTxsLog2 = Math.ceil(Math.log2(this.numberOfTxs));
+    const leafCount = numTxsLog2 === 0 ? 2 : 2 ** numTxsLog2;
+
+    const emptyKernelOutput = PublicKernelPublicInputs.empty();
+
+    const paddingTxEffects = times(leafCount - this.numberOfTxs, () =>
+      new TxEffect(
+        emptyKernelOutput.end.newCommitments.map((c: SideEffect) => c.value),
+        emptyKernelOutput.end.newNullifiers.map((n: SideEffectLinkedToNoteHash) => n.value),
+        emptyKernelOutput.end.newL2ToL1Msgs,
+        emptyKernelOutput.end.publicDataUpdateRequests.map(t => new PublicDataWrite(t.leafSlot, t.newValue)),
+        emptyKernelOutput.end.newContracts.map(cd => computeContractLeaf(cd)),
+        emptyKernelOutput.end.newContracts.map(cd => new ContractData(cd.contractAddress, cd.portalContractAddress)),
+        new TxEffectLogs(new TxL2Logs([]), new TxL2Logs([])),
+      ),
+    );
+
+    return [...this.body.txEffects, ...paddingTxEffects];
+  }
+
  /**
   * Computes the calldata hash for the L2 block
   * This calldata hash is also computed by the rollup contract when the block is submitted,
   * @returns The calldata hash.
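+   * @remarks Tx effects are padded with empty effects up to a power-of-two leaf count (see `getPaddedTxEffects`)
+   * before hashing, so the resulting tree matches the one the rollup contract recomputes.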
*/ getCalldataHash() { - if (this.newEncryptedLogs === undefined) { - throw new Error('Encrypted logs has to be attached before calling "getCalldataHash"'); - } - - if (this.newUnencryptedLogs === undefined) { - throw new Error('Unencrypted logs has to be attached before calling "getCalldataHash"'); - } + this.assertLogsAttached(); const computeRoot = (leafs: Buffer[]): Buffer => { const layers: Buffer[][] = [leafs]; @@ -540,41 +512,30 @@ export class L2Block { return layers[layers.length - 1][0]; }; - const leafCount = this.newCommitments.length / MAX_NEW_COMMITMENTS_PER_TX; const leafs: Buffer[] = []; - for (let i = 0; i < leafCount; i++) { - const commitmentsPerBase = MAX_NEW_COMMITMENTS_PER_TX; - const nullifiersPerBase = MAX_NEW_NULLIFIERS_PER_TX; - const publicDataUpdateRequestsPerBase = MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX; - const l2ToL1MsgsPerBase = MAX_NEW_L2_TO_L1_MSGS_PER_TX; - const commitmentsBuffer = Buffer.concat( - this.newCommitments.slice(i * commitmentsPerBase, (i + 1) * commitmentsPerBase).map(x => x.toBuffer()), - ); - const nullifiersBuffer = Buffer.concat( - this.newNullifiers.slice(i * nullifiersPerBase, (i + 1) * nullifiersPerBase).map(x => x.toBuffer()), - ); - const publicDataUpdateRequestsBuffer = Buffer.concat( - this.newPublicDataWrites - .slice(i * publicDataUpdateRequestsPerBase, (i + 1) * publicDataUpdateRequestsPerBase) - .map(x => x.toBuffer()), - ); - const newL2ToL1MsgsBuffer = Buffer.concat( - this.newL2ToL1Msgs.slice(i * l2ToL1MsgsPerBase, (i + 1) * l2ToL1MsgsPerBase).map(x => x.toBuffer()), - ); - const encryptedLogsHashKernel0 = L2Block.computeKernelLogsHash(this.newEncryptedLogs.txLogs[i]); + const paddedTxEffects = this.getPaddedTxEffects(); + + for (let i = 0; i < paddedTxEffects.length; i++) { + const txEffect = paddedTxEffects[i]; - const unencryptedLogsHashKernel0 = L2Block.computeKernelLogsHash(this.newUnencryptedLogs.txLogs[i]); + const commitmentsBuffer = Buffer.concat(txEffect.newNoteHashes.map(x => x.toBuffer())); + const nullifiersBuffer = Buffer.concat(txEffect.newNullifiers.map(x => x.toBuffer())); + const publicDataUpdateRequestsBuffer = Buffer.concat(txEffect.newPublicDataWrites.map(x => x.toBuffer())); + const newL2ToL1MsgsBuffer = Buffer.concat(txEffect.newL2ToL1Msgs.map(x => x.toBuffer())); + const encryptedLogsHashKernel0 = L2Block.computeKernelLogsHash(txEffect.logs!.encryptedLogs); + const unencryptedLogsHashKernel0 = L2Block.computeKernelLogsHash(txEffect.logs!.unencryptedLogs); const inputValue = Buffer.concat([ commitmentsBuffer, nullifiersBuffer, publicDataUpdateRequestsBuffer, newL2ToL1MsgsBuffer, - this.newContracts[i].toBuffer(), - this.newContractData[i].contractAddress.toBuffer(), + // We get the first one because we only support 1 new contract per tx + txEffect.contractLeaves[0].toBuffer(), + txEffect.contractData[0].contractAddress.toBuffer(), // TODO(#3938): make portal address 20 bytes here when updating the hashing - this.newContractData[i].portalContractAddress.toBuffer32(), + txEffect.contractData[0].portalContractAddress.toBuffer32(), encryptedLogsHashKernel0, unencryptedLogsHashKernel0, ]); @@ -591,7 +552,7 @@ export class L2Block { */ getL1ToL2MessagesHash(): Buffer { // Create a long buffer of all of the l1 to l2 messages - const l1ToL2Messages = Buffer.concat(this.newL1ToL2Messages.map(message => message.toBuffer())); + const l1ToL2Messages = Buffer.concat(this.body.l1ToL2Messages.map(message => message.toBuffer())); return sha256(l1ToL2Messages); } @@ -603,24 +564,14 @@ export class L2Block { 
getTx(txIndex: number) {
    this.assertIndexInRange(txIndex);

-    const newCommitments = this.newCommitments
-      .slice(MAX_NEW_COMMITMENTS_PER_TX * txIndex, MAX_NEW_COMMITMENTS_PER_TX * (txIndex + 1))
-      .filter(x => !x.isZero());
-    const newNullifiers = this.newNullifiers
-      .slice(MAX_NEW_NULLIFIERS_PER_TX * txIndex, MAX_NEW_NULLIFIERS_PER_TX * (txIndex + 1))
-      .filter(x => !x.isZero());
-    const newPublicDataWrites = this.newPublicDataWrites
-      .slice(MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX * txIndex, MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX * (txIndex + 1))
-      .filter(x => !x.isEmpty());
-    const newL2ToL1Msgs = this.newL2ToL1Msgs
-      .slice(MAX_NEW_L2_TO_L1_MSGS_PER_TX * txIndex, MAX_NEW_L2_TO_L1_MSGS_PER_TX * (txIndex + 1))
-      .filter(x => !x.isZero());
-    const newContracts = this.newContracts
-      .slice(MAX_NEW_CONTRACTS_PER_TX * txIndex, MAX_NEW_CONTRACTS_PER_TX * (txIndex + 1))
-      .filter(x => !x.isZero());
-    const newContractData = this.newContractData
-      .slice(MAX_NEW_CONTRACTS_PER_TX * txIndex, MAX_NEW_CONTRACTS_PER_TX * (txIndex + 1))
-      .filter(x => !x.isEmpty());
+    const txEffect = this.body.txEffects[txIndex];
+
+    const newCommitments = txEffect.newNoteHashes.filter(x => !x.isZero());
+    const newNullifiers = txEffect.newNullifiers.filter(x => !x.isZero());
+    const newPublicDataWrites = txEffect.newPublicDataWrites.filter(x => !x.isEmpty());
+    const newL2ToL1Msgs = txEffect.newL2ToL1Msgs.filter(x => !x.isZero());
+    const newContracts = txEffect.contractLeaves.filter(x => !x.isZero());
+    const newContractData = txEffect.contractData.filter(x => !x.isEmpty());

    return new L2Tx(
      newCommitments,
@@ -642,7 +593,8 @@
  getTxHash(txIndex: number): TxHash {
    this.assertIndexInRange(txIndex);

-    const firstNullifier = this.newNullifiers[txIndex * MAX_NEW_NULLIFIERS_PER_TX];
+    // Gets the first nullifier of the tx specified by txIndex
+    const firstNullifier = this.body.txEffects[txIndex].newNullifiers[0];
    return new TxHash(firstNullifier.toBuffer());
  }
@@ -662,19 +614,17 @@
   * @returns Stats on tx count, number, and log size and count.
   */
  getStats() {
-    const encryptedLogsStats = this.newEncryptedLogs && {
-      encryptedLogCount: this.newEncryptedLogs?.getTotalLogCount() ?? 0,
-      encryptedLogSize: this.newEncryptedLogs?.getSerializedLength() ?? 0,
-    };
-    const unencryptedLogsStats = this.newUnencryptedLogs && {
-      unencryptedLogCount: this.newUnencryptedLogs?.getTotalLogCount() ?? 0,
-      unencryptedLogSize: this.newUnencryptedLogs?.getSerializedLength() ?? 0,
-    };
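+    // Log stats are only reported when logs are attached to every tx effect; when `logsStats` is `false`, the
+    // object spread below is a no-op.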
+    const logsStats = this.areLogsAttached() && {
+      encryptedLogCount: this.body.txEffects.reduce((count, txEffect) => count + txEffect.logs!.encryptedLogs.getTotalLogCount(), 0),
+      encryptedLogSize: this.body.txEffects.reduce((size, txEffect) => size + txEffect.logs!.encryptedLogs.getSerializedLength(), 0),
+      unencryptedLogCount: this.body.txEffects.reduce((count, txEffect) => count + txEffect.logs!.unencryptedLogs.getTotalLogCount(), 0),
+      unencryptedLogSize: this.body.txEffects.reduce((size, txEffect) => size + txEffect.logs!.unencryptedLogs.getSerializedLength(), 0),
+    };

    return {
      txCount: this.numberOfTxs,
      blockNumber: this.number,
-      ...encryptedLogsStats,
-      ...unencryptedLogsStats,
+      ...logsStats,
    };
  }
@@ -688,6 +638,18 @@
    }
  }

+  private assertLogsAttached() {
+    if (!this.areLogsAttached()) {
+      throw new Error(`Logs must be attached to all tx effects (block ${this.header.globalVariables.blockNumber})`);
+    }
+  }
+
+  private areLogsAttached() {
+    return this.body.txEffects.every(txEffect => txEffect.logs !== undefined);
+  }
+
  // /**
  //  * Inspect for debugging purposes..
  //  * @param maxBufferSize - The number of bytes to be extracted from buffer.
diff --git a/yarn-project/circuit-types/src/logs/tx_l2_logs.ts b/yarn-project/circuit-types/src/logs/tx_l2_logs.ts
index f59efb8ff7e..9b3247ed44e 100644
--- a/yarn-project/circuit-types/src/logs/tx_l2_logs.ts
+++ b/yarn-project/circuit-types/src/logs/tx_l2_logs.ts
@@ -1,5 +1,7 @@
import { BufferReader, prefixBufferWithLength } from '@aztec/foundation/serialize';

+import isEqual from 'lodash.isequal';
+
import { FunctionL2Logs } from './function_l2_logs.js';
import { LogType } from './log_type.js';

@@ -105,4 +107,14 @@ export class TxL2Logs {
    const functionLogs = obj.functionLogs.map((log: any) => FunctionL2Logs.fromJSON(log));
    return new TxL2Logs(functionLogs);
  }
+
+  /**
+   * Checks if two TxL2Logs objects are equal.
+   * @param other - Another TxL2Logs object to compare with.
+   * @returns True if the two objects are equal, false otherwise.
+ */ + public equals(other: TxL2Logs): boolean { + return isEqual(this, other); + } + } diff --git a/yarn-project/end-to-end/src/e2e_inclusion_proofs_contract.test.ts b/yarn-project/end-to-end/src/e2e_inclusion_proofs_contract.test.ts index 459fddef998..93b3b4019d6 100644 --- a/yarn-project/end-to-end/src/e2e_inclusion_proofs_contract.test.ts +++ b/yarn-project/end-to-end/src/e2e_inclusion_proofs_contract.test.ts @@ -225,7 +225,7 @@ describe('e2e_inclusion_proofs_contract', () => { // Choose random block number between deployment and current block number to test archival node const blockNumber = await getRandomBlockNumberSinceDeployment(); const block = await pxe.getBlock(blockNumber); - const nullifier = block?.newNullifiers[0]; + const nullifier = block?.body.txEffects[0].newNullifiers[0]; await contract.methods.test_nullifier_inclusion(nullifier!, true, blockNumber).send().wait(); await contract.methods.test_nullifier_inclusion(nullifier!, false, 0n).send().wait(); diff --git a/yarn-project/end-to-end/src/e2e_pending_commitments_contract.test.ts b/yarn-project/end-to-end/src/e2e_pending_commitments_contract.test.ts index cb81ff66782..d23c34fe833 100644 --- a/yarn-project/end-to-end/src/e2e_pending_commitments_contract.test.ts +++ b/yarn-project/end-to-end/src/e2e_pending_commitments_contract.test.ts @@ -23,13 +23,15 @@ describe('e2e_pending_commitments_contract', () => { const blockNum = await aztecNode!.getBlockNumber(); const block = (await aztecNode!.getBlocks(blockNum, 1))[0]; + const commitmentsArray = block.body.txEffects.flatMap(txEffect => txEffect.newNoteHashes); + // all new commitments should be zero (should be squashed) for (let c = 0; c < exceptFirstFew; c++) { - expect(block.newCommitments[c]).not.toEqual(Fr.ZERO); + expect(commitmentsArray[c]).not.toEqual(Fr.ZERO); } - for (let c = exceptFirstFew; c < block.newCommitments.length; c++) { - expect(block.newCommitments[c]).toEqual(Fr.ZERO); + for (let c = exceptFirstFew; c < commitmentsArray.length; c++) { + expect(commitmentsArray[c]).toEqual(Fr.ZERO); } }; @@ -37,13 +39,15 @@ describe('e2e_pending_commitments_contract', () => { const blockNum = await aztecNode!.getBlockNumber(); const block = (await aztecNode!.getBlocks(blockNum, 1))[0]; + const nullifierArray = block.body.txEffects.flatMap(txEffect => txEffect.newNullifiers); + // 0th nullifier should be nonzero (txHash), all others should be zero (should be squashed) for (let n = 0; n < exceptFirstFew + 1; n++) { logger(`Expecting nullifier ${n} to be nonzero`); - expect(block.newNullifiers[n]).not.toEqual(Fr.ZERO); // 0th nullifier is txHash + expect(nullifierArray[n]).not.toEqual(Fr.ZERO); // 0th nullifier is txHash } - for (let n = exceptFirstFew + 1; n < block.newNullifiers.length; n++) { - expect(block.newNullifiers[n]).toEqual(Fr.ZERO); + for (let n = exceptFirstFew + 1; n < nullifierArray.length; n++) { + expect(nullifierArray[n]).toEqual(Fr.ZERO); } }; diff --git a/yarn-project/end-to-end/src/integration_l1_publisher.test.ts b/yarn-project/end-to-end/src/integration_l1_publisher.test.ts index 01d0e5a240b..34f01fb2855 100644 --- a/yarn-project/end-to-end/src/integration_l1_publisher.test.ts +++ b/yarn-project/end-to-end/src/integration_l1_publisher.test.ts @@ -259,13 +259,13 @@ describe('L1Publisher integration', () => { }, messages: { l1ToL2Messages: l1ToL2Messages.map(m => `0x${m.toBuffer().toString('hex').padStart(64, '0')}`), - l2ToL1Messages: block.newL2ToL1Msgs.map(m => `0x${m.toBuffer().toString('hex').padStart(64, '0')}`), + l2ToL1Messages: 
block.body.txEffects.flatMap(txEffect => txEffect.newL2ToL1Msgs).map(m => `0x${m.toBuffer().toString('hex').padStart(64, '0')}`), }, block: { // The json formatting in forge is a bit brittle, so we convert Fr to a number in the few values below. // This should not be a problem for testing as long as the values are not larger than u32. archive: `0x${block.archive.root.toBuffer().toString('hex').padStart(64, '0')}`, - body: `0x${block.bodyToBuffer().toString('hex')}`, + body: `0x${block.toBuffer(true, false).toString('hex')}`, calldataHash: `0x${block.getCalldataHash().toString('hex').padStart(64, '0')}`, decodedHeader: { bodyHash: `0x${block.header.bodyHash.toString('hex').padStart(64, '0')}`, @@ -387,9 +387,11 @@ describe('L1Publisher integration', () => { expect(await inbox.read.contains([l1ToL2Messages[j].toString()])).toBeTruthy(); } + const newL2ToL1MsgsArray = block.body.txEffects.flatMap(txEffect => txEffect.newL2ToL1Msgs); + // check that values are not in the outbox - for (let j = 0; j < block.newL2ToL1Msgs.length; j++) { - expect(await outbox.read.contains([block.newL2ToL1Msgs[j].toString()])).toBeFalsy(); + for (let j = 0; j < newL2ToL1MsgsArray.length; j++) { + expect(await outbox.read.contains([newL2ToL1MsgsArray[j].toString()])).toBeFalsy(); } writeJson(`mixed_block_${i}`, block, l1ToL2Messages, l1ToL2Content, recipientAddress, deployerAccount.address); @@ -418,7 +420,7 @@ describe('L1Publisher integration', () => { `0x${block.header.toBuffer().toString('hex')}`, `0x${block.archive.root.toBuffer().toString('hex')}`, `0x${block.getCalldataHash().toString('hex')}`, - `0x${block.bodyToBuffer().toString('hex')}`, + `0x${block.toBuffer(true, false).toString('hex')}`, `0x${l2Proof.toString('hex')}`, ], }); @@ -431,9 +433,10 @@ describe('L1Publisher integration', () => { } expect(await inbox.read.contains([l1ToL2Messages[j].toString()])).toBeFalsy(); } + // check that values are inserted into the outbox - for (let j = 0; j < block.newL2ToL1Msgs.length; j++) { - expect(await outbox.read.contains([block.newL2ToL1Msgs[j].toString()])).toBeTruthy(); + for (let j = 0; j < newL2ToL1MsgsArray.length; j++) { + expect(await outbox.read.contains([newL2ToL1MsgsArray[j].toString()])).toBeTruthy(); } } }, 360_000); @@ -490,7 +493,7 @@ describe('L1Publisher integration', () => { `0x${block.header.toBuffer().toString('hex')}`, `0x${block.archive.root.toBuffer().toString('hex')}`, `0x${block.getCalldataHash().toString('hex')}`, - `0x${block.bodyToBuffer().toString('hex')}`, + `0x${block.toBuffer(true, false).toString('hex')}`, `0x${l2Proof.toString('hex')}`, ], }); diff --git a/yarn-project/pxe/src/note_processor/note_processor.test.ts b/yarn-project/pxe/src/note_processor/note_processor.test.ts index a08189e5a21..05b72aca9c4 100644 --- a/yarn-project/pxe/src/note_processor/note_processor.test.ts +++ b/yarn-project/pxe/src/note_processor/note_processor.test.ts @@ -105,7 +105,9 @@ describe('Note Processor', () => { } = createEncryptedLogsAndOwnedL1NotePayloads(isTargetBlock ? ownedData : [], isTargetBlock ? 
ownedNotes : []); encryptedLogsArr.push(encryptedLogs); ownedL1NotePayloads.push(...payloads); - block.newCommitments = newNotes.map(n => computeMockNoteHash(n.note)); + for (let i = 0; i < TXS_PER_BLOCK; i++) { + block.body.txEffects[i].newNoteHashes = newNotes.map(n => computeMockNoteHash(n.note)).slice(i * MAX_NEW_COMMITMENTS_PER_TX, (i + 1) * MAX_NEW_COMMITMENTS_PER_TX); + } const randomBlockContext = new L2BlockContext(block); blockContexts.push(randomBlockContext); @@ -192,7 +194,7 @@ describe('Note Processor', () => { index: BigInt(thisBlockDataStartIndex + MAX_NEW_COMMITMENTS_PER_TX * (4 - 1) + 2), }), ]); - }); + }, 30_000); it('should not store notes that do not belong to us', async () => { const { blockContexts, encryptedLogsArr } = mockData([]); diff --git a/yarn-project/pxe/src/note_processor/note_processor.ts b/yarn-project/pxe/src/note_processor/note_processor.ts index 4ed25627317..b8aa1e670ae 100644 --- a/yarn-project/pxe/src/note_processor/note_processor.ts +++ b/yarn-project/pxe/src/note_processor/note_processor.ts @@ -124,10 +124,7 @@ export class NoteProcessor { this.stats.txs++; const dataStartIndexForTx = dataEndIndexForBlock - (txLogs.length - indexOfTxInABlock) * MAX_NEW_COMMITMENTS_PER_TX; - const newCommitments = block.newCommitments.slice( - indexOfTxInABlock * MAX_NEW_COMMITMENTS_PER_TX, - (indexOfTxInABlock + 1) * MAX_NEW_COMMITMENTS_PER_TX, - ); + const newCommitments = block.body.txEffects[indexOfTxInABlock].newNoteHashes; // Note: Each tx generates a `TxL2Logs` object and for this reason we can rely on its index corresponding // to the index of a tx in a block. const txFunctionLogs = txLogs[indexOfTxInABlock].functionLogs; @@ -212,7 +209,7 @@ export class NoteProcessor { }); } - const newNullifiers: Fr[] = blocksAndNotes.flatMap(b => b.blockContext.block.newNullifiers); + const newNullifiers: Fr[] = blocksAndNotes.flatMap(b => b.blockContext.block.body.txEffects.flatMap(txEffect => txEffect.newNullifiers)); const removedNotes = await this.db.removeNullifiedNotes(newNullifiers, this.publicKey); removedNotes.forEach(noteDao => { this.log( diff --git a/yarn-project/pxe/src/synchronizer/synchronizer.test.ts b/yarn-project/pxe/src/synchronizer/synchronizer.test.ts index 4353ba36b9b..ae3e9ae522f 100644 --- a/yarn-project/pxe/src/synchronizer/synchronizer.test.ts +++ b/yarn-project/pxe/src/synchronizer/synchronizer.test.ts @@ -1,4 +1,4 @@ -import { AztecNode, INITIAL_L2_BLOCK_NUM, L2Block } from '@aztec/circuit-types'; +import { AztecNode, INITIAL_L2_BLOCK_NUM, L2Block, L2BlockL2Logs } from '@aztec/circuit-types'; import { CompleteAddress, Fr, GrumpkinScalar, Header } from '@aztec/circuits.js'; import { Grumpkin } from '@aztec/circuits.js/barretenberg'; import { makeHeader } from '@aztec/circuits.js/factories'; @@ -41,8 +41,9 @@ describe('Synchronizer', () => { it('sets header from latest block', async () => { const block = L2Block.random(1, 4); - aztecNode.getBlocks.mockResolvedValue([L2Block.fromFields(omit(block, 'newEncryptedLogs', 'newUnencryptedLogs'))]); - aztecNode.getLogs.mockResolvedValueOnce([block.newEncryptedLogs!]).mockResolvedValue([block.newUnencryptedLogs!]); + aztecNode.getLogs.mockResolvedValueOnce([new L2BlockL2Logs(block.body.txEffects.map(txEffect => txEffect.logs!.encryptedLogs))]).mockResolvedValue([new L2BlockL2Logs(block.body.txEffects.map(txEffect => txEffect.logs!.unencryptedLogs))]); + block.body.txEffects.forEach(txEffect => delete txEffect.logs); + aztecNode.getBlocks.mockResolvedValue([block]); await synchronizer.work(); @@ -61,10 
diff --git a/yarn-project/pxe/src/synchronizer/synchronizer.test.ts b/yarn-project/pxe/src/synchronizer/synchronizer.test.ts
index 4353ba36b9b..ae3e9ae522f 100644
--- a/yarn-project/pxe/src/synchronizer/synchronizer.test.ts
+++ b/yarn-project/pxe/src/synchronizer/synchronizer.test.ts
@@ -1,4 +1,4 @@
-import { AztecNode, INITIAL_L2_BLOCK_NUM, L2Block } from '@aztec/circuit-types';
+import { AztecNode, INITIAL_L2_BLOCK_NUM, L2Block, L2BlockL2Logs } from '@aztec/circuit-types';
 import { CompleteAddress, Fr, GrumpkinScalar, Header } from '@aztec/circuits.js';
 import { Grumpkin } from '@aztec/circuits.js/barretenberg';
 import { makeHeader } from '@aztec/circuits.js/factories';
@@ -41,8 +41,9 @@ describe('Synchronizer', () => {
   it('sets header from latest block', async () => {
     const block = L2Block.random(1, 4);
-    aztecNode.getBlocks.mockResolvedValue([L2Block.fromFields(omit(block, 'newEncryptedLogs', 'newUnencryptedLogs'))]);
-    aztecNode.getLogs.mockResolvedValueOnce([block.newEncryptedLogs!]).mockResolvedValue([block.newUnencryptedLogs!]);
+    aztecNode.getLogs.mockResolvedValueOnce([new L2BlockL2Logs(block.body.txEffects.map(txEffect => txEffect.logs!.encryptedLogs))]).mockResolvedValue([new L2BlockL2Logs(block.body.txEffects.map(txEffect => txEffect.logs!.unencryptedLogs))]);
+    block.body.txEffects.forEach(txEffect => delete txEffect.logs);
+    aztecNode.getBlocks.mockResolvedValue([block]);
 
     await synchronizer.work();
 
@@ -61,10 +62,10 @@ describe('Synchronizer', () => {
     // We then process block with height 1, this should not change the header
     const block1 = L2Block.random(1, 4);
-    aztecNode.getBlocks.mockResolvedValueOnce([
-      L2Block.fromFields(omit(block1, 'newEncryptedLogs', 'newUnencryptedLogs')),
-    ]);
-    aztecNode.getLogs.mockResolvedValue([block1.newEncryptedLogs!]).mockResolvedValue([block1.newUnencryptedLogs!]);
+
+    aztecNode.getLogs.mockResolvedValueOnce([new L2BlockL2Logs(block1.body.txEffects.map(txEffect => txEffect.logs!.encryptedLogs))]).mockResolvedValue([new L2BlockL2Logs(block1.body.txEffects.map(txEffect => txEffect.logs!.unencryptedLogs))]);
+    block1.body.txEffects.forEach(txEffect => delete txEffect.logs);
+    aztecNode.getBlocks.mockResolvedValue([block1]);
 
     await synchronizer.work();
     const header1 = database.getHeader();
@@ -73,9 +74,9 @@ describe('Synchronizer', () => {
 
     // But they should change when we process block with height 5
     const block5 = L2Block.random(5, 4);
-    aztecNode.getBlocks.mockResolvedValueOnce([
-      L2Block.fromFields(omit(block5, 'newEncryptedLogs', 'newUnencryptedLogs')),
-    ]);
+
+    block5.body.txEffects.forEach(txEffect => delete txEffect.logs);
+    aztecNode.getBlocks.mockResolvedValue([block5]);
 
     await synchronizer.work();
     const header5 = database.getHeader();
@@ -86,23 +87,34 @@ describe('Synchronizer', () => {
   it('note processor successfully catches up', async () => {
     const blocks = [L2Block.random(1, 4), L2Block.random(2, 4)];
 
-    aztecNode.getBlocks
-      // called by synchronizer.work
-      .mockResolvedValueOnce([L2Block.fromFields(omit(blocks[0], 'newEncryptedLogs', 'newUnencryptedLogs'))])
-      .mockResolvedValueOnce([L2Block.fromFields(omit(blocks[1], 'newEncryptedLogs', 'newUnencryptedLogs'))])
-      // called by synchronizer.workNoteProcessorCatchUp
-      .mockResolvedValueOnce([L2Block.fromFields(omit(blocks[0], 'newEncryptedLogs', 'newUnencryptedLogs'))])
-      .mockResolvedValueOnce([L2Block.fromFields(omit(blocks[1], 'newEncryptedLogs', 'newUnencryptedLogs'))]);
     aztecNode.getLogs
-      // called by synchronizer.work
-      .mockResolvedValueOnce([blocks[0].newEncryptedLogs!])
-      .mockResolvedValueOnce([blocks[0].newUnencryptedLogs!])
-      .mockResolvedValueOnce([blocks[1].newEncryptedLogs!])
-      .mockResolvedValueOnce([blocks[1].newUnencryptedLogs!])
+      // called by synchronizer.work
+      .mockResolvedValueOnce([new L2BlockL2Logs(blocks[0].body.txEffects.map(txEffect => txEffect.logs!.encryptedLogs))])
+      .mockResolvedValueOnce([new L2BlockL2Logs(blocks[0].body.txEffects.map(txEffect => txEffect.logs!.unencryptedLogs))])
+      .mockResolvedValueOnce([new L2BlockL2Logs(blocks[1].body.txEffects.map(txEffect => txEffect.logs!.encryptedLogs))])
+      .mockResolvedValueOnce([new L2BlockL2Logs(blocks[1].body.txEffects.map(txEffect => txEffect.logs!.unencryptedLogs))])
+      // called by synchronizer.workNoteProcessorCatchUp
+      .mockResolvedValueOnce([new L2BlockL2Logs(blocks[0].body.txEffects.map(txEffect => txEffect.logs!.encryptedLogs))])
+      .mockResolvedValueOnce([new L2BlockL2Logs(blocks[1].body.txEffects.map(txEffect => txEffect.logs!.encryptedLogs))]);
+
+    blocks[0].body.txEffects.forEach(txEffect => delete txEffect.logs);
+    blocks[1].body.txEffects.forEach(txEffect => delete txEffect.logs);
+
+    aztecNode.getBlocks
+      // called by synchronizer.work, we are testing fromFields in this first call
+      .mockResolvedValueOnce([L2Block.fromFields({
+        archive: blocks[0].archive,
+        header: blocks[0].header,
+        body: blocks[0].body,
+      })])
+      .mockResolvedValueOnce([L2Block.fromFields({
+        archive: blocks[1].archive,
+        header: blocks[1].header,
+        body: blocks[1].body,
+      })])
       // called by synchronizer.workNoteProcessorCatchUp
-      .mockResolvedValueOnce([blocks[0].newEncryptedLogs!])
-      .mockResolvedValueOnce([blocks[1].newEncryptedLogs!]);
+      .mockResolvedValueOnce([blocks[0]])
+      .mockResolvedValueOnce([blocks[1]]);
 
     aztecNode.getBlockNumber.mockResolvedValue(INITIAL_L2_BLOCK_NUM + 1);
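// Editor's note: the mocks above rebuild the block-level log bundles from
// per-tx effects and then detach the logs, mirroring the logs-stripped form a
// node serves. A sketch under hypothetical minimal types (the real
// L2BlockL2Logs and TxEffectLogs are imported from @aztec/circuit-types):
//
//   interface TxLogsStub {}
//   interface TxEffectLogsStub { encryptedLogs: TxLogsStub; unencryptedLogs: TxLogsStub; }
//   interface TxEffectStub { logs?: TxEffectLogsStub; }
//
//   function splitAndDetachLogs(txEffects: TxEffectStub[]): { encrypted: TxLogsStub[]; unencrypted: TxLogsStub[] } {
//     const encrypted = txEffects.map(e => e.logs!.encryptedLogs);
//     const unencrypted = txEffects.map(e => e.logs!.unencryptedLogs);
//     // Detach logs so the mocked block matches what the node would return.
//     txEffects.forEach(e => delete e.logs);
//     return { encrypted, unencrypted };
//   }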
diff --git a/yarn-project/pxe/src/synchronizer/synchronizer.ts b/yarn-project/pxe/src/synchronizer/synchronizer.ts
index cf1f2af66b1..d2f92a020cf 100644
--- a/yarn-project/pxe/src/synchronizer/synchronizer.ts
+++ b/yarn-project/pxe/src/synchronizer/synchronizer.ts
@@ -131,8 +131,7 @@ export class Synchronizer {
 
     // attach logs to blocks
     blocks.forEach((block, i) => {
-      block.attachLogs(encryptedLogs[i], LogType.ENCRYPTED);
-      block.attachLogs(unencryptedLogs[i], LogType.UNENCRYPTED);
+      block.attachLogs(encryptedLogs[i], unencryptedLogs[i]);
     });
 
     // Wrap blocks in block contexts & only keep those that match our query
diff --git a/yarn-project/sequencer-client/src/block_builder/solo_block_builder.test.ts b/yarn-project/sequencer-client/src/block_builder/solo_block_builder.test.ts
index 06e160d99f2..2a6e512af1b 100644
--- a/yarn-project/sequencer-client/src/block_builder/solo_block_builder.test.ts
+++ b/yarn-project/sequencer-client/src/block_builder/solo_block_builder.test.ts
@@ -2,11 +2,12 @@ import {
   ContractData,
   ExtendedContractData,
   L2Block,
-  L2BlockL2Logs,
+  L2BlockBody,
   MerkleTreeId,
   PublicDataWrite,
   Tx,
-  TxL2Logs,
+  TxEffect,
+  TxEffectLogs,
   makeEmptyLogs,
   mockTx,
 } from '@aztec/circuit-types';
@@ -214,33 +215,25 @@ describe('sequencer/solo_block_builder', () => {
     // Update l1 to l2 message tree
     await updateL1ToL2MessageTree(mockL1ToL2Messages);
 
-    const newNullifiers = txs.flatMap(tx => tx.data.end.newNullifiers);
-    const newCommitments = txs.flatMap(tx => tx.data.end.newCommitments);
-    const newContracts = txs.flatMap(tx => tx.data.end.newContracts).map(cd => computeContractLeaf(cd));
-    const newContractData = txs
-      .flatMap(tx => tx.data.end.newContracts)
-      .map(n => new ContractData(n.contractAddress, n.portalContractAddress));
-    const newPublicDataWrites = txs.flatMap(tx =>
-      tx.data.end.publicDataUpdateRequests.map(t => new PublicDataWrite(t.leafSlot, t.newValue)),
+    const txEffects = txs.map(tx =>
+      new TxEffect(
+        tx.data.end.newCommitments.map((sideEffect: SideEffect) => sideEffect.value),
+        tx.data.end.newNullifiers.map((sideEffect: SideEffectLinkedToNoteHash) => sideEffect.value),
+        tx.data.end.newL2ToL1Msgs,
+        tx.data.end.publicDataUpdateRequests.map(t => new PublicDataWrite(t.leafSlot, t.newValue)),
+        tx.data.end.newContracts.map(cd => computeContractLeaf(cd)),
+        tx.data.end.newContracts.map(n => new ContractData(n.contractAddress, n.portalContractAddress)),
+        new TxEffectLogs(tx.encryptedLogs, tx.unencryptedLogs)
+      )
     );
-    const newL2ToL1Msgs = txs.flatMap(tx => tx.data.end.newL2ToL1Msgs);
-    const newEncryptedLogs = new L2BlockL2Logs(txs.map(tx => tx.encryptedLogs || new TxL2Logs([])));
-    const newUnencryptedLogs = new L2BlockL2Logs(txs.map(tx => tx.unencryptedLogs || new TxL2Logs([])));
+
+    const body = new L2BlockBody(mockL1ToL2Messages, txEffects);
 
     // We are constructing the block here just to get body hash/calldata hash so we can pass in an empty archive and header
     const l2Block = L2Block.fromFields({
       archive: AppendOnlyTreeSnapshot.zero(),
       header: Header.empty(),
       // Only the values below go to body hash/calldata hash
-      newCommitments: newCommitments.map((sideEffect: SideEffect) => sideEffect.value),
-      newNullifiers: newNullifiers.map((sideEffect: SideEffectLinkedToNoteHash) => sideEffect.value),
-      newContracts,
-      newContractData,
-      newPublicDataWrites,
-      newL1ToL2Messages: mockL1ToL2Messages,
-      newL2ToL1Msgs,
-      newEncryptedLogs,
-      newUnencryptedLogs,
+      body,
     });
 
     // Now we can make the final header, compute the block hash and update archive
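// Editor's note: both the test above and the builder below fold one tx's
// kernel output into a single TxEffect instead of concatenating block-wide
// arrays. A schematic version with hypothetical aliases for the circuit types:
//
//   type Field = bigint;
//
//   interface KernelEndStub {
//     newCommitments: Field[];
//     newNullifiers: Field[];
//     newL2ToL1Msgs: Field[];
//   }
//
//   interface EffectStub {
//     noteHashes: Field[];
//     nullifiers: Field[];
//     l2ToL1Msgs: Field[];
//   }
//
//   /** One effect mirrors exactly one tx's side effects, preserving their order. */
//   function toEffect(end: KernelEndStub): EffectStub {
//     return {
//       noteHashes: end.newCommitments,
//       nullifiers: end.newNullifiers,
//       l2ToL1Msgs: end.newL2ToL1Msgs,
//     };
//   }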
diff --git a/yarn-project/sequencer-client/src/block_builder/solo_block_builder.ts b/yarn-project/sequencer-client/src/block_builder/solo_block_builder.ts
index a421440cc9c..3f9a454f6df 100644
--- a/yarn-project/sequencer-client/src/block_builder/solo_block_builder.ts
+++ b/yarn-project/sequencer-client/src/block_builder/solo_block_builder.ts
@@ -1,4 +1,4 @@
-import { ContractData, L2Block, L2BlockL2Logs, MerkleTreeId, PublicDataWrite, TxL2Logs } from '@aztec/circuit-types';
+import { ContractData, L2Block, L2BlockBody, MerkleTreeId, PublicDataWrite, TxEffect, TxEffectLogs, TxL2Logs } from '@aztec/circuit-types';
 import {
   ARCHIVE_HEIGHT,
   AppendOnlyTreeSnapshot,
@@ -101,42 +101,25 @@ export class SoloBlockBuilder implements BlockBuilder {
     // We fill the tx batch with empty txs, we process only one tx at a time for now
     const [circuitsOutput, proof] = await this.runCircuits(globalVariables, txs, newL1ToL2Messages);
 
-    // Collect all new nullifiers, commitments, and contracts from all txs in this block
-    const newNullifiers = txs.flatMap(tx => tx.data.end.newNullifiers);
-    const newCommitments = txs.flatMap(tx => tx.data.end.newCommitments);
-    const newContracts = txs.flatMap(tx => tx.data.end.newContracts).map(cd => computeContractLeaf(cd));
-    const newContractData = txs
-      .flatMap(tx => tx.data.end.newContracts)
-      .map(n => new ContractData(n.contractAddress, n.portalContractAddress));
-    const newPublicDataWrites = txs.flatMap(tx =>
+    const txEffects: TxEffect[] = txs.filter(tx => !tx.data.end.newNullifiers[0].value.isZero()).map(tx => new TxEffect(
+      tx.data.end.newCommitments.map((c: SideEffect) => c.value),
+      tx.data.end.newNullifiers.map((n: SideEffectLinkedToNoteHash) => n.value),
+      tx.data.end.newL2ToL1Msgs,
       tx.data.end.publicDataUpdateRequests.map(t => new PublicDataWrite(t.leafSlot, t.newValue)),
-    );
-    const newL2ToL1Msgs = txs.flatMap(tx => tx.data.end.newL2ToL1Msgs);
+      tx.data.end.newContracts.map(cd => computeContractLeaf(cd)),
+      tx.data.end.newContracts.map(cd => new ContractData(cd.contractAddress, cd.portalContractAddress)),
+      new TxEffectLogs(tx.encryptedLogs || new TxL2Logs([]), tx.unencryptedLogs || new TxL2Logs([])),
+    ));
 
-    // Consolidate logs data from all txs
-    const encryptedLogsArr: TxL2Logs[] = [];
-    const unencryptedLogsArr: TxL2Logs[] = [];
-    for (const tx of txs) {
-      const encryptedLogs = tx.encryptedLogs || new TxL2Logs([]);
-      encryptedLogsArr.push(encryptedLogs);
-      const unencryptedLogs = tx.unencryptedLogs || new TxL2Logs([]);
-      unencryptedLogsArr.push(unencryptedLogs);
-    }
-    const newEncryptedLogs = new L2BlockL2Logs(encryptedLogsArr);
-    const newUnencryptedLogs = new L2BlockL2Logs(unencryptedLogsArr);
+    const blockBody = new L2BlockBody(
+      newL1ToL2Messages,
+      txEffects
+    );
 
     const l2Block = L2Block.fromFields({
       archive: circuitsOutput.archive,
       header: circuitsOutput.header,
-      newCommitments: newCommitments.map((c: SideEffect) => c.value),
-      newNullifiers: newNullifiers.map((n: SideEffectLinkedToNoteHash) => n.value),
-      newL2ToL1Msgs,
-      newContracts,
-      newContractData,
-      newPublicDataWrites,
-      newL1ToL2Messages,
-      newEncryptedLogs,
-      newUnencryptedLogs,
+      body: blockBody,
     });
 
     if (!l2Block.getCalldataHash().equals(circuitsOutput.header.bodyHash)) {
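// Editor's note: the builder above drops padding txs by testing the first
// nullifier for zero before building effects. A sketch of that predicate,
// assuming (as the filter implies) that a real tx always emits at least one
// non-zero nullifier while padding txs carry all-zero effects:
//
//   interface MaybePaddedTxStub {
//     nullifiers: bigint[];
//   }
//
//   function realTxsOnly(txs: MaybePaddedTxStub[]): MaybePaddedTxStub[] {
//     // Padding txs exist only to fill the rollup's fixed tx slots.
//     return txs.filter(tx => tx.nullifiers[0] !== 0n);
//   }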
diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts
index b3de75cebbb..a5899ac2c02 100644
--- a/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts
+++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts
@@ -27,7 +27,7 @@ describe('L1Publisher', () => {
     header = l2Block.header.toBuffer();
     archive = l2Block.archive.root.toBuffer();
     txsHash = l2Block.getCalldataHash();
-    body = l2Block.bodyToBuffer();
+    body = l2Block.toBuffer(true, false);
     proof = Buffer.alloc(0);
 
     txSender = mock();
diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts
index c985c6d49aa..8a8579ace3a 100644
--- a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts
+++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts
@@ -154,7 +154,7 @@ export class L1Publisher implements L2BlockReceiver {
       return false;
     }
 
-    const encodedBody = block.bodyToBuffer();
+    const encodedBody = block.toBuffer(true, false);
 
     // Publish block transaction effects
     while (!this.interrupted) {
diff --git a/yarn-project/world-state/src/synchronizer/server_world_state_synchronizer.test.ts b/yarn-project/world-state/src/synchronizer/server_world_state_synchronizer.test.ts
index 1a3f8730a7c..3e6866bfd6d 100644
--- a/yarn-project/world-state/src/synchronizer/server_world_state_synchronizer.test.ts
+++ b/yarn-project/world-state/src/synchronizer/server_world_state_synchronizer.test.ts
@@ -26,7 +26,10 @@ const getMockBlock = (blockNumber: number, newContractsCommitments?: Buffer[]) =>
   const block = L2Block.random(blockNumber);
 
   if (newContractsCommitments) {
-    block.newContracts = newContractsCommitments.map(x => Fr.fromBuffer(x));
+    for (let i = 0; i < newContractsCommitments.length; i++) {
+      // Assuming one new contract per tx
+      block.body.txEffects[i].contractLeaves = [Fr.fromBuffer(newContractsCommitments[i])];
+    }
   }
 
   return block;
};
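// Editor's note: block.toBuffer(true, false) now replaces bodyToBuffer() at
// every publish site. The diff does not show what the two booleans select, so
// the names below are assumptions; the point is only that a transposable flag
// pair is easy to misread at call sites, and an options object names the intent:
//
//   interface BlockSerializeOpts {
//     includeHeader: boolean; // assumed meaning of the first flag
//     includeLogs: boolean;   // assumed meaning of the second flag
//   }
//
//   function serializeBlock(encode: (a: boolean, b: boolean) => Uint8Array, opts: BlockSerializeOpts): Uint8Array {
//     // Delegates to the hypothetical two-flag encoder with named intent.
//     return encode(opts.includeHeader, opts.includeLogs);
//   }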
diff --git a/yarn-project/world-state/src/world-state-db/merkle_trees.ts b/yarn-project/world-state/src/world-state-db/merkle_trees.ts
index 102946f05b2..e6b4923e8f7 100644
--- a/yarn-project/world-state/src/world-state-db/merkle_trees.ts
+++ b/yarn-project/world-state/src/world-state-db/merkle_trees.ts
@@ -508,9 +508,9 @@ export class MerkleTrees implements MerkleTreeDb {
     // Sync the append only trees
     for (const [tree, leaves] of [
-      [MerkleTreeId.CONTRACT_TREE, l2Block.newContracts],
-      [MerkleTreeId.NOTE_HASH_TREE, l2Block.newCommitments],
-      [MerkleTreeId.L1_TO_L2_MESSAGE_TREE, l2Block.newL1ToL2Messages],
+      [MerkleTreeId.CONTRACT_TREE, l2Block.body.txEffects.flatMap(txEffect => txEffect.contractLeaves)],
+      [MerkleTreeId.NOTE_HASH_TREE, l2Block.body.txEffects.flatMap(txEffect => txEffect.newNoteHashes)],
+      [MerkleTreeId.L1_TO_L2_MESSAGE_TREE, l2Block.body.l1ToL2Messages],
     ] as const) {
       await this.#appendLeaves(
         tree,
@@ -520,17 +520,16 @@ export class MerkleTrees implements MerkleTreeDb {
 
     // Sync the indexed trees
     await (this.trees[MerkleTreeId.NULLIFIER_TREE] as StandardIndexedTree).batchInsert(
-      l2Block.newNullifiers.map(fr => fr.toBuffer()),
+      l2Block.body.txEffects.flatMap(txEffect => txEffect.newNullifiers.map(nullifier => nullifier.toBuffer())),
       NULLIFIER_SUBTREE_HEIGHT,
     );
 
     const publicDataTree = this.trees[MerkleTreeId.PUBLIC_DATA_TREE] as StandardIndexedTree;
 
     // We insert the public data tree leaves with one batch per tx to avoid updating the same key twice
-    for (let i = 0; i < l2Block.newPublicDataWrites.length / MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX; i++) {
+    for (let i = 0; i < l2Block.numberOfTxs; i++) {
       await publicDataTree.batchInsert(
-        l2Block.newPublicDataWrites
-          .slice(MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX * i, MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX * (i + 1))
+        l2Block.body.txEffects[i].newPublicDataWrites
           .map(write => new PublicDataTreeLeaf(write.leafIndex, write.newValue).toBuffer()),
         PUBLIC_DATA_SUBTREE_HEIGHT,
       );
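// Editor's note: the public-data sync above now loops over txs and
// batch-inserts each tx's writes separately, which is what the source comment
// ("one batch per tx to avoid updating the same key twice") is protecting
// against. A schematic of the loop with hypothetical stand-ins for the tree
// and write types:
//
//   interface PublicWriteStub {
//     leafIndex: bigint;
//     newValue: bigint;
//   }
//
//   interface EffectWithWritesStub {
//     newPublicDataWrites: PublicWriteStub[];
//   }
//
//   async function syncPublicData(
//     txEffects: EffectWithWritesStub[],
//     batchInsert: (writes: PublicWriteStub[]) => Promise<void>,
//   ): Promise<void> {
//     for (const effect of txEffects) {
//       // One batch per tx: a leaf touched by two txs is updated in the second
//       // batch rather than inserted twice within a single batch.
//       await batchInsert(effect.newPublicDataWrites);
//     }
//   }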