fix: Fetch Headers and Bodies separately #4167 (#4632)
Resolves #4167.
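
In short: block bodies are now retrieved from the AvailabilityOracle's `TxsPublished` events and block headers from the Rollup's `L2BlockProcessed` events, and the two are joined on the body hash (`txsHash`). A minimal sketch of that join, with simplified stand-in types rather than the real Aztec interfaces:

```typescript
// Hedged sketch of the new sync flow; Header, Body, and Block here are
// simplified stand-ins, not the real circuit types.
type TxsHash = string;

interface Body { txsHash: TxsHash } // tx effects, logs, L1-to-L2 messages
interface Header { txsHash: TxsHash } // content commitment, state roots
interface Block { header: Header; body: Body }

// Bodies may be published in earlier L1 blocks than their headers, so the
// archiver stores them first and looks each one up once its header arrives.
function joinHeadersAndBodies(headers: Header[], bodiesByHash: Map<TxsHash, Body>): Block[] {
  return headers.map(header => {
    const body = bodiesByHash.get(header.txsHash);
    if (body === undefined) {
      throw new Error(`No body stored for txsHash ${header.txsHash}`);
    }
    return { header, body };
  });
}
```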

---------

Co-authored-by: Jan Beneš <[email protected]>
sklppy88 and benesjan authored Mar 1, 2024
1 parent 2b6656d commit 0681b3a
Showing 18 changed files with 537 additions and 134 deletions.
84 changes: 67 additions & 17 deletions yarn-project/archiver/src/archiver/archiver.test.ts
@@ -1,11 +1,9 @@
import { ExtendedContractData, L2Block, L2BlockL2Logs, LogType } from '@aztec/circuit-types';
import { NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP } from '@aztec/circuits.js';
import { Body, ExtendedContractData, L2Block, L2BlockL2Logs, LogType } from '@aztec/circuit-types';
import { AztecAddress } from '@aztec/foundation/aztec-address';
import { times } from '@aztec/foundation/collection';
import { EthAddress } from '@aztec/foundation/eth-address';
import { Fr } from '@aztec/foundation/fields';
import { sleep } from '@aztec/foundation/sleep';
import { ContractDeploymentEmitterAbi, InboxAbi, RollupAbi } from '@aztec/l1-artifacts';
import { AvailabilityOracleAbi, ContractDeploymentEmitterAbi, InboxAbi, RollupAbi } from '@aztec/l1-artifacts';

import { MockProxy, mock } from 'jest-mock-extended';
import { Chain, HttpTransport, Log, PublicClient, Transaction, encodeFunctionData, toHex } from 'viem';
@@ -18,6 +16,7 @@ describe('Archiver', () => {
const rollupAddress = EthAddress.ZERO.toString();
const inboxAddress = EthAddress.ZERO.toString();
const registryAddress = EthAddress.ZERO.toString();
const availabilityOracleAddress = EthAddress.ZERO.toString();
const contractDeploymentEmitterAddress = '0x0000000000000000000000000000000000000001';
const blockNumbers = [1, 2, 3];
let publicClient: MockProxy<PublicClient<HttpTransport, Chain>>;
@@ -32,6 +31,7 @@ describe('Archiver', () => {
const archiver = new Archiver(
publicClient,
EthAddress.fromString(rollupAddress),
EthAddress.fromString(availabilityOracleAddress),
EthAddress.fromString(inboxAddress),
EthAddress.fromString(registryAddress),
EthAddress.fromString(contractDeploymentEmitterAddress),
@@ -43,7 +43,9 @@ describe('Archiver', () => {
expect(latestBlockNum).toEqual(0);

const blocks = blockNumbers.map(x => L2Block.random(x, 4, x, x + 1, x * 2, x * 3));
const publishTxs = blocks.map(block => block.body).map(makePublishTx);
const rollupTxs = blocks.map(makeRollupTx);

// `L2Block.random(x)` creates some L1-to-L2 messages. We add those,
// since the test expects them to be consumed.
// The archiver removes such messages from the pending store.
@@ -58,15 +60,15 @@ describe('Archiver', () => {
const l1ToL2MessageAddedEvents = [
makeL1ToL2MessageAddedEvents(
100n,
blocks[0].body.l1ToL2Messages.map(key => key.toString()),
blocks[0].body.l1ToL2Messages.flatMap(key => (key.isZero() ? [] : key.toString())),
),
makeL1ToL2MessageAddedEvents(
100n,
blocks[1].body.l1ToL2Messages.map(key => key.toString()),
blocks[1].body.l1ToL2Messages.flatMap(key => (key.isZero() ? [] : key.toString())),
),
makeL1ToL2MessageAddedEvents(
2501n,
blocks[2].body.l1ToL2Messages.map(key => key.toString()),
blocks[2].body.l1ToL2Messages.flatMap(key => (key.isZero() ? [] : key.toString())),
),
makeL1ToL2MessageAddedEvents(2502n, [
messageToCancel1,
@@ -80,14 +82,23 @@ describe('Archiver', () => {
publicClient.getLogs
.mockResolvedValueOnce(l1ToL2MessageAddedEvents.slice(0, 2).flat())
.mockResolvedValueOnce([]) // no messages to cancel
.mockResolvedValueOnce([makeTxsPublishedEvent(101n, blocks[0].body.getCalldataHash())])
.mockResolvedValueOnce([makeL2BlockProcessedEvent(101n, 1n)])
.mockResolvedValueOnce([makeContractDeploymentEvent(103n, blocks[0])]) // the first loop of the archiver ends here at block 2500
.mockResolvedValueOnce(l1ToL2MessageAddedEvents.slice(2, 4).flat())
.mockResolvedValueOnce(makeL1ToL2MessageCancelledEvents(2503n, l1ToL2MessagesToCancel))
.mockResolvedValueOnce([
makeTxsPublishedEvent(2510n, blocks[1].body.getCalldataHash()),
makeTxsPublishedEvent(2520n, blocks[2].body.getCalldataHash()),
])
.mockResolvedValueOnce([makeL2BlockProcessedEvent(2510n, 2n), makeL2BlockProcessedEvent(2520n, 3n)])
.mockResolvedValueOnce([makeContractDeploymentEvent(2540n, blocks[1])])
.mockResolvedValue([]);
rollupTxs.forEach(tx => publicClient.getTransaction.mockResolvedValueOnce(tx));
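// getTransaction mocks are queued in the order the archiver requests them:
// in each sync iteration, publish (body) txs are fetched before the corresponding rollup txs.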
publicClient.getTransaction.mockResolvedValueOnce(publishTxs[0]);
publicClient.getTransaction.mockResolvedValueOnce(rollupTxs[0]);

publishTxs.slice(1).forEach(tx => publicClient.getTransaction.mockResolvedValueOnce(tx));
rollupTxs.slice(1).forEach(tx => publicClient.getTransaction.mockResolvedValueOnce(tx));

await archiver.start(false);

@@ -135,6 +146,7 @@ describe('Archiver', () => {
const archiver = new Archiver(
publicClient,
EthAddress.fromString(rollupAddress),
EthAddress.fromString(availabilityOracleAddress),
EthAddress.fromString(inboxAddress),
EthAddress.fromString(registryAddress),
EthAddress.fromString(contractDeploymentEmitterAddress),
@@ -150,7 +162,10 @@ describe('Archiver', () => {
};

const blocks = blockNumbers.map(x => L2Block.random(x, 4, x, x + 1, x * 2, x * 3));

const publishTxs = blocks.map(block => block.body).map(makePublishTx);
const rollupTxs = blocks.map(makeRollupTx);

// `L2Block.random(x)` creates some L1-to-L2 messages. We add those,
// since the test expects them to be consumed.
// The archiver removes such messages from the pending store.
@@ -162,11 +177,11 @@ describe('Archiver', () => {
const l1ToL2MessageAddedEvents = [
makeL1ToL2MessageAddedEvents(
100n,
blocks[0].body.l1ToL2Messages.map(key => key.toString()),
blocks[0].body.l1ToL2Messages.flatMap(key => (key.isZero() ? [] : key.toString())),
),
makeL1ToL2MessageAddedEvents(
101n,
blocks[1].body.l1ToL2Messages.map(key => key.toString()),
blocks[1].body.l1ToL2Messages.flatMap(key => (key.isZero() ? [] : key.toString())),
),
makeL1ToL2MessageAddedEvents(102n, additionalL1ToL2MessagesBlock102),
makeL1ToL2MessageAddedEvents(103n, additionalL1ToL2MessagesBlock103),
@@ -184,8 +199,13 @@ describe('Archiver', () => {
);
})
.mockResolvedValueOnce([])
.mockResolvedValueOnce([
makeTxsPublishedEvent(70n, blocks[0].body.getCalldataHash()),
makeTxsPublishedEvent(80n, blocks[1].body.getCalldataHash()),
])
.mockResolvedValueOnce([makeL2BlockProcessedEvent(70n, 1n), makeL2BlockProcessedEvent(80n, 2n)])
.mockResolvedValue([]);
publishTxs.slice(0, numL2BlocksInTest).forEach(tx => publicClient.getTransaction.mockResolvedValueOnce(tx));
rollupTxs.slice(0, numL2BlocksInTest).forEach(tx => publicClient.getTransaction.mockResolvedValueOnce(tx));

await archiver.start(false);
@@ -207,11 +227,10 @@ describe('Archiver', () => {
}, 10_000);

it('pads L1 to L2 messages', async () => {
const NUM_RECEIVED_L1_MESSAGES = 2;

const archiver = new Archiver(
publicClient,
EthAddress.fromString(rollupAddress),
EthAddress.fromString(availabilityOracleAddress),
EthAddress.fromString(inboxAddress),
EthAddress.fromString(registryAddress),
EthAddress.fromString(contractDeploymentEmitterAddress),
@@ -223,8 +242,8 @@ describe('Archiver', () => {
expect(latestBlockNum).toEqual(0);

const block = L2Block.random(1, 4, 1, 2, 4, 6);
block.body.l1ToL2Messages = times(2, Fr.random);
const rollupTx = makeRollupTx(block);
const publishTx = makePublishTx(block.body);

publicClient.getBlockNumber.mockResolvedValueOnce(2500n);
// Logs should be created in the order in which the archiver syncs.
@@ -236,8 +255,10 @@ describe('Archiver', () => {
),
)
.mockResolvedValueOnce([])
.mockResolvedValueOnce([makeTxsPublishedEvent(101n, block.body.getCalldataHash())])
.mockResolvedValueOnce([makeL2BlockProcessedEvent(101n, 1n)])
.mockResolvedValue([]);
publicClient.getTransaction.mockResolvedValueOnce(publishTx);
publicClient.getTransaction.mockResolvedValueOnce(rollupTx);

await archiver.start(false);
@@ -250,10 +271,9 @@ describe('Archiver', () => {
latestBlockNum = await archiver.getBlockNumber();
expect(latestBlockNum).toEqual(1);

const expectedL1Messages = block.body.l1ToL2Messages
.concat(times(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP - NUM_RECEIVED_L1_MESSAGES, () => Fr.ZERO))
.map(x => x.value);
const expectedL1Messages = block.body.l1ToL2Messages.map(x => x.value);
const receivedBlock = await archiver.getBlock(1);

expect(receivedBlock?.body.l1ToL2Messages.map(x => x.value)).toEqual(expectedL1Messages);

await archiver.stop();
@@ -263,7 +283,7 @@ describe('Archiver', () => {
/**
* Makes a fake L2BlockProcessed event for testing purposes.
* @param l1BlockNum - L1 block number.
* @param l2BlockNum - L2Block number.
* @param l2BlockNum - L2 Block number.
* @returns An L2BlockProcessed event log.
*/
function makeL2BlockProcessedEvent(l1BlockNum: bigint, l2BlockNum: bigint) {
@@ -274,6 +294,21 @@ function makeL2BlockProcessedEvent(l1BlockNum: bigint, l2BlockNum: bigint) {
} as Log<bigint, number, false, undefined, true, typeof RollupAbi, 'L2BlockProcessed'>;
}

/**
* Makes a fake TxsPublished event for testing purposes.
* @param l1BlockNum - L1 block number.
* @param txsHash - txsHash for the body.
* @returns A TxsPublished event log.
*/
function makeTxsPublishedEvent(l1BlockNum: bigint, txsHash: Buffer) {
return {
blockNumber: l1BlockNum,
args: {
txsHash: txsHash.toString('hex'),
},
} as Log<bigint, number, false, undefined, true, typeof AvailabilityOracleAbi, 'TxsPublished'>;
}

/**
* Makes a fake ContractDeployment event for testing purposes.
* @param l1BlockNum - L1 block number.
@@ -360,3 +395,18 @@ function makeRollupTx(l2Block: L2Block) {
});
return { input } as Transaction<bigint, number>;
}

/**
* Makes a fake availability oracle tx for testing purposes.
* @param blockBody - The block body posted by the simulated tx.
* @returns A fake tx with calldata that corresponds to calling publish in the Availability Oracle contract.
*/
function makePublishTx(blockBody: Body) {
const body = toHex(blockBody.toBuffer());
const input = encodeFunctionData({
abi: AvailabilityOracleAbi,
functionName: 'publish',
args: [body],
});
return { input } as Transaction<bigint, number>;
}
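
On the retrieval side, the archiver presumably inverts `makePublishTx` by decoding the publish tx's calldata back into a body. A hedged sketch using viem's `decodeFunctionData`; the real logic lives in the data-retrieval helpers (not shown in this diff), and `Body.fromBuffer` is assumed here as the inverse of `body.toBuffer()`:

```typescript
import { Body } from '@aztec/circuit-types';
import { AvailabilityOracleAbi } from '@aztec/l1-artifacts';
import { Hex, decodeFunctionData } from 'viem';

// Sketch: recover a block body from a publish tx's calldata.
function bodyFromPublishCalldata(input: Hex): Body {
  const { functionName, args } = decodeFunctionData({ abi: AvailabilityOracleAbi, data: input });
  if (functionName !== 'publish') {
    throw new Error(`Expected a publish tx, got ${functionName}`);
  }
  // Body.fromBuffer is an assumption -- swap in the real deserializer.
  return Body.fromBuffer(Buffer.from((args[0] as Hex).slice(2), 'hex'));
}
```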
61 changes: 38 additions & 23 deletions yarn-project/archiver/src/archiver/archiver.ts
@@ -16,15 +16,10 @@ import {
TxHash,
UnencryptedL2Log,
} from '@aztec/circuit-types';
import {
ContractClassRegisteredEvent,
FunctionSelector,
NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP,
} from '@aztec/circuits.js';
import { ContractClassRegisteredEvent, FunctionSelector } from '@aztec/circuits.js';
import { ContractInstanceDeployedEvent } from '@aztec/circuits.js/contract';
import { createEthereumChain } from '@aztec/ethereum';
import { AztecAddress } from '@aztec/foundation/aztec-address';
import { padArrayEnd } from '@aztec/foundation/collection';
import { EthAddress } from '@aztec/foundation/eth-address';
import { Fr } from '@aztec/foundation/fields';
import { DebugLogger, createDebugLogger } from '@aztec/foundation/log';
@@ -43,7 +38,8 @@ import { Chain, HttpTransport, PublicClient, createPublicClient, http } from 'viem';
import { ArchiverDataStore } from './archiver_store.js';
import { ArchiverConfig } from './config.js';
import {
retrieveBlocks,
retrieveBlockBodiesFromAvailabilityOracle,
retrieveBlockMetadataFromRollup,
retrieveNewCancelledL1ToL2Messages,
retrieveNewContractData,
retrieveNewPendingL1ToL2Messages,
@@ -84,6 +80,7 @@ export class Archiver implements ArchiveSource {
constructor(
private readonly publicClient: PublicClient<HttpTransport, Chain>,
private readonly rollupAddress: EthAddress,
private readonly availabilityOracleAddress: EthAddress,
private readonly inboxAddress: EthAddress,
private readonly registryAddress: EthAddress,
private readonly contractDeploymentEmitterAddress: EthAddress,
@@ -114,6 +111,7 @@ export class Archiver implements ArchiveSource {
const archiver = new Archiver(
publicClient,
config.l1Contracts.rollupAddress,
config.l1Contracts.availabilityOracleAddress,
config.l1Contracts.inboxAddress,
config.l1Contracts.registryAddress,
config.l1Contracts.contractDeploymentEmitterAddress,
@@ -238,7 +236,20 @@

// Read all data from chain and then write to our stores at the end
const nextExpectedL2BlockNum = BigInt((await this.store.getBlockNumber()) + 1);
const retrievedBlocks = await retrieveBlocks(

const retrievedBlockBodies = await retrieveBlockBodiesFromAvailabilityOracle(
this.publicClient,
this.availabilityOracleAddress,
blockUntilSynced,
lastL1Blocks.addedBlock + 1n,
currentL1BlockNumber,
);

const blockBodies = retrievedBlockBodies.retrievedData.map(([blockBody]) => blockBody);

await this.store.addBlockBodies(blockBodies);

const retrievedBlockMetadata = await retrieveBlockMetadataFromRollup(
this.publicClient,
this.rollupAddress,
blockUntilSynced,
@@ -247,6 +258,23 @@
nextExpectedL2BlockNum,
);

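// Match each retrieved header to its stored body via the header's content-commitment txsHash.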
const retrievedBodyHashes = retrievedBlockMetadata.retrievedData.map(
([header]) => header.contentCommitment.txsHash,
);

const blockBodiesFromStore = await this.store.getBlockBodies(retrievedBodyHashes);

if (retrievedBlockMetadata.retrievedData.length !== blockBodiesFromStore.length) {
throw new Error('Block headers length does not equal block bodies length');
}

const retrievedBlocks = {
retrievedData: retrievedBlockMetadata.retrievedData.map(
(blockMetadata, i) =>
new L2Block(blockMetadata[1], blockMetadata[0], blockBodiesFromStore[i], blockMetadata[2]),
),
};

if (retrievedBlocks.retrievedData.length === 0) {
return;
} else {
@@ -260,7 +288,7 @@
// create the block number -> block hash mapping to ensure we retrieve the appropriate events
const blockNumberToBodyHash: { [key: number]: Buffer | undefined } = {};
retrievedBlocks.retrievedData.forEach((block: L2Block) => {
blockNumberToBodyHash[block.number] = block.body.getCalldataHash();
blockNumberToBodyHash[block.number] = block.header.contentCommitment.txsHash;
});
const retrievedContracts = await retrieveNewContractData(
this.publicClient,
@@ -313,20 +341,7 @@
await this.store.confirmL1ToL2EntryKeys(block.body.l1ToL2Messages);
}

// store retrieved L2 blocks after removing new logs information.
// remove logs to serve "lightweight" block information. Logs can be fetched separately if needed.
await this.store.addBlocks(
retrievedBlocks.retrievedData.map(block => {
// Ensure we pad the L1 to L2 message array to the full size before storing.
block.body.l1ToL2Messages = padArrayEnd(
block.body.l1ToL2Messages,
Fr.ZERO,
NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP,
);

return block;
}),
);
await this.store.addBlocks(retrievedBlocks.retrievedData);
}

16 changes: 16 additions & 0 deletions yarn-project/archiver/src/archiver/archiver_store.ts
@@ -1,4 +1,5 @@
import {
Body,
ContractData,
ExtendedContractData,
GetUnencryptedLogsResponse,
@@ -38,6 +39,21 @@ export interface ArchiverDataStore {
*/
addBlocks(blocks: L2Block[]): Promise<boolean>;

/**
* Append new block bodies to the store's list.
* @param blockBodies - The L2 block bodies to be added to the store.
* @returns True if the operation is successful.
*/
addBlockBodies(blockBodies: Body[]): Promise<boolean>;

/**
* Gets the block bodies with the given txsHashes.
*
* @param txsHashes - A list of txsHashes (body hashes).
* @returns The requested L2 block bodies.
*/
getBlockBodies(txsHashes: Buffer[]): Promise<Body[]>;

/**
* Gets up to `limit` amount of L2 blocks starting from `from`.
* @param from - Number of the first block to return (inclusive).
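
A conforming store can back these two methods with a map keyed on the body hash. A minimal in-memory sketch, assuming a `getTxsHash()` accessor (the assumed BlockBody shape stands in for the real `Body` class):

```typescript
// Minimal in-memory sketch of the two new store methods; BlockBody is a
// stand-in for @aztec/circuit-types' Body.
interface BlockBody {
  getTxsHash(): Buffer;
}

class InMemoryBodyStore {
  private bodies = new Map<string, BlockBody>();

  public addBlockBodies(blockBodies: BlockBody[]): Promise<boolean> {
    for (const body of blockBodies) {
      this.bodies.set(body.getTxsHash().toString('hex'), body);
    }
    return Promise.resolve(true);
  }

  public getBlockBodies(txsHashes: Buffer[]): Promise<BlockBody[]> {
    // Preserve the order of the requested hashes; a missing body is an error,
    // mirroring the length check the archiver performs after this lookup.
    return Promise.resolve(
      txsHashes.map(txsHash => {
        const body = this.bodies.get(txsHash.toString('hex'));
        if (body === undefined) {
          throw new Error(`No body stored for txsHash 0x${txsHash.toString('hex')}`);
        }
        return body;
      }),
    );
  }
}
```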