feat: Prover node (#7452)
Introduces a new prover node package, which spawns a prover that follows
the proven chain and exposes a high-level method for proving an
unfinalised block. This involves fetching the tx hashes from L1,
requesting the individual txs from a tx source (today a remote Aztec
node), running the tx processor against a world state synced to the last
proven block, proving the block with the prover orchestrator, and
submitting the resulting proof back to L1 via the L1 publisher.
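
For orientation, here is a minimal TypeScript sketch of that flow; every interface and method name below is a placeholder chosen for illustration, not the actual API introduced by this PR.

```ts
// Illustrative sketch of the proving flow described above.
// All types and interfaces here are hypothetical stand-ins.
type TxHash = string;
type Tx = unknown;
type ProcessedTx = unknown;
type Proof = unknown;

interface L1BlockSource {
  getTxHashesForBlock(blockNumber: number): Promise<TxHash[]>;
}
interface TxSource {
  getTxsByHash(hashes: TxHash[]): Promise<Tx[]>;
}
interface TxProcessor {
  // Assumed to execute against a world state synced to the last proven block.
  process(txs: Tx[]): Promise<ProcessedTx[]>;
}
interface ProverOrchestrator {
  proveBlock(txs: ProcessedTx[]): Promise<Proof>;
}
interface L1Publisher {
  submitProof(proof: Proof): Promise<void>;
}

/** Proves a single unfinalised block and submits the proof back to L1. */
async function proveAndSubmit(
  blockNumber: number,
  l1: L1BlockSource,
  txSource: TxSource,
  processor: TxProcessor,
  orchestrator: ProverOrchestrator,
  publisher: L1Publisher,
): Promise<void> {
  const txHashes = await l1.getTxHashesForBlock(blockNumber); // tx hashes from L1
  const txs = await txSource.getTxsByHash(txHashes); // individual txs from the tx source
  const processed = await processor.process(txs); // re-execute the txs
  const proof = await orchestrator.proveBlock(processed); // build the block proof
  await publisher.submitProof(proof); // submit the proof back to L1
}
```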

Below is an example of a node and a prover node running side by side,
started with the following commands:

```
ETHEREUM_HOST='http://localhost:8545' DEPLOY_AZTEC_CONTRACTS=true SEQ_SKIP_SUBMIT_PROOFS=1 SEQ_PUBLISHER_PRIVATE_KEY=0x59c6995e998f97a5a0044966f0945389dc9e86dae88c7a8412f4603b6b78690d \
yarn aztec start --node --sequencer --prover --archiver --pxe
```

```
LOG_LEVEL=verbose ETHEREUM_HOST='http://localhost:8545' AZTEC_NODE_URL='http://localhost:8080' \
yarn aztec start -p 8090 --prover-node --prover --archiver
```

[Screencast from 2024-07-19
18-42-27.webm](https://github.com/user-attachments/assets/556edf0e-9843-481a-a488-1a497728194f)

See #7346
spalladino authored Jul 22, 2024
1 parent 689000a commit 609a68f
Showing 89 changed files with 1,604 additions and 317 deletions.
63 changes: 39 additions & 24 deletions yarn-project/archiver/src/archiver/archiver.ts
@@ -140,7 +140,7 @@ export class Archiver implements ArchiveSource {
}

if (blockUntilSynced) {
this.log.info(`Performing initial chain sync...`);
this.log.info(`Performing initial chain sync to rollup contract ${this.rollupAddress.toString()}`);
await this.sync(blockUntilSynced);
}

@@ -165,15 +165,13 @@ export class Archiver implements ArchiveSource {
*
* This code does not handle reorgs.
*/
const l1SynchPoint = await this.store.getSynchPoint();
const { blocksSynchedTo, messagesSynchedTo } = await this.store.getSynchPoint();
const currentL1BlockNumber = await this.publicClient.getBlockNumber();

if (
currentL1BlockNumber <= l1SynchPoint.blocksSynchedTo &&
currentL1BlockNumber <= l1SynchPoint.messagesSynchedTo
) {
if (currentL1BlockNumber <= blocksSynchedTo && currentL1BlockNumber <= messagesSynchedTo) {
// chain hasn't moved forward
// or it's been rolled back
this.log.debug(`Nothing to sync`, { currentL1BlockNumber, blocksSynchedTo, messagesSynchedTo });
return;
}

@@ -204,14 +202,14 @@ export class Archiver implements ArchiveSource {
this.publicClient,
this.inboxAddress,
blockUntilSynced,
l1SynchPoint.messagesSynchedTo + 1n,
messagesSynchedTo + 1n,
currentL1BlockNumber,
);

if (retrievedL1ToL2Messages.retrievedData.length !== 0) {
this.log.verbose(
`Retrieved ${retrievedL1ToL2Messages.retrievedData.length} new L1 -> L2 messages between L1 blocks ${
l1SynchPoint.messagesSynchedTo + 1n
messagesSynchedTo + 1n
} and ${currentL1BlockNumber}.`,
);
}
@@ -225,7 +223,7 @@ export class Archiver implements ArchiveSource {
this.publicClient,
this.availabilityOracleAddress,
blockUntilSynced,
l1SynchPoint.blocksSynchedTo + 1n,
blocksSynchedTo + 1n,
currentL1BlockNumber,
);

@@ -240,7 +238,7 @@ export class Archiver implements ArchiveSource {
this.publicClient,
this.rollupAddress,
blockUntilSynced,
l1SynchPoint.blocksSynchedTo + 1n,
blocksSynchedTo + 1n,
currentL1BlockNumber,
nextExpectedL2BlockNum,
);
@@ -259,15 +257,11 @@ export class Archiver implements ArchiveSource {
(blockMetadata, i) => new L2Block(blockMetadata[1], blockMetadata[0], blockBodiesFromStore[i]),
);

if (blocks.length === 0) {
return;
} else {
this.log.verbose(
`Retrieved ${blocks.length} new L2 blocks between L1 blocks ${
l1SynchPoint.blocksSynchedTo + 1n
} and ${currentL1BlockNumber}.`,
);
}
(blocks.length ? this.log.verbose : this.log.debug)(
`Retrieved ${blocks.length || 'no'} new L2 blocks between L1 blocks ${
blocksSynchedTo + 1n
} and ${currentL1BlockNumber}.`,
);

retrievedBlocks = {
lastProcessedL1BlockNumber: retrievedBlockMetadata.lastProcessedL1BlockNumber,
@@ -296,19 +290,27 @@ export class Archiver implements ArchiveSource {
}),
);

await this.store.addBlocks(retrievedBlocks);
this.instrumentation.processNewBlocks(retrievedBlocks.retrievedData);
if (retrievedBlocks.retrievedData.length > 0) {
await this.store.addBlocks(retrievedBlocks);
this.instrumentation.processNewBlocks(retrievedBlocks.retrievedData);
const lastL2BlockNumber = retrievedBlocks.retrievedData[retrievedBlocks.retrievedData.length - 1].number;
this.log.verbose(`Processed ${retrievedBlocks.retrievedData.length} new L2 blocks up to ${lastL2BlockNumber}`);
}

// Fetch the logs for proven blocks in the block range and update the last proven block number.
// Note it's ok to read repeated data here, since we're just using the largest number we see on the logs.
await this.updateLastProvenL2Block(l1SynchPoint.blocksSynchedTo, currentL1BlockNumber);
await this.updateLastProvenL2Block(blocksSynchedTo, currentL1BlockNumber);

if (retrievedBlocks.retrievedData.length > 0 || blockUntilSynced) {
(blockUntilSynced ? this.log.info : this.log.verbose)(`Synced to L1 block ${currentL1BlockNumber}`);
}
}

private async updateLastProvenL2Block(fromBlock: bigint, toBlock: bigint) {
const logs = await this.publicClient.getLogs({
address: this.rollupAddress.toString(),
fromBlock,
toBlock,
toBlock: toBlock + 1n, // toBlock is exclusive
strict: true,
event: getAbiItem({ abi: RollupAbi, name: 'L2ProofVerified' }),
});
@@ -319,7 +321,15 @@ export class Archiver implements ArchiveSource {
}

const provenBlockNumber = lastLog.args.blockNumber;
await this.store.setProvenL2BlockNumber(Number(provenBlockNumber));
if (!provenBlockNumber) {
throw new Error(`Missing argument blockNumber from L2ProofVerified event`);
}

const currentProvenBlockNumber = await this.store.getProvenL2BlockNumber();
if (provenBlockNumber > currentProvenBlockNumber) {
this.log.verbose(`Updated last proven block number from ${currentProvenBlockNumber} to ${provenBlockNumber}`);
await this.store.setProvenL2BlockNumber(Number(provenBlockNumber));
}
}

/**
@@ -502,6 +512,11 @@ export class Archiver implements ArchiveSource {
return this.store.getProvenL2BlockNumber();
}

/** Forcefully updates the last proven block number. Use for testing. */
public setProvenBlockNumber(block: number): Promise<void> {
return this.store.setProvenL2BlockNumber(block);
}

public getContractClass(id: Fr): Promise<ContractClassPublic | undefined> {
return this.store.getContractClass(id);
}
37 changes: 11 additions & 26 deletions yarn-project/archiver/src/archiver/config.ts
@@ -1,5 +1,4 @@
import { type L1ContractAddresses } from '@aztec/ethereum';
import { EthAddress } from '@aztec/foundation/eth-address';
import { type L1ContractAddresses, getL1ContractAddressesFromEnv } from '@aztec/ethereum';

/**
* There are 2 polling intervals used in this configuration. The first is the archiver polling interval, archiverPollingIntervalMS.
@@ -12,6 +11,11 @@ import { EthAddress } from '@aztec/foundation/eth-address';
* The archiver configuration.
*/
export interface ArchiverConfig {
/**
* URL for an archiver service. If set, will return an archiver client as opposed to starting a new one.
*/
archiverUrl?: string;

/**
* The url of the Ethereum RPC node.
*/
@@ -45,7 +49,7 @@ export interface ArchiverConfig {
/**
* Optional dir to store data. If omitted will store in memory.
*/
dataDirectory?: string;
dataDirectory: string | undefined;

/** The max number of logs that can be obtained in 1 "getUnencryptedLogs" call. */
maxLogs?: number;
@@ -56,43 +60,24 @@ export interface ArchiverConfig {
* Note: If an environment variable is not set, the default value is used.
* @returns The archiver configuration.
*/
export function getConfigEnvVars(): ArchiverConfig {
export function getArchiverConfigFromEnv(): ArchiverConfig {
const {
ETHEREUM_HOST,
L1_CHAIN_ID,
ARCHIVER_POLLING_INTERVAL_MS,
ARCHIVER_VIEM_POLLING_INTERVAL_MS,
AVAILABILITY_ORACLE_CONTRACT_ADDRESS,
ROLLUP_CONTRACT_ADDRESS,
API_KEY,
INBOX_CONTRACT_ADDRESS,
OUTBOX_CONTRACT_ADDRESS,
REGISTRY_CONTRACT_ADDRESS,
GAS_TOKEN_CONTRACT_ADDRESS,
GAS_PORTAL_CONTRACT_ADDRESS,
DATA_DIRECTORY,
ARCHIVER_URL,
} = process.env;
// Populate the relevant addresses for use by the archiver.
const addresses: L1ContractAddresses = {
availabilityOracleAddress: AVAILABILITY_ORACLE_CONTRACT_ADDRESS
? EthAddress.fromString(AVAILABILITY_ORACLE_CONTRACT_ADDRESS)
: EthAddress.ZERO,
rollupAddress: ROLLUP_CONTRACT_ADDRESS ? EthAddress.fromString(ROLLUP_CONTRACT_ADDRESS) : EthAddress.ZERO,
registryAddress: REGISTRY_CONTRACT_ADDRESS ? EthAddress.fromString(REGISTRY_CONTRACT_ADDRESS) : EthAddress.ZERO,
inboxAddress: INBOX_CONTRACT_ADDRESS ? EthAddress.fromString(INBOX_CONTRACT_ADDRESS) : EthAddress.ZERO,
outboxAddress: OUTBOX_CONTRACT_ADDRESS ? EthAddress.fromString(OUTBOX_CONTRACT_ADDRESS) : EthAddress.ZERO,
gasTokenAddress: GAS_TOKEN_CONTRACT_ADDRESS ? EthAddress.fromString(GAS_TOKEN_CONTRACT_ADDRESS) : EthAddress.ZERO,
gasPortalAddress: GAS_PORTAL_CONTRACT_ADDRESS
? EthAddress.fromString(GAS_PORTAL_CONTRACT_ADDRESS)
: EthAddress.ZERO,
};
return {
rpcUrl: ETHEREUM_HOST || '',
l1ChainId: L1_CHAIN_ID ? +L1_CHAIN_ID : 31337, // 31337 is the default chain id for anvil
archiverPollingIntervalMS: ARCHIVER_POLLING_INTERVAL_MS ? +ARCHIVER_POLLING_INTERVAL_MS : 1_000,
viemPollingIntervalMS: ARCHIVER_VIEM_POLLING_INTERVAL_MS ? +ARCHIVER_VIEM_POLLING_INTERVAL_MS : 1_000,
apiKey: API_KEY,
l1Contracts: addresses,
l1Contracts: getL1ContractAddressesFromEnv(),
dataDirectory: DATA_DIRECTORY,
archiverUrl: ARCHIVER_URL,
};
}
23 changes: 23 additions & 0 deletions yarn-project/archiver/src/factory.ts
@@ -0,0 +1,23 @@
import { type AztecKVStore } from '@aztec/kv-store';
import { type TelemetryClient } from '@aztec/telemetry-client';
import { NoopTelemetryClient } from '@aztec/telemetry-client/noop';

import { Archiver } from './archiver/archiver.js';
import { type ArchiverConfig } from './archiver/config.js';
import { KVArchiverDataStore } from './archiver/index.js';
import { createArchiverClient } from './rpc/archiver_client.js';

export function createArchiver(
config: ArchiverConfig,
store: AztecKVStore,
telemetry: TelemetryClient = new NoopTelemetryClient(),
opts: { blockUntilSync: boolean } = { blockUntilSync: true },
) {
if (!config.archiverUrl) {
// first create and sync the archiver
const archiverStore = new KVArchiverDataStore(store, config.maxLogs);
return Archiver.createAndSync(config, archiverStore, telemetry, opts.blockUntilSync);
} else {
return createArchiverClient(config.archiverUrl);
}
}
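
For context on how this new factory is meant to be consumed, here is a minimal usage sketch; the `openTmpStore` helper and the env-derived config are assumptions made for illustration rather than part of this diff.

```ts
// Usage sketch only; `openTmpStore` and the env-based config are assumed here.
import { createArchiver, getArchiverConfigFromEnv } from '@aztec/archiver';
import { openTmpStore } from '@aztec/kv-store/utils';

const config = getArchiverConfigFromEnv();
const store = openTmpStore();

// With ARCHIVER_URL unset, this creates and syncs a local archiver backed by `store`;
// with ARCHIVER_URL set, it returns a client for the remote archiver instead.
const archiver = await createArchiver(config, store);
```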
5 changes: 3 additions & 2 deletions yarn-project/archiver/src/index.ts
@@ -5,11 +5,12 @@ import { NoopTelemetryClient } from '@aztec/telemetry-client/noop';
import { createPublicClient, http } from 'viem';
import { localhost } from 'viem/chains';

import { Archiver, getConfigEnvVars } from './archiver/index.js';
import { Archiver, getArchiverConfigFromEnv } from './archiver/index.js';
import { MemoryArchiverStore } from './archiver/memory_archiver_store/memory_archiver_store.js';

export * from './archiver/index.js';
export * from './rpc/index.js';
export * from './factory.js';

const log = createDebugLogger('aztec:archiver');

@@ -18,7 +19,7 @@ const log = createDebugLogger('aztec:archiver');
*/
// eslint-disable-next-line require-await
async function main() {
const config = getConfigEnvVars();
const config = getArchiverConfigFromEnv();
const { rpcUrl, l1Contracts } = config;

const publicClient = createPublicClient({
11 changes: 4 additions & 7 deletions yarn-project/aztec-node/src/aztec-node/config.ts
@@ -1,8 +1,8 @@
import { type ArchiverConfig, getConfigEnvVars as getArchiverVars } from '@aztec/archiver';
import { type ArchiverConfig, getArchiverConfigFromEnv as getArchiverVars } from '@aztec/archiver';
import { type P2PConfig, getP2PConfigEnvVars } from '@aztec/p2p';
import { type ProverClientConfig, getProverEnvVars } from '@aztec/prover-client';
import { type SequencerClientConfig, getConfigEnvVars as getSequencerVars } from '@aztec/sequencer-client';
import { getConfigEnvVars as getWorldStateVars } from '@aztec/world-state';
import { type WorldStateConfig, getWorldStateConfigFromEnv as getWorldStateVars } from '@aztec/world-state';

import { readFileSync } from 'fs';
import { dirname, resolve } from 'path';
@@ -14,23 +14,21 @@ import { fileURLToPath } from 'url';
export type AztecNodeConfig = ArchiverConfig &
SequencerClientConfig &
ProverClientConfig &
WorldStateConfig &
P2PConfig & {
/** Whether the sequencer is disabled for this node. */
disableSequencer: boolean;

/** Whether the prover is disabled for this node. */
disableProver: boolean;

/** A URL for an archiver service that the node will use. */
archiverUrl?: string;
};

/**
* Returns the config of the aztec node from environment variables with reasonable defaults.
* @returns A valid aztec node config.
*/
export function getConfigEnvVars(): AztecNodeConfig {
const { SEQ_DISABLED, PROVER_DISABLED = '', ARCHIVER_URL } = process.env;
const { SEQ_DISABLED, PROVER_DISABLED = '' } = process.env;

const allEnvVars: AztecNodeConfig = {
...getSequencerVars(),
@@ -39,7 +37,6 @@ export function getConfigEnvVars(): AztecNodeConfig {
...getWorldStateVars(),
...getProverEnvVars(),
disableSequencer: !!SEQ_DISABLED,
archiverUrl: ARCHIVER_URL,
disableProver: ['1', 'true'].includes(PROVER_DISABLED),
};
