Recreate submitProof method and have sequencer push to it
spalladino committed Jul 8, 2024
1 parent 22c64f8 commit 77c0647
Showing 7 changed files with 186 additions and 15 deletions.
43 changes: 43 additions & 0 deletions l1-contracts/src/core/Rollup.sol
@@ -142,6 +142,49 @@ contract Rollup is IRollup {
emit L2BlockProcessed(header.globalVariables.blockNumber);
}

function submitProof(
bytes calldata _header,
bytes32 _archive,
bytes calldata _aggregationObject,
bytes calldata _proof
) external override(IRollup) {
HeaderLib.Header memory header = HeaderLib.decode(_header);

bytes32[] memory publicInputs =
new bytes32[](3 + Constants.HEADER_LENGTH + Constants.AGGREGATION_OBJECT_LENGTH);
// the archive tree root
publicInputs[0] = _archive;
// this is the _next_ available leaf in the archive tree
// normally this should be equal to the block number (since leaves are 0-indexed and blocks 1-indexed)
// but in yarn-project/merkle-tree/src/new_tree.ts we prefill the tree so that block N is in leaf N
publicInputs[1] = bytes32(header.globalVariables.blockNumber + 1);

publicInputs[2] = vkTreeRoot;

bytes32[] memory headerFields = HeaderLib.toFields(header);
for (uint256 i = 0; i < headerFields.length; i++) {
publicInputs[i + 3] = headerFields[i];
}

// the block proof is recursive, which means it comes with an aggregation object
// this snippet copies it into the public inputs needed for verification
// it also guards against empty _aggregationObject used with mocked proofs
uint256 aggregationLength = _aggregationObject.length / 32;
for (uint256 i = 0; i < Constants.AGGREGATION_OBJECT_LENGTH && i < aggregationLength; i++) {
bytes32 part;
assembly {
part := calldataload(add(_aggregationObject.offset, mul(i, 32)))
}
publicInputs[i + 3 + Constants.HEADER_LENGTH] = part;
}

if (!verifier.verify(_proof, publicInputs)) {
revert Errors.Rollup__InvalidProof();
}

emit L2ProofVerified(header.globalVariables.blockNumber);
}

function _computePublicInputHash(bytes calldata _header, bytes32 _archive)
internal
pure
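For context, the publicInputs layout built by submitProof above is: archive root, next archive-tree leaf index, vk tree root, the HEADER_LENGTH header fields, then up to AGGREGATION_OBJECT_LENGTH aggregation-object fields. A minimal TypeScript sketch of assembling that same layout off-chain (the function name and the Buffer-based signature are assumptions for illustration, not the actual Aztec API):

// Sketch only: mirrors the publicInputs ordering in Rollup.submitProof above.
function buildSubmitProofPublicInputs(
  archiveRoot: Buffer, // 32-byte archive tree root after applying the block
  blockNumber: number, // header.globalVariables.blockNumber
  vkTreeRoot: Buffer, // 32-byte verification key tree root
  headerFields: Buffer[], // HEADER_LENGTH 32-byte fields derived from the header
  aggregationObject: Buffer[], // up to AGGREGATION_OBJECT_LENGTH fields; may be empty for mocked proofs
): Buffer[] {
  const inputs: Buffer[] = [archiveRoot];
  // Next available leaf in the archive tree: block N sits in leaf N, so the next free leaf is N + 1.
  const nextLeaf = Buffer.alloc(32);
  nextLeaf.writeBigUInt64BE(BigInt(blockNumber + 1), 24); // value in the last 8 bytes of a 32-byte BE word
  inputs.push(nextLeaf, vkTreeRoot, ...headerFields, ...aggregationObject);
  return inputs;
}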
8 changes: 8 additions & 0 deletions l1-contracts/src/core/interfaces/IRollup.sol
@@ -4,8 +4,16 @@ pragma solidity >=0.8.18;

interface IRollup {
event L2BlockProcessed(uint256 indexed blockNumber);
event L2ProofVerified(uint256 indexed blockNumber);

function process(bytes calldata _header, bytes32 _archive) external;

function submitProof(
bytes calldata _header,
bytes32 _archive,
bytes calldata _aggregationObject,
bytes calldata _proof
) external;

function setVerifier(address _verifier) external;
}
22 changes: 17 additions & 5 deletions yarn-project/circuit-types/src/stats/stats.ts
@@ -26,10 +26,8 @@ export type L2BlockStats = {
unencryptedLogSize?: number;
};

/** Stats logged for each L1 rollup publish tx.*/
/** Stats logged for each L1 publish tx.*/
export type L1PublishStats = {
/** Name of the event for metrics purposes */
eventName: 'rollup-published-to-l1';
/** Effective gas price of the tx. */
gasPrice: bigint;
/** Effective gas used in the tx. */
@@ -40,7 +38,20 @@
calldataGas: number;
/** Size in bytes of the calldata. */
calldataSize: number;
} & L2BlockStats;
};

/** Stats logged for each L1 rollup publish tx.*/
export type L1PublishBlockStats = {
/** Name of the event for metrics purposes */
eventName: 'rollup-published-to-l1';
} & L1PublishStats &
L2BlockStats;

/** Stats logged for each L1 proof submission tx.*/
export type L1PublishProofStats = {
/** Name of the event for metrics purposes */
eventName: 'proof-published-to-l1';
} & L1PublishStats;

/** Stats logged for synching node chain history. */
export type NodeSyncedChainHistoryStats = {
@@ -271,7 +282,8 @@ export type Stats =
| CircuitSimulationStats
| CircuitWitnessGenerationStats
| PublicDBAccessStats
| L1PublishStats
| L1PublishBlockStats
| L1PublishProofStats
| L2BlockBuiltStats
| L2BlockHandledStats
| NodeSyncedChainHistoryStats
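As a usage note, moving the eventName literal onto the new L1PublishBlockStats and L1PublishProofStats types lets consumers narrow them as a discriminated union. A small sketch (the helper is hypothetical; field names come from the types above):

import { type L1PublishBlockStats, type L1PublishProofStats } from '@aztec/circuit-types/stats';

// Hypothetical helper: narrows the union via the eventName discriminator.
function describePublishTx(stats: L1PublishBlockStats | L1PublishProofStats): string {
  // Fields from the shared L1PublishStats base are available on both variants.
  const base = `gasPrice=${stats.gasPrice} calldataSize=${stats.calldataSize}`;
  return stats.eventName === 'rollup-published-to-l1'
    ? `block publish (${base})` // L2BlockStats fields are also present on this branch
    : `proof publish (${base})`;
}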
@@ -3,7 +3,7 @@ import { BBCircuitVerifier } from '@aztec/bb-prover';
import { AGGREGATION_OBJECT_LENGTH, Fr, HEADER_LENGTH, Proof } from '@aztec/circuits.js';
import { type L1ContractAddresses } from '@aztec/ethereum';
import { type Logger } from '@aztec/foundation/log';
import { BufferReader } from '@aztec/foundation/serialize';
import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize';
import { AvailabilityOracleAbi, RollupAbi } from '@aztec/l1-artifacts';

import { type Anvil } from '@viem/anvil';
@@ -157,7 +157,7 @@ describe('proof_verification', () => {
});
});

describe.skip('Rollup', () => {
describe('Rollup', () => {
let availabilityContract: GetContractReturnType<typeof AvailabilityOracleAbi, typeof walletClient>;
let rollupContract: GetContractReturnType<typeof RollupAbi, typeof walletClient>;

@@ -183,9 +183,11 @@
const args = [
`0x${block.header.toBuffer().toString('hex')}`,
`0x${block.archive.root.toBuffer().toString('hex')}`,
`0x${serializeToBuffer(aggregationObject).toString('hex')}`,
`0x${proof.withoutPublicInputs().toString('hex')}`,
] as const;

await expect(rollupContract.write.process(args)).resolves.toBeDefined();
await expect(rollupContract.write.submitProof(args)).resolves.toBeDefined();
});
});
});
84 changes: 78 additions & 6 deletions yarn-project/sequencer-client/src/publisher/l1-publisher.ts
@@ -1,7 +1,9 @@
import { type L2Block } from '@aztec/circuit-types';
import { type L1PublishStats } from '@aztec/circuit-types/stats';
import { type EthAddress } from '@aztec/circuits.js';
import { type L1PublishBlockStats, type L1PublishProofStats } from '@aztec/circuit-types/stats';
import { type EthAddress, type Header, type Proof } from '@aztec/circuits.js';
import { type Fr } from '@aztec/foundation/fields';
import { createDebugLogger } from '@aztec/foundation/log';
import { serializeToBuffer } from '@aztec/foundation/serialize';
import { InterruptibleSleep } from '@aztec/foundation/sleep';

import pick from 'lodash.pick';
@@ -41,8 +43,10 @@ export type MinimalTransactionReceipt = {
* Pushes txs to the L1 chain and waits for their completion.
*/
export interface L1PublisherTxSender {
/** Returns the EOA used for sending txs to L1. */
getSenderAddress(): Promise<EthAddress>;

/** Returns the address elected for submitting a given block number or zero if anyone can submit. */
getSubmitterAddressForBlock(blockNumber: number): Promise<EthAddress>;

/**
@@ -59,6 +63,13 @@
*/
sendProcessTx(encodedData: L1ProcessArgs): Promise<string | undefined>;

/**
* Sends a tx to the L1 rollup contract with a proof. Returns once the tx has been mined.
* @param submitProofArgs - Serialized header, archive root, aggregation object, and proof for the block.
* @returns The hash of the mined tx.
*/
sendSubmitProofTx(submitProofArgs: L1SubmitProofArgs): Promise<string | undefined>;

/**
* Returns a tx receipt if the tx has been mined.
* @param txHash - Hash of the tx to look for.
@@ -86,9 +97,7 @@ export interface L1PublisherTxSender {
checkIfTxsAreAvailable(block: L2Block): Promise<boolean>;
}

/**
* Encoded block and proof ready to be pushed to the L1 contract.
*/
/** Arguments to the process method of the rollup contract */
export type L1ProcessArgs = {
/** The L2 block header. */
header: Buffer;
Expand All @@ -98,6 +107,18 @@ export type L1ProcessArgs = {
body: Buffer;
};

/** Arguments to the submitProof method of the rollup contract */
export type L1SubmitProofArgs = {
/** The L2 block header. */
header: Buffer;
/** A root of the archive tree after the L2 block is applied. */
archive: Buffer;
/** The proof for the block. */
proof: Buffer;
/** The aggregation object for the block's proof. */
aggregationObject: Buffer;
};

/**
* Publishes L2 blocks to L1. This implementation does *not* retry a transaction in
* the event of network congestion, but should work for local development.
@@ -192,7 +213,7 @@ export class L1Publisher implements L2BlockReceiver {
// Tx was mined successfully
if (receipt.status) {
const tx = await this.txSender.getTransactionStats(txHash);
const stats: L1PublishStats = {
const stats: L1PublishBlockStats = {
...pick(receipt, 'gasPrice', 'gasUsed', 'transactionHash'),
...pick(tx!, 'calldataGas', 'calldataSize'),
...block.getStats(),
@@ -216,6 +237,46 @@
return false;
}

public async submitProof(header: Header, archiveRoot: Fr, aggregationObject: Fr[], proof: Proof): Promise<boolean> {
const txArgs: L1SubmitProofArgs = {
header: header.toBuffer(),
archive: archiveRoot.toBuffer(),
aggregationObject: serializeToBuffer(aggregationObject),
proof: proof.withoutPublicInputs(),
};

// Submit the proof
while (!this.interrupted) {
const txHash = await this.sendSubmitProofTx(txArgs);
if (!txHash) {
break;
}

const receipt = await this.getTransactionReceipt(txHash);
if (!receipt) {
break;
}

// Tx was mined successfully
if (receipt.status) {
const tx = await this.txSender.getTransactionStats(txHash);
const stats: L1PublishProofStats = {
...pick(receipt, 'gasPrice', 'gasUsed', 'transactionHash'),
...pick(tx!, 'calldataGas', 'calldataSize'),
eventName: 'proof-published-to-l1',
};
this.log.info(`Published proof to L1 rollup contract`, stats);
return true;
}

this.log.error(`Rollup.submitProof tx status failed: ${receipt.transactionHash}`);
await this.sleepOrInterrupted();
}

this.log.verbose('Proof submission interrupted.');
return false;
}

/**
* Calling `interrupt` will cause any in progress call to `publishRollup` to return `false` asap.
* Be warned, the call may return false even if the tx subsequently gets successfully mined.
@@ -247,6 +308,17 @@
return areSame;
}

private async sendSubmitProofTx(submitProofArgs: L1SubmitProofArgs): Promise<string | undefined> {
try {
const size = Object.values(submitProofArgs).reduce((acc, arg) => acc + arg.length, 0);
this.log.info(`SubmitProof size=${size} bytes`);
return await this.txSender.sendSubmitProofTx(submitProofArgs);
} catch (err) {
this.log.error(`Rollup submit proof failed`, err);
return undefined;
}
}

private async sendPublishTx(encodedBody: Buffer): Promise<string | undefined> {
while (!this.interrupted) {
try {
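For clarity on how the publisher's serialized aggregation object lines up with the contract, a small sketch: serializeToBuffer concatenates the Fr fields into 32-byte words, which Rollup.submitProof copies one calldataload at a time (the Fr constructor usage and the 32-byte field size are assumptions for illustration):

import { Fr } from '@aztec/foundation/fields';
import { serializeToBuffer } from '@aztec/foundation/serialize';

// Illustration only: the publisher concatenates the aggregation object into 32-byte words,
// which the contract reads with calldataload(_aggregationObject.offset + i * 32).
const aggregationObject: Fr[] = [new Fr(1n), new Fr(2n), new Fr(3n)];
const encoded = serializeToBuffer(aggregationObject); // 3 * 32 = 96 bytes

for (let i = 0; i < encoded.length / 32; i++) {
  const word = encoded.subarray(i * 32, (i + 1) * 32);
  // On the contract side this word ends up at publicInputs[i + 3 + HEADER_LENGTH].
  console.log(`word ${i}: 0x${word.toString('hex')}`);
}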
26 changes: 26 additions & 0 deletions yarn-project/sequencer-client/src/publisher/viem-tx-sender.ts
@@ -23,6 +23,7 @@ import * as chains from 'viem/chains';
import { type TxSenderConfig } from './config.js';
import {
type L1PublisherTxSender,
type L1SubmitProofArgs,
type MinimalTransactionReceipt,
type L1ProcessArgs as ProcessTxArgs,
type TransactionStats,
@@ -169,6 +170,31 @@
return hash;
}

/**
* Sends a tx to the L1 rollup contract with a proof. Returns once the tx has been mined.
* @param submitProofArgs - Serialized header, archive root, aggregation object, and proof.
* @returns The hash of the mined tx.
*/
async sendSubmitProofTx(submitProofArgs: L1SubmitProofArgs): Promise<string | undefined> {
const { header, archive, aggregationObject, proof } = submitProofArgs;
const args = [
`0x${header.toString('hex')}`,
`0x${archive.toString('hex')}`,
`0x${aggregationObject.toString('hex')}`,
`0x${proof.toString('hex')}`,
] as const;

const gas = await this.rollupContract.estimateGas.submitProof(args, {
account: this.account,
});
const hash = await this.rollupContract.write.submitProof(args, {
gas,
account: this.account,
});

return hash;
}

/**
* Gets the chain object for the given chain id.
* @param chainId - Chain id of the target EVM chain.
10 changes: 9 additions & 1 deletion yarn-project/sequencer-client/src/sequencer/sequencer.ts
@@ -294,7 +294,7 @@
await assertBlockHeight();

// Block is proven, now finalise and publish!
const { block } = await this.prover.finaliseBlock();
const { block, aggregationObject, proof } = await this.prover.finaliseBlock();

await assertBlockHeight();

@@ -308,6 +308,14 @@

await this.publishL2Block(block);
this.log.info(`Submitted rollup block ${block.number} with ${processedTxs.length} transactions`);

// Submit the proof if we have configured this sequencer to run with a prover.
// This is temporary while we submit one proof per block, but will have to change once we
// move onto proving batches of multiple blocks at a time.
if (aggregationObject && proof) {
await this.publisher.submitProof(block.header, block.archive.root, aggregationObject, proof);
this.log.info(`Submitted proof for block ${block.number}`);
}
}

/**
