From 910610251e04bd9e50a4cc6da8a3230c20e49be6 Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Tue, 3 Dec 2024 09:36:22 -0300 Subject: [PATCH 01/24] fix(prover): Handle starting blocks out of order in prover (#10350) Fixes issue introduced in #10263 where block states would be stored out of order in the internal epoch proving state, which caused them to be picked up out of order when computing block merges. --------- Co-authored-by: Alex Gherghisan --- .../src/interfaces/epoch-prover.ts | 3 +- .../src/orchestrator/epoch-proving-state.ts | 12 +++--- .../src/orchestrator/orchestrator.ts | 11 ++++-- .../orchestrator/orchestrator_errors.test.ts | 20 +++++----- .../orchestrator_failures.test.ts | 2 +- .../orchestrator_lifecycle.test.ts | 2 +- .../orchestrator_mixed_blocks.test.ts | 4 +- ...rchestrator_multi_public_functions.test.ts | 2 +- .../orchestrator_multiple_blocks.test.ts | 39 ++++++++++++++++--- .../orchestrator_public_functions.test.ts | 2 +- .../orchestrator_single_blocks.test.ts | 6 +-- .../orchestrator_workflow.test.ts | 4 +- .../src/proving_broker/proving_agent.ts | 2 +- .../src/test/bb_prover_full_rollup.test.ts | 4 +- .../prover-node/src/job/epoch-proving-job.ts | 5 ++- 15 files changed, 77 insertions(+), 41 deletions(-) diff --git a/yarn-project/circuit-types/src/interfaces/epoch-prover.ts b/yarn-project/circuit-types/src/interfaces/epoch-prover.ts index b4bef3658a7..16641c23e67 100644 --- a/yarn-project/circuit-types/src/interfaces/epoch-prover.ts +++ b/yarn-project/circuit-types/src/interfaces/epoch-prover.ts @@ -8,9 +8,10 @@ export interface EpochProver extends Omit { /** * Starts a new epoch. Must be the first method to be called. * @param epochNumber - The epoch number. + * @param firstBlockNumber - The block number of the first block in the epoch. * @param totalNumBlocks - The total number of blocks expected in the epoch (must be at least one). **/ - startNewEpoch(epochNumber: number, totalNumBlocks: number): void; + startNewEpoch(epochNumber: number, firstBlockNumber: number, totalNumBlocks: number): void; /** Pads the block with empty txs if it hasn't reached the declared number of txs. 
*/ setBlockCompleted(blockNumber: number, expectedBlockHeader?: Header): Promise; diff --git a/yarn-project/prover-client/src/orchestrator/epoch-proving-state.ts b/yarn-project/prover-client/src/orchestrator/epoch-proving-state.ts index 522d5ba9d70..97ae9e361e9 100644 --- a/yarn-project/prover-client/src/orchestrator/epoch-proving-state.ts +++ b/yarn-project/prover-client/src/orchestrator/epoch-proving-state.ts @@ -50,10 +50,11 @@ export class EpochProvingState { private mergeRollupInputs: BlockMergeRollupInputData[] = []; public rootRollupPublicInputs: RootRollupPublicInputs | undefined; public finalProof: Proof | undefined; - public blocks: BlockProvingState[] = []; + public blocks: (BlockProvingState | undefined)[] = []; constructor( public readonly epochNumber: number, + public readonly firstBlockNumber: number, public readonly totalNumBlocks: number, private completionCallback: (result: ProvingResult) => void, private rejectionCallback: (reason: string) => void, @@ -106,8 +107,9 @@ export class EpochProvingState { archiveTreeRootSiblingPath: Tuple, previousBlockHash: Fr, ): BlockProvingState { + const index = globalVariables.blockNumber.toNumber() - this.firstBlockNumber; const block = new BlockProvingState( - this.blocks.length, + index, numTxs, globalVariables, padArrayEnd(l1ToL2Messages, Fr.ZERO, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP), @@ -119,8 +121,8 @@ export class EpochProvingState { previousBlockHash, this, ); - this.blocks.push(block); - if (this.blocks.length === this.totalNumBlocks) { + this.blocks[index] = block; + if (this.blocks.filter(b => !!b).length === this.totalNumBlocks) { this.provingStateLifecycle = PROVING_STATE_LIFECYCLE.PROVING_STATE_FULL; } return block; @@ -176,7 +178,7 @@ export class EpochProvingState { // Returns a specific transaction proving state public getBlockProvingStateByBlockNumber(blockNumber: number) { - return this.blocks.find(block => block.blockNumber === blockNumber); + return this.blocks.find(block => block?.blockNumber === blockNumber); } // Returns a set of merge rollup inputs diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator.ts b/yarn-project/prover-client/src/orchestrator/orchestrator.ts index 156440b3fd2..713e6350c6b 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator.ts @@ -126,14 +126,14 @@ export class ProvingOrchestrator implements EpochProver { this.paddingTxProof = undefined; } - public startNewEpoch(epochNumber: number, totalNumBlocks: number) { + public startNewEpoch(epochNumber: number, firstBlockNumber: number, totalNumBlocks: number) { const { promise: _promise, resolve, reject } = promiseWithResolvers(); const promise = _promise.catch((reason): ProvingResult => ({ status: 'failure', reason })); if (totalNumBlocks <= 0 || !Number.isInteger(totalNumBlocks)) { throw new Error(`Invalid number of blocks for epoch (got ${totalNumBlocks})`); } logger.info(`Starting epoch ${epochNumber} with ${totalNumBlocks} blocks`); - this.provingState = new EpochProvingState(epochNumber, totalNumBlocks, resolve, reject); + this.provingState = new EpochProvingState(epochNumber, firstBlockNumber, totalNumBlocks, resolve, reject); this.provingPromise = promise; } @@ -336,7 +336,7 @@ export class ProvingOrchestrator implements EpochProver { /** Returns the block as built for a given index. 
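The index is relative to the first block of the epoch, matching the indexing introduced in epoch-proving-state.ts above: for example, with firstBlockNumber 5, block number 7 is stored at (and returned for) index 2.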
*/ public getBlock(index: number): L2Block { - const block = this.provingState?.blocks[index].block; + const block = this.provingState?.blocks[index]?.block; if (!block) { throw new Error(`Block at index ${index} not available`); } @@ -354,7 +354,10 @@ export class ProvingOrchestrator implements EpochProver { }) private padEpoch(): Promise { const provingState = this.provingState!; - const lastBlock = maxBy(provingState.blocks, b => b.blockNumber)?.block; + const lastBlock = maxBy( + provingState.blocks.filter(b => !!b), + b => b!.blockNumber, + )?.block; if (!lastBlock) { return Promise.reject(new Error(`Epoch needs at least one completed block in order to be padded`)); } diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator_errors.test.ts b/yarn-project/prover-client/src/orchestrator/orchestrator_errors.test.ts index f1a9374e949..e17135ccfb7 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator_errors.test.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator_errors.test.ts @@ -26,7 +26,7 @@ describe('prover/orchestrator/errors', () => { it('throws if adding too many transactions', async () => { const txs = times(4, i => context.makeProcessedTx(i + 1)); - orchestrator.startNewEpoch(1, 1); + orchestrator.startNewEpoch(1, 1, 1); await orchestrator.startNewBlock(txs.length, context.globalVariables, []); for (const tx of txs) { @@ -43,7 +43,7 @@ describe('prover/orchestrator/errors', () => { }); it('throws if adding too many blocks', async () => { - orchestrator.startNewEpoch(1, 1); + orchestrator.startNewEpoch(1, 1, 1); await orchestrator.startNewBlock(2, context.globalVariables, []); await orchestrator.setBlockCompleted(context.blockNumber); @@ -59,21 +59,21 @@ describe('prover/orchestrator/errors', () => { }); it('throws if adding a transaction before starting block', async () => { - orchestrator.startNewEpoch(1, 1); + orchestrator.startNewEpoch(1, 1, 1); await expect(async () => await orchestrator.addNewTx(context.makeProcessedTx())).rejects.toThrow( /Block proving state for 1 not found/, ); }); it('throws if completing a block before start', async () => { - orchestrator.startNewEpoch(1, 1); + orchestrator.startNewEpoch(1, 1, 1); await expect(async () => await orchestrator.setBlockCompleted(context.blockNumber)).rejects.toThrow( /Block proving state for 1 not found/, ); }); it('throws if setting an incomplete block as completed', async () => { - orchestrator.startNewEpoch(1, 1); + orchestrator.startNewEpoch(1, 1, 1); await orchestrator.startNewBlock(3, context.globalVariables, []); await expect(async () => await orchestrator.setBlockCompleted(context.blockNumber)).rejects.toThrow( `Block not ready for completion: expecting ${3} more transactions.`, @@ -81,7 +81,7 @@ describe('prover/orchestrator/errors', () => { }); it('throws if adding to a cancelled block', async () => { - orchestrator.startNewEpoch(1, 1); + orchestrator.startNewEpoch(1, 1, 1); await orchestrator.startNewBlock(2, context.globalVariables, []); orchestrator.cancel(); @@ -93,7 +93,7 @@ describe('prover/orchestrator/errors', () => { it.each([[-4], [0], [1], [8.1]] as const)( 'fails to start a block with %i transactions', async (blockSize: number) => { - orchestrator.startNewEpoch(1, 1); + orchestrator.startNewEpoch(1, 1, 1); await expect( async () => await orchestrator.startNewBlock(blockSize, context.globalVariables, []), ).rejects.toThrow(`Invalid number of txs for block (got ${blockSize})`); @@ -101,15 +101,15 @@ describe('prover/orchestrator/errors', () => { ); it.each([[-4], 
[0], [8.1]] as const)('fails to start an epoch with %i blocks', (epochSize: number) => { - orchestrator.startNewEpoch(1, 1); - expect(() => orchestrator.startNewEpoch(1, epochSize)).toThrow( + orchestrator.startNewEpoch(1, 1, 1); + expect(() => orchestrator.startNewEpoch(1, 1, epochSize)).toThrow( `Invalid number of blocks for epoch (got ${epochSize})`, ); }); it('rejects if too many l1 to l2 messages are provided', async () => { const l1ToL2Messages = new Array(100).fill(new Fr(0n)); - orchestrator.startNewEpoch(1, 1); + orchestrator.startNewEpoch(1, 1, 1); await expect( async () => await orchestrator.startNewBlock(2, context.globalVariables, l1ToL2Messages), ).rejects.toThrow('Too many L1 to L2 messages'); diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator_failures.test.ts b/yarn-project/prover-client/src/orchestrator/orchestrator_failures.test.ts index 709f044575f..ea610a11f56 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator_failures.test.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator_failures.test.ts @@ -37,7 +37,7 @@ describe('prover/orchestrator/failures', () => { // We generate them and add them as part of the pending chain const blocks = await timesAsync(3, i => context.makePendingBlock(3, 1, i + 1, j => ({ privateOnly: j === 1 }))); - orchestrator.startNewEpoch(1, 3); + orchestrator.startNewEpoch(1, 1, 3); for (const { block, txs, msgs } of blocks) { // these operations could fail if the target circuit fails before adding all blocks or txs diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator_lifecycle.test.ts b/yarn-project/prover-client/src/orchestrator/orchestrator_lifecycle.test.ts index 5325d22cd01..d24a62d50e3 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator_lifecycle.test.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator_lifecycle.test.ts @@ -37,7 +37,7 @@ describe('prover/orchestrator/lifecycle', () => { return deferred.promise; }); - orchestrator.startNewEpoch(1, 1); + orchestrator.startNewEpoch(1, 1, 1); await orchestrator.startNewBlock(2, context.globalVariables, []); await sleep(1); diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator_mixed_blocks.test.ts b/yarn-project/prover-client/src/orchestrator/orchestrator_mixed_blocks.test.ts index 986b1f7f0c3..8a8924b92af 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator_mixed_blocks.test.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator_mixed_blocks.test.ts @@ -25,7 +25,7 @@ describe('prover/orchestrator/mixed-blocks', () => { const l1ToL2Messages = range(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, 1 + 0x400).map(fr); - context.orchestrator.startNewEpoch(1, 1); + context.orchestrator.startNewEpoch(1, 1, 1); await context.orchestrator.startNewBlock(3, context.globalVariables, l1ToL2Messages); for (const tx of txs) { await context.orchestrator.addNewTx(tx); @@ -41,7 +41,7 @@ describe('prover/orchestrator/mixed-blocks', () => { const l1ToL2Messages = range(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, 1 + 0x400).map(fr); - context.orchestrator.startNewEpoch(1, 1); + context.orchestrator.startNewEpoch(1, 1, 1); await context.orchestrator.startNewBlock(txs.length, context.globalVariables, l1ToL2Messages); for (const tx of txs) { diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator_multi_public_functions.test.ts b/yarn-project/prover-client/src/orchestrator/orchestrator_multi_public_functions.test.ts index a84f751ec63..26997fca8d5 100644 --- 
a/yarn-project/prover-client/src/orchestrator/orchestrator_multi_public_functions.test.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator_multi_public_functions.test.ts @@ -40,7 +40,7 @@ describe('prover/orchestrator/public-functions', () => { tx.data.constants.protocolContractTreeRoot = protocolContractTreeRoot; } - context.orchestrator.startNewEpoch(1, 1); + context.orchestrator.startNewEpoch(1, 1, 1); await context.orchestrator.startNewBlock(numTransactions, context.globalVariables, []); const [processed, failed] = await context.processPublicFunctions( diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator_multiple_blocks.test.ts b/yarn-project/prover-client/src/orchestrator/orchestrator_multiple_blocks.test.ts index baa3ad189ab..c6fc35c1d00 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator_multiple_blocks.test.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator_multiple_blocks.test.ts @@ -20,11 +20,11 @@ describe('prover/orchestrator/multi-block', () => { describe('multiple blocks', () => { it.each([1, 4, 5])('builds an epoch with %s blocks in sequence', async (numBlocks: number) => { logger.info(`Seeding world state with ${numBlocks} blocks`); - const txCount = 1; + const txCount = 2; const blocks = await timesAsync(numBlocks, i => context.makePendingBlock(txCount, 0, i + 1)); logger.info(`Starting new epoch with ${numBlocks}`); - context.orchestrator.startNewEpoch(1, numBlocks); + context.orchestrator.startNewEpoch(1, 1, numBlocks); for (const { block, txs } of blocks) { await context.orchestrator.startNewBlock(Math.max(txCount, 2), block.header.globalVariables, []); for (const tx of txs) { @@ -41,15 +41,17 @@ describe('prover/orchestrator/multi-block', () => { it.each([1, 4, 5])('builds an epoch with %s blocks in parallel', async (numBlocks: number) => { logger.info(`Seeding world state with ${numBlocks} blocks`); - const txCount = 1; + const txCount = 2; const blocks = await timesAsync(numBlocks, i => context.makePendingBlock(txCount, 0, i + 1)); logger.info(`Starting new epoch with ${numBlocks}`); - context.orchestrator.startNewEpoch(1, numBlocks); + context.orchestrator.startNewEpoch(1, 1, numBlocks); await Promise.all( blocks.map(async ({ block, txs }) => { await context.orchestrator.startNewBlock(Math.max(txCount, 2), block.header.globalVariables, []); - await Promise.all(txs.map(tx => context.orchestrator.addNewTx(tx))); + for (const tx of txs) { + await context.orchestrator.addNewTx(tx); + } await context.orchestrator.setBlockCompleted(block.number); }), ); @@ -59,5 +61,32 @@ describe('prover/orchestrator/multi-block', () => { expect(epoch.publicInputs.endBlockNumber.toNumber()).toEqual(numBlocks); expect(epoch.proof).toBeDefined(); }); + + it('builds two consecutive epochs', async () => { + const numEpochs = 2; + const numBlocks = 4; + const txCount = 2; + logger.info(`Seeding world state with ${numBlocks * numEpochs} blocks`); + const blocks = await timesAsync(numBlocks * numEpochs, i => context.makePendingBlock(txCount, 0, i + 1)); + + for (let epochIndex = 0; epochIndex < numEpochs; epochIndex++) { + logger.info(`Starting epoch ${epochIndex + 1} with ${numBlocks} blocks`); + context.orchestrator.startNewEpoch(epochIndex + 1, epochIndex * numBlocks + 1, numBlocks); + await Promise.all( + blocks.slice(epochIndex * numBlocks, (epochIndex + 1) * numBlocks).map(async ({ block, txs }) => { + await context.orchestrator.startNewBlock(Math.max(txCount, 2), block.header.globalVariables, []); + for (const tx of txs) { + 
await context.orchestrator.addNewTx(tx); + } + await context.orchestrator.setBlockCompleted(block.number); + }), + ); + + logger.info('Finalising epoch'); + const epoch = await context.orchestrator.finaliseEpoch(); + expect(epoch.publicInputs.endBlockNumber.toNumber()).toEqual(numBlocks + epochIndex * numBlocks); + expect(epoch.proof).toBeDefined(); + } + }); }); }); diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator_public_functions.test.ts b/yarn-project/prover-client/src/orchestrator/orchestrator_public_functions.test.ts index 7e0221fc716..393329094f1 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator_public_functions.test.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator_public_functions.test.ts @@ -42,7 +42,7 @@ describe('prover/orchestrator/public-functions', () => { const [processed, _] = await context.processPublicFunctions([tx], 1, undefined); // This will need to be a 2 tx block - context.orchestrator.startNewEpoch(1, 1); + context.orchestrator.startNewEpoch(1, 1, 1); await context.orchestrator.startNewBlock(2, context.globalVariables, []); for (const processedTx of processed) { diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator_single_blocks.test.ts b/yarn-project/prover-client/src/orchestrator/orchestrator_single_blocks.test.ts index e790fa7d378..293ff277759 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator_single_blocks.test.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator_single_blocks.test.ts @@ -22,7 +22,7 @@ describe('prover/orchestrator/blocks', () => { describe('blocks', () => { it('builds an empty L2 block', async () => { - context.orchestrator.startNewEpoch(1, 1); + context.orchestrator.startNewEpoch(1, 1, 1); await context.orchestrator.startNewBlock(2, context.globalVariables, []); const block = await context.orchestrator.setBlockCompleted(context.blockNumber); @@ -34,7 +34,7 @@ describe('prover/orchestrator/blocks', () => { const txs = [context.makeProcessedTx(1)]; // This will need to be a 2 tx block - context.orchestrator.startNewEpoch(1, 1); + context.orchestrator.startNewEpoch(1, 1, 1); await context.orchestrator.startNewBlock(2, context.globalVariables, []); for (const tx of txs) { @@ -51,7 +51,7 @@ describe('prover/orchestrator/blocks', () => { const l1ToL2Messages = range(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, 1 + 0x400).map(fr); - context.orchestrator.startNewEpoch(1, 1); + context.orchestrator.startNewEpoch(1, 1, 1); await context.orchestrator.startNewBlock(txs.length, context.globalVariables, l1ToL2Messages); for (const tx of txs) { diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator_workflow.test.ts b/yarn-project/prover-client/src/orchestrator/orchestrator_workflow.test.ts index 7525c9e16ed..ea1dd3b49f4 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator_workflow.test.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator_workflow.test.ts @@ -75,7 +75,7 @@ describe('prover/orchestrator', () => { } }); - orchestrator.startNewEpoch(1, 1); + orchestrator.startNewEpoch(1, 1, 1); await orchestrator.startNewBlock(2, globalVariables, [message]); await sleep(10); @@ -104,7 +104,7 @@ describe('prover/orchestrator', () => { }); it('waits for block to be completed before enqueueing block root proof', async () => { - orchestrator.startNewEpoch(1, 1); + orchestrator.startNewEpoch(1, 1, 1); await orchestrator.startNewBlock(2, globalVariables, []); await orchestrator.addNewTx(context.makeProcessedTx(1)); await 
orchestrator.addNewTx(context.makeProcessedTx(2)); diff --git a/yarn-project/prover-client/src/proving_broker/proving_agent.ts b/yarn-project/prover-client/src/proving_broker/proving_agent.ts index b7ee2eb69f8..6d17c8176b5 100644 --- a/yarn-project/prover-client/src/proving_broker/proving_agent.ts +++ b/yarn-project/prover-client/src/proving_broker/proving_agent.ts @@ -128,7 +128,7 @@ export class ProvingAgent { ) => { if (err) { const retry = err.name === ProvingError.NAME ? (err as ProvingError).retry : false; - this.log.info(`Job id=${jobId} type=${ProvingRequestType[type]} failed err=${err.message} retry=${retry}`); + this.log.error(`Job id=${jobId} type=${ProvingRequestType[type]} failed err=${err.message} retry=${retry}`, err); return this.broker.reportProvingJobError(jobId, err.message, retry); } else if (result) { const outputUri = await this.proofStore.saveProofOutput(jobId, type, result); diff --git a/yarn-project/prover-client/src/test/bb_prover_full_rollup.test.ts b/yarn-project/prover-client/src/test/bb_prover_full_rollup.test.ts index 89827c6e02e..43684e6f1a9 100644 --- a/yarn-project/prover-client/src/test/bb_prover_full_rollup.test.ts +++ b/yarn-project/prover-client/src/test/bb_prover_full_rollup.test.ts @@ -40,7 +40,7 @@ describe('prover/bb_prover/full-rollup', () => { log.info(`Proving epoch with ${blockCount}/${totalBlocks} blocks with ${nonEmptyTxs}/${totalTxs} non-empty txs`); const initialHeader = context.getHeader(0); - context.orchestrator.startNewEpoch(1, totalBlocks); + context.orchestrator.startNewEpoch(1, 1, totalBlocks); for (let blockNum = 1; blockNum <= blockCount; blockNum++) { const globals = makeGlobals(blockNum); @@ -102,7 +102,7 @@ describe('prover/bb_prover/full-rollup', () => { Fr.random, ); - context.orchestrator.startNewEpoch(1, 1); + context.orchestrator.startNewEpoch(1, 1, 1); await context.orchestrator.startNewBlock(numTransactions, context.globalVariables, l1ToL2Messages); diff --git a/yarn-project/prover-node/src/job/epoch-proving-job.ts b/yarn-project/prover-node/src/job/epoch-proving-job.ts index 56deb373a96..02952266b2c 100644 --- a/yarn-project/prover-node/src/job/epoch-proving-job.ts +++ b/yarn-project/prover-node/src/job/epoch-proving-job.ts @@ -65,7 +65,8 @@ export class EpochProvingJob { public async run() { const epochNumber = Number(this.epochNumber); const epochSize = this.blocks.length; - this.log.info(`Starting epoch proving job`, { epochSize, epochNumber, uuid: this.uuid }); + const firstBlockNumber = this.blocks[0].number; + this.log.info(`Starting epoch proving job`, { firstBlockNumber, epochSize, epochNumber, uuid: this.uuid }); this.state = 'processing'; const timer = new Timer(); @@ -73,7 +74,7 @@ export class EpochProvingJob { this.runPromise = promise; try { - this.prover.startNewEpoch(epochNumber, epochSize); + this.prover.startNewEpoch(epochNumber, firstBlockNumber, epochSize); await asyncPool(this.config.parallelBlockLimit, this.blocks, async block => { const globalVariables = block.header.globalVariables; From cc54a1e1ef75b29d160a02d03cf9b29e28d3e4ca Mon Sep 17 00:00:00 2001 From: Cody Gunton Date: Tue, 3 Dec 2024 09:18:40 -0500 Subject: [PATCH 02/24] feat: Client IVC API (#10217) - Establish API in purely virtual class - This is just a first pass. I will continue to work on this before showing dev rel and others to get buy-in. 
- Implement some API functions for ClientIVC: prove, verify, prove_and_verify - Support for constructing CIVC proof for input a single circuit - This is interpreted as a "compiletime stack" - Produces ECCVM and Translator proofs from dummy/empty data; future optimization could avoid. - Add `one_circuit` to CIVC to encode whether the MH part of the CIVC proof should be a hiding circuit (which takes a folding proof) or a proof for the single circuit. - Run almost all ACIR tests against ClientIVC - Previously only ran MegaHonk tests, which are not totally meaningful. - Four are skipped because they fail. These failures are expected to be superficial (see https://github.com/AztecProtocol/barretenberg/issues/1164 and the references to it in the PR's new code). - fold_and_verify and mega honk flows go away in bb, but remain until bb.js alignment. - Delete large log file that should not be track (accounts for big negative diff). --- .github/workflows/ci.yml | 10 +- barretenberg/Earthfile | 16 +- .../flows/fold_and_verify_program.sh | 2 + .../flows/prove_and_verify_client_ivc.sh | 9 + .../flows/prove_and_verify_mega_honk.sh | 5 + .../prove_and_verify_mega_honk_program.sh | 5 + .../flows/prove_then_verify_client_ivc.sh | 8 +- .../flows/prove_then_verify_tube.sh | 2 +- barretenberg/acir_tests/flows/prove_tube.sh | 2 +- barretenberg/acir_tests/run_acir_tests.sh | 16 + barretenberg/cpp/docs/src/sumcheck-outline.md | 4 +- .../barretenberg/bb/acir_format_getters.hpp | 34 + barretenberg/cpp/src/barretenberg/bb/api.hpp | 39 + .../src/barretenberg/bb/api_client_ivc.hpp | 266 ++ .../cpp/src/barretenberg/bb/init_srs.hpp | 37 + barretenberg/cpp/src/barretenberg/bb/main.cpp | 443 +-- .../barretenberg/client_ivc/client_ivc.cpp | 36 +- .../barretenberg/client_ivc/client_ivc.hpp | 8 +- .../client_ivc/client_ivc.test.cpp | 4 +- .../client_ivc_auto_verify.test.cpp | 4 +- .../client_ivc_integration.test.cpp | 2 +- .../client_ivc/test_bench_shared.hpp | 2 +- .../dsl/acir_format/acir_format.hpp | 1 + .../stdlib_circuit_builders/mega_flavor.hpp | 2 +- .../sumcheck/sumcheck_round.test.cpp | 2 +- .../ultra_honk/decider_prover.cpp | 2 +- .../barretenberg/ultra_honk/oink_prover.hpp | 3 + .../barretenberg/ultra_honk/ultra_prover.hpp | 1 + build_manifest.yml | 2 +- full_log.ansi | 2514 ----------------- yarn-project/bb-prover/src/bb/execute.ts | 20 +- 31 files changed, 554 insertions(+), 2947 deletions(-) create mode 100755 barretenberg/acir_tests/flows/prove_and_verify_client_ivc.sh create mode 100644 barretenberg/cpp/src/barretenberg/bb/acir_format_getters.hpp create mode 100644 barretenberg/cpp/src/barretenberg/bb/api.hpp create mode 100644 barretenberg/cpp/src/barretenberg/bb/api_client_ivc.hpp create mode 100644 barretenberg/cpp/src/barretenberg/bb/init_srs.hpp delete mode 100644 full_log.ansi diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 5f4095c671f..27f6df05ded 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -444,7 +444,7 @@ jobs: timeout-minutes: 40 run: earthly-ci --no-output ./+barretenberg-acir-tests-bb-ultra-honk - bb-acir-tests-bb-mega-honk: + bb-acir-tests-bb-client-ivc: needs: [noir-build-acir-tests, build, configure] runs-on: ${{ needs.configure.outputs.username }}-x86 if: needs.configure.outputs.barretenberg == 'true' || needs.configure.outputs.noir == 'true' @@ -453,11 +453,11 @@ jobs: with: { ref: "${{ env.GIT_COMMIT }}" } - uses: ./.github/ci-setup-action with: - concurrency_key: barretenberg-acir-tests-bb-mega-honk-x86 - - name: "BB Native Acir 
Tests (Megahonk)" + concurrency_key: barretenberg-acir-tests-bb-client-ivc-x86 + - name: "BB Native Acir Tests (ClientIVC)" working-directory: ./barretenberg/ timeout-minutes: 40 - run: earthly-ci --no-output ./+barretenberg-acir-tests-bb-mega-honk + run: earthly-ci --no-output ./+barretenberg-acir-tests-bb-client-ivc bb-acir-tests-sol: needs: [noir-build-acir-tests, build, configure] @@ -969,7 +969,7 @@ jobs: - bb-acir-tests-bb - bb-acir-tests-bb-ultra-plonk - bb-acir-tests-bb-ultra-honk - - bb-acir-tests-bb-mega-honk + - bb-acir-tests-bb-client-ivc - bb-acir-tests-sol - bb-acir-tests-sol-honk - bb-acir-tests-bb-js diff --git a/barretenberg/Earthfile b/barretenberg/Earthfile index 6030d85a771..841563b4834 100644 --- a/barretenberg/Earthfile +++ b/barretenberg/Earthfile @@ -27,9 +27,9 @@ barretenberg-acir-tests-bb: ENV VERBOSE=1 # Fold and verify an ACIR program stack using ClientIvc - RUN FLOW=fold_and_verify_program ./run_acir_tests.sh fold_basic + RUN INPUT_TYPE=compiletime_stack FLOW=prove_and_verify_client_ivc ./run_acir_tests.sh fold_basic # Fold and verify an ACIR program stack using ClientIvc, then natively verify the ClientIVC proof. - RUN FLOW=prove_then_verify_client_ivc ./run_acir_tests.sh fold_basic + RUN INPUT_TYPE=compiletime_stack FLOW=prove_then_verify_client_ivc ./run_acir_tests.sh fold_basic # Fold and verify an ACIR program stack using ClientIvc, recursively verify as part of the Tube circuit and produce and verify a Honk proof RUN FLOW=prove_then_verify_tube ./run_acir_tests.sh fold_basic # Run 1_mul through native bb build, all_cmds flow, to test all cli args. @@ -79,7 +79,7 @@ barretenberg-acir-tests-bb-ultra-honk: # Construct and verify a UltraHonk proof for a single program that recursively verifies a Honk proof RUN FLOW=prove_and_verify_ultra_honk ./run_acir_tests.sh verify_honk_proof -barretenberg-acir-tests-bb-mega-honk: +barretenberg-acir-tests-bb-client-ivc: FROM ../build-images/+from-registry COPY ./cpp/+preset-clang-assert/bin/bb /usr/src/barretenberg/cpp/build/bin/bb @@ -92,12 +92,10 @@ barretenberg-acir-tests-bb-mega-honk: ENV TEST_SRC /usr/src/acir_artifacts ENV VERBOSE=1 - # Construct and separately verify a MegaHonk proof for all acir programs - RUN FLOW=prove_then_verify_mega_honk ./run_acir_tests.sh - # Construct and verify a MegaHonk proof for a single arbitrary program - RUN FLOW=prove_and_verify_mega_honk ./run_acir_tests.sh 6_array - # Construct and verify a MegaHonk proof for all ACIR programs using the new witness stack workflow - RUN FLOW=prove_and_verify_mega_honk_program ./run_acir_tests.sh + # Construct and verify a ClientIVC proof for a single arbitrary program + RUN FLOW=prove_and_verify_client_ivc ./run_acir_tests.sh 6_array + # Construct and separately verify a ClientIVC proof for all acir programs + RUN FLOW=prove_then_verify_client_ivc CLIENT_IVC_SKIPS=true ./run_acir_tests.sh barretenberg-acir-tests-sol: FROM ../build-images/+from-registry diff --git a/barretenberg/acir_tests/flows/fold_and_verify_program.sh b/barretenberg/acir_tests/flows/fold_and_verify_program.sh index 870873befcd..1157e802bc0 100755 --- a/barretenberg/acir_tests/flows/fold_and_verify_program.sh +++ b/barretenberg/acir_tests/flows/fold_and_verify_program.sh @@ -1,6 +1,8 @@ #!/bin/sh set -eu +# this flow is deprecated. currently it is bb.js only. 
for bb it is replaced by: # prove_and_verify --scheme client_ivc --input-type compiletime_stack VFLAG=${VERBOSE:+-v} $BIN fold_and_verify_program $VFLAG -c $CRS_PATH -b ./target/program.json diff --git a/barretenberg/acir_tests/flows/prove_and_verify_client_ivc.sh b/barretenberg/acir_tests/flows/prove_and_verify_client_ivc.sh new file mode 100755 index 00000000000..8931cff33b8 --- /dev/null +++ b/barretenberg/acir_tests/flows/prove_and_verify_client_ivc.sh @@ -0,0 +1,9 @@ +#!/bin/sh +set -eu + +VFLAG=${VERBOSE:+-v} +INFLAG=${INPUT_TYPE=compiletime_stack} + +FLAGS="$CRS_PATH -b ./target/program.json $VFLAG --scheme client_ivc -c --input_type $INFLAG" + +$BIN prove_and_verify $FLAGS diff --git a/barretenberg/acir_tests/flows/prove_and_verify_mega_honk.sh b/barretenberg/acir_tests/flows/prove_and_verify_mega_honk.sh index b22be05cc92..c78845a9cff 100755 --- a/barretenberg/acir_tests/flows/prove_and_verify_mega_honk.sh +++ b/barretenberg/acir_tests/flows/prove_and_verify_mega_honk.sh @@ -3,4 +3,9 @@ set -eu VFLAG=${VERBOSE:+-v} +# this flow is deprecated. currently it is bb.js only. for bb it is replaced by: +# prove_and_verify --scheme client_ivc --input-type compiletime_stack +# NB: In general, it is not meaningful to produce a MegaHonk proof on its own since +# the MegaHonk proof does not attest to the correctness of every possible kind +# of gate that could appear in a Mega execution trace. $BIN prove_and_verify_mega_honk $VFLAG -c $CRS_PATH -b ./target/program.json diff --git a/barretenberg/acir_tests/flows/prove_and_verify_mega_honk_program.sh b/barretenberg/acir_tests/flows/prove_and_verify_mega_honk_program.sh index 21e15fbf7c5..666607e86ee 100755 --- a/barretenberg/acir_tests/flows/prove_and_verify_mega_honk_program.sh +++ b/barretenberg/acir_tests/flows/prove_and_verify_mega_honk_program.sh @@ -3,4 +3,9 @@ set -eu VFLAG=${VERBOSE:+-v} +# this flow is deprecated. currently it is bb.js only. for bb it is replaced by: +# prove_and_verify --scheme client_ivc --input-type compiletime_stack +# NB: In general, it is not meaningful to produce a MegaHonk proof on its own since +# the MegaHonk proof does not attest to the correctness of every possible kind +# of gate that could appear in a Mega execution trace. 
$BIN prove_and_verify_mega_honk_program $VFLAG -c $CRS_PATH -b ./target/program.json diff --git a/barretenberg/acir_tests/flows/prove_then_verify_client_ivc.sh b/barretenberg/acir_tests/flows/prove_then_verify_client_ivc.sh index eda013f0494..846e48339c8 100755 --- a/barretenberg/acir_tests/flows/prove_then_verify_client_ivc.sh +++ b/barretenberg/acir_tests/flows/prove_then_verify_client_ivc.sh @@ -3,7 +3,9 @@ set -eu VFLAG=${VERBOSE:+-v} BFLAG="-b ./target/program.json" -FLAGS="-c $CRS_PATH $VFLAG" +INFLAG=${INPUT_TYPE=compiletime_stack} -$BIN client_ivc_prove_output_all $FLAGS $BFLAG -$BIN verify_client_ivc $FLAGS +FLAGS="--scheme client_ivc -c $CRS_PATH $VFLAG" + +$BIN prove $FLAGS $BFLAG --input_type $INFLAG +$BIN verify $FLAGS diff --git a/barretenberg/acir_tests/flows/prove_then_verify_tube.sh b/barretenberg/acir_tests/flows/prove_then_verify_tube.sh index c73babf27c1..dfc298ccd5b 100755 --- a/barretenberg/acir_tests/flows/prove_then_verify_tube.sh +++ b/barretenberg/acir_tests/flows/prove_then_verify_tube.sh @@ -5,7 +5,7 @@ mkdir -p ./proofs VFLAG=${VERBOSE:+-v} -$BIN client_ivc_prove_output_all $VFLAG -c $CRS_PATH -b ./target/program.json +$BIN prove --scheme client_ivc --input_type compiletime_stack $VFLAG -c $CRS_PATH -b ./target/program.json $BIN prove_tube -k vk -p proof -c $CRS_PATH $VFLAG $BIN verify_tube -k vk -p proof -c $CRS_PATH $VFLAG diff --git a/barretenberg/acir_tests/flows/prove_tube.sh b/barretenberg/acir_tests/flows/prove_tube.sh index 111ede2da48..d3798c0add7 100644 --- a/barretenberg/acir_tests/flows/prove_tube.sh +++ b/barretenberg/acir_tests/flows/prove_tube.sh @@ -5,5 +5,5 @@ VFLAG=${VERBOSE:+-v} BFLAG="-b ./target/program.json" FLAGS="-c $CRS_PATH $VFLAG" -$BIN client_ivc_prove_output_all $VFLAG -c $CRS_PATH -b ./target/program.json +$BIN prove --scheme client_ivc --input_type compiletime_stack $VFLAG -c $CRS_PATH -b ./target/program.json $BIN prove_tube -k vk -p proof $FLAGS diff --git a/barretenberg/acir_tests/run_acir_tests.sh b/barretenberg/acir_tests/run_acir_tests.sh index b31b8708e89..a506eedf818 100755 --- a/barretenberg/acir_tests/run_acir_tests.sh +++ b/barretenberg/acir_tests/run_acir_tests.sh @@ -14,6 +14,7 @@ trap handle_sigchild SIGCHLD BIN=${BIN:-../cpp/build/bin/bb} FLOW=${FLOW:-prove_and_verify} HONK=${HONK:-false} +CLIENT_IVC_SKIPS=${CLIENT_IVC_SKIPS:-false} CRS_PATH=~/.bb-crs BRANCH=master VERBOSE=${VERBOSE:-} @@ -57,6 +58,21 @@ if [ "$HONK" = true ]; then SKIP_ARRAY+=(single_verify_proof double_verify_proof double_verify_nested_proof) fi +if [ "$CLIENT_IVC_SKIPS" = true ]; then + # At least for now, skip folding tests that fail when run against ClientIVC. + # This is not a regression--folding was not being properly tested. + # TODO(https://github.com/AztecProtocol/barretenberg/issues/1164): Resolve this + # The reason for failure is that compile-time folding, as initially conceived, is + # only supported by ClientIVC through hacks. ClientIVC in Aztec is ultimately to be + # used through runtime folding, since the kernels that are needed are detected and + # constructed at runtime in Aztec's typescript proving interface. ClientIVC appends + # folding verifiers and does databus and Goblin merge work depending on its inputs, + # detecting which circuits are Aztec kernels. These tests may simple fail for trivial + # reasons, e.g. because the number of circuits in the stack is odd. 
+ SKIP_ARRAY+=(fold_basic_nested_call fold_fibonacci fold_numeric_generic_poseidon ram_blowup_regression) +fi + + function test() { cd $1 diff --git a/barretenberg/cpp/docs/src/sumcheck-outline.md b/barretenberg/cpp/docs/src/sumcheck-outline.md index 651ce0189d1..272c33b4661 100644 --- a/barretenberg/cpp/docs/src/sumcheck-outline.md +++ b/barretenberg/cpp/docs/src/sumcheck-outline.md @@ -195,9 +195,9 @@ Observe that \f$ G \f$ has several important properties - The coefficients of \f$ G \f$ are independent and uniformly distributed. - Evaluations of \f$ G \f$ at \f$ \vec \ell \in \{0,1\}^d\f$ and related Sumcheck Round Univariates are efficiently computable. -The first two properties imply that the evaluations of Sumcheck Round Univariates for \f$G\f$ are independent and uniformly distributed. We call them Libra Round Univarites. +The first two properties imply that the evaluations of Sumcheck Round Univariates for \f$G\f$ are independent and uniformly distributed. We call them Libra Round Univariates. -Consider Round Univariates for \f$ \tilde{F} + \texttt{libra_challenge}\cdot G\f$ which are the sums of the Sumcheck Round Univariates for \f$ \tilde{F} \f$ and Libra Round Univarites multiplied by the challenge. +Consider Round Univariates for \f$ \tilde{F} + \texttt{libra_challenge}\cdot G\f$ which are the sums of the Sumcheck Round Univariates for \f$ \tilde{F} \f$ and Libra Round Univariates multiplied by the challenge. The fact that the degrees of Libra Round Univariates are big enough (i.e. \f$ \tilde{D}\geq D \f$) and that their evaluations are random imply that the evaluations \f$ \tilde{S}^i(0),\ldots,\tilde{S}^i(\tilde D)\f$ defined in [Compute Round Univariates](#ComputeRoundUnivariates) are now masked by the evaluations of Libra Round Univariates. These evaluations are described explicitly [below](#LibraRoundUnivariates). 
### Example {#LibraPolynomialExample} diff --git a/barretenberg/cpp/src/barretenberg/bb/acir_format_getters.hpp b/barretenberg/cpp/src/barretenberg/bb/acir_format_getters.hpp new file mode 100644 index 00000000000..9e1023c3722 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/bb/acir_format_getters.hpp @@ -0,0 +1,34 @@ +#pragma once +#include "barretenberg/bb/config.hpp" +#include "barretenberg/bb/file_io.hpp" +#include "barretenberg/bb/get_bytecode.hpp" +#include "barretenberg/dsl/acir_format/acir_format.hpp" +#include "barretenberg/dsl/acir_format/acir_to_constraint_buf.hpp" + +namespace bb { + +acir_format::WitnessVector get_witness(std::string const& witness_path) +{ + auto witness_data = get_bytecode(witness_path); + return acir_format::witness_buf_to_witness_data(witness_data); +} + +acir_format::AcirFormat get_constraint_system(std::string const& bytecode_path, bool honk_recursion) +{ + auto bytecode = get_bytecode(bytecode_path); + return acir_format::circuit_buf_to_acir_format(bytecode, honk_recursion); +} + +acir_format::WitnessVectorStack get_witness_stack(std::string const& witness_path) +{ + auto witness_data = get_bytecode(witness_path); + return acir_format::witness_buf_to_witness_stack(witness_data); +} + +std::vector get_constraint_systems(std::string const& bytecode_path, bool honk_recursion) +{ + auto bytecode = get_bytecode(bytecode_path); + return acir_format::program_buf_to_acir_format(bytecode, honk_recursion); +} + +} // namespace bb \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/bb/api.hpp b/barretenberg/cpp/src/barretenberg/bb/api.hpp new file mode 100644 index 00000000000..f33568f1869 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/bb/api.hpp @@ -0,0 +1,39 @@ +#pragma once +#include + +namespace bb { + +class API { + public: + struct Flags { + std::optional output_type; // bytes, fields, bytes_and_fields, fields_msgpack + std::optional input_type; // compiletime_stack, runtime_stack + }; + + virtual void prove(const Flags& flags, + const std::filesystem::path& bytecode_path, + const std::filesystem::path& witness_path, + const std::filesystem::path& output_dir) = 0; + + virtual bool verify(const Flags& flags, + const std::filesystem::path& proof_path, + const std::filesystem::path& vk_path) = 0; + + virtual bool prove_and_verify(const Flags& flags, + const std::filesystem::path& bytecode_path, + const std::filesystem::path& witness_path) = 0; + + virtual void gates(const Flags& flags, + const std::filesystem::path& bytecode_path, + const std::filesystem::path& witness_path) = 0; + + virtual void contract(const Flags& flags, + const std::filesystem::path& output_path, + const std::filesystem::path& vk_path) = 0; + + virtual void to_fields(const Flags& flags, + const std::filesystem::path& proof_path, + const std::filesystem::path& vk_path, + const std::filesystem::path& output_path) = 0; +}; +} // namespace bb diff --git a/barretenberg/cpp/src/barretenberg/bb/api_client_ivc.hpp b/barretenberg/cpp/src/barretenberg/bb/api_client_ivc.hpp new file mode 100644 index 00000000000..37b251bd8cf --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/bb/api_client_ivc.hpp @@ -0,0 +1,266 @@ +#pragma once + +#include "barretenberg/bb/acir_format_getters.hpp" +#include "barretenberg/bb/api.hpp" +#include "barretenberg/bb/init_srs.hpp" +#include "barretenberg/common/throw_or_abort.hpp" +#include "libdeflate.h" + +namespace bb { + +template std::shared_ptr read_to_shared_ptr(const std::filesystem::path& path) +{ + return 
std::make_shared(from_buffer(read_file(path))); +}; + +// TODO(#7371): this could probably be more idiomatic +template T unpack_from_file(const std::filesystem::path& filename) +{ + std::ifstream fin; + fin.open(filename, std::ios::ate | std::ios::binary); + if (!fin.is_open()) { + throw std::invalid_argument("file not found"); + } + if (fin.tellg() == -1) { + throw std::invalid_argument("something went wrong"); + } + + uint64_t fsize = static_cast(fin.tellg()); + fin.seekg(0, std::ios_base::beg); + + T result; + char* encoded_data = new char[fsize]; + fin.read(encoded_data, static_cast(fsize)); + msgpack::unpack(encoded_data, fsize).get().convert(result); + return result; +} + +// TODO(#7371) find a home for this +acir_format::WitnessVector witness_map_to_witness_vector(std::map const& witness_map) +{ + acir_format::WitnessVector wv; + size_t index = 0; + for (auto& e : witness_map) { + uint64_t value = std::stoull(e.first); + // ACIR uses a sparse format for WitnessMap where unused witness indices may be left unassigned. + // To ensure that witnesses sit at the correct indices in the `WitnessVector`, we fill any indices + // which do not exist within the `WitnessMap` with the dummy value of zero. + while (index < value) { + wv.push_back(fr(0)); + index++; + } + wv.push_back(fr(uint256_t(e.second))); + index++; + } + return wv; +} + +std::vector decompress(uint8_t* bytes, size_t size) +{ + std::vector content; + // initial size guess + content.resize(1024ULL * 128ULL); + for (;;) { + auto decompressor = std::unique_ptr{ + libdeflate_alloc_decompressor(), libdeflate_free_decompressor + }; + size_t actual_size = 0; + libdeflate_result decompress_result = libdeflate_gzip_decompress( + decompressor.get(), bytes, size, std::data(content), std::size(content), &actual_size); + if (decompress_result == LIBDEFLATE_INSUFFICIENT_SPACE) { + // need a bigger buffer + content.resize(content.size() * 2); + continue; + } + if (decompress_result == LIBDEFLATE_BAD_DATA) { + throw std::invalid_argument("bad gzip data in bb main"); + } + content.resize(actual_size); + break; + } + return content; +} + +class ClientIVCAPI : public API { + static std::vector _build_folding_stack(const std::string& input_type, + const std::filesystem::path& bytecode_path, + const std::filesystem::path& witness_path) + { + using namespace acir_format; + + std::vector folding_stack; + + // TODO(https://github.com/AztecProtocol/barretenberg/issues/1162): Efficiently unify ACIR stack parsing + if (input_type == "compiletime_stack") { + auto program_stack = + acir_format::get_acir_program_stack(bytecode_path, witness_path, /*honk_recursion=*/false); + // Accumulate the entire program stack into the IVC + while (!program_stack.empty()) { + auto stack_item = program_stack.back(); + folding_stack.push_back(AcirProgram{ stack_item.constraints, stack_item.witness }); + program_stack.pop_back(); + } + } + + if (input_type == "runtime_stack") { + std::vector gzipped_bincodes; + std::vector witness_data; + gzipped_bincodes = unpack_from_file>(bytecode_path); + witness_data = unpack_from_file>(witness_path); + for (auto [bincode, wit] : zip_view(gzipped_bincodes, witness_data)) { + // TODO(#7371) there is a lot of copying going on in bincode, we should make sure this writes as a + // buffer in the future + std::vector constraint_buf = + decompress(reinterpret_cast(bincode.data()), bincode.size()); // NOLINT + std::vector witness_buf = + decompress(reinterpret_cast(wit.data()), wit.size()); // NOLINT + + AcirFormat constraints = 
circuit_buf_to_acir_format(constraint_buf, /*honk_recursion=*/false); + WitnessVector witness = witness_buf_to_witness_data(witness_buf); + + folding_stack.push_back(AcirProgram{ constraints, witness }); + } + } + + return folding_stack; + }; + + static ClientIVC _accumulate(std::vector& folding_stack) + { + using Builder = MegaCircuitBuilder; + using Program = acir_format::AcirProgram; + + using namespace acir_format; + + // TODO(https://github.com/AztecProtocol/barretenberg/issues/1163) set these dynamically + init_bn254_crs(1 << 20); + init_grumpkin_crs(1 << 15); + + // TODO(#7371) dedupe this with the rest of the similar code + // TODO(https://github.com/AztecProtocol/barretenberg/issues/1101): remove use of auto_verify_mode + ClientIVC ivc{ { E2E_FULL_TEST_STRUCTURE }, /*auto_verify_mode=*/true }; + + // Accumulate the entire program stack into the IVC + // TODO(https://github.com/AztecProtocol/barretenberg/issues/1116): remove manual setting of is_kernel once + // databus has been integrated into noir kernel programs + bool is_kernel = false; + for (Program& program : folding_stack) { + // Construct a bberg circuit from the acir representation then accumulate it into the IVC + Builder circuit = acir_format::create_circuit( + program.constraints, true, 0, program.witness, false, ivc.goblin.op_queue); + + // Set the internal is_kernel flag based on the local mechanism only if it has not already been set to true + if (!circuit.databus_propagation_data.is_kernel) { + circuit.databus_propagation_data.is_kernel = is_kernel; + } + is_kernel = !is_kernel; + + // Do one step of ivc accumulator or, if there is only one circuit in the stack, prove that circuit. In this + // case, no work is added to the Goblin opqueue, but VM proofs for trivials inputs are produced. + ivc.accumulate(circuit, /*one_circuit=*/folding_stack.size() == 1); + } + + return ivc; + }; + + public: + void prove(const API::Flags& flags, + const std::filesystem::path& bytecode_path, + const std::filesystem::path& witness_path, + const std::filesystem::path& output_dir) override + { + if (!flags.output_type || *flags.output_type != "fields_msgpack") { + throw_or_abort("No output_type or output_type not supported"); + } + + if (!flags.input_type || !(*flags.input_type == "compiletime_stack" || *flags.input_type == "runtime_stack")) { + throw_or_abort("No input_type or input_type not supported"); + } + + std::vector folding_stack = + _build_folding_stack(*flags.input_type, bytecode_path, witness_path); + ClientIVC ivc = _accumulate(folding_stack); + ClientIVC::Proof proof = ivc.prove(); + + // Write the proof and verification keys into the working directory in 'binary' format (in practice it seems + // this directory is passed by bb.js) + vinfo("writing ClientIVC proof and vk..."); + write_file(output_dir / "client_ivc_proof", to_buffer(proof)); + + auto eccvm_vk = std::make_shared(ivc.goblin.get_eccvm_proving_key()); + auto translator_vk = + std::make_shared(ivc.goblin.get_translator_proving_key()); + write_file(output_dir / "client_ivc_vk", + to_buffer(ClientIVC::VerificationKey{ ivc.honk_vk, eccvm_vk, translator_vk })); + }; + + /** + * @brief Verifies a client ivc proof and writes the result to stdout + * + * Communication: + * - proc_exit: A boolean value is returned indicating whether the proof is valid. + * an exit code of 0 will be returned for success and 1 for failure. 
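+ * For reference, the prove_then_verify_client_ivc flow in this PR exercises this as `bb verify --scheme client_ivc -c $CRS_PATH`.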
+ * + * @param proof_path Path to the file containing the serialized proof + * @param vk_path Path to the serialized verification key of the final (MegaHonk) circuit in the stack + * @param accumualtor_path Path to the file containing the serialized protogalaxy accumulator + * @return true (resp., false) if the proof is valid (resp., invalid). + */ + bool verify([[maybe_unused]] const API::Flags& flags, + const std::filesystem::path& proof_path, + const std::filesystem::path& vk_path) override + { + // TODO(https://github.com/AztecProtocol/barretenberg/issues/1163): Set these dynamically + init_bn254_crs(1); + init_grumpkin_crs(1 << 15); + + const auto proof = from_buffer(read_file(proof_path)); + const auto vk = from_buffer(read_file(vk_path)); + + vk.mega->pcs_verification_key = std::make_shared>(); + vk.eccvm->pcs_verification_key = + std::make_shared>(vk.eccvm->circuit_size + 1); + vk.translator->pcs_verification_key = std::make_shared>(); + + const bool verified = ClientIVC::verify(proof, vk); + vinfo("verified: ", verified); + return verified; + }; + + bool prove_and_verify(const API::Flags& flags, + const std::filesystem::path& bytecode_path, + const std::filesystem::path& witness_path) override + { + if (!flags.input_type || !(*flags.input_type == "compiletime_stack" || *flags.input_type == "runtime_stack")) { + throw_or_abort("No input_type or input_type not supported"); + } + std::vector folding_stack = + _build_folding_stack(*flags.input_type, bytecode_path, witness_path); + ClientIVC ivc = _accumulate(folding_stack); + const bool verified = ivc.prove_and_verify(); + return verified; + }; + + void gates([[maybe_unused]] const API::Flags& flags, + [[maybe_unused]] const std::filesystem::path& bytecode_path, + [[maybe_unused]] const std::filesystem::path& witness_path) override + { + throw_or_abort("API function not implemented"); + }; + + void contract([[maybe_unused]] const API::Flags& flags, + [[maybe_unused]] const std::filesystem::path& output_path, + [[maybe_unused]] const std::filesystem::path& vk_path) override + { + throw_or_abort("API function not implemented"); + }; + + void to_fields([[maybe_unused]] const API::Flags& flags, + [[maybe_unused]] const std::filesystem::path& proof_path, + [[maybe_unused]] const std::filesystem::path& vk_path, + [[maybe_unused]] const std::filesystem::path& output_path) override + { + throw_or_abort("API function not implemented"); + }; +}; +} // namespace bb diff --git a/barretenberg/cpp/src/barretenberg/bb/init_srs.hpp b/barretenberg/cpp/src/barretenberg/bb/init_srs.hpp new file mode 100644 index 00000000000..8d8780f251e --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/bb/init_srs.hpp @@ -0,0 +1,37 @@ +#include "get_bn254_crs.hpp" +#include "get_grumpkin_crs.hpp" + +namespace bb { +std::string getHomeDir() +{ + char* home = std::getenv("HOME"); + return home != nullptr ? std::string(home) : "./"; +} + +std::string CRS_PATH = getHomeDir() + "/.bb-crs"; + +/** + * @brief Initialize the global crs_factory for bn254 based on a known dyadic circuit size + * + * @param dyadic_circuit_size power-of-2 circuit size + */ +void init_bn254_crs(size_t dyadic_circuit_size) +{ + // Must +1 for Plonk only! 
+ auto bn254_g1_data = get_bn254_g1_data(CRS_PATH, dyadic_circuit_size + 1); + auto bn254_g2_data = get_bn254_g2_data(CRS_PATH); + srs::init_crs_factory(bn254_g1_data, bn254_g2_data); +} + +/** + * @brief Initialize the global crs_factory for grumpkin based on a known dyadic circuit size + * @details Grumpkin crs is required only for the ECCVM + * + * @param dyadic_circuit_size power-of-2 circuit size + */ +void init_grumpkin_crs(size_t eccvm_dyadic_circuit_size) +{ + auto grumpkin_g1_data = get_grumpkin_g1_data(CRS_PATH, eccvm_dyadic_circuit_size + 1); + srs::init_grumpkin_crs_factory(grumpkin_g1_data); +} +} // namespace bb \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/bb/main.cpp b/barretenberg/cpp/src/barretenberg/bb/main.cpp index a994b0b5929..00b0b8a68e0 100644 --- a/barretenberg/cpp/src/barretenberg/bb/main.cpp +++ b/barretenberg/cpp/src/barretenberg/bb/main.cpp @@ -1,22 +1,28 @@ +#include "barretenberg/bb/api.hpp" +#include "barretenberg/bb/api_client_ivc.hpp" #include "barretenberg/bb/file_io.hpp" #include "barretenberg/client_ivc/client_ivc.hpp" +#include "barretenberg/common/benchmark.hpp" #include "barretenberg/common/map.hpp" #include "barretenberg/common/serialize.hpp" +#include "barretenberg/common/timer.hpp" #include "barretenberg/constants.hpp" #include "barretenberg/dsl/acir_format/acir_format.hpp" +#include "barretenberg/dsl/acir_format/acir_to_constraint_buf.hpp" #include "barretenberg/dsl/acir_format/proof_surgeon.hpp" +#include "barretenberg/dsl/acir_proofs/acir_composer.hpp" #include "barretenberg/dsl/acir_proofs/honk_contract.hpp" #include "barretenberg/honk/proof_system/types/proof.hpp" #include "barretenberg/numeric/bitop/get_msb.hpp" #include "barretenberg/plonk/proof_system/proving_key/serialize.hpp" #include "barretenberg/plonk_honk_shared/types/aggregation_object_type.hpp" #include "barretenberg/serialize/cbind.hpp" +#include "barretenberg/srs/global_crs.hpp" #include "barretenberg/stdlib/client_ivc_verifier/client_ivc_recursive_verifier.hpp" #include "barretenberg/stdlib_circuit_builders/ultra_flavor.hpp" #include "barretenberg/stdlib_circuit_builders/ultra_keccak_flavor.hpp" #include "barretenberg/vm/avm/trace/public_inputs.hpp" -#include #ifndef DISABLE_AZTEC_VM #include "barretenberg/vm/avm/generated/flavor.hpp" #include "barretenberg/vm/avm/trace/common.hpp" @@ -24,64 +30,12 @@ #include "barretenberg/vm/aztec_constants.hpp" #include "barretenberg/vm/stats.hpp" #endif -#include "config.hpp" -#include "get_bn254_crs.hpp" -#include "get_bytecode.hpp" -#include "get_grumpkin_crs.hpp" -#include "libdeflate.h" -#include "log.hpp" -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include using namespace bb; -std::string getHomeDir() -{ - char* home = std::getenv("HOME"); - return home != nullptr ? std::string(home) : "./"; -} - -std::string CRS_PATH = getHomeDir() + "/.bb-crs"; - const std::filesystem::path current_path = std::filesystem::current_path(); const auto current_dir = current_path.filename().string(); -/** - * @brief Initialize the global crs_factory for bn254 based on a known dyadic circuit size - * - * @param dyadic_circuit_size power-of-2 circuit size - */ -void init_bn254_crs(size_t dyadic_circuit_size) -{ - // Must +1 for Plonk only! 
- auto bn254_g1_data = get_bn254_g1_data(CRS_PATH, dyadic_circuit_size + 1); - auto bn254_g2_data = get_bn254_g2_data(CRS_PATH); - srs::init_crs_factory(bn254_g1_data, bn254_g2_data); -} - -/** - * @brief Initialize the global crs_factory for grumpkin based on a known dyadic circuit size - * @details Grumpkin crs is required only for the ECCVM - * - * @param dyadic_circuit_size power-of-2 circuit size - */ -void init_grumpkin_crs(size_t eccvm_dyadic_circuit_size) -{ - auto grumpkin_g1_data = get_grumpkin_g1_data(CRS_PATH, eccvm_dyadic_circuit_size + 1); - srs::init_grumpkin_crs_factory(grumpkin_g1_data); -} - // Initializes without loading G1 // TODO(https://github.com/AztecProtocol/barretenberg/issues/811) adapt for grumpkin acir_proofs::AcirComposer verifier_init() @@ -92,30 +46,6 @@ acir_proofs::AcirComposer verifier_init() return acir_composer; } -acir_format::WitnessVector get_witness(std::string const& witness_path) -{ - auto witness_data = get_bytecode(witness_path); - return acir_format::witness_buf_to_witness_data(witness_data); -} - -acir_format::AcirFormat get_constraint_system(std::string const& bytecode_path, bool honk_recursion) -{ - auto bytecode = get_bytecode(bytecode_path); - return acir_format::circuit_buf_to_acir_format(bytecode, honk_recursion); -} - -acir_format::WitnessVectorStack get_witness_stack(std::string const& witness_path) -{ - auto witness_data = get_bytecode(witness_path); - return acir_format::witness_buf_to_witness_stack(witness_data); -} - -std::vector get_constraint_systems(std::string const& bytecode_path, bool honk_recursion) -{ - auto bytecode = get_bytecode(bytecode_path); - return acir_format::program_buf_to_acir_format(bytecode, honk_recursion); -} - std::string to_json(std::vector& data) { return format("[", join(map(data, [](auto fr) { return format("\"", fr, "\""); })), "]"); @@ -255,272 +185,6 @@ bool proveAndVerifyHonkProgram(const std::string& bytecodePath, const bool recur return true; } -// TODO(#7371): this could probably be more idiomatic -template T unpack_from_file(const std::string& filename) -{ - std::ifstream fin; - fin.open(filename, std::ios::ate | std::ios::binary); - if (!fin.is_open()) { - throw std::invalid_argument("file not found"); - } - if (fin.tellg() == -1) { - throw std::invalid_argument("something went wrong"); - } - - uint64_t fsize = static_cast(fin.tellg()); - fin.seekg(0, std::ios_base::beg); - - T result; - char* encoded_data = new char[fsize]; - fin.read(encoded_data, static_cast(fsize)); - msgpack::unpack(encoded_data, fsize).get().convert(result); - return result; -} - -// TODO(#7371) find a home for this -acir_format::WitnessVector witness_map_to_witness_vector(std::map const& witness_map) -{ - acir_format::WitnessVector wv; - size_t index = 0; - for (auto& e : witness_map) { - uint64_t value = std::stoull(e.first); - // ACIR uses a sparse format for WitnessMap where unused witness indices may be left unassigned. - // To ensure that witnesses sit at the correct indices in the `WitnessVector`, we fill any indices - // which do not exist within the `WitnessMap` with the dummy value of zero. 
- while (index < value) { - wv.push_back(fr(0)); - index++; - } - wv.push_back(fr(uint256_t(e.second))); - index++; - } - return wv; -} - -std::vector decompressedBuffer(uint8_t* bytes, size_t size) -{ - std::vector content; - // initial size guess - content.resize(1024ULL * 128ULL); - for (;;) { - auto decompressor = std::unique_ptr{ - libdeflate_alloc_decompressor(), libdeflate_free_decompressor - }; - size_t actual_size = 0; - libdeflate_result decompress_result = libdeflate_gzip_decompress( - decompressor.get(), bytes, size, std::data(content), std::size(content), &actual_size); - if (decompress_result == LIBDEFLATE_INSUFFICIENT_SPACE) { - // need a bigger buffer - content.resize(content.size() * 2); - continue; - } - if (decompress_result == LIBDEFLATE_BAD_DATA) { - throw std::invalid_argument("bad gzip data in bb main"); - } - content.resize(actual_size); - break; - } - return content; -} - -void client_ivc_prove_output_all_msgpack(const std::string& bytecodePath, - const std::string& witnessPath, - const std::string& outputDir) -{ - using Flavor = MegaFlavor; // This is the only option - using Builder = Flavor::CircuitBuilder; - using Program = acir_format::AcirProgram; - using ECCVMVK = ECCVMFlavor::VerificationKey; - using TranslatorVK = TranslatorFlavor::VerificationKey; - - using namespace acir_format; - - init_bn254_crs(1 << 24); - init_grumpkin_crs(1 << 15); - - auto gzipped_bincodes = unpack_from_file>(bytecodePath); - auto witness_data = unpack_from_file>(witnessPath); - std::vector folding_stack; - for (auto [bincode, wit] : zip_view(gzipped_bincodes, witness_data)) { - // TODO(#7371) there is a lot of copying going on in bincode, we should make sure this writes as a buffer in - // the future - std::vector constraint_buf = - decompressedBuffer(reinterpret_cast(bincode.data()), bincode.size()); // NOLINT - std::vector witness_buf = - decompressedBuffer(reinterpret_cast(wit.data()), wit.size()); // NOLINT - - AcirFormat constraints = circuit_buf_to_acir_format(constraint_buf, /*honk_recursion=*/false); - WitnessVector witness = witness_buf_to_witness_data(witness_buf); - - folding_stack.push_back(Program{ constraints, witness }); - } - // TODO(#7371) dedupe this with the rest of the similar code - // TODO(https://github.com/AztecProtocol/barretenberg/issues/1101): remove use of auto_verify_mode - ClientIVC ivc{ { E2E_FULL_TEST_STRUCTURE }, /*auto_verify_mode=*/true }; - - // Accumulate the entire program stack into the IVC - // TODO(https://github.com/AztecProtocol/barretenberg/issues/1116): remove manual setting of is_kernel once databus - // has been integrated into noir kernel programs - bool is_kernel = false; - for (Program& program : folding_stack) { - // Construct a bberg circuit from the acir representation then accumulate it into the IVC - auto circuit = - create_circuit(program.constraints, true, 0, program.witness, false, ivc.goblin.op_queue); - - // Set the internal is_kernel flag based on the local mechanism only if it has not already been set to true - if (!circuit.databus_propagation_data.is_kernel) { - circuit.databus_propagation_data.is_kernel = is_kernel; - } - is_kernel = !is_kernel; - ivc.accumulate(circuit); - } - - // Write the proof and verification keys into the working directory in 'binary' format (in practice it seems this - // directory is passed by bb.js) - std::string vkPath = outputDir + "/client_ivc_vk"; // the vk of the last circuit in the stack - std::string proofPath = outputDir + "/client_ivc_proof"; - - auto proof = 
ivc.prove(); - auto eccvm_vk = std::make_shared(ivc.goblin.get_eccvm_proving_key()); - auto translator_vk = std::make_shared(ivc.goblin.get_translator_proving_key()); - vinfo("ensure valid proof: ", ivc.verify(proof)); - - vinfo("write proof and vk data to files.."); - write_file(proofPath, to_buffer(proof)); - write_file(vkPath, to_buffer(ClientIVC::VerificationKey{ ivc.honk_vk, eccvm_vk, translator_vk })); -} - -template std::shared_ptr read_to_shared_ptr(const std::filesystem::path& path) -{ - return std::make_shared(from_buffer(read_file(path))); -}; - -/** - * @brief Verifies a client ivc proof and writes the result to stdout - * - * Communication: - * - proc_exit: A boolean value is returned indicating whether the proof is valid. - * an exit code of 0 will be returned for success and 1 for failure. - * - * @param proof_path Path to the file containing the serialized proof - * @param vk_path Path to the serialized verification key of the final (MegaHonk) circuit in the stack - * @param accumualtor_path Path to the file containing the serialized protogalaxy accumulator - * @return true (resp., false) if the proof is valid (resp., invalid). - */ -bool verify_client_ivc(const std::filesystem::path& proof_path, const std::filesystem::path& vk_path) -{ - init_bn254_crs(1); - init_grumpkin_crs(1 << 15); - - const auto proof = from_buffer(read_file(proof_path)); - const auto vk = from_buffer(read_file(vk_path)); - - vk.mega->pcs_verification_key = std::make_shared>(); - vk.eccvm->pcs_verification_key = - std::make_shared>(vk.eccvm->circuit_size + 1); - vk.translator->pcs_verification_key = std::make_shared>(); - - const bool verified = ClientIVC::verify(proof, vk); - vinfo("verified: ", verified); - return verified; -} - -bool foldAndVerifyProgram(const std::string& bytecodePath, const std::string& witnessPath) -{ - using Flavor = MegaFlavor; // This is the only option - using Builder = Flavor::CircuitBuilder; - - init_bn254_crs(1 << 22); - init_grumpkin_crs(1 << 16); - - ClientIVC ivc{ { SMALL_TEST_STRUCTURE }, /*auto_verify_mode=*/true }; - - auto program_stack = acir_format::get_acir_program_stack( - bytecodePath, witnessPath, false); // TODO(https://github.com/AztecProtocol/barretenberg/issues/1013): this - // assumes that folding is never done with ultrahonk. - - // Accumulate the entire program stack into the IVC - bool is_kernel = false; - while (!program_stack.empty()) { - auto stack_item = program_stack.back(); - - // Construct a bberg circuit from the acir representation - auto builder = acir_format::create_circuit(stack_item.constraints, - /*recursive=*/true, - 0, - stack_item.witness, - /*honk_recursion=*/false, - ivc.goblin.op_queue); - - // Set the internal is_kernel flag to trigger automatic appending of kernel logic if true - builder.databus_propagation_data.is_kernel = is_kernel; - - ivc.accumulate(builder); - - program_stack.pop_back(); - is_kernel = !is_kernel; // toggle the kernel indicator flag on/off - } - return ivc.prove_and_verify(); -} - -/** - * @brief Recieves an ACIR Program stack that gets accumulated with the ClientIVC logic and produces a client IVC proof. - * - * @param bytecodePath Path to the serialised circuit - * @param witnessPath Path to witness data - * @param outputPath Path to the folder where the proof and verification data are goingt obe wr itten (in practice this - * going to be specified when bb main is called, i.e. as the working directory in typescript). 
- */ -void client_ivc_prove_output_all(const std::string& bytecodePath, - const std::string& witnessPath, - const std::string& outputPath) -{ - using Flavor = MegaFlavor; // This is the only option - using Builder = Flavor::CircuitBuilder; - using ECCVMVK = ECCVMFlavor::VerificationKey; - using TranslatorVK = TranslatorFlavor::VerificationKey; - - init_bn254_crs(1 << 22); - init_grumpkin_crs(1 << 16); - - // TODO(https://github.com/AztecProtocol/barretenberg/issues/1101): remove use of auto_verify_mode - ClientIVC ivc{ { E2E_FULL_TEST_STRUCTURE }, /*auto_verify_mode=*/true }; - - auto program_stack = acir_format::get_acir_program_stack( - bytecodePath, witnessPath, false); // TODO(https://github.com/AztecProtocol/barretenberg/issues/1013): this - // assumes that folding is never done with ultrahonk. - - // Accumulate the entire program stack into the IVC - bool is_kernel = false; - while (!program_stack.empty()) { - auto stack_item = program_stack.back(); - - // Construct a bberg circuit from the acir representation - auto circuit = acir_format::create_circuit( - stack_item.constraints, true, 0, stack_item.witness, false, ivc.goblin.op_queue); - circuit.databus_propagation_data.is_kernel = is_kernel; - is_kernel = !is_kernel; // toggle on/off so every second circuit is intepreted as a kernel - - ivc.accumulate(circuit); - - program_stack.pop_back(); - } - - // Write the proof and verification keys into the working directory in 'binary' format (in practice it seems this - // directory is passed by bb.js) - std::string vkPath = outputPath + "/client_ivc_vk"; // the vk of the last circuit in the stack - std::string proofPath = outputPath + "/client_ivc_proof"; - - auto proof = ivc.prove(); - auto eccvm_vk = std::make_shared(ivc.goblin.get_eccvm_proving_key()); - auto translator_vk = std::make_shared(ivc.goblin.get_translator_proving_key()); - vinfo("ensure valid proof: ", ivc.verify(proof)); - - vinfo("write proof and vk data to files.."); - write_file(proofPath, to_buffer(proof)); - write_file(vkPath, to_buffer(ClientIVC::VerificationKey{ ivc.honk_vk, eccvm_vk, translator_vk })); -} - /** * @brief Creates a Honk Proof for the Tube circuit responsible for recursively verifying a ClientIVC proof. 
* @@ -536,7 +200,7 @@ void prove_tube(const std::string& output_path) using Builder = UltraCircuitBuilder; using GrumpkinVk = bb::VerifierCommitmentKey; - std::string vkPath = output_path + "/client_ivc_vk"; // the vk of the last circuit in the stack + std::string vkPath = output_path + "/client_ivc_vk"; std::string proofPath = output_path + "/client_ivc_proof"; // Note: this could be decreased once we optimise the size of the ClientIVC recursiveve rifier @@ -1083,7 +747,6 @@ void prove_honk(const std::string& bytecodePath, const std::string& outputPath, const bool recursive) { - // using Builder = Flavor::CircuitBuilder; using Prover = UltraProver_; // Construct Honk proof @@ -1408,55 +1071,66 @@ int main(int argc, char* argv[]) return 1; } - std::string command = args[0]; + const API::Flags flags = [&args]() { + return API::Flags{ .output_type = get_option(args, "--output_type", "fields_msgpack"), + .input_type = get_option(args, "--input_type", "compiletime_stack") }; + }(); + + const std::string command = args[0]; vinfo("bb command is: ", command); - std::string bytecode_path = get_option(args, "-b", "./target/program.json"); - std::string witness_path = get_option(args, "-w", "./target/witness.gz"); - std::string proof_path = get_option(args, "-p", "./proofs/proof"); - std::string vk_path = get_option(args, "-k", "./target/vk"); - std::string pk_path = get_option(args, "-r", "./target/pk"); - bool honk_recursion = flag_present(args, "-h"); - bool recursive = flag_present(args, "--recursive"); // Not every flavor handles it. + const std::string proof_system = get_option(args, "--scheme", ""); + const std::string bytecode_path = get_option(args, "-b", "./target/program.json"); + const std::string witness_path = get_option(args, "-w", "./target/witness.gz"); + const std::string proof_path = get_option(args, "-p", "./proofs/proof"); + const std::string vk_path = get_option(args, "-k", "./target/vk"); + const std::string pk_path = get_option(args, "-r", "./target/pk"); + + const bool honk_recursion = flag_present(args, "-h"); + const bool recursive = flag_present(args, "--recursive"); CRS_PATH = get_option(args, "-c", CRS_PATH); + const auto execute_command = [&](const std::string& command, const API::Flags& flags, API& api) { + ASSERT(flags.input_type.has_value()); + ASSERT(flags.output_type.has_value()); + if (command == "prove") { + const std::filesystem::path output_dir = get_option(args, "-o", "./target"); + // TODO(#7371): remove this (msgpack version...) + api.prove(flags, bytecode_path, witness_path, output_dir); + return 0; + } + + if (command == "verify") { + const std::filesystem::path output_dir = get_option(args, "-o", "./target"); + const std::filesystem::path proof_path = output_dir / "client_ivc_proof"; + const std::filesystem::path vk_path = output_dir / "client_ivc_vk"; + + return api.verify(flags, proof_path, vk_path) ? 0 : 1; + } + + if (command == "prove_and_verify") { + return api.prove_and_verify(flags, bytecode_path, witness_path) ? 0 : 1; + } + + throw_or_abort("Invalid command passed to execute_command in bb"); + return 1; + }; + // Skip CRS initialization for any command which doesn't require the CRS. if (command == "--version") { writeStringToStdout(BB_VERSION); return 0; } - if (command == "prove_and_verify") { + + if (proof_system == "client_ivc") { + ClientIVCAPI api; + execute_command(command, flags, api); + } else if (command == "prove_and_verify") { return proveAndVerify(bytecode_path, recursive, witness_path) ? 
0 : 1; - } - if (command == "prove_and_verify_ultra_honk") { + } else if (command == "prove_and_verify_ultra_honk") { return proveAndVerifyHonk(bytecode_path, recursive, witness_path) ? 0 : 1; - } - if (command == "prove_and_verify_mega_honk") { - return proveAndVerifyHonk(bytecode_path, recursive, witness_path) ? 0 : 1; - } - if (command == "prove_and_verify_ultra_honk_program") { + } else if (command == "prove_and_verify_ultra_honk_program") { return proveAndVerifyHonkProgram(bytecode_path, recursive, witness_path) ? 0 : 1; - } - if (command == "prove_and_verify_mega_honk_program") { - return proveAndVerifyHonkProgram(bytecode_path, recursive, witness_path) ? 0 : 1; - } - // TODO(#7371): remove this - if (command == "client_ivc_prove_output_all_msgpack") { - std::filesystem::path output_dir = get_option(args, "-o", "./target"); - client_ivc_prove_output_all_msgpack(bytecode_path, witness_path, output_dir); - return 0; - } - if (command == "verify_client_ivc") { - std::filesystem::path output_dir = get_option(args, "-o", "./target"); - std::filesystem::path proof_path = output_dir / "client_ivc_proof"; - std::filesystem::path vk_path = output_dir / "client_ivc_vk"; - - return verify_client_ivc(proof_path, vk_path) ? 0 : 1; - } - if (command == "fold_and_verify_program") { - return foldAndVerifyProgram(bytecode_path, witness_path) ? 0 : 1; - } - - if (command == "prove") { + } else if (command == "prove") { std::string output_path = get_option(args, "-o", "./proofs/proof"); prove(bytecode_path, witness_path, output_path, recursive); } else if (command == "prove_output_all") { @@ -1474,9 +1148,6 @@ int main(int argc, char* argv[]) } else if (command == "prove_mega_honk_output_all") { std::string output_path = get_option(args, "-o", "./proofs"); prove_honk_output_all(bytecode_path, witness_path, output_path, recursive); - } else if (command == "client_ivc_prove_output_all") { - std::string output_path = get_option(args, "-o", "./target"); - client_ivc_prove_output_all(bytecode_path, witness_path, output_path); } else if (command == "prove_tube") { std::string output_path = get_option(args, "-o", "./target"); prove_tube(output_path); diff --git a/barretenberg/cpp/src/barretenberg/client_ivc/client_ivc.cpp b/barretenberg/cpp/src/barretenberg/client_ivc/client_ivc.cpp index 6412b711c94..4ba9ae098c7 100644 --- a/barretenberg/cpp/src/barretenberg/client_ivc/client_ivc.cpp +++ b/barretenberg/cpp/src/barretenberg/client_ivc/client_ivc.cpp @@ -157,8 +157,9 @@ void ClientIVC::complete_kernel_circuit_logic(ClientCircuit& circuit) * @param precomputed_vk */ void ClientIVC::accumulate(ClientCircuit& circuit, + const bool _one_circuit, const std::shared_ptr& precomputed_vk, - bool mock_vk) + const bool mock_vk) { if (auto_verify_mode && circuit.databus_propagation_data.is_kernel) { complete_kernel_circuit_logic(circuit); @@ -191,13 +192,25 @@ void ClientIVC::accumulate(ClientCircuit& circuit, honk_vk = precomputed_vk ? 
precomputed_vk : std::make_shared(proving_key->proving_key); if (mock_vk) { honk_vk->set_metadata(proving_key->proving_key); + vinfo("set honk vk metadata"); } - vinfo("set honk vk metadata"); - // If this is the first circuit in the IVC, use oink to complete the decider proving key and generate an oink - // proof - if (!initialized) { - OinkProver oink_prover{ proving_key }; + if (_one_circuit) { + one_circuit = _one_circuit; + MegaProver prover{ proving_key }; + vinfo("computing mega proof..."); + mega_proof = prover.prove(); + vinfo("mega proof computed"); + + proving_key->is_accumulator = true; // indicate to PG that it should not run oink on this key + // Initialize the gate challenges to zero for use in first round of folding + proving_key->gate_challenges = std::vector(CONST_PG_LOG_N, 0); + + fold_output.accumulator = proving_key; + } else if (!initialized) { + // If this is the first circuit in the IVC, use oink to complete the decider proving key and generate an oink + // proof + MegaOinkProver oink_prover{ proving_key }; vinfo("computing oink proof..."); oink_prover.prove(); vinfo("oink proof constructed"); @@ -247,10 +260,8 @@ HonkProof ClientIVC::construct_and_prove_hiding_circuit() // inputs to the tube circuit) which are intermediate stages. // TODO(https://github.com/AztecProtocol/barretenberg/issues/1048): link these properly, likely insecure auto num_public_inputs = static_cast(static_cast(fold_proof[PUBLIC_INPUTS_SIZE_INDEX])); - vinfo("num_public_inputs of the last folding proof BEFORE SUBTRACTION", num_public_inputs); num_public_inputs -= bb::PAIRING_POINT_ACCUMULATOR_SIZE; // exclude aggregation object num_public_inputs -= bb::PROPAGATED_DATABUS_COMMITMENTS_SIZE; // exclude propagated databus commitments - vinfo("num_public_inputs of the last folding proof ", num_public_inputs); for (size_t i = 0; i < num_public_inputs; i++) { size_t offset = HONK_PROOF_PUBLIC_INPUT_OFFSET; builder.add_public_variable(fold_proof[i + offset]); @@ -298,8 +309,11 @@ HonkProof ClientIVC::construct_and_prove_hiding_circuit() */ ClientIVC::Proof ClientIVC::prove() { - HonkProof mega_proof = construct_and_prove_hiding_circuit(); - ASSERT(merge_verification_queue.size() == 1); // ensure only a single merge proof remains in the queue + if (!one_circuit) { + mega_proof = construct_and_prove_hiding_circuit(); + ASSERT(merge_verification_queue.size() == 1); // ensure only a single merge proof remains in the queue + } + MergeProof& merge_proof = merge_verification_queue[0]; return { mega_proof, goblin.prove(merge_proof) }; }; @@ -341,8 +355,8 @@ HonkProof ClientIVC::decider_prove() const vinfo("prove decider..."); fold_output.accumulator->proving_key.commitment_key = bn254_commitment_key; MegaDeciderProver decider_prover(fold_output.accumulator); - return decider_prover.construct_proof(); vinfo("finished decider proving."); + return decider_prover.construct_proof(); } /** diff --git a/barretenberg/cpp/src/barretenberg/client_ivc/client_ivc.hpp b/barretenberg/cpp/src/barretenberg/client_ivc/client_ivc.hpp index 5fcf187a35d..12ec8cdc041 100644 --- a/barretenberg/cpp/src/barretenberg/client_ivc/client_ivc.hpp +++ b/barretenberg/cpp/src/barretenberg/client_ivc/client_ivc.hpp @@ -108,6 +108,7 @@ class ClientIVC { public: ProverFoldOutput fold_output; // prover accumulator and fold proof + HonkProof mega_proof; std::shared_ptr verifier_accumulator; // verifier accumulator std::shared_ptr honk_vk; // honk vk to be completed and folded into the accumulator @@ -133,6 +134,10 @@ class ClientIVC { 
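// Illustrative usage sketch, not part of this patch: exercising the new one_circuit path in
// ClientIVC::accumulate/prove above. Assumes the bn254 and grumpkin CRS factories are already
// initialized and that `circuit` is a fully-witnessed, non-kernel circuit.
#include "barretenberg/client_ivc/client_ivc.hpp"

bool prove_single_circuit_sketch(ClientIVC::ClientCircuit& circuit)
{
    ClientIVC ivc{ /*trace_settings=*/{}, /*auto_verify_mode=*/false };
    // With one_circuit = true, accumulate() proves the circuit directly with a MegaProver and
    // marks its proving key as the accumulator, so prove() skips the hiding circuit.
    ivc.accumulate(circuit, /*one_circuit=*/true);
    ClientIVC::Proof proof = ivc.prove();
    return ivc.verify(proof);
}
// Note: the existing call sites updated in this patch pass /*one_circuit=*/false, which keeps
// the multi-circuit folding flow unchanged.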
GoblinProver goblin; + // We dynamically detect whether the input stack consists of one circuit, in which case we do not construct the + // hiding circuit and instead simply prove the single input circuit. + bool one_circuit = false; + bool initialized = false; // Is the IVC accumulator initialized ClientIVC(TraceSettings trace_settings = {}, bool auto_verify_mode = false) @@ -168,8 +173,9 @@ class ClientIVC { * @param mock_vk A boolean to say whether the precomputed vk shoudl have its metadata set. */ void accumulate(ClientCircuit& circuit, + const bool _one_circuit = false, const std::shared_ptr& precomputed_vk = nullptr, - bool mock_vk = false); + const bool mock_vk = false); Proof prove(); diff --git a/barretenberg/cpp/src/barretenberg/client_ivc/client_ivc.test.cpp b/barretenberg/cpp/src/barretenberg/client_ivc/client_ivc.test.cpp index ec0d253ce93..befb8d82a02 100644 --- a/barretenberg/cpp/src/barretenberg/client_ivc/client_ivc.test.cpp +++ b/barretenberg/cpp/src/barretenberg/client_ivc/client_ivc.test.cpp @@ -306,7 +306,7 @@ TEST_F(ClientIVCTests, PrecomputedVerificationKeys) // Construct and accumulate set of circuits using the precomputed vkeys for (size_t idx = 0; idx < NUM_CIRCUITS; ++idx) { auto circuit = circuit_producer.create_next_circuit(ivc); - ivc.accumulate(circuit, precomputed_vks[idx]); + ivc.accumulate(circuit, /*one_circuit=*/false, precomputed_vks[idx]); } EXPECT_TRUE(ivc.prove_and_verify()); @@ -331,7 +331,7 @@ TEST_F(ClientIVCTests, StructuredPrecomputedVKs) // Construct and accumulate set of circuits using the precomputed vkeys for (size_t idx = 0; idx < NUM_CIRCUITS; ++idx) { auto circuit = circuit_producer.create_next_circuit(ivc, log2_num_gates); - ivc.accumulate(circuit, precomputed_vks[idx]); + ivc.accumulate(circuit, /*one_circuit=*/false, precomputed_vks[idx]); } EXPECT_TRUE(ivc.prove_and_verify()); diff --git a/barretenberg/cpp/src/barretenberg/client_ivc/client_ivc_auto_verify.test.cpp b/barretenberg/cpp/src/barretenberg/client_ivc/client_ivc_auto_verify.test.cpp index a462adc2b2d..f4dafde3803 100644 --- a/barretenberg/cpp/src/barretenberg/client_ivc/client_ivc_auto_verify.test.cpp +++ b/barretenberg/cpp/src/barretenberg/client_ivc/client_ivc_auto_verify.test.cpp @@ -165,7 +165,7 @@ TEST_F(ClientIVCAutoVerifyTests, PrecomputedVerificationKeys) // Accumulate each circuit using the precomputed VKs for (auto [circuit, precomputed_vk] : zip_view(circuits, precomputed_vkeys)) { - ivc.accumulate(circuit, precomputed_vk); + ivc.accumulate(circuit, /*one_circuit=*/false, precomputed_vk); } EXPECT_TRUE(ivc.prove_and_verify()); @@ -192,7 +192,7 @@ TEST_F(ClientIVCAutoVerifyTests, StructuredPrecomputedVKs) // Accumulate each circuit for (auto [circuit, precomputed_vk] : zip_view(circuits, precomputed_vkeys)) { - ivc.accumulate(circuit, precomputed_vk); + ivc.accumulate(circuit, /*one_circuit=*/false, precomputed_vk); } EXPECT_TRUE(ivc.prove_and_verify()); diff --git a/barretenberg/cpp/src/barretenberg/client_ivc/client_ivc_integration.test.cpp b/barretenberg/cpp/src/barretenberg/client_ivc/client_ivc_integration.test.cpp index 1eaf3c84d5c..2a32853c469 100644 --- a/barretenberg/cpp/src/barretenberg/client_ivc/client_ivc_integration.test.cpp +++ b/barretenberg/cpp/src/barretenberg/client_ivc/client_ivc_integration.test.cpp @@ -102,7 +102,7 @@ TEST_F(ClientIVCIntegrationTests, BenchmarkCasePrecomputedVKs) for (size_t idx = 0; idx < NUM_CIRCUITS; ++idx) { Builder circuit = circuit_producer.create_next_circuit(ivc); - ivc.accumulate(circuit, precomputed_vks[idx]); + 
ivc.accumulate(circuit, /* one_circuit=*/false, precomputed_vks[idx]); } EXPECT_TRUE(ivc.prove_and_verify()); diff --git a/barretenberg/cpp/src/barretenberg/client_ivc/test_bench_shared.hpp b/barretenberg/cpp/src/barretenberg/client_ivc/test_bench_shared.hpp index 34fce9b314b..44d07e4cd0f 100644 --- a/barretenberg/cpp/src/barretenberg/client_ivc/test_bench_shared.hpp +++ b/barretenberg/cpp/src/barretenberg/client_ivc/test_bench_shared.hpp @@ -46,7 +46,7 @@ void perform_ivc_accumulation_rounds(size_t NUM_CIRCUITS, circuit = circuit_producer.create_next_circuit(ivc); } - ivc.accumulate(circuit, precomputed_vks[circuit_idx], mock_vk); + ivc.accumulate(circuit, /*one_circuit=*/false, precomputed_vks[circuit_idx], mock_vk); } } diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.hpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.hpp index dba936225f6..c092e55fd6f 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.hpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.hpp @@ -197,6 +197,7 @@ struct AcirProgramStack { void pop_back() { witness_stack.pop_back(); } }; +// TODO(https://github.com/AztecProtocol/barretenberg/issues/1161) Refactor this function template Builder create_circuit(AcirFormat& constraint_system, // Specifies whether a prover that produces SNARK recursion friendly proofs should be used. diff --git a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/mega_flavor.hpp b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/mega_flavor.hpp index f7110df3ee0..f4129c363a3 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/mega_flavor.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/mega_flavor.hpp @@ -570,7 +570,7 @@ class MegaFlavor { VerificationKey(const VerificationKey& vk) = default; - void set_metadata(ProvingKey& proving_key) + void set_metadata(const ProvingKey& proving_key) { this->pcs_verification_key = std::make_shared(); this->circuit_size = proving_key.circuit_size; diff --git a/barretenberg/cpp/src/barretenberg/sumcheck/sumcheck_round.test.cpp b/barretenberg/cpp/src/barretenberg/sumcheck/sumcheck_round.test.cpp index 236757ac1b8..3894ce60423 100644 --- a/barretenberg/cpp/src/barretenberg/sumcheck/sumcheck_round.test.cpp +++ b/barretenberg/cpp/src/barretenberg/sumcheck/sumcheck_round.test.cpp @@ -42,7 +42,7 @@ TEST(SumcheckRound, SumcheckTupleOfTuplesOfUnivariates) univariate_2.template extend_to() * challenge[0] + univariate_3.template extend_to() * challenge[1]; - // Compare final batched univarites + // Compare final batched univariates EXPECT_EQ(result, result_expected); // Reinitialize univariate accumulators to zero diff --git a/barretenberg/cpp/src/barretenberg/ultra_honk/decider_prover.cpp b/barretenberg/cpp/src/barretenberg/ultra_honk/decider_prover.cpp index 7052c8e831e..2a27bd4b2f0 100644 --- a/barretenberg/cpp/src/barretenberg/ultra_honk/decider_prover.cpp +++ b/barretenberg/cpp/src/barretenberg/ultra_honk/decider_prover.cpp @@ -80,7 +80,7 @@ template void DeciderProver_::execute_pcs_rounds( zk_sumcheck_data.libra_univariates_monomial, sumcheck_output.claimed_libra_evaluations); } - vinfo("executed multivariate-to-univarite reduction"); + vinfo("executed multivariate-to-univariate reduction"); PCS::compute_opening_proof(ck, prover_opening_claim, transcript); vinfo("computed opening proof"); } diff --git a/barretenberg/cpp/src/barretenberg/ultra_honk/oink_prover.hpp 
b/barretenberg/cpp/src/barretenberg/ultra_honk/oink_prover.hpp index 06e2d884f44..8ab9fed7aa1 100644 --- a/barretenberg/cpp/src/barretenberg/ultra_honk/oink_prover.hpp +++ b/barretenberg/cpp/src/barretenberg/ultra_honk/oink_prover.hpp @@ -60,4 +60,7 @@ template class OinkProver { void execute_grand_product_computation_round(); RelationSeparator generate_alphas_round(); }; + +using MegaOinkProver = OinkProver; + } // namespace bb \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_prover.hpp b/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_prover.hpp index 11264c72162..82608299ed5 100644 --- a/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_prover.hpp +++ b/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_prover.hpp @@ -46,6 +46,7 @@ template class UltraProver_ { HonkProof export_proof(); HonkProof construct_proof(); + HonkProof prove() { return construct_proof(); }; private: HonkProof proof; diff --git a/build_manifest.yml b/build_manifest.yml index 28982430bad..d469a72fd34 100644 --- a/build_manifest.yml +++ b/build_manifest.yml @@ -142,7 +142,7 @@ barretenberg-acir-tests-bb-ultra-plonk: dependencies: - barretenberg-x86_64-linux-clang-assert - noir-compile-acir-tests -barretenberg-acir-tests-bb-mega-honk: +barretenberg-acir-tests-bb-client-ivc: buildDir: barretenberg/acir_tests dockerfile: Dockerfile.bb dependencies: diff --git a/full_log.ansi b/full_log.ansi deleted file mode 100644 index 0708423dbd0..00000000000 --- a/full_log.ansi +++ /dev/null @@ -1,2514 +0,0 @@ -Debugger listening on ws://127.0.0.1:9229/88f07978-f8a1-42e3-8993-99fd17813999 -For help, see: https://nodejs.org/en/docs/inspector - aztec:telemetry-client [INFO] Using NoopTelemetryClient +0ms - aztec:snapshot_manager:full_prover_integration/full_prover [WARN] No data path given, will not persist any snapshots. +0ms - aztec:snapshot_manager:full_prover_integration/full_prover [VERBOSE] Initializing state... +2ms - aztec:snapshot_manager:full_prover_integration/full_prover [VERBOSE] Starting anvil... +0ms - aztec:snapshot_manager:full_prover_integration/full_prover [VERBOSE] Deploying L1 contracts... +118ms - aztec:snapshot_manager:full_prover_integration/full_prover [INFO] Set block interval to 12 +209ms - aztec:snapshot_manager:full_prover_integration/full_prover [INFO] Deploying contracts from 0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266... 
+1ms - aztec:snapshot_manager:full_prover_integration/full_prover [VERBOSE] Deploying contract in tx 0xd1b23fffc82ed4638a62506a56f00048bb977c5140fb4c475e93ae65df25eaad +33ms - aztec:snapshot_manager:full_prover_integration/full_prover [INFO] Deployed Registry at 0x5fbdb2315678afecb367f032d93f642f64180aa3 +12ms - aztec:snapshot_manager:full_prover_integration/full_prover [VERBOSE] Deploying contract in tx 0x6c11c3f6513162429baa53595577735066a268ebe12f880e5f8184844d9c5b81 +15ms - aztec:snapshot_manager:full_prover_integration/full_prover [INFO] Deployed Fee Juice at 0xe7f1725e7734ce288f8367e1bb143e90bb3f0512 +6ms - aztec:snapshot_manager:full_prover_integration/full_prover [VERBOSE] Deploying contract in tx 0x3ca513b9f7757d640df6b0d939345c4633161f65e47e3b6978d1fba5c1e32bd1 +11ms - aztec:snapshot_manager:full_prover_integration/full_prover [INFO] Deployed Gerousia at 0x9fe46736679d2d9a65f0992f2272de9f3c7fa6e0 +5ms - aztec:snapshot_manager:full_prover_integration/full_prover [VERBOSE] Deploying contract in tx 0x0c843f2cea9ed7a2dd4f56cfa69667d6d613bd1e51f69625ca63a7ed41850c5b +16ms - aztec:snapshot_manager:full_prover_integration/full_prover [INFO] Deployed Apella at 0xcf7ed3acca5a467e9e704c703e8d87f634fb0fc9 +4ms - aztec:snapshot_manager:full_prover_integration/full_prover [VERBOSE] Deploying contract in tx 0x4c06d03f6f00ac9c2fa772b069fd8d5cc158e2f3b5e3b186468fa3980ce73972 +10ms - aztec:snapshot_manager:full_prover_integration/full_prover [INFO] Deployed Nomismatokopio at 0xdc64a140aa3e981100a9beca4e685f962f0cf6c9 +4ms - aztec:snapshot_manager:full_prover_integration/full_prover [VERBOSE] Deploying contract in tx 0xd3f5f79f7c6e4ffaf58dfd5cc0752a140d519c3cc54189dba1e54078f4b3d016 +9ms - aztec:snapshot_manager:full_prover_integration/full_prover [INFO] Deployed Sysstia at 0x5fc8d32690cc91d4c39d9d3abcbd16989f875707 +7ms - aztec:snapshot_manager:full_prover_integration/full_prover [INFO] All governance contracts deployed +18ms - aztec:snapshot_manager:full_prover_integration/full_prover [VERBOSE] Deploying contract in tx 0x897e923318ca3a8a8a135f6ddf14d318c8a22fd2937a45faf7762122622d0026 +10ms - aztec:snapshot_manager:full_prover_integration/full_prover [INFO] Deployed Fee Juice Portal at 0x0165878a594ca255338adfa4d48449f69242eb8f +4ms - aztec:snapshot_manager:full_prover_integration/full_prover [VERBOSE] Deploying contract in tx 0x9fe5638c2a5eacaf0b09348eebbfb495ea3ed48d8374fc627ac64bb9147d775a +10ms - aztec:snapshot_manager:full_prover_integration/full_prover [VERBOSE] Deploying contract in tx 0x46002e689600a62af8e12b939a88fd695e7d3a2413341777932f25abc9907f78 +28ms - aztec:snapshot_manager:full_prover_integration/full_prover [INFO] Deployed Rollup at 0x2279b7a0a67db372996a5fab50d91eaa73d2ebe6 +5ms - aztec:snapshot_manager:full_prover_integration/full_prover [INFO] All core contracts deployed +10ms - aztec:snapshot_manager:full_prover_integration/full_prover [INFO] Funding fee juice portal contract with fee juice in 0xe0f9d41f0fe72b01beeec19bccf4662b2b5dd2ba0a4ff543c248e87a3f6542a7 +13ms - aztec:snapshot_manager:full_prover_integration/full_prover [VERBOSE] Fee juice portal initializing in tx 0xa5f0f5f0b3aa26a26ee14c8f82b80a7434510b0f6ed087f8d5ce3aad432edfc7 +10ms - aztec:snapshot_manager:full_prover_integration/full_prover [INFO] Initialized Fee Juice Portal at 0x0165878a594ca255338adfa4d48449f69242eb8f to bridge between L1 0xe7f1725e7734ce288f8367e1bb143e90bb3f0512 to L2 0x0000000000000000000000000000000000000000000000000000000000000005 +0ms - 
aztec:snapshot_manager:full_prover_integration/full_prover [INFO] Inbox available at 0xed179b78d5781f93eb169730d8ad1be7313123f4 +5ms - aztec:snapshot_manager:full_prover_integration/full_prover [INFO] Outbox available at 0x1016b5aaa3270a65c315c664ecb238b6db270b64 +3ms - aztec:snapshot_manager:full_prover_integration/full_prover [VERBOSE] Upgrading registry contract at 0x5fbdb2315678afecb367f032d93f642f64180aa3 to rollup 0x2279b7a0a67db372996a5fab50d91eaa73d2ebe6 in tx 0x2130191f6b637c15f6dc6ca3a217c937fd5b18ef99a7f5a74ed4637fb05a8b1e +9ms - aztec:snapshot_manager:full_prover_integration/full_prover [VERBOSE] Transferring the ownership of the registry contract at 0x5fbdb2315678afecb367f032d93f642f64180aa3 to the Apella 0xcf7ed3acca5a467e9e704c703e8d87f634fb0fc9 in tx 0x0517ec9178a0e6a6eb81d0b77c42f3ec32e26678cf2acfecf59cfba90b24734a +10ms - aztec:snapshot_manager:full_prover_integration/full_prover [VERBOSE] All transactions for L1 deployment have been mined +8ms - aztec:snapshot_manager:full_prover_integration/full_prover [INFO] Funding sysstia in 0x3b834d19bab0e4955a3e22b93bab0fb46545750d969be1d5c0fd70d043e7b22f +14ms - aztec:utils:watcher [INFO] Watcher created for rollup at 0x2279b7a0a67db372996a5fab50d91eaa73d2ebe6 +0ms - aztec:utils:watcher [INFO] Watcher started +2ms - aztec:snapshot_manager:full_prover_integration/full_prover [VERBOSE] Using native ACVM binary at ../../noir/noir-repo/target/release/acvm with working directory /tmp/02000000/acvm +4ms - aztec:telemetry-client [INFO] Using NoopTelemetryClient +0ms - aztec:snapshot_manager:full_prover_integration/full_prover [VERBOSE] Creating and synching an aztec node... +4ms - aztec:archiver:lmdb [INFO] Creating archiver ephemeral data store +0ms - aztec:archiver [INFO] Performing initial chain sync to rollup contract 0x2279b7a0a67db372996a5fab50d91eaa73d2ebe6 +0ms - aztec:archiver [VERBOSE] Retrieved no new L1 -> L2 messages between L1 blocks 10 and 14. +6ms - aztec:archiver [VERBOSE] No blocks to retrieve from 10 to 14 +4ms - aztec:p2p:lmdb [INFO] Creating p2p ephemeral data store +0ms - aztec:p2p [VERBOSE] Block 1 (proven 1) already beyond current block +0ms - aztec:l2_block_stream [VERBOSE] Starting L2 block stream batchSize=20 pollIntervalMS=100 +0ms - aztec:p2p [VERBOSE] Started block downloader from block 1 +1ms - aztec:l2_block_stream [VERBOSE] Starting L2 block stream proven=false pollIntervalMS=100 batchSize=undefined +0ms - aztec:world_state [INFO] Started world state synchronizer from block 1 +0ms - aztec:node [INFO] Using native ACVM at ../../noir/noir-repo/target/release/acvm and working directory /tmp/02000000/acvm +0ms - aztec:validator [VERBOSE] Initialized validator validatorAddress=0x70997970c51812dc3a010c7d01b50e0d17dc79c8 +0ms - aztec:sequencer [VERBOSE] Initialized sequencer with 1-32 txs per block. +0ms - aztec:sequencer [INFO] Sequencer started +1ms - aztec:node [INFO] Started Aztec Node against chain 0x7a69 with contracts - -Rollup: 0x2279b7a0a67db372996a5fab50d91eaa73d2ebe6 -Registry: 0x5fbdb2315678afecb367f032d93f642f64180aa3 -Inbox: 0xed179b78d5781f93eb169730d8ad1be7313123f4 -Outbox: 0x1016b5aaa3270a65c315c664ecb238b6db270b64 +36ms - aztec:snapshot_manager:full_prover_integration/full_prover [VERBOSE] Creating and syncing a simulated prover node... 
+559ms - aztec:archiver:lmdb [INFO] Creating archiver ephemeral data store +0ms - aztec:sequencer [VERBOSE] Sequencer sync check succeeded worldStateNumber=0 worldStateHash=0x0fd77c2a44e9430a2e6196ff4ed74eb832169caf335c122899deb80b805570c3 l2BlockSourceNumber=0 l2BlockSourceHash=undefined p2pNumber=0 l1ToL2MessageSourceNumber=0 +31ms - aztec:archiver [INFO] Performing initial chain sync to rollup contract 0x2279b7a0a67db372996a5fab50d91eaa73d2ebe6 +0ms - aztec:archiver [VERBOSE] Retrieved no new L1 -> L2 messages between L1 blocks 10 and 14. +3ms - aztec:archiver [VERBOSE] No blocks to retrieve from 10 to 14 +1ms - aztec:prover [VERBOSE] Created archiver and synced to block 0 +0ms - aztec:archiver [VERBOSE] No blocks to retrieve from 10 to 14 +4ms - aztec:l2_block_stream [VERBOSE] Starting L2 block stream proven=true pollIntervalMS=100 batchSize=undefined +0ms - aztec:world_state [INFO] Started world state synchronizer from block 1 +0ms - aztec:prover [INFO] Using native ACVM at ../../noir/noir-repo/target/release/acvm and working directory /tmp/02000000/acvm +133ms - aztec:prover-client:prover-pool:queue [INFO] Proving queue started +0ms - aztec:prover-client:prover-agent [INFO] Agent started with concurrency=2 +0ms - aztec:createProverCoordination [INFO] Using prover coordination via aztec node +0ms - aztec:prover-node:token-contract [VERBOSE] Balance 0 is below required 4000. Attempting mint. +0ms - aztec:prover-node:token-contract [VERBOSE] Minted 4000 test tokens +24ms - aztec:prover-node:bond-manager [VERBOSE] Prover bond top-up 2000 required to get 0 to target 2000 +0ms - aztec:prover-node:token-contract [VERBOSE] Approving max allowance for 0x06b1d212b8da92b83af328de5eef4e211da02097 +10ms - aztec:prover-node:bond-manager [VERBOSE] Prover bond top-up of 2000 completed +31ms - aztec:prover-node:epoch-monitor [INFO] Started EpochMonitor maxPendingJobs=10 pollingIntervalMs=200 +0ms - aztec:prover-node:claims-monitor [INFO] Started ClaimsMonitor with prover address 0x3c44cdddb6a900fa2b585dd299e03d12fa4293bc maxPendingJobs=10 pollingIntervalMs=200 +0ms - aztec:prover-node [INFO] Started ProverNode pollingIntervalMs=200 maxPendingJobs=10 +0ms - aztec:snapshot_manager:full_prover_integration/full_prover [VERBOSE] Creating pxe... +488ms - aztec:pxe:keystore:lmdb [INFO] Creating pxe_key_store ephemeral data store +0ms - aztec:pxe:data:lmdb [INFO] Creating pxe_data ephemeral data store +0ms - aztec:pxe_synchronizer [INFO] Initial sync complete +0ms - aztec:pxe_service [INFO] Added protocol contract AuthRegistry at 0x0000000000000000000000000000000000000000000000000000000000000001 +0ms - aztec:pxe_service [INFO] Added protocol contract ContractInstanceDeployer at 0x0000000000000000000000000000000000000000000000000000000000000002 +16ms - aztec:pxe_service [INFO] Added protocol contract ContractClassRegisterer at 0x0000000000000000000000000000000000000000000000000000000000000003 +21ms - aztec:pxe_service [INFO] Added protocol contract MultiCallEntrypoint at 0x0000000000000000000000000000000000000000000000000000000000000004 +13ms - aztec:archiver [VERBOSE] Retrieved no new L1 -> L2 messages between L1 blocks 15 and 17. 
+1s - aztec:archiver [VERBOSE] No blocks to retrieve from 15 to 17 +2ms - aztec:pxe_service [INFO] Added protocol contract FeeJuice at 0x0000000000000000000000000000000000000000000000000000000000000005 +65ms - aztec:pxe_service [INFO] Added protocol contract Router at 0x0000000000000000000000000000000000000000000000000000000000000006 +30ms - aztec:pxe_service [INFO] Started PXE connected to chain 31337 version 1 +0ms - aztec:snapshot_manager:full_prover_integration/full_prover [VERBOSE] Applying state transition for 2_accounts... +328ms - aztec:full_prover_test:full_prover [VERBOSE] Simulating account deployment... +0ms - aztec:pxe_service [INFO] Registered account 0x08a884b761bf510bc032a884aa3ec2ce1f326b2463039e0b64df4c05a6273b78 +67ms - aztec:pxe_service [INFO] Registered account 0x2f3bf1886050056aabbe425c72e2dfa653f71080bb7696647cab05c54f497ae8 +0ms - aztec:pxe_service [INFO] Added contract SchnorrAccount at 0x08a884b761bf510bc032a884aa3ec2ce1f326b2463039e0b64df4c05a6273b78 +153ms - aztec:pxe_service [INFO] Added contract SchnorrAccount at 0x2f3bf1886050056aabbe425c72e2dfa653f71080bb7696647cab05c54f497ae8 +0ms - aztec:simulator:private_execution [VERBOSE] Executing external function MultiCallEntrypoint:entrypoint@0x0000000000000000000000000000000000000000000000000000000000000004 +0ms - aztec:simulator:private_execution [VERBOSE] Executing external function SchnorrAccount:constructor@0x08a884b761bf510bc032a884aa3ec2ce1f326b2463039e0b64df4c05a6273b78 +0ms - aztec:pxe_service [VERBOSE] Simulation completed for 0x0000000000000000000000000000000000000000000000000000000000000004:entrypoint +396ms - aztec:node [INFO] Simulating tx 1478643d3efe3b3d82b1e56ec7e120a23b3bbc759456975c168ea2ecb518e481 +2s - aztec:sequencer [VERBOSE] Sequencer sync check succeeded worldStateNumber=0 worldStateHash=0x0fd77c2a44e9430a2e6196ff4ed74eb832169caf335c122899deb80b805570c3 l2BlockSourceNumber=0 l2BlockSourceHash=undefined p2pNumber=0 l1ToL2MessageSourceNumber=0 +2s - aztec:archiver [VERBOSE] Retrieved no new L1 -> L2 messages between L1 blocks 15 and 17. +2s - aztec:archiver [VERBOSE] No blocks to retrieve from 15 to 17 +4ms - aztec:pxe_service [INFO] Executed local simulation for 1478643d3efe3b3d82b1e56ec7e120a23b3bbc759456975c168ea2ecb518e481 +714ms - aztec:simulator:private_execution [VERBOSE] Executing external function MultiCallEntrypoint:entrypoint@0x0000000000000000000000000000000000000000000000000000000000000004 +0ms - aztec:simulator:private_execution [VERBOSE] Executing external function SchnorrAccount:constructor@0x2f3bf1886050056aabbe425c72e2dfa653f71080bb7696647cab05c54f497ae8 +0ms - aztec:pxe_service [VERBOSE] Simulation completed for 0x0000000000000000000000000000000000000000000000000000000000000004:entrypoint +325ms - aztec:node [INFO] Simulating tx 1facd099f88103cb546eb65fed65a75a731c3fa779aced2f8c9fe176a2afcce9 +948ms - aztec:pxe_service [INFO] Executed local simulation for 1facd099f88103cb546eb65fed65a75a731c3fa779aced2f8c9fe176a2afcce9 +601ms - aztec:full_prover_test:full_prover [VERBOSE] Deploying accounts... 
+3s - aztec:pxe_service [INFO] Sending transaction 1478643d3efe3b3d82b1e56ec7e120a23b3bbc759456975c168ea2ecb518e481 +1s - aztec:node [INFO] Received tx 1478643d3efe3b3d82b1e56ec7e120a23b3bbc759456975c168ea2ecb518e481 +1s - aztec:pxe_service [INFO] Sending transaction 1facd099f88103cb546eb65fed65a75a731c3fa779aced2f8c9fe176a2afcce9 +1ms - aztec:node [INFO] Received tx 1facd099f88103cb546eb65fed65a75a731c3fa779aced2f8c9fe176a2afcce9 +1ms - aztec:tx_pool [INFO] Adding tx with id 1478643d3efe3b3d82b1e56ec7e120a23b3bbc759456975c168ea2ecb518e481 eventName=tx-added-to-pool txHash=1478643d3efe3b3d82b1e56ec7e120a23b3bbc759456975c168ea2ecb518e481 noteEncryptedLogCount=1 encryptedLogCount=0 unencryptedLogCount=0 noteEncryptedLogSize=493 encryptedLogSize=8 unencryptedLogSize=8 newCommitmentCount=1 newNullifierCount=3 proofSize=0 size=16941 feePaymentMethod=none classRegisteredCount=0 +0ms - aztec:tx_pool [INFO] Adding tx with id 1facd099f88103cb546eb65fed65a75a731c3fa779aced2f8c9fe176a2afcce9 eventName=tx-added-to-pool txHash=1facd099f88103cb546eb65fed65a75a731c3fa779aced2f8c9fe176a2afcce9 noteEncryptedLogCount=1 encryptedLogCount=0 unencryptedLogCount=0 noteEncryptedLogSize=493 encryptedLogSize=8 unencryptedLogSize=8 newCommitmentCount=1 newNullifierCount=3 proofSize=0 size=16941 feePaymentMethod=none classRegisteredCount=0 +4ms - aztec:sequencer [VERBOSE] Sequencer sync check succeeded worldStateNumber=0 worldStateHash=0x0fd77c2a44e9430a2e6196ff4ed74eb832169caf335c122899deb80b805570c3 l2BlockSourceNumber=0 l2BlockSourceHash=undefined p2pNumber=0 l1ToL2MessageSourceNumber=0 +2s - aztec:pxe_service [INFO] Sent transaction 1478643d3efe3b3d82b1e56ec7e120a23b3bbc759456975c168ea2ecb518e481 +29ms - aztec:pxe_service [INFO] Sent transaction 1facd099f88103cb546eb65fed65a75a731c3fa779aced2f8c9fe176a2afcce9 +3ms - aztec:sequencer [INFO] Building blockNumber=1 txCount=2 slotNumber=4 +15ms - aztec:sequencer [VERBOSE] Retrieved 0 L1 to L2 messages for block 1 +1ms - aztec:sequencer-client:block_builder_light [VERBOSE] Starting new block numTxs=2 globalVariables=[object Object] l1ToL2Messages= +0ms - aztec:sequencer-client:block_builder_light [VERBOSE] Adding new tx to block txHash=1478643d3efe3b3d82b1e56ec7e120a23b3bbc759456975c168ea2ecb518e481 +6ms - aztec:sequencer-client:block_builder_light [VERBOSE] Adding new tx to block txHash=1facd099f88103cb546eb65fed65a75a731c3fa779aced2f8c9fe176a2afcce9 +38ms - aztec:sequencer-client:block_builder_light [VERBOSE] Setting block as completed and adding 0 padding txs +40ms - aztec:sequencer-client:block_builder_light [VERBOSE] Finalising block +0ms - aztec:sequencer [VERBOSE] Assembled block 1 (txEffectsHash: 00a40df55118b7536d6697aeda3398a2e6bb2cda35bbadc942fc5beff434142e) eventName=l2-block-built creator=0xf39fd6e51aad88f6f4ce6ab8827279cfffb92266 duration=98.15720796585083 publicProcessDuration=78.71467590332031 rollupCircuitsDuration=95.19495809078217 txCount=2 blockNumber=1 blockTimestamp=1730721769 noteEncryptedLogLength=986 noteEncryptedLogCount=2 encryptedLogLength=16 encryptedLogCount=0 unencryptedLogCount=0 unencryptedLogSize=16 +98ms - aztec:sequencer [VERBOSE] Collecting attestations +0ms - aztec:sequencer [VERBOSE] Attestations collected +2ms - aztec:sequencer [VERBOSE] Collecting proof quotes +0ms - aztec:sequencer [VERBOSE] No epoch to prove +5ms - aztec:sequencer [VERBOSE] No proof quote available +0ms - aztec:sequencer:publisher [VERBOSE] Submitting propose transaction +0ms - aztec:sequencer:publisher [INFO] Published L2 block to L1 rollup contract 
gasPrice=1118014540 gasUsed=505591 transactionHash=0xf2f27461f78ab187fda838fb4231b74866c53729a108efdfdad21b36c78c679f calldataGas=29584 calldataSize=2596 sender=0xf39fd6e51aad88f6f4ce6ab8827279cfffb92266 txCount=2 blockNumber=1 blockTimestamp=1730721769 noteEncryptedLogLength=986 noteEncryptedLogCount=2 encryptedLogLength=16 encryptedLogCount=0 unencryptedLogCount=0 unencryptedLogSize=16 eventName=rollup-published-to-l1 slotNumber=4 blockHash=0x075453d18c5628590a03126ea7b14261b7a9ceb6bf955897bcf5d2f371aa479d +129ms - aztec:sequencer [INFO] Submitted rollup block 1 with 2 transactions duration=99ms (Submitter: 0xf39fd6e51aad88f6f4ce6ab8827279cfffb92266) +135ms - aztec:archiver [VERBOSE] Retrieved no new L1 -> L2 messages between L1 blocks 18 and 18. +4s - aztec:archiver [VERBOSE] Retrieved no new L1 -> L2 messages between L1 blocks 18 and 18. +3s - aztec:archiver [VERBOSE] Processed 1 new L2 blocks up to 1 +29ms - aztec:archiver [VERBOSE] Processed 1 new L2 blocks up to 1 +34ms - aztec:cheat_codes:eth [VERBOSE] Mined 1 L1 blocks +0ms - aztec:cheat_codes:eth [VERBOSE] Warped L1 timestamp to 1730721793 +0ms - aztec:utils:watcher [INFO] Slot 4 was filled, jumped to next slot +6s - aztec:world_state [VERBOSE] Handling new L2 blocks from 1 to 1 +5s - aztec:world_state [VERBOSE] Handled new L2 block eventName=l2-block-handled duration=12.292266011238098 unfinalisedBlockNumber=1 finalisedBlockNumber=0 oldestHistoricBlock=1 txCount=2 blockNumber=1 blockTimestamp=1730721769 noteEncryptedLogLength=986 noteEncryptedLogCount=2 encryptedLogLength=16 encryptedLogCount=0 unencryptedLogCount=0 unencryptedLogSize=16 +12ms - aztec:sequencer [VERBOSE] Sequencer sync check succeeded worldStateNumber=1 worldStateHash=0x075453d18c5628590a03126ea7b14261b7a9ceb6bf955897bcf5d2f371aa479d l2BlockSourceNumber=1 l2BlockSourceHash=0x075453d18c5628590a03126ea7b14261b7a9ceb6bf955897bcf5d2f371aa479d p2pNumber=1 l1ToL2MessageSourceNumber=1 +1s - aztec:simulator:unconstrained_execution [VERBOSE] Executing unconstrained function 0x08a884b761bf510bc032a884aa3ec2ce1f326b2463039e0b64df4c05a6273b78:0x00000000(compute_note_hash_and_optionally_a_nullifier) +0ms - aztec:archiver [VERBOSE] Retrieved no new L1 -> L2 messages between L1 blocks 19 and 19. +1s - aztec:note_processor [VERBOSE] Added incoming note for contract 0x08a884b761bf510bc032a884aa3ec2ce1f326b2463039e0b64df4c05a6273b78 at slot 0x0000000000000000000000000000000000000000000000000000000000000001 with nullifier 0x2f4f1f56d24ad9206fe9f38a2082eaf201770210741046204dbd66117d5dd1be +0ms - aztec:note_processor [VERBOSE] Added outgoing note for contract 0x08a884b761bf510bc032a884aa3ec2ce1f326b2463039e0b64df4c05a6273b78 at slot 0x0000000000000000000000000000000000000000000000000000000000000001 +0ms - aztec:archiver [VERBOSE] No blocks to retrieve from 19 to 19 +4ms - aztec:archiver [VERBOSE] Retrieved no new L1 -> L2 messages between L1 blocks 19 and 19. 
+1s - aztec:archiver [VERBOSE] No blocks to retrieve from 19 to 19 +5ms - aztec:simulator:unconstrained_execution [VERBOSE] Executing unconstrained function 0x2f3bf1886050056aabbe425c72e2dfa653f71080bb7696647cab05c54f497ae8:0x00000000(compute_note_hash_and_optionally_a_nullifier) +0ms - aztec:note_processor [VERBOSE] Added incoming note for contract 0x2f3bf1886050056aabbe425c72e2dfa653f71080bb7696647cab05c54f497ae8 at slot 0x0000000000000000000000000000000000000000000000000000000000000001 with nullifier 0x26481455528ca0656413bf6cc1945ef1c8987543c0be40115a73346780787849 +0ms - aztec:note_processor [VERBOSE] Added outgoing note for contract 0x2f3bf1886050056aabbe425c72e2dfa653f71080bb7696647cab05c54f497ae8 at slot 0x0000000000000000000000000000000000000000000000000000000000000001 +0ms - aztec:snapshot_manager:full_prover_integration/full_prover [VERBOSE] State transition for 2_accounts complete. +6s - aztec:full_prover_test:full_prover [VERBOSE] Wallet 0 address: 0x08a884b761bf510bc032a884aa3ec2ce1f326b2463039e0b64df4c05a6273b78 +2s - aztec:full_prover_test:full_prover [VERBOSE] Wallet 1 address: 0x2f3bf1886050056aabbe425c72e2dfa653f71080bb7696647cab05c54f497ae8 +0ms - aztec:snapshot_manager:full_prover_integration/full_prover [VERBOSE] Applying state transition for client_prover_integration... +21ms - aztec:full_prover_test:full_prover [VERBOSE] Public deploy accounts... +0ms - aztec:simulator:private_execution [VERBOSE] Executing external function SchnorrAccount:entrypoint@0x08a884b761bf510bc032a884aa3ec2ce1f326b2463039e0b64df4c05a6273b78 +0ms - aztec:simulator:private_execution [VERBOSE] Executing external function ContractClassRegisterer:register@0x0000000000000000000000000000000000000000000000000000000000000003 +0ms - aztec:sequencer [VERBOSE] Sequencer sync check succeeded worldStateNumber=1 worldStateHash=0x075453d18c5628590a03126ea7b14261b7a9ceb6bf955897bcf5d2f371aa479d l2BlockSourceNumber=1 l2BlockSourceHash=0x075453d18c5628590a03126ea7b14261b7a9ceb6bf955897bcf5d2f371aa479d p2pNumber=1 l1ToL2MessageSourceNumber=1 +1s - aztec:simulator:client_execution_context [VERBOSE] debug_log ContractClassRegistered: 0x0f1cf77f11813ebba27b8b30eed1dad1afd77eeb44ff6be5002a15ce844a766b,0x07d2a22d4babe42ea1f8c46ee6fd73305d85030a74fe8bb4bb22532320170996,0x261ee24b32b2d7345986eec3c4e38d886a35b32cea86c9c78fd14a7e8413a66d,0x0000000000000000000000000000000000000000000000000000000000000000 +0ms - aztec:simulator:client_execution_context [VERBOSE] Emitted unencrypted log from ContractClassRegisterer: "UnencryptedL2Log(contractAddress: 0x0000000000000000000000000000000000000000000000000000000000000003..." 
aztec:sequencer [INFO] Building blockNumber=2 txCount=1 slotNumber=5
aztec:sequencer [INFO] Submitted rollup block 2 with 1 transactions duration=109ms (Submitter: 0xf39fd6e51aad88f6f4ce6ab8827279cfffb92266)
aztec:archiver [VERBOSE] Processed 1 new L2 blocks up to 2
aztec:full_prover_test:full_prover [VERBOSE] Deploying TokenContract...
aztec:pxe_service [INFO] Added contract Token at 0x1b99a46103bf947d5e422e7bf39fedcc759d29b98ff583cd9b4c5ea79e28f30c
aztec:sequencer [INFO] Building blockNumber=3 txCount=1 slotNumber=6
aztec:sequencer [INFO] Submitted rollup block 3 with 1 transactions duration=1655ms (Submitter: 0xf39fd6e51aad88f6f4ce6ab8827279cfffb92266)
aztec:archiver [VERBOSE] Processed 1 new L2 blocks up to 3
aztec:js:deploy_sent_tx [INFO] Contract 0x1b99a46103bf947d5e422e7bf39fedcc759d29b98ff583cd9b4c5ea79e28f30c successfully deployed.
aztec:full_prover_test:full_prover [VERBOSE] Token deployed to 0x1b99a46103bf947d5e422e7bf39fedcc759d29b98ff583cd9b4c5ea79e28f30c
aztec:snapshot_manager:full_prover_integration/full_prover [VERBOSE] State transition for client_prover_integration complete.
aztec:full_prover_test:full_prover [VERBOSE] Token contract address: 0x1b99a46103bf947d5e422e7bf39fedcc759d29b98ff583cd9b4c5ea79e28f30c
aztec:snapshot_manager:full_prover_integration/full_prover [VERBOSE] Applying state transition for mint...
aztec:full_prover_test:full_prover [VERBOSE] Minting 20000 publicly...
aztec:sequencer [INFO] Building blockNumber=4 txCount=1 slotNumber=7
aztec:sequencer [INFO] Submitted rollup block 4 with 1 transactions duration=1274ms (Submitter: 0xf39fd6e51aad88f6f4ce6ab8827279cfffb92266)
aztec:archiver [VERBOSE] Processed 1 new L2 blocks up to 4
aztec:full_prover_test:full_prover [VERBOSE] Transferring 10000 to private...
aztec:sequencer [INFO] Building blockNumber=5 txCount=1 slotNumber=8
aztec:sequencer [INFO] Submitted rollup block 5 with 1 transactions duration=2505ms (Submitter: 0xf39fd6e51aad88f6f4ce6ab8827279cfffb92266)
aztec:archiver [VERBOSE] Processed 1 new L2 blocks up to 5
aztec:full_prover_test:full_prover [VERBOSE] Minting complete.
aztec:snapshot_manager:full_prover_integration/full_prover [VERBOSE] State transition for mint complete.
aztec:full_prover_test:full_prover [VERBOSE] Public balance of wallet 0: 10000
aztec:full_prover_test:full_prover [VERBOSE] Private balance of wallet 0: 10000
aztec:full_prover_test:full_prover [VERBOSE] Total supply: 20000
aztec:full_prover_test:full_prover [VERBOSE] Using native ACVM binary at ../../noir/noir-repo/target/release/acvm with working directory /tmp/bb0f0000/acvm
aztec:full_prover_test:full_prover [VERBOSE] Move to a clean epoch
aztec:js:cheat_codes [VERBOSE] Advanced to next epoch
aztec:full_prover_test:full_prover [VERBOSE] Marking current block as proven
aztec:js:cheat_codes [VERBOSE] Marked 5 as proven
aztec:full_prover_test:full_prover [VERBOSE] Main setup completed, initializing full prover PXE, Node, and Prover Node
aztec:pxe_service_bc0f00 [INFO] Started PXE connected to chain 31337 version 1
aztec:archiver [VERBOSE] Updating the proven block number to 5 and epoch to 0
aztec:world_state [VERBOSE] Chain proven at block 5
aztec:world_state [VERBOSE] Chain finalized at block 5
aztec:full_prover_test:full_prover [INFO] Full prover PXE started
aztec:full_prover_test:full_prover [VERBOSE] Shutting down simulated prover node
aztec:prover-node [INFO] Stopped ProverNode
aztec:full_prover_test:full_prover [VERBOSE] Starting archiver for new prover node
aztec:archiver [INFO] Performing initial chain sync to rollup contract 0x2279b7a0a67db372996a5fab50d91eaa73d2ebe6
aztec:archiver [VERBOSE] Processed 5 new L2 blocks up to 5
aztec:full_prover_test:full_prover [VERBOSE] Funding prover node at 0x3C44CdDdB6a900fa2b585dd299e03d12FA4293BC
aztec:full_prover_test:full_prover [VERBOSE] Starting prover node
aztec:world_state [INFO] Started world state synchronizer from block 1
aztec:world_state [VERBOSE] Handling new L2 blocks from 1 to 5
noteEncryptedLogCount=0 encryptedLogLength=8 encryptedLogCount=0 unencryptedLogCount=1 unencryptedLogSize=16472 +13ms - aztec:world_state [VERBOSE] Chain proven at block 5 +0ms - aztec:world_state [VERBOSE] Chain finalized at block 5 +0ms - aztec:prover [INFO] Using native ACVM at ../../noir/noir-repo/target/release/acvm and working directory /tmp/02000000/acvm +206ms - aztec:archiver [VERBOSE] Retrieved no new L1 -> L2 messages between L1 blocks 30 and 30. +6s - aztec:archiver [VERBOSE] Updating the proven block number to 5 and epoch to 0 +2ms - aztec:archiver [VERBOSE] No blocks to retrieve from 30 to 30 +0ms - aztec:bb-prover [INFO] Using native BB at /mnt/user-data/mara/aztec-packages/barretenberg/cpp/build/bin/bb and working directory /tmp/bb-wPFf4H +0ms - aztec:bb-prover [INFO] Using native ACVM at ../../noir/noir-repo/target/release/acvm and working directory /tmp/02000000/acvm +0ms - aztec:prover-client:prover-pool:queue [INFO] Proving queue started +0ms - aztec:prover-client:prover-agent [INFO] Agent started with concurrency=2 +0ms - aztec:createProverCoordination [INFO] Using prover coordination via aztec node +0ms - aztec:prover-node:bond-manager [VERBOSE] Prover bond top-up 4000 required to get 2000 to target 6000 +0ms - aztec:prover-node:bond-manager [VERBOSE] Prover bond top-up of 4000 completed +25ms - aztec:prover-node:epoch-monitor [INFO] Started EpochMonitor maxPendingJobs=100 pollingIntervalMs=100 +0ms - aztec:prover-node:claims-monitor [INFO] Started ClaimsMonitor with prover address 0x3c44cdddb6a900fa2b585dd299e03d12fa4293bc maxPendingJobs=100 pollingIntervalMs=100 +0ms - aztec:prover-node [INFO] Started ProverNode pollingIntervalMs=100 maxPendingJobs=100 +0ms - aztec:full_prover_test:full_prover [WARN] Proofs are now enabled +244ms - aztec:prover-node:claims-monitor [VERBOSE] Found new claim for epoch 0 by 0x0000000000000000000000000000000000000000 +5ms - aztec:sequencer [VERBOSE] Sequencer sync check succeeded worldStateNumber=5 worldStateHash=0x247883468590886041ad708495d733453f0963d99f323181a78428115b82783a l2BlockSourceNumber=5 l2BlockSourceHash=0x247883468590886041ad708495d733453f0963d99f323181a78428115b82783a p2pNumber=5 l1ToL2MessageSourceNumber=5 +1s - aztec:archiver [VERBOSE] Retrieved no new L1 -> L2 messages between L1 blocks 30 and 31. +1s - aztec:archiver [VERBOSE] Updating the proven block number to 5 and epoch to 0 +2ms - aztec:archiver [VERBOSE] No blocks to retrieve from 30 to 31 +0ms - aztec:archiver [VERBOSE] Retrieved no new L1 -> L2 messages between L1 blocks 31 and 31. 
-made commitment key +3ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - executed multivariate-to-univarite reduction +153ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - computed opening proof +11ms - aztec:sequencer [VERBOSE] Sequencer sync check succeeded worldStateNumber=5 worldStateHash=0x247883468590886041ad708495d733453f0963d99f323181a78428115b82783a l2BlockSourceNumber=5 l2BlockSourceHash=0x247883468590886041ad708495d733453f0963d99f323181a78428115b82783a p2pNumber=5 l1ToL2MessageSourceNumber=5 +1s - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 0 +561ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 1 +34ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 2 +27ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 3 +24ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 4 +22ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 5 +19ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 6 +15ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 7 +16ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 8 +15ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 9 +9ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 10 +7ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 11 +5ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 12 +6ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 13 +3ms - aztec:sequencer [VERBOSE] Sequencer sync check succeeded worldStateNumber=5 worldStateHash=0x247883468590886041ad708495d733453f0963d99f323181a78428115b82783a l2BlockSourceNumber=5 l2BlockSourceHash=0x247883468590886041ad708495d733453f0963d99f323181a78428115b82783a p2pNumber=5 l1ToL2MessageSourceNumber=5 +1s - aztec:sequencer [VERBOSE] Sequencer sync check succeeded worldStateNumber=5 worldStateHash=0x247883468590886041ad708495d733453f0963d99f323181a78428115b82783a l2BlockSourceNumber=5 l2BlockSourceHash=0x247883468590886041ad708495d733453f0963d99f323181a78428115b82783a p2pNumber=5 l1ToL2MessageSourceNumber=5 +1s - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 0 +1s - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 1 +30ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 2 +17ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 3 +12ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 4 +8ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 5 +6ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 6 +10ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 7 +0ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 8 +5ms - aztec:pxe:bb-native-prover:bc0f00 
[INFO] client ivc proof BB out - completed sumcheck round 9 +5ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 10 +2ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 11 +2ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 12 +3ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 13 +2ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 14 +2ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 15 +2ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - am I here at all? +416ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - Ultra verified: 1 +9ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - Merge verified: 1 +2ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - ECCVM verified: 1 +37ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - Translator verified: 1 1 -Goblin verified: 1 +26ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - ensure valid proof: 1 -write proof and vk data to files.. +0ms - aztec:sequencer [VERBOSE] Sequencer sync check succeeded worldStateNumber=5 worldStateHash=0x247883468590886041ad708495d733453f0963d99f323181a78428115b82783a l2BlockSourceNumber=5 l2BlockSourceHash=0x247883468590886041ad708495d733453f0963d99f323181a78428115b82783a p2pNumber=5 l1ToL2MessageSourceNumber=5 +1s - aztec:pxe:bb-native-prover:bc0f00 [INFO] Generated IVC proof duration=35331.581261992455 eventName=circuit-proving +376ms - aztec:full_prover_test:full_prover [INFO] Verifying txs +49s - aztec:sequencer [VERBOSE] Sequencer sync check succeeded worldStateNumber=5 worldStateHash=0x247883468590886041ad708495d733453f0963d99f323181a78428115b82783a l2BlockSourceNumber=5 l2BlockSourceHash=0x247883468590886041ad708495d733453f0963d99f323181a78428115b82783a p2pNumber=5 l1ToL2MessageSourceNumber=5 +1s - aztec:sequencer [VERBOSE] Sequencer sync check succeeded worldStateNumber=5 worldStateHash=0x247883468590886041ad708495d733453f0963d99f323181a78428115b82783a l2BlockSourceNumber=5 l2BlockSourceHash=0x247883468590886041ad708495d733453f0963d99f323181a78428115b82783a p2pNumber=5 l1ToL2MessageSourceNumber=5 +1s - aztec:sequencer [VERBOSE] Sequencer sync check succeeded worldStateNumber=5 worldStateHash=0x247883468590886041ad708495d733453f0963d99f323181a78428115b82783a l2BlockSourceNumber=5 l2BlockSourceHash=0x247883468590886041ad708495d733453f0963d99f323181a78428115b82783a p2pNumber=5 l1ToL2MessageSourceNumber=5 +1s - aztec:sequencer [VERBOSE] Sequencer sync check succeeded worldStateNumber=5 worldStateHash=0x247883468590886041ad708495d733453f0963d99f323181a78428115b82783a l2BlockSourceNumber=5 l2BlockSourceHash=0x247883468590886041ad708495d733453f0963d99f323181a78428115b82783a p2pNumber=5 l1ToL2MessageSourceNumber=5 +1s - aztec:sequencer [VERBOSE] Sequencer sync check succeeded worldStateNumber=5 worldStateHash=0x247883468590886041ad708495d733453f0963d99f323181a78428115b82783a l2BlockSourceNumber=5 l2BlockSourceHash=0x247883468590886041ad708495d733453f0963d99f323181a78428115b82783a p2pNumber=5 l1ToL2MessageSourceNumber=5 +1s - aztec:sequencer [VERBOSE] Sequencer sync check succeeded worldStateNumber=5 worldStateHash=0x247883468590886041ad708495d733453f0963d99f323181a78428115b82783a l2BlockSourceNumber=5 
aztec:full_prover_test:full_prover [INFO] Sending private tx +6s
aztec:pxe_service_bc0f00 [INFO] Sending transaction 07897be8931c4abd36f752ef215f6ada2d48e373d6a8fb7b7fd7bab33a952d54 +52s
aztec:node [INFO] Received tx 07897be8931c4abd36f752ef215f6ada2d48e373d6a8fb7b7fd7bab33a952d54 +56s
aztec:sequencer [INFO] Building blockNumber=6 txCount=1 slotNumber=20 +10ms
aztec:sequencer [INFO] Submitted rollup block 6 with 1 transactions duration=94ms (Submitter: 0xf39fd6e51aad88f6f4ce6ab8827279cfffb92266) +132ms
[... archiver, world_state, note_processor and repeated sequencer sync-check output for block 6 elided ...]
aztec:full_prover_test:full_prover [INFO] Sending public tx +13s
aztec:pxe_service_bd0f00 [INFO] Sending transaction 066c31b890e14a6067f23e7466039d03693d47c4c4087e5e4b2b8b692fc7e52f +1m
aztec:sequencer [INFO] Building blockNumber=7 txCount=1 slotNumber=21 +13ms
aztec:sequencer [VERBOSE] Retrieved 0 quotes, slot: 21, epoch to prove: 1 +1ms
aztec:sequencer [INFO] Submitted rollup block 7 with 1 transactions duration=1318ms (Submitter: 0xf39fd6e51aad88f6f4ce6ab8827279cfffb92266) +128ms
[... archiver, world_state and repeated sequencer sync-check output for block 7 elided ...]
aztec:full_prover_test:full_prover [INFO] Both txs have been mined +13s
aztec:full_prover_test:full_prover [INFO] Advancing from epoch 1 to next epoch +7ms
aztec:js:cheat_codes [VERBOSE] Advanced to next epoch +2m
aztec:full_prover_test:full_prover [INFO] Waiting for prover node to submit quote for epoch 1 +6ms
aztec:prover-node [INFO] Sending quote for epoch epochToProve=1 validUntilSlot=9007199254740991 bondAmount=1000 prover=0x3c44cdddb6a900fa2b585dd299e03d12fa4293bc basisPointFee=100 +2m
aztec:p2p [INFO] Broadcasting epoch proof quote quote=[object Object] signature=[object Object] +2m
aztec:full_prover_test:full_prover [INFO] Sending tx to trigger a new block that includes the quote from the prover node +1s
[... private simulation of SchnorrAccount:entrypoint and Token:transfer elided ...]
aztec:pxe:bb-native-prover:bc0f00 [INFO] Computing VK of App(SchnorrAccount:entrypoint) circuit...
[... repeated sequencer sync-check output at block 7 elided ...]
aztec:pxe:bb-native-prover:bc0f00 [INFO] Generated App(SchnorrAccount:entrypoint) VK in 2816 ms +3s
aztec:pxe:bb-native-prover:bc0f00 [INFO] Generated App(Token:transfer) VK in 2882 ms +3s
aztec:pxe:bb-native-prover:bc0f00 [INFO] Generating Client IVC proof +2s
aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - Executing BB with: client_ivc_prove_output_all_msgpack -o /tmp/bb-nz67DO/tmp-FoGz42 -b /tmp/bb-nz67DO/tmp-FoGz42/acir.msgpack -w /tmp/bb-nz67DO/tmp-FoGz42/witnesses.msgpack -v +0ms
[... CRS initialization, decider proving key construction and sumcheck round output for the client IVC proof elided ...]
aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - executing pcs opening rounds...
+17ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - made commitment key +14ms - aztec:sequencer [VERBOSE] Sequencer sync check succeeded worldStateNumber=7 worldStateHash=0x0caa1f0a94ae077ae264266bce65ea36113de82fe509b9e13a113b6c702c78f8 l2BlockSourceNumber=7 l2BlockSourceHash=0x0caa1f0a94ae077ae264266bce65ea36113de82fe509b9e13a113b6c702c78f8 p2pNumber=7 l1ToL2MessageSourceNumber=7 +1s - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - executed multivariate-to-univarite reduction +574ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - computed opening proof +142ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - num_public_inputs of the last folding proof BEFORE SUBTRACTION696 -num_public_inputs of the last folding proof 664 +124ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - DeciderProvingKey(Circuit&) -creating decider proving key +119ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - Finalized circuit size: 51411 -Log dyadic circuit size: 16 +2ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - constructing proving key +0ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - constructing prover instance after trace populate -constructing databus polynomials +38ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - constructing lookup table polynomials -constructing lookup read counts +2ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - created oink prover +117ms - aztec:sequencer [VERBOSE] Sequencer sync check succeeded worldStateNumber=7 worldStateHash=0x0caa1f0a94ae077ae264266bce65ea36113de82fe509b9e13a113b6c702c78f8 l2BlockSourceNumber=7 l2BlockSourceHash=0x0caa1f0a94ae077ae264266bce65ea36113de82fe509b9e13a113b6c702c78f8 p2pNumber=7 l1ToL2MessageSourceNumber=7 +1s - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - created oink proof +146ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - created decider prover -executing relation checking rounds... 
+1ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 0 +21ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 1 +6ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 2 +5ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 3 +3ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 4 +1ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 5 +2ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 6 +1ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 7 +2ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 8 +1ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 9 +1ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 10 +1ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 11 +1ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 12 +1ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 13 +1ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 14 +1ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 15 +2ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - executing pcs opening rounds... +0ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - made commitment key +3ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - executed multivariate-to-univarite reduction +147ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - computed opening proof +20ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 0 +587ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 1 +35ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 2 +27ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 3 +32ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 4 +15ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 5 +17ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 6 +18ms - aztec:sequencer [VERBOSE] Sequencer sync check succeeded worldStateNumber=7 worldStateHash=0x0caa1f0a94ae077ae264266bce65ea36113de82fe509b9e13a113b6c702c78f8 l2BlockSourceNumber=7 l2BlockSourceHash=0x0caa1f0a94ae077ae264266bce65ea36113de82fe509b9e13a113b6c702c78f8 p2pNumber=7 l1ToL2MessageSourceNumber=7 +1s - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 7 +15ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 8 +16ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 9 +10ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 10 +7ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - 
completed sumcheck round 11 +6ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 12 +7ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 13 +2ms - aztec:sequencer [VERBOSE] Sequencer sync check succeeded worldStateNumber=7 worldStateHash=0x0caa1f0a94ae077ae264266bce65ea36113de82fe509b9e13a113b6c702c78f8 l2BlockSourceNumber=7 l2BlockSourceHash=0x0caa1f0a94ae077ae264266bce65ea36113de82fe509b9e13a113b6c702c78f8 p2pNumber=7 l1ToL2MessageSourceNumber=7 +1s - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 0 +1s - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 1 +28ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 2 +17ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 3 +11ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 4 +9ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 5 +8ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 6 +4ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 7 +6ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 8 +5ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 9 +3ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 10 +4ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 11 +3ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 12 +2ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 13 +3ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 14 +2ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - completed sumcheck round 15 +2ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - am I here at all? +434ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - Ultra verified: 1 +17ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - Merge verified: 1 +1ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - ECCVM verified: 1 +37ms - aztec:sequencer [VERBOSE] Sequencer sync check succeeded worldStateNumber=7 worldStateHash=0x0caa1f0a94ae077ae264266bce65ea36113de82fe509b9e13a113b6c702c78f8 l2BlockSourceNumber=7 l2BlockSourceHash=0x0caa1f0a94ae077ae264266bce65ea36113de82fe509b9e13a113b6c702c78f8 p2pNumber=7 l1ToL2MessageSourceNumber=7 +1s - aztec:pxe:bb-native-prover:bc0f00 [INFO] client ivc proof BB out - Translator verified: 1 1 -Goblin verified: 1 -ensure valid proof: 1 -write proof and vk data to files.. 
+27ms - aztec:pxe:bb-native-prover:bc0f00 [INFO] Generated IVC proof duration=26844.259041070938 eventName=circuit-proving +388ms - aztec:pxe_service_bc0f00 [INFO] Sending transaction 124b28b89139b80f3d23a7602e0880bdd24fb5c154b138eaa4bc397aab567f96 +35s - aztec:node [INFO] Received tx 124b28b89139b80f3d23a7602e0880bdd24fb5c154b138eaa4bc397aab567f96 +50s - aztec:sequencer [VERBOSE] Sequencer sync check succeeded worldStateNumber=7 worldStateHash=0x0caa1f0a94ae077ae264266bce65ea36113de82fe509b9e13a113b6c702c78f8 l2BlockSourceNumber=7 l2BlockSourceHash=0x0caa1f0a94ae077ae264266bce65ea36113de82fe509b9e13a113b6c702c78f8 p2pNumber=7 l1ToL2MessageSourceNumber=7 +1s - aztec:sequencer [VERBOSE] Sequencer sync check succeeded worldStateNumber=7 worldStateHash=0x0caa1f0a94ae077ae264266bce65ea36113de82fe509b9e13a113b6c702c78f8 l2BlockSourceNumber=7 l2BlockSourceHash=0x0caa1f0a94ae077ae264266bce65ea36113de82fe509b9e13a113b6c702c78f8 p2pNumber=7 l1ToL2MessageSourceNumber=7 +1s - aztec:tx_pool [INFO] Adding tx with id 124b28b89139b80f3d23a7602e0880bdd24fb5c154b138eaa4bc397aab567f96 eventName=tx-added-to-pool txHash=124b28b89139b80f3d23a7602e0880bdd24fb5c154b138eaa4bc397aab567f96 noteEncryptedLogCount=2 encryptedLogCount=1 unencryptedLogCount=0 noteEncryptedLogSize=1042 encryptedLogSize=492 unencryptedLogSize=8 newCommitmentCount=2 newNullifierCount=2 proofSize=174512 size=195248 feePaymentMethod=none classRegisteredCount=0 +50s - aztec:pxe_service_bc0f00 [INFO] Sent transaction 124b28b89139b80f3d23a7602e0880bdd24fb5c154b138eaa4bc397aab567f96 +3s - aztec:sequencer [VERBOSE] Sequencer sync check succeeded worldStateNumber=7 worldStateHash=0x0caa1f0a94ae077ae264266bce65ea36113de82fe509b9e13a113b6c702c78f8 l2BlockSourceNumber=7 l2BlockSourceHash=0x0caa1f0a94ae077ae264266bce65ea36113de82fe509b9e13a113b6c702c78f8 p2pNumber=7 l1ToL2MessageSourceNumber=7 +1s - aztec:sequencer [INFO] Building blockNumber=8 txCount=1 slotNumber=33 +11ms - aztec:sequencer [VERBOSE] Retrieved 0 L1 to L2 messages for block 8 +0ms - aztec:sequencer-client:block_builder_light [VERBOSE] Starting new block numTxs=2 globalVariables=[object Object] l1ToL2Messages= +0ms - aztec:sequencer-client:block_builder_light [VERBOSE] Adding new tx to block txHash=124b28b89139b80f3d23a7602e0880bdd24fb5c154b138eaa4bc397aab567f96 +5ms - aztec:sequencer-client:block_builder_light [VERBOSE] Setting block as completed and adding 1 padding txs +45ms - aztec:sequencer-client:block_builder_light [VERBOSE] Adding new tx to block txHash=0000000000000000000000000000000000000000000000000000000000000000 +1ms - aztec:sequencer-client:block_builder_light [VERBOSE] Finalising block +31ms - aztec:sequencer [VERBOSE] Assembled block 8 (txEffectsHash: 0032be708d87bdcee2d4529d5bec29bf626953761e283e10af2c6db2472f309b) eventName=l2-block-built creator=0xf39fd6e51aad88f6f4ce6ab8827279cfffb92266 duration=92.9155410528183 publicProcessDuration=46.41527700424194 rollupCircuitsDuration=89.81721198558807 txCount=1 blockNumber=8 blockTimestamp=1730722465 noteEncryptedLogLength=1042 noteEncryptedLogCount=2 encryptedLogLength=492 encryptedLogCount=1 unencryptedLogCount=0 unencryptedLogSize=8 +93ms - aztec:sequencer [VERBOSE] Collecting attestations +0ms - aztec:sequencer [VERBOSE] Attestations collected +2ms - aztec:sequencer [VERBOSE] Collecting proof quotes +0ms - aztec:sequencer [VERBOSE] Retrieved 1 quotes, slot: 33, epoch to prove: 1 +1ms - aztec:sequencer [VERBOSE] EpochProofQuotePayload { epochToProve: 1, validUntilSlot: 9007199254740991, bondAmount: 1000, prover: 
0x3c44cdddb6a900fa2b585dd299e03d12fa4293bc, basisPointFee: 100 } +1ms - aztec:sequencer [VERBOSE] Using proof quote EpochProofQuotePayload { epochToProve: 1, validUntilSlot: 9007199254740991, bondAmount: 1000, prover: 0x3c44cdddb6a900fa2b585dd299e03d12fa4293bc, basisPointFee: 100 } +2ms - aztec:sequencer:publisher [VERBOSE] Submitting propose transaction +48s - aztec:sequencer:publisher [INFO] ProposeAndClaim +11ms - aztec:sequencer:publisher [INFO] EpochProofQuotePayload { epochToProve: 1, validUntilSlot: 9007199254740991, bondAmount: 1000, prover: 0x3c44cdddb6a900fa2b585dd299e03d12fa4293bc, basisPointFee: 100 } +0ms - aztec:prover-node:claims-monitor [VERBOSE] Found new claim for epoch 1 by 0x3c44cdddb6a900fa2b585dd299e03d12fa4293bc +2m - aztec:prover-node [VERBOSE] Creating proving job for epoch 1 for block range 6 to 7 +39s - aztec:epoch-proving-job [INFO] Starting epoch proving job epochSize=2 epochNumber=1 uuid=437af23b-1395-41d0-acbe-1e6905a31db3 +0ms - aztec:prover:proving-orchestrator [INFO] Starting epoch 1 with 2 blocks +0ms - aztec:epoch-proving-job [VERBOSE] Starting block processing number=6 blockHash=0x2830caea817f12eff4d8403899d64d48c6afa76b3e075b7336c48eb3cecf539e lastArchive=0x027b0406fe5a8e6cba7b3edc27420a4ab878c2ee34ad2fa9bd909e0516246066 noteHashTreeRoot=0x28d9ef7b9247a9a10bfdb9d4bd9a7ee8db9f6976648f148778981a9998e6313b nullifierTreeRoot=0x075b1b73c5f470e261283a225454093050810a429af69b2769c1e363bb95f11b publicDataTreeRoot=0x00f38a48ddf068b8e56e95f303e7c1343df1276ccd73a8949055d3e38a8857a7 previousHeader=0x247883468590886041ad708495d733453f0963d99f323181a78428115b82783a uuid=437af23b-1395-41d0-acbe-1e6905a31db3 chainId=0x0000000000000000000000000000000000000000000000000000000000007a69 version=0x0000000000000000000000000000000000000000000000000000000000000001 blockNumber=0x0000000000000000000000000000000000000000000000000000000000000006 slotNumber=0x0000000000000000000000000000000000000000000000000000000000000014 timestamp=0x000000000000000000000000000000000000000000000000000000006728b969 coinbase=0x0100000001000000010000000100000001000000 feeRecipient=0x0000000000000000000000000000000000000000000000000000000000000000 gasFees=[object Object] +3ms - aztec:prover:proving-orchestrator [INFO] Starting block 0x0000000000000000000000000000000000000000000000000000000000000006 for slot 0x0000000000000000000000000000000000000000000000000000000000000014 with 2 transactions +4ms - aztec:prover:proving-orchestrator [INFO] Received transaction: 07897be8931c4abd36f752ef215f6ada2d48e373d6a8fb7b7fd7bab33a952d54 +10ms - aztec:sequencer:publisher [INFO] Published L2 block to L1 rollup contract gasPrice=1007844389 gasUsed=558746 transactionHash=0x2edf715ef6cb0742cd71c9df44e96b146da4326ee23273fad51e0401b5e10030 calldataGas=37448 calldataSize=3140 sender=0xf39fd6e51aad88f6f4ce6ab8827279cfffb92266 txCount=1 blockNumber=8 blockTimestamp=1730722465 noteEncryptedLogLength=1042 noteEncryptedLogCount=2 encryptedLogLength=492 encryptedLogCount=1 unencryptedLogCount=0 unencryptedLogSize=8 eventName=rollup-published-to-l1 slotNumber=33 blockHash=0x1bd2250736e76438fab780a4ac1e7c02444601ec879f32d09a331381da68448b +219ms - aztec:sequencer [INFO] Submitted rollup block 8 with 1 transactions duration=93ms (Submitter: 0xf39fd6e51aad88f6f4ce6ab8827279cfffb92266) +236ms - aztec:bb-prover [INFO] Generated witness circuitName=base-parity duration=182.22486805915833 inputSize=160 outputSize=96 eventName=circuit-witness-generation +2m - aztec:bb-prover [INFO] Generated witness circuitName=base-parity 
duration=188.72121703624725 inputSize=160 outputSize=96 eventName=circuit-witness-generation +7ms - aztec:epoch-proving-job [VERBOSE] Processed all txs for block blockNumber=6 blockHash=0x2830caea817f12eff4d8403899d64d48c6afa76b3e075b7336c48eb3cecf539e uuid=437af23b-1395-41d0-acbe-1e6905a31db3 +208ms - aztec:prover:proving-orchestrator [VERBOSE] Block 0x0000000000000000000000000000000000000000000000000000000000000006 completed. Assembling header. +359ms - aztec:prover:proving-orchestrator [VERBOSE] Updating archive tree with block 6 header 0x2830caea817f12eff4d8403899d64d48c6afa76b3e075b7336c48eb3cecf539e +5ms - aztec:prover:proving-orchestrator [VERBOSE] Orchestrator finalised block 6 +3ms - aztec:epoch-proving-job [VERBOSE] Starting block processing number=7 blockHash=0x0caa1f0a94ae077ae264266bce65ea36113de82fe509b9e13a113b6c702c78f8 lastArchive=0x0eec27012e1397dbb95c6b0b07f5885f41af317d8e7e77d1bf47e692f9645132 noteHashTreeRoot=0x28d9ef7b9247a9a10bfdb9d4bd9a7ee8db9f6976648f148778981a9998e6313b nullifierTreeRoot=0x0714fa2421c49c67440ece58802c5e6c351b0a9a90c8ecac53a63bd4a11f04c4 publicDataTreeRoot=0x06d0f7883dc0f5ad9665762d46b917aef7e429e1dc05923b445724c00415b75c previousHeader=0x2830caea817f12eff4d8403899d64d48c6afa76b3e075b7336c48eb3cecf539e uuid=437af23b-1395-41d0-acbe-1e6905a31db3 chainId=0x0000000000000000000000000000000000000000000000000000000000007a69 version=0x0000000000000000000000000000000000000000000000000000000000000001 blockNumber=0x0000000000000000000000000000000000000000000000000000000000000007 slotNumber=0x0000000000000000000000000000000000000000000000000000000000000015 timestamp=0x000000000000000000000000000000000000000000000000000000006728b981 coinbase=0x0100000001000000010000000100000001000000 feeRecipient=0x0000000000000000000000000000000000000000000000000000000000000000 gasFees=[object Object] +176ms - aztec:prover:proving-orchestrator [INFO] Starting block 0x0000000000000000000000000000000000000000000000000000000000000007 for slot 0x0000000000000000000000000000000000000000000000000000000000000015 with 2 transactions +7ms - aztec:sequencer [VERBOSE] Processing tx 066c31b890e14a6067f23e7466039d03693d47c4c4087e5e4b2b8b692fc7e52f +0ms - aztec:simulator:public_executor [VERBOSE] [AVM] Executing public external function undefined (via dispatch)@0x1b99a46103bf947d5e422e7bf39fedcc759d29b98ff583cd9b4c5ea79e28f30c with 12000000 allocated L2 gas. +50s - aztec:archiver [VERBOSE] Retrieved no new L1 -> L2 messages between L1 blocks 39 and 39. +40s - aztec:archiver [VERBOSE] Retrieved no new L1 -> L2 messages between L1 blocks 39 and 39. +40s - aztec:archiver [VERBOSE] Updating the proven block number to 5 and epoch to 0 +23ms - aztec:simulator:public_executor [VERBOSE] [AVM] undefined (via dispatch) returned, reverted: false. eventName=avm-simulation appCircuitName=undefined (via dispatch) duration=247.45395696163177 bytecodeSize=37708 +248ms - aztec:simulator:public_executor [VERBOSE] [AVM] undefined (via dispatch) simulation complete. Reverted=false. Consumed 98016 L2 gas, ending with 11901984 L2 gas left. 
+1ms - aztec:archiver [VERBOSE] Updating the proven block number to 5 and epoch to 0 +40ms - aztec:cheat_codes:eth [VERBOSE] Mined 1 L1 blocks +48s - aztec:cheat_codes:eth [VERBOSE] Warped L1 timestamp to 1730722489 +0ms - aztec:utils:watcher [INFO] Slot 33 was filled, jumped to next slot +48s - aztec:archiver [VERBOSE] Updating the proven block number to 5 and epoch to 0 +90ms - aztec:archiver [VERBOSE] Processed 1 new L2 blocks up to 8 +0ms - aztec:archiver [VERBOSE] Updating the proven block number to 5 and epoch to 0 +75ms - aztec:archiver [VERBOSE] Processed 1 new L2 blocks up to 8 +0ms - aztec:world_state [VERBOSE] Handling new L2 blocks from 8 to 8 +49s - aztec:world_state [VERBOSE] Handled new L2 block eventName=l2-block-handled duration=16.504996061325073 unfinalisedBlockNumber=8 finalisedBlockNumber=5 oldestHistoricBlock=1 txCount=1 blockNumber=8 blockTimestamp=1730722465 noteEncryptedLogLength=1042 noteEncryptedLogCount=2 encryptedLogLength=492 encryptedLogCount=1 unencryptedLogCount=0 unencryptedLogSize=8 +17ms - aztec:sequencer [VERBOSE] Sequencer sync check succeeded worldStateNumber=8 worldStateHash=0x1bd2250736e76438fab780a4ac1e7c02444601ec879f32d09a331381da68448b l2BlockSourceNumber=8 l2BlockSourceHash=0x1bd2250736e76438fab780a4ac1e7c02444601ec879f32d09a331381da68448b p2pNumber=8 l1ToL2MessageSourceNumber=8 +1s - aztec:simulator:unconstrained_execution [VERBOSE] Executing unconstrained function 0x1b99a46103bf947d5e422e7bf39fedcc759d29b98ff583cd9b4c5ea79e28f30c:0x00000000(compute_note_hash_and_optionally_a_nullifier) +0ms - aztec:simulator:unconstrained_execution [VERBOSE] Executing unconstrained function 0x1b99a46103bf947d5e422e7bf39fedcc759d29b98ff583cd9b4c5ea79e28f30c:0x00000000(compute_note_hash_and_optionally_a_nullifier) +0ms - aztec:note_processor [VERBOSE] Added incoming note for contract 0x1b99a46103bf947d5e422e7bf39fedcc759d29b98ff583cd9b4c5ea79e28f30c at slot 0x14dc51e784653ae0b921e36abc5436089ac7dbeb89c13a15c83238eb9fcf0841 with nullifier 0x0a4a2b220dffa5d577c2cdac69351abcf35ba64be2f7fee8153b3a4ffed4c07f +1m - aztec:note_processor [VERBOSE] Added outgoing note for contract 0x1b99a46103bf947d5e422e7bf39fedcc759d29b98ff583cd9b4c5ea79e28f30c at slot 0x14dc51e784653ae0b921e36abc5436089ac7dbeb89c13a15c83238eb9fcf0841 +0ms - aztec:note_processor [VERBOSE] Added outgoing note for contract 0x1b99a46103bf947d5e422e7bf39fedcc759d29b98ff583cd9b4c5ea79e28f30c at slot 0x2fce74e6edc05fa6cde410c374f1643470f06d24252aa16676f7a5ebbeddb154 +0ms - aztec:note_processor [VERBOSE] Removed note for contract 0x1b99a46103bf947d5e422e7bf39fedcc759d29b98ff583cd9b4c5ea79e28f30c at slot 0x14dc51e784653ae0b921e36abc5436089ac7dbeb89c13a15c83238eb9fcf0841 with nullifier 0x245fd71c0e990b8e76f9b36908b9178c63e760e99dc6d7e8550d87a336a04c3a +3ms - aztec:simulator:unconstrained_execution [VERBOSE] Executing unconstrained function 0x1b99a46103bf947d5e422e7bf39fedcc759d29b98ff583cd9b4c5ea79e28f30c:0x00000000(compute_note_hash_and_optionally_a_nullifier) +0ms - aztec:simulator:unconstrained_execution [VERBOSE] Executing unconstrained function 0x1b99a46103bf947d5e422e7bf39fedcc759d29b98ff583cd9b4c5ea79e28f30c:0x00000000(compute_note_hash_and_optionally_a_nullifier) +0ms - aztec:simulator:unconstrained_execution [VERBOSE] Executing unconstrained function 0x1b99a46103bf947d5e422e7bf39fedcc759d29b98ff583cd9b4c5ea79e28f30c:0x00000000(compute_note_hash_and_optionally_a_nullifier) +0ms - aztec:simulator:unconstrained_execution [VERBOSE] Executing unconstrained function 
0x1b99a46103bf947d5e422e7bf39fedcc759d29b98ff583cd9b4c5ea79e28f30c:0x00000000(compute_note_hash_and_optionally_a_nullifier) +0ms - aztec:simulator:unconstrained_execution [VERBOSE] Executing unconstrained function 0x1b99a46103bf947d5e422e7bf39fedcc759d29b98ff583cd9b4c5ea79e28f30c:0x00000000(compute_note_hash_and_optionally_a_nullifier) +0ms - aztec:simulator:unconstrained_execution [VERBOSE] Executing unconstrained function 0x1b99a46103bf947d5e422e7bf39fedcc759d29b98ff583cd9b4c5ea79e28f30c:0x00000000(compute_note_hash_and_optionally_a_nullifier) +0ms - aztec:note_processor [VERBOSE] Added incoming note for contract 0x1b99a46103bf947d5e422e7bf39fedcc759d29b98ff583cd9b4c5ea79e28f30c at slot 0x2fce74e6edc05fa6cde410c374f1643470f06d24252aa16676f7a5ebbeddb154 with nullifier 0x03f6a926a082287362aea367c34f9705a8f25b15795bdd01c90902b1d1596aa0 +1m - aztec:note_processor [VERBOSE] Added incoming note for contract 0x1b99a46103bf947d5e422e7bf39fedcc759d29b98ff583cd9b4c5ea79e28f30c at slot 0x14dc51e784653ae0b921e36abc5436089ac7dbeb89c13a15c83238eb9fcf0841 with nullifier 0x0a4a2b220dffa5d577c2cdac69351abcf35ba64be2f7fee8153b3a4ffed4c07f +1m - aztec:note_processor [VERBOSE] Added outgoing note for contract 0x1b99a46103bf947d5e422e7bf39fedcc759d29b98ff583cd9b4c5ea79e28f30c at slot 0x14dc51e784653ae0b921e36abc5436089ac7dbeb89c13a15c83238eb9fcf0841 +0ms - aztec:note_processor [VERBOSE] Added outgoing note for contract 0x1b99a46103bf947d5e422e7bf39fedcc759d29b98ff583cd9b4c5ea79e28f30c at slot 0x2fce74e6edc05fa6cde410c374f1643470f06d24252aa16676f7a5ebbeddb154 +0ms - aztec:note_processor [VERBOSE] Added incoming note for contract 0x1b99a46103bf947d5e422e7bf39fedcc759d29b98ff583cd9b4c5ea79e28f30c at slot 0x14dc51e784653ae0b921e36abc5436089ac7dbeb89c13a15c83238eb9fcf0841 with nullifier 0x0a4a2b220dffa5d577c2cdac69351abcf35ba64be2f7fee8153b3a4ffed4c07f +1m - aztec:note_processor [VERBOSE] Added outgoing note for contract 0x1b99a46103bf947d5e422e7bf39fedcc759d29b98ff583cd9b4c5ea79e28f30c at slot 0x14dc51e784653ae0b921e36abc5436089ac7dbeb89c13a15c83238eb9fcf0841 +0ms - aztec:note_processor [VERBOSE] Added outgoing note for contract 0x1b99a46103bf947d5e422e7bf39fedcc759d29b98ff583cd9b4c5ea79e28f30c at slot 0x2fce74e6edc05fa6cde410c374f1643470f06d24252aa16676f7a5ebbeddb154 +0ms - aztec:note_processor [VERBOSE] Removed note for contract 0x1b99a46103bf947d5e422e7bf39fedcc759d29b98ff583cd9b4c5ea79e28f30c at slot 0x14dc51e784653ae0b921e36abc5436089ac7dbeb89c13a15c83238eb9fcf0841 with nullifier 0x245fd71c0e990b8e76f9b36908b9178c63e760e99dc6d7e8550d87a336a04c3a +50ms - aztec:note_processor [VERBOSE] Removed note for contract 0x1b99a46103bf947d5e422e7bf39fedcc759d29b98ff583cd9b4c5ea79e28f30c at slot 0x14dc51e784653ae0b921e36abc5436089ac7dbeb89c13a15c83238eb9fcf0841 with nullifier 0x245fd71c0e990b8e76f9b36908b9178c63e760e99dc6d7e8550d87a336a04c3a +52ms - aztec:simulator:unconstrained_execution [VERBOSE] Executing unconstrained function 0x1b99a46103bf947d5e422e7bf39fedcc759d29b98ff583cd9b4c5ea79e28f30c:0x00000000(compute_note_hash_and_optionally_a_nullifier) +0ms - aztec:simulator:unconstrained_execution [VERBOSE] Executing unconstrained function 0x1b99a46103bf947d5e422e7bf39fedcc759d29b98ff583cd9b4c5ea79e28f30c:0x00000000(compute_note_hash_and_optionally_a_nullifier) +0ms - aztec:simulator:unconstrained_execution [VERBOSE] Executing unconstrained function 0x1b99a46103bf947d5e422e7bf39fedcc759d29b98ff583cd9b4c5ea79e28f30c:0x00000000(compute_note_hash_and_optionally_a_nullifier) +0ms - aztec:simulator:unconstrained_execution 
[VERBOSE] Executing unconstrained function 0x1b99a46103bf947d5e422e7bf39fedcc759d29b98ff583cd9b4c5ea79e28f30c:0x00000000(compute_note_hash_and_optionally_a_nullifier) +0ms - aztec:note_processor [VERBOSE] Added incoming note for contract 0x1b99a46103bf947d5e422e7bf39fedcc759d29b98ff583cd9b4c5ea79e28f30c at slot 0x2fce74e6edc05fa6cde410c374f1643470f06d24252aa16676f7a5ebbeddb154 with nullifier 0x03f6a926a082287362aea367c34f9705a8f25b15795bdd01c90902b1d1596aa0 +1m - aztec:note_processor [VERBOSE] Added incoming note for contract 0x1b99a46103bf947d5e422e7bf39fedcc759d29b98ff583cd9b4c5ea79e28f30c at slot 0x2fce74e6edc05fa6cde410c374f1643470f06d24252aa16676f7a5ebbeddb154 with nullifier 0x03f6a926a082287362aea367c34f9705a8f25b15795bdd01c90902b1d1596aa0 +1m - aztec:archiver [VERBOSE] Retrieved no new L1 -> L2 messages between L1 blocks 40 and 40. +1s - aztec:archiver [VERBOSE] Retrieved no new L1 -> L2 messages between L1 blocks 40 and 40. +1s - aztec:archiver [VERBOSE] Updating the proven block number to 5 and epoch to 0 +3ms - aztec:archiver [VERBOSE] No blocks to retrieve from 40 to 40 +0ms - aztec:archiver [VERBOSE] Updating the proven block number to 5 and epoch to 0 +3ms - aztec:archiver [VERBOSE] No blocks to retrieve from 40 to 40 +0ms - aztec:prover:proving-orchestrator [INFO] Received transaction: 066c31b890e14a6067f23e7466039d03693d47c4c4087e5e4b2b8b692fc7e52f +2s - aztec:epoch-proving-job [VERBOSE] Processed all txs for block blockNumber=7 blockHash=0x0caa1f0a94ae077ae264266bce65ea36113de82fe509b9e13a113b6c702c78f8 uuid=437af23b-1395-41d0-acbe-1e6905a31db3 +2s - aztec:prover:proving-orchestrator [VERBOSE] Block 0x0000000000000000000000000000000000000000000000000000000000000007 completed. Assembling header. +93ms - aztec:prover:proving-orchestrator [VERBOSE] Updating archive tree with block 7 header 0x0caa1f0a94ae077ae264266bce65ea36113de82fe509b9e13a113b6c702c78f8 +4ms - aztec:prover:proving-orchestrator [VERBOSE] Orchestrator finalised block 7 +3ms - aztec:sequencer [VERBOSE] Sequencer sync check succeeded worldStateNumber=8 worldStateHash=0x1bd2250736e76438fab780a4ac1e7c02444601ec879f32d09a331381da68448b l2BlockSourceNumber=8 l2BlockSourceHash=0x1bd2250736e76438fab780a4ac1e7c02444601ec879f32d09a331381da68448b p2pNumber=8 l1ToL2MessageSourceNumber=8 +1s - aztec:sequencer [VERBOSE] Sequencer sync check succeeded worldStateNumber=8 worldStateHash=0x1bd2250736e76438fab780a4ac1e7c02444601ec879f32d09a331381da68448b l2BlockSourceNumber=8 l2BlockSourceHash=0x1bd2250736e76438fab780a4ac1e7c02444601ec879f32d09a331381da68448b p2pNumber=8 l1ToL2MessageSourceNumber=8 +1s - aztec:bb-prover [INFO] Generated proof for BaseParityArtifact in 3373 ms, size: 463 fields circuitName=base-parity circuitSize=65536 duration=3372.366648077965 inputSize=96 proofSize=14916 eventName=circuit-proving numPublicInputs=19 +3s - aztec:bb-prover [INFO] Successfully verified proof from key in 110.77479696273804 ms +115ms - aztec:prover-client:prover-agent [VERBOSE] Processed proving job id=d8120000 type=BASE_PARITY duration=3693.593539953232ms +2m - aztec:bb-prover [INFO] Generated witness circuitName=base-parity duration=103.11092901229858 inputSize=160 outputSize=96 eventName=circuit-witness-generation +155ms - aztec:bb-prover [INFO] Generated proof for BaseParityArtifact in 3612 ms, size: 463 fields circuitName=base-parity circuitSize=65536 duration=3611.4261549711227 inputSize=96 proofSize=14916 eventName=circuit-proving numPublicInputs=19 +47ms - aztec:bb-prover [INFO] Successfully verified proof from key in 
92.37063300609589 ms +95ms - aztec:prover-client:prover-agent [VERBOSE] Processed proving job id=d9120000 type=BASE_PARITY duration=3990.225529074669ms +297ms - aztec:bb-prover [INFO] Generated witness circuitName=base-parity duration=103.00989198684692 inputSize=160 outputSize=96 eventName=circuit-witness-generation +157ms - aztec:sequencer [VERBOSE] Sequencer sync check succeeded worldStateNumber=8 worldStateHash=0x1bd2250736e76438fab780a4ac1e7c02444601ec879f32d09a331381da68448b l2BlockSourceNumber=8 l2BlockSourceHash=0x1bd2250736e76438fab780a4ac1e7c02444601ec879f32d09a331381da68448b p2pNumber=8 l1ToL2MessageSourceNumber=8 +1s - aztec:sequencer [VERBOSE] Sequencer sync check succeeded worldStateNumber=8 worldStateHash=0x1bd2250736e76438fab780a4ac1e7c02444601ec879f32d09a331381da68448b l2BlockSourceNumber=8 l2BlockSourceHash=0x1bd2250736e76438fab780a4ac1e7c02444601ec879f32d09a331381da68448b p2pNumber=8 l1ToL2MessageSourceNumber=8 +1s - aztec:sequencer [VERBOSE] Sequencer sync check succeeded worldStateNumber=8 worldStateHash=0x1bd2250736e76438fab780a4ac1e7c02444601ec879f32d09a331381da68448b l2BlockSourceNumber=8 l2BlockSourceHash=0x1bd2250736e76438fab780a4ac1e7c02444601ec879f32d09a331381da68448b p2pNumber=8 l1ToL2MessageSourceNumber=8 +1s - aztec:bb-prover [INFO] Generated proof for BaseParityArtifact in 3176 ms, size: 463 fields circuitName=base-parity circuitSize=65536 duration=3175.549411058426 inputSize=96 proofSize=14916 eventName=circuit-proving numPublicInputs=19 +3s - aztec:bb-prover [INFO] Successfully verified proof from key in 110.10255098342896 ms +117ms - aztec:prover-client:prover-agent [VERBOSE] Processed proving job id=da120000 type=BASE_PARITY duration=3411.370728969574ms +3s - aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - Executing BB with: prove_tube -o /tmp/bb-wPFf4H/tmp-VsTQLz -v +112ms - aztec:bb-prover [INFO] Generated proof for BaseParityArtifact in 3168 ms, size: 463 fields circuitName=base-parity circuitSize=65536 duration=3167.956606030464 inputSize=96 proofSize=14916 eventName=circuit-proving numPublicInputs=19 +59ms - aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - bb command is: prove_tube -PLEASE BE HERE -using cached bn254 crs of size 53687092 at "/mnt/user-data/mara/.bb-crs/bn254_g1.dat" +60ms - aztec:sequencer [VERBOSE] Sequencer sync check succeeded worldStateNumber=8 worldStateHash=0x1bd2250736e76438fab780a4ac1e7c02444601ec879f32d09a331381da68448b l2BlockSourceNumber=8 l2BlockSourceHash=0x1bd2250736e76438fab780a4ac1e7c02444601ec879f32d09a331381da68448b p2pNumber=8 l1ToL2MessageSourceNumber=8 +1s - aztec:bb-prover [INFO] Successfully verified proof from key in 103.10861790180206 ms +45ms - aztec:prover-client:prover-agent [VERBOSE] Processed proving job id=db120000 type=BASE_PARITY duration=3386.8516100645065ms +276ms - aztec:bb-prover [INFO] Generated witness circuitName=empty-nested duration=59.791340947151184 inputSize=0 outputSize=0 eventName=circuit-witness-generation +114ms - aztec:sequencer [VERBOSE] Sequencer sync check succeeded worldStateNumber=8 worldStateHash=0x1bd2250736e76438fab780a4ac1e7c02444601ec879f32d09a331381da68448b l2BlockSourceNumber=8 l2BlockSourceHash=0x1bd2250736e76438fab780a4ac1e7c02444601ec879f32d09a331381da68448b p2pNumber=8 l1ToL2MessageSourceNumber=8 +1s - aztec:sequencer [VERBOSE] Sequencer sync check succeeded worldStateNumber=8 worldStateHash=0x1bd2250736e76438fab780a4ac1e7c02444601ec879f32d09a331381da68448b l2BlockSourceNumber=8 
l2BlockSourceHash=0x1bd2250736e76438fab780a4ac1e7c02444601ec879f32d09a331381da68448b p2pNumber=8 l1ToL2MessageSourceNumber=8 +1s - aztec:full_prover_test:full_prover [INFO] Awaiting proof for the previous epoch +49s - aztec:bb-prover [INFO] Generated proof for EmptyNestedArtifact in 2538 ms, size: 463 fields circuitName=empty-nested circuitSize=64 duration=2537.3308089971542 inputSize=0 proofSize=14820 eventName=circuit-proving numPublicInputs=16 +3s - aztec:bb-prover [INFO] Successfully verified proof from key in 98.56563103199005 ms +100ms - aztec:bb-prover [INFO] Generated witness circuitName=private-kernel-empty duration=66.22513508796692 inputSize=34525 outputSize=16446 eventName=circuit-witness-generation +73ms - aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - Initializing BN254 prover CRS from memory with num points = 33554433 +7s - aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - using cached grumpkin crs of size 419430 at: "/mnt/user-data/mara/.bb-crs/grumpkin_g1.dat" +228ms - aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - Initializing Grumpkin prover CRS from memory with num points = 262145 +112ms - aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - here -Number of public inputs BEFORE subtracting stuff in mega proof: 680 -Number of public inputs after subtracting stuff in mega proof: 664 +15ms - aztec:bb-prover [INFO] Generated proof for PrivateKernelEmptyArtifact in 9729 ms, size: 463 fields circuitName=private-kernel-empty circuitSize=1048576 duration=9728.656173944473 inputSize=16446 proofSize=36068 eventName=circuit-proving numPublicInputs=680 +3s - aztec:bb-prover [INFO] Successfully verified proof from key in 93.87683200836182 ms +96ms - aztec:prover-client:prover-agent [VERBOSE] Processed proving job id=dd120000 type=PRIVATE_KERNEL_EMPTY duration=12612.859003067017ms +13s - aztec:bb-prover [INFO] Generated witness circuitName=root-parity duration=146.93796598911285 inputSize=136660 outputSize=96 eventName=circuit-witness-generation +213ms - aztec:prover-client:prover-agent [INFO] Agent is running with 2 in-flight jobs: id=dc120000,type=TUBE_PROOF id=68140000,type=ROOT_PARITY +24s - aztec:bb-prover [INFO] Generated proof for RootParityArtifact in 29764 ms, size: 463 fields circuitName=root-parity circuitSize=4194304 duration=29763.03623199463 inputSize=96 proofSize=14916 eventName=circuit-proving numPublicInputs=19 +30s - aztec:bb-prover [INFO] Successfully verified proof from key in 101.20787191390991 ms +106ms - aztec:prover-client:prover-agent [VERBOSE] Processed proving job id=68140000 type=ROOT_PARITY duration=30039.742568016052ms +6s - aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - DeciderProvingKey(Circuit&) -creating decider proving key +2s - aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - Finalized circuit size: 10059442 -Log dyadic circuit size: 24 -constructing proving key -allocating wires +6s - aztec:bb-prover [INFO] Generated witness circuitName=private-base-rollup duration=8344.693490982056 inputSize=226372 outputSize=696 eventName=circuit-witness-generation +624ms - aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - allocating gate selectors +232ms - aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - allocating non-gate selectors +212ms - aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - allocating table polynomials -allocating sigmas and ids +1s - aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - allocating lookup read counts and tags +2s - aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - allocating lookup and databus inverses +0ms - aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - constructing z_perm +86ms - aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - allocating lagrange polynomials +218ms - aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - constructing prover instance after trace populate -constructing lookup table polynomials -constructing lookup read counts +6s - aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - created oink prover +125ms - aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - created oink proof +13s - aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - created decider prover -executing relation checking rounds... +0ms - aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - completed sumcheck round 0 +8s - aztec:sequencer [VERBOSE] Sequencer sync check succeeded worldStateNumber=8 worldStateHash=0x1bd2250736e76438fab780a4ac1e7c02444601ec879f32d09a331381da68448b l2BlockSourceNumber=8 l2BlockSourceHash=0x1bd2250736e76438fab780a4ac1e7c02444601ec879f32d09a331381da68448b p2pNumber=8 l1ToL2MessageSourceNumber=8 +1s - aztec:sequencer
[VERBOSE] Sequencer sync check succeeded worldStateNumber=8 worldStateHash=0x1bd2250736e76438fab780a4ac1e7c02444601ec879f32d09a331381da68448b l2BlockSourceNumber=8 l2BlockSourceHash=0x1bd2250736e76438fab780a4ac1e7c02444601ec879f32d09a331381da68448b p2pNumber=8 l1ToL2MessageSourceNumber=8 +1s - aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - completed sumcheck round 1 +2s - aztec:sequencer [VERBOSE] Sequencer sync check succeeded worldStateNumber=8 worldStateHash=0x1bd2250736e76438fab780a4ac1e7c02444601ec879f32d09a331381da68448b l2BlockSourceNumber=8 l2BlockSourceHash=0x1bd2250736e76438fab780a4ac1e7c02444601ec879f32d09a331381da68448b p2pNumber=8 l1ToL2MessageSourceNumber=8 +1s - aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - completed sumcheck round 2 +714ms - aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - completed sumcheck round 3 +404ms - aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - +4ms - aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - completed sumcheck round 4 +217ms - aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - completed sumcheck round 5 +100ms - aztec:sequencer [VERBOSE] Sequencer sync check succeeded worldStateNumber=8 worldStateHash=0x1bd2250736e76438fab780a4ac1e7c02444601ec879f32d09a331381da68448b l2BlockSourceNumber=8 l2BlockSourceHash=0x1bd2250736e76438fab780a4ac1e7c02444601ec879f32d09a331381da68448b p2pNumber=8 l1ToL2MessageSourceNumber=8 +1s - aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - completed sumcheck round 6 +50ms - aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - completed sumcheck round 7 +26ms - aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - completed sumcheck round 8 +16ms - aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - completed sumcheck round 9 +9ms - aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - completed sumcheck round 10 +6ms - aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - completed sumcheck round 11 +4ms - aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - completed sumcheck round 12 +2ms - aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - completed sumcheck round 13 +1ms - aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - completed sumcheck round 14 +2ms - aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - completed sumcheck round 15 +1ms - aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - completed sumcheck round 16 +2ms - aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - completed sumcheck round 17 +1ms - aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - completed sumcheck round 18 +1ms - aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - completed sumcheck round 19 +1ms - aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - completed sumcheck round 20 +1ms - aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - completed sumcheck round 21 +1ms - aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - completed sumcheck round 22 +1ms - aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - completed sumcheck round 23 +9ms - aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - executing pcs opening rounds... 
aztec:bb-prover [INFO] Generated proof for PrivateBaseRollupArtifact in 35090 ms, size: 463 fields circuitName=private-base-rollup circuitSize=4194304 duration=35089.65271103382 inputSize=696 proofSize=15780 eventName=circuit-proving numPublicInputs=46 +695ms
aztec:bb-prover [INFO] Successfully verified proof from key in 200.11857199668884 ms +203ms
aztec:prover-client:prover-agent [VERBOSE] Processed proving job id=69140000 type=PRIVATE_BASE_ROLLUP duration=43732.12967503071ms +44s
aztec:bb-prover [INFO] Generated witness circuitName=base-parity duration=167.14842903614044 inputSize=160 outputSize=96 eventName=circuit-witness-generation +221ms
aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - made commitment key +138ms
aztec:bb-prover [INFO] Generated proof for BaseParityArtifact in 4598 ms, size: 463 fields circuitName=base-parity circuitSize=65536 duration=4597.972611069679 inputSize=96 proofSize=14916 eventName=circuit-proving numPublicInputs=19 +4s
aztec:bb-prover [INFO] Successfully verified proof from key in 246.07455801963806 ms +250ms
aztec:prover-client:prover-agent [VERBOSE] Processed proving job id=de120000 type=BASE_PARITY duration=5026.563683986664ms +5s
aztec:bb-prover [INFO] Generated witness circuitName=base-parity duration=192.3547660112381 inputSize=160 outputSize=96 eventName=circuit-witness-generation +240ms
aztec:bb-prover [INFO] Generated proof for BaseParityArtifact in 3180 ms, size: 463 fields circuitName=base-parity circuitSize=65536 duration=3179.686544060707 inputSize=96 proofSize=14916 eventName=circuit-proving numPublicInputs=19 +3s
aztec:bb-prover [INFO] Successfully verified proof from key in 228.42799699306488 ms +233ms
aztec:prover-client:prover-agent [VERBOSE] Processed proving job id=df120000 type=BASE_PARITY duration=3615.343491077423ms +4s
aztec:bb-prover [INFO] Generated witness circuitName=base-parity duration=198.9564299583435 inputSize=160 outputSize=96 eventName=circuit-witness-generation +248ms
aztec:prover-client:prover-agent [INFO] Agent is running with 2 in-flight jobs: id=dc120000,type=TUBE_PROOF id=e0120000,type=BASE_PARITY +1s
aztec:bb-prover [INFO] Generated proof for BaseParityArtifact in 3861 ms, size: 463 fields circuitName=base-parity circuitSize=65536 duration=3860.271543979645 inputSize=96 proofSize=14916 eventName=circuit-proving numPublicInputs=19 +4s
aztec:bb-prover [INFO] Successfully verified proof from key in 208.59258794784546 ms +210ms
aztec:prover-client:prover-agent [VERBOSE] Processed proving job id=e0120000 type=BASE_PARITY duration=4272.846822023392ms +3s
aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - executed multivariate-to-univarite reduction +19ms
aztec:bb-prover [INFO] Generated witness circuitName=base-parity duration=208.88281798362732 inputSize=160 outputSize=96 eventName=circuit-witness-generation +244ms
aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - computed opening proof +3s
aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - tube proof length 1127 +40ms
aztec:bb-prover [INFO] Generated proof for BaseParityArtifact in 3938 ms, size: 463 fields circuitName=base-parity circuitSize=65536 duration=3937.8369719982147 inputSize=96 proofSize=14916 eventName=circuit-proving numPublicInputs=19 +1s
aztec:bb-prover [INFO] Successfully verified proof from key in 240.1481729745865 ms +242ms
aztec:prover-client:prover-agent [VERBOSE] Processed proving job id=e1120000 type=BASE_PARITY duration=4402.227629065514ms +4s
aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - Executing BB with: prove_tube -o /tmp/bb-wPFf4H/tmp-s8f9jk -v +61ms
aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - bb command is: prove_tube -PLEASE BE HERE +219ms
aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - using cached bn254 crs of size 53687092 at "/mnt/user-data/mara/.bb-crs/bn254_g1.dat" +0ms
aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - verificaton key length in fields:128 +9s
aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - Native verification of the tube_proof +0ms
aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - am I here at all? +0ms
aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - Tube proof verification: 1 +17ms
aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - Initializing BN254 prover CRS from memory with num points = 33554433 +926ms
aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - using cached grumpkin crs of size 419430 at: "/mnt/user-data/mara/.bb-crs/grumpkin_g1.dat" +138ms
aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - Initializing Grumpkin prover CRS from memory with num points = 262145 +82ms
aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - here -Number of public inputs BEFORE subtracting stuff in mega proof: 3013 -Number of public inputs after subtracting stuff in mega proof: 2997 +14ms
aztec:bb-prover [INFO] Successfully verified proof from key in 207.85654199123383 ms +3s
aztec:bb-prover [INFO] Generated proof for tubeCircuit in 117511 ms, size: 463 fields +4ms
aztec:prover-client:prover-agent [VERBOSE] Processed proving job id=dc120000 type=TUBE_PROOF duration=117746.24552500248ms +14s
aztec:bb-prover [INFO] Generated witness circuitName=private-base-rollup duration=8821.229328989983 inputSize=226372 outputSize=696 eventName=circuit-witness-generation +9s
aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - DeciderProvingKey(Circuit&) -creating decider proving key +9s
aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - Finalized circuit size: 7190007 -Log dyadic circuit size: 23 +4s
aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - constructing proving key +2ms
aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - allocating wires +0ms
aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - allocating gate selectors +441ms
aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - allocating non-gate selectors +148ms
aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - allocating table polynomials +667ms
aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - allocating sigmas and ids +0ms
aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - allocating lookup read counts and tags +876ms
aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - allocating lookup and databus inverses +0ms
aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - constructing z_perm +18ms
aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - allocating lagrange polynomials +110ms
aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - constructing prover instance after trace populate -constructing lookup table polynomials -constructing lookup read counts +3s
aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - created oink prover +91ms
aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - created oink proof +7s
aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - created decider prover -executing relation checking rounds... +1ms
aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - completed sumcheck round 0 +4s
aztec:prover-client:prover-agent [INFO] Agent is running with 2 in-flight jobs: id=65140000,type=TUBE_PROOF id=6b140000,type=PRIVATE_BASE_ROLLUP +39s
aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - completed sumcheck rounds 1 through 22
aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - executing pcs opening rounds... +434ms
aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - made commitment key +164ms
aztec:bb-prover [INFO] Generated proof for PrivateBaseRollupArtifact in 36709 ms, size: 463 fields circuitName=private-base-rollup circuitSize=4194304 duration=36708.58653497696 inputSize=696 proofSize=15780 eventName=circuit-proving numPublicInputs=46 +4s
aztec:bb-prover [INFO] Successfully verified proof from key in 214.36560606956482 ms +221ms
aztec:prover-client:prover-agent [VERBOSE] Processed proving job id=6b140000 type=PRIVATE_BASE_ROLLUP duration=45853.62666094303ms +7s
aztec:bb-prover [INFO] Generated witness circuitName=block-root-rollup duration=243.70688104629517 inputSize=107031 outputSize=1648 eventName=circuit-witness-generation +310ms
aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - executed multivariate-to-univarite reduction +2s
aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - computed opening proof +1s
aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - tube proof length 3460 +24ms
aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - verificaton key length in fields:128 +5s
aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - Native verification of the tube_proof -am I here at all? +0ms
aztec:bb-prover [VERBOSE] TubeCircuit (prove) BB out - Tube proof verification: 1 +25ms
aztec:bb-prover [INFO] Successfully verified proof from key in 207.38051295280457 ms +3s
aztec:bb-prover [INFO] Generated proof for tubeCircuit in 70226 ms, size: 463 fields +1ms
aztec:prover-client:prover-agent [VERBOSE] Processed proving job id=65140000 type=TUBE_PROOF duration=70461.74008393288ms +11s
aztec:bb-prover [INFO] Proving avm-circuit for undefined (via dispatch)... +47ms
aztec:bb-prover [VERBOSE] AvmCircuit (prove) BB out - Executing BB with: avm_prove --avm-calldata /tmp/bb-wPFf4H/tmp-pzyWFt/avm_calldata.bin --avm-public-inputs /tmp/bb-wPFf4H/tmp-pzyWFt/avm_public_inputs.bin --avm-hints /tmp/bb-wPFf4H/tmp-pzyWFt/avm_hints.bin -o /tmp/bb-wPFf4H/tmp-pzyWFt -v +27ms
aztec:bb-prover [VERBOSE] AvmCircuit (prove) BB out - bb command is: avm_prove +184ms
aztec:bb-prover [VERBOSE] AvmCircuit (prove) BB out - bytecode size: 37708 -calldata size: 5 -public_inputs size: 866 -hints.storage_value_hints size: 2 -hints.note_hash_exists_hints size: 0 -hints.nullifier_exists_hints size: 1 -hints.l1_to_l2_message_exists_hints size: 0 -hints.externalcall_hints size: 0 -hints.contract_instance_hints size: 0 -hints.contract_bytecode_hints size: 1 -initializing crs with size: 4194304 -using cached bn254 crs of size 53687092 at "/mnt/user-data/mara/.bb-crs/bn254_g1.dat" +0ms
aztec:bb-prover [VERBOSE] AvmCircuit (prove) BB out - Initializing BN254 prover CRS from memory with num points = 4194305 +4s
aztec:bb-prover [VERBOSE] AvmCircuit (prove) BB out - ------- GENERATING TRACE ------- +25ms
aztec:bb-prover [VERBOSE] AvmCircuit (prove) BB out - Deserialized 7931 instructions +4ms
aztec:bb-prover [VERBOSE] AvmCircuit (prove) BB out - range_check_required: 1 -full_precomputed_tables: 1 +130ms
aztec:bb-prover [VERBOSE] AvmCircuit (prove) BB out - Trace sizes before padding: - main_trace_size: 3572 - mem_trace_size: 6526 - alu_trace_size: 911 - range_check_size: 65536 - conv_trace_size: 0 - bin_trace_size: 0 - sha256_trace_size: 0 - poseidon2_trace_size: 4 - gas_trace_size: 3567 - fixed_gas_table_size: 68 - slice_trace_size: 7 - range_check_trace_size: 16188 - cmp_trace_size: 1181 - keccak_trace_size: 0 - kernel_trace_size: 8 - KERNEL_INPUTS_LENGTH: 15 - KERNEL_OUTPUTS_LENGTH: 230 - calldata_size: 5 +5s
aztec:bb-prover [VERBOSE] AvmCircuit (prove) BB out - Built trace size: 196608 (next power: 2^18) -Number of columns: 766 -Relation degrees: - alu: [6°: 2, 4°: 6, 3°: 11, 2°: 25, 1°: 5] - binary: [3°: 1, 2°: 9] - bytecode: [2°: 1] - cmp: [4°: 3, 3°: 1, 2°: 21, 1°: 2] - conversion: [2°: 1] - gas: [4°: 2, 3°: 2, 2°: 6] - keccakf1600: [2°: 1] - kernel: [3°: 3, 2°: 39] - main: [4°: 3, 3°: 8, 2°: 99, 1°: 3] - mem: [4°: 1, 3°: 7, 2°: 42, 1°: 3] - mem_slice: [3°: 3, 2°: 7, 1°: 1] - merkle_tree: [4°: 1, 3°: 5, 2°: 3] - poseidon2: [6°: 256, 2°: 19, 1°: 1] - poseidon2_full: [5°: 4, 4°: 1, 3°: 2, 2°: 12, 1°: 1] - range_check: [3°: 1, 2°: 15, 1°: 9] - sha256: [2°: 1] +0ms
aztec:bb-prover [VERBOSE] AvmCircuit (prove) BB out - Number of non-zero elements: 1787272/150601728 (1%) +2s
aztec:bb-prover [VERBOSE] AvmCircuit (prove) BB out - Number of non-zero columns: 652/766 (85%) +2s
aztec:bb-prover [VERBOSE] AvmCircuit (prove) BB out - Circuit subgroup size: 2^21 +79ms
aztec:bb-prover [VERBOSE] AvmCircuit (prove) BB out - ------- PROVING EXECUTION ------- +3s
aztec:bb-prover [INFO] Generated proof for BlockRootRollupArtifact in 29243 ms, size: 463 fields circuitName=block-root-rollup circuitSize=4194304 duration=29242.519376039505 inputSize=1648 proofSize=16740 eventName=circuit-proving numPublicInputs=76 +2s
aztec:bb-prover [INFO] Successfully verified proof from key in 224.7451640367508 ms +240ms
aztec:prover-client:prover-agent [VERBOSE] Processed proving job id=6c140000 type=BLOCK_ROOT_ROLLUP duration=29757.040691018105ms +19s
aztec:bb-prover [INFO] Generated witness circuitName=empty-nested duration=211.0107820034027 inputSize=0 outputSize=0 eventName=circuit-witness-generation +274ms
aztec:bb-prover [VERBOSE] AvmCircuit (prove) BB out - ------- PROVING EXECUTION ------- +3s
aztec:bb-prover [INFO] Generated proof for BlockRootRollupArtifact in 29243 ms, size: 463 fields circuitName=block-root-rollup circuitSize=4194304 duration=29242.519376039505 inputSize=1648 proofSize=16740 eventName=circuit-proving numPublicInputs=76 +2s
aztec:bb-prover [INFO] Successfully verified proof from key in 224.7451640367508 ms +240ms
aztec:prover-client:prover-agent [VERBOSE] Processed proving job id=6c140000 type=BLOCK_ROOT_ROLLUP duration=29757.040691018105ms +19s
aztec:bb-prover [INFO] Generated witness circuitName=empty-nested duration=211.0107820034027 inputSize=0 outputSize=0 eventName=circuit-witness-generation +274ms
aztec:bb-prover [INFO] Generated proof for EmptyNestedArtifact in 2831 ms, size: 463 fields circuitName=empty-nested circuitSize=64 duration=2830.018119931221 inputSize=0 proofSize=14820 eventName=circuit-proving numPublicInputs=16 +3s
aztec:bb-prover [INFO] Successfully verified proof from key in 227.6086059808731 ms +231ms
aztec:bb-prover [INFO] Generated witness circuitName=private-kernel-empty duration=215.04656100273132 inputSize=34525 outputSize=16446 eventName=circuit-witness-generation +226ms
aztec:bb-prover [INFO] Generated proof for PrivateKernelEmptyArtifact in 10250 ms, size: 463 fields circuitName=private-kernel-empty circuitSize=1048576 duration=10249.666541099548 inputSize=16446 proofSize=36068 eventName=circuit-proving numPublicInputs=680 +10s
aztec:bb-prover [INFO] Successfully verified proof from key in 175.39184296131134 ms +178ms
aztec:prover-client:prover-agent [VERBOSE] Processed proving job id=67140000 type=PRIVATE_KERNEL_EMPTY duration=13946.162760019302ms +14s
aztec:bb-prover [INFO] Generated witness circuitName=root-parity duration=226.3128650188446 inputSize=136660 outputSize=96 eventName=circuit-witness-generation +300ms
aztec:prover-client:prover-agent [INFO] Agent is running with 2 in-flight jobs: id=66140000,type=PUBLIC_VM id=6a140000,type=ROOT_PARITY +10s
aztec:bb-prover [VERBOSE] AvmCircuit (prove) BB out - completed sumcheck round 0 +12s
aztec:bb-prover [VERBOSE] AvmCircuit (prove) BB out - completed sumcheck round 1 +4s
aztec:bb-prover [VERBOSE] AvmCircuit (prove) BB out - completed sumcheck round 2 +2s
aztec:bb-prover [VERBOSE] AvmCircuit (prove) BB out - completed sumcheck round 3 +1s
aztec:bb-prover [VERBOSE] AvmCircuit (prove) BB out - completed sumcheck round 4 +528ms
aztec:bb-prover [VERBOSE] AvmCircuit (prove) BB out - completed sumcheck round 5 +255ms
aztec:bb-prover [VERBOSE] AvmCircuit (prove) BB out - completed sumcheck round 6 +138ms
aztec:bb-prover [VERBOSE] AvmCircuit (prove) BB out - completed sumcheck round 7 +100ms
aztec:bb-prover [VERBOSE] AvmCircuit (prove) BB out - completed sumcheck round 8 +50ms
aztec:bb-prover [VERBOSE] AvmCircuit (prove) BB out - completed sumcheck round 9 +45ms
aztec:bb-prover [VERBOSE] AvmCircuit (prove) BB out - completed sumcheck round 10 +31ms
aztec:bb-prover [VERBOSE] AvmCircuit (prove) BB out - completed sumcheck round 11 +26ms
aztec:bb-prover [VERBOSE] AvmCircuit (prove) BB out - completed sumcheck round 12 +22ms
aztec:bb-prover [VERBOSE] AvmCircuit (prove) BB out - completed sumcheck round 13 +20ms
aztec:bb-prover [VERBOSE] AvmCircuit (prove) BB out - completed sumcheck round 14 +18ms
aztec:bb-prover [VERBOSE] AvmCircuit (prove) BB out - completed sumcheck round 15 +25ms
aztec:bb-prover [VERBOSE] AvmCircuit (prove) BB out - completed sumcheck round 16 +1ms
aztec:bb-prover [VERBOSE] AvmCircuit (prove) BB out - completed sumcheck round 17 +9ms
aztec:bb-prover [VERBOSE] AvmCircuit (prove) BB out - completed sumcheck round 18 +6ms
aztec:bb-prover [VERBOSE] AvmCircuit (prove) BB out - completed sumcheck round 19 +5ms
aztec:bb-prover [VERBOSE] AvmCircuit (prove) BB out - completed sumcheck round 20 +4ms
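For reference, rounds 0 through 20 are 21 sumcheck rounds in total, matching log2(2097152) = 21 for the 2^21 circuit subgroup size reported above; the per-round times roughly halve from one round to the next, consistent with each round folding the evaluation domain in half.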
aztec:bb-prover [VERBOSE] AvmCircuit (prove) BB out - vk fields size: 86, circuit size: 2097152, num of pub inputs: 0 +5s
aztec:bb-prover [VERBOSE] AvmCircuit (prove) BB out - proof written to: "/tmp/bb-wPFf4H/tmp-pzyWFt/proof" +1ms
aztec:bb-prover [VERBOSE] AvmCircuit (prove) BB out - vk written to: "/tmp/bb-wPFf4H/tmp-pzyWFt/vk", vk as fields written to: "/tmp/bb-wPFf4H/tmp-pzyWFt/vk_fields.json" +0ms
aztec:bb-prover [VERBOSE] AvmCircuit (prove) BB out - ------- STATS ------- circuit_builder/init_polys_to_be_shifted_ms: 169, circuit_builder/init_polys_unshifted_ms: 1051, circuit_builder/set_polys_shifted_ms: 0, circuit_builder/set_polys_unshifted_ms: 471, composer/create_prover:commitment_key_ms: 409, composer/create_prover:construct_prover_ms: 74, composer/create_prover:proving_key_ms: 197, composer/create_prover:witness_ms: 1776, prove/all_ms: 54156, prove/create_composer_ms: 0, prove/create_prover_ms: 2457, prove/create_verifier_ms: 54, prove/execute_log_derivative_inverse_commitments_round_ms: 1718, prove/execute_log_derivative_inverse_round_ms: 5708, prove/execute_pcs_rounds_ms: 2271, prove/execute_relation_check_rounds_ms: 30951, prove/execute_wire_commitments_round_ms: 1507, prove/gen_trace_ms: 8856 +0ms
aztec:bb-prover [INFO] Generated proof for avm-circuit(undefined (via dispatch)) in 58578 ms circuitName=avm-circuit appCircuitName=undefined (via dispatch) duration=58577.719421982765 proofSize=161568 eventName=circuit-proving inputSize=58527 circuitSize=2097152 numPublicInputs=0 +261ms
aztec:bb-prover [INFO] Successfully verified proof from key in 3748.427493929863 ms +4s
aztec:prover-client:prover-agent [VERBOSE] Processed proving job id=66140000 type=PUBLIC_VM duration=62380.79090106487ms +20s
aztec:bb-prover [INFO] Generated proof for RootParityArtifact in 35754 ms, size: 463 fields circuitName=root-parity circuitSize=4194304 duration=35753.890980005264 inputSize=96 proofSize=14916 eventName=circuit-proving numPublicInputs=19 +6s
aztec:bb-prover [INFO] Successfully verified proof from key in 208.57304298877716 ms +211ms
aztec:prover-client:prover-agent [VERBOSE] Processed proving job id=6a140000 type=ROOT_PARITY duration=36210.723355054855ms +7s
aztec:bb-prover [INFO] Generated witness circuitName=private-base-rollup duration=9583.480479955673 inputSize=226372 outputSize=696 eventName=circuit-witness-generation +3s
aztec:bb-prover [INFO] Generated witness circuitName=public-base-rollup duration=9414.830352067947 inputSize=653564 outputSize=696 eventName=circuit-witness-generation +6s
aztec:prover-client:prover-agent [INFO] Agent is running with 2 in-flight jobs: id=6d140000,type=PRIVATE_BASE_ROLLUP id=6e140000,type=PUBLIC_BASE_ROLLUP +33s
aztec:bb-prover [INFO] Generated proof for PrivateBaseRollupArtifact in 38748 ms, size: 463 fields circuitName=private-base-rollup circuitSize=4194304 duration=38747.95427298546 inputSize=696 proofSize=15780 eventName=circuit-proving numPublicInputs=46 +32s
aztec:bb-prover [INFO] Successfully verified proof from key in 252.5432139635086 ms +257ms
aztec:prover-client:prover-agent [VERBOSE] Processed proving job id=6d140000 type=PRIVATE_BASE_ROLLUP duration=48690.385566949844ms +9s
aztec:bb-prover [INFO] Generated proof for PublicBaseRollupArtifact in 34308 ms, size: 463 fields circuitName=public-base-rollup circuitSize=4194304 duration=34307.68875694275 inputSize=696 proofSize=15780 eventName=circuit-proving numPublicInputs=46 +2s
aztec:bb-prover [INFO] Successfully verified proof from key in 205.41261994838715 ms +209ms
aztec:prover-client:prover-agent [VERBOSE] Processed proving job id=6e140000 type=PUBLIC_BASE_ROLLUP duration=44085.496753931046ms +2s
aztec:bb-prover [INFO] Generated witness circuitName=block-root-rollup duration=242.3942620754242 inputSize=107031 outputSize=1648 eventName=circuit-witness-generation +262ms
aztec:bb-prover [INFO] Generated proof for BlockRootRollupArtifact in 26607 ms, size: 463 fields circuitName=block-root-rollup circuitSize=4194304 duration=26606.669782996178 inputSize=1648 proofSize=16740 eventName=circuit-proving numPublicInputs=76 +27s
aztec:bb-prover [INFO] Successfully verified proof from key in 204.72733795642853 ms +214ms
aztec:prover-client:prover-agent [VERBOSE] Processed proving job id=6f140000 type=BLOCK_ROOT_ROLLUP duration=27079.75703704357ms +27s
aztec:bb-prover [INFO] Generated witness circuitName=root-rollup duration=239.16752803325653 inputSize=75178 outputSize=1160 eventName=circuit-witness-generation +257ms
aztec:bb-prover [INFO] Generated proof for RootRollupArtifact in 16822 ms circuitName=root-rollup duration=16821.483263015747 proofSize=16228 eventName=circuit-proving inputSize=1160 circuitSize=2097152 numPublicInputs=60 +17s
aztec:bb-prover [INFO] Successfully verified proof from key in 176.57848501205444 ms +179ms
aztec:prover-client:prover-agent [VERBOSE] Processed proving job id=70140000 type=ROOT_ROLLUP duration=17260.073858976364ms +17s
aztec:prover:proving-orchestrator [VERBOSE] Orchestrator completed root rollup for epoch 1 +6m
aztec:epoch-proving-job [INFO] Finalised proof for epoch epochNumber=1 uuid=437af23b-1395-41d0-acbe-1e6905a31db3 +6m
aztec:sequencer:publisher [INFO] SubmitEpochProof proofSize=14304 bytes +0ms
aztec:sequencer:publisher [INFO] Published epoch proof to L1 rollup contract gasPrice=1006037821 gasUsed=2454926 transactionHash=0xeacc935990c12a87e70d0249b3e53c09dc45396b27624d7a8d1c499b4b1fd89e calldataGas=181292 calldataSize=16292 sender=0x3c44cdddb6a900fa2b585dd299e03d12fa4293bc eventName=proof-published-to-l1 epochNumber=1 fromBlock=6 toBlock=7 +762ms
aztec:epoch-proving-job [INFO] Submitted proof for epoch epochNumber=1 uuid=437af23b-1395-41d0-acbe-1e6905a31db3 +769ms
aztec:archiver [VERBOSE] Retrieved no new L1 -> L2 messages between L1 blocks 41 and 41. +6m
aztec:archiver [VERBOSE] Updating the proven block number to 7 and epoch to 1 +3ms
aztec:archiver [VERBOSE] No blocks to retrieve from 41 to 41 +0ms
aztec:archiver [VERBOSE] Updating the proven block number to 7 and epoch to 1 +2ms
aztec:archiver [VERBOSE] No blocks to retrieve from 41 to 41 +1ms
aztec:world_state [VERBOSE] Handling new L2 blocks from 6 to 7 +8m
aztec:world_state [VERBOSE] Chain proven at block 7 +6m
aztec:world_state [VERBOSE] Chain finalized at block 7 +0ms
aztec:world_state [VERBOSE] Handled new L2 block eventName=l2-block-handled duration=15.916776895523071 unfinalisedBlockNumber=6 finalisedBlockNumber=5 oldestHistoricBlock=1 txCount=1 blockNumber=6 blockTimestamp=1730722153 noteEncryptedLogLength=1042 noteEncryptedLogCount=2 encryptedLogLength=492 encryptedLogCount=1 unencryptedLogCount=0 unencryptedLogSize=8 +16ms
aztec:world_state [VERBOSE] Handled new L2 block eventName=l2-block-handled duration=11.487579941749573 unfinalisedBlockNumber=7 finalisedBlockNumber=5 oldestHistoricBlock=1 txCount=1 blockNumber=7 blockTimestamp=1730722177 noteEncryptedLogLength=8 noteEncryptedLogCount=0 encryptedLogLength=8 encryptedLogCount=0 unencryptedLogCount=0 unencryptedLogSize=12 +12ms
aztec:world_state [VERBOSE] Chain proven at block 7 +0ms
aztec:world_state [VERBOSE] Chain finalized at block 7 +0ms
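The per-circuit proving times for this run are scattered through the output above, each tagged with eventName=circuit-proving plus circuitName= and duration= fields. As a rough standalone sketch (not part of this PR or of any aztec tooling; the script name and field parsing are assumptions based only on the log format shown here), something along these lines could tabulate them from a saved log file passed as the first argument:

  // summarize-proving-times.ts (hypothetical helper, illustration only)
  import { readFileSync } from 'fs';

  // Collect duration= values per circuitName= from lines tagged eventName=circuit-proving.
  const byCircuit: Record<string, number[]> = {};
  for (const line of readFileSync(process.argv[2], 'utf8').split('\n')) {
    if (!line.includes('eventName=circuit-proving')) continue;
    const name = /circuitName=([\w-]+)/.exec(line)?.[1];
    const duration = /duration=([\d.]+)/.exec(line)?.[1];
    if (name && duration) (byCircuit[name] ??= []).push(Number(duration));
  }
  for (const [name, durations] of Object.entries(byCircuit)) {
    const totalSecs = durations.reduce((a, b) => a + b, 0) / 1000;
    console.log(`${name}: ${durations.length} proof(s), ${totalSecs.toFixed(1)}s total`);
  }

For the portion of the log shown above it would report two block-root-rollup proofs (about 55.8s combined), the avm-circuit public VM proof (about 58.6s), root-parity (about 35.8s), private-base-rollup (about 38.7s), public-base-rollup (about 34.3s), root-rollup (about 16.8s), plus the padding circuits empty-nested (about 2.8s) and private-kernel-empty (about 10.2s).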
aztec:simulator:private_execution [VERBOSE] Executing external function SchnorrAccount:entrypoint@0x08a884b761bf510bc032a884aa3ec2ce1f326b2463039e0b64df4c05a6273b78 +0ms
aztec:simulator:client_execution_context [VERBOSE] Created PublicExecutionRequest to public_dispatch@0x1b99a46103bf947d5e422e7bf39fedcc759d29b98ff583cd9b4c5ea79e28f30c, of type [enqueued], side-effect counter [3] +0ms
aztec:simulator:client_execution_context [VERBOSE] Created PublicExecutionRequest to public_dispatch@0x1b99a46103bf947d5e422e7bf39fedcc759d29b98ff583cd9b4c5ea79e28f30c, of type [enqueued], side-effect counter [4] +21ms
aztec:simulator:client_execution_context [VERBOSE] Created PublicExecutionRequest to public_dispatch@0x1b99a46103bf947d5e422e7bf39fedcc759d29b98ff583cd9b4c5ea79e28f30c, of type [enqueued], side-effect counter [5] +22ms
aztec:pxe_service [VERBOSE] Simulation completed for 0x08a884b761bf510bc032a884aa3ec2ce1f326b2463039e0b64df4c05a6273b78:entrypoint +8m
aztec:node [INFO] Simulating tx 2ba7d56d9951d97fdd029f2b4aa1d16642df5eb9ebed1b022a73beeec38f1e02 +6m
aztec:sequencer [VERBOSE] Processing tx 2ba7d56d9951d97fdd029f2b4aa1d16642df5eb9ebed1b022a73beeec38f1e02 +0ms
aztec:simulator:public_executor [VERBOSE] [AVM] Executing public external function Token:total_supply (via dispatch)@0x1b99a46103bf947d5e422e7bf39fedcc759d29b98ff583cd9b4c5ea79e28f30c with 12000000 allocated L2 gas. +6m
aztec:simulator:public_executor [VERBOSE] [AVM] Token:total_supply (via dispatch) returned, reverted: false. eventName=avm-simulation appCircuitName=Token:total_supply (via dispatch) duration=172.55546307563782 bytecodeSize=37708 +172ms
aztec:simulator:public_executor [VERBOSE] [AVM] Token:total_supply (via dispatch) simulation complete. Reverted=false. Consumed 8579 L2 gas, ending with 11991421 L2 gas left. +1ms
aztec:simulator:public_executor [VERBOSE] [AVM] Executing public external function Token:balance_of_public (via dispatch)@0x1b99a46103bf947d5e422e7bf39fedcc759d29b98ff583cd9b4c5ea79e28f30c with 12000000 allocated L2 gas. +311ms
aztec:simulator:public_executor [VERBOSE] [AVM] Token:balance_of_public (via dispatch) returned, reverted: false. eventName=avm-simulation appCircuitName=Token:balance_of_public (via dispatch) duration=167.56814897060394 bytecodeSize=37708 +167ms
aztec:simulator:public_executor [VERBOSE] [AVM] Token:balance_of_public (via dispatch) simulation complete. Reverted=false. Consumed 27282 L2 gas, ending with 11972718 L2 gas left. +1ms
aztec:simulator:public_executor [VERBOSE] [AVM] Executing public external function Token:balance_of_public (via dispatch)@0x1b99a46103bf947d5e422e7bf39fedcc759d29b98ff583cd9b4c5ea79e28f30c with 12000000 allocated L2 gas. +373ms
aztec:simulator:public_executor [VERBOSE] [AVM] Token:balance_of_public (via dispatch) returned, reverted: false. eventName=avm-simulation appCircuitName=Token:balance_of_public (via dispatch) duration=208.96653497219086 bytecodeSize=37708 +210ms
aztec:simulator:public_executor [VERBOSE] [AVM] Token:balance_of_public (via dispatch) simulation complete. Reverted=false. Consumed 27282 L2 gas, ending with 11972718 L2 gas left. +0ms
aztec:pxe_service [INFO] Executed local simulation for 2ba7d56d9951d97fdd029f2b4aa1d16642df5eb9ebed1b022a73beeec38f1e02 +3s
aztec:simulator:unconstrained_execution [VERBOSE] Executing unconstrained function 0x1b99a46103bf947d5e422e7bf39fedcc759d29b98ff583cd9b4c5ea79e28f30c:0x4375727c(balance_of_private) +0ms
aztec:pxe_service [VERBOSE] Unconstrained simulation for 0x1b99a46103bf947d5e422e7bf39fedcc759d29b98ff583cd9b4c5ea79e28f30c.balance_of_private completed +34ms
aztec:simulator:unconstrained_execution [VERBOSE] Executing unconstrained function 0x1b99a46103bf947d5e422e7bf39fedcc759d29b98ff583cd9b4c5ea79e28f30c:0x4375727c(balance_of_private) +0ms
aztec:pxe_service [VERBOSE] Unconstrained simulation for 0x1b99a46103bf947d5e422e7bf39fedcc759d29b98ff583cd9b4c5ea79e28f30c.balance_of_private completed +39ms
aztec:simulator:private_execution [VERBOSE] Executing external function SchnorrAccount:entrypoint@0x08a884b761bf510bc032a884aa3ec2ce1f326b2463039e0b64df4c05a6273b78 +0ms
aztec:pxe_service [VERBOSE] Simulation completed for 0x08a884b761bf510bc032a884aa3ec2ce1f326b2463039e0b64df4c05a6273b78:entrypoint +141ms
aztec:node [INFO] Simulating tx 260b926d08260cc753cc63219ccf032dae955533e373de8055e1d5ba7507604c +3s
aztec:pxe_service [INFO] Executed local simulation for 260b926d08260cc753cc63219ccf032dae955533e373de8055e1d5ba7507604c +478ms
aztec:full [INFO] Running test: full_prover rejects txs with invalid proofs +0ms
aztec:simulator:private_execution [VERBOSE] Executing external function SchnorrAccount:entrypoint@0x08a884b761bf510bc032a884aa3ec2ce1f326b2463039e0b64df4c05a6273b78 +0ms
aztec:simulator:private_execution [VERBOSE] Executing external function Token:transfer@0x1b99a46103bf947d5e422e7bf39fedcc759d29b98ff583cd9b4c5ea79e28f30c +0ms
aztec:pxe_service [VERBOSE] Simulation completed for 0x08a884b761bf510bc032a884aa3ec2ce1f326b2463039e0b64df4c05a6273b78:entrypoint +370ms
aztec:pxe_service [INFO] Executed local simulation for 2833362bd834abab8883ca38ece34b1f72f4ee6c7c6f66cfe71412fe1e960b24 +828ms
aztec:simulator:private_execution [VERBOSE] Executing external function SchnorrAccount:entrypoint@0x08a884b761bf510bc032a884aa3ec2ce1f326b2463039e0b64df4c05a6273b78 +0ms
aztec:simulator:client_execution_context [VERBOSE] Created PublicExecutionRequest to public_dispatch@0x1b99a46103bf947d5e422e7bf39fedcc759d29b98ff583cd9b4c5ea79e28f30c, of type [enqueued], side-effect counter [3] +0ms
aztec:pxe_service [VERBOSE] Simulation completed for 0x08a884b761bf510bc032a884aa3ec2ce1f326b2463039e0b64df4c05a6273b78:entrypoint +155ms
aztec:pxe_service [INFO] Executed local simulation for 0d78e78dfb3e8d5f400343f6d43e50e4f0761aee19c27a1a2a8a2e1779fd0a4a +518ms
aztec:pxe_service [INFO] Sending transaction 2833362bd834abab8883ca38ece34b1f72f4ee6c7c6f66cfe71412fe1e960b24 +731ms
aztec:node [INFO] Received tx 2833362bd834abab8883ca38ece34b1f72f4ee6c7c6f66cfe71412fe1e960b24 +3s
aztec:pxe_service [INFO] Sending transaction 0d78e78dfb3e8d5f400343f6d43e50e4f0761aee19c27a1a2a8a2e1779fd0a4a +512ms
aztec:node [INFO] Received tx 0d78e78dfb3e8d5f400343f6d43e50e4f0761aee19c27a1a2a8a2e1779fd0a4a +513ms
aztec:bb-verifier [WARN] Failed to verify ClientIVC proof for tx 2833362bd834abab8883ca38ece34b1f72f4ee6c7c6f66cfe71412fe1e960b24: Error: Failed to verify ClientIVC proof! +0ms
aztec:sequencer:tx_validator:private_proof [WARN] Rejecting tx 2833362bd834abab8883ca38ece34b1f72f4ee6c7c6f66cfe71412fe1e960b24 for invalid proof +0ms
aztec:node [WARN] Rejecting tx 2833362bd834abab8883ca38ece34b1f72f4ee6c7c6f66cfe71412fe1e960b24 because of validation errors +3s
aztec:pxe_service [INFO] Sent transaction 2833362bd834abab8883ca38ece34b1f72f4ee6c7c6f66cfe71412fe1e960b24 +3s
aztec:bb-verifier [WARN] Failed to verify ClientIVC proof for tx 0d78e78dfb3e8d5f400343f6d43e50e4f0761aee19c27a1a2a8a2e1779fd0a4a: Error: Failed to verify ClientIVC proof!
+457ms - aztec:sequencer:tx_validator:private_proof [WARN] Rejecting tx 0d78e78dfb3e8d5f400343f6d43e50e4f0761aee19c27a1a2a8a2e1779fd0a4a for invalid proof +0ms - aztec:node [WARN] Rejecting tx 0d78e78dfb3e8d5f400343f6d43e50e4f0761aee19c27a1a2a8a2e1779fd0a4a because of validation errors +456ms - aztec:pxe_service [INFO] Sent transaction 0d78e78dfb3e8d5f400343f6d43e50e4f0761aee19c27a1a2a8a2e1779fd0a4a +457ms - aztec:simulator:private_execution [VERBOSE] Executing external function SchnorrAccount:entrypoint@0x08a884b761bf510bc032a884aa3ec2ce1f326b2463039e0b64df4c05a6273b78 +0ms - aztec:simulator:client_execution_context [VERBOSE] Created PublicExecutionRequest to public_dispatch@0x1b99a46103bf947d5e422e7bf39fedcc759d29b98ff583cd9b4c5ea79e28f30c, of type [enqueued], side-effect counter [3] +0ms - aztec:simulator:client_execution_context [VERBOSE] Created PublicExecutionRequest to public_dispatch@0x1b99a46103bf947d5e422e7bf39fedcc759d29b98ff583cd9b4c5ea79e28f30c, of type [enqueued], side-effect counter [4] +23ms - aztec:simulator:client_execution_context [VERBOSE] Created PublicExecutionRequest to public_dispatch@0x1b99a46103bf947d5e422e7bf39fedcc759d29b98ff583cd9b4c5ea79e28f30c, of type [enqueued], side-effect counter [5] +25ms - aztec:pxe_service [VERBOSE] Simulation completed for 0x08a884b761bf510bc032a884aa3ec2ce1f326b2463039e0b64df4c05a6273b78:entrypoint +200ms - aztec:sequencer [VERBOSE] Sequencer sync check succeeded worldStateNumber=8 worldStateHash=0x1bd2250736e76438fab780a4ac1e7c02444601ec879f32d09a331381da68448b l2BlockSourceNumber=8 l2BlockSourceHash=0x1bd2250736e76438fab780a4ac1e7c02444601ec879f32d09a331381da68448b p2pNumber=8 l1ToL2MessageSourceNumber=8 +1s - aztec:node [INFO] Simulating tx 03f9eba0b82f51a10127eb28bc547994532ccee5bb58a5d649ad2662bc2e7d04 +691ms - aztec:sequencer [VERBOSE] Processing tx 03f9eba0b82f51a10127eb28bc547994532ccee5bb58a5d649ad2662bc2e7d04 +0ms - aztec:simulator:public_executor [VERBOSE] [AVM] Executing public external function Token:total_supply (via dispatch)@0x1b99a46103bf947d5e422e7bf39fedcc759d29b98ff583cd9b4c5ea79e28f30c with 12000000 allocated L2 gas. +9s - aztec:simulator:public_executor [VERBOSE] [AVM] Token:total_supply (via dispatch) returned, reverted: false. eventName=avm-simulation appCircuitName=Token:total_supply (via dispatch) duration=163.14211106300354 bytecodeSize=37708 +163ms - aztec:simulator:public_executor [VERBOSE] [AVM] Token:total_supply (via dispatch) simulation complete. Reverted=false. Consumed 8579 L2 gas, ending with 11991421 L2 gas left. +1ms - aztec:simulator:public_executor [VERBOSE] [AVM] Executing public external function Token:balance_of_public (via dispatch)@0x1b99a46103bf947d5e422e7bf39fedcc759d29b98ff583cd9b4c5ea79e28f30c with 12000000 allocated L2 gas. +362ms - aztec:simulator:public_executor [VERBOSE] [AVM] Token:balance_of_public (via dispatch) returned, reverted: false. eventName=avm-simulation appCircuitName=Token:balance_of_public (via dispatch) duration=168.12057399749756 bytecodeSize=37708 +168ms - aztec:simulator:public_executor [VERBOSE] [AVM] Token:balance_of_public (via dispatch) simulation complete. Reverted=false. Consumed 27282 L2 gas, ending with 11972718 L2 gas left. +1ms - aztec:simulator:public_executor [VERBOSE] [AVM] Executing public external function Token:balance_of_public (via dispatch)@0x1b99a46103bf947d5e422e7bf39fedcc759d29b98ff583cd9b4c5ea79e28f30c with 12000000 allocated L2 gas. 
+356ms - aztec:sequencer [VERBOSE] Sequencer sync check succeeded worldStateNumber=8 worldStateHash=0x1bd2250736e76438fab780a4ac1e7c02444601ec879f32d09a331381da68448b l2BlockSourceNumber=8 l2BlockSourceHash=0x1bd2250736e76438fab780a4ac1e7c02444601ec879f32d09a331381da68448b p2pNumber=8 l1ToL2MessageSourceNumber=8 +2s - aztec:simulator:public_executor [VERBOSE] [AVM] Token:balance_of_public (via dispatch) returned, reverted: false. eventName=avm-simulation appCircuitName=Token:balance_of_public (via dispatch) duration=192.6574889421463 bytecodeSize=37708 +193ms - aztec:simulator:public_executor [VERBOSE] [AVM] Token:balance_of_public (via dispatch) simulation complete. Reverted=false. Consumed 27282 L2 gas, ending with 11972718 L2 gas left. +1ms - aztec:pxe_service [INFO] Executed local simulation for 03f9eba0b82f51a10127eb28bc547994532ccee5bb58a5d649ad2662bc2e7d04 +3s - aztec:simulator:unconstrained_execution [VERBOSE] Executing unconstrained function 0x1b99a46103bf947d5e422e7bf39fedcc759d29b98ff583cd9b4c5ea79e28f30c:0x4375727c(balance_of_private) +0ms - aztec:pxe_service [VERBOSE] Unconstrained simulation for 0x1b99a46103bf947d5e422e7bf39fedcc759d29b98ff583cd9b4c5ea79e28f30c.balance_of_private completed +48ms - aztec:simulator:unconstrained_execution [VERBOSE] Executing unconstrained function 0x1b99a46103bf947d5e422e7bf39fedcc759d29b98ff583cd9b4c5ea79e28f30c:0x4375727c(balance_of_private) +0ms - aztec:pxe_service [VERBOSE] Unconstrained simulation for 0x1b99a46103bf947d5e422e7bf39fedcc759d29b98ff583cd9b4c5ea79e28f30c.balance_of_private completed +54ms - aztec:simulator:private_execution [VERBOSE] Executing external function SchnorrAccount:entrypoint@0x08a884b761bf510bc032a884aa3ec2ce1f326b2463039e0b64df4c05a6273b78 +0ms - aztec:pxe_service [VERBOSE] Simulation completed for 0x08a884b761bf510bc032a884aa3ec2ce1f326b2463039e0b64df4c05a6273b78:entrypoint +167ms - aztec:sequencer [VERBOSE] Sequencer sync check succeeded worldStateNumber=8 worldStateHash=0x1bd2250736e76438fab780a4ac1e7c02444601ec879f32d09a331381da68448b l2BlockSourceNumber=8 l2BlockSourceHash=0x1bd2250736e76438fab780a4ac1e7c02444601ec879f32d09a331381da68448b p2pNumber=8 l1ToL2MessageSourceNumber=8 +2s - aztec:node [INFO] Simulating tx 0661d9dc7eb81944a60bb39cd8facb641a734cee2d5ba177491a06d608e29349 +3s - aztec:pxe_service [INFO] Executed local simulation for 0661d9dc7eb81944a60bb39cd8facb641a734cee2d5ba177491a06d608e29349 +516ms - aztec:prover-node [INFO] Stopping ProverNode +8m - aztec:prover-node:epoch-monitor [INFO] Stopped EpochMonitor +8m - aztec:prover-node:claims-monitor [VERBOSE] Stopping ClaimsMonitor +8m - aztec:prover-node:claims-monitor [INFO] Stopped ClaimsMonitor +0ms - aztec:archiver [INFO] Stopped. +8m - aztec:world_state [INFO] Stopped world state synchronizer +8m - aztec:prover-node [INFO] Stopped ProverNode +1ms - aztec:node [INFO] Stopping +33ms - aztec:sequencer [INFO] Stopped sequencer +423ms - aztec:p2p [INFO] P2P client stopped. +7m - aztec:world_state [INFO] Stopped world state synchronizer +23s - aztec:archiver [INFO] Stopped. 
+23s - aztec:node [INFO] Stopped +4ms - aztec:pxe_service [INFO] Cancelled Job Queue +8ms - aztec:pxe_synchronizer [INFO] Stopped +9m - aztec:pxe_service [INFO] Stopped Synchronizer +0ms - aztec:pxe_service_bc0f00 [INFO] Cancelled Job Queue +6m - aztec:pxe_synchronizer_bc0f00 [INFO] Stopped +8m - aztec:pxe_service_bc0f00 [INFO] Stopped Synchronizer +0ms - aztec:pxe_service_bd0f00 [INFO] Cancelled Job Queue +7m - aztec:pxe_synchronizer_bd0f00 [INFO] Stopped +8m - aztec:pxe_service_bd0f00 [INFO] Stopped Synchronizer +0ms - aztec:prover-node [INFO] Stopping ProverNode +6m - aztec:prover-node:epoch-monitor [INFO] Stopped EpochMonitor +8m - aztec:prover-node:claims-monitor [VERBOSE] Stopping ClaimsMonitor +6m - aztec:prover-node:claims-monitor [INFO] Stopped ClaimsMonitor +0ms - aztec:prover-client:prover-agent [INFO] Agent stopped +26s - aztec:prover-client:prover-pool:queue [INFO] Proving queue stopped +8m - aztec:archiver [ERROR] Error syncing archiver: HttpRequestError: HTTP request failed. - -URL: http://127.0.0.1:46619 -Request body: {"method":"eth_blockNumber"} - -Details: fetch failed -Version: viem@2.10.2 - at Object.request (/mnt/user-data/mara/aztec-packages/yarn-project/node_modules/viem/utils/rpc/http.ts:141:15) - at processTicksAndRejections (node:internal/process/task_queues:95:5) - at fn (/mnt/user-data/mara/aztec-packages/yarn-project/node_modules/viem/clients/transports/http.ts:124:19) - at request (/mnt/user-data/mara/aztec-packages/yarn-project/node_modules/viem/clients/transports/http.ts:129:39) - at withRetry.delay.count.count (/mnt/user-data/mara/aztec-packages/yarn-project/node_modules/viem/utils/buildRequest.ts:104:18) - at attemptRetry (/mnt/user-data/mara/aztec-packages/yarn-project/node_modules/viem/utils/promise/withRetry.ts:44:22) { - details: 'fetch failed', - docsPath: undefined, - metaMessages: [ - 'URL: http://127.0.0.1:46619', - 'Request body: {"method":"eth_blockNumber"}' - ], - shortMessage: 'HTTP request failed.', - version: 'viem@2.10.2', - body: { method: 'eth_blockNumber', params: undefined }, - headers: undefined, - status: undefined, - url: 'http://127.0.0.1:46619' -} +25s - aztec:archiver [INFO] Stopped. +1s - aztec:world_state [INFO] Stopped world state synchronizer +26s - aztec:node [INFO] Stopping +3s - aztec:sequencer [INFO] Stopped sequencer +3s - aztec:p2p [INFO] P2P client stopped. +3s - aztec:world_state [INFO] Stopped world state synchronizer +3s - aztec:archiver [INFO] Stopped. +3s - aztec:node [INFO] Stopped +0ms - aztec:prover-node [INFO] Stopped ProverNode +3s -PASS src/e2e_prover/full.test.ts (558.02 s) - full_prover - ✓ makes both public and private transfers (476868 ms) - ✓ rejects txs with invalid proofs (10452 ms) - -Test Suites: 1 passed, 1 total -Tests: 2 passed, 2 total -Snapshots: 0 total -Time: 558.066 s, estimated 1701 s -Ran all test suites matching /full.test.ts/i. -Force exiting Jest: Have you considered using `--detectOpenHandles` to detect async operations that kept running after all tests finished? 
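The execute.ts change below moves the barretenberg wrapper off the dedicated `client_ivc_prove_output_all_msgpack` and `verify_client_ivc` subcommands and onto the unified `prove` and `verify` entrypoints, with the scheme and input type passed as explicit flags. As a rough sketch of the invocations assembled by the updated wrappers (the `bb` binary name and the bracketed paths are placeholders mirroring the args arrays, not actual defaults):

```sh
# Proving: the scheme and input type become explicit flags on the unified entrypoint
bb prove -o <outputPath> -b <bytecodeStackPath> -w <witnessStackPath> -v \
  --scheme client_ivc --input_type runtime_stack

# Verifying: same pattern, mirroring the args array in verifyClientIvcProof
bb verify -o <targetPath> --scheme client_ivc
```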
diff --git a/yarn-project/bb-prover/src/bb/execute.ts b/yarn-project/bb-prover/src/bb/execute.ts index e73a210f3f9..a14598863a9 100644 --- a/yarn-project/bb-prover/src/bb/execute.ts +++ b/yarn-project/bb-prover/src/bb/execute.ts @@ -225,13 +225,25 @@ export async function executeBbClientIvcProof( // Write the bytecode to the working directory log(`bytecodePath ${bytecodeStackPath}`); log(`outputPath ${outputPath}`); - const args = ['-o', outputPath, '-b', bytecodeStackPath, '-w', witnessStackPath, '-v']; + const args = [ + '-o', + outputPath, + '-b', + bytecodeStackPath, + '-w', + witnessStackPath, + '-v', + '--scheme', + 'client_ivc', + '--input_type', + 'runtime_stack', + ]; const timer = new Timer(); const logFunction = (message: string) => { log(`bb - ${message}`); }; - const result = await executeBB(pathToBB, 'client_ivc_prove_output_all_msgpack', args, logFunction); + const result = await executeBB(pathToBB, 'prove', args, logFunction); const durationMs = timer.ms(); if (result.status == BB_RESULT.SUCCESS) { @@ -636,9 +648,9 @@ export async function verifyClientIvcProof( } try { - const args = ['-o', targetPath]; + const args = ['-o', targetPath, '--scheme', 'client_ivc']; const timer = new Timer(); - const command = 'verify_client_ivc'; + const command = 'verify'; const result = await executeBB(pathToBB, command, args, log); const duration = timer.ms(); if (result.status == BB_RESULT.SUCCESS) { From 84ff623ea00d0c6da4db960653655d7d485bccb1 Mon Sep 17 00:00:00 2001 From: Alex Gherghisan Date: Tue, 3 Dec 2024 14:57:04 +0000 Subject: [PATCH 03/24] feat: optionally emit public bytecode (#10365) This PR builds on top of #10009 and makes publishing bytecode conditionally on a function parameter such that it might be enabled/disabled on just some networks --- .../src/events/class_registered.nr | 20 +++++++++---------- .../src/main.nr | 14 ++++--------- .../aztec.js/src/deployment/register_class.ts | 9 ++++++++- .../contract_class_registration.test.ts | 10 +++++++++- 4 files changed, 30 insertions(+), 23 deletions(-) diff --git a/noir-projects/noir-contracts/contracts/contract_class_registerer_contract/src/events/class_registered.nr b/noir-projects/noir-contracts/contracts/contract_class_registerer_contract/src/events/class_registered.nr index 34949a5b691..8e6edbfdb57 100644 --- a/noir-projects/noir-contracts/contracts/contract_class_registerer_contract/src/events/class_registered.nr +++ b/noir-projects/noir-contracts/contracts/contract_class_registerer_contract/src/events/class_registered.nr @@ -1,30 +1,28 @@ use dep::aztec::protocol_types::{ - constants::REGISTERER_CONTRACT_CLASS_REGISTERED_MAGIC_VALUE, contract_class_id::ContractClassId, + constants::{ + MAX_PACKED_PUBLIC_BYTECODE_SIZE_IN_FIELDS, REGISTERER_CONTRACT_CLASS_REGISTERED_MAGIC_VALUE, + }, + contract_class_id::ContractClassId, traits::Serialize, }; -// TODO(#10007): Use MAX_PACKED_PUBLIC_BYTECODE_SIZE_IN_FIELDS instead -pub global MAX_BROADCASTEABLE_PACKED_PUBLIC_BYTECODE_SIZE_IN_FIELDS: u32 = 100; - pub struct ContractClassRegistered { contract_class_id: ContractClassId, version: Field, artifact_hash: Field, private_functions_root: Field, - packed_public_bytecode: [Field; MAX_BROADCASTEABLE_PACKED_PUBLIC_BYTECODE_SIZE_IN_FIELDS], + packed_public_bytecode: [Field; MAX_PACKED_PUBLIC_BYTECODE_SIZE_IN_FIELDS], } -impl Serialize for ContractClassRegistered { - fn serialize( - self: Self, - ) -> [Field; MAX_BROADCASTEABLE_PACKED_PUBLIC_BYTECODE_SIZE_IN_FIELDS + 5] { - let mut packed = [0; 
MAX_BROADCASTEABLE_PACKED_PUBLIC_BYTECODE_SIZE_IN_FIELDS + 5]; +impl Serialize for ContractClassRegistered { + fn serialize(self: Self) -> [Field; MAX_PACKED_PUBLIC_BYTECODE_SIZE_IN_FIELDS + 5] { + let mut packed = [0; MAX_PACKED_PUBLIC_BYTECODE_SIZE_IN_FIELDS + 5]; packed[0] = REGISTERER_CONTRACT_CLASS_REGISTERED_MAGIC_VALUE; packed[1] = self.contract_class_id.to_field(); packed[2] = self.version; packed[3] = self.artifact_hash; packed[4] = self.private_functions_root; - for i in 0..MAX_BROADCASTEABLE_PACKED_PUBLIC_BYTECODE_SIZE_IN_FIELDS { + for i in 0..MAX_PACKED_PUBLIC_BYTECODE_SIZE_IN_FIELDS { packed[i + 5] = self.packed_public_bytecode[i]; } packed diff --git a/noir-projects/noir-contracts/contracts/contract_class_registerer_contract/src/main.nr b/noir-projects/noir-contracts/contracts/contract_class_registerer_contract/src/main.nr index 1e8fb176fd1..6c90b6d4cf5 100644 --- a/noir-projects/noir-contracts/contracts/contract_class_registerer_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/contract_class_registerer_contract/src/main.nr @@ -22,9 +22,7 @@ contract ContractClassRegisterer { }; use crate::events::{ - class_registered::{ - ContractClassRegistered, MAX_BROADCASTEABLE_PACKED_PUBLIC_BYTECODE_SIZE_IN_FIELDS, - }, + class_registered::ContractClassRegistered, private_function_broadcasted::{ ClassPrivateFunctionBroadcasted, InnerPrivateFunction, PrivateFunction, }, @@ -44,6 +42,7 @@ contract ContractClassRegisterer { artifact_hash: Field, private_functions_root: Field, public_bytecode_commitment: Field, + emit: bool, ) { // TODO: Validate public_bytecode_commitment is the correct commitment of packed_public_bytecode // TODO: We should be able to remove public_bytecode_commitment from the input if it's calculated in this function @@ -98,18 +97,13 @@ contract ContractClassRegisterer { // TODO(#10007): Drop this conditional and always emit the bytecode. We allow skipping the broadcast // as a stopgap solution to allow txs to fit in Sepolia when we broadcast public bytecode. - if bytecode_length_in_fields <= MAX_BROADCASTEABLE_PACKED_PUBLIC_BYTECODE_SIZE_IN_FIELDS { - let mut event_public_bytecode = - [0; MAX_BROADCASTEABLE_PACKED_PUBLIC_BYTECODE_SIZE_IN_FIELDS]; - for i in 0..MAX_BROADCASTEABLE_PACKED_PUBLIC_BYTECODE_SIZE_IN_FIELDS { - event_public_bytecode[i] = packed_public_bytecode[i]; - } + if emit { let event = ContractClassRegistered { contract_class_id, version: 1, artifact_hash, private_functions_root, - packed_public_bytecode: event_public_bytecode, + packed_public_bytecode, }; emit_contract_class_log(&mut context, event.serialize()); } diff --git a/yarn-project/aztec.js/src/deployment/register_class.ts b/yarn-project/aztec.js/src/deployment/register_class.ts index 8ba9c99f55a..eaaba5e8b95 100644 --- a/yarn-project/aztec.js/src/deployment/register_class.ts +++ b/yarn-project/aztec.js/src/deployment/register_class.ts @@ -5,15 +5,22 @@ import { type ContractFunctionInteraction } from '../contract/contract_function_ import { type Wallet } from '../wallet/index.js'; import { getRegistererContract } from './protocol_contracts.js'; +const defaultEmitPublicBytecode = + // guard against `process` not being defined (e.g. in the browser) + typeof process === 'object' && typeof process.env === 'object' + ? ['1', 'true', 'yes', ''].includes(process.env.AZTEC_EMIT_PUBLIC_BYTECODE ?? '') + : true; + /** Sets up a call to register a contract class given its artifact. 
*/ export async function registerContractClass( wallet: Wallet, artifact: ContractArtifact, + emitPublicBytecode = defaultEmitPublicBytecode, ): Promise { const { artifactHash, privateFunctionsRoot, publicBytecodeCommitment, packedBytecode } = getContractClassFromArtifact(artifact); const encodedBytecode = bufferAsFields(packedBytecode, MAX_PACKED_PUBLIC_BYTECODE_SIZE_IN_FIELDS); const registerer = getRegistererContract(wallet); await wallet.addCapsule(encodedBytecode); - return registerer.methods.register(artifactHash, privateFunctionsRoot, publicBytecodeCommitment); + return registerer.methods.register(artifactHash, privateFunctionsRoot, publicBytecodeCommitment, emitPublicBytecode); } diff --git a/yarn-project/end-to-end/src/e2e_deploy_contract/contract_class_registration.test.ts b/yarn-project/end-to-end/src/e2e_deploy_contract/contract_class_registration.test.ts index f74795170f2..d436c38e0ac 100644 --- a/yarn-project/end-to-end/src/e2e_deploy_contract/contract_class_registration.test.ts +++ b/yarn-project/end-to-end/src/e2e_deploy_contract/contract_class_registration.test.ts @@ -49,7 +49,7 @@ describe('e2e_deploy_contract contract class registration', () => { beforeAll(async () => { artifact = StatefulTestContract.artifact; - registrationTxReceipt = await registerContractClass(wallet, artifact).then(c => c.send().wait()); + registrationTxReceipt = await registerContractClass(wallet, artifact, false).then(c => c.send().wait()); contractClass = getContractClassFromArtifact(artifact); // TODO(#10007) Remove this call. Node should get the bytecode from the event broadcast. @@ -58,6 +58,14 @@ describe('e2e_deploy_contract contract class registration', () => { }); describe('registering a contract class', () => { + it('optionally emits public bytecode', async () => { + const registrationTxReceipt = await registerContractClass(wallet, TestContract.artifact, true).then(c => + c.send().wait(), + ); + const logs = await aztecNode.getContractClassLogs({ txHash: registrationTxReceipt.txHash }); + expect(logs.logs.length).toEqual(1); + }); + // TODO(#10007) Remove this test. We should always broadcast public bytecode. it('bypasses broadcast if exceeds bytecode limit for event size', async () => { const logs = await aztecNode.getContractClassLogs({ txHash: registrationTxReceipt.txHash }); From da809c58290f9590836f45ec59376cbf04d3c4ce Mon Sep 17 00:00:00 2001 From: Aztec Bot <49558828+AztecBot@users.noreply.github.com> Date: Tue, 3 Dec 2024 10:44:23 -0500 Subject: [PATCH 04/24] chore: redo typo PR by Dimitrolito (#10364) Thanks Dimitrolito for https://github.com/AztecProtocol/aztec-packages/pull/10171. Our policy is to redo typo changes to dissuade metric farming. This is an automated script. 
--- barretenberg/cpp/src/barretenberg/ecc/fields/field_docs.md | 2 +- barretenberg/cpp/src/barretenberg/ecc/pippenger.md | 2 +- docs/docs/aztec/concepts/accounts/keys.md | 2 +- docs/docs/protocol-specs/addresses-and-keys/precompiles.md | 2 +- docs/docs/protocol-specs/calls/public-private-messaging.md | 2 +- docs/docs/protocol-specs/decentralization/p2p-network.md | 2 +- docs/docs/protocol-specs/intro.md | 4 ++-- docs/docs/protocol-specs/public-vm/alu.md | 2 +- .../developer_references/common_errors/aztecnr-errors.md | 2 +- 9 files changed, 10 insertions(+), 10 deletions(-) diff --git a/barretenberg/cpp/src/barretenberg/ecc/fields/field_docs.md b/barretenberg/cpp/src/barretenberg/ecc/fields/field_docs.md index 912804f408e..1c52ca64813 100644 --- a/barretenberg/cpp/src/barretenberg/ecc/fields/field_docs.md +++ b/barretenberg/cpp/src/barretenberg/ecc/fields/field_docs.md @@ -11,7 +11,7 @@ We use Montgomery reduction to speed up field multiplication. For an original el The goal of using Montgomery form is to avoid heavy division modulo \f$p\f$. To compute a representative of element $$c = a⋅b\ mod\ p$$ we compute $$c⋅R = (a⋅R)⋅(b⋅R) / R\ mod\ p$$, but we use an efficient division trick to avoid straight modular division. Let's look into the standard 4⋅64 case: 1. First, we compute the value $$c_r=c⋅R⋅R = aR⋅bR$$ in integers and get a value with 8 64-bit limbs 2. Then we take the lowest limb of \f$c_r\f$ (\f$c_r[0]\f$) and multiply it by a special value $$r_{inv} = -1 ⋅ p^{-1}\ mod\ 2^{64}$$ As a result we get $$k = r_{inv}⋅ c_r[0]\ mod\ 2^{64}$$ -3. Next we update \f$c_r\f$ in integers by adding a value \f$k⋅p\f$: $$c_r += k⋅p$$ You might notice that the value of \f$c_r\ mod\ p\f$ hasn't changed, since we've added a multiple of the modulus. A the same time, if we look at the expression modulo \f$2^{64}\f$: $$c_r + k⋅p = c_r + c_r⋅r_{inv}⋅p = c_r + c_r⋅ (-1)⋅p^{-1}⋅p = c_r - c_r = 0\ mod\ 2^{64}$$ The result is equivalent modulo \f$p\f$, but we zeroed out the lowest limb +3. Next we update \f$c_r\f$ in integers by adding a value \f$k⋅p\f$: $$c_r += k⋅p$$ You might notice that the value of \f$c_r\ mod\ p\f$ hasn't changed, since we've added a multiple of the modulus. At the same time, if we look at the expression modulo \f$2^{64}\f$: $$c_r + k⋅p = c_r + c_r⋅r_{inv}⋅p = c_r + c_r⋅ (-1)⋅p^{-1}⋅p = c_r - c_r = 0\ mod\ 2^{64}$$ The result is equivalent modulo \f$p\f$, but we zeroed out the lowest limb 4. We perform the same operation for \f$c_r[1]\f$, but instead of adding \f$k⋅p\f$, we add \f$2^{64}⋅k⋅p\f$. In the implementation, instead of adding \f$k⋅ p\f$ to limbs of \f$c_r\f$ starting with zero, we just start with limb 1. This ensures that \f$c_r[1]=0\f$. We then perform the same operation for 2 more limbs. 5. At this stage we are left with a version of \f$c_r\f$ where the first 4 limbs of the total 8 limbs are zero. So if we treat the 4 high limbs as a separate integer \f$c_{r.high}\f$, $$c_r = c_{r.high}⋅2^{256}=c_{r.high}⋅R\ mod\ p \Rightarrow c_{r.high} = c\cdot R\ mod\ p$$ and we can get the evaluation simply by taking the 4 high limbs of \f$c_r\f$. 6. The previous step has reduced the intermediate value of \f$cR\f$ to range \f$[0,2p)\f$, so we must check if it is more than \f$p\f$ and subtract the modulus once if it overflows. 
diff --git a/barretenberg/cpp/src/barretenberg/ecc/pippenger.md b/barretenberg/cpp/src/barretenberg/ecc/pippenger.md index a7463663216..9756ace72e9 100644 --- a/barretenberg/cpp/src/barretenberg/ecc/pippenger.md +++ b/barretenberg/cpp/src/barretenberg/ecc/pippenger.md @@ -24,7 +24,7 @@ For example, let's say that our bit slice is 6 bits. The first round will take t So, for example, if the most significant 6 bits of a scalar are `011001` (25), we add the scalar's point into the 25th bucket. -At the end of each round, we then 'concatenate' all of the buckets into a sum. Let's represent each bucket accumulator in an array `A[num_buckets]`. The concatenation phase will compute `A[0] + 2A[1] + 3A[2] + 4A[3] + 5A[4] + ... = Sum`. +At the end of each round, we then 'concatenate' all the buckets into a sum. Let's represent each bucket accumulator in an array `A[num_buckets]`. The concatenation phase will compute `A[0] + 2A[1] + 3A[2] + 4A[3] + 5A[4] + ... = Sum`. Finally, we add each `Sum` point into an overall accumulator. For example, for a set of 254 bit scalars, if we evaluate the most 6 significant bits of each scalar and accumulate the resulting point into `Sum`, we actually need `(2^{248}).Sum` to accommodate for the bit shift. diff --git a/docs/docs/aztec/concepts/accounts/keys.md b/docs/docs/aztec/concepts/accounts/keys.md index cf731a5813c..87572444485 100644 --- a/docs/docs/aztec/concepts/accounts/keys.md +++ b/docs/docs/aztec/concepts/accounts/keys.md @@ -111,7 +111,7 @@ In the following section we describe a few ways how an account contract could be #### Using a private note -Storing the signing public key in a private note makes it accessible from the entrypoint function, which is required to be a private function, and allows for rotating the key when needed. However, keep in mind that reading a private note requires nullifying it to ensure it is up to date, so each transaction you send will destroy and recreate the public key. This has the side effect of enforcing a strict ordering across all transactions, since each transaction will refer the instantiation of the private note from the previous one. +Storing the signing public key in a private note makes it accessible from the entrypoint function, which is required to be a private function, and allows for rotating the key when needed. However, keep in mind that reading a private note requires nullifying it to ensure it is up-to-date, so each transaction you send will destroy and recreate the public key. This has the side effect of enforcing a strict ordering across all transactions, since each transaction will refer the instantiation of the private note from the previous one. #### Using an immutable private note diff --git a/docs/docs/protocol-specs/addresses-and-keys/precompiles.md b/docs/docs/protocol-specs/addresses-and-keys/precompiles.md index 07348290ea1..720e2c13d4d 100644 --- a/docs/docs/protocol-specs/addresses-and-keys/precompiles.md +++ b/docs/docs/protocol-specs/addresses-and-keys/precompiles.md @@ -8,7 +8,7 @@ Precompiled contracts, which borrow their name from Ethereum's, are contracts no Note that, unlike user-defined contracts, the address of a precompiled [contract instance](../contract-deployment/instances.md) and the [identifier of its class](../contract-deployment/classes.md#class-identifier) both have no known preimage. 
-The rationale for precompiled contracts is to provide a set of vetted primitives for [note encryption](../private-message-delivery/private-msg-delivery.md#encryption-and-decryption) and [tagging](../private-message-delivery/private-msg-delivery.md#note-tagging) that applications can use safely. These primitives are guaranteed to be always-satisfiable when called with valid arguments. This allows account contracts to choose their preferred method of encryption and tagging from any primitive in this set, and application contracts to call into them without the risk of calling into a untrusted code, which could potentially halt the execution flow via an unsatisfiable constraint. Furthermore, by exposing these primitives in a reserved set of well-known addresses, applications can be forward-compatible and incorporate new encryption and tagging methods as accounts opt into them. +The rationale for precompiled contracts is to provide a set of vetted primitives for [note encryption](../private-message-delivery/private-msg-delivery.md#encryption-and-decryption) and [tagging](../private-message-delivery/private-msg-delivery.md#note-tagging) that applications can use safely. These primitives are guaranteed to be always-satisfiable when called with valid arguments. This allows account contracts to choose their preferred method of encryption and tagging from any primitive in this set, and application contracts to call into them without the risk of calling into an untrusted code, which could potentially halt the execution flow via an unsatisfiable constraint. Furthermore, by exposing these primitives in a reserved set of well-known addresses, applications can be forward-compatible and incorporate new encryption and tagging methods as accounts opt into them. ## Constants diff --git a/docs/docs/protocol-specs/calls/public-private-messaging.md b/docs/docs/protocol-specs/calls/public-private-messaging.md index e438f49d96e..3dc4ab83e5e 100644 --- a/docs/docs/protocol-specs/calls/public-private-messaging.md +++ b/docs/docs/protocol-specs/calls/public-private-messaging.md @@ -15,7 +15,7 @@ Private functions are executed locally by the user, so that the user can ensure Given this natural flow from private-land to public-land, private functions can enqueue calls to public functions. But the opposite direction is not true. We'll see [below](#public-to-private-messaging) that public functions cannot "call" private functions, but rather they must pass messages. -Since private functions execute first, they cannot 'wait' on the results of any of their calls to public functions. +Since private functions execute first, they cannot 'wait' on the results of their calls to public functions. By way of example, suppose a function makes a call to a public function, and then to a private function. The public function will not be executed immediately, but will instead be enqueued for the sequencer to execute later. diff --git a/docs/docs/protocol-specs/decentralization/p2p-network.md b/docs/docs/protocol-specs/decentralization/p2p-network.md index 62c1bbe4c7f..971955e125c 100644 --- a/docs/docs/protocol-specs/decentralization/p2p-network.md +++ b/docs/docs/protocol-specs/decentralization/p2p-network.md @@ -61,7 +61,7 @@ When new participants join the network for the first time, they will need to loc Whilst the DiscV5 specification is still under development, the protocol is currently in use by Ethereum's consensus layer with 100,000s of participants. 
Nodes maintain a DHT routing table of Ethereum Node Records (ENRs), periodically flushing nodes that are no longer responsive and searching for new nodes by requesting records from their neighbours. -Neighbours in this sense are not necessarily in geographical proximity. Node distance is defined as the bitwise XOR of the nodes 32 bit IDs. +Neighbours in this sense are not necessarily in geographical proximity. Node distance is defined as the bitwise XOR of the nodes 32-bit IDs. ``` distance(Id1, Id2) = Id1 XOR Id2 diff --git a/docs/docs/protocol-specs/intro.md b/docs/docs/protocol-specs/intro.md index 275b370ed15..58966b25c73 100644 --- a/docs/docs/protocol-specs/intro.md +++ b/docs/docs/protocol-specs/intro.md @@ -22,7 +22,7 @@ Some of the info we need to populate this document might have already been writt ## Diagrams -To increase the probability of diagrams being up to date we encourage you to write them in `mermaid`. Mermaid is a markdown-like language for generating diagrams and is supported by Docusaurus, so it will be rendered automatically for you. +To increase the probability of diagrams being up-to-date we encourage you to write them in `mermaid`. Mermaid is a markdown-like language for generating diagrams and is supported by Docusaurus, so it will be rendered automatically for you. You simply create a codeblock specifying the language as `mermaid` and write your diagram in the codeblock. For example: ````txt @@ -87,7 +87,7 @@ classDiagram If mermaid doesn't cover your case, please add both the rendered image and the source code to the documentation. Most of the tools for diagramming can export a non-rendered representation that can then be updated by other people. Please name it such that it is clear what tool was used. -This should allow us to keep the diagrams up to date, by allowing others to update them. +This should allow us to keep the diagrams up-to-date, by allowing others to update them. ## For each protocol feature diff --git a/docs/docs/protocol-specs/public-vm/alu.md b/docs/docs/protocol-specs/public-vm/alu.md index bf30043974c..70a902406ad 100644 --- a/docs/docs/protocol-specs/public-vm/alu.md +++ b/docs/docs/protocol-specs/public-vm/alu.md @@ -4,7 +4,7 @@ The algebraic logic unit performs operations analogous to an arithmetic unit in This component of the VM circuit evaluates both base-2 arithmetic operations and prime-field operation. It takes its input/output from the intermediate registers in the state controller. -The following block diagram maps out an draft of the internal components of the "ALU" +The following block diagram maps out a draft of the internal components of the "ALU" ![](/img/protocol-specs/public-vm/alu.png) diff --git a/docs/docs/reference/developer_references/common_errors/aztecnr-errors.md b/docs/docs/reference/developer_references/common_errors/aztecnr-errors.md index e76d9206355..2a2b3f3d381 100644 --- a/docs/docs/reference/developer_references/common_errors/aztecnr-errors.md +++ b/docs/docs/reference/developer_references/common_errors/aztecnr-errors.md @@ -72,4 +72,4 @@ To address the error, register the account by calling `server.registerAccount(.. You may encounter this error when trying to send a transaction that is using an invalid contract. The contract may compile without errors and you only encounter this when sending the transaction. -This error may arise when function parameters are not properly formatted, when trying to "double-spend" a note, or it may indicate that there is a bug deeper in the stack (e.g. 
a bug in the Aztec.nr library or deeper). If you hit this error, double check your contract implementation, but also consider [opening an issue (GitHub link)](https://github.com/AztecProtocol/aztec-packages/issues/new). +This error may arise when function parameters are not properly formatted, when trying to "double-spend" a note, or it may indicate that there is a bug deeper in the stack (e.g. a bug in the Aztec.nr library or deeper). If you hit this error, double-check your contract implementation, but also consider [opening an issue (GitHub link)](https://github.com/AztecProtocol/aztec-packages/issues/new). From 5a024803648e8a645cbafdeb4e2ab9f6bfa26117 Mon Sep 17 00:00:00 2001 From: Alex Gherghisan Date: Tue, 3 Dec 2024 16:45:20 +0000 Subject: [PATCH 05/24] fix: await block unwind when a reorg happens (#10380) Noticed the unwind operation did not block on the delete operation (I saw this in the prover node logs where logs were printed out of order) --- .../archiver/src/archiver/archiver.ts | 31 +++++++++++-------- 1 file changed, 18 insertions(+), 13 deletions(-) diff --git a/yarn-project/archiver/src/archiver/archiver.ts b/yarn-project/archiver/src/archiver/archiver.ts index ff6b9624902..550e98ce172 100644 --- a/yarn-project/archiver/src/archiver/archiver.ts +++ b/yarn-project/archiver/src/archiver/archiver.ts @@ -955,22 +955,27 @@ class ArchiverStoreHelper // from - blocksToUnwind = the new head, so + 1 for what we need to remove const blocks = await this.getBlocks(from - blocksToUnwind + 1, blocksToUnwind); - return [ + const opResults = await Promise.all([ // Unroll all logs emitted during the retrieved blocks and extract any contract classes and instances from them - ...(await Promise.all( - blocks.map(async block => { - const contractClassLogs = block.data.body.txEffects - .flatMap(txEffect => (txEffect ? [txEffect.contractClassLogs] : [])) - .flatMap(txLog => txLog.unrollLogs()); - // ContractInstanceDeployed event logs are broadcast in privateLogs. - const privateLogs = block.data.body.txEffects.flatMap(txEffect => txEffect.privateLogs); - await this.#updateRegisteredContractClasses(contractClassLogs, block.data.number, Operation.Delete); - await this.#updateDeployedContractInstances(privateLogs, block.data.number, Operation.Delete); - }), - )), + ...blocks.map(async block => { + const contractClassLogs = block.data.body.txEffects + .flatMap(txEffect => (txEffect ? [txEffect.contractClassLogs] : [])) + .flatMap(txLog => txLog.unrollLogs()); + + // ContractInstanceDeployed event logs are broadcast in privateLogs. + const privateLogs = block.data.body.txEffects.flatMap(txEffect => txEffect.privateLogs); + + return ( + (await this.#updateRegisteredContractClasses(contractClassLogs, block.data.number, Operation.Delete)) && + (await this.#updateDeployedContractInstances(privateLogs, block.data.number, Operation.Delete)) + ); + }), + this.store.deleteLogs(blocks.map(b => b.data)), this.store.unwindBlocks(from, blocksToUnwind), - ].every(Boolean); + ]); + + return opResults.every(Boolean); } getBlocks(from: number, limit: number): Promise[]> { From 7e19b3991ca34bcf9dd43284d4d21ded87824366 Mon Sep 17 00:00:00 2001 From: just-mitch <68168980+just-mitch@users.noreply.github.com> Date: Tue, 3 Dec 2024 12:00:06 -0500 Subject: [PATCH 06/24] feat: deploy networks via github actions (#10381) I used locally with [act](https://github.com/nektos/act) to deploy a smoke network in the new aztec-gke cluster. 
--- .github/.gitignore | 1 + .github/workflows/network-deploy.yml | 94 ++++++++++++++++-------- spartan/terraform/deploy-release/main.tf | 4 +- spartan/terraform/gke-cluster/main.tf | 18 +++++ 4 files changed, 84 insertions(+), 33 deletions(-) create mode 100644 .github/.gitignore diff --git a/.github/.gitignore b/.github/.gitignore new file mode 100644 index 00000000000..b4ddc884c6b --- /dev/null +++ b/.github/.gitignore @@ -0,0 +1 @@ +.secrets \ No newline at end of file diff --git a/.github/workflows/network-deploy.yml b/.github/workflows/network-deploy.yml index 2aa862f4499..8ceba615141 100644 --- a/.github/workflows/network-deploy.yml +++ b/.github/workflows/network-deploy.yml @@ -1,18 +1,17 @@ -name: Aztec Network EKS Deployment - -# Manual trigerring of this workflow is intentionally disabled -# Helm deployments do not support lock files -# Without a lockfile, manual trigerring can lead to corrupted or partial deployments +name: Aztec Network Deployment on: - push: - branches: - - staging - - production - pull_request: - branches: - - staging - - production + workflow_dispatch: + inputs: + namespace: + description: The namespace to deploy to, e.g. smoke + required: true + values_file: + description: The values file to use, e.g. 1-validators.yaml + required: true + aztec_docker_image: + description: The Aztec Docker image to use, e.g. aztecprotocol/aztec:da809c58290f9590836f45ec59376cbf04d3c4ce-x86_64 + required: true jobs: network_deployment: @@ -24,34 +23,67 @@ jobs: # Set up a variable based on the branch name env: - NAMESPACE: ${{ github.ref == 'refs/heads/production' && 'production' || 'staging' }} + AZTEC_DOCKER_IMAGE: ${{ inputs.aztec_docker_image }} + NAMESPACE: ${{ inputs.namespace }} + VALUES_FILE: ${{ inputs.values_file }} CHART_PATH: ./spartan/aztec-network + CLUSTER_NAME: aztec-gke + REGION: us-west1-a + TF_STATE_BUCKET: aztec-terraform + GKE_CLUSTER_CONTEXT: gke_testnet-440309_us-west1-a_aztec-gke steps: - # Step 1: Check out the repository's code - name: Checkout code uses: actions/checkout@v3 - # Step 2: Configure AWS credentials using GitHub Secrets - - name: Configure AWS credentials - uses: aws-actions/configure-aws-credentials@v2 + - name: Authenticate to Google Cloud + uses: google-github-actions/auth@v2 with: - aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} - aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - aws-region: us-east-1 + credentials_json: ${{ secrets.GCP_SA_KEY }} + + - name: Set up Cloud SDK + uses: google-github-actions/setup-gcloud@v2 - # Step 3: Set up Kubernetes context for AWS EKS - - name: Configure kubectl with EKS cluster + - name: Install GKE Auth Plugin run: | - aws eks update-kubeconfig --region us-east-1 --name spartan + gcloud components install gke-gcloud-auth-plugin --quiet - # Step 4: Install Helm - - name: Install Helm + - name: Configure kubectl with GKE cluster run: | - curl https://raw.githubusercontent.com/helm/helm/master/scripts/get-helm-3 | bash + gcloud container clusters get-credentials ${{ env.CLUSTER_NAME }} --region ${{ env.REGION }} - # Step 5: Apply Helm Chart - - name: Deploy Helm chart + - name: Ensure Terraform state bucket exists run: | - helm dependency update ${{ env.CHART_PATH }} - helm upgrade --install ${{ env.NAMESPACE }} ${{ env.CHART_PATH }} --namespace ${{ env.NAMESPACE }} --set network.public=true --atomic --create-namespace --timeout 20m + if ! gsutil ls gs://${{ env.TF_STATE_BUCKET }} >/dev/null 2>&1; then + echo "Creating GCS bucket for Terraform state..." 
+ gsutil mb -l us-east4 gs://${{ env.TF_STATE_BUCKET }} + gsutil versioning set on gs://${{ env.TF_STATE_BUCKET }} + else + echo "Terraform state bucket already exists" + fi + + - name: Setup Terraform + uses: hashicorp/setup-terraform@v2 + with: + terraform_version: "1.5.0" # Specify your desired version + + - name: Terraform Init + working-directory: ./spartan/terraform/deploy-release + run: | + terraform init \ + -backend-config="bucket=${{ env.TF_STATE_BUCKET }}" \ + -backend-config="prefix=network-deploy/${{ env.REGION }}/${{ env.CLUSTER_NAME }}/${{ env.NAMESPACE }}/terraform.tfstate" \ + + - name: Terraform Plan + working-directory: ./spartan/terraform/deploy-release + run: | + terraform plan \ + -var="release_name=${{ env.NAMESPACE }}" \ + -var="values_file=${{ env.VALUES_FILE }}" \ + -var="gke_cluster_context=${{ env.GKE_CLUSTER_CONTEXT }}" \ + -var="aztec_docker_image=${{ env.AZTEC_DOCKER_IMAGE }}" \ + -out=tfplan + + - name: Terraform Apply + working-directory: ./spartan/terraform/deploy-release + run: terraform apply -auto-approve tfplan diff --git a/spartan/terraform/deploy-release/main.tf b/spartan/terraform/deploy-release/main.tf index 3b26f247325..bd98f0897a8 100644 --- a/spartan/terraform/deploy-release/main.tf +++ b/spartan/terraform/deploy-release/main.tf @@ -1,7 +1,7 @@ terraform { - backend "s3" { + backend "gcs" { bucket = "aztec-terraform" - region = "eu-west-2" + prefix = "terraform/state" } required_providers { helm = { diff --git a/spartan/terraform/gke-cluster/main.tf b/spartan/terraform/gke-cluster/main.tf index ed5f0ed4baf..971a4aacdbc 100644 --- a/spartan/terraform/gke-cluster/main.tf +++ b/spartan/terraform/gke-cluster/main.tf @@ -38,6 +38,24 @@ resource "google_project_iam_member" "gke_sa_roles" { member = "serviceAccount:${google_service_account.gke_sa.email}" } +# Create a new service account for Helm +resource "google_service_account" "helm_sa" { + account_id = "helm-sa" + display_name = "Helm Service Account" + description = "Service account for Helm operations" +} + +# Add IAM roles to the Helm service account +resource "google_project_iam_member" "helm_sa_roles" { + for_each = toset([ + "roles/container.admin", + "roles/storage.admin" + ]) + project = var.project + role = each.key + member = "serviceAccount:${google_service_account.helm_sa.email}" +} + # Create a GKE cluster resource "google_container_cluster" "primary" { name = "spartan-gke" From 90668c35a8556c4e77fce9fb4e6e0de931c7f872 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicol=C3=A1s=20Venturo?= Date: Tue, 3 Dec 2024 14:34:23 -0300 Subject: [PATCH 07/24] feat: AMM (#10153) Opening in favor of https://github.com/AztecProtocol/aztec-packages/pull/8644 due to @benesjan being away on vacation, and that PR using Graphite. Work here starts from commit a404b58e7d049ee7a56310702046f03a624fd1ee, which has been squashed into 81d7607d9d551aea4d6de78ec3ff535ec5d5a29a. This is the first implementation of Uniswap v2 style AMM that provides identity privacy. The contract is a single pool for two tokens with a fixed 0.3% swap fee. Adding and removing liquidity is done proportionally to the current ratio, resulting in no price impact and therefore no fees. Swaps can be performed by specifying either the amount in or the amount out. All three operations are completed in a single transaction each by leveraging partial notes. I created https://github.com/AztecProtocol/aztec-packages/issues/10225 to track pending work. 
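For intuition on the swap quoting (see `get_amount_out` in the new `lib.nr` below), the 0.3% fee is charged on the input amount before the constant-product quote is computed. With purely illustrative balances of 1,000,000 of each token and an input of 10,000:

```
amount_out = (997 * amount_in * balance_out) / (1000 * balance_in + 997 * amount_in)
           = (997 * 10,000 * 1,000,000) / (1000 * 1,000,000 + 997 * 10,000)
           ≈ 9,871
```

A fee-free quote on the same balances would return roughly 9,901, so the shortfall of about 30 tokens is the 0.3% fee; the rest of the gap from 10,000 is ordinary price impact.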
Some of the tasks in that epic are only work that arises from the AMM, but are not technically required to have a fully functioning system: this PR already achieves that. Only a happy-path end to end test is included here, since TXE currently lacks some of the features required in order to properly deal with partial notes. We should later write those as this will be a good test of TXE's capabilities and user experience, given the complexity of the setup. --- I added the e2e to be run on CI on all branches since it combines multiple complex features, and likely contains our largest transactions yet. --- .../src/core/libraries/ConstantsGen.sol | 4 +- .../aztec-nr/aztec/src/macros/mod.nr | 8 +- noir-projects/noir-contracts/Nargo.toml | 1 + .../contracts/amm_contract/Nargo.toml | 9 + .../contracts/amm_contract/src/config.nr | 29 + .../contracts/amm_contract/src/lib.nr | 96 ++++ .../contracts/amm_contract/src/main.nr | 531 ++++++++++++++++++ .../lending_contract/src/interest_math.nr | 2 +- .../contracts/token_contract/src/main.nr | 5 +- .../src/test/transfer_to_private.nr | 8 +- .../crates/types/src/constants.nr | 2 +- scripts/ci/get_e2e_jobs.sh | 1 + yarn-project/circuits.js/src/constants.gen.ts | 4 +- .../end-to-end/scripts/e2e_test_config.yml | 1 + yarn-project/end-to-end/src/e2e_amm.test.ts | 338 +++++++++++ 15 files changed, 1023 insertions(+), 16 deletions(-) create mode 100644 noir-projects/noir-contracts/contracts/amm_contract/Nargo.toml create mode 100644 noir-projects/noir-contracts/contracts/amm_contract/src/config.nr create mode 100644 noir-projects/noir-contracts/contracts/amm_contract/src/lib.nr create mode 100644 noir-projects/noir-contracts/contracts/amm_contract/src/main.nr create mode 100644 yarn-project/end-to-end/src/e2e_amm.test.ts diff --git a/l1-contracts/src/core/libraries/ConstantsGen.sol b/l1-contracts/src/core/libraries/ConstantsGen.sol index 31ec0c2410a..f76deaaf78c 100644 --- a/l1-contracts/src/core/libraries/ConstantsGen.sol +++ b/l1-contracts/src/core/libraries/ConstantsGen.sol @@ -18,7 +18,7 @@ library Constants { uint256 internal constant ARGS_LENGTH = 16; uint256 internal constant MAX_NOTE_HASHES_PER_CALL = 16; uint256 internal constant MAX_NULLIFIERS_PER_CALL = 16; - uint256 internal constant MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL = 4; + uint256 internal constant MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL = 5; uint256 internal constant MAX_ENQUEUED_CALLS_PER_CALL = 16; uint256 internal constant MAX_L2_TO_L1_MSGS_PER_CALL = 2; uint256 internal constant MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL = 64; @@ -200,7 +200,7 @@ library Constants { uint256 internal constant TOTAL_FEES_LENGTH = 1; uint256 internal constant TOTAL_MANA_USED_LENGTH = 1; uint256 internal constant HEADER_LENGTH = 25; - uint256 internal constant PRIVATE_CIRCUIT_PUBLIC_INPUTS_LENGTH = 731; + uint256 internal constant PRIVATE_CIRCUIT_PUBLIC_INPUTS_LENGTH = 739; uint256 internal constant PUBLIC_CIRCUIT_PUBLIC_INPUTS_LENGTH = 867; uint256 internal constant PRIVATE_CONTEXT_INPUTS_LENGTH = 38; uint256 internal constant FEE_RECIPIENT_LENGTH = 2; diff --git a/noir-projects/aztec-nr/aztec/src/macros/mod.nr b/noir-projects/aztec-nr/aztec/src/macros/mod.nr index 924c5bcf8e0..883a2028326 100644 --- a/noir-projects/aztec-nr/aztec/src/macros/mod.nr +++ b/noir-projects/aztec-nr/aztec/src/macros/mod.nr @@ -78,9 +78,9 @@ comptime fn generate_contract_interface(m: Module) -> Quoted { $fn_stubs_quote pub fn at( - target_contract: aztec::protocol_types::address::AztecAddress + addr: 
aztec::protocol_types::address::AztecAddress ) -> Self { - Self { target_contract } + Self { target_contract: addr } } pub fn interface() -> Self { @@ -92,9 +92,9 @@ comptime fn generate_contract_interface(m: Module) -> Quoted { #[contract_library_method] pub fn at( - target_contract: aztec::protocol_types::address::AztecAddress + addr: aztec::protocol_types::address::AztecAddress ) -> $module_name { - $module_name { target_contract } + $module_name { target_contract: addr } } #[contract_library_method] diff --git a/noir-projects/noir-contracts/Nargo.toml b/noir-projects/noir-contracts/Nargo.toml index df510e99432..18ba10820a7 100644 --- a/noir-projects/noir-contracts/Nargo.toml +++ b/noir-projects/noir-contracts/Nargo.toml @@ -1,5 +1,6 @@ [workspace] members = [ + "contracts/amm_contract", "contracts/app_subscription_contract", "contracts/auth_contract", "contracts/auth_registry_contract", diff --git a/noir-projects/noir-contracts/contracts/amm_contract/Nargo.toml b/noir-projects/noir-contracts/contracts/amm_contract/Nargo.toml new file mode 100644 index 00000000000..e5c4e342ed8 --- /dev/null +++ b/noir-projects/noir-contracts/contracts/amm_contract/Nargo.toml @@ -0,0 +1,9 @@ +[package] +name = "amm_contract" +authors = [""] +compiler_version = ">=0.25.0" +type = "contract" + +[dependencies] +aztec = { path = "../../../aztec-nr/aztec" } +token = { path = "../token_contract" } \ No newline at end of file diff --git a/noir-projects/noir-contracts/contracts/amm_contract/src/config.nr b/noir-projects/noir-contracts/contracts/amm_contract/src/config.nr new file mode 100644 index 00000000000..c83648c4a39 --- /dev/null +++ b/noir-projects/noir-contracts/contracts/amm_contract/src/config.nr @@ -0,0 +1,29 @@ +use dep::aztec::protocol_types::{address::AztecAddress, traits::{Deserialize, Serialize}}; + +global CONFIG_LENGTH: u32 = 3; + +/// We store the tokens of the pool in a struct such that to load it from SharedImmutable asserts only a single +/// merkle proof. +/// (Once we actually do the optimization. WIP in https://github.com/AztecProtocol/aztec-packages/pull/8022). +pub struct Config { + pub token0: AztecAddress, + pub token1: AztecAddress, + pub liquidity_token: AztecAddress, +} + +// Note: I could not get #[derive(Serialize)] to work so I had to implement it manually. +impl Serialize for Config { + fn serialize(self: Self) -> [Field; CONFIG_LENGTH] { + [self.token0.to_field(), self.token1.to_field(), self.liquidity_token.to_field()] + } +} + +impl Deserialize for Config { + fn deserialize(fields: [Field; CONFIG_LENGTH]) -> Self { + Self { + token0: AztecAddress::from_field(fields[0]), + token1: AztecAddress::from_field(fields[1]), + liquidity_token: AztecAddress::from_field(fields[2]), + } + } +} diff --git a/noir-projects/noir-contracts/contracts/amm_contract/src/lib.nr b/noir-projects/noir-contracts/contracts/amm_contract/src/lib.nr new file mode 100644 index 00000000000..6d8e4d89790 --- /dev/null +++ b/noir-projects/noir-contracts/contracts/amm_contract/src/lib.nr @@ -0,0 +1,96 @@ +/// Given an input amount of an asset and pair balances, returns the maximum output amount of the other asset. 
+pub fn get_amount_out(amount_in: U128, balance_in: U128, balance_out: U128) -> U128 { + assert(amount_in > U128::zero(), "INSUFFICIENT_INPUT_AMOUNT"); + assert((balance_in > U128::zero()) & (balance_out > U128::zero()), "INSUFFICIENT_LIQUIDITY"); + + // The expression below is: + // (amount_in * 997 * balance_out) / (balance_in * 10000 + amount_in * 997) + // which is equivalent to: + // balance_out * ((amount_in * 0.997) / (balance_in + amount_in * 0.997)) + // resulting in an implicit 0.3% fee on the amount in, as the fee tokens are not taken into consideration. + + let amount_in_with_fee = amount_in * U128::from_integer(997); + let numerator = amount_in_with_fee * balance_out; + let denominator = balance_in * U128::from_integer(1000) + amount_in_with_fee; + numerator / denominator +} + +/// Given an output amount of an asset and pair balances, returns a required input amount of the other asset. +pub fn get_amount_in(amount_out: U128, balance_in: U128, balance_out: U128) -> U128 { + assert(amount_out > U128::zero(), "INSUFFICIENT_OUTPUT_AMOUNT"); + assert((balance_in > U128::zero()) & (balance_out > U128::zero()), "INSUFFICIENT_LIQUIDITY"); + + // The expression below is: + // (balance_in * amount_out * 1000) / (balance_out - amout_out * 997) + 1 + // which is equivalent to: + // balance_in * (amount_out / (balance_in + amount_in)) * 1/0.997 + 1 + // resulting in an implicit 0.3% fee on the amount in, as the fee tokens are not taken into consideration. The +1 + // at the end ensures the rounding error favors the pool. + + let numerator = balance_in * amount_out * U128::from_integer(1000); + let denominator = (balance_out - amount_out) * U128::from_integer(997); + (numerator / denominator) + U128::from_integer(1) +} + +/// Given the desired amounts and balances of token0 and token1 returns the optimal amount of token0 and token1 to be added to the pool. +pub fn get_amounts_to_add( + amount0_max: U128, + amount1_max: U128, + amount0_min: U128, + amount1_min: U128, + balance0: U128, + balance1: U128, +) -> (U128, U128) { + // When adding tokens, both balances must grow by the same ratio, which means that their spot price is unchanged. + // Since any swaps would affect these ratios, liquidity providers supply a range of minimum and maximum balances + // they are willing to supply for each token (which translates to minimum and maximum relative prices of the + // tokens, preventing loss of value outside of this range due to e.g. front-running). + + if (balance0 == U128::zero()) | (balance1 == U128::zero()) { + // The token balances should only be zero when initializing the pool. In this scenario there is no prior ratio + // to follow so we simply transfer the full maximum balance - it is up to the caller to make sure that the ratio + // they've chosen results in a a reasonable spot price. + (amount0_max, amount1_max) + } else { + // There is a huge number of amount combinations that respect the minimum and maximum for each token, but we'll + // only consider the two scenarios in which one of the amounts is the maximum amount. + + // First we calculate the token1 amount that'd need to be supplied if we used the maximum amount for token0. 
+ let amount1_equivalent = get_equivalent_amount(amount0_max, balance0, balance1); + if (amount1_equivalent <= amount1_max) { + assert(amount1_equivalent >= amount1_min, "AMOUNT_1_BELOW_MINIMUM"); + (amount0_max, amount1_equivalent) + } else { + // If the max amount for token0 results in a token1 amount larger than the maximum, then we try with the + // maximum token1 amount, hoping that it'll result in a token0 amount larger than the minimum. + let amount0_equivalent = get_equivalent_amount(amount1_max, balance1, balance0); + // This should never happen, as it'd imply that the maximum is lower than the minimum. + assert(amount0_equivalent <= amount0_max); + + assert(amount0_equivalent >= amount0_min, "AMOUNT_0_BELOW_MINIMUM"); + (amount0_equivalent, amount1_max) + } + } +} + +/// Returns the amount of tokens to return to a liquidity provider when they remove liquidity from the pool. +pub fn get_amounts_on_remove( + to_burn: U128, + total_supply: U128, + balance0: U128, + balance1: U128, +) -> (U128, U128) { + // Since the liquidity token tracks ownership of the pool, the liquidity provider gets a proportional share of each + // token. + (to_burn * balance0 / total_supply, to_burn * balance1 / total_supply) +} + +/// Given some amount of an asset and pair balances, returns an equivalent amount of the other asset. Tokens should be +/// added and removed from the Pool respecting this ratio. +fn get_equivalent_amount(amount0: U128, balance0: U128, balance1: U128) -> U128 { + assert((balance0 > U128::zero()) & (balance1 > U128::zero()), "INSUFFICIENT_LIQUIDITY"); + + // This is essentially the Rule of Three, since we're computing proportional ratios. Note we divide at the end to + // avoid introducing too much error due to truncation. + (amount0 * balance1) / balance0 +} diff --git a/noir-projects/noir-contracts/contracts/amm_contract/src/main.nr b/noir-projects/noir-contracts/contracts/amm_contract/src/main.nr new file mode 100644 index 00000000000..fe405512cf4 --- /dev/null +++ b/noir-projects/noir-contracts/contracts/amm_contract/src/main.nr @@ -0,0 +1,531 @@ +mod lib; +mod config; + +use dep::aztec::macros::aztec; + +/// ## Overview +/// This contract demonstrates how to implement an **Automated Market Maker (AMM)** that maintains **public state** +/// while still achieving **identity privacy**. However, it does **not provide function privacy**: +/// - Anyone can observe **what actions** were performed. +/// - All amounts involved are visible, but **who** performed the action remains private. +/// +/// Unlike most Ethereum AMMs, the AMM contract is not itself the token that tracks participation of liquidity +/// providers, mostly due to Noir lacking inheritance as a feature. Instead, the AMM is expected to have mint and burn +/// permission over an external token contract. +/// +/// **Note:** +/// This is purely a demonstration. The **Aztec team** does not consider this the optimal design for building a DEX. +/// +/// ## Reentrancy Guard Considerations +/// +/// ### 1. Private Functions: +/// Reentrancy protection is typically necessary if entering an intermediate state that is only valid when +/// the action completes uninterrupted. This follows the **Checks-Effects-Interactions** pattern. +/// +/// - In this contract, **private functions** do not introduce intermediate states. +/// - All operations will be fully executed in **public** without needing intermediate checks. +/// +/// ### 2. 
Public Functions: +/// No **reentrancy guard** is required for public functions because: +/// - All public functions are marked as **internal** with a **single callsite** - from a private function. +/// - Public functions **cannot call private functions**, eliminating the risk of reentering into them from private. +/// - Since public functions are internal-only, **external contracts cannot access them**, ensuring no external +/// contract can trigger a reentrant call. This eliminates the following attack vector: +/// `AMM.private_fn --> AMM.public_fn --> ExternalContract.fn --> AMM.public_fn`. +#[aztec] +contract AMM { + use crate::{ + config::Config, + lib::{get_amount_in, get_amount_out, get_amounts_on_remove, get_amounts_to_add}, + }; + use dep::aztec::{ + macros::{functions::{initializer, internal, private, public}, storage::storage}, + prelude::{AztecAddress, PublicImmutable}, + }; + use dep::token::Token; + + #[storage] + struct Storage { + config: PublicImmutable, + } + + /// Amount of liquidity which gets locked when liquidity is provided for the first time. Its purpose is to prevent + /// the pool from ever emptying which could lead to undefined behavior. + global MINIMUM_LIQUIDITY: U128 = U128::from_integer(1000); + /// We set it to 99 times the minimum liquidity. That way the first LP gets 99% of the value of their deposit. + global INITIAL_LIQUIDITY: U128 = U128::from_integer(99000); + + // TODO(#9480): Either deploy the liquidity contract in the constructor or verify it that it corresponds to what + // this contract expects (i.e. that the AMM has permission to mint and burn). + #[public] + #[initializer] + fn constructor(token0: AztecAddress, token1: AztecAddress, liquidity_token: AztecAddress) { + storage.config.initialize(Config { token0, token1, liquidity_token }); + } + + /// Privately adds liquidity to the pool. This function receives the minimum and maximum number of tokens the caller + /// is willing to add, in order to account for changing market conditions, and will try to add as many tokens as + /// possible. + /// + /// `nonce` can be any non-zero value, as it's only used to isolate token transfer authwits to this specific call. + /// + /// The identity of the liquidity provider is not revealed, but the action and amounts are. + #[private] + fn add_liquidity( + amount0_max: Field, + amount1_max: Field, + amount0_min: Field, + amount1_min: Field, + nonce: Field, + ) { + assert( + amount0_min.lt(amount0_max) | (amount0_min == amount0_max), + "INCORRECT_TOKEN0_LIMITS", + ); + assert( + amount1_min.lt(amount1_max) | (amount1_min == amount1_max), + "INCORRECT_TOKEN1_LIMITS", + ); + assert(0.lt(amount0_max) & 0.lt(amount1_max), "INSUFFICIENT_INPUT_AMOUNTS"); + + let config = storage.config.read(); + + let token0 = Token::at(config.token0); + let token1 = Token::at(config.token1); + let liquidity_token = Token::at(config.liquidity_token); + + let sender = context.msg_sender(); + + // We don't yet know how many tokens the sender will actually supply - that can only be computed during public + // execution since the amounts supplied must have the same ratio as the live balances. We therefore transfer the + // maximum amounts here, and prepare partial notes that return the change to the sender (if any). 
+ // TODO(#10286): consider merging these two calls + token0.transfer_to_public(sender, context.this_address(), amount0_max, nonce).call( + &mut context, + ); + let refund_token0_hiding_point_slot = + token0.prepare_private_balance_increase(sender, sender).call(&mut context); + + token1.transfer_to_public(sender, context.this_address(), amount1_max, nonce).call( + &mut context, + ); + let refund_token1_hiding_point_slot = + token1.prepare_private_balance_increase(sender, sender).call(&mut context); + + // The number of liquidity tokens to mint for the caller depends on both the live balances and the amount + // supplied, both of which can only be known during public execution. We therefore prepare a partial note that + // will get completed via minting. + let liquidity_hiding_point_slot = + liquidity_token.prepare_private_balance_increase(sender, sender).call(&mut context); + + // We then complete the flow in public. Note that the type of operation and amounts will all be publicly known, + // but the identity of the caller is not revealed despite us being able to send tokens to them by completing the + // partial notees. + AMM::at(context.this_address()) + ._add_liquidity( + config, + refund_token0_hiding_point_slot, + refund_token1_hiding_point_slot, + liquidity_hiding_point_slot, + amount0_max, + amount1_max, + amount0_min, + amount1_min, + ) + .enqueue(&mut context); + } + + #[public] + #[internal] + fn _add_liquidity( + config: Config, // We could read this in public, but it's cheaper to receive from private + refund_token0_hiding_point_slot: Field, + refund_token1_hiding_point_slot: Field, + liquidity_hiding_point_slot: Field, + amount0_max: Field, + amount1_max: Field, + amount0_min: Field, + amount1_min: Field, + ) { + // TODO(#8271): Type the args as U128 and nuke these ugly casts + let amount0_max = U128::from_integer(amount0_max); + let amount1_max = U128::from_integer(amount1_max); + let amount0_min = U128::from_integer(amount0_min); + let amount1_min = U128::from_integer(amount1_min); + + let token0 = Token::at(config.token0); + let token1 = Token::at(config.token1); + let liquidity_token = Token::at(config.liquidity_token); + + // We read the current AMM balance of both tokens. Note that by the time this function is called the token + // transfers have already been completed (since those calls were enqueued before this call), and so we need to + // substract the transfer amount to get the pre-deposit balance. + let balance0_plus_amount0_max = U128::from_integer(token0 + .balance_of_public(context.this_address()) + .view(&mut context)); + let balance0 = balance0_plus_amount0_max - amount0_max; + + let balance1_plus_amount1_max = U128::from_integer(token1 + .balance_of_public(context.this_address()) + .view(&mut context)); + let balance1 = balance1_plus_amount1_max - amount1_max; + + // With the current balances known, we can calculate the token amounts to the pool, respecting the user's + // minimum deposit preferences. + let (amount0, amount1) = get_amounts_to_add( + amount0_max, + amount1_max, + amount0_min, + amount1_min, + balance0, + balance1, + ); + + // Return any excess from the original token deposits. + let refund_amount_token0 = amount0_max - amount0; + let refund_amount_token1 = amount1_max - amount1; + + // We can simply skip the refund if the amount to return is 0 in order to save gas: the partial note will + // simply stay in public storage and not be completed, but this is not an issue. 
+ if (refund_amount_token0 > U128::zero()) {
+ token0
+ .finalize_transfer_to_private(
+ refund_amount_token0.to_integer(),
+ refund_token0_hiding_point_slot,
+ )
+ .call(&mut context);
+ }
+ if (refund_amount_token1 > U128::zero()) {
+ token1
+ .finalize_transfer_to_private(
+ refund_amount_token1.to_integer(),
+ refund_token1_hiding_point_slot,
+ )
+ .call(&mut context);
+ }
+
+ // With the deposit amounts known, we can compute the number of liquidity tokens to mint and finalize the
+ // depositor's partial note.
+ let total_supply = U128::from_integer(liquidity_token.total_supply().view(&mut context));
+ let liquidity_amount = if total_supply != U128::zero() {
+ // The liquidity token supply increases by the same ratio as the balances. In case one of the token balances
+ // increased with a ratio different from the other one, we simply take the smallest value.
+ std::cmp::min(
+ (amount0 * total_supply) / balance0,
+ (amount1 * total_supply) / balance1,
+ )
+ } else {
+ // The zero total supply case (i.e. pool initialization) is special as we can't increase the supply
+ // proportionally. We instead set the initial liquidity to an arbitrary amount.
+ // We could set the initial liquidity to be equal to the pool invariant (i.e. sqrt(amount0 * amount1)) if
+ // we wanted to collect protocol fees over swap fees (in the style of Uniswap v2), but we choose not to in
+ // order to keep things simple.
+
+ // As part of initialization, we mint some tokens to the zero address to 'lock' them (i.e. make them
+ // impossible to redeem), guaranteeing total supply will never be zero again.
+ liquidity_token
+ .mint_to_public(AztecAddress::zero(), MINIMUM_LIQUIDITY.to_integer())
+ .call(&mut context);
+
+ INITIAL_LIQUIDITY
+ };
+
+ assert(liquidity_amount > U128::zero(), "INSUFFICIENT_LIQUIDITY_MINTED");
+ liquidity_token
+ .finalize_mint_to_private(liquidity_amount.to_integer(), liquidity_hiding_point_slot)
+ .call(&mut context);
+ }
+
+ /// Privately removes liquidity from the pool. This function receives how many liquidity tokens to burn, and the
+ /// minimum number of tokens the caller is willing to receive, in order to account for changing market conditions.
+ ///
+ /// `nonce` can be any non-zero value, as it's only used to isolate token transfer authwits to this specific call.
+ ///
+ /// The identity of the liquidity provider is not revealed, but the action and amounts are.
+ #[private]
+ fn remove_liquidity(liquidity: Field, amount0_min: Field, amount1_min: Field, nonce: Field) {
+ let config = storage.config.read();
+
+ let liquidity_token = Token::at(config.liquidity_token);
+ let token0 = Token::at(config.token0);
+ let token1 = Token::at(config.token1);
+
+ let sender = context.msg_sender();
+
+ // Liquidity tokens are burned when liquidity is removed in order to reduce the total supply. However, we lack
+ // a function to privately burn, so we instead transfer the tokens into the AMM's public balance, and then have
+ // the AMM publicly burn its own tokens.
+ // TODO(#10287): consider adding a private burn
+ liquidity_token.transfer_to_public(sender, context.this_address(), liquidity, nonce).call(
+ &mut context,
+ );
+
+ // We don't yet know how many tokens the sender will get - that can only be computed during public execution
+ // since it depends on the live balances. We therefore simply prepare partial notes for the sender.
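+ // For intuition (illustrative figures only): burning 10% of the liquidity token supply entitles the caller to
+ // 10% of the pool's balance of each token; the exact amounts are computed in _remove_liquidity below via
+ // get_amounts_on_remove.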
+ let token0_hiding_point_slot =
+ token0.prepare_private_balance_increase(sender, sender).call(&mut context);
+ let token1_hiding_point_slot =
+ token1.prepare_private_balance_increase(sender, sender).call(&mut context);
+
+ // We then complete the flow in public. Note that the type of operation and amounts will all be publicly known,
+ // but the identity of the caller is not revealed despite us being able to send tokens to them by completing the
+ // partial notes.
+ AMM::at(context.this_address())
+ ._remove_liquidity(
+ config,
+ liquidity,
+ token0_hiding_point_slot,
+ token1_hiding_point_slot,
+ amount0_min,
+ amount1_min,
+ )
+ .enqueue(&mut context);
+ }
+
+ #[public]
+ #[internal]
+ fn _remove_liquidity(
+ config: Config, // We could read this in public, but it's cheaper to receive from private
+ liquidity: Field,
+ token0_hiding_point_slot: Field,
+ token1_hiding_point_slot: Field,
+ amount0_min: Field,
+ amount1_min: Field,
+ ) {
+ // TODO(#8271): Type the args as U128 and nuke these ugly casts
+ let liquidity = U128::from_integer(liquidity);
+ let amount0_min = U128::from_integer(amount0_min);
+ let amount1_min = U128::from_integer(amount1_min);
+
+ let token0 = Token::at(config.token0);
+ let token1 = Token::at(config.token1);
+ let liquidity_token = Token::at(config.liquidity_token);
+
+ // We need the current balance of both tokens as well as the liquidity token total supply in order to compute
+ // the amounts to send the user.
+ let balance0 = U128::from_integer(token0.balance_of_public(context.this_address()).view(
+ &mut context,
+ ));
+ let balance1 = U128::from_integer(token1.balance_of_public(context.this_address()).view(
+ &mut context,
+ ));
+ let total_supply = U128::from_integer(liquidity_token.total_supply().view(&mut context));
+
+ // We calculate the amounts of token0 and token1 the user is entitled to based on the amount of liquidity they
+ // are removing, and check that they are above the minimum amounts they requested.
+ let (amount0, amount1) = get_amounts_on_remove(liquidity, total_supply, balance0, balance1);
+ assert(amount0 >= amount0_min, "INSUFFICIENT_0_AMOUNT");
+ assert(amount1 >= amount1_min, "INSUFFICIENT_1_AMOUNT");
+
+ // We can now burn the liquidity tokens that had been privately transferred into the AMM, as well as complete
+ // both partial notes.
+ liquidity_token.burn_public(context.this_address(), liquidity.to_integer(), 0).call(
+ &mut context,
+ );
+ token0.finalize_transfer_to_private(amount0.to_integer(), token0_hiding_point_slot).call(
+ &mut context,
+ );
+ token1.finalize_transfer_to_private(amount1.to_integer(), token1_hiding_point_slot).call(
+ &mut context,
+ );
+ }
+
+ /// Privately swaps `amount_in` `token_in` tokens for at least `amount_out_min` `token_out` tokens with the pool.
+ ///
+ /// `nonce` can be any non-zero value, as it's only used to isolate token transfer authwits to this specific call.
+ ///
+ /// The identity of the swapper is not revealed, but the action and amounts are.
+ #[private] + fn swap_exact_tokens_for_tokens( + token_in: AztecAddress, + token_out: AztecAddress, + amount_in: Field, + amount_out_min: Field, + nonce: Field, + ) { + let config = storage.config.read(); + + assert((token_in == config.token0) | (token_in == config.token1), "TOKEN_IN_IS_INVALID"); + assert((token_out == config.token0) | (token_out == config.token1), "TOKEN_OUT_IS_INVALID"); + assert(token_in != token_out, "SAME_TOKEN_SWAP"); + + let sender = context.msg_sender(); + + // We transfer the full amount in, since it is an exact amount, and prepare a partial note for the amount out, + // which will only be known during public execution as it depends on the live balances. + // TODO(#10286): consider merging these two calls + Token::at(token_in) + .transfer_to_public(sender, context.this_address(), amount_in, nonce) + .call(&mut context); + let token_out_hiding_point_slot = Token::at(token_out) + .prepare_private_balance_increase(sender, sender) + .call(&mut context); + + AMM::at(context.this_address()) + ._swap_exact_tokens_for_tokens( + token_in, + token_out, + amount_in, + amount_out_min, + token_out_hiding_point_slot, + ) + .enqueue(&mut context); + } + + #[public] + #[internal] + fn _swap_exact_tokens_for_tokens( + token_in: AztecAddress, + token_out: AztecAddress, + amount_in: Field, + amount_out_min: Field, + token_out_hiding_point_slot: Field, + ) { + // TODO(#8271): Type the args as U128 and nuke these ugly casts + let amount_in = U128::from_integer(amount_in); + let amount_out_min = U128::from_integer(amount_out_min); + + // In order to compute the amount to swap we need the live token balances. Note that at this state the token in + // transfer has already been completed as that function call was enqueued before this one. We therefore need to + // subtract the amount in to get the pre-swap balances. + let balance_in_plus_amount_in = U128::from_integer(Token::at(token_in) + .balance_of_public(context.this_address()) + .view(&mut context)); + let balance_in = balance_in_plus_amount_in - amount_in; + + let balance_out = U128::from_integer(Token::at(token_out) + .balance_of_public(context.this_address()) + .view(&mut context)); + + // We can now compute the number of tokens to transfer and complete the partial note. + let amount_out = get_amount_out(amount_in, balance_in, balance_out); + assert(amount_out >= amount_out_min, "INSUFFICIENT_OUTPUT_AMOUNT"); + + Token::at(token_out) + .finalize_transfer_to_private(amount_out.to_integer(), token_out_hiding_point_slot) + .call(&mut context); + } + + /// Privately swaps at most `amount_in_max` `token_in` tokens for `amount_out` `token_out` tokens with the pool. + /// + /// `nonce` can be any non-zero value, as it's only used to isolate token transfer authwits to this specific call. + /// + /// The identity of the swapper is not revealed, but the action and amounts are. + #[private] + fn swap_tokens_for_exact_tokens( + token_in: AztecAddress, + token_out: AztecAddress, + amount_out: Field, + amount_in_max: Field, + nonce: Field, + ) { + let config = storage.config.read(); + + assert((token_in == config.token0) | (token_in == config.token1), "TOKEN_IN_IS_INVALID"); + assert((token_out == config.token0) | (token_out == config.token1), "TOKEN_OUT_IS_INVALID"); + assert(token_in != token_out, "SAME_TOKEN_SWAP"); + + let sender = context.msg_sender(); + + // We don't know how many tokens we'll receive from the user, since the swap amount will only be known during + // public execution as it depends on the live balances. 
We therefore transfer the full maximum amount and + // prepare partial notes both for the token out and the refund. + // Technically the token out note does not need to be partial, since we do know the amount out, but we do want + // to wait until the swap has been completed before commiting the note to the tree to avoid it being spent too + // early. + // TODO(#10286): consider merging these two calls + Token::at(token_in) + .transfer_to_public(sender, context.this_address(), amount_in_max, nonce) + .call(&mut context); + let change_token_in_hiding_point_slot = + Token::at(token_in).prepare_private_balance_increase(sender, sender).call(&mut context); + + let token_out_hiding_point_slot = Token::at(token_out) + .prepare_private_balance_increase(sender, sender) + .call(&mut context); + + AMM::at(context.this_address()) + ._swap_tokens_for_exact_tokens( + token_in, + token_out, + amount_in_max, + amount_out, + change_token_in_hiding_point_slot, + token_out_hiding_point_slot, + ) + .enqueue(&mut context); + } + + #[public] + #[internal] + fn _swap_tokens_for_exact_tokens( + token_in: AztecAddress, + token_out: AztecAddress, + amount_in_max: Field, + amount_out: Field, + change_token_in_hiding_point_slot: Field, + token_out_hiding_point_slot: Field, + ) { + // TODO(#8271): Type the args as U128 and nuke these ugly casts + let amount_out = U128::from_integer(amount_out); + let amount_in_max = U128::from_integer(amount_in_max); + + // In order to compute the amount to swap we need the live token balances. Note that at this state the token in + // transfer has already been completed as that function call was enqueued before this one. We therefore need to + // subtract the amount in to get the pre-swap balances. + let balance_in_plus_amount_in_max = U128::from_integer(Token::at(token_in) + .balance_of_public(context.this_address()) + .view(&mut context)); + let balance_in = balance_in_plus_amount_in_max - amount_in_max; + + let balance_out = U128::from_integer(Token::at(token_out) + .balance_of_public(context.this_address()) + .view(&mut context)); + + // We can now compute the number of tokens we need to receive and complete the partial note with the change. + let amount_in = get_amount_in(amount_out, balance_in, balance_out); + assert(amount_in <= amount_in_max, "INSUFFICIENT_OUTPUT_AMOUNT"); + + let change = amount_in_max - amount_in; + if (change > U128::zero()) { + Token::at(token_in) + .finalize_transfer_to_private(change.to_integer(), change_token_in_hiding_point_slot + ) + .call(&mut context); + } + + // Note again that we already knew the amount out, but for consistency we want to only commit this note once + // all other steps have been performed. + Token::at(token_out) + .finalize_transfer_to_private(amount_out.to_integer(), token_out_hiding_point_slot) + .call(&mut context); + } + + unconstrained fn get_amount_out_for_exact_in( + balance_in: Field, + balance_out: Field, + amount_in: Field, + ) -> Field { + // Ideally we'd call the token contract in order to read the current balance, but we can't due to #7524. + get_amount_out( + U128::from_integer(amount_in), + U128::from_integer(balance_in), + U128::from_integer(balance_out), + ) + .to_integer() + } + + unconstrained fn get_amount_in_for_exact_out( + balance_in: Field, + balance_out: Field, + amount_out: Field, + ) -> Field { + // Ideally we'd call the token contract in order to read the current balance, but we can't due to #7524. 
+ get_amount_in( + U128::from_integer(amount_out), + U128::from_integer(balance_in), + U128::from_integer(balance_out), + ) + .to_integer() + } +} diff --git a/noir-projects/noir-contracts/contracts/lending_contract/src/interest_math.nr b/noir-projects/noir-contracts/contracts/lending_contract/src/interest_math.nr index e92c91f908d..e3e1e2e1d1b 100644 --- a/noir-projects/noir-contracts/contracts/lending_contract/src/interest_math.nr +++ b/noir-projects/noir-contracts/contracts/lending_contract/src/interest_math.nr @@ -1,7 +1,7 @@ // Binomial approximation of exponential // using lower than desired precisions for everything due to u128 limit // (1+x)^n = 1+n*x+[n/2*(n-1)]*x^2+[n/6*(n-1)*(n-2)*x^3]... -// we are loosing around almost 8 digits of precision from yearly -> daily interest +// we are losing around almost 8 digits of precision from yearly -> daily interest // dividing with 31536000 (seconds per year). // rate must be measured with higher precision than 10^9. // we use e18, and rates >= 4% yearly. Otherwise need more precision diff --git a/noir-projects/noir-contracts/contracts/token_contract/src/main.nr b/noir-projects/noir-contracts/contracts/token_contract/src/main.nr index b2cd2094edc..fad92b5675a 100644 --- a/noir-projects/noir-contracts/contracts/token_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/token_contract/src/main.nr @@ -446,8 +446,9 @@ contract Token { /// some of the finalization functions (`finalize_transfer_to_private`, `finalize_mint_to_private`). /// Returns a hiding point slot. #[private] - fn prepare_private_balance_increase(to: AztecAddress) -> Field { - let from = context.msg_sender(); + fn prepare_private_balance_increase(to: AztecAddress, from: AztecAddress) -> Field { + // TODO(#9887): ideally we'd not have `from` here, but we do need a `from` address to produce a tagging secret + // with `to`. _prepare_private_balance_increase(from, to, &mut context, storage) } // docs:end:prepare_private_balance_increase diff --git a/noir-projects/noir-contracts/contracts/token_contract/src/test/transfer_to_private.nr b/noir-projects/noir-contracts/contracts/token_contract/src/test/transfer_to_private.nr index f48bfb6127e..6c2ce223916 100644 --- a/noir-projects/noir-contracts/contracts/token_contract/src/test/transfer_to_private.nr +++ b/noir-projects/noir-contracts/contracts/token_contract/src/test/transfer_to_private.nr @@ -23,7 +23,7 @@ unconstrained fn transfer_to_private_internal_orchestration() { #[test] unconstrained fn transfer_to_private_external_orchestration() { // Setup without account contracts. We are not using authwits here, so dummy accounts are enough - let (env, token_contract_address, _, recipient, amount) = + let (env, token_contract_address, owner, recipient, amount) = utils::setup_and_mint_to_public(/* with_account_contracts */ false); let note_randomness = random(); @@ -33,7 +33,7 @@ unconstrained fn transfer_to_private_external_orchestration() { // We prepare the transfer let hiding_point_slot: Field = Token::at(token_contract_address) - .prepare_private_balance_increase(recipient) + .prepare_private_balance_increase(recipient, owner) .call(&mut env.private()); // Finalize the transfer of the tokens (message sender owns the tokens in public) @@ -72,14 +72,14 @@ unconstrained fn transfer_to_private_transfer_not_prepared() { #[test(should_fail_with = "Assertion failed: attempt to subtract with underflow 'hi == high'")] unconstrained fn transfer_to_private_failure_not_an_owner() { // Setup without account contracts. 
We are not using authwits here, so dummy accounts are enough - let (env, token_contract_address, _, not_owner, amount) = + let (env, token_contract_address, owner, not_owner, amount) = utils::setup_and_mint_to_public(/* with_account_contracts */ false); // (For this specific test we could set a random value for the commitment and not do the call to `prepare...` // as the token balance check is before we use the value but that would made the test less robust against changes // in the contract.) let hiding_point_slot: Field = Token::at(token_contract_address) - .prepare_private_balance_increase(not_owner) + .prepare_private_balance_increase(not_owner, owner) .call(&mut env.private()); // Try transferring someone else's token balance diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr b/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr index 3695c499bbf..c49164566fc 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr @@ -28,7 +28,7 @@ pub global ARGS_LENGTH: u32 = 16; // "PER CALL" CONSTANTS pub global MAX_NOTE_HASHES_PER_CALL: u32 = 16; pub global MAX_NULLIFIERS_PER_CALL: u32 = 16; -pub global MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL: u32 = 4; +pub global MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL: u32 = 5; pub global MAX_ENQUEUED_CALLS_PER_CALL: u32 = 16; pub global MAX_L2_TO_L1_MSGS_PER_CALL: u32 = 2; pub global MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL: u32 = 64; diff --git a/scripts/ci/get_e2e_jobs.sh b/scripts/ci/get_e2e_jobs.sh index 2dbdb42ac40..ed6379e6a46 100755 --- a/scripts/ci/get_e2e_jobs.sh +++ b/scripts/ci/get_e2e_jobs.sh @@ -20,6 +20,7 @@ full_list=$(get_test_names) allow_list=( "e2e_2_pxes" "e2e_authwit" + "e2e_amm" "e2e_avm_simulator" "e2e_block_building" "e2e_cross_chain_messaging" diff --git a/yarn-project/circuits.js/src/constants.gen.ts b/yarn-project/circuits.js/src/constants.gen.ts index 8672f42cdea..3168b6099f6 100644 --- a/yarn-project/circuits.js/src/constants.gen.ts +++ b/yarn-project/circuits.js/src/constants.gen.ts @@ -4,7 +4,7 @@ export const MAX_FIELD_VALUE = 2188824287183927522224640574525727508854836440041 export const ARGS_LENGTH = 16; export const MAX_NOTE_HASHES_PER_CALL = 16; export const MAX_NULLIFIERS_PER_CALL = 16; -export const MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL = 4; +export const MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL = 5; export const MAX_ENQUEUED_CALLS_PER_CALL = 16; export const MAX_L2_TO_L1_MSGS_PER_CALL = 2; export const MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL = 64; @@ -178,7 +178,7 @@ export const TX_REQUEST_LENGTH = 12; export const TOTAL_FEES_LENGTH = 1; export const TOTAL_MANA_USED_LENGTH = 1; export const HEADER_LENGTH = 25; -export const PRIVATE_CIRCUIT_PUBLIC_INPUTS_LENGTH = 731; +export const PRIVATE_CIRCUIT_PUBLIC_INPUTS_LENGTH = 739; export const PUBLIC_CIRCUIT_PUBLIC_INPUTS_LENGTH = 867; export const PRIVATE_CONTEXT_INPUTS_LENGTH = 38; export const FEE_RECIPIENT_LENGTH = 2; diff --git a/yarn-project/end-to-end/scripts/e2e_test_config.yml b/yarn-project/end-to-end/scripts/e2e_test_config.yml index a09f5f5a568..8a65a011708 100644 --- a/yarn-project/end-to-end/scripts/e2e_test_config.yml +++ b/yarn-project/end-to-end/scripts/e2e_test_config.yml @@ -20,6 +20,7 @@ tests: command: './scripts/e2e_compose_test.sh bench_tx_size' e2e_2_pxes: {} e2e_account_contracts: {} + e2e_amm: {} e2e_authwit: {} e2e_avm_simulator: {} e2e_blacklist_token_contract: {} diff --git 
a/yarn-project/end-to-end/src/e2e_amm.test.ts b/yarn-project/end-to-end/src/e2e_amm.test.ts new file mode 100644 index 00000000000..6b1d741487f --- /dev/null +++ b/yarn-project/end-to-end/src/e2e_amm.test.ts @@ -0,0 +1,338 @@ +import { type AccountWallet, type DebugLogger, Fr, type Wallet } from '@aztec/aztec.js'; +import { AMMContract, type TokenContract } from '@aztec/noir-contracts.js'; + +import { jest } from '@jest/globals'; + +import { deployToken, mintTokensToPrivate } from './fixtures/token_utils.js'; +import { setup } from './fixtures/utils.js'; + +const TIMEOUT = 120_000; + +describe('AMM', () => { + jest.setTimeout(TIMEOUT); + + let teardown: () => Promise; + + let logger: DebugLogger; + + let adminWallet: AccountWallet; + let liquidityProvider: AccountWallet; + let otherLiquidityProvider: AccountWallet; + let swapper: AccountWallet; + + let token0: TokenContract; + let token1: TokenContract; + let liquidityToken: TokenContract; + + let amm: AMMContract; + + const INITIAL_AMM_TOTAL_SUPPLY = 100000n; + + // We need a large token amount so that the swap fee (0.3%) is observable. + const INITIAL_TOKEN_BALANCE = 1_000_000_000n; + + beforeAll(async () => { + ({ + teardown, + wallets: [adminWallet, liquidityProvider, otherLiquidityProvider, swapper], + logger, + } = await setup(4)); + + token0 = await deployToken(adminWallet, 0n, logger); + token1 = await deployToken(adminWallet, 0n, logger); + liquidityToken = await deployToken(adminWallet, 0n, logger); + + amm = await AMMContract.deploy(adminWallet, token0.address, token1.address, liquidityToken.address) + .send() + .deployed(); + + // TODO(#9480): consider deploying the token by some factory when the AMM is deployed, and making the AMM be the + // minter there. + await liquidityToken.methods.set_minter(amm.address, true).send().wait(); + + // We mint the tokens to both liquidity providers and the swapper + await mintTokensToPrivate(token0, adminWallet, liquidityProvider.getAddress(), INITIAL_TOKEN_BALANCE); + await mintTokensToPrivate(token1, adminWallet, liquidityProvider.getAddress(), INITIAL_TOKEN_BALANCE); + + await mintTokensToPrivate(token0, adminWallet, otherLiquidityProvider.getAddress(), INITIAL_TOKEN_BALANCE); + await mintTokensToPrivate(token1, adminWallet, otherLiquidityProvider.getAddress(), INITIAL_TOKEN_BALANCE); + + // Note that the swapper only holds token0, not token1 + await mintTokensToPrivate(token0, adminWallet, swapper.getAddress(), INITIAL_TOKEN_BALANCE); + }); + + afterAll(() => teardown()); + + describe('full flow', () => { + // This is an integration test in which we perform an entire run of the happy path. Thorough unit testing is not + // included. 
+ + type Balance = { + token0: bigint; + token1: bigint; + }; + + async function getAmmBalances(): Promise { + return { + token0: await token0.methods.balance_of_public(amm.address).simulate(), + token1: await token1.methods.balance_of_public(amm.address).simulate(), + }; + } + + async function getWalletBalances(lp: Wallet): Promise { + return { + token0: await token0.withWallet(lp).methods.balance_of_private(lp.getAddress()).simulate(), + token1: await token1.withWallet(lp).methods.balance_of_private(lp.getAddress()).simulate(), + }; + } + + function assertBalancesDelta(before: Balance, after: Balance, delta: Balance) { + expect(after.token0 - before.token0).toEqual(delta.token0); + expect(after.token1 - before.token1).toEqual(delta.token1); + } + + it('add initial liquidity', async () => { + const ammBalancesBefore = await getAmmBalances(); + const lpBalancesBefore = await getWalletBalances(liquidityProvider); + + const amount0Max = lpBalancesBefore.token0; + const amount0Min = lpBalancesBefore.token0 / 2n; + const amount1Max = lpBalancesBefore.token1; + const amount1Min = lpBalancesBefore.token1 / 2n; + + // First we need to add authwits such that the AMM can transfer the tokens from the liquidity provider. These + // authwits are for the full amount, since the AMM will first transfer that to itself, and later refund any excess + // during public execution. + const nonceForAuthwits = Fr.random(); + await liquidityProvider.createAuthWit({ + caller: amm.address, + action: token0.methods.transfer_to_public( + liquidityProvider.getAddress(), + amm.address, + amount0Max, + nonceForAuthwits, + ), + }); + await liquidityProvider.createAuthWit({ + caller: amm.address, + action: token1.methods.transfer_to_public( + liquidityProvider.getAddress(), + amm.address, + amount1Max, + nonceForAuthwits, + ), + }); + + await amm + .withWallet(liquidityProvider) + .methods.add_liquidity(amount0Max, amount1Max, amount0Min, amount1Min, nonceForAuthwits) + .send() + .wait(); + + const ammBalancesAfter = await getAmmBalances(); + const lpBalancesAfter = await getWalletBalances(liquidityProvider); + + // Since the LP was the first one to enter the pool, the maximum amounts of tokens should have been deposited as + // there is no prior token ratio to follow. + assertBalancesDelta(ammBalancesBefore, ammBalancesAfter, { token0: amount0Max, token1: amount1Max }); + assertBalancesDelta(lpBalancesBefore, lpBalancesAfter, { token0: -amount0Max, token1: -amount1Max }); + + // Liquidity tokens should also be minted for the liquidity provider, as well as locked at the zero address. + const expectedLiquidityTokens = (INITIAL_AMM_TOTAL_SUPPLY * 99n) / 100n; + expect(await liquidityToken.methods.balance_of_private(liquidityProvider.getAddress()).simulate()).toEqual( + expectedLiquidityTokens, + ); + expect(await liquidityToken.methods.total_supply().simulate()).toEqual(INITIAL_AMM_TOTAL_SUPPLY); + }); + + it('add liquidity from another lp', async () => { + // This is the same as when we add liquidity for the first time, but we'll be going through a different code path + // since total supply for the liquidity token is non-zero + + const ammBalancesBefore = await getAmmBalances(); + const lpBalancesBefore = await getWalletBalances(otherLiquidityProvider); + + const liquidityTokenSupplyBefore = await liquidityToken.methods.total_supply().simulate(); + + // The pool currently has the same number of tokens for token0 and token1, since that is the ratio the first + // liquidity provider used. 
Our maximum values have a diferent ratio (6:5 instead of 1:1), so we will end up + // adding the maximum amount that does result in the correct ratio (i.e. using amount1Max and a 1:1 ratio). + const amount0Max = (lpBalancesBefore.token0 * 6n) / 10n; + const amount0Min = (lpBalancesBefore.token0 * 4n) / 10n; + const amount1Max = (lpBalancesBefore.token1 * 5n) / 10n; + const amount1Min = (lpBalancesBefore.token1 * 4n) / 10n; + + const expectedAmount0 = amount1Max; + const expectedAmount1 = amount1Max; + + // We again add authwits such that the AMM can transfer the tokens from the liquidity provider. These authwits are + // for the full amount, since the AMM will first transfer that to itself, and later refund any excess during + // public execution. We expect for there to be excess since our maximum amounts do not have the same balance ratio + // as the pool currently holds. + const nonceForAuthwits = Fr.random(); + await otherLiquidityProvider.createAuthWit({ + caller: amm.address, + action: token0.methods.transfer_to_public( + otherLiquidityProvider.getAddress(), + amm.address, + amount0Max, + nonceForAuthwits, + ), + }); + await otherLiquidityProvider.createAuthWit({ + caller: amm.address, + action: token1.methods.transfer_to_public( + otherLiquidityProvider.getAddress(), + amm.address, + amount1Max, + nonceForAuthwits, + ), + }); + + await amm + .withWallet(otherLiquidityProvider) + .methods.add_liquidity(amount0Max, amount1Max, amount0Min, amount1Min, nonceForAuthwits) + .send() + .wait(); + + const ammBalancesAfter = await getAmmBalances(); + const lpBalancesAfter = await getWalletBalances(otherLiquidityProvider); + + assertBalancesDelta(ammBalancesBefore, ammBalancesAfter, { token0: expectedAmount0, token1: expectedAmount1 }); + assertBalancesDelta(lpBalancesBefore, lpBalancesAfter, { token0: -expectedAmount0, token1: -expectedAmount1 }); + + // The liquidity token supply should have grown with the same proportion as the pool balances + const expectedTotalSupply = + (liquidityTokenSupplyBefore * (ammBalancesBefore.token0 + expectedAmount0)) / ammBalancesBefore.token0; + const expectedLiquidityTokens = expectedTotalSupply - INITIAL_AMM_TOTAL_SUPPLY; + + expect(await liquidityToken.methods.total_supply().simulate()).toEqual(expectedTotalSupply); + expect(await liquidityToken.methods.balance_of_private(otherLiquidityProvider.getAddress()).simulate()).toEqual( + expectedLiquidityTokens, + ); + }); + + it('swap exact tokens in', async () => { + const swapperBalancesBefore = await getWalletBalances(swapper); + const ammBalancesBefore = await getAmmBalances(); + + // The token in will be token0 + const amountIn = swapperBalancesBefore.token0 / 10n; + + // Swaps also transfer tokens into the AMM, so we provide an authwit for the full amount in. + const nonceForAuthwits = Fr.random(); + await swapper.createAuthWit({ + caller: amm.address, + action: token0.methods.transfer_to_public(swapper.getAddress(), amm.address, amountIn, nonceForAuthwits), + }); + + // We compute the expected amount out and set it as the minimum. In a real-life scenario we'd choose a slightly + // lower value to account for slippage, but since we're the only actor interacting with the AMM we can afford to + // just pass the exact value. Of course any lower value would also suffice. 
+ const amountOutMin = await amm.methods + .get_amount_out_for_exact_in(ammBalancesBefore.token0, ammBalancesBefore.token1, amountIn) + .simulate(); + await amm + .withWallet(swapper) + .methods.swap_exact_tokens_for_tokens(token0.address, token1.address, amountIn, amountOutMin, nonceForAuthwits) + .send() + .wait(); + + // We know exactly how many tokens we're supposed to get because we know nobody else interacted with the AMM + // before we did. + const swapperBalancesAfter = await getWalletBalances(swapper); + assertBalancesDelta(swapperBalancesBefore, swapperBalancesAfter, { token0: -amountIn, token1: amountOutMin }); + }); + + it('swap exact tokens out', async () => { + const swapperBalancesBefore = await getWalletBalances(swapper); + const ammBalancesBefore = await getAmmBalances(); + + // We want to undo the previous swap (except for the fees, which we can't recover), so we try to send the full + // token1 balance (since the swapper held no token1 tokens prior to the swap). However, we're using the method + // that receives an exact amount of tokens *out*, not in, so we can't quite specify this. What we do instead is + // query the contract for how much token0 we'd get if we sent our entire token1 balance, and then request exactly + // that amount. This would fail in a real-life scenario since we'd need to account for slippage, but we can do it + // in this test environment since there's nobody else interacting with the AMM. + const amountOut = await amm.methods + .get_amount_out_for_exact_in(ammBalancesBefore.token1, ammBalancesBefore.token0, swapperBalancesBefore.token1) + .simulate(); + const amountInMax = swapperBalancesBefore.token1; + + // Swaps also transfer tokens into the AMM, so we provide an authwit for the full amount in (any change will be + // later returned, though in this case there won't be any). + const nonceForAuthwits = Fr.random(); + await swapper.createAuthWit({ + caller: amm.address, + action: token1.methods.transfer_to_public(swapper.getAddress(), amm.address, amountInMax, nonceForAuthwits), + }); + + await amm + .withWallet(swapper) + .methods.swap_tokens_for_exact_tokens(token1.address, token0.address, amountOut, amountInMax, nonceForAuthwits) + .send() + .wait(); + + // Because nobody else interacted with the AMM, we know the amount in will be the maximum (i.e. the value the + // contract returned as what we'd need to send in order to get the amount out we requested). + const swapperBalancesAfter = await getWalletBalances(swapper); + assertBalancesDelta(swapperBalancesBefore, swapperBalancesAfter, { token0: amountOut, token1: -amountInMax }); + + // We can also check that the swapper ends up with fewer tokens than they started with, since they had to pay + // swap fees during both swaps. + expect(swapperBalancesAfter.token0).toBeLessThan(INITIAL_TOKEN_BALANCE); + }); + + it('remove liquidity', async () => { + // We now withdraw all of the tokens of one of the liquidity providers by burning their entire liquidity token + // balance. + const liquidityTokenBalance = await liquidityToken + .withWallet(otherLiquidityProvider) + .methods.balance_of_private(otherLiquidityProvider.getAddress()) + .simulate(); + + // Because private burning requires first transfering the tokens into the AMM, we again need to provide an + // authwit. 
+ const nonceForAuthwits = Fr.random(); + await otherLiquidityProvider.createAuthWit({ + caller: amm.address, + action: liquidityToken.methods.transfer_to_public( + otherLiquidityProvider.getAddress(), + amm.address, + liquidityTokenBalance, + nonceForAuthwits, + ), + }); + + // We don't bother setting the minimum amounts, since we know nobody else is interacting with the AMM. In a + // real-life scenario we'd need to choose sensible amounts to avoid losing value due to slippage. + const amount0Min = 1n; + const amount1Min = 1n; + await amm + .withWallet(otherLiquidityProvider) + .methods.remove_liquidity(liquidityTokenBalance, amount0Min, amount1Min, nonceForAuthwits) + .send() + .wait(); + + // The liquidity provider should have no remaining liquidity tokens, and should have recovered the value they + // originally deposited. + expect( + await liquidityToken + .withWallet(otherLiquidityProvider) + .methods.balance_of_private(otherLiquidityProvider.getAddress()) + .simulate(), + ).toEqual(0n); + + // We now assert that the liquidity provider ended up with more tokens than they began with. These extra tokens + // come from the swap fees paid during each of the swaps. While swap fees are always collected on the token in, + // the net fees will all be accrued on token0 due to how the swaps were orchestrated. This can be intuited by the + // fact that the swapper held no token1 initially, so it'd be impossible for them to cause an increase in the + // AMM's token1 balance. + // We perform this test using the second liquidity provider, since the first one did lose some percentage of the + // value of their deposit during setup when liquidity was locked by minting tokens for the zero address. + const lpBalancesAfter = await getWalletBalances(otherLiquidityProvider); + expect(lpBalancesAfter.token0).toBeGreaterThan(INITIAL_TOKEN_BALANCE); + expect(lpBalancesAfter.token1).toEqual(INITIAL_TOKEN_BALANCE); + }); + }); +}); From ac7c0da38ff05d6f11c4d6a6244c4526ac00232e Mon Sep 17 00:00:00 2001 From: ledwards2225 <98505400+ledwards2225@users.noreply.github.com> Date: Tue, 3 Dec 2024 11:50:53 -0700 Subject: [PATCH 08/24] feat: mock IVC state from arbitrary acir IVC recursion constraints (#10314) Generating a bberg kernel circuit from a noir kernel program represented as acir requires an IVC instance containing certain state including a verifier accumulator and verification queue containing proofs/VKs for input to recursive verifiers. In the context of a write_vk flow, this data is not known and must be mocked so that the recursive verifiers in the kernel can be constructed properly. (Similar to how we construct a dummy proof to generate a Honk recursive verifier). The main method in this PR is `create_mock_ivc_from_constraints()` which constructs an IVC instance with mocked state based on the IVC recursion constraints present in the acir data. For example, if there are two PG recursive verifications in the constraint system, we must generate two mocked PG proofs plus some other auxiliary data. So no actual write_vk flow exists but the logic is tested though the `IvcRecursionConstraintTest` suite which constructs VKs from programs containing each of the 3 different possible combinations of IVC recursion constraints that appear in Aztec kernel circuits. (These are: (a) 1 Oink recursive verification (init kernel), (b) 1 PG recursive verification (reset or tail kernel), and (c) 2 PG recursive verifications (inner kernel)). 
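Schematically, the dispatch over constraint combinations can be sketched as follows. This is a minimal TypeScript-flavoured sketch of the selection logic only; the function name, return values and strings are illustrative placeholders, and the authoritative implementation is `create_mock_ivc_from_constraints` in the C++ diff below.

```typescript
// Illustrative sketch: which mock state each valid kernel shape requires.
type ProofType = 'OINK' | 'PG';

function mockIvcPlan(constraints: ProofType[]): string[] {
  const plan: string[] = [];
  const accumulate = (type: ProofType, circuit: 'app' | 'kernel') => {
    // Each mocked accumulation contributes one verification-queue entry plus one mock merge proof.
    plan.push(`mock ${type} queue entry (${circuit})`, 'mock merge proof');
  };

  if (constraints.length === 1 && constraints[0] === 'OINK') {
    accumulate('OINK', 'app'); // (a) init kernel: one Oink recursive verification of an app
  } else if (constraints.length === 1 && constraints[0] === 'PG') {
    plan.push('mock verifier accumulator'); // (b) reset or tail kernel
    accumulate('PG', 'kernel');
  } else if (constraints.length === 2 && constraints.every(t => t === 'PG')) {
    plan.push('mock verifier accumulator'); // (c) inner kernel: previous kernel, then app
    accumulate('PG', 'kernel');
    accumulate('PG', 'app');
  } else {
    throw new Error('Invalid set of IVC recursion constraints');
  }
  return plan;
}

// Example: an inner kernel consumes two PG recursion constraints.
console.log(mockIvcPlan(['PG', 'PG']));
```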
--- .../honk_recursion_constraint.test.cpp | 5 +-
 .../acir_format/ivc_recursion_constraint.cpp | 228 ++++++++++++++----
 .../acir_format/ivc_recursion_constraint.hpp | 24 +-
 .../ivc_recursion_constraint.test.cpp | 201 +++++++++++----
 .../dsl/acir_format/proof_surgeon.hpp | 4 +-
 .../protogalaxy_recursive_verifier.cpp | 2 +
 .../stdlib_circuit_builders/databus.hpp | 10 +
 7 files changed, 368 insertions(+), 106 deletions(-)

diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/honk_recursion_constraint.test.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/honk_recursion_constraint.test.cpp
index de6005c1942..e431959807e 100644
--- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/honk_recursion_constraint.test.cpp
+++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/honk_recursion_constraint.test.cpp
@@ -153,10 +153,11 @@ class AcirHonkRecursionConstraint : public ::testing::Test {
 std::vector key_witnesses = verification_key->to_field_elements();
 std::vector proof_witnesses = inner_proof;
- const size_t num_public_inputs = inner_circuit.get_public_inputs().size();
+ const size_t num_public_inputs_to_extract =
+ inner_circuit.get_public_inputs().size() - bb::PAIRING_POINT_ACCUMULATOR_SIZE;
 auto [key_indices, proof_indices, inner_public_inputs] = ProofSurgeon::populate_recursion_witness_data(
- witness, proof_witnesses, key_witnesses, num_public_inputs);
+ witness, proof_witnesses, key_witnesses, num_public_inputs_to_extract);
 RecursionConstraint honk_recursion_constraint{
 .key = key_indices,
diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/ivc_recursion_constraint.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/ivc_recursion_constraint.cpp
index 4bd58eff722..f815610631e 100644
--- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/ivc_recursion_constraint.cpp
+++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/ivc_recursion_constraint.cpp
@@ -13,91 +13,222 @@ namespace acir_format {
 using namespace bb;
-using field_ct = stdlib::field_t;
-ClientIVC create_mock_ivc_from_constraints(const std::vector& constraints)
+/**
+ * @brief Create an IVC object with mocked state corresponding to a set of IVC recursion constraints
+ * @details Construction of a kernel circuit requires two inputs: kernel program acir constraints and an IVC instance
+ * containing state needed to complete the kernel logic, e.g. proofs for input to recursive verifiers. To construct
+ * verification keys for kernel circuits without running a full IVC, we mock the IVC state corresponding to a provided
+ * set of IVC recursion constraints. For example, if the constraints contain a single PG recursive verification, we
+ * initialize an IVC with mocked data for the verifier accumulator, the folding proof, the circuit verification key,
+ * and a merge proof.
+ * @note There are only three valid combinations of IVC recursion constraints for a kernel program. See below for
+ * details.
+ * + * @param constraints IVC recursion constraints from a kernel circuit + * @param trace_settings + * @return ClientIVC + */ +ClientIVC create_mock_ivc_from_constraints(const std::vector& constraints, + const TraceSettings& trace_settings) { - ClientIVC ivc{ { SMALL_TEST_STRUCTURE } }; + ClientIVC ivc{ trace_settings }; - for (const auto& constraint : constraints) { - if (static_cast(PROOF_TYPE::OINK) == constraint.proof_type) { - mock_ivc_oink_accumulation(ivc, constraint.public_inputs.size()); - } else if (static_cast(PROOF_TYPE::PG) == constraint.proof_type) { - // perform equivalent mocking for PG accumulation - } + uint32_t oink_type = static_cast(PROOF_TYPE::OINK); + uint32_t pg_type = static_cast(PROOF_TYPE::PG); + + // There are only three valid combinations of IVC recursion constraints for Aztec kernel circuits: + + // Case: INIT kernel; single Oink recursive verification of an app + if (constraints.size() == 1 && constraints[0].proof_type == oink_type) { + mock_ivc_accumulation(ivc, ClientIVC::QUEUE_TYPE::OINK, /*is_kernel=*/false); + return ivc; + } + + // Case: RESET or TAIL kernel; single PG recursive verification of a kernel + if (constraints.size() == 1 && constraints[0].proof_type == pg_type) { + ivc.verifier_accumulator = create_mock_decider_vk(); + mock_ivc_accumulation(ivc, ClientIVC::QUEUE_TYPE::PG, /*is_kernel=*/true); + return ivc; } - return ivc; + // Case: INNER kernel; two PG recursive verifications, kernel and app in that order + if (constraints.size() == 2) { + ASSERT(constraints[0].proof_type == pg_type && constraints[1].proof_type == pg_type); + ivc.verifier_accumulator = create_mock_decider_vk(); + mock_ivc_accumulation(ivc, ClientIVC::QUEUE_TYPE::PG, /*is_kernel=*/true); + mock_ivc_accumulation(ivc, ClientIVC::QUEUE_TYPE::PG, /*is_kernel=*/false); + return ivc; + } + + ASSERT(false && "WARNING: Invalid set of IVC recursion constraints!"); + return ClientIVC{}; } /** - * @brief Populate an IVC instance with data that mimics the state after accumulating the first app (which runs the oink - * prover) - *@details Mock state consists a mock verification queue entry of type OINK (proof, VK) and a mocked merge proof + * @brief Populate an IVC instance with data that mimics the state after a single IVC accumulation (Oink or PG) + * @details Mock state consists of a mock verification queue entry of type OINK (proof, VK) and a mocked merge proof * * @param ivc * @param num_public_inputs_app num pub inputs in accumulated app, excluding fixed components, e.g. 
pairing points */ -void mock_ivc_oink_accumulation(ClientIVC& ivc, size_t num_public_inputs_app) +void mock_ivc_accumulation(ClientIVC& ivc, ClientIVC::QUEUE_TYPE type, const bool is_kernel) { - ClientIVC::VerifierInputs oink_entry = - acir_format::create_dummy_vkey_and_proof_oink(ivc.trace_settings, num_public_inputs_app); - ivc.verification_queue.emplace_back(oink_entry); + ClientIVC::VerifierInputs entry = + acir_format::create_mock_verification_queue_entry(type, ivc.trace_settings, is_kernel); + ivc.verification_queue.emplace_back(entry); ivc.merge_verification_queue.emplace_back(acir_format::create_dummy_merge_proof()); ivc.initialized = true; } /** - * @brief Create a mock oink proof and VK that have the correct structure but are not necessarily valid + * @brief Create a mock verification queue entry with proof and VK that have the correct structure but are not + * necessarily valid * */ -ClientIVC::VerifierInputs create_dummy_vkey_and_proof_oink(const TraceSettings& trace_settings, - const size_t num_public_inputs = 0) +ClientIVC::VerifierInputs create_mock_verification_queue_entry(const ClientIVC::QUEUE_TYPE verification_type, + const TraceSettings& trace_settings, + const bool is_kernel) { - using Flavor = MegaFlavor; - using FF = bb::fr; + using FF = ClientIVC::FF; + using MegaVerificationKey = ClientIVC::MegaVerificationKey; + // Use the trace settings to determine the correct dyadic size and the public inputs offset MegaExecutionTraceBlocks blocks; blocks.set_fixed_block_sizes(trace_settings); blocks.compute_offsets(/*is_structured=*/true); - size_t structured_dyadic_size = blocks.get_structured_dyadic_size(); + size_t dyadic_size = blocks.get_structured_dyadic_size(); size_t pub_inputs_offset = blocks.pub_inputs.trace_offset; + // All circuits have pairing point public inputs; kernels have additional public inputs for two databus commitments + size_t num_public_inputs = bb::PAIRING_POINT_ACCUMULATOR_SIZE; + if (is_kernel) { + num_public_inputs += bb::PROPAGATED_DATABUS_COMMITMENTS_SIZE; + } - ClientIVC::VerifierInputs verifier_inputs; - verifier_inputs.type = ClientIVC::QUEUE_TYPE::OINK; + // Construct a mock Oink or PG proof + std::vector proof; + if (verification_type == ClientIVC::QUEUE_TYPE::OINK) { + proof = create_mock_oink_proof(dyadic_size, num_public_inputs, pub_inputs_offset); + } else { // ClientIVC::QUEUE_TYPE::PG) + proof = create_mock_pg_proof(dyadic_size, num_public_inputs, pub_inputs_offset); + } - FF mock_val(5); + // Construct a mock MegaHonk verification key + std::shared_ptr verification_key = + create_mock_honk_vk(dyadic_size, num_public_inputs, pub_inputs_offset); - auto mock_commitment = curve::BN254::AffineElement::one() * mock_val; - std::vector mock_commitment_frs = field_conversion::convert_to_bn254_frs(mock_commitment); + // If the verification queue entry corresponds to a kernel circuit, set the databus data to indicate the presence of + // propagated return data commitments on the public inputs + if (is_kernel) { + verification_key->databus_propagation_data = bb::DatabusPropagationData::kernel_default(); + } + + return ClientIVC::VerifierInputs{ proof, verification_key, verification_type }; +} + +/** + * @brief Create a mock oink proof that has the correct structure but is not in general valid + * + */ +std::vector create_mock_oink_proof(const size_t dyadic_size, + const size_t num_public_inputs, + const size_t pub_inputs_offset) +{ + using Flavor = ClientIVC::Flavor; + using FF = ClientIVC::FF; - // Set proof preamble (metadata plus public 
inputs) - size_t total_num_public_inputs = num_public_inputs + bb::PAIRING_POINT_ACCUMULATOR_SIZE; - verifier_inputs.proof.emplace_back(structured_dyadic_size); - verifier_inputs.proof.emplace_back(total_num_public_inputs); - verifier_inputs.proof.emplace_back(pub_inputs_offset); - for (size_t i = 0; i < total_num_public_inputs; ++i) { - verifier_inputs.proof.emplace_back(0); + std::vector proof; + + // Populate proof metadata + proof.emplace_back(dyadic_size); + proof.emplace_back(num_public_inputs); + proof.emplace_back(pub_inputs_offset); + + // Populate mock public inputs + for (size_t i = 0; i < num_public_inputs; ++i) { + proof.emplace_back(0); } - // Witness polynomial commitments + // Populate mock witness polynomial commitments + auto mock_commitment = curve::BN254::AffineElement::one(); + std::vector mock_commitment_frs = field_conversion::convert_to_bn254_frs(mock_commitment); for (size_t i = 0; i < Flavor::NUM_WITNESS_ENTITIES; ++i) { for (const FF& val : mock_commitment_frs) { - verifier_inputs.proof.emplace_back(val); + proof.emplace_back(val); } } + return proof; +} + +/** + * @brief Create a mock PG proof that has the correct structure but is not in general valid + * + */ +std::vector create_mock_pg_proof(const size_t dyadic_size, + const size_t num_public_inputs, + const size_t pub_inputs_offset) +{ + using FF = ClientIVC::FF; + using DeciderProvingKeys = ClientIVC::DeciderProvingKeys; + + // The first part of a PG proof is an Oink proof + std::vector proof = create_mock_oink_proof(dyadic_size, num_public_inputs, pub_inputs_offset); + + // Populate mock perturbator coefficients + for (size_t idx = 1; idx <= CONST_PG_LOG_N; idx++) { + proof.emplace_back(0); + } + + // Populate mock combiner quotient coefficients + for (size_t idx = DeciderProvingKeys::NUM; idx < DeciderProvingKeys::BATCHED_EXTENDED_LENGTH; idx++) { + proof.emplace_back(0); + } + + return proof; +} + +/** + * @brief Create a mock MegaHonk VK that has the correct structure + * + */ +std::shared_ptr create_mock_honk_vk(const size_t dyadic_size, + const size_t num_public_inputs, + const size_t pub_inputs_offset) +{ + // Set relevant VK metadata and commitments + auto honk_verification_key = std::make_shared(); + honk_verification_key->circuit_size = dyadic_size; + honk_verification_key->num_public_inputs = num_public_inputs; + honk_verification_key->pub_inputs_offset = pub_inputs_offset; // must be set correctly + honk_verification_key->contains_pairing_point_accumulator = true; + + for (auto& commitment : honk_verification_key->get_all()) { + commitment = curve::BN254::AffineElement::one(); // arbitrary mock commitment + } + + return honk_verification_key; +} + +/** + * @brief Create a mock Decider verification key for initilization of a mock verifier accumulator + * + */ +std::shared_ptr create_mock_decider_vk() +{ + using FF = ClientIVC::FF; + // Set relevant VK metadata and commitments - verifier_inputs.honk_verification_key = std::make_shared(); - verifier_inputs.honk_verification_key->circuit_size = structured_dyadic_size; - verifier_inputs.honk_verification_key->num_public_inputs = total_num_public_inputs; - verifier_inputs.honk_verification_key->pub_inputs_offset = blocks.pub_inputs.trace_offset; // must be set correctly - verifier_inputs.honk_verification_key->contains_pairing_point_accumulator = true; - for (auto& commitment : verifier_inputs.honk_verification_key->get_all()) { - commitment = mock_commitment; + auto decider_verification_key = std::make_shared(); + 
decider_verification_key->verification_key = create_mock_honk_vk(0, 0, 0); // metadata does not need to be accurate + decider_verification_key->is_accumulator = true; + decider_verification_key->gate_challenges = std::vector(static_cast(CONST_PG_LOG_N), 0); + + for (auto& commitment : decider_verification_key->witness_commitments.get_all()) { + commitment = curve::BN254::AffineElement::one(); // arbitrary mock commitment } - return verifier_inputs; + return decider_verification_key; } /** @@ -107,12 +238,12 @@ ClientIVC::VerifierInputs create_dummy_vkey_and_proof_oink(const TraceSettings& */ ClientIVC::MergeProof create_dummy_merge_proof() { - using FF = bb::fr; + using FF = ClientIVC::FF; std::vector proof; FF mock_val(5); - auto mock_commitment = curve::BN254::AffineElement::one() * mock_val; + auto mock_commitment = curve::BN254::AffineElement::one(); std::vector mock_commitment_frs = field_conversion::convert_to_bn254_frs(mock_commitment); // There are 12 entities in the merge protocol (4 columns x 3 components; aggregate transcript, previous aggregate @@ -148,8 +279,7 @@ void populate_dummy_vk_in_constraint(MegaCircuitBuilder& builder, const std::shared_ptr& mock_verification_key, std::vector& key_witness_indices) { - using Flavor = MegaFlavor; - using FF = Flavor::FF; + using FF = ClientIVC::FF; // Convert the VerificationKey to fields std::vector mock_vk_fields = mock_verification_key->to_field_elements(); diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/ivc_recursion_constraint.hpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/ivc_recursion_constraint.hpp index 7709bee73c0..8d89c6ecfc5 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/ivc_recursion_constraint.hpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/ivc_recursion_constraint.hpp @@ -11,12 +11,28 @@ using namespace bb; // TODO(https://github.com/AztecProtocol/barretenberg/issues/1148): logic in this file is incomplete. See issue for // details. 
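(For orientation, a condensed sketch of the workflow the new mock helpers enable and that the tests added in this patch exercise: deriving a kernel VK without real witnesses by mocking the IVC state first. This is an illustrative sketch only; construct_mock_kernel_program and construct_kernel_vk_from_acir_program are test-fixture helpers defined in ivc_recursion_constraint.test.cpp rather than public API, and exact signatures follow the test code below.)

    // Assumed context: SMALL_TEST_STRUCTURE and the test-fixture helpers from
    // ivc_recursion_constraint.test.cpp are available.
    const TraceSettings trace_settings{ SMALL_TEST_STRUCTURE };
    ClientIVC ivc{ trace_settings };

    // Mock the IVC state as it would be just before a reset/tail kernel:
    // a single PG verification-queue entry produced by a prior kernel accumulation.
    acir_format::mock_ivc_accumulation(ivc, ClientIVC::QUEUE_TYPE::PG, /*is_kernel=*/true);

    // Build the kernel ACIR program from the mocked verification queue, then
    // construct its VK directly, with no witness ever being computed.
    AcirProgram program = construct_mock_kernel_program(ivc.verification_queue);
    auto kernel_vk = construct_kernel_vk_from_acir_program(program, trace_settings);
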
-ClientIVC create_mock_ivc_from_constraints(const std::vector& constraints); +ClientIVC create_mock_ivc_from_constraints(const std::vector& constraints, + const TraceSettings& trace_settings); -void mock_ivc_oink_accumulation(ClientIVC& ivc, size_t num_public_inputs_app = 0); +void mock_ivc_accumulation(ClientIVC& ivc, ClientIVC::QUEUE_TYPE type, const bool is_kernel); -ClientIVC::VerifierInputs create_dummy_vkey_and_proof_oink(const TraceSettings& trace_settings, - const size_t num_public_inputs); +std::vector create_mock_oink_proof(const size_t dyadic_size, + const size_t num_public_inputs, + const size_t pub_inputs_offset); + +std::vector create_mock_pg_proof(const size_t dyadic_size, + const size_t num_public_inputs, + const size_t pub_inputs_offset); + +std::shared_ptr create_mock_honk_vk(const size_t dyadic_size, + const size_t num_public_inputs, + const size_t pub_inputs_offset); + +std::shared_ptr create_mock_decider_vk(); + +ClientIVC::VerifierInputs create_mock_verification_queue_entry(const ClientIVC::QUEUE_TYPE type, + const TraceSettings& trace_settings, + const bool is_kernel); ClientIVC::MergeProof create_dummy_merge_proof(); diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/ivc_recursion_constraint.test.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/ivc_recursion_constraint.test.cpp index 4ea3df6a72e..1cd9d5b5595 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/ivc_recursion_constraint.test.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/ivc_recursion_constraint.test.cpp @@ -28,17 +28,13 @@ class IvcRecursionConstraintTest : public ::testing::Test { /** * @brief Constuct a simple arbitrary circuit to represent a mock app circuit - * @details Includes a single unique public input for robustness and to distinguish the public inputs of one "app" - * from another in testing. 
* */ static Builder construct_mock_app_circuit(ClientIVC& ivc) { Builder circuit{ ivc.goblin.op_queue }; - GoblinMockCircuits::construct_simple_circuit(circuit); - - // add a random (unique) public input - circuit.add_public_variable(FF::random_element()); + GoblinMockCircuits::add_some_ecc_op_gates(circuit); + MockCircuits::add_arithmetic_gates(circuit); return circuit; } @@ -49,20 +45,17 @@ class IvcRecursionConstraintTest : public ::testing::Test { * * @param input bberg style proof and verification key * @param witness Array of witnesses into which the above data is placed - * @param num_public_inputs Number of public inputs to be extracted from the proof * @return RecursionConstraint */ - static RecursionConstraint create_recursion_constraint(const VerifierInputs& input, - SlabVector& witness, - const size_t num_public_inputs) + static RecursionConstraint create_recursion_constraint(const VerifierInputs& input, SlabVector& witness) { // Assemble simple vectors of witnesses for vkey and proof std::vector key_witnesses = input.honk_verification_key->to_field_elements(); std::vector proof_witnesses = input.proof; // proof contains the public inputs at this stage // Construct witness indices for each component in the constraint; populate the witness array - auto [key_indices, proof_indices, public_inputs_indices] = - ProofSurgeon::populate_recursion_witness_data(witness, proof_witnesses, key_witnesses, num_public_inputs); + auto [key_indices, proof_indices, public_inputs_indices] = ProofSurgeon::populate_recursion_witness_data( + witness, proof_witnesses, key_witnesses, /*num_public_inputs_to_extract=*/0); // The proof type can be either Oink or PG PROOF_TYPE proof_type = input.type == QUEUE_TYPE::OINK ? OINK : PG; @@ -88,19 +81,15 @@ class IvcRecursionConstraintTest : public ::testing::Test { * @param inner_circuit_num_pub_inputs Num pub inputs for each circuit whose accumulation is recursively verified * @return Builder */ - static AcirProgram construct_mock_kernel_program(const VerificationQueue& verification_queue, - const std::vector& inner_circuit_num_pub_inputs) + static AcirProgram construct_mock_kernel_program(const VerificationQueue& verification_queue) { - ASSERT(verification_queue.size() == inner_circuit_num_pub_inputs.size()); - AcirProgram program; // Construct recursion constraints based on the ivc verification queue; populate the witness along the way std::vector ivc_recursion_constraints; ivc_recursion_constraints.reserve(verification_queue.size()); - for (size_t idx = 0; idx < verification_queue.size(); ++idx) { - ivc_recursion_constraints.push_back(create_recursion_constraint( - verification_queue[idx], program.witness, inner_circuit_num_pub_inputs[idx])); + for (const auto& queue_entry : verification_queue) { + ivc_recursion_constraints.push_back(create_recursion_constraint(queue_entry, program.witness)); } // Construct a constraint system containing the business logic and ivc recursion constraints @@ -113,6 +102,32 @@ class IvcRecursionConstraintTest : public ::testing::Test { return program; } + /** + * @brief Construct a kernel circuit VK from an acir program with IVC recursion constraints + * + * @param program Acir program representing a kernel circuit + * @param trace_settings needed for construction of the VK + * @return std::shared_ptr + */ + static std::shared_ptr construct_kernel_vk_from_acir_program( + AcirProgram& program, const TraceSettings& trace_settings) + { + // Create a mock IVC instance from the IVC recursion constraints in the kernel program + 
ClientIVC mock_ivc = + create_mock_ivc_from_constraints(program.constraints.ivc_recursion_constraints, trace_settings); + + // Create kernel circuit from kernel program and the mocked IVC (empty witness mimics VK construction context) + Builder kernel = acir_format::create_kernel_circuit(program.constraints, mock_ivc, /*witness=*/{}); + // Note: adding pairing point normally happens in accumulate() + kernel.add_pairing_point_accumulator(stdlib::recursion::init_default_agg_obj_indices(kernel)); + + // Manually construct the VK for the kernel circuit + auto proving_key = std::make_shared(kernel, trace_settings); + MegaProver prover(proving_key); + + return std::make_shared(prover.proving_key->proving_key); + } + protected: void SetUp() override { @@ -136,7 +151,7 @@ TEST_F(IvcRecursionConstraintTest, AccumulateTwo) ivc.accumulate(app_circuit); // Construct kernel_0 consisting only of the kernel completion logic - AcirProgram program_0 = construct_mock_kernel_program(ivc.verification_queue, { app_circuit.public_inputs.size() }); + AcirProgram program_0 = construct_mock_kernel_program(ivc.verification_queue); Builder kernel_0 = acir_format::create_kernel_circuit(program_0.constraints, ivc, program_0.witness); EXPECT_TRUE(CircuitChecker::check(kernel_0)); @@ -158,8 +173,7 @@ TEST_F(IvcRecursionConstraintTest, AccumulateFour) ivc.accumulate(app_circuit_0); // Construct kernel_0; consists of a single oink recursive verification for app (plus databus/merge logic) - size_t num_pub_inputs_app_0 = app_circuit_0.public_inputs.size(); - AcirProgram program_0 = construct_mock_kernel_program(ivc.verification_queue, { num_pub_inputs_app_0 }); + AcirProgram program_0 = construct_mock_kernel_program(ivc.verification_queue); Builder kernel_0 = acir_format::create_kernel_circuit(program_0.constraints, ivc, program_0.witness); ivc.accumulate(kernel_0); @@ -168,10 +182,7 @@ TEST_F(IvcRecursionConstraintTest, AccumulateFour) ivc.accumulate(app_circuit_1); // Construct kernel_1; consists of two PG recursive verifications for kernel_0 and app_1 (plus databus/merge logic) - size_t num_pub_inputs_kernel_0 = kernel_0.public_inputs.size(); - size_t num_pub_inputs_app_1 = app_circuit_0.public_inputs.size(); - AcirProgram program_1 = - construct_mock_kernel_program(ivc.verification_queue, { num_pub_inputs_kernel_0, num_pub_inputs_app_1 }); + AcirProgram program_1 = construct_mock_kernel_program(ivc.verification_queue); Builder kernel_1 = acir_format::create_kernel_circuit(program_1.constraints, ivc, program_1.witness); EXPECT_TRUE(CircuitChecker::check(kernel_1)); @@ -187,17 +198,15 @@ TEST_F(IvcRecursionConstraintTest, GenerateVK) // First, construct the kernel VK by running the full IVC (accumulate one app and one kernel) std::shared_ptr expected_kernel_vk; - size_t num_app_public_inputs = 0; { ClientIVC ivc{ trace_settings }; // Construct and accumulate mock app_circuit Builder app_circuit = construct_mock_app_circuit(ivc); ivc.accumulate(app_circuit); - num_app_public_inputs = app_circuit.public_inputs.size(); // Construct and accumulate kernel consisting only of the kernel completion logic - AcirProgram program = construct_mock_kernel_program(ivc.verification_queue, { num_app_public_inputs }); + AcirProgram program = construct_mock_kernel_program(ivc.verification_queue); Builder kernel = acir_format::create_kernel_circuit(program.constraints, ivc, program.witness); ivc.accumulate(kernel); expected_kernel_vk = ivc.verification_queue.back().honk_verification_key; @@ -208,10 +217,10 @@ 
TEST_F(IvcRecursionConstraintTest, GenerateVK) { ClientIVC ivc{ trace_settings }; - acir_format::mock_ivc_oink_accumulation(ivc, num_app_public_inputs - bb::PAIRING_POINT_ACCUMULATOR_SIZE); + acir_format::mock_ivc_accumulation(ivc, ClientIVC::QUEUE_TYPE::OINK, /*is_kernel=*/false); // Construct kernel consisting only of the kernel completion logic - AcirProgram program = construct_mock_kernel_program(ivc.verification_queue, { num_app_public_inputs }); + AcirProgram program = construct_mock_kernel_program(ivc.verification_queue); Builder kernel = acir_format::create_kernel_circuit(program.constraints, ivc); // Note that this would normally happen in accumulate() kernel.add_pairing_point_accumulator(stdlib::recursion::init_default_agg_obj_indices(kernel)); @@ -229,23 +238,21 @@ TEST_F(IvcRecursionConstraintTest, GenerateVK) } // Test generation of "init" kernel VK via dummy IVC data -TEST_F(IvcRecursionConstraintTest, GenerateVKFromConstraints) +TEST_F(IvcRecursionConstraintTest, GenerateInitKernelVKFromConstraints) { const TraceSettings trace_settings{ SMALL_TEST_STRUCTURE }; // First, construct the kernel VK by running the full IVC (accumulate one app and one kernel) std::shared_ptr expected_kernel_vk; - size_t num_app_public_inputs = 0; { ClientIVC ivc{ trace_settings }; // Construct and accumulate mock app_circuit Builder app_circuit = construct_mock_app_circuit(ivc); ivc.accumulate(app_circuit); - num_app_public_inputs = app_circuit.public_inputs.size(); // Construct and accumulate kernel consisting only of the kernel completion logic - AcirProgram program = construct_mock_kernel_program(ivc.verification_queue, { num_app_public_inputs }); + AcirProgram program = construct_mock_kernel_program(ivc.verification_queue); Builder kernel = acir_format::create_kernel_circuit(program.constraints, ivc, program.witness); ivc.accumulate(kernel); @@ -258,22 +265,120 @@ TEST_F(IvcRecursionConstraintTest, GenerateVKFromConstraints) ClientIVC ivc{ trace_settings }; // Construct kernel consisting only of the kernel completion logic - acir_format::mock_ivc_oink_accumulation(ivc, num_app_public_inputs - bb::PAIRING_POINT_ACCUMULATOR_SIZE); - AcirProgram program = construct_mock_kernel_program(ivc.verification_queue, { num_app_public_inputs }); - program.witness = {}; // erase witness to mimic VK construction context + acir_format::mock_ivc_accumulation(ivc, ClientIVC::QUEUE_TYPE::OINK, /*is_kernel=*/false); + AcirProgram program = construct_mock_kernel_program(ivc.verification_queue); - // Create a mock IVC instance from the IVC recursion constraints in the kernel program - ClientIVC mock_ivc = create_mock_ivc_from_constraints(program.constraints.ivc_recursion_constraints); + kernel_vk = construct_kernel_vk_from_acir_program(program, trace_settings); + } - // Create a kernel circuit from the kernel program and the mocked IVC - Builder kernel = acir_format::create_kernel_circuit(program.constraints, mock_ivc); - // Note: adding pairing point normally happens in accumulate() - kernel.add_pairing_point_accumulator(stdlib::recursion::init_default_agg_obj_indices(kernel)); + // PCS verification keys will not match so set to null before comparing + kernel_vk->pcs_verification_key = nullptr; + expected_kernel_vk->pcs_verification_key = nullptr; - // Manually construct the VK for the kernel circuit - auto proving_key = std::make_shared>(kernel, ivc.trace_settings); - MegaProver prover(proving_key); - kernel_vk = std::make_shared(prover.proving_key->proving_key); + // Compare the VK constructed via running the IVc 
with the one constructed via mocking + EXPECT_EQ(*kernel_vk.get(), *expected_kernel_vk.get()); +} + +// Test generation of "reset" or "tail" kernel VK via dummy IVC data +TEST_F(IvcRecursionConstraintTest, GenerateResetKernelVKFromConstraints) +{ + const TraceSettings trace_settings{ SMALL_TEST_STRUCTURE }; + + // First, construct the kernel VK by running the full IVC (accumulate one app and one kernel) + std::shared_ptr expected_kernel_vk; + { + ClientIVC ivc{ trace_settings }; + + // Construct and accumulate mock app_circuit + Builder app_circuit = construct_mock_app_circuit(ivc); + ivc.accumulate(app_circuit); + + { // Construct and accumulate a mock INIT kernel (oink recursion for app accumulation) + AcirProgram program = construct_mock_kernel_program(ivc.verification_queue); + Builder kernel = acir_format::create_kernel_circuit(program.constraints, ivc, program.witness); + ivc.accumulate(kernel); + } + + { // Construct and accumulate a mock RESET kernel (PG recursion for kernel accumulation) + EXPECT_TRUE(ivc.verification_queue.size() == 1); + EXPECT_TRUE(ivc.verification_queue[0].type == bb::ClientIVC::QUEUE_TYPE::PG); + AcirProgram program = construct_mock_kernel_program(ivc.verification_queue); + Builder kernel = acir_format::create_kernel_circuit(program.constraints, ivc, program.witness); + ivc.accumulate(kernel); + } + + expected_kernel_vk = ivc.verification_queue.back().honk_verification_key; + } + + // Now, construct the kernel VK by mocking the IVC state prior to kernel construction + std::shared_ptr kernel_vk; + { + ClientIVC ivc{ trace_settings }; + + // Construct kernel consisting only of the kernel completion logic + acir_format::mock_ivc_accumulation(ivc, ClientIVC::QUEUE_TYPE::PG, /*is_kernel=*/true); + AcirProgram program = construct_mock_kernel_program(ivc.verification_queue); + + kernel_vk = construct_kernel_vk_from_acir_program(program, trace_settings); + } + + // PCS verification keys will not match so set to null before comparing + kernel_vk->pcs_verification_key = nullptr; + expected_kernel_vk->pcs_verification_key = nullptr; + + // Compare the VK constructed via running the IVc with the one constructed via mocking + EXPECT_EQ(*kernel_vk.get(), *expected_kernel_vk.get()); +} + +// Test generation of "inner" kernel VK via dummy IVC data +TEST_F(IvcRecursionConstraintTest, GenerateInnerKernelVKFromConstraints) +{ + const TraceSettings trace_settings{ SMALL_TEST_STRUCTURE }; + + // First, construct the kernel VK by running the full IVC (accumulate one app and one kernel) + std::shared_ptr expected_kernel_vk; + { + ClientIVC ivc{ trace_settings }; + + { // Construct and accumulate mock app_circuit + Builder app_circuit = construct_mock_app_circuit(ivc); + ivc.accumulate(app_circuit); + } + + { // Construct and accumulate a mock INIT kernel (oink recursion for app accumulation) + AcirProgram program = construct_mock_kernel_program(ivc.verification_queue); + Builder kernel = acir_format::create_kernel_circuit(program.constraints, ivc, program.witness); + ivc.accumulate(kernel); + } + + { // Construct and accumulate a second mock app_circuit + Builder app_circuit = construct_mock_app_circuit(ivc); + ivc.accumulate(app_circuit); + } + + { // Construct and accumulate a mock RESET kernel (PG recursion for kernel accumulation) + EXPECT_TRUE(ivc.verification_queue.size() == 2); + EXPECT_TRUE(ivc.verification_queue[0].type == bb::ClientIVC::QUEUE_TYPE::PG); + EXPECT_TRUE(ivc.verification_queue[1].type == bb::ClientIVC::QUEUE_TYPE::PG); + AcirProgram program = 
construct_mock_kernel_program(ivc.verification_queue); + Builder kernel = acir_format::create_kernel_circuit(program.constraints, ivc, program.witness); + ivc.accumulate(kernel); + } + + expected_kernel_vk = ivc.verification_queue.back().honk_verification_key; + } + + // Now, construct the kernel VK by mocking the IVC state prior to kernel construction + std::shared_ptr kernel_vk; + { + ClientIVC ivc{ trace_settings }; + + // Construct kernel consisting only of the kernel completion logic + acir_format::mock_ivc_accumulation(ivc, ClientIVC::QUEUE_TYPE::PG, /*is_kernel=*/true); + acir_format::mock_ivc_accumulation(ivc, ClientIVC::QUEUE_TYPE::PG, /*is_kernel=*/false); + AcirProgram program = construct_mock_kernel_program(ivc.verification_queue); + + kernel_vk = construct_kernel_vk_from_acir_program(program, trace_settings); } // PCS verification keys will not match so set to null before comparing diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/proof_surgeon.hpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/proof_surgeon.hpp index 0b2d1768bca..40bbedb02a3 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/proof_surgeon.hpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/proof_surgeon.hpp @@ -146,11 +146,9 @@ class ProofSurgeon { static RecursionWitnessData populate_recursion_witness_data(bb::SlabVector& witness, std::vector& proof_witnesses, const std::vector& key_witnesses, - const size_t num_public_inputs) + const size_t num_public_inputs_to_extract) { // Extract all public inputs except for those corresponding to the aggregation object - ASSERT(num_public_inputs >= bb::PAIRING_POINT_ACCUMULATOR_SIZE); - const size_t num_public_inputs_to_extract = num_public_inputs - bb::PAIRING_POINT_ACCUMULATOR_SIZE; std::vector public_input_witnesses = cut_public_inputs_from_proof(proof_witnesses, num_public_inputs_to_extract); diff --git a/barretenberg/cpp/src/barretenberg/stdlib/protogalaxy_verifier/protogalaxy_recursive_verifier.cpp b/barretenberg/cpp/src/barretenberg/stdlib/protogalaxy_verifier/protogalaxy_recursive_verifier.cpp index 10739d2aa45..607f7b17f17 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/protogalaxy_verifier/protogalaxy_recursive_verifier.cpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/protogalaxy_verifier/protogalaxy_recursive_verifier.cpp @@ -167,6 +167,7 @@ std::shared_ptr ProtogalaxyRecursiv accumulator->is_accumulator = true; accumulator->target_sum = perturbator_evaluation * lagranges[0] + vanishing_polynomial_at_challenge * combiner_quotient_at_challenge; + accumulator->gate_challenges = update_gate_challenges(perturbator_challenge, accumulator->gate_challenges, deltas); // Set the accumulator circuit size data based on the max of the keys being accumulated @@ -178,6 +179,7 @@ std::shared_ptr ProtogalaxyRecursiv for (auto [combination, to_combine] : zip_view(accumulator->alphas, keys_to_fold.get_alphas())) { combination = linear_combination(to_combine, lagranges); } + for (auto [combination, to_combine] : zip_view(accumulator->relation_parameters.get_to_fold(), keys_to_fold.get_relation_parameters())) { combination = linear_combination(to_combine, lagranges); diff --git a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/databus.hpp b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/databus.hpp index 92fda074a99..44e5797f51e 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/databus.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/databus.hpp @@ -95,6 +95,16 @@ struct 
DatabusPropagationData { return os; }; + // Construct an instance of this class with the default settings for a kernel circuit + static DatabusPropagationData kernel_default() + { + DatabusPropagationData data; + data.kernel_return_data_public_input_idx = 0; // kernel return data commitment is first public input + data.app_return_data_public_input_idx = 8; // followed by app return data commitment + data.is_kernel = true; + return data; + } + MSGPACK_FIELDS(app_return_data_public_input_idx, kernel_return_data_public_input_idx, is_kernel); }; From 93cd323e493118ce91097934216a364855a991db Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Tue, 3 Dec 2024 18:59:53 +0000 Subject: [PATCH 09/24] chore!: remove SchnorrVerify opcode (#9897) Please read [contributing guidelines](CONTRIBUTING.md) and remove this line. --- .../cpp/src/barretenberg/dsl/CMakeLists.txt | 2 +- .../dsl/acir_format/acir_format.cpp | 8 - .../dsl/acir_format/acir_format.hpp | 4 - .../dsl/acir_format/acir_format.test.cpp | 217 +----------------- .../dsl/acir_format/acir_format_mocks.cpp | 4 - .../acir_format/acir_to_constraint_buf.cpp | 12 - .../acir_format/bigint_constraint.test.cpp | 5 - .../dsl/acir_format/block_constraint.test.cpp | 6 +- .../dsl/acir_format/ec_operations.test.cpp | 4 +- .../dsl/acir_format/ecdsa_secp256k1.test.cpp | 6 +- .../dsl/acir_format/ecdsa_secp256r1.test.cpp | 8 +- .../honk_recursion_constraint.test.cpp | 2 +- .../dsl/acir_format/multi_scalar_mul.test.cpp | 2 +- .../acir_format/poseidon2_constraint.test.cpp | 2 +- .../acir_format/recursion_constraint.test.cpp | 4 +- .../dsl/acir_format/schnorr_verify.cpp | 111 --------- .../dsl/acir_format/schnorr_verify.hpp | 52 ----- .../dsl/acir_format/serde/acir.hpp | 160 ------------- .../acir_format/sha256_constraint.test.cpp | 2 +- .../schnorr_account_contract/src/main.nr | 25 +- .../src/main.nr | 15 +- .../src/util.nr | 15 +- .../noir-repo/acvm-repo/acir/codegen/acir.cpp | 128 +---------- .../acir/src/circuit/black_box_functions.rs | 30 --- .../acvm-repo/acir/src/circuit/mod.rs | 20 +- .../opcodes/black_box_function_call.rs | 52 ----- .../acir/tests/test_program_serialization.rs | 65 +----- .../acvm-repo/acvm/src/pwg/blackbox/mod.rs | 20 +- .../acvm/src/pwg/blackbox/signature/mod.rs | 1 - .../src/pwg/blackbox/signature/schnorr.rs | 36 --- .../test/browser/execute_circuit.test.ts | 10 - .../acvm_js/test/node/execute_circuit.test.ts | 10 - .../acvm_js/test/shared/multi_scalar_mul.ts | 6 +- .../acvm_js/test/shared/schnorr_verify.ts | 101 -------- .../src/curve_specific_solver.rs | 16 -- .../benches/criterion.rs | 32 +-- .../src/embedded_curve_ops.rs | 45 ++-- .../bn254_blackbox_solver/src/lib.rs | 20 -- .../src/pedersen/commitment.rs | 77 ------- .../src/pedersen/hash.rs | 69 ------ .../bn254_blackbox_solver/src/pedersen/mod.rs | 2 - .../bn254_blackbox_solver/src/schnorr/mod.rs | 147 ------------ .../acvm-repo/brillig/src/black_box.rs | 9 +- .../acvm-repo/brillig_vm/src/black_box.rs | 12 - .../src/acir/generated_acir.rs | 21 +- .../brillig/brillig_gen/brillig_black_box.rs | 21 -- .../noirc_evaluator/src/brillig/brillig_ir.rs | 9 - .../src/brillig/brillig_ir/debug_show.rs | 17 -- .../src/ssa/ir/instruction/call.rs | 1 - .../src/ssa/ir/instruction/call/blackbox.rs | 33 --- .../cryptographic_primitives/schnorr.mdx | 10 - noir/noir-repo/noir_stdlib/src/schnorr.nr | 25 +- .../schnorr_simplification/src/main.nr | 11 +- .../execution_success/schnorr/src/main.nr | 12 +- noir/noir-repo/tooling/lsp/src/solver.rs | 10 - 
.../tooling/profiler/src/opcode_formatter.rs | 2 - 56 files changed, 98 insertions(+), 1648 deletions(-) delete mode 100644 barretenberg/cpp/src/barretenberg/dsl/acir_format/schnorr_verify.cpp delete mode 100644 barretenberg/cpp/src/barretenberg/dsl/acir_format/schnorr_verify.hpp delete mode 100644 noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/signature/schnorr.rs delete mode 100644 noir/noir-repo/acvm-repo/acvm_js/test/shared/schnorr_verify.ts delete mode 100644 noir/noir-repo/acvm-repo/bn254_blackbox_solver/src/pedersen/commitment.rs delete mode 100644 noir/noir-repo/acvm-repo/bn254_blackbox_solver/src/pedersen/hash.rs delete mode 100644 noir/noir-repo/acvm-repo/bn254_blackbox_solver/src/pedersen/mod.rs delete mode 100644 noir/noir-repo/acvm-repo/bn254_blackbox_solver/src/schnorr/mod.rs diff --git a/barretenberg/cpp/src/barretenberg/dsl/CMakeLists.txt b/barretenberg/cpp/src/barretenberg/dsl/CMakeLists.txt index 1f44c3c2746..b0b43a74775 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/CMakeLists.txt +++ b/barretenberg/cpp/src/barretenberg/dsl/CMakeLists.txt @@ -20,4 +20,4 @@ endif() barretenberg_module( dsl ${DSL_DEPENDENCIES} -) \ No newline at end of file +) diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.cpp index 1cb9bb642bd..dd48e644a22 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.cpp @@ -109,14 +109,6 @@ void build_constraints(Builder& builder, constraint_system.original_opcode_indices.sha256_compression[i]); } - // Add schnorr constraints - for (size_t i = 0; i < constraint_system.schnorr_constraints.size(); ++i) { - const auto& constraint = constraint_system.schnorr_constraints.at(i); - create_schnorr_verify_constraints(builder, constraint); - gate_counter.track_diff(constraint_system.gates_per_opcode, - constraint_system.original_opcode_indices.schnorr_constraints.at(i)); - } - // Add ECDSA k1 constraints for (size_t i = 0; i < constraint_system.ecdsa_k1_constraints.size(); ++i) { const auto& constraint = constraint_system.ecdsa_k1_constraints.at(i); diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.hpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.hpp index c092e55fd6f..aaa7d40ac0b 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.hpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.hpp @@ -22,7 +22,6 @@ #include "poseidon2_constraint.hpp" #include "range_constraint.hpp" #include "recursion_constraint.hpp" -#include "schnorr_verify.hpp" #include "sha256_constraint.hpp" #include #include @@ -41,7 +40,6 @@ struct AcirFormatOriginalOpcodeIndices { std::vector range_constraints; std::vector aes128_constraints; std::vector sha256_compression; - std::vector schnorr_constraints; std::vector ecdsa_k1_constraints; std::vector ecdsa_r1_constraints; std::vector blake2s_constraints; @@ -85,7 +83,6 @@ struct AcirFormat { std::vector range_constraints; std::vector aes128_constraints; std::vector sha256_compression; - std::vector schnorr_constraints; std::vector ecdsa_k1_constraints; std::vector ecdsa_r1_constraints; std::vector blake2s_constraints; @@ -134,7 +131,6 @@ struct AcirFormat { range_constraints, aes128_constraints, sha256_compression, - schnorr_constraints, ecdsa_k1_constraints, ecdsa_r1_constraints, blake2s_constraints, diff --git 
a/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.test.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.test.cpp index d3fb922732e..87e95cbcd8a 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.test.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.test.cpp @@ -5,7 +5,6 @@ #include "acir_format.hpp" #include "acir_format_mocks.hpp" #include "barretenberg/common/streams.hpp" -#include "barretenberg/crypto/schnorr/schnorr.hpp" #include "barretenberg/plonk/composer/standard_composer.hpp" #include "barretenberg/plonk/composer/ultra_composer.hpp" #include "barretenberg/plonk/proof_system/types/proof.hpp" @@ -45,7 +44,6 @@ TEST_F(AcirFormatTests, TestASingleConstraintNoPubInputs) .range_constraints = {}, .aes128_constraints = {}, .sha256_compression = {}, - .schnorr_constraints = {}, .ecdsa_k1_constraints = {}, .ecdsa_r1_constraints = {}, .blake2s_constraints = {}, @@ -165,7 +163,6 @@ TEST_F(AcirFormatTests, TestLogicGateFromNoirCircuit) .range_constraints = { range_a, range_b }, .aes128_constraints = {}, .sha256_compression = {}, - .schnorr_constraints = {}, .ecdsa_k1_constraints = {}, .ecdsa_r1_constraints = {}, .blake2s_constraints = {}, @@ -205,215 +202,6 @@ TEST_F(AcirFormatTests, TestLogicGateFromNoirCircuit) EXPECT_EQ(verifier.verify_proof(proof), true); } -TEST_F(AcirFormatTests, TestSchnorrVerifyPass) -{ - std::vector range_constraints; - std::vector range_opcode_indices; - size_t current_opcode = 0; - for (uint32_t i = 0; i < 10; i++) { - range_constraints.push_back(RangeConstraint{ - .witness = i, - .num_bits = 15, - }); - range_opcode_indices.push_back(current_opcode++); - } - - std::array signature; - for (uint32_t i = 0, value = 12; i < 64; i++, value++) { - signature[i] = value; - range_constraints.push_back(RangeConstraint{ - .witness = value, - .num_bits = 15, - }); - range_opcode_indices.push_back(current_opcode++); - } - - SchnorrConstraint schnorr_constraint{ - .message = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 }, - .public_key_x = 10, - .public_key_y = 11, - .result = 76, - .signature = signature, - }; - - AcirFormat constraint_system{ - .varnum = 81, - .num_acir_opcodes = 76, - .public_inputs = {}, - .logic_constraints = {}, - .range_constraints = range_constraints, - .aes128_constraints = {}, - .sha256_compression = {}, - .schnorr_constraints = { schnorr_constraint }, - .ecdsa_k1_constraints = {}, - .ecdsa_r1_constraints = {}, - .blake2s_constraints = {}, - .blake3_constraints = {}, - .keccak_permutations = {}, - .poseidon2_constraints = {}, - .multi_scalar_mul_constraints = {}, - .ec_add_constraints = {}, - .recursion_constraints = {}, - .honk_recursion_constraints = {}, - .avm_recursion_constraints = {}, - .ivc_recursion_constraints = {}, - .bigint_from_le_bytes_constraints = {}, - .bigint_to_le_bytes_constraints = {}, - .bigint_operations = {}, - .assert_equalities = {}, - .poly_triple_constraints = { poly_triple{ - .a = schnorr_constraint.result, - .b = schnorr_constraint.result, - .c = schnorr_constraint.result, - .q_m = 0, - .q_l = 0, - .q_r = 0, - .q_o = 1, - .q_c = fr::neg_one(), - } }, - .quad_constraints = {}, - .big_quad_constraints = {}, - .block_constraints = {}, - .original_opcode_indices = create_empty_original_opcode_indices(), - }; - mock_opcode_indices(constraint_system); - - std::string message_string = "tenletters"; - schnorr_key_pair account; - account.private_key = grumpkin::fr::random_element(); - account.public_key = grumpkin::g1::one * account.private_key; - schnorr_signature 
signature_raw = - schnorr_construct_signature(message_string, account); - uint256_t pub_x = account.public_key.x; - uint256_t pub_y = account.public_key.y; - WitnessVector witness{ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, pub_x, pub_y, 5, 202, 31, 146, - 81, 242, 246, 69, 43, 107, 249, 153, 198, 44, 14, 111, 191, 121, 137, 166, - 160, 103, 18, 181, 243, 233, 226, 95, 67, 16, 37, 128, 85, 76, 19, 253, - 30, 77, 192, 53, 138, 205, 69, 33, 236, 163, 83, 194, 84, 137, 184, 221, - 176, 121, 179, 27, 63, 70, 54, 16, 176, 250, 39, 239, 1, 0, 0, 0 }; - for (size_t i = 0; i < 32; ++i) { - witness[13 + i - 1] = signature_raw.s[i]; - witness[13 + 32 + i - 1] = signature_raw.e[i]; - } - for (size_t i = 0; i < 10; ++i) { - witness[i] = message_string[i]; - } - - auto builder = create_circuit(constraint_system, /*recursive*/ false, /*size_hint*/ 0, witness); - - auto composer = Composer(); - auto prover = composer.create_ultra_with_keccak_prover(builder); - auto proof = prover.construct_proof(); - - auto verifier = composer.create_ultra_with_keccak_verifier(builder); - - EXPECT_EQ(verifier.verify_proof(proof), true); -} - -TEST_F(AcirFormatTests, TestSchnorrVerifySmallRange) -{ - std::vector range_constraints; - std::vector range_opcode_indices; - size_t current_opcode = 0; - - for (uint32_t i = 0; i < 10; i++) { - range_constraints.push_back(RangeConstraint{ - .witness = i, - .num_bits = 8, - }); - range_opcode_indices.push_back(current_opcode++); - } - - std::array signature; - for (uint32_t i = 0, value = 12; i < 64; i++, value++) { - signature[i] = value; - range_constraints.push_back(RangeConstraint{ - .witness = value, - .num_bits = 8, - }); - range_opcode_indices.push_back(current_opcode++); - } - - SchnorrConstraint schnorr_constraint{ - .message = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 }, - .public_key_x = 10, - .public_key_y = 11, - .result = 76, - .signature = signature, - }; - AcirFormat constraint_system{ - .varnum = 81, - .num_acir_opcodes = 76, - .public_inputs = {}, - .logic_constraints = {}, - .range_constraints = range_constraints, - .aes128_constraints = {}, - .sha256_compression = {}, - .schnorr_constraints = { schnorr_constraint }, - .ecdsa_k1_constraints = {}, - .ecdsa_r1_constraints = {}, - .blake2s_constraints = {}, - .blake3_constraints = {}, - .keccak_permutations = {}, - .poseidon2_constraints = {}, - .multi_scalar_mul_constraints = {}, - .ec_add_constraints = {}, - .recursion_constraints = {}, - .honk_recursion_constraints = {}, - .avm_recursion_constraints = {}, - .ivc_recursion_constraints = {}, - .bigint_from_le_bytes_constraints = {}, - .bigint_to_le_bytes_constraints = {}, - .bigint_operations = {}, - .assert_equalities = {}, - .poly_triple_constraints = { poly_triple{ - .a = schnorr_constraint.result, - .b = schnorr_constraint.result, - .c = schnorr_constraint.result, - .q_m = 0, - .q_l = 0, - .q_r = 0, - .q_o = 1, - .q_c = fr::neg_one(), - } }, - .quad_constraints = {}, - .big_quad_constraints = {}, - .block_constraints = {}, - .original_opcode_indices = create_empty_original_opcode_indices(), - }; - mock_opcode_indices(constraint_system); - - std::string message_string = "tenletters"; - schnorr_key_pair account; - account.private_key = grumpkin::fr::random_element(); - account.public_key = grumpkin::g1::one * account.private_key; - schnorr_signature signature_raw = - schnorr_construct_signature(message_string, account); - uint256_t pub_x = account.public_key.x; - uint256_t pub_y = account.public_key.y; - WitnessVector witness{ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, pub_x, pub_y, 5, 202, 31, 146, 
- 81, 242, 246, 69, 43, 107, 249, 153, 198, 44, 14, 111, 191, 121, 137, 166, - 160, 103, 18, 181, 243, 233, 226, 95, 67, 16, 37, 128, 85, 76, 19, 253, - 30, 77, 192, 53, 138, 205, 69, 33, 236, 163, 83, 194, 84, 137, 184, 221, - 176, 121, 179, 27, 63, 70, 54, 16, 176, 250, 39, 239, 1, 0, 0, 0 }; - for (size_t i = 0; i < 32; ++i) { - witness[13 + i - 1] = signature_raw.s[i]; - witness[13 + 32 + i - 1] = signature_raw.e[i]; - } - for (size_t i = 0; i < 10; ++i) { - witness[i] = message_string[i]; - } - - // TODO: actually sign a schnorr signature! - auto builder = create_circuit(constraint_system, /*recursive*/ false, /*size_hint*/ 0, witness); - - auto composer = Composer(); - auto prover = composer.create_ultra_with_keccak_prover(builder); - auto proof = prover.construct_proof(); - auto verifier = composer.create_ultra_with_keccak_verifier(builder); - EXPECT_EQ(verifier.verify_proof(proof), true); -} - TEST_F(AcirFormatTests, TestKeccakPermutation) { Keccakf1600 @@ -457,7 +245,6 @@ TEST_F(AcirFormatTests, TestKeccakPermutation) .range_constraints = {}, .aes128_constraints = {}, .sha256_compression = {}, - .schnorr_constraints = {}, .ecdsa_k1_constraints = {}, .ecdsa_r1_constraints = {}, .blake2s_constraints = {}, @@ -530,7 +317,6 @@ TEST_F(AcirFormatTests, TestCollectsGateCounts) .range_constraints = {}, .aes128_constraints = {}, .sha256_compression = {}, - .schnorr_constraints = {}, .ecdsa_k1_constraints = {}, .ecdsa_r1_constraints = {}, .blake2s_constraints = {}, @@ -660,7 +446,6 @@ TEST_F(AcirFormatTests, TestBigAdd) .range_constraints = {}, .aes128_constraints = {}, .sha256_compression = {}, - .schnorr_constraints = {}, .ecdsa_k1_constraints = {}, .ecdsa_r1_constraints = {}, .blake2s_constraints = {}, @@ -695,4 +480,4 @@ TEST_F(AcirFormatTests, TestBigAdd) EXPECT_TRUE(CircuitChecker::check(builder)); auto verifier = composer.create_verifier(builder); EXPECT_EQ(verifier.verify_proof(proof), true); -} \ No newline at end of file +} diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format_mocks.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format_mocks.cpp index fdf5a363532..5799df0835e 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format_mocks.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format_mocks.cpp @@ -7,7 +7,6 @@ acir_format::AcirFormatOriginalOpcodeIndices create_empty_original_opcode_indice .range_constraints = {}, .aes128_constraints = {}, .sha256_compression = {}, - .schnorr_constraints = {}, .ecdsa_k1_constraints = {}, .ecdsa_r1_constraints = {}, .blake2s_constraints = {}, @@ -45,9 +44,6 @@ void mock_opcode_indices(acir_format::AcirFormat& constraint_system) for (size_t i = 0; i < constraint_system.sha256_compression.size(); i++) { constraint_system.original_opcode_indices.sha256_compression.push_back(current_opcode++); } - for (size_t i = 0; i < constraint_system.schnorr_constraints.size(); i++) { - constraint_system.original_opcode_indices.schnorr_constraints.push_back(current_opcode++); - } for (size_t i = 0; i < constraint_system.ecdsa_k1_constraints.size(); i++) { constraint_system.original_opcode_indices.ecdsa_k1_constraints.push_back(current_opcode++); } diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_to_constraint_buf.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_to_constraint_buf.cpp index 0a4c292db20..a0d24e70e0b 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_to_constraint_buf.cpp +++ 
b/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_to_constraint_buf.cpp @@ -560,18 +560,6 @@ void handle_blackbox_func_call(Program::Opcode::BlackBoxFuncCall const& arg, af.constrained_witness.insert(output); } af.original_opcode_indices.blake3_constraints.push_back(opcode_index); - } else if constexpr (std::is_same_v) { - auto input_pkey_x = get_witness_from_function_input(arg.public_key_x); - auto input_pkey_y = get_witness_from_function_input(arg.public_key_y); - af.schnorr_constraints.push_back(SchnorrConstraint{ - .message = map(arg.message, [](auto& e) { return get_witness_from_function_input(e); }), - .public_key_x = input_pkey_x, - .public_key_y = input_pkey_y, - .result = arg.output.value, - .signature = map(arg.signature, [](auto& e) { return get_witness_from_function_input(e); }), - }); - af.original_opcode_indices.schnorr_constraints.push_back(opcode_index); - af.constrained_witness.insert(af.schnorr_constraints.back().result); } else if constexpr (std::is_same_v) { af.ecdsa_k1_constraints.push_back(EcdsaSecp256k1Constraint{ .hashed_message = diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/bigint_constraint.test.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/bigint_constraint.test.cpp index fbb07a8bdc1..00021bcbbba 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/bigint_constraint.test.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/bigint_constraint.test.cpp @@ -178,7 +178,6 @@ TEST_F(BigIntTests, TestBigIntConstraintMultiple) .range_constraints = {}, .aes128_constraints = {}, .sha256_compression = {}, - .schnorr_constraints = {}, .ecdsa_k1_constraints = {}, .ecdsa_r1_constraints = {}, .blake2s_constraints = {}, @@ -250,7 +249,6 @@ TEST_F(BigIntTests, TestBigIntConstraintSimple) .range_constraints = {}, .aes128_constraints = {}, .sha256_compression = {}, - .schnorr_constraints = {}, .ecdsa_k1_constraints = {}, .ecdsa_r1_constraints = {}, .blake2s_constraints = {}, @@ -306,7 +304,6 @@ TEST_F(BigIntTests, TestBigIntConstraintReuse) .range_constraints = {}, .aes128_constraints = {}, .sha256_compression = {}, - .schnorr_constraints = {}, .ecdsa_k1_constraints = {}, .ecdsa_r1_constraints = {}, .blake2s_constraints = {}, @@ -367,7 +364,6 @@ TEST_F(BigIntTests, TestBigIntConstraintReuse2) .range_constraints = {}, .aes128_constraints = {}, .sha256_compression = {}, - .schnorr_constraints = {}, .ecdsa_k1_constraints = {}, .ecdsa_r1_constraints = {}, .blake2s_constraints = {}, @@ -449,7 +445,6 @@ TEST_F(BigIntTests, TestBigIntDIV) .range_constraints = {}, .aes128_constraints = {}, .sha256_compression = {}, - .schnorr_constraints = {}, .ecdsa_k1_constraints = {}, .ecdsa_r1_constraints = {}, .blake2s_constraints = {}, diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/block_constraint.test.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/block_constraint.test.cpp index 571172e6876..6eed50bf027 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/block_constraint.test.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/block_constraint.test.cpp @@ -146,7 +146,7 @@ TEST_F(UltraPlonkRAM, TestBlockConstraint) .range_constraints = {}, .aes128_constraints = {}, .sha256_compression = {}, - .schnorr_constraints = {}, + .ecdsa_k1_constraints = {}, .ecdsa_r1_constraints = {}, .blake2s_constraints = {}, @@ -196,7 +196,7 @@ TEST_F(MegaHonk, Databus) .range_constraints = {}, .aes128_constraints = {}, .sha256_compression = {}, - .schnorr_constraints = {}, + .ecdsa_k1_constraints = {}, .ecdsa_r1_constraints = 
{}, .blake2s_constraints = {}, @@ -301,7 +301,7 @@ TEST_F(MegaHonk, DatabusReturn) .range_constraints = {}, .aes128_constraints = {}, .sha256_compression = {}, - .schnorr_constraints = {}, + .ecdsa_k1_constraints = {}, .ecdsa_r1_constraints = {}, .blake2s_constraints = {}, diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/ec_operations.test.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/ec_operations.test.cpp index 8f254c77c0d..a9e08c2eadf 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/ec_operations.test.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/ec_operations.test.cpp @@ -68,7 +68,7 @@ TEST_F(EcOperations, TestECOperations) .range_constraints = {}, .aes128_constraints = {}, .sha256_compression = {}, - .schnorr_constraints = {}, + .ecdsa_k1_constraints = {}, .ecdsa_r1_constraints = {}, .blake2s_constraints = {}, @@ -203,7 +203,7 @@ TEST_F(EcOperations, TestECMultiScalarMul) .range_constraints = {}, .aes128_constraints = {}, .sha256_compression = {}, - .schnorr_constraints = {}, + .ecdsa_k1_constraints = {}, .ecdsa_r1_constraints = {}, .blake2s_constraints = {}, diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/ecdsa_secp256k1.test.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/ecdsa_secp256k1.test.cpp index e6a9245a640..9c088a9a9cd 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/ecdsa_secp256k1.test.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/ecdsa_secp256k1.test.cpp @@ -100,7 +100,7 @@ TEST_F(ECDSASecp256k1, TestECDSAConstraintSucceed) .range_constraints = {}, .aes128_constraints = {}, .sha256_compression = {}, - .schnorr_constraints = {}, + .ecdsa_k1_constraints = { ecdsa_k1_constraint }, .ecdsa_r1_constraints = {}, .blake2s_constraints = {}, @@ -153,7 +153,7 @@ TEST_F(ECDSASecp256k1, TestECDSACompilesForVerifier) .range_constraints = {}, .aes128_constraints = {}, .sha256_compression = {}, - .schnorr_constraints = {}, + .ecdsa_k1_constraints = { ecdsa_k1_constraint }, .ecdsa_r1_constraints = {}, .blake2s_constraints = {}, @@ -201,7 +201,7 @@ TEST_F(ECDSASecp256k1, TestECDSAConstraintFail) .range_constraints = {}, .aes128_constraints = {}, .sha256_compression = {}, - .schnorr_constraints = {}, + .ecdsa_k1_constraints = { ecdsa_k1_constraint }, .ecdsa_r1_constraints = {}, .blake2s_constraints = {}, diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/ecdsa_secp256r1.test.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/ecdsa_secp256r1.test.cpp index 077f51b1bc2..5d46d49e701 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/ecdsa_secp256r1.test.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/ecdsa_secp256r1.test.cpp @@ -134,7 +134,7 @@ TEST(ECDSASecp256r1, test_hardcoded) .range_constraints = {}, .aes128_constraints = {}, .sha256_compression = {}, - .schnorr_constraints = {}, + .ecdsa_k1_constraints = {}, .ecdsa_r1_constraints = { ecdsa_r1_constraint }, .blake2s_constraints = {}, @@ -189,7 +189,7 @@ TEST(ECDSASecp256r1, TestECDSAConstraintSucceed) .range_constraints = {}, .aes128_constraints = {}, .sha256_compression = {}, - .schnorr_constraints = {}, + .ecdsa_k1_constraints = {}, .ecdsa_r1_constraints = { ecdsa_r1_constraint }, .blake2s_constraints = {}, @@ -242,7 +242,7 @@ TEST(ECDSASecp256r1, TestECDSACompilesForVerifier) .range_constraints = {}, .aes128_constraints = {}, .sha256_compression = {}, - .schnorr_constraints = {}, + .ecdsa_k1_constraints = {}, .ecdsa_r1_constraints = { ecdsa_r1_constraint }, .blake2s_constraints = {}, @@ -291,7 
+291,7 @@ TEST(ECDSASecp256r1, TestECDSAConstraintFail) .range_constraints = {}, .aes128_constraints = {}, .sha256_compression = {}, - .schnorr_constraints = {}, + .ecdsa_k1_constraints = {}, .ecdsa_r1_constraints = { ecdsa_r1_constraint }, .blake2s_constraints = {}, diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/honk_recursion_constraint.test.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/honk_recursion_constraint.test.cpp index e431959807e..50b92017818 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/honk_recursion_constraint.test.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/honk_recursion_constraint.test.cpp @@ -96,7 +96,7 @@ class AcirHonkRecursionConstraint : public ::testing::Test { .range_constraints = { range_a, range_b }, .aes128_constraints = {}, .sha256_compression = {}, - .schnorr_constraints = {}, + .ecdsa_k1_constraints = {}, .ecdsa_r1_constraints = {}, .blake2s_constraints = {}, diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/multi_scalar_mul.test.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/multi_scalar_mul.test.cpp index fa8e711d0af..a30a79985b1 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/multi_scalar_mul.test.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/multi_scalar_mul.test.cpp @@ -68,7 +68,7 @@ TEST_F(MSMTests, TestMSM) .range_constraints = {}, .aes128_constraints = {}, .sha256_compression = {}, - .schnorr_constraints = {}, + .ecdsa_k1_constraints = {}, .ecdsa_r1_constraints = {}, .blake2s_constraints = {}, diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/poseidon2_constraint.test.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/poseidon2_constraint.test.cpp index 6cb0592d9aa..269898e1225 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/poseidon2_constraint.test.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/poseidon2_constraint.test.cpp @@ -48,7 +48,7 @@ TEST_F(Poseidon2Tests, TestPoseidon2Permutation) .range_constraints = {}, .aes128_constraints = {}, .sha256_compression = {}, - .schnorr_constraints = {}, + .ecdsa_k1_constraints = {}, .ecdsa_r1_constraints = {}, .blake2s_constraints = {}, diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/recursion_constraint.test.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/recursion_constraint.test.cpp index 0ab9970c65c..e9e8c8ace77 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/recursion_constraint.test.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/recursion_constraint.test.cpp @@ -93,7 +93,7 @@ Builder create_inner_circuit() .range_constraints = { range_a, range_b }, .aes128_constraints = {}, .sha256_compression = {}, - .schnorr_constraints = {}, + .ecdsa_k1_constraints = {}, .ecdsa_r1_constraints = {}, .blake2s_constraints = {}, @@ -255,7 +255,7 @@ Builder create_outer_circuit(std::vector& inner_circuits) .range_constraints = {}, .aes128_constraints = {}, .sha256_compression = {}, - .schnorr_constraints = {}, + .ecdsa_k1_constraints = {}, .ecdsa_r1_constraints = {}, .blake2s_constraints = {}, diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/schnorr_verify.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/schnorr_verify.cpp deleted file mode 100644 index e65224b429c..00000000000 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/schnorr_verify.cpp +++ /dev/null @@ -1,111 +0,0 @@ -#include "schnorr_verify.hpp" -#include "barretenberg/crypto/schnorr/schnorr.hpp" -#include 
"barretenberg/stdlib/encryption/schnorr/schnorr.hpp" - -namespace acir_format { - -using namespace bb; -using namespace bb::stdlib; - -template -crypto::schnorr_signature convert_signature(Builder& builder, std::array signature) -{ - - crypto::schnorr_signature signature_cr; - - // Get the witness assignment for each witness index - // Write the witness assignment to the byte_array - - for (unsigned int i = 0; i < 32; i++) { - auto witness_index = signature[i]; - - std::vector fr_bytes(sizeof(fr)); - - fr value = builder.get_variable(witness_index); - - fr::serialize_to_buffer(value, &fr_bytes[0]); - - signature_cr.s[i] = fr_bytes.back(); - } - - for (unsigned int i = 32; i < 64; i++) { - auto witness_index = signature[i]; - - std::vector fr_bytes(sizeof(fr)); - - fr value = builder.get_variable(witness_index); - - fr::serialize_to_buffer(value, &fr_bytes[0]); - - signature_cr.e[i - 32] = fr_bytes.back(); - } - - return signature_cr; -} -// vector of bytes here, assumes that the witness indices point to a field element which can be represented -// with just a byte. -// notice that this function truncates each field_element to a byte -template -stdlib::byte_array vector_of_bytes_to_byte_array(Builder& builder, std::vector vector_of_bytes) -{ - using byte_array_ct = stdlib::byte_array; - using field_ct = stdlib::field_t; - - byte_array_ct arr(&builder); - - // Get the witness assignment for each witness index - // Write the witness assignment to the byte_array - for (const auto& witness_index : vector_of_bytes) { - - field_ct element = field_ct::from_witness_index(&builder, witness_index); - size_t num_bytes = 1; - - byte_array_ct element_bytes(element, num_bytes); - arr.write(element_bytes); - } - return arr; -} - -template stdlib::witness_t index_to_witness(Builder& builder, uint32_t index) -{ - fr value = builder.get_variable(index); - return { &builder, value }; -} - -template void create_schnorr_verify_constraints(Builder& builder, const SchnorrConstraint& input) -{ - using witness_ct = stdlib::witness_t; - using cycle_group_ct = stdlib::cycle_group; - using schnorr_signature_bits_ct = stdlib::schnorr_signature_bits; - using bool_ct = stdlib::bool_t; - - auto new_sig = convert_signature(builder, input.signature); - // From ignorance, you will see me convert a bunch of witnesses from ByteArray -> BitArray - // This may not be the most efficient way to do it. It is being used as it is known to work, - // optimizations are welcome! 
- - // First convert the message of u8 witnesses into a byte_array - // Do this by taking each element as a u8 and writing it to the byte array - - auto message = vector_of_bytes_to_byte_array(builder, input.message); - - fr pubkey_value_x = builder.get_variable(input.public_key_x); - fr pubkey_value_y = builder.get_variable(input.public_key_y); - - cycle_group_ct pub_key{ witness_ct(&builder, pubkey_value_x), witness_ct(&builder, pubkey_value_y), false }; - - schnorr_signature_bits_ct sig = schnorr_convert_signature(&builder, new_sig); - - bool_ct signature_result = schnorr_signature_verification_result(message, pub_key, sig); - - bool_ct signature_result_normalized = signature_result.normalize(); - - builder.assert_equal(signature_result_normalized.witness_index, input.result); -} - -template void create_schnorr_verify_constraints(UltraCircuitBuilder& builder, - const SchnorrConstraint& input); -template void create_schnorr_verify_constraints(MegaCircuitBuilder& builder, - const SchnorrConstraint& input); - -} // namespace acir_format diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/schnorr_verify.hpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/schnorr_verify.hpp deleted file mode 100644 index b125b3375ab..00000000000 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/schnorr_verify.hpp +++ /dev/null @@ -1,52 +0,0 @@ -#pragma once -#include "barretenberg/common/serialize.hpp" -#include -#include -#include - -namespace acir_format { - -struct SchnorrConstraint { - // This is just a bunch of bytes - // which need to be interpreted as a string - // Note this must be a bunch of bytes - std::vector message; - - // This is the supposed public key which signed the - // message, giving rise to the signature - uint32_t public_key_x; - uint32_t public_key_y; - - // This is the result of verifying the signature - uint32_t result; - - // This is the computed signature - // - std::array signature; - - friend bool operator==(SchnorrConstraint const& lhs, SchnorrConstraint const& rhs) = default; -}; - -template void create_schnorr_verify_constraints(Builder& builder, const SchnorrConstraint& input); - -template inline void read(B& buf, SchnorrConstraint& constraint) -{ - using serialize::read; - read(buf, constraint.message); - read(buf, constraint.signature); - read(buf, constraint.public_key_x); - read(buf, constraint.public_key_y); - read(buf, constraint.result); -} - -template inline void write(B& buf, SchnorrConstraint const& constraint) -{ - using serialize::write; - write(buf, constraint.message); - write(buf, constraint.signature); - write(buf, constraint.public_key_x); - write(buf, constraint.public_key_y); - write(buf, constraint.result); -} - -} // namespace acir_format diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/serde/acir.hpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/serde/acir.hpp index e65e5f00f1e..fda8739c9fb 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/serde/acir.hpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/serde/acir.hpp @@ -318,18 +318,6 @@ struct BlackBoxOp { static EcdsaSecp256r1 bincodeDeserialize(std::vector); }; - struct SchnorrVerify { - Program::MemoryAddress public_key_x; - Program::MemoryAddress public_key_y; - Program::HeapVector message; - Program::HeapVector signature; - Program::MemoryAddress result; - - friend bool operator==(const SchnorrVerify&, const SchnorrVerify&); - std::vector bincodeSerialize() const; - static SchnorrVerify bincodeDeserialize(std::vector); - }; - struct 
MultiScalarMul { Program::HeapVector points; Program::HeapVector scalars; @@ -450,7 +438,6 @@ struct BlackBoxOp { Keccakf1600, EcdsaSecp256k1, EcdsaSecp256r1, - SchnorrVerify, MultiScalarMul, EmbeddedCurveAdd, BigIntAdd, @@ -855,18 +842,6 @@ struct BlackBoxFuncCall { static Blake3 bincodeDeserialize(std::vector); }; - struct SchnorrVerify { - Program::FunctionInput public_key_x; - Program::FunctionInput public_key_y; - std::array signature; - std::vector message; - Program::Witness output; - - friend bool operator==(const SchnorrVerify&, const SchnorrVerify&); - std::vector bincodeSerialize() const; - static SchnorrVerify bincodeDeserialize(std::vector); - }; - struct EcdsaSecp256k1 { std::array public_key_x; std::array public_key_y; @@ -1017,7 +992,6 @@ struct BlackBoxFuncCall { RANGE, Blake2s, Blake3, - SchnorrVerify, EcdsaSecp256k1, EcdsaSecp256r1, MultiScalarMul, @@ -2818,73 +2792,6 @@ Program::BlackBoxFuncCall::Blake3 serde::Deserializable BlackBoxFuncCall::SchnorrVerify::bincodeSerialize() const -{ - auto serializer = serde::BincodeSerializer(); - serde::Serializable::serialize(*this, serializer); - return std::move(serializer).bytes(); -} - -inline BlackBoxFuncCall::SchnorrVerify BlackBoxFuncCall::SchnorrVerify::bincodeDeserialize(std::vector input) -{ - auto deserializer = serde::BincodeDeserializer(input); - auto value = serde::Deserializable::deserialize(deserializer); - if (deserializer.get_buffer_offset() < input.size()) { - throw_or_abort("Some input bytes were not read"); - } - return value; -} - -} // end of namespace Program - -template <> -template -void serde::Serializable::serialize( - const Program::BlackBoxFuncCall::SchnorrVerify& obj, Serializer& serializer) -{ - serde::Serializable::serialize(obj.public_key_x, serializer); - serde::Serializable::serialize(obj.public_key_y, serializer); - serde::Serializable::serialize(obj.signature, serializer); - serde::Serializable::serialize(obj.message, serializer); - serde::Serializable::serialize(obj.output, serializer); -} - -template <> -template -Program::BlackBoxFuncCall::SchnorrVerify serde::Deserializable::deserialize( - Deserializer& deserializer) -{ - Program::BlackBoxFuncCall::SchnorrVerify obj; - obj.public_key_x = serde::Deserializable::deserialize(deserializer); - obj.public_key_y = serde::Deserializable::deserialize(deserializer); - obj.signature = serde::Deserializable::deserialize(deserializer); - obj.message = serde::Deserializable::deserialize(deserializer); - obj.output = serde::Deserializable::deserialize(deserializer); - return obj; -} - -namespace Program { - inline bool operator==(const BlackBoxFuncCall::EcdsaSecp256k1& lhs, const BlackBoxFuncCall::EcdsaSecp256k1& rhs) { if (!(lhs.public_key_x == rhs.public_key_x)) { @@ -4111,73 +4018,6 @@ Program::BlackBoxOp::EcdsaSecp256r1 serde::Deserializable BlackBoxOp::SchnorrVerify::bincodeSerialize() const -{ - auto serializer = serde::BincodeSerializer(); - serde::Serializable::serialize(*this, serializer); - return std::move(serializer).bytes(); -} - -inline BlackBoxOp::SchnorrVerify BlackBoxOp::SchnorrVerify::bincodeDeserialize(std::vector input) -{ - auto deserializer = serde::BincodeDeserializer(input); - auto value = serde::Deserializable::deserialize(deserializer); - if (deserializer.get_buffer_offset() < input.size()) { - throw_or_abort("Some input bytes were not read"); - } - return value; -} - -} // end of namespace Program - -template <> -template -void serde::Serializable::serialize(const Program::BlackBoxOp::SchnorrVerify& obj, - Serializer& 
serializer) -{ - serde::Serializable::serialize(obj.public_key_x, serializer); - serde::Serializable::serialize(obj.public_key_y, serializer); - serde::Serializable::serialize(obj.message, serializer); - serde::Serializable::serialize(obj.signature, serializer); - serde::Serializable::serialize(obj.result, serializer); -} - -template <> -template -Program::BlackBoxOp::SchnorrVerify serde::Deserializable::deserialize( - Deserializer& deserializer) -{ - Program::BlackBoxOp::SchnorrVerify obj; - obj.public_key_x = serde::Deserializable::deserialize(deserializer); - obj.public_key_y = serde::Deserializable::deserialize(deserializer); - obj.message = serde::Deserializable::deserialize(deserializer); - obj.signature = serde::Deserializable::deserialize(deserializer); - obj.result = serde::Deserializable::deserialize(deserializer); - return obj; -} - -namespace Program { - inline bool operator==(const BlackBoxOp::MultiScalarMul& lhs, const BlackBoxOp::MultiScalarMul& rhs) { if (!(lhs.points == rhs.points)) { diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/sha256_constraint.test.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/sha256_constraint.test.cpp index f59dfb8b9b8..6a256234353 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/sha256_constraint.test.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/sha256_constraint.test.cpp @@ -43,7 +43,7 @@ TEST_F(Sha256Tests, TestSha256Compression) .range_constraints = {}, .aes128_constraints = {}, .sha256_compression = { sha256_compression }, - .schnorr_constraints = {}, + .ecdsa_k1_constraints = {}, .ecdsa_r1_constraints = {}, .blake2s_constraints = {}, diff --git a/noir-projects/noir-contracts/contracts/schnorr_account_contract/src/main.nr b/noir-projects/noir-contracts/contracts/schnorr_account_contract/src/main.nr index 84379b702a1..fdd886d232e 100644 --- a/noir-projects/noir-contracts/contracts/schnorr_account_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/schnorr_account_contract/src/main.nr @@ -77,13 +77,13 @@ contract SchnorrAccount { signature[i] = witness[i] as u8; } + let pub_key = std::embedded_curve_ops::EmbeddedCurvePoint { + x: public_key.x, + y: public_key.y, + is_infinite: false, + }; // Verify signature of the payload bytes - std::schnorr::verify_signature( - public_key.x, - public_key.y, - signature, - outer_hash.to_be_bytes::<32>(), - ) + std::schnorr::verify_signature(pub_key, signature, outer_hash.to_be_bytes::<32>()) // docs:end:is_valid_impl } @@ -108,12 +108,13 @@ contract SchnorrAccount { for i in 0..64 { signature[i] = witness[i] as u8; } - let valid_in_private = std::schnorr::verify_signature( - public_key.x, - public_key.y, - signature, - message_hash.to_be_bytes::<32>(), - ); + let pub_key = std::embedded_curve_ops::EmbeddedCurvePoint { + x: public_key.x, + y: public_key.y, + is_infinite: false, + }; + let valid_in_private = + std::schnorr::verify_signature(pub_key, signature, message_hash.to_be_bytes::<32>()); // Compute the nullifier and check if it is spent // This will BLINDLY TRUST the oracle, but the oracle is us, and diff --git a/noir-projects/noir-contracts/contracts/schnorr_hardcoded_account_contract/src/main.nr b/noir-projects/noir-contracts/contracts/schnorr_hardcoded_account_contract/src/main.nr index 1eea24b7356..1bed3b932e7 100644 --- a/noir-projects/noir-contracts/contracts/schnorr_hardcoded_account_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/schnorr_hardcoded_account_contract/src/main.nr @@ -12,9 +12,13 @@ contract 
SchnorrHardcodedAccount { use dep::aztec::prelude::PrivateContext; use dep::aztec::macros::functions::{private, view}; + use std::embedded_curve_ops::EmbeddedCurvePoint; - global public_key_x: Field = 0x16b93f4afae55cab8507baeb8e7ab4de80f5ab1e9e1f5149bf8cd0d375451d90; - global public_key_y: Field = 0x208d44b36eb6e73b254921134d002da1a90b41131024e3b1d721259182106205; + global public_key: EmbeddedCurvePoint = EmbeddedCurvePoint { + x: 0x16b93f4afae55cab8507baeb8e7ab4de80f5ab1e9e1f5149bf8cd0d375451d90, + y: 0x208d44b36eb6e73b254921134d002da1a90b41131024e3b1d721259182106205, + is_infinite: false, + }; // Note: If you globally change the entrypoint signature don't forget to update account_entrypoint.ts #[private] @@ -41,12 +45,7 @@ contract SchnorrHardcodedAccount { } // Verify signature using hardcoded public key - std::schnorr::verify_signature( - public_key_x, - public_key_y, - signature, - outer_hash.to_be_bytes::<32>(), - ) + std::schnorr::verify_signature(public_key, signature, outer_hash.to_be_bytes::<32>()) } // docs:end:is-valid } diff --git a/noir-projects/noir-contracts/contracts/schnorr_single_key_account_contract/src/util.nr b/noir-projects/noir-contracts/contracts/schnorr_single_key_account_contract/src/util.nr index b4abeeff735..a3610085cea 100644 --- a/noir-projects/noir-contracts/contracts/schnorr_single_key_account_contract/src/util.nr +++ b/noir-projects/noir-contracts/contracts/schnorr_single_key_account_contract/src/util.nr @@ -1,16 +1,17 @@ use crate::auth_oracle::AuthWitness; use dep::aztec::prelude::AztecAddress; -use std::schnorr::verify_signature; +use std::{embedded_curve_ops::EmbeddedCurvePoint, schnorr::verify_signature}; pub fn recover_address(message_hash: Field, witness: AuthWitness) -> AztecAddress { let message_bytes: [u8; 32] = message_hash.to_be_bytes(); + let public_key = EmbeddedCurvePoint { + x: witness.keys.ivpk_m.inner.x, + y: witness.keys.ivpk_m.inner.y, + is_infinite: false, + }; + // In a single key account contract we re-used ivpk_m as signing key - let verification = verify_signature( - witness.keys.ivpk_m.inner.x, - witness.keys.ivpk_m.inner.y, - witness.signature, - message_bytes, - ); + let verification = verify_signature(public_key, witness.signature, message_bytes); assert(verification == true); AztecAddress::compute(witness.keys, witness.partial_address) diff --git a/noir/noir-repo/acvm-repo/acir/codegen/acir.cpp b/noir/noir-repo/acvm-repo/acir/codegen/acir.cpp index 2ae9a31d6ca..e94f36535d2 100644 --- a/noir/noir-repo/acvm-repo/acir/codegen/acir.cpp +++ b/noir/noir-repo/acvm-repo/acir/codegen/acir.cpp @@ -318,18 +318,6 @@ namespace Program { static EcdsaSecp256r1 bincodeDeserialize(std::vector); }; - struct SchnorrVerify { - Program::MemoryAddress public_key_x; - Program::MemoryAddress public_key_y; - Program::HeapVector message; - Program::HeapVector signature; - Program::MemoryAddress result; - - friend bool operator==(const SchnorrVerify&, const SchnorrVerify&); - std::vector bincodeSerialize() const; - static SchnorrVerify bincodeDeserialize(std::vector); - }; - struct MultiScalarMul { Program::HeapVector points; Program::HeapVector scalars; @@ -444,7 +432,7 @@ namespace Program { static ToRadix bincodeDeserialize(std::vector); }; - std::variant value; + std::variant value; friend bool operator==(const BlackBoxOp&, const BlackBoxOp&); std::vector bincodeSerialize() const; @@ -817,18 +805,6 @@ namespace Program { static Blake3 bincodeDeserialize(std::vector); }; - struct SchnorrVerify { - Program::FunctionInput public_key_x; - 
Program::FunctionInput public_key_y; - std::array signature; - std::vector message; - Program::Witness output; - - friend bool operator==(const SchnorrVerify&, const SchnorrVerify&); - std::vector bincodeSerialize() const; - static SchnorrVerify bincodeDeserialize(std::vector); - }; - struct EcdsaSecp256k1 { std::array public_key_x; std::array public_key_y; @@ -973,7 +949,7 @@ namespace Program { static Sha256Compression bincodeDeserialize(std::vector); }; - std::variant value; + std::variant value; friend bool operator==(const BlackBoxFuncCall&, const BlackBoxFuncCall&); std::vector bincodeSerialize() const; @@ -2528,56 +2504,6 @@ Program::BlackBoxFuncCall::Blake3 serde::Deserializable BlackBoxFuncCall::SchnorrVerify::bincodeSerialize() const { - auto serializer = serde::BincodeSerializer(); - serde::Serializable::serialize(*this, serializer); - return std::move(serializer).bytes(); - } - - inline BlackBoxFuncCall::SchnorrVerify BlackBoxFuncCall::SchnorrVerify::bincodeDeserialize(std::vector input) { - auto deserializer = serde::BincodeDeserializer(input); - auto value = serde::Deserializable::deserialize(deserializer); - if (deserializer.get_buffer_offset() < input.size()) { - throw serde::deserialization_error("Some input bytes were not read"); - } - return value; - } - -} // end of namespace Program - -template <> -template -void serde::Serializable::serialize(const Program::BlackBoxFuncCall::SchnorrVerify &obj, Serializer &serializer) { - serde::Serializable::serialize(obj.public_key_x, serializer); - serde::Serializable::serialize(obj.public_key_y, serializer); - serde::Serializable::serialize(obj.signature, serializer); - serde::Serializable::serialize(obj.message, serializer); - serde::Serializable::serialize(obj.output, serializer); -} - -template <> -template -Program::BlackBoxFuncCall::SchnorrVerify serde::Deserializable::deserialize(Deserializer &deserializer) { - Program::BlackBoxFuncCall::SchnorrVerify obj; - obj.public_key_x = serde::Deserializable::deserialize(deserializer); - obj.public_key_y = serde::Deserializable::deserialize(deserializer); - obj.signature = serde::Deserializable::deserialize(deserializer); - obj.message = serde::Deserializable::deserialize(deserializer); - obj.output = serde::Deserializable::deserialize(deserializer); - return obj; -} - namespace Program { inline bool operator==(const BlackBoxFuncCall::EcdsaSecp256k1 &lhs, const BlackBoxFuncCall::EcdsaSecp256k1 &rhs) { @@ -3518,56 +3444,6 @@ Program::BlackBoxOp::EcdsaSecp256r1 serde::Deserializable BlackBoxOp::SchnorrVerify::bincodeSerialize() const { - auto serializer = serde::BincodeSerializer(); - serde::Serializable::serialize(*this, serializer); - return std::move(serializer).bytes(); - } - - inline BlackBoxOp::SchnorrVerify BlackBoxOp::SchnorrVerify::bincodeDeserialize(std::vector input) { - auto deserializer = serde::BincodeDeserializer(input); - auto value = serde::Deserializable::deserialize(deserializer); - if (deserializer.get_buffer_offset() < input.size()) { - throw serde::deserialization_error("Some input bytes were not read"); - } - return value; - } - -} // end of namespace Program - -template <> -template -void serde::Serializable::serialize(const Program::BlackBoxOp::SchnorrVerify &obj, Serializer &serializer) { - serde::Serializable::serialize(obj.public_key_x, serializer); - serde::Serializable::serialize(obj.public_key_y, serializer); - serde::Serializable::serialize(obj.message, serializer); - serde::Serializable::serialize(obj.signature, serializer); - 
serde::Serializable::serialize(obj.result, serializer); -} - -template <> -template -Program::BlackBoxOp::SchnorrVerify serde::Deserializable::deserialize(Deserializer &deserializer) { - Program::BlackBoxOp::SchnorrVerify obj; - obj.public_key_x = serde::Deserializable::deserialize(deserializer); - obj.public_key_y = serde::Deserializable::deserialize(deserializer); - obj.message = serde::Deserializable::deserialize(deserializer); - obj.signature = serde::Deserializable::deserialize(deserializer); - obj.result = serde::Deserializable::deserialize(deserializer); - return obj; -} - namespace Program { inline bool operator==(const BlackBoxOp::MultiScalarMul &lhs, const BlackBoxOp::MultiScalarMul &rhs) { diff --git a/noir/noir-repo/acvm-repo/acir/src/circuit/black_box_functions.rs b/noir/noir-repo/acvm-repo/acir/src/circuit/black_box_functions.rs index 25842c14dbc..700589d2040 100644 --- a/noir/noir-repo/acvm-repo/acir/src/circuit/black_box_functions.rs +++ b/noir/noir-repo/acvm-repo/acir/src/circuit/black_box_functions.rs @@ -51,29 +51,6 @@ pub enum BlackBoxFunc { /// (witness, 8), constrained to be the blake3 of the inputs. Blake3, - /// Verify a Schnorr signature over the embedded curve - /// - inputs are: - /// - Public key as 2 (witness, 254) - /// - signature as a vector of 64 bytes (witness, 8) - /// - message as a vector of (witness, 8) - /// - output: A witness representing the result of the signature - /// verification; 0 for failure and 1 for success. - /// - /// Since the scalar field of the embedded curve is NOT the ACIR field, the - /// `(r,s)` signature is represented as a 64 bytes array for the two field - /// elements. On the other hand, the public key coordinates are ACIR fields. - /// The proving system decides how the message is to be hashed. Barretenberg - /// uses Blake2s. - /// - /// Verifies a Schnorr signature over a curve which is "pairing friendly" - /// with the curve on which the ACIR circuit is defined. - /// - /// The exact curve which this signature uses will vary based on the curve - /// being used by ACIR. For example, the BN254 curve supports Schnorr - /// signatures over the [Grumpkin][grumpkin] curve. - /// - /// [grumpkin]: https://hackmd.io/@aztec-network/ByzgNxBfd#2-Grumpkin---A-curve-on-top-of-BN-254-for-SNARK-efficient-group-operations - SchnorrVerify, /// Verifies a ECDSA signature over the secp256k1 curve. /// - inputs: /// - x coordinate of public key as 32 bytes @@ -81,11 +58,6 @@ pub enum BlackBoxFunc { /// - the signature, as a 64 bytes array /// - the hash of the message, as a vector of bytes /// - output: 0 for failure and 1 for success - /// - /// Inputs and outputs are similar to SchnorrVerify, except that because we - /// use a different curve (secp256k1), the field elements involved in the - /// signature and the public key are defined as an array of 32 bytes. - /// Another difference is that we assume the message is already hashed. EcdsaSecp256k1, /// Verifies a ECDSA signature over the secp256r1 curve. 
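The hunks below keep BlackBoxFunc::name() and BlackBoxFunc::lookup() consistent once the SchnorrVerify entry is gone. As a minimal sketch of the round-trip property those two tables preserve (the acir::BlackBoxFunc import path and the PartialEq derive are assumptions, not shown in this diff):

use acir::BlackBoxFunc; // path assumed; this is the enum edited in the hunks below

// Every remaining black box function maps to a snake_case name that
// lookup() should resolve back to the same variant.
fn name_lookup_round_trips(func: BlackBoxFunc) -> bool {
    BlackBoxFunc::lookup(func.name()) == Some(func)
}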
@@ -196,7 +168,6 @@ impl BlackBoxFunc { pub fn name(&self) -> &'static str { match self { BlackBoxFunc::AES128Encrypt => "aes128_encrypt", - BlackBoxFunc::SchnorrVerify => "schnorr_verify", BlackBoxFunc::Blake2s => "blake2s", BlackBoxFunc::Blake3 => "blake3", BlackBoxFunc::EcdsaSecp256k1 => "ecdsa_secp256k1", @@ -222,7 +193,6 @@ impl BlackBoxFunc { pub fn lookup(op_name: &str) -> Option { match op_name { "aes128_encrypt" => Some(BlackBoxFunc::AES128Encrypt), - "schnorr_verify" => Some(BlackBoxFunc::SchnorrVerify), "blake2s" => Some(BlackBoxFunc::Blake2s), "blake3" => Some(BlackBoxFunc::Blake3), "ecdsa_secp256k1" => Some(BlackBoxFunc::EcdsaSecp256k1), diff --git a/noir/noir-repo/acvm-repo/acir/src/circuit/mod.rs b/noir/noir-repo/acvm-repo/acir/src/circuit/mod.rs index 33982065c2a..6282a33af6b 100644 --- a/noir/noir-repo/acvm-repo/acir/src/circuit/mod.rs +++ b/noir/noir-repo/acvm-repo/acir/src/circuit/mod.rs @@ -406,29 +406,12 @@ mod tests { Opcode::BlackBoxFuncCall(BlackBoxFuncCall::Keccakf1600 { inputs, outputs }) } - fn schnorr_verify_opcode() -> Opcode { - let public_key_x = FunctionInput::witness(Witness(1), FieldElement::max_num_bits()); - let public_key_y = FunctionInput::witness(Witness(2), FieldElement::max_num_bits()); - let signature: Box<[FunctionInput; 64]> = - Box::new(std::array::from_fn(|i| FunctionInput::witness(Witness(i as u32 + 3), 8))); - let message: Vec> = vec![FunctionInput::witness(Witness(67), 8)]; - let output = Witness(68); - - Opcode::BlackBoxFuncCall(BlackBoxFuncCall::SchnorrVerify { - public_key_x, - public_key_y, - signature, - message, - output, - }) - } - #[test] fn serialization_roundtrip() { let circuit = Circuit { current_witness_index: 5, expression_width: ExpressionWidth::Unbounded, - opcodes: vec![and_opcode::(), range_opcode(), schnorr_verify_opcode()], + opcodes: vec![and_opcode::(), range_opcode()], private_parameters: BTreeSet::new(), public_parameters: PublicInputs(BTreeSet::from_iter(vec![Witness(2), Witness(12)])), return_values: PublicInputs(BTreeSet::from_iter(vec![Witness(4), Witness(12)])), @@ -462,7 +445,6 @@ mod tests { range_opcode(), and_opcode(), keccakf1600_opcode(), - schnorr_verify_opcode(), ], private_parameters: BTreeSet::new(), public_parameters: PublicInputs(BTreeSet::from_iter(vec![Witness(2)])), diff --git a/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs b/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs index fa51caf5155..dfdf9616306 100644 --- a/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs +++ b/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs @@ -108,17 +108,6 @@ pub enum BlackBoxFuncCall { inputs: Vec>, outputs: Box<[Witness; 32]>, }, - SchnorrVerify { - public_key_x: FunctionInput, - public_key_y: FunctionInput, - #[serde( - serialize_with = "serialize_big_array", - deserialize_with = "deserialize_big_array_into_box" - )] - signature: Box<[FunctionInput; 64]>, - message: Vec>, - output: Witness, - }, EcdsaSecp256k1 { public_key_x: Box<[FunctionInput; 32]>, public_key_y: Box<[FunctionInput; 32]>, @@ -234,7 +223,6 @@ impl BlackBoxFuncCall { BlackBoxFuncCall::RANGE { .. } => BlackBoxFunc::RANGE, BlackBoxFuncCall::Blake2s { .. } => BlackBoxFunc::Blake2s, BlackBoxFuncCall::Blake3 { .. } => BlackBoxFunc::Blake3, - BlackBoxFuncCall::SchnorrVerify { .. } => BlackBoxFunc::SchnorrVerify, BlackBoxFuncCall::EcdsaSecp256k1 { .. } => BlackBoxFunc::EcdsaSecp256k1, BlackBoxFuncCall::EcdsaSecp256r1 { .. 
} => BlackBoxFunc::EcdsaSecp256r1, BlackBoxFuncCall::MultiScalarMul { .. } => BlackBoxFunc::MultiScalarMul, @@ -288,21 +276,6 @@ impl BlackBoxFuncCall { vec![input1[0], input1[1], input2[0], input2[1]] } BlackBoxFuncCall::RANGE { input } => vec![*input], - BlackBoxFuncCall::SchnorrVerify { - public_key_x, - public_key_y, - signature, - message, - .. - } => { - let mut inputs: Vec> = - Vec::with_capacity(2 + signature.len() + message.len()); - inputs.push(*public_key_x); - inputs.push(*public_key_y); - inputs.extend(signature.iter().copied()); - inputs.extend(message.iter().copied()); - inputs - } BlackBoxFuncCall::EcdsaSecp256k1 { public_key_x, public_key_y, @@ -372,7 +345,6 @@ impl BlackBoxFuncCall { BlackBoxFuncCall::AND { output, .. } | BlackBoxFuncCall::XOR { output, .. } - | BlackBoxFuncCall::SchnorrVerify { output, .. } | BlackBoxFuncCall::EcdsaSecp256k1 { output, .. } | BlackBoxFuncCall::EcdsaSecp256r1 { output, .. } => vec![*output], BlackBoxFuncCall::MultiScalarMul { outputs, .. } @@ -525,22 +497,6 @@ mod tests { Opcode::BlackBoxFuncCall(BlackBoxFuncCall::Keccakf1600 { inputs, outputs }) } - fn schnorr_verify_opcode() -> Opcode { - let public_key_x = FunctionInput::witness(Witness(1), FieldElement::max_num_bits()); - let public_key_y = FunctionInput::witness(Witness(2), FieldElement::max_num_bits()); - let signature: Box<[FunctionInput; 64]> = - Box::new(std::array::from_fn(|i| FunctionInput::witness(Witness(i as u32 + 3), 8))); - let message: Vec> = vec![FunctionInput::witness(Witness(67), 8)]; - let output = Witness(68); - - Opcode::BlackBoxFuncCall(BlackBoxFuncCall::SchnorrVerify { - public_key_x, - public_key_y, - signature, - message, - output, - }) - } #[test] fn keccakf1600_serialization_roundtrip() { @@ -549,12 +505,4 @@ mod tests { let recovered_opcode = bincode::deserialize(&buf).unwrap(); assert_eq!(opcode, recovered_opcode); } - - #[test] - fn schnorr_serialization_roundtrip() { - let opcode = schnorr_verify_opcode::(); - let buf = bincode::serialize(&opcode).unwrap(); - let recovered_opcode = bincode::deserialize(&buf).unwrap(); - assert_eq!(opcode, recovered_opcode); - } } diff --git a/noir/noir-repo/acvm-repo/acir/tests/test_program_serialization.rs b/noir/noir-repo/acvm-repo/acir/tests/test_program_serialization.rs index 002bad0e7f3..305d94abcee 100644 --- a/noir/noir-repo/acvm-repo/acir/tests/test_program_serialization.rs +++ b/noir/noir-repo/acvm-repo/acir/tests/test_program_serialization.rs @@ -93,67 +93,10 @@ fn multi_scalar_mul_circuit() { let bytes = Program::serialize_program(&program); let expected_serialization: Vec = vec![ - 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 93, 77, 9, 10, 0, 32, 8, 243, 236, 248, 255, 127, 35, - 163, 5, 35, 97, 184, 205, 169, 42, 183, 102, 65, 193, 21, 218, 73, 31, 44, 116, 35, 238, - 228, 189, 108, 208, 60, 193, 91, 161, 23, 6, 114, 73, 121, 195, 157, 32, 95, 232, 255, 191, - 203, 181, 1, 243, 231, 24, 106, 192, 0, 0, 0, - ]; - - assert_eq!(bytes, expected_serialization) -} - -#[test] -fn schnorr_verify_circuit() { - let public_key_x = FunctionInput::witness(Witness(1), FieldElement::max_num_bits()); - let public_key_y = FunctionInput::witness(Witness(2), FieldElement::max_num_bits()); - let signature: [FunctionInput; 64] = (3..(3 + 64)) - .map(|i| FunctionInput::witness(Witness(i), 8)) - .collect::>() - .try_into() - .unwrap(); - let message = - ((3 + 64)..(3 + 64 + 10)).map(|i| FunctionInput::witness(Witness(i), 8)).collect(); - let output = Witness(3 + 64 + 10); - let last_input = output.witness_index() - 1; - - let schnorr = 
Opcode::BlackBoxFuncCall(BlackBoxFuncCall::SchnorrVerify { - public_key_x, - public_key_y, - signature: Box::new(signature), - message, - output, - }); - - let circuit: Circuit = Circuit { - current_witness_index: 100, - opcodes: vec![schnorr], - private_parameters: BTreeSet::from_iter((1..=last_input).map(Witness)), - return_values: PublicInputs(BTreeSet::from([output])), - ..Circuit::default() - }; - let program = Program { functions: vec![circuit], unconstrained_functions: vec![] }; - - let bytes = Program::serialize_program(&program); - - let expected_serialization: Vec = vec![ - 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 77, 211, 103, 78, 2, 81, 24, 70, 225, 193, 130, 96, 239, - 189, 96, 239, 189, 35, 34, 34, 34, 82, 118, 193, 254, 151, 64, 224, 132, 111, 146, 67, 50, - 153, 39, 250, 3, 114, 239, 121, 51, 201, 240, 211, 29, 60, 153, 48, 239, 108, 188, 121, - 122, 241, 30, 145, 71, 7, 79, 46, 60, 38, 143, 203, 89, 121, 66, 206, 201, 121, 121, 82, - 158, 146, 167, 229, 25, 121, 86, 158, 147, 231, 229, 5, 121, 81, 94, 146, 151, 229, 21, - 121, 85, 94, 147, 215, 229, 13, 121, 83, 222, 146, 183, 229, 29, 121, 87, 222, 147, 11, - 242, 190, 124, 32, 31, 202, 71, 242, 177, 124, 34, 159, 202, 103, 242, 185, 124, 33, 95, - 202, 87, 242, 181, 124, 35, 223, 202, 119, 242, 189, 252, 32, 63, 202, 79, 242, 179, 252, - 34, 191, 202, 111, 242, 187, 92, 148, 63, 228, 146, 252, 41, 151, 229, 47, 185, 34, 127, - 203, 213, 48, 157, 38, 241, 183, 31, 253, 191, 38, 255, 202, 117, 249, 79, 110, 200, 255, - 114, 83, 110, 201, 237, 112, 39, 190, 191, 173, 223, 193, 54, 217, 36, 91, 100, 131, 108, - 47, 221, 92, 62, 126, 51, 155, 98, 75, 108, 136, 237, 176, 25, 182, 194, 70, 216, 6, 155, - 96, 11, 108, 128, 246, 105, 158, 214, 105, 156, 182, 105, 154, 150, 105, 152, 118, 105, - 182, 144, 12, 27, 165, 77, 154, 164, 69, 26, 164, 61, 154, 163, 53, 26, 163, 45, 154, 162, - 37, 26, 162, 29, 154, 161, 21, 26, 161, 13, 154, 160, 5, 26, 224, 238, 185, 115, 238, 154, - 59, 46, 198, 157, 150, 226, 14, 203, 113, 103, 149, 184, 163, 106, 220, 69, 45, 206, 190, - 30, 103, 221, 136, 179, 109, 198, 89, 166, 103, 150, 158, 91, 162, 243, 244, 167, 15, 14, - 161, 226, 6, 24, 5, 0, 0, + 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 93, 77, 9, 10, 0, 48, 8, 114, 107, 231, 255, 255, 59, + 86, 204, 64, 22, 136, 102, 89, 5, 175, 182, 163, 80, 7, 47, 135, 73, 31, 56, 228, 42, 218, + 196, 203, 221, 38, 243, 78, 61, 28, 147, 119, 65, 31, 146, 53, 230, 210, 135, 252, 255, + 179, 90, 23, 212, 196, 199, 187, 192, 0, 0, 0, ]; assert_eq!(bytes, expected_serialization) diff --git a/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/mod.rs b/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/mod.rs index c3b1627ba65..5137b18179b 100644 --- a/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/mod.rs +++ b/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/mod.rs @@ -27,10 +27,7 @@ use embedded_curve_ops::{embedded_curve_add, multi_scalar_mul}; use hash::{solve_generic_256_hash_opcode, solve_sha_256_permutation_opcode}; use logic::{and, xor}; pub(crate) use range::solve_range_opcode; -use signature::{ - ecdsa::{secp256k1_prehashed, secp256r1_prehashed}, - schnorr::schnorr_verify, -}; +use signature::ecdsa::{secp256k1_prehashed, secp256r1_prehashed}; /// Check if all of the inputs to the function have assignments /// @@ -103,21 +100,6 @@ pub(crate) fn solve( } Ok(()) } - BlackBoxFuncCall::SchnorrVerify { - public_key_x, - public_key_y, - signature, - message, - output, - } => schnorr_verify( - backend, - initial_witness, - *public_key_x, - *public_key_y, - signature.as_ref(), - 
message, - *output, - ), BlackBoxFuncCall::EcdsaSecp256k1 { public_key_x, public_key_y, diff --git a/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/signature/mod.rs b/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/signature/mod.rs index 0cfb96740b8..b36ff499c6a 100644 --- a/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/signature/mod.rs +++ b/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/signature/mod.rs @@ -1,2 +1 @@ pub(super) mod ecdsa; -pub(super) mod schnorr; diff --git a/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/signature/schnorr.rs b/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/signature/schnorr.rs deleted file mode 100644 index a856303d065..00000000000 --- a/noir/noir-repo/acvm-repo/acvm/src/pwg/blackbox/signature/schnorr.rs +++ /dev/null @@ -1,36 +0,0 @@ -use crate::{ - pwg::{ - blackbox::utils::{to_u8_array, to_u8_vec}, - input_to_value, insert_value, OpcodeResolutionError, - }, - BlackBoxFunctionSolver, -}; -use acir::{ - circuit::opcodes::FunctionInput, - native_types::{Witness, WitnessMap}, - AcirField, -}; - -#[allow(clippy::too_many_arguments)] -pub(crate) fn schnorr_verify( - backend: &impl BlackBoxFunctionSolver, - initial_witness: &mut WitnessMap, - public_key_x: FunctionInput, - public_key_y: FunctionInput, - signature: &[FunctionInput; 64], - message: &[FunctionInput], - output: Witness, -) -> Result<(), OpcodeResolutionError> { - let public_key_x: &F = &input_to_value(initial_witness, public_key_x, false)?; - let public_key_y: &F = &input_to_value(initial_witness, public_key_y, false)?; - - let signature = to_u8_array(initial_witness, signature)?; - let message = to_u8_vec(initial_witness, message)?; - - let valid_signature = - backend.schnorr_verify(public_key_x, public_key_y, &signature, &message)?; - - insert_value(&output, F::from(valid_signature), initial_witness)?; - - Ok(()) -} diff --git a/noir/noir-repo/acvm-repo/acvm_js/test/browser/execute_circuit.test.ts b/noir/noir-repo/acvm-repo/acvm_js/test/browser/execute_circuit.test.ts index aaa82f8f1e5..4d8f0acbd38 100644 --- a/noir/noir-repo/acvm-repo/acvm_js/test/browser/execute_circuit.test.ts +++ b/noir/noir-repo/acvm-repo/acvm_js/test/browser/execute_circuit.test.ts @@ -85,16 +85,6 @@ it('successfully executes a MultiScalarMul opcode', async () => { expect(solvedWitness).to.be.deep.eq(expectedWitnessMap); }); -it('successfully executes a SchnorrVerify opcode', async () => { - const { bytecode, initialWitnessMap, expectedWitnessMap } = await import('../shared/schnorr_verify'); - - const solvedWitness: WitnessMap = await executeCircuit(bytecode, initialWitnessMap, () => { - throw Error('unexpected oracle'); - }); - - expect(solvedWitness).to.be.deep.eq(expectedWitnessMap); -}); - it('successfully executes a MemoryOp opcode', async () => { const { bytecode, initialWitnessMap, expectedWitnessMap } = await import('../shared/memory_op'); diff --git a/noir/noir-repo/acvm-repo/acvm_js/test/node/execute_circuit.test.ts b/noir/noir-repo/acvm-repo/acvm_js/test/node/execute_circuit.test.ts index 120ad0fa738..67f7de2129c 100644 --- a/noir/noir-repo/acvm-repo/acvm_js/test/node/execute_circuit.test.ts +++ b/noir/noir-repo/acvm-repo/acvm_js/test/node/execute_circuit.test.ts @@ -86,16 +86,6 @@ it('successfully executes a MultiScalarMul opcode', async () => { expect(solvedWitness).to.be.deep.eq(expectedWitnessMap); }); -it('successfully executes a SchnorrVerify opcode', async () => { - const { bytecode, initialWitnessMap, expectedWitnessMap } = await import('../shared/schnorr_verify'); - - const solvedWitness: WitnessMap 
= await executeCircuit(bytecode, initialWitnessMap, () => { - throw Error('unexpected oracle'); - }); - - expect(solvedWitness).to.be.deep.eq(expectedWitnessMap); -}); - it('successfully executes a MemoryOp opcode', async () => { const { bytecode, initialWitnessMap, expectedWitnessMap } = await import('../shared/memory_op'); diff --git a/noir/noir-repo/acvm-repo/acvm_js/test/shared/multi_scalar_mul.ts b/noir/noir-repo/acvm-repo/acvm_js/test/shared/multi_scalar_mul.ts index 3ec589dd0c8..fac77e4ee27 100644 --- a/noir/noir-repo/acvm-repo/acvm_js/test/shared/multi_scalar_mul.ts +++ b/noir/noir-repo/acvm-repo/acvm_js/test/shared/multi_scalar_mul.ts @@ -1,8 +1,8 @@ // See `multi_scalar_mul_circuit` integration test in `acir/tests/test_program_serialization.rs`. export const bytecode = Uint8Array.from([ - 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 93, 77, 9, 10, 0, 32, 8, 243, 236, 248, 255, 127, 35, 163, 5, 35, 97, 184, 205, - 169, 42, 183, 102, 65, 193, 21, 218, 73, 31, 44, 116, 35, 238, 228, 189, 108, 208, 60, 193, 91, 161, 23, 6, 114, 73, - 121, 195, 157, 32, 95, 232, 255, 191, 203, 181, 1, 243, 231, 24, 106, 192, 0, 0, 0, + 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 93, 77, 9, 10, 0, 48, 8, 114, 107, 231, 255, 255, 59, 86, 204, 64, 22, 136, 102, + 89, 5, 175, 182, 163, 80, 7, 47, 135, 73, 31, 56, 228, 42, 218, 196, 203, 221, 38, 243, 78, 61, 28, 147, 119, 65, 31, + 146, 53, 230, 210, 135, 252, 255, 179, 90, 23, 212, 196, 199, 187, 192, 0, 0, 0, ]); export const initialWitnessMap = new Map([ [1, '0x0000000000000000000000000000000000000000000000000000000000000001'], diff --git a/noir/noir-repo/acvm-repo/acvm_js/test/shared/schnorr_verify.ts b/noir/noir-repo/acvm-repo/acvm_js/test/shared/schnorr_verify.ts deleted file mode 100644 index d2df63a8ddb..00000000000 --- a/noir/noir-repo/acvm-repo/acvm_js/test/shared/schnorr_verify.ts +++ /dev/null @@ -1,101 +0,0 @@ -// See `schnorr_verify_circuit` integration test in `acir/tests/test_program_serialization.rs`. 
-export const bytecode = Uint8Array.from([ - 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 77, 211, 103, 78, 2, 81, 24, 70, 225, 193, 130, 96, 239, 189, 96, 239, 189, 35, 34, - 34, 34, 82, 118, 193, 254, 151, 64, 224, 132, 111, 146, 67, 50, 153, 39, 250, 3, 114, 239, 121, 51, 201, 240, 211, 29, - 60, 153, 48, 239, 108, 188, 121, 122, 241, 30, 145, 71, 7, 79, 46, 60, 38, 143, 203, 89, 121, 66, 206, 201, 121, 121, - 82, 158, 146, 167, 229, 25, 121, 86, 158, 147, 231, 229, 5, 121, 81, 94, 146, 151, 229, 21, 121, 85, 94, 147, 215, - 229, 13, 121, 83, 222, 146, 183, 229, 29, 121, 87, 222, 147, 11, 242, 190, 124, 32, 31, 202, 71, 242, 177, 124, 34, - 159, 202, 103, 242, 185, 124, 33, 95, 202, 87, 242, 181, 124, 35, 223, 202, 119, 242, 189, 252, 32, 63, 202, 79, 242, - 179, 252, 34, 191, 202, 111, 242, 187, 92, 148, 63, 228, 146, 252, 41, 151, 229, 47, 185, 34, 127, 203, 213, 48, 157, - 38, 241, 183, 31, 253, 191, 38, 255, 202, 117, 249, 79, 110, 200, 255, 114, 83, 110, 201, 237, 112, 39, 190, 191, 173, - 223, 193, 54, 217, 36, 91, 100, 131, 108, 47, 221, 92, 62, 126, 51, 155, 98, 75, 108, 136, 237, 176, 25, 182, 194, 70, - 216, 6, 155, 96, 11, 108, 128, 246, 105, 158, 214, 105, 156, 182, 105, 154, 150, 105, 152, 118, 105, 182, 144, 12, 27, - 165, 77, 154, 164, 69, 26, 164, 61, 154, 163, 53, 26, 163, 45, 154, 162, 37, 26, 162, 29, 154, 161, 21, 26, 161, 13, - 154, 160, 5, 26, 224, 238, 185, 115, 238, 154, 59, 46, 198, 157, 150, 226, 14, 203, 113, 103, 149, 184, 163, 106, 220, - 69, 45, 206, 190, 30, 103, 221, 136, 179, 109, 198, 89, 166, 103, 150, 158, 91, 162, 243, 244, 167, 15, 14, 161, 226, - 6, 24, 5, 0, 0, -]); - -export const initialWitnessMap = new Map([ - [1, '0x04b260954662e97f00cab9adb773a259097f7a274b83b113532bce27fa3fb96a'], - [2, '0x2fd51571db6c08666b0edfbfbc57d432068bccd0110a39b166ab243da0037197'], - [3, '0x000000000000000000000000000000000000000000000000000000000000002e'], - [4, '0x00000000000000000000000000000000000000000000000000000000000000ce'], - [5, '0x0000000000000000000000000000000000000000000000000000000000000052'], - [6, '0x00000000000000000000000000000000000000000000000000000000000000aa'], - [7, '0x0000000000000000000000000000000000000000000000000000000000000087'], - [8, '0x000000000000000000000000000000000000000000000000000000000000002a'], - [9, '0x0000000000000000000000000000000000000000000000000000000000000049'], - [10, '0x000000000000000000000000000000000000000000000000000000000000009d'], - [11, '0x0000000000000000000000000000000000000000000000000000000000000050'], - [12, '0x000000000000000000000000000000000000000000000000000000000000007c'], - [13, '0x000000000000000000000000000000000000000000000000000000000000009a'], - [14, '0x00000000000000000000000000000000000000000000000000000000000000aa'], - [15, '0x00000000000000000000000000000000000000000000000000000000000000df'], - [16, '0x0000000000000000000000000000000000000000000000000000000000000023'], - [17, '0x0000000000000000000000000000000000000000000000000000000000000034'], - [18, '0x0000000000000000000000000000000000000000000000000000000000000010'], - [19, '0x000000000000000000000000000000000000000000000000000000000000008a'], - [20, '0x0000000000000000000000000000000000000000000000000000000000000047'], - [21, '0x0000000000000000000000000000000000000000000000000000000000000063'], - [22, '0x00000000000000000000000000000000000000000000000000000000000000e8'], - [23, '0x0000000000000000000000000000000000000000000000000000000000000037'], - [24, '0x0000000000000000000000000000000000000000000000000000000000000054'], - [25, 
'0x0000000000000000000000000000000000000000000000000000000000000096'], - [26, '0x000000000000000000000000000000000000000000000000000000000000003e'], - [27, '0x00000000000000000000000000000000000000000000000000000000000000d5'], - [28, '0x00000000000000000000000000000000000000000000000000000000000000ae'], - [29, '0x0000000000000000000000000000000000000000000000000000000000000024'], - [30, '0x000000000000000000000000000000000000000000000000000000000000002d'], - [31, '0x0000000000000000000000000000000000000000000000000000000000000020'], - [32, '0x0000000000000000000000000000000000000000000000000000000000000080'], - [33, '0x000000000000000000000000000000000000000000000000000000000000004d'], - [34, '0x0000000000000000000000000000000000000000000000000000000000000047'], - [35, '0x00000000000000000000000000000000000000000000000000000000000000a5'], - [36, '0x00000000000000000000000000000000000000000000000000000000000000bb'], - [37, '0x00000000000000000000000000000000000000000000000000000000000000f6'], - [38, '0x00000000000000000000000000000000000000000000000000000000000000c3'], - [39, '0x000000000000000000000000000000000000000000000000000000000000000b'], - [40, '0x000000000000000000000000000000000000000000000000000000000000003b'], - [41, '0x0000000000000000000000000000000000000000000000000000000000000065'], - [42, '0x00000000000000000000000000000000000000000000000000000000000000c9'], - [43, '0x0000000000000000000000000000000000000000000000000000000000000001'], - [44, '0x0000000000000000000000000000000000000000000000000000000000000085'], - [45, '0x0000000000000000000000000000000000000000000000000000000000000006'], - [46, '0x000000000000000000000000000000000000000000000000000000000000009e'], - [47, '0x000000000000000000000000000000000000000000000000000000000000002f'], - [48, '0x0000000000000000000000000000000000000000000000000000000000000010'], - [49, '0x00000000000000000000000000000000000000000000000000000000000000e6'], - [50, '0x0000000000000000000000000000000000000000000000000000000000000030'], - [51, '0x000000000000000000000000000000000000000000000000000000000000004a'], - [52, '0x0000000000000000000000000000000000000000000000000000000000000018'], - [53, '0x000000000000000000000000000000000000000000000000000000000000007c'], - [54, '0x00000000000000000000000000000000000000000000000000000000000000d0'], - [55, '0x00000000000000000000000000000000000000000000000000000000000000ab'], - [56, '0x0000000000000000000000000000000000000000000000000000000000000031'], - [57, '0x00000000000000000000000000000000000000000000000000000000000000d5'], - [58, '0x0000000000000000000000000000000000000000000000000000000000000063'], - [59, '0x0000000000000000000000000000000000000000000000000000000000000084'], - [60, '0x00000000000000000000000000000000000000000000000000000000000000a3'], - [61, '0x00000000000000000000000000000000000000000000000000000000000000a6'], - [62, '0x00000000000000000000000000000000000000000000000000000000000000d5'], - [63, '0x0000000000000000000000000000000000000000000000000000000000000091'], - [64, '0x000000000000000000000000000000000000000000000000000000000000000d'], - [65, '0x000000000000000000000000000000000000000000000000000000000000009c'], - [66, '0x00000000000000000000000000000000000000000000000000000000000000f9'], - [67, '0x0000000000000000000000000000000000000000000000000000000000000000'], - [68, '0x0000000000000000000000000000000000000000000000000000000000000001'], - [69, '0x0000000000000000000000000000000000000000000000000000000000000002'], - [70, 
'0x0000000000000000000000000000000000000000000000000000000000000003'], - [71, '0x0000000000000000000000000000000000000000000000000000000000000004'], - [72, '0x0000000000000000000000000000000000000000000000000000000000000005'], - [73, '0x0000000000000000000000000000000000000000000000000000000000000006'], - [74, '0x0000000000000000000000000000000000000000000000000000000000000007'], - [75, '0x0000000000000000000000000000000000000000000000000000000000000008'], - [76, '0x0000000000000000000000000000000000000000000000000000000000000009'], -]); - -export const expectedWitnessMap = new Map(initialWitnessMap).set( - 77, - '0x0000000000000000000000000000000000000000000000000000000000000001', -); diff --git a/noir/noir-repo/acvm-repo/blackbox_solver/src/curve_specific_solver.rs b/noir/noir-repo/acvm-repo/blackbox_solver/src/curve_specific_solver.rs index 869017f52ee..b8fc3f47033 100644 --- a/noir/noir-repo/acvm-repo/blackbox_solver/src/curve_specific_solver.rs +++ b/noir/noir-repo/acvm-repo/blackbox_solver/src/curve_specific_solver.rs @@ -7,13 +7,6 @@ use crate::BlackBoxResolutionError; /// /// Returns an [`BlackBoxResolutionError`] if the backend does not support the given [`acir::BlackBoxFunc`]. pub trait BlackBoxFunctionSolver { - fn schnorr_verify( - &self, - public_key_x: &F, - public_key_y: &F, - signature: &[u8; 64], - message: &[u8], - ) -> Result; fn multi_scalar_mul( &self, points: &[F], @@ -48,15 +41,6 @@ impl StubbedBlackBoxSolver { } impl BlackBoxFunctionSolver for StubbedBlackBoxSolver { - fn schnorr_verify( - &self, - _public_key_x: &F, - _public_key_y: &F, - _signature: &[u8; 64], - _message: &[u8], - ) -> Result { - Err(Self::fail(BlackBoxFunc::SchnorrVerify)) - } fn multi_scalar_mul( &self, _points: &[F], diff --git a/noir/noir-repo/acvm-repo/bn254_blackbox_solver/benches/criterion.rs b/noir/noir-repo/acvm-repo/bn254_blackbox_solver/benches/criterion.rs index e7917fa1adc..8bf239eec8a 100644 --- a/noir/noir-repo/acvm-repo/bn254_blackbox_solver/benches/criterion.rs +++ b/noir/noir-repo/acvm-repo/bn254_blackbox_solver/benches/criterion.rs @@ -13,40 +13,10 @@ fn bench_poseidon2(c: &mut Criterion) { c.bench_function("poseidon2", |b| b.iter(|| poseidon2_permutation(black_box(&inputs), 4))); } -fn bench_schnorr_verify(c: &mut Criterion) { - let pub_key_x = FieldElement::from_hex( - "0x04b260954662e97f00cab9adb773a259097f7a274b83b113532bce27fa3fb96a", - ) - .unwrap(); - let pub_key_y = FieldElement::from_hex( - "0x2fd51571db6c08666b0edfbfbc57d432068bccd0110a39b166ab243da0037197", - ) - .unwrap(); - let sig_bytes: [u8; 64] = [ - 1, 13, 119, 112, 212, 39, 233, 41, 84, 235, 255, 93, 245, 172, 186, 83, 157, 253, 76, 77, - 33, 128, 178, 15, 214, 67, 105, 107, 177, 234, 77, 48, 27, 237, 155, 84, 39, 84, 247, 27, - 22, 8, 176, 230, 24, 115, 145, 220, 254, 122, 135, 179, 171, 4, 214, 202, 64, 199, 19, 84, - 239, 138, 124, 12, - ]; - - let message: &[u8] = &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]; - - c.bench_function("schnorr_verify", |b| { - b.iter(|| { - Bn254BlackBoxSolver.schnorr_verify( - black_box(&pub_key_x), - black_box(&pub_key_y), - black_box(&sig_bytes), - black_box(message), - ) - }) - }); -} - criterion_group!( name = benches; config = Criterion::default().sample_size(40).measurement_time(Duration::from_secs(20)).with_profiler(PProfProfiler::new(100, Output::Flamegraph(None))); - targets = bench_poseidon2, bench_schnorr_verify + targets = bench_poseidon2 ); criterion_main!(benches); diff --git a/noir/noir-repo/acvm-repo/bn254_blackbox_solver/src/embedded_curve_ops.rs 
b/noir/noir-repo/acvm-repo/bn254_blackbox_solver/src/embedded_curve_ops.rs index a02711fda1e..e599fd25593 100644 --- a/noir/noir-repo/acvm-repo/bn254_blackbox_solver/src/embedded_curve_ops.rs +++ b/noir/noir-repo/acvm-repo/bn254_blackbox_solver/src/embedded_curve_ops.rs @@ -1,6 +1,5 @@ // TODO(https://github.com/noir-lang/noir/issues/4932): rename this file to something more generic use ark_ec::AffineRepr; -use ark_ff::MontConfig; use num_bigint::BigUint; use crate::FieldElement; @@ -46,15 +45,15 @@ pub fn multi_scalar_mul( let mut bytes = scalar_high.to_be_bytes().to_vec(); bytes.extend_from_slice(&scalar_low.to_be_bytes()); - // Check if this is smaller than the grumpkin modulus let grumpkin_integer = BigUint::from_bytes_be(&bytes); - if grumpkin_integer >= grumpkin::FrConfig::MODULUS.into() { - return Err(BlackBoxResolutionError::Failed( - BlackBoxFunc::MultiScalarMul, - format!("{} is not a valid grumpkin scalar", grumpkin_integer.to_str_radix(16)), - )); - } + // Check if this is smaller than the grumpkin modulus + // if grumpkin_integer >= grumpkin::FrConfig::MODULUS.into() { + // return Err(BlackBoxResolutionError::Failed( + // BlackBoxFunc::MultiScalarMul, + // format!("{} is not a valid grumpkin scalar", grumpkin_integer.to_str_radix(16)), + // )); + // } let iteration_output_point = grumpkin::SWAffine::from(point.mul_bigint(grumpkin_integer.to_u64_digits())); @@ -120,8 +119,6 @@ fn create_point( mod tests { use super::*; - use ark_ff::BigInteger; - fn get_generator() -> [FieldElement; 3] { let generator = grumpkin::SWAffine::generator(); let generator_x = FieldElement::from_repr(*generator.x().unwrap()); @@ -175,23 +172,23 @@ mod tests { assert_eq!(res, expected_error); } - #[test] - fn rejects_grumpkin_modulus() { - let x = grumpkin::FrConfig::MODULUS.to_bytes_be(); + // #[test] + // fn rejects_grumpkin_modulus() { + // let x = grumpkin::FrConfig::MODULUS.to_bytes_be(); - let low = FieldElement::from_be_bytes_reduce(&x[16..32]); - let high = FieldElement::from_be_bytes_reduce(&x[0..16]); + // let low = FieldElement::from_be_bytes_reduce(&x[16..32]); + // let high = FieldElement::from_be_bytes_reduce(&x[0..16]); - let res = multi_scalar_mul(&get_generator(), &[low], &[high]); + // let res = multi_scalar_mul(&get_generator(), &[low], &[high]); - assert_eq!( - res, - Err(BlackBoxResolutionError::Failed( - BlackBoxFunc::MultiScalarMul, - "30644e72e131a029b85045b68181585d97816a916871ca8d3c208c16d87cfd47 is not a valid grumpkin scalar".into(), - )) - ); - } + // assert_eq!( + // res, + // Err(BlackBoxResolutionError::Failed( + // BlackBoxFunc::MultiScalarMul, + // "30644e72e131a029b85045b68181585d97816a916871ca8d3c208c16d87cfd47 is not a valid grumpkin scalar".into(), + // )) + // ); + // } #[test] fn rejects_invalid_point() { diff --git a/noir/noir-repo/acvm-repo/bn254_blackbox_solver/src/lib.rs b/noir/noir-repo/acvm-repo/bn254_blackbox_solver/src/lib.rs index d74c17a52b5..f738a375ab1 100644 --- a/noir/noir-repo/acvm-repo/bn254_blackbox_solver/src/lib.rs +++ b/noir/noir-repo/acvm-repo/bn254_blackbox_solver/src/lib.rs @@ -6,9 +6,7 @@ use acvm_blackbox_solver::{BlackBoxFunctionSolver, BlackBoxResolutionError}; mod embedded_curve_ops; mod generator; -mod pedersen; mod poseidon2; -mod schnorr; pub use embedded_curve_ops::{embedded_curve_add, multi_scalar_mul}; pub use generator::generators::derive_generators; @@ -25,24 +23,6 @@ type FieldElement = acir::acir_field::GenericFieldElement; pub struct Bn254BlackBoxSolver; impl BlackBoxFunctionSolver for Bn254BlackBoxSolver { - fn 
schnorr_verify( - &self, - public_key_x: &FieldElement, - public_key_y: &FieldElement, - signature: &[u8; 64], - message: &[u8], - ) -> Result { - let sig_s: [u8; 32] = signature[0..32].try_into().unwrap(); - let sig_e: [u8; 32] = signature[32..64].try_into().unwrap(); - Ok(schnorr::verify_signature( - public_key_x.into_repr(), - public_key_y.into_repr(), - sig_s, - sig_e, - message, - )) - } - fn multi_scalar_mul( &self, points: &[FieldElement], diff --git a/noir/noir-repo/acvm-repo/bn254_blackbox_solver/src/pedersen/commitment.rs b/noir/noir-repo/acvm-repo/bn254_blackbox_solver/src/pedersen/commitment.rs deleted file mode 100644 index 03f03fcf5ab..00000000000 --- a/noir/noir-repo/acvm-repo/bn254_blackbox_solver/src/pedersen/commitment.rs +++ /dev/null @@ -1,77 +0,0 @@ -// Taken from: https://github.com/laudiacay/barustenberg/blob/df6bc6f095fe7f288bf6a12e7317fd8eb33d68ae/barustenberg/src/crypto/pedersen/pederson.rs - -use ark_ec::{short_weierstrass::Affine, AffineRepr, CurveGroup}; -use ark_ff::{MontConfig, PrimeField}; -use grumpkin::{Fq, FqConfig, Fr, FrConfig, GrumpkinParameters}; - -use crate::generator::generators::{derive_generators, DEFAULT_DOMAIN_SEPARATOR}; - -/// Given a vector of fields, generate a pedersen commitment using the indexed generators. -pub(crate) fn commit_native_with_index( - inputs: &[Fq], - starting_index: u32, -) -> Affine { - let generators = - derive_generators(DEFAULT_DOMAIN_SEPARATOR, inputs.len() as u32, starting_index); - - // As |F_r| > |F_q|, we can safely convert any `F_q` into an `F_r` uniquely. - assert!(FrConfig::MODULUS > FqConfig::MODULUS); - - inputs.iter().enumerate().fold(Affine::zero(), |mut acc, (i, input)| { - acc = (acc + (generators[i] * Fr::from_bigint(input.into_bigint()).unwrap()).into_affine()) - .into_affine(); - acc - }) -} - -#[cfg(test)] -mod test { - - use acir::AcirField; - use ark_ec::short_weierstrass::Affine; - use ark_std::{One, Zero}; - use grumpkin::Fq; - - use crate::pedersen::commitment::commit_native_with_index; - use crate::FieldElement; - - #[test] - fn commitment() { - // https://github.com/AztecProtocol/aztec-packages/blob/72931bdb8202c34042cdfb8cee2ef44b75939879/barretenberg/cpp/src/barretenberg/crypto/pedersen_commitment/pedersen.test.cpp#L10-L18 - let res = commit_native_with_index(&[Fq::one(), Fq::one()], 0); - let expected = Affine::new( - FieldElement::from_hex( - "0x2f7a8f9a6c96926682205fb73ee43215bf13523c19d7afe36f12760266cdfe15", - ) - .unwrap() - .into_repr(), - FieldElement::from_hex( - "0x01916b316adbbf0e10e39b18c1d24b33ec84b46daddf72f43878bcc92b6057e6", - ) - .unwrap() - .into_repr(), - ); - - assert_eq!(res, expected); - } - - #[test] - fn commitment_with_zero() { - // https://github.com/AztecProtocol/aztec-packages/blob/72931bdb8202c34042cdfb8cee2ef44b75939879/barretenberg/cpp/src/barretenberg/crypto/pedersen_commitment/pedersen.test.cpp#L20-L29 - let res = commit_native_with_index(&[Fq::zero(), Fq::one()], 0); - let expected = Affine::new( - FieldElement::from_hex( - "0x054aa86a73cb8a34525e5bbed6e43ba1198e860f5f3950268f71df4591bde402", - ) - .unwrap() - .into_repr(), - FieldElement::from_hex( - "0x209dcfbf2cfb57f9f6046f44d71ac6faf87254afc7407c04eb621a6287cac126", - ) - .unwrap() - .into_repr(), - ); - - assert_eq!(res, expected); - } -} diff --git a/noir/noir-repo/acvm-repo/bn254_blackbox_solver/src/pedersen/hash.rs b/noir/noir-repo/acvm-repo/bn254_blackbox_solver/src/pedersen/hash.rs deleted file mode 100644 index 152526a9943..00000000000 --- 
a/noir/noir-repo/acvm-repo/bn254_blackbox_solver/src/pedersen/hash.rs +++ /dev/null @@ -1,69 +0,0 @@ -// Taken from: https://github.com/laudiacay/barustenberg/blob/df6bc6f095fe7f288bf6a12e7317fd8eb33d68ae/barustenberg/src/crypto/pedersen/pederson_hash.rs - -use std::sync::OnceLock; - -use ark_ec::{short_weierstrass::Affine, CurveConfig, CurveGroup}; -use grumpkin::GrumpkinParameters; - -use crate::generator::generators::derive_generators; - -use super::commitment::commit_native_with_index; - -/// Given a vector of fields, generate a pedersen hash using the indexed generators. -pub(crate) fn hash_with_index( - inputs: &[grumpkin::Fq], - starting_index: u32, -) -> ::BaseField { - let length_as_scalar: ::ScalarField = - (inputs.len() as u64).into(); - let length_prefix = *length_generator() * length_as_scalar; - let result = length_prefix + commit_native_with_index(inputs, starting_index); - result.into_affine().x -} - -fn length_generator() -> &'static Affine { - static INSTANCE: OnceLock> = OnceLock::new(); - INSTANCE.get_or_init(|| derive_generators("pedersen_hash_length".as_bytes(), 1, 0)[0]) -} - -#[cfg(test)] -pub(crate) mod test { - - use super::*; - use crate::FieldElement; - - use acir::AcirField; - use ark_std::One; - use grumpkin::Fq; - - //reference: https://github.com/AztecProtocol/barretenberg/blob/master/cpp/src/barretenberg/crypto/pedersen_hash/pedersen.test.cpp - #[test] - fn hash_one() { - // https://github.com/AztecProtocol/aztec-packages/blob/72931bdb8202c34042cdfb8cee2ef44b75939879/barretenberg/cpp/src/barretenberg/crypto/pedersen_hash/pedersen.test.cpp#L21-L26 - let res = hash_with_index(&[Fq::one(), Fq::one()], 0); - - assert_eq!( - res, - FieldElement::from_hex( - "0x07ebfbf4df29888c6cd6dca13d4bb9d1a923013ddbbcbdc3378ab8845463297b", - ) - .unwrap() - .into_repr(), - ); - } - - #[test] - fn test_hash_with_index() { - // https://github.com/AztecProtocol/aztec-packages/blob/72931bdb8202c34042cdfb8cee2ef44b75939879/barretenberg/cpp/src/barretenberg/crypto/pedersen_hash/pedersen.test.cpp#L28-L33 - let res = hash_with_index(&[Fq::one(), Fq::one()], 5); - - assert_eq!( - res, - FieldElement::from_hex( - "0x1c446df60816b897cda124524e6b03f36df0cec333fad87617aab70d7861daa6", - ) - .unwrap() - .into_repr(), - ); - } -} diff --git a/noir/noir-repo/acvm-repo/bn254_blackbox_solver/src/pedersen/mod.rs b/noir/noir-repo/acvm-repo/bn254_blackbox_solver/src/pedersen/mod.rs deleted file mode 100644 index c3c4ed56450..00000000000 --- a/noir/noir-repo/acvm-repo/bn254_blackbox_solver/src/pedersen/mod.rs +++ /dev/null @@ -1,2 +0,0 @@ -pub(crate) mod commitment; -pub(crate) mod hash; diff --git a/noir/noir-repo/acvm-repo/bn254_blackbox_solver/src/schnorr/mod.rs b/noir/noir-repo/acvm-repo/bn254_blackbox_solver/src/schnorr/mod.rs deleted file mode 100644 index 8e3a40803f8..00000000000 --- a/noir/noir-repo/acvm-repo/bn254_blackbox_solver/src/schnorr/mod.rs +++ /dev/null @@ -1,147 +0,0 @@ -use acvm_blackbox_solver::blake2s; -use ark_ec::{ - short_weierstrass::{Affine, SWCurveConfig}, - AffineRepr, CurveConfig, CurveGroup, -}; -use ark_ff::{BigInteger, PrimeField, Zero}; -use grumpkin::{Fq, GrumpkinParameters}; - -pub(crate) fn verify_signature( - pub_key_x: Fq, - pub_key_y: Fq, - sig_s_bytes: [u8; 32], - sig_e_bytes: [u8; 32], - message: &[u8], -) -> bool { - let pub_key = Affine::::new_unchecked(pub_key_x, pub_key_y); - - if !pub_key.is_on_curve() - || !pub_key.is_in_correct_subgroup_assuming_on_curve() - || pub_key.is_zero() - { - return false; - } - - let sig_s = 
- ::ScalarField::from_be_bytes_mod_order(&sig_s_bytes); - let sig_e = - ::ScalarField::from_be_bytes_mod_order(&sig_e_bytes); - - if sig_s.is_zero() || sig_e.is_zero() { - return false; - } - - // R = g^{sig.s} • pub^{sig.e} - let r = GrumpkinParameters::GENERATOR * sig_s + pub_key * sig_e; - if r.is_zero() { - // this result implies k == 0, which would be catastrophic for the prover. - // it is a cheap check that ensures this doesn't happen. - return false; - } - - // compare the _hashes_ rather than field elements modulo r - // e = H(pedersen(r, pk.x, pk.y), m), where r = R.x - let target_e_bytes = schnorr_generate_challenge(message, pub_key_x, pub_key_y, r.into_affine()); - - sig_e_bytes == target_e_bytes -} - -fn schnorr_generate_challenge( - message: &[u8], - pub_key_x: Fq, - pub_key_y: Fq, - r: Affine, -) -> [u8; 32] { - // create challenge message pedersen_commitment(R.x, pubkey) - - let r_x = *r.x().expect("r has been checked to be non-zero"); - let pedersen_hash = crate::pedersen::hash::hash_with_index(&[r_x, pub_key_x, pub_key_y], 0); - - let mut hash_input: Vec = pedersen_hash.into_bigint().to_bytes_be(); - hash_input.extend(message); - - blake2s(&hash_input).unwrap() -} - -#[cfg(test)] -mod schnorr_tests { - use acir::AcirField; - - use super::verify_signature; - use crate::FieldElement; - - #[test] - fn verifies_valid_signature() { - let pub_key_x: grumpkin::Fq = FieldElement::from_hex( - "0x04b260954662e97f00cab9adb773a259097f7a274b83b113532bce27fa3fb96a", - ) - .unwrap() - .into_repr(); - let pub_key_y: grumpkin::Fq = FieldElement::from_hex( - "0x2fd51571db6c08666b0edfbfbc57d432068bccd0110a39b166ab243da0037197", - ) - .unwrap() - .into_repr(); - let sig_s_bytes: [u8; 32] = [ - 1, 13, 119, 112, 212, 39, 233, 41, 84, 235, 255, 93, 245, 172, 186, 83, 157, 253, 76, - 77, 33, 128, 178, 15, 214, 67, 105, 107, 177, 234, 77, 48, - ]; - let sig_e_bytes: [u8; 32] = [ - 27, 237, 155, 84, 39, 84, 247, 27, 22, 8, 176, 230, 24, 115, 145, 220, 254, 122, 135, - 179, 171, 4, 214, 202, 64, 199, 19, 84, 239, 138, 124, 12, - ]; - let message: &[u8] = &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]; - - assert!(verify_signature(pub_key_x, pub_key_y, sig_s_bytes, sig_e_bytes, message)); - } - - #[test] - fn rejects_zero_e() { - let pub_key_x: grumpkin::Fq = FieldElement::from_hex( - "0x04b260954662e97f00cab9adb773a259097f7a274b83b113532bce27fa3fb96a", - ) - .unwrap() - .into_repr(); - let pub_key_y: grumpkin::Fq = FieldElement::from_hex( - "0x2fd51571db6c08666b0edfbfbc57d432068bccd0110a39b166ab243da0037197", - ) - .unwrap() - .into_repr(); - let sig_s_bytes: [u8; 32] = [ - 1, 13, 119, 112, 212, 39, 233, 41, 84, 235, 255, 93, 245, 172, 186, 83, 157, 253, 76, - 77, 33, 128, 178, 15, 214, 67, 105, 107, 177, 234, 77, 48, - ]; - let sig_e_bytes: [u8; 32] = [ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, - ]; - let message: &[u8] = &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]; - - assert!(!verify_signature(pub_key_x, pub_key_y, sig_s_bytes, sig_e_bytes, message)); - } - - #[test] - fn rejects_zero_s() { - let pub_key_x: grumpkin::Fq = FieldElement::from_hex( - "0x04b260954662e97f00cab9adb773a259097f7a274b83b113532bce27fa3fb96a", - ) - .unwrap() - .into_repr(); - let pub_key_y: grumpkin::Fq = FieldElement::from_hex( - "0x2fd51571db6c08666b0edfbfbc57d432068bccd0110a39b166ab243da0037197", - ) - .unwrap() - .into_repr(); - let sig_s_bytes: [u8; 32] = [ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, - ]; - let sig_e_bytes: [u8; 
32] = [ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, - ]; - let message: &[u8] = &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]; - - assert!(!verify_signature(pub_key_x, pub_key_y, sig_s_bytes, sig_e_bytes, message)); - } -} diff --git a/noir/noir-repo/acvm-repo/brillig/src/black_box.rs b/noir/noir-repo/acvm-repo/brillig/src/black_box.rs index 3264388c8ef..9cc5349e45b 100644 --- a/noir/noir-repo/acvm-repo/brillig/src/black_box.rs +++ b/noir/noir-repo/acvm-repo/brillig/src/black_box.rs @@ -43,14 +43,7 @@ pub enum BlackBoxOp { signature: HeapArray, result: MemoryAddress, }, - /// Verifies a Schnorr signature over a curve which is "pairing friendly" with the curve on which the Brillig bytecode is defined. - SchnorrVerify { - public_key_x: MemoryAddress, - public_key_y: MemoryAddress, - message: HeapVector, - signature: HeapVector, - result: MemoryAddress, - }, + /// Performs multi scalar multiplication over the embedded curve. MultiScalarMul { points: HeapVector, diff --git a/noir/noir-repo/acvm-repo/brillig_vm/src/black_box.rs b/noir/noir-repo/acvm-repo/brillig_vm/src/black_box.rs index 19e2dd7553d..79aea2adf76 100644 --- a/noir/noir-repo/acvm-repo/brillig_vm/src/black_box.rs +++ b/noir/noir-repo/acvm-repo/brillig_vm/src/black_box.rs @@ -141,17 +141,6 @@ pub(crate) fn evaluate_black_box memory.write(*result_address, result.into()); Ok(()) } - BlackBoxOp::SchnorrVerify { public_key_x, public_key_y, message, signature, result } => { - let public_key_x = *memory.read(*public_key_x).extract_field().unwrap(); - let public_key_y = *memory.read(*public_key_y).extract_field().unwrap(); - let message: Vec = to_u8_vec(read_heap_vector(memory, message)); - let signature: [u8; 64] = - to_u8_vec(read_heap_vector(memory, signature)).try_into().unwrap(); - let verified = - solver.schnorr_verify(&public_key_x, &public_key_y, &signature, &message)?; - memory.write(*result, verified.into()); - Ok(()) - } BlackBoxOp::MultiScalarMul { points, scalars, outputs: result } => { let points: Vec = read_heap_vector(memory, points) .iter() @@ -362,7 +351,6 @@ fn black_box_function_from_op(op: &BlackBoxOp) -> BlackBoxFunc { BlackBoxOp::Keccakf1600 { .. } => BlackBoxFunc::Keccakf1600, BlackBoxOp::EcdsaSecp256k1 { .. } => BlackBoxFunc::EcdsaSecp256k1, BlackBoxOp::EcdsaSecp256r1 { .. } => BlackBoxFunc::EcdsaSecp256r1, - BlackBoxOp::SchnorrVerify { .. } => BlackBoxFunc::SchnorrVerify, BlackBoxOp::MultiScalarMul { .. } => BlackBoxFunc::MultiScalarMul, BlackBoxOp::EmbeddedCurveAdd { .. } => BlackBoxFunc::EmbeddedCurveAdd, BlackBoxOp::BigIntAdd { .. 
} => BlackBoxFunc::BigIntAdd, diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/acir/generated_acir.rs b/noir/noir-repo/compiler/noirc_evaluator/src/acir/generated_acir.rs index 91206abe732..3b29c0319ab 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/acir/generated_acir.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/acir/generated_acir.rs @@ -225,19 +225,6 @@ impl GeneratedAcir { inputs: inputs[0].clone(), outputs: outputs.try_into().expect("Compiler should generate correct size outputs"), }, - BlackBoxFunc::SchnorrVerify => { - BlackBoxFuncCall::SchnorrVerify { - public_key_x: inputs[0][0], - public_key_y: inputs[1][0], - // Schnorr signature is an r & s, 32 bytes each - signature: inputs[2] - .clone() - .try_into() - .expect("Compiler should generate correct size inputs"), - message: inputs[3].clone(), - output: outputs[0], - } - } BlackBoxFunc::EcdsaSecp256k1 => { BlackBoxFuncCall::EcdsaSecp256k1 { // 32 bytes for each public key co-ordinate @@ -715,9 +702,7 @@ fn black_box_func_expected_input_size(name: BlackBoxFunc) -> Option { // Signature verification algorithms will take in a variable // number of inputs, since the message/hashed-message can vary in size. - BlackBoxFunc::SchnorrVerify - | BlackBoxFunc::EcdsaSecp256k1 - | BlackBoxFunc::EcdsaSecp256r1 => None, + BlackBoxFunc::EcdsaSecp256k1 | BlackBoxFunc::EcdsaSecp256r1 => None, // Inputs for multi scalar multiplication is an arbitrary number of [point, scalar] pairs. BlackBoxFunc::MultiScalarMul => None, @@ -762,9 +747,7 @@ fn black_box_expected_output_size(name: BlackBoxFunc) -> Option { BlackBoxFunc::RANGE => Some(0), // Signature verification algorithms will return a boolean - BlackBoxFunc::SchnorrVerify - | BlackBoxFunc::EcdsaSecp256k1 - | BlackBoxFunc::EcdsaSecp256r1 => Some(1), + BlackBoxFunc::EcdsaSecp256k1 | BlackBoxFunc::EcdsaSecp256r1 => Some(1), // Output of operations over the embedded curve // will be 2 field elements representing the point. 
diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs index 3685c9540f3..2ddcea26570 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs @@ -144,27 +144,6 @@ pub(crate) fn convert_black_box_call { - if let ( - [BrilligVariable::SingleAddr(public_key_x), BrilligVariable::SingleAddr(public_key_y), signature, message], - [BrilligVariable::SingleAddr(result_register)], - ) = (function_arguments, function_results) - { - let message = convert_array_or_vector(brillig_context, *message, bb_func); - let signature = convert_array_or_vector(brillig_context, *signature, bb_func); - brillig_context.black_box_op_instruction(BlackBoxOp::SchnorrVerify { - public_key_x: public_key_x.address, - public_key_y: public_key_y.address, - message, - signature, - result: result_register.address, - }); - brillig_context.deallocate_heap_vector(message); - brillig_context.deallocate_heap_vector(signature); - } else { - unreachable!("ICE: Schnorr verify expects two registers for the public key, an array for signature, an array for the message hash and one result register") - } - } BlackBoxFunc::MultiScalarMul => { if let ([points, scalars], [BrilligVariable::BrilligArray(outputs)]) = (function_arguments, function_results) diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir.rs b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir.rs index b4e10035af6..8d5f14cee94 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir.rs @@ -253,15 +253,6 @@ pub(crate) mod tests { pub(crate) struct DummyBlackBoxSolver; impl BlackBoxFunctionSolver for DummyBlackBoxSolver { - fn schnorr_verify( - &self, - _public_key_x: &FieldElement, - _public_key_y: &FieldElement, - _signature: &[u8; 64], - _message: &[u8], - ) -> Result { - Ok(true) - } fn multi_scalar_mul( &self, _points: &[FieldElement], diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs index 55a24264fbb..ef1b5432128 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs @@ -326,23 +326,6 @@ impl DebugShow { result ); } - BlackBoxOp::SchnorrVerify { - public_key_x, - public_key_y, - message, - signature, - result, - } => { - debug_println!( - self.enable_debug_trace, - " SCHNORR_VERIFY {} {} {} {} -> {}", - public_key_x, - public_key_y, - message, - signature, - result - ); - } BlackBoxOp::BigIntAdd { lhs, rhs, output } => { debug_println!( self.enable_debug_trace, diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs index 67222d06ea8..6ebe80128c0 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs @@ -586,7 +586,6 @@ fn simplify_black_box_func( BlackBoxFunc::EmbeddedCurveAdd => { blackbox::simplify_ec_add(dfg, solver, arguments, block, call_stack) } - BlackBoxFunc::SchnorrVerify => blackbox::simplify_schnorr_verify(dfg, solver, arguments), 
BlackBoxFunc::BigIntAdd | BlackBoxFunc::BigIntSub diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/call/blackbox.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/call/blackbox.rs index 301b75e0bd4..b9faf1c46ec 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/call/blackbox.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/call/blackbox.rs @@ -156,39 +156,6 @@ pub(super) fn simplify_poseidon2_permutation( } } -pub(super) fn simplify_schnorr_verify( - dfg: &mut DataFlowGraph, - solver: impl BlackBoxFunctionSolver, - arguments: &[ValueId], -) -> SimplifyResult { - match ( - dfg.get_numeric_constant(arguments[0]), - dfg.get_numeric_constant(arguments[1]), - dfg.get_array_constant(arguments[2]), - dfg.get_array_constant(arguments[3]), - ) { - (Some(public_key_x), Some(public_key_y), Some((signature, _)), Some((message, _))) - if array_is_constant(dfg, &signature) && array_is_constant(dfg, &message) => - { - let signature = to_u8_vec(dfg, signature); - let signature: [u8; 64] = - signature.try_into().expect("Compiler should produce correctly sized signature"); - - let message = to_u8_vec(dfg, message); - - let Ok(valid_signature) = - solver.schnorr_verify(&public_key_x, &public_key_y, &signature, &message) - else { - return SimplifyResult::None; - }; - - let valid_signature = dfg.make_constant(valid_signature.into(), Type::bool()); - SimplifyResult::SimplifiedTo(valid_signature) - } - _ => SimplifyResult::None, - } -} - pub(super) fn simplify_hash( dfg: &mut DataFlowGraph, arguments: &[ValueId], diff --git a/noir/noir-repo/docs/docs/noir/standard_library/cryptographic_primitives/schnorr.mdx b/noir/noir-repo/docs/docs/noir/standard_library/cryptographic_primitives/schnorr.mdx index 286a0ac6c7d..4c859043787 100644 --- a/noir/noir-repo/docs/docs/noir/standard_library/cryptographic_primitives/schnorr.mdx +++ b/noir/noir-repo/docs/docs/noir/standard_library/cryptographic_primitives/schnorr.mdx @@ -10,7 +10,6 @@ import BlackBoxInfo from '@site/src/components/Notes/_blackbox'; ## schnorr::verify_signature Verifier for Schnorr signatures over the embedded curve (for BN254 it is Grumpkin). -See schnorr::verify_signature_slice for a version that works directly on slices. #include_code schnorr_verify noir_stdlib/src/schnorr.nr rust @@ -34,13 +33,4 @@ const signature = Array.from( ... ``` - -## schnorr::verify_signature_slice - -Verifier for Schnorr signatures over the embedded curve (for BN254 it is Grumpkin) -where the message is a slice. 
- -#include_code schnorr_verify_slice noir_stdlib/src/schnorr.nr rust - - diff --git a/noir/noir-repo/noir_stdlib/src/schnorr.nr b/noir/noir-repo/noir_stdlib/src/schnorr.nr index a43e75537ee..d9d494e3093 100644 --- a/noir/noir-repo/noir_stdlib/src/schnorr.nr +++ b/noir/noir-repo/noir_stdlib/src/schnorr.nr @@ -1,32 +1,13 @@ use crate::embedded_curve_ops::{EmbeddedCurvePoint, EmbeddedCurveScalar}; -#[foreign(schnorr_verify)] // docs:start:schnorr_verify pub fn verify_signature( - public_key_x: Field, - public_key_y: Field, + public_key: EmbeddedCurvePoint, signature: [u8; 64], message: [u8; N], ) -> bool // docs:end:schnorr_verify -{} - -#[foreign(schnorr_verify)] -// docs:start:schnorr_verify_slice -pub fn verify_signature_slice( - public_key_x: Field, - public_key_y: Field, - signature: [u8; 64], - message: [u8], -) -> bool -// docs:end:schnorr_verify_slice -{} - -pub fn verify_signature_noir( - public_key: EmbeddedCurvePoint, - signature: [u8; 64], - message: [u8; N], -) -> bool { +{ //scalar lo/hi from bytes let sig_s = EmbeddedCurveScalar::from_bytes(signature, 0); let sig_e = EmbeddedCurveScalar::from_bytes(signature, 32); @@ -109,6 +90,6 @@ fn test_zero_signature() { }; let signature: [u8; 64] = [0; 64]; let message: [u8; _] = [2; 64]; // every message - let verified = verify_signature_noir(public_key, signature, message); + let verified = verify_signature(public_key, signature, message); assert(!verified); } diff --git a/noir/noir-repo/test_programs/compile_success_empty/schnorr_simplification/src/main.nr b/noir/noir-repo/test_programs/compile_success_empty/schnorr_simplification/src/main.nr index cdfa8337094..53b71fc3842 100644 --- a/noir/noir-repo/test_programs/compile_success_empty/schnorr_simplification/src/main.nr +++ b/noir/noir-repo/test_programs/compile_success_empty/schnorr_simplification/src/main.nr @@ -1,9 +1,14 @@ +use std::embedded_curve_ops::EmbeddedCurvePoint; + // Note: If main has any unsized types, then the verifier will never be able // to figure out the circuit instance fn main() { let message = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]; - let pub_key_x = 0x04b260954662e97f00cab9adb773a259097f7a274b83b113532bce27fa3fb96a; - let pub_key_y = 0x2fd51571db6c08666b0edfbfbc57d432068bccd0110a39b166ab243da0037197; + let pub_key = EmbeddedCurvePoint { + x: 0x04b260954662e97f00cab9adb773a259097f7a274b83b113532bce27fa3fb96a, + y: 0x2fd51571db6c08666b0edfbfbc57d432068bccd0110a39b166ab243da0037197, + is_infinite: false, + }; let signature = [ 1, 13, 119, 112, 212, 39, 233, 41, 84, 235, 255, 93, 245, 172, 186, 83, 157, 253, 76, 77, 33, 128, 178, 15, 214, 67, 105, 107, 177, 234, 77, 48, 27, 237, 155, 84, 39, 84, 247, 27, @@ -11,6 +16,6 @@ fn main() { 239, 138, 124, 12, ]; - let valid_signature = std::schnorr::verify_signature(pub_key_x, pub_key_y, signature, message); + let valid_signature = std::schnorr::verify_signature(pub_key, signature, message); assert(valid_signature); } diff --git a/noir/noir-repo/test_programs/execution_success/schnorr/src/main.nr b/noir/noir-repo/test_programs/execution_success/schnorr/src/main.nr index 21845cd54fa..ab3c65372c5 100644 --- a/noir/noir-repo/test_programs/execution_success/schnorr/src/main.nr +++ b/noir/noir-repo/test_programs/execution_success/schnorr/src/main.nr @@ -13,18 +13,12 @@ fn main( // We want to make sure that we can accurately verify a signature whose message is a slice vs. 
an array let message_field_bytes: [u8; 10] = message_field.to_be_bytes(); - // Is there ever a situation where someone would want - // to ensure that a signature was invalid? - // Check that passing a slice as the message is valid - let valid_signature = - std::schnorr::verify_signature_slice(pub_key_x, pub_key_y, signature, message_field_bytes); - assert(valid_signature); // Check that passing an array as the message is valid - let valid_signature = std::schnorr::verify_signature(pub_key_x, pub_key_y, signature, message); - assert(valid_signature); let pub_key = embedded_curve_ops::EmbeddedCurvePoint { x: pub_key_x, y: pub_key_y, is_infinite: false }; - let valid_signature = std::schnorr::verify_signature_noir(pub_key, signature, message); + let valid_signature = std::schnorr::verify_signature(pub_key, signature, message_field_bytes); + assert(valid_signature); + let valid_signature = std::schnorr::verify_signature(pub_key, signature, message); assert(valid_signature); std::schnorr::assert_valid_signature(pub_key, signature, message); } diff --git a/noir/noir-repo/tooling/lsp/src/solver.rs b/noir/noir-repo/tooling/lsp/src/solver.rs index 3c2d7499880..a36e30a944e 100644 --- a/noir/noir-repo/tooling/lsp/src/solver.rs +++ b/noir/noir-repo/tooling/lsp/src/solver.rs @@ -6,16 +6,6 @@ use acvm::BlackBoxFunctionSolver; pub(super) struct WrapperSolver(pub(super) Box>); impl BlackBoxFunctionSolver for WrapperSolver { - fn schnorr_verify( - &self, - public_key_x: &acvm::FieldElement, - public_key_y: &acvm::FieldElement, - signature: &[u8; 64], - message: &[u8], - ) -> Result { - self.0.schnorr_verify(public_key_x, public_key_y, signature, message) - } - fn multi_scalar_mul( &self, points: &[acvm::FieldElement], diff --git a/noir/noir-repo/tooling/profiler/src/opcode_formatter.rs b/noir/noir-repo/tooling/profiler/src/opcode_formatter.rs index b4367de9e7e..d1081de6c8f 100644 --- a/noir/noir-repo/tooling/profiler/src/opcode_formatter.rs +++ b/noir/noir-repo/tooling/profiler/src/opcode_formatter.rs @@ -10,7 +10,6 @@ fn format_blackbox_function(call: &BlackBoxFuncCall) -> String { BlackBoxFuncCall::RANGE { .. } => "range".to_string(), BlackBoxFuncCall::Blake2s { .. } => "blake2s".to_string(), BlackBoxFuncCall::Blake3 { .. } => "blake3".to_string(), - BlackBoxFuncCall::SchnorrVerify { .. } => "schnorr_verify".to_string(), BlackBoxFuncCall::EcdsaSecp256k1 { .. } => "ecdsa_secp256k1".to_string(), BlackBoxFuncCall::EcdsaSecp256r1 { .. } => "ecdsa_secp256r1".to_string(), BlackBoxFuncCall::MultiScalarMul { .. } => "multi_scalar_mul".to_string(), @@ -33,7 +32,6 @@ fn format_blackbox_op(call: &BlackBoxOp) -> String { BlackBoxOp::AES128Encrypt { .. } => "aes128_encrypt".to_string(), BlackBoxOp::Blake2s { .. } => "blake2s".to_string(), BlackBoxOp::Blake3 { .. } => "blake3".to_string(), - BlackBoxOp::SchnorrVerify { .. } => "schnorr_verify".to_string(), BlackBoxOp::EcdsaSecp256k1 { .. } => "ecdsa_secp256k1".to_string(), BlackBoxOp::EcdsaSecp256r1 { .. } => "ecdsa_secp256r1".to_string(), BlackBoxOp::MultiScalarMul { .. } => "multi_scalar_mul".to_string(), From e6060ecca318ca4cdc60f1df77c1e7639a745f79 Mon Sep 17 00:00:00 2001 From: just-mitch <68168980+just-mitch@users.noreply.github.com> Date: Tue, 3 Dec 2024 18:04:32 -0500 Subject: [PATCH 10/24] feat: test release network via ci workflow (#10388) New workflow to run an arbitrary test on an arbitrary namespace (within our aztec-gke cluster) with an arbitrary e2e image. I tested this with a temporary `on: push` flag. 
See successful run [here](https://github.com/AztecProtocol/aztec-packages/actions/runs/12148432809/job/33876880156) using smoke test with a locally built image against the smoke cluster. Also tweak our release to also publish our `end-to-end` image so it can be used by this workflow more easily. Fix #10247 Fix #10383 --- .github/workflows/network-test.yml | 86 +++++++++++++++++++ .github/workflows/publish-aztec-packages.yml | 4 +- yarn-project/Earthfile | 20 ++++- .../end-to-end/scripts/network_test.sh | 4 +- .../end-to-end/src/spartan/4epochs.test.ts | 4 +- .../src/spartan/gating-passive.test.ts | 4 +- .../end-to-end/src/spartan/proving.test.ts | 4 +- .../end-to-end/src/spartan/reorg.test.ts | 4 +- .../end-to-end/src/spartan/smoke.test.ts | 4 +- .../end-to-end/src/spartan/transfer.test.ts | 4 +- yarn-project/end-to-end/src/spartan/utils.ts | 34 ++++++-- 11 files changed, 146 insertions(+), 26 deletions(-) create mode 100644 .github/workflows/network-test.yml diff --git a/.github/workflows/network-test.yml b/.github/workflows/network-test.yml new file mode 100644 index 00000000000..8ed37dfa51d --- /dev/null +++ b/.github/workflows/network-test.yml @@ -0,0 +1,86 @@ +name: Aztec Network Test + +on: + workflow_dispatch: + inputs: + namespace: + description: The namespace to deploy to, e.g. smoke + required: true + test: + description: The test to run, e.g. spartan/smoke.test.ts + required: true + aztec_e2e_docker_image: + description: The Aztec E2E Docker image to use, e.g. aztecprotocol/end-to-end:da809c58290f9590836f45ec59376cbf04d3c4ce-x86_64 + required: true + +jobs: + network_test: + runs-on: ubuntu-latest + + env: + TEST_DOCKER_IMAGE: ${{ inputs.aztec_e2e_docker_image }} + NAMESPACE: ${{ inputs.namespace }} + TEST: ${{ inputs.test }} + CHART_PATH: ./spartan/aztec-network + CLUSTER_NAME: aztec-gke + REGION: us-west1-a + PROJECT_ID: testnet-440309 + GKE_CLUSTER_CONTEXT: gke_testnet-440309_us-west1-a_aztec-gke + + steps: + - name: Checkout code + uses: actions/checkout@v3 + + - name: Authenticate to Google Cloud + uses: google-github-actions/auth@v2 + with: + credentials_json: ${{ secrets.GCP_SA_KEY }} + + - name: Set up Cloud SDK + uses: google-github-actions/setup-gcloud@v2 + with: + install_components: gke-gcloud-auth-plugin + + - name: Configure kubectl with GKE cluster + run: | + gcloud container clusters get-credentials ${{ env.CLUSTER_NAME }} --region ${{ env.REGION }} + + - name: Run test + run: | + + # Find 3 free ports between 9000 and 10000 + FREE_PORTS=$(comm -23 <(seq 9000 10000 | sort) <(ss -Htan | awk '{print $4}' | cut -d':' -f2 | sort -u) | shuf | head -n 3) + + # Extract the free ports from the list + PXE_PORT=$(echo $FREE_PORTS | awk '{print $1}') + ANVIL_PORT=$(echo $FREE_PORTS | awk '{print $2}') + METRICS_PORT=$(echo $FREE_PORTS | awk '{print $3}') + + export GRAFANA_PASSWORD=$(kubectl get secrets -n metrics metrics-grafana -o jsonpath='{.data.admin-password}' | base64 --decode) + + gcloud config set project ${{ env.PROJECT_ID }} + + GCLOUD_CONFIG_DIR=$(gcloud info --format='value(config. paths. 
global_config_dir)') + + echo "gcloud config dir: [$GCLOUD_CONFIG_DIR]" + + docker run --rm --network=host \ + -v ~/.kube:/root/.kube \ + -v $GCLOUD_CONFIG_DIR:/root/.config/gcloud \ + -e K8S=gcloud \ + -e CLUSTER_NAME=${{ env.CLUSTER_NAME }} \ + -e REGION=${{ env.REGION }} \ + -e INSTANCE_NAME=${{ env.NAMESPACE }} \ + -e SPARTAN_DIR="/usr/src/spartan" \ + -e NAMESPACE=${{ env.NAMESPACE }} \ + -e HOST_PXE_PORT=$PXE_PORT \ + -e CONTAINER_PXE_PORT=8081 \ + -e HOST_ETHEREUM_PORT=$ANVIL_PORT \ + -e CONTAINER_ETHEREUM_PORT=8545 \ + -e HOST_METRICS_PORT=$METRICS_PORT \ + -e CONTAINER_METRICS_PORT=80 \ + -e GRAFANA_PASSWORD=$GRAFANA_PASSWORD \ + -e DEBUG="aztec:*" \ + -e LOG_JSON=1 \ + -e LOG_LEVEL=debug \ + ${{ env.TEST_DOCKER_IMAGE }} ${{ env.TEST }} diff --git a/.github/workflows/publish-aztec-packages.yml b/.github/workflows/publish-aztec-packages.yml index d28c577f875..2695f252c68 100644 --- a/.github/workflows/publish-aztec-packages.yml +++ b/.github/workflows/publish-aztec-packages.yml @@ -100,13 +100,13 @@ jobs: with: concurrency_key: build-aztec dockerhub_password: "${{ env.DOCKERHUB_PASSWORD }}" - - name: Build & Push Aztec x86_64 + - name: Build & Push Aztec and End-to-End x86_64 timeout-minutes: 40 run: | earthly-ci \ --no-output \ --push \ - ./yarn-project+export-aztec-arch \ + ./yarn-project+export-images-arch \ --DIST_TAG=${{ env.GIT_COMMIT }} \ --ARCH=x86_64 diff --git a/yarn-project/Earthfile b/yarn-project/Earthfile index 7fd30660cfd..c25214fc97a 100644 --- a/yarn-project/Earthfile +++ b/yarn-project/Earthfile @@ -215,11 +215,17 @@ end-to-end-base: && apt-get install -y wget gnupg \ && wget -q -O - https://dl-ssl.google.com/linux/linux_signing_key.pub | apt-key add - \ && echo "deb [arch=$(dpkg --print-architecture)] http://dl.google.com/linux/chrome/deb/ stable main" >> /etc/apt/sources.list.d/google-chrome.list \ - && apt update && apt install curl chromium nodejs netcat-openbsd -y \ + && apt update && apt install curl chromium nodejs netcat-openbsd git -y \ && rm -rf /var/lib/apt/lists/* \ && mkdir -p /usr/local/bin \ && curl -fsSL -o /usr/local/bin/kubectl "https://dl.k8s.io/release/$(curl -L -s https://dl.k8s.io/release/stable.txt)/bin/linux/amd64/kubectl" \ && chmod +x /usr/local/bin/kubectl \ + && curl -O https://dl.google.com/dl/cloudsdk/channels/rapid/downloads/google-cloud-cli-linux-x86_64.tar.gz \ + && tar xf google-cloud-cli-linux-x86_64.tar.gz \ + && mv google-cloud-sdk /usr/lib/google-cloud-sdk \ + && /usr/lib/google-cloud-sdk/install.sh --additional-components gke-gcloud-auth-plugin --path-update false --quiet \ + && ln -s /usr/lib/google-cloud-sdk/bin/gcloud /usr/bin/gcloud \ + && rm google-cloud-cli-linux-x86_64.tar.gz \ && curl -fsSL -o get_helm.sh https://raw.githubusercontent.com/helm/helm/main/scripts/get-helm-3 \ && chmod +x get_helm.sh \ && ./get_helm.sh \ @@ -270,10 +276,22 @@ export-end-to-end: FROM +end-to-end SAVE IMAGE aztecprotocol/end-to-end:$EARTHLY_GIT_HASH +export-end-to-end-arch: + FROM +end-to-end + ARG DIST_TAG="latest" + ARG ARCH + SAVE IMAGE --push aztecprotocol/end-to-end:${DIST_TAG}${ARCH:+-$ARCH} + export-e2e-test-images: BUILD +export-aztec BUILD +export-end-to-end +export-images-arch: + ARG DIST_TAG="latest" + ARG ARCH + BUILD +export-aztec-arch + BUILD +export-end-to-end-arch + format-check: FROM +build RUN yarn formatting diff --git a/yarn-project/end-to-end/scripts/network_test.sh b/yarn-project/end-to-end/scripts/network_test.sh index 2266d5e0d40..e5de5ca7185 100755 --- a/yarn-project/end-to-end/scripts/network_test.sh +++ 
b/yarn-project/end-to-end/scripts/network_test.sh @@ -142,7 +142,7 @@ kubectl wait pod -l app==pxe --for=condition=Ready -n "$NAMESPACE" --timeout=10m # Find 3 free ports between 9000 and 10000 FREE_PORTS=$(comm -23 <(seq 9000 10000 | sort) <(ss -Htan | awk '{print $4}' | cut -d':' -f2 | sort -u) | shuf | head -n 3) -# Extract the two free ports from the list +# Extract the free ports from the list PXE_PORT=$(echo $FREE_PORTS | awk '{print $1}') ANVIL_PORT=$(echo $FREE_PORTS | awk '{print $2}') METRICS_PORT=$(echo $FREE_PORTS | awk '{print $3}') @@ -165,7 +165,7 @@ fi docker run --rm --network=host \ -v ~/.kube:/root/.kube \ - -e K8S=true \ + -e K8S=local \ -e INSTANCE_NAME="spartan" \ -e SPARTAN_DIR="/usr/src/spartan" \ -e NAMESPACE="$NAMESPACE" \ diff --git a/yarn-project/end-to-end/src/spartan/4epochs.test.ts b/yarn-project/end-to-end/src/spartan/4epochs.test.ts index feef5c9f243..35a16b1f896 100644 --- a/yarn-project/end-to-end/src/spartan/4epochs.test.ts +++ b/yarn-project/end-to-end/src/spartan/4epochs.test.ts @@ -7,9 +7,9 @@ import { jest } from '@jest/globals'; import { RollupCheatCodes } from '../../../aztec.js/src/utils/cheat_codes.js'; import { type TestWallets, setupTestWalletsWithTokens } from './setup_test_wallets.js'; -import { getConfig, isK8sConfig, startPortForward } from './utils.js'; +import { isK8sConfig, setupEnvironment, startPortForward } from './utils.js'; -const config = getConfig(process.env); +const config = setupEnvironment(process.env); describe('token transfer test', () => { jest.setTimeout(10 * 60 * 4000); // 40 minutes diff --git a/yarn-project/end-to-end/src/spartan/gating-passive.test.ts b/yarn-project/end-to-end/src/spartan/gating-passive.test.ts index 95921c66db9..6369f912a7a 100644 --- a/yarn-project/end-to-end/src/spartan/gating-passive.test.ts +++ b/yarn-project/end-to-end/src/spartan/gating-passive.test.ts @@ -11,10 +11,10 @@ import { applyValidatorKill, awaitL2BlockNumber, enableValidatorDynamicBootNode, - getConfig, isK8sConfig, restartBot, runAlertCheck, + setupEnvironment, startPortForward, } from './utils.js'; @@ -28,7 +28,7 @@ const qosAlerts: AlertConfig[] = [ }, ]; -const config = getConfig(process.env); +const config = setupEnvironment(process.env); if (!isK8sConfig(config)) { throw new Error('This test must be run in a k8s environment'); } diff --git a/yarn-project/end-to-end/src/spartan/proving.test.ts b/yarn-project/end-to-end/src/spartan/proving.test.ts index 8681f17601c..c4ea1fc0288 100644 --- a/yarn-project/end-to-end/src/spartan/proving.test.ts +++ b/yarn-project/end-to-end/src/spartan/proving.test.ts @@ -4,11 +4,11 @@ import { createDebugLogger } from '@aztec/foundation/log'; import { jest } from '@jest/globals'; import { type ChildProcess } from 'child_process'; -import { getConfig, isK8sConfig, startPortForward } from './utils.js'; +import { isK8sConfig, setupEnvironment, startPortForward } from './utils.js'; jest.setTimeout(2_400_000); // 40 minutes -const config = getConfig(process.env); +const config = setupEnvironment(process.env); const debugLogger = createDebugLogger('aztec:spartan-test:proving'); const SLEEP_MS = 1000; diff --git a/yarn-project/end-to-end/src/spartan/reorg.test.ts b/yarn-project/end-to-end/src/spartan/reorg.test.ts index c315fe05def..92f724c77ea 100644 --- a/yarn-project/end-to-end/src/spartan/reorg.test.ts +++ b/yarn-project/end-to-end/src/spartan/reorg.test.ts @@ -8,13 +8,13 @@ import { type TestWallets, performTransfers, setupTestWalletsWithTokens } from ' import { applyProverFailure, 
deleteResourceByLabel, - getConfig, isK8sConfig, + setupEnvironment, startPortForward, waitForResourceByLabel, } from './utils.js'; -const config = getConfig(process.env); +const config = setupEnvironment(process.env); if (!isK8sConfig(config)) { throw new Error('This test must be run in a k8s environment'); } diff --git a/yarn-project/end-to-end/src/spartan/smoke.test.ts b/yarn-project/end-to-end/src/spartan/smoke.test.ts index 49bf8324a28..f58a2d6a469 100644 --- a/yarn-project/end-to-end/src/spartan/smoke.test.ts +++ b/yarn-project/end-to-end/src/spartan/smoke.test.ts @@ -6,9 +6,9 @@ import { createPublicClient, getAddress, getContract, http } from 'viem'; import { foundry } from 'viem/chains'; import { type AlertConfig } from '../quality_of_service/alert_checker.js'; -import { getConfig, isK8sConfig, runAlertCheck, startPortForward } from './utils.js'; +import { isK8sConfig, runAlertCheck, setupEnvironment, startPortForward } from './utils.js'; -const config = getConfig(process.env); +const config = setupEnvironment(process.env); const debugLogger = createDebugLogger('aztec:spartan-test:smoke'); diff --git a/yarn-project/end-to-end/src/spartan/transfer.test.ts b/yarn-project/end-to-end/src/spartan/transfer.test.ts index a1a9d7aea9a..79cd761cfd4 100644 --- a/yarn-project/end-to-end/src/spartan/transfer.test.ts +++ b/yarn-project/end-to-end/src/spartan/transfer.test.ts @@ -5,9 +5,9 @@ import { TokenContract } from '@aztec/noir-contracts.js'; import { jest } from '@jest/globals'; import { type TestWallets, setupTestWalletsWithTokens } from './setup_test_wallets.js'; -import { getConfig, isK8sConfig, startPortForward } from './utils.js'; +import { isK8sConfig, setupEnvironment, startPortForward } from './utils.js'; -const config = getConfig(process.env); +const config = setupEnvironment(process.env); describe('token transfer test', () => { jest.setTimeout(10 * 60 * 2000); // 20 minutes diff --git a/yarn-project/end-to-end/src/spartan/utils.ts b/yarn-project/end-to-end/src/spartan/utils.ts index 3152de9c53d..120b3b3adcd 100644 --- a/yarn-project/end-to-end/src/spartan/utils.ts +++ b/yarn-project/end-to-end/src/spartan/utils.ts @@ -1,7 +1,7 @@ import { createDebugLogger, sleep } from '@aztec/aztec.js'; import type { Logger } from '@aztec/foundation/log'; -import { exec, spawn } from 'child_process'; +import { exec, execSync, spawn } from 'child_process'; import path from 'path'; import { promisify } from 'util'; import { z } from 'zod'; @@ -13,7 +13,7 @@ const execAsync = promisify(exec); const logger = createDebugLogger('k8s-utils'); -const k8sConfigSchema = z.object({ +const k8sLocalConfigSchema = z.object({ INSTANCE_NAME: z.string().min(1, 'INSTANCE_NAME env variable must be set'), NAMESPACE: z.string().min(1, 'NAMESPACE env variable must be set'), HOST_PXE_PORT: z.coerce.number().min(1, 'HOST_PXE_PORT env variable must be set'), @@ -25,7 +25,13 @@ const k8sConfigSchema = z.object({ GRAFANA_PASSWORD: z.string().min(1, 'GRAFANA_PASSWORD env variable must be set'), METRICS_API_PATH: z.string().default('/api/datasources/proxy/uid/spartan-metrics-prometheus/api/v1/query'), SPARTAN_DIR: z.string().min(1, 'SPARTAN_DIR env variable must be set'), - K8S: z.literal('true'), + K8S: z.literal('local'), +}); + +const k8sGCloudConfigSchema = k8sLocalConfigSchema.extend({ + K8S: z.literal('gcloud'), + CLUSTER_NAME: z.string().min(1, 'CLUSTER_NAME env variable must be set'), + REGION: z.string().min(1, 'REGION env variable must be set'), }); const directConfigSchema = z.object({ @@ -34,18 +40,28 @@ const 
directConfigSchema = z.object({ K8S: z.literal('false'), }); -const envSchema = z.discriminatedUnion('K8S', [k8sConfigSchema, directConfigSchema]); +const envSchema = z.discriminatedUnion('K8S', [k8sLocalConfigSchema, k8sGCloudConfigSchema, directConfigSchema]); -export type K8sConfig = z.infer; +export type K8sLocalConfig = z.infer; +export type K8sGCloudConfig = z.infer; export type DirectConfig = z.infer; export type EnvConfig = z.infer; -export function getConfig(env: unknown): EnvConfig { - return envSchema.parse(env); +export function isK8sConfig(config: EnvConfig): config is K8sLocalConfig | K8sGCloudConfig { + return config.K8S === 'local' || config.K8S === 'gcloud'; } -export function isK8sConfig(config: EnvConfig): config is K8sConfig { - return config.K8S === 'true'; +export function isGCloudConfig(config: EnvConfig): config is K8sGCloudConfig { + return config.K8S === 'gcloud'; +} + +export function setupEnvironment(env: unknown): EnvConfig { + const config = envSchema.parse(env); + if (isGCloudConfig(config)) { + const command = `gcloud container clusters get-credentials ${config.CLUSTER_NAME} --region=${config.REGION}`; + execSync(command); + } + return config; } export async function startPortForward({ From 94e6e1a954911b81e6af85edff55c64f13595b20 Mon Sep 17 00:00:00 2001 From: Alex Gherghisan Date: Tue, 3 Dec 2024 23:05:52 +0000 Subject: [PATCH 11/24] fix: publicly register contract classes (#10385) --- .../archiver/src/archiver/archiver.ts | 44 ++++++++++--------- .../aztec.js/src/contract/deploy_method.ts | 6 ++- 2 files changed, 28 insertions(+), 22 deletions(-) diff --git a/yarn-project/archiver/src/archiver/archiver.ts b/yarn-project/archiver/src/archiver/archiver.ts index 550e98ce172..94c664d0f11 100644 --- a/yarn-project/archiver/src/archiver/archiver.ts +++ b/yarn-project/archiver/src/archiver/archiver.ts @@ -922,28 +922,28 @@ class ArchiverStoreHelper } async addBlocks(blocks: L1Published[]): Promise { - return [ + const opResults = await Promise.all([ this.store.addLogs(blocks.map(block => block.data)), // Unroll all logs emitted during the retrieved blocks and extract any contract classes and instances from them - ...(await Promise.all( - blocks.map(async block => { - const contractClassLogs = block.data.body.txEffects - .flatMap(txEffect => (txEffect ? [txEffect.contractClassLogs] : [])) - .flatMap(txLog => txLog.unrollLogs()); - // ContractInstanceDeployed event logs are broadcast in privateLogs. - const privateLogs = block.data.body.txEffects.flatMap(txEffect => txEffect.privateLogs); - return ( - await Promise.all([ - this.#updateRegisteredContractClasses(contractClassLogs, block.data.number, Operation.Store), - this.#updateDeployedContractInstances(privateLogs, block.data.number, Operation.Store), - this.#storeBroadcastedIndividualFunctions(contractClassLogs, block.data.number), - ]) - ).every(Boolean); - }), - )), + ...blocks.map(async block => { + const contractClassLogs = block.data.body.txEffects + .flatMap(txEffect => (txEffect ? [txEffect.contractClassLogs] : [])) + .flatMap(txLog => txLog.unrollLogs()); + // ContractInstanceDeployed event logs are broadcast in privateLogs. 
+ const privateLogs = block.data.body.txEffects.flatMap(txEffect => txEffect.privateLogs); + return ( + await Promise.all([ + this.#updateRegisteredContractClasses(contractClassLogs, block.data.number, Operation.Store), + this.#updateDeployedContractInstances(privateLogs, block.data.number, Operation.Store), + this.#storeBroadcastedIndividualFunctions(contractClassLogs, block.data.number), + ]) + ).every(Boolean); + }), this.store.addNullifiers(blocks.map(block => block.data)), this.store.addBlocks(blocks), - ].every(Boolean); + ]); + + return opResults.every(Boolean); } async unwindBlocks(from: number, blocksToUnwind: number): Promise { @@ -966,9 +966,11 @@ class ArchiverStoreHelper const privateLogs = block.data.body.txEffects.flatMap(txEffect => txEffect.privateLogs); return ( - (await this.#updateRegisteredContractClasses(contractClassLogs, block.data.number, Operation.Delete)) && - (await this.#updateDeployedContractInstances(privateLogs, block.data.number, Operation.Delete)) - ); + await Promise.all([ + this.#updateRegisteredContractClasses(contractClassLogs, block.data.number, Operation.Delete), + this.#updateDeployedContractInstances(privateLogs, block.data.number, Operation.Delete), + ]) + ).every(Boolean); }), this.store.deleteLogs(blocks.map(b => b.data)), diff --git a/yarn-project/aztec.js/src/contract/deploy_method.ts b/yarn-project/aztec.js/src/contract/deploy_method.ts index 9b45db9b49c..869981308a7 100644 --- a/yarn-project/aztec.js/src/contract/deploy_method.ts +++ b/yarn-project/aztec.js/src/contract/deploy_method.ts @@ -89,6 +89,11 @@ export class DeployMethod extends Bas * it returns a promise for an array instead of a function call directly. */ public async request(options: DeployOptions = {}): Promise { + const deployment = await this.getDeploymentFunctionCalls(options); + + // NOTE: MEGA HACK. Remove with #10007 + // register the contract after generating deployment function calls in order to publicly register the class and (optioanlly) emit its bytecode + // // TODO: Should we add the contracts to the DB here, or once the tx has been sent or mined? // Note that we need to run this registerContract here so it's available when computeFeeOptionsFromEstimatedGas // runs, since it needs the contract to have been registered in order to estimate gas for its initialization, @@ -97,7 +102,6 @@ export class DeployMethod extends Bas // once this tx has gone through. await this.wallet.registerContract({ artifact: this.artifact, instance: this.getInstance(options) }); - const deployment = await this.getDeploymentFunctionCalls(options); const bootstrap = await this.getInitializeFunctionCalls(options); if (deployment.calls.length + bootstrap.calls.length === 0) { From 6591b63fd5ed1b910b2e74b056022f0a1e9a55dc Mon Sep 17 00:00:00 2001 From: AztecBot Date: Wed, 4 Dec 2024 02:29:28 +0000 Subject: [PATCH 12/24] git subrepo push --branch=master barretenberg subrepo: subdir: "barretenberg" merged: "3195a1b30b" upstream: origin: "https://github.com/AztecProtocol/barretenberg" branch: "master" commit: "3195a1b30b" git-subrepo: version: "0.4.6" origin: "???" commit: "???" 
[skip ci] --- barretenberg/.gitrepo | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/barretenberg/.gitrepo b/barretenberg/.gitrepo index 4f192f1c1a9..38a4c4fc6ef 100644 --- a/barretenberg/.gitrepo +++ b/barretenberg/.gitrepo @@ -6,7 +6,7 @@ [subrepo] remote = https://github.com/AztecProtocol/barretenberg branch = master - commit = 0fe0a5d6dff43547aaec45256440982184e93bb0 - parent = da1470d074f4884e61b51e450a661432c6f0a10f + commit = 3195a1b30b3bcfd635f9b4a899c49cb517283685 + parent = 94e6e1a954911b81e6af85edff55c64f13595b20 method = merge cmdver = 0.4.6 From e7a99f28cdb54c7d462a43c8e971fa59696900f2 Mon Sep 17 00:00:00 2001 From: AztecBot Date: Wed, 4 Dec 2024 02:30:00 +0000 Subject: [PATCH 13/24] chore: replace relative paths to noir-protocol-circuits --- noir-projects/aztec-nr/aztec/Nargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/noir-projects/aztec-nr/aztec/Nargo.toml b/noir-projects/aztec-nr/aztec/Nargo.toml index 7a1f1af5863..5fbd827e148 100644 --- a/noir-projects/aztec-nr/aztec/Nargo.toml +++ b/noir-projects/aztec-nr/aztec/Nargo.toml @@ -5,4 +5,4 @@ compiler_version = ">=0.18.0" type = "lib" [dependencies] -protocol_types = { path = "../../noir-protocol-circuits/crates/types" } +protocol_types = { git="https://github.com/AztecProtocol/aztec-packages", tag="aztec-packages-v0.65.2", directory="noir-projects/noir-protocol-circuits/crates/types" } From a8486c30e2d7622030da24d8510c9b0555f8f1af Mon Sep 17 00:00:00 2001 From: AztecBot Date: Wed, 4 Dec 2024 02:30:00 +0000 Subject: [PATCH 14/24] git_subrepo.sh: Fix parent in .gitrepo file. [skip ci] --- noir-projects/aztec-nr/.gitrepo | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/noir-projects/aztec-nr/.gitrepo b/noir-projects/aztec-nr/.gitrepo index e3be0061d71..0d4161abc56 100644 --- a/noir-projects/aztec-nr/.gitrepo +++ b/noir-projects/aztec-nr/.gitrepo @@ -9,4 +9,4 @@ remote = https://github.com/AztecProtocol/aztec-nr commit = 6e54884e4b0ea5204966b98c490f6e93722b7ad9 method = merge cmdver = 0.4.6 - parent = 9d0a890cedaa29019e6312fbb24164b8adbd8bc4 + parent = d697142f2fc6d1f508680e02966d5b3bff346dca From 20b88ba7297458e73d7f84bc0c22529a82ab56e6 Mon Sep 17 00:00:00 2001 From: AztecBot Date: Wed, 4 Dec 2024 02:30:04 +0000 Subject: [PATCH 15/24] git subrepo push --branch=master noir-projects/aztec-nr subrepo: subdir: "noir-projects/aztec-nr" merged: "cd730ebea1" upstream: origin: "https://github.com/AztecProtocol/aztec-nr" branch: "master" commit: "cd730ebea1" git-subrepo: version: "0.4.6" origin: "???" commit: "???" 
[skip ci] --- noir-projects/aztec-nr/.gitrepo | 4 ++-- noir-projects/aztec-nr/aztec/Nargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/noir-projects/aztec-nr/.gitrepo b/noir-projects/aztec-nr/.gitrepo index 0d4161abc56..7405293e1c3 100644 --- a/noir-projects/aztec-nr/.gitrepo +++ b/noir-projects/aztec-nr/.gitrepo @@ -6,7 +6,7 @@ [subrepo] remote = https://github.com/AztecProtocol/aztec-nr branch = master - commit = 6e54884e4b0ea5204966b98c490f6e93722b7ad9 + commit = cd730ebea17805c9c25886d7d983d462a7232a8a method = merge cmdver = 0.4.6 - parent = d697142f2fc6d1f508680e02966d5b3bff346dca + parent = 52936248d8455885f3576ccc4dec904a5d941ab4 diff --git a/noir-projects/aztec-nr/aztec/Nargo.toml b/noir-projects/aztec-nr/aztec/Nargo.toml index 5fbd827e148..7a1f1af5863 100644 --- a/noir-projects/aztec-nr/aztec/Nargo.toml +++ b/noir-projects/aztec-nr/aztec/Nargo.toml @@ -5,4 +5,4 @@ compiler_version = ">=0.18.0" type = "lib" [dependencies] -protocol_types = { git="https://github.com/AztecProtocol/aztec-packages", tag="aztec-packages-v0.65.2", directory="noir-projects/noir-protocol-circuits/crates/types" } +protocol_types = { path = "../../noir-protocol-circuits/crates/types" } From be54cc3e2e58b809c3795a2b85e76711cdff2216 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20Pedro=20Sousa?= Date: Wed, 4 Dec 2024 08:32:24 +0000 Subject: [PATCH 16/24] feat: making testnet script write a docker compose file (#10333) Some QoL stuff for our S&P friends. The tutorial can be simplified by using docker compose. Assuming UNIX distros and node installed, this should work out of the box: ```bash npx aztec-spartan install # creates the docker-compose config npx aztec-spartan start/stop/logs/update/etc ``` --- .github/workflows/ci.yml | 13 + .github/workflows/publish-aztec-packages.yml | 12 + spartan/releases/rough-rhino/.gitignore | 176 +++++++++++ spartan/releases/rough-rhino/Earthfile | 101 +++++++ spartan/releases/rough-rhino/README.md | 37 +++ .../releases/rough-rhino/assets/banner.jpeg | Bin 0 -> 68390 bytes spartan/releases/rough-rhino/aztec-spartan.sh | 285 ++++++++++++++++++ .../releases/rough-rhino/create-spartan.sh | 20 ++ spartan/releases/rough-rhino/full-node.sh | 39 --- spartan/releases/rough-rhino/validator.sh | 42 --- 10 files changed, 644 insertions(+), 81 deletions(-) create mode 100644 spartan/releases/rough-rhino/.gitignore create mode 100644 spartan/releases/rough-rhino/Earthfile create mode 100644 spartan/releases/rough-rhino/README.md create mode 100644 spartan/releases/rough-rhino/assets/banner.jpeg create mode 100755 spartan/releases/rough-rhino/aztec-spartan.sh create mode 100755 spartan/releases/rough-rhino/create-spartan.sh delete mode 100755 spartan/releases/rough-rhino/full-node.sh delete mode 100755 spartan/releases/rough-rhino/validator.sh diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 27f6df05ded..99625e35fb3 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -875,6 +875,19 @@ jobs: timeout-minutes: 40 run: earthly-ci -P --no-output +test --box=${{ matrix.box }} --browser=${{ matrix.browser }} --mode=cache + rough-rhino-installer: + needs: [configure] + runs-on: ${{ needs.configure.outputs.username }}-x86 + steps: + - uses: actions/checkout@v4 + with: { ref: "${{ github.event.pull_request.head.sha }}" } + - uses: ./.github/ci-setup-action + with: + concurrency_key: rough-rhino-installer + - name: Rough Rhino Installer Helper Script + working-directory: ./spartan/releases/rough-rhino + run: 
earthly-ci +test-all + protocol-circuits-gates-report: needs: [build, configure] if: needs.configure.outputs.non-docs == 'true' && needs.configure.outputs.non-barretenberg-cpp == 'true' diff --git a/.github/workflows/publish-aztec-packages.yml b/.github/workflows/publish-aztec-packages.yml index 2695f252c68..c2fe77d9b88 100644 --- a/.github/workflows/publish-aztec-packages.yml +++ b/.github/workflows/publish-aztec-packages.yml @@ -312,6 +312,18 @@ jobs: --VERSION=$VERSION \ --DRY_RUN=${{ (github.event.inputs.publish == 'false') && '1' || '0' }} + - name: Publish spartan NPM package + run: | + DEPLOY_TAG=${{ env.DEPLOY_TAG }} + VERSION=${DEPLOY_TAG#aztec-packages-v} + earthly-ci \ + --no-output \ + --secret NPM_TOKEN=${{ env.NPM_TOKEN }} \ + ./spartan/releases/rough-rhino+publish-npm \ + --DIST_TAG=latest \ + --VERSION=$VERSION \ + --DRY_RUN=${{ (github.event.inputs.publish == 'false') && '1' || '0' }} + publish-aztec-up: needs: [configure, publish-manifests] runs-on: ubuntu-latest diff --git a/spartan/releases/rough-rhino/.gitignore b/spartan/releases/rough-rhino/.gitignore new file mode 100644 index 00000000000..23ce2843a4a --- /dev/null +++ b/spartan/releases/rough-rhino/.gitignore @@ -0,0 +1,176 @@ +# Based on https://raw.githubusercontent.com/github/gitignore/main/Node.gitignore + +# Logs + +logs +_.log +npm-debug.log_ +yarn-debug.log* +yarn-error.log* +lerna-debug.log* +.pnpm-debug.log* + +# Caches + +.cache + +# Diagnostic reports (https://nodejs.org/api/report.html) + +report.[0-9]_.[0-9]_.[0-9]_.[0-9]_.json + +# Runtime data + +pids +_.pid +_.seed +*.pid.lock + +# Directory for instrumented libs generated by jscoverage/JSCover + +lib-cov + +# Coverage directory used by tools like istanbul + +coverage +*.lcov + +# nyc test coverage + +.nyc_output + +# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) + +.grunt + +# Bower dependency directory (https://bower.io/) + +bower_components + +# node-waf configuration + +.lock-wscript + +# Compiled binary addons (https://nodejs.org/api/addons.html) + +build/Release + +# Dependency directories + +node_modules/ +jspm_packages/ + +# Snowpack dependency directory (https://snowpack.dev/) + +web_modules/ + +# TypeScript cache + +*.tsbuildinfo + +# Optional npm cache directory + +.npm + +# Optional eslint cache + +.eslintcache + +# Optional stylelint cache + +.stylelintcache + +# Microbundle cache + +.rpt2_cache/ +.rts2_cache_cjs/ +.rts2_cache_es/ +.rts2_cache_umd/ + +# Optional REPL history + +.node_repl_history + +# Output of 'npm pack' + +*.tgz + +# Yarn Integrity file + +.yarn-integrity + +# dotenv environment variable files + +.env +.env.development.local +.env.test.local +.env.production.local +.env.local + +# parcel-bundler cache (https://parceljs.org/) + +.parcel-cache + +# Next.js build output + +.next +out + +# Nuxt.js build / generate output + +.nuxt +dist + +# Gatsby files + +# Comment in the public line in if your project uses Gatsby and not Next.js + +# https://nextjs.org/blog/next-9-1#public-directory-support + +# public + +# vuepress build output + +.vuepress/dist + +# vuepress v2.x temp and cache directory + +.temp + +# Docusaurus cache and generated files + +.docusaurus + +# Serverless directories + +.serverless/ + +# FuseBox cache + +.fusebox/ + +# DynamoDB Local files + +.dynamodb/ + +# TernJS port file + +.tern-port + +# Stores VSCode versions used for testing VSCode extensions + +.vscode-test + +# yarn v2 + +.yarn/cache +.yarn/unplugged +.yarn/build-state.yml +.yarn/install-state.gz +.pnp.* + 
+# IntelliJ based IDEs +.idea + +# Finder (MacOS) folder config +.DS_Store +docker-compose.yml diff --git a/spartan/releases/rough-rhino/Earthfile b/spartan/releases/rough-rhino/Earthfile new file mode 100644 index 00000000000..53e1f6365a7 --- /dev/null +++ b/spartan/releases/rough-rhino/Earthfile @@ -0,0 +1,101 @@ +VERSION 0.7 + +FROM ubuntu:22.04 +WORKDIR /app + +deps: + RUN apt-get update && apt-get install -y \ + curl \ + git \ + make \ + nodejs \ + npm \ + unzip + +test-setup: + FROM +deps + COPY aztec-spartan.sh . + RUN chmod +x aztec-spartan.sh + # Mock docker and docker compose commands for testing + RUN mkdir -p /usr/local/bin && \ + echo '#!/bin/bash\necho "Docker command: $@"' > /usr/local/bin/docker && \ + echo '#!/bin/bash\necho "Docker compose command: $@"' > /usr/local/bin/docker-compose && \ + chmod +x /usr/local/bin/docker /usr/local/bin/docker-compose + +test-help: + FROM +test-setup + RUN ./aztec-spartan.sh | grep -q "Commands:" && \ + echo "✅ Help command test passed" || \ + (echo "❌ Help command test failed" && exit 1) + +test-no-config: + FROM +test-setup + RUN if ./aztec-spartan.sh start 2>&1 | grep -q "Configuration not found"; then \ + echo "✅ No config test passed"; \ + else \ + echo "❌ No config test failed" && exit 1; \ + fi + +test-install: + FROM +test-setup + # Test installation with CLI arguments + RUN echo -e "\n\n" | ./aztec-spartan.sh config \ + -p 8080 \ + -p2p 40400 \ + -ip 1.2.3.4 \ + -k 0x00 \ + -n test-validator + # Verify docker-compose.yml was created and contains correct values + RUN test -f docker-compose.yml && \ + grep -q "name: test-validator" docker-compose.yml && \ + grep -q "P2P_UDP_ANNOUNCE_ADDR=1.2.3.4:40400" docker-compose.yml && \ + grep -q "AZTEC_PORT=8080" docker-compose.yml && \ + grep -q "VALIDATOR_PRIVATE_KEY=0x00" docker-compose.yml && \ + echo "✅ Config test passed" || \ + (echo "❌ Config test failed" && exit 1) + +test-docker-check: + FROM +deps + COPY aztec-spartan.sh . 
+ RUN chmod +x aztec-spartan.sh + # Remove docker to test docker installation check + RUN rm -f /usr/local/bin/docker /usr/local/bin/docker-compose + # Test docker check (should fail since docker is not installed) + RUN if ./aztec-spartan.sh config 2>&1 | grep -q "Docker or Docker Compose not found"; then \ + echo "✅ Docker check test passed"; \ + else \ + echo "❌ Docker check test failed" && exit 1; \ + fi + +test-start-stop: + FROM +test-setup + # First install with test configuration + RUN echo -e "\n\n" | ./aztec-spartan.sh config \ + -p 8080 \ + -p2p 40400 \ + -ip 1.2.3.4 \ + -k 0x00 \ + -n test-validator + # Test start command + RUN ./aztec-spartan.sh start 2>&1 | grep -q "Starting containers" && \ + echo "✅ Start command test passed" || \ + (echo "❌ Start command test failed" && exit 1) + # Test stop command + RUN ./aztec-spartan.sh stop 2>&1 | grep -q "Stopping containers" && \ + echo "✅ Stop command test passed" || \ + (echo "❌ Stop command test failed" && exit 1) + +test-update: + FROM +test-setup + RUN ./aztec-spartan.sh update 2>&1 | grep -q "Pulling latest images" && \ + echo "✅ Update command test passed" || \ + (echo "❌ Update command test failed" && exit 1) + +test-all: + BUILD +test-help + BUILD +test-no-config + BUILD +test-install + BUILD +test-docker-check + BUILD +test-start-stop + BUILD +test-update + diff --git a/spartan/releases/rough-rhino/README.md b/spartan/releases/rough-rhino/README.md new file mode 100644 index 00000000000..7e64b12a3aa --- /dev/null +++ b/spartan/releases/rough-rhino/README.md @@ -0,0 +1,37 @@ +# Aztec Spartan + +This tool helps ease the entry barrier to booting an Aztec Sequencer and Prover (S&P) Testnet node. + +![Aztec Sparta Meme](./assets/banner.jpeg) + +For once, there's no rocket science here. This script does the following: + +- Checks for the presence of Docker on your machine +- Prompts you for some environment variables +- Outputs a templated docker-compose file with your variables +- Runs the docker compose file + +It should work on most UNIX-based machines. + +## Installation + +To configure a new node, create a new directory and run the install script: + +```bash +cd val1 +curl -L https://raw.githubusercontent.com/AztecProtocol/aztec-packages/refs/heads/master/spartan/releases/rough-rhino/create-spartan.sh | bash +``` + +This will install `aztec-spartan.sh` in the current directory. You can now run it: + +```bash +./aztec-spartan.sh config +``` + +If you don't have Docker installed, the script will do it for you. It will then prompt for any required environment variables and output a `docker-compose.yml` file. + +You can run the script without any command to see all available options, and pass them as flags, e.g. `npx aztec-spartan config -p 8080 -p2p 40400 -n nameme`. + +## Running + +To spare you a few keystrokes, you can use `npx aztec-spartan [start/stop/logs/update]` to start, stop, output logs, or pull the latest docker images.
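For illustration, a typical session with the helper script might look like the following sketch. The subcommands and flags are the ones exercised by the Earthfile tests and README above; the port numbers, IP, key, and node name are just the placeholder values used in those tests.

```bash
# Write a docker-compose.yml non-interactively (values mirror the Earthfile test target)
./aztec-spartan.sh config \
  -p 8080 \
  -p2p 40400 \
  -ip 1.2.3.4 \
  -k 0x00 \
  -n test-validator

# Manage the node described by the generated docker-compose.yml
./aztec-spartan.sh start   # start the containers
./aztec-spartan.sh logs    # tail the node output
./aztec-spartan.sh update  # pull the latest images
./aztec-spartan.sh stop    # stop the containers
```

Since `config` writes the compose file into the current directory, each node you want to operate can live in its own directory, as in the `val1` example above.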
diff --git a/spartan/releases/rough-rhino/assets/banner.jpeg b/spartan/releases/rough-rhino/assets/banner.jpeg new file mode 100644 index 0000000000000000000000000000000000000000..e91ed867f600d25d3711709e4736e9b691c4b014 GIT binary patch literal 68390 [binary JPEG data for the README banner image omitted]
zNdY+Io=4$NdRtg(9UFL$degT_5T-GOHtS0uf;kBzu?PC=D?7y{>RPefevH?vr#(*V zsv0s%{#x--*$5=0r{6V4x(W7xM!GW#P4zJWsVD)o+!8)Qf4&3P@~c@Jsn)Bc6w0?g z+=gBmO3<*NLHs|WyoN?KnB6?vrPm|&L2j(y7jAq(h6;kePS|WAdlF9wGp)<1`TDq{dL#tkb+Z zyQ>HTAXKuLbPK3{<;`0*p#vmPZT)hjJ+)wM{3%(yw8KG4T5<=erU|;XF;c?7IIcu; zdV#f+4VkPK;i)OkGJ6kNBez?oNO_qNT3#3Q%>knO9%aBd=iExTA4-?>j1z9gR2IHO-OclWDp-oy-lX2eD0EK->ve{D~CGa{6~hiy3}K?)yzW zS~La4QH0kPN{>0uYLsHguuLj$BTW|mfD=v9)h62MVJJ~y(}987j&uidKzj&KU{dT? z2U}cqK2ir6_oftShcx_!N|U)k!^ZAvV2KU}0-ab-8RXN`8YbPTnMzzBVBsdKQvuM1 zyB^Ah+{m)acT83sRET7w9C1#X8A(w7 z_nHyZS4-WoW8i~|PH4$ccG{V8ZA1f5-r0149!J96Sp`0X)~!XVAz9L;+QJi(2q1Uj zqxQ8svf2L2X!*nGOD*=%sa&A{?R)$+@XO@Q#FZ(fCBdFXQbsC`IjX0`WlNM+;uJ7>-~&%RIqEXB{-BP>oJ3hm z!p80fXn9zFE|`8NV&!T6^5bY*`>|{zP50vxwHk#e8(hpsx%2-o`l?D3_)fDQf2vyxN z*0Pf6DMn5*amVqgcHmN!C8Vqz{VEIA53Y8xivc(aRD?FsJm9H7XOG6WDIZ-4k8-iF z7MCrJJr+Mtw8fg-<1Waa8&l;35|NCY{_3%IOLdz~=~#_8mi|W5o(CBjH6LhwDW~oF zHq~!jhNRyZZI=g>aso&hAC3)5)}10QE+I3Dwv25?cs}34vYVN-sf}dVyaI?Yi`Al4 z!v-Pp`)6+yR}#@?n8;Lpq^xHi)cog9pe8;N%2ILs+8erQOU0#M5o&csBb-&6g?=2y31++&pz!tgzDN&JVFqq$s;aaImSl7{OAm9m_ua&b}Kr|IR!8jP4pI~0AW zZC;N;9S_KK3=NMQs{zt8j@9&`2>~iE08`v%kzFBPqZIB$@Vo- zui`si8!+DC8=9)e0YW><-tvzL9qIBEwjXIPqzG9*{)$178I0j`46Q@Iz{+)XIo1cE;vD56Jt_S zuVP4!NBDR}Qj!$r-e@Z2te0nSqB=ksG*@xEGPV?_RzB~&K^;QBH6BSzN*E;i(9YYT zCwun_<@HH2f|z|kk5QV7wXNlgyRNXL0th&v$v4RDD7cjpRCC_7z>sco%9JpZ?@ZB9 ztnwlSk1#}yu2Rt4=a@JI6Sp+aa(z+aZrU=jl0`pj3u{ekGqZ$)lTGlfuIgKoVX}Wz zg<$>_xmzzDXVk@U#khW8BdKlDN|JfznJ!To2vdO|RQ~{U=;G5C%#1%|p5ID_b&i(1 zS%ZmnwG@JJ#Y$vXXKM@bIt>vcsemPA3G}A-oGAH#`DuEATdMN`2kXUaYAa7TQtHOa zI)m?UFW*2*hYS+@btC!0tPYTHsxv`Nk@5&2`tdM?t{##cg$llF4#X zzHhB%hvU$ib^^;Wov7qVx0K0JbKF#6deyfErm41;cJ>t|S*($6vKChGE&CH%F4`8> z94cjmxUzVrlV{w`&rB!MyIHWM~K0px=-Xfym;t&(RR! zGPeqb;mHQCT}PJTt1$U#k8gJvSEhX&jQaye6r8YM$tVemW zgashe^lOE^mmuXBOi0mki+ovE#aG|zBe+p}rtt~N9 zqNxe(?@8cw8r0*TE9ockqJ;tpu4nfMU9vFDjHd(B_o7_bF=vW01Al4GxCfNZ))lP09Uk4n_uY}T6<9-7s@xsbNQ>T zZ|hAVbI}@tMV92sl%^J-lZ<((2ZAfrc)aDgc(=KWn}}&`h{a!5$p`kh&p7rT)#(DZ zfn@bF!47n1sBvuJO(|(#e)TJKv&06t>Eq3!B;zLBkD2K1pOrlN3W_%?IVX``wxlN| zXCMw!^)$9l9XSj49m7+S^gfr==VPu(PPDP`vVmScq}@G7_X zYI6Sotn9DpyJOjrYdHz`N%pDUZ%(yLh>2zBju!yJyZ5glk%wM2+)VWqX0@mQi2neV zanP)#C2RCGnssg2K!#f>-N-aUVd%}wM)PE~f;qt*slM^2CBWX)R*K6<1c6j0o$480 zz`KoWy4>U+?1=t3nWRT)K6NnSnZYD#a*dB)KVe0DgcmOUY=E>by+@NH*<%`lZB zjl|ya!&5fyE-?B)aAKZczsY5m)Z2>AeJI~bcz?OiT4YZr7~P7KA#_U0%vSy(q?IK= z8l$pqEEJB!iS+KVyO7!oZjr#wDnM&hO?E0QWuMt!6k;+uD z+=_}IL$O38QZ^)f}EV;)AVMtuhhtvmy<^ds7ywxM|j% z-3b8)zDK=PYv$TA4;5ZQKijmb?Wz)6n3s~SPc@?BA&HWH6>JU;4MiuZ6YX~Zk3D`! 
zIOODFo>SB)bDcxYpPwM8{V9gTbtRTf9zvo5Fp^%B>C_v=-4011Ed_nkQkJIE8gEP4 zL+mA#Z9!4wcf|$ih<97XkrV7lSy4vq)B~(+{aw_P9JMx=RMJTMx%Qw|t!wsr5_1}~ zO|P>8^**H#b$yFe#yAL)T3!#KBmC82bo8b!i$mxqBn%wYb=Q3;)^`0C^oda2OYoK+ zNdWd4%^CEETPpEoVc{=h(eNBcs6MscM-*D0BE`IBM*=~TFIeus2c>2!JeHI{g>znO zk|gS>PN?AyK2i_YJXP=Orl)0EUPC+;hKV>oN}=%NE>@yt)U~-Qd=;bU6`cP73UkBO zYRB)~50Siu^tGzoz<6tSwx+mvd=|aIM7*sbCa^008cNvsU9zc=zyxo1ML{)1`&57T9sf z=}TOZ$yV>BJVoEcTb(EX5|ds!Q99y>*t2NqtqA81Wv}gL+xS#nFQyvR_k*#m@=6?0 zLPF95Ii?Y8i+033QaG?E@85rs71KgN$W{{UO2 zYg?-BV!qD(gL5kBLJ!-MNi)TDh}MIOa&(pt`tYD0Ulw6`1^ znn+GR4k-F-s7N{Ib`4ezhifryQ?O(!<=u*%CFGPNU;pru}OWT^}OX&lq;yw=NkPQ_*M^PDFXKhl=|H(RqF z>h2kQkhBq>l#Cj7q!m}l`ia|XS!WS1zuDg-QuP8g>rr0PWygY`z;$EptYBbg=~tFC zD{Zx5)GICw5Hs5&nyh^k)!HYfx*e@2qZ&gFI7`l?5EkQ#=O-Qe*2lV+t(?9s=NE<$ ze&=J4!`88~FGRDD20vCEwvNb_o!7&kHvnVOo7AK_uPcg=G>=pnoX3+n3QEA|JW^}l zR4um4k`T9cp);6m(P`A@eQyAOLbQXtobU zFOp%q5Ga>pEdKz-sK=<9F8QZcw&iXKPH};XDpZVh0g;+`eP-QD)H3`-38#SyKJ+f$ zx0e;>%TI0c`R_n-B)peaT~dHeCXG10wH^5ttzI2rG-sW&D#CVU)980!VEJO%IFB%3 zWYo=Y>b1Qp5?^&5Qbv2%I;@u?YII6v(uIz5Msqr#`_T~{PAyp+^I4Z*Fm11>?HPa% zw6VQS>%N${GGj$bcc*}^DlxNlov0F2^T#MoNT@b##!cnVkl`s&Ja?zNMYH2=u5N6V z4m(i}4Qhd$%YL%U8&%5!*IVuPRtYjT4%HNEy>8sOnQlkPp82O8FVglW+Cz>yq_}$w z)4cxx4+7x~DKqe+^`Icfsi7F&>)eXr>QQoK9Bpz^Cq<%VyHCB2v?n z$;}~~(;ZS2pEd)DLI)WJh;;6On<^rkaix4fPXpN1oscg8dV{SCw>I}AsCw?u<9<`_ zq@;`iOj?nxGkrmA#@ir{=A#?NpVT&a3u*QiHl8psLVAx;->pn<0clUYEmk7$lAfbh zUc;?Ly2GbdBuHDa*4U`t<)mZB+?JmCzJ`$Ab*27bBhFFJ0~DGZU0omt+hykxdF)MU zB7_kKu(Y_kYTmh*dwUI%3$G+6A5tpbS$btg6Bk`eKTOQq~91F-_KJ>C?znIQYBRH;3IH;Sb z_sv0fQobJC`N1cEYnTQa5L{07VOFHop%(PLZ;m&F6U9RJy;G{COj>p?YWT(tC`O>v zV5S?E=W>D0Q%-QzGE9bK$hNo=f(pJ`wSP}?3hoYCEgGI&wYsQor6-cEaA?lm(`{8U zD(8Go)F+TLoKyVXq1fUjFxh~ltRN@udg6y#>eisO7wv)>4LnIgi1e$g2Raf62sU8# z!%WTil{G$cxHvrGinMo3ZL-`l9Jm|~e)UH>qgGtY>XRX|HyrUy4fS5-Xay0UaU=1j zR~4*M1gOYGGIG>fL*NkVZSzJ#cpIu1pQ!IPJL;7B%GqP<#%Y#kSZ+@;@Ahkob$Log zX|}}+QtIajbt@soa&hTXP8Y@g@2M7Kf^;N|-nbWd_#@0-I2fqUt9=)j)r(4{K)05z zQGr`70ge9v-qh8h`e5CtB1F}0C!89WZI(!BZ4EdD2ooj#;lV%nS! zxyB2%rr}|^NwnRgEs0T_C~*q}9MUpII63?)q8=9V%~otkIuaJlEXGH%qEr= zPZOgqA5`S(2ZVhwqhgi5>VSRJj0C4X^r{AtgJ`q29_Ry-RE)S(IVzxtULrKrrW=!+ zA^8gjcNrq0-Cg2d=XsE}?a@|*C$iKC&(g3{9j!f0BY8pGCDpou-o~qsrmb+>S$WWx z3P;@?{{X6|T{{;_!hpkjTHml0XBh_-NTO@|itm4=6A$|()qKi6lzN?Isa;Q-71tEC zhJ5uQ*F$H89P?FXgj!2MJEpX_T_-7)8Z76~iV445 zrOvvyYEV*?wQ_Mz(ys2_wF9!CK_QT=4Wo=wsMeJ+a~BzrRGxAQg(jtonWnX+C<{aV z71L`mgwyk3A!;BM5-C=r=@y8(NN7KANWljfrM5j~499LVKnED7=Vmi4fct97g*pg9 zxS1rkUZ9QJqg_0e0Wey&G==Tj2*z_zey8b8yFpz3Bu4WhIIc+@g-d#x{Wjk*5AR+Q zdkO&8{Y8g!w7UjkGQFe@C~J^efR=1yt;P*2WYYSRQzkMZHd`YH2BiDvhK(tEw=3b; z^46s1H5m9?wW9H> zmlJUTL!4rwJvlkW-I-~HZ%ULhKAEnl_3NazZD*us#C#zy`D#*C>zr3RG>=Q%)7pY~ zluM1qbz4v2Pvhh$Q(JqA-AM#G%dgUPLW`gxEfKzDAO&Dijf&lDN>wbf1FIzSQ0GcD zj%|k68B*JCfyGL;+ni*8r&?ME;aSh`OD^tqIYnBpPNPuTCBRE4SD4bUoxgbFwR0xn zf4p?Fs258+id!l#D|;j!27Nv2r-u6lO)a!Kv}Gp*cLuq`;_Af9T|<>BGL~H_E*A*G zyPD9&)3?IWTdXvFmJMr;r8#=CtUUh!>lTug0&!6mnO~ha5Rx`HqE0)EcFiHUy5ouX zXI@g(_|uMxVQ*d;h!jdfw;X=z)>>AIx{HfjCe4`FsvCLRo6o5Q01y&0$WA%tni@-w zD`G=sL~xZP=O4b0bq1N4KUt$rCL60%leqySlaZRq{S`k;$wSj^bEBps<(Yy@1t1*! 
zxFinX8a;M}UrgS7oYL-+85ABXD;$oNU&0HIP+q+vKu%Yr%lXn9VxSEy`6 zYoCLzm5Gbe7P}d)V4Qm&pbjB z@LooLbx%{S+J5sHGowlpn{a1xRgx>0wtE66MpNrbgNQiXI9d%DY5iBO^m>rhms-5Y z7{VX&#gewx~tPYXw#6bm(bI$j3hiZB%h{0 zp!efyts`-EU9NMEFr&FCO0o?ul(20Wt?OA`nP}Y6mZ|Qm%7&Mg;5bqArExl3+!QKO zmKjM;JAG;h(Y_+?He!}9amq+LO4U-tUbtH|6cZh(DM%xeP5>$!GdQ5I=}gmkgQhKk z$xli{g@Kcm4@yC#y1Aq7(-&uH#3N_Q6h;T{q3u^`kEtO(m=elVw4MUH)o^!CYPfgj zEzp>Gl_g{cN2sFI!qLPU4ve&&EkW{YTwD%vcmYqt5p)^2RvVgCry+B3{u?5JW;?foB2TEf^lBQvd z6_bIAiEjETU68461ptGd25NfhX00(DL(svHw2m+{nv1P#y=&{*ph#_Pg?`8#>sE(l z9GW3&g4{>u30h&vxvR=cMO%Go$=J=tQDMZL{eZ_cCh5H+{GGlVeE8wCcHFUwoOGXt z?LDcAH45EI(3J%giW}!IElneZ#-38JX&#(vZ9^mrZMD@UM?0`UsoEz(bsQZuWxmRr zDFBcY>sF&k=`Ag$Vj+ldT1i0lPc)}pUu}@|`^`JCv?yb>FLkB;LF0gIcjm%~PHF2~Umuzm8UguEK_k+&XsZ|KXiK`f42Ko- z5P7TdRp=Np?%u@C;nVo{fPwI+Km(i#WOt@etuhRzBaO$u6)b7pL1nriwA;u_3eOSq+xf5r~0#;o|(}Q?<#h4)R#sxt=ROp4yTyo49i_bh`70{@*C@r>7 zv?o6Jq!$e#X_cQfgb+vbX^P8L==C9paX9g)I%RyYJF-*<2Gi&>MB0At)s_+wCJ`kg zo!nPKHRnhy_Z6uu+mgKh0DDkgx9AHm#dG1HY*^!g&1hAajrC}o(0#z|F15-cO)QkeX(5q(idmj)K2tYTW3@aK z;iUfnMIn`61Kf6-1a2B~&N8P$Kpp8!X0*Iil&rQ0_cWsAr^#h+xcP0qiZl4B~i}D8cK2q^~n6``=)xAOmzcI{+EYuZ4M-1 z=I}BP;ZGJV32C^)lWCsFeTP8>E9bbO%|FuzIx~KeIveY9&u~^tU4vxNbQyb-%4e&z zRh{PBlO8AvDg@wSx7>PnZ@5_7TX9q7r2vDDdsDWhy25JSA47>yCvZ9A6#b>HaV|FI zl-b;!xHp`0Hq7636g}I=kk3m!Bth707xC^8( zb`$e~#w%k{z9J};ubeBmBefM+sSqpyluB1kCF%RKZrmGiHM?=ned<%v8kWaz9z;~5 zDNYH;V^hAS(N+yn8FAiaD1T{4P!$Nq=$!2{sGX4Og~&=)dG@2(Zy_Ki&Y>)Aa>|Ho z=K%M`6R-CeZU-g)Xjdi`;G}G8tNnoGcvSf0Z{y;({y&Eo~c#M`>`S z7!68qKP!!%{{X<6>c2?zy{A-Z+h(-W7pC2{-I?=VNFyz{bGRDv-bSaTzN4wBiD)B6 zN6++MPf3&IKxh{ezyNnNb*211+=EG6Xe5K!sAi>mZkf}zmxIZ14nCFTS@!o8Fxak^ z#;s^mNJ!c{ic64T*^U55p)0C9INztZg7m<^$6zo=@T3>-2}WW_dgi68(m4r#3uuuYB;`iLCrC!5gIg~mpuI3Rar}Z(JAtGrjK1g6Ld6! z^mkNPLC)b`=^nH(bBXXO8(bdsmZiHft#rGNDM(pyJo@8`saFkOJ4@;%#BXv+70Dls zO?AFPupfvU7qsIL|YukCDwWRhqeez{%3(uZ7sVNl8E zy)asg>%+xScP%8~nxz;o+dtwpZbsAu{L2O5-%?v`nzlo+n*lqpNaNP7Y#XBQ5kt{< zKu;Axx^1VYTBV`pH*ay!2<=zTN5^j6rP@Ns^9y4tKKP?ip}3{%t!`KDyY(Jbl`dKs zbeG&wPBYFbx7-lw7;$Q7d9su6UZQk|e8yZ*!1Nr@gZ9YxS#hGrPz7!$ zkTZ-CN*B}aI4KWS= zz)FD5GwZ<>5op&FW7G{H#o=jQ7Oe0NeJfqql-P*SKK;v!ap-)9<7vLA>G;Lz5eiJC zAOb=_#&CG{t7)seH)wwmT@{B4+_=$iHwej5d1R!AlAd>L?m;zQG#6Im={p2BWH=s# zwvvaLS1BOlnz3Cj>i+;wnhiN%yR#808-#M85|tkO)?l%VZh8}nEKo$}Ua9cQ*Y1^H zWoel@PJO{FzvVSDDgv2v2k#GDej=tWmiRI1=C8ZKe_T8)mW8Q4d+SguN>#=JdH#yK zmpyKo(R`7v8f1t3)hsKx_WUSUQ}s(qS|Z4D?XB8(cg>VJg$#XjReZa5#0fRwZ=z?> zbZ(aDZJFI0prT{VNOo1h#bi1M-XxR91GQCtv|icN(%+911%?!?s1KD?@6^u|Ftu~) zuxZ&!F_H2Y5P4VPYBHAmw;(v{soatdK>q+e3aTvU@)=`bO3h+cjc7|3w_-4kNjV*GOGyhUQjg3{73!rjok1bOt)vxT=RcJPzVz0e5(W-)@`~G|k?vq#6!jc!aFuVl zC*p^m!j-`t#a|AQ=#<*DTqUWsDk;N_EFN-s?^JHY+tXfTloOR?ioD$g)>53ZBT3wr z3t>O0D)cz4pEAyB(#?+&pft5zICc1M+oUw$O4819ee!Wr(^4(E!rA&eQ(K}=Y2oFm zx0*`QN7N`{tNno@-D`yF=pg55P(AsnGTD1>B2WrcT2es=y<>Rnm#KdKqgnKl`(JV_ z>j&B4(#>qCH%%DYbnVU1SBT0!Qq}cI;C@26{nu{^9aiZSx16|Gou6+RDV)j*+sC;B z<{!Gd9ca{yIuM6e6mh{L7^ivBE!Ws9Vsv)hW#P4GaUn@i{QWD?)@!(PtoJj;)>d_B zpPi+;q&%qd-B?l*K?)cN{JnoVO4L<(K4f|ChR{h_Dj*-GE3UpGKF*$)btN(``eBsm z`@4XxE=Ftyl&?*|5s%$m)axgPPOfxn_T{qqW`0Z+uWK@6*6)dsk>$FA`qphwv_~ofH6_Gzm~{%%ev`%he1w>Bf(rK8L}fE|b9M;2vT=Pz;{Lzwphy!pxC z(Tn_XtaDjk;$QW4n~)JcJEb}M$F&nVsLkFX_;yk8L~uetKfb34T28@xj<)2+ZEME_ z3Kx&kF#>6s5ucRu0321trH;f$lL4m^xpgi+rIBVLX5`>-Gu%@jNwgf@Lm?ADtfGrhbu(S}RJ{ zfJQ|{8i!O|?5-)55ExL$Y*#9S?oTu8az$5&^rm}}R}iN?&O1<7QTop2po+U|E|g@0 z-yBdxJN}$$PRN`R;?0?ezwz>{{ZF2 ziMgzywQhPENdS)hyHtul=Cuoahk=%q z?Z*D}rQW69klFJaQBd!NcB}kE&gU(f?)FI!45<{$t|V!A_E+UPSwXkse$o{PWAI7K{{zT}fPsvkvMYJ*r zB{gwWhr&R|y%sq+fXdKRl1fjlSg|8gH7nfLmuZ&f 
zn7uev9oGnqvU_CbAJbCKkGC@d+{Q(SA@58`3z>^icT1Z>>Z5FRmsg<#}&K%3nOGU4<6ib#9E49a7e& z5|TJ1j8sL|CR?^N#BJ#F2pGZVwO7r1c)#223-YBrl!L|<=}}!y$B*JV8nB$VEWV@a zPJ)YejN|f{No1Y4Am*gKGpOtr=*WKD`$`H=1Obs;s<`Sq?apdKACU6>x#FiDhH8yn zr&eX%Zf-}3{Kc!drY*o^UO>#}%T_#nOV@mx8Mc2$C5D_5QlxC|4tOP1&Hn%zUKA6e zB(JGD7CKh~T8i!wTN(0}e)51GndkGXgAb|GH3|_P(9v+?0n#bb4v3L`W##I(Q!YEm zP;sX|=t=#Yd(`6HW+MhZr7o>5uR?K*?NXDqMO#4$QSL>3kO3s{p8o(!v|cRyEXwKK z<=tPRA}U6nnxdx}X*pCy9&yK~e@{xQyUtP)2uL^!Q2v_s6yzpFV~@FuVlo|1vaV8) zHiNl2^&Zt=JT7>NyL77l)||TDBpo+3zzaoZYb#e7_5-y*74lpSlA=h!$s^O+y4rqq zDn#mS_}X?N*Q#CADP?xo6kBm2#5xXggV@p9HLJFpa$x*vTclusd9HkTT=5p{Xb9>4 zq?QBd{{XC{tyswKw4cK@WgzOPW?E%To|P-UH}FMxT)cZIu!-ui867t#D_z=5#3}by zNIZ7C>lYaPN{Ci!6xI-tI5#dNg38=Z($V7-@uc+9bjFmM zdzm|PfwJe{^~EKOjE%(mjb_@GFHj$@n!eKQ`aQJ|2W1t2Ksi0>^GRtKu@eI7@)UQF zqn}Eyw#%y;x2ez1n=vY3mw#zpxix1IbGJ>?>4ySaL1`Njps`bvj@6yPV`L6GCRlp| z@s|cWamXWJ98qnyA`$^wMo6ejq?d=S(SJwY0+g@&8=1(dVHcF7CB(W4N3A)HYN`#z zZS0j;^)dh{Qqr1)diBypbstX4X(&jzA~UIMsD1p75EP!6|sh&B|MDI$WMaa zB3mAhYP(jbusdd)oCe$4bKFxFqWvYOFHSRQl=>PZl&lkhP>zk&*9{{*sZ5n5Ed0qQ zxT#+I)GJl$6H8eE@{ybxS*YUM)M+wVmt3_sxvh1E^s?rqeISp!xc>m2l1h(tyeU5& zc6Gp|V1lFTKzCh8pExa-z%2F$)|Xj&gk+?7`*M{hACaPbd;Cia*bQ=%eumKyBDLiw z=2`iQfX)xri1pp#-=g%=ko%3T*OGRO^HCkQtc=^)1+{$3Cp`NKj(UaEi;k|6jnAku z-bOR{(yNbSS?(0%1g~D>HQL}VPR)H}m94^C`hsYD%XyHnr#VQ#r2IP$BDP<&Nnzwp4S6SeOmo`cQNFe|b!N(uQuso_c1T6HPBn@(x4@_SnU9@8sN1G<+ z8v(VCQQ!B`WtQYoZcKSdP;m!pl%dBJR_q;e)4FlwJcoWDr@C|8)7AUMJZT>-rV?W| zqzsaI-Ru2z=PSg#^{Kf0U4s)WQZ-94rLFg7wuYJe#H)cxH4lm{DWf1Q=+e`3oPbh@ z%`b2V>s0#b)cE%)F3Ef?USZ0Ka{`pX)^^pSd|1skM*(=gK9tv|m>-^foY%;t4tYJt;F9f^*18Q=lXq zrzpTRxAesIxB^xP&v904y9w-f9IFzVk5MJO>lWN|;w|!=PJ0ki2l?q!vMbLn-lf9)mUG~$jzT|dLpwpwE9 z-4>*&<^!#hw5^l46#a3UU^3Knz0B;4F_M3YV|50!v&M#`y0(QBuxM-qhz8 zM&4~VM8C_lLAxaN_AM|`_V+CF?P5`Zu*U}-^LVp z2B5q17ie0MhZBQ^fzRVpBlpW~!%noB53re$8w6oPxS`}!%0O6k3f{~%)LtI+>-I(6 z?Q;WX%PPq{V-*>6`ohZQEt@bh=0$Lz?d2o!{nS&_&ZAtlHKONfb;6}F9YG-{BLj*@ z(5pq!ryidjB$wq9qmN@qv2cvA;TAhcj?FTC{rB`>r$?*-5d&9n>i$cDMi~)^=j8}O}1258!r9T z9Dq$;#3`jDr6?FYoTi6$O@=k`p@y1Fgb$He_4KYe#Qy+{k&Hi?QXurFt53R#7XXAa zjFkd$PH5;qQKu<$Szgqre58ZO_o=;|K{nYjojZ9_aD`z`V~kXZ5=(AJCM&2@Kw%_y z#YoI=<5Ry(Kkjk6LflfFX=hM525Ur*_(Ho&rtj)<%Wh$@w=vrp{{Vt3huRg=tz~9P z5B8QG)ED)Byk36T}qsEskb|QDSV4WmYPes z1tfN)kREz%tpyADngf$hF&uVNDMOA74l!B40n~izNBY{+c#{uxwQ6lc*AyoX&#DmC z<<0?V`i^RLcHY~Jt)~D`+r>t6hr!&QV{ZWa@leUG#O=YvTXmC}B7Q$HSgy^j6hTeQ zkIYeHJ6*u%@CKa`4GL%|593thIs?YjIYj<*r(c7@K31EbM&szl_bbtOZ8IqbA#uu1B!8@hY|ERN)4e%z zE+zYVMtdnK0)Vw2M&r$CP-9Dd4J>15Clo~rCpPsezgYZ8zo~V~lH8Jp8gQWVwyNu^Qx|3MEC~I4LLLr&B9tkJXw5m06 z-usImmwMFSru0-zM9bF;11%k+AY!CGpLDQZm@Cv${FES(v$r%+(;pAFtsiZ3{;ZPX z)Av2AbnPRnq4|*@Ts*~+a+LQJmDe*d0QDGJl~-hUBn>yDEc#*y_!R`D1Mi<&X4bt< zjbv-cM7HzTkfEB4wIS;Lw%FuG+j+zUoNez_-uS+x&%Bmyk)*PMu2kW`swFF7(K111 z=~h^4{{UHZ=je$rCbB20N_LbiZW%RNHMdB#9;0nnC=u6dmhmbnN8J87qdRV!jUi1i z6uDB#05+4JYJTZ2gxkikeXFjh3uYpS+QB3YQ?)q7{{U?6GI1AwR)WP~Y9iDJ17^G1&cUyAJq7=`t-}0`-_0YZxgx!3RCLqP~>%F8=`1 zjd5+(HYX3C3BaYado3QRSD~jeYDuqdYg{)CDgU_3#-x)Ubs#8Hj+8>6cdl% zQSP+Tb2QD}pN-UMea4*1!d3|4tu!v9!SOAmtrv|qs!?rJ#DOv}P(s>w19Au-T+&@< z;V(^pC*8ZE{JD}tWkjiD|LG!|OI$j>}xt#?AYpBCPgPq|#$0&228e|n-d ziDx2uon>gtt@kb(m~CxWZ%l}?5U-OXp7l=r zO?ZkeL6K`nR^_&%yVL&werox-A2`k$Q2~ ztL@g?A5h<%@)?kMY@A?MJT(TVH*ILlxr!y19R!T=-mHDL^6ble8+=z)@XKs&!Qg%v ztA2yIY*_2%a(9}ywXFoi3!N$pS` zfwesj{O_AF^)i!`pse8W_|=%w8Ya}M1KN)8<~M>c3H!dE(L;Lv5wj()7oV`yTdgE% zyOp)W7F!9BfI>nQ%~iXnRQ0Wve9bEJuE~ijb8V!N@{YoY^{vhwyHTo4Ij*-Hyi$~v z_vWLS5#)}e8>FWewp(zNp!Ph_kre*`sAW|?L#Y7k3$DM@TDnD=KbarTP;n~83CT2V zW!L>mv)EJ>EycPBDCd*gikh`YN*As#?P^4{gpzwp+Ky!qf@x 
zj%lye(`xCx%Qh{_{{TVMo%|>D3s!3#HRS6XJCVyN{al9lfD*0%&pxyZaFwj0*wb&cGa<(v0H}a?{OB87 ze=GEMjlEs3aukPC0moPlxHlntPAb1KCqzau>IvC0qj=qr#r@_kHl5Bxw?0`e?i(I+ zjD!)~07vnvl7~Q6$Lo>Wse*<{ z3RV=J=_4C!*5CkY(4KE8`P;KMKKXsb6`Yjd52Yy**2Ah&Tp*_y=9`Eh_vG>oda|Xd zO3&V=+NIcv07mr`%arTxsl}|4eQOBIah2|AML&gVtb*`HDt8B+`8Rq#s)I1bXpYwSnB?#Pu>#w-NLh6^=35zORVmk4ieOhUYicbsa31 z729NoP~l1U6lV`zT%AK(Bp;PvepMYb*;0bah7A+g?TGu+Vrm$5g(mf!c>_U}@4j?CLJ%ZfQ9;Un6T zXBGMrN`qn66z(DMt$j_Ne~FWOykl00z%0PDd1i%Fu2iU3mwQk^tt3A_7V` zqiRUyK}2SM za(8oE!-;LZ$qHTuc~(7ZH(Tt>J^DHZKD7?o%zrp3JF)X~&TGqLM%>HJ%u>*W{GfmU z2cK#~mJ<}*WVz(D+IK9h@UNi7Bu`LuoG3nR#qWk~Yukc3QgC?~t9%ScLN57*KQN)l zEIu1r17%MjE9LGhCanu-g}UiVN5AbUJAq({_g;#?A^ zOA&CcHt#4&$XLe&&{e;~ErD}mycJF8z;CXdV8n(Ya%Sma+2gyLR}reqG8o>#yc* z)?FTR^vIFtZcB1`rhihRgztehTmRnRJTBw49sZ&uXFQXmNE zSDWdIT%Mfi_|V%^uo^2@D$0Fo8q$4Q)Kl(ipDy0DwuAD3o<&G4^|}MD`8O!8w2{Xt z98y6IaDu>0i?vRgwdl^RTW-pN-@{JJMB}o(Kcc55($hgHLt_CQ{{S3fq0Y5xtvi0Q zw|TWn3x3w(Un3YGZteVPUC_6CUcSZVdpQQ!#@zZS6ahaG4 zZn>4HtJfy0qO07HLG=uhgcg7&U?Q96zZNWI$a!Ps1Sg*6vwBYCj3{g#v=@50xpupz z*BxJ}WQ95OpmN#jSRF?fbp5jR0mr5knJ*_OSE?vJKBu^7ca>|p?{Xqf$nGfNfw7&5 z#?$ZIP=`}qg9A%VpK@$15z)MM$@&VqeU6NR;YQ~rbPCcnt%*(zp;rcT{{R?QYII|k z=@v_S3v&a=NAy<{bb^qLQ)SAyOt$|3!45={_V zjTSP~9o^YaYRbncOIp0l;3|Ri#ri_lw8?Po910<|-r<{);|DR3!K|Oang$y#SIft# z#YUREhA!D3x0*hUFXZ{T0OK?j)Pw}b63{srsOP?<2#VJJ)q<|sqGn{Wq>k92J4~YM zXi`=;5sD&)@^Q7*eAUv>j6BFW2qac&nH+?Ik}!Coi_P9eqSYo`NXn455LOYMJy=b_rL+aJ+kr3gBRoUcvcd1OfY>3&w>n;+Pich6^22S+`-*EwH;2ig-o7_fa1*xZ#gpYcQ=FObl^$;UIFr{ND z_O4RHsW6{eU#=GGYR$E*zsN~RsQ%Al*Un29mE4pIrzNsaBif`?#lqW-Aw;cXx%HyU zZk`E_73JrQ)Yw28kaqy0>TAWd3Qg|Zl`@3#r3}?}>+e!6cWJHK^wcC7E}grOk>B#8 z&k>zXouy>Cr{OE-bs;DN+|q9koe5)7mi1PpkLJAcywDUq&2?m_arzZhZY%Jo(Rbxa zYf$QVrEd?JAw%n*TE6yZ4ZwheB>@QRl!^zlSZ#VvAZ|MeZ6P4wQXOH{Hf*p5mmmdp z#%PA+PRtFw?>bnUsanQUH#AvoP9HFw5)BY(H8#|A2}<7M$=cvh$Ri$~*Yv9SaO$m@ ztErhG!0d${0#0c6O6z;H{gPO=x#gZgAkvjs#km+qr9$XR%j46)dpDT!`oYobT14Or z_j_YW!W{#Vk~kpn zL9Tvy?k2)g4RII2y}B>N9<|PhdDOvPw=lLGQ@^y5*~_xYEl{ zAxK_INMG!$*pEuHJ`H?Y>Mnq4R9LT1J0nP3mx&S9qp^i6g0zm^vHPf**sE~)suFPW z+JnKpNkUgdy0xk==6dvNiF0-oPI*7NpX{9s>rIDH#CGOiVm9%$jmpAm-Pk(cXS{Vv z!Yy+nIbge6l_e@lhE%W@-|)bx30FuuqW-x=q(+I@9>*1o_p<#6k2zNRi5R0tUM;#& z7ac=%b(ON$4T5les;~9h#b?wuXC9X!O}?OZeMMNG5glE&*%)1_L%{&xBm>xDsb5re zskmF6kqyw}X-<3BRY_yY0qyHjy6P=F+#9+gHsMwr7(bnK{{X|sO?o4I(Kb~)>n=PQ z9(t9GgN|#QnpW8Eqq;XS?m$=FtE;ATjGZ6TaWpJOTgl-=f_SFw4%z_vfCGu&^Bj7c zY16iCWpIt7idPk)5Eg^NPaIM$P1LLXs>02Ii6F9;LY(K)fuapo7VEGXE(MZQRiB&d zKpjiha_tP8e3-$xO9WJW9LVHkmV^46UQN5|c+pZ%kcY;3?sHPL+FUfsj_E)yxY~2a ztyi5E9=_^!TzniPg&*Nl#-(5;?`%v(VHr+xcs;6_AyIU(>^0HZlJ1b{A6E6-Eh^D7 z;9wMkza(TDe4F6^0I2GcBqv+BzaS_DDy3eR>g%K%6gWt2*4j&B=JYi#>33&dWbT}m zGQsTPziN`l8|aAT`CNTU-)Qh@Ijxf}`lgxmgdF?fGhKUfGLR(T!74o>+LX*$XpWjrQ#s@^0=`OxAEJ#|cmyhg* zlt&0E=V<&Yew}uWqBZQOQ9o4zTWuJ~AI&HMC->2#S`r6|TU**f=_w9Kg0F~SPalz_ zAvsY$kX1cX+g+MpQ#I3{bD8@~X(xfRnIFQ12|zee8T7A4ju}CV=$}(M`n(4tC3W^e>}uN+h$GzgzOSq5=~?xCfOtN& zi6uEb!Stop>wD6QN>EY>s=b_67P`%i+}mu1>(4fZ+TBuCpS3!Mk{UY_quh#a)7ogU zmvW*=sb@-Q{{W4S({Db6yvL^BO2NL~M}NfMqK$wA&q-OH+@Slx#`xq8-%69M7KAA6 zNZ~33e(Lj0>Bz}W&n+MV0?sj$??)|L;ze}{!hEEoHIIH#JqhWs@WPFmDBMsI zTLZVPEthO(;R|i6=4{q*OEOypg(M&xecE|y5Osc5z_jCVtfslQ+o zhY|n-<@LopA~au-l}1XkFiN}j%`0j$RE0g~VZp#fd=?S`BV(E5`_z{^+|M{$gBr=F zowTi}MF68IOG@m1rmN)D>RA&UUDU)|45+r+8(1g5Z{b#(S3^33PDrt^g(uE$3sMdh ziiBD4$)Og@PZ^hQjbYz4U#`M)_+qs&vK0q&CnE;U!TSb}i++8t)7C(jfx~4f%8HM1 z{S@q;o!uZhsd`YDGn14zwY&tMbz-Hv1(A(13ZW^-60j1FFis@efcgsIcr0y zQnR;=inHei=m|u{EAB7Rg6b^MKS<6hd4z3!T;;=>BfH!rLX{eFos=78Aqi}(usqdFdK|F{;NaNEeRK# zu-k_rSv(3$rMlUq^#BWEYErksDFlIK 
z(tr(@PhA;m=ESq7WhGfHtvdny@-tdh;H#$3xQX%>Q2lzBO3$gGURV-YQbT8zx9U&0 zrwlgS*!PIY3tV*t5sph6wHwlW3y_Z9Q#Qdqx z^ww5Q7SQ`1ro}ncW-k{WLJ^zf0qJ%%tyHg35=`RI${ehV)J0DnItJ>OJu90WI{h-nPyP$>;JE z&m;JW@mN`I(za`E8;u>TDD(1)ROXV?lO9LK!Dk9MKPatoB0kiO?mhy48dDWfof%Vu zildXQnIT5{fSpO{7M?cQE!x`+i6|*MM+Ey+57B!!R4fPNj@j=&~vstd~+v7q{!?;kKpmBpu{d?50HHEo`t*dP8Sa9U}R5NLY=BU(> z*<))eLV^m1u%}lanry|M{WgTM&uY04tqYpB3Y#tAAK@Mc@x>?7ijx`|r%WYMA5mMm z{%P&~RM$35y@>5Za!O1~b0JQWcv8Kq%|EJQT$wuvC|augO}Il$o~KUdVd-?nIaT;?$40&P}3lM`iZS`)*#<{#| ziSMOIL!}J!+yZF$bTj>cmB@8bsZ6x0LbXMx5CTlb!1w#YyCtDC;$E>^o>4m!85qxg z)yEwcP&IXxPYj?oAy%(zZ4; zO$wJ5J;|gxt&1w)=NYGnH^?%OWD<~`;Cs+7QZDeKPK_!%fDlJ&%KS&kl6k#(Y4XFP zftqK&+y&0s>~{`SM;z2rI*ydd30t`s#b~^BF__Hfa0>PYgT=Sh;M#>Y!7ZRmWbQ(- zjQUg4ceO_$FCc|}G!tUgEISo5ojXvol5^=;&(~Iz%1Rp`jQ3Ssa~{Nk{{Y028oIM7 zJ6s`H1bo#M>dy{ZH&xyaq=_Nhb;K{s4=|t!KHa-fb*}Gin$lk~T1g`~3KY$B*Ady4 zS%~_+MPQV2KRUAVPQ;F4E3;koZ%&Ziw^+8R_K+~#_L5XkKfcCjx;C(|+*lGV)7p~J z9E2W6)QXQKbsReh+jOR!eWyFN5uE<|jxIV&RmzmR$4>-^PM%cCzT<=2p4D|!x7pF0 zUyL^EEEHa>>$zE6KcrxfHK!XmQNhj+(xaUV)|<`glWCN-qT?U+ZiA7#9>e!j-j&o2 zED zn6bmy-NNAEbxQ$9KBUsNx5C;Y9UGX7ELv`5;^t<`%2-YcIL0Wl;IGjzmg;b%p(l`i zg?%$qS?pG&N`2;;Wu%!52aKMOX3deto2iN8!L{! z;?;l=k_SGt80FskuLF_ey!XR43VpflRyV?LhAUN()oSbGK75C;w0VRadeu!xqy0g+ z=ExhbsV8)Mqx)^l-SrG&?6u)UE3o{kZEdD4h)Yd2lpzW11wO@U6j;FsLQ)bi0H#~2 z*;qKh$fcZw^#)tGnOCH4I|(0$dfuC|q()OKZz(7({KG#f?w`V%V>cNJDpG+t?MmfK zWwaqJl#Sef@9jcXW*@!zY*CkC9s4Na*vpTagXC(r^V#DC&y>zdCcTc{E{|!ZR%64YrQ{V)Hlar#%fzl zw<~@^5TyW9p1@=JYqYjV{Wx~OT8Dj%pv+C~yoZyM^5-VFRSQXU?@vWt%TwzGNR-oq zWGD$B^E22>1KW;=e616x+^W1t=snxmh5{Ozl-bQ1z6z&!< z5B}n(pY;rlp)Tg<0zr{$$N{Avm?-uFnH$i$676}JHsckvJhkKIUpsNntu00b%@3dmr;e{uV~*y%_BaGZ~EjMR=gk8FNRmaAjl zbM5k_agW|9s!vh0-HJk`>OD;!D~IMoC|D;R+-9oJTl<$5GV}~E{!Hn+dkwJMCL^Jg zjjW*`F|TIj$Y5HUc%dWvSV1PE^mvx(Gft*W&f zyw}}Y#s(AfWcKgHQg*ymc!BGT_a)2`V%b&msU(1~pP|ku3$5NJ=M663Nc2^~O(pdx z+~y>brR4J7e?I>JN)W|%eTXw+o`9A|rkz={-t{)G>U_1O_FRx+T0uW`gcW?>nA=tR z#Fna?)tz3+xlKO>YGF>HBY>0Z`EV(hL)@9OXeMou=IllW;hc~$LAt{7{3{*SM*Z_8 zhT}AzA;U=incP^mWt=f*q4dEgaKLX(iAu28MR1dBa zq5G+l=hf?$l+p6rv0tRIGGYv86h2ieaKeH9HF|nc(CY<*pw^oW?$+9+$%rJr=00RR zv->1{HutJa;sc;siqn67O{!B)!`@aJ2?;-+)|;m?wzrH*%nOpRz{nQGVVi!M6o$)) z-Lk_mWLLS<@}I5VrDD?deSg1q3BXAdjUx zK)WI!9%0~mRqk-ZsW_@pw9-}5PdeiPPp1T&b5iz{b(Wi0Wg+!11gIQhpnD%tED>U~ z%$CYj6_p*PieU9xz^^!n9)Ypd4W0wtjhq+q2-C0|;{{aP((UK)}JJ^Aff=f^XhiL#zrury4v@$QT_ z<+jAVs0VV76#Wwa02OjdgW1Y6@}BijWOeQ|WU3Qu4kUBt4AWw~NX=xZ#ZEq43a9p# zvS0hnRsLnwk$QsD3VlfdL!R`;_Pc-ji@)^+ew0hP{{U0%45xgQw&84TaD&g&6vHcCHv7qb5|Y$^+UJ_5 zL##6#com^rRrkSHe0Hs`^@+z@Pz7n<+ymN-z9^@tq4OUh?8|Vjs$3z0V+NgVrEW%f zCXo)PJ1$#KzZD4Q<`_KHTaDFrP|Ij4b0fccbpHTVsLpLuL?oxs(HF;Y?@@R1JN(Ny zt#=B0!ApHn(AdFBf#!*t)KMLZ7Sx8++sO&q76Bl7^Y~RcPgV9UKs*=b@IfPrbGG$4 zSqP6U;M{O)1gn~j^P|8$)X#HwDyy$&K%F@(3 zh;@g!4LAWxN9BqZwe>dQ(o~~y*}P*a!KbTlQYAQBO4gTl@~^F37S%n#TD*h|z3D!t z+OCbUZIIMI5sz1 zgsXQ2mEhA&w*LSX^||r88VE_+bKFz4y5nw=<7+P^NCzXedD{_{`V-PtyzA3KiTz$( z^GDw7H@3qwksOwmJ1suB`u3-<4|mwibf(za);Bf|a&hP=9(}Icc5&xeNG>fn$;Lkp zDWcq)j+{Z$%UarmrKwE5-y`|YVq2l3Z}%Ln33Fe(AHf|)C)W-v2eE)SPNEh z{WL|XW$132+FX|9?XQh#B;Ys+3dd@xf9r?W5p7?|{WP`4j^c?xK<%1pv*^uz)CtIb z@hV%5x~;7 z_>w|`N997@C+Ov$PC{XCu$8-k-8|-`{Yy43iqjHqq@l2coO=3kSoxP%VH8v*g%vW6 zOk``~7pZ@s?5=71bNG405VM{^J?fyYa+r*TJnEKvoSx+S)8|q3+|5aIks;T2?pfM? 
zlm^t5K7}D`-Gv^0m9vLIr2rn{IX4bb6@_8D>;;pwCQ3Q{>!rGK*CzU#Q@<9qp}6h- z(Z}Ij#cgYj>K{tEX+cBr6*gIXRqMSs5-fNe`Avk4>hDc?3XP|zwlnefI*P@i=WQ)D z4O<;bIOn|wUTra?F!HVwQm0wY2I`|Qyn1z3BG6rBg#>~y54gv*G`r&e09DGBB2=_1 zd$~JzaaoIjX^s5qWzL7D(|A!al5!7lX`fZ}+fcp4CqN@4?ln@_@uAcUd{EW0-pUB$ zcI*@UD_b5gdbN4Ew;ksj0o4RyD?DfNtBe~Lu_|s>=t~^)`j*t%ZEd=z+-&^GLCz?O z(Pz2e8$p*9*2wN?N2k3(wpyB0o47J=Qc@L#9oQ%Fsn?}9)M^CCGob__MC{0=Z8jm% z5QwD4s1`eIu>mq}Ye*yJMowu2?x*yFKnGKLm_pC&x|5P>L!YJMCj8yS<~OnjD9<0d zhC1uv&p`EF!iSiQn6l8`;Is@9tWxV>Q@4zYOY{$N64qWf`de#CnbY=!NO3#`d0^(F zJIC4!s@B14Pgz-tc^TN6fx5NP>*q_aPR5YZmlX;)cy`4AFt$+WT!MCIn)F!sc=o8U z_cJZzkHMsq)ISiNLFzF-rRSlPlii`UU%Uzig{W?A+!&WKe7D0#KRcv?JwXK2krShl zFZyfr-ky}DuV&>Zz6~+y-j_<7IZ-Dhr9k7its9*xcmb)xhhjA(u9x*{-KQ^6CPb23 zgCK(QN<55(% zUwP#T4wbD3Zgc$<1jDZ>`A&OYgT%`L2gJWuI+v?s*yUZ;nDZ5a+Esu?fA!Rff0Jg2 z6J?m7+{CAPmAw4Ms&0?e4rzTs8uH3=whl!RzkSu6K-OY)t+&*-2u2d4ftsb*m4?{s zQfax_ip$;5y-gm$5$83@UeZe20HZkz}1&o5UdvLVng_Sy_rN>-Kj^XK72c;U- zwOS)aX-Nq3$7-d0U-)Zm*5KMCpCzY313chVq0#+wF!I2+w+3F&@f(RgV0q zt?>{40A<=$@uMF;XQeevyCH0;LD9z*7qiqjON>&fBRKb=>*rHqLup%O=ZyX8&2DF+ zB(k78+&j5yg+zGe{3SUj29iIjrQCGw-r)^SW;|6aBiDrY6%Ihwvb8kw#6ji)tQ;KF z{nA|^^ytzZoY72In^R7uL%2Sa_8n}EvlPm)S#+|ScyHAYb;Djr(F{pdrA#!b5~s)@ zxLgEhXy=YkdOGT#fqi|gWJr-GtQRQ|kl_i8wPCQId=4tV_=D1#v%=?y8Cs7>t;qI! z)2elc8_GaJ)so*Gfa8jlEnYM)dVbCNF6|xpFF2f+g7;^St!U(YszcUdxb;%qJY&=T z0W`#GxQnT^YqFa_!D+PZ`M#%$vYjySsnE?4AL8R_n9}9(wb{VEf<3q9s??7eR`w+= z_Sll-g018^jz5hJT|9W|_g^WA8A*(lWAD<;Gj1AF9xIDT{%2FI-AmJ&V#d{*dl1Vb zVq0w~M+3JB;Ia?Nv`w>f3&|x|_|_ZnY!=6t-2jar)!E z9~3(|XCC62xarr%WcM$`CsZRK%698-^3uGiEF+Mg_`cMwj}#ND_-2y zLe*N9;cbK)wO$wOKv~HjT8^K%&XVbU=?<+*_ffz#zg{4| zb7Ki(Zyj~g4O*W>CYK>;`-xFG%|dI{BKN6<;{KmCl&C2!az@a5Q1fA+tgL?OcD2QY zAzK`EDFph{2872N0$#n?eZu`i*1NpOskCXf1)2&5%(H-2KDe#@HR;sNQ*lhGISE>~ zsmBg?P;K_r1``Q(9Dt<{M1V4LQkPGUlY1tr}pMUkS+2XlQBMu2~gdi$X1x0S}g$LNGL*b zR4{UyFz{g+8i&M|qSeF~6CNg7QoDlg;r{^o)=hlrtg{)hb0y9dMCd%&j3(OLG!2bM zWeETeKx(!1p2HGb%XemTz$9_rtYnGOotE@PBlSBmx}q+n*^BYZ$N@+~u%Z2R=dQfd zZ!tCLubOaa0Ym8|d*pvLl%=UD8A^bl>&~9N>1QOO;R(RdRs!1@1XYF6h$cxxC*((M z-oHvOou}I#hsjOxNU?u-qk>fDJ6Aa%8mX5?aQ2%6+MtP+FTxlGs{^ z;8lg{ABUFSoMmZ(SX0G`;S%Cx_^9Fv7$nl|_QUu}ir~

~N!*vKtOF`r29<+<} zbXMO}t{;;-Qk+oNFsvhZrDlF+I95lYt?~Lf8Rtt*2U32OJ-r6n5~Q+K=OBf1 z>}gXl!%iT_j;9o)e82rG33AP)6nEtXx8 z%$G2fdV@>T1`LcLAno_XE&a*C>NGq$kt-Vd2N~d0%kFXM&MP1FyvSGfx0n0PT4?r2 zAwF0~BXpWKf0)qZp!6*A7eq~9K;lAXulM-L!!T$g?Gt1~kr?#!d#<=AB z4|=&VAO=+FOQ|R)8&7drO4GKt^670VP~_zzrF^sgWFG@j^h~2tv`qwV$)|Qn8y$SRI-vY0Cw48UCM3gXsf_Ch{rrVcGEJ;V3GCOMXdD^4`1sQy;Wc|dc z@KzwzZ7EznqFx|8UVbygXE`)=VC$lW#+@AcZau1Vx^zEHPC|oL`1w~OC<8PPdGO7r zA5Iub*^!g|uYf6mnT=<&sIT_C%Foxiji;@)*$VO{>`@~J2elMi_3LxmNrNp7ve$2$ zy+fC84LYLQ;@z-M532<2B<6vq#Mjz2(#_7_DTdoPDgdblr{c%E_7OYe$i@Evbx4;s z!PMtjNGncKua_g~M%KN1Wt|+eJtqf{r1u7>{*mf*#*~=PzM_ohYL5doF4#46ci2j@ zp}35Y;r@z7?-;w}vFbAx(WjQPMciU(m)vocIU+pQ3yN0l#Tfv5{V2luq;#gdg|=M5 zv11%4gH^K0tmeRJC9j>LF}E~xXVqwsk2VyAM|TAElHZXr73YaQ2c7h_=oU{_e^CyD#ImiMNUXbAG7IImByTmuPye< zN)OCFzVv!B(;+mz*u7~+_21E=MnQZeR1@oiNhNfXV$0d1>zO%50@=CQR6j@ zeFsEM8k>WLGMD(5)A1*1iScBR>%$}i-`CctJB{+ZjG+eyitMja{5)Cpj8~lws27vm zcB(nm?+G_O7-W1aDpO8p3Q95u=UW~!^FncN%v(8rcLWbmXTm0`X*!X5M*ODH4&V>N zsndj&5skmXrY(8X@8o{0Zftgdj`H#jFgsKcqjXEQ04-$BCKbI^~>3Y6}> zO}#1vZ7;2`CBy+R=BtFh*z7WRClovRJ-ZV4?htngI*Ze$wZ4XYTa$?iYjE10=M_e- z)@jx|gOvrqrN9lZ6cTpf0g=7Q@sBjoQ+Y9&iU6dlU7p z!^v!Wh-l76GUQdweadyFEla&!*o1+>Pzudi2otoQOJFqQc+H8)Zqh6%J>RX8rtw`R= z&NrkJ#bydQw{u3^qpqep<55~A-q?A`$SK{ljQ1vqs=)DzKX+fJh;mseNPlu;v3@pLDuZa@_t|>&bSsM6{cVZ9k*0} zck7M9bHRq>DDO}S0bR*GsvWpJF%lNa-8f2$NYAYhbk9o6)%v~_x-&kc>9&#gK7 zD$zL+7ksvM)aXBiT{$-HmhKXmF0Ff9!5zC*#@0H{$6lN=Co-2 zdveg)h9;u2KI<~#60qxP!N(aNUe&6Y{{WbYu#V(~r>xp8@h)za)%I_KzQ855WQ6Zf z;P(|D>is`%vR#!D8dMqGji-=*Iw{kdjuxfVP0GBIE)PGy%vS|zCnx&q%4@$47B}?d zO+}_2M9DA254eX2!VW2R$Z8wcQQB2)Ugbj5j^3b2RJ&{irGPTD=eQoU=KlawCb+PV ziBd{V0y}M|N30r(B*_sZJ@CjvNM6MHRAV)!TA1rixLin41!QtfGfG#L*@MV+#`Oy( z>id-CB)u(5QjP*a%{Q*)bA83O!NOFbfS%{_rtLP>x>eRRE3AVWAz0k#?kd{adSlZ# zmV0H}Ov#UN3P$H#^Ae>b{XnR;rKNxZO6CV%rA)lWyTf@VYbCEaJCas7BmDe!tQ@dQ zZpxd|C8bRHl$3rRUVYM5ncVD${YdwuQ}%w!*jtLmbhLme!ViI?59B1x^I_3C$+C>LhEO zLvDoQTO&B~QZO)TIj}JQ01C4Ef(Cf5OKUjwT zKV6}PDL|BlH)HAds$J9n0Au~t%erL9>WEFPYTTqE84FPM&ib}}CDW*^<1a2{>$!>>6U&Xezv;G^7!(H z0H4D(tEc@0(!C7U5`U8+?A;n@DNlD$Nmu#m*G)b8-y0&Z)D4Mh!;V{pB$TG@R~HDJR~H5i!VY zySR*Ck@cw4TWTn?WGRC#CK-^GHMFC5TchgH-7q~0(!nk;LHEY^nDJ{6da*~{p@ARRK zQ);$Lj`MP(HHCweD;!a2@?4)&uD}YLkR%zlsY5J0+lolaj(bw(5{WCy+#3gBj`gDX z)AmsFk6IdADM`r#u{6q=7mF#`J&!Rd`=uP|BdUC_@26 zaqUXkQk^JqCqxKI25MXG&k2;iPpWi z2}w931bbG*WP%gi6N*%~SOF;ca-I!pb%<@YwIv}+BR%Vh-?`co!v$|X9*(f5 zH)Nkmb4-6V#R(}5ae_y!G{t&2Nl|oyp4c?CySoWzFO;oWVoYYiQPl(g4tYJt}K?a{A#*phEp6eaN!}WAO_jCVEXI9v2IY;wxz@#p~juNIEU_KU-3S1c#rB9TxKwCQz3;b1iTWbEMQ_VBgB7~ur zB=28kfUml0Otk7%$@140R&#@m-OWDhzYbbURjG3=N&OXYCq8RDCqGe8h3`uB8>F(< ztCUw}L(j<}q!lC&az;R{YVV^VbiU?O)02jDdlJIMy*)2$7Zn2E-6XBDDXczi1eRO;apPKT(Dgu=gPYpJhxw6Kdo`ptr_E*gF&b4JA<>~${KN_ z9q&B}UBW!)9F(su#UC&H9%utYX<6mj`f^nAmD`j`0UVM0{*^!U>#A)sWK2s|ZO?0t{TM6WA|(U^H&IHA~(H&J>OtF z=vj_}&ePV`+qcbHIQ?MvHDQ=HsF&%^r7yj7%KHgTnRdc}Micd+9YNC_V;Pl7MTOri z@|~$CKf7j@RThRauN4I9<;$HO7&kvD{8F`T%#xMg4XZrrc)&QQwC|A^}$&} z0mqne{tZ}emL}0*j?-*5%a1sKwJmAL%~ylM%Exm=x^q|0v^X2{R8jTkiqpzgaRXcG zbZ4bLDmA8qZ8n5i8l4R(%Eo$YAlu$ z<{k?0II96*>TZp`$7#8&M^R81As`y_`4u?&iJ?W>JC;LQE2iB~v`B>g#t|&w@BPGs6vqC(Mw$K_TxT20evZELd2axWaqKKKfMwKzUtBHe!y?slfVCya%SlPiZtY!rnIV}Qsd4%x*5%pbw{Ez}DvEG6 zliIBAg6b4I^uU`^!5RCb+PQh8bt|!ECNso9-X402Ec&_&6V>{cRuiAZ)>ck1 z_9l&1^7lG9q^B7uLvjID58YKb>RlIQ)KFcAG8;~%U5Op3g?DI?oR2!%o$b1itfX;I zv9%IhE$>(M!`K1EWpgTf{6@*xlvBiKM6Mc2<@!?E(NtFz$VmH~_o&)EuGOzJ%$+TE zNJQ39OetsYC1aoSioRWQixTN&aVl&rJ5J(9YUj?UxiO^mI^&rEpxe9Bee!Yrbp1Ab zqhs*}pgJ%|K-hL(J zggo#IX~DtWjyqAOMta#k%(nF2pg444El5@XSR4vAU0mx7eam6l#mWy**Th#(tha|8 
zxTZ2t_7(Gi+N-UbMMY`n1Us8=I*gKpo<=IzTy9M32=2Eqr6J6x9^m@bSLzm$yY$ak z3$!;>sBgFwR1Q;&_M+kv5C(zEs3Vphr?FLkUCl6Ul3$Oo;Y3uSYq@IcBxJngu{EZ~ zcCV9KScz<1t~Kr|5QikT6s3J>x@NGAXl$YNg(K9{1Zc$r1MU}Fy321ue2qnD4L7@$ z5yoiJcUN>Kr_-s^%v~9dj9?Ae#Y9~r=WFA6Uny;Vm0&gJ z*#mF2+RWaBb<~hEce#1G@6{_tgVGOM;+Bx& z)3hNcw-w6nuxm!ImnU5>DaEa7Avx{MP8#d2nkT7jszvJlwx3x0r9=R6^r#kJPc0Yw zs@%0|b`;vl*c7gRjRzYYtNT2?)^h1&Y|L3xQm2?sK>OMBtr#InZ9|-3#MQR&arS3v{{R#H6NT05uUD>iDN`;pB8XA#Exd-3%JvMYPJ*Iv zPhpIKjxuzE0wVe=E1Uf@n|ILBwFRL_VoWpz5*3B04r<1DQ}H8GsM;<>!y-sMD)oWLH>hdSu>uZ`HeI zv%1?to^BJ?#aZPzl%$ZBT@N~ixK02IEvKCB7{O8T{y6486YhifH}{-au>x%N8t3Us1o5@6criAoFJ3;j_wHhccr>dsy-m4S)@jd z)w{GbTSA+1G>Isg=Nq2)vks!B=z_a>y?m)tH)OJT-kt~(K%QXFMV zIB=Zv{A;3-zQpY*5q@JnAuj=5=TEYK{c2vXv7*YXXT)9UAb&GYvY-C|TA8@#Y^eBO>Qj;LT^i9}WZs|FeNgIVg_YM^yDey0*2s)Ln=5Ku zkBWfQr72*$fr9SP2lI35fO?;?J4>VxJVajbf9?Z6>8gl@Jwx+GA7ME-Qg9c)l5vbt z?@4s6+ViSnKGX#+EOqW%s!kGQqQM$k>2k;Qb^wb05H=}+eycOw3 z_1&AOf6ILcmm7~FX^yD#ZHsYbE&1*4T1#Y>&>c#KR$l_4i+OQR&zJsa%u)ty4IT6Ijn6uO2xmq-!j zDYmSw>`8I7sD%EzIOmFGx_yzaURAo|;*(On_}Lr(08dhISd9}a`jrP@NV{G5zMX#5 zPQ;A(O}#29;QisA)T^f2{>^#0%eaXxHNC)`DP4^l53#<~&R_okT!r;ZKq^jbEn_(2 zKd$D6x(W7Xuc@6M(Y;as09-BZ)gx9}qFx<{#hJCCg7aa|EGWDRw(iDw&NG49mdo(s z`uL&IjN*A8(^<@kB+b3VlLOCYb4u8i9u@Scy4!QI-DH)>p28N>%I>aDH68e`=}x@x zhplv8hijS*%0TL z*NE%tPILaywV(Ow-pBDqn$2X)+kCit1BHP$JLRbmK{qJsDo@Lo1v1>8g>jO!iH#^{ zuvJVg{gD1p32(&qp#X3{>Jwk-=pV0M0Uhww(Y;C5NZob3U!K#+ir|AH)P`GlJ1M3} zDh`q`9nWt{&u;+eODjzg{X~ueGRXG-00KKP3#cJ2k=#%_ty!cbg-B?O?I50OR=n48 zExj{l@{AR{-x=-+6>FpYkq~Z?<6PC^KKg~pWe%+*S?6i7wf>g0 z7ZTCv>SnfmBee!l)^+{v%uCZEtxe-O?g*!NJ!I38+e&3g_;(4$(g>m6k7&N7c#qdi zbbF=N_GVrdmt2!BT8nXEMr}bWkotg8Ltv>RfJsu4kO?4F{nFoJ_eg(4xBjUeVAj`L zxg}1$WK2tZVV=oLZnXNi?YOU=_#LWO2)FK%e5+N`N|6g#Smrd9!=A#FuTA4L^BYt} zipX28wY2)~s|(c6v4=~wWL%zxb)Qkfy)cz6xZ0!2lNw5~@}RpJrj*FeGQE!f0E1Kt z!J#^o&t9xP8~Tjo?KgP1>dSVSxd#v&b|1vZZLc*WJ6ul3Lx@m2R+5ql1RA6G<=;^C z`LnL%(Dt!|9rd`2iWY?rySue$kJLy{x?3_?NKS(LZ$j955m`zQWXDlHT2$ipB}iL| z&Nmd5BpEZppV`qH#UDZ}ePg!{yofrouq&2s%FnoF6^ZI>nbA| z-Ady1O6t^vy695X+a*<7yf}4|o?T_){#rM9alAj}wAjk9ak$p!wPOpzrAp2UQG?HF zr}o~qNoheNb7enxo^wPDb<#0fbCo&Rl6W7LW{(I`I-X@n*5#zPUiUH`l`$>uBo1lu zyM%Tl2iGAlD>z67t1Y*xaHTetIQlRSa7`4o)oVFJYBGn@X{B%RPCW?SeonUbWcB(* zg_hOk1Sc6#HKt6KMD0^Sl6+a1j!aBWY- zZ8NKHMj5ut!z$bjYLMl+R8_*258?sHT3l~(2&Q=2iIpKG4vhMwnpUgRVbtJYdVngt zE<>;*OMPxMOpA5*hV3zaZC9Nn;j`Qold-jatw-zM6%5^>^%JIYL`_F;b=ZxbA82nv zhq)s-tB%`orVlxXk_x-0u>IAHmM!-Nm2SDhVpMWAsVi+N$o%U{pDSg3G*;gmRzM_s zm0sDe8lJ;qB+!zNnON`mb5dT9>O)sZV%qGIB*jpX6fcj~Cz1Q`DJM}pFzNfBn{&{Y zGi#J`vm1J_v+eY#UE4mk>FvcW>Z@DqvX$X7kP4E1xg6G_*-lz=UW~=a!@7aQ_9sjF zW31+CJ0x4J>9){TornOA6jAI^^ zY%CMwIyV@4tpBdVgNPDlfllb%hL28s6 z^IIG#K%bzZN3?~vP(@UTjiF~8#xoo+m#+p-MwT$A9Yz9myvF>qJcd>={=ZaOSF7aeWeniA!Y8gm9=u1xP+jiG2 z(uhIbfzNSLyLHsI!X1gz4-CCnv};`}7SD2bGaE=r$QkWLnu=A6Fwv`=d1 z-$}ZQFGA|5)1fIzjG_;IJJ(1hPqK9RTy*^Qm6oRE1`Rb>^Ria<_>-N0luDEjTzJ&}Fk;N{fpo zqL5m3f$}V$yaiYdorr5(5yZk3O;wvnDg%zI%!;k4&$UL^naa4v^Bs^NoQRIKKV6qDS^cG1xFI?eVc>Dz2>j&&1Ow;dm(ZaSsAbMI&7 zTQQ`)!MHw_F%RQb`3Q5J4c(q|a@04Z_(H zdXNxgu-nBW({e$_@vbv{t9?GWUc7ACbpDC7CF}0Fv00jFhLo1oI(xEShR9NsLU$0; zDBPj|2~J1?tiOvt06k@9B%O2NE3JC=-l*v=JEC6IQv(GFPu+m(o&8xD;g<cHzjt-U1>s9LWI^z!X<(cO-42R#99%7k&i0m)UC1_G)xPTIIwMWyMf`1Y~ zm-nH+WOs)>6{Vv80IMt+eL>X@rMIQNyM&lK>N!=^HmIZq&LH!NyWg8O2PA#SX9O-lF(t>Q<-d zXHXwa&_KhG=lMkq&xGJ5nFvWIZshQ!)1Yd`t8;_tB`jjjIX0$$+)bayfs=ZHa9>yfze_$|-DsG6qP= zsd={2(`acQbqNU{fUC;R33$-8BS!eX>gBgjHRMNf^d_yO@CxNH+=mb5yBr&`ZL>U-6~=c**%8+zCy`6+40u~`UFam1@|RCONM!5sIYzZ2T8MqE5} z^;q59W=*-P^6Z;MyrV8*Zo|9ALZ6Nn3$SGb zk2*7;eb*aSQhf-*hEkwUPJr4AuR6z5_`B+lKyKQ4BufU2vDxl+siCJ6B4VVrl{~d5 
zt7?!GzbtJ?!D(3}_IdGzY>RgArPc_OBcbFYL|$YiD;WXCp}6B}{6f%@KA=*rxp;EW z-E_Ao-Dlr4)}m@LiwjffyMvbdq-M8o^1Rm1>sj5&X~3jpl>ilm4nX*~=?{y)3Q}IQ z>;A6mO1MpY^y{IJdTFa--fpOYV)&<57{+Pf(ok2B=Jg{fJnaV_gSB7E{Pg%Ai1Peg^pbms z0k!H_;RJf{s{IR8DdaafhK{8v@dx&!^smG3R`oAN^%qU`ZO-MVejN_Vf~ej@rRdE* zoNsU_03HDG+!~xb7z}tz=>Y!#yxUR#0LhVDcKb;!vgdf?>Z;?mRECvmQuyuv08(Ms zV*DBmcuwgE_R*G~`Z6j(+0>Gx0Ax}2oOGRCLh%0p16M$dr!IXx)Ryb>ep_3dR8sI_ zN^{&GA<*)_#mBW4 zZZZ|ZUQrlOYJrsHe4XqryeL8~SNZ zDmoPIe)c(~e2!7XpxC3h-;Ko?mx|c+8v65TcE{5AcD1#zwAylhxUTML@AiV*{wB}- zfm}lIa8ivf+eC!pp54#mUA@!a?FF~|O`rJ!mGEwC#pHh^#(d(0$>Ki%N~z;Vq{_Q> z!^3AbN>L|CBy!K0;SKiv-^lTqy8xa{G%GWbg;Tk^u0-?Y}$p!lG5&$`14eR=Ls%0 zm5?*&H&;1+&d2+QDqO!r35|9$J4)GkJ!-0~rnL#9yepb*uE1?}N z{{Z64NdEx1FhA(%syp_J0sgz+2S@(>oBseRTI;7yg;Y1we!oAid_{G8pz^fqQ3p*E zWO_Bl5(&p=SVscLF#Bb23#v@csV)$}8fd{6k0o0Fx12I#|B9X^1-4 zqdMl*c{@v{(4;-~6EcS{-N3kG}r^xw60J zl~s4_rT*eA7ydhU{{SdIT{=x3CA>gs{{Re~C!f{5Ph+^2?_~X!iG4x)N(;6Q{YCAL5E34ND58X=v3QC#S5P;IuNhFd<#xOhQjFYf> z)S9B|jan|h7%y@oyX6?a3A4-cS#KHmh37f!oMW8T`D^R$UcNksFH)a9Yk8A`KYD2~ zVR<4inBXUa_KXh+fCzZ$E`p<|#c+7zBEOpE{IqO?XnIo&g8c)T?aiv$K*1 zz^iw!6jcMFfyTm~hCn{zsB@Fik}#ZlkSlM=Ylh^gXVY~mje5B-pp?F_6Wf}V^+!h) z515hp{3sH3g}6jnNo9HbDpvZR@hH?>pP8MvsMok3D~k^gP@~$6EPY`3FdcdI`xKGd zqj_3x;SnXm!9SHXIMSO|)uRgf)xHX>5?3nfx|UYc)TXB&iA2%~8~|vpA5c%87H1_} zfBADLOjQ>!mhN)i2&B}v>yaYYO2lt`?ku<{8bzV2%-<PtO7e2g&;(LwvUV!OIG8cEzl9g?l<_8kUR@4*t4%B_q{ZFcF zEQ=PeWk_|UVW*OEg(uURXIkBDNikh3N#!d|Dvx)4;98o!!8`)zr42GzeN3)TjiVuU zJ;{36t{Md{iD|+UgUu4c>cC;NI3{jkCm2e&&#hNCY-yk7t`*TuE7u2#LY*C}pGTE! zzq0xP&fVU=zO?OpUU>YxER0jIHYYrI0SMTU3G#OM=QDd>32FhE7d1 z-gJ3)Dnm(6)OV#S%yIA3dh7*n*(09)>+ax>dUv>Kxscu3locfNO^b^u1n!Qo2d*nX zWLIFBt&3%6u`-V)L^TH*QObGmM;d>rZrhTCINtvN=VVX@7n~AD<6i-{J5DoD)UBPt zhYECLqp3BOk5I4@pOIv8KD8Q22t99`Bu$63t+7z3c_qVRk;D`I=7)3GP8AOM~gnV ze^EhfA_MOzpG+LrmOImuB?YmJ4_|tfddq3IYV9Qay)h-J2ysQ!r78tOIW=8$%;{E{ zMjXJjEhlM0yMjF{Vx#g_14OJkex#qNbqB;+-C();z?`QQ2wd)RXTGJ&Cvp|FJW-~g zy3BMX5TLlF9OUgZ5iN;pX1?atjcr+@bzX(wcVLXP|DKkA8o=1vV5kpf4>!U zd}Z`gQuvAR*{pg!qwW^ySDW%7DkIkgtr5bb&0;}H)P$)+a#<+^B=Se40$fR!G??>a zH8LCRzQgECtvcF*lC&sfC=tN~5J9e6_}%bls5;f->q^-AaVS_5t@QdY_shUalr8Im z;brMz0ZAoA9|VM@B{_9qlAX#}=@&_~WR9G4vqtpxT>k)$e@p56b(zUr{=IRdHqmpQ zeqxONbroH8ww_OqEjghbRB?7kO0KgIwJ4=w4_#s5wE8P>i0be;0j%Q&x=fpN&7x(4 zPg`u43;g@!m@!+9?aFv)NtUFf0_(m~5RK}_PSk;)N^_yErdoAv`==VJ$DnU5+BG%C z$vP#L;Npr=4s(nxEIddGAR)qqWF($D9@IVI>fP20H%D~Uldm$ZlJ&K=FTvbqTjef8 za|3E9ZS^f_NiMChg)fX1l@W~Kbm_{jr+u4#BQ=+d4QHnG^AaD@*Q<)}$68(#ejO1d zt!@;lc`ANGYTnUpKQSpv)Eh#=h41?|bbGx!@c#gK>ErhVPjgD^tw&^Bq&8bjg+`dB zi-9Dap<6SIoM*YK71Ui|dV8R?4!RnC<$M}`+alzvb+rjxsYz)CWep`n1t@@UGn3c? 
za1C_(HZ_^*M?i0VMK<5j=T&RXM|&{*?>FK8CSiBW-?cmT@5b$;IL-%Z?M{-viVnO- z@YB&t?^$JOmvsYHHzBsGdvb(jJLk!1jE4#q1`E5s>CQ36b6n@@58F|%^#y`$m!@42 zfu*kUkXns*xL%FOMhA~EG5#H-m4V3Ir6e3>D;Y_*pK5m=2X*=Oe$D<=i(D~@2H|im zq8m$I)m6n#{sD3kRx_ja+} zn9}LE&YI$ixk?r7N+I;mR+P0aC5l7J+z$viH8%AFK)UZm(RZ53i!M83b6auPQqOYo z3lCZlq?y^D3I71xv(eA@fv3O8%vB%zW$v*o`iG%&X0#k-2U2cH{{X?YcswW!cv|S- z{{XxVJ^oH&sGr+aY?F7o^g0woZ*hOhKn6Si0JXHkdVn6`zu5089Nh`k8E_I3%es~_ zk_V^uFsrZDoqZ#vT_M(;O^Ix7;{KGd&b+q~#|vqFQP^97vY-h_J8_&I^+$fjD}0Mz zO}d*3QVLveQpXAh{8&})>VAT@^-rf7pQg8ol43#CHi>q5FU2YInrZfw+LVpJr*H}+ zWD%cAmGvObH2AUc{{XAJS@fFSpf24YgQla$y~n$zb2nAV-dkzr!+T3f13rFtWc}P@ zoQbcYA7|g%6XCB|t5@`wt35D+BoqsUvPIJ1>TbB5J551E$kC^{5mDB!ISC;_45eL^a6*O-4>-Z<4xheTbv=gL zsVvf)lW)63W^_l}P*PoG)CHknj^L$0kH)xf^EyYpE$fZHS!*pp(cM|AG>03J5%sp(oon@hD%^yr3*d|tEo(eQbh^>jj)v=QnckUgZC>os8ewa1 z%pBrVU3M6|9h;-r*p z^7cf|Lh?$$#~G>TS6^+KUrJuJHR@YYZT5?FnbO~M;SDm=YFgH#=mJtn`U;uTfj$Xm znO-XTG|B)7XHhT@)Ztb746XxsF49u0{w4Y?b$Fe!wIc)%$yMK)q7X+?&NO?RNb%U) zf9u5SV~R*B3AJNC?o>MJuY-^7QRyH1&9#5gkzA(n=HeUiiPoaE0hHQ+$;bZyx}n!r zd>nsrk4TUG=GuSg$f{TS+8W(_X(ed)XcBI7EbT~1nFS3x^I7?l+qk%*KBzRZ zs=~M@!C0~8n@Mqi{LKFW0;5#u#`~g;`jTfBz`#*oXJH_|= z>;C}G7Ye?bg5;>QI*nrL zcDIR$T1*1Hd;UEs@V2^6y$AkqXy(F~)Hxr8PIqh60@Pp>6%A}P5->z#j&i?@7ANvza zz<=fs`i@>NVHUKK94p*Zb6vO7-|Ypr{7s+v0=Sdo0#G_xwi2|FpGxhvp8o)7Ex+Py z{{YAp-UnkBNBN!Sz!`mi6J@^b0_fz{VaQ94TjZ)Flmn8}p($R~dlH4AUxI}!s;%Mw z0IG;zjXx5d5SuBat(lfRRc&Yf$l_ONg&JDF!AV3R__%7+*3`t5BkXGa=S^-u`^30^ z&>&Q~_LhHf2ULH>Ab*r&R0Z}iYb#8SoOLs&NqZMdW~%)(w$B@*%3%zpN%z8`tRHR3 z&v91s#h*xToqF)?t-4F5rNWJM)DT;48?#oVFdl98-bh-Kl@JMU83{PgYT-H>bhrNi zi!CGm;K2U?qoSzq+Ac@>?|dB(U*)r3(eJ^J^cTCze`E)pKkYmH%1_Hdc(v1Fi zu5bAN0M{1V$J!0ygFwSra!#AnR$0>&m8C(oOuNSkY=Cl=gvLt9AbhSM`qxgKGpc>z z`~LuxSH(g90Qn6c{!B$&8i=}c&_CoSh#szW)!j&$tLkvEw$;5ce817{+u`{KE^a=$q?4}s-MWVmQZvOz~2kWOvL&g_~JBNlIhTS^7Y||0% z@7tzBj3B=yOd(gj8?+$6@5tYE0vx; z{?Bfe_($mFlc?GkS!&C*!sDsq*&Sx=>}4{RT#G5%fqgsN+)a7msu`QO8n{T?sL&ykNa1Wc`=|zm` z%VO4-oQ{kdt6e+Say0DJLYV0>l&_a3gw=$Ns!XU%yuyN-aB!e&10xygZoN(XT$QnP z43QSq0Y%1&00WW5F0);&7dZ)oH9@w4`4!umj$XLD>x)5f0yyCJq)y!BL{3ygdrm>1 ze1Bb!aiYV~lE$jh%9Niz-0)HW;)0}T2jrn&9~(c-P!zLHbr=LDJefHFV+Yox3sv?F z(keh&2hh+S`)V81B4bx?=0Z~XOEvT<##k!~;~4ayT|?0Ou-@ws2uE^p&0PyC_9)3e z7_{IF?EV2Y9p}w1C78{ z&%u;6xhUBfr+Sw2Omz*1c9dLMbtk?QGAQCEqqJOH!!BiDbGw050g3mkLjM3$CPZ~4 z@toq0Y&~SX!3d8urQk91q>!r2$?(WeHJbRIaH$IW{{Zq`CRlCfJ|;4-KN@A%T6TV- zwK&DH?Cqv-AFGtv``=27w2xb0y2D73AqXq(6s~Glvg&A;i0LujZD%I{s2av>dvM#D zjm^`MkDQ!eu{ra*_Ho4=^u}XRBuOuF0&)SM==SMS;!K>+ljT7|7WVhhg<(qw8&aYa zvyw6r2?HeH0%>)Y;qXXz0|bus?XxuXbvzae@w-xLk0BEfUqd*+;Gf_6T)-K=Wt(fW|hRF%XWlw4+(I^koF=sWDMA+= zgl|a7)HaXCwKS6Rp)UUb3f8UoFLMd>&9N}Sudq)PxyxFVQW+HdXltArtPUA-t_NH{^x zf1;@QN(O<{ve4{$nz}jC`Tbhe1^^B?(IBlsM?Z~Ey;tyor*uY?Ma79r_=4W!mlQ}e z(cyololn#Hrc01~xNP9=TC<#E@TvExmQ6{gEeg8oc`LlZa1v6INkw5_n8{$%12t;G zZUoMJz|}gRNl1DeN$+4Rr6_QcF`um|vT7@Q{Z7@>R5l6{cPq;n1pR79>n*0odWyy0 zPFWmb<&u>o0zayvwlqeed7`g{Q)(Y{E7r8Du`XY?Q#Qn8Zsdh~>SdDYb86XVwld2A z?L)gqIrph94^3vK(H`k|vZvwXfwAI&j%u{RvRz}{m|^I?KATvaE-471t4PH!e9fP#(SwwZbr?9ML z8=J__&MTfeuf%s;nd{7N5*O?8#=eO;Gb26aZH~ifNkEE*vwh|J z>B!3JFGsbETk0AKlq@1NsB3U=RD$!(EeSpLkfNO9aADxRX@MbVRxd`)MxAJ5PmT2iIGF>=Xmgf3&rZhUvSWh0vZA zlOLa*^1P z`>M+6o{;H%EQXA0V=HmA{KTMSngjU+SVhNXjUFj{e`_xieF)SYAD7|5{Vt38;V4{= zr85g&M7E#dpM-CA?<;TL7~P(6gIRhTrLM1C9kxcbC30F$?T#d8)84NHeIC{2EwhsK(wraR zLb~yeuIddTBHeh>{{X3RB%O#z!2GJ=Uv8Gz=w-OSIO8}^udNT=@2$Rq${G1~;|fm{ z1;1_o08^Pr^#HnY_ND4xlXUY*bgf<(owyw*Vu525RjUgh6Y#On&zt4^61HyHyKd!U zILYro-Xr`+>JJu|-k<6_r%PMWEoh}&cBOZ}(gt6}{5Ssqz-wmThW`6I{V~jebkY_H 
z?EGEE*B>4CgbdJ4%cP6eO_+Bz67zgppBbEcXxKqaz6@`-1}7eoY&v*aVUTNW`Fbw73p_QZQValoT)3h zy04cH41vejH6lKUh;7N!p+t)rkX%_(0Lc1PhGQTD+$ZVPyWEdDwc}%{ekOWNsroRl z4325U>ZpGO`bo`FWc)wQm2dLC4IBRNZNi267vj6GJ__`V4xRNMOgFEzOcIvu#7y{m zjje*LskYWqq`cZeQA2CmMnXf0KuS`tT=<;PRw(k|Sl}=egz+tC2}(Y=^{BaxZD#63 z#>_oEch=WpyfYp<9$ZoRf_VP`gH{<=dn*QMUqV~4_Pz8xvXu^xbzHcv$rjipB>w<~ z@qg7-qr|U|KM+19tkQaA)83d$(E2jqvenVm%MQ=D#oT{XoHVgn8>bLC5T% zOvoFZ=RFFo(?!|08_X$_w zR&p=f4XP%(8jImTxwzmI>wM?`03fIXSokcBe0}@0dCs!p@>19*y#?tn4LbWu>iBFM zgN?&mpam$C#ZEtwsIHXv8p!cCsoxk`)Ob?7NcBfsb)Na9PMMRcL{f`V<{3eP%tmA5 z&GOsdpC@lRfnJ?GM+T5b4&H={p`5-MzP^Eb%P}o}E@$5c&i4GF%!te1$Y<8cJx6)rMBg+o8DaMcL z)wF!WJ%rXi*&Re>{2B0~puFS%0EWN&1!bg|&^oTzWLxg6xYVR&O{pLNeMfq$eQ42- z^#1@?q1Ksf!*#;8mXX4N$*QJZs^u*A4ET7if$46c{{TV!UTThnb!V%cV{4)Y{I09r z1=QPyp!jdmxPC*sJl}?2o&Nw_V>?mzGmda3&-P!nkyg>+8RCE6q5lBTDymp-$(Y-3 zEGG;3HsiPc_^B4j@Gdl$oQ%i^jFgH_{{Sa`KlpduzA z;eFdWb#;b)jLtf@&iG*}I@$Alzk*KvyZ3F}oZ}fC>JISD)C=c?PNPcdjjN|;SUEFJ zJz;VHx%>~8=17kBo91~==N}KZZqfI%o-j{jO|gxww#zAXA1>gL%_WkkX?ZF}&4<26 zdQRUjkXc*^+W1OUN^g6T(|>E`u2hj3&k2q;Nyg)?IR5~kRcq)roO4HM&63bUw&h<+ zDY?e9KA`WC&{yHy)|rn8u#b?u4Wl4eX680+Vxl!KiQ}uleqijuezxi9I_|@=3q#Kibo-ACe=V2`;0Y6&-2Y{{SGMHe$hPjh;hkPt_uuC9`hF znwA<#^!Y^qg(oNWcAS4W z_oxni)d*otp((;f++w0FV=@D2kf#`H%mX0fp7o)Fe#oHaU zE+0fXFM4+B@)VTK)lM}Xh^zgTc@f-kPY(GWV}6{P=ky+;>ll4+vG|GCvZQLgM{d;+ zH9X@YKxK2BjUl%~Az>;FCk=+u5`vb}5)zywCYv=4%sC7?z(YzxxyPuXYo4WbVId?b zIOeqlwpe{I>r9*egf-IFEn!T0j1F{{Yx4nX@iQk_u2U z`BD(Gjk%|*M{jXUE3G{lay(W2s2x@DEup3K!pEYm`WH*x17hfgXD)2nX+rmTMg5JX zEfN#CA-%_L3bvIbzW7-CQWxQeqBcH^j)VUIDStw{G1GDypWyuOK1+{d%J@e%?ejMM z^PFdl;;7AiMflg(<}10={{Z%K9RC39wtxKX{{XO52jfHS7<0xSORYULJs3JfTQvh0 z-;MrJe1Dnc$B@sJ<@_sH&%|!sxZ`Fq&P6=0tA%1t=Yx(b-aSTZ>iUZ-JSAD&v$%ab ziayZ&LzQSiwN7*@KkSuyP!Jh7&3S~Jg%SwtMF!e_SZb^__9c5S* zZ0nqLNJ}z^QcrAEf3oUpg_UU7>XwgUdr`z4XBz5+xci|FC$~Pejczkm(^`z6{^YWj z?F1p%%#ub4B8V*)rrRV(a|J$`sN&CdOR$s?DLML@RRwYu=H{g1C!T3$3VLYj+J6#Q zZ&^5m`Eo!?PZ>SUC}UYhVYL(?O8SG_>p{@%YM9X5BXYudtkzX7>uh^#f1|EJ$uZUHgW8U)nu^1HoW!eKtQUw1LXT>#_PsvqgdhXIJd;jR zpS$U#vpUx4NM%5%XX{GaiCYa+dzVa`0sDz=Td+cP#jp^9wGeVR;}x;an?#iIKyPuz zc|25K)9$m|?va}PBz&h(fE1#8nwuf!e&#Z_a{Ng4_0lUO z*A|tsl(;+X9q2|^hr2|l4&@{#JS9W5Pl~j>wU>9~nnOO?=a!&VwPV-bsJRv|rk4R* z_X$>Ye}$M6mQch-){qIx6N-lQFNEo_e8(Y&o8;hC{{XeNvhZq4c?tD2<1WQ)nA!M* zGE?crF<+U=r1172ti-O*#-}H5eHGVM1(z9! zebh)lz|d_)MC4&=W@h6QYR;f(%e?(9D)p=4YFQe247quDF(VhCXB8OeF1C_(Fsv0i z5(ot&I2n4G)SGM93%NaEYDL;}64Pb9{*IE`Y^|(j7(+l_9RkIB{f8 zvAQkN!)Zf@MO*h21Dq?{@t_S$($1eSr;A!z8aDRE?mvwvnII2QyD`Z+CJWno(WLF# zhC6mU@|v~D)Q`!#?Y6jv=SMck%qi8j5J7F(2zh>9GBT2+j0^y3#%61#^u_AwbzJM4 z7H@{SNO&l?x5T+BPUR8vkjmK^=YR%yz<9ptiv_#>vbHkT5A5gs^{U?50s~NTgb+g2a;naFrd#$7QDk)xPlSXSKa2d$3wsk8+`! 
z`^46k)6u3gA&W$I+){>Al;EBzP{*F3df_WyMl}039Yr0NB6*K17#RB1+^}?KSGxn^ zl@z4`KPo_~*U=^!YpL)M+i6ODfclE)-kgy4=*E-g91%$7u?-)@P*-xjX)8js=A$t$ z(H)MGtnWy`BBgtNpwMf(wPm=#4>KU_0l)}6R-T}+$qI0`oxu0UH5I+hCY(QcMq7;V_vdLjTwUevTXs3Whw88p4qO+9tdk%SwI2`VT;a0ta% z3nZxug2I&QiSmc^3X1yScDTm1EHN#_WkehwN+`&1;BG8&k%8Ep$-i5l)AQsaQp#r} zlq9+CJJf4(X5;ZI0YV48_*AqhYf`sxI5jbNi38&l_~F8$N{4z&ry#_YWQyd7=y63n zk~yzYO_~7A3##MLfqExSOqlVMsOuR#ap_VL^gZ4hQFye)b1e{F0B}VdX~+=SijOKX z6Ugi;N`T8>qm+*=J5-KFJJPr06>XSGssUgIjkQ6T(Yd(dTo?&;qud{F!lak9R*$(y ziE%KSL+x_X!g)dZRAplVKChV_pOLk0B_p?bm#zN*+8amgfxx74Y%C*G0A| zuS_Kowm>^Q0UT26E`(N7B3px>UwU5^M8{T5IKXwyfNwO{ME3!$9;WCP#x2{Ff`s5> zHCgY>HE;18(}BcolF1|wQ-f9#?DL@t0m3qByWi54c!srZ8O(p`G<;w*WHgv2MoNlH zZI!J@Imc>jmj-IocY=mcGPRmNVV1TZ)s6u4`crpN8EuU++Sx!sTZsxI)L_tV>(r?m zf!6B;i`;SMm3#|I%Cq}ooo?~1x^fboxLhO0X$_S(9H=Eu9Cj2}q*-gogx0~aO^lCv zi2PS*T`2r?T-~%F$z`&nI2>3hKY&&$wD!VHhlmmg)*ddWY5B6?#P}%4VbX*)l=;sn z{veEg>Ik}cjOwpbr^eHESaKw;Ue?hZxN^VvIOC3gDihIqV#9mC%>6BFYF5{ZM*Q}` z`qjqh7J<>W0`&Vvo{1VkN>7-SlCI<2oK~ah7kY~u3v?oV2jM=`a<)Oc^^~8Kv!ba$zKrSfKQ+_vi$3TZQJ*kT7qvQ^MWY$&e zS-UgUcdw3SG_b& zX|oMLpb^h;K&`ko#NOZ^YGqQN*IvG9%d*^@u{I(ykd=e$`TZ%Sj}m&4>vDQFxzebS z1>OKCBnp|eWxf-Oc`Jy;e2bPgBYH9~E9Zn>QsZ4zAJd zFv=|LO0}_);*ad)_x7)Pi#`uf^!#{sx#*VkHQ^_22=(`5q;o{S6-^_rd=FJ!hs*HqjGX^t?dPfDByUd66W%vH-|X z92#)9>Q-eWuK#(s>!H)>gt- zQmoBi<{R8Dk2578sb#+X_@@Uh4Zm*+OPiFc2$b`ywzjpbl@#}_mUrGL2`(G~+Keb) za9=Y*$k)_Bbv|%WY$uF*8Y_>hnrWn|rIi%{f=_CR6#L4PDJ8-}QODA@!<8XLyt<>n zsJ8_y8!)uxvYknOPNh_qc87UBnWjsH_E11Uc!Z@x=FJK?`^ef-;)fpf^tW79xx%HB z%>-d3exX|_j9qioSS}Q!a#QLL(xIJssMfb#SSio9truM1bu6eQ!U6A5hOPliDmh)dCc|?O@$GP== zUsu&_O3y3Yp4qRf>gm(r!N~4w#FE(sz7BI<*O;gvA7V%(dX6wJ_XsP%b!2;HjqLjE zkkS(Jwk)1`sM$QZ&1ABC%;u828D2X^LnN@)9d5`{J{jDVa6moi_St(}n5$EYAbJed zOt4Ip-N73lSm)lSZ5bv*t)t9!O8JL+%D#n00&7uwG?lEA*m1QN5(1OhR(I=kXYJ0o z76?*%drHDmnE_sL+eVj+!~nrUw*v0(~P-sc}PgW zNmpvf$nmO&V8&^@ytD@Q898Y(P*3AXAGxn3DSRYnJQ{_~!f28)CC>iz^8#$Q9!Ok; zpTyE_M!MW^cO}@D#zGPtdy(HJl3m=3Ju6TJS@ocHUwF8@xULO6LV3?5jpr1jFB1u& GKmXak4z2D0 literal 0 HcmV?d00001 diff --git a/spartan/releases/rough-rhino/aztec-spartan.sh b/spartan/releases/rough-rhino/aztec-spartan.sh new file mode 100755 index 00000000000..5198a7bf78c --- /dev/null +++ b/spartan/releases/rough-rhino/aztec-spartan.sh @@ -0,0 +1,285 @@ +#!/bin/bash + +# Colors and formatting +BLUE='\033[0;34m' +GREEN='\033[0;32m' +RED='\033[0;31m' +YELLOW='\033[1;33m' +NC='\033[0m' # No Color + +# Global variables +ARCH=$(uname -m) +DEFAULT_P2P_PORT="40400" +DEFAULT_PORT="8080" +DEFAULT_KEY="0x0000000000000000000000000000000000000000000000000000000000000001" +# Try to get default IP from ipify API, otherwise leave empty to require user input +DEFAULT_IP=$(curl -s --connect-timeout 5 https://api.ipify.org?format=json | grep -o '"ip":"[^"]*' | cut -d'"' -f4 || echo "") +DEFAULT_NAME="validator-1" + +# Parse command line arguments +parse_args() { + while [[ $# -gt 0 ]]; do + case $1 in + -p|--port) + CLI_PORT="$2" + shift 2 + ;; + -p2p|--p2p-port) + CLI_P2P_PORT="$2" + shift 2 + ;; + -ip|--ip) + CLI_IP="$2" + shift 2 + ;; + -k|--key) + CLI_KEY="$2" + shift 2 + ;; + -n|--name) + CLI_NAME="$2" + shift 2 + ;; + *) + shift + ;; + esac + done +} + +# Show banner function +show_banner() { + echo -e "${BLUE}" + echo " _ ____ _____ _____ ____ _____ _____ ____ _____ _ _ _____ _____ " + echo " / \ |_ /|_ _| ____| _ \ |_ _| ____/ ___|_ _| \ | | ____|_ _|" + echo " / _ \ / / | | | _| | |_) | | | | _| \___ \ | | | \| | _| | | " + echo " / ___ \/ /_ | | | |___| _ < | | | |___ ___) || | | |\ | |___ | | " + echo "/_/ \_\____| |_| |_____|_| \_\ |_| 
|_____|____/ |_| |_| \_|_____| |_| " + echo -e "${NC}" +} + +# Check if Docker is installed +check_docker() { + echo -e "${BLUE}Checking Docker installation...${NC}" + if command -v docker >/dev/null 2>&1 && command -v docker compose >/dev/null 2>&1; then + echo -e "${GREEN}Docker and Docker Compose are installed${NC}" + return 0 + else + echo -e "${RED}Docker or Docker Compose not found${NC}" + read -p "Would you like to install Docker? [Y/n] " -n 1 -r + echo + if [[ $REPLY =~ ^[Yy]$ ]] || [[ -z $REPLY ]]; then + install_docker + return $? + fi + return 1 + fi +} + +# Install Docker +install_docker() { + echo -e "${BLUE}Installing Docker...${NC}" + if curl -fsSL https://get.docker.com | sh; then + sudo usermod -aG docker $USER + echo -e "${GREEN}Docker installed successfully${NC}" + echo -e "${YELLOW}Please log out and back in for group changes to take effect${NC}" + return 0 + else + echo -e "${RED}Failed to install Docker${NC}" + return 1 + fi +} + +# Get public IP +get_public_ip() { + echo -e "${BLUE}Fetching public IP...${NC}" + PUBLIC_IP=$(curl -s https://api.ipify.org?format=json | grep -o '"ip":"[^"]*' | cut -d'"' -f4) + if [ -n "$PUBLIC_IP" ]; then + echo -e "${GREEN}Public IP: $PUBLIC_IP${NC}" + return 0 + else + echo -e "${YELLOW}Failed to get public IP${NC}" + return 1 + fi +} + +# Configure environment +configure_environment() { + local args=("$@") + parse_args "${args[@]}" + + echo -e "${BLUE}Configuring environment...${NC}" + + # Use CLI arguments if provided, otherwise use defaults or prompt + if [ -n "$CLI_NAME" ]; then + NAME="$CLI_NAME" + else + read -p "Validator Name [$DEFAULT_NAME]: " NAME + NAME=${NAME:-$DEFAULT_NAME} + fi + + if [ -n "$CLI_P2P_PORT" ]; then + P2P_PORT="$CLI_P2P_PORT" + else + read -p "P2P Port [$DEFAULT_P2P_PORT]: " P2P_PORT + P2P_PORT=${P2P_PORT:-$DEFAULT_P2P_PORT} + fi + + if [ -n "$CLI_PORT" ]; then + PORT="$CLI_PORT" + else + read -p "Node Port [$DEFAULT_PORT]: " PORT + PORT=${PORT:-$DEFAULT_PORT} + fi + + if [ -n "$CLI_KEY" ]; then + KEY="$CLI_KEY" + else + while true; do + read -p "Validator Private Key: " KEY + if [ -z "$KEY" ]; then + echo -e "${RED}Error: Validator Private Key is required${NC}" + else + break + fi + done + fi + + if [ -n "$CLI_IP" ]; then + IP="$CLI_IP" + else + if [ -z "$DEFAULT_IP" ]; then + while true; do + read -p "Public IP: " IP + if [ -z "$IP" ]; then + echo -e "${RED}Error: Public IP is required${NC}" + else + break + fi + done + else + read -p "Public IP [$DEFAULT_IP]: " IP + IP=${IP:-$DEFAULT_IP} + fi + fi + + # Generate docker-compose.yml + cat > docker-compose.yml << EOF +name: ${NAME} +services: + validator: + network_mode: host + restart: unless-stopped + environment: + - P2P_UDP_ANNOUNCE_ADDR=${IP}:${P2P_PORT} + - P2P_TCP_ANNOUNCE_ADDR=${IP}:${P2P_PORT} + - COINBASE=0xbaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa + - VALIDATOR_DISABLED=false + - VALIDATOR_PRIVATE_KEY=${KEY} + - SEQ_PUBLISHER_PRIVATE_KEY=${KEY} + - L1_PRIVATE_KEY=${KEY} + - DEBUG=aztec:*,-aztec:avm_simulator*,-aztec:circuits:artifact_hash,-aztec:libp2p_service,-json-rpc*,-aztec:world-state:database,-aztec:l2_block_stream* + - LOG_LEVEL=debug + - AZTEC_PORT=${PORT} + - P2P_ENABLED=true + - L1_CHAIN_ID=1337 + - PROVER_REAL_PROOFS=true + - PXE_PROVER_ENABLED=true + - ETHEREUM_SLOT_DURATION=12sec + - AZTEC_SLOT_DURATION=36 + - AZTEC_EPOCH_DURATION=32 + - AZTEC_EPOCH_PROOF_CLAIM_WINDOW_IN_L2_SLOTS=13 + - ETHEREUM_HOST=http://34.48.76.131:8545 + - 
BOOTSTRAP_NODES=enr:-Jq4QO_3szmgtG2cbEdnFDIhpGAQkc1HwfNy4-M6sG9QmQbPTmp9PMOHR3xslfR23hORiU-GpA7uM9uXw49lFcnuuvYGjWF6dGVjX25ldHdvcmsBgmlkgnY0gmlwhCIwTIOJc2VjcDI1NmsxoQKQTN17XKCwjYSSwmTc-6YzCMhd3v6Ofl8TS-WunX6LCoN0Y3CCndCDdWRwgp3Q + - REGISTRY_CONTRACT_ADDRESS=0x5fbdb2315678afecb367f032d93f642f64180aa3 + - GOVERNANCE_PROPOSER_CONTRACT_ADDRESS=0x9fe46736679d2d9a65f0992f2272de9f3c7fa6e0 + - FEE_JUICE_CONTRACT_ADDRESS=0xe7f1725e7734ce288f8367e1bb143e90bb3f0512 + - ROLLUP_CONTRACT_ADDRESS=0x2279b7a0a67db372996a5fab50d91eaa73d2ebe6 + - REWARD_DISTRIBUTOR_CONTRACT_ADDRESS=0x5fc8d32690cc91d4c39d9d3abcbd16989f875707 + - GOVERNANCE_CONTRACT_ADDRESS=0xcf7ed3acca5a467e9e704c703e8d87f634fb0fc9 + - COIN_ISSUER_CONTRACT_ADDRESS=0xdc64a140aa3e981100a9beca4e685f962f0cf6c9 + - FEE_JUICE_PORTAL_CONTRACT_ADDRESS=0x0165878a594ca255338adfa4d48449f69242eb8f + - INBOX_CONTRACT_ADDRESS=0xed179b78d5781f93eb169730d8ad1be7313123f4 + - OUTBOX_CONTRACT_ADDRESS=0x1016b5aaa3270a65c315c664ecb238b6db270b64 + - P2P_UDP_LISTEN_ADDR=0.0.0.0:${P2P_PORT} + - P2P_TCP_LISTEN_ADDR=0.0.0.0:${P2P_PORT} + image: aztecprotocol/aztec:698cd3d62680629a3f1bfc0f82604534cedbccf3-${ARCH} + command: start --node --archiver --sequencer +EOF + + echo -e "${GREEN}Configuration complete! Use './aztec-spartan.sh start' to launch your node.${NC}" +} + +# Docker commands +start_node() { + if [ ! -f "docker-compose.yml" ]; then + echo -e "${RED}Configuration not found. Please run './aztec-spartan.sh config' first.${NC}" + exit 1 + fi + echo -e "${BLUE}Starting containers...${NC}" + if docker compose up -d; then + echo -e "${GREEN}Containers started successfully${NC}" + else + echo -e "${RED}Failed to start containers${NC}" + exit 1 + fi +} + +stop_node() { + echo -e "${BLUE}Stopping containers...${NC}" + if docker compose down; then + echo -e "${GREEN}Containers stopped successfully${NC}" + else + echo -e "${RED}Failed to stop containers${NC}" + exit 1 + fi +} + +update_node() { + echo -e "${BLUE}Pulling latest images...${NC}" + if docker compose pull; then + echo -e "${GREEN}Images updated successfully${NC}" + else + echo -e "${RED}Failed to update images${NC}" + exit 1 + fi +} + +show_logs() { + echo -e "${BLUE}Fetching logs...${NC}" + if ! docker compose logs -f; then + echo -e "${RED}Failed to fetch logs${NC}" + exit 1 + fi +} + +# Main script +case "$1" in + "config") + show_banner + check_docker + configure_environment "$@" + ;; + "start") + start_node + ;; + "stop") + stop_node + ;; + "update") + update_node + ;; + "logs") + show_logs + ;; + *) + echo "Usage: $0 {config|start|stop|update|logs}" + echo "Commands:" + echo " config - Install and configure Aztec Testnet node" + echo " start - Start Aztec Testnet node" + echo " stop - Stop Aztec Testnet node" + echo " update - Update Aztec Testnet node images" + echo " logs - Show Aztec Testnet node logs" + exit 1 + ;; +esac diff --git a/spartan/releases/rough-rhino/create-spartan.sh b/spartan/releases/rough-rhino/create-spartan.sh new file mode 100755 index 00000000000..870263926eb --- /dev/null +++ b/spartan/releases/rough-rhino/create-spartan.sh @@ -0,0 +1,20 @@ +#!/bin/bash + +# URL of the aztec-spartan.sh script +DEFAULT_URL="https://raw.githubusercontent.com/AztecProtocol/aztec-packages/refs/heads/zpedro/testnet_docker_compose/spartan/releases/rough-rhino/aztec-spartan.sh" + +# Colors for output +GREEN='\033[0;32m' +RED='\033[0;31m' +NC='\033[0m' # No Color + +# Download the script +echo "Downloading aztec-spartan.sh..." 
+if curl -L -o aztec-spartan.sh "${1:-$DEFAULT_URL}"; then + chmod +x aztec-spartan.sh + echo -e "${GREEN}✓ aztec-spartan.sh has been downloaded and made executable${NC}" + echo "You can now run it with: ./aztec-spartan.sh" +else + echo -e "${RED}✗ Failed to download aztec-spartan.sh${NC}" + exit 1 +fi diff --git a/spartan/releases/rough-rhino/full-node.sh b/spartan/releases/rough-rhino/full-node.sh deleted file mode 100755 index 75a0d33bca1..00000000000 --- a/spartan/releases/rough-rhino/full-node.sh +++ /dev/null @@ -1,39 +0,0 @@ -#!/bin/bash - -set -eu - -# get host arch -ARCH=$(uname -m) -IMAGE="aztecprotocol/aztec:698cd3d62680629a3f1bfc0f82604534cedbccf3-${ARCH}" - -docker run --rm --network=host \ - -e P2P_UDP_ANNOUNCE_ADDR=$PUBLIC_IP:$P2P_PORT \ - -e P2P_TCP_ANNOUNCE_ADDR=$PUBLIC_IP:$P2P_PORT \ - -e COINBASE=0xbaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa \ - -e DEBUG="aztec:*,-aztec:avm_simulator*,-aztec:circuits:artifact_hash,-aztec:libp2p_service,-json-rpc*,-aztec:world-state:database,-aztec:l2_block_stream*" \ - -e LOG_LEVEL=debug \ - -e AZTEC_PORT=$NODE_PORT \ - -e P2P_ENABLED=true \ - -e VALIDATOR_DISABLED=true \ - -e L1_CHAIN_ID=1337 \ - -e PROVER_REAL_PROOFS=true \ - -e PXE_PROVER_ENABLED=true \ - -e ETHEREUM_SLOT_DURATION=12sec \ - -e AZTEC_SLOT_DURATION=36 \ - -e AZTEC_EPOCH_DURATION=32 \ - -e AZTEC_EPOCH_PROOF_CLAIM_WINDOW_IN_L2_SLOTS=13 \ - -e ETHEREUM_HOST=http://34.48.76.131:8545 \ - -e BOOTSTRAP_NODES=enr:-Jq4QO_3szmgtG2cbEdnFDIhpGAQkc1HwfNy4-M6sG9QmQbPTmp9PMOHR3xslfR23hORiU-GpA7uM9uXw49lFcnuuvYGjWF6dGVjX25ldHdvcmsBgmlkgnY0gmlwhCIwTIOJc2VjcDI1NmsxoQKQTN17XKCwjYSSwmTc-6YzCMhd3v6Ofl8TS-WunX6LCoN0Y3CCndCDdWRwgp3Q \ - -e REGISTRY_CONTRACT_ADDRESS=0x5fbdb2315678afecb367f032d93f642f64180aa3 \ - -e GOVERNANCE_PROPOSER_CONTRACT_ADDRESS=0x9fe46736679d2d9a65f0992f2272de9f3c7fa6e0 \ - -e FEE_JUICE_CONTRACT_ADDRESS=0xe7f1725e7734ce288f8367e1bb143e90bb3f0512 \ - -e ROLLUP_CONTRACT_ADDRESS=0x2279b7a0a67db372996a5fab50d91eaa73d2ebe6 \ - -e REWARD_DISTRIBUTOR_CONTRACT_ADDRESS=0x5fc8d32690cc91d4c39d9d3abcbd16989f875707 \ - -e GOVERNANCE_CONTRACT_ADDRESS=0xcf7ed3acca5a467e9e704c703e8d87f634fb0fc9 \ - -e COIN_ISSUER_CONTRACT_ADDRESS=0xdc64a140aa3e981100a9beca4e685f962f0cf6c9 \ - -e FEE_JUICE_PORTAL_CONTRACT_ADDRESS=0x0165878a594ca255338adfa4d48449f69242eb8f \ - -e INBOX_CONTRACT_ADDRESS=0xed179b78d5781f93eb169730d8ad1be7313123f4 \ - -e OUTBOX_CONTRACT_ADDRESS=0x1016b5aaa3270a65c315c664ecb238b6db270b64 \ - -e P2P_UDP_LISTEN_ADDR=0.0.0.0:$P2P_PORT \ - -e P2P_TCP_LISTEN_ADDR=0.0.0.0:$P2P_PORT \ - $IMAGE start --node --archiver --sequencer diff --git a/spartan/releases/rough-rhino/validator.sh b/spartan/releases/rough-rhino/validator.sh deleted file mode 100755 index 246e59527bb..00000000000 --- a/spartan/releases/rough-rhino/validator.sh +++ /dev/null @@ -1,42 +0,0 @@ -#!/bin/bash - -set -eu - -# get host arch -ARCH=$(uname -m) -IMAGE="aztecprotocol/aztec:698cd3d62680629a3f1bfc0f82604534cedbccf3-${ARCH}" - -docker run --rm --network=host \ - -e P2P_UDP_ANNOUNCE_ADDR=$PUBLIC_IP:$P2P_PORT \ - -e P2P_TCP_ANNOUNCE_ADDR=$PUBLIC_IP:$P2P_PORT \ - -e COINBASE=0xbaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa \ - -e VALIDATOR_DISABLED=false \ - -e VALIDATOR_PRIVATE_KEY=$VALIDATOR_PKEY \ - -e SEQ_PUBLISHER_PRIVATE_KEY=$VALIDATOR_PKEY \ - -e L1_PRIVATE_KEY=$VALIDATOR_PKEY \ - -e DEBUG="aztec:*,-aztec:avm_simulator*,-aztec:circuits:artifact_hash,-aztec:libp2p_service,-json-rpc*,-aztec:world-state:database,-aztec:l2_block_stream*" \ - -e LOG_LEVEL=debug \ - -e AZTEC_PORT=$NODE_PORT \ - -e P2P_ENABLED=true \ - -e 
L1_CHAIN_ID=1337 \ - -e PROVER_REAL_PROOFS=true \ - -e PXE_PROVER_ENABLED=true \ - -e ETHEREUM_SLOT_DURATION=12sec \ - -e AZTEC_SLOT_DURATION=36 \ - -e AZTEC_EPOCH_DURATION=32 \ - -e AZTEC_EPOCH_PROOF_CLAIM_WINDOW_IN_L2_SLOTS=13 \ - -e ETHEREUM_HOST=http://34.48.76.131:8545 \ - -e BOOTSTRAP_NODES=enr:-Jq4QO_3szmgtG2cbEdnFDIhpGAQkc1HwfNy4-M6sG9QmQbPTmp9PMOHR3xslfR23hORiU-GpA7uM9uXw49lFcnuuvYGjWF6dGVjX25ldHdvcmsBgmlkgnY0gmlwhCIwTIOJc2VjcDI1NmsxoQKQTN17XKCwjYSSwmTc-6YzCMhd3v6Ofl8TS-WunX6LCoN0Y3CCndCDdWRwgp3Q \ - -e REGISTRY_CONTRACT_ADDRESS=0x5fbdb2315678afecb367f032d93f642f64180aa3 \ - -e GOVERNANCE_PROPOSER_CONTRACT_ADDRESS=0x9fe46736679d2d9a65f0992f2272de9f3c7fa6e0 \ - -e FEE_JUICE_CONTRACT_ADDRESS=0xe7f1725e7734ce288f8367e1bb143e90bb3f0512 \ - -e ROLLUP_CONTRACT_ADDRESS=0x2279b7a0a67db372996a5fab50d91eaa73d2ebe6 \ - -e REWARD_DISTRIBUTOR_CONTRACT_ADDRESS=0x5fc8d32690cc91d4c39d9d3abcbd16989f875707 \ - -e GOVERNANCE_CONTRACT_ADDRESS=0xcf7ed3acca5a467e9e704c703e8d87f634fb0fc9 \ - -e COIN_ISSUER_CONTRACT_ADDRESS=0xdc64a140aa3e981100a9beca4e685f962f0cf6c9 \ - -e FEE_JUICE_PORTAL_CONTRACT_ADDRESS=0x0165878a594ca255338adfa4d48449f69242eb8f \ - -e INBOX_CONTRACT_ADDRESS=0xed179b78d5781f93eb169730d8ad1be7313123f4 \ - -e OUTBOX_CONTRACT_ADDRESS=0x1016b5aaa3270a65c315c664ecb238b6db270b64 \ - -e P2P_UDP_LISTEN_ADDR=0.0.0.0:$P2P_PORT \ - -e P2P_TCP_LISTEN_ADDR=0.0.0.0:$P2P_PORT \ - $IMAGE start --node --archiver --sequencer From 621cbafc49acee6fa4422fd5ebcccd6c27507670 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lvaro=20Rodr=C3=ADguez?= Date: Wed, 4 Dec 2024 10:06:31 +0100 Subject: [PATCH 17/24] chore: Refactor indexed tree to use traits (#10361) --- .../src/base/components/nullifier_tree.nr | 42 +--------- .../src/base/components/public_data_tree.nr | 44 ---------- .../types/src/abis/nullifier_leaf_preimage.nr | 19 ++++- .../data/public_data_tree_leaf_preimage.nr | 43 +++++++++- .../types/src/merkle_tree/indexed_tree.nr | 82 ++++++++++++------- .../indexed_tree/check_valid_low_leaf.nr | 29 ++++++- .../types/src/merkle_tree/leaf_preimage.nr | 18 +++- .../types/src/merkle_tree/membership.nr | 32 ++++++-- 8 files changed, 185 insertions(+), 124 deletions(-) diff --git a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/base/components/nullifier_tree.nr b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/base/components/nullifier_tree.nr index 6f83aa7e5ee..3d273ae97f1 100644 --- a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/base/components/nullifier_tree.nr +++ b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/base/components/nullifier_tree.nr @@ -8,7 +8,6 @@ use dep::types::{ NULLIFIER_TREE_HEIGHT, }, merkle_tree::{indexed_tree, MembershipWitness}, - utils::field::full_field_less_than, }; pub(crate) fn nullifier_tree_batch_insert( @@ -20,50 +19,13 @@ pub(crate) fn nullifier_tree_batch_insert( nullifier_predecessor_preimages: [NullifierLeafPreimage; MAX_NULLIFIERS_PER_TX], nullifier_predecessor_membership_witnesses: [MembershipWitness; MAX_NULLIFIERS_PER_TX], ) -> AppendOnlyTreeSnapshot { - indexed_tree::batch_insert( + indexed_tree::batch_insert::<_, _, _, _, NULLIFIER_SUBTREE_HEIGHT, NULLIFIER_TREE_HEIGHT>( start_snapshot, nullifiers, sorted_nullifiers, sorted_nullifiers_indexes, nullifier_subtree_sibling_path, nullifier_predecessor_preimages, - nullifier_predecessor_membership_witnesses.map( - |witness: MembershipWitness| { - MembershipWitness { - leaf_index: witness.leaf_index, - sibling_path: witness.sibling_path, - } - }, - ), - |low_leaf: 
NullifierLeafPreimage, nullifier: Field| { - // Is valid low leaf - let is_less_than_nullifier = full_field_less_than(low_leaf.nullifier, nullifier); - let is_next_greater_than = full_field_less_than(nullifier, low_leaf.next_nullifier); - - (!low_leaf.is_empty()) - & is_less_than_nullifier - & ( - is_next_greater_than - | ((low_leaf.next_index == 0) & (low_leaf.next_nullifier == 0)) - ) - }, - |low_leaf: NullifierLeafPreimage, nullifier: Field, nullifier_index: u32| { - // Update low leaf - NullifierLeafPreimage { - nullifier: low_leaf.nullifier, - next_nullifier: nullifier, - next_index: nullifier_index, - } - }, - |nullifier: Field, low_leaf: NullifierLeafPreimage| { - // Build insertion leaf - NullifierLeafPreimage { - nullifier: nullifier, - next_nullifier: low_leaf.next_nullifier, - next_index: low_leaf.next_index, - } - }, - [0; NULLIFIER_SUBTREE_HEIGHT], - [0; NULLIFIER_TREE_HEIGHT], + nullifier_predecessor_membership_witnesses, ) } diff --git a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/base/components/public_data_tree.nr b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/base/components/public_data_tree.nr index 4afaf9c75ce..129820e3d8c 100644 --- a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/base/components/public_data_tree.nr +++ b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/base/components/public_data_tree.nr @@ -4,7 +4,6 @@ use dep::types::{ data::{PublicDataTreeLeaf, PublicDataTreeLeafPreimage}, merkle_tree::{indexed_tree, MembershipWitness}, traits::is_empty, - utils::field::full_field_less_than, }; pub(crate) fn public_data_tree_insert( @@ -21,49 +20,6 @@ pub(crate) fn public_data_tree_insert( low_leaf_preimage, low_leaf_membership_witness, sibling_path, - |low_preimage: PublicDataTreeLeafPreimage, write: PublicDataTreeLeaf| { - // Is valid low preimage - let is_update = low_preimage.slot == write.slot; - let is_low_empty = low_preimage.is_empty(); - - let is_less_than_slot = full_field_less_than(low_preimage.slot, write.slot); - let is_next_greater_than = full_field_less_than(write.slot, low_preimage.next_slot); - let is_in_range = is_less_than_slot - & ( - is_next_greater_than - | ((low_preimage.next_index == 0) & (low_preimage.next_slot == 0)) - ); - - (!is_low_empty) & (is_update | is_in_range) - }, - |low_preimage: PublicDataTreeLeafPreimage, write: PublicDataTreeLeaf, write_index: u32| { - // Update low leaf - let is_update = low_preimage.slot == write.slot; - if is_update { - PublicDataTreeLeafPreimage { - slot: low_preimage.slot, - value: write.value, - next_slot: low_preimage.next_slot, - next_index: low_preimage.next_index, - } - } else { - PublicDataTreeLeafPreimage { - slot: low_preimage.slot, - value: low_preimage.value, - next_slot: write.slot, - next_index: write_index, - } - } - }, - |write: PublicDataTreeLeaf, low_preimage: PublicDataTreeLeafPreimage| { - // Build insertion leaf - PublicDataTreeLeafPreimage { - slot: write.slot, - value: write.value, - next_slot: low_preimage.next_slot, - next_index: low_preimage.next_index, - } - }, ) } else { start_snapshot diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/nullifier_leaf_preimage.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/nullifier_leaf_preimage.nr index 09504c10608..cdc559f353d 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/nullifier_leaf_preimage.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/nullifier_leaf_preimage.nr @@ -39,7 +39,7 @@ impl LeafPreimage for 
NullifierLeafPreimage { } } -impl IndexedTreeLeafPreimage for NullifierLeafPreimage { +impl IndexedTreeLeafPreimage for NullifierLeafPreimage { fn get_key(self) -> Field { self.nullifier } @@ -48,9 +48,26 @@ impl IndexedTreeLeafPreimage for NullifierLeafPreimage { self.next_nullifier } + fn points_to_infinity(self) -> bool { + (self.next_nullifier == 0) & (self.next_index == 0) + } + fn as_leaf(self) -> Field { self.hash() } + + fn update_pointers(self, next_key: Field, next_index: u32) -> Self { + Self { nullifier: self.nullifier, next_nullifier: next_key, next_index } + } + + fn update_value(self, _nullifier: Field) -> Self { + assert(false, "Tried to update a nullifier"); + Self::empty() + } + + fn build_insertion_leaf(nullifier: Field, low_leaf: Self) -> Self { + Self { nullifier, next_nullifier: low_leaf.next_nullifier, next_index: low_leaf.next_index } + } } impl Readable for NullifierLeafPreimage { diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/data/public_data_tree_leaf_preimage.nr b/noir-projects/noir-protocol-circuits/crates/types/src/data/public_data_tree_leaf_preimage.nr index e895921ce1a..558c4169f01 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/data/public_data_tree_leaf_preimage.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/data/public_data_tree_leaf_preimage.nr @@ -1,4 +1,8 @@ -use crate::{merkle_tree::leaf_preimage::IndexedTreeLeafPreimage, traits::{Empty, Hash}}; +use crate::{ + data::public_data_tree_leaf::PublicDataTreeLeaf, + merkle_tree::leaf_preimage::IndexedTreeLeafPreimage, + traits::{Empty, Hash}, +}; pub struct PublicDataTreeLeafPreimage { pub slot: Field, @@ -13,6 +17,15 @@ impl Empty for PublicDataTreeLeafPreimage { } } +impl Eq for PublicDataTreeLeafPreimage { + fn eq(self, other: Self) -> bool { + (self.slot == other.slot) + & (self.value == other.value) + & (self.next_slot == other.next_slot) + & (self.next_index == other.next_index) + } +} + impl Hash for PublicDataTreeLeafPreimage { fn hash(self) -> Field { if self.is_empty() { @@ -28,7 +41,7 @@ impl Hash for PublicDataTreeLeafPreimage { } } -impl IndexedTreeLeafPreimage for PublicDataTreeLeafPreimage { +impl IndexedTreeLeafPreimage for PublicDataTreeLeafPreimage { fn get_key(self) -> Field { self.slot } @@ -37,9 +50,35 @@ impl IndexedTreeLeafPreimage for PublicDataTreeLeafPreimage { self.next_slot } + fn points_to_infinity(self) -> bool { + (self.next_slot == 0) & (self.next_index == 0) + } + fn as_leaf(self) -> Field { self.hash() } + + fn update_pointers(self, next_slot: Field, next_index: u32) -> Self { + Self { slot: self.slot, value: self.value, next_slot, next_index } + } + + fn update_value(self, write: PublicDataTreeLeaf) -> Self { + Self { + slot: self.slot, + value: write.value, + next_slot: self.next_slot, + next_index: self.next_index, + } + } + + fn build_insertion_leaf(write: PublicDataTreeLeaf, low_leaf: Self) -> Self { + Self { + slot: write.slot, + value: write.value, + next_slot: low_leaf.next_slot, + next_index: low_leaf.next_index, + } + } } impl PublicDataTreeLeafPreimage { diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/merkle_tree/indexed_tree.nr b/noir-projects/noir-protocol-circuits/crates/types/src/merkle_tree/indexed_tree.nr index 6b1be23c529..ec5f7b434d2 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/merkle_tree/indexed_tree.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/merkle_tree/indexed_tree.nr @@ -7,7 +7,7 @@ use crate::{ membership::{assert_check_membership, 
MembershipWitness}, root::{calculate_empty_tree_root, calculate_subtree_root, root_from_sibling_path}, }, - traits::{Empty, Hash, is_empty}, + traits::{Empty, is_empty}, utils::arrays::check_permutation, }; @@ -19,15 +19,10 @@ pub fn batch_insert; SubtreeWidth], - is_valid_low_leaf: fn(Leaf, Value) -> bool, - update_low_leaf: fn(Leaf, Value, u32) -> Leaf, - build_insertion_leaf: fn(Value, Leaf) -> Leaf, - _subtree_height: [Field; SubtreeHeight], - _tree_height: [Field; TreeHeight], ) -> AppendOnlyTreeSnapshot where - Value: Eq + Empty, - Leaf: Hash + Empty, + Value: IndexedTreeLeafValue, + Leaf: IndexedTreeLeafPreimage, { // A permutation to the values is provided to make the insertion use only one insertion strategy // However, for the actual insertion in the tree the original order is respected, the sorting is only used for validation of the links @@ -36,7 +31,7 @@ where // Now, update the existing leaves with the new leaves let mut current_tree_root = start_snapshot.root; - let mut insertion_subtree = [Leaf::empty(); SubtreeWidth]; + let mut insertion_subtree = [Empty::empty(); SubtreeWidth]; let start_insertion_index = start_snapshot.next_available_leaf_index; for i in 0..sorted_values.len() { @@ -45,11 +40,23 @@ where let low_leaf_preimage = low_leaf_preimages[i]; let witness = low_leaf_membership_witnesses[i]; - assert(is_valid_low_leaf(low_leaf_preimage, value), "Invalid low leaf"); + // validate the low leaf + assert(!is_empty(low_leaf_preimage), "Empty low leaf"); + let value_key = value.get_key(); + let low_leaf_key = low_leaf_preimage.get_key(); + let low_leaf_next_key = low_leaf_preimage.get_next_key(); + let is_update = value_key == low_leaf_key; + + let is_less_than_slot = low_leaf_key.lt(value_key); + let is_next_greater_than = value_key.lt(low_leaf_next_key); + let is_in_range = + is_less_than_slot & (is_next_greater_than | low_leaf_preimage.points_to_infinity()); + + assert(is_update | is_in_range, "Invalid low leaf"); // perform membership check for the low leaf against the original root assert_check_membership( - low_leaf_preimage.hash(), + low_leaf_preimage.as_leaf(), witness.leaf_index, witness.sibling_path, current_tree_root, @@ -58,19 +65,26 @@ where let value_index = sorted_values_indexes[i]; // Calculate the new value of the low_leaf - let updated_low_leaf = update_low_leaf( - low_leaf_preimage, - value, - start_insertion_index as u32 + value_index, - ); + let updated_low_leaf = if is_update { + low_leaf_preimage.update_value(value) + } else { + low_leaf_preimage.update_pointers( + value_key, + start_insertion_index as u32 + value_index, + ) + }; current_tree_root = root_from_sibling_path( - updated_low_leaf.hash(), + updated_low_leaf.as_leaf(), witness.leaf_index, witness.sibling_path, ); - insertion_subtree[value_index] = build_insertion_leaf(value, low_leaf_preimage); + insertion_subtree[value_index] = if is_update { + Empty::empty() + } else { + Leaf::build_insertion_leaf(value, low_leaf_preimage) + }; } } @@ -85,7 +99,7 @@ where ); // Create new subtree to insert into the whole indexed tree - let subtree_root = calculate_subtree_root(insertion_subtree.map(|leaf: Leaf| leaf.hash())); + let subtree_root = calculate_subtree_root(insertion_subtree.map(|leaf: Leaf| leaf.as_leaf())); // Calculate the new root // We are inserting a subtree rather than a full tree here @@ -108,15 +122,24 @@ pub fn insert( low_leaf_preimage: Leaf, low_leaf_membership_witness: MembershipWitness, insertion_sibling_path: [Field; TreeHeight], - is_valid_low_leaf: fn(Leaf, Value) -> bool, - 
update_low_leaf: fn(Leaf, Value, u32) -> Leaf, - build_insertion_leaf: fn(Value, Leaf) -> Leaf, ) -> AppendOnlyTreeSnapshot where Value: IndexedTreeLeafValue, - Leaf: IndexedTreeLeafPreimage, + Leaf: IndexedTreeLeafPreimage, { - assert(is_valid_low_leaf(low_leaf_preimage, value), "Invalid low leaf"); + // validate the low leaf + assert(!is_empty(low_leaf_preimage), "Empty low leaf"); + let value_key = value.get_key(); + let low_leaf_key = low_leaf_preimage.get_key(); + let low_leaf_next_key = low_leaf_preimage.get_next_key(); + let is_update = value_key == low_leaf_key; + + let is_less_than_slot = low_leaf_key.lt(value_key); + let is_next_greater_than = value_key.lt(low_leaf_next_key); + let is_in_range = + is_less_than_slot & (is_next_greater_than | low_leaf_preimage.points_to_infinity()); + + assert(is_update | is_in_range, "Invalid low leaf"); // perform membership check for the low leaf against the original root assert_check_membership( @@ -127,8 +150,11 @@ where ); // Calculate the new value of the low_leaf - let updated_low_leaf = - update_low_leaf(low_leaf_preimage, value, snapshot.next_available_leaf_index); + let updated_low_leaf = if is_update { + low_leaf_preimage.update_value(value) + } else { + low_leaf_preimage.update_pointers(value_key, snapshot.next_available_leaf_index) + }; // Update low leaf snapshot.root = root_from_sibling_path( @@ -137,11 +163,11 @@ where low_leaf_membership_witness.sibling_path, ); - if low_leaf_preimage.get_key() == value.get_key() { + if is_update { // If it's an update, we don't need to insert the new leaf and advance the tree snapshot } else { - let insertion_leaf = build_insertion_leaf(value, low_leaf_preimage); + let insertion_leaf = Leaf::build_insertion_leaf(value, low_leaf_preimage); assert_check_membership( 0, snapshot.next_available_leaf_index as Field, diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/merkle_tree/indexed_tree/check_valid_low_leaf.nr b/noir-projects/noir-protocol-circuits/crates/types/src/merkle_tree/indexed_tree/check_valid_low_leaf.nr index 9a42a21dafe..8fbe3334ee4 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/merkle_tree/indexed_tree/check_valid_low_leaf.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/merkle_tree/indexed_tree/check_valid_low_leaf.nr @@ -1,8 +1,11 @@ use crate::merkle_tree::leaf_preimage::IndexedTreeLeafPreimage; -pub fn assert_check_valid_low_leaf(key: Field, low_leaf_preimage: LEAF_PREIMAGE) +pub fn assert_check_valid_low_leaf( + key: Field, + low_leaf_preimage: LEAF_PREIMAGE, +) where - LEAF_PREIMAGE: IndexedTreeLeafPreimage, + LEAF_PREIMAGE: IndexedTreeLeafPreimage, { let low_key = low_leaf_preimage.get_key(); let next_key = low_leaf_preimage.get_next_key(); @@ -29,7 +32,13 @@ mod tests { } } - impl IndexedTreeLeafPreimage for TestLeafPreimage { + impl Eq for TestLeafPreimage { + fn eq(self, other: Self) -> bool { + (self.value == other.value) & (self.next_value == other.next_value) + } + } + + impl IndexedTreeLeafPreimage for TestLeafPreimage { fn get_key(self) -> Field { self.value } @@ -38,9 +47,23 @@ mod tests { self.next_value } + fn points_to_infinity(self) -> bool { + (self.next_value == 0) + } + fn as_leaf(self) -> Field { self.value } + + fn update_pointers(self, next_value: Field, _next_index: u32) -> Self { + Self { value: self.value, next_value } + } + fn update_value(self, value: Field) -> Self { + Self { value, next_value: self.next_value } + } + fn build_insertion_leaf(value: Field, low_leaf: Self) -> Self { + Self { value, 
next_value: low_leaf.next_value } + } } #[test] diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/merkle_tree/leaf_preimage.nr b/noir-projects/noir-protocol-circuits/crates/types/src/merkle_tree/leaf_preimage.nr index dab825f5664..c1192a03a0d 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/merkle_tree/leaf_preimage.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/merkle_tree/leaf_preimage.nr @@ -5,12 +5,28 @@ pub trait LeafPreimage { fn as_leaf(self) -> Field; } -pub trait IndexedTreeLeafPreimage: Empty { +pub trait IndexedTreeLeafPreimage: Eq + Empty { fn get_key(self) -> Field; + fn get_next_key(self) -> Field; + fn as_leaf(self) -> Field; + + fn points_to_infinity(self) -> bool; + + fn update_pointers(self, next_key: Field, next_index: u32) -> Self; + + fn update_value(self, value: Value) -> Self; + + fn build_insertion_leaf(value: Value, low_leaf: Self) -> Self; } pub trait IndexedTreeLeafValue: Eq + Empty { fn get_key(self) -> Field; } + +impl IndexedTreeLeafValue for Field { + fn get_key(self) -> Field { + self + } +} diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/merkle_tree/membership.nr b/noir-projects/noir-protocol-circuits/crates/types/src/merkle_tree/membership.nr index 34ab55a678d..61bc479d2fc 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/merkle_tree/membership.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/merkle_tree/membership.nr @@ -36,14 +36,14 @@ pub fn assert_check_membership( assert(check_membership(leaf, index, sibling_path, root), "membership check failed"); } -pub fn assert_check_non_membership( +pub fn assert_check_non_membership( key: Field, low_leaf_preimage: LEAF_PREIMAGE, low_leaf_membership_witness: MembershipWitness, tree_root: Field, ) where - LEAF_PREIMAGE: IndexedTreeLeafPreimage, + LEAF_PREIMAGE: IndexedTreeLeafPreimage, { assert_check_valid_low_leaf(key, low_leaf_preimage); @@ -58,7 +58,7 @@ where // Prove either membership or non-membership depending on the value of `exists`. // If `exists` == false, `key` is not in the tree, `leaf_preimage` and `membership_witness` are for the low leaf. 
-pub fn conditionally_assert_check_membership( +pub fn conditionally_assert_check_membership( key: Field, exists: bool, leaf_preimage: LEAF_PREIMAGE, @@ -66,7 +66,7 @@ pub fn conditionally_assert_check_membership, { if exists { assert(key == leaf_preimage.get_key(), "Key does not match the key of the leaf preimage"); @@ -117,7 +117,13 @@ mod tests { } } - impl IndexedTreeLeafPreimage for TestLeafPreimage { + impl Eq for TestLeafPreimage { + fn eq(self, other: Self) -> bool { + (self.value == other.value) & (self.next_value == other.next_value) + } + } + + impl IndexedTreeLeafPreimage for TestLeafPreimage { fn get_key(self) -> Field { self.value } @@ -126,9 +132,25 @@ mod tests { self.next_value } + fn points_to_infinity(self) -> bool { + (self.next_value == 0) + } + fn as_leaf(self) -> Field { pedersen_hash([self.value]) } + + fn update_pointers(self, next_value: Field, _next_index: u32) -> Self { + Self { value: self.value, next_value } + } + + fn update_value(self, value: Field) -> Self { + Self { value, next_value: self.next_value } + } + + fn build_insertion_leaf(value: Field, low_leaf: Self) -> Self { + Self { value, next_value: low_leaf.next_value } + } } global leaf_preimages: [TestLeafPreimage; 4] = [ From d6985a80e82ee671a562866d7ed978c6f6e1b659 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20Pedro=20Sousa?= Date: Wed, 4 Dec 2024 12:02:25 +0000 Subject: [PATCH 18/24] fix: nit (#10392) i'm an idiot, but since I'm here I'll set up the redirect too --- iac/main.tf | 49 ++++++++++++++++++ spartan/releases/{rough-rhino => }/.gitignore | 0 spartan/releases/{rough-rhino => }/README.md | 2 +- .../{rough-rhino => }/assets/banner.jpeg | Bin .../{rough-rhino => }/create-spartan.sh | 2 +- 5 files changed, 51 insertions(+), 2 deletions(-) rename spartan/releases/{rough-rhino => }/.gitignore (100%) rename spartan/releases/{rough-rhino => }/README.md (98%) rename spartan/releases/{rough-rhino => }/assets/banner.jpeg (100%) rename spartan/releases/{rough-rhino => }/create-spartan.sh (85%) diff --git a/iac/main.tf b/iac/main.tf index 5e1dec466d6..46b145be06a 100644 --- a/iac/main.tf +++ b/iac/main.tf @@ -125,3 +125,52 @@ resource "aws_route53_record" "static" { evaluate_target_health = true } } + +resource "aws_s3_bucket" "sp_testnet_redirect" { + bucket = "sp-testnet.aztec.network" + + website { + redirect_all_requests_to { + host_name = "github.com" + protocol = "https" + path = "/AztecProtocol/aztec-packages/refs/heads/master/spartan/releases/create-spartan.sh" + } + } +} + +resource "aws_s3_bucket_public_access_block" "sp_testnet_public_access" { + bucket = aws_s3_bucket.sp_testnet_redirect.id + + block_public_acls = false + block_public_policy = false + ignore_public_acls = false + restrict_public_buckets = false +} + +resource "aws_s3_bucket_policy" "sp_testnet_policy" { + bucket = aws_s3_bucket.sp_testnet_redirect.id + + policy = jsonencode({ + Version = "2012-10-17" + Statement = [ + { + Effect = "Allow" + Principal = "*" + Action = "s3:GetObject" + Resource = "arn:aws:s3:::${aws_s3_bucket.sp_testnet_redirect.id}/*" + } + ] + }) +} + +resource "aws_route53_record" "sp_testnet" { + zone_id = data.terraform_remote_state.aztec2_iac.outputs.aws_route53_zone_id + name = "sp-testnet.aztec.network" + type = "A" + + alias { + name = aws_s3_bucket.sp_testnet_redirect.website_domain + zone_id = aws_s3_bucket.sp_testnet_redirect.hosted_zone_id + evaluate_target_health = true + } +} diff --git a/spartan/releases/rough-rhino/.gitignore b/spartan/releases/.gitignore similarity index 100% 
rename from spartan/releases/rough-rhino/.gitignore rename to spartan/releases/.gitignore diff --git a/spartan/releases/rough-rhino/README.md b/spartan/releases/README.md similarity index 98% rename from spartan/releases/rough-rhino/README.md rename to spartan/releases/README.md index 7e64b12a3aa..527762ae112 100644 --- a/spartan/releases/rough-rhino/README.md +++ b/spartan/releases/README.md @@ -18,7 +18,7 @@ It should work in most UNIX-based machines. To configure a new node, create a new directory and run the install script: ```bash -cd val1 +mkdir val1 && cd val1 curl -L https://raw.githubusercontent.com/AztecProtocol/aztec-packages/refs/heads/master/spartan/releases/rough-rhino/create-spartan.sh | bash ``` diff --git a/spartan/releases/rough-rhino/assets/banner.jpeg b/spartan/releases/assets/banner.jpeg similarity index 100% rename from spartan/releases/rough-rhino/assets/banner.jpeg rename to spartan/releases/assets/banner.jpeg diff --git a/spartan/releases/rough-rhino/create-spartan.sh b/spartan/releases/create-spartan.sh similarity index 85% rename from spartan/releases/rough-rhino/create-spartan.sh rename to spartan/releases/create-spartan.sh index 870263926eb..7fd1dd89d3b 100755 --- a/spartan/releases/rough-rhino/create-spartan.sh +++ b/spartan/releases/create-spartan.sh @@ -1,7 +1,7 @@ #!/bin/bash # URL of the aztec-spartan.sh script -DEFAULT_URL="https://raw.githubusercontent.com/AztecProtocol/aztec-packages/refs/heads/zpedro/testnet_docker_compose/spartan/releases/rough-rhino/aztec-spartan.sh" +DEFAULT_URL="https://raw.githubusercontent.com/AztecProtocol/aztec-packages/refs/heads/master/spartan/releases/rough-rhino/aztec-spartan.sh" # Colors for output GREEN='\033[0;32m' From 17fa214a5af4eb8364b09fc3e148fcd3a8949779 Mon Sep 17 00:00:00 2001 From: spypsy Date: Wed, 4 Dec 2024 13:05:15 +0000 Subject: [PATCH 19/24] feat: Gas Utils for L1 operations (#9834) Fixes #9833 Introducing `L1TxUtils` which is used for submitting & monitoring L1 transactions. The utils handle gas-pricing by adding safety buffers to ensure tx goes in + can monitor a stuck transaction and attempt to speed it up by re-submitting with a higher gas fee. 
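
For context, the sketch below illustrates the general pattern described above: estimate fees, add a safety buffer, then periodically check whether the transaction has been mined and re-submit it with bumped fees if it appears stuck. This is only an illustration, not the actual `L1TxUtils` API: the `GasClient` interface, the `sendWithGasBuffer` helper, and the buffer/speed-up percentages are invented for this example; the real implementation is in `yarn-project/ethereum/src/l1_tx_utils.ts` in the diff below.

```ts
// Illustrative sketch only. GasClient, GasSettings, sendWithGasBuffer and the
// percentages below are made up for this example and are not the L1TxUtils API.

interface GasSettings {
  maxFeePerGas: bigint;
  maxPriorityFeePerGas: bigint;
}

interface GasClient {
  estimateFees(): Promise<GasSettings>;
  sendTransaction(tx: { to: `0x${string}`; data: `0x${string}` } & GasSettings): Promise<`0x${string}`>;
  isMined(txHash: `0x${string}`): Promise<boolean>;
}

const BUFFER_PERCENT = 20n;   // safety buffer added on top of the fee estimate
const SPEED_UP_PERCENT = 15n; // bump applied when re-submitting a stuck tx

const addPercent = (value: bigint, percent: bigint) => value + (value * percent) / 100n;

async function sendWithGasBuffer(
  client: GasClient,
  tx: { to: `0x${string}`; data: `0x${string}` },
  checkIntervalMs = 12_000,
  maxAttempts = 3,
): Promise<`0x${string}`> {
  // Start from the node's estimate plus a safety buffer so the tx is attractive to include.
  let { maxFeePerGas, maxPriorityFeePerGas } = await client.estimateFees();
  maxFeePerGas = addPercent(maxFeePerGas, BUFFER_PERCENT);
  maxPriorityFeePerGas = addPercent(maxPriorityFeePerGas, BUFFER_PERCENT);

  let hash = await client.sendTransaction({ ...tx, maxFeePerGas, maxPriorityFeePerGas });

  for (let attempt = 1; attempt < maxAttempts; attempt++) {
    await new Promise(resolve => setTimeout(resolve, checkIntervalMs));
    if (await client.isMined(hash)) {
      return hash;
    }
    // Stuck: re-submit (with the same nonce in a real implementation) at higher fees.
    maxFeePerGas = addPercent(maxFeePerGas, SPEED_UP_PERCENT);
    maxPriorityFeePerGas = addPercent(maxPriorityFeePerGas, SPEED_UP_PERCENT);
    hash = await client.sendTransaction({ ...tx, maxFeePerGas, maxPriorityFeePerGas });
  }
  return hash;
}
```

Note that a real speed-up must re-use the pending transaction's nonce, and most execution clients only accept the replacement if both fee fields are bumped by at least a minimum percentage.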
--- cspell.json | 1 + .../files/config/config-prover-env.sh | 4 +- .../files/config/config-validator-env.sh | 6 +- .../aztec-node/src/aztec-node/server.ts | 24 ++ yarn-project/aztec.js/src/index.ts | 3 +- .../aztec.js/src/utils/cheat_codes.ts | 247 +---------- .../src/interfaces/aztec-node.test.ts | 28 ++ .../src/interfaces/aztec-node.ts | 11 + yarn-project/cli/src/cmds/l1/index.ts | 2 +- .../cli/src/cmds/l1/update_l1_validators.ts | 17 +- .../cli/src/cmds/pxe/get_node_info.ts | 11 +- yarn-project/cli/src/cmds/pxe/index.ts | 15 +- .../scripts/native-network/boot-node.sh | 8 +- .../native-network/deploy-l1-contracts.sh | 33 +- .../scripts/native-network/prover-node.sh | 11 +- .../end-to-end/scripts/native-network/pxe.sh | 11 +- .../scripts/native-network/test-transfer.sh | 1 + .../scripts/native-network/transaction-bot.sh | 29 +- .../scripts/native-network/validator.sh | 58 ++- .../scripts/native-network/validators.sh | 27 +- .../composed/integration_l1_publisher.test.ts | 20 +- yarn-project/ethereum/package.json | 2 + .../ethereum/src/deploy_l1_contracts.ts | 37 +- yarn-project/ethereum/src/eth_cheat_codes.ts | 316 ++++++++++++++ yarn-project/ethereum/src/index.ts | 4 +- yarn-project/ethereum/src/l1_tx_utils.test.ts | 302 +++++++++++++ yarn-project/ethereum/src/l1_tx_utils.ts | 400 ++++++++++++++++++ yarn-project/foundation/src/config/env_var.ts | 12 +- .../sequencer-client/src/publisher/config.ts | 11 +- .../src/publisher/l1-publisher.test.ts | 101 ++--- .../src/publisher/l1-publisher.ts | 118 ++++-- yarn-project/telemetry-client/src/config.ts | 6 +- 32 files changed, 1429 insertions(+), 447 deletions(-) create mode 100644 yarn-project/ethereum/src/eth_cheat_codes.ts create mode 100644 yarn-project/ethereum/src/l1_tx_utils.test.ts create mode 100644 yarn-project/ethereum/src/l1_tx_utils.ts diff --git a/cspell.json b/cspell.json index 6a62badf5be..8b9e5d15e7e 100644 --- a/cspell.json +++ b/cspell.json @@ -11,6 +11,7 @@ "asyncify", "auditability", "authwit", + "Automine", "autonat", "autorun", "awslogs", diff --git a/spartan/aztec-network/files/config/config-prover-env.sh b/spartan/aztec-network/files/config/config-prover-env.sh index 11c4ad5aef2..a3eccd01c1b 100644 --- a/spartan/aztec-network/files/config/config-prover-env.sh +++ b/spartan/aztec-network/files/config/config-prover-env.sh @@ -3,7 +3,7 @@ set -eu # Pass the bootnode url as an argument # Ask the bootnode for l1 contract addresses -output=$(node --no-warnings /usr/src/yarn-project/aztec/dest/bin/index.js get-node-info -u $1) +output=$(node --no-warnings /usr/src/yarn-project/aztec/dest/bin/index.js get-node-info --node-url $1) echo "$output" @@ -20,7 +20,7 @@ governance_proposer_address=$(echo "$output" | grep -oP 'GovernanceProposer Addr governance_address=$(echo "$output" | grep -oP 'Governance Address: \K0x[a-fA-F0-9]{40}') # Write the addresses to a file in the shared volume -cat < /shared/contracts/contracts.env +cat </shared/contracts/contracts.env export BOOTSTRAP_NODES=$boot_node_enr export ROLLUP_CONTRACT_ADDRESS=$rollup_address export REGISTRY_CONTRACT_ADDRESS=$registry_address diff --git a/spartan/aztec-network/files/config/config-validator-env.sh b/spartan/aztec-network/files/config/config-validator-env.sh index 71d03fbbc98..6483168f16d 100644 --- a/spartan/aztec-network/files/config/config-validator-env.sh +++ b/spartan/aztec-network/files/config/config-validator-env.sh @@ -1,10 +1,9 @@ #!/bin/bash set -eu - # Pass the bootnode url as an argument # Ask the bootnode for l1 contract addresses -output=$(node 
--no-warnings /usr/src/yarn-project/aztec/dest/bin/index.js get-node-info -u $1) +output=$(node --no-warnings /usr/src/yarn-project/aztec/dest/bin/index.js get-node-info --node-url $1) echo "$output" @@ -25,9 +24,8 @@ governance_address=$(echo "$output" | grep -oP 'Governance Address: \K0x[a-fA-F0 INDEX=$(echo $POD_NAME | awk -F'-' '{print $NF}') private_key=$(jq -r ".[$INDEX]" /app/config/keys.json) - # Write the addresses to a file in the shared volume -cat < /shared/contracts/contracts.env +cat </shared/contracts/contracts.env export BOOTSTRAP_NODES=$boot_node_enr export ROLLUP_CONTRACT_ADDRESS=$rollup_address export REGISTRY_CONTRACT_ADDRESS=$registry_address diff --git a/yarn-project/aztec-node/src/aztec-node/server.ts b/yarn-project/aztec-node/src/aztec-node/server.ts index 587d97371a4..57690bd78d9 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.ts @@ -45,6 +45,7 @@ import { type L1_TO_L2_MSG_TREE_HEIGHT, type NOTE_HASH_TREE_HEIGHT, type NULLIFIER_TREE_HEIGHT, + type NodeInfo, type NullifierLeafPreimage, type PUBLIC_DATA_TREE_HEIGHT, type PrivateLog, @@ -237,6 +238,29 @@ export class AztecNodeService implements AztecNode { return Promise.resolve(this.p2pClient.isReady() ?? false); } + public async getNodeInfo(): Promise { + const [nodeVersion, protocolVersion, chainId, enr, contractAddresses, protocolContractAddresses] = + await Promise.all([ + this.getNodeVersion(), + this.getVersion(), + this.getChainId(), + this.getEncodedEnr(), + this.getL1ContractAddresses(), + this.getProtocolContractAddresses(), + ]); + + const nodeInfo: NodeInfo = { + nodeVersion, + l1ChainId: chainId, + protocolVersion, + enr, + l1ContractAddresses: contractAddresses, + protocolContractAddresses: protocolContractAddresses, + }; + + return nodeInfo; + } + /** * Get a block specified by its number. * @param number - The block number being requested. diff --git a/yarn-project/aztec.js/src/index.ts b/yarn-project/aztec.js/src/index.ts index ca6f1011172..3a67fd9a6fd 100644 --- a/yarn-project/aztec.js/src/index.ts +++ b/yarn-project/aztec.js/src/index.ts @@ -42,7 +42,6 @@ export { ContractDeployer } from './deployment/index.js'; export { AnvilTestWatcher, CheatCodes, - EthCheatCodes, L1FeeJuicePortalManager, L1ToL2TokenPortalManager, L1TokenManager, @@ -165,7 +164,7 @@ export { elapsed } from '@aztec/foundation/timer'; export { type FieldsOf } from '@aztec/foundation/types'; export { fileURLToPath } from '@aztec/foundation/url'; -export { deployL1Contract, deployL1Contracts, type DeployL1Contracts } from '@aztec/ethereum'; +export { type DeployL1Contracts, EthCheatCodes, deployL1Contract, deployL1Contracts } from '@aztec/ethereum'; // Start of section that exports public api via granular api. // Here you *can* do `export *` as the granular api defacto exports things explicitly. 
diff --git a/yarn-project/aztec.js/src/utils/cheat_codes.ts b/yarn-project/aztec.js/src/utils/cheat_codes.ts index f35ae53c25f..87048d1c0e1 100644 --- a/yarn-project/aztec.js/src/utils/cheat_codes.ts +++ b/yarn-project/aztec.js/src/utils/cheat_codes.ts @@ -1,13 +1,10 @@ import { type EpochProofClaim, type Note, type PXE } from '@aztec/circuit-types'; import { type AztecAddress, EthAddress, Fr } from '@aztec/circuits.js'; import { deriveStorageSlotInMap } from '@aztec/circuits.js/hash'; -import { type L1ContractAddresses } from '@aztec/ethereum'; -import { toBigIntBE, toHex } from '@aztec/foundation/bigint-buffer'; -import { keccak256 } from '@aztec/foundation/crypto'; +import { EthCheatCodes, type L1ContractAddresses } from '@aztec/ethereum'; import { createDebugLogger } from '@aztec/foundation/log'; import { RollupAbi } from '@aztec/l1-artifacts'; -import fs from 'fs'; import { type GetContractReturnType, type Hex, @@ -49,248 +46,6 @@ export class CheatCodes { } } -/** - * A class that provides utility functions for interacting with ethereum (L1). - */ -export class EthCheatCodes { - constructor( - /** - * The RPC URL to use for interacting with the chain - */ - public rpcUrl: string, - /** - * The logger to use for the eth cheatcodes - */ - public logger = createDebugLogger('aztec:cheat_codes:eth'), - ) {} - - async rpcCall(method: string, params: any[]) { - const paramsString = JSON.stringify(params); - const content = { - body: `{"jsonrpc":"2.0", "method": "${method}", "params": ${paramsString}, "id": 1}`, - method: 'POST', - headers: { 'Content-Type': 'application/json' }, - }; - return await (await fetch(this.rpcUrl, content)).json(); - } - - /** - * Get the auto mine status of the underlying chain - * @returns True if automine is on, false otherwise - */ - public async isAutoMining(): Promise { - try { - const res = await this.rpcCall('anvil_getAutomine', []); - return res.result; - } catch (err) { - this.logger.error(`Calling "anvil_getAutomine" failed with:`, err); - } - return false; - } - - /** - * Get the current blocknumber - * @returns The current block number - */ - public async blockNumber(): Promise { - const res = await this.rpcCall('eth_blockNumber', []); - return parseInt(res.result, 16); - } - - /** - * Get the current chainId - * @returns The current chainId - */ - public async chainId(): Promise { - const res = await this.rpcCall('eth_chainId', []); - return parseInt(res.result, 16); - } - - /** - * Get the current timestamp - * @returns The current timestamp - */ - public async timestamp(): Promise { - const res = await this.rpcCall('eth_getBlockByNumber', ['latest', true]); - return parseInt(res.result.timestamp, 16); - } - - /** - * Advance the chain by a number of blocks - * @param numberOfBlocks - The number of blocks to mine - * @returns The current chainId - */ - public async mine(numberOfBlocks = 1): Promise { - const res = await this.rpcCall('hardhat_mine', [numberOfBlocks]); - if (res.error) { - throw new Error(`Error mining: ${res.error.message}`); - } - this.logger.verbose(`Mined ${numberOfBlocks} L1 blocks`); - } - - /** - * Set the balance of an account - * @param account - The account to set the balance for - * @param balance - The balance to set - */ - public async setBalance(account: EthAddress, balance: bigint): Promise { - const res = await this.rpcCall('anvil_setBalance', [account.toString(), toHex(balance)]); - if (res.error) { - throw new Error(`Error setting balance for ${account}: ${res.error.message}`); - } - this.logger.verbose(`Set balance 
for ${account} to ${balance}`); - } - - /** - * Set the interval between blocks (block time) - * @param interval - The interval to use between blocks - */ - public async setBlockInterval(interval: number): Promise { - const res = await this.rpcCall('anvil_setBlockTimestampInterval', [interval]); - if (res.error) { - throw new Error(`Error setting block interval: ${res.error.message}`); - } - this.logger.verbose(`Set L1 block interval to ${interval}`); - } - - /** - * Set the next block timestamp - * @param timestamp - The timestamp to set the next block to - */ - public async setNextBlockTimestamp(timestamp: number): Promise { - const res = await this.rpcCall('evm_setNextBlockTimestamp', [timestamp]); - if (res.error) { - throw new Error(`Error setting next block timestamp: ${res.error.message}`); - } - this.logger.verbose(`Set L1 next block timestamp to ${timestamp}`); - } - - /** - * Set the next block timestamp and mines the block - * @param timestamp - The timestamp to set the next block to - */ - public async warp(timestamp: number | bigint): Promise { - const res = await this.rpcCall('evm_setNextBlockTimestamp', [Number(timestamp)]); - if (res.error) { - throw new Error(`Error warping: ${res.error.message}`); - } - await this.mine(); - this.logger.verbose(`Warped L1 timestamp to ${timestamp}`); - } - - /** - * Dumps the current chain state to a file. - * @param fileName - The file name to dump state into - */ - public async dumpChainState(fileName: string): Promise { - const res = await this.rpcCall('hardhat_dumpState', []); - if (res.error) { - throw new Error(`Error dumping state: ${res.error.message}`); - } - const jsonContent = JSON.stringify(res.result); - fs.writeFileSync(`${fileName}.json`, jsonContent, 'utf8'); - this.logger.verbose(`Dumped state to ${fileName}`); - } - - /** - * Loads the chain state from a file. - * @param fileName - The file name to load state from - */ - public async loadChainState(fileName: string): Promise { - const data = JSON.parse(fs.readFileSync(`${fileName}.json`, 'utf8')); - const res = await this.rpcCall('hardhat_loadState', [data]); - if (res.error) { - throw new Error(`Error loading state: ${res.error.message}`); - } - this.logger.verbose(`Loaded state from ${fileName}`); - } - - /** - * Load the value at a storage slot of a contract address on eth - * @param contract - The contract address - * @param slot - The storage slot - * @returns - The value at the storage slot - */ - public async load(contract: EthAddress, slot: bigint): Promise { - const res = await this.rpcCall('eth_getStorageAt', [contract.toString(), toHex(slot), 'latest']); - return BigInt(res.result); - } - - /** - * Set the value at a storage slot of a contract address on eth - * @param contract - The contract address - * @param slot - The storage slot - * @param value - The value to set the storage slot to - */ - public async store(contract: EthAddress, slot: bigint, value: bigint): Promise { - // for the rpc call, we need to change value to be a 32 byte hex string. - const res = await this.rpcCall('hardhat_setStorageAt', [contract.toString(), toHex(slot), toHex(value, true)]); - if (res.error) { - throw new Error(`Error setting storage for contract ${contract} at ${slot}: ${res.error.message}`); - } - this.logger.verbose(`Set L1 storage for contract ${contract} at ${slot} to ${value}`); - } - - /** - * Computes the slot value for a given map and key. 
- * @param baseSlot - The base slot of the map (specified in Aztec.nr contract) - * @param key - The key to lookup in the map - * @returns The storage slot of the value in the map - */ - public keccak256(baseSlot: bigint, key: bigint): bigint { - // abi encode (removing the 0x) - concat key and baseSlot (both padded to 32 bytes) - const abiEncoded = toHex(key, true).substring(2) + toHex(baseSlot, true).substring(2); - return toBigIntBE(keccak256(Buffer.from(abiEncoded, 'hex'))); - } - - /** - * Send transactions impersonating an externally owned account or contract. - * @param who - The address to impersonate - */ - public async startImpersonating(who: EthAddress | Hex): Promise { - const res = await this.rpcCall('hardhat_impersonateAccount', [who.toString()]); - if (res.error) { - throw new Error(`Error impersonating ${who}: ${res.error.message}`); - } - this.logger.verbose(`Impersonating ${who}`); - } - - /** - * Stop impersonating an account that you are currently impersonating. - * @param who - The address to stop impersonating - */ - public async stopImpersonating(who: EthAddress | Hex): Promise { - const res = await this.rpcCall('hardhat_stopImpersonatingAccount', [who.toString()]); - if (res.error) { - throw new Error(`Error when stopping the impersonation of ${who}: ${res.error.message}`); - } - this.logger.verbose(`Stopped impersonating ${who}`); - } - - /** - * Set the bytecode for a contract - * @param contract - The contract address - * @param bytecode - The bytecode to set - */ - public async etch(contract: EthAddress, bytecode: `0x${string}`): Promise { - const res = await this.rpcCall('hardhat_setCode', [contract.toString(), bytecode]); - if (res.error) { - throw new Error(`Error setting bytecode for ${contract}: ${res.error.message}`); - } - this.logger.verbose(`Set bytecode for ${contract} to ${bytecode}`); - } - - /** - * Get the bytecode for a contract - * @param contract - The contract address - * @returns The bytecode for the contract - */ - public async getBytecode(contract: EthAddress): Promise<`0x${string}`> { - const res = await this.rpcCall('eth_getCode', [contract.toString(), 'latest']); - return res.result; - } -} - /** Cheat codes for the L1 rollup contract. 
*/ export class RollupCheatCodes { private client: WalletClient & PublicClient; diff --git a/yarn-project/circuit-types/src/interfaces/aztec-node.test.ts b/yarn-project/circuit-types/src/interfaces/aztec-node.test.ts index c2db85e5250..30d729f4750 100644 --- a/yarn-project/circuit-types/src/interfaces/aztec-node.test.ts +++ b/yarn-project/circuit-types/src/interfaces/aztec-node.test.ts @@ -10,6 +10,7 @@ import { L1_TO_L2_MSG_TREE_HEIGHT, NOTE_HASH_TREE_HEIGHT, NULLIFIER_TREE_HEIGHT, + type NodeInfo, PUBLIC_DATA_TREE_HEIGHT, PrivateLog, type ProtocolContractAddresses, @@ -179,6 +180,19 @@ describe('AztecNodeApiSchema', () => { expect(response).toBe(true); }); + it('getNodeInfo', async () => { + const response = await context.client.getNodeInfo(); + expect(response).toEqual({ + ...(await handler.getNodeInfo()), + l1ContractAddresses: Object.fromEntries( + L1ContractsNames.map(name => [name, expect.any(EthAddress)]), + ) as L1ContractAddresses, + protocolContractAddresses: Object.fromEntries( + ProtocolContractsNames.map(name => [name, expect.any(AztecAddress)]), + ) as ProtocolContractAddresses, + }); + }); + it('getBlocks', async () => { const response = await context.client.getBlocks(1, 1); expect(response).toHaveLength(1); @@ -451,6 +465,20 @@ class MockAztecNode implements AztecNode { isReady(): Promise { return Promise.resolve(true); } + getNodeInfo(): Promise { + return Promise.resolve({ + nodeVersion: '1.0', + l1ChainId: 1, + protocolVersion: 1, + enr: 'enr', + l1ContractAddresses: Object.fromEntries( + L1ContractsNames.map(name => [name, EthAddress.random()]), + ) as L1ContractAddresses, + protocolContractAddresses: Object.fromEntries( + ProtocolContractsNames.map(name => [name, AztecAddress.random()]), + ) as ProtocolContractAddresses, + }); + } getBlocks(from: number, limit: number): Promise { return Promise.resolve(times(limit, i => L2Block.random(from + i))); } diff --git a/yarn-project/circuit-types/src/interfaces/aztec-node.ts b/yarn-project/circuit-types/src/interfaces/aztec-node.ts index 457d188acf0..96ac1a1f3ed 100644 --- a/yarn-project/circuit-types/src/interfaces/aztec-node.ts +++ b/yarn-project/circuit-types/src/interfaces/aztec-node.ts @@ -9,6 +9,8 @@ import { L1_TO_L2_MSG_TREE_HEIGHT, NOTE_HASH_TREE_HEIGHT, NULLIFIER_TREE_HEIGHT, + type NodeInfo, + NodeInfoSchema, PUBLIC_DATA_TREE_HEIGHT, PrivateLog, type ProtocolContractAddresses, @@ -230,6 +232,13 @@ export interface AztecNode */ isReady(): Promise; + /** + * Returns the information about the server's node. Includes current Node version, compatible Noir version, + * L1 chain identifier, protocol version, and L1 address of the rollup contract. + * @returns - The node information. + */ + getNodeInfo(): Promise; + /** * Method to request blocks. Will attempt to return all requested blocks but will return only those available. * @param from - The start of the range of blocks to return. 
@@ -508,6 +517,8 @@ export const AztecNodeApiSchema: ApiSchemaFor = { isReady: z.function().returns(z.boolean()), + getNodeInfo: z.function().returns(NodeInfoSchema), + getBlocks: z.function().args(z.number(), z.number()).returns(z.array(L2Block.schema)), getCurrentBaseFees: z.function().returns(GasFees.schema), diff --git a/yarn-project/cli/src/cmds/l1/index.ts b/yarn-project/cli/src/cmds/l1/index.ts index 80c0d514c3b..5bb1ff71240 100644 --- a/yarn-project/cli/src/cmds/l1/index.ts +++ b/yarn-project/cli/src/cmds/l1/index.ts @@ -109,7 +109,7 @@ export function injectCommands(program: Command, log: LogFn, debugLogger: DebugL 'test test test test test test test test test test test junk', ) .addOption(l1ChainIdOption) - .option('--validator ', 'ethereum address of the validator', parseEthereumAddress) + .option('--validator
<address>', 'ethereum address of the validator', parseEthereumAddress)
    .option('--rollup <address>
', 'ethereum address of the rollup contract', parseEthereumAddress) .action(async options => { const { removeL1Validator } = await import('./update_l1_validators.js'); diff --git a/yarn-project/cli/src/cmds/l1/update_l1_validators.ts b/yarn-project/cli/src/cmds/l1/update_l1_validators.ts index 9827721418e..e08231e5b7f 100644 --- a/yarn-project/cli/src/cmds/l1/update_l1_validators.ts +++ b/yarn-project/cli/src/cmds/l1/update_l1_validators.ts @@ -1,6 +1,6 @@ import { EthCheatCodes } from '@aztec/aztec.js'; import { type EthAddress } from '@aztec/circuits.js'; -import { createEthereumChain, getL1ContractsConfigEnvVars } from '@aztec/ethereum'; +import { createEthereumChain, getL1ContractsConfigEnvVars, isAnvilTestChain } from '@aztec/ethereum'; import { type DebugLogger, type LogFn } from '@aztec/foundation/log'; import { RollupAbi } from '@aztec/l1-artifacts'; @@ -53,9 +53,18 @@ export async function addL1Validator({ const txHash = await rollup.write.addValidator([validatorAddress.toString()]); dualLog(`Transaction hash: ${txHash}`); await publicClient.waitForTransactionReceipt({ hash: txHash }); - dualLog(`Funding validator on L1`); - const cheatCodes = new EthCheatCodes(rpcUrl, debugLogger); - await cheatCodes.setBalance(validatorAddress, 10n ** 20n); + if (isAnvilTestChain(chainId)) { + dualLog(`Funding validator on L1`); + const cheatCodes = new EthCheatCodes(rpcUrl, debugLogger); + await cheatCodes.setBalance(validatorAddress, 10n ** 20n); + } else { + const balance = await publicClient.getBalance({ address: validatorAddress.toString() }); + const balanceInEth = Number(balance) / 10 ** 18; + dualLog(`Validator balance: ${balanceInEth.toFixed(6)} ETH`); + if (balanceInEth === 0) { + dualLog(`WARNING: Validator has no balance. Remember to fund it!`); + } + } } export async function removeL1Validator({ diff --git a/yarn-project/cli/src/cmds/pxe/get_node_info.ts b/yarn-project/cli/src/cmds/pxe/get_node_info.ts index dd5939277fb..bbef7fde3e8 100644 --- a/yarn-project/cli/src/cmds/pxe/get_node_info.ts +++ b/yarn-project/cli/src/cmds/pxe/get_node_info.ts @@ -1,8 +1,13 @@ -import { createCompatibleClient } from '@aztec/aztec.js'; +import { type AztecNode, type PXE, createAztecNodeClient, createCompatibleClient } from '@aztec/aztec.js'; import { type DebugLogger, type LogFn } from '@aztec/foundation/log'; -export async function getNodeInfo(rpcUrl: string, debugLogger: DebugLogger, log: LogFn) { - const client = await createCompatibleClient(rpcUrl, debugLogger); +export async function getNodeInfo(rpcUrl: string, pxeRequest: boolean, debugLogger: DebugLogger, log: LogFn) { + let client: AztecNode | PXE; + if (pxeRequest) { + client = await createCompatibleClient(rpcUrl, debugLogger); + } else { + client = createAztecNodeClient(rpcUrl); + } const info = await client.getNodeInfo(); log(`Node Version: ${info.nodeVersion}`); log(`Chain Id: ${info.l1ChainId}`); diff --git a/yarn-project/cli/src/cmds/pxe/index.ts b/yarn-project/cli/src/cmds/pxe/index.ts index ad1d9ed59c3..ec3fec68ee7 100644 --- a/yarn-project/cli/src/cmds/pxe/index.ts +++ b/yarn-project/cli/src/cmds/pxe/index.ts @@ -4,7 +4,9 @@ import { type DebugLogger, type LogFn } from '@aztec/foundation/log'; import { type Command } from 'commander'; import { + LOCALHOST, logJson, + makePxeOption, parseAztecAddress, parseEthereumAddress, parseField, @@ -142,11 +144,18 @@ export function injectCommands(program: Command, log: LogFn, debugLogger: DebugL program .command('get-node-info') - .description('Gets the information of an aztec node at a URL.') 
- .addOption(pxeOption) + .description('Gets the information of an Aztec node from a PXE or directly from an Aztec node.') + .option('--node-url ', 'URL of the node.', `http://${LOCALHOST}:8080`) + .addOption(makePxeOption(false)) .action(async options => { const { getNodeInfo } = await import('./get_node_info.js'); - await getNodeInfo(options.rpcUrl, debugLogger, log); + let url: string; + if (options.nodeUrl) { + url = options.nodeUrl; + } else { + url = options.rpcUrl; + } + await getNodeInfo(url, !options.nodeUrl, debugLogger, log); }); program diff --git a/yarn-project/end-to-end/scripts/native-network/boot-node.sh b/yarn-project/end-to-end/scripts/native-network/boot-node.sh index 943bcdf4a4f..39067971ab9 100755 --- a/yarn-project/end-to-end/scripts/native-network/boot-node.sh +++ b/yarn-project/end-to-end/scripts/native-network/boot-node.sh @@ -13,7 +13,7 @@ exec > >(tee -a "$(dirname $0)/logs/${SCRIPT_NAME}.log") 2> >(tee -a "$(dirname export PORT=${PORT:-"8080"} export DEBUG=${DEBUG:-"aztec:*,-aztec:avm_simulator*,-aztec:libp2p_service*,-aztec:circuits:artifact_hash,-json-rpc*,-aztec:l2_block_stream,-aztec:world-state:*"} export LOG_LEVEL=${LOG_LEVEL:-"debug"} -export ETHEREUM_HOST="http://127.0.0.1:8545" +export ETHEREUM_HOST=${ETHEREUM_HOST:-"http://127.0.0.1:8545"} export P2P_ENABLED="true" export VALIDATOR_DISABLED="true" export SEQ_MAX_SECONDS_BETWEEN_BLOCKS="0" @@ -26,11 +26,11 @@ export OTEL_EXPORTER_OTLP_METRICS_ENDPOINT="${OTEL_EXPORTER_OTLP_METRICS_ENDPOIN export OTEL_EXPORTER_OTLP_TRACES_ENDPOINT="${OTEL_EXPORTER_OTLP_TRACES_ENDPOINT:-}" export OTEL_EXPORTER_OTLP_LOGS_ENDPOINT="${OTEL_EXPORTER_OTLP_LOGS_ENDPOINT:-}" export OTEL_RESOURCE_ATTRIBUTES="service.name=boot-node" -export VALIDATOR_PRIVATE_KEY="0x47e179ec197488593b187f80a00eb0da91f1b9d0b13f8733639f19c30a34926a" +export VALIDATOR_PRIVATE_KEY=${VALIDATOR_PRIVATE_KEY:-"0x47e179ec197488593b187f80a00eb0da91f1b9d0b13f8733639f19c30a34926a"} REPO=$(git rev-parse --show-toplevel) echo "Waiting for l1 contracts to be deployed..." -until [ -f "$REPO"/yarn-project/end-to-end/scripts/native-network/state/l1-contracts.env ] ; do +until [ -f "$REPO"/yarn-project/end-to-end/scripts/native-network/state/l1-contracts.env ]; do sleep 1 done echo "Done waiting." @@ -42,4 +42,4 @@ function filter_noise() { } # Start the Aztec node with the sequencer and archiver -node --no-warnings "$REPO"/yarn-project/aztec/dest/bin/index.js start --node --archiver --sequencer --pxe 2>&1 | filter_noise \ No newline at end of file +node --no-warnings "$REPO"/yarn-project/aztec/dest/bin/index.js start --node --archiver --sequencer 2>&1 | filter_noise diff --git a/yarn-project/end-to-end/scripts/native-network/deploy-l1-contracts.sh b/yarn-project/end-to-end/scripts/native-network/deploy-l1-contracts.sh index 9e9dad3f195..2d4677b1660 100755 --- a/yarn-project/end-to-end/scripts/native-network/deploy-l1-contracts.sh +++ b/yarn-project/end-to-end/scripts/native-network/deploy-l1-contracts.sh @@ -18,21 +18,33 @@ else INIT_VALIDATORS="false" fi -echo "Waiting for Anvil to be up at port 8545..." +export ETHEREUM_HOST=${ETHEREUM_HOST:-"http://127.0.0.1:8545"} +export L1_CHAIN_ID=${L1_CHAIN_ID:-"31337"} +export PRIVATE_KEY=${PRIVATE_KEY:-""} +export SALT=${SALT:-"1337"} + +echo "Waiting for Ethereum node to be up..." 
until curl -s -X POST -H 'Content-Type: application/json' \ --data '{"jsonrpc":"2.0","method":"eth_blockNumber","params":[],"id":1}' \ - http://127.0.0.1:8545 2>/dev/null | grep -q 'result' ; do + $ETHEREUM_HOST 2>/dev/null | grep -q 'result'; do sleep 1 done echo "Done waiting." -# Run the deploy-l1-contracts command and capture the output -export ETHEREUM_HOST="http://127.0.0.1:8545" -if [ "$INIT_VALIDATORS" = "true" ]; then - output=$(node --no-warnings $(git rev-parse --show-toplevel)/yarn-project/aztec/dest/bin/index.js deploy-l1-contracts --validators "$VALIDATOR_ADDRESSES" --salt 1337) -else - output=$(node --no-warnings $(git rev-parse --show-toplevel)/yarn-project/aztec/dest/bin/index.js deploy-l1-contracts --salt 1337) -fi +# Construct base command +COMMAND="node --no-warnings $(git rev-parse --show-toplevel)/yarn-project/aztec/dest/bin/index.js \ + deploy-l1-contracts \ + --rpc-url $ETHEREUM_HOST \ + --l1-chain-id $L1_CHAIN_ID \ + --salt $SALT" + +# Add validators if specified +[ "$INIT_VALIDATORS" = "true" ] && COMMAND="$COMMAND --validators $VALIDATOR_ADDRESSES" + +# Add private key if provided +[ -n "$PRIVATE_KEY" ] && COMMAND="$COMMAND --private-key $PRIVATE_KEY" + +output=$($COMMAND) echo "$output" @@ -48,9 +60,8 @@ REWARD_DISTRIBUTOR_CONTRACT_ADDRESS=$(echo "$output" | grep -oP 'RewardDistribut GOVERNANCE_PROPOSER_CONTRACT_ADDRESS=$(echo "$output" | grep -oP 'GovernanceProposer Address: \K0x[a-fA-F0-9]{40}') GOVERNANCE_CONTRACT_ADDRESS=$(echo "$output" | grep -oP 'Governance Address: \K0x[a-fA-F0-9]{40}') - # Save contract addresses to state/l1-contracts.env -cat << EOCONFIG > $(git rev-parse --show-toplevel)/yarn-project/end-to-end/scripts/native-network/state/l1-contracts.env +cat <$(git rev-parse --show-toplevel)/yarn-project/end-to-end/scripts/native-network/state/l1-contracts.env export ROLLUP_CONTRACT_ADDRESS=$ROLLUP_CONTRACT_ADDRESS export REGISTRY_CONTRACT_ADDRESS=$REGISTRY_CONTRACT_ADDRESS export INBOX_CONTRACT_ADDRESS=$INBOX_CONTRACT_ADDRESS diff --git a/yarn-project/end-to-end/scripts/native-network/prover-node.sh b/yarn-project/end-to-end/scripts/native-network/prover-node.sh index 1fa0ac6865c..866ee0f73e9 100755 --- a/yarn-project/end-to-end/scripts/native-network/prover-node.sh +++ b/yarn-project/end-to-end/scripts/native-network/prover-node.sh @@ -14,11 +14,11 @@ exec > >(tee -a "$(dirname $0)/logs/${SCRIPT_NAME}.log") 2> >(tee -a "$(dirname REPO=$(git rev-parse --show-toplevel) echo "Waiting for l1 contracts to be deployed..." -until [ -f "$REPO"/yarn-project/end-to-end/scripts/native-network/state/l1-contracts.env ] ; do +until [ -f "$REPO"/yarn-project/end-to-end/scripts/native-network/state/l1-contracts.env ]; do sleep 1 done echo "Waiting for Aztec Node..." -until curl -s http://127.0.0.1:8080/status >/dev/null ; do +until curl -s http://127.0.0.1:8080/status >/dev/null; do sleep 1 done echo "Done waiting." @@ -26,7 +26,7 @@ echo "Done waiting." 
source "$REPO"/yarn-project/end-to-end/scripts/native-network/state/l1-contracts.env # Get node info from the boot node -output=$(node --no-warnings "$REPO"/yarn-project/aztec/dest/bin/index.js get-node-info -u http://127.0.0.1:8080) +output=$(node --no-warnings "$REPO"/yarn-project/aztec/dest/bin/index.js get-node-info --node-url http://127.0.0.1:8080) # Extract boot node ENR export BOOTSTRAP_NODES=$(echo "$output" | grep -oP 'Node ENR: \K.*') @@ -34,9 +34,10 @@ export BOOTSTRAP_NODES=$(echo "$output" | grep -oP 'Node ENR: \K.*') # Set environment variables export LOG_LEVEL=${LOG_LEVEL:-"debug"} export DEBUG=${DEBUG:-"aztec:*,-aztec:avm_simulator*,-aztec:libp2p_service*,-aztec:circuits:artifact_hash,-json-rpc*,-aztec:l2_block_stream,-aztec:world-state:*"} -export ETHEREUM_HOST="http://127.0.0.1:8545" +export ETHEREUM_HOST=${ETHEREUM_HOST:-"http://127.0.0.1:8545"} export PROVER_AGENT_COUNT="1" -export PROVER_PUBLISHER_PRIVATE_KEY="0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80" +export PROVER_AGENT_ENABLED="true" +export PROVER_PUBLISHER_PRIVATE_KEY=${PROVER_PUBLISHER_PRIVATE_KEY:-"0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80"} export PROVER_COORDINATION_NODE_URL="http://127.0.0.1:8080" export AZTEC_NODE_URL="http://127.0.0.1:8080" export OTEL_RESOURCE_ATTRIBUTES="service.name=prover-node-${PORT}" diff --git a/yarn-project/end-to-end/scripts/native-network/pxe.sh b/yarn-project/end-to-end/scripts/native-network/pxe.sh index e02133cf943..c7db13a4c56 100755 --- a/yarn-project/end-to-end/scripts/native-network/pxe.sh +++ b/yarn-project/end-to-end/scripts/native-network/pxe.sh @@ -9,19 +9,20 @@ exec > >(tee -a "$(dirname $0)/logs/${SCRIPT_NAME}.log") 2> >(tee -a "$(dirname # Starts the PXE (Private eXecution Environment) service # Set environment variables -export ETHEREUM_HOST="http://127.0.0.1:8545" -export AZTEC_NODE_URL="http://127.0.0.1:8080" +export ETHEREUM_HOST=${ETHEREUM_HOST:-"http://127.0.0.1:8545"} +export AZTEC_NODE_URL=${AZTEC_NODE_URL:-"http://127.0.0.1:8080"} +export VALIDATOR_NODE_URL=${VALIDATOR_NODE_URL:-"http://127.0.0.1:8081"} export LOG_LEVEL=${LOG_LEVEL:-"debug"} export DEBUG="aztec:*" echo "Waiting for Aztec Node..." -until curl -s http://127.0.0.1:8080/status >/dev/null ; do +until curl -s $AZTEC_NODE_URL/status >/dev/null; do sleep 1 done # We need to also wait for the validator, as the initial node cannot # Produce blocks on it's own echo "Waiting for Validator 0..." -until curl -s http://127.0.0.1:8081/status >/dev/null ; do +until curl -s $VALIDATOR_NODE_URL/status >/dev/null; do sleep 1 done echo "Done waiting." 
@@ -31,4 +32,4 @@ function filter_noise() { } # Start the PXE service -node --no-warnings $(git rev-parse --show-toplevel)/yarn-project/aztec/dest/bin/index.js start --port=8079 --pxe 2>&1 | filter_noise \ No newline at end of file +node --no-warnings $(git rev-parse --show-toplevel)/yarn-project/aztec/dest/bin/index.js start --port=8079 --pxe 2>&1 | filter_noise diff --git a/yarn-project/end-to-end/scripts/native-network/test-transfer.sh b/yarn-project/end-to-end/scripts/native-network/test-transfer.sh index 50790afbe3e..e54d8966ede 100755 --- a/yarn-project/end-to-end/scripts/native-network/test-transfer.sh +++ b/yarn-project/end-to-end/scripts/native-network/test-transfer.sh @@ -11,6 +11,7 @@ exec > >(tee -a "$(dirname $0)/logs/${SCRIPT_NAME}.log") 2> >(tee -a "$(dirname export BOOTNODE_URL=${BOOTNODE_URL:-http://127.0.0.1:8080} export PXE_URL=${PXE_URL:-http://127.0.0.1:8079} export ETHEREUM_HOST=${ETHEREUM_HOST:-http://127.0.0.1:8545} +export K8S=${K8S:-false} REPO=$(git rev-parse --show-toplevel) # Run our test assuming the port in pxe.sh diff --git a/yarn-project/end-to-end/scripts/native-network/transaction-bot.sh b/yarn-project/end-to-end/scripts/native-network/transaction-bot.sh index 722bfdcf0ce..a42c2417ffd 100755 --- a/yarn-project/end-to-end/scripts/native-network/transaction-bot.sh +++ b/yarn-project/end-to-end/scripts/native-network/transaction-bot.sh @@ -4,6 +4,10 @@ set -eu # Get the name of the script without the path and extension SCRIPT_NAME=$(basename "$0" .sh) +# Set the token contract to use +export BOT_TOKEN_CONTRACT=${BOT_TOKEN_CONTRACT:-"TokenContract"} +export BOT_PXE_URL=${BOT_PXE_URL:-"http://127.0.0.1:8079"} + # Redirect stdout and stderr to .log while also printing to the console exec > >(tee -a "$(dirname $0)/logs/${SCRIPT_NAME}.log") 2> >(tee -a "$(dirname $0)/logs/${SCRIPT_NAME}.log" >&2) @@ -11,24 +15,28 @@ exec > >(tee -a "$(dirname $0)/logs/${SCRIPT_NAME}.log") 2> >(tee -a "$(dirname REPO=$(git rev-parse --show-toplevel) echo "Waiting for Aztec Node..." -until curl -s http://127.0.0.1:8080/status >/dev/null ; do +until curl -s http://127.0.0.1:8080/status >/dev/null; do sleep 1 done echo "Waiting for PXE service..." until curl -s -X POST -H 'content-type: application/json' \ -d '{"jsonrpc":"2.0","method":"pxe_getNodeInfo","params":[],"id":67}' \ - http://127.0.0.1:8079 | grep -q '"enr:-'; do - sleep 1 -done -echo "Waiting for l2 contracts to be deployed..." -until [ -f "$REPO"/yarn-project/end-to-end/scripts/native-network/state/l2-contracts.env ] ; do + $BOT_PXE_URL | grep -q '"enr:-'; do sleep 1 done -echo "Done waiting." + +# Don't wait for l2 contracts if using EasyPrivateTokenContract +if [ "${BOT_TOKEN_CONTRACT:-TokenContract}" != "EasyPrivateTokenContract" ]; then + echo "Waiting for l2 contracts to be deployed..." + until [ -f "$REPO"/yarn-project/end-to-end/scripts/native-network/state/l2-contracts.env ]; do + sleep 1 + done + echo "Done waiting." 
+fi # Set environment variables -export ETHEREUM_HOST="http://127.0.0.1:8545" -export AZTEC_NODE_URL="http://127.0.0.1:8080" +export ETHEREUM_HOST=${ETHEREUM_HOST:-"http://127.0.0.1:8545"} +export AZTEC_NODE_URL=${AZTEC_NODE_URL:-"http://127.0.0.1:8080"} export LOG_LEVEL=${LOG_LEVEL:-"debug"} export DEBUG="aztec:*,-aztec:avm_simulator*,-aztec:libp2p_service*,-aztec:circuits:artifact_hash,-json-rpc*,-aztec:l2_block_stream,-aztec:world-state:*" export BOT_PRIVATE_KEY="0xcafe" @@ -42,4 +50,5 @@ export PXE_PROVER_ENABLED="false" export PROVER_REAL_PROOFS="false" # Start the bot -node --no-warnings $(git rev-parse --show-toplevel)/yarn-project/aztec/dest/bin/index.js start --port=8077 --pxe --bot + +node --no-warnings $REPO/yarn-project/aztec/dest/bin/index.js start --port=8077 --bot --pxe diff --git a/yarn-project/end-to-end/scripts/native-network/validator.sh b/yarn-project/end-to-end/scripts/native-network/validator.sh index 518dbb9db97..fa183829d61 100755 --- a/yarn-project/end-to-end/scripts/native-network/validator.sh +++ b/yarn-project/end-to-end/scripts/native-network/validator.sh @@ -10,19 +10,21 @@ exec > >(tee -a "$(dirname $0)/logs/${SCRIPT_NAME}.log") 2> >(tee -a "$(dirname # PORTS PORT="$1" P2P_PORT="$2" +ADDRESS="${3:-${ADDRESS:-}}" +export VALIDATOR_PRIVATE_KEY="${4:-${VALIDATOR_PRIVATE_KEY:-}}" # Starts the Validator Node REPO=$(git rev-parse --show-toplevel) echo "Waiting for l1 contracts to be deployed..." -until [ -f "$REPO"/yarn-project/end-to-end/scripts/native-network/state/l1-contracts.env ] ; do +until [ -f "$REPO"/yarn-project/end-to-end/scripts/native-network/state/l1-contracts.env ]; do sleep 1 done source "$REPO"/yarn-project/end-to-end/scripts/native-network/state/l1-contracts.env echo "Waiting for Aztec Node..." -until curl -s http://127.0.0.1:8080/status >/dev/null ; do +until curl -s http://127.0.0.1:8080/status >/dev/null; do sleep 1 done echo "Done waiting." @@ -31,21 +33,32 @@ echo "Done waiting." BOOT_NODE_URL="http://127.0.0.1:8080" # Get node info from the boot node -output=$(node --no-warnings "$REPO"/yarn-project/aztec/dest/bin/index.js get-node-info -u $BOOT_NODE_URL) +output=$(node --no-warnings "$REPO"/yarn-project/aztec/dest/bin/index.js get-node-info --node-url $BOOT_NODE_URL) # Extract boot node ENR export BOOTSTRAP_NODES=$(echo "$output" | grep -oP 'Node ENR: \K.*') echo "BOOTSTRAP_NODES: $BOOTSTRAP_NODES" -# Generate a private key for the validator -json_account=$(node --no-warnings "$REPO"/yarn-project/aztec/dest/bin/index.js generate-l1-account) -export ADDRESS=$(echo $json_account | jq -r '.address') -export LOG_LEVEL=${LOG_LEVEL:-"debug"} -export VALIDATOR_PRIVATE_KEY=$(echo $json_account | jq -r '.privateKey') +# Generate a private key for the validator only if not already set +if [ -z "${VALIDATOR_PRIVATE_KEY:-}" ] || [ -z "${ADDRESS:-}" ]; then + echo "Generating new L1 Validator account..." 
+ json_account=$(node --no-warnings "$REPO"/yarn-project/aztec/dest/bin/index.js generate-l1-account) + export ADDRESS=$(echo $json_account | jq -r '.address') + export VALIDATOR_PRIVATE_KEY=$(echo $json_account | jq -r '.privateKey') +fi + export L1_PRIVATE_KEY=$VALIDATOR_PRIVATE_KEY export SEQ_PUBLISHER_PRIVATE_KEY=$VALIDATOR_PRIVATE_KEY export DEBUG=${DEBUG:-"aztec:*,-aztec:avm_simulator*,-aztec:libp2p_service*,-aztec:circuits:artifact_hash,-json-rpc*,-aztec:l2_block_stream,-aztec:world-state:*"} -export ETHEREUM_HOST="http://127.0.0.1:8545" +export ETHEREUM_HOST=${ETHEREUM_HOST:-"http://127.0.0.1:8545"} + +# Automatically detect if we're using Anvil +if curl -s -H "Content-Type: application/json" -X POST --data '{"method":"web3_clientVersion","params":[],"id":49,"jsonrpc":"2.0"}' $ETHEREUM_HOST | jq .result | grep -q anvil; then + IS_ANVIL="true" +else + IS_ANVIL="false" +fi + export P2P_ENABLED="true" export VALIDATOR_DISABLED="false" export SEQ_MAX_SECONDS_BETWEEN_BLOCKS="0" @@ -59,15 +72,24 @@ export OTEL_EXPORTER_OTLP_METRICS_ENDPOINT="${OTEL_EXPORTER_OTLP_METRICS_ENDPOIN export OTEL_EXPORTER_OTLP_TRACES_ENDPOINT="${OTEL_EXPORTER_OTLP_TRACES_ENDPOINT:-}" export OTEL_EXPORTER_OTLP_LOGS_ENDPOINT="${OTEL_EXPORTER_OTLP_LOGS_ENDPOINT:-}" -# Add L1 validator -# this may fail, so try 3 times -for i in {1..3}; do - node --no-warnings "$REPO"/yarn-project/aztec/dest/bin/index.js add-l1-validator --validator $ADDRESS --rollup $ROLLUP_CONTRACT_ADDRESS && break - sleep 1 -done +# Check if validator is already registered +echo "Checking if validator is already registered..." +debug_output=$(node --no-warnings "$REPO"/yarn-project/aztec/dest/bin/index.js debug-rollup --rollup $ROLLUP_CONTRACT_ADDRESS) +if echo "$debug_output" | grep -q "Validators:.*$ADDRESS"; then + echo "Validator $ADDRESS is already registered" +else + # Add L1 validator + # this may fail, so try 3 times + echo "Adding validator $ADDRESS..." + for i in {1..3}; do + node --no-warnings "$REPO"/yarn-project/aztec/dest/bin/index.js add-l1-validator --validator $ADDRESS --rollup $ROLLUP_CONTRACT_ADDRESS && break + sleep 1 + done +fi -# Fast forward epochs -node --no-warnings "$REPO"/yarn-project/aztec/dest/bin/index.js fast-forward-epochs --rollup $ROLLUP_CONTRACT_ADDRESS --count 1 +# Fast forward epochs if we're on an anvil chain +if [ "$IS_ANVIL" = "true" ]; then + node --no-warnings "$REPO"/yarn-project/aztec/dest/bin/index.js fast-forward-epochs --rollup $ROLLUP_CONTRACT_ADDRESS --count 1 +fi # Start the Validator Node with the sequencer and archiver node --no-warnings "$REPO"/yarn-project/aztec/dest/bin/index.js start --port="$PORT" --node --archiver --sequencer - diff --git a/yarn-project/end-to-end/scripts/native-network/validators.sh b/yarn-project/end-to-end/scripts/native-network/validators.sh index 6a9ac7f4f40..c2454b87481 100755 --- a/yarn-project/end-to-end/scripts/native-network/validators.sh +++ b/yarn-project/end-to-end/scripts/native-network/validators.sh @@ -16,19 +16,26 @@ cd "$(dirname "${BASH_SOURCE[0]}")" CMD=() # Generate validator commands -for ((i=0; i { // Expect the tx to revert await expect(publisher.proposeL2Block(block)).resolves.toEqual(false); - // Expect a proper error to be logged. Full message looks like: - // aztec:sequencer:publisher [ERROR] Rollup process tx reverted. The contract function "propose" reverted. 
Error: Rollup__InvalidInHash(bytes32 expected, bytes32 actual) (0x00089a9d421a82c4a25f7acbebe69e638d5b064fa8a60e018793dcb0be53752c, 0x00a5a12af159e0608de45d825718827a36d8a7cdfa9ecc7955bc62180ae78e51) blockNumber=1 slotNumber=49 blockHash=0x131c59ebc2ce21224de6473fe954b0d4eb918043432a3a95406bb7e7a4297fbd txHash=0xc01c3c26b6b67003a8cce352afe475faf7e0196a5a3bba963cfda3792750ed28 - expect(loggerErrorSpy).toHaveBeenCalledWith( - expect.stringMatching(/Rollup__InvalidInHash/), + // Test for both calls + expect(loggerErrorSpy).toHaveBeenCalledTimes(2); + + // Test first call + expect(loggerErrorSpy).toHaveBeenNthCalledWith( + 1, + expect.stringMatching(/^L1 Transaction 0x[a-f0-9]{64} reverted$/), + ); + + // Test second call + expect(loggerErrorSpy).toHaveBeenNthCalledWith( + 2, + expect.stringMatching( + /^Rollup process tx reverted\. The contract function "propose" reverted\. Error: Rollup__InvalidInHash/, + ), undefined, expect.objectContaining({ blockHash: expect.any(String), blockNumber: expect.any(Number), slotNumber: expect.any(BigInt), + txHash: expect.any(String), }), ); }); diff --git a/yarn-project/ethereum/package.json b/yarn-project/ethereum/package.json index 887ad01645d..f6be604435c 100644 --- a/yarn-project/ethereum/package.json +++ b/yarn-project/ethereum/package.json @@ -41,6 +41,8 @@ "@jest/globals": "^29.5.0", "@types/jest": "^29.5.0", "@types/node": "^18.14.6", + "@viem/anvil": "^0.0.10", + "get-port": "^7.1.0", "jest": "^29.5.0", "ts-node": "^10.9.1", "typescript": "^5.0.4" diff --git a/yarn-project/ethereum/src/deploy_l1_contracts.ts b/yarn-project/ethereum/src/deploy_l1_contracts.ts index e9d8522f636..8d2f6b64245 100644 --- a/yarn-project/ethereum/src/deploy_l1_contracts.ts +++ b/yarn-project/ethereum/src/deploy_l1_contracts.ts @@ -55,6 +55,7 @@ import { foundry } from 'viem/chains'; import { type L1ContractsConfig } from './config.js'; import { isAnvilTestChain } from './ethereum_chain.js'; import { type L1ContractAddresses } from './l1_contract_addresses.js'; +import { L1TxUtils } from './l1_tx_utils.js'; /** * Return type of the deployL1Contract function. @@ -607,7 +608,9 @@ export async function deployL1Contract( logger?: DebugLogger, ): Promise<{ address: EthAddress; txHash: Hex | undefined }> { let txHash: Hex | undefined = undefined; - let address: Hex | null | undefined = undefined; + let resultingAddress: Hex | null | undefined = undefined; + + const l1TxUtils = new L1TxUtils(publicClient, walletClient, logger); if (libraries) { // @note Assumes that we wont have nested external libraries. 
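A note on the CREATE2 path in the next hunk: the deployed address is a pure function of the fixed deployer, the salt, and the init code, so re-running a deployment with the same salt always targets the same address, which is why checking for existing bytecode is enough to make the deployment idempotent. A minimal sketch using the same viem helper (the salt and init code values here are placeholders, not from this patch):

import { getContractAddress, padHex, type Hex } from 'viem';

const deployer: Hex = '0x4e59b44847b379578588920cA78FbF26c0B4956C'; // the deterministic deployer used in the hunk below
const salt = padHex('0x1234', { size: 32 }); // placeholder salt
const initCode: Hex = '0x600160005260206000f3'; // placeholder init code, for illustration only
const predicted = getContractAddress({ from: deployer, salt, bytecode: initCode, opcode: 'CREATE2' });
// `predicted` is known before any transaction is sent and is stable across runs.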
@@ -659,21 +662,31 @@ export async function deployL1Contract( const salt = padHex(maybeSalt, { size: 32 }); const deployer: Hex = '0x4e59b44847b379578588920cA78FbF26c0B4956C'; const calldata = encodeDeployData({ abi, bytecode, args }); - address = getContractAddress({ from: deployer, salt, bytecode: calldata, opcode: 'CREATE2' }); - const existing = await publicClient.getBytecode({ address }); + resultingAddress = getContractAddress({ from: deployer, salt, bytecode: calldata, opcode: 'CREATE2' }); + const existing = await publicClient.getBytecode({ address: resultingAddress }); if (existing === undefined || existing === '0x') { - txHash = await walletClient.sendTransaction({ to: deployer, data: concatHex([salt, calldata]) }); - logger?.verbose(`Deploying contract with salt ${salt} to address ${address} in tx ${txHash}`); + const res = await l1TxUtils.sendTransaction({ + to: deployer, + data: concatHex([salt, calldata]), + }); + txHash = res.txHash; + + logger?.verbose(`Deployed contract with salt ${salt} to address ${resultingAddress} in tx ${txHash}.`); } else { - logger?.verbose(`Skipping existing deployment of contract with salt ${salt} to address ${address}`); + logger?.verbose(`Skipping existing deployment of contract with salt ${salt} to address ${resultingAddress}`); } } else { - txHash = await walletClient.deployContract({ abi, bytecode, args }); - logger?.verbose(`Deploying contract in tx ${txHash}`); - const receipt = await publicClient.waitForTransactionReceipt({ hash: txHash, pollingInterval: 100 }); - address = receipt.contractAddress; - if (!address) { + // Regular deployment path + const deployData = encodeDeployData({ abi, bytecode, args }); + const receipt = await l1TxUtils.sendAndMonitorTransaction({ + to: null, + data: deployData, + }); + + txHash = receipt.transactionHash; + resultingAddress = receipt.contractAddress; + if (!resultingAddress) { throw new Error( `No contract address found in receipt: ${JSON.stringify(receipt, (_, val) => typeof val === 'bigint' ? String(val) : val, @@ -682,6 +695,6 @@ export async function deployL1Contract( } } - return { address: EthAddress.fromString(address!), txHash }; + return { address: EthAddress.fromString(resultingAddress!), txHash }; } // docs:end:deployL1Contract diff --git a/yarn-project/ethereum/src/eth_cheat_codes.ts b/yarn-project/ethereum/src/eth_cheat_codes.ts new file mode 100644 index 00000000000..74918bf4653 --- /dev/null +++ b/yarn-project/ethereum/src/eth_cheat_codes.ts @@ -0,0 +1,316 @@ +import { toBigIntBE, toHex } from '@aztec/foundation/bigint-buffer'; +import { keccak256 } from '@aztec/foundation/crypto'; +import { type EthAddress } from '@aztec/foundation/eth-address'; +import { createDebugLogger } from '@aztec/foundation/log'; + +import fs from 'fs'; +import { type Hex } from 'viem'; + +/** + * A class that provides utility functions for interacting with ethereum (L1). 
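A minimal usage sketch for the class added below (not part of the patch; it only calls methods defined in this file, against a local Anvil endpoint):

import { EthCheatCodes } from '@aztec/ethereum';

const cheats = new EthCheatCodes('http://127.0.0.1:8545');
await cheats.setAutomine(false); // pause automatic mining
await cheats.setNextBlockBaseFeePerGas(1_000_000_000n); // 1 gwei base fee for the next block
await cheats.evmMine(); // mine a single block so the new base fee takes effect
await cheats.warp((await cheats.timestamp()) + 3600); // jump one hour ahead and mine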
+ */ +export class EthCheatCodes { + constructor( + /** + * The RPC URL to use for interacting with the chain + */ + public rpcUrl: string, + /** + * The logger to use for the eth cheatcodes + */ + public logger = createDebugLogger('aztec:cheat_codes:eth'), + ) {} + + async rpcCall(method: string, params: any[]) { + const paramsString = JSON.stringify(params); + const content = { + body: `{"jsonrpc":"2.0", "method": "${method}", "params": ${paramsString}, "id": 1}`, + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + }; + return await (await fetch(this.rpcUrl, content)).json(); + } + + /** + * Get the auto mine status of the underlying chain + * @returns True if automine is on, false otherwise + */ + public async isAutoMining(): Promise { + try { + const res = await this.rpcCall('anvil_getAutomine', []); + return res.result; + } catch (err) { + this.logger.error(`Calling "anvil_getAutomine" failed with:`, err); + } + return false; + } + + /** + * Get the current blocknumber + * @returns The current block number + */ + public async blockNumber(): Promise { + const res = await this.rpcCall('eth_blockNumber', []); + return parseInt(res.result, 16); + } + + /** + * Get the current chainId + * @returns The current chainId + */ + public async chainId(): Promise { + const res = await this.rpcCall('eth_chainId', []); + return parseInt(res.result, 16); + } + + /** + * Get the current timestamp + * @returns The current timestamp + */ + public async timestamp(): Promise { + const res = await this.rpcCall('eth_getBlockByNumber', ['latest', true]); + return parseInt(res.result.timestamp, 16); + } + + /** + * Advance the chain by a number of blocks + * @param numberOfBlocks - The number of blocks to mine + */ + public async mine(numberOfBlocks = 1): Promise { + const res = await this.rpcCall('hardhat_mine', [numberOfBlocks]); + if (res.error) { + throw new Error(`Error mining: ${res.error.message}`); + } + this.logger.verbose(`Mined ${numberOfBlocks} L1 blocks`); + } + + /** + * Mines a single block with evm_mine + */ + public async evmMine(): Promise { + const res = await this.rpcCall('evm_mine', []); + if (res.error) { + throw new Error(`Error mining: ${res.error.message}`); + } + } + + /** + * Set the balance of an account + * @param account - The account to set the balance for + * @param balance - The balance to set + */ + public async setBalance(account: EthAddress, balance: bigint): Promise { + const res = await this.rpcCall('anvil_setBalance', [account.toString(), toHex(balance)]); + if (res.error) { + throw new Error(`Error setting balance for ${account}: ${res.error.message}`); + } + this.logger.verbose(`Set balance for ${account} to ${balance}`); + } + + /** + * Set the interval between blocks (block time) + * @param interval - The interval to use between blocks + */ + public async setBlockInterval(interval: number): Promise { + const res = await this.rpcCall('anvil_setBlockTimestampInterval', [interval]); + if (res.error) { + throw new Error(`Error setting block interval: ${res.error.message}`); + } + this.logger.verbose(`Set L1 block interval to ${interval}`); + } + + /** + * Set the next block base fee per gas + * @param baseFee - The base fee to set + */ + public async setNextBlockBaseFeePerGas(baseFee: bigint): Promise { + const res = await this.rpcCall('anvil_setNextBlockBaseFeePerGas', [baseFee.toString()]); + if (res.error) { + throw new Error(`Error setting next block base fee per gas: ${res.error.message}`); + } + this.logger.verbose(`Set L1 next block base fee per 
gas to ${baseFee}`); + } + + /** + * Set the interval between blocks (block time) + * @param seconds - The interval to use between blocks + */ + public async setIntervalMining(seconds: number): Promise { + const res = await this.rpcCall('anvil_setIntervalMining', [seconds]); + if (res.error) { + throw new Error(`Error setting interval mining: ${res.error.message}`); + } + this.logger.verbose(`Set L1 interval mining to ${seconds} seconds`); + } + + /** + * Set the automine status of the underlying anvil chain + * @param automine - The automine status to set + */ + public async setAutomine(automine: boolean): Promise { + const res = await this.rpcCall('anvil_setAutomine', [automine]); + if (res.error) { + throw new Error(`Error setting automine: ${res.error.message}`); + } + this.logger.verbose(`Set L1 automine to ${automine}`); + } + + /** + * Drop a transaction from the mempool + * @param txHash - The transaction hash + */ + public async dropTransaction(txHash: Hex): Promise { + const res = await this.rpcCall('anvil_dropTransaction', [txHash]); + if (res.error) { + throw new Error(`Error dropping transaction: ${res.error.message}`); + } + this.logger.verbose(`Dropped transaction ${txHash}`); + } + + /** + * Set the next block timestamp + * @param timestamp - The timestamp to set the next block to + */ + public async setNextBlockTimestamp(timestamp: number): Promise { + const res = await this.rpcCall('evm_setNextBlockTimestamp', [timestamp]); + if (res.error) { + throw new Error(`Error setting next block timestamp: ${res.error.message}`); + } + this.logger.verbose(`Set L1 next block timestamp to ${timestamp}`); + } + + /** + * Set the next block timestamp and mines the block + * @param timestamp - The timestamp to set the next block to + */ + public async warp(timestamp: number | bigint): Promise { + const res = await this.rpcCall('evm_setNextBlockTimestamp', [Number(timestamp)]); + if (res.error) { + throw new Error(`Error warping: ${res.error.message}`); + } + await this.mine(); + this.logger.verbose(`Warped L1 timestamp to ${timestamp}`); + } + + /** + * Dumps the current chain state to a file. + * @param fileName - The file name to dump state into + */ + public async dumpChainState(fileName: string): Promise { + const res = await this.rpcCall('hardhat_dumpState', []); + if (res.error) { + throw new Error(`Error dumping state: ${res.error.message}`); + } + const jsonContent = JSON.stringify(res.result); + fs.writeFileSync(`${fileName}.json`, jsonContent, 'utf8'); + this.logger.verbose(`Dumped state to ${fileName}`); + } + + /** + * Loads the chain state from a file. 
+ * @param fileName - The file name to load state from + */ + public async loadChainState(fileName: string): Promise { + const data = JSON.parse(fs.readFileSync(`${fileName}.json`, 'utf8')); + const res = await this.rpcCall('hardhat_loadState', [data]); + if (res.error) { + throw new Error(`Error loading state: ${res.error.message}`); + } + this.logger.verbose(`Loaded state from ${fileName}`); + } + + /** + * Load the value at a storage slot of a contract address on eth + * @param contract - The contract address + * @param slot - The storage slot + * @returns - The value at the storage slot + */ + public async load(contract: EthAddress, slot: bigint): Promise { + const res = await this.rpcCall('eth_getStorageAt', [contract.toString(), toHex(slot), 'latest']); + return BigInt(res.result); + } + + /** + * Set the value at a storage slot of a contract address on eth + * @param contract - The contract address + * @param slot - The storage slot + * @param value - The value to set the storage slot to + */ + public async store(contract: EthAddress, slot: bigint, value: bigint): Promise { + // for the rpc call, we need to change value to be a 32 byte hex string. + const res = await this.rpcCall('hardhat_setStorageAt', [contract.toString(), toHex(slot), toHex(value, true)]); + if (res.error) { + throw new Error(`Error setting storage for contract ${contract} at ${slot}: ${res.error.message}`); + } + this.logger.verbose(`Set L1 storage for contract ${contract} at ${slot} to ${value}`); + } + + /** + * Computes the slot value for a given map and key. + * @param baseSlot - The base slot of the map (specified in Aztec.nr contract) + * @param key - The key to lookup in the map + * @returns The storage slot of the value in the map + */ + public keccak256(baseSlot: bigint, key: bigint): bigint { + // abi encode (removing the 0x) - concat key and baseSlot (both padded to 32 bytes) + const abiEncoded = toHex(key, true).substring(2) + toHex(baseSlot, true).substring(2); + return toBigIntBE(keccak256(Buffer.from(abiEncoded, 'hex'))); + } + + /** + * Send transactions impersonating an externally owned account or contract. + * @param who - The address to impersonate + */ + public async startImpersonating(who: EthAddress | Hex): Promise { + const res = await this.rpcCall('hardhat_impersonateAccount', [who.toString()]); + if (res.error) { + throw new Error(`Error impersonating ${who}: ${res.error.message}`); + } + this.logger.verbose(`Impersonating ${who}`); + } + + /** + * Stop impersonating an account that you are currently impersonating. 
+ * @param who - The address to stop impersonating + */ + public async stopImpersonating(who: EthAddress | Hex): Promise { + const res = await this.rpcCall('hardhat_stopImpersonatingAccount', [who.toString()]); + if (res.error) { + throw new Error(`Error when stopping the impersonation of ${who}: ${res.error.message}`); + } + this.logger.verbose(`Stopped impersonating ${who}`); + } + + /** + * Set the bytecode for a contract + * @param contract - The contract address + * @param bytecode - The bytecode to set + */ + public async etch(contract: EthAddress, bytecode: `0x${string}`): Promise { + const res = await this.rpcCall('hardhat_setCode', [contract.toString(), bytecode]); + if (res.error) { + throw new Error(`Error setting bytecode for ${contract}: ${res.error.message}`); + } + this.logger.verbose(`Set bytecode for ${contract} to ${bytecode}`); + } + + /** + * Get the bytecode for a contract + * @param contract - The contract address + * @returns The bytecode for the contract + */ + public async getBytecode(contract: EthAddress): Promise<`0x${string}`> { + const res = await this.rpcCall('eth_getCode', [contract.toString(), 'latest']); + return res.result; + } + + /** + * Get the raw transaction object for a given transaction hash + * @param txHash - The transaction hash + * @returns The raw transaction + */ + public async getRawTransaction(txHash: Hex): Promise<`0x${string}`> { + const res = await this.rpcCall('debug_getRawTransaction', [txHash]); + return res.result; + } +} diff --git a/yarn-project/ethereum/src/index.ts b/yarn-project/ethereum/src/index.ts index 30a990db651..d6393560093 100644 --- a/yarn-project/ethereum/src/index.ts +++ b/yarn-project/ethereum/src/index.ts @@ -1,8 +1,10 @@ export * from './constants.js'; export * from './deploy_l1_contracts.js'; +export * from './ethereum_chain.js'; +export * from './eth_cheat_codes.js'; +export * from './l1_tx_utils.js'; export * from './l1_contract_addresses.js'; export * from './l1_reader.js'; -export * from './ethereum_chain.js'; export * from './utils.js'; export * from './config.js'; export * from './types.js'; diff --git a/yarn-project/ethereum/src/l1_tx_utils.test.ts b/yarn-project/ethereum/src/l1_tx_utils.test.ts new file mode 100644 index 00000000000..7dffaf011ce --- /dev/null +++ b/yarn-project/ethereum/src/l1_tx_utils.test.ts @@ -0,0 +1,302 @@ +import { EthAddress } from '@aztec/foundation/eth-address'; +import { createDebugLogger } from '@aztec/foundation/log'; +import { sleep } from '@aztec/foundation/sleep'; + +import { type Anvil } from '@viem/anvil'; +import { + type Account, + type Chain, + type HttpTransport, + type PublicClient, + type WalletClient, + createPublicClient, + createWalletClient, + http, +} from 'viem'; +import { mnemonicToAccount, privateKeyToAccount } from 'viem/accounts'; +import { foundry } from 'viem/chains'; + +import { EthCheatCodes } from './eth_cheat_codes.js'; +import { L1TxUtils, defaultL1TxUtilsConfig } from './l1_tx_utils.js'; +import { startAnvil } from './test/start_anvil.js'; + +const MNEMONIC = 'test test test test test test test test test test test junk'; +const WEI_CONST = 1_000_000_000n; +// Simple contract that just returns 42 +const SIMPLE_CONTRACT_BYTECODE = '0x69602a60005260206000f3600052600a6016f3'; + +export type PendingTransaction = { + hash: `0x${string}`; + maxFeePerGas: bigint; + maxPriorityFeePerGas: bigint; +}; + +describe('GasUtils', () => { + let gasUtils: L1TxUtils; + let walletClient: WalletClient; + let publicClient: PublicClient; + let anvil: Anvil; + let 
cheatCodes: EthCheatCodes; + const initialBaseFee = WEI_CONST; // 1 gwei + const logger = createDebugLogger('l1_gas_test'); + + beforeAll(async () => { + const { anvil: anvilInstance, rpcUrl } = await startAnvil(1); + anvil = anvilInstance; + cheatCodes = new EthCheatCodes(rpcUrl); + const hdAccount = mnemonicToAccount(MNEMONIC, { addressIndex: 0 }); + const privKeyRaw = hdAccount.getHdKey().privateKey; + if (!privKeyRaw) { + throw new Error('Failed to get private key'); + } + const privKey = Buffer.from(privKeyRaw).toString('hex'); + const account = privateKeyToAccount(`0x${privKey}`); + + publicClient = createPublicClient({ + transport: http(rpcUrl), + chain: foundry, + }); + + walletClient = createWalletClient({ + transport: http(rpcUrl), + chain: foundry, + account, + }); + + // set base fee + await publicClient.transport.request({ + method: 'anvil_setNextBlockBaseFeePerGas', + params: [initialBaseFee.toString()], + }); + await cheatCodes.evmMine(); + + gasUtils = new L1TxUtils(publicClient, walletClient, logger, { + gasLimitBufferPercentage: 20n, + maxGwei: 500n, + minGwei: 1n, + maxAttempts: 3, + checkIntervalMs: 100, + stallTimeMs: 1000, + }); + }); + + afterEach(async () => { + // Reset base fee + await cheatCodes.setNextBlockBaseFeePerGas(initialBaseFee); + await cheatCodes.evmMine(); + }); + afterAll(async () => { + // disabling interval mining as it seems to cause issues with stopping anvil + await cheatCodes.setIntervalMining(0); // Disable interval mining + await anvil.stop(); + }, 5_000); + + it('sends and monitors a simple transaction', async () => { + const receipt = await gasUtils.sendAndMonitorTransaction({ + to: '0x1234567890123456789012345678901234567890', + data: '0x', + value: 0n, + }); + + expect(receipt.status).toBe('success'); + }, 10_000); + + it('handles gas price spikes by retrying with higher gas price', async () => { + // Disable all forms of mining + await cheatCodes.setAutomine(false); + await cheatCodes.setIntervalMining(0); + + // Ensure initial base fee is low + await cheatCodes.setNextBlockBaseFeePerGas(initialBaseFee); + + const request = { + to: '0x1234567890123456789012345678901234567890' as `0x${string}`, + data: '0x' as `0x${string}`, + value: 0n, + }; + + const estimatedGas = await publicClient.estimateGas(request); + + const originalMaxFeePerGas = WEI_CONST * 10n; + const originalMaxPriorityFeePerGas = WEI_CONST; + + const txHash = await walletClient.sendTransaction({ + ...request, + gas: estimatedGas, + maxFeePerGas: originalMaxFeePerGas, + maxPriorityFeePerGas: originalMaxPriorityFeePerGas, + }); + + const rawTx = await cheatCodes.getRawTransaction(txHash); + + // Temporarily drop the transaction + await cheatCodes.dropTransaction(txHash); + + // Mine a block with higher base fee + await cheatCodes.setNextBlockBaseFeePerGas((WEI_CONST * 15n) / 10n); + await cheatCodes.evmMine(); + + // Re-add the original tx + await publicClient.transport.request({ + method: 'eth_sendRawTransaction', + params: [rawTx], + }); + + // keeping auto-mining disabled to simulate a stuck transaction + // The monitor should detect the stall and create a replacement tx + + // Monitor should detect stall and replace with higher gas price + const monitorFn = gasUtils.monitorTransaction(request, txHash, { gasLimit: estimatedGas }); + + await sleep(2000); + // re-enable mining + await cheatCodes.setIntervalMining(1); + const receipt = await monitorFn; + expect(receipt.status).toBe('success'); + // Verify that a replacement transaction was created + 
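    // Context for the assertions that follow: a replacement transaction reusing the same
    // nonce must raise both maxFeePerGas and maxPriorityFeePerGas by at least ~10% or the
    // node's txpool rejects it (see MIN_REPLACEMENT_BUMP_PERCENTAGE in l1_tx_utils.ts), and
    // the default retry bump is 50%, so the speed-up tx is expected to carry strictly
    // higher fees than the original.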
expect(receipt.transactionHash).not.toBe(txHash); + + // Get details of replacement tx to verify higher gas price + const replacementTx = await publicClient.getTransaction({ hash: receipt.transactionHash }); + + expect(replacementTx.maxFeePerGas!).toBeGreaterThan(originalMaxFeePerGas); + expect(replacementTx.maxPriorityFeePerGas!).toBeGreaterThan(originalMaxPriorityFeePerGas); + }, 20_000); + + it('respects max gas price limits during spikes', async () => { + const maxGwei = 500n; + const newBaseFee = (maxGwei - 10n) * WEI_CONST; + + // Set base fee high but still under our max + await cheatCodes.setNextBlockBaseFeePerGas(newBaseFee); + + // Mine a new block to make the base fee change take effect + await cheatCodes.evmMine(); + + const receipt = await gasUtils.sendAndMonitorTransaction({ + to: '0x1234567890123456789012345678901234567890', + data: '0x', + value: 0n, + }); + + expect(receipt.effectiveGasPrice).toBeLessThanOrEqual(maxGwei * WEI_CONST); + }, 60_000); + + it('adds appropriate buffer to gas estimation', async () => { + const stableBaseFee = WEI_CONST * 10n; + await cheatCodes.setNextBlockBaseFeePerGas(stableBaseFee); + await cheatCodes.evmMine(); + + // First deploy without any buffer + const baselineGasUtils = new L1TxUtils(publicClient, walletClient, logger, { + gasLimitBufferPercentage: 0n, + maxGwei: 500n, + minGwei: 10n, // Increased minimum gas price + maxAttempts: 5, + checkIntervalMs: 100, + stallTimeMs: 1000, + }); + + const baselineTx = await baselineGasUtils.sendAndMonitorTransaction({ + to: EthAddress.ZERO.toString(), + data: SIMPLE_CONTRACT_BYTECODE, + }); + + // Get the transaction details to see the gas limit + const baselineDetails = await publicClient.getTransaction({ + hash: baselineTx.transactionHash, + }); + + // Now deploy with 20% buffer + const bufferedGasUtils = new L1TxUtils(publicClient, walletClient, logger, { + gasLimitBufferPercentage: 20n, + maxGwei: 500n, + minGwei: 1n, + maxAttempts: 3, + checkIntervalMs: 100, + stallTimeMs: 1000, + }); + + const bufferedTx = await bufferedGasUtils.sendAndMonitorTransaction({ + to: EthAddress.ZERO.toString(), + data: SIMPLE_CONTRACT_BYTECODE, + }); + + const bufferedDetails = await publicClient.getTransaction({ + hash: bufferedTx.transactionHash, + }); + + // The gas limit should be ~20% higher + expect(bufferedDetails.gas).toBeGreaterThan(baselineDetails.gas); + expect(bufferedDetails.gas).toBeLessThanOrEqual((baselineDetails.gas * 120n) / 100n); + }, 20_000); + + it('calculates correct gas prices for initial attempt', async () => { + // Set base fee to 1 gwei + await cheatCodes.setNextBlockBaseFeePerGas(WEI_CONST); + await cheatCodes.evmMine(); + + const basePriorityFee = await publicClient.estimateMaxPriorityFeePerGas(); + const gasPrice = await gasUtils['getGasPrice'](); + + // With default config, priority fee should be bumped by 20% + const expectedPriorityFee = (basePriorityFee * 120n) / 100n; + + // Base fee should be bumped for potential stalls (1.125^(stallTimeMs/12000) = ~1.125 for default config) + const expectedMaxFee = (WEI_CONST * 1125n) / 1000n + expectedPriorityFee; + + expect(gasPrice.maxPriorityFeePerGas).toBe(expectedPriorityFee); + expect(gasPrice.maxFeePerGas).toBe(expectedMaxFee); + }); + + it('calculates correct gas prices for retry attempts', async () => { + await cheatCodes.setNextBlockBaseFeePerGas(WEI_CONST); + await cheatCodes.evmMine(); + + const initialGasPrice = await gasUtils['getGasPrice'](); + + // Get retry gas price for 2nd attempt + const retryGasPrice = await 
gasUtils['getGasPrice'](undefined, 1, initialGasPrice); + + // With default config, retry should bump fees by 50% + const expectedPriorityFee = (initialGasPrice.maxPriorityFeePerGas * 150n) / 100n; + const expectedMaxFee = (initialGasPrice.maxFeePerGas * 150n) / 100n; + + expect(retryGasPrice.maxPriorityFeePerGas).toBe(expectedPriorityFee); + expect(retryGasPrice.maxFeePerGas).toBe(expectedMaxFee); + }); + + it('respects minimum gas price bump for replacements', async () => { + const gasUtils = new L1TxUtils(publicClient, walletClient, logger, { + ...defaultL1TxUtilsConfig, + priorityFeeRetryBumpPercentage: 5n, // Set lower than minimum 10% + }); + + const initialGasPrice = await gasUtils['getGasPrice'](); + + // Get retry gas price with attempt = 1 + const retryGasPrice = await gasUtils['getGasPrice'](undefined, 1, initialGasPrice); + + // Should use 10% minimum bump even though config specified 5% + const expectedPriorityFee = (initialGasPrice.maxPriorityFeePerGas * 110n) / 100n; + const expectedMaxFee = (initialGasPrice.maxFeePerGas * 110n) / 100n; + + expect(retryGasPrice.maxPriorityFeePerGas).toBe(expectedPriorityFee); + expect(retryGasPrice.maxFeePerGas).toBe(expectedMaxFee); + }); + + it('adds correct buffer to gas estimation', async () => { + const request = { + to: '0x1234567890123456789012345678901234567890' as `0x${string}`, + data: '0x' as `0x${string}`, + value: 0n, + }; + + const baseEstimate = await publicClient.estimateGas(request); + const bufferedEstimate = await gasUtils.estimateGas(walletClient.account!, request); + + // adds 20% buffer + const expectedEstimate = baseEstimate + (baseEstimate * 20n) / 100n; + expect(bufferedEstimate).toBe(expectedEstimate); + }); +}); diff --git a/yarn-project/ethereum/src/l1_tx_utils.ts b/yarn-project/ethereum/src/l1_tx_utils.ts new file mode 100644 index 00000000000..f95610303b7 --- /dev/null +++ b/yarn-project/ethereum/src/l1_tx_utils.ts @@ -0,0 +1,400 @@ +import { + type ConfigMappingsType, + bigintConfigHelper, + getDefaultConfig, + numberConfigHelper, +} from '@aztec/foundation/config'; +import { type DebugLogger } from '@aztec/foundation/log'; +import { makeBackoff, retry } from '@aztec/foundation/retry'; +import { sleep } from '@aztec/foundation/sleep'; + +import { + type Account, + type Address, + type Chain, + type GetTransactionReturnType, + type Hex, + type HttpTransport, + type PublicClient, + type TransactionReceipt, + type WalletClient, + formatGwei, +} from 'viem'; + +// 1_000_000_000 Gwei = 1 ETH +// 1_000_000_000 Wei = 1 Gwei +// 1_000_000_000_000_000_000 Wei = 1 ETH + +const WEI_CONST = 1_000_000_000n; + +// setting a minimum bump percentage to 10% due to geth's implementation +// https://github.com/ethereum/go-ethereum/blob/e3d61e6db028c412f74bc4d4c7e117a9e29d0de0/core/txpool/legacypool/list.go#L298 +const MIN_REPLACEMENT_BUMP_PERCENTAGE = 10n; + +// Avg ethereum block time is ~12s +const BLOCK_TIME_MS = 12_000; + +export interface L1TxUtilsConfig { + /** + * How much to increase calculated gas limit. 
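  // Worked example (annotation): with the default gasLimitBufferPercentage of 20n,
  // an eth_estimateGas result of 1_000_000 gas is submitted as
  // 1_000_000n + (1_000_000n * 20n) / 100n === 1_200_000n,
  // matching the estimateGas() implementation further down in this file.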
+ */ + gasLimitBufferPercentage?: bigint; + /** + * Maximum gas price in gwei + */ + maxGwei?: bigint; + /** + * Minimum gas price in gwei + */ + minGwei?: bigint; + /** + * Priority fee bump percentage + */ + priorityFeeBumpPercentage?: bigint; + /** + * How much to increase priority fee by each attempt (percentage) + */ + priorityFeeRetryBumpPercentage?: bigint; + /** + * Maximum number of speed-up attempts + */ + maxAttempts?: number; + /** + * How often to check tx status + */ + checkIntervalMs?: number; + /** + * How long before considering tx stalled + */ + stallTimeMs?: number; + /** + * How long to wait for a tx to be mined before giving up + */ + txTimeoutMs?: number; +} + +export const l1TxUtilsConfigMappings: ConfigMappingsType = { + gasLimitBufferPercentage: { + description: 'How much to increase gas price by each attempt (percentage)', + env: 'L1_GAS_LIMIT_BUFFER_PERCENTAGE', + ...bigintConfigHelper(20n), + }, + minGwei: { + description: 'Minimum gas price in gwei', + env: 'L1_GAS_PRICE_MIN', + ...bigintConfigHelper(1n), + }, + maxGwei: { + description: 'Maximum gas price in gwei', + env: 'L1_GAS_PRICE_MAX', + ...bigintConfigHelper(100n), + }, + priorityFeeBumpPercentage: { + description: 'How much to increase priority fee by each attempt (percentage)', + env: 'L1_PRIORITY_FEE_BUMP_PERCENTAGE', + ...bigintConfigHelper(20n), + }, + priorityFeeRetryBumpPercentage: { + description: 'How much to increase priority fee by each retry attempt (percentage)', + env: 'L1_PRIORITY_FEE_RETRY_BUMP_PERCENTAGE', + ...bigintConfigHelper(50n), + }, + maxAttempts: { + description: 'Maximum number of speed-up attempts', + env: 'L1_TX_MONITOR_MAX_ATTEMPTS', + ...numberConfigHelper(3), + }, + checkIntervalMs: { + description: 'How often to check tx status', + env: 'L1_TX_MONITOR_CHECK_INTERVAL_MS', + ...numberConfigHelper(10_000), + }, + stallTimeMs: { + description: 'How long before considering tx stalled', + env: 'L1_TX_MONITOR_STALL_TIME_MS', + ...numberConfigHelper(30_000), + }, + txTimeoutMs: { + description: 'How long to wait for a tx to be mined before giving up. 
Set to 0 to disable.', + env: 'L1_TX_MONITOR_TX_TIMEOUT_MS', + ...numberConfigHelper(300_000), // 5 mins + }, +}; + +export const defaultL1TxUtilsConfig = getDefaultConfig(l1TxUtilsConfigMappings); + +export interface L1TxRequest { + to: Address | null; + data: Hex; + value?: bigint; +} + +interface GasPrice { + maxFeePerGas: bigint; + maxPriorityFeePerGas: bigint; +} + +export class L1TxUtils { + private readonly config: L1TxUtilsConfig; + + constructor( + private readonly publicClient: PublicClient, + private readonly walletClient: WalletClient, + private readonly logger?: DebugLogger, + config?: Partial, + ) { + this.config = { + ...defaultL1TxUtilsConfig, + ...(config || {}), + }; + } + + /** + * Sends a transaction with gas estimation and pricing + * @param request - The transaction request (to, data, value) + * @param gasConfig - Optional gas configuration + * @returns The transaction hash and parameters used + */ + public async sendTransaction( + request: L1TxRequest, + _gasConfig?: Partial & { fixedGas?: bigint }, + ): Promise<{ txHash: Hex; gasLimit: bigint; gasPrice: GasPrice }> { + const gasConfig = { ...this.config, ..._gasConfig }; + const account = this.walletClient.account; + let gasLimit: bigint; + + if (gasConfig.fixedGas) { + gasLimit = gasConfig.fixedGas; + } else { + gasLimit = await this.estimateGas(account, request); + } + + const gasPrice = await this.getGasPrice(gasConfig); + + const txHash = await this.walletClient.sendTransaction({ + ...request, + gas: gasLimit, + maxFeePerGas: gasPrice.maxFeePerGas, + maxPriorityFeePerGas: gasPrice.maxPriorityFeePerGas, + }); + + this.logger?.verbose( + `Sent L1 transaction ${txHash} with gas limit ${gasLimit} and price ${formatGwei(gasPrice.maxFeePerGas)} gwei`, + ); + + return { txHash, gasLimit, gasPrice }; + } + + /** + * Monitors a transaction until completion, handling speed-ups if needed + * @param request - Original transaction request (needed for speed-ups) + * @param initialTxHash - Hash of the initial transaction + * @param params - Parameters used in the initial transaction + * @param gasConfig - Optional gas configuration + */ + public async monitorTransaction( + request: L1TxRequest, + initialTxHash: Hex, + params: { gasLimit: bigint }, + _gasConfig?: Partial, + ): Promise { + const gasConfig = { ...this.config, ..._gasConfig }; + const account = this.walletClient.account; + + // Retry a few times, in case the tx is not yet propagated. 
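    // Flow summary (annotation) for the monitoring loop below: the original tx and its
    // nonce are fetched first; then, on each iteration, (1) once the sender's confirmed
    // transaction count passes that nonce, every hash sent so far is checked for a receipt
    // and the first one found is returned, (2) if the tx is still pending but has been
    // stalled for longer than stallTimeMs, it is re-sent with the same nonce and bumped
    // fees, up to maxAttempts speed-ups, (3) otherwise the loop sleeps for checkIntervalMs,
    // and the whole wait is abandoned once txTimeoutMs elapses.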
+ const tx = await retry( + () => this.publicClient.getTransaction({ hash: initialTxHash }), + `Getting L1 transaction ${initialTxHash}`, + makeBackoff([1, 2, 3]), + this.logger, + true, + ); + + if (tx?.nonce === undefined || tx?.nonce === null) { + throw new Error(`Failed to get L1 transaction ${initialTxHash} nonce`); + } + const nonce = tx.nonce; + + const txHashes = new Set([initialTxHash]); + let currentTxHash = initialTxHash; + let attempts = 0; + let lastAttemptSent = Date.now(); + const initialTxTime = lastAttemptSent; + let txTimedOut = false; + + while (!txTimedOut) { + try { + const currentNonce = await this.publicClient.getTransactionCount({ address: account.address }); + if (currentNonce > nonce) { + for (const hash of txHashes) { + try { + const receipt = await this.publicClient.getTransactionReceipt({ hash }); + if (receipt) { + this.logger?.debug(`L1 Transaction ${hash} confirmed`); + if (receipt.status === 'reverted') { + this.logger?.error(`L1 Transaction ${hash} reverted`); + } + return receipt; + } + } catch (err) { + if (err instanceof Error && err.message.includes('reverted')) { + throw err; + } + } + } + } + + // Retry a few times, in case the tx is not yet propagated. + const tx = await retry( + () => this.publicClient.getTransaction({ hash: currentTxHash }), + `Getting L1 transaction ${currentTxHash}`, + makeBackoff([1, 2, 3]), + this.logger, + true, + ); + const timePassed = Date.now() - lastAttemptSent; + + if (tx && timePassed < gasConfig.stallTimeMs!) { + this.logger?.debug(`L1 Transaction ${currentTxHash} pending. Time passed: ${timePassed}ms`); + + // Check timeout before continuing + if (gasConfig.txTimeoutMs) { + txTimedOut = Date.now() - initialTxTime > gasConfig.txTimeoutMs; + if (txTimedOut) { + break; + } + } + + await sleep(gasConfig.checkIntervalMs!); + continue; + } + + if (timePassed > gasConfig.stallTimeMs! && attempts < gasConfig.maxAttempts!) { + attempts++; + const newGasPrice = await this.getGasPrice( + gasConfig, + attempts, + tx.maxFeePerGas && tx.maxPriorityFeePerGas + ? { maxFeePerGas: tx.maxFeePerGas, maxPriorityFeePerGas: tx.maxPriorityFeePerGas } + : undefined, + ); + + this.logger?.debug( + `L1 Transaction ${currentTxHash} appears stuck. Attempting speed-up ${attempts}/${gasConfig.maxAttempts} ` + + `with new priority fee ${formatGwei(newGasPrice.maxPriorityFeePerGas)} gwei`, + ); + + currentTxHash = await this.walletClient.sendTransaction({ + ...request, + nonce, + gas: params.gasLimit, + maxFeePerGas: newGasPrice.maxFeePerGas, + maxPriorityFeePerGas: newGasPrice.maxPriorityFeePerGas, + }); + + txHashes.add(currentTxHash); + lastAttemptSent = Date.now(); + } + await sleep(gasConfig.checkIntervalMs!); + } catch (err: any) { + this.logger?.warn(`Error monitoring tx ${currentTxHash}:`, err); + if (err.message?.includes('reverted')) { + throw err; + } + await sleep(gasConfig.checkIntervalMs!); + } + // Check if tx has timed out. 
+ if (gasConfig.txTimeoutMs) { + txTimedOut = Date.now() - initialTxTime > gasConfig.txTimeoutMs!; + } + } + throw new Error(`L1 Transaction ${currentTxHash} timed out`); + } + + /** + * Sends a transaction and monitors it until completion + * @param request - The transaction request (to, data, value) + * @param gasConfig - Optional gas configuration + * @returns The receipt of the successful transaction + */ + public async sendAndMonitorTransaction( + request: L1TxRequest, + gasConfig?: Partial & { fixedGas?: bigint }, + ): Promise { + const { txHash, gasLimit } = await this.sendTransaction(request, gasConfig); + return this.monitorTransaction(request, txHash, { gasLimit }, gasConfig); + } + + /** + * Gets the current gas price with bounds checking + */ + private async getGasPrice( + _gasConfig?: L1TxUtilsConfig, + attempt: number = 0, + previousGasPrice?: typeof attempt extends 0 ? never : GasPrice, + ): Promise { + const gasConfig = { ...this.config, ..._gasConfig }; + const block = await this.publicClient.getBlock({ blockTag: 'latest' }); + const baseFee = block.baseFeePerGas ?? 0n; + + // Get initial priority fee from the network + let priorityFee = await this.publicClient.estimateMaxPriorityFeePerGas(); + let maxFeePerGas = baseFee; + + // Bump base fee so it's valid for next blocks if it stalls + const numBlocks = Math.ceil(gasConfig.stallTimeMs! / BLOCK_TIME_MS); + for (let i = 0; i < numBlocks; i++) { + // each block can go up 12.5% from previous baseFee + maxFeePerGas = (maxFeePerGas * (1_000n + 125n)) / 1_000n; + } + + if (attempt > 0) { + const configBump = + gasConfig.priorityFeeRetryBumpPercentage ?? defaultL1TxUtilsConfig.priorityFeeRetryBumpPercentage!; + const bumpPercentage = + configBump > MIN_REPLACEMENT_BUMP_PERCENTAGE ? configBump : MIN_REPLACEMENT_BUMP_PERCENTAGE; + + // Calculate minimum required fees based on previous attempt + const minPriorityFee = (previousGasPrice!.maxPriorityFeePerGas * (100n + bumpPercentage)) / 100n; + const minMaxFee = (previousGasPrice!.maxFeePerGas * (100n + bumpPercentage)) / 100n; + + // Add priority fee to maxFeePerGas + maxFeePerGas += priorityFee; + + // Use maximum between current network values and minimum required values + priorityFee = priorityFee > minPriorityFee ? priorityFee : minPriorityFee; + maxFeePerGas = maxFeePerGas > minMaxFee ? maxFeePerGas : minMaxFee; + } else { + // first attempt, just bump priority fee + priorityFee = (priorityFee * (100n + (gasConfig.priorityFeeBumpPercentage || 0n))) / 100n; + maxFeePerGas += priorityFee; + } + + // Ensure we don't exceed maxGwei + const maxGweiInWei = gasConfig.maxGwei! * WEI_CONST; + maxFeePerGas = maxFeePerGas > maxGweiInWei ? maxGweiInWei : maxFeePerGas; + + // Ensure priority fee doesn't exceed max fee + const maxPriorityFeePerGas = priorityFee > maxFeePerGas ? 
maxFeePerGas : priorityFee; + + this.logger?.debug( + `Gas price calculation (attempt ${attempt}): baseFee=${formatGwei(baseFee)}, ` + + `maxPriorityFee=${formatGwei(maxPriorityFeePerGas)}, maxFee=${formatGwei(maxFeePerGas)}`, + ); + + return { maxFeePerGas, maxPriorityFeePerGas }; + } + + /** + * Estimates gas and adds buffer + */ + public async estimateGas(account: Account, request: L1TxRequest, _gasConfig?: L1TxUtilsConfig): Promise { + const gasConfig = { ...this.config, ..._gasConfig }; + const initialEstimate = await this.publicClient.estimateGas({ account, ...request }); + + // Add buffer based on either fixed amount or percentage + const withBuffer = initialEstimate + (initialEstimate * (gasConfig.gasLimitBufferPercentage ?? 0n)) / 100n; + + return withBuffer; + } +} diff --git a/yarn-project/foundation/src/config/env_var.ts b/yarn-project/foundation/src/config/env_var.ts index 44ffca4981f..41a41143c91 100644 --- a/yarn-project/foundation/src/config/env_var.ts +++ b/yarn-project/foundation/src/config/env_var.ts @@ -170,4 +170,14 @@ export type EnvVar = | 'AZTEC_SLOT_DURATION' | 'AZTEC_EPOCH_DURATION' | 'AZTEC_TARGET_COMMITTEE_SIZE' - | 'AZTEC_EPOCH_PROOF_CLAIM_WINDOW_IN_L2_SLOTS'; + | 'AZTEC_EPOCH_PROOF_CLAIM_WINDOW_IN_L2_SLOTS' + | 'L1_GAS_LIMIT_BUFFER_PERCENTAGE' + | 'L1_GAS_LIMIT_BUFFER_FIXED' + | 'L1_GAS_PRICE_MIN' + | 'L1_GAS_PRICE_MAX' + | 'L1_PRIORITY_FEE_BUMP_PERCENTAGE' + | 'L1_PRIORITY_FEE_RETRY_BUMP_PERCENTAGE' + | 'L1_TX_MONITOR_MAX_ATTEMPTS' + | 'L1_TX_MONITOR_CHECK_INTERVAL_MS' + | 'L1_TX_MONITOR_STALL_TIME_MS' + | 'L1_TX_MONITOR_TX_TIMEOUT_MS'; diff --git a/yarn-project/sequencer-client/src/publisher/config.ts b/yarn-project/sequencer-client/src/publisher/config.ts index 561add17597..367f2aa6677 100644 --- a/yarn-project/sequencer-client/src/publisher/config.ts +++ b/yarn-project/sequencer-client/src/publisher/config.ts @@ -1,4 +1,4 @@ -import { type L1ReaderConfig, NULL_KEY } from '@aztec/ethereum'; +import { type L1ReaderConfig, type L1TxUtilsConfig, NULL_KEY, l1TxUtilsConfigMappings } from '@aztec/ethereum'; import { type ConfigMappingsType, getConfigFromMappings, numberConfigHelper } from '@aztec/foundation/config'; /** @@ -19,12 +19,12 @@ export type TxSenderConfig = L1ReaderConfig & { /** * Configuration of the L1Publisher. */ -export interface PublisherConfig { +export type PublisherConfig = L1TxUtilsConfig & { /** * The interval to wait between publish retries. 
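Since PublisherConfig now also carries the L1TxUtilsConfig fields, the publisher's L1 transactions inherit the pricing behaviour added above. A rough worked example of the defaults on a first attempt (annotation, assuming a 1 gwei base fee and a 1 gwei network priority fee estimate):

const baseFee = 1_000_000_000n; // 1 gwei
let maxFee = baseFee;
for (let i = 0; i < 3; i++) maxFee = (maxFee * 1_125n) / 1_000n; // 3 blocks of 12.5% headroom (stallTimeMs 30s / ~12s blocks)
const priority = (1_000_000_000n * 120n) / 100n; // priorityFeeBumpPercentage of 20
maxFee += priority; // roughly 2.62 gwei in total, far below the default 100 gwei maxGwei cap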
*/ l1PublishRetryIntervalMS: number; -} +}; export const getTxSenderConfigMappings: ( scope: 'PROVER' | 'SEQ', @@ -62,13 +62,16 @@ export function getTxSenderConfigFromEnv(scope: 'PROVER' | 'SEQ'): Omit ConfigMappingsType = scope => ({ +export const getPublisherConfigMappings: ( + scope: 'PROVER' | 'SEQ', +) => ConfigMappingsType = scope => ({ l1PublishRetryIntervalMS: { env: `${scope}_PUBLISH_RETRY_INTERVAL_MS`, parseEnv: (val: string) => +val, defaultValue: 1000, description: 'The interval to wait between publish retries.', }, + ...l1TxUtilsConfigMappings, }); export function getPublisherConfigFromEnv(scope: 'PROVER' | 'SEQ'): PublisherConfig { diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts index d1916020719..cedbfbe0d7d 100644 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts +++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts @@ -1,17 +1,30 @@ import { L2Block } from '@aztec/circuit-types'; import { EthAddress } from '@aztec/circuits.js'; -import { type L1ContractsConfig, getL1ContractsConfigEnvVars } from '@aztec/ethereum'; +import { + type L1ContractsConfig, + type L1TxRequest, + type L1TxUtilsConfig, + defaultL1TxUtilsConfig, + getL1ContractsConfigEnvVars, +} from '@aztec/ethereum'; import { type ViemSignature } from '@aztec/foundation/eth-signature'; import { sleep } from '@aztec/foundation/sleep'; import { RollupAbi } from '@aztec/l1-artifacts'; import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; import { type MockProxy, mock } from 'jest-mock-extended'; -import { type GetTransactionReceiptReturnType, type PrivateKeyAccount } from 'viem'; +import { + type GetTransactionReceiptReturnType, + type PrivateKeyAccount, + type TransactionReceipt, + encodeFunctionData, +} from 'viem'; import { type PublisherConfig, type TxSenderConfig } from './config.js'; import { L1Publisher } from './l1-publisher.js'; +const mockRollupAddress = '0xcafe'; + interface MockPublicClient { getTransactionReceipt: ({ hash }: { hash: '0x${string}' }) => Promise; getBlock(): Promise<{ timestamp: bigint }>; @@ -19,6 +32,13 @@ interface MockPublicClient { estimateGas: ({ to, data }: { to: '0x${string}'; data: '0x${string}' }) => Promise; } +interface MockL1TxUtils { + sendAndMonitorTransaction: ( + request: L1TxRequest, + _gasConfig?: Partial, + ) => Promise; +} + interface MockRollupContractWrite { propose: ( args: readonly [`0x${string}`, `0x${string}`] | readonly [`0x${string}`, `0x${string}`, `0x${string}`], @@ -42,6 +62,9 @@ interface MockRollupContractRead { class MockRollupContract { constructor(public write: MockRollupContractWrite, public read: MockRollupContractRead, public abi = RollupAbi) {} + get address() { + return mockRollupAddress; + } } describe('L1Publisher', () => { @@ -50,6 +73,7 @@ describe('L1Publisher', () => { let rollupContract: MockRollupContract; let publicClient: MockProxy; + let l1TxUtils: MockProxy; let proposeTxHash: `0x${string}`; let proposeTxReceipt: GetTransactionReceiptReturnType; @@ -60,8 +84,6 @@ describe('L1Publisher', () => { let blockHash: Buffer; let body: Buffer; - let account: PrivateKeyAccount; - let publisher: L1Publisher; const GAS_GUESS = 300_000n; @@ -87,7 +109,7 @@ describe('L1Publisher', () => { rollupContract = new MockRollupContract(rollupContractWrite, rollupContractRead); publicClient = mock(); - + l1TxUtils = mock(); const config = { l1RpcUrl: `http://127.0.0.1:8545`, l1ChainId: 1, @@ -95,26 +117,30 @@ 
describe('L1Publisher', () => { l1Contracts: { rollupAddress: EthAddress.ZERO.toString() }, l1PublishRetryIntervalMS: 1, ethereumSlotDuration: getL1ContractsConfigEnvVars().ethereumSlotDuration, - } as unknown as TxSenderConfig & PublisherConfig & Pick; + ...defaultL1TxUtilsConfig, + } as unknown as TxSenderConfig & + PublisherConfig & + Pick & + L1TxUtilsConfig; publisher = new L1Publisher(config, new NoopTelemetryClient()); (publisher as any)['rollupContract'] = rollupContract; (publisher as any)['publicClient'] = publicClient; - - account = (publisher as any)['account']; + (publisher as any)['l1TxUtils'] = l1TxUtils; + publisher as any; rollupContractRead.getCurrentSlot.mockResolvedValue(l2Block.header.globalVariables.slotNumber.toBigInt()); publicClient.getBlock.mockResolvedValue({ timestamp: 12n }); publicClient.estimateGas.mockResolvedValue(GAS_GUESS); + l1TxUtils.sendAndMonitorTransaction.mockResolvedValue(proposeTxReceipt); + (l1TxUtils as any).estimateGas.mockResolvedValue(GAS_GUESS); }); it('publishes and propose l2 block to l1', async () => { rollupContractRead.archive.mockResolvedValue(l2Block.header.lastArchive.root.toString() as `0x${string}`); rollupContractWrite.propose.mockResolvedValueOnce(proposeTxHash); - publicClient.getTransactionReceipt.mockResolvedValueOnce(proposeTxReceipt); - const result = await publisher.proposeL2Block(l2Block); expect(result).toEqual(true); @@ -133,21 +159,22 @@ describe('L1Publisher', () => { [], `0x${body.toString('hex')}`, ] as const; - expect(rollupContractWrite.propose).toHaveBeenCalledWith(args, { - account: account, - gas: L1Publisher.PROPOSE_GAS_GUESS + GAS_GUESS, - }); - expect(publicClient.getTransactionReceipt).toHaveBeenCalledWith({ hash: proposeTxHash }); + expect(l1TxUtils.sendAndMonitorTransaction).toHaveBeenCalledWith( + { + to: mockRollupAddress, + data: encodeFunctionData({ abi: rollupContract.abi, functionName: 'propose', args }), + }, + { fixedGas: GAS_GUESS + L1Publisher.PROPOSE_GAS_GUESS }, + ); }); it('does not retry if sending a propose tx fails', async () => { rollupContractRead.archive.mockResolvedValue(l2Block.header.lastArchive.root.toString() as `0x${string}`); - rollupContractWrite.propose.mockRejectedValueOnce(new Error()).mockResolvedValueOnce(proposeTxHash); + l1TxUtils.sendAndMonitorTransaction.mockRejectedValueOnce(new Error()).mockResolvedValueOnce(proposeTxReceipt); const result = await publisher.proposeL2Block(l2Block); expect(result).toEqual(false); - expect(rollupContractWrite.propose).toHaveBeenCalledTimes(1); }); it('does not retry if simulating a publish and propose tx fails', async () => { @@ -157,45 +184,20 @@ describe('L1Publisher', () => { await expect(publisher.proposeL2Block(l2Block)).rejects.toThrow(); expect(rollupContractRead.validateHeader).toHaveBeenCalledTimes(1); - expect(rollupContractWrite.propose).toHaveBeenCalledTimes(0); }); it('does not retry if sending a publish and propose tx fails', async () => { rollupContractRead.archive.mockResolvedValue(l2Block.header.lastArchive.root.toString() as `0x${string}`); - rollupContractWrite.propose.mockRejectedValueOnce(new Error()); + l1TxUtils.sendAndMonitorTransaction.mockRejectedValueOnce(new Error()).mockResolvedValueOnce(proposeTxReceipt); const result = await publisher.proposeL2Block(l2Block); expect(result).toEqual(false); - expect(rollupContractWrite.propose).toHaveBeenCalledTimes(1); - }); - - it('retries if fetching the receipt fails (propose)', async () => { - 
rollupContractRead.archive.mockResolvedValue(l2Block.header.lastArchive.root.toString() as `0x${string}`); - rollupContractWrite.propose.mockResolvedValueOnce(proposeTxHash); - publicClient.getTransactionReceipt.mockRejectedValueOnce(new Error()).mockResolvedValueOnce(proposeTxReceipt); - - const result = await publisher.proposeL2Block(l2Block); - - expect(result).toEqual(true); - expect(publicClient.getTransactionReceipt).toHaveBeenCalledTimes(2); - }); - - it('retries if fetching the receipt fails (publish propose)', async () => { - rollupContractRead.archive.mockResolvedValue(l2Block.header.lastArchive.root.toString() as `0x${string}`); - rollupContractWrite.propose.mockResolvedValueOnce(proposeTxHash as `0x${string}`); - publicClient.getTransactionReceipt.mockRejectedValueOnce(new Error()).mockResolvedValueOnce(proposeTxReceipt); - - const result = await publisher.proposeL2Block(l2Block); - - expect(result).toEqual(true); - expect(publicClient.getTransactionReceipt).toHaveBeenCalledTimes(2); }); it('returns false if publish and propose tx reverts', async () => { rollupContractRead.archive.mockResolvedValue(l2Block.header.lastArchive.root.toString() as `0x${string}`); - rollupContractWrite.propose.mockResolvedValueOnce(proposeTxHash); - publicClient.getTransactionReceipt.mockResolvedValueOnce({ ...proposeTxReceipt, status: 'reverted' }); + l1TxUtils.sendAndMonitorTransaction.mockResolvedValueOnce({ ...proposeTxReceipt, status: 'reverted' }); const result = await publisher.proposeL2Block(l2Block); @@ -205,7 +207,7 @@ describe('L1Publisher', () => { it('returns false if propose tx reverts', async () => { rollupContractRead.archive.mockResolvedValue(l2Block.header.lastArchive.root.toString() as `0x${string}`); - publicClient.getTransactionReceipt.mockResolvedValueOnce({ ...proposeTxReceipt, status: 'reverted' }); + l1TxUtils.sendAndMonitorTransaction.mockResolvedValueOnce({ ...proposeTxReceipt, status: 'reverted' }); const result = await publisher.proposeL2Block(l2Block); @@ -214,8 +216,9 @@ describe('L1Publisher', () => { it('returns false if sending publish and progress tx is interrupted', async () => { rollupContractRead.archive.mockResolvedValue(l2Block.header.lastArchive.root.toString() as `0x${string}`); - rollupContractWrite.propose.mockImplementationOnce(() => sleep(10, proposeTxHash) as Promise<`0x${string}`>); - + l1TxUtils.sendAndMonitorTransaction.mockImplementationOnce( + () => sleep(10, proposeTxReceipt) as Promise, + ); const resultPromise = publisher.proposeL2Block(l2Block); publisher.interrupt(); const result = await resultPromise; @@ -226,7 +229,9 @@ describe('L1Publisher', () => { it('returns false if sending propose tx is interrupted', async () => { rollupContractRead.archive.mockResolvedValue(l2Block.header.lastArchive.root.toString() as `0x${string}`); - rollupContractWrite.propose.mockImplementationOnce(() => sleep(10, proposeTxHash) as Promise<`0x${string}`>); + l1TxUtils.sendAndMonitorTransaction.mockImplementationOnce( + () => sleep(10, proposeTxReceipt) as Promise, + ); const resultPromise = publisher.proposeL2Block(l2Block); publisher.interrupt(); diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts index b1e0aa5a50c..10e4b61f967 100644 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts +++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts @@ -17,7 +17,13 @@ import { type Proof, type RootRollupPublicInputs, } from '@aztec/circuits.js'; -import { type 
EthereumChain, type L1ContractsConfig, createEthereumChain } from '@aztec/ethereum'; +import { + type EthereumChain, + type L1ContractsConfig, + L1TxUtils, + type L1TxUtilsConfig, + createEthereumChain, +} from '@aztec/ethereum'; import { makeTuple } from '@aztec/foundation/array'; import { areArraysEqual, compactArray, times } from '@aztec/foundation/collection'; import { type Signature } from '@aztec/foundation/eth-signature'; @@ -44,6 +50,7 @@ import { type PublicActions, type PublicClient, type PublicRpcSchema, + type TransactionReceipt, type WalletActions, type WalletClient, type WalletRpcSchema, @@ -161,8 +168,10 @@ export class L1Publisher { public static PROPOSE_GAS_GUESS: bigint = 12_000_000n; public static PROPOSE_AND_CLAIM_GAS_GUESS: bigint = this.PROPOSE_GAS_GUESS + 100_000n; + private readonly l1TxUtils: L1TxUtils; + constructor( - config: TxSenderConfig & PublisherConfig & Pick, + config: TxSenderConfig & PublisherConfig & Pick & L1TxUtilsConfig, client: TelemetryClient, ) { this.sleepTimeMs = config?.l1PublishRetryIntervalMS ?? 60_000; @@ -195,6 +204,8 @@ export class L1Publisher { client: this.walletClient, }); } + + this.l1TxUtils = new L1TxUtils(this.publicClient, this.walletClient, this.log, config); } protected createWalletClient( @@ -503,36 +514,30 @@ export class L1Publisher { }); this.log.verbose(`Submitting propose transaction`); - - const tx = proofQuote + const result = proofQuote ? await this.sendProposeAndClaimTx(proposeTxArgs, proofQuote) : await this.sendProposeTx(proposeTxArgs); - if (!tx) { + if (!result?.receipt) { this.log.info(`Failed to publish block ${block.number} to L1`, ctx); return false; } - const { hash: txHash, args, functionName, gasLimit } = tx; - - const receipt = await this.getTransactionReceipt(txHash); - if (!receipt) { - this.log.info(`Failed to get receipt for tx ${txHash}`, ctx); - return false; - } + const { receipt, args, functionName } = result; // Tx was mined successfully - if (receipt.status) { - const tx = await this.getTransactionStats(txHash); + if (receipt.status === 'success') { + const tx = await this.getTransactionStats(receipt.transactionHash); const stats: L1PublishBlockStats = { - ...pick(receipt, 'gasPrice', 'gasUsed', 'transactionHash'), + gasPrice: receipt.effectiveGasPrice, + gasUsed: receipt.gasUsed, + transactionHash: receipt.transactionHash, ...pick(tx!, 'calldataGas', 'calldataSize', 'sender'), ...block.getStats(), eventName: 'rollup-published-to-l1', }; this.log.info(`Published L2 block to L1 rollup contract`, { ...stats, ...ctx }); this.metrics.recordProcessBlockTx(timer.ms(), stats); - return true; } @@ -541,7 +546,6 @@ export class L1Publisher { const errorMsg = await this.tryGetErrorFromRevertedTx({ args, functionName, - gasLimit, abi: RollupAbi, address: this.rollupContract.address, blockNumber: receipt.blockNumber, @@ -557,7 +561,6 @@ export class L1Publisher { private async tryGetErrorFromRevertedTx(args: { args: any[]; functionName: string; - gasLimit: bigint; abi: any; address: Hex; blockNumber: bigint | undefined; @@ -720,17 +723,25 @@ export class L1Publisher { ] as const; this.log.info(`SubmitEpochProof proofSize=${args.proof.withoutPublicInputs().length} bytes`); - await this.rollupContract.simulate.submitEpochRootProof(txArgs, { account: this.account }); - return await this.rollupContract.write.submitEpochRootProof(txArgs, { account: this.account }); + + const txReceipt = await this.l1TxUtils.sendAndMonitorTransaction({ + to: this.rollupContract.address, + data: encodeFunctionData({ + abi: 
this.rollupContract.abi, + functionName: 'submitEpochRootProof', + args: txArgs, + }), + }); + + return txReceipt.transactionHash; } catch (err) { this.log.error(`Rollup submit epoch proof failed`, err); return undefined; } } - private async prepareProposeTx(encodedData: L1ProcessArgs, gasGuess: bigint) { - // We have to jump a few hoops because viem is not happy around estimating gas for view functions - const computeTxsEffectsHashGas = await this.publicClient.estimateGas({ + private async prepareProposeTx(encodedData: L1ProcessArgs) { + const computeTxsEffectsHashGas = await this.l1TxUtils.estimateGas(this.account, { to: this.rollupContract.address, data: encodeFunctionData({ abi: this.rollupContract.abi, @@ -744,7 +755,7 @@ export class L1Publisher { // we will fail estimation in the case where we are simulating for the // first ethereum block within our slot (as current time is not in the // slot yet). - const gasGuesstimate = computeTxsEffectsHashGas + gasGuess; + const gasGuesstimate = computeTxsEffectsHashGas + L1Publisher.PROPOSE_GAS_GUESS; const attestations = encodedData.attestations ? encodedData.attestations.map(attest => attest.toViemSignature()) @@ -766,7 +777,7 @@ export class L1Publisher { `0x${encodedData.body.toString('hex')}`, ] as const; - return { args, gasGuesstimate }; + return { args, gas: gasGuesstimate }; } private getSubmitEpochProofArgs(args: { @@ -797,25 +808,34 @@ export class L1Publisher { private async sendProposeTx( encodedData: L1ProcessArgs, - ): Promise<{ hash: string; args: any; functionName: string; gasLimit: bigint } | undefined> { + ): Promise<{ receipt: TransactionReceipt; args: any; functionName: string } | undefined> { if (this.interrupted) { return undefined; } try { - const { args, gasGuesstimate } = await this.prepareProposeTx(encodedData, L1Publisher.PROPOSE_GAS_GUESS); - + const { args, gas } = await this.prepareProposeTx(encodedData); + const receipt = await this.l1TxUtils.sendAndMonitorTransaction( + { + to: this.rollupContract.address, + data: encodeFunctionData({ + abi: this.rollupContract.abi, + functionName: 'propose', + args, + }), + }, + { + fixedGas: gas, + }, + ); return { - hash: await this.rollupContract.write.propose(args, { - account: this.account, - gas: gasGuesstimate, - }), + receipt, args, functionName: 'propose', - gasLimit: gasGuesstimate, }; } catch (err) { prettyLogViemError(err, this.log); - this.log.error(`Rollup publish failed`, err); + const errorMessage = err instanceof Error ? 
err.message : String(err); + this.log.error(`Rollup publish failed`, errorMessage); return undefined; } } @@ -823,30 +843,36 @@ export class L1Publisher { private async sendProposeAndClaimTx( encodedData: L1ProcessArgs, quote: EpochProofQuote, - ): Promise<{ hash: string; args: any; functionName: string; gasLimit: bigint } | undefined> { + ): Promise<{ receipt: TransactionReceipt; args: any; functionName: string } | undefined> { if (this.interrupted) { return undefined; } try { - const { args, gasGuesstimate } = await this.prepareProposeTx( - encodedData, - L1Publisher.PROPOSE_AND_CLAIM_GAS_GUESS, - ); this.log.info(`ProposeAndClaim`); this.log.info(inspect(quote.payload)); + const { args, gas } = await this.prepareProposeTx(encodedData); + const receipt = await this.l1TxUtils.sendAndMonitorTransaction( + { + to: this.rollupContract.address, + data: encodeFunctionData({ + abi: this.rollupContract.abi, + functionName: 'proposeAndClaim', + args: [...args, quote.toViemArgs()], + }), + }, + { fixedGas: gas }, + ); + return { - hash: await this.rollupContract.write.proposeAndClaim([...args, quote.toViemArgs()], { - account: this.account, - gas: gasGuesstimate, - }), - functionName: 'proposeAndClaim', + receipt, args, - gasLimit: gasGuesstimate, + functionName: 'proposeAndClaim', }; } catch (err) { prettyLogViemError(err, this.log); - this.log.error(`Rollup publish failed`, err); + const errorMessage = err instanceof Error ? err.message : String(err); + this.log.error(`Rollup publish failed`, errorMessage); return undefined; } } diff --git a/yarn-project/telemetry-client/src/config.ts b/yarn-project/telemetry-client/src/config.ts index 58c643c5076..dcb5d8a8a0c 100644 --- a/yarn-project/telemetry-client/src/config.ts +++ b/yarn-project/telemetry-client/src/config.ts @@ -14,17 +14,17 @@ export const telemetryClientConfigMappings: ConfigMappingsType new URL(val), + parseEnv: (val: string) => val && new URL(val), }, tracesCollectorUrl: { env: 'OTEL_EXPORTER_OTLP_TRACES_ENDPOINT', description: 'The URL of the telemetry collector for traces', - parseEnv: (val: string) => new URL(val), + parseEnv: (val: string) => val && new URL(val), }, logsCollectorUrl: { env: 'OTEL_EXPORTER_OTLP_LOGS_ENDPOINT', description: 'The URL of the telemetry collector for logs', - parseEnv: (val: string) => new URL(val), + parseEnv: (val: string) => val && new URL(val), }, serviceName: { env: 'OTEL_SERVICE_NAME', From 6cbd375c4fddc0108b72a3092fcd75816305adde Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Wed, 4 Dec 2024 13:16:27 +0000 Subject: [PATCH 20/24] feat: switch to using an external noir implementation of Schnorr (#10330) This PR replaces usage of the stdlib implementation of schnorr with the external library https://github.com/noir-lang/schnorr --- .../contracts/schnorr_account_contract/Nargo.toml | 1 + .../contracts/schnorr_account_contract/src/main.nr | 4 +--- .../contracts/schnorr_hardcoded_account_contract/Nargo.toml | 1 + .../contracts/schnorr_hardcoded_account_contract/src/main.nr | 2 +- .../contracts/schnorr_single_key_account_contract/Nargo.toml | 1 + .../schnorr_single_key_account_contract/src/util.nr | 5 ++--- 6 files changed, 7 insertions(+), 7 deletions(-) diff --git a/noir-projects/noir-contracts/contracts/schnorr_account_contract/Nargo.toml b/noir-projects/noir-contracts/contracts/schnorr_account_contract/Nargo.toml index 12cf4db0fe8..1211ad63c05 100644 --- a/noir-projects/noir-contracts/contracts/schnorr_account_contract/Nargo.toml +++ 
b/noir-projects/noir-contracts/contracts/schnorr_account_contract/Nargo.toml @@ -7,3 +7,4 @@ type = "contract" [dependencies] aztec = { path = "../../../aztec-nr/aztec" } authwit = { path = "../../../aztec-nr/authwit" } +schnorr = { tag = "v0.1.1", git = "https://github.com/noir-lang/schnorr" } diff --git a/noir-projects/noir-contracts/contracts/schnorr_account_contract/src/main.nr b/noir-projects/noir-contracts/contracts/schnorr_account_contract/src/main.nr index fdd886d232e..b040ba5f1fe 100644 --- a/noir-projects/noir-contracts/contracts/schnorr_account_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/schnorr_account_contract/src/main.nr @@ -6,8 +6,6 @@ use dep::aztec::macros::aztec; #[aztec] contract SchnorrAccount { - use dep::std; - use dep::authwit::{ account::AccountActions, auth::{compute_authwit_message_hash, compute_authwit_nullifier}, @@ -83,7 +81,7 @@ contract SchnorrAccount { is_infinite: false, }; // Verify signature of the payload bytes - std::schnorr::verify_signature(pub_key, signature, outer_hash.to_be_bytes::<32>()) + schnorr::verify_signature(pub_key, signature, outer_hash.to_be_bytes::<32>()) // docs:end:is_valid_impl } diff --git a/noir-projects/noir-contracts/contracts/schnorr_hardcoded_account_contract/Nargo.toml b/noir-projects/noir-contracts/contracts/schnorr_hardcoded_account_contract/Nargo.toml index 877f369a800..771cfa8fd28 100644 --- a/noir-projects/noir-contracts/contracts/schnorr_hardcoded_account_contract/Nargo.toml +++ b/noir-projects/noir-contracts/contracts/schnorr_hardcoded_account_contract/Nargo.toml @@ -7,3 +7,4 @@ type = "contract" [dependencies] aztec = { path = "../../../aztec-nr/aztec" } authwit = { path = "../../../aztec-nr/authwit" } +schnorr = { tag = "v0.1.1", git = "https://github.com/noir-lang/schnorr" } diff --git a/noir-projects/noir-contracts/contracts/schnorr_hardcoded_account_contract/src/main.nr b/noir-projects/noir-contracts/contracts/schnorr_hardcoded_account_contract/src/main.nr index 1bed3b932e7..02582e3e097 100644 --- a/noir-projects/noir-contracts/contracts/schnorr_hardcoded_account_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/schnorr_hardcoded_account_contract/src/main.nr @@ -45,7 +45,7 @@ contract SchnorrHardcodedAccount { } // Verify signature using hardcoded public key - std::schnorr::verify_signature(public_key, signature, outer_hash.to_be_bytes::<32>()) + schnorr::verify_signature(public_key, signature, outer_hash.to_be_bytes::<32>()) } // docs:end:is-valid } diff --git a/noir-projects/noir-contracts/contracts/schnorr_single_key_account_contract/Nargo.toml b/noir-projects/noir-contracts/contracts/schnorr_single_key_account_contract/Nargo.toml index 80c39efcba2..161993c5a73 100644 --- a/noir-projects/noir-contracts/contracts/schnorr_single_key_account_contract/Nargo.toml +++ b/noir-projects/noir-contracts/contracts/schnorr_single_key_account_contract/Nargo.toml @@ -7,3 +7,4 @@ type = "contract" [dependencies] aztec = { path = "../../../aztec-nr/aztec" } authwit = { path = "../../../aztec-nr/authwit" } +schnorr = { tag = "v0.1.1", git = "https://github.com/noir-lang/schnorr" } diff --git a/noir-projects/noir-contracts/contracts/schnorr_single_key_account_contract/src/util.nr b/noir-projects/noir-contracts/contracts/schnorr_single_key_account_contract/src/util.nr index a3610085cea..e77e943006e 100644 --- a/noir-projects/noir-contracts/contracts/schnorr_single_key_account_contract/src/util.nr +++ 
b/noir-projects/noir-contracts/contracts/schnorr_single_key_account_contract/src/util.nr @@ -1,6 +1,6 @@ use crate::auth_oracle::AuthWitness; use dep::aztec::prelude::AztecAddress; -use std::{embedded_curve_ops::EmbeddedCurvePoint, schnorr::verify_signature}; +use std::embedded_curve_ops::EmbeddedCurvePoint; pub fn recover_address(message_hash: Field, witness: AuthWitness) -> AztecAddress { let message_bytes: [u8; 32] = message_hash.to_be_bytes(); @@ -11,8 +11,7 @@ pub fn recover_address(message_hash: Field, witness: AuthWitness) -> AztecAddres }; // In a single key account contract we re-used ivpk_m as signing key - let verification = verify_signature(public_key, witness.signature, message_bytes); - assert(verification == true); + schnorr::assert_valid_signature(public_key, witness.signature, message_bytes); AztecAddress::compute(witness.keys, witness.partial_address) } From 5cef62834e76f57514d0d09c24e4a2c98ea05485 Mon Sep 17 00:00:00 2001 From: spypsy Date: Wed, 4 Dec 2024 13:42:47 +0000 Subject: [PATCH 21/24] fix: move spartan-script tf to spartan, use file in bucket (#10395) --- aztec-up/terraform/main.tf | 12 --- iac/main.tf | 49 --------- spartan/terraform/user-script/main.tf | 144 ++++++++++++++++++++++++++ 3 files changed, 144 insertions(+), 61 deletions(-) create mode 100644 spartan/terraform/user-script/main.tf diff --git a/aztec-up/terraform/main.tf b/aztec-up/terraform/main.tf index 63f44df06df..3adcae03b5d 100644 --- a/aztec-up/terraform/main.tf +++ b/aztec-up/terraform/main.tf @@ -108,18 +108,6 @@ resource "null_resource" "upload_public_directory" { } } -# resource "aws_route53_record" "subdomain_record" { -# zone_id = data.terraform_remote_state.aztec2_iac.outputs.aws_route53_zone_id -# name = "install.aztec.network" -# type = "A" - -# alias { -# name = aws_s3_bucket_website_configuration.website_bucket.website_domain -# zone_id = aws_s3_bucket.install_bucket.hosted_zone_id -# evaluate_target_health = true -# } -# } - resource "aws_cloudfront_distribution" "install" { origin { domain_name = aws_s3_bucket.install_bucket.website_endpoint diff --git a/iac/main.tf b/iac/main.tf index 46b145be06a..5e1dec466d6 100644 --- a/iac/main.tf +++ b/iac/main.tf @@ -125,52 +125,3 @@ resource "aws_route53_record" "static" { evaluate_target_health = true } } - -resource "aws_s3_bucket" "sp_testnet_redirect" { - bucket = "sp-testnet.aztec.network" - - website { - redirect_all_requests_to { - host_name = "github.com" - protocol = "https" - path = "/AztecProtocol/aztec-packages/refs/heads/master/spartan/releases/create-spartan.sh" - } - } -} - -resource "aws_s3_bucket_public_access_block" "sp_testnet_public_access" { - bucket = aws_s3_bucket.sp_testnet_redirect.id - - block_public_acls = false - block_public_policy = false - ignore_public_acls = false - restrict_public_buckets = false -} - -resource "aws_s3_bucket_policy" "sp_testnet_policy" { - bucket = aws_s3_bucket.sp_testnet_redirect.id - - policy = jsonencode({ - Version = "2012-10-17" - Statement = [ - { - Effect = "Allow" - Principal = "*" - Action = "s3:GetObject" - Resource = "arn:aws:s3:::${aws_s3_bucket.sp_testnet_redirect.id}/*" - } - ] - }) -} - -resource "aws_route53_record" "sp_testnet" { - zone_id = data.terraform_remote_state.aztec2_iac.outputs.aws_route53_zone_id - name = "sp-testnet.aztec.network" - type = "A" - - alias { - name = aws_s3_bucket.sp_testnet_redirect.website_domain - zone_id = aws_s3_bucket.sp_testnet_redirect.hosted_zone_id - evaluate_target_health = true - } -} diff --git 
a/spartan/terraform/user-script/main.tf b/spartan/terraform/user-script/main.tf new file mode 100644 index 00000000000..c3291292c2e --- /dev/null +++ b/spartan/terraform/user-script/main.tf @@ -0,0 +1,144 @@ +terraform { + backend "s3" { + bucket = "aztec-terraform" + key = "spartan-script" + region = "eu-west-2" + } + required_providers { + aws = { + source = "hashicorp/aws" + version = "5.29.0" + } + } +} + +provider "aws" { + region = "eu-west-2" +} + +data "terraform_remote_state" "aztec2_iac" { + backend = "s3" + config = { + bucket = "aztec-terraform" + key = "aztec2/iac" + region = "eu-west-2" + } +} + +resource "aws_s3_bucket" "sp_testnet_script" { + bucket = "sp-testnet.aztec.network" +} + +resource "aws_s3_bucket_website_configuration" "sp_testnet_script" { + bucket = aws_s3_bucket.sp_testnet_script.id + + index_document { + suffix = "create-spartan.sh" + } +} + +resource "aws_s3_bucket_public_access_block" "sp_testnet_public_access" { + bucket = aws_s3_bucket.sp_testnet_script.id + + block_public_acls = false + block_public_policy = false + ignore_public_acls = false + restrict_public_buckets = false +} + +resource "aws_s3_bucket_policy" "sp_testnet_policy" { + bucket = aws_s3_bucket.sp_testnet_script.id + + policy = jsonencode({ + Version = "2012-10-17" + Statement = [ + { + Effect = "Allow" + Principal = "*" + Action = "s3:GetObject" + Resource = "arn:aws:s3:::${aws_s3_bucket.sp_testnet_script.id}/*" + } + ] + }) +} + +# Upload files to s3 bucket +resource "null_resource" "upload_script" { + triggers = { + always_run = "${timestamp()}" + } + + provisioner "local-exec" { + interpreter = ["/bin/bash", "-c"] + command = < Date: Wed, 4 Dec 2024 15:14:40 +0100 Subject: [PATCH 22/24] chore(avm): Fake verification routine for avm recursion in public base rollup (#10382) Resolves #10243 --- .../vm/avm/tests/recursive_verifier.test.cpp | 4 +-- .../rollup-lib/src/base/public_base_rollup.nr | 5 +++ .../src/abis/avm_circuit_public_inputs.nr | 33 +++++++++++++++++++ 3 files changed, 40 insertions(+), 2 deletions(-) diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/tests/recursive_verifier.test.cpp b/barretenberg/cpp/src/barretenberg/vm/avm/tests/recursive_verifier.test.cpp index 7397d26196b..0d9920ad09b 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/tests/recursive_verifier.test.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/tests/recursive_verifier.test.cpp @@ -109,8 +109,6 @@ TEST_F(AvmRecursiveTests, recursion) verification_key->pcs_verification_key->pairing_check(agg_output.P0.get_value(), agg_output.P1.get_value()); ASSERT_TRUE(agg_output_valid) << "Pairing points (aggregation state) are not valid."; - - vinfo("Recursive verifier: num gates = ", outer_circuit.num_gates); ASSERT_FALSE(outer_circuit.failed()) << "Outer circuit has failed."; bool outer_circuit_checked = CircuitChecker::check(outer_circuit); @@ -139,6 +137,8 @@ TEST_F(AvmRecursiveTests, recursion) auto ultra_verification_key = std::make_shared(ultra_instance->proving_key); OuterVerifier ultra_verifier(ultra_verification_key); + vinfo("Recursive verifier: finalized num gates = ", outer_circuit.num_gates); + auto recursion_proof = ultra_prover.construct_proof(); bool recursion_verified = ultra_verifier.verify_proof(recursion_proof); EXPECT_TRUE(recursion_verified) << "recursion proof verification failed"; diff --git a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/base/public_base_rollup.nr b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/base/public_base_rollup.nr index 
c994027d7ec..ce7add25489 100644 --- a/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/base/public_base_rollup.nr +++ b/noir-projects/noir-protocol-circuits/crates/rollup-lib/src/base/public_base_rollup.nr @@ -97,6 +97,11 @@ impl PublicBaseRollupInputs { // self.tube_data.vk_data.validate_in_vk_tree([TUBE_VK_INDEX]); } + // Warning: Fake verification! TODO(#8470) + if !dep::std::runtime::is_unconstrained() { + self.avm_proof_data.fake_verify(); + } + // TODO(#8470) // if !dep::std::runtime::is_unconstrained() { // self.avm_proof_data.verify(); diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/avm_circuit_public_inputs.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/avm_circuit_public_inputs.nr index 7affd9031c9..b88a123a1fd 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/avm_circuit_public_inputs.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/avm_circuit_public_inputs.nr @@ -21,6 +21,8 @@ use crate::{ utils::reader::Reader, }; +use std::hash::{poseidon2, poseidon2_permutation}; + pub struct AvmCircuitPublicInputs { /////////////////////////////////// // Inputs. @@ -180,6 +182,37 @@ pub struct AvmProofData { pub vk_data: VkData, } +// The number of columns for the AVM recursive verifier we want to fake, i.e., the resulting +// verify() routine below will create a similar number of gates as a real AVM recursive verifier +// with the number of columns set by this constant. +pub global DUMMY_AVM_VERIFIER_NUM_COLUMNS: u32 = 2200; + +// Current AVM recursive verifier has 9500 gates per column. +// Note that the addition of a single column in AVM recursive verifier incurs 8500 gates. +// (some additional costs are due to lookups, relations, ...). +// 78 gates per Poseidon permutation +// 9500/78 = 121.8 +pub global DUMMY_AVM_VERIFIER_NUM_ITERATIONS: u32 = DUMMY_AVM_VERIFIER_NUM_COLUMNS * 122; + +// Warning: This is a fake avm recursive verification whose sole goal is to reproduce a similar +// computational effort (number of gates) as the real recursive verifier. 
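// (Illustrative arithmetic, not part of the original patch, only to make the constants above concrete:
//  DUMMY_AVM_VERIFIER_NUM_ITERATIONS = 2200 * 122 = 268,400 Poseidon permutations, and at ~78 gates per
//  permutation that is ~20.9M gates, which indeed matches 2200 columns * ~9500 gates per column.)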
+// TODO(#8470): Replace with the real AVM recursive verifier +impl AvmProofData { + pub fn fake_verify(self) { + let mut input_hash = poseidon2::Poseidon2::hash( + [self.public_inputs.transaction_fee, self.proof.fields[0], self.vk_data.vk.key[0]], + 3, + ); + + let mut result: [Field; 4] = [input_hash, 0, 0, 0]; + for i in 0..DUMMY_AVM_VERIFIER_NUM_ITERATIONS { + result = poseidon2_permutation(result, 4); + } + + assert(!result[0].lt(1)); + } +} + impl Verifiable for AvmProofData { fn verify(self) { // TODO(#8470) From dddb008d0fe69da64574df9a21e0e91533f9ab15 Mon Sep 17 00:00:00 2001 From: Lasse Herskind <16536249+LHerskind@users.noreply.github.com> Date: Wed, 4 Dec 2024 14:34:49 +0000 Subject: [PATCH 23/24] chore: contracts on a diet (#10389) --- l1-contracts/.solhint.json | 2 +- l1-contracts/src/core/Leonidas.sol | 233 +----- l1-contracts/src/core/Rollup.sol | 727 +++++------------- .../src/core/interfaces/ILeonidas.sol | 21 + l1-contracts/src/core/interfaces/IRollup.sol | 74 +- .../src/core/libraries/EpochProofQuoteLib.sol | 51 -- .../libraries/LeonidasLib/LeonidasLib.sol | 255 ++++++ .../libraries/RollupLibs/EpochProofLib.sol | 276 +++++++ .../RollupLibs/EpochProofQuoteLib.sol | 51 ++ .../libraries/RollupLibs/ExtRollupLib.sol | 113 +++ .../libraries/{ => RollupLibs}/FeeMath.sol | 80 +- .../libraries/{ => RollupLibs}/HeaderLib.sol | 100 +-- .../libraries/RollupLibs/IntRollupLib.sol | 31 + .../libraries/{ => RollupLibs}/ProposeLib.sol | 2 +- .../libraries/{ => RollupLibs}/TxsDecoder.sol | 2 +- .../libraries/RollupLibs/ValidationLib.sol | 157 ++++ .../src/core/libraries/crypto/SampleLib.sol | 4 +- .../core/libraries/crypto/SignatureLib.sol | 14 +- l1-contracts/test/Rollup.t.sol | 47 +- l1-contracts/test/decoders/Decoders.t.sol | 4 +- .../test/decoders/helpers/HeaderLibHelper.sol | 4 +- .../decoders/helpers/TxsDecoderHelper.sol | 2 +- .../test/fees/FeeModelTestPoints.t.sol | 2 +- l1-contracts/test/fees/FeeRollup.t.sol | 19 +- l1-contracts/test/fees/MinimalFeeModel.sol | 25 +- l1-contracts/test/fees/MinimalFeeModel.t.sol | 10 +- l1-contracts/test/portals/TokenPortal.t.sol | 4 +- l1-contracts/test/sparta/Sparta.t.sol | 16 +- .../aztec.js/src/utils/cheat_codes.ts | 18 +- .../cli/src/cmds/l1/update_l1_validators.ts | 2 - .../e2e_prover_coordination.test.ts | 7 +- .../ethereum/src/deploy_l1_contracts.ts | 31 +- .../scripts/generate-artifacts.sh | 4 +- .../src/publisher/l1-publisher.ts | 11 +- 34 files changed, 1414 insertions(+), 985 deletions(-) delete mode 100644 l1-contracts/src/core/libraries/EpochProofQuoteLib.sol create mode 100644 l1-contracts/src/core/libraries/LeonidasLib/LeonidasLib.sol create mode 100644 l1-contracts/src/core/libraries/RollupLibs/EpochProofLib.sol create mode 100644 l1-contracts/src/core/libraries/RollupLibs/EpochProofQuoteLib.sol create mode 100644 l1-contracts/src/core/libraries/RollupLibs/ExtRollupLib.sol rename l1-contracts/src/core/libraries/{ => RollupLibs}/FeeMath.sol (64%) rename l1-contracts/src/core/libraries/{ => RollupLibs}/HeaderLib.sol (90%) create mode 100644 l1-contracts/src/core/libraries/RollupLibs/IntRollupLib.sol rename l1-contracts/src/core/libraries/{ => RollupLibs}/ProposeLib.sol (88%) rename l1-contracts/src/core/libraries/{ => RollupLibs}/TxsDecoder.sol (99%) create mode 100644 l1-contracts/src/core/libraries/RollupLibs/ValidationLib.sol diff --git a/l1-contracts/.solhint.json b/l1-contracts/.solhint.json index f3b1b7f84df..8c347972f9c 100644 --- a/l1-contracts/.solhint.json +++ b/l1-contracts/.solhint.json @@ -27,7 +27,7 @@ 
"no-unused-vars": "error", "state-visibility": "error", "var-name-mixedcase": "error", - "private-func-leading-underscore": "error", + "private-func-leading-underscore": "warn", "private-vars-no-leading-underscore": "error", "func-param-name-leading-underscore": "error", "func-param-name-mixedcase": "error", diff --git a/l1-contracts/src/core/Leonidas.sol b/l1-contracts/src/core/Leonidas.sol index 6602a0085e6..01899cd3003 100644 --- a/l1-contracts/src/core/Leonidas.sol +++ b/l1-contracts/src/core/Leonidas.sol @@ -2,16 +2,14 @@ // Copyright 2024 Aztec Labs. pragma solidity >=0.8.27; -import {ILeonidas} from "@aztec/core/interfaces/ILeonidas.sol"; -import {SampleLib} from "@aztec/core/libraries/crypto/SampleLib.sol"; -import {SignatureLib} from "@aztec/core/libraries/crypto/SignatureLib.sol"; +import {ILeonidas, EpochData, LeonidasStorage} from "@aztec/core/interfaces/ILeonidas.sol"; +import {Signature} from "@aztec/core/libraries/crypto/SignatureLib.sol"; import {DataStructures} from "@aztec/core/libraries/DataStructures.sol"; -import {Errors} from "@aztec/core/libraries/Errors.sol"; +import {LeonidasLib} from "@aztec/core/libraries/LeonidasLib/LeonidasLib.sol"; import { Timestamp, Slot, Epoch, SlotLib, EpochLib, TimeFns } from "@aztec/core/libraries/TimeMath.sol"; import {Ownable} from "@oz/access/Ownable.sol"; -import {MessageHashUtils} from "@oz/utils/cryptography/MessageHashUtils.sol"; import {EnumerableSet} from "@oz/utils/structs/EnumerableSet.sol"; /** @@ -30,24 +28,11 @@ import {EnumerableSet} from "@oz/utils/structs/EnumerableSet.sol"; */ contract Leonidas is Ownable, TimeFns, ILeonidas { using EnumerableSet for EnumerableSet.AddressSet; - using SignatureLib for SignatureLib.Signature; - using MessageHashUtils for bytes32; + using LeonidasLib for LeonidasStorage; using SlotLib for Slot; using EpochLib for Epoch; - /** - * @notice The data structure for an epoch - * @param committee - The validator set for the epoch - * @param sampleSeed - The seed used to sample the validator set of the epoch - * @param nextSeed - The seed used to influence the NEXT epoch - */ - struct EpochData { - address[] committee; - uint256 sampleSeed; - uint256 nextSeed; - } - // The target number of validators in a committee // @todo #8021 uint256 public immutable TARGET_COMMITTEE_SIZE; @@ -55,14 +40,7 @@ contract Leonidas is Ownable, TimeFns, ILeonidas { // The time that the contract was deployed Timestamp public immutable GENESIS_TIME; - // An enumerable set of validators that are up to date - EnumerableSet.AddressSet private validatorSet; - - // A mapping to snapshots of the validator set - mapping(Epoch => EpochData) public epochs; - - // The last stored randao value, same value as `seed` in the last inserted epoch - uint256 private lastSeed; + LeonidasStorage private store; constructor( address _ares, @@ -103,7 +81,7 @@ contract Leonidas is Ownable, TimeFns, ILeonidas { */ function removeValidator(address _validator) external override(ILeonidas) onlyOwner { setupEpoch(); - validatorSet.remove(_validator); + store.validatorSet.remove(_validator); } /** @@ -121,7 +99,7 @@ contract Leonidas is Ownable, TimeFns, ILeonidas { override(ILeonidas) returns (address[] memory) { - return epochs[_epoch].committee; + return store.epochs[_epoch].committee; } /** @@ -129,7 +107,7 @@ contract Leonidas is Ownable, TimeFns, ILeonidas { * @return The validator set for the current epoch */ function getCurrentEpochCommittee() external view override(ILeonidas) returns (address[] memory) { - return 
_getCommitteeAt(Timestamp.wrap(block.timestamp)); + return store.getCommitteeAt(getCurrentEpoch(), TARGET_COMMITTEE_SIZE); } /** @@ -140,7 +118,7 @@ contract Leonidas is Ownable, TimeFns, ILeonidas { * @return The validator set */ function getValidators() external view override(ILeonidas) returns (address[] memory) { - return validatorSet.values(); + return store.validatorSet.values(); } /** @@ -155,13 +133,13 @@ contract Leonidas is Ownable, TimeFns, ILeonidas { */ function setupEpoch() public override(ILeonidas) { Epoch epochNumber = getCurrentEpoch(); - EpochData storage epoch = epochs[epochNumber]; + EpochData storage epoch = store.epochs[epochNumber]; if (epoch.sampleSeed == 0) { - epoch.sampleSeed = _getSampleSeed(epochNumber); - epoch.nextSeed = lastSeed = _computeNextSeed(epochNumber); + epoch.sampleSeed = store.getSampleSeed(epochNumber); + epoch.nextSeed = store.lastSeed = _computeNextSeed(epochNumber); - epoch.committee = _sampleValidators(epoch.sampleSeed); + epoch.committee = store.sampleValidators(epoch.sampleSeed, TARGET_COMMITTEE_SIZE); } } @@ -171,7 +149,7 @@ contract Leonidas is Ownable, TimeFns, ILeonidas { * @return The number of validators in the validator set */ function getValidatorCount() public view override(ILeonidas) returns (uint256) { - return validatorSet.length(); + return store.validatorSet.length(); } /** @@ -180,7 +158,7 @@ contract Leonidas is Ownable, TimeFns, ILeonidas { * @return The number of validators in the validator set */ function getValidatorAt(uint256 _index) public view override(ILeonidas) returns (address) { - return validatorSet.at(_index); + return store.validatorSet.at(_index); } /** @@ -191,7 +169,7 @@ contract Leonidas is Ownable, TimeFns, ILeonidas { * @return True if the address is in the validator set, false otherwise */ function isValidator(address _validator) public view override(ILeonidas) returns (bool) { - return validatorSet.contains(_validator); + return store.validatorSet.contains(_validator); } /** @@ -261,31 +239,9 @@ contract Leonidas is Ownable, TimeFns, ILeonidas { * @return The address of the proposer */ function getProposerAt(Timestamp _ts) public view override(ILeonidas) returns (address) { - Epoch epochNumber = getEpochAt(_ts); Slot slot = getSlotAt(_ts); - - EpochData storage epoch = epochs[epochNumber]; - - // If the epoch is setup, we can just return the proposer. 
Otherwise we have to emulate sampling - if (epoch.sampleSeed != 0) { - uint256 committeeSize = epoch.committee.length; - if (committeeSize == 0) { - return address(0); - } - - return - epoch.committee[_computeProposerIndex(epochNumber, slot, epoch.sampleSeed, committeeSize)]; - } - - // Allow anyone if there is no validator set - if (validatorSet.length() == 0) { - return address(0); - } - - // Emulate a sampling of the validators - uint256 sampleSeed = _getSampleSeed(epochNumber); - address[] memory committee = _sampleValidators(sampleSeed); - return committee[_computeProposerIndex(epochNumber, slot, sampleSeed, committee.length)]; + Epoch epochNumber = getEpochAtSlot(slot); + return store.getProposerAt(slot, epochNumber, TARGET_COMMITTEE_SIZE); } /** @@ -326,29 +282,7 @@ contract Leonidas is Ownable, TimeFns, ILeonidas { * @param _validator - The validator to add */ function _addValidator(address _validator) internal { - validatorSet.add(_validator); - } - - function _getCommitteeAt(Timestamp _ts) internal view returns (address[] memory) { - Epoch epochNumber = getEpochAt(_ts); - EpochData storage epoch = epochs[epochNumber]; - - if (epoch.sampleSeed != 0) { - uint256 committeeSize = epoch.committee.length; - if (committeeSize == 0) { - return new address[](0); - } - return epoch.committee; - } - - // Allow anyone if there is no validator set - if (validatorSet.length() == 0) { - return new address[](0); - } - - // Emulate a sampling of the validators - uint256 sampleSeed = _getSampleSeed(epochNumber); - return _sampleValidators(sampleSeed); + store.validatorSet.add(_validator); } /** @@ -369,57 +303,12 @@ contract Leonidas is Ownable, TimeFns, ILeonidas { */ function _validateLeonidas( Slot _slot, - SignatureLib.Signature[] memory _signatures, + Signature[] memory _signatures, bytes32 _digest, DataStructures.ExecutionFlags memory _flags ) internal view { - Timestamp ts = getTimestampForSlot(_slot); - address proposer = getProposerAt(ts); - - // @todo Consider getting rid of this option. - // If the proposer is open, we allow anyone to propose without needing any signatures - if (proposer == address(0)) { - return; - } - - // @todo We should allow to provide a signature instead of needing the proposer to broadcast. - require(proposer == msg.sender, Errors.Leonidas__InvalidProposer(proposer, msg.sender)); - - // @note This is NOT the efficient way to do it, but it is a very convenient way for us to do it - // that allows us to reduce the number of code paths. Also when changed with optimistic for - // pleistarchus, this will be changed, so we can live with it. 
- - if (_flags.ignoreSignatures) { - return; - } - - address[] memory committee = _getCommitteeAt(ts); - - uint256 needed = committee.length * 2 / 3 + 1; - require( - _signatures.length >= needed, - Errors.Leonidas__InsufficientAttestationsProvided(needed, _signatures.length) - ); - - // Validate the attestations - uint256 validAttestations = 0; - - bytes32 digest = _digest.toEthSignedMessageHash(); - for (uint256 i = 0; i < _signatures.length; i++) { - SignatureLib.Signature memory signature = _signatures[i]; - if (signature.isEmpty) { - continue; - } - - // The verification will throw if invalid - signature.verify(committee[i], digest); - validAttestations++; - } - - require( - validAttestations >= needed, - Errors.Leonidas__InsufficientAttestations(needed, validAttestations) - ); + Epoch epochNumber = getEpochAtSlot(_slot); + store.validateLeonidas(_slot, epochNumber, _signatures, _digest, _flags, TARGET_COMMITTEE_SIZE); } /** @@ -435,82 +324,4 @@ contract Leonidas is Ownable, TimeFns, ILeonidas { function _computeNextSeed(Epoch _epoch) private view returns (uint256) { return uint256(keccak256(abi.encode(_epoch, block.prevrandao))); } - - /** - * @notice Samples a validator set for a specific epoch - * - * @dev Only used internally, should never be called for anything but the "next" epoch - * Allowing us to always use `lastSeed`. - * - * @return The validators for the given epoch - */ - function _sampleValidators(uint256 _seed) private view returns (address[] memory) { - uint256 validatorSetSize = validatorSet.length(); - if (validatorSetSize == 0) { - return new address[](0); - } - - // If we have less validators than the target committee size, we just return the full set - if (validatorSetSize <= TARGET_COMMITTEE_SIZE) { - return validatorSet.values(); - } - - uint256[] memory indicies = - SampleLib.computeCommitteeClever(TARGET_COMMITTEE_SIZE, validatorSetSize, _seed); - - address[] memory committee = new address[](TARGET_COMMITTEE_SIZE); - for (uint256 i = 0; i < TARGET_COMMITTEE_SIZE; i++) { - committee[i] = validatorSet.at(indicies[i]); - } - return committee; - } - - /** - * @notice Get the sample seed for an epoch - * - * @dev This should behave as walking past the line, but it does not currently do that. - * If there are entire skips, e.g., 1, 2, 5 and we then go back and try executing - * for 4 we will get an invalid value because we will read lastSeed which is from 5. - * - * @dev The `_epoch` will never be 0 nor in the future - * - * @dev The return value will be equal to keccak256(n, block.prevrandao) for n being the last epoch - * setup. 
- * - * @return The sample seed for the epoch - */ - function _getSampleSeed(Epoch _epoch) private view returns (uint256) { - if (Epoch.unwrap(_epoch) == 0) { - return type(uint256).max; - } - uint256 sampleSeed = epochs[_epoch].sampleSeed; - if (sampleSeed != 0) { - return sampleSeed; - } - - sampleSeed = epochs[_epoch - Epoch.wrap(1)].nextSeed; - if (sampleSeed != 0) { - return sampleSeed; - } - - return lastSeed; - } - - /** - * @notice Computes the index of the committee member that acts as proposer for a given slot - * - * @param _epoch - The epoch to compute the proposer index for - * @param _slot - The slot to compute the proposer index for - * @param _seed - The seed to use for the computation - * @param _size - The size of the committee - * - * @return The index of the proposer - */ - function _computeProposerIndex(Epoch _epoch, Slot _slot, uint256 _seed, uint256 _size) - private - pure - returns (uint256) - { - return uint256(keccak256(abi.encode(_epoch, _slot, _seed))) % _size; - } } diff --git a/l1-contracts/src/core/Rollup.sol b/l1-contracts/src/core/Rollup.sol index ef98e18d6cc..f706f40d523 100644 --- a/l1-contracts/src/core/Rollup.sol +++ b/l1-contracts/src/core/Rollup.sol @@ -10,6 +10,9 @@ import { FeeHeader, ManaBaseFeeComponents, BlockLog, + ChainTips, + RollupStore, + L1GasOracleValues, L1FeeData, SubmitEpochRootProofArgs } from "@aztec/core/interfaces/IRollup.sol"; @@ -19,32 +22,28 @@ import {IOutbox} from "@aztec/core/interfaces/messagebridge/IOutbox.sol"; import {Leonidas} from "@aztec/core/Leonidas.sol"; import {Constants} from "@aztec/core/libraries/ConstantsGen.sol"; import {MerkleLib} from "@aztec/core/libraries/crypto/MerkleLib.sol"; -import {SignatureLib} from "@aztec/core/libraries/crypto/SignatureLib.sol"; +import {Signature} from "@aztec/core/libraries/crypto/SignatureLib.sol"; import {DataStructures} from "@aztec/core/libraries/DataStructures.sol"; -import {EpochProofQuoteLib} from "@aztec/core/libraries/EpochProofQuoteLib.sol"; import {Errors} from "@aztec/core/libraries/Errors.sol"; -import {FeeMath} from "@aztec/core/libraries/FeeMath.sol"; -import {HeaderLib} from "@aztec/core/libraries/HeaderLib.sol"; -import {ProposeArgs, ProposeLib} from "@aztec/core/libraries/ProposeLib.sol"; +import { + ExtRollupLib, + ValidateHeaderArgs, + Header, + SignedEpochProofQuote, + SubmitEpochRootProofInterimValues +} from "@aztec/core/libraries/RollupLibs/ExtRollupLib.sol"; +import {IntRollupLib, EpochProofQuote} from "@aztec/core/libraries/RollupLibs/IntRollupLib.sol"; +import {ProposeArgs, ProposeLib} from "@aztec/core/libraries/RollupLibs/ProposeLib.sol"; import {Timestamp, Slot, Epoch, SlotLib, EpochLib} from "@aztec/core/libraries/TimeMath.sol"; -import {TxsDecoder} from "@aztec/core/libraries/TxsDecoder.sol"; import {Inbox} from "@aztec/core/messagebridge/Inbox.sol"; import {Outbox} from "@aztec/core/messagebridge/Outbox.sol"; import {ProofCommitmentEscrow} from "@aztec/core/ProofCommitmentEscrow.sol"; import {IRewardDistributor} from "@aztec/governance/interfaces/IRewardDistributor.sol"; import {MockVerifier} from "@aztec/mock/MockVerifier.sol"; import {IERC20} from "@oz/token/ERC20/IERC20.sol"; -import {SafeERC20} from "@oz/token/ERC20/utils/SafeERC20.sol"; import {EIP712} from "@oz/utils/cryptography/EIP712.sol"; -import {Math} from "@oz/utils/math/Math.sol"; -import {SafeCast} from "@oz/utils/math/SafeCast.sol"; import {Vm} from "forge-std/Vm.sol"; -struct ChainTips { - uint256 pendingBlockNumber; - uint256 provenBlockNumber; -} - struct Config { uint256 
aztecSlotDuration; uint256 aztecEpochDuration; @@ -52,15 +51,6 @@ struct Config { uint256 aztecEpochProofClaimWindowInL2Slots; } -struct SubmitEpochRootProofInterimValues { - uint256 previousBlockNumber; - uint256 endBlockNumber; - Epoch epochToProve; - Epoch startEpoch; - bool isFeeCanonical; - bool isRewardDistributorCanonical; -} - /** * @title Rollup * @author Aztec Labs @@ -68,22 +58,11 @@ struct SubmitEpochRootProofInterimValues { * not giving a damn about gas costs. */ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { - using SafeCast for uint256; using SlotLib for Slot; using EpochLib for Epoch; - using SafeERC20 for IERC20; using ProposeLib for ProposeArgs; - using FeeMath for uint256; - using FeeMath for ManaBaseFeeComponents; - - struct L1GasOracleValues { - L1FeeData pre; - L1FeeData post; - Slot slotOfChange; - } - - uint256 internal constant BLOB_GAS_PER_BLOB = 2 ** 17; - uint256 internal constant GAS_PER_BLOB_POINT_EVALUATION = 50_000; + using IntRollupLib for uint256; + using IntRollupLib for ManaBaseFeeComponents; Slot public constant LIFETIME = Slot.wrap(5); Slot public constant LAG = Slot.wrap(2); @@ -109,27 +88,12 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { IRewardDistributor public immutable REWARD_DISTRIBUTOR; IERC20 public immutable ASSET; - IVerifier public epochProofVerifier; - - ChainTips public tips; - DataStructures.EpochProofClaim public proofClaim; - - // @todo Validate assumption: - // Currently we assume that the archive root following a block is specific to the block - // e.g., changing any values in the block or header should in the end make its way to the archive - // - // More direct approach would be storing keccak256(header) as well - mapping(uint256 blockNumber => BlockLog log) internal blocks; - - bytes32 public vkTreeRoot; - bytes32 public protocolContractTreeRoot; + RollupStore internal rollupStore; // @note Assume that all blocks up to this value (inclusive) are automatically proven. Speeds up bootstrapping. // Testing only. This should be removed eventually. 
uint256 private assumeProvenThroughBlockNumber; - L1GasOracleValues public l1GasOracleValues; - constructor( IFeeJuicePortal _fpcJuicePortal, IRewardDistributor _rewardDistributor, @@ -146,7 +110,7 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { _config.targetCommitteeSize ) { - epochProofVerifier = new MockVerifier(); + rollupStore.epochProofVerifier = new MockVerifier(); FEE_JUICE_PORTAL = _fpcJuicePortal; REWARD_DISTRIBUTOR = _rewardDistributor; ASSET = _fpcJuicePortal.UNDERLYING(); @@ -155,8 +119,8 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { ); INBOX = IInbox(address(new Inbox(address(this), Constants.L1_TO_L2_MSG_SUBTREE_HEIGHT))); OUTBOX = IOutbox(address(new Outbox(address(this)))); - vkTreeRoot = _vkTreeRoot; - protocolContractTreeRoot = _protocolContractTreeRoot; + rollupStore.vkTreeRoot = _vkTreeRoot; + rollupStore.protocolContractTreeRoot = _protocolContractTreeRoot; VERSION = 1; L1_BLOCK_AT_GENESIS = block.number; CLAIM_DURATION_IN_L2_SLOTS = _config.aztecEpochProofClaimWindowInL2Slots; @@ -164,7 +128,7 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { IS_FOUNDRY_TEST = VM_ADDRESS.code.length > 0; // Genesis block - blocks[0] = BlockLog({ + rollupStore.blocks[0] = BlockLog({ feeHeader: FeeHeader({ excessMana: 0, feeAssetPriceNumerator: 0, @@ -176,7 +140,7 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { blockHash: bytes32(0), // TODO(palla/prover): The first block does not have hash zero slotNumber: Slot.wrap(0) }); - l1GasOracleValues = L1GasOracleValues({ + rollupStore.l1GasOracleValues = L1GasOracleValues({ pre: L1FeeData({baseFee: 1 gwei, blobFee: 1}), post: L1FeeData({baseFee: block.basefee, blobFee: _getBlobBaseFee()}), slotOfChange: LIFETIME @@ -218,7 +182,7 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { * @param _verifier - The new verifier contract */ function setEpochVerifier(address _verifier) external override(ITestRollup) onlyOwner { - epochProofVerifier = IVerifier(_verifier); + rollupStore.epochProofVerifier = IVerifier(_verifier); } /** @@ -229,7 +193,7 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { * @param _vkTreeRoot - The new vkTreeRoot to be used by proofs */ function setVkTreeRoot(bytes32 _vkTreeRoot) external override(ITestRollup) onlyOwner { - vkTreeRoot = _vkTreeRoot; + rollupStore.vkTreeRoot = _vkTreeRoot; } /** @@ -244,7 +208,7 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { override(ITestRollup) onlyOwner { - protocolContractTreeRoot = _protocolContractTreeRoot; + rollupStore.protocolContractTreeRoot = _protocolContractTreeRoot; } /** @@ -257,9 +221,9 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { */ function proposeAndClaim( ProposeArgs calldata _args, - SignatureLib.Signature[] memory _signatures, + Signature[] memory _signatures, bytes calldata _body, - EpochProofQuoteLib.SignedEpochProofQuote calldata _quote + SignedEpochProofQuote calldata _quote ) external override(IRollup) { propose(_args, _signatures, _body); claimEpochProofRight(_quote); @@ -291,99 +255,43 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { _prune(); } + // We want to compute the two epoch values before hand. Could we do partial interim? 
+ // We compute these in here to avoid a lot of pain with linking libraries and passing + // external functions into internal functions as args. SubmitEpochRootProofInterimValues memory interimValues; - - interimValues.previousBlockNumber = tips.provenBlockNumber; + interimValues.previousBlockNumber = rollupStore.tips.provenBlockNumber; interimValues.endBlockNumber = interimValues.previousBlockNumber + _args.epochSize; - // @note The getEpochForBlock is expected to revert if the block is beyond pending. + // @note The _getEpochForBlock is expected to revert if the block is beyond pending. // If this changes you are gonna get so rekt you won't believe it. // I mean proving blocks that have been pruned rekt. - interimValues.epochToProve = getEpochForBlock(interimValues.endBlockNumber); interimValues.startEpoch = getEpochForBlock(interimValues.previousBlockNumber + 1); + interimValues.epochToProve = getEpochForBlock(interimValues.endBlockNumber); - // Ensure that the proof is not across epochs - require( - interimValues.startEpoch == interimValues.epochToProve, - Errors.Rollup__InvalidEpoch(interimValues.startEpoch, interimValues.epochToProve) + uint256 endBlockNumber = ExtRollupLib.submitEpochRootProof( + rollupStore, + _args, + interimValues, + PROOF_COMMITMENT_ESCROW, + FEE_JUICE_PORTAL, + REWARD_DISTRIBUTOR, + ASSET, + CUAUHXICALLI ); + emit L2ProofVerified(endBlockNumber, _args.args[6]); + } - bytes32[] memory publicInputs = - getEpochProofPublicInputs(_args.epochSize, _args.args, _args.fees, _args.aggregationObject); - - require(epochProofVerifier.verify(_args.proof, publicInputs), Errors.Rollup__InvalidProof()); - - if (proofClaim.epochToProve == interimValues.epochToProve) { - PROOF_COMMITMENT_ESCROW.unstakeBond(proofClaim.bondProvider, proofClaim.bondAmount); - } - - tips.provenBlockNumber = interimValues.endBlockNumber; - - // @note Only if the rollup is the canonical will it be able to meaningfully claim fees - // Otherwise, the fees are unbacked #7938. - interimValues.isFeeCanonical = address(this) == FEE_JUICE_PORTAL.canonicalRollup(); - interimValues.isRewardDistributorCanonical = - address(this) == REWARD_DISTRIBUTOR.canonicalRollup(); - - uint256 totalProverReward = 0; - uint256 totalBurn = 0; - - if (interimValues.isFeeCanonical || interimValues.isRewardDistributorCanonical) { - for (uint256 i = 0; i < _args.epochSize; i++) { - address coinbase = address(uint160(uint256(publicInputs[9 + i * 2]))); - uint256 reward = 0; - uint256 toProver = 0; - uint256 burn = 0; - - if (interimValues.isFeeCanonical) { - uint256 fees = uint256(publicInputs[10 + i * 2]); - if (fees > 0) { - // This is insanely expensive, and will be fixed as part of the general storage cost reduction. - // See #9826. - FeeHeader storage feeHeader = - blocks[interimValues.previousBlockNumber + 1 + i].feeHeader; - burn += feeHeader.congestionCost * feeHeader.manaUsed; - - reward += (fees - burn); - FEE_JUICE_PORTAL.distributeFees(address(this), fees); - } - } - - if (interimValues.isRewardDistributorCanonical) { - reward += REWARD_DISTRIBUTOR.claim(address(this)); - } - - if (coinbase == address(0)) { - toProver = reward; - } else { - // @note We are getting value from the `proofClaim`, which are not cleared. - // So if someone is posting the proof before a new claim is made, - // the reward will calculated based on the previous values. 
- toProver = Math.mulDiv(reward, proofClaim.basisPointFee, 10_000); - } - - uint256 toCoinbase = reward - toProver; - if (toCoinbase > 0) { - ASSET.safeTransfer(coinbase, toCoinbase); - } - - totalProverReward += toProver; - totalBurn += burn; - } - - if (totalProverReward > 0) { - // If there is a bond-provider give him the reward, otherwise give it to the submitter. - address proofRewardRecipient = - proofClaim.bondProvider == address(0) ? msg.sender : proofClaim.bondProvider; - ASSET.safeTransfer(proofRewardRecipient, totalProverReward); - } - - if (totalBurn > 0) { - ASSET.safeTransfer(CUAUHXICALLI, totalBurn); - } - } + function getProofClaim() + external + view + override(IRollup) + returns (DataStructures.EpochProofClaim memory) + { + return rollupStore.proofClaim; + } - emit L2ProofVerified(interimValues.endBlockNumber, _args.args[6]); + function getTips() external view override(IRollup) returns (ChainTips memory) { + return rollupStore.tips; } function status(uint256 _myHeaderBlockNumber) @@ -400,12 +308,35 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { ) { return ( - tips.provenBlockNumber, - blocks[tips.provenBlockNumber].archive, - tips.pendingBlockNumber, - blocks[tips.pendingBlockNumber].archive, + rollupStore.tips.provenBlockNumber, + rollupStore.blocks[rollupStore.tips.provenBlockNumber].archive, + rollupStore.tips.pendingBlockNumber, + rollupStore.blocks[rollupStore.tips.pendingBlockNumber].archive, archiveAt(_myHeaderBlockNumber), - getEpochForBlock(tips.provenBlockNumber) + getEpochForBlock(rollupStore.tips.provenBlockNumber) + ); + } + + /** + * @notice Returns the computed public inputs for the given epoch proof. + * + * @dev Useful for debugging and testing. Allows submitter to compare their + * own public inputs used for generating the proof vs the ones assembled + * by this contract when verifying it. + * + * @param _epochSize - The size of the epoch (to be promoted to a constant) + * @param _args - Array of public inputs to the proof (previousArchive, endArchive, previousBlockHash, endBlockHash, endTimestamp, outHash, proverId) + * @param _fees - Array of recipient-value pairs with fees to be distributed for the epoch + * @param _aggregationObject - The aggregation object for the proof + */ + function getEpochProofPublicInputs( + uint256 _epochSize, + bytes32[7] calldata _args, + bytes32[] calldata _fees, + bytes calldata _aggregationObject + ) external view override(IRollup) returns (bytes32[] memory) { + return ExtRollupLib.getEpochProofPublicInputs( + rollupStore, _epochSize, _args, _fees, _aggregationObject ); } @@ -428,17 +359,17 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { // Consider if a prune will hit in this slot uint256 pendingBlockNumber = - canPruneAtTime(_ts) ? tips.provenBlockNumber : tips.pendingBlockNumber; + canPruneAtTime(_ts) ? 
rollupStore.tips.provenBlockNumber : rollupStore.tips.pendingBlockNumber; - Slot lastSlot = blocks[pendingBlockNumber].slotNumber; + Slot lastSlot = rollupStore.blocks[pendingBlockNumber].slotNumber; require(slot > lastSlot, Errors.Rollup__SlotAlreadyInChain(lastSlot, slot)); // Make sure that the proposer is up to date and on the right chain (ie no reorgs) - bytes32 tipArchive = blocks[pendingBlockNumber].archive; + bytes32 tipArchive = rollupStore.blocks[pendingBlockNumber].archive; require(tipArchive == _archive, Errors.Rollup__InvalidArchive(tipArchive, _archive)); - SignatureLib.Signature[] memory sigs = new SignatureLib.Signature[](0); + Signature[] memory sigs = new Signature[](0); DataStructures.ExecutionFlags memory flags = DataStructures.ExecutionFlags({ignoreDA: true, ignoreSignatures: true}); _validateLeonidas(slot, sigs, _archive, flags); @@ -460,14 +391,14 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { */ function validateHeader( bytes calldata _header, - SignatureLib.Signature[] memory _signatures, + Signature[] memory _signatures, bytes32 _digest, Timestamp _currentTime, bytes32 _txsEffectsHash, DataStructures.ExecutionFlags memory _flags ) external view override(IRollup) { uint256 manaBaseFee = getManaBaseFeeAt(_currentTime, true); - HeaderLib.Header memory header = HeaderLib.decode(_header); + Header memory header = ExtRollupLib.decodeHeader(_header); _validateHeader( header, _signatures, _digest, _currentTime, manaBaseFee, _txsEffectsHash, _flags ); @@ -481,12 +412,12 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { Epoch epochToProve = getEpochToProve(); require( // If the epoch has been claimed, it cannot be claimed again - proofClaim.epochToProve != epochToProve + rollupStore.proofClaim.epochToProve != epochToProve // Edge case for if no claim has been made yet. // We know that the bondProvider is always set, // Since otherwise the claimEpochProofRight would have reverted, // because the zero address cannot have deposited funds into escrow. - || proofClaim.bondProvider == address(0), + || rollupStore.proofClaim.bondProvider == address(0), Errors.Rollup__ProofRightAlreadyClaimed() ); return epochToProve; @@ -498,13 +429,10 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { override(IRollup) returns (bytes32) { - return TxsDecoder.decode(_body); + return ExtRollupLib.computeTxsEffectsHash(_body); } - function claimEpochProofRight(EpochProofQuoteLib.SignedEpochProofQuote calldata _quote) - public - override(IRollup) - { + function claimEpochProofRight(SignedEpochProofQuote calldata _quote) public override(IRollup) { validateEpochProofRightClaimAtTime(Timestamp.wrap(block.timestamp), _quote); Slot currentSlot = getCurrentSlot(); @@ -515,7 +443,7 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { // Blocked on submitting epoch proofs to this contract. 
PROOF_COMMITMENT_ESCROW.stakeBond(_quote.quote.prover, _quote.quote.bondAmount); - proofClaim = DataStructures.EpochProofClaim({ + rollupStore.proofClaim = DataStructures.EpochProofClaim({ epochToProve: epochToProve, basisPointFee: _quote.quote.basisPointFee, bondAmount: _quote.quote.bondAmount, @@ -536,11 +464,10 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { * @param _signatures - Signatures from the validators * @param _body - The body of the L2 block */ - function propose( - ProposeArgs calldata _args, - SignatureLib.Signature[] memory _signatures, - bytes calldata _body - ) public override(IRollup) { + function propose(ProposeArgs calldata _args, Signature[] memory _signatures, bytes calldata _body) + public + override(IRollup) + { if (canPrune()) { _prune(); } @@ -549,15 +476,15 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { // The `body` is passed outside the "args" as it does not directly need to be in the digest // as long as the `txsEffectsHash` is included and matches what is in the header. // Which we are checking in the `_validateHeader` call below. - bytes32 txsEffectsHash = TxsDecoder.decode(_body); + bytes32 txsEffectsHash = ExtRollupLib.computeTxsEffectsHash(_body); // Decode and validate header - HeaderLib.Header memory header = HeaderLib.decode(_args.header); + Header memory header = ExtRollupLib.decodeHeader(_args.header); setupEpoch(); ManaBaseFeeComponents memory components = getManaBaseFeeComponentsAt(Timestamp.wrap(block.timestamp), true); - uint256 manaBaseFee = FeeMath.summedBaseFee(components); + uint256 manaBaseFee = components.summedBaseFee(); _validateHeader({ _header: header, _signatures: _signatures, @@ -568,15 +495,13 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { _flags: DataStructures.ExecutionFlags({ignoreDA: false, ignoreSignatures: false}) }); - uint256 blockNumber = ++tips.pendingBlockNumber; + uint256 blockNumber = ++rollupStore.tips.pendingBlockNumber; { - FeeHeader memory parentFeeHeader = blocks[blockNumber - 1].feeHeader; - uint256 excessMana = (parentFeeHeader.excessMana + parentFeeHeader.manaUsed).clampedAdd( - -int256(FeeMath.MANA_TARGET) - ); + FeeHeader memory parentFeeHeader = rollupStore.blocks[blockNumber - 1].feeHeader; + uint256 excessMana = IntRollupLib.computeExcessMana(parentFeeHeader); - blocks[blockNumber] = BlockLog({ + rollupStore.blocks[blockNumber] = BlockLog({ archive: _args.archive, blockHash: _args.blockHash, slotNumber: Slot.wrap(header.globalVariables.slotNumber), @@ -639,15 +564,16 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { function updateL1GasFeeOracle() public override(IRollup) { Slot slot = getCurrentSlot(); // The slot where we find a new queued value acceptable - Slot acceptableSlot = l1GasOracleValues.slotOfChange + (LIFETIME - LAG); + Slot acceptableSlot = rollupStore.l1GasOracleValues.slotOfChange + (LIFETIME - LAG); if (slot < acceptableSlot) { return; } - l1GasOracleValues.pre = l1GasOracleValues.post; - l1GasOracleValues.post = L1FeeData({baseFee: block.basefee, blobFee: _getBlobBaseFee()}); - l1GasOracleValues.slotOfChange = slot + LAG; + rollupStore.l1GasOracleValues.pre = rollupStore.l1GasOracleValues.post; + rollupStore.l1GasOracleValues.post = + L1FeeData({baseFee: block.basefee, blobFee: _getBlobBaseFee()}); + rollupStore.l1GasOracleValues.slotOfChange = slot + LAG; } /** @@ -656,8 +582,8 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, 
IRollup, ITestRollup { * @return The fee asset price */ function getFeeAssetPrice() public view override(IRollup) returns (uint256) { - return FeeMath.feeAssetPriceModifier( - blocks[tips.pendingBlockNumber].feeHeader.feeAssetPriceNumerator + return IntRollupLib.feeAssetPriceModifier( + rollupStore.blocks[rollupStore.tips.pendingBlockNumber].feeHeader.feeAssetPriceNumerator ); } @@ -668,10 +594,10 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { returns (L1FeeData memory) { Slot slot = getSlotAt(_timestamp); - if (slot < l1GasOracleValues.slotOfChange) { - return l1GasOracleValues.pre; + if (slot < rollupStore.l1GasOracleValues.slotOfChange) { + return rollupStore.l1GasOracleValues.pre; } - return l1GasOracleValues.post; + return rollupStore.l1GasOracleValues.post; } /** @@ -709,244 +635,49 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { returns (ManaBaseFeeComponents memory) { // If we can prune, we use the proven block, otherwise the pending block - uint256 blockOfInterest = - canPruneAtTime(_timestamp) ? tips.provenBlockNumber : tips.pendingBlockNumber; - - FeeHeader storage parentFeeHeader = blocks[blockOfInterest].feeHeader; - uint256 excessMana = (parentFeeHeader.excessMana + parentFeeHeader.manaUsed).clampedAdd( - -int256(FeeMath.MANA_TARGET) + uint256 blockOfInterest = canPruneAtTime(_timestamp) + ? rollupStore.tips.provenBlockNumber + : rollupStore.tips.pendingBlockNumber; + + return ExtRollupLib.getManaBaseFeeComponentsAt( + rollupStore.blocks[blockOfInterest].feeHeader, + getL1FeesAt(_timestamp), + _inFeeAsset ? getFeeAssetPrice() : 1e9, + EPOCH_DURATION ); - - L1FeeData memory fees = getL1FeesAt(_timestamp); - uint256 dataCost = - Math.mulDiv(3 * BLOB_GAS_PER_BLOB, fees.blobFee, FeeMath.MANA_TARGET, Math.Rounding.Ceil); - uint256 gasUsed = FeeMath.L1_GAS_PER_BLOCK_PROPOSED + 3 * GAS_PER_BLOB_POINT_EVALUATION - + FeeMath.L1_GAS_PER_EPOCH_VERIFIED / EPOCH_DURATION; - uint256 gasCost = Math.mulDiv(gasUsed, fees.baseFee, FeeMath.MANA_TARGET, Math.Rounding.Ceil); - uint256 provingCost = FeeMath.provingCostPerMana( - blocks[tips.pendingBlockNumber].feeHeader.provingCostPerManaNumerator - ); - - uint256 congestionMultiplier = FeeMath.congestionMultiplier(excessMana); - uint256 total = dataCost + gasCost + provingCost; - uint256 congestionCost = Math.mulDiv( - total, congestionMultiplier, FeeMath.MINIMUM_CONGESTION_MULTIPLIER, Math.Rounding.Floor - ) - total; - - uint256 feeAssetPrice = _inFeeAsset ? getFeeAssetPrice() : 1e9; - - // @todo @lherskind. The following is a crime against humanity, but it makes it - // very neat to plot etc from python, #10004 will fix it across the board - return ManaBaseFeeComponents({ - dataCost: Math.mulDiv(dataCost, feeAssetPrice, 1e9, Math.Rounding.Ceil), - gasCost: Math.mulDiv(gasCost, feeAssetPrice, 1e9, Math.Rounding.Ceil), - provingCost: Math.mulDiv(provingCost, feeAssetPrice, 1e9, Math.Rounding.Ceil), - congestionCost: Math.mulDiv(congestionCost, feeAssetPrice, 1e9, Math.Rounding.Ceil), - congestionMultiplier: congestionMultiplier - }); } - function quoteToDigest(EpochProofQuoteLib.EpochProofQuote memory _quote) + function quoteToDigest(EpochProofQuote memory _quote) public view override(IRollup) returns (bytes32) { - return _hashTypedDataV4(EpochProofQuoteLib.hash(_quote)); - } - - /** - * @notice Returns the computed public inputs for the given epoch proof. - * - * @dev Useful for debugging and testing. 
Allows submitter to compare their - * own public inputs used for generating the proof vs the ones assembled - * by this contract when verifying it. - * - * @param _epochSize - The size of the epoch (to be promoted to a constant) - * @param _args - Array of public inputs to the proof (previousArchive, endArchive, previousBlockHash, endBlockHash, endTimestamp, outHash, proverId) - * @param _fees - Array of recipient-value pairs with fees to be distributed for the epoch - * @param _aggregationObject - The aggregation object for the proof - */ - function getEpochProofPublicInputs( - uint256 _epochSize, - bytes32[7] calldata _args, - bytes32[] calldata _fees, - bytes calldata _aggregationObject - ) public view override(IRollup) returns (bytes32[] memory) { - uint256 previousBlockNumber = tips.provenBlockNumber; - uint256 endBlockNumber = previousBlockNumber + _epochSize; - - // Args are defined as an array because Solidity complains with "stack too deep" otherwise - // 0 bytes32 _previousArchive, - // 1 bytes32 _endArchive, - // 2 bytes32 _previousBlockHash, - // 3 bytes32 _endBlockHash, - // 4 bytes32 _endTimestamp, - // 5 bytes32 _outHash, - // 6 bytes32 _proverId, - - // TODO(#7373): Public inputs are not fully verified - - { - // We do it this way to provide better error messages than passing along the storage values - bytes32 expectedPreviousArchive = blocks[previousBlockNumber].archive; - require( - expectedPreviousArchive == _args[0], - Errors.Rollup__InvalidPreviousArchive(expectedPreviousArchive, _args[0]) - ); - - bytes32 expectedEndArchive = blocks[endBlockNumber].archive; - require( - expectedEndArchive == _args[1], Errors.Rollup__InvalidArchive(expectedEndArchive, _args[1]) - ); - - bytes32 expectedPreviousBlockHash = blocks[previousBlockNumber].blockHash; - // TODO: Remove 0 check once we inject the proper genesis block hash - require( - expectedPreviousBlockHash == 0 || expectedPreviousBlockHash == _args[2], - Errors.Rollup__InvalidPreviousBlockHash(expectedPreviousBlockHash, _args[2]) - ); - - bytes32 expectedEndBlockHash = blocks[endBlockNumber].blockHash; - require( - expectedEndBlockHash == _args[3], - Errors.Rollup__InvalidBlockHash(expectedEndBlockHash, _args[3]) - ); - } - - bytes32[] memory publicInputs = new bytes32[]( - Constants.ROOT_ROLLUP_PUBLIC_INPUTS_LENGTH + Constants.AGGREGATION_OBJECT_LENGTH - ); - - // Structure of the root rollup public inputs we need to reassemble: - // - // struct RootRollupPublicInputs { - // previous_archive: AppendOnlyTreeSnapshot, - // end_archive: AppendOnlyTreeSnapshot, - // previous_block_hash: Field, - // end_block_hash: Field, - // end_timestamp: u64, - // end_block_number: Field, - // out_hash: Field, - // fees: [FeeRecipient; Constants.AZTEC_EPOCH_DURATION], - // vk_tree_root: Field, - // protocol_contract_tree_root: Field, - // prover_id: Field - // } - - // previous_archive.root: the previous archive tree root - publicInputs[0] = _args[0]; - - // previous_archive.next_available_leaf_index: the previous archive next available index - // normally this should be equal to the block number (since leaves are 0-indexed and blocks 1-indexed) - // but in yarn-project/merkle-tree/src/new_tree.ts we prefill the tree so that block N is in leaf N - publicInputs[1] = bytes32(previousBlockNumber + 1); - - // end_archive.root: the new archive tree root - publicInputs[2] = _args[1]; - - // end_archive.next_available_leaf_index: the new archive next available index - publicInputs[3] = bytes32(endBlockNumber + 1); - - // previous_block_hash: the 
block hash just preceding this epoch - publicInputs[4] = _args[2]; - - // end_block_hash: the last block hash in the epoch - publicInputs[5] = _args[3]; - - // end_timestamp: the timestamp of the last block in the epoch - publicInputs[6] = _args[4]; - - // end_block_number: last block number in the epoch - publicInputs[7] = bytes32(endBlockNumber); - - // out_hash: root of this epoch's l2 to l1 message tree - publicInputs[8] = _args[5]; - - uint256 feesLength = Constants.AZTEC_MAX_EPOCH_DURATION * 2; - // fees[9 to (9+feesLength-1)]: array of recipient-value pairs - for (uint256 i = 0; i < feesLength; i++) { - publicInputs[9 + i] = _fees[i]; - } - uint256 feesEnd = 9 + feesLength; - - // vk_tree_root - publicInputs[feesEnd] = vkTreeRoot; - - // protocol_contract_tree_root - publicInputs[feesEnd + 1] = protocolContractTreeRoot; - - // prover_id: id of current epoch's prover - publicInputs[feesEnd + 2] = _args[6]; - - // the block proof is recursive, which means it comes with an aggregation object - // this snippet copies it into the public inputs needed for verification - // it also guards against empty _aggregationObject used with mocked proofs - uint256 aggregationLength = _aggregationObject.length / 32; - for (uint256 i = 0; i < Constants.AGGREGATION_OBJECT_LENGTH && i < aggregationLength; i++) { - bytes32 part; - assembly { - part := calldataload(add(_aggregationObject.offset, mul(i, 32))) - } - publicInputs[i + feesEnd + 3] = part; - } - - return publicInputs; + return _hashTypedDataV4(IntRollupLib.computeQuoteHash(_quote)); } - function validateEpochProofRightClaimAtTime( - Timestamp _ts, - EpochProofQuoteLib.SignedEpochProofQuote calldata _quote - ) public view override(IRollup) { - SignatureLib.verify(_quote.signature, _quote.quote.prover, quoteToDigest(_quote.quote)); - + function validateEpochProofRightClaimAtTime(Timestamp _ts, SignedEpochProofQuote calldata _quote) + public + view + override(IRollup) + { Slot currentSlot = getSlotAt(_ts); address currentProposer = getProposerAt(_ts); Epoch epochToProve = getEpochToProve(); - - require( - _quote.quote.validUntilSlot >= currentSlot, - Errors.Rollup__QuoteExpired(currentSlot, _quote.quote.validUntilSlot) - ); - - require( - _quote.quote.basisPointFee <= 10_000, - Errors.Rollup__InvalidBasisPointFee(_quote.quote.basisPointFee) - ); - - require( - currentProposer == address(0) || currentProposer == msg.sender, - Errors.Leonidas__InvalidProposer(currentProposer, msg.sender) - ); - - require( - _quote.quote.epochToProve == epochToProve, - Errors.Rollup__NotClaimingCorrectEpoch(epochToProve, _quote.quote.epochToProve) - ); - - require( - positionInEpoch(currentSlot) < CLAIM_DURATION_IN_L2_SLOTS, - Errors.Rollup__NotInClaimPhase(positionInEpoch(currentSlot), CLAIM_DURATION_IN_L2_SLOTS) - ); - - // if the epoch to prove is not the one that has been claimed, - // then whatever is in the proofClaim is stale - require( - proofClaim.epochToProve != epochToProve || proofClaim.proposerClaimant == address(0), - Errors.Rollup__ProofRightAlreadyClaimed() - ); - - require( - _quote.quote.bondAmount >= PROOF_COMMITMENT_MIN_BOND_AMOUNT_IN_TST, - Errors.Rollup__InsufficientBondAmount( - PROOF_COMMITMENT_MIN_BOND_AMOUNT_IN_TST, _quote.quote.bondAmount - ) - ); - - uint256 availableFundsInEscrow = PROOF_COMMITMENT_ESCROW.deposits(_quote.quote.prover); - require( - _quote.quote.bondAmount <= availableFundsInEscrow, - Errors.Rollup__InsufficientFundsInEscrow(_quote.quote.bondAmount, availableFundsInEscrow) + uint256 posInEpoch = 
positionInEpoch(currentSlot); + bytes32 digest = quoteToDigest(_quote.quote); + + ExtRollupLib.validateEpochProofRightClaimAtTime( + currentSlot, + currentProposer, + epochToProve, + posInEpoch, + _quote, + digest, + rollupStore.proofClaim, + CLAIM_DURATION_IN_L2_SLOTS, + PROOF_COMMITMENT_MIN_BOND_AMOUNT_IN_TST, + PROOF_COMMITMENT_ESCROW ); } @@ -956,31 +687,31 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { * @return bytes32 - The current archive root */ function archive() public view override(IRollup) returns (bytes32) { - return blocks[tips.pendingBlockNumber].archive; + return rollupStore.blocks[rollupStore.tips.pendingBlockNumber].archive; } function getProvenBlockNumber() public view override(IRollup) returns (uint256) { - return tips.provenBlockNumber; + return rollupStore.tips.provenBlockNumber; } function getPendingBlockNumber() public view override(IRollup) returns (uint256) { - return tips.pendingBlockNumber; + return rollupStore.tips.pendingBlockNumber; } function getBlock(uint256 _blockNumber) public view override(IRollup) returns (BlockLog memory) { require( - _blockNumber <= tips.pendingBlockNumber, - Errors.Rollup__InvalidBlockNumber(tips.pendingBlockNumber, _blockNumber) + _blockNumber <= rollupStore.tips.pendingBlockNumber, + Errors.Rollup__InvalidBlockNumber(rollupStore.tips.pendingBlockNumber, _blockNumber) ); - return blocks[_blockNumber]; + return rollupStore.blocks[_blockNumber]; } function getEpochForBlock(uint256 _blockNumber) public view override(IRollup) returns (Epoch) { require( - _blockNumber <= tips.pendingBlockNumber, - Errors.Rollup__InvalidBlockNumber(tips.pendingBlockNumber, _blockNumber) + _blockNumber <= rollupStore.tips.pendingBlockNumber, + Errors.Rollup__InvalidBlockNumber(rollupStore.tips.pendingBlockNumber, _blockNumber) ); - return getEpochAt(getTimestampForSlot(blocks[_blockNumber].slotNumber)); + return getEpochAt(getTimestampForSlot(rollupStore.blocks[_blockNumber].slotNumber)); } /** @@ -993,8 +724,11 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { * @return uint256 - The epoch to prove */ function getEpochToProve() public view override(IRollup) returns (Epoch) { - require(tips.provenBlockNumber != tips.pendingBlockNumber, Errors.Rollup__NoEpochToProve()); - return getEpochForBlock(getProvenBlockNumber() + 1); + require( + rollupStore.tips.provenBlockNumber != rollupStore.tips.pendingBlockNumber, + Errors.Rollup__NoEpochToProve() + ); + return getEpochForBlock(rollupStore.tips.provenBlockNumber + 1); } /** @@ -1005,8 +739,8 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { * @return bytes32 - The archive root of the block */ function archiveAt(uint256 _blockNumber) public view override(IRollup) returns (bytes32) { - if (_blockNumber <= tips.pendingBlockNumber) { - return blocks[_blockNumber].archive; + if (_blockNumber <= rollupStore.tips.pendingBlockNumber) { + return rollupStore.blocks[_blockNumber].archive; } return bytes32(0); } @@ -1017,14 +751,14 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { function canPruneAtTime(Timestamp _ts) public view override(IRollup) returns (bool) { if ( - tips.pendingBlockNumber == tips.provenBlockNumber - || tips.pendingBlockNumber <= assumeProvenThroughBlockNumber + rollupStore.tips.pendingBlockNumber == rollupStore.tips.provenBlockNumber + || rollupStore.tips.pendingBlockNumber <= assumeProvenThroughBlockNumber ) { return false; } Slot currentSlot = getSlotAt(_ts); 
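// A minimal sketch, for illustration only, of the storage pattern the getters rewritten
// above now follow: all chain state lives in one struct and every read goes through it.
// The types below are simplified stand-ins, not the real ChainTips/BlockLog/RollupStore.
pragma solidity >=0.8.27;

struct TipsSketch {
  uint256 pendingBlockNumber;
  uint256 provenBlockNumber;
}

struct StoreSketch {
  mapping(uint256 blockNumber => bytes32 archive) archives;
  TipsSketch tips;
}

contract RollupStoreSketch {
  StoreSketch internal store;

  // Mirrors reads such as `rollupStore.blocks[rollupStore.tips.pendingBlockNumber].archive`.
  function archive() external view returns (bytes32) {
    return store.archives[store.tips.pendingBlockNumber];
  }

  function getPendingBlockNumber() external view returns (uint256) {
    return store.tips.pendingBlockNumber;
  }
}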
- Epoch oldestPendingEpoch = getEpochForBlock(tips.provenBlockNumber + 1); + Epoch oldestPendingEpoch = getEpochForBlock(rollupStore.tips.provenBlockNumber + 1); Slot startSlotOfPendingEpoch = toSlots(oldestPendingEpoch); // suppose epoch 1 is proven, epoch 2 is pending, epoch 3 is the current epoch. @@ -1035,7 +769,8 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { < startSlotOfPendingEpoch + toSlots(Epoch.wrap(1)) + Slot.wrap(CLAIM_DURATION_IN_L2_SLOTS); bool claimExists = currentSlot < startSlotOfPendingEpoch + toSlots(Epoch.wrap(2)) - && proofClaim.epochToProve == oldestPendingEpoch && proofClaim.proposerClaimant != address(0); + && rollupStore.proofClaim.epochToProve == oldestPendingEpoch + && rollupStore.proofClaim.proposerClaimant != address(0); if (inClaimPhase || claimExists) { // If we are in the claim phase, do not prune @@ -1046,17 +781,17 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { function _prune() internal { // TODO #8656 - delete proofClaim; + delete rollupStore.proofClaim; - uint256 pending = tips.pendingBlockNumber; + uint256 pending = rollupStore.tips.pendingBlockNumber; // @note We are not deleting the blocks, but we are "winding back" the pendingTip to the last block that was proven. // We can do because any new block proposed will overwrite a previous block in the block log, // so no values should "survive". // People must therefore read the chain using the pendingTip as a boundary. - tips.pendingBlockNumber = tips.provenBlockNumber; + rollupStore.tips.pendingBlockNumber = rollupStore.tips.provenBlockNumber; - emit PrunedPending(tips.provenBlockNumber, pending); + emit PrunedPending(rollupStore.tips.provenBlockNumber, pending); } /** @@ -1070,18 +805,31 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { * @param _flags - Flags specific to the execution, whether certain checks should be skipped */ function _validateHeader( - HeaderLib.Header memory _header, - SignatureLib.Signature[] memory _signatures, + Header memory _header, + Signature[] memory _signatures, bytes32 _digest, Timestamp _currentTime, uint256 _manaBaseFee, bytes32 _txEffectsHash, DataStructures.ExecutionFlags memory _flags ) internal view { - uint256 pendingBlockNumber = - canPruneAtTime(_currentTime) ? tips.provenBlockNumber : tips.pendingBlockNumber; - _validateHeaderForSubmissionBase( - _header, _currentTime, _manaBaseFee, _txEffectsHash, pendingBlockNumber, _flags + uint256 pendingBlockNumber = canPruneAtTime(_currentTime) + ? 
rollupStore.tips.provenBlockNumber + : rollupStore.tips.pendingBlockNumber; + + ExtRollupLib.validateHeaderForSubmissionBase( + ValidateHeaderArgs({ + header: _header, + currentTime: _currentTime, + manaBaseFee: _manaBaseFee, + txsEffectsHash: _txEffectsHash, + pendingBlockNumber: pendingBlockNumber, + flags: _flags, + version: VERSION, + feeJuicePortal: FEE_JUICE_PORTAL, + getTimestampForSlot: this.getTimestampForSlot + }), + rollupStore.blocks ); _validateHeaderForSubmissionSequencerSelection( Slot.wrap(_header.globalVariables.slotNumber), _signatures, _digest, _currentTime, _flags @@ -1106,7 +854,7 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { */ function _validateHeaderForSubmissionSequencerSelection( Slot _slot, - SignatureLib.Signature[] memory _signatures, + Signature[] memory _signatures, bytes32 _digest, Timestamp _currentTime, DataStructures.ExecutionFlags memory _flags @@ -1127,100 +875,21 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { _validateLeonidas(_slot, _signatures, _digest, _flags); } - /** - * @notice Validate a header for submission to the pending chain (base checks) - * Base checks here being the checks that we wish to do regardless of the sequencer - * selection mechanism. - * - * Each of the following validation checks must pass, otherwise an error is thrown and we revert. - * - The chain ID MUST match the current chain ID - * - The version MUST match the current version - * - The block id MUST be the next block in the chain - * - The last archive root in the header MUST match the current archive - * - The slot MUST be larger than the slot of the previous block (ensures single block per slot) - * - The timestamp MUST be equal to GENESIS_TIME + slot * SLOT_DURATION - * - The `txsEffectsHash` of the header must match the computed `_txsEffectsHash` - * - This can be relaxed to happen at the time of `submitProof` instead - * - * @param _header - The header to validate - */ - function _validateHeaderForSubmissionBase( - HeaderLib.Header memory _header, - Timestamp _currentTime, - uint256 _manaBaseFee, - bytes32 _txsEffectsHash, - uint256 _pendingBlockNumber, - DataStructures.ExecutionFlags memory _flags - ) internal view { - require( - block.chainid == _header.globalVariables.chainId, - Errors.Rollup__InvalidChainId(block.chainid, _header.globalVariables.chainId) - ); - - require( - _header.globalVariables.version == VERSION, - Errors.Rollup__InvalidVersion(VERSION, _header.globalVariables.version) - ); - - require( - _header.globalVariables.blockNumber == _pendingBlockNumber + 1, - Errors.Rollup__InvalidBlockNumber( - _pendingBlockNumber + 1, _header.globalVariables.blockNumber - ) - ); - - bytes32 tipArchive = blocks[_pendingBlockNumber].archive; - require( - tipArchive == _header.lastArchive.root, - Errors.Rollup__InvalidArchive(tipArchive, _header.lastArchive.root) - ); - - Slot slot = Slot.wrap(_header.globalVariables.slotNumber); - Slot lastSlot = blocks[_pendingBlockNumber].slotNumber; - require(slot > lastSlot, Errors.Rollup__SlotAlreadyInChain(lastSlot, slot)); - - Timestamp timestamp = getTimestampForSlot(slot); - require( - Timestamp.wrap(_header.globalVariables.timestamp) == timestamp, - Errors.Rollup__InvalidTimestamp(timestamp, Timestamp.wrap(_header.globalVariables.timestamp)) - ); - - // @note If you are hitting this error, it is likely because the chain you use have a blocktime that differs - // from the value that we have in the constants. 
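// A minimal sketch, for illustration only, of the call shape used above in _validateHeader:
// an external library function receiving a storage mapping by reference, so the linked
// library operates directly on the caller's storage with no copy. Names are illustrative.
pragma solidity >=0.8.27;

library BlockLookupSketch {
  function archiveAt(mapping(uint256 => bytes32) storage _blocks, uint256 _blockNumber)
    external
    view
    returns (bytes32)
  {
    return _blocks[_blockNumber];
  }
}

contract BlockLookupCallerSketch {
  mapping(uint256 => bytes32) internal blocks;

  function readArchive(uint256 _blockNumber) external view returns (bytes32) {
    return BlockLookupSketch.archiveAt(blocks, _blockNumber);
  }
}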
- // When you are encountering this, it will likely be as the sequencer expects to be able to include - // an Aztec block in the "next" ethereum block based on a timestamp that is 12 seconds in the future - // from the last block. However, if the actual will only be 1 second in the future, you will end up - // expecting this value to be in the future. - require(timestamp <= _currentTime, Errors.Rollup__TimestampInFuture(_currentTime, timestamp)); - - // Check if the data is available - require( - _flags.ignoreDA || _header.contentCommitment.txsEffectsHash == _txsEffectsHash, - Errors.Rollup__UnavailableTxs(_header.contentCommitment.txsEffectsHash) - ); - - // If not canonical rollup, require that the fees are zero - if (address(this) != FEE_JUICE_PORTAL.canonicalRollup()) { - require(_header.globalVariables.gasFees.feePerDaGas == 0, Errors.Rollup__NonZeroDaFee()); - require(_header.globalVariables.gasFees.feePerL2Gas == 0, Errors.Rollup__NonZeroL2Fee()); - } else { - require(_header.globalVariables.gasFees.feePerDaGas == 0, Errors.Rollup__NonZeroDaFee()); - require( - _header.globalVariables.gasFees.feePerL2Gas == _manaBaseFee, - Errors.Rollup__InvalidManaBaseFee(_manaBaseFee, _header.globalVariables.gasFees.feePerL2Gas) - ); - } - } - function _fakeBlockNumberAsProven(uint256 _blockNumber) private { - if (_blockNumber > tips.provenBlockNumber && _blockNumber <= tips.pendingBlockNumber) { - tips.provenBlockNumber = _blockNumber; + if ( + _blockNumber > rollupStore.tips.provenBlockNumber + && _blockNumber <= rollupStore.tips.pendingBlockNumber + ) { + rollupStore.tips.provenBlockNumber = _blockNumber; // If this results on a new epoch, create a fake claim for it // Otherwise nextEpochToProve will report an old epoch Epoch epoch = getEpochForBlock(_blockNumber); - if (Epoch.unwrap(epoch) == 0 || Epoch.unwrap(epoch) > Epoch.unwrap(proofClaim.epochToProve)) { - proofClaim = DataStructures.EpochProofClaim({ + if ( + Epoch.unwrap(epoch) == 0 + || Epoch.unwrap(epoch) > Epoch.unwrap(rollupStore.proofClaim.epochToProve) + ) { + rollupStore.proofClaim = DataStructures.EpochProofClaim({ epochToProve: epoch, basisPointFee: 0, bondAmount: 0, diff --git a/l1-contracts/src/core/interfaces/ILeonidas.sol b/l1-contracts/src/core/interfaces/ILeonidas.sol index ece101d7277..256abed990e 100644 --- a/l1-contracts/src/core/interfaces/ILeonidas.sol +++ b/l1-contracts/src/core/interfaces/ILeonidas.sol @@ -3,6 +3,27 @@ pragma solidity >=0.8.27; import {Timestamp, Slot, Epoch} from "@aztec/core/libraries/TimeMath.sol"; +import {EnumerableSet} from "@oz/utils/structs/EnumerableSet.sol"; + +/** + * @notice The data structure for an epoch + * @param committee - The validator set for the epoch + * @param sampleSeed - The seed used to sample the validator set of the epoch + * @param nextSeed - The seed used to influence the NEXT epoch + */ +struct EpochData { + address[] committee; + uint256 sampleSeed; + uint256 nextSeed; +} + +struct LeonidasStorage { + EnumerableSet.AddressSet validatorSet; + // A mapping to snapshots of the validator set + mapping(Epoch => EpochData) epochs; + // The last stored randao value, same value as `seed` in the last inserted epoch + uint256 lastSeed; +} interface ILeonidas { // Changing depending on sybil mechanism and slashing enforcement diff --git a/l1-contracts/src/core/interfaces/IRollup.sol b/l1-contracts/src/core/interfaces/IRollup.sol index f52266dfe8a..387fe706175 100644 --- a/l1-contracts/src/core/interfaces/IRollup.sol +++ b/l1-contracts/src/core/interfaces/IRollup.sol @@ -2,13 
+2,19 @@ // Copyright 2024 Aztec Labs. pragma solidity >=0.8.27; +import {IVerifier} from "@aztec/core/interfaces/IVerifier.sol"; import {IInbox} from "@aztec/core/interfaces/messagebridge/IInbox.sol"; import {IOutbox} from "@aztec/core/interfaces/messagebridge/IOutbox.sol"; -import {SignatureLib} from "@aztec/core/libraries/crypto/SignatureLib.sol"; +import {Signature} from "@aztec/core/libraries/crypto/SignatureLib.sol"; import {DataStructures} from "@aztec/core/libraries/DataStructures.sol"; -import {EpochProofQuoteLib} from "@aztec/core/libraries/EpochProofQuoteLib.sol"; -import {ManaBaseFeeComponents} from "@aztec/core/libraries/FeeMath.sol"; -import {ProposeArgs} from "@aztec/core/libraries/ProposeLib.sol"; +import { + EpochProofQuote, + SignedEpochProofQuote +} from "@aztec/core/libraries/RollupLibs/EpochProofQuoteLib.sol"; +import { + FeeHeader, L1FeeData, ManaBaseFeeComponents +} from "@aztec/core/libraries/RollupLibs/FeeMath.sol"; +import {ProposeArgs} from "@aztec/core/libraries/RollupLibs/ProposeLib.sol"; import {Timestamp, Slot, Epoch} from "@aztec/core/libraries/TimeMath.sol"; struct SubmitEpochRootProofArgs { @@ -19,14 +25,6 @@ struct SubmitEpochRootProofArgs { bytes proof; } -struct FeeHeader { - uint256 excessMana; - uint256 feeAssetPriceNumerator; - uint256 manaUsed; - uint256 provingCostPerManaNumerator; - uint256 congestionCost; -} - struct BlockLog { FeeHeader feeHeader; bytes32 archive; @@ -34,9 +32,25 @@ struct BlockLog { Slot slotNumber; } -struct L1FeeData { - uint256 baseFee; - uint256 blobFee; +struct ChainTips { + uint256 pendingBlockNumber; + uint256 provenBlockNumber; +} + +struct L1GasOracleValues { + L1FeeData pre; + L1FeeData post; + Slot slotOfChange; +} + +struct RollupStore { + mapping(uint256 blockNumber => BlockLog log) blocks; + ChainTips tips; + bytes32 vkTreeRoot; + bytes32 protocolContractTreeRoot; + L1GasOracleValues l1GasOracleValues; + DataStructures.EpochProofClaim proofClaim; + IVerifier epochProofVerifier; } interface ITestRollup { @@ -65,19 +79,16 @@ interface IRollup { function prune() external; function updateL1GasFeeOracle() external; - function claimEpochProofRight(EpochProofQuoteLib.SignedEpochProofQuote calldata _quote) external; + function claimEpochProofRight(SignedEpochProofQuote calldata _quote) external; - function propose( - ProposeArgs calldata _args, - SignatureLib.Signature[] memory _signatures, - bytes calldata _body - ) external; + function propose(ProposeArgs calldata _args, Signature[] memory _signatures, bytes calldata _body) + external; function proposeAndClaim( ProposeArgs calldata _args, - SignatureLib.Signature[] memory _signatures, + Signature[] memory _signatures, bytes calldata _body, - EpochProofQuoteLib.SignedEpochProofQuote calldata _quote + SignedEpochProofQuote calldata _quote ) external; function submitEpochRootProof(SubmitEpochRootProofArgs calldata _args) external; @@ -86,7 +97,7 @@ interface IRollup { function validateHeader( bytes calldata _header, - SignatureLib.Signature[] memory _signatures, + Signature[] memory _signatures, bytes32 _digest, Timestamp _currentTime, bytes32 _txsEffecstHash, @@ -102,6 +113,9 @@ interface IRollup { // solhint-disable-next-line func-name-mixedcase function L1_BLOCK_AT_GENESIS() external view returns (uint256); + function getProofClaim() external view returns (DataStructures.EpochProofClaim memory); + function getTips() external view returns (ChainTips memory); + function status(uint256 _myHeaderBlockNumber) external view @@ -114,10 +128,7 @@ interface IRollup { Epoch 
provenEpochNumber ); - function quoteToDigest(EpochProofQuoteLib.EpochProofQuote memory _quote) - external - view - returns (bytes32); + function quoteToDigest(EpochProofQuote memory _quote) external view returns (bytes32); function getBlock(uint256 _blockNumber) external view returns (BlockLog memory); function getFeeAssetPrice() external view returns (uint256); function getManaBaseFeeAt(Timestamp _timestamp, bool _inFeeAsset) external view returns (uint256); @@ -131,10 +142,9 @@ interface IRollup { function getPendingBlockNumber() external view returns (uint256); function getEpochToProve() external view returns (Epoch); function getClaimableEpoch() external view returns (Epoch); - function validateEpochProofRightClaimAtTime( - Timestamp _ts, - EpochProofQuoteLib.SignedEpochProofQuote calldata _quote - ) external view; + function validateEpochProofRightClaimAtTime(Timestamp _ts, SignedEpochProofQuote calldata _quote) + external + view; function getEpochForBlock(uint256 _blockNumber) external view returns (Epoch); function getEpochProofPublicInputs( uint256 _epochSize, diff --git a/l1-contracts/src/core/libraries/EpochProofQuoteLib.sol b/l1-contracts/src/core/libraries/EpochProofQuoteLib.sol deleted file mode 100644 index be838b7a7ad..00000000000 --- a/l1-contracts/src/core/libraries/EpochProofQuoteLib.sol +++ /dev/null @@ -1,51 +0,0 @@ -// SPDX-License-Identifier: Apache-2.0 -// Copyright 2024 Aztec Labs. -pragma solidity >=0.8.27; - -import {SignatureLib} from "@aztec/core/libraries/crypto/SignatureLib.sol"; -import {Slot, Epoch} from "@aztec/core/libraries/TimeMath.sol"; - -library EpochProofQuoteLib { - /** - * @notice Struct encompassing an epoch proof quote - * @param epochToProve - The epoch number to prove - * @param validUntilSlot - The deadline of the quote, denoted in L2 slots - * @param bondAmount - The size of the bond - * @param prover - The address of the prover - * @param basisPointFee - The fee measured in basis points - */ - struct EpochProofQuote { - Epoch epochToProve; - Slot validUntilSlot; - uint256 bondAmount; - address prover; - uint32 basisPointFee; - } - - /** - * @notice A signed quote for the epoch proof - * @param quote - The Epoch Proof Quote - * @param signature - A signature on the quote - */ - struct SignedEpochProofQuote { - EpochProofQuote quote; - SignatureLib.Signature signature; - } - - bytes32 public constant EPOCH_PROOF_QUOTE_TYPEHASH = keccak256( - "EpochProofQuote(uint256 epochToProve,uint256 validUntilSlot,uint256 bondAmount,address prover,uint32 basisPointFee)" - ); - - function hash(EpochProofQuote memory _quote) internal pure returns (bytes32) { - return keccak256( - abi.encode( - EPOCH_PROOF_QUOTE_TYPEHASH, - _quote.epochToProve, - _quote.validUntilSlot, - _quote.bondAmount, - _quote.prover, - _quote.basisPointFee - ) - ); - } -} diff --git a/l1-contracts/src/core/libraries/LeonidasLib/LeonidasLib.sol b/l1-contracts/src/core/libraries/LeonidasLib/LeonidasLib.sol new file mode 100644 index 00000000000..28bc684fd84 --- /dev/null +++ b/l1-contracts/src/core/libraries/LeonidasLib/LeonidasLib.sol @@ -0,0 +1,255 @@ +// SPDX-License-Identifier: Apache-2.0 +// Copyright 2024 Aztec Labs. 
+pragma solidity >=0.8.27; + +import {EpochData, LeonidasStorage} from "@aztec/core/interfaces/ILeonidas.sol"; +import {SampleLib} from "@aztec/core/libraries/crypto/SampleLib.sol"; +import {SignatureLib, Signature} from "@aztec/core/libraries/crypto/SignatureLib.sol"; +import {DataStructures} from "@aztec/core/libraries/DataStructures.sol"; +import {Errors} from "@aztec/core/libraries/Errors.sol"; +import {Slot, Epoch} from "@aztec/core/libraries/TimeMath.sol"; +import {MessageHashUtils} from "@oz/utils/cryptography/MessageHashUtils.sol"; +import {EnumerableSet} from "@oz/utils/structs/EnumerableSet.sol"; + +library LeonidasLib { + using EnumerableSet for EnumerableSet.AddressSet; + using MessageHashUtils for bytes32; + using SignatureLib for Signature; + + /** + * @notice Samples a validator set for a specific epoch + * + * @dev Only used internally, should never be called for anything but the "next" epoch + * Allowing us to always use `lastSeed`. + * + * @return The validators for the given epoch + */ + function sampleValidators( + LeonidasStorage storage _store, + uint256 _seed, + uint256 _targetCommitteeSize + ) external view returns (address[] memory) { + return _sampleValidators(_store, _seed, _targetCommitteeSize); + } + + function getProposerAt( + LeonidasStorage storage _store, + Slot _slot, + Epoch _epochNumber, + uint256 _targetCommitteeSize + ) external view returns (address) { + return _getProposerAt(_store, _slot, _epochNumber, _targetCommitteeSize); + } + + function getCommitteeAt( + LeonidasStorage storage _store, + Epoch _epochNumber, + uint256 _targetCommitteeSize + ) external view returns (address[] memory) { + return _getCommitteeAt(_store, _epochNumber, _targetCommitteeSize); + } + + /** + * @notice Propose a pending block from the point-of-view of sequencer selection. Will: + * - Setup the epoch if needed (if epoch committee is empty skips the rest) + * - Validate that the proposer is the proposer of the slot + * - Validate that the signatures for attestations are indeed from the validatorset + * - Validate that the number of valid attestations is sufficient + * + * @dev Cases where errors are thrown: + * - If the epoch is not setup + * - If the proposer is not the real proposer AND the proposer is not open + * - If the number of valid attestations is insufficient + * + * @param _slot - The slot of the block + * @param _signatures - The signatures of the committee members + * @param _digest - The digest of the block + */ + function validateLeonidas( + LeonidasStorage storage _store, + Slot _slot, + Epoch _epochNumber, + Signature[] memory _signatures, + bytes32 _digest, + DataStructures.ExecutionFlags memory _flags, + uint256 _targetCommitteeSize + ) external view { + address proposer = _getProposerAt(_store, _slot, _epochNumber, _targetCommitteeSize); + + // @todo Consider getting rid of this option. + // If the proposer is open, we allow anyone to propose without needing any signatures + if (proposer == address(0)) { + return; + } + + // @todo We should allow to provide a signature instead of needing the proposer to broadcast. + require(proposer == msg.sender, Errors.Leonidas__InvalidProposer(proposer, msg.sender)); + + // @note This is NOT the efficient way to do it, but it is a very convenient way for us to do it + // that allows us to reduce the number of code paths. Also when changed with optimistic for + // pleistarchus, this will be changed, so we can live with it. 
+ + if (_flags.ignoreSignatures) { + return; + } + + address[] memory committee = _getCommitteeAt(_store, _epochNumber, _targetCommitteeSize); + + uint256 needed = committee.length * 2 / 3 + 1; + require( + _signatures.length >= needed, + Errors.Leonidas__InsufficientAttestationsProvided(needed, _signatures.length) + ); + + // Validate the attestations + uint256 validAttestations = 0; + + bytes32 digest = _digest.toEthSignedMessageHash(); + for (uint256 i = 0; i < _signatures.length; i++) { + // To avoid stack too deep errors + Signature memory signature = _signatures[i]; + if (signature.isEmpty) { + continue; + } + + // The verification will throw if invalid + signature.verify(committee[i], digest); + validAttestations++; + } + + require( + validAttestations >= needed, + Errors.Leonidas__InsufficientAttestations(needed, validAttestations) + ); + } + + /** + * @notice Get the sample seed for an epoch + * + * @dev This should behave as walking past the line, but it does not currently do that. + * If there are entire skips, e.g., 1, 2, 5 and we then go back and try executing + * for 4 we will get an invalid value because we will read lastSeed which is from 5. + * + * @dev The `_epoch` will never be 0 nor in the future + * + * @dev The return value will be equal to keccak256(n, block.prevrandao) for n being the last epoch + * setup. + * + * @return The sample seed for the epoch + */ + function getSampleSeed(LeonidasStorage storage _store, Epoch _epoch) + internal + view + returns (uint256) + { + if (Epoch.unwrap(_epoch) == 0) { + return type(uint256).max; + } + uint256 sampleSeed = _store.epochs[_epoch].sampleSeed; + if (sampleSeed != 0) { + return sampleSeed; + } + + sampleSeed = _store.epochs[_epoch - Epoch.wrap(1)].nextSeed; + if (sampleSeed != 0) { + return sampleSeed; + } + + return _store.lastSeed; + } + + /** + * @notice Samples a validator set for a specific epoch + * + * @dev Only used internally, should never be called for anything but the "next" epoch + * Allowing us to always use `lastSeed`. + * + * @return The validators for the given epoch + */ + function _sampleValidators( + LeonidasStorage storage _store, + uint256 _seed, + uint256 _targetCommitteeSize + ) private view returns (address[] memory) { + uint256 validatorSetSize = _store.validatorSet.length(); + if (validatorSetSize == 0) { + return new address[](0); + } + + // If we have less validators than the target committee size, we just return the full set + if (validatorSetSize <= _targetCommitteeSize) { + return _store.validatorSet.values(); + } + + uint256[] memory indicies = + SampleLib.computeCommitteeClever(_targetCommitteeSize, validatorSetSize, _seed); + + address[] memory committee = new address[](_targetCommitteeSize); + for (uint256 i = 0; i < _targetCommitteeSize; i++) { + committee[i] = _store.validatorSet.at(indicies[i]); + } + return committee; + } + + function _getProposerAt( + LeonidasStorage storage _store, + Slot _slot, + Epoch _epochNumber, + uint256 _targetCommitteeSize + ) private view returns (address) { + // @note this is deliberately "bad" for the simple reason of code reduction. + // it does not need to actually return the full committee and then draw from it + // it can just return the proposer directly, but then we duplicate the code + // which we just don't have room for right now... 
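// A minimal sketch, for illustration only, of the attestation threshold computed above
// (committee.length * 2 / 3 + 1). Note that signature i is verified against committee[i],
// so the provided signatures must be index-aligned with the committee ordering.
pragma solidity >=0.8.27;

function neededAttestationsSketch(uint256 _committeeSize) pure returns (uint256) {
  // Integer division floors, so the result is always strictly more than two thirds:
  //   3 validators   -> 3 * 2 / 3 + 1   = 3
  //   48 validators  -> 48 * 2 / 3 + 1  = 33
  //   100 validators -> 100 * 2 / 3 + 1 = 67
  return _committeeSize * 2 / 3 + 1;
}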
+ address[] memory committee = _getCommitteeAt(_store, _epochNumber, _targetCommitteeSize); + if (committee.length == 0) { + return address(0); + } + return committee[computeProposerIndex( + _epochNumber, _slot, getSampleSeed(_store, _epochNumber), committee.length + )]; + } + + function _getCommitteeAt( + LeonidasStorage storage _store, + Epoch _epochNumber, + uint256 _targetCommitteeSize + ) private view returns (address[] memory) { + EpochData storage epoch = _store.epochs[_epochNumber]; + + if (epoch.sampleSeed != 0) { + uint256 committeeSize = epoch.committee.length; + if (committeeSize == 0) { + return new address[](0); + } + return epoch.committee; + } + + // Allow anyone if there is no validator set + if (_store.validatorSet.length() == 0) { + return new address[](0); + } + + // Emulate a sampling of the validators + uint256 sampleSeed = getSampleSeed(_store, _epochNumber); + return _sampleValidators(_store, sampleSeed, _targetCommitteeSize); + } + + /** + * @notice Computes the index of the committee member that acts as proposer for a given slot + * + * @param _epoch - The epoch to compute the proposer index for + * @param _slot - The slot to compute the proposer index for + * @param _seed - The seed to use for the computation + * @param _size - The size of the committee + * + * @return The index of the proposer + */ + function computeProposerIndex(Epoch _epoch, Slot _slot, uint256 _seed, uint256 _size) + private + pure + returns (uint256) + { + return uint256(keccak256(abi.encode(_epoch, _slot, _seed))) % _size; + } +} diff --git a/l1-contracts/src/core/libraries/RollupLibs/EpochProofLib.sol b/l1-contracts/src/core/libraries/RollupLibs/EpochProofLib.sol new file mode 100644 index 00000000000..6920c00cb4e --- /dev/null +++ b/l1-contracts/src/core/libraries/RollupLibs/EpochProofLib.sol @@ -0,0 +1,276 @@ +// SPDX-License-Identifier: Apache-2.0 +// Copyright 2024 Aztec Labs. 
+pragma solidity >=0.8.27; + +import {IFeeJuicePortal} from "@aztec/core/interfaces/IFeeJuicePortal.sol"; +import {IProofCommitmentEscrow} from "@aztec/core/interfaces/IProofCommitmentEscrow.sol"; +import { + RollupStore, SubmitEpochRootProofArgs, FeeHeader +} from "@aztec/core/interfaces/IRollup.sol"; +import {Constants} from "@aztec/core/libraries/ConstantsGen.sol"; +import {Errors} from "@aztec/core/libraries/Errors.sol"; +import {Epoch} from "@aztec/core/libraries/TimeMath.sol"; +import {IRewardDistributor} from "@aztec/governance/interfaces/IRewardDistributor.sol"; +import {IERC20} from "@oz/token/ERC20/IERC20.sol"; +import {SafeERC20} from "@oz/token/ERC20/utils/SafeERC20.sol"; +import {Math} from "@oz/utils/math/Math.sol"; + +struct SubmitEpochRootProofAddresses { + IProofCommitmentEscrow proofCommitmentEscrow; + IFeeJuicePortal feeJuicePortal; + IRewardDistributor rewardDistributor; + IERC20 asset; + address cuauhxicalli; +} + +struct SubmitEpochRootProofInterimValues { + uint256 previousBlockNumber; + uint256 endBlockNumber; + Epoch epochToProve; + Epoch startEpoch; + bool isFeeCanonical; + bool isRewardDistributorCanonical; + uint256 totalProverReward; + uint256 totalBurn; +} + +library EpochProofLib { + using SafeERC20 for IERC20; + + function submitEpochRootProof( + RollupStore storage _rollupStore, + SubmitEpochRootProofArgs calldata _args, + SubmitEpochRootProofInterimValues memory _interimValues, + SubmitEpochRootProofAddresses memory _addresses + ) internal returns (uint256) { + // Ensure that the proof is not across epochs + require( + _interimValues.startEpoch == _interimValues.epochToProve, + Errors.Rollup__InvalidEpoch(_interimValues.startEpoch, _interimValues.epochToProve) + ); + + bytes32[] memory publicInputs = getEpochProofPublicInputs( + _rollupStore, _args.epochSize, _args.args, _args.fees, _args.aggregationObject + ); + + require( + _rollupStore.epochProofVerifier.verify(_args.proof, publicInputs), + Errors.Rollup__InvalidProof() + ); + + if (_rollupStore.proofClaim.epochToProve == _interimValues.epochToProve) { + _addresses.proofCommitmentEscrow.unstakeBond( + _rollupStore.proofClaim.bondProvider, _rollupStore.proofClaim.bondAmount + ); + } + + _rollupStore.tips.provenBlockNumber = _interimValues.endBlockNumber; + + // @note Only if the rollup is the canonical will it be able to meaningfully claim fees + // Otherwise, the fees are unbacked #7938. + _interimValues.isFeeCanonical = address(this) == _addresses.feeJuicePortal.canonicalRollup(); + _interimValues.isRewardDistributorCanonical = + address(this) == _addresses.rewardDistributor.canonicalRollup(); + + _interimValues.totalProverReward = 0; + _interimValues.totalBurn = 0; + + if (_interimValues.isFeeCanonical || _interimValues.isRewardDistributorCanonical) { + for (uint256 i = 0; i < _args.epochSize; i++) { + address coinbase = address(uint160(uint256(publicInputs[9 + i * 2]))); + uint256 reward = 0; + uint256 toProver = 0; + uint256 burn = 0; + + if (_interimValues.isFeeCanonical) { + uint256 fees = uint256(publicInputs[10 + i * 2]); + if (fees > 0) { + // This is insanely expensive, and will be fixed as part of the general storage cost reduction. + // See #9826. 
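            // A worked illustration (with made-up numbers, not from this patch) of the split
            // computed a few lines below: with reward = 1_000_000 and
            // rollupStore.proofClaim.basisPointFee = 2_500 (25%),
            //   toProver   = Math.mulDiv(1_000_000, 2_500, 10_000) = 250_000
            //   toCoinbase = reward - toProver                     = 750_000
            // and if coinbase is the zero address, the full reward goes to the prover.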
+ FeeHeader storage feeHeader = + _rollupStore.blocks[_interimValues.previousBlockNumber + 1 + i].feeHeader; + burn += feeHeader.congestionCost * feeHeader.manaUsed; + + reward += (fees - burn); + _addresses.feeJuicePortal.distributeFees(address(this), fees); + } + } + + if (_interimValues.isRewardDistributorCanonical) { + reward += _addresses.rewardDistributor.claim(address(this)); + } + + if (coinbase == address(0)) { + toProver = reward; + } else { + // @note We are getting value from the `proofClaim`, which are not cleared. + // So if someone is posting the proof before a new claim is made, + // the reward will calculated based on the previous values. + toProver = Math.mulDiv(reward, _rollupStore.proofClaim.basisPointFee, 10_000); + } + + uint256 toCoinbase = reward - toProver; + if (toCoinbase > 0) { + _addresses.asset.safeTransfer(coinbase, toCoinbase); + } + + _interimValues.totalProverReward += toProver; + _interimValues.totalBurn += burn; + } + + if (_interimValues.totalProverReward > 0) { + // If there is a bond-provider give him the reward, otherwise give it to the submitter. + address proofRewardRecipient = _rollupStore.proofClaim.bondProvider == address(0) + ? msg.sender + : _rollupStore.proofClaim.bondProvider; + _addresses.asset.safeTransfer(proofRewardRecipient, _interimValues.totalProverReward); + } + + if (_interimValues.totalBurn > 0) { + _addresses.asset.safeTransfer(_addresses.cuauhxicalli, _interimValues.totalBurn); + } + } + + return _interimValues.endBlockNumber; + } + + /** + * @notice Returns the computed public inputs for the given epoch proof. + * + * @dev Useful for debugging and testing. Allows submitter to compare their + * own public inputs used for generating the proof vs the ones assembled + * by this contract when verifying it. 
+ * + * @param _epochSize - The size of the epoch (to be promoted to a constant) + * @param _args - Array of public inputs to the proof (previousArchive, endArchive, previousBlockHash, endBlockHash, endTimestamp, outHash, proverId) + * @param _fees - Array of recipient-value pairs with fees to be distributed for the epoch + * @param _aggregationObject - The aggregation object for the proof + */ + function getEpochProofPublicInputs( + RollupStore storage _rollupStore, + uint256 _epochSize, + bytes32[7] calldata _args, + bytes32[] calldata _fees, + bytes calldata _aggregationObject + ) internal view returns (bytes32[] memory) { + uint256 previousBlockNumber = _rollupStore.tips.provenBlockNumber; + uint256 endBlockNumber = previousBlockNumber + _epochSize; + + // Args are defined as an array because Solidity complains with "stack too deep" otherwise + // 0 bytes32 _previousArchive, + // 1 bytes32 _endArchive, + // 2 bytes32 _previousBlockHash, + // 3 bytes32 _endBlockHash, + // 4 bytes32 _endTimestamp, + // 5 bytes32 _outHash, + // 6 bytes32 _proverId, + + // TODO(#7373): Public inputs are not fully verified + + { + // We do it this way to provide better error messages than passing along the storage values + bytes32 expectedPreviousArchive = _rollupStore.blocks[previousBlockNumber].archive; + require( + expectedPreviousArchive == _args[0], + Errors.Rollup__InvalidPreviousArchive(expectedPreviousArchive, _args[0]) + ); + + bytes32 expectedEndArchive = _rollupStore.blocks[endBlockNumber].archive; + require( + expectedEndArchive == _args[1], Errors.Rollup__InvalidArchive(expectedEndArchive, _args[1]) + ); + + bytes32 expectedPreviousBlockHash = _rollupStore.blocks[previousBlockNumber].blockHash; + // TODO: Remove 0 check once we inject the proper genesis block hash + require( + expectedPreviousBlockHash == 0 || expectedPreviousBlockHash == _args[2], + Errors.Rollup__InvalidPreviousBlockHash(expectedPreviousBlockHash, _args[2]) + ); + + bytes32 expectedEndBlockHash = _rollupStore.blocks[endBlockNumber].blockHash; + require( + expectedEndBlockHash == _args[3], + Errors.Rollup__InvalidBlockHash(expectedEndBlockHash, _args[3]) + ); + } + + bytes32[] memory publicInputs = new bytes32[]( + Constants.ROOT_ROLLUP_PUBLIC_INPUTS_LENGTH + Constants.AGGREGATION_OBJECT_LENGTH + ); + + // Structure of the root rollup public inputs we need to reassemble: + // + // struct RootRollupPublicInputs { + // previous_archive: AppendOnlyTreeSnapshot, + // end_archive: AppendOnlyTreeSnapshot, + // previous_block_hash: Field, + // end_block_hash: Field, + // end_timestamp: u64, + // end_block_number: Field, + // out_hash: Field, + // fees: [FeeRecipient; Constants.AZTEC_EPOCH_DURATION], + // vk_tree_root: Field, + // protocol_contract_tree_root: Field, + // prover_id: Field + // } + + // previous_archive.root: the previous archive tree root + publicInputs[0] = _args[0]; + + // previous_archive.next_available_leaf_index: the previous archive next available index + // normally this should be equal to the block number (since leaves are 0-indexed and blocks 1-indexed) + // but in yarn-project/merkle-tree/src/new_tree.ts we prefill the tree so that block N is in leaf N + publicInputs[1] = bytes32(previousBlockNumber + 1); + + // end_archive.root: the new archive tree root + publicInputs[2] = _args[1]; + + // end_archive.next_available_leaf_index: the new archive next available index + publicInputs[3] = bytes32(endBlockNumber + 1); + + // previous_block_hash: the block hash just preceding this epoch + publicInputs[4] = 
_args[2]; + + // end_block_hash: the last block hash in the epoch + publicInputs[5] = _args[3]; + + // end_timestamp: the timestamp of the last block in the epoch + publicInputs[6] = _args[4]; + + // end_block_number: last block number in the epoch + publicInputs[7] = bytes32(endBlockNumber); + + // out_hash: root of this epoch's l2 to l1 message tree + publicInputs[8] = _args[5]; + + uint256 feesLength = Constants.AZTEC_MAX_EPOCH_DURATION * 2; + // fees[9 to (9+feesLength-1)]: array of recipient-value pairs + for (uint256 i = 0; i < feesLength; i++) { + publicInputs[9 + i] = _fees[i]; + } + uint256 feesEnd = 9 + feesLength; + + // vk_tree_root + publicInputs[feesEnd] = _rollupStore.vkTreeRoot; + + // protocol_contract_tree_root + publicInputs[feesEnd + 1] = _rollupStore.protocolContractTreeRoot; + + // prover_id: id of current epoch's prover + publicInputs[feesEnd + 2] = _args[6]; + + // the block proof is recursive, which means it comes with an aggregation object + // this snippet copies it into the public inputs needed for verification + // it also guards against empty _aggregationObject used with mocked proofs + uint256 aggregationLength = _aggregationObject.length / 32; + for (uint256 i = 0; i < Constants.AGGREGATION_OBJECT_LENGTH && i < aggregationLength; i++) { + bytes32 part; + assembly { + part := calldataload(add(_aggregationObject.offset, mul(i, 32))) + } + publicInputs[i + feesEnd + 3] = part; + } + + return publicInputs; + } +} diff --git a/l1-contracts/src/core/libraries/RollupLibs/EpochProofQuoteLib.sol b/l1-contracts/src/core/libraries/RollupLibs/EpochProofQuoteLib.sol new file mode 100644 index 00000000000..bfa865d6d4e --- /dev/null +++ b/l1-contracts/src/core/libraries/RollupLibs/EpochProofQuoteLib.sol @@ -0,0 +1,51 @@ +// SPDX-License-Identifier: Apache-2.0 +// Copyright 2024 Aztec Labs. 
+pragma solidity >=0.8.27; + +import {Signature} from "@aztec/core/libraries/crypto/SignatureLib.sol"; +import {Slot, Epoch} from "@aztec/core/libraries/TimeMath.sol"; + +/** + * @notice Struct encompassing an epoch proof quote + * @param epochToProve - The epoch number to prove + * @param validUntilSlot - The deadline of the quote, denoted in L2 slots + * @param bondAmount - The size of the bond + * @param prover - The address of the prover + * @param basisPointFee - The fee measured in basis points + */ +struct EpochProofQuote { + Epoch epochToProve; + Slot validUntilSlot; + uint256 bondAmount; + address prover; + uint32 basisPointFee; +} + +/** + * @notice A signed quote for the epoch proof + * @param quote - The Epoch Proof Quote + * @param signature - A signature on the quote + */ +struct SignedEpochProofQuote { + EpochProofQuote quote; + Signature signature; +} + +library EpochProofQuoteLib { + bytes32 public constant EPOCH_PROOF_QUOTE_TYPEHASH = keccak256( + "EpochProofQuote(uint256 epochToProve,uint256 validUntilSlot,uint256 bondAmount,address prover,uint32 basisPointFee)" + ); + + function hash(EpochProofQuote memory _quote) internal pure returns (bytes32) { + return keccak256( + abi.encode( + EPOCH_PROOF_QUOTE_TYPEHASH, + _quote.epochToProve, + _quote.validUntilSlot, + _quote.bondAmount, + _quote.prover, + _quote.basisPointFee + ) + ); + } +} diff --git a/l1-contracts/src/core/libraries/RollupLibs/ExtRollupLib.sol b/l1-contracts/src/core/libraries/RollupLibs/ExtRollupLib.sol new file mode 100644 index 00000000000..4b51f8efef0 --- /dev/null +++ b/l1-contracts/src/core/libraries/RollupLibs/ExtRollupLib.sol @@ -0,0 +1,113 @@ +// SPDX-License-Identifier: Apache-2.0 +// Copyright 2024 Aztec Labs. +pragma solidity >=0.8.27; + +import {IFeeJuicePortal} from "@aztec/core/interfaces/IFeeJuicePortal.sol"; +import {IProofCommitmentEscrow} from "@aztec/core/interfaces/IProofCommitmentEscrow.sol"; +import {IProofCommitmentEscrow} from "@aztec/core/interfaces/IProofCommitmentEscrow.sol"; +import {BlockLog, RollupStore, SubmitEpochRootProofArgs} from "@aztec/core/interfaces/IRollup.sol"; +import {IRewardDistributor} from "@aztec/governance/interfaces/IRewardDistributor.sol"; +import {IERC20} from "@oz/token/ERC20/IERC20.sol"; +import {DataStructures} from "./../DataStructures.sol"; +import {Slot, Epoch} from "./../TimeMath.sol"; +import { + EpochProofLib, + SubmitEpochRootProofAddresses, + SubmitEpochRootProofInterimValues +} from "./EpochProofLib.sol"; +import {SignedEpochProofQuote} from "./EpochProofQuoteLib.sol"; +import {FeeMath, ManaBaseFeeComponents, FeeHeader, L1FeeData} from "./FeeMath.sol"; +import {HeaderLib, Header} from "./HeaderLib.sol"; +import {TxsDecoder} from "./TxsDecoder.sol"; +import {ValidationLib, ValidateHeaderArgs} from "./ValidationLib.sol"; +// We are using this library such that we can more easily "link" just a larger external library +// instead of a few smaller ones. 
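// A minimal sketch, for illustration only, of how the digest returned by quoteToDigest could
// be reproduced off-chain, assuming the standard OpenZeppelin EIP712 domain implied by the
// contract declaration EIP712("Aztec Rollup", "1") (name, version, chain id, rollup address).
// The struct hash is EpochProofQuoteLib.hash(quote) as defined above.
pragma solidity >=0.8.27;

function epochProofQuoteDigestSketch(bytes32 _structHash, uint256 _chainId, address _rollup)
  pure
  returns (bytes32)
{
  bytes32 domainSeparator = keccak256(
    abi.encode(
      keccak256(
        "EIP712Domain(string name,string version,uint256 chainId,address verifyingContract)"
      ),
      keccak256(bytes("Aztec Rollup")),
      keccak256(bytes("1")),
      _chainId,
      _rollup
    )
  );
  // Final EIP-712 digest: keccak256(0x1901 || domainSeparator || structHash)
  return keccak256(abi.encodePacked(hex"1901", domainSeparator, _structHash));
}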
+ +library ExtRollupLib { + function submitEpochRootProof( + RollupStore storage _rollupStore, + SubmitEpochRootProofArgs calldata _args, + SubmitEpochRootProofInterimValues memory _interimValues, + IProofCommitmentEscrow _proofCommitmentEscrow, + IFeeJuicePortal _feeJuicePortal, + IRewardDistributor _rewardDistributor, + IERC20 _asset, + address _cuauhxicalli + ) external returns (uint256) { + return EpochProofLib.submitEpochRootProof( + _rollupStore, + _args, + _interimValues, + SubmitEpochRootProofAddresses({ + proofCommitmentEscrow: _proofCommitmentEscrow, + feeJuicePortal: _feeJuicePortal, + rewardDistributor: _rewardDistributor, + asset: _asset, + cuauhxicalli: _cuauhxicalli + }) + ); + } + + function validateHeaderForSubmissionBase( + ValidateHeaderArgs memory _args, + mapping(uint256 blockNumber => BlockLog log) storage _blocks + ) external view { + ValidationLib.validateHeaderForSubmissionBase(_args, _blocks); + } + + function validateEpochProofRightClaimAtTime( + Slot _currentSlot, + address _currentProposer, + Epoch _epochToProve, + uint256 _posInEpoch, + SignedEpochProofQuote calldata _quote, + bytes32 _digest, + DataStructures.EpochProofClaim storage _proofClaim, + uint256 _claimDurationInL2Slots, + uint256 _proofCommitmentMinBondAmountInTst, + IProofCommitmentEscrow _proofCommitmentEscrow + ) external view { + ValidationLib.validateEpochProofRightClaimAtTime( + _currentSlot, + _currentProposer, + _epochToProve, + _posInEpoch, + _quote, + _digest, + _proofClaim, + _claimDurationInL2Slots, + _proofCommitmentMinBondAmountInTst, + _proofCommitmentEscrow + ); + } + + function getManaBaseFeeComponentsAt( + FeeHeader storage _parentFeeHeader, + L1FeeData memory _fees, + uint256 _feeAssetPrice, + uint256 _epochDuration + ) external view returns (ManaBaseFeeComponents memory) { + return + FeeMath.getManaBaseFeeComponentsAt(_parentFeeHeader, _fees, _feeAssetPrice, _epochDuration); + } + + function getEpochProofPublicInputs( + RollupStore storage _rollupStore, + uint256 _epochSize, + bytes32[7] calldata _args, + bytes32[] calldata _fees, + bytes calldata _aggregationObject + ) external view returns (bytes32[] memory) { + return EpochProofLib.getEpochProofPublicInputs( + _rollupStore, _epochSize, _args, _fees, _aggregationObject + ); + } + + function decodeHeader(bytes calldata _header) external pure returns (Header memory) { + return HeaderLib.decode(_header); + } + + function computeTxsEffectsHash(bytes calldata _body) external pure returns (bytes32) { + return TxsDecoder.decode(_body); + } +} diff --git a/l1-contracts/src/core/libraries/FeeMath.sol b/l1-contracts/src/core/libraries/RollupLibs/FeeMath.sol similarity index 64% rename from l1-contracts/src/core/libraries/FeeMath.sol rename to l1-contracts/src/core/libraries/RollupLibs/FeeMath.sol index 215c2e4739a..ef3a27ffafb 100644 --- a/l1-contracts/src/core/libraries/FeeMath.sol +++ b/l1-contracts/src/core/libraries/RollupLibs/FeeMath.sol @@ -6,7 +6,26 @@ import {Math} from "@oz/utils/math/Math.sol"; import {SafeCast} from "@oz/utils/math/SafeCast.sol"; import {SignedMath} from "@oz/utils/math/SignedMath.sol"; -import {Errors} from "./Errors.sol"; +import {Errors} from "../Errors.sol"; + +// These values are taken from the model, but mostly pulled out of the ass +uint256 constant MINIMUM_PROVING_COST_PER_MANA = 5415357955; +uint256 constant MAX_PROVING_COST_MODIFIER = 1000000000; +uint256 constant PROVING_UPDATE_FRACTION = 100000000000; + +uint256 constant MINIMUM_FEE_ASSET_PRICE = 10000000000; +uint256 constant 
MAX_FEE_ASSET_PRICE_MODIFIER = 1000000000; +uint256 constant FEE_ASSET_PRICE_UPDATE_FRACTION = 100000000000; + +uint256 constant L1_GAS_PER_BLOCK_PROPOSED = 150000; +uint256 constant L1_GAS_PER_EPOCH_VERIFIED = 1000000; + +uint256 constant MINIMUM_CONGESTION_MULTIPLIER = 1000000000; +uint256 constant MANA_TARGET = 100000000; +uint256 constant CONGESTION_UPDATE_FRACTION = 854700854; + +uint256 constant BLOB_GAS_PER_BLOB = 2 ** 17; +uint256 constant GAS_PER_BLOB_POINT_EVALUATION = 50_000; struct OracleInput { int256 provingCostModifier; @@ -21,27 +40,58 @@ struct ManaBaseFeeComponents { uint256 provingCost; } +struct FeeHeader { + uint256 excessMana; + uint256 feeAssetPriceNumerator; + uint256 manaUsed; + uint256 provingCostPerManaNumerator; + uint256 congestionCost; +} + +struct L1FeeData { + uint256 baseFee; + uint256 blobFee; +} + library FeeMath { using Math for uint256; using SafeCast for int256; using SafeCast for uint256; using SignedMath for int256; - // These values are taken from the model, but mostly pulled out of the ass - uint256 internal constant MINIMUM_PROVING_COST_PER_MANA = 5415357955; - uint256 internal constant MAX_PROVING_COST_MODIFIER = 1000000000; - uint256 internal constant PROVING_UPDATE_FRACTION = 100000000000; - - uint256 internal constant MINIMUM_FEE_ASSET_PRICE = 10000000000; - uint256 internal constant MAX_FEE_ASSET_PRICE_MODIFIER = 1000000000; - uint256 internal constant FEE_ASSET_PRICE_UPDATE_FRACTION = 100000000000; - - uint256 internal constant L1_GAS_PER_BLOCK_PROPOSED = 150000; - uint256 internal constant L1_GAS_PER_EPOCH_VERIFIED = 1000000; + function getManaBaseFeeComponentsAt( + FeeHeader storage _parentFeeHeader, + L1FeeData memory _fees, + uint256 _feeAssetPrice, + uint256 _epochDuration + ) internal view returns (ManaBaseFeeComponents memory) { + uint256 excessMana = FeeMath.clampedAdd( + _parentFeeHeader.excessMana + _parentFeeHeader.manaUsed, -int256(MANA_TARGET) + ); - uint256 internal constant MINIMUM_CONGESTION_MULTIPLIER = 1000000000; - uint256 internal constant MANA_TARGET = 100000000; - uint256 internal constant CONGESTION_UPDATE_FRACTION = 854700854; + uint256 dataCost = + Math.mulDiv(3 * BLOB_GAS_PER_BLOB, _fees.blobFee, MANA_TARGET, Math.Rounding.Ceil); + uint256 gasUsed = L1_GAS_PER_BLOCK_PROPOSED + 3 * GAS_PER_BLOB_POINT_EVALUATION + + L1_GAS_PER_EPOCH_VERIFIED / _epochDuration; + uint256 gasCost = Math.mulDiv(gasUsed, _fees.baseFee, MANA_TARGET, Math.Rounding.Ceil); + uint256 provingCost = FeeMath.provingCostPerMana(_parentFeeHeader.provingCostPerManaNumerator); + + uint256 congestionMultiplier_ = congestionMultiplier(excessMana); + uint256 total = dataCost + gasCost + provingCost; + uint256 congestionCost = Math.mulDiv( + total, congestionMultiplier_, MINIMUM_CONGESTION_MULTIPLIER, Math.Rounding.Floor + ) - total; + + // @todo @lherskind. 
The following is a crime against humanity, but it makes it + // very neat to plot etc from python, #10004 will fix it across the board + return ManaBaseFeeComponents({ + dataCost: Math.mulDiv(dataCost, _feeAssetPrice, 1e9, Math.Rounding.Ceil), + gasCost: Math.mulDiv(gasCost, _feeAssetPrice, 1e9, Math.Rounding.Ceil), + provingCost: Math.mulDiv(provingCost, _feeAssetPrice, 1e9, Math.Rounding.Ceil), + congestionCost: Math.mulDiv(congestionCost, _feeAssetPrice, 1e9, Math.Rounding.Ceil), + congestionMultiplier: congestionMultiplier_ + }); + } function assertValid(OracleInput memory _self) internal pure returns (bool) { require( diff --git a/l1-contracts/src/core/libraries/HeaderLib.sol b/l1-contracts/src/core/libraries/RollupLibs/HeaderLib.sol similarity index 90% rename from l1-contracts/src/core/libraries/HeaderLib.sol rename to l1-contracts/src/core/libraries/RollupLibs/HeaderLib.sol index 4cade7f20e6..15d26b46e74 100644 --- a/l1-contracts/src/core/libraries/HeaderLib.sol +++ b/l1-contracts/src/core/libraries/RollupLibs/HeaderLib.sol @@ -5,6 +5,56 @@ pragma solidity >=0.8.27; import {Constants} from "@aztec/core/libraries/ConstantsGen.sol"; import {Errors} from "@aztec/core/libraries/Errors.sol"; +struct AppendOnlyTreeSnapshot { + bytes32 root; + uint32 nextAvailableLeafIndex; +} + +struct PartialStateReference { + AppendOnlyTreeSnapshot noteHashTree; + AppendOnlyTreeSnapshot nullifierTree; + AppendOnlyTreeSnapshot contractTree; + AppendOnlyTreeSnapshot publicDataTree; +} + +struct StateReference { + AppendOnlyTreeSnapshot l1ToL2MessageTree; + // Note: Can't use "partial" name here as in protocol specs because it is a reserved solidity keyword + PartialStateReference partialStateReference; +} + +struct GasFees { + uint256 feePerDaGas; + uint256 feePerL2Gas; +} + +struct GlobalVariables { + uint256 chainId; + uint256 version; + uint256 blockNumber; + uint256 slotNumber; + uint256 timestamp; + address coinbase; + bytes32 feeRecipient; + GasFees gasFees; +} + +struct ContentCommitment { + uint256 numTxs; + bytes32 txsEffectsHash; + bytes32 inHash; + bytes32 outHash; +} + +struct Header { + AppendOnlyTreeSnapshot lastArchive; + ContentCommitment contentCommitment; + StateReference stateReference; + GlobalVariables globalVariables; + uint256 totalFees; + uint256 totalManaUsed; +} + /** * @title Header Library * @author Aztec Labs @@ -56,56 +106,6 @@ import {Errors} from "@aztec/core/libraries/Errors.sol"; * | --- | --- | --- */ library HeaderLib { - struct AppendOnlyTreeSnapshot { - bytes32 root; - uint32 nextAvailableLeafIndex; - } - - struct PartialStateReference { - AppendOnlyTreeSnapshot noteHashTree; - AppendOnlyTreeSnapshot nullifierTree; - AppendOnlyTreeSnapshot contractTree; - AppendOnlyTreeSnapshot publicDataTree; - } - - struct StateReference { - AppendOnlyTreeSnapshot l1ToL2MessageTree; - // Note: Can't use "partial" name here as in protocol specs because it is a reserved solidity keyword - PartialStateReference partialStateReference; - } - - struct GasFees { - uint256 feePerDaGas; - uint256 feePerL2Gas; - } - - struct GlobalVariables { - uint256 chainId; - uint256 version; - uint256 blockNumber; - uint256 slotNumber; - uint256 timestamp; - address coinbase; - bytes32 feeRecipient; - GasFees gasFees; - } - - struct ContentCommitment { - uint256 numTxs; - bytes32 txsEffectsHash; - bytes32 inHash; - bytes32 outHash; - } - - struct Header { - AppendOnlyTreeSnapshot lastArchive; - ContentCommitment contentCommitment; - StateReference stateReference; - GlobalVariables globalVariables; - 
uint256 totalFees; - uint256 totalManaUsed; - } - uint256 private constant HEADER_LENGTH = 0x288; // Header byte length /** diff --git a/l1-contracts/src/core/libraries/RollupLibs/IntRollupLib.sol b/l1-contracts/src/core/libraries/RollupLibs/IntRollupLib.sol new file mode 100644 index 00000000000..f05eda331dd --- /dev/null +++ b/l1-contracts/src/core/libraries/RollupLibs/IntRollupLib.sol @@ -0,0 +1,31 @@ +// SPDX-License-Identifier: Apache-2.0 +// Copyright 2024 Aztec Labs. +pragma solidity >=0.8.27; + +import {EpochProofQuoteLib, EpochProofQuote} from "./EpochProofQuoteLib.sol"; + +import {FeeMath, ManaBaseFeeComponents, FeeHeader, MANA_TARGET} from "./FeeMath.sol"; + +// We are using this library such that we can more easily "link" just a larger external library +// instead of a few smaller ones. +library IntRollupLib { + function computeQuoteHash(EpochProofQuote memory _quote) internal pure returns (bytes32) { + return EpochProofQuoteLib.hash(_quote); + } + + function summedBaseFee(ManaBaseFeeComponents memory _components) internal pure returns (uint256) { + return FeeMath.summedBaseFee(_components); + } + + function clampedAdd(uint256 _a, int256 _b) internal pure returns (uint256) { + return FeeMath.clampedAdd(_a, _b); + } + + function feeAssetPriceModifier(uint256 _numerator) internal pure returns (uint256) { + return FeeMath.feeAssetPriceModifier(_numerator); + } + + function computeExcessMana(FeeHeader memory _feeHeader) internal pure returns (uint256) { + return clampedAdd(_feeHeader.excessMana + _feeHeader.manaUsed, -int256(MANA_TARGET)); + } +} diff --git a/l1-contracts/src/core/libraries/ProposeLib.sol b/l1-contracts/src/core/libraries/RollupLibs/ProposeLib.sol similarity index 88% rename from l1-contracts/src/core/libraries/ProposeLib.sol rename to l1-contracts/src/core/libraries/RollupLibs/ProposeLib.sol index ab5330661f7..59b4e10a7a6 100644 --- a/l1-contracts/src/core/libraries/ProposeLib.sol +++ b/l1-contracts/src/core/libraries/RollupLibs/ProposeLib.sol @@ -3,7 +3,7 @@ pragma solidity >=0.8.27; import {SignatureLib} from "@aztec/core/libraries/crypto/SignatureLib.sol"; -import {OracleInput} from "@aztec/core/libraries/FeeMath.sol"; +import {OracleInput} from "./FeeMath.sol"; struct ProposeArgs { bytes32 archive; diff --git a/l1-contracts/src/core/libraries/TxsDecoder.sol b/l1-contracts/src/core/libraries/RollupLibs/TxsDecoder.sol similarity index 99% rename from l1-contracts/src/core/libraries/TxsDecoder.sol rename to l1-contracts/src/core/libraries/RollupLibs/TxsDecoder.sol index 4a7da2a7720..b3a12b47c2c 100644 --- a/l1-contracts/src/core/libraries/TxsDecoder.sol +++ b/l1-contracts/src/core/libraries/RollupLibs/TxsDecoder.sol @@ -82,7 +82,7 @@ library TxsDecoder { * @param _body - The L2 block body calldata. * @return The txs effects hash. */ - function decode(bytes calldata _body) external pure returns (bytes32) { + function decode(bytes calldata _body) internal pure returns (bytes32) { ArrayOffsets memory offsets; Counts memory counts; ConsumablesVars memory vars; diff --git a/l1-contracts/src/core/libraries/RollupLibs/ValidationLib.sol b/l1-contracts/src/core/libraries/RollupLibs/ValidationLib.sol new file mode 100644 index 00000000000..5db4c00b5a1 --- /dev/null +++ b/l1-contracts/src/core/libraries/RollupLibs/ValidationLib.sol @@ -0,0 +1,157 @@ +// SPDX-License-Identifier: Apache-2.0 +// Copyright 2024 Aztec Labs. 
+pragma solidity >=0.8.27;
+
+import {IFeeJuicePortal} from "@aztec/core/interfaces/IFeeJuicePortal.sol";
+import {IProofCommitmentEscrow} from "@aztec/core/interfaces/IProofCommitmentEscrow.sol";
+import {BlockLog} from "@aztec/core/interfaces/IRollup.sol";
+import {SignatureLib} from "@aztec/core/libraries/crypto/SignatureLib.sol";
+import {DataStructures} from "./../DataStructures.sol";
+import {Errors} from "./../Errors.sol";
+import {Timestamp, Slot, Epoch} from "./../TimeMath.sol";
+import {SignedEpochProofQuote} from "./EpochProofQuoteLib.sol";
+import {Header} from "./HeaderLib.sol";
+
+struct ValidateHeaderArgs {
+  Header header;
+  Timestamp currentTime;
+  uint256 manaBaseFee;
+  bytes32 txsEffectsHash;
+  uint256 pendingBlockNumber;
+  DataStructures.ExecutionFlags flags;
+  uint256 version;
+  IFeeJuicePortal feeJuicePortal;
+  function(Slot) external view returns (Timestamp) getTimestampForSlot;
+}
+
+library ValidationLib {
+  function validateHeaderForSubmissionBase(
+    ValidateHeaderArgs memory _args,
+    mapping(uint256 blockNumber => BlockLog log) storage _blocks
+  ) internal view {
+    require(
+      block.chainid == _args.header.globalVariables.chainId,
+      Errors.Rollup__InvalidChainId(block.chainid, _args.header.globalVariables.chainId)
+    );
+
+    require(
+      _args.header.globalVariables.version == _args.version,
+      Errors.Rollup__InvalidVersion(_args.version, _args.header.globalVariables.version)
+    );
+
+    require(
+      _args.header.globalVariables.blockNumber == _args.pendingBlockNumber + 1,
+      Errors.Rollup__InvalidBlockNumber(
+        _args.pendingBlockNumber + 1, _args.header.globalVariables.blockNumber
+      )
+    );
+
+    bytes32 tipArchive = _blocks[_args.pendingBlockNumber].archive;
+    require(
+      tipArchive == _args.header.lastArchive.root,
+      Errors.Rollup__InvalidArchive(tipArchive, _args.header.lastArchive.root)
+    );
+
+    Slot slot = Slot.wrap(_args.header.globalVariables.slotNumber);
+    Slot lastSlot = _blocks[_args.pendingBlockNumber].slotNumber;
+    require(slot > lastSlot, Errors.Rollup__SlotAlreadyInChain(lastSlot, slot));
+
+    Timestamp timestamp = _args.getTimestampForSlot(slot);
+    require(
+      Timestamp.wrap(_args.header.globalVariables.timestamp) == timestamp,
+      Errors.Rollup__InvalidTimestamp(
+        timestamp, Timestamp.wrap(_args.header.globalVariables.timestamp)
+      )
+    );
+
+    // @note If you are hitting this error, it is likely because the chain you are using has a block time
+    //       that differs from the value we have in the constants.
+    //       This typically happens because the sequencer expects to be able to include an Aztec block in the
+    //       "next" Ethereum block, based on a timestamp that is 12 seconds in the future from the last block.
+    //       If the actual next block is only 1 second in the future, the expected timestamp ends up being in
+    //       the future.
+ require( + timestamp <= _args.currentTime, Errors.Rollup__TimestampInFuture(_args.currentTime, timestamp) + ); + + // Check if the data is available + require( + _args.flags.ignoreDA || _args.header.contentCommitment.txsEffectsHash == _args.txsEffectsHash, + Errors.Rollup__UnavailableTxs(_args.header.contentCommitment.txsEffectsHash) + ); + + // If not canonical rollup, require that the fees are zero + if (address(this) != _args.feeJuicePortal.canonicalRollup()) { + require(_args.header.globalVariables.gasFees.feePerDaGas == 0, Errors.Rollup__NonZeroDaFee()); + require(_args.header.globalVariables.gasFees.feePerL2Gas == 0, Errors.Rollup__NonZeroL2Fee()); + } else { + require(_args.header.globalVariables.gasFees.feePerDaGas == 0, Errors.Rollup__NonZeroDaFee()); + require( + _args.header.globalVariables.gasFees.feePerL2Gas == _args.manaBaseFee, + Errors.Rollup__InvalidManaBaseFee( + _args.manaBaseFee, _args.header.globalVariables.gasFees.feePerL2Gas + ) + ); + } + } + + function validateEpochProofRightClaimAtTime( + Slot _currentSlot, + address _currentProposer, + Epoch _epochToProve, + uint256 _posInEpoch, + SignedEpochProofQuote calldata _quote, + bytes32 _digest, + DataStructures.EpochProofClaim storage _proofClaim, + uint256 _claimDurationInL2Slots, + uint256 _proofCommitmentMinBondAmountInTst, + IProofCommitmentEscrow _proofCommitmentEscrow + ) internal view { + SignatureLib.verify(_quote.signature, _quote.quote.prover, _digest); + + require( + _quote.quote.validUntilSlot >= _currentSlot, + Errors.Rollup__QuoteExpired(_currentSlot, _quote.quote.validUntilSlot) + ); + + require( + _quote.quote.basisPointFee <= 10_000, + Errors.Rollup__InvalidBasisPointFee(_quote.quote.basisPointFee) + ); + + require( + _currentProposer == address(0) || _currentProposer == msg.sender, + Errors.Leonidas__InvalidProposer(_currentProposer, msg.sender) + ); + + require( + _quote.quote.epochToProve == _epochToProve, + Errors.Rollup__NotClaimingCorrectEpoch(_epochToProve, _quote.quote.epochToProve) + ); + + require( + _posInEpoch < _claimDurationInL2Slots, + Errors.Rollup__NotInClaimPhase(_posInEpoch, _claimDurationInL2Slots) + ); + + // if the epoch to prove is not the one that has been claimed, + // then whatever is in the proofClaim is stale + require( + _proofClaim.epochToProve != _epochToProve || _proofClaim.proposerClaimant == address(0), + Errors.Rollup__ProofRightAlreadyClaimed() + ); + + require( + _quote.quote.bondAmount >= _proofCommitmentMinBondAmountInTst, + Errors.Rollup__InsufficientBondAmount( + _proofCommitmentMinBondAmountInTst, _quote.quote.bondAmount + ) + ); + + uint256 availableFundsInEscrow = _proofCommitmentEscrow.deposits(_quote.quote.prover); + require( + _quote.quote.bondAmount <= availableFundsInEscrow, + Errors.Rollup__InsufficientFundsInEscrow(_quote.quote.bondAmount, availableFundsInEscrow) + ); + } +} diff --git a/l1-contracts/src/core/libraries/crypto/SampleLib.sol b/l1-contracts/src/core/libraries/crypto/SampleLib.sol index a790dc6e56f..721aa7bd1be 100644 --- a/l1-contracts/src/core/libraries/crypto/SampleLib.sol +++ b/l1-contracts/src/core/libraries/crypto/SampleLib.sol @@ -33,7 +33,7 @@ library SampleLib { * @return indices - The indices of the committee */ function computeCommitteeStupid(uint256 _committeeSize, uint256 _indexCount, uint256 _seed) - external + internal pure returns (uint256[] memory) { @@ -63,7 +63,7 @@ library SampleLib { * @return indices - The indices of the committee */ function computeCommitteeClever(uint256 _committeeSize, uint256 _indexCount, uint256 
_seed) - external + internal pure returns (uint256[] memory) { diff --git a/l1-contracts/src/core/libraries/crypto/SignatureLib.sol b/l1-contracts/src/core/libraries/crypto/SignatureLib.sol index 29e37357bc8..4223f5ddafe 100644 --- a/l1-contracts/src/core/libraries/crypto/SignatureLib.sol +++ b/l1-contracts/src/core/libraries/crypto/SignatureLib.sol @@ -4,14 +4,14 @@ pragma solidity ^0.8.27; import {Errors} from "@aztec/core/libraries/Errors.sol"; -library SignatureLib { - struct Signature { - bool isEmpty; - uint8 v; - bytes32 r; - bytes32 s; - } +struct Signature { + bool isEmpty; + uint8 v; + bytes32 r; + bytes32 s; +} +library SignatureLib { /** * @notice The domain seperator for the signatures */ diff --git a/l1-contracts/test/Rollup.t.sol b/l1-contracts/test/Rollup.t.sol index 6c5abf747ee..80d85eb3115 100644 --- a/l1-contracts/test/Rollup.t.sol +++ b/l1-contracts/test/Rollup.t.sol @@ -6,8 +6,11 @@ import {DecoderBase} from "./decoders/Base.sol"; import {DataStructures} from "@aztec/core/libraries/DataStructures.sol"; import {Constants} from "@aztec/core/libraries/ConstantsGen.sol"; -import {SignatureLib} from "@aztec/core/libraries/crypto/SignatureLib.sol"; -import {EpochProofQuoteLib} from "@aztec/core/libraries/EpochProofQuoteLib.sol"; +import {Signature} from "@aztec/core/libraries/crypto/SignatureLib.sol"; +import { + EpochProofQuote, + SignedEpochProofQuote +} from "@aztec/core/libraries/RollupLibs/EpochProofQuoteLib.sol"; import {Math} from "@oz/utils/math/Math.sol"; import {Registry} from "@aztec/governance/Registry.sol"; @@ -26,7 +29,9 @@ import {TestConstants} from "./harnesses/TestConstants.sol"; import {RewardDistributor} from "@aztec/governance/RewardDistributor.sol"; import {TxsDecoderHelper} from "./decoders/helpers/TxsDecoderHelper.sol"; import {IERC20Errors} from "@oz/interfaces/draft-IERC6093.sol"; -import {ProposeArgs, OracleInput, ProposeLib} from "@aztec/core/libraries/ProposeLib.sol"; +import { + ProposeArgs, OracleInput, ProposeLib +} from "@aztec/core/libraries/RollupLibs/ProposeLib.sol"; import { Timestamp, Slot, Epoch, SlotLib, EpochLib, TimeFns @@ -54,10 +59,10 @@ contract RollupTest is DecoderBase, TimeFns { FeeJuicePortal internal feeJuicePortal; IProofCommitmentEscrow internal proofCommitmentEscrow; RewardDistributor internal rewardDistributor; - SignatureLib.Signature[] internal signatures; + Signature[] internal signatures; - EpochProofQuoteLib.EpochProofQuote internal quote; - EpochProofQuoteLib.SignedEpochProofQuote internal signedQuote; + EpochProofQuote internal quote; + SignedEpochProofQuote internal signedQuote; uint256 internal privateKey; address internal signer; @@ -107,7 +112,7 @@ contract RollupTest is DecoderBase, TimeFns { privateKey = 0x123456789abcdef123456789abcdef123456789abcdef123456789abcdef1234; signer = vm.addr(privateKey); uint256 bond = rollup.PROOF_COMMITMENT_MIN_BOND_AMOUNT_IN_TST(); - quote = EpochProofQuoteLib.EpochProofQuote({ + quote = EpochProofQuote({ epochToProve: Epoch.wrap(0), validUntilSlot: Slot.wrap(1), bondAmount: bond, @@ -231,18 +236,14 @@ contract RollupTest is DecoderBase, TimeFns { ); rollup.claimEpochProofRight(signedQuote); - ( - Epoch epochToProve, - uint256 basisPointFee, - uint256 bondAmount, - address bondProvider, - address proposerClaimant - ) = rollup.proofClaim(); - assertEq(epochToProve, signedQuote.quote.epochToProve, "Invalid epoch to prove"); - assertEq(basisPointFee, signedQuote.quote.basisPointFee, "Invalid basis point fee"); - assertEq(bondAmount, signedQuote.quote.bondAmount, "Invalid bond 
amount"); - assertEq(bondProvider, quote.prover, "Invalid bond provider"); - assertEq(proposerClaimant, address(this), "Invalid proposer claimant"); + DataStructures.EpochProofClaim memory epochProofClaim = rollup.getProofClaim(); + assertEq(epochProofClaim.epochToProve, signedQuote.quote.epochToProve, "Invalid epoch to prove"); + assertEq( + epochProofClaim.basisPointFee, signedQuote.quote.basisPointFee, "Invalid basis point fee" + ); + assertEq(epochProofClaim.bondAmount, signedQuote.quote.bondAmount, "Invalid bond amount"); + assertEq(epochProofClaim.bondProvider, quote.prover, "Invalid bond provider"); + assertEq(epochProofClaim.proposerClaimant, address(this), "Invalid proposer claimant"); assertEq( proofCommitmentEscrow.deposits(quote.prover), quote.bondAmount * 9, "Invalid escrow balance" ); @@ -1181,16 +1182,16 @@ contract RollupTest is DecoderBase, TimeFns { ); } - function _quoteToSignedQuote(EpochProofQuoteLib.EpochProofQuote memory _quote) + function _quoteToSignedQuote(EpochProofQuote memory _quote) internal view - returns (EpochProofQuoteLib.SignedEpochProofQuote memory) + returns (SignedEpochProofQuote memory) { bytes32 digest = rollup.quoteToDigest(_quote); (uint8 v, bytes32 r, bytes32 s) = vm.sign(privateKey, digest); - return EpochProofQuoteLib.SignedEpochProofQuote({ + return SignedEpochProofQuote({ quote: _quote, - signature: SignatureLib.Signature({isEmpty: false, v: v, r: r, s: s}) + signature: Signature({isEmpty: false, v: v, r: r, s: s}) }); } } diff --git a/l1-contracts/test/decoders/Decoders.t.sol b/l1-contracts/test/decoders/Decoders.t.sol index c3d47db7bb8..2165f759911 100644 --- a/l1-contracts/test/decoders/Decoders.t.sol +++ b/l1-contracts/test/decoders/Decoders.t.sol @@ -8,7 +8,7 @@ import {Hash} from "@aztec/core/libraries/crypto/Hash.sol"; import {HeaderLibHelper} from "./helpers/HeaderLibHelper.sol"; import {TxsDecoderHelper} from "./helpers/TxsDecoderHelper.sol"; -import {HeaderLib} from "@aztec/core/libraries/HeaderLib.sol"; +import {HeaderLib, Header} from "@aztec/core/libraries/RollupLibs/HeaderLib.sol"; import {Constants} from "@aztec/core/libraries/ConstantsGen.sol"; /** @@ -39,7 +39,7 @@ contract DecodersTest is DecoderBase { // Header { DecoderBase.DecodedHeader memory referenceHeader = data.block.decodedHeader; - HeaderLib.Header memory header = headerHelper.decode(data.block.header); + Header memory header = headerHelper.decode(data.block.header); // GlobalVariables { diff --git a/l1-contracts/test/decoders/helpers/HeaderLibHelper.sol b/l1-contracts/test/decoders/helpers/HeaderLibHelper.sol index 02528023a7c..8b81756fa77 100644 --- a/l1-contracts/test/decoders/helpers/HeaderLibHelper.sol +++ b/l1-contracts/test/decoders/helpers/HeaderLibHelper.sol @@ -2,11 +2,11 @@ // Copyright 2024 Aztec Labs. 
pragma solidity >=0.8.27; -import {HeaderLib} from "@aztec/core/libraries/HeaderLib.sol"; +import {HeaderLib, Header} from "@aztec/core/libraries/RollupLibs/HeaderLib.sol"; contract HeaderLibHelper { // A wrapper used such that we get "calldata" and not memory - function decode(bytes calldata _header) public pure returns (HeaderLib.Header memory) { + function decode(bytes calldata _header) public pure returns (Header memory) { return HeaderLib.decode(_header); } } diff --git a/l1-contracts/test/decoders/helpers/TxsDecoderHelper.sol b/l1-contracts/test/decoders/helpers/TxsDecoderHelper.sol index 2f8db8d3378..45cd04ad139 100644 --- a/l1-contracts/test/decoders/helpers/TxsDecoderHelper.sol +++ b/l1-contracts/test/decoders/helpers/TxsDecoderHelper.sol @@ -2,7 +2,7 @@ // Copyright 2024 Aztec Labs. pragma solidity >=0.8.27; -import {TxsDecoder} from "@aztec/core/libraries/TxsDecoder.sol"; +import {TxsDecoder} from "@aztec/core/libraries/RollupLibs/TxsDecoder.sol"; import {MerkleLib} from "@aztec/core/libraries/crypto/MerkleLib.sol"; contract TxsDecoderHelper { diff --git a/l1-contracts/test/fees/FeeModelTestPoints.t.sol b/l1-contracts/test/fees/FeeModelTestPoints.t.sol index 3dd5b0de248..368df77e602 100644 --- a/l1-contracts/test/fees/FeeModelTestPoints.t.sol +++ b/l1-contracts/test/fees/FeeModelTestPoints.t.sol @@ -4,7 +4,7 @@ pragma solidity >=0.8.27; import {TestBase} from "../base/Base.sol"; -import {OracleInput as FeeMathOracleInput} from "@aztec/core/libraries/FeeMath.sol"; +import {OracleInput as FeeMathOracleInput} from "@aztec/core/libraries/RollupLibs/FeeMath.sol"; // Remember that foundry json parsing is alphabetically done, so you MUST // sort the struct fields alphabetically or prepare for a headache. diff --git a/l1-contracts/test/fees/FeeRollup.t.sol b/l1-contracts/test/fees/FeeRollup.t.sol index 7f131fb9da5..ba655ed2e74 100644 --- a/l1-contracts/test/fees/FeeRollup.t.sol +++ b/l1-contracts/test/fees/FeeRollup.t.sol @@ -6,8 +6,8 @@ import {DecoderBase} from "../decoders/Base.sol"; import {DataStructures} from "@aztec/core/libraries/DataStructures.sol"; import {Constants} from "@aztec/core/libraries/ConstantsGen.sol"; -import {SignatureLib} from "@aztec/core/libraries/crypto/SignatureLib.sol"; -import {EpochProofQuoteLib} from "@aztec/core/libraries/EpochProofQuoteLib.sol"; +import {SignatureLib, Signature} from "@aztec/core/libraries/crypto/SignatureLib.sol"; +import {EpochProofQuoteLib} from "@aztec/core/libraries/RollupLibs/EpochProofQuoteLib.sol"; import {Math} from "@oz/utils/math/Math.sol"; import {Registry} from "@aztec/governance/Registry.sol"; @@ -36,10 +36,11 @@ import {TxsDecoderHelper} from "../decoders/helpers/TxsDecoderHelper.sol"; import {IERC20Errors} from "@oz/interfaces/draft-IERC6093.sol"; import {IFeeJuicePortal} from "@aztec/core/interfaces/IFeeJuicePortal.sol"; import {IRewardDistributor} from "@aztec/governance/interfaces/IRewardDistributor.sol"; -import {OracleInput} from "@aztec/core/libraries/FeeMath.sol"; -import {ProposeArgs, OracleInput, ProposeLib} from "@aztec/core/libraries/ProposeLib.sol"; +import { + ProposeArgs, OracleInput, ProposeLib +} from "@aztec/core/libraries/RollupLibs/ProposeLib.sol"; import {IERC20} from "@oz/token/ERC20/IERC20.sol"; -import {FeeMath} from "@aztec/core/libraries/FeeMath.sol"; +import {FeeMath, MANA_TARGET} from "@aztec/core/libraries/RollupLibs/FeeMath.sol"; import { FeeHeader as FeeHeaderModel, @@ -92,7 +93,7 @@ contract FeeRollupTest is FeeModelTestPoints, DecoderBase { bytes header; bytes body; bytes32[] txHashes; - 
SignatureLib.Signature[] signatures; + Signature[] signatures; } DecoderBase.Full full = load("empty_block_1"); @@ -156,7 +157,7 @@ contract FeeRollupTest is FeeModelTestPoints, DecoderBase { bytes32 blockHash = 0x267f79fe7e757b20e924fac9f78264a0d1c8c4b481fea21d0bbe74650d87a1f1; bytes32[] memory txHashes = new bytes32[](0); - SignatureLib.Signature[] memory signatures = new SignatureLib.Signature[](0); + Signature[] memory signatures = new Signature[](0); bytes memory body = full.block.body; bytes memory header = full.block.header; @@ -252,11 +253,11 @@ contract FeeRollupTest is FeeModelTestPoints, DecoderBase { rollup.getBlock(rollup.getPendingBlockNumber()).feeHeader; uint256 excessManaNoPrune = ( parentFeeHeaderNoPrune.excessMana + parentFeeHeaderNoPrune.manaUsed - ).clampedAdd(-int256(FeeMath.MANA_TARGET)); + ).clampedAdd(-int256(MANA_TARGET)); FeeHeader memory parentFeeHeaderPrune = rollup.getBlock(rollup.getProvenBlockNumber()).feeHeader; uint256 excessManaPrune = (parentFeeHeaderPrune.excessMana + parentFeeHeaderPrune.manaUsed) - .clampedAdd(-int256(FeeMath.MANA_TARGET)); + .clampedAdd(-int256(MANA_TARGET)); assertGt(excessManaNoPrune, excessManaPrune, "excess mana should be lower if we prune"); diff --git a/l1-contracts/test/fees/MinimalFeeModel.sol b/l1-contracts/test/fees/MinimalFeeModel.sol index 0e5284d0b43..90849e20248 100644 --- a/l1-contracts/test/fees/MinimalFeeModel.sol +++ b/l1-contracts/test/fees/MinimalFeeModel.sol @@ -2,7 +2,14 @@ // Copyright 2024 Aztec Labs. pragma solidity >=0.8.27; -import {FeeMath, OracleInput} from "@aztec/core/libraries/FeeMath.sol"; +import { + FeeMath, + OracleInput, + MANA_TARGET, + L1_GAS_PER_BLOCK_PROPOSED, + L1_GAS_PER_EPOCH_VERIFIED, + MINIMUM_CONGESTION_MULTIPLIER +} from "@aztec/core/libraries/RollupLibs/FeeMath.sol"; import {Timestamp, TimeFns, Slot, SlotLib} from "@aztec/core/libraries/TimeMath.sol"; import {Vm} from "forge-std/Vm.sol"; import { @@ -60,19 +67,17 @@ contract MinimalFeeModel is TimeFns { returns (ManaBaseFeeComponents memory) { L1Fees memory fees = getCurrentL1Fees(); - uint256 dataCost = Math.mulDiv( - _blobsUsed * BLOB_GAS_PER_BLOB, fees.blob_fee, FeeMath.MANA_TARGET, Math.Rounding.Ceil - ); - uint256 gasUsed = FeeMath.L1_GAS_PER_BLOCK_PROPOSED + _blobsUsed * GAS_PER_BLOB_POINT_EVALUATION - + FeeMath.L1_GAS_PER_EPOCH_VERIFIED / EPOCH_DURATION; - uint256 gasCost = Math.mulDiv(gasUsed, fees.base_fee, FeeMath.MANA_TARGET, Math.Rounding.Ceil); + uint256 dataCost = + Math.mulDiv(_blobsUsed * BLOB_GAS_PER_BLOB, fees.blob_fee, MANA_TARGET, Math.Rounding.Ceil); + uint256 gasUsed = L1_GAS_PER_BLOCK_PROPOSED + _blobsUsed * GAS_PER_BLOB_POINT_EVALUATION + + L1_GAS_PER_EPOCH_VERIFIED / EPOCH_DURATION; + uint256 gasCost = Math.mulDiv(gasUsed, fees.base_fee, MANA_TARGET, Math.Rounding.Ceil); uint256 provingCost = getProvingCost(); uint256 congestionMultiplier = FeeMath.congestionMultiplier(calcExcessMana()); uint256 total = dataCost + gasCost + provingCost; - uint256 congestionCost = - (total * congestionMultiplier / FeeMath.MINIMUM_CONGESTION_MULTIPLIER) - total; + uint256 congestionCost = (total * congestionMultiplier / MINIMUM_CONGESTION_MULTIPLIER) - total; uint256 feeAssetPrice = _inFeeAsset ? 
getFeeAssetPrice() : 1e9; @@ -91,7 +96,7 @@ contract MinimalFeeModel is TimeFns { function calcExcessMana() internal view returns (uint256) { FeeHeader storage parent = feeHeaders[populatedThrough]; - return (parent.excess_mana + parent.mana_used).clampedAdd(-int256(FeeMath.MANA_TARGET)); + return (parent.excess_mana + parent.mana_used).clampedAdd(-int256(MANA_TARGET)); } function addSlot(OracleInput memory _oracleInput) public { diff --git a/l1-contracts/test/fees/MinimalFeeModel.t.sol b/l1-contracts/test/fees/MinimalFeeModel.t.sol index cbc0149deff..f2b79e42f24 100644 --- a/l1-contracts/test/fees/MinimalFeeModel.t.sol +++ b/l1-contracts/test/fees/MinimalFeeModel.t.sol @@ -1,7 +1,7 @@ // SPDX-License-Identifier: UNLICENSED pragma solidity >=0.8.27; -import {OracleInput, FeeMath} from "@aztec/core/libraries/FeeMath.sol"; +import {OracleInput, FeeMath} from "@aztec/core/libraries/RollupLibs/FeeMath.sol"; import { FeeModelTestPoints, TestPoint, @@ -12,6 +12,10 @@ import { import {MinimalFeeModel} from "./MinimalFeeModel.sol"; import {Errors} from "@aztec/core/libraries/Errors.sol"; import {SlotLib, Slot} from "@aztec/core/libraries/TimeMath.sol"; +import { + MAX_PROVING_COST_MODIFIER, + MAX_FEE_ASSET_PRICE_MODIFIER +} from "@aztec/core/libraries/RollupLibs/FeeMath.sol"; contract MinimalFeeModelTest is FeeModelTestPoints { using SlotLib for Slot; @@ -78,8 +82,8 @@ contract MinimalFeeModelTest is FeeModelTestPoints { } function test_invalidOracleInput() public { - uint256 provingBoundary = FeeMath.MAX_PROVING_COST_MODIFIER + 1; - uint256 feeAssetPriceBoundary = FeeMath.MAX_FEE_ASSET_PRICE_MODIFIER + 1; + uint256 provingBoundary = MAX_PROVING_COST_MODIFIER + 1; + uint256 feeAssetPriceBoundary = MAX_FEE_ASSET_PRICE_MODIFIER + 1; vm.expectRevert(abi.encodeWithSelector(Errors.FeeMath__InvalidProvingCostModifier.selector)); model.addSlot( diff --git a/l1-contracts/test/portals/TokenPortal.t.sol b/l1-contracts/test/portals/TokenPortal.t.sol index da7af0eb534..990f3ec6512 100644 --- a/l1-contracts/test/portals/TokenPortal.t.sol +++ b/l1-contracts/test/portals/TokenPortal.t.sol @@ -21,9 +21,11 @@ import {TestERC20} from "@aztec/mock/TestERC20.sol"; import {NaiveMerkle} from "../merkle/Naive.sol"; import {MockFeeJuicePortal} from "@aztec/mock/MockFeeJuicePortal.sol"; import {RewardDistributor} from "@aztec/governance/RewardDistributor.sol"; +import {stdStorage, StdStorage} from "forge-std/Test.sol"; contract TokenPortalTest is Test { using Hash for DataStructures.L1ToL2Msg; + using stdStorage for StdStorage; event MessageConsumed(bytes32 indexed messageHash, address indexed recipient); @@ -77,7 +79,7 @@ contract TokenPortalTest is Test { tokenPortal.initialize(address(registry), address(testERC20), l2TokenAddress); // Modify the proven block count - vm.store(address(rollup), bytes32(uint256(9)), bytes32(l2BlockNumber)); + stdstore.target(address(rollup)).sig("getProvenBlockNumber()").checked_write(l2BlockNumber); assertEq(rollup.getProvenBlockNumber(), l2BlockNumber); vm.deal(address(this), 100 ether); diff --git a/l1-contracts/test/sparta/Sparta.t.sol b/l1-contracts/test/sparta/Sparta.t.sol index 9dff6c5babb..ea0c94f7d49 100644 --- a/l1-contracts/test/sparta/Sparta.t.sol +++ b/l1-contracts/test/sparta/Sparta.t.sol @@ -6,7 +6,7 @@ import {DecoderBase} from "../decoders/Base.sol"; import {DataStructures} from "@aztec/core/libraries/DataStructures.sol"; import {Constants} from "@aztec/core/libraries/ConstantsGen.sol"; -import {SignatureLib} from "@aztec/core/libraries/crypto/SignatureLib.sol"; +import 
{Signature} from "@aztec/core/libraries/crypto/SignatureLib.sol"; import {Inbox} from "@aztec/core/messagebridge/Inbox.sol"; import {Outbox} from "@aztec/core/messagebridge/Outbox.sol"; @@ -20,7 +20,9 @@ import {TestERC20} from "@aztec/mock/TestERC20.sol"; import {TxsDecoderHelper} from "../decoders/helpers/TxsDecoderHelper.sol"; import {MessageHashUtils} from "@oz/utils/cryptography/MessageHashUtils.sol"; import {MockFeeJuicePortal} from "@aztec/mock/MockFeeJuicePortal.sol"; -import {ProposeArgs, OracleInput, ProposeLib} from "@aztec/core/libraries/ProposeLib.sol"; +import { + ProposeArgs, OracleInput, ProposeLib +} from "@aztec/core/libraries/RollupLibs/ProposeLib.sol"; import {Slot, Epoch, SlotLib, EpochLib} from "@aztec/core/libraries/TimeMath.sol"; import {RewardDistributor} from "@aztec/governance/RewardDistributor.sol"; @@ -48,7 +50,7 @@ contract SpartaTest is DecoderBase { TxsDecoderHelper internal txsHelper; TestERC20 internal testERC20; RewardDistributor internal rewardDistributor; - SignatureLib.Signature internal emptySignature; + Signature internal emptySignature; mapping(address validator => uint256 privateKey) internal privateKeys; mapping(address => bool) internal _seenValidators; mapping(address => bool) internal _seenCommittee; @@ -199,7 +201,7 @@ contract SpartaTest is DecoderBase { address[] memory validators = rollup.getEpochCommittee(rollup.getCurrentEpoch()); ree.needed = validators.length * 2 / 3 + 1; - SignatureLib.Signature[] memory signatures = new SignatureLib.Signature[](_signatureCount); + Signature[] memory signatures = new Signature[](_signatureCount); bytes32 digest = ProposeLib.digest(args); for (uint256 i = 0; i < _signatureCount; i++) { @@ -239,7 +241,7 @@ contract SpartaTest is DecoderBase { return; } } else { - SignatureLib.Signature[] memory signatures = new SignatureLib.Signature[](0); + Signature[] memory signatures = new Signature[](0); rollup.propose(args, signatures, full.block.body); } @@ -298,13 +300,13 @@ contract SpartaTest is DecoderBase { function createSignature(address _signer, bytes32 _digest) internal view - returns (SignatureLib.Signature memory) + returns (Signature memory) { uint256 privateKey = privateKeys[_signer]; bytes32 digest = _digest.toEthSignedMessageHash(); (uint8 v, bytes32 r, bytes32 s) = vm.sign(privateKey, digest); - return SignatureLib.Signature({isEmpty: false, v: v, r: r, s: s}); + return Signature({isEmpty: false, v: v, r: r, s: s}); } } diff --git a/yarn-project/aztec.js/src/utils/cheat_codes.ts b/yarn-project/aztec.js/src/utils/cheat_codes.ts index 87048d1c0e1..10b837a9b04 100644 --- a/yarn-project/aztec.js/src/utils/cheat_codes.ts +++ b/yarn-project/aztec.js/src/utils/cheat_codes.ts @@ -82,8 +82,11 @@ export class RollupCheatCodes { /** The pending chain tip */ pending: bigint; /** The proven chain tip */ proven: bigint; }> { - const [pending, proven] = await this.rollup.read.tips(); - return { pending, proven }; + const res = await this.rollup.read.getTips(); + return { + pending: res.pendingBlockNumber, + proven: res.provenBlockNumber, + }; } /** Fetches the epoch and slot duration config from the rollup contract */ @@ -125,8 +128,13 @@ export class RollupCheatCodes { /** Returns the current proof claim (if any) */ public async getProofClaim(): Promise { // REFACTOR: This code is duplicated from l1-publisher - const [epochToProve, basisPointFee, bondAmount, bondProviderHex, proposerClaimantHex] = - await this.rollup.read.proofClaim(); + const { + epochToProve, + basisPointFee, + bondAmount, + bondProvider: 
bondProviderHex, + proposerClaimant: proposerClaimantHex, + } = await this.rollup.read.getProofClaim(); const bondProvider = EthAddress.fromString(bondProviderHex); const proposerClaimant = EthAddress.fromString(proposerClaimantHex); @@ -151,7 +159,7 @@ export class RollupCheatCodes { public async markAsProven(maybeBlockNumber?: number | bigint) { const blockNumber = maybeBlockNumber ? BigInt(maybeBlockNumber) - : await this.rollup.read.tips().then(([pending]) => pending); + : await this.rollup.read.getTips().then(({ pendingBlockNumber }) => pendingBlockNumber); await this.asOwner(async account => { await this.rollup.write.setAssumeProvenThroughBlockNumber([blockNumber], { account, chain: this.client.chain }); diff --git a/yarn-project/cli/src/cmds/l1/update_l1_validators.ts b/yarn-project/cli/src/cmds/l1/update_l1_validators.ts index e08231e5b7f..4f3d33eb574 100644 --- a/yarn-project/cli/src/cmds/l1/update_l1_validators.ts +++ b/yarn-project/cli/src/cmds/l1/update_l1_validators.ts @@ -171,8 +171,6 @@ export async function debugRollup({ rpcUrl, chainId, rollupAddress, log }: Rollu log(`Archive: ${archive}`); const epochNum = await rollup.read.getCurrentEpoch(); log(`Current epoch: ${epochNum}`); - const epoch = await rollup.read.epochs([epochNum]); - log(`Epoch Sample Seed: ${epoch[0].toString()}, Next Seed: ${epoch[1].toString()}`); const slot = await rollup.read.getCurrentSlot(); log(`Current slot: ${slot}`); const proposerDuringPrevL1Block = await rollup.read.getCurrentProposer(); diff --git a/yarn-project/end-to-end/src/prover-coordination/e2e_prover_coordination.test.ts b/yarn-project/end-to-end/src/prover-coordination/e2e_prover_coordination.test.ts index bf0cf8934e0..899b52437f7 100644 --- a/yarn-project/end-to-end/src/prover-coordination/e2e_prover_coordination.test.ts +++ b/yarn-project/end-to-end/src/prover-coordination/e2e_prover_coordination.test.ts @@ -139,12 +139,13 @@ describe('e2e_prover_coordination', () => { proposer: EthAddress; prover: EthAddress; }) => { - const [epochToProve, basisPointFee, bondAmount, prover, proposer] = await rollupContract.read.proofClaim(); + const { epochToProve, basisPointFee, bondAmount, bondProvider, proposerClaimant } = + await rollupContract.read.getProofClaim(); expect(epochToProve).toEqual(expected.epochToProve); expect(basisPointFee).toEqual(BigInt(expected.basisPointFee)); expect(bondAmount).toEqual(expected.bondAmount); - expect(prover).toEqual(expected.prover.toChecksumString()); - expect(proposer).toEqual(expected.proposer.toChecksumString()); + expect(bondProvider).toEqual(expected.prover.toChecksumString()); + expect(proposerClaimant).toEqual(expected.proposer.toChecksumString()); }; const performEscrow = async (amount: bigint) => { diff --git a/yarn-project/ethereum/src/deploy_l1_contracts.ts b/yarn-project/ethereum/src/deploy_l1_contracts.ts index 8d2f6b64245..32832708c2a 100644 --- a/yarn-project/ethereum/src/deploy_l1_contracts.ts +++ b/yarn-project/ethereum/src/deploy_l1_contracts.ts @@ -5,6 +5,8 @@ import { type DebugLogger } from '@aztec/foundation/log'; import { CoinIssuerAbi, CoinIssuerBytecode, + ExtRollupLibAbi, + ExtRollupLibBytecode, FeeJuicePortalAbi, FeeJuicePortalBytecode, GovernanceAbi, @@ -13,6 +15,8 @@ import { GovernanceProposerBytecode, InboxAbi, InboxBytecode, + LeonidasLibAbi, + LeonidasLibBytecode, OutboxAbi, OutboxBytecode, RegistryAbi, @@ -22,12 +26,8 @@ import { RollupAbi, RollupBytecode, RollupLinkReferences, - SampleLibAbi, - SampleLibBytecode, TestERC20Abi, TestERC20Bytecode, - TxsDecoderAbi, - 
TxsDecoderBytecode, } from '@aztec/l1-artifacts'; import type { Abi, Narrow } from 'abitype'; @@ -172,13 +172,13 @@ export const l1Artifacts: L1ContractArtifactsForDeployment = { libraries: { linkReferences: RollupLinkReferences, libraryCode: { - TxsDecoder: { - contractAbi: TxsDecoderAbi, - contractBytecode: TxsDecoderBytecode, + LeonidasLib: { + contractAbi: LeonidasLibAbi, + contractBytecode: LeonidasLibBytecode, }, - SampleLib: { - contractAbi: SampleLibAbi, - contractBytecode: SampleLibBytecode, + ExtRollupLib: { + contractAbi: ExtRollupLibAbi, + contractBytecode: ExtRollupLibBytecode, }, }, }, @@ -613,7 +613,16 @@ export async function deployL1Contract( const l1TxUtils = new L1TxUtils(publicClient, walletClient, logger); if (libraries) { - // @note Assumes that we wont have nested external libraries. + // Note that this does NOT work well for linked libraries having linked libraries. + + // Verify that all link references have corresponding code + for (const linkRef in libraries.linkReferences) { + for (const contractName in libraries.linkReferences[linkRef]) { + if (!libraries.libraryCode[contractName]) { + throw new Error(`Missing library code for ${contractName}`); + } + } + } const replacements: Record = {}; diff --git a/yarn-project/l1-artifacts/scripts/generate-artifacts.sh b/yarn-project/l1-artifacts/scripts/generate-artifacts.sh index ef0892de022..3b78a796f50 100755 --- a/yarn-project/l1-artifacts/scripts/generate-artifacts.sh +++ b/yarn-project/l1-artifacts/scripts/generate-artifacts.sh @@ -28,8 +28,8 @@ CONTRACTS=( "l1-contracts:GovernanceProposer" "l1-contracts:Governance" "l1-contracts:NewGovernanceProposerPayload" - "l1-contracts:TxsDecoder" - "l1-contracts:SampleLib" + "l1-contracts:LeonidasLib" + "l1-contracts:ExtRollupLib" ) diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts index 10e4b61f967..d7e139d4dde 100644 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts +++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts @@ -307,8 +307,13 @@ export class L1Publisher { } public async getProofClaim(): Promise { - const [epochToProve, basisPointFee, bondAmount, bondProviderHex, proposerClaimantHex] = - await this.rollupContract.read.proofClaim(); + const { + epochToProve, + basisPointFee, + bondAmount, + bondProvider: bondProviderHex, + proposerClaimant: proposerClaimantHex, + } = await this.rollupContract.read.getProofClaim(); const bondProvider = EthAddress.fromString(bondProviderHex); const proposerClaimant = EthAddress.fromString(proposerClaimantHex); @@ -636,7 +641,7 @@ export class L1Publisher { const { fromBlock, toBlock, publicInputs, proof } = args; // Check that the block numbers match the expected epoch to be proven - const [pending, proven] = await this.rollupContract.read.tips(); + const { pendingBlockNumber: pending, provenBlockNumber: proven } = await this.rollupContract.read.getTips(); if (proven !== BigInt(fromBlock) - 1n) { throw new Error(`Cannot submit epoch proof for ${fromBlock}-${toBlock} as proven block is ${proven}`); } From c2c1744cb40f91773988476b23e61eb00babdc84 Mon Sep 17 00:00:00 2001 From: just-mitch <68168980+just-mitch@users.noreply.github.com> Date: Wed, 4 Dec 2024 09:52:52 -0500 Subject: [PATCH 24/24] chore: centralized helm flag for proving and clean release tf deploys (#10221) Make a single flag `aztec.realProofs` in our aztec-network chart for enabling real proofs. 
(fix #10157) Improve usability of deploy script for network releases. (fix #10158) --- .../aztec-network/templates/boot-node.yaml | 4 ++-- .../aztec-network/templates/prover-agent.yaml | 2 +- .../aztec-network/templates/prover-node.yaml | 2 +- spartan/aztec-network/templates/pxe.yaml | 2 +- .../templates/transaction-bot.yaml | 4 ++-- spartan/aztec-network/values.yaml | 8 +------- .../1-validator-with-proving-and-metrics.yaml | 17 +++++------------ .../values/1-validator-with-proving.yaml | 17 +++++------------ .../values/gcp-proving-test.yaml | 9 +-------- .../aztec-network/values/multicloud-demo.yaml | 4 ---- .../values/prover-node-with-agents.yaml | 6 ------ spartan/aztec-network/values/release.yaml | 19 +++++-------------- spartan/terraform/deploy-release/deploy.sh | 11 +++++++++-- spartan/terraform/deploy-release/main.tf | 12 ++++++------ .../terraform/deploy-release/release.tfvars | 5 +---- spartan/terraform/deploy-release/variables.tf | 8 ++++---- 16 files changed, 44 insertions(+), 86 deletions(-) diff --git a/spartan/aztec-network/templates/boot-node.yaml b/spartan/aztec-network/templates/boot-node.yaml index 00e585513a6..022d344ebe4 100644 --- a/spartan/aztec-network/templates/boot-node.yaml +++ b/spartan/aztec-network/templates/boot-node.yaml @@ -157,9 +157,9 @@ spec: - name: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT value: {{ include "aztec-network.otelCollectorLogsEndpoint" . | quote }} - name: PROVER_REAL_PROOFS - value: "{{ .Values.bootNode.realProofs }}" + value: "{{ .Values.aztec.realProofs }}" - name: PXE_PROVER_ENABLED - value: "{{ .Values.bootNode.realProofs }}" + value: "{{ .Values.aztec.realProofs }}" - name: ETHEREUM_SLOT_DURATION value: "{{ .Values.ethereum.blockTime }}" - name: AZTEC_SLOT_DURATION diff --git a/spartan/aztec-network/templates/prover-agent.yaml b/spartan/aztec-network/templates/prover-agent.yaml index ef080501868..a4517c7a503 100644 --- a/spartan/aztec-network/templates/prover-agent.yaml +++ b/spartan/aztec-network/templates/prover-agent.yaml @@ -88,7 +88,7 @@ spec: - name: DEBUG value: "{{ .Values.proverAgent.debug }}" - name: PROVER_REAL_PROOFS - value: "{{ .Values.proverAgent.realProofs }}" + value: "{{ .Values.aztec.realProofs }}" - name: PROVER_AGENT_COUNT value: "1" - name: PROVER_AGENT_POLL_INTERVAL_MS diff --git a/spartan/aztec-network/templates/prover-node.yaml b/spartan/aztec-network/templates/prover-node.yaml index bf13dad1821..bfe9447570c 100644 --- a/spartan/aztec-network/templates/prover-node.yaml +++ b/spartan/aztec-network/templates/prover-node.yaml @@ -121,7 +121,7 @@ spec: - name: DEBUG value: "{{ .Values.proverNode.debug }}" - name: PROVER_REAL_PROOFS - value: "{{ .Values.proverNode.realProofs }}" + value: "{{ .Values.aztec.realProofs }}" - name: PROVER_AGENT_COUNT value: "{{ .Values.proverNode.proverAgent.count }}" - name: PROVER_AGENT_POLL_INTERVAL_MS diff --git a/spartan/aztec-network/templates/pxe.yaml b/spartan/aztec-network/templates/pxe.yaml index 94a8a87886c..bb15f206c2d 100644 --- a/spartan/aztec-network/templates/pxe.yaml +++ b/spartan/aztec-network/templates/pxe.yaml @@ -87,7 +87,7 @@ spec: - name: DEBUG value: "{{ .Values.pxe.debug }}" - name: PXE_PROVER_ENABLED - value: "{{ .Values.pxe.proverEnabled }}" + value: "{{ .Values.aztec.realProofs }}" ports: - name: http containerPort: {{ .Values.pxe.service.nodePort }} diff --git a/spartan/aztec-network/templates/transaction-bot.yaml b/spartan/aztec-network/templates/transaction-bot.yaml index cd5b88a13bd..762c6bd7c07 100644 --- a/spartan/aztec-network/templates/transaction-bot.yaml 
+++ b/spartan/aztec-network/templates/transaction-bot.yaml @@ -94,9 +94,9 @@ spec: - name: BOT_NO_START value: "{{ .Values.bot.botNoStart }}" - name: PXE_PROVER_ENABLED - value: "{{ .Values.bot.pxeProverEnabled }}" + value: "{{ .Values.aztec.realProofs }}" - name: PROVER_REAL_PROOFS - value: "{{ .Values.bot.proverRealProofs }}" + value: "{{ .Values.aztec.realProofs }}" - name: BOT_MAX_CONSECUTIVE_ERRORS value: "{{ .Values.bot.maxErrors }}" - name: BOT_STOP_WHEN_UNHEALTHY diff --git a/spartan/aztec-network/values.yaml b/spartan/aztec-network/values.yaml index 005d3061137..fbc957e802f 100644 --- a/spartan/aztec-network/values.yaml +++ b/spartan/aztec-network/values.yaml @@ -33,6 +33,7 @@ aztec: slotDuration: 24 # in seconds, aka L2 slot duration. Must be a multiple of {{ ethereum.blockTime }} epochDuration: 16 # how many L2 slots in an epoch epochProofClaimWindow: 13 # in L2 slots + realProofs: false bootNode: peerIdPrivateKey: "" @@ -48,7 +49,6 @@ bootNode: sequencer: maxSecondsBetweenBlocks: 0 minTxsPerBlock: 1 - realProofs: false validator: disabled: true p2p: @@ -118,7 +118,6 @@ proverNode: nodePort: 8080 logLevel: "debug" debug: "aztec:*,-aztec:avm_simulator*,-aztec:libp2p_service*,-aztec:circuits:artifact_hash,-json-rpc*,-aztec:world-state:database,-aztec:l2_block_stream*" - realProofs: false proverAgent: count: 0 pollIntervalMs: 1000 @@ -136,9 +135,7 @@ proverNode: storage: "8Gi" pxe: - proverEnabled: false logLevel: "debug" - proverEnable: false debug: "aztec:*,-aztec:avm_simulator*,-aztec:libp2p_service*,-aztec:circuits:artifact_hash,-json-rpc*,-aztec:world-state:database,-aztec:l2_block_stream*" replicas: 1 service: @@ -167,8 +164,6 @@ bot: # Do not wait for transactions followChain: "NONE" botNoStart: false - pxeProverEnabled: false - proverRealProofs: false maxErrors: 3 stopIfUnhealthy: true service: @@ -222,7 +217,6 @@ proverAgent: spotEnabled: false logLevel: "debug" debug: "aztec:*,-aztec:avm_simulator*,-aztec:libp2p_service*,-aztec:circuits:artifact_hash,-json-rpc*,-aztec:world-state:database,-aztec:l2_block_stream*" - realProofs: false bb: hardwareConcurrency: "" nodeSelector: {} diff --git a/spartan/aztec-network/values/1-validator-with-proving-and-metrics.yaml b/spartan/aztec-network/values/1-validator-with-proving-and-metrics.yaml index 2a10cd3473f..95b7f0ac638 100644 --- a/spartan/aztec-network/values/1-validator-with-proving-and-metrics.yaml +++ b/spartan/aztec-network/values/1-validator-with-proving-and-metrics.yaml @@ -1,3 +1,8 @@ +aztec: + realProofs: true + slotDuration: 36 + epochDuration: 32 + validator: replicas: 1 validatorKeys: @@ -11,12 +16,8 @@ bootNode: validator: disabled: true -proverNode: - realProofs: true - proverAgent: replicas: 6 - realProofs: true bb: hardwareConcurrency: 16 resources: @@ -27,22 +28,14 @@ proverAgent: memory: "96Gi" cpu: "16" -pxe: - proverEnabled: true - bot: enabled: true - pxeProverEnabled: true txIntervalSeconds: 200 jobs: deployL1Verifier: enable: true -aztec: - slotDuration: 36 - epochDuration: 32 - telemetry: enabled: true otelCollectorEndpoint: http://metrics-opentelemetry-collector.metrics:4318 diff --git a/spartan/aztec-network/values/1-validator-with-proving.yaml b/spartan/aztec-network/values/1-validator-with-proving.yaml index 07f064561f5..caa0d4e5a15 100644 --- a/spartan/aztec-network/values/1-validator-with-proving.yaml +++ b/spartan/aztec-network/values/1-validator-with-proving.yaml @@ -1,3 +1,8 @@ +aztec: + realProofs: true + slotDuration: 36 + epochDuration: 32 + validator: replicas: 1 validatorKeys: @@ -11,12 +16,8 
@@ bootNode: validator: disabled: true -proverNode: - realProofs: true - proverAgent: replicas: 6 - realProofs: true bb: hardwareConcurrency: 16 resources: @@ -27,18 +28,10 @@ proverAgent: memory: "96Gi" cpu: "16" -pxe: - proverEnabled: true - bot: enabled: true - pxeProverEnabled: true txIntervalSeconds: 200 jobs: deployL1Verifier: enable: true - -aztec: - slotDuration: 36 - epochDuration: 32 diff --git a/spartan/aztec-network/values/gcp-proving-test.yaml b/spartan/aztec-network/values/gcp-proving-test.yaml index 6a361ecd025..765f1a2ade3 100644 --- a/spartan/aztec-network/values/gcp-proving-test.yaml +++ b/spartan/aztec-network/values/gcp-proving-test.yaml @@ -23,7 +23,6 @@ validator: storage: "128Gi" bootNode: - realProofs: true validator: disabled: true resources: @@ -33,7 +32,6 @@ bootNode: storage: "128Gi" proverNode: - realProofs: true resources: requests: memory: "8Gi" @@ -42,7 +40,6 @@ proverNode: proverAgent: replicas: 120 - realProofs: true bb: hardwareConcurrency: 31 resources: @@ -50,15 +47,10 @@ proverAgent: memory: "116Gi" cpu: "31" -pxe: - proverEnabled: true - bot: replicas: 16 enabled: true - pxeProverEnabled: true txIntervalSeconds: 1 - proverRealProofs: true botPrivateKey: "" privateTransfersPerTx: 1 publicTransfersPerTx: 1 @@ -80,3 +72,4 @@ fullnameOverride: sp aztec: slotDuration: 36 epochDuration: 32 + realProofs: true diff --git a/spartan/aztec-network/values/multicloud-demo.yaml b/spartan/aztec-network/values/multicloud-demo.yaml index 6ba49557253..2c4ea379e6e 100644 --- a/spartan/aztec-network/values/multicloud-demo.yaml +++ b/spartan/aztec-network/values/multicloud-demo.yaml @@ -17,12 +17,8 @@ bootNode: validator: disabled: true -proverNode: - realProofs: false - proverAgent: replicas: 1 - realProofs: false bb: hardwareConcurrency: 16 diff --git a/spartan/aztec-network/values/prover-node-with-agents.yaml b/spartan/aztec-network/values/prover-node-with-agents.yaml index 4a58f419958..a1b981a5fc9 100644 --- a/spartan/aztec-network/values/prover-node-with-agents.yaml +++ b/spartan/aztec-network/values/prover-node-with-agents.yaml @@ -16,22 +16,16 @@ bootNode: validator: disabled: true -proverNode: - realProofs: false - proverAgent: replicas: 1 - realProofs: false bb: hardwareConcurrency: 16 pxe: external: false - proverEnabled: true bot: enabled: false - pxeProverEnabled: false txIntervalSeconds: 200 jobs: diff --git a/spartan/aztec-network/values/release.yaml b/spartan/aztec-network/values/release.yaml index b48f9cf2640..2f18e02fc51 100644 --- a/spartan/aztec-network/values/release.yaml +++ b/spartan/aztec-network/values/release.yaml @@ -1,6 +1,11 @@ network: public: true +aztec: + slotDuration: 36 + epochDuration: 32 + realProofs: true + images: aztec: pullPolicy: Always @@ -10,7 +15,6 @@ telemetry: otelCollectorEndpoint: http://34.150.160.154:4318 validator: - realProofs: true replicas: 48 validatorKeys: - 0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80 @@ -118,17 +122,12 @@ validator: disabled: false bootNode: - realProofs: true peerIdPrivateKey: 080212200ba8451c6d62b03c4441f0a466c0bce7a3a595f2cf50a055ded3305c77aa3af0 validator: disabled: true -proverNode: - realProofs: true - proverAgent: replicas: 4 - realProofs: true bb: hardwareConcurrency: 16 gke: @@ -141,19 +140,11 @@ proverAgent: memory: "96Gi" cpu: "16" -pxe: - proverEnabled: true - bot: followChain: "PENDING" enabled: true - pxeProverEnabled: true txIntervalSeconds: 200 jobs: deployL1Verifier: enable: true - -aztec: - slotDuration: 36 - epochDuration: 32 diff --git 
a/spartan/terraform/deploy-release/deploy.sh b/spartan/terraform/deploy-release/deploy.sh index e9574554524..ac13466745f 100755 --- a/spartan/terraform/deploy-release/deploy.sh +++ b/spartan/terraform/deploy-release/deploy.sh @@ -1,5 +1,12 @@ #!/bin/bash -RELEASE_NAME="rough-rhino" +# Usage: ./deploy.sh +# Example: ./deploy.sh rough-rhino aztecprotocol/aztec:698cd3d62680629a3f1bfc0f82604534cedbccf3-x86_64 + +set -eu + +RELEASE_NAME=$1 +AZTEC_DOCKER_IMAGE=$2 + terraform init -backend-config="key=deploy-network/${RELEASE_NAME}/terraform.tfstate" -terraform apply -var-file="release.tfvars" +terraform apply -var-file="release.tfvars" -var="RELEASE_NAME=${RELEASE_NAME}" -var="AZTEC_DOCKER_IMAGE=${AZTEC_DOCKER_IMAGE}" diff --git a/spartan/terraform/deploy-release/main.tf b/spartan/terraform/deploy-release/main.tf index bd98f0897a8..458f36795ef 100644 --- a/spartan/terraform/deploy-release/main.tf +++ b/spartan/terraform/deploy-release/main.tf @@ -18,32 +18,32 @@ terraform { provider "kubernetes" { alias = "gke-cluster" config_path = "~/.kube/config" - config_context = var.gke_cluster_context + config_context = var.GKE_CLUSTER_CONTEXT } provider "helm" { alias = "gke-cluster" kubernetes { config_path = "~/.kube/config" - config_context = var.gke_cluster_context + config_context = var.GKE_CLUSTER_CONTEXT } } # Aztec Helm release for gke-cluster resource "helm_release" "aztec-gke-cluster" { provider = helm.gke-cluster - name = var.release_name + name = var.RELEASE_NAME repository = "../../" chart = "aztec-network" - namespace = var.release_name + namespace = var.RELEASE_NAME create_namespace = true # base values file - values = [file("../../aztec-network/values/${var.values_file}")] + values = [file("../../aztec-network/values/${var.VALUES_FILE}")] set { name = "images.aztec.image" - value = var.aztec_docker_image + value = var.AZTEC_DOCKER_IMAGE } # Setting timeout and wait conditions diff --git a/spartan/terraform/deploy-release/release.tfvars b/spartan/terraform/deploy-release/release.tfvars index f3236423d9f..916a85db918 100644 --- a/spartan/terraform/deploy-release/release.tfvars +++ b/spartan/terraform/deploy-release/release.tfvars @@ -1,4 +1 @@ -release_name = "rough-rhino" -values_file = "release.yaml" -aztec_docker_image = "aztecprotocol/aztec:698cd3d62680629a3f1bfc0f82604534cedbccf3-x86_64" - +VALUES_FILE = "release.yaml" diff --git a/spartan/terraform/deploy-release/variables.tf b/spartan/terraform/deploy-release/variables.tf index ebccc9d3f67..03930fa3d65 100644 --- a/spartan/terraform/deploy-release/variables.tf +++ b/spartan/terraform/deploy-release/variables.tf @@ -1,20 +1,20 @@ -variable "gke_cluster_context" { +variable "GKE_CLUSTER_CONTEXT" { description = "GKE cluster context" type = string default = "gke_testnet-440309_us-east4-a_spartan-gke" } -variable "release_name" { +variable "RELEASE_NAME" { description = "Name of helm deployment and k8s namespace" type = string } -variable "values_file" { +variable "VALUES_FILE" { description = "Name of the values file to use for deployment" type = string } -variable "aztec_docker_image" { +variable "AZTEC_DOCKER_IMAGE" { description = "Docker image to use for the aztec network" type = string }
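For reference, a minimal invocation of the updated deploy script, as a sketch only: it assumes your local kube config contains the GKE cluster context used by the Terraform providers, and it reuses the release name and image tag given in the script's own usage comment. The two positional arguments are forwarded to the RELEASE_NAME and AZTEC_DOCKER_IMAGE Terraform variables shown above.

    cd spartan/terraform/deploy-release
    # First arg: helm release name (also used as the k8s namespace); second arg: aztec docker image.
    ./deploy.sh rough-rhino aztecprotocol/aztec:698cd3d62680629a3f1bfc0f82604534cedbccf3-x86_64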