From fc1f30787b83a0c9c2ca73e675ff666395d24d74 Mon Sep 17 00:00:00 2001 From: Lasse Herskind <16536249+LHerskind@users.noreply.github.com> Date: Sat, 31 Aug 2024 00:18:15 +0100 Subject: [PATCH] feat: Removing `is_dev_net` flag (#8275) This PR tries to get rid of the `is_dev_net` flag that we had in the `constants.nr`. This is to simplify the flows such that there is just one flow used by both the devnet and spartan. Alters our `createNode` and `setup` such that they will always assume active validators, since it will be required for proper sequencing. Changes the `l1-publisher` slightly to reduce the probability that `validateBlockForSubmission` would fail due to an Ethereum block arriving between the check and the `propose` call. Alters the `sequencer::work()` function such that there is a cleaner split between when we can return early, and when we have to throw an error and revert to properly rollback the global state. Alters the `collectAttestations` functions slightly, i) to cover the case where a validator client is not provided but a committee is needed, and ii) to ensure that the sequencer's own attestation also makes its way into the attestations collected. --- # Graveyard Below this point is the graveyard where old issues were talked about and insanity ensued. --- Currently running into issues where tests are behaving "strange". Namely, it seems like we sometimes will have a passing test and sometimes won't. This is especially encountered when many tests are run at once, such as the `e2e_token_contract` tests. A snippet below shares some frustration. If we are running all the tests, they always fail, but if running only some, it seems to depend on what is being logged... 
```bash DEBUG="aztec:*,-aztec:avm_simulator:*" LOG_LEVEL="silent" yarn test e2e_token_contract/transfer_private // passes LOG_LEVEL="silent" yarn test e2e_token_contract/transfer_private // fails LOG_LEVEL="DEBUG" yarn test e2e_token_contract/transfer_private // fails ``` Somewhat interesting, if using `AZTEC_SLOT_DURATION = 36` many of these issues seem to go away, e.g., transactions are not dropped anymore etc. However, this really should not be the case, since this time influence is not walltime, as it is using an anvil instance behind the scenes. The main reason is more likely to be that we don't encounter the case where a `submitProof` has progressed time and moved the slot. --- Looking at logs! What do I see in the logs - Transaction TX_A is dropped. - When looking higher, I can see that TX_A is dropped because of duplicate nullifiers in the state trees! Something interesting! While the nullifier tree from sync is size 1088, the one we match against is 1216 and the index of the first collision is OUTSIDE of the tree that you get from syncing :skull: - Looking JUST above where we drop these transactions I see that we are encountering an error while performing `validateHeader` (the slot has changed because of `submitProof`) - Looking slightly above this, we can see what I believe is the sequencer simulating the base rollups (all good here!) - Moving slightly up, we can see that the sequencer is processing the transaction itself. - Further up, we see the user creating the transaction - And above that we see the last block, let's call it BLOCK_A Note from this: There is no block after BLOCK_A where TX_A could have been included, but when the sequencer is FAILING to publish the new block, it seems to be keeping the state but dropping the block AND its transactions. So the setup fails because the user will get the response from the node that "this is a double-spend, go away". 
I tried using `PROVER_NODE_DISABLE_AUTOMATIC_PROVING` to turn off the proving, but that doesn't seem to have any effect, and if I try to just bypass the submitProofs it seems to cause the application to enter an infinite loop where it just never tries anything ever again. The tree that the sequencer is checking against that is larger than what you get from sync seems to only be larger for a "short" time before it figures out something is messed up, and will "rollback", but the damage is done and we just dropped potentially a lot of transactions. The exact timing of the failure also "depends", so that is kind of a pain. ![image](https://github.com/user-attachments/assets/2fad7185-fb32-4ffd-a825-e9c55263c8e3) **Update**: The issue seemed to be that if we returned early from `work()` in the sequencer, when not proposing a block, we would not roll back the state, so if state changes were made, it would WRECK the next block. --- l1-contracts/src/core/Rollup.sol | 59 +---- l1-contracts/src/core/interfaces/IRollup.sol | 1 - .../src/core/libraries/ConstantsGen.sol | 1 - l1-contracts/test/Rollup.t.sol | 11 - l1-contracts/test/sparta/DevNet.t.sol | 232 ------------------ l1-contracts/test/sparta/Sparta.t.sol | 22 -- .../crates/types/src/constants.nr | 1 - yarn-project/circuits.js/src/constants.gen.ts | 1 - yarn-project/end-to-end/Earthfile | 3 + .../src/e2e_l1_with_wall_time.test.ts | 66 +++++ .../end-to-end/src/e2e_p2p_network.test.ts | 48 +++- .../end-to-end/src/fixtures/setup_p2p_test.ts | 18 +- yarn-project/end-to-end/src/fixtures/utils.ts | 20 +- .../e2e_public_testnet_transfer.test.ts | 1 - .../src/publisher/l1-publisher.test.ts | 13 +- .../src/publisher/l1-publisher.ts | 53 ++-- .../src/sequencer/sequencer.test.ts | 137 +++-------- .../src/sequencer/sequencer.ts | 222 ++++++++--------- .../validator-client/src/validator.ts | 6 +- 19 files changed, 302 insertions(+), 613 deletions(-) delete mode 100644 l1-contracts/test/sparta/DevNet.t.sol create mode 100644 
yarn-project/end-to-end/src/e2e_l1_with_wall_time.test.ts diff --git a/l1-contracts/src/core/Rollup.sol b/l1-contracts/src/core/Rollup.sol index 93761eb3ac7..e4bc8ebb589 100644 --- a/l1-contracts/src/core/Rollup.sol +++ b/l1-contracts/src/core/Rollup.sol @@ -67,10 +67,6 @@ contract Rollup is Leonidas, IRollup, ITestRollup { bytes32 public vkTreeRoot; - // @note This should not exists, but we have it now to ensure we will not be killing the devnet with our - // timeliness requirements. - bool public isDevNet = Constants.IS_DEV_NET == 1; - // @note Assume that all blocks up to this value are automatically proven. Speeds up bootstrapping. // Testing only. This should be removed eventually. uint256 private assumeProvenUntilBlockNumber; @@ -111,12 +107,10 @@ contract Rollup is Leonidas, IRollup, ITestRollup { * @notice Prune the pending chain up to the last proven block * * @dev Will revert if there is nothing to prune or if the chain is not ready to be pruned + * + * @dev While in devnet, this will be guarded behind an `onlyOwner` */ - function prune() external override(IRollup) { - if (isDevNet) { - revert Errors.DevNet__NoPruningAllowed(); - } - + function prune() external override(IRollup) onlyOwner { if (pendingBlockCount == provenBlockCount) { revert Errors.Rollup__NothingToPrune(); } @@ -155,17 +149,6 @@ contract Rollup is Leonidas, IRollup, ITestRollup { assumeProvenUntilBlockNumber = blockNumber; } - /** - * @notice Set the devnet mode - * - * @dev This is only needed for testing, and should be removed - * - * @param _devNet - Whether or not the contract is in devnet mode - */ - function setDevNet(bool _devNet) external override(ITestRollup) onlyOwner { - isDevNet = _devNet; - } - /** * @notice Set the verifier contract * @@ -412,13 +395,9 @@ contract Rollup is Leonidas, IRollup, ITestRollup { revert Errors.Rollup__InvalidArchive(tipArchive, _archive); } - if (isDevNet) { - _devnetSequencerSubmissionChecks(_proposer); - } else { - address proposer = 
getProposerAt(_ts); - if (proposer != address(0) && proposer != _proposer) { - revert Errors.Leonidas__InvalidProposer(proposer, _proposer); - } + address proposer = getProposerAt(_ts); + if (proposer != address(0) && proposer != _proposer) { + revert Errors.Leonidas__InvalidProposer(proposer, _proposer); } return (slot, pendingBlockCount); @@ -568,8 +547,6 @@ contract Rollup is Leonidas, IRollup, ITestRollup { * This might be relaxed for allow consensus set to better handle short-term bursts of L1 congestion * - The slot MUST be in the current epoch * - * @dev While in isDevNet, we allow skipping all of the checks as we simply assume only TRUSTED sequencers - * * @param _slot - The slot of the header to validate * @param _signatures - The signatures to validate * @param _digest - The digest that signatures sign over @@ -581,19 +558,6 @@ contract Rollup is Leonidas, IRollup, ITestRollup { uint256 _currentTime, DataStructures.ExecutionFlags memory _flags ) internal view { - if (isDevNet) { - // @note If we are running in a devnet, we don't want to perform all the consensus - // checks, we instead simply require that either there are NO validators or - // that the proposer is a validator. - // - // This means that we relaxes the condition that the block must land in the - // correct slot and epoch to make it more fluid for the devnet launch - // or for testing. 
- - _devnetSequencerSubmissionChecks(msg.sender); - return; - } - // Ensure that the slot proposed is NOT in the future uint256 currentSlot = getSlotAt(_currentTime); if (_slot != currentSlot) { @@ -687,15 +651,4 @@ contract Rollup is Leonidas, IRollup, ITestRollup { revert Errors.Rollup__UnavailableTxs(_header.contentCommitment.txsEffectsHash); } } - - function _devnetSequencerSubmissionChecks(address _proposer) internal view { - if (getValidatorCount() == 0) { - return; - } - - if (!isValidator(_proposer)) { - revert Errors.DevNet__InvalidProposer(getValidatorAt(0), _proposer); - } - return; - } } diff --git a/l1-contracts/src/core/interfaces/IRollup.sol b/l1-contracts/src/core/interfaces/IRollup.sol index 7e2276342b7..33e5640537b 100644 --- a/l1-contracts/src/core/interfaces/IRollup.sol +++ b/l1-contracts/src/core/interfaces/IRollup.sol @@ -9,7 +9,6 @@ import {SignatureLib} from "../sequencer_selection/SignatureLib.sol"; import {DataStructures} from "../libraries/DataStructures.sol"; interface ITestRollup { - function setDevNet(bool _devNet) external; function setVerifier(address _verifier) external; function setVkTreeRoot(bytes32 _vkTreeRoot) external; function setAssumeProvenUntilBlockNumber(uint256 blockNumber) external; diff --git a/l1-contracts/src/core/libraries/ConstantsGen.sol b/l1-contracts/src/core/libraries/ConstantsGen.sol index 2e04e93dfe2..e85d0854c55 100644 --- a/l1-contracts/src/core/libraries/ConstantsGen.sol +++ b/l1-contracts/src/core/libraries/ConstantsGen.sol @@ -104,7 +104,6 @@ library Constants { uint256 internal constant ETHEREUM_SLOT_DURATION = 12; uint256 internal constant AZTEC_SLOT_DURATION = 12; uint256 internal constant AZTEC_EPOCH_DURATION = 48; - uint256 internal constant IS_DEV_NET = 1; uint256 internal constant GENESIS_ARCHIVE_ROOT = 8142738430000951296386584486068033372964809139261822027365426310856631083550; uint256 internal constant FEE_JUICE_INITIAL_MINT = 20000000000; diff --git a/l1-contracts/test/Rollup.t.sol 
b/l1-contracts/test/Rollup.t.sol index 5845f95b05c..c908099f6a0 100644 --- a/l1-contracts/test/Rollup.t.sol +++ b/l1-contracts/test/Rollup.t.sol @@ -113,13 +113,6 @@ contract RollupTest is DecoderBase { } function testRevertPrune() public setUpFor("mixed_block_1") { - if (rollup.isDevNet()) { - vm.expectRevert(abi.encodeWithSelector(Errors.DevNet__NoPruningAllowed.selector)); - rollup.prune(); - - return; - } - vm.expectRevert(abi.encodeWithSelector(Errors.Rollup__NothingToPrune.selector)); rollup.prune(); @@ -136,10 +129,6 @@ contract RollupTest is DecoderBase { } function testPrune() public setUpFor("mixed_block_1") { - if (rollup.isDevNet()) { - return; - } - _testBlock("mixed_block_1", false); assertEq(inbox.inProgress(), 3, "Invalid in progress"); diff --git a/l1-contracts/test/sparta/DevNet.t.sol b/l1-contracts/test/sparta/DevNet.t.sol deleted file mode 100644 index 3b27c2fee3d..00000000000 --- a/l1-contracts/test/sparta/DevNet.t.sol +++ /dev/null @@ -1,232 +0,0 @@ -// SPDX-License-Identifier: Apache-2.0 -// Copyright 2023 Aztec Labs. 
-pragma solidity >=0.8.18; - -import {DecoderBase} from "../decoders/Base.sol"; - -import {DataStructures} from "../../src/core/libraries/DataStructures.sol"; -import {Constants} from "../../src/core/libraries/ConstantsGen.sol"; -import {SignatureLib} from "../../src/core/sequencer_selection/SignatureLib.sol"; - -import {Registry} from "../../src/core/messagebridge/Registry.sol"; -import {Inbox} from "../../src/core/messagebridge/Inbox.sol"; -import {Outbox} from "../../src/core/messagebridge/Outbox.sol"; -import {Errors} from "../../src/core/libraries/Errors.sol"; -import {Rollup} from "../../src/core/Rollup.sol"; -import {Leonidas} from "../../src/core/sequencer_selection/Leonidas.sol"; -import {AvailabilityOracle} from "../../src/core/availability_oracle/AvailabilityOracle.sol"; -import {NaiveMerkle} from "../merkle/Naive.sol"; -import {MerkleTestUtil} from "../merkle/TestUtil.sol"; -import {TxsDecoderHelper} from "../decoders/helpers/TxsDecoderHelper.sol"; -import {IFeeJuicePortal} from "../../src/core/interfaces/IFeeJuicePortal.sol"; - -/** - * We are using the same blocks as from Rollup.t.sol. 
- * The tests in this file is testing the sequencer selection - * - * We will skip these test if we are running with IS_DEV_NET = true - */ -contract DevNetTest is DecoderBase { - Registry internal registry; - Inbox internal inbox; - Outbox internal outbox; - Rollup internal rollup; - MerkleTestUtil internal merkleTestUtil; - TxsDecoderHelper internal txsHelper; - - AvailabilityOracle internal availabilityOracle; - - mapping(address validator => uint256 privateKey) internal privateKeys; - - SignatureLib.Signature internal emptySignature; - - /** - * @notice Set up the contracts needed for the tests with time aligned to the provided block name - */ - modifier setup(uint256 _validatorCount) { - string memory _name = "mixed_block_1"; - { - Leonidas leo = new Leonidas(address(1)); - DecoderBase.Full memory full = load(_name); - uint256 slotNumber = full.block.decodedHeader.globalVariables.slotNumber; - uint256 initialTime = - full.block.decodedHeader.globalVariables.timestamp - slotNumber * leo.SLOT_DURATION(); - vm.warp(initialTime); - } - - registry = new Registry(address(this)); - availabilityOracle = new AvailabilityOracle(); - rollup = new Rollup( - registry, - availabilityOracle, - IFeeJuicePortal(address(0)), - bytes32(0), - address(this), - new address[](0) - ); - inbox = Inbox(address(rollup.INBOX())); - outbox = Outbox(address(rollup.OUTBOX())); - - registry.upgrade(address(rollup)); - - merkleTestUtil = new MerkleTestUtil(); - txsHelper = new TxsDecoderHelper(); - - for (uint256 i = 1; i < _validatorCount + 1; i++) { - uint256 privateKey = uint256(keccak256(abi.encode("validator", i))); - address validator = vm.addr(privateKey); - privateKeys[validator] = privateKey; - rollup.addValidator(validator); - } - _; - } - - function testProposerForNonSetupEpoch(uint8 _epochsToJump) public setup(5) { - if (Constants.IS_DEV_NET == 0) { - return; - } - - uint256 pre = rollup.getCurrentEpoch(); - vm.warp( - block.timestamp + uint256(_epochsToJump) * 
rollup.EPOCH_DURATION() * rollup.SLOT_DURATION() - ); - uint256 post = rollup.getCurrentEpoch(); - assertEq(pre + _epochsToJump, post, "Invalid epoch"); - - address expectedProposer = rollup.getCurrentProposer(); - - // Add a validator which will also setup the epoch - rollup.addValidator(address(0xdead)); - - address actualProposer = rollup.getCurrentProposer(); - assertEq(expectedProposer, actualProposer, "Invalid proposer"); - } - - function testNoValidators() public setup(0) { - if (Constants.IS_DEV_NET == 0) { - return; - } - - _testBlock("mixed_block_1", false, false); - } - - function testInvalidProposer() public setup(1) { - if (Constants.IS_DEV_NET == 0) { - return; - } - - _testBlock("mixed_block_1", true, true); - } - - struct StructToAvoidDeepStacks { - uint256 needed; - address proposer; - bool shouldRevert; - } - - function _testBlock(string memory _name, bool _expectRevert, bool _invalidProposer) internal { - _testBlock(_name, _expectRevert, _invalidProposer, 0); - } - - function _testBlock(string memory _name, bool _expectRevert, bool _invalidProposer, uint256 _ts) - internal - { - DecoderBase.Full memory full = load(_name); - bytes memory header = full.block.header; - bytes32 archive = full.block.archive; - bytes memory body = full.block.body; - - StructToAvoidDeepStacks memory ree; - - // We jump to the time of the block. 
(unless it is in the past) - vm.warp(max(block.timestamp, max(full.block.decodedHeader.globalVariables.timestamp, _ts))); - - if (_ts > 0) { - // Update the timestamp and slot in the header - uint256 slotValue = rollup.getCurrentSlot(); - uint256 timestampMemoryPosition = 0x01b4; - uint256 slotMemoryPosition = 0x0194; - assembly { - mstore(add(header, add(0x20, timestampMemoryPosition)), _ts) - mstore(add(header, add(0x20, slotMemoryPosition)), slotValue) - } - } - - _populateInbox(full.populate.sender, full.populate.recipient, full.populate.l1ToL2Content); - - availabilityOracle.publish(body); - - ree.proposer = rollup.getCurrentProposer(); - ree.shouldRevert = false; - - rollup.setupEpoch(); - - if (_invalidProposer) { - ree.proposer = address(uint160(uint256(keccak256(abi.encode("invalid", ree.proposer))))); - // Why don't we end up here? - vm.expectRevert( - abi.encodeWithSelector( - Errors.DevNet__InvalidProposer.selector, rollup.getValidatorAt(0), ree.proposer - ) - ); - ree.shouldRevert = true; - } - - vm.prank(ree.proposer); - rollup.propose(header, archive, bytes32(0)); - - assertEq(_expectRevert, ree.shouldRevert, "Invalid revert expectation"); - - if (ree.shouldRevert) { - return; - } - - bytes32 l2ToL1MessageTreeRoot; - { - uint32 numTxs = full.block.numTxs; - // NB: The below works with full blocks because we require the largest possible subtrees - // for L2 to L1 messages - usually we make variable height subtrees, the roots of which - // form a balanced tree - - // The below is a little janky - we know that this test deals with full txs with equal numbers - // of msgs or txs with no messages, so the division works - // TODO edit full.messages to include information about msgs per tx? - uint256 subTreeHeight = merkleTestUtil.calculateTreeHeightFromSize( - full.messages.l2ToL1Messages.length == 0 ? 
0 : full.messages.l2ToL1Messages.length / numTxs - ); - uint256 outHashTreeHeight = merkleTestUtil.calculateTreeHeightFromSize(numTxs); - uint256 numMessagesWithPadding = numTxs * Constants.MAX_L2_TO_L1_MSGS_PER_TX; - - uint256 treeHeight = subTreeHeight + outHashTreeHeight; - NaiveMerkle tree = new NaiveMerkle(treeHeight); - for (uint256 i = 0; i < numMessagesWithPadding; i++) { - if (i < full.messages.l2ToL1Messages.length) { - tree.insertLeaf(full.messages.l2ToL1Messages[i]); - } else { - tree.insertLeaf(bytes32(0)); - } - } - - l2ToL1MessageTreeRoot = tree.computeRoot(); - } - - (bytes32 root,) = outbox.getRootData(full.block.decodedHeader.globalVariables.blockNumber); - - if (rollup.provenBlockCount() > full.block.decodedHeader.globalVariables.blockNumber) { - assertEq(l2ToL1MessageTreeRoot, root, "Invalid l2 to l1 message tree root"); - } else { - assertEq(root, bytes32(0), "Invalid outbox root"); - } - - assertEq(rollup.archive(), archive, "Invalid archive"); - } - - function _populateInbox(address _sender, bytes32 _recipient, bytes32[] memory _contents) internal { - for (uint256 i = 0; i < _contents.length; i++) { - vm.prank(_sender); - inbox.sendL2Message( - DataStructures.L2Actor({actor: _recipient, version: 1}), _contents[i], bytes32(0) - ); - } - } -} diff --git a/l1-contracts/test/sparta/Sparta.t.sol b/l1-contracts/test/sparta/Sparta.t.sol index e02935ef76f..a3e4eac2d2d 100644 --- a/l1-contracts/test/sparta/Sparta.t.sol +++ b/l1-contracts/test/sparta/Sparta.t.sol @@ -24,8 +24,6 @@ import {IFeeJuicePortal} from "../../src/core/interfaces/IFeeJuicePortal.sol"; /** * We are using the same blocks as from Rollup.t.sol. 
* The tests in this file is testing the sequencer selection - * - * We will skip these test if we are running with IS_DEV_NET = true */ contract SpartaTest is DecoderBase { @@ -111,10 +109,6 @@ contract SpartaTest is DecoderBase { } function testProposerForNonSetupEpoch(uint8 _epochsToJump) public setup(4) { - if (Constants.IS_DEV_NET == 1) { - return; - } - uint256 pre = rollup.getCurrentEpoch(); vm.warp( block.timestamp + uint256(_epochsToJump) * rollup.EPOCH_DURATION() * rollup.SLOT_DURATION() @@ -132,10 +126,6 @@ contract SpartaTest is DecoderBase { } function testValidatorSetLargerThanCommittee(bool _insufficientSigs) public setup(100) { - if (Constants.IS_DEV_NET == 1) { - return; - } - assertGt(rollup.getValidators().length, rollup.TARGET_COMMITTEE_SIZE(), "Not enough validators"); uint256 committeSize = rollup.TARGET_COMMITTEE_SIZE() * 2 / 3 + (_insufficientSigs ? 0 : 1); @@ -149,27 +139,15 @@ contract SpartaTest is DecoderBase { } function testHappyPath() public setup(4) { - if (Constants.IS_DEV_NET == 1) { - return; - } - _testBlock("mixed_block_1", false, 3, false); _testBlock("mixed_block_2", false, 3, false); } function testInvalidProposer() public setup(4) { - if (Constants.IS_DEV_NET == 1) { - return; - } - _testBlock("mixed_block_1", true, 3, true); } function testInsufficientSigs() public setup(4) { - if (Constants.IS_DEV_NET == 1) { - return; - } - _testBlock("mixed_block_1", true, 2, false); } diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr b/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr index 95c3c14e1b3..0183f973880 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr @@ -137,7 +137,6 @@ global ETHEREUM_SLOT_DURATION: u32 = 12; // AZTEC_SLOT_DURATION should be a multiple of ETHEREUM_SLOT_DURATION global AZTEC_SLOT_DURATION: u32 = ETHEREUM_SLOT_DURATION * 1; global AZTEC_EPOCH_DURATION: u32 = 
48; -global IS_DEV_NET: bool = true; // The following is taken from building a block and looking at the `lastArchive` value in it. // You can run the `integration_l1_publisher.test.ts` and look at the first blocks in the fixtures. global GENESIS_ARCHIVE_ROOT: Field = 0x1200a06aae1368abe36530b585bd7a4d2ba4de5037b82076412691a187d7621e; diff --git a/yarn-project/circuits.js/src/constants.gen.ts b/yarn-project/circuits.js/src/constants.gen.ts index 1b25acd8207..10c2bd38b55 100644 --- a/yarn-project/circuits.js/src/constants.gen.ts +++ b/yarn-project/circuits.js/src/constants.gen.ts @@ -90,7 +90,6 @@ export const BLOB_SIZE_IN_BYTES = 126976; export const ETHEREUM_SLOT_DURATION = 12; export const AZTEC_SLOT_DURATION = 12; export const AZTEC_EPOCH_DURATION = 48; -export const IS_DEV_NET = 1; export const GENESIS_ARCHIVE_ROOT = 8142738430000951296386584486068033372964809139261822027365426310856631083550n; export const FEE_JUICE_INITIAL_MINT = 20000000000; export const MAX_PACKED_PUBLIC_BYTECODE_SIZE_IN_FIELDS = 20000; diff --git a/yarn-project/end-to-end/Earthfile b/yarn-project/end-to-end/Earthfile index 75512dfe5f8..b59dbd47986 100644 --- a/yarn-project/end-to-end/Earthfile +++ b/yarn-project/end-to-end/Earthfile @@ -110,6 +110,9 @@ NETWORK_TEST: e2e-p2p: DO +E2E_TEST --test=./src/e2e_p2p_network.test.ts +e2e-l1-with-wall-time: + DO +E2E_TEST --test=./src/e2e_l1_with_wall_time.test.ts + e2e-2-pxes: DO +E2E_TEST --test=./src/e2e_2_pxes.test.ts diff --git a/yarn-project/end-to-end/src/e2e_l1_with_wall_time.test.ts b/yarn-project/end-to-end/src/e2e_l1_with_wall_time.test.ts new file mode 100644 index 00000000000..70ec2f7f6b4 --- /dev/null +++ b/yarn-project/end-to-end/src/e2e_l1_with_wall_time.test.ts @@ -0,0 +1,66 @@ +import { getSchnorrAccount } from '@aztec/accounts/schnorr'; +import { type DebugLogger, Fr, GrumpkinScalar, type PXE, type SentTx, TxStatus } from '@aztec/aztec.js'; +import { EthAddress } from '@aztec/circuits.js'; +import { type PXEService } from 
'@aztec/pxe'; + +import { privateKeyToAccount } from 'viem/accounts'; + +import { getPrivateKeyFromIndex, setup } from './fixtures/utils.js'; + +describe('e2e_l1_with_wall_time', () => { + let logger: DebugLogger; + let teardown: () => Promise; + let pxe: PXE; + + beforeEach(async () => { + const account = privateKeyToAccount(`0x${getPrivateKeyFromIndex(0)!.toString('hex')}`); + const initialValidators = [EthAddress.fromString(account.address)]; + + ({ teardown, logger, pxe } = await setup(0, { initialValidators, l1BlockTime: 12 })); + }); + + afterEach(() => teardown()); + + it('should produce blocks with a bunch of transactions', async () => { + for (let i = 0; i < 4; i++) { + const txs = await submitTxsTo(pxe as PXEService, 8); + await Promise.all( + txs.map(async (tx, j) => { + logger.info(`Waiting for tx ${i}-${j}: ${await tx.getTxHash()} to be mined`); + return tx.wait(); + }), + ); + } + }); + + // submits a set of transactions to the provided Private eXecution Environment (PXE) + const submitTxsTo = async (pxe: PXEService, numTxs: number) => { + const txs: SentTx[] = []; + for (let i = 0; i < numTxs; i++) { + const accountManager = getSchnorrAccount(pxe, Fr.random(), GrumpkinScalar.random(), Fr.random()); + const deployMethod = await accountManager.getDeployMethod(); + await deployMethod.create({ + contractAddressSalt: accountManager.salt, + skipClassRegistration: true, + skipPublicDeployment: true, + universalDeploy: true, + }); + await deployMethod.prove({}); + const tx = deployMethod.send(); + + const txHash = await tx.getTxHash(); + + logger.info(`Tx sent with hash ${txHash}`); + const receipt = await tx.getReceipt(); + expect(receipt).toEqual( + expect.objectContaining({ + status: TxStatus.PENDING, + error: '', + }), + ); + logger.info(`Receipt received for ${txHash}`); + txs.push(tx); + } + return txs; + }; +}); diff --git a/yarn-project/end-to-end/src/e2e_p2p_network.test.ts b/yarn-project/end-to-end/src/e2e_p2p_network.test.ts index 
f297b084b47..2037f9a200d 100644 --- a/yarn-project/end-to-end/src/e2e_p2p_network.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p_network.test.ts @@ -1,12 +1,24 @@ import { getSchnorrAccount } from '@aztec/accounts/schnorr'; import { type AztecNodeConfig, type AztecNodeService } from '@aztec/aztec-node'; -import { CompleteAddress, type DebugLogger, Fr, GrumpkinScalar, type SentTx, TxStatus, sleep } from '@aztec/aztec.js'; -import { EthAddress, IS_DEV_NET } from '@aztec/circuits.js'; +import { + CompleteAddress, + type DebugLogger, + type DeployL1Contracts, + EthCheatCodes, + Fr, + GrumpkinScalar, + type SentTx, + TxStatus, + sleep, +} from '@aztec/aztec.js'; +import { EthAddress } from '@aztec/circuits.js'; +import { RollupAbi } from '@aztec/l1-artifacts'; import { type BootstrapNode } from '@aztec/p2p'; import { type PXEService, createPXEService, getPXEServiceConfig as getRpcConfig } from '@aztec/pxe'; import { jest } from '@jest/globals'; import fs from 'fs'; +import { getContract } from 'viem'; import { privateKeyToAccount } from 'viem/accounts'; import { @@ -32,6 +44,7 @@ describe('e2e_p2p_network', () => { let teardown: () => Promise; let bootstrapNode: BootstrapNode; let bootstrapNodeEnr: string; + let deployL1ContractsValues: DeployL1Contracts; beforeEach(async () => { // If we want to test with interval mining, we can use the local host and start `anvil --block-time 12` @@ -47,21 +60,32 @@ describe('e2e_p2p_network', () => { const initialValidators = [EthAddress.fromString(account.address)]; - // Add 1 extra validator if in devnet or NUM_NODES if not. - // Each of these will become a validator and sign attestations. - const limit = IS_DEV_NET ? 
1 : NUM_NODES; - for (let i = 0; i < limit; i++) { - const account = privateKeyToAccount(`0x${getPrivateKeyFromIndex(i + 1)!.toString('hex')}`); - initialValidators.push(EthAddress.fromString(account.address)); - } - - ({ teardown, config, logger } = await setup(0, { initialValidators, ...options })); + ({ teardown, config, logger, deployL1ContractsValues } = await setup(0, { initialValidators, ...options })); bootstrapNode = await createBootstrapNode(BOOT_NODE_UDP_PORT); bootstrapNodeEnr = bootstrapNode.getENR().encodeTxt(); config.minTxsPerBlock = NUM_TXS_PER_BLOCK; config.maxTxsPerBlock = NUM_TXS_PER_BLOCK; + + const rollup = getContract({ + address: deployL1ContractsValues.l1ContractAddresses.rollupAddress.toString(), + abi: RollupAbi, + client: deployL1ContractsValues.walletClient, + }); + + for (let i = 0; i < NUM_NODES; i++) { + const account = privateKeyToAccount(`0x${getPrivateKeyFromIndex(i + 1)!.toString('hex')}`); + await rollup.write.addValidator([account.address]); + } + + //@note Now we jump ahead to the next epoch such that the validator committee is picked + // INTERVAL MINING: If we are using anvil interval mining this will NOT progress the time! + // Which means that the validator set will still be empty! So anyone can propose. 
+ const slotsInEpoch = await rollup.read.EPOCH_DURATION(); + const timestamp = await rollup.read.getTimestampForSlot([slotsInEpoch]); + const cheatCodes = new EthCheatCodes(config.l1RpcUrl); + await cheatCodes.warp(Number(timestamp)); }); afterEach(() => teardown()); @@ -88,7 +112,6 @@ describe('e2e_p2p_network', () => { bootstrapNodeEnr, NUM_NODES, BOOT_NODE_UDP_PORT, - /*activate validators=*/ !IS_DEV_NET, ); // wait a bit for peers to discover each other @@ -149,7 +172,6 @@ describe('e2e_p2p_network', () => { i + 1 + BOOT_NODE_UDP_PORT, undefined, i, - /*validators*/ !IS_DEV_NET, `./data-${i}`, ); logger.info(`Node ${i} restarted`); diff --git a/yarn-project/end-to-end/src/fixtures/setup_p2p_test.ts b/yarn-project/end-to-end/src/fixtures/setup_p2p_test.ts index 57fb87f171f..560677f6bee 100644 --- a/yarn-project/end-to-end/src/fixtures/setup_p2p_test.ts +++ b/yarn-project/end-to-end/src/fixtures/setup_p2p_test.ts @@ -34,18 +34,10 @@ export async function createNodes( bootstrapNodeEnr: string, numNodes: number, bootNodePort: number, - activateValidators: boolean = false, ): Promise { const nodes = []; for (let i = 0; i < numNodes; i++) { - const node = await createNode( - config, - peerIdPrivateKeys[i], - i + 1 + bootNodePort, - bootstrapNodeEnr, - i, - activateValidators, - ); + const node = await createNode(config, peerIdPrivateKeys[i], i + 1 + bootNodePort, bootstrapNodeEnr, i); nodes.push(node); } return nodes; @@ -58,7 +50,6 @@ export async function createNode( tcpListenPort: number, bootstrapNode: string | undefined, publisherAddressIndex: number, - activateValidators: boolean = false, dataDirectory?: string, ) { // We use different L1 publisher accounts in order to avoid duplicate tx nonces. 
We start from @@ -66,11 +57,8 @@ export async function createNode( const publisherPrivKey = getPrivateKeyFromIndex(publisherAddressIndex + 1); config.publisherPrivateKey = `0x${publisherPrivKey!.toString('hex')}`; - if (activateValidators) { - const validatorPrivKey = getPrivateKeyFromIndex(1 + publisherAddressIndex); - config.validatorPrivateKey = `0x${validatorPrivKey!.toString('hex')}`; - config.disableValidator = false; - } + const validatorPrivKey = getPrivateKeyFromIndex(1 + publisherAddressIndex); + config.validatorPrivateKey = `0x${validatorPrivKey!.toString('hex')}`; const newConfig: AztecNodeConfig = { ...config, diff --git a/yarn-project/end-to-end/src/fixtures/utils.ts b/yarn-project/end-to-end/src/fixtures/utils.ts index 7fb9bf3f5b1..dfbebc93636 100644 --- a/yarn-project/end-to-end/src/fixtures/utils.ts +++ b/yarn-project/end-to-end/src/fixtures/utils.ts @@ -300,6 +300,8 @@ type SetupOptions = { salt?: number; /** An initial set of validators */ initialValidators?: EthAddress[]; + /** Anvil block time (interval) */ + l1BlockTime?: number; } & Partial; /** Context for an end-to-end test as returned by the `setup` function */ @@ -337,7 +339,6 @@ export async function setup( opts: SetupOptions = {}, pxeOpts: Partial = {}, enableGas = false, - enableValidators = false, chain: Chain = foundry, ): Promise { const config = { ...getConfigEnvVars(), ...opts }; @@ -355,7 +356,7 @@ export async function setup( ); } - const res = await startAnvil(); + const res = await startAnvil(opts.l1BlockTime); anvil = res.anvil; config.l1RpcUrl = res.rpcUrl; } @@ -404,11 +405,8 @@ export async function setup( config.l1Contracts = deployL1ContractsValues.l1ContractAddresses; // Run the test with validators enabled - if (enableValidators) { - const validatorPrivKey = getPrivateKeyFromIndex(1); - config.validatorPrivateKey = `0x${validatorPrivKey!.toString('hex')}`; - } - config.disableValidator = !enableValidators; + const validatorPrivKey = getPrivateKeyFromIndex(1); + 
config.validatorPrivateKey = `0x${validatorPrivKey!.toString('hex')}`; logger.verbose('Creating and synching an aztec node...'); @@ -508,7 +506,7 @@ export function getL1WalletClient(rpcUrl: string, index: number) { * Ensures there's a running Anvil instance and returns the RPC URL. * @returns */ -export async function startAnvil(): Promise<{ anvil: Anvil; rpcUrl: string }> { +export async function startAnvil(l1BlockTime?: number): Promise<{ anvil: Anvil; rpcUrl: string }> { let rpcUrl: string | undefined = undefined; // Start anvil. @@ -517,7 +515,11 @@ export async function startAnvil(): Promise<{ anvil: Anvil; rpcUrl: string }> { async () => { const ethereumHostPort = await getPort(); rpcUrl = `http://127.0.0.1:${ethereumHostPort}`; - const anvil = createAnvil({ anvilBinary: './scripts/anvil_kill_wrapper.sh', port: ethereumHostPort }); + const anvil = createAnvil({ + anvilBinary: './scripts/anvil_kill_wrapper.sh', + port: ethereumHostPort, + blockTime: l1BlockTime, + }); await anvil.start(); return anvil; }, diff --git a/yarn-project/end-to-end/src/public-testnet/e2e_public_testnet_transfer.test.ts b/yarn-project/end-to-end/src/public-testnet/e2e_public_testnet_transfer.test.ts index fb9affae54d..dec793c0c7f 100644 --- a/yarn-project/end-to-end/src/public-testnet/e2e_public_testnet_transfer.test.ts +++ b/yarn-project/end-to-end/src/public-testnet/e2e_public_testnet_transfer.test.ts @@ -35,7 +35,6 @@ describe(`deploys and transfers a private only token`, () => { { skipProtocolContracts: true, stateLoad: undefined }, {}, false, - false, chain, )); proverConfig = getProverNodeConfigFromEnv(); diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts index 0ecc86cd335..82f466b1151 100644 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts +++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.test.ts @@ -214,20 +214,11 @@ describe('L1Publisher', () => { 
it('does not retry if simulating a publish and propose tx fails', async () => { rollupContractRead.archive.mockResolvedValue(l2Block.header.lastArchive.root.toString() as `0x${string}`); - rollupContractSimulate.propose.mockRejectedValueOnce(new Error()); + rollupContractRead.validateHeader.mockRejectedValueOnce(new Error('Test error')); - if (L1Publisher.SKIP_SIMULATION) { - rollupContractRead.validateHeader.mockRejectedValueOnce(new Error('Test error')); - } - - const result = await publisher.processL2Block(l2Block); + await expect(publisher.processL2Block(l2Block)).rejects.toThrow(); - expect(result).toEqual(false); - if (!L1Publisher.SKIP_SIMULATION) { - expect(rollupContractSimulate.propose).toHaveBeenCalledTimes(1); - } expect(rollupContractRead.validateHeader).toHaveBeenCalledTimes(1); - expect(rollupContractWrite.propose).toHaveBeenCalledTimes(0); }); diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts index 00caf53414c..dc36dee4f9c 100644 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts +++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts @@ -1,8 +1,8 @@ import { type L2Block, type Signature } from '@aztec/circuit-types'; import { type L1PublishBlockStats, type L1PublishProofStats } from '@aztec/circuit-types/stats'; -import { ETHEREUM_SLOT_DURATION, EthAddress, GENESIS_ARCHIVE_ROOT, type Header, type Proof } from '@aztec/circuits.js'; +import { ETHEREUM_SLOT_DURATION, EthAddress, type Header, type Proof } from '@aztec/circuits.js'; import { createEthereumChain } from '@aztec/ethereum'; -import { Fr } from '@aztec/foundation/fields'; +import { type Fr } from '@aztec/foundation/fields'; import { createDebugLogger } from '@aztec/foundation/log'; import { serializeToBuffer } from '@aztec/foundation/serialize'; import { InterruptibleSleep } from '@aztec/foundation/sleep'; @@ -183,27 +183,37 @@ export class L1Publisher { return [slot, blockNumber]; } 
+ /** + * @notice Will call `validateHeader` to make sure that it is possible to propose + * + * @dev Throws if unable to propose + * + * @param header - The header to propose + * @param digest - The digest that attestations are signing over + * + */ public async validateBlockForSubmission( header: Header, - digest: Buffer = new Fr(GENESIS_ARCHIVE_ROOT).toBuffer(), - attestations: Signature[] = [], - ): Promise { + attestationData: { digest: Buffer; signatures: Signature[] } = { + digest: Buffer.alloc(32), + signatures: [], + }, + ): Promise { const ts = BigInt((await this.publicClient.getBlock()).timestamp + BigInt(ETHEREUM_SLOT_DURATION)); - const formattedAttestations = attestations.map(attest => attest.toViemSignature()); - const flags = { ignoreDA: true, ignoreSignatures: attestations.length == 0 }; + const formattedSignatures = attestationData.signatures.map(attest => attest.toViemSignature()); + const flags = { ignoreDA: true, ignoreSignatures: formattedSignatures.length == 0 }; const args = [ `0x${header.toBuffer().toString('hex')}`, - formattedAttestations, - `0x${digest.toString('hex')}`, + formattedSignatures, + `0x${attestationData.digest.toString('hex')}`, ts, flags, ] as const; try { await this.rollupContract.read.validateHeader(args, { account: this.account }); - return true; } catch (error: unknown) { // Specify the type of error if (error instanceof ContractFunctionRevertedError) { @@ -212,7 +222,7 @@ export class L1Publisher { } else { this.log.debug(`Unexpected error during validation: ${error}`); } - return false; + throw error; } } @@ -271,14 +281,6 @@ export class L1Publisher { blockHash: block.hash().toString(), }; - // @note This will make sure that we are passing the checks for our header ASSUMING that the data is also made available - // This means that we can avoid the simulation issues in later checks. 
- // By simulation issue, I mean the fact that the block.timestamp is equal to the last block, not the next, which - // make time consistency checks break. - if (!(await this.validateBlockForSubmission(block.header, block.archive.root.toBuffer(), attestations))) { - return false; - } - const processTxArgs = { header: block.header.toBuffer(), archive: block.archive.root.toBuffer(), @@ -292,7 +294,18 @@ export class L1Publisher { let txHash; const timer = new Timer(); - if (await this.checkIfTxsAreAvailable(block)) { + const isAvailable = await this.checkIfTxsAreAvailable(block); + + // @note This will make sure that we are passing the checks for our header ASSUMING that the data is also made available + // This means that we can avoid the simulation issues in later checks. + // By simulation issue, I mean the fact that the block.timestamp is equal to the last block, not the next, which + // make time consistency checks break. + await this.validateBlockForSubmission(block.header, { + digest: block.archive.root.toBuffer(), + signatures: attestations ?? [], + }); + + if (isAvailable) { this.log.verbose(`Transaction effects of block ${block.number} already published.`, ctx); txHash = await this.sendProposeWithoutBodyTx(processTxArgs); } else { diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts index 24a52633326..d3a8ec1fc82 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.test.ts @@ -23,7 +23,6 @@ import { Fr, GasFees, GlobalVariables, - IS_DEV_NET, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, } from '@aztec/circuits.js'; import { Buffer32 } from '@aztec/foundation/buffer'; @@ -73,35 +72,44 @@ describe('sequencer', () => { const mockedSig = new Signature(Buffer32.fromField(Fr.random()), Buffer32.fromField(Fr.random()), 27); const committee = [EthAddress.random()]; - const getSignatures = () => (IS_DEV_NET ? 
undefined : [mockedSig]); + const getSignatures = () => [mockedSig]; const getAttestations = () => { - if (IS_DEV_NET) { - return undefined; - } - const attestation = new BlockAttestation(block.header, archive, mockedSig); (attestation as any).sender = committee[0]; return [attestation]; }; + const createBlockProposal = () => { return new BlockProposal(block.header, archive, [TxHash.random()], mockedSig); }; let block: L2Block; + let mockedGlobalVariables: GlobalVariables; beforeEach(() => { lastBlockNumber = 0; block = L2Block.random(lastBlockNumber + 1); + mockedGlobalVariables = new GlobalVariables( + chainId, + version, + block.header.globalVariables.blockNumber, + block.header.globalVariables.slotNumber, + Fr.ZERO, + coinbase, + feeRecipient, + gasFees, + ); + publisher = mock(); publisher.getCurrentEpochCommittee.mockResolvedValue(committee); publisher.canProposeAtNextEthBlock.mockResolvedValue([ block.header.globalVariables.slotNumber.toBigInt(), block.header.globalVariables.blockNumber.toBigInt(), ]); - publisher.validateBlockForSubmission.mockResolvedValue(true); + publisher.validateBlockForSubmission.mockResolvedValue(); globalVariableBuilder = mock(); merkleTreeOps = mock(); @@ -147,12 +155,10 @@ describe('sequencer', () => { create: () => blockSimulator, }); - if (!IS_DEV_NET) { - validatorClient = mock({ - collectAttestations: mockFn().mockResolvedValue(getAttestations()), - createBlockProposal: mockFn().mockResolvedValue(createBlockProposal()), - }); - } + validatorClient = mock({ + collectAttestations: mockFn().mockResolvedValue(getAttestations()), + createBlockProposal: mockFn().mockResolvedValue(createBlockProposal()), + }); sequencer = new TestSubject( publisher, @@ -186,17 +192,6 @@ describe('sequencer', () => { blockSimulator.finaliseBlock.mockResolvedValue({ block }); publisher.processL2Block.mockResolvedValueOnce(true); - const mockedGlobalVariables = new GlobalVariables( - chainId, - version, - block.header.globalVariables.blockNumber, - 
block.header.globalVariables.slotNumber, - Fr.ZERO, - coinbase, - feeRecipient, - gasFees, - ); - globalVariableBuilder.buildGlobalVariables.mockResolvedValueOnce(mockedGlobalVariables); await sequencer.initialSync(); @@ -228,22 +223,11 @@ describe('sequencer', () => { blockSimulator.finaliseBlock.mockResolvedValue({ block }); publisher.processL2Block.mockResolvedValueOnce(true); - const mockedGlobalVariables = new GlobalVariables( - chainId, - version, - block.header.globalVariables.blockNumber, - block.header.globalVariables.slotNumber, - Fr.ZERO, - coinbase, - feeRecipient, - gasFees, - ); - globalVariableBuilder.buildGlobalVariables.mockResolvedValue(mockedGlobalVariables); // Not your turn! publisher.canProposeAtNextEthBlock.mockRejectedValue(new Error()); - publisher.validateBlockForSubmission.mockResolvedValue(false); + publisher.validateBlockForSubmission.mockRejectedValue(new Error()); await sequencer.initialSync(); await sequencer.work(); @@ -260,7 +244,7 @@ describe('sequencer', () => { // Now it is! 
publisher.validateBlockForSubmission.mockClear(); - publisher.validateBlockForSubmission.mockResolvedValue(true); + publisher.validateBlockForSubmission.mockResolvedValue(); await sequencer.work(); expect(blockSimulator.startNewBlock).toHaveBeenCalledWith( @@ -290,17 +274,6 @@ describe('sequencer', () => { blockSimulator.finaliseBlock.mockResolvedValue({ block }); publisher.processL2Block.mockResolvedValueOnce(true); - const mockedGlobalVariables = new GlobalVariables( - chainId, - version, - block.header.globalVariables.blockNumber, - block.header.globalVariables.slotNumber, - Fr.ZERO, - coinbase, - feeRecipient, - gasFees, - ); - globalVariableBuilder.buildGlobalVariables.mockResolvedValueOnce(mockedGlobalVariables); // We make a nullifier from tx1 a part of the nullifier tree, so it gets rejected as double spend @@ -342,17 +315,6 @@ describe('sequencer', () => { blockSimulator.finaliseBlock.mockResolvedValue({ block }); publisher.processL2Block.mockResolvedValueOnce(true); - const mockedGlobalVariables = new GlobalVariables( - chainId, - version, - block.header.globalVariables.blockNumber, - block.header.globalVariables.slotNumber, - Fr.ZERO, - coinbase, - feeRecipient, - gasFees, - ); - globalVariableBuilder.buildGlobalVariables.mockResolvedValueOnce(mockedGlobalVariables); // We make the chain id on the invalid tx not equal to the configured chain id @@ -388,17 +350,6 @@ describe('sequencer', () => { blockSimulator.finaliseBlock.mockResolvedValue({ block }); publisher.processL2Block.mockResolvedValueOnce(true); - const mockedGlobalVariables = new GlobalVariables( - chainId, - version, - new Fr(lastBlockNumber + 1), - block.header.globalVariables.slotNumber, - Fr.ZERO, - coinbase, - feeRecipient, - gasFees, - ); - globalVariableBuilder.buildGlobalVariables.mockResolvedValueOnce(mockedGlobalVariables); // We make txs[1] too big to fit @@ -435,17 +386,6 @@ describe('sequencer', () => { blockSimulator.finaliseBlock.mockResolvedValue({ block }); 
publisher.processL2Block.mockResolvedValueOnce(true); - const mockedGlobalVariables = new GlobalVariables( - chainId, - version, - new Fr(lastBlockNumber + 1), - block.header.globalVariables.slotNumber, - Fr.ZERO, - coinbase, - feeRecipient, - gasFees, - ); - globalVariableBuilder.buildGlobalVariables.mockResolvedValueOnce(mockedGlobalVariables); await sequencer.initialSync(); @@ -472,7 +412,7 @@ describe('sequencer', () => { Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)), ); expect(publisher.processL2Block).toHaveBeenCalledTimes(1); - expect(publisher.processL2Block).toHaveBeenCalledWith(block, getAttestations()); + expect(publisher.processL2Block).toHaveBeenCalledWith(block, getSignatures()); expect(blockSimulator.cancelBlock).toHaveBeenCalledTimes(0); }); @@ -494,17 +434,6 @@ describe('sequencer', () => { blockSimulator.finaliseBlock.mockResolvedValue({ block }); publisher.processL2Block.mockResolvedValueOnce(true); - const mockedGlobalVariables = new GlobalVariables( - chainId, - version, - block.header.globalVariables.blockNumber, - block.header.globalVariables.slotNumber, - Fr.ZERO, - coinbase, - feeRecipient, - gasFees, - ); - globalVariableBuilder.buildGlobalVariables.mockResolvedValueOnce(mockedGlobalVariables); await sequencer.initialSync(); @@ -534,7 +463,7 @@ describe('sequencer', () => { Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)), ); expect(publisher.processL2Block).toHaveBeenCalledTimes(1); - expect(publisher.processL2Block).toHaveBeenCalledWith(block, getAttestations()); + expect(publisher.processL2Block).toHaveBeenCalledWith(block, getSignatures()); expect(blockSimulator.cancelBlock).toHaveBeenCalledTimes(0); }); @@ -556,17 +485,6 @@ describe('sequencer', () => { blockSimulator.finaliseBlock.mockResolvedValue({ block }); publisher.processL2Block.mockResolvedValueOnce(true); - const mockedGlobalVariables = new GlobalVariables( - chainId, - version, - block.header.globalVariables.blockNumber, - 
block.header.globalVariables.slotNumber, - Fr.ZERO, - coinbase, - feeRecipient, - gasFees, - ); - globalVariableBuilder.buildGlobalVariables.mockResolvedValueOnce(mockedGlobalVariables); await sequencer.initialSync(); @@ -596,7 +514,8 @@ describe('sequencer', () => { Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n)), ); expect(publisher.processL2Block).toHaveBeenCalledTimes(1); - expect(publisher.processL2Block).toHaveBeenCalledWith(block, getAttestations()); + + expect(publisher.processL2Block).toHaveBeenCalledWith(block, getSignatures()); expect(blockSimulator.cancelBlock).toHaveBeenCalledTimes(0); }); @@ -632,9 +551,9 @@ describe('sequencer', () => { // This could practically be for any reason, e.g., could also be that we have entered a new slot. publisher.validateBlockForSubmission - .mockResolvedValueOnce(true) - .mockResolvedValueOnce(true) - .mockResolvedValueOnce(false); + .mockResolvedValueOnce() + .mockResolvedValueOnce() + .mockRejectedValueOnce(new Error()); await sequencer.work(); diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.ts index 2b8fa1ac1bf..2f90618e58f 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.ts @@ -17,7 +17,6 @@ import { EthAddress, GENESIS_ARCHIVE_ROOT, Header, - IS_DEV_NET, StateReference, } from '@aztec/circuits.js'; import { Fr } from '@aztec/foundation/fields'; @@ -183,91 +182,99 @@ export class Sequencer { } /** - * Grabs up to maxTxsPerBlock from the p2p client, constructs a block, and pushes it to L1. 
+ * @notice Performs most of the sequencer duties: + * - Checks if we are up to date + * - If we are and we are the sequencer, collect txs and build a block + * - Collect attestations for the block + * - Submit block + * - If our block for some reason is not included, revert the state */ protected async work() { - try { - // Update state when the previous block has been synced - const prevBlockSynced = await this.isBlockSynced(); - // Do not go forward with new block if the previous one has not been mined and processed - if (!prevBlockSynced) { - this.log.debug('Previous block has not been mined and processed yet'); - return; - } + // Update state when the previous block has been synced + const prevBlockSynced = await this.isBlockSynced(); + // Do not go forward with new block if the previous one has not been mined and processed + if (!prevBlockSynced) { + this.log.debug('Previous block has not been mined and processed yet'); + return; + } - if (prevBlockSynced && this.state === SequencerState.PUBLISHING_BLOCK) { - this.log.debug(`Block has been synced`); - this.state = SequencerState.IDLE; - } + if (prevBlockSynced && this.state === SequencerState.PUBLISHING_BLOCK) { + this.log.debug(`Block has been synced`); + this.state = SequencerState.IDLE; + } - const chainTip = await this.l2BlockSource.getBlock(-1); - const historicalHeader = chainTip?.header; + const chainTip = await this.l2BlockSource.getBlock(-1); + const historicalHeader = chainTip?.header; - const newBlockNumber = - (historicalHeader === undefined - ? await this.l2BlockSource.getBlockNumber() - : Number(historicalHeader.globalVariables.blockNumber.toBigInt())) + 1; + const newBlockNumber = + (historicalHeader === undefined + ? await this.l2BlockSource.getBlockNumber() + : Number(historicalHeader.globalVariables.blockNumber.toBigInt())) + 1; - // If we cannot find a tip archive, assume genesis. - const chainTipArchive = - chainTip == undefined ? 
new Fr(GENESIS_ARCHIVE_ROOT).toBuffer() : chainTip?.archive.root.toBuffer(); + // If we cannot find a tip archive, assume genesis. + const chainTipArchive = + chainTip == undefined ? new Fr(GENESIS_ARCHIVE_ROOT).toBuffer() : chainTip?.archive.root.toBuffer(); - let slot: bigint; - try { - slot = await this.mayProposeBlock(chainTipArchive, BigInt(newBlockNumber)); - } catch (err) { - this.log.debug(`Cannot propose for block ${newBlockNumber}`); - return; - } + let slot: bigint; + try { + slot = await this.mayProposeBlock(chainTipArchive, BigInt(newBlockNumber)); + } catch (err) { + this.log.debug(`Cannot propose for block ${newBlockNumber}`); + return; + } - if (!this.shouldProposeBlock(historicalHeader, {})) { - return; - } + if (!this.shouldProposeBlock(historicalHeader, {})) { + return; + } - this.state = SequencerState.WAITING_FOR_TXS; + this.state = SequencerState.WAITING_FOR_TXS; - // Get txs to build the new block. - const pendingTxs = this.p2pClient.getTxs('pending'); + // Get txs to build the new block. + const pendingTxs = this.p2pClient.getTxs('pending'); - if (!this.shouldProposeBlock(historicalHeader, { pendingTxsCount: pendingTxs.length })) { - return; - } - this.log.debug(`Retrieved ${pendingTxs.length} txs from P2P pool`); + if (!this.shouldProposeBlock(historicalHeader, { pendingTxsCount: pendingTxs.length })) { + return; + } + this.log.debug(`Retrieved ${pendingTxs.length} txs from P2P pool`); - const newGlobalVariables = await this.globalsBuilder.buildGlobalVariables( - new Fr(newBlockNumber), - this._coinbase, - this._feeRecipient, - slot, - ); + const newGlobalVariables = await this.globalsBuilder.buildGlobalVariables( + new Fr(newBlockNumber), + this._coinbase, + this._feeRecipient, + slot, + ); - // If I created a "partial" header here that should make our job much easier. 
- const proposalHeader = new Header( - new AppendOnlyTreeSnapshot(Fr.fromBuffer(chainTipArchive), 1), - ContentCommitment.empty(), - StateReference.empty(), - newGlobalVariables, - Fr.ZERO, - ); + // If I created a "partial" header here that should make our job much easier. + const proposalHeader = new Header( + new AppendOnlyTreeSnapshot(Fr.fromBuffer(chainTipArchive), 1), + ContentCommitment.empty(), + StateReference.empty(), + newGlobalVariables, + Fr.ZERO, + ); - // TODO: It should be responsibility of the P2P layer to validate txs before passing them on here - const allValidTxs = await this.takeValidTxs( - pendingTxs, - this.txValidatorFactory.validatorForNewTxs(newGlobalVariables, this.allowedInSetup), - ); + // TODO: It should be responsibility of the P2P layer to validate txs before passing them on here + const allValidTxs = await this.takeValidTxs( + pendingTxs, + this.txValidatorFactory.validatorForNewTxs(newGlobalVariables, this.allowedInSetup), + ); - // TODO: We are taking the size of the tx from private-land, but we should be doing this after running - // public functions. Only reason why we do it here now is because the public processor and orchestrator - // are set up such that they require knowing the total number of txs in advance. Still, main reason for - // exceeding max block size in bytes is contract class registration, which happens in private-land. This - // may break if we start emitting lots of log data from public-land. - const validTxs = this.takeTxsWithinMaxSize(allValidTxs); + // TODO: We are taking the size of the tx from private-land, but we should be doing this after running + // public functions. Only reason why we do it here now is because the public processor and orchestrator + // are set up such that they require knowing the total number of txs in advance. Still, main reason for + // exceeding max block size in bytes is contract class registration, which happens in private-land. 
This + // may break if we start emitting lots of log data from public-land. + const validTxs = this.takeTxsWithinMaxSize(allValidTxs); - // Bail if we don't have enough valid txs - if (!this.shouldProposeBlock(historicalHeader, { validTxsCount: validTxs.length })) { - return; - } + // Bail if we don't have enough valid txs + if (!this.shouldProposeBlock(historicalHeader, { validTxsCount: validTxs.length })) { + return; + } + try { + // @note It is very important that the following function will FAIL and not just return early + // if it has made any state changes. If not, we won't rollback the state, and you will + // be in for a world of pain. await this.buildBlockAndPublish(validTxs, proposalHeader, historicalHeader); } catch (err) { if (BlockProofError.isBlockProofError(err)) { @@ -319,23 +326,21 @@ export class Sequencer { return true; } - if (IS_DEV_NET) { - // Compute time elapsed since the previous block - const lastBlockTime = historicalHeader?.globalVariables.timestamp.toNumber() || 0; - const currentTime = Math.floor(Date.now() / 1000); - const elapsedSinceLastBlock = currentTime - lastBlockTime; + // Compute time elapsed since the previous block + const lastBlockTime = historicalHeader?.globalVariables.timestamp.toNumber() || 0; + const currentTime = Math.floor(Date.now() / 1000); + const elapsedSinceLastBlock = currentTime - lastBlockTime; + this.log.debug( + `Last block mined at ${lastBlockTime} current time is ${currentTime} (elapsed ${elapsedSinceLastBlock})`, + ); + + // If we haven't hit the maxSecondsBetweenBlocks, we need to have at least minTxsPerBLock txs. 
+ // Do not go forward with new block if not enough time has passed since last block + if (this.minSecondsBetweenBlocks > 0 && elapsedSinceLastBlock < this.minSecondsBetweenBlocks) { this.log.debug( - `Last block mined at ${lastBlockTime} current time is ${currentTime} (elapsed ${elapsedSinceLastBlock})`, + `Not creating block because not enough time ${this.minSecondsBetweenBlocks} has passed since last block`, ); - - // If we haven't hit the maxSecondsBetweenBlocks, we need to have at least minTxsPerBLock txs. - // Do not go forward with new block if not enough time has passed since last block - if (this.minSecondsBetweenBlocks > 0 && elapsedSinceLastBlock < this.minSecondsBetweenBlocks) { - this.log.debug( - `Not creating block because not enough time ${this.minSecondsBetweenBlocks} has passed since last block`, - ); - return false; - } + return false; } const skipCheck = this.skipMinTxsPerBlockCheck(historicalHeader); @@ -381,6 +386,16 @@ export class Sequencer { return true; } + /** + * @notice Build and propose a block to the chain + * + * @dev MUST throw instead of exiting early to ensure that world-state + * is being rolled back if the block is dropped. 
+ * + * @param validTxs - The valid transactions to construct the block from + * @param proposalHeader - The partial header constructed for the proposal + * @param historicalHeader - The historical header of the parent + */ @trackSpan('Sequencer.buildBlockAndPublish', (_validTxs, proposalHeader, _historicalHeader) => ({ [Attributes.BLOCK_NUMBER]: proposalHeader.globalVariables.blockNumber.toNumber(), })) @@ -389,9 +404,7 @@ export class Sequencer { proposalHeader: Header, historicalHeader: Header | undefined, ): Promise { - if (!(await this.publisher.validateBlockForSubmission(proposalHeader))) { - return; - } + await this.publisher.validateBlockForSubmission(proposalHeader); const newGlobalVariables = proposalHeader.globalVariables; @@ -426,15 +439,16 @@ export class Sequencer { await this.p2pClient.deleteTxs(Tx.getHashes(failedTxData)); } + await this.publisher.validateBlockForSubmission(proposalHeader); + if ( - !(await this.publisher.validateBlockForSubmission(proposalHeader)) || !this.shouldProposeBlock(historicalHeader, { validTxsCount: validTxs.length, processedTxsCount: processedTxs.length, }) ) { blockBuilder.cancelBlock(); - return; + throw new Error('Should not propose the block'); } // All real transactions have been added, set the block as full and complete the proving. @@ -451,9 +465,7 @@ export class Sequencer { // Block is ready, now finalise const { block } = await blockBuilder.finaliseBlock(); - if (!(await this.publisher.validateBlockForSubmission(block.header))) { - return; - } + await this.publisher.validateBlockForSubmission(block.header); const workDuration = workTimer.ms(); this.log.verbose( @@ -496,24 +508,6 @@ export class Sequencer { } protected async collectAttestations(block: L2Block): Promise { - // @todo This should collect attestations properly and fix the ordering of them to make sense - // the current implementation is a PLACEHOLDER and should be nuked from orbit. 
- // It is assuming that there will only be ONE (1) validator, so only one attestation - // is needed. - // @note This is quite a sin, but I'm committing war crimes in this code already. - // _ ._ _ , _ ._ - // (_ ' ( ` )_ .__) - // ( ( ( ) `) ) _) - // (__ (_ (_ . _) _) ,__) - // `~~`\ ' . /`~~` - // ; ; - // / \ - // _____________/_ __ \_____________ - - if (IS_DEV_NET || !this.validatorClient) { - return undefined; - } - // TODO(https://github.com/AztecProtocol/aztec-packages/issues/7962): inefficient to have a round trip in here - this should be cached const committee = await this.publisher.getCurrentEpochCommittee(); @@ -521,6 +515,12 @@ export class Sequencer { return undefined; } + if (!this.validatorClient) { + const msg = 'Missing validator client: Cannot collect attestations'; + this.log.error(msg); + throw new Error(msg); + } + const numberOfRequiredAttestations = Math.floor((committee.length * 2) / 3) + 1; // TODO(https://github.com/AztecProtocol/aztec-packages/issues/7974): we do not have transaction[] lists in the block for now @@ -553,7 +553,7 @@ export class Sequencer { if (publishedL2Block) { this.lastPublishedBlock = block.number; } else { - throw new Error(`Failed to publish block`); + throw new Error(`Failed to publish block ${block.number}`); } } diff --git a/yarn-project/validator-client/src/validator.ts b/yarn-project/validator-client/src/validator.ts index 1425f7d464c..7e44d06b9c2 100644 --- a/yarn-project/validator-client/src/validator.ts +++ b/yarn-project/validator-client/src/validator.ts @@ -86,9 +86,11 @@ export class ValidatorClient implements Validator { this.log.info(`Waiting for ${numberOfRequiredAttestations} attestations for slot: ${slot}`); - let attestations: BlockAttestation[] = [await this.attestToProposal(proposal)]; + const myAttestation = await this.attestToProposal(proposal); + + let attestations: BlockAttestation[] = []; while (attestations.length < numberOfRequiredAttestations) { - attestations = await 
this.p2pClient.getAttestationsForSlot(slot); + attestations = [myAttestation, ...(await this.p2pClient.getAttestationsForSlot(slot))]; if (attestations.length < numberOfRequiredAttestations) { this.log.verbose(`Waiting ${this.attestationPoolingIntervalMs}ms for more attestations...`);