From 84c35f5803b985da6da75bbb6dc8dd1271f76b62 Mon Sep 17 00:00:00 2001 From: kevaundray Date: Wed, 25 Oct 2023 18:22:15 +0000 Subject: [PATCH 01/17] remove pedersen_buffer_to_field from cbinds --- .../crypto/pedersen_commitment/c_bind_new.cpp | 8 -------- .../crypto/pedersen_commitment/c_bind_new.hpp | 2 -- 2 files changed, 10 deletions(-) diff --git a/barretenberg/cpp/src/barretenberg/crypto/pedersen_commitment/c_bind_new.cpp b/barretenberg/cpp/src/barretenberg/crypto/pedersen_commitment/c_bind_new.cpp index 3a2dd00fc28..5d83d39c39b 100644 --- a/barretenberg/cpp/src/barretenberg/crypto/pedersen_commitment/c_bind_new.cpp +++ b/barretenberg/cpp/src/barretenberg/crypto/pedersen_commitment/c_bind_new.cpp @@ -46,12 +46,4 @@ WASM_EXPORT void pedersen___commit(fr::vec_in_buf inputs_buffer, fr::out_buf out serialize::write(output, pedersen_hash); } - -WASM_EXPORT void pedersen___buffer_to_field(uint8_t const* data, fr::out_buf r) -{ - std::vector to_compress; - read(data, to_compress); - auto output = crypto::pedersen_hash::hash_buffer(to_compress); - write(r, output); -} } \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/crypto/pedersen_commitment/c_bind_new.hpp b/barretenberg/cpp/src/barretenberg/crypto/pedersen_commitment/c_bind_new.hpp index 7ab3f72e3e1..2603f4ba451 100644 --- a/barretenberg/cpp/src/barretenberg/crypto/pedersen_commitment/c_bind_new.hpp +++ b/barretenberg/cpp/src/barretenberg/crypto/pedersen_commitment/c_bind_new.hpp @@ -17,6 +17,4 @@ WASM_EXPORT void pedersen___compress_with_hash_index(fr::vec_in_buf inputs_buffe fr::out_buf output); WASM_EXPORT void pedersen___commit(fr::vec_in_buf inputs_buffer, fr::out_buf output); - -WASM_EXPORT void pedersen___buffer_to_field(uint8_t const* data, fr::out_buf r); } \ No newline at end of file From d74eb760dad85ba15b676c1b2e68133868630b7c Mon Sep 17 00:00:00 2001 From: kevaundray Date: Wed, 25 Oct 2023 18:22:28 +0000 Subject: [PATCH 02/17] regenerate bindings --- barretenberg/exports.json | 16 ---------------- barretenberg/ts/src/barretenberg_api/index.ts | 5 ----- 2 files changed, 21 deletions(-) diff --git a/barretenberg/exports.json b/barretenberg/exports.json index 876db3300ae..9a7588fc331 100644 --- a/barretenberg/exports.json +++ b/barretenberg/exports.json @@ -77,22 +77,6 @@ ], "isAsync": false }, - { - "functionName": "pedersen___buffer_to_field", - "inArgs": [ - { - "name": "data", - "type": "const uint8_t *" - } - ], - "outArgs": [ - { - "name": "r", - "type": "fr::out_buf" - } - ], - "isAsync": false - }, { "functionName": "pedersen_hash_init", "inArgs": [], diff --git a/barretenberg/ts/src/barretenberg_api/index.ts b/barretenberg/ts/src/barretenberg_api/index.ts index 3f4f3d7c695..23c0aad4682 100644 --- a/barretenberg/ts/src/barretenberg_api/index.ts +++ b/barretenberg/ts/src/barretenberg_api/index.ts @@ -46,11 +46,6 @@ export class BarretenbergApi { return result[0]; } - async pedersenBufferToField(data: Uint8Array): Promise { - const result = await this.binder.callWasmExport('pedersen___buffer_to_field', [data], [Fr]); - return result[0]; - } - async pedersenHashInit(): Promise { const result = await this.binder.callWasmExport('pedersen_hash_init', [], []); return; From 7586a6db2c9ace6a8b352a5c79f4194fd3e751da Mon Sep 17 00:00:00 2001 From: kevaundray Date: Wed, 25 Oct 2023 18:22:41 +0000 Subject: [PATCH 03/17] remove test --- barretenberg/ts/src/barretenberg_api/pedersen.test.ts | 7 ------- 1 file changed, 7 deletions(-) diff --git a/barretenberg/ts/src/barretenberg_api/pedersen.test.ts 
b/barretenberg/ts/src/barretenberg_api/pedersen.test.ts index c2bf4bb20c8..73529600bb9 100644 --- a/barretenberg/ts/src/barretenberg_api/pedersen.test.ts +++ b/barretenberg/ts/src/barretenberg_api/pedersen.test.ts @@ -33,13 +33,6 @@ describe('pedersen', () => { expect(result).toEqual(new Fr(18374309251862457296563484909553154519357910650678202211610516068880120638872n)); }); - it('pedersenBufferToField', async () => { - const result = await api.pedersenBufferToField( - Buffer.from('Hello world! I am a buffer to be converted to a field!'), - ); - expect(result).toEqual(new Fr(5836632387256708040349959803326023895450290698906238002955147410646852307074n)); - }); - it('pedersenHashPair', async () => { const result = await api.pedersenHashPair(new Fr(4n), new Fr(8n)); expect(result).toEqual(new Fr(1521373897829389584529155077412196627698249315427143054350987371861781120260n)); From 53c65cda3ec35f8a44cc18856273ea61b62553e1 Mon Sep 17 00:00:00 2001 From: kevaundray Date: Wed, 25 Oct 2023 18:32:58 +0000 Subject: [PATCH 04/17] use compressWIthHashIndex for redundant methods --- .../barretenberg/crypto/pedersen/pedersen.ts | 24 +++---------------- 1 file changed, 3 insertions(+), 21 deletions(-) diff --git a/yarn-project/circuits.js/src/barretenberg/crypto/pedersen/pedersen.ts b/yarn-project/circuits.js/src/barretenberg/crypto/pedersen/pedersen.ts index 69362c4f878..9a2843a9196 100644 --- a/yarn-project/circuits.js/src/barretenberg/crypto/pedersen/pedersen.ts +++ b/yarn-project/circuits.js/src/barretenberg/crypto/pedersen/pedersen.ts @@ -14,15 +14,7 @@ import { deserializeArrayFromVector, deserializeField, serializeBufferArrayToVec * purposes. */ export function pedersenCompress(wasm: IWasmModule, lhs: Uint8Array, rhs: Uint8Array): Buffer { - // If not done already, precompute constants. - wasm.call('pedersen__init'); - if (lhs.length !== 32 || rhs.length !== 32) { - throw new Error(`Pedersen lhs and rhs inputs must be 32 bytes (got ${lhs.length} and ${rhs.length} respectively)`); - } - wasm.writeMemory(0, lhs); - wasm.writeMemory(32, rhs); - wasm.call('pedersen__hash_pair', 0, 32, 64); - return Buffer.from(wasm.getMemorySlice(64, 96)); + return pedersenCompressWithHashIndex(wasm, [Buffer.from(lhs), Buffer.from(rhs)], 0); } /** @@ -35,12 +27,7 @@ export function pedersenCompress(wasm: IWasmModule, lhs: Uint8Array, rhs: Uint8A * purposes. */ export function pedersenHashInputs(wasm: IWasmModule, inputs: Buffer[]): Buffer { - // If not done already, precompute constants. - wasm.call('pedersen__init'); - const inputVectors = serializeBufferArrayToVector(inputs); - wasm.writeMemory(0, inputVectors); - wasm.call('pedersen__hash_multiple', 0, 0); - return Buffer.from(wasm.getMemorySlice(0, 32)); + return pedersenCompressWithHashIndex(wasm, inputs, 0); } /** @@ -52,12 +39,7 @@ export function pedersenHashInputs(wasm: IWasmModule, inputs: Buffer[]): Buffer * purposes. */ export function pedersenCompressInputs(wasm: IWasmModule, inputs: Buffer[]): Buffer { - // If not done already, precompute constants. 
- wasm.call('pedersen__init'); - const inputVectors = serializeBufferArrayToVector(inputs); - wasm.writeMemory(0, inputVectors); - wasm.call('pedersen__compress', 0, 0); - return Buffer.from(wasm.getMemorySlice(0, 32)); + return pedersenCompressWithHashIndex(wasm, inputs, 0); } /** From 4954a42f3fcbee9174bd7af38a203d183e2f4453 Mon Sep 17 00:00:00 2001 From: kevaundray Date: Wed, 25 Oct 2023 18:44:29 +0000 Subject: [PATCH 05/17] remove pedersenGetHash --- .../barretenberg/crypto/pedersen/pedersen.ts | 18 ------------------ yarn-project/merkle-tree/src/pedersen.ts | 9 --------- yarn-project/types/src/interfaces/hasher.ts | 7 ------- 3 files changed, 34 deletions(-) diff --git a/yarn-project/circuits.js/src/barretenberg/crypto/pedersen/pedersen.ts b/yarn-project/circuits.js/src/barretenberg/crypto/pedersen/pedersen.ts index 9a2843a9196..d0c388af82b 100644 --- a/yarn-project/circuits.js/src/barretenberg/crypto/pedersen/pedersen.ts +++ b/yarn-project/circuits.js/src/barretenberg/crypto/pedersen/pedersen.ts @@ -60,24 +60,6 @@ export function pedersenCompressWithHashIndex(wasm: IWasmModule, inputs: Buffer[ return Buffer.from(wasm.getMemorySlice(0, 32)); } -/** - * Get a 32-byte pedersen hash from a buffer. - * @param wasm - The barretenberg module. - * @param data - The data buffer. - * @returns The hash buffer. - * @deprecated Don't call pedersen directly in production code. Instead, create suitably-named functions for specific - * purposes. - */ -export function pedersenGetHash(wasm: IWasmModule, data: Buffer): Buffer { - // If not done already, precompute constants. - wasm.call('pedersen__init'); - const mem = wasm.call('bbmalloc', data.length); - wasm.writeMemory(mem, data); - wasm.call('pedersen__buffer_to_field', mem, data.length, 0); - wasm.call('bbfree', mem); - return Buffer.from(wasm.getMemorySlice(0, 32)); -} - /** * Given a buffer containing 32 byte pedersen leaves, return a new buffer containing the leaves and all pairs of nodes * that define a merkle tree. diff --git a/yarn-project/merkle-tree/src/pedersen.ts b/yarn-project/merkle-tree/src/pedersen.ts index 7c2c1caecd2..a3163403b32 100644 --- a/yarn-project/merkle-tree/src/pedersen.ts +++ b/yarn-project/merkle-tree/src/pedersen.ts @@ -1,6 +1,5 @@ import { pedersenCompress, - pedersenGetHash, pedersenGetHashTree, pedersenHashInputs, } from '@aztec/circuits.js/barretenberg'; @@ -31,14 +30,6 @@ export class Pedersen implements Hasher { return pedersenHashInputs(this.wasm, inputs); } - /* - * @deprecated Don't call pedersen directly in production code. Instead, create suitably-named functions for specific - * purposes. - */ - public hashToField(data: Uint8Array): Buffer { - return pedersenGetHash(this.wasm, Buffer.from(data)); - } - /* * @deprecated Don't call pedersen directly in production code. Instead, create suitably-named functions for specific * purposes. diff --git a/yarn-project/types/src/interfaces/hasher.ts b/yarn-project/types/src/interfaces/hasher.ts index aea50dc0ae1..e5e7178972d 100644 --- a/yarn-project/types/src/interfaces/hasher.ts +++ b/yarn-project/types/src/interfaces/hasher.ts @@ -17,13 +17,6 @@ export interface Hasher { */ compressInputs(inputs: Buffer[]): Buffer; - /** - * Get a 32-byte hash from a buffer. - * @param data - The data buffer. - * @returns The resulting hash buffer. - */ - hashToField(data: Uint8Array): Buffer; - /** * Given a buffer containing 32 byte leaves, return a new buffer containing the leaves and all pairs of * nodes that define a merkle tree. 
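Note on the surface that remains after this patch: with hashToField / pedersenGetHash gone, fixed-width inputs go through the input-array hash that the series keeps. A minimal sketch of that remaining path, assuming the @aztec/circuits.js exports used elsewhere in these patches (the wrapper name and values below are illustrative, not code from the patch):

    import { CircuitsWasm, Fr } from '@aztec/circuits.js';
    import { pedersenHashInputs } from '@aztec/circuits.js/barretenberg';

    // Hash two field elements with the retained helper: each input is the
    // 32-byte encoding of a field, and the 32-byte result is read back as Fr.
    async function hashTwoFields(a: Fr, b: Fr): Promise<Fr> {
      const wasm = await CircuitsWasm.get();
      return Fr.fromBuffer(pedersenHashInputs(wasm, [a, b].map(f => f.toBuffer())));
    }
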
From 3c168245350f7231557fe77c46fbd1d2ab153cff Mon Sep 17 00:00:00 2001 From: kevaundray Date: Wed, 25 Oct 2023 18:45:49 +0000 Subject: [PATCH 06/17] lint --- .../src/barretenberg/crypto/pedersen/pedersen.ts | 2 +- yarn-project/merkle-tree/src/pedersen.ts | 6 +----- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/yarn-project/circuits.js/src/barretenberg/crypto/pedersen/pedersen.ts b/yarn-project/circuits.js/src/barretenberg/crypto/pedersen/pedersen.ts index d0c388af82b..786976580f5 100644 --- a/yarn-project/circuits.js/src/barretenberg/crypto/pedersen/pedersen.ts +++ b/yarn-project/circuits.js/src/barretenberg/crypto/pedersen/pedersen.ts @@ -27,7 +27,7 @@ export function pedersenCompress(wasm: IWasmModule, lhs: Uint8Array, rhs: Uint8A * purposes. */ export function pedersenHashInputs(wasm: IWasmModule, inputs: Buffer[]): Buffer { - return pedersenCompressWithHashIndex(wasm, inputs, 0); + return pedersenCompressWithHashIndex(wasm, inputs, 0); } /** diff --git a/yarn-project/merkle-tree/src/pedersen.ts b/yarn-project/merkle-tree/src/pedersen.ts index a3163403b32..a4c0b57bb9a 100644 --- a/yarn-project/merkle-tree/src/pedersen.ts +++ b/yarn-project/merkle-tree/src/pedersen.ts @@ -1,8 +1,4 @@ -import { - pedersenCompress, - pedersenGetHashTree, - pedersenHashInputs, -} from '@aztec/circuits.js/barretenberg'; +import { pedersenCompress, pedersenGetHashTree, pedersenHashInputs } from '@aztec/circuits.js/barretenberg'; import { IWasmModule } from '@aztec/foundation/wasm'; import { Hasher } from '@aztec/types'; From c3a54d0e1b0850054aacc657d6ea9a5be2a39c9a Mon Sep 17 00:00:00 2001 From: kevaundray Date: Wed, 25 Oct 2023 19:15:39 +0000 Subject: [PATCH 07/17] add test to show that some methods are all the same --- .../ts/src/barretenberg_api/pedersen.test.ts | 28 +++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/barretenberg/ts/src/barretenberg_api/pedersen.test.ts b/barretenberg/ts/src/barretenberg_api/pedersen.test.ts index 73529600bb9..fab866c452d 100644 --- a/barretenberg/ts/src/barretenberg_api/pedersen.test.ts +++ b/barretenberg/ts/src/barretenberg_api/pedersen.test.ts @@ -28,6 +28,34 @@ describe('pedersen', () => { expect(result).toEqual(new Fr(2152386650411553803409271316104075950536496387580531018130718456431861859990n)); }); + it('pedersenCompressAndHashSame', async () => { + const resultCompress = await api.pedersenCompressWithHashIndex([new Fr(4n), new Fr(8n)], 7); + const resultHash = await api.pedersenHashWithHashIndex([new Fr(4n), new Fr(8n)], 7); + expect(resultCompress).toEqual(resultHash); + }); + + it('pedersenHashWith0IndexSameAsNoIndex', async () => { + const resultHashImplicit0 = await api.pedersenHash([new Fr(4n), new Fr(8n)]); + const resultCompressImplicit0 = await api.pedersenCompress([new Fr(4n), new Fr(8n)]); + const resultCompressFieldsImplicit0 = await api.pedersenCompressFields(new Fr(4n), new Fr(8n)); + const resultHashExplicit0 = await api.pedersenHashWithHashIndex([new Fr(4n), new Fr(8n)], 0); + expect(resultHashImplicit0).toEqual(resultCompressImplicit0); + expect(resultHashImplicit0).toEqual(resultHashExplicit0); + expect(resultHashImplicit0).toEqual(resultCompressFieldsImplicit0); + }); + + it('pedersenHashPairSameAsWith0Index', async () => { + const resultHashPair = await api.pedersenHashPair(new Fr(4n), new Fr(8n)); + const resultHashExplicit0 = await api.pedersenHashWithHashIndex([new Fr(4n), new Fr(8n)], 0); + expect(resultHashExplicit0).toEqual(resultHashPair); + }); + + it('pedersenHashMultipleSameAsWith0Index', async () => { + 
const resultHashPair = await api.pedersenHashMultiple([new Fr(4n), new Fr(8n)]); + const resultHashExplicit0 = await api.pedersenHashWithHashIndex([new Fr(4n), new Fr(8n)], 0); + expect(resultHashExplicit0).toEqual(resultHashPair); + }); + it('pedersenCommit', async () => { const result = await api.pedersenCommit([new Fr(4n), new Fr(8n), new Fr(12n)]); expect(result).toEqual(new Fr(18374309251862457296563484909553154519357910650678202211610516068880120638872n)); From c1ebc734a33b1bee57bd560ffa066a7098b2e3b8 Mon Sep 17 00:00:00 2001 From: kevaundray Date: Wed, 25 Oct 2023 19:25:27 +0000 Subject: [PATCH 08/17] rename compress -> hash --- .../barretenberg/crypto/pedersen/pedersen.ts | 28 ++++++------------- yarn-project/merkle-tree/src/pedersen.ts | 8 +++--- yarn-project/types/src/interfaces/hasher.ts | 16 +++++------ 3 files changed, 20 insertions(+), 32 deletions(-) diff --git a/yarn-project/circuits.js/src/barretenberg/crypto/pedersen/pedersen.ts b/yarn-project/circuits.js/src/barretenberg/crypto/pedersen/pedersen.ts index 786976580f5..75dfe0ba512 100644 --- a/yarn-project/circuits.js/src/barretenberg/crypto/pedersen/pedersen.ts +++ b/yarn-project/circuits.js/src/barretenberg/crypto/pedersen/pedersen.ts @@ -5,7 +5,7 @@ import { Buffer } from 'buffer'; import { deserializeArrayFromVector, deserializeField, serializeBufferArrayToVector } from '../../serialize.js'; /** - * Compresses two 32-byte hashes. + * Hashes two 32-byte hashes. * @param wasm - The barretenberg module. * @param lhs - The first hash. * @param rhs - The second hash. @@ -13,8 +13,8 @@ import { deserializeArrayFromVector, deserializeField, serializeBufferArrayToVec * @deprecated Don't call pedersen directly in production code. Instead, create suitably-named functions for specific * purposes. */ -export function pedersenCompress(wasm: IWasmModule, lhs: Uint8Array, rhs: Uint8Array): Buffer { - return pedersenCompressWithHashIndex(wasm, [Buffer.from(lhs), Buffer.from(rhs)], 0); +export function pedersenHash(wasm: IWasmModule, lhs: Uint8Array, rhs: Uint8Array): Buffer { + return pedersenHashWithHashIndex(wasm, [Buffer.from(lhs), Buffer.from(rhs)], 0); } /** @@ -27,31 +27,19 @@ export function pedersenCompress(wasm: IWasmModule, lhs: Uint8Array, rhs: Uint8A * purposes. */ export function pedersenHashInputs(wasm: IWasmModule, inputs: Buffer[]): Buffer { - return pedersenCompressWithHashIndex(wasm, inputs, 0); + return pedersenHashWithHashIndex(wasm, inputs, 0); } /** - * Compresses an array of buffers. + * Hashes an array of buffers. * @param wasm - The barretenberg module. - * @param inputs - The array of buffers to compress. - * @returns The resulting 32-byte hash. - * @deprecated Don't call pedersen directly in production code. Instead, create suitably-named functions for specific - * purposes. - */ -export function pedersenCompressInputs(wasm: IWasmModule, inputs: Buffer[]): Buffer { - return pedersenCompressWithHashIndex(wasm, inputs, 0); -} - -/** - * Compresses an array of buffers. - * @param wasm - The barretenberg module. - * @param inputs - The array of buffers to compress. + * @param inputs - The array of buffers to hash. * @param hashIndex - Hash index of the generator to use (See GeneratorIndex enum). * @returns The resulting 32-byte hash. * @deprecated Don't call pedersen directly in production code. Instead, create suitably-named functions for specific * purposes. 
*/ -export function pedersenCompressWithHashIndex(wasm: IWasmModule, inputs: Buffer[], hashIndex: number): Buffer { +export function pedersenHashWithHashIndex(wasm: IWasmModule, inputs: Buffer[], hashIndex: number): Buffer { // If not done already, precompute constants. wasm.call('pedersen__init'); const inputVectors = serializeBufferArrayToVector(inputs); @@ -66,7 +54,7 @@ export function pedersenCompressWithHashIndex(wasm: IWasmModule, inputs: Buffer[ * * E.g. * Input: [1][2][3][4] - * Output: [1][2][3][4][compress(1,2)][compress(3,4)][compress(5,6)]. + * Output: [1][2][3][4][hash(1,2)][hash(3,4)][hash(5,6)]. * * @param wasm - The barretenberg module. * @param values - The 32 byte pedersen leaves. diff --git a/yarn-project/merkle-tree/src/pedersen.ts b/yarn-project/merkle-tree/src/pedersen.ts index a4c0b57bb9a..fa041d8bdb3 100644 --- a/yarn-project/merkle-tree/src/pedersen.ts +++ b/yarn-project/merkle-tree/src/pedersen.ts @@ -1,4 +1,4 @@ -import { pedersenCompress, pedersenGetHashTree, pedersenHashInputs } from '@aztec/circuits.js/barretenberg'; +import { pedersenHash, pedersenGetHashTree, pedersenHashInputs } from '@aztec/circuits.js/barretenberg'; import { IWasmModule } from '@aztec/foundation/wasm'; import { Hasher } from '@aztec/types'; @@ -14,15 +14,15 @@ export class Pedersen implements Hasher { * @deprecated Don't call pedersen directly in production code. Instead, create suitably-named functions for specific * purposes. */ - public compress(lhs: Uint8Array, rhs: Uint8Array): Buffer { - return pedersenCompress(this.wasm, lhs, rhs); + public hash(lhs: Uint8Array, rhs: Uint8Array): Buffer { + return pedersenHash(this.wasm, lhs, rhs); } /* * @deprecated Don't call pedersen directly in production code. Instead, create suitably-named functions for specific * purposes. */ - public compressInputs(inputs: Buffer[]): Buffer { + public hashInputs(inputs: Buffer[]): Buffer { return pedersenHashInputs(this.wasm, inputs); } diff --git a/yarn-project/types/src/interfaces/hasher.ts b/yarn-project/types/src/interfaces/hasher.ts index e5e7178972d..ee9877fbbb2 100644 --- a/yarn-project/types/src/interfaces/hasher.ts +++ b/yarn-project/types/src/interfaces/hasher.ts @@ -3,19 +3,19 @@ */ export interface Hasher { /** - * Compresses two 32-byte hashes. - * @param lhs - The first hash. - * @param rhs - The second hash. + * Hash two 32-byte hashes. + * @param lhs - The first 32-byte array. + * @param rhs - The second 32-byte array. * @returns The new 32-byte hash. */ - compress(lhs: Uint8Array, rhs: Uint8Array): Buffer; + hash(lhs: Uint8Array, rhs: Uint8Array): Buffer; /** - * Compresses an array of buffers. - * @param inputs - The array of buffers to compress. + * Hashes an array of buffers. + * @param inputs - The array of buffers to hash. * @returns The resulting 32-byte hash. */ - compressInputs(inputs: Buffer[]): Buffer; + hashInputs(inputs: Buffer[]): Buffer; /** * Given a buffer containing 32 byte leaves, return a new buffer containing the leaves and all pairs of @@ -23,7 +23,7 @@ export interface Hasher { * * E.g. * Input: [1][2][3][4] - * Output: [1][2][3][4][compress(1,2)][compress(3,4)][compress(5,6)]. + * Output: [1][2][3][4][hash(1,2)][hash(3,4)][hash(5,6)]. * * @param leaves - The 32 byte leaves. * @returns A tree represented by an array. 
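Note: after this rename, tree code only ever sees the two-argument hash and the array hash on the Hasher interface above. A small sketch of a consumer written against that interface (the function itself is illustrative; it mirrors the pairwise hashing the merkle-tree tests in this series perform):

    import { Hasher } from '@aztec/types';

    // Root of a depth-2 tree over four 32-byte leaves using only hash(lhs, rhs):
    // hash the two leaf pairs, then hash the two resulting nodes.
    function rootOfFour(hasher: Hasher, leaves: Buffer[]): Buffer {
      const left = hasher.hash(leaves[0], leaves[1]);
      const right = hasher.hash(leaves[2], leaves[3]);
      return hasher.hash(left, right);
    }
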
From 18a8e74186fb253fb8d53c4109ecdc452b69d569 Mon Sep 17 00:00:00 2001 From: kevaundray Date: Wed, 25 Oct 2023 19:26:01 +0000 Subject: [PATCH 09/17] rename codebase to use hash --- .../src/client/private_execution.test.ts | 6 +- .../acir-simulator/src/public/index.test.ts | 10 +- yarn-project/acir-simulator/src/utils.ts | 4 +- .../account/defaults/entrypoint_payload.ts | 4 +- yarn-project/aztec.js/src/utils/authwit.ts | 4 +- .../aztec.js/src/utils/cheat_codes.ts | 4 +- .../end-to-end/src/e2e_block_building.test.ts | 4 +- .../src/simulators/lending_simulator.ts | 4 +- .../src/sparse_tree/sparse_tree.test.ts | 30 ++--- .../standard_indexed_tree.ts | 2 +- .../test/standard_indexed_tree.test.ts | 120 +++++++++--------- .../src/standard_tree/standard_tree.test.ts | 18 +-- .../src/test/standard_based_test_suite.ts | 32 ++--- .../src/test/utils/pedersen_with_counter.ts | 20 +-- yarn-project/merkle-tree/src/tree_base.ts | 6 +- .../src/note_processor/note_processor.test.ts | 4 +- yarn-project/types/src/sibling_path.ts | 2 +- 17 files changed, 137 insertions(+), 137 deletions(-) diff --git a/yarn-project/acir-simulator/src/client/private_execution.test.ts b/yarn-project/acir-simulator/src/client/private_execution.test.ts index cb2016075d5..df61888a4e6 100644 --- a/yarn-project/acir-simulator/src/client/private_execution.test.ts +++ b/yarn-project/acir-simulator/src/client/private_execution.test.ts @@ -22,7 +22,7 @@ import { computeVarArgsHash, siloCommitment, } from '@aztec/circuits.js/abis'; -import { pedersenCompressInputs } from '@aztec/circuits.js/barretenberg'; +import { pedersenHashInputs } from '@aztec/circuits.js/barretenberg'; import { makeContractDeploymentData } from '@aztec/circuits.js/factories'; import { FunctionArtifact, FunctionSelector, encodeArguments } from '@aztec/foundation/abi'; import { asyncMap } from '@aztec/foundation/async-map'; @@ -145,10 +145,10 @@ describe('Private Execution test suite', () => { return trees[name]; }; - const hash = (data: Buffer[]) => pedersenCompressInputs(circuitsWasm, data); + const hash = (data: Buffer[]) => pedersenHashInputs(circuitsWasm, data); const hashFields = (data: Fr[]) => Fr.fromBuffer( - pedersenCompressInputs( + pedersenHashInputs( circuitsWasm, data.map(f => f.toBuffer()), ), diff --git a/yarn-project/acir-simulator/src/public/index.test.ts b/yarn-project/acir-simulator/src/public/index.test.ts index 8976e59062d..093c823f6cc 100644 --- a/yarn-project/acir-simulator/src/public/index.test.ts +++ b/yarn-project/acir-simulator/src/public/index.test.ts @@ -6,7 +6,7 @@ import { HistoricBlockData, L1_TO_L2_MSG_TREE_HEIGHT, } from '@aztec/circuits.js'; -import { pedersenCompressInputs } from '@aztec/circuits.js/barretenberg'; +import { pedersenHashInputs } from '@aztec/circuits.js/barretenberg'; import { FunctionArtifact, FunctionSelector, encodeArguments } from '@aztec/foundation/abi'; import { AztecAddress } from '@aztec/foundation/aztec-address'; import { EthAddress } from '@aztec/foundation/eth-address'; @@ -335,9 +335,9 @@ describe('ACIR public execution simulator', () => { // Assert the commitment was created expect(result.newCommitments.length).toEqual(1); - const expectedNoteHash = pedersenCompressInputs(wasm, [amount.toBuffer(), secretHash.toBuffer()]); + const expectedNoteHash = pedersenHashInputs(wasm, [amount.toBuffer(), secretHash.toBuffer()]); const storageSlot = new Fr(5); // for pending_shields - const expectedInnerNoteHash = pedersenCompressInputs(wasm, [storageSlot.toBuffer(), expectedNoteHash]); + const expectedInnerNoteHash 
= pedersenHashInputs(wasm, [storageSlot.toBuffer(), expectedNoteHash]); expect(result.newCommitments[0].toBuffer()).toEqual(expectedInnerNoteHash); }); @@ -365,7 +365,7 @@ describe('ACIR public execution simulator', () => { // Assert the l2 to l1 message was created expect(result.newL2ToL1Messages.length).toEqual(1); - const expectedNewMessageValue = pedersenCompressInputs( + const expectedNewMessageValue = pedersenHashInputs( wasm, params.map(a => a.toBuffer()), ); @@ -452,7 +452,7 @@ describe('ACIR public execution simulator', () => { // Assert the l2 to l1 message was created expect(result.newNullifiers.length).toEqual(1); - const expectedNewMessageValue = pedersenCompressInputs( + const expectedNewMessageValue = pedersenHashInputs( wasm, params.map(a => a.toBuffer()), ); diff --git a/yarn-project/acir-simulator/src/utils.ts b/yarn-project/acir-simulator/src/utils.ts index 5711558d00c..45daead148b 100644 --- a/yarn-project/acir-simulator/src/utils.ts +++ b/yarn-project/acir-simulator/src/utils.ts @@ -1,5 +1,5 @@ import { CircuitsWasm, GrumpkinPrivateKey } from '@aztec/circuits.js'; -import { Grumpkin, pedersenCompressInputs } from '@aztec/circuits.js/barretenberg'; +import { Grumpkin, pedersenHashInputs } from '@aztec/circuits.js/barretenberg'; import { Fr } from '@aztec/foundation/fields'; /** @@ -24,7 +24,7 @@ export function computeSlotForMapping(mappingSlot: Fr, owner: NoirPoint | Fr, bb const ownerField = isFr(owner) ? owner : new Fr(owner.x); return Fr.fromBuffer( - pedersenCompressInputs( + pedersenHashInputs( bbWasm, [mappingSlot, ownerField].map(f => f.toBuffer()), ), diff --git a/yarn-project/aztec.js/src/account/defaults/entrypoint_payload.ts b/yarn-project/aztec.js/src/account/defaults/entrypoint_payload.ts index 7c8c1417cc8..1dca06c2ffd 100644 --- a/yarn-project/aztec.js/src/account/defaults/entrypoint_payload.ts +++ b/yarn-project/aztec.js/src/account/defaults/entrypoint_payload.ts @@ -1,5 +1,5 @@ import { CircuitsWasm, Fr, GeneratorIndex } from '@aztec/circuits.js'; -import { pedersenCompressWithHashIndex } from '@aztec/circuits.js/barretenberg'; +import { pedersenHashWithHashIndex } from '@aztec/circuits.js/barretenberg'; import { padArrayEnd } from '@aztec/foundation/collection'; import { FunctionCall, PackedArguments, emptyFunctionCall } from '@aztec/types'; @@ -69,7 +69,7 @@ export async function buildPayload(calls: FunctionCall[]): Promise<{ /** Compresses an entrypoint payload to a 32-byte buffer (useful for signing) */ export async function hashPayload(payload: EntrypointPayload) { - return pedersenCompressWithHashIndex( + return pedersenHashWithHashIndex( await CircuitsWasm.get(), flattenPayload(payload).map(fr => fr.toBuffer()), GeneratorIndex.SIGNATURE_PAYLOAD, diff --git a/yarn-project/aztec.js/src/utils/authwit.ts b/yarn-project/aztec.js/src/utils/authwit.ts index ca04db12928..74f04862c4c 100644 --- a/yarn-project/aztec.js/src/utils/authwit.ts +++ b/yarn-project/aztec.js/src/utils/authwit.ts @@ -1,5 +1,5 @@ import { AztecAddress, CircuitsWasm, GeneratorIndex } from '@aztec/circuits.js'; -import { pedersenCompressWithHashIndex } from '@aztec/circuits.js/barretenberg'; +import { pedersenHashWithHashIndex } from '@aztec/circuits.js/barretenberg'; import { FunctionCall, PackedArguments } from '@aztec/types'; // docs:start:authwit_computeAuthWitMessageHash @@ -12,7 +12,7 @@ import { FunctionCall, PackedArguments } from '@aztec/types'; */ export const computeAuthWitMessageHash = async (caller: AztecAddress, request: FunctionCall) => { const wasm = await 
CircuitsWasm.get(); - return pedersenCompressWithHashIndex( + return pedersenHashWithHashIndex( wasm, [ caller.toField(), diff --git a/yarn-project/aztec.js/src/utils/cheat_codes.ts b/yarn-project/aztec.js/src/utils/cheat_codes.ts index 7b1b825cd1d..8abc6ba5ee9 100644 --- a/yarn-project/aztec.js/src/utils/cheat_codes.ts +++ b/yarn-project/aztec.js/src/utils/cheat_codes.ts @@ -1,5 +1,5 @@ import { AztecAddress, CircuitsWasm, EthAddress, Fr } from '@aztec/circuits.js'; -import { pedersenCompressInputs } from '@aztec/circuits.js/barretenberg'; +import { pedersenHashInputs } from '@aztec/circuits.js/barretenberg'; import { toBigIntBE, toHex } from '@aztec/foundation/bigint-buffer'; import { keccak } from '@aztec/foundation/crypto'; import { createDebugLogger } from '@aztec/foundation/log'; @@ -236,7 +236,7 @@ export class AztecCheatCodes { // Based on `at` function in // aztec3-packages/yarn-project/aztec-nr/aztec/src/state_vars/map.nr return Fr.fromBuffer( - pedersenCompressInputs( + pedersenHashInputs( this.wasm, [new Fr(baseSlot), new Fr(key)].map(f => f.toBuffer()), ), diff --git a/yarn-project/end-to-end/src/e2e_block_building.test.ts b/yarn-project/end-to-end/src/e2e_block_building.test.ts index 5b680a65058..cf74d2c5b1e 100644 --- a/yarn-project/end-to-end/src/e2e_block_building.test.ts +++ b/yarn-project/end-to-end/src/e2e_block_building.test.ts @@ -7,7 +7,7 @@ import { isContractDeployed, } from '@aztec/aztec.js'; import { CircuitsWasm } from '@aztec/circuits.js'; -import { pedersenCompressInputs } from '@aztec/circuits.js/barretenberg'; +import { pedersenHashInputs } from '@aztec/circuits.js/barretenberg'; import { DebugLogger } from '@aztec/foundation/log'; import { TestContractArtifact } from '@aztec/noir-contracts/artifacts'; import { TestContract, TokenContract } from '@aztec/noir-contracts/types'; @@ -135,7 +135,7 @@ describe('e2e_block_building', () => { it('drops tx with private nullifier already emitted from public on the same block', async () => { const secret = Fr.random(); // See yarn-project/acir-simulator/src/public/index.test.ts 'Should be able to create a nullifier from the public context' - const emittedPublicNullifier = pedersenCompressInputs( + const emittedPublicNullifier = pedersenHashInputs( await CircuitsWasm.get(), [new Fr(140), secret].map(a => a.toBuffer()), ); diff --git a/yarn-project/end-to-end/src/simulators/lending_simulator.ts b/yarn-project/end-to-end/src/simulators/lending_simulator.ts index 994a7875623..17cfa0ddc03 100644 --- a/yarn-project/end-to-end/src/simulators/lending_simulator.ts +++ b/yarn-project/end-to-end/src/simulators/lending_simulator.ts @@ -1,7 +1,7 @@ // Convenience struct to hold an account's address and secret that can easily be passed around. 
import { CheatCodes } from '@aztec/aztec.js'; import { AztecAddress, CircuitsWasm, Fr } from '@aztec/circuits.js'; -import { pedersenCompressInputs } from '@aztec/circuits.js/barretenberg'; +import { pedersenHashInputs } from '@aztec/circuits.js/barretenberg'; import { LendingContract } from '@aztec/noir-contracts/types'; import { TokenSimulator } from './token_simulator.js'; @@ -26,7 +26,7 @@ export class LendingAccount { */ public async key(): Promise { return Fr.fromBuffer( - pedersenCompressInputs( + pedersenHashInputs( await CircuitsWasm.get(), [this.address, this.secret].map(f => f.toBuffer()), ), diff --git a/yarn-project/merkle-tree/src/sparse_tree/sparse_tree.test.ts b/yarn-project/merkle-tree/src/sparse_tree/sparse_tree.test.ts index d4b040b8a3e..d5ed72d5e58 100644 --- a/yarn-project/merkle-tree/src/sparse_tree/sparse_tree.test.ts +++ b/yarn-project/merkle-tree/src/sparse_tree/sparse_tree.test.ts @@ -95,11 +95,11 @@ describe('SparseTreeSpecific', () => { const db = levelup(createMemDown()); const tree = await createDb(db, pedersen, 'test', 3); - const level2ZeroHash = pedersen.compress(INITIAL_LEAF, INITIAL_LEAF); - const level1ZeroHash = pedersen.compress(level2ZeroHash, level2ZeroHash); + const level2ZeroHash = pedersen.hash(INITIAL_LEAF, INITIAL_LEAF); + const level1ZeroHash = pedersen.hash(level2ZeroHash, level2ZeroHash); expect(tree.getNumLeaves(false)).toEqual(0n); - expect(tree.getRoot(false)).toEqual(pedersen.compress(level1ZeroHash, level1ZeroHash)); + expect(tree.getRoot(false)).toEqual(pedersen.hash(level1ZeroHash, level1ZeroHash)); // Insert leaf at index 3 let level1LeftHash: Buffer; @@ -107,9 +107,9 @@ describe('SparseTreeSpecific', () => { { await tree.updateLeaf(leafAtIndex3, 3n); expect(tree.getNumLeaves(true)).toEqual(1n); - const level2Hash = pedersen.compress(INITIAL_LEAF, leafAtIndex3); - level1LeftHash = pedersen.compress(level2ZeroHash, level2Hash); - const root = pedersen.compress(level1LeftHash, level1ZeroHash); + const level2Hash = pedersen.hash(INITIAL_LEAF, leafAtIndex3); + level1LeftHash = pedersen.hash(level2ZeroHash, level2Hash); + const root = pedersen.hash(level1LeftHash, level1ZeroHash); expect(tree.getRoot(true)).toEqual(root); expect(await tree.getSiblingPath(3n, true)).toEqual( new SiblingPath(TEST_TREE_DEPTH, [INITIAL_LEAF, level2ZeroHash, level1ZeroHash]), @@ -122,9 +122,9 @@ describe('SparseTreeSpecific', () => { const leafAtIndex6 = randomBytes(32); await tree.updateLeaf(leafAtIndex6, 6n); expect(tree.getNumLeaves(true)).toEqual(2n); - const level2Hash = pedersen.compress(leafAtIndex6, INITIAL_LEAF); - level1RightHash = pedersen.compress(level2ZeroHash, level2Hash); - const root = pedersen.compress(level1LeftHash, level1RightHash); + const level2Hash = pedersen.hash(leafAtIndex6, INITIAL_LEAF); + level1RightHash = pedersen.hash(level2ZeroHash, level2Hash); + const root = pedersen.hash(level1LeftHash, level1RightHash); expect(tree.getRoot(true)).toEqual(root); expect(await tree.getSiblingPath(6n, true)).toEqual( new SiblingPath(TEST_TREE_DEPTH, [INITIAL_LEAF, level2ZeroHash, level1LeftHash]), @@ -136,9 +136,9 @@ describe('SparseTreeSpecific', () => { { await tree.updateLeaf(leafAtIndex2, 2n); expect(tree.getNumLeaves(true)).toEqual(3n); - const level2Hash = pedersen.compress(leafAtIndex2, leafAtIndex3); - level1LeftHash = pedersen.compress(level2ZeroHash, level2Hash); - const root = pedersen.compress(level1LeftHash, level1RightHash); + const level2Hash = pedersen.hash(leafAtIndex2, leafAtIndex3); + level1LeftHash = 
pedersen.hash(level2ZeroHash, level2Hash); + const root = pedersen.hash(level1LeftHash, level1RightHash); expect(tree.getRoot(true)).toEqual(root); expect(await tree.getSiblingPath(2n, true)).toEqual( new SiblingPath(TEST_TREE_DEPTH, [leafAtIndex3, level2ZeroHash, level1RightHash]), @@ -150,9 +150,9 @@ describe('SparseTreeSpecific', () => { const updatedLeafAtIndex3 = randomBytes(32); await tree.updateLeaf(updatedLeafAtIndex3, 3n); expect(tree.getNumLeaves(true)).toEqual(3n); - const level2Hash = pedersen.compress(leafAtIndex2, updatedLeafAtIndex3); - level1LeftHash = pedersen.compress(level2ZeroHash, level2Hash); - const root = pedersen.compress(level1LeftHash, level1RightHash); + const level2Hash = pedersen.hash(leafAtIndex2, updatedLeafAtIndex3); + level1LeftHash = pedersen.hash(level2ZeroHash, level2Hash); + const root = pedersen.hash(level1LeftHash, level1RightHash); expect(tree.getRoot(true)).toEqual(root); expect(await tree.getSiblingPath(3n, true)).toEqual( new SiblingPath(TEST_TREE_DEPTH, [leafAtIndex2, level2ZeroHash, level1RightHash]), diff --git a/yarn-project/merkle-tree/src/standard_indexed_tree/standard_indexed_tree.ts b/yarn-project/merkle-tree/src/standard_indexed_tree/standard_indexed_tree.ts index 93d88e71bef..417ae63be6a 100644 --- a/yarn-project/merkle-tree/src/standard_indexed_tree/standard_indexed_tree.ts +++ b/yarn-project/merkle-tree/src/standard_indexed_tree/standard_indexed_tree.ts @@ -573,7 +573,7 @@ export class StandardIndexedTree extends TreeBase implements IndexedTree { if (!hash0Leaf && leaf.value == 0n) { encodedLeaf = toBufferBE(0n, 32); } else { - encodedLeaf = this.hasher.compressInputs( + encodedLeaf = this.hasher.hashInputs( [leaf.value, leaf.nextIndex, leaf.nextValue].map(val => toBufferBE(val, 32)), ); } diff --git a/yarn-project/merkle-tree/src/standard_indexed_tree/test/standard_indexed_tree.test.ts b/yarn-project/merkle-tree/src/standard_indexed_tree/test/standard_indexed_tree.test.ts index cb3f861dde2..d2f2c187609 100644 --- a/yarn-project/merkle-tree/src/standard_indexed_tree/test/standard_indexed_tree.test.ts +++ b/yarn-project/merkle-tree/src/standard_indexed_tree/test/standard_indexed_tree.test.ts @@ -61,19 +61,19 @@ describe('StandardIndexedTreeSpecific', () => { * nextVal 0 0 0 0 0 0 0 0. */ - const initialLeafHash = pedersen.compressInputs(createIndexedTreeLeaf(0, 0, 0)); - const level1ZeroHash = pedersen.compress(INITIAL_LEAF, INITIAL_LEAF); - const level2ZeroHash = pedersen.compress(level1ZeroHash, level1ZeroHash); + const initialLeafHash = pedersen.hashInputs(createIndexedTreeLeaf(0, 0, 0)); + const level1ZeroHash = pedersen.hash(INITIAL_LEAF, INITIAL_LEAF); + const level2ZeroHash = pedersen.hash(level1ZeroHash, level1ZeroHash); let index0Hash = initialLeafHash; // Each element is named by the level followed by the index on that level. E.g. 
e10 -> level 1, index 0, e21 -> level 2, index 1 - let e10 = pedersen.compress(index0Hash, INITIAL_LEAF); - let e20 = pedersen.compress(e10, level1ZeroHash); + let e10 = pedersen.hash(index0Hash, INITIAL_LEAF); + let e20 = pedersen.hash(e10, level1ZeroHash); const initialE20 = e20; // Kept for calculating committed state later const initialE10 = e10; - let root = pedersen.compress(e20, level2ZeroHash); + let root = pedersen.hash(e20, level2ZeroHash); const initialRoot = root; const emptySiblingPath = new SiblingPath(TEST_TREE_DEPTH, [INITIAL_LEAF, level1ZeroHash, level2ZeroHash]); @@ -95,11 +95,11 @@ describe('StandardIndexedTreeSpecific', () => { * nextIdx 1 0 0 0 0 0 0 0 * nextVal 30 0 0 0 0 0 0 0. */ - index0Hash = pedersen.compressInputs(createIndexedTreeLeaf(0, 1, 30)); - let index1Hash = pedersen.compressInputs(createIndexedTreeLeaf(30, 0, 0)); - e10 = pedersen.compress(index0Hash, index1Hash); - e20 = pedersen.compress(e10, level1ZeroHash); - root = pedersen.compress(e20, level2ZeroHash); + index0Hash = pedersen.hashInputs(createIndexedTreeLeaf(0, 1, 30)); + let index1Hash = pedersen.hashInputs(createIndexedTreeLeaf(30, 0, 0)); + e10 = pedersen.hash(index0Hash, index1Hash); + e20 = pedersen.hash(e10, level1ZeroHash); + root = pedersen.hash(e20, level2ZeroHash); await tree.appendLeaves([toBufferBE(30n, 32)]); @@ -122,12 +122,12 @@ describe('StandardIndexedTreeSpecific', () => { * nextIdx 2 0 1 0 0 0 0 0 * nextVal 10 0 30 0 0 0 0 0. */ - index0Hash = pedersen.compressInputs(createIndexedTreeLeaf(0, 2, 10)); - let index2Hash = pedersen.compressInputs(createIndexedTreeLeaf(10, 1, 30)); - e10 = pedersen.compress(index0Hash, index1Hash); - let e11 = pedersen.compress(index2Hash, INITIAL_LEAF); - e20 = pedersen.compress(e10, e11); - root = pedersen.compress(e20, level2ZeroHash); + index0Hash = pedersen.hashInputs(createIndexedTreeLeaf(0, 2, 10)); + let index2Hash = pedersen.hashInputs(createIndexedTreeLeaf(10, 1, 30)); + e10 = pedersen.hash(index0Hash, index1Hash); + let e11 = pedersen.hash(index2Hash, INITIAL_LEAF); + e20 = pedersen.hash(e10, e11); + root = pedersen.hash(e20, level2ZeroHash); await tree.appendLeaves([toBufferBE(10n, 32)]); @@ -154,12 +154,12 @@ describe('StandardIndexedTreeSpecific', () => { * nextIdx 2 0 3 1 0 0 0 0 * nextVal 10 0 20 30 0 0 0 0. */ - e10 = pedersen.compress(index0Hash, index1Hash); - index2Hash = pedersen.compressInputs(createIndexedTreeLeaf(10, 3, 20)); - const index3Hash = pedersen.compressInputs(createIndexedTreeLeaf(20, 1, 30)); - e11 = pedersen.compress(index2Hash, index3Hash); - e20 = pedersen.compress(e10, e11); - root = pedersen.compress(e20, level2ZeroHash); + e10 = pedersen.hash(index0Hash, index1Hash); + index2Hash = pedersen.hashInputs(createIndexedTreeLeaf(10, 3, 20)); + const index3Hash = pedersen.hashInputs(createIndexedTreeLeaf(20, 1, 30)); + e11 = pedersen.hash(index2Hash, index3Hash); + e20 = pedersen.hash(e10, e11); + root = pedersen.hash(e20, level2ZeroHash); await tree.appendLeaves([toBufferBE(20n, 32)]); @@ -186,13 +186,13 @@ describe('StandardIndexedTreeSpecific', () => { * nextIdx 2 4 3 1 0 0 0 0 * nextVal 10 50 20 30 0 0 0 0. 
*/ - index1Hash = pedersen.compressInputs(createIndexedTreeLeaf(30, 4, 50)); - const index4Hash = pedersen.compressInputs(createIndexedTreeLeaf(50, 0, 0)); - e10 = pedersen.compress(index0Hash, index1Hash); - e20 = pedersen.compress(e10, e11); - const e12 = pedersen.compress(index4Hash, INITIAL_LEAF); - const e21 = pedersen.compress(e12, level1ZeroHash); - root = pedersen.compress(e20, e21); + index1Hash = pedersen.hashInputs(createIndexedTreeLeaf(30, 4, 50)); + const index4Hash = pedersen.hashInputs(createIndexedTreeLeaf(50, 0, 0)); + e10 = pedersen.hash(index0Hash, index1Hash); + e20 = pedersen.hash(e10, e11); + const e12 = pedersen.hash(index4Hash, INITIAL_LEAF); + const e21 = pedersen.hash(e12, level1ZeroHash); + root = pedersen.hash(e20, e21); await tree.appendLeaves([toBufferBE(50n, 32)]); @@ -259,18 +259,18 @@ describe('StandardIndexedTreeSpecific', () => { */ const INITIAL_LEAF = toBufferBE(0n, 32); - const initialLeafHash = pedersen.compressInputs(createIndexedTreeLeaf(0, 0, 0)); - const level1ZeroHash = pedersen.compress(INITIAL_LEAF, INITIAL_LEAF); - const level2ZeroHash = pedersen.compress(level1ZeroHash, level1ZeroHash); + const initialLeafHash = pedersen.hashInputs(createIndexedTreeLeaf(0, 0, 0)); + const level1ZeroHash = pedersen.hash(INITIAL_LEAF, INITIAL_LEAF); + const level2ZeroHash = pedersen.hash(level1ZeroHash, level1ZeroHash); let index0Hash = initialLeafHash; - let e10 = pedersen.compress(index0Hash, INITIAL_LEAF); - let e20 = pedersen.compress(e10, level1ZeroHash); + let e10 = pedersen.hash(index0Hash, INITIAL_LEAF); + let e20 = pedersen.hash(e10, level1ZeroHash); const inite10 = e10; const inite20 = e20; - let root = pedersen.compress(e20, level2ZeroHash); + let root = pedersen.hash(e20, level2ZeroHash); const initialRoot = root; const emptySiblingPath = new SiblingPath(TEST_TREE_DEPTH, [INITIAL_LEAF, level1ZeroHash, level2ZeroHash]); @@ -293,11 +293,11 @@ describe('StandardIndexedTreeSpecific', () => { * nextIdx 1 0 0 0 0 0 0 0 * nextVal 30 0 0 0 0 0 0 0. */ - index0Hash = pedersen.compressInputs(createIndexedTreeLeaf(0, 1, 30)); - let index1Hash = pedersen.compressInputs(createIndexedTreeLeaf(30, 0, 0)); - e10 = pedersen.compress(index0Hash, index1Hash); - e20 = pedersen.compress(e10, level1ZeroHash); - root = pedersen.compress(e20, level2ZeroHash); + index0Hash = pedersen.hashInputs(createIndexedTreeLeaf(0, 1, 30)); + let index1Hash = pedersen.hashInputs(createIndexedTreeLeaf(30, 0, 0)); + e10 = pedersen.hash(index0Hash, index1Hash); + e20 = pedersen.hash(e10, level1ZeroHash); + root = pedersen.hash(e20, level2ZeroHash); await tree.appendLeaves([toBufferBE(30n, 32)]); @@ -319,12 +319,12 @@ describe('StandardIndexedTreeSpecific', () => { * nextIdx 2 0 1 0 0 0 0 0 * nextVal 10 0 30 0 0 0 0 0. 
*/ - index0Hash = pedersen.compressInputs(createIndexedTreeLeaf(0, 2, 10)); - let index2Hash = pedersen.compressInputs(createIndexedTreeLeaf(10, 1, 30)); - e10 = pedersen.compress(index0Hash, index1Hash); - let e11 = pedersen.compress(index2Hash, INITIAL_LEAF); - e20 = pedersen.compress(e10, e11); - root = pedersen.compress(e20, level2ZeroHash); + index0Hash = pedersen.hashInputs(createIndexedTreeLeaf(0, 2, 10)); + let index2Hash = pedersen.hashInputs(createIndexedTreeLeaf(10, 1, 30)); + e10 = pedersen.hash(index0Hash, index1Hash); + let e11 = pedersen.hash(index2Hash, INITIAL_LEAF); + e20 = pedersen.hash(e10, e11); + root = pedersen.hash(e20, level2ZeroHash); await tree.appendLeaves([toBufferBE(10n, 32)]); @@ -351,12 +351,12 @@ describe('StandardIndexedTreeSpecific', () => { * nextIdx 2 0 3 1 0 0 0 0 * nextVal 10 0 20 30 0 0 0 0. */ - e10 = pedersen.compress(index0Hash, index1Hash); - index2Hash = pedersen.compressInputs(createIndexedTreeLeaf(10, 3, 20)); - const index3Hash = pedersen.compressInputs(createIndexedTreeLeaf(20, 1, 30)); - e11 = pedersen.compress(index2Hash, index3Hash); - e20 = pedersen.compress(e10, e11); - root = pedersen.compress(e20, level2ZeroHash); + e10 = pedersen.hash(index0Hash, index1Hash); + index2Hash = pedersen.hashInputs(createIndexedTreeLeaf(10, 3, 20)); + const index3Hash = pedersen.hashInputs(createIndexedTreeLeaf(20, 1, 30)); + e11 = pedersen.hash(index2Hash, index3Hash); + e20 = pedersen.hash(e10, e11); + root = pedersen.hash(e20, level2ZeroHash); await tree.appendLeaves([toBufferBE(20n, 32)]); @@ -391,13 +391,13 @@ describe('StandardIndexedTreeSpecific', () => { * nextIdx 2 6 3 1 0 0 0 0 * nextVal 10 50 20 30 0 0 0 0. */ - index1Hash = pedersen.compressInputs(createIndexedTreeLeaf(30, 6, 50)); - const index6Hash = pedersen.compressInputs(createIndexedTreeLeaf(50, 0, 0)); - e10 = pedersen.compress(index0Hash, index1Hash); - e20 = pedersen.compress(e10, e11); - const e13 = pedersen.compress(index6Hash, INITIAL_LEAF); - const e21 = pedersen.compress(level1ZeroHash, e13); - root = pedersen.compress(e20, e21); + index1Hash = pedersen.hashInputs(createIndexedTreeLeaf(30, 6, 50)); + const index6Hash = pedersen.hashInputs(createIndexedTreeLeaf(50, 0, 0)); + e10 = pedersen.hash(index0Hash, index1Hash); + e20 = pedersen.hash(e10, e11); + const e13 = pedersen.hash(index6Hash, INITIAL_LEAF); + const e21 = pedersen.hash(level1ZeroHash, e13); + root = pedersen.hash(e20, e21); await tree.appendLeaves([toBufferBE(50n, 32)]); diff --git a/yarn-project/merkle-tree/src/standard_tree/standard_tree.test.ts b/yarn-project/merkle-tree/src/standard_tree/standard_tree.test.ts index 22b340f9973..6cae5035466 100644 --- a/yarn-project/merkle-tree/src/standard_tree/standard_tree.test.ts +++ b/yarn-project/merkle-tree/src/standard_tree/standard_tree.test.ts @@ -38,7 +38,7 @@ describe('StandardTree_batchAppend', () => { pedersen.resetCounter(); }); - it('correctly computes root when batch appending and calls compress function expected num times', async () => { + it('correctly computes root when batch appending and calls hash function expected num times', async () => { const db = levelup(createMemDown()); const tree = await createDb(db, pedersen, 'test', 3); const leaves = Array.from({ length: 5 }, _ => randomBytes(32)); @@ -58,18 +58,18 @@ describe('StandardTree_batchAppend', () => { const level0NumHashing = 1; const expectedNumHashing = level2NumHashing + level1NumHashing + level0NumHashing; - expect(pedersen.compressCounter).toEqual(expectedNumHashing); + 
expect(pedersen.hashCounter).toEqual(expectedNumHashing); - const level2Node0 = pedersen.compress(leaves[0], leaves[1]); - const level2Node1 = pedersen.compress(leaves[2], leaves[3]); - const level2Node2 = pedersen.compress(leaves[4], INITIAL_LEAF); + const level2Node0 = pedersen.hash(leaves[0], leaves[1]); + const level2Node1 = pedersen.hash(leaves[2], leaves[3]); + const level2Node2 = pedersen.hash(leaves[4], INITIAL_LEAF); - const level2ZeroHash = pedersen.compress(INITIAL_LEAF, INITIAL_LEAF); + const level2ZeroHash = pedersen.hash(INITIAL_LEAF, INITIAL_LEAF); - const level1Node0 = pedersen.compress(level2Node0, level2Node1); - const level1Node1 = pedersen.compress(level2Node2, level2ZeroHash); + const level1Node0 = pedersen.hash(level2Node0, level2Node1); + const level1Node1 = pedersen.hash(level2Node2, level2ZeroHash); - const root = pedersen.compress(level1Node0, level1Node1); + const root = pedersen.hash(level1Node0, level1Node1); expect(tree.getRoot(true)).toEqual(root); }); diff --git a/yarn-project/merkle-tree/src/test/standard_based_test_suite.ts b/yarn-project/merkle-tree/src/test/standard_based_test_suite.ts index 1eccd96ea56..a5ed2f48099 100644 --- a/yarn-project/merkle-tree/src/test/standard_based_test_suite.ts +++ b/yarn-project/merkle-tree/src/test/standard_based_test_suite.ts @@ -56,9 +56,9 @@ export const standardBasedTreeTestSuite = ( const db = levelup(createMemDown()); const tree = await createDb(db, pedersen, 'test', 2); - const level1ZeroHash = pedersen.compress(INITIAL_LEAF, INITIAL_LEAF); + const level1ZeroHash = pedersen.hash(INITIAL_LEAF, INITIAL_LEAF); expect(tree.getNumLeaves(false)).toEqual(0n); - expect(tree.getRoot(false)).toEqual(pedersen.compress(level1ZeroHash, level1ZeroHash)); + expect(tree.getRoot(false)).toEqual(pedersen.hash(level1ZeroHash, level1ZeroHash)); expect(await tree.getSiblingPath(0n, false)).toEqual( new SiblingPath(TEST_TREE_DEPTH, [INITIAL_LEAF, level1ZeroHash]), ); @@ -66,23 +66,23 @@ export const standardBasedTreeTestSuite = ( await appendLeaves(tree, [values[0]]); expect(tree.getNumLeaves(true)).toEqual(1n); expect(tree.getNumLeaves(false)).toEqual(0n); - expect(tree.getRoot(true)).toEqual(pedersen.compress(pedersen.compress(values[0], INITIAL_LEAF), level1ZeroHash)); + expect(tree.getRoot(true)).toEqual(pedersen.hash(pedersen.hash(values[0], INITIAL_LEAF), level1ZeroHash)); expect(await tree.getSiblingPath(0n, true)).toEqual( new SiblingPath(TEST_TREE_DEPTH, [INITIAL_LEAF, level1ZeroHash]), ); - expect(tree.getRoot(false)).toEqual(pedersen.compress(level1ZeroHash, level1ZeroHash)); + expect(tree.getRoot(false)).toEqual(pedersen.hash(level1ZeroHash, level1ZeroHash)); expect(await tree.getSiblingPath(0n, false)).toEqual( new SiblingPath(TEST_TREE_DEPTH, [INITIAL_LEAF, level1ZeroHash]), ); await appendLeaves(tree, [values[1]]); expect(tree.getNumLeaves(true)).toEqual(2n); - expect(tree.getRoot(true)).toEqual(pedersen.compress(pedersen.compress(values[0], values[1]), level1ZeroHash)); + expect(tree.getRoot(true)).toEqual(pedersen.hash(pedersen.hash(values[0], values[1]), level1ZeroHash)); expect(await tree.getSiblingPath(1n, true)).toEqual( new SiblingPath(TEST_TREE_DEPTH, [values[0], level1ZeroHash]), ); expect(tree.getNumLeaves(false)).toEqual(0n); - expect(tree.getRoot(false)).toEqual(pedersen.compress(level1ZeroHash, level1ZeroHash)); + expect(tree.getRoot(false)).toEqual(pedersen.hash(level1ZeroHash, level1ZeroHash)); expect(await tree.getSiblingPath(1n, false)).toEqual( new SiblingPath(TEST_TREE_DEPTH, [INITIAL_LEAF, 
level1ZeroHash]), ); @@ -90,13 +90,13 @@ export const standardBasedTreeTestSuite = ( await appendLeaves(tree, [values[2]]); expect(tree.getNumLeaves(true)).toEqual(3n); expect(tree.getRoot(true)).toEqual( - pedersen.compress(pedersen.compress(values[0], values[1]), pedersen.compress(values[2], INITIAL_LEAF)), + pedersen.hash(pedersen.hash(values[0], values[1]), pedersen.hash(values[2], INITIAL_LEAF)), ); expect(await tree.getSiblingPath(2n, true)).toEqual( - new SiblingPath(TEST_TREE_DEPTH, [INITIAL_LEAF, pedersen.compress(values[0], values[1])]), + new SiblingPath(TEST_TREE_DEPTH, [INITIAL_LEAF, pedersen.hash(values[0], values[1])]), ); expect(tree.getNumLeaves(false)).toEqual(0n); - expect(tree.getRoot(false)).toEqual(pedersen.compress(level1ZeroHash, level1ZeroHash)); + expect(tree.getRoot(false)).toEqual(pedersen.hash(level1ZeroHash, level1ZeroHash)); expect(await tree.getSiblingPath(2n, false)).toEqual( new SiblingPath(TEST_TREE_DEPTH, [INITIAL_LEAF, level1ZeroHash]), ); @@ -104,29 +104,29 @@ export const standardBasedTreeTestSuite = ( await appendLeaves(tree, [values[3]]); expect(tree.getNumLeaves(true)).toEqual(4n); expect(tree.getRoot(true)).toEqual( - pedersen.compress(pedersen.compress(values[0], values[1]), pedersen.compress(values[2], values[3])), + pedersen.hash(pedersen.hash(values[0], values[1]), pedersen.hash(values[2], values[3])), ); expect(await tree.getSiblingPath(3n, true)).toEqual( - new SiblingPath(TEST_TREE_DEPTH, [values[2], pedersen.compress(values[0], values[1])]), + new SiblingPath(TEST_TREE_DEPTH, [values[2], pedersen.hash(values[0], values[1])]), ); expect(tree.getNumLeaves(false)).toEqual(0n); - expect(tree.getRoot(false)).toEqual(pedersen.compress(level1ZeroHash, level1ZeroHash)); + expect(tree.getRoot(false)).toEqual(pedersen.hash(level1ZeroHash, level1ZeroHash)); expect(await tree.getSiblingPath(3n, false)).toEqual( new SiblingPath(TEST_TREE_DEPTH, [INITIAL_LEAF, level1ZeroHash]), ); // Lifted from memory_tree.test.cpp to ensure consistency. 
//expect(root.toString('hex')).toEqual('0bf2e78afd70f72b0e6eafb03c41faef167a82441b05e517cdf35d813302061f'); expect(await tree.getSiblingPath(0n, true)).toEqual( - new SiblingPath(TEST_TREE_DEPTH, [values[1], pedersen.compress(values[2], values[3])]), + new SiblingPath(TEST_TREE_DEPTH, [values[1], pedersen.hash(values[2], values[3])]), ); expect(await tree.getSiblingPath(1n, true)).toEqual( - new SiblingPath(TEST_TREE_DEPTH, [values[0], pedersen.compress(values[2], values[3])]), + new SiblingPath(TEST_TREE_DEPTH, [values[0], pedersen.hash(values[2], values[3])]), ); expect(await tree.getSiblingPath(2n, true)).toEqual( - new SiblingPath(TEST_TREE_DEPTH, [values[3], pedersen.compress(values[0], values[1])]), + new SiblingPath(TEST_TREE_DEPTH, [values[3], pedersen.hash(values[0], values[1])]), ); expect(await tree.getSiblingPath(3n, true)).toEqual( - new SiblingPath(TEST_TREE_DEPTH, [values[2], pedersen.compress(values[0], values[1])]), + new SiblingPath(TEST_TREE_DEPTH, [values[2], pedersen.hash(values[0], values[1])]), ); await tree.commit(); diff --git a/yarn-project/merkle-tree/src/test/utils/pedersen_with_counter.ts b/yarn-project/merkle-tree/src/test/utils/pedersen_with_counter.ts index 4555ffa8105..04afeb17ac4 100644 --- a/yarn-project/merkle-tree/src/test/utils/pedersen_with_counter.ts +++ b/yarn-project/merkle-tree/src/test/utils/pedersen_with_counter.ts @@ -1,34 +1,34 @@ import { Pedersen } from '../../index.js'; /** - * A test utility allowing us to count the number of times the compress function has been called. + * A test utility allowing us to count the number of times the hash function has been called. * @deprecated Don't call pedersen directly in production code. Instead, create suitably-named functions for specific * purposes. */ export class PedersenWithCounter extends Pedersen { /** - * The number of times the compress function has been called. + * The number of times the hash function has been called. */ - public compressCounter = 0; + public hashCounter = 0; /** * Compresses two 32-byte hashes. - * @param lhs - The first hash. - * @param rhs - The second hash. + * @param lhs - The first 32-byte array. + * @param rhs - The second 32-byte array. * @returns The new 32-byte hash. * @deprecated Don't call pedersen directly in production code. Instead, create suitably-named functions for specific * purposes. */ - public compress(lhs: Uint8Array, rhs: Uint8Array): Buffer { - this.compressCounter++; - return super.compress(lhs, rhs); + public hash(lhs: Uint8Array, rhs: Uint8Array): Buffer { + this.hashCounter++; + return super.hash(lhs, rhs); } /** - * Resets the compress counter. + * Resets the hash counter. * @returns void */ public resetCounter() { - this.compressCounter = 0; + this.hashCounter = 0; } } diff --git a/yarn-project/merkle-tree/src/tree_base.ts b/yarn-project/merkle-tree/src/tree_base.ts index 4275d16b0b3..6b715280380 100644 --- a/yarn-project/merkle-tree/src/tree_base.ts +++ b/yarn-project/merkle-tree/src/tree_base.ts @@ -53,7 +53,7 @@ export abstract class TreeBase implements MerkleTree { let current = INITIAL_LEAF; for (let i = depth - 1; i >= 0; --i) { this.zeroHashes[i] = current; - current = hasher.compress(current, current); + current = hasher.hash(current, current); } this.root = root ? root : current; @@ -173,7 +173,7 @@ export abstract class TreeBase implements MerkleTree { const sibling = await this.getLatestValueAtIndex(level, isRight ? index - 1n : index + 1n, true); const lhs = isRight ? sibling : current; const rhs = isRight ? 
current : sibling; - current = this.hasher.compress(lhs, rhs); + current = this.hasher.hash(lhs, rhs); level -= 1; index >>= 1n; const cacheKey = indexToKeyHash(this.name, level, index); @@ -280,7 +280,7 @@ export abstract class TreeBase implements MerkleTree { const lhs = await this.getLatestValueAtIndex(level, index * 2n, true); const rhs = await this.getLatestValueAtIndex(level, index * 2n + 1n, true); const cacheKey = indexToKeyHash(this.name, level - 1, index); - this.cache[cacheKey] = this.hasher.compress(lhs, rhs); + this.cache[cacheKey] = this.hasher.hash(lhs, rhs); } level -= 1; diff --git a/yarn-project/pxe/src/note_processor/note_processor.test.ts b/yarn-project/pxe/src/note_processor/note_processor.test.ts index 3a15d11d492..4f41f052563 100644 --- a/yarn-project/pxe/src/note_processor/note_processor.test.ts +++ b/yarn-project/pxe/src/note_processor/note_processor.test.ts @@ -1,6 +1,6 @@ import { AcirSimulator } from '@aztec/acir-simulator'; import { CircuitsWasm, Fr, MAX_NEW_COMMITMENTS_PER_TX } from '@aztec/circuits.js'; -import { Grumpkin, pedersenCompressInputs } from '@aztec/circuits.js/barretenberg'; +import { Grumpkin, pedersenHashInputs } from '@aztec/circuits.js/barretenberg'; import { Point } from '@aztec/foundation/fields'; import { ConstantKeyPair } from '@aztec/key-store'; import { @@ -40,7 +40,7 @@ describe('Note Processor', () => { const computeMockNoteHash = (preimage: Fr[]) => Fr.fromBuffer( - pedersenCompressInputs( + pedersenHashInputs( wasm, preimage.map(p => p.toBuffer()), ), diff --git a/yarn-project/types/src/sibling_path.ts b/yarn-project/types/src/sibling_path.ts index 450546b898e..bb31ffe3709 100644 --- a/yarn-project/types/src/sibling_path.ts +++ b/yarn-project/types/src/sibling_path.ts @@ -34,7 +34,7 @@ export class SiblingPath { let current = zeroElement; for (let i = 0; i < size; ++i) { bufs.push(current); - current = hasher.compress(current, current); + current = hasher.hash(current, current); } return new SiblingPath(size, bufs); } From 1841760a995297cda405e32d4b968fb158ddb792 Mon Sep 17 00:00:00 2001 From: kevaundray Date: Wed, 25 Oct 2023 19:26:32 +0000 Subject: [PATCH 10/17] lint --- yarn-project/merkle-tree/src/pedersen.ts | 2 +- yarn-project/pxe/src/note_processor/note_processor.test.ts | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/yarn-project/merkle-tree/src/pedersen.ts b/yarn-project/merkle-tree/src/pedersen.ts index fa041d8bdb3..9b81c94b103 100644 --- a/yarn-project/merkle-tree/src/pedersen.ts +++ b/yarn-project/merkle-tree/src/pedersen.ts @@ -1,4 +1,4 @@ -import { pedersenHash, pedersenGetHashTree, pedersenHashInputs } from '@aztec/circuits.js/barretenberg'; +import { pedersenGetHashTree, pedersenHash, pedersenHashInputs } from '@aztec/circuits.js/barretenberg'; import { IWasmModule } from '@aztec/foundation/wasm'; import { Hasher } from '@aztec/types'; diff --git a/yarn-project/pxe/src/note_processor/note_processor.test.ts b/yarn-project/pxe/src/note_processor/note_processor.test.ts index 4f41f052563..00e1018b37c 100644 --- a/yarn-project/pxe/src/note_processor/note_processor.test.ts +++ b/yarn-project/pxe/src/note_processor/note_processor.test.ts @@ -1,6 +1,6 @@ import { AcirSimulator } from '@aztec/acir-simulator'; import { CircuitsWasm, Fr, MAX_NEW_COMMITMENTS_PER_TX } from '@aztec/circuits.js'; -import { Grumpkin, pedersenHashInputs } from '@aztec/circuits.js/barretenberg'; +import { Grumpkin, pedersenHashInputs } from '@aztec/circuits.js/barretenberg'; import { Point } from '@aztec/foundation/fields'; import 
{ ConstantKeyPair } from '@aztec/key-store'; import { From 1095911ad65ae4ddf30767267f3779fb19ad41e9 Mon Sep 17 00:00:00 2001 From: kevaundray Date: Wed, 25 Oct 2023 19:38:10 +0000 Subject: [PATCH 11/17] fix typos --- yarn-project/types/src/interfaces/hasher.ts | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/yarn-project/types/src/interfaces/hasher.ts b/yarn-project/types/src/interfaces/hasher.ts index ee9877fbbb2..48e0d265e2b 100644 --- a/yarn-project/types/src/interfaces/hasher.ts +++ b/yarn-project/types/src/interfaces/hasher.ts @@ -3,9 +3,9 @@ */ export interface Hasher { /** - * Hash two 32-byte hashes. - * @param lhs - The first 32-byte array. - * @param rhs - The second 32-byte array. + * Hash two arrays. + * @param lhs - The first array. + * @param rhs - The second array. * @returns The new 32-byte hash. */ hash(lhs: Uint8Array, rhs: Uint8Array): Buffer; From 245e44bf13a19d45f4a21bbd01945874955938aa Mon Sep 17 00:00:00 2001 From: kevaundray Date: Wed, 25 Oct 2023 19:44:00 +0000 Subject: [PATCH 12/17] fix typo --- .../src/barretenberg/crypto/pedersen/pedersen.ts | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/yarn-project/circuits.js/src/barretenberg/crypto/pedersen/pedersen.ts b/yarn-project/circuits.js/src/barretenberg/crypto/pedersen/pedersen.ts index 75dfe0ba512..b0147b5faa7 100644 --- a/yarn-project/circuits.js/src/barretenberg/crypto/pedersen/pedersen.ts +++ b/yarn-project/circuits.js/src/barretenberg/crypto/pedersen/pedersen.ts @@ -5,10 +5,10 @@ import { Buffer } from 'buffer'; import { deserializeArrayFromVector, deserializeField, serializeBufferArrayToVector } from '../../serialize.js'; /** - * Hashes two 32-byte hashes. + * Hashes two arrays. * @param wasm - The barretenberg module. - * @param lhs - The first hash. - * @param rhs - The second hash. + * @param lhs - The first array. + * @param rhs - The second array. * @returns The new 32-byte hash. * @deprecated Don't call pedersen directly in production code. Instead, create suitably-named functions for specific * purposes. @@ -18,10 +18,9 @@ export function pedersenHash(wasm: IWasmModule, lhs: Uint8Array, rhs: Uint8Array } /** - * Combine an array of hashes using pedersen hash. + * Computes the hash of an array of buffers. * @param wasm - The barretenberg module. - * @param lhs - The first hash. - * @param rhs - The second hash. + * @param inputs - The array of buffers to hash. * @returns The new 32-byte hash. * @deprecated Don't call pedersen directly in production code. Instead, create suitably-named functions for specific * purposes. 
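For reference, a minimal caller-side sketch of the renamed TypeScript helpers whose doc comments the two patches above correct. It assumes the CircuitsWasm.get() initializer that the tests in this series use to obtain a wasm module, and the input buffers are made up for illustration; none of this code is part of the patches themselves.

    import { CircuitsWasm } from '@aztec/circuits.js';
    import { pedersenHash, pedersenHashInputs } from '@aztec/circuits.js/barretenberg';

    async function pedersenSketch(): Promise<void> {
      // Assumed initializer; the note processor test in this series obtains wasm the same way.
      const wasm = await CircuitsWasm.get();

      // Hash two 32-byte inputs with the renamed pair-hash helper (formerly "compress").
      const lhs = Buffer.alloc(32, 1);
      const rhs = Buffer.alloc(32, 2);
      const pairHash = pedersenHash(wasm, lhs, rhs); // 32-byte Buffer

      // Hash an arbitrary list of 32-byte field buffers.
      const listHash = pedersenHashInputs(wasm, [lhs, rhs, pairHash]);
      console.log(pairHash.toString('hex'), listHash.toString('hex'));
    }
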
From 362f18597380ab4d10ff9dca740ff18204f5efbf Mon Sep 17 00:00:00 2001 From: kevaundray Date: Wed, 25 Oct 2023 20:44:40 +0000 Subject: [PATCH 13/17] Empty commit From ccd0c96b5d264109bab9e6e2871b26084ff6e260 Mon Sep 17 00:00:00 2001 From: kevaundray Date: Thu, 26 Oct 2023 11:07:54 +0000 Subject: [PATCH 14/17] move output argument to last position to match c_bind_new and other c functions --- .../cpp/src/barretenberg/crypto/pedersen_commitment/c_bind.cpp | 2 +- .../cpp/src/barretenberg/crypto/pedersen_commitment/c_bind.hpp | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/barretenberg/cpp/src/barretenberg/crypto/pedersen_commitment/c_bind.cpp b/barretenberg/cpp/src/barretenberg/crypto/pedersen_commitment/c_bind.cpp index f3b636a8e08..ee6beb6f3e5 100644 --- a/barretenberg/cpp/src/barretenberg/crypto/pedersen_commitment/c_bind.cpp +++ b/barretenberg/cpp/src/barretenberg/crypto/pedersen_commitment/c_bind.cpp @@ -20,7 +20,7 @@ WASM_EXPORT void pedersen__compress(uint8_t const* inputs_buffer, uint8_t* outpu barretenberg::fr::serialize_to_buffer(r, output); } -WASM_EXPORT void pedersen__compress_with_hash_index(uint8_t const* inputs_buffer, uint8_t* output, uint32_t hash_index) +WASM_EXPORT void pedersen__compress_with_hash_index(uint8_t const* inputs_buffer, uint32_t hash_index, uint8_t* output) { std::vector to_compress; read(inputs_buffer, to_compress); diff --git a/barretenberg/cpp/src/barretenberg/crypto/pedersen_commitment/c_bind.hpp b/barretenberg/cpp/src/barretenberg/crypto/pedersen_commitment/c_bind.hpp index 0ed52cc3fd8..af8ec1c6543 100644 --- a/barretenberg/cpp/src/barretenberg/crypto/pedersen_commitment/c_bind.hpp +++ b/barretenberg/cpp/src/barretenberg/crypto/pedersen_commitment/c_bind.hpp @@ -10,7 +10,7 @@ WASM_EXPORT void pedersen__compress_fields(uint8_t const* left, uint8_t const* r WASM_EXPORT void pedersen__compress(uint8_t const* inputs_buffer, uint8_t* output); -WASM_EXPORT void pedersen__compress_with_hash_index(uint8_t const* inputs_buffer, uint8_t* output, uint32_t hash_index); +WASM_EXPORT void pedersen__compress_with_hash_index(uint8_t const* inputs_buffer, uint32_t hash_index, uint8_t* output); WASM_EXPORT void pedersen__commit(uint8_t const* inputs_buffer, uint8_t* output); WASM_EXPORT void pedersen__buffer_to_field(uint8_t const* data, size_t length, uint8_t* r); From ef27e7137e81215bedf2628c212d5c6ed41d8820 Mon Sep 17 00:00:00 2001 From: kevaundray Date: Thu, 26 Oct 2023 11:09:48 +0000 Subject: [PATCH 15/17] modify calling code to: - account for argument switch - fix: allocate instead of using scratch space --- .../src/barretenberg/crypto/pedersen/pedersen.ts | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/yarn-project/circuits.js/src/barretenberg/crypto/pedersen/pedersen.ts b/yarn-project/circuits.js/src/barretenberg/crypto/pedersen/pedersen.ts index b0147b5faa7..02efdfefeab 100644 --- a/yarn-project/circuits.js/src/barretenberg/crypto/pedersen/pedersen.ts +++ b/yarn-project/circuits.js/src/barretenberg/crypto/pedersen/pedersen.ts @@ -41,9 +41,19 @@ export function pedersenHashInputs(wasm: IWasmModule, inputs: Buffer[]): Buffer export function pedersenHashWithHashIndex(wasm: IWasmModule, inputs: Buffer[], hashIndex: number): Buffer { // If not done already, precompute constants. wasm.call('pedersen__init'); - const inputVectors = serializeBufferArrayToVector(inputs); - wasm.writeMemory(0, inputVectors); - wasm.call('pedersen__compress_with_hash_index', 0, 0, hashIndex); + + // Allocate memory for the inputs. 
We can optimize this + // by checking the length and copying the data to the + // wasm scratch space if it is small enough. + const data = serializeBufferArrayToVector(inputs); + const inputPtr = wasm.call('bbmalloc', data.length); + wasm.writeMemory(inputPtr, data); + + // Since the output is 32 bytes, instead of allocating memory + // we can simply use the scratch space. + const outputPtr = 0; + + wasm.call('pedersen__compress_with_hash_index', inputPtr, hashIndex, outputPtr); return Buffer.from(wasm.getMemorySlice(0, 32)); } From 96d4ebdcd39a30a8df9d60829ed01fd6ae417c48 Mon Sep 17 00:00:00 2001 From: kevaundray Date: Thu, 26 Oct 2023 11:33:40 +0000 Subject: [PATCH 16/17] lint --- .../circuits.js/src/barretenberg/crypto/pedersen/pedersen.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/yarn-project/circuits.js/src/barretenberg/crypto/pedersen/pedersen.ts b/yarn-project/circuits.js/src/barretenberg/crypto/pedersen/pedersen.ts index 02efdfefeab..ee4b74f90a3 100644 --- a/yarn-project/circuits.js/src/barretenberg/crypto/pedersen/pedersen.ts +++ b/yarn-project/circuits.js/src/barretenberg/crypto/pedersen/pedersen.ts @@ -42,8 +42,8 @@ export function pedersenHashWithHashIndex(wasm: IWasmModule, inputs: Buffer[], h // If not done already, precompute constants. wasm.call('pedersen__init'); - // Allocate memory for the inputs. We can optimize this - // by checking the length and copying the data to the + // Allocate memory for the inputs. We can optimize this + // by checking the length and copying the data to the // wasm scratch space if it is small enough. const data = serializeBufferArrayToVector(inputs); const inputPtr = wasm.call('bbmalloc', data.length); From 3deef579bc44b3e382656db8d5ba41840b92ac11 Mon Sep 17 00:00:00 2001 From: kevaundray Date: Thu, 26 Oct 2023 11:57:34 +0000 Subject: [PATCH 17/17] charlie review: free the allocated input --- .../src/barretenberg/crypto/pedersen/pedersen.ts | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/yarn-project/circuits.js/src/barretenberg/crypto/pedersen/pedersen.ts b/yarn-project/circuits.js/src/barretenberg/crypto/pedersen/pedersen.ts index ee4b74f90a3..5b6993bff8d 100644 --- a/yarn-project/circuits.js/src/barretenberg/crypto/pedersen/pedersen.ts +++ b/yarn-project/circuits.js/src/barretenberg/crypto/pedersen/pedersen.ts @@ -54,7 +54,11 @@ export function pedersenHashWithHashIndex(wasm: IWasmModule, inputs: Buffer[], h const outputPtr = 0; wasm.call('pedersen__compress_with_hash_index', inputPtr, hashIndex, outputPtr); - return Buffer.from(wasm.getMemorySlice(0, 32)); + const hashOutput = wasm.getMemorySlice(0, 32); + + wasm.call('bbfree', inputPtr); + + return Buffer.from(hashOutput); } /**
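Taken together, patches 14 through 17 leave the wasm call path as: serialize the inputs, copy them into memory obtained from bbmalloc, invoke pedersen__compress_with_hash_index with the output pointer last, read the 32-byte result from scratch space at offset 0, then release the input allocation with bbfree. A small usage sketch of the resulting pedersenHashWithHashIndex wrapper follows; it assumes the function is re-exported from the same barretenberg entrypoint as the other helpers and that CircuitsWasm.get() is the initializer, and the field values are illustrative rather than taken from the diffs. Hash index 0 matches the default the other wrappers in this file pass through.

    import { CircuitsWasm, Fr } from '@aztec/circuits.js';
    import { pedersenHashWithHashIndex } from '@aztec/circuits.js/barretenberg';

    async function hashWithIndexSketch(): Promise<Fr> {
      const wasm = await CircuitsWasm.get(); // assumed initializer
      // Illustrative inputs; 0 is the default hash index used by the other wrappers here.
      const inputs = [new Fr(4n).toBuffer(), new Fr(8n).toBuffer()];
      const hash = pedersenHashWithHashIndex(wasm, inputs, 0);
      return Fr.fromBuffer(hash);
    }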