Skip to content

Commit

Permalink
chore: fixup refactor pedersen standard (#2882)
Browse files Browse the repository at this point in the history
This change completes the pedersen hash refactor by restoring the
length-generator formulation of `pedersen_hash_base<Curve>::hash` (hashing
`length_generator * inputs.size()` plus the commitment, instead of prepending
the input length as an extra field element), removing the now-unneeded
`hash(std::vector<std::pair<Fq, GeneratorContext>>)` overloads from both the
native and stdlib implementations, and updating the affected expected values:
join-split circuit gate count / verification-key hash, and the pedersen test
vectors in `pedersen.test.ts`. It also bumps the acvm_js dependency tag and
the acir test branch to match.

# Checklist:
Remove the checklist to signal you've completed it. Enable auto-merge if
the PR is ready to merge.
- [ ] If the pull request requires a cryptography review (e.g.
cryptographic algorithm implementations) I have added the 'crypto' tag.
- [ ] I have reviewed my diff in github, line by line and removed
unexpected formatting changes, testing logs, or commented-out code.
- [ ] Every change is related to the PR description.
- [ ] I have
[linked](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue)
this pull request to relevant issues (if any exist).
  • Loading branch information
kevaundray authored Oct 22, 2023
1 parent 5fe48c3 commit a4205ab
Show file tree
Hide file tree
Showing 18 changed files with 179 additions and 272 deletions.
2 changes: 1 addition & 1 deletion barretenberg/acir_tests/run_acir_tests.sh
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ set -eu
BIN=${BIN:-../cpp/build/bin/bb}
FLOW=${FLOW:-prove_and_verify}
CRS_PATH=~/.bb-crs
BRANCH=mv/new-pedersen
BRANCH=kw/mv/new-pedersen
VERBOSE=${VERBOSE:-}
NAMED_TEST=${1:-}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -58,12 +58,8 @@ std::vector<typename Curve::BaseField> pedersen_hash_base<Curve>::convert_buffer
template <typename Curve>
typename Curve::BaseField pedersen_hash_base<Curve>::hash(const std::vector<Fq>& inputs, const GeneratorContext context)
{
auto input_size = Fq(inputs.size());
std::vector<Fq> modified_inputs = inputs;
modified_inputs.insert(modified_inputs.begin(), input_size);
return (pedersen_commitment_base<Curve>::commit_native(modified_inputs, context)).x;
// Element result = length_generator * Fr(inputs.size());
// return (result + pedersen_commitment_base<Curve>::commit_native(inputs, context)).normalize().x;
Element result = length_generator * Fr(inputs.size());
return (result + pedersen_commitment_base<Curve>::commit_native(inputs, context)).normalize().x;
}

/**
Expand All @@ -85,20 +81,5 @@ typename Curve::BaseField pedersen_hash_base<Curve>::hash_buffer(const std::vect
return result;
}

// TODO(Kev): Why is this method needed?
template <typename Curve>
typename Curve::BaseField pedersen_hash_base<Curve>::hash(
const std::vector<std::pair<Fq, GeneratorContext>>& input_pairs)
{
// TODO: This mixes Fq and Fr. Is this correct?
// Element result = length_generator * Fr(input_pairs.size());
// return (result + pedersen_commitment_base<Curve>::commit_native(input_pairs)).normalize().x;
auto length_generator_ctx = GeneratorContext(0);
auto input_size = Fq(input_pairs.size());
auto modified_inputs = input_pairs;
modified_inputs.insert(modified_inputs.begin(), { input_size, length_generator_ctx });
return (pedersen_commitment_base<Curve>::commit_native(input_pairs)).x;
}

template class pedersen_hash_base<curve::Grumpkin>;
} // namespace crypto
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,6 @@ template <typename Curve> class pedersen_hash_base {
inline static constexpr AffineElement length_generator = Group::derive_generators("pedersen_hash_length", 1)[0];
static Fq hash(const std::vector<Fq>& inputs, GeneratorContext context = {});
static Fq hash_buffer(const std::vector<uint8_t>& input, GeneratorContext context = {});
static Fq hash(const std::vector<std::pair<Fq, GeneratorContext>>& input_pairs);

private:
static std::vector<Fq> convert_buffer(const std::vector<uint8_t>& input);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -706,11 +706,9 @@ TEST_F(join_split_tests, test_0_input_notes_and_detect_circuit_change)
EXPECT_TRUE(result.valid);

// The below part detects any changes in the join-split circuit
// TODO(Kev): This will be reduced back to 49492, once we switch
// back to the original hashing.
constexpr uint32_t CIRCUIT_GATE_COUNT = 95113;
constexpr uint32_t GATES_NEXT_POWER_OF_TWO = 131072;
const uint256_t VK_HASH("90d7bef7124dbd2206886c23a49df3399f998e2226aa0ad40ffda848ee43eefa");
constexpr uint32_t CIRCUIT_GATE_COUNT = 49492;
constexpr uint32_t GATES_NEXT_POWER_OF_TWO = 65535;
const uint256_t VK_HASH("986c3fe747d2f1b84fd9dea37a22c27bd4e1006900f458decf2da20442a7395a");

auto number_of_gates_js = result.number_of_gates;
std::cout << get_verification_key()->sha256_hash() << std::endl;
Expand Down
94 changes: 12 additions & 82 deletions barretenberg/cpp/src/barretenberg/stdlib/hash/pedersen/pedersen.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -9,34 +9,16 @@ template <typename C>
field_t<C> pedersen_hash<C>::hash(const std::vector<field_t>& inputs, const GeneratorContext context)
{
using cycle_scalar = typename cycle_group::cycle_scalar;
// using Curve = EmbeddedCurve;
using Curve = EmbeddedCurve;

// const auto base_points = context.generators->get(inputs.size(), context.offset, context.domain_separator);

// std::vector<cycle_scalar> scalars;
// std::vector<cycle_group> points;
// scalars.emplace_back(cycle_scalar::create_from_bn254_scalar(field_t(inputs.size())));
// points.emplace_back(crypto::pedersen_hash_base<Curve>::length_generator);
// for (size_t i = 0; i < inputs.size(); ++i) {
// scalars.emplace_back(cycle_scalar::create_from_bn254_scalar(inputs[i]));
// // constructs constant cycle_group objects (non-witness)
// points.emplace_back(base_points[i]);
// }

// auto result = cycle_group::batch_mul(scalars, points);
// return result.x;
//
// We add +1 because we want to also add the length as an input for the
// hash.
std::vector<field_t> modified_inputs = inputs;
modified_inputs.insert(modified_inputs.begin(), field_t(inputs.size()));
const auto base_points = context.generators->get(modified_inputs.size(), context.offset, context.domain_separator);
const auto base_points = context.generators->get(inputs.size(), context.offset, context.domain_separator);

std::vector<cycle_scalar> scalars;
std::vector<cycle_group> points;

for (size_t i = 0; i < modified_inputs.size(); ++i) {
scalars.emplace_back(cycle_scalar::create_from_bn254_scalar(modified_inputs[i]));
scalars.emplace_back(cycle_scalar::create_from_bn254_scalar(field_t(inputs.size())));
points.emplace_back(crypto::pedersen_hash_base<Curve>::length_generator);
for (size_t i = 0; i < inputs.size(); ++i) {
scalars.emplace_back(cycle_scalar::create_from_bn254_scalar(inputs[i]));
// constructs constant cycle_group objects (non-witness)
points.emplace_back(base_points[i]);
}
Expand All @@ -50,35 +32,17 @@ field_t<C> pedersen_hash<C>::hash_skip_field_validation(const std::vector<field_
const GeneratorContext context)
{
using cycle_scalar = typename cycle_group::cycle_scalar;
// using Curve = EmbeddedCurve;
using Curve = EmbeddedCurve;

// const auto base_points = context.generators->get(inputs.size(), context.offset, context.domain_separator);

// std::vector<cycle_scalar> scalars;
// std::vector<cycle_group> points;
// scalars.emplace_back(cycle_scalar::create_from_bn254_scalar(field_t(inputs.size())));
// points.emplace_back(crypto::pedersen_hash_base<Curve>::length_generator);
// for (size_t i = 0; i < inputs.size(); ++i) {
// // `true` param = skip primality test when performing a scalar mul
// scalars.emplace_back(cycle_scalar::create_from_bn254_scalar(inputs[i], true));
// // constructs constant cycle_group objects (non-witness)
// points.emplace_back(base_points[i]);
// }

// auto result = cycle_group::batch_mul(scalars, points);
// return result.x;
//
// Same reason as above, for adding +1
std::vector<field_t> modified_inputs = inputs;
modified_inputs.insert(modified_inputs.begin(), field_t(inputs.size()));
const auto base_points = context.generators->get(modified_inputs.size(), context.offset, context.domain_separator);
const auto base_points = context.generators->get(inputs.size(), context.offset, context.domain_separator);

std::vector<cycle_scalar> scalars;
std::vector<cycle_group> points;

for (size_t i = 0; i < modified_inputs.size(); ++i) {
scalars.emplace_back(cycle_scalar::create_from_bn254_scalar(field_t(inputs.size())));
points.emplace_back(crypto::pedersen_hash_base<Curve>::length_generator);
for (size_t i = 0; i < inputs.size(); ++i) {
// `true` param = skip primality test when performing a scalar mul
scalars.emplace_back(cycle_scalar::create_from_bn254_scalar(modified_inputs[i], true));
scalars.emplace_back(cycle_scalar::create_from_bn254_scalar(inputs[i], true));
// constructs constant cycle_group objects (non-witness)
points.emplace_back(base_points[i]);
}
Expand Down Expand Up @@ -125,40 +89,6 @@ field_t<C> pedersen_hash<C>::hash_buffer(const stdlib::byte_array<C>& input, Gen
}
return hashed;
}

template <typename C>
field_t<C> pedersen_hash<C>::hash(const std::vector<std::pair<field_t, GeneratorContext>>& input_pairs)
{

using cycle_scalar = typename cycle_group::cycle_scalar;

// std::vector<cycle_scalar> scalars;
// std::vector<cycle_group> points;

// scalars.emplace_back(cycle_scalar::create_from_bn254_scalar(field_t(input_pairs.size())));
// points.emplace_back(crypto::pedersen_hash_base<EmbeddedCurve>::length_generator);
// for (auto& [scalar, context] : input_pairs) {
// scalars.emplace_back(cycle_scalar::create_from_bn254_scalar(scalar));
// // constructs constant cycle_group objects (non-witness)
// points.emplace_back(context.generators->get(1, context.offset, context.domain_separator)[0]);
// }

// return cycle_group::batch_mul(scalars, points).x;
//
//
std::vector<cycle_scalar> scalars;
std::vector<cycle_group> points;

scalars.emplace_back(cycle_scalar::create_from_bn254_scalar(field_t(input_pairs.size())));
points.emplace_back(input_pairs[0].second.generators->get(1, 0, input_pairs[0].second.domain_separator)[0]);
for (auto& [scalar, context] : input_pairs) {
scalars.emplace_back(cycle_scalar::create_from_bn254_scalar(scalar));
// constructs constant cycle_group objects (non-witness)
points.emplace_back(context.generators->get(1, context.offset, context.domain_separator)[0]);
}

return cycle_group::batch_mul(scalars, points).x;
}
INSTANTIATE_STDLIB_TYPE(pedersen_hash);

} // namespace proof_system::plonk::stdlib
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,6 @@ template <typename Builder> class pedersen_hash {
// TODO health warnings!
static field_t hash_skip_field_validation(const std::vector<field_t>& in, GeneratorContext context = {});
static field_t hash_buffer(const stdlib::byte_array<Builder>& input, GeneratorContext context = {});
static field_t hash(const std::vector<std::pair<field_t, GeneratorContext>>& input_pairs);
};

EXTERN_STDLIB_TYPE(pedersen_hash);
Expand Down
24 changes: 12 additions & 12 deletions barretenberg/ts/src/barretenberg_api/pedersen.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -15,27 +15,27 @@ describe('pedersen', () => {

it('pedersenCompressFields', async () => {
const result = await api.pedersenCompressFields(new Fr(4n), new Fr(8n));
expect(result).toEqual(new Fr(13982758649432613336147872733411006657097928907798377266063228888290725444169n));
expect(result).toEqual(new Fr(1521373897829389584529155077412196627698249315427143054350987371861781120260n));
});

it('pedersenPlookupCompressFields', async () => {
const result = await api.pedersenPlookupCompressFields(new Fr(4n), new Fr(8n));
expect(result).toEqual(new Fr(13982758649432613336147872733411006657097928907798377266063228888290725444169n));
expect(result).toEqual(new Fr(1521373897829389584529155077412196627698249315427143054350987371861781120260n));
});

it('pedersenCompress', async () => {
const result = await api.pedersenCompress([new Fr(4n), new Fr(8n), new Fr(12n)]);
expect(result).toEqual(new Fr(19056579601153937652779328314485097390897358462541238912904230749502508633726n));
expect(result).toEqual(new Fr(16354408412011670665169322571938780771784319449166930406648760506154417354381n));
});

it('pedersenPlookupCompress', async () => {
const result = await api.pedersenPlookupCompress([new Fr(4n), new Fr(8n), new Fr(12n)]);
expect(result).toEqual(new Fr(19056579601153937652779328314485097390897358462541238912904230749502508633726n));
expect(result).toEqual(new Fr(16354408412011670665169322571938780771784319449166930406648760506154417354381n));
});

it('pedersenCompressWithHashIndex', async () => {
const result = await api.pedersenCompressWithHashIndex([new Fr(4n), new Fr(8n)], 7);
expect(result).toEqual(new Fr(9623070643626513033232363421644611403228818065703560824918278791880825345070n));
expect(result).toEqual(new Fr(2152386650411553803409271316104075950536496387580531018130718456431861859990n));
});

it('pedersenCommit', async () => {
Expand All @@ -52,22 +52,22 @@ describe('pedersen', () => {
const result = await api.pedersenBufferToField(
Buffer.from('Hello world! I am a buffer to be converted to a field!'),
);
expect(result).toEqual(new Fr(8552025510016673626971243114002298733165590265505387301921017959053622217825n));
expect(result).toEqual(new Fr(5836632387256708040349959803326023895450290698906238002955147410646852307074n));
});

it('pedersenHashPair', async () => {
const result = await api.pedersenHashPair(new Fr(4n), new Fr(8n));
expect(result).toEqual(new Fr(13982758649432613336147872733411006657097928907798377266063228888290725444169n));
expect(result).toEqual(new Fr(1521373897829389584529155077412196627698249315427143054350987371861781120260n));
});

it('pedersenHashMultiple', async () => {
const result = await api.pedersenHashMultiple([new Fr(4n), new Fr(8n), new Fr(12n)]);
expect(result).toEqual(new Fr(19056579601153937652779328314485097390897358462541238912904230749502508633726n));
expect(result).toEqual(new Fr(16354408412011670665169322571938780771784319449166930406648760506154417354381n));
});

it('pedersenHashMultipleWithHashIndex', async () => {
const result = await api.pedersenHashMultipleWithHashIndex([new Fr(4n), new Fr(8n)], 7);
expect(result).toEqual(new Fr(9623070643626513033232363421644611403228818065703560824918278791880825345070n));
expect(result).toEqual(new Fr(2152386650411553803409271316104075950536496387580531018130718456431861859990n));
});

it('pedersenHashToTree', async () => {
Expand All @@ -77,9 +77,9 @@ describe('pedersen', () => {
new Fr(8n),
new Fr(12n),
new Fr(16n),
new Fr(13982758649432613336147872733411006657097928907798377266063228888290725444169n),
new Fr(1319116096575922946541582119791221180923112201751318788162745363104756571250n),
new Fr(11663284539342402700106107283927625502644585486344246278789732908619286294294n),
new Fr(1521373897829389584529155077412196627698249315427143054350987371861781120260n),
new Fr(18350527319045519333962768191016242826584323959670139897255818770108115223653n),
new Fr(5972535902427608430534212385621973704186819235181735133037695406667218179357n),
]);
});
});
2 changes: 1 addition & 1 deletion yarn-project/acir-simulator/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@
"@aztec/circuits.js": "workspace:^",
"@aztec/foundation": "workspace:^",
"@aztec/types": "workspace:^",
"@kevaundray/acvm_js": "0.28.0-kw-branched-off-of-last-aztec-tag-168e69bed",
"@kevaundray/acvm_js": "0.28.0-kw-branched-off-of-last-aztec-tag-5b563c08c",
"levelup": "^5.1.1",
"memdown": "^6.1.1",
"tslib": "^2.4.0"
Expand Down
4 changes: 2 additions & 2 deletions yarn-project/boxes/blank-react/src/artifacts/Blank.json
Original file line number Diff line number Diff line change
Expand Up @@ -37,11 +37,11 @@
"fileMap": {
"38": {
"source": "use dep::std::hash::{pedersen_with_separator, sha256};\nuse crate::constants_gen::{\n GENERATOR_INDEX__SIGNATURE_PAYLOAD,\n GENERATOR_INDEX__L1_TO_L2_MESSAGE_SECRET,\n};\n\npub fn sha256_to_field<N>(bytes_to_hash: [u8; N]) -> Field {\n let sha256_hashed = sha256(bytes_to_hash);\n\n // Convert it to a field element\n let mut v = 1;\n let mut high = 0 as Field;\n let mut low = 0 as Field;\n\n for i in 0..16 {\n high = high + (sha256_hashed[15 - i] as Field) * v;\n low = low + (sha256_hashed[16 + 15 - i] as Field) * v;\n v = v * 256;\n }\n\n // Abuse that a % p + b % p = (a + b) % p and that low < p\n let hash_in_a_field = low + high * v;\n\n hash_in_a_field\n}\n\npub fn compute_secret_hash(secret: Field) -> Field {\n // TODO(#1205) This is probably not the right index to use\n pedersen_hash([secret], GENERATOR_INDEX__L1_TO_L2_MESSAGE_SECRET)\n}\n\n#[oracle(pedersenHash)]\nfn pedersen_hash_oracle<N>(_inputs: [Field; N], _hash_index: u32) -> Field {}\n\nunconstrained pub fn pedersen_hash_internal<N>(inputs: [Field; N], hash_index: u32) -> Field {\n pedersen_hash_oracle(inputs, hash_index)\n}\n\n// TODO(Kev): We can move this into the noir std-lib \n// as an acir opcode, once we have the pedersen PR merged.\npub fn pedersen_hash<N>(inputs: [Field; N], hash_index: u32) -> Field {\n // Temporary hack. This is not right. Kev forced me to do this. Blame him.\n pedersen_hash_internal(inputs, hash_index)\n // let input_size = inputs.len();\n // let mut modified_inputs = [];\n // modified_inputs = modified_inputs.push_back(input_size as Field);\n // for i in 0..N {\n // modified_inputs = modified_inputs.push_back(inputs[i]);\n // }\n // pedersen_with_separator(modified_inputs, hash_index)[0]\n}",
"path": "/mnt/user-data/leila/aztec3/aztec-packages/yarn-project/aztec-nr/aztec/src/hash"
"path": "/mnt/user-data/kev/aztec-packages/yarn-project/aztec-nr/aztec/src/hash"
},
"58": {
"source": "use crate::types::point::Point;\nuse crate::address::compute_address;\n\n#[oracle(getPublicKey)]\nfn get_public_key_oracle(_address: Field) -> [Field; 3] {}\n\nunconstrained fn get_public_key_internal(address: Field) -> [Field; 3] {\n get_public_key_oracle(address)\n}\n\npub fn get_public_key(address: Field) -> Point {\n let result = get_public_key_internal(address);\n let pub_key_x = result[0];\n let pub_key_y = result[1];\n let partial_address = result[2];\n \n let calculated_address = compute_address(pub_key_x, pub_key_y, partial_address);\n assert(calculated_address == address);\n \n Point::new(pub_key_x, pub_key_y)\n}\n",
"path": "/mnt/user-data/leila/aztec3/aztec-packages/yarn-project/aztec-nr/aztec/src/oracle/get_public_key"
"path": "/mnt/user-data/kev/aztec-packages/yarn-project/aztec-nr/aztec/src/oracle/get_public_key"
}
}
}
Expand Down
Loading

0 comments on commit a4205ab

Please sign in to comment.