chore: Noir version for Pedersen commitment and hash (#5431)
# Description

## Problem\*

Implement Pedersen commitment and Pedersen hash in Noir.

## Summary\*
Once the constant-input PR is merged and synchronised, these optimised Noir
versions of Pedersen commitment and Pedersen hash should match the gate count
of the Pedersen commitment blackbox function.
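
For context, here is a minimal usage sketch of the two stdlib entry points this PR reimplements on top of `multi_scalar_mul`; the `main` signature and inputs are illustrative only and not part of this commit.

```noir
fn main(a: Field, b: Field) -> pub Field {
    // Commitment: an EmbeddedCurvePoint with x, y and is_infinite fields.
    let commitment = std::hash::pedersen_commitment([a, b]);
    // Hash: a single Field element.
    let hash = std::hash::pedersen_hash([a, b]);
    // Fold both results into one public output, purely to use them.
    hash + commitment.x
}
```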


## Additional Context



## Documentation\*

Check one:
- [X] No documentation needed.
- [ ] Documentation included in this PR.
- [ ] **[For Experimental Features]** Documentation to be submitted in a
separate PR.

# PR Checklist\*

- [X] I have tested the changes locally.
- [X] I have formatted the changes with [Prettier](https://prettier.io/)
and/or `cargo fmt` on default settings.
guipublic authored Jul 15, 2024
1 parent 558d79e commit 029584b
Showing 5 changed files with 190 additions and 54 deletions.
@@ -277,7 +277,8 @@ impl<F: Copy> BlackBoxFuncCall<F> {
| BlackBoxFuncCall::BigIntDiv { .. }
| BlackBoxFuncCall::BigIntToLeBytes { .. } => Vec::new(),
BlackBoxFuncCall::MultiScalarMul { points, scalars, .. } => {
let mut inputs: Vec<FunctionInput<F>> = Vec::with_capacity(points.len() * 2);
let mut inputs: Vec<FunctionInput<F>> =
Vec::with_capacity(points.len() + scalars.len());
inputs.extend(points.iter().copied());
inputs.extend(scalars.iter().copied());
inputs
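A hedged reading of the hunk above: the ACIR `MultiScalarMul` opcode already stores its points and scalars as flat lists of `FunctionInput`s, so the total input count is `points.len() + scalars.len()` rather than `points.len() * 2`. The sketch below shows the Noir-level encoding this corresponds to; the `flatten` helper is mine, only the struct fields come from `noir_stdlib`.

```noir
use std::embedded_curve_ops::{EmbeddedCurvePoint, EmbeddedCurveScalar};

// Illustrative only: one point flattens to (x, y, is_infinite) and one
// scalar to (lo, hi), so a call with P points and S scalars carries
// 3*P + 2*S field inputs, i.e. points.len() + scalars.len() once both
// lists are already flattened, as they are at the ACIR level.
fn flatten(p: EmbeddedCurvePoint, s: EmbeddedCurveScalar) -> [Field; 5] {
    [p.x, p.y, p.is_infinite as Field, s.lo, s.hi]
}
```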
71 changes: 51 additions & 20 deletions compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs
@@ -752,27 +752,24 @@ impl<'f> Context<'f> {
Instruction::Call { func, arguments }
}
Value::Intrinsic(Intrinsic::BlackBox(BlackBoxFunc::MultiScalarMul)) => {
let mut array_with_predicate = im::Vector::new();
let array_typ;
if let Value::Array { array, typ } =
&self.inserter.function.dfg[arguments[0]]
{
array_typ = typ.clone();
for (i, value) in array.clone().iter().enumerate() {
if i % 3 == 2 {
array_with_predicate.push_back(self.var_or_one(
*value,
condition,
call_stack.clone(),
));
} else {
array_with_predicate.push_back(*value);
}
}
let points_array_idx = if matches!(
self.inserter.function.dfg[arguments[0]],
Value::Array { .. }
) {
0
} else {
unreachable!();
}
arguments[0] =
// if the first argument is not an array, we assume it is a slice
// which means the array is the second argument
1
};
let (array_with_predicate, array_typ) = self
.apply_predicate_to_msm_argument(
arguments[points_array_idx],
condition,
call_stack.clone(),
);

arguments[points_array_idx] =
self.inserter.function.dfg.make_array(array_with_predicate, array_typ);
Instruction::Call { func, arguments }
}
@@ -785,6 +782,40 @@ impl<'f> Context<'f> {
}
}

/// When a MSM is done under a predicate, we need to apply the predicate
/// to the is_infinity property of the input points in order to ensure
/// that the points will be on the curve no matter what.
fn apply_predicate_to_msm_argument(
&mut self,
argument: ValueId,
predicate: ValueId,
call_stack: CallStack,
) -> (im::Vector<ValueId>, Type) {
let array_typ;
let mut array_with_predicate = im::Vector::new();
if let Value::Array { array, typ } = &self.inserter.function.dfg[argument] {
array_typ = typ.clone();
for (i, value) in array.clone().iter().enumerate() {
if i % 3 == 2 {
array_with_predicate.push_back(self.var_or_one(
*value,
predicate,
call_stack.clone(),
));
} else {
array_with_predicate.push_back(*value);
}
}
} else {
unreachable!(
"Expected an array, got {}",
&self.inserter.function.dfg.type_of_value(argument)
);
};

(array_with_predicate, array_typ)
}

// Computes: if condition { var } else { 1 }
fn var_or_one(&mut self, var: ValueId, condition: ValueId, call_stack: CallStack) -> ValueId {
let field = self.insert_instruction(
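The new `apply_predicate_to_msm_argument` helper leans on the flattened point layout: each point occupies three consecutive slots `(x, y, is_infinite)`, so index `i % 3 == 2` is the infinity flag, and `var_or_one` rewrites it to `if condition { flag } else { 1 }`. A Noir-level sketch of the effect on one point follows; the function is illustrative and not part of this commit.

```noir
use std::embedded_curve_ops::EmbeddedCurvePoint;

// When the predicate is false the point is forced to the point at
// infinity, which is always a valid curve input, so a disabled branch
// cannot make the MSM constraints fail.
fn apply_predicate(point: EmbeddedCurvePoint, predicate: bool) -> EmbeddedCurvePoint {
    EmbeddedCurvePoint {
        x: point.x,
        y: point.y,
        is_infinite: if predicate { point.is_infinite } else { true }
    }
}
```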
3 changes: 3 additions & 0 deletions noir_stdlib/src/embedded_curve_ops.nr
@@ -97,6 +97,9 @@ pub fn multi_scalar_mul<let N: u32>(
// docs:end:multi_scalar_mul
{}

#[foreign(multi_scalar_mul)]
pub(crate) fn multi_scalar_mul_slice(points: [EmbeddedCurvePoint], scalars: [EmbeddedCurveScalar]) -> [Field; 3] {}

// docs:start:fixed_base_scalar_mul
pub fn fixed_base_scalar_mul(
scalar_low: Field,
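`multi_scalar_mul_slice` returns the same flattened `[Field; 3]` encoding `(x, y, is_infinite)` used elsewhere in this PR. Below is a minimal decoding sketch, mirroring what `pedersen_commitment_with_separator` does in `hash/mod.nr` further down; the function name is mine, and the real entry point is `pub(crate)`, so it is only callable from inside the stdlib.

```noir
use std::embedded_curve_ops::EmbeddedCurvePoint;

// Sketch: turning a flattened [x, y, is_infinite] MSM result back into
// a point, as hash/mod.nr does further down in this diff.
fn decode_msm_result(values: [Field; 3]) -> EmbeddedCurvePoint {
    EmbeddedCurvePoint { x: values[0], y: values[1], is_infinite: values[2] as bool }
}
```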
2 changes: 1 addition & 1 deletion noir_stdlib/src/field/bn254.nr
@@ -23,7 +23,7 @@ fn compute_decomposition(x: Field) -> (Field, Field) {
(low, high)
}

unconstrained fn decompose_hint(x: Field) -> (Field, Field) {
unconstrained pub(crate) fn decompose_hint(x: Field) -> (Field, Field) {
compute_decomposition(x)
}

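`decompose_hint` is unconstrained, so widening its visibility to `pub(crate)` puts the burden on each caller to constrain the result. A minimal sketch of that pattern, mirroring `from_field_unsafe` further down in this diff; the wrapper name is mine and, given the `pub(crate)` visibility, it only makes sense inside `noir_stdlib`.

```noir
// Sketch: tie the untrusted decomposition back to its input in-circuit,
// exactly as from_field_unsafe does below. Range checks on lo and hi are
// omitted here, as they are in from_field_unsafe, where multi_scalar_mul
// is expected to constrain the scalars.
fn decompose_checked(x: Field) -> (Field, Field) {
    let (lo, hi) = crate::field::bn254::decompose_hint(x);
    assert_eq(x, lo + crate::field::bn254::TWO_POW_128 * hi);
    (lo, hi)
}
```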
165 changes: 133 additions & 32 deletions noir_stdlib/src/hash/mod.nr
@@ -6,7 +6,8 @@ mod keccak;
use crate::default::Default;
use crate::uint128::U128;
use crate::sha256::{digest, sha256_var};
use crate::embedded_curve_ops::{EmbeddedCurvePoint, EmbeddedCurveScalar, multi_scalar_mul};
use crate::collections::vec::Vec;
use crate::embedded_curve_ops::{EmbeddedCurvePoint, EmbeddedCurveScalar, multi_scalar_mul, multi_scalar_mul_slice};

#[foreign(sha256)]
// docs:start:sha256
@@ -26,32 +27,35 @@ pub fn blake3<let N: u32>(input: [u8; N]) -> [u8; 32]
// docs:end:blake3
{}

#[no_predicates]
// docs:start:pedersen_commitment
pub fn pedersen_commitment<let N: u32>(input: [Field; N]) -> EmbeddedCurvePoint {
// docs:end:pedersen_commitment
let value = pedersen_commitment_with_separator(input, 0);
if (value.x == 0) & (value.y == 0) {
EmbeddedCurvePoint { x: 0, y: 0, is_infinite: true }
} else {
EmbeddedCurvePoint { x: value.x, y: value.y, is_infinite: false }
}
pedersen_commitment_with_separator(input, 0)
}

fn pedersen_commitment_with_separator_noir<let N: u32>(input: [Field; N], separator: u32) -> EmbeddedCurvePoint {
fn pedersen_commitment_with_separator<let N: u32>(input: [Field; N], separator: u32) -> EmbeddedCurvePoint {
let mut points = [EmbeddedCurveScalar { lo: 0, hi: 0 }; N];
for i in 0..N {
points[i] = EmbeddedCurveScalar::from_field(input[i]);
// we use the unsafe version because the multi_scalar_mul will constrain the scalars.
points[i] = from_field_unsafe(input[i]);
}
let generators = derive_generators("DEFAULT_DOMAIN_SEPARATOR".as_bytes(), separator);
let values = multi_scalar_mul(generators, points);
EmbeddedCurvePoint { x: values[0], y: values[1], is_infinite: values[2] as bool }
}

#[no_predicates]
pub fn pedersen_commitment_with_separator<let N: u32>(input: [Field; N], separator: u32) -> EmbeddedCurvePoint {
let values = __pedersen_commitment_with_separator(input, separator);
EmbeddedCurvePoint { x: values[0], y: values[1], is_infinite: false }
fn pedersen_hash_with_separator<let N: u32>(input: [Field; N], separator: u32) -> Field {
let mut scalars: Vec<EmbeddedCurveScalar> = Vec::from_slice([EmbeddedCurveScalar { lo: 0, hi: 0 }; N].as_slice()); //Vec::new();

for i in 0..N {
scalars.set(i, from_field_unsafe(input[i]));
}
scalars.push(EmbeddedCurveScalar { lo: N as Field, hi: 0 });
let domain_generators :[EmbeddedCurvePoint; N]= derive_generators("DEFAULT_DOMAIN_SEPARATOR".as_bytes(), separator);
let mut vec_generators = Vec::from_slice(domain_generators.as_slice());
let length_generator : [EmbeddedCurvePoint; 1] = derive_generators("pedersen_hash_length".as_bytes(), 0);
vec_generators.push(length_generator[0]);
multi_scalar_mul_slice(vec_generators.slice, scalars.slice)[0]
}
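
As the code above reads, the Noir Pedersen hash is a single multi-scalar multiplication whose final term binds the input length, and the hash is the x-coordinate of the result:

```text
pedersen_hash([s_0, ..., s_{N-1}], sep)
    = x( s_0 * G_0(sep) + ... + s_{N-1} * G_{N-1}(sep) + N * G_len )
```

where `G_i(sep)` are the `"DEFAULT_DOMAIN_SEPARATOR"` generators for the given separator and `G_len` is the `"pedersen_hash_length"` generator.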

// docs:start:pedersen_hash
@@ -75,21 +79,17 @@ fn __derive_generators<let N: u32, let M: u32>(
starting_index: u32
) -> [EmbeddedCurvePoint; N] {}

fn pedersen_hash_with_separator_noir<let N: u32>(input: [Field; N], separator: u32) -> Field {
let v1 = pedersen_commitment_with_separator(input, separator);
let length_generator : [EmbeddedCurvePoint; 1] = derive_generators("pedersen_hash_length".as_bytes(), 0);
multi_scalar_mul(
[length_generator[0], v1],
[EmbeddedCurveScalar { lo: N as Field, hi: 0 }, EmbeddedCurveScalar { lo: 1, hi: 0 }]
)[0]
#[field(bn254)]
// Same as from_field but:
// does not assert the limbs are 128 bits
// does not assert the decomposition does not overflow the EmbeddedCurveScalar
fn from_field_unsafe(scalar: Field) -> EmbeddedCurveScalar {
let (xlo, xhi) = crate::field::bn254::decompose_hint(scalar);
// Check that the decomposition is correct
assert_eq(scalar, xlo + crate::field::bn254::TWO_POW_128 * xhi);
EmbeddedCurveScalar { lo: xlo, hi: xhi }
}

#[foreign(pedersen_hash)]
pub fn pedersen_hash_with_separator<let N: u32>(input: [Field; N], separator: u32) -> Field {}

#[foreign(pedersen_commitment)]
fn __pedersen_commitment_with_separator<let N: u32>(input: [Field; N], separator: u32) -> [Field; 2] {}

pub fn hash_to_field(inputs: [Field]) -> Field {
let mut sum = 0;

@@ -264,10 +264,111 @@ impl<A, B, C, D, E> Hash for (A, B, C, D, E) where A: Hash, B: Hash, C: Hash, D:
}
}

// Some test vectors for Pedersen hash and Pedersen Commitment.
// They have been generated using the same functions so the tests are for now useless
// but they will be useful when we switch to Noir implementation.
#[test]
fn assert_pedersen_noir() {
// TODO: make this a fuzzer test once fuzzer supports curve-specific blackbox functions.
let input = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
assert_eq(pedersen_hash_with_separator(input, 4), pedersen_hash_with_separator_noir(input, 4));
assert_eq(pedersen_commitment_with_separator(input, 4), pedersen_commitment_with_separator_noir(input, 4));
fn assert_pedersen() {
assert_eq(
pedersen_hash_with_separator([1], 1), 0x1b3f4b1a83092a13d8d1a59f7acb62aba15e7002f4440f2275edb99ebbc2305f
);
assert_eq(
pedersen_commitment_with_separator([1], 1), EmbeddedCurvePoint {
x: 0x054aa86a73cb8a34525e5bbed6e43ba1198e860f5f3950268f71df4591bde402,
y: 0x209dcfbf2cfb57f9f6046f44d71ac6faf87254afc7407c04eb621a6287cac126,
is_infinite: false
}
);

assert_eq(
pedersen_hash_with_separator([1, 2], 2), 0x26691c129448e9ace0c66d11f0a16d9014a9e8498ee78f4d69f0083168188255
);
assert_eq(
pedersen_commitment_with_separator([1, 2], 2), EmbeddedCurvePoint {
x: 0x2e2b3b191e49541fe468ec6877721d445dcaffe41728df0a0eafeb15e87b0753,
y: 0x2ff4482400ad3a6228be17a2af33e2bcdf41be04795f9782bd96efe7e24f8778,
is_infinite: false
}
);
assert_eq(
pedersen_hash_with_separator([1, 2, 3], 3), 0x0bc694b7a1f8d10d2d8987d07433f26bd616a2d351bc79a3c540d85b6206dbe4
);
assert_eq(
pedersen_commitment_with_separator([1, 2, 3], 3), EmbeddedCurvePoint {
x: 0x1fee4e8cf8d2f527caa2684236b07c4b1bad7342c01b0f75e9a877a71827dc85,
y: 0x2f9fedb9a090697ab69bf04c8bc15f7385b3e4b68c849c1536e5ae15ff138fd1,
is_infinite: false
}
);
assert_eq(
pedersen_hash_with_separator([1, 2, 3, 4], 4), 0xdae10fb32a8408521803905981a2b300d6a35e40e798743e9322b223a5eddc
);
assert_eq(
pedersen_commitment_with_separator([1, 2, 3, 4], 4), EmbeddedCurvePoint {
x: 0x07ae3e202811e1fca39c2d81eabe6f79183978e6f12be0d3b8eda095b79bdbc9,
y: 0x0afc6f892593db6fbba60f2da558517e279e0ae04f95758587760ba193145014,
is_infinite: false
}
);
assert_eq(
pedersen_hash_with_separator([1, 2, 3, 4, 5], 5), 0xfc375b062c4f4f0150f7100dfb8d9b72a6d28582dd9512390b0497cdad9c22
);
assert_eq(
pedersen_commitment_with_separator([1, 2, 3, 4, 5], 5), EmbeddedCurvePoint {
x: 0x1754b12bd475a6984a1094b5109eeca9838f4f81ac89c5f0a41dbce53189bb29,
y: 0x2da030e3cfcdc7ddad80eaf2599df6692cae0717d4e9f7bfbee8d073d5d278f7,
is_infinite: false
}
);
assert_eq(
pedersen_hash_with_separator([1, 2, 3, 4, 5, 6], 6), 0x1696ed13dc2730062a98ac9d8f9de0661bb98829c7582f699d0273b18c86a572
);
assert_eq(
pedersen_commitment_with_separator([1, 2, 3, 4, 5, 6], 6), EmbeddedCurvePoint {
x: 0x190f6c0e97ad83e1e28da22a98aae156da083c5a4100e929b77e750d3106a697,
y: 0x1f4b60f34ef91221a0b49756fa0705da93311a61af73d37a0c458877706616fb,
is_infinite: false
}
);
assert_eq(
pedersen_hash_with_separator([1, 2, 3, 4, 5, 6, 7], 7), 0x128c0ff144fc66b6cb60eeac8a38e23da52992fc427b92397a7dffd71c45ede3
);
assert_eq(
pedersen_commitment_with_separator([1, 2, 3, 4, 5, 6, 7], 7), EmbeddedCurvePoint {
x: 0x015441e9d29491b06563fac16fc76abf7a9534c715421d0de85d20dbe2965939,
y: 0x1d2575b0276f4e9087e6e07c2cb75aa1baafad127af4be5918ef8a2ef2fea8fc,
is_infinite: false
}
);
assert_eq(
pedersen_hash_with_separator([1, 2, 3, 4, 5, 6, 7, 8], 8), 0x2f960e117482044dfc99d12fece2ef6862fba9242be4846c7c9a3e854325a55c
);
assert_eq(
pedersen_commitment_with_separator([1, 2, 3, 4, 5, 6, 7, 8], 8), EmbeddedCurvePoint {
x: 0x1657737676968887fceb6dd516382ea13b3a2c557f509811cd86d5d1199bc443,
y: 0x1f39f0cb569040105fa1e2f156521e8b8e08261e635a2b210bdc94e8d6d65f77,
is_infinite: false
}
);
assert_eq(
pedersen_hash_with_separator([1, 2, 3, 4, 5, 6, 7, 8, 9], 9), 0x0c96db0790602dcb166cc4699e2d306c479a76926b81c2cb2aaa92d249ec7be7
);
assert_eq(
pedersen_commitment_with_separator([1, 2, 3, 4, 5, 6, 7, 8, 9], 9), EmbeddedCurvePoint {
x: 0x0a3ceae42d14914a432aa60ec7fded4af7dad7dd4acdbf2908452675ec67e06d,
y: 0xfc19761eaaf621ad4aec9a8b2e84a4eceffdba78f60f8b9391b0bd9345a2f2,
is_infinite: false
}
);
assert_eq(
pedersen_hash_with_separator([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 10), 0x2cd37505871bc460a62ea1e63c7fe51149df5d0801302cf1cbc48beb8dff7e94
);
assert_eq(
pedersen_commitment_with_separator([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 10), EmbeddedCurvePoint {
x: 0x2fb3f8b3d41ddde007c8c3c62550f9a9380ee546fcc639ffbb3fd30c8d8de30c,
y: 0x300783be23c446b11a4c0fabf6c91af148937cea15fcf5fb054abf7f752ee245,
is_infinite: false
}
);
}
