Update to draft 19 #101

Merged: 1 commit, merged on Feb 8, 2023
2 changes: 1 addition & 1 deletion .github/workflows/main.yml
@@ -43,7 +43,7 @@ jobs:
- --features serde
toolchain:
- stable
- 1.60.0
- 1.61.0
name: test
steps:
- name: Checkout sources
9 changes: 6 additions & 3 deletions Cargo.toml
@@ -8,7 +8,7 @@ license = "MIT"
name = "voprf"
readme = "README.md"
repository = "https://github.com/novifinancial/voprf/"
rust-version = "1.60"
rust-version = "1.61"
version = "0.5.0-pre.2"

[features]
@@ -28,7 +28,7 @@ curve25519-dalek = { version = "=4.0.0-rc.1", default-features = false, features
derive-where = { version = "1", features = ["zeroize-on-drop"] }
digest = "0.10"
displaydoc = { version = "0.2", default-features = false }
elliptic-curve = { version = "0.12", features = [
elliptic-curve = { version = "=0.13.0-pre.5", features = [
"hash2curve",
"sec1",
"voprf",
@@ -45,7 +45,7 @@ zeroize = { version = "1.5", default-features = false }
[dev-dependencies]
generic-array = { version = "0.14", features = ["more_lengths"] }
hex = "0.4"
p256 = { version = "0.12", default-features = false, features = [
p256 = { version = "=0.13.0-pre", default-features = false, features = [
"hash2curve",
"voprf",
] }
@@ -59,3 +59,6 @@ sha2 = "0.10"
all-features = true
rustdoc-args = ["--cfg", "docsrs"]
targets = []

[patch.crates-io]
p256 = { git = "https://github.com/RustCrypto/elliptic-curves", rev = "136fed7944d53c0508b1a93cd97bdab46891bcf7" }
2 changes: 1 addition & 1 deletion README.md
@@ -21,7 +21,7 @@ voprf = "0.5.0-pre.2"

### Minimum Supported Rust Version

Rust **1.60** or higher.
Rust **1.61** or higher.

Contributors
------------
10 changes: 5 additions & 5 deletions src/ciphersuite.rs
@@ -8,7 +8,7 @@
//! Defines the CipherSuite trait to specify the underlying primitives for VOPRF

use digest::core_api::BlockSizeUser;
use digest::{Digest, OutputSizeUser};
use digest::{FixedOutput, HashMarker, OutputSizeUser};
use elliptic_curve::VoprfParameters;
use generic_array::typenum::{IsLess, IsLessOrEqual, U256};

@@ -22,25 +22,25 @@ where
{
/// The ciphersuite identifier as dictated by
/// <https://datatracker.ietf.org/doc/draft-irtf-cfrg-voprf/>
const ID: u16;
const ID: &'static str;

/// A finite cyclic group along with a point representation that allows some
/// customization on how to hash an input to a curve point. See [`Group`].
type Group: Group;

/// The main hash function to use (for HKDF computations and hashing
/// transcripts).
type Hash: BlockSizeUser + Digest;
type Hash: BlockSizeUser + Default + FixedOutput + HashMarker;
}

impl<T: VoprfParameters> CipherSuite for T
where
T: Group,
T::Hash: BlockSizeUser + Digest,
T::Hash: BlockSizeUser + Default + FixedOutput + HashMarker,
<T::Hash as OutputSizeUser>::OutputSize:
IsLess<U256> + IsLessOrEqual<<T::Hash as BlockSizeUser>::BlockSize>,
{
const ID: u16 = T::ID;
const ID: &'static str = T::ID;

type Group = T;

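For orientation, here is what a manual `CipherSuite` implementation looks like after this change: the identifier becomes a draft-19 suite string and the hash only needs the `BlockSizeUser + Default + FixedOutput + HashMarker` bounds. This is a minimal sketch, not code from the PR; `MySuite` is a hypothetical marker type, and it assumes the `p256` and `sha2` crates with the features pinned in Cargo.toml above. Types that already implement `VoprfParameters`, such as `NistP256` itself, get a `CipherSuite` impl for free through the blanket impl shown in the diff.

```rust
use p256::NistP256;
use sha2::Sha256;
use voprf::CipherSuite;

// Hypothetical suite pairing the P-256 group with SHA-256.
struct MySuite;

impl CipherSuite for MySuite {
    // Draft 19 identifies a suite by a string such as "P256-SHA256"
    // instead of the old numeric ID.
    const ID: &'static str = "P256-SHA256";

    // `NistP256` satisfies `Group` through the blanket impl in
    // src/group/elliptic_curve.rs further down.
    type Group = NistP256;

    // Any hash implementing the bounds above, with an output no larger
    // than its block size, is accepted.
    type Hash = Sha256;
}
```
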
96 changes: 72 additions & 24 deletions src/common.rs
@@ -8,12 +8,13 @@
//! Common functionality between multiple OPRF modes.

use core::convert::TryFrom;
use core::ops::Add;

use derive_where::derive_where;
use digest::core_api::BlockSizeUser;
use digest::{Digest, Output, OutputSizeUser};
use generic_array::sequence::Concat;
use generic_array::typenum::{IsLess, IsLessOrEqual, Unsigned, U11, U2, U256};
use generic_array::typenum::{IsLess, IsLessOrEqual, Unsigned, U2, U256, U9};
use generic_array::{ArrayLength, GenericArray};
use rand_core::{CryptoRng, RngCore};
use subtle::ConstantTimeEq;
@@ -33,7 +34,7 @@ pub(crate) const STR_DERIVE_KEYPAIR: [u8; 13] = *b"DeriveKeyPair";
pub(crate) const STR_COMPOSITE: [u8; 9] = *b"Composite";
pub(crate) const STR_CHALLENGE: [u8; 9] = *b"Challenge";
pub(crate) const STR_INFO: [u8; 4] = *b"Info";
pub(crate) const STR_VOPRF: [u8; 8] = *b"VOPRF10-";
pub(crate) const STR_OPRF: [u8; 7] = *b"OPRFV1-";
pub(crate) const STR_HASH_TO_SCALAR: [u8; 13] = *b"HashToScalar-";
pub(crate) const STR_HASH_TO_GROUP: [u8; 12] = *b"HashToGroup-";
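
The renamed prefix is the first piece of the draft-19 context string, contextString = "OPRFV1-" || I2OSP(mode, 1) || "-" || identifier, where the mode is a single raw byte; that fixed prefix is 9 bytes long, which is why the typenum import above swaps U11 for U9. A standalone illustration of the assembly (not this crate's code, which builds the same bytes with `GenericArray` in `Dst::new` below):

```rust
// Illustration only: assemble the draft-19 context string from the new
// prefix, a one-byte mode (0x00 OPRF, 0x01 VOPRF, 0x02 POPRF) and the
// ciphersuite identifier string.
fn context_string(mode: u8, suite_id: &str) -> Vec<u8> {
    let mut out = Vec::with_capacity(9 + suite_id.len());
    out.extend_from_slice(b"OPRFV1-"); // STR_OPRF
    out.push(mode);                    // I2OSP(mode, 1)
    out.push(b'-');
    out.extend_from_slice(suite_id.as_bytes()); // e.g. "ristretto255-SHA512"
    out
}
```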

@@ -194,9 +195,9 @@ where
&STR_CHALLENGE,
];

let dst = GenericArray::from(STR_HASH_TO_SCALAR).concat(create_context_string::<CS>(mode));
let dst = Dst::new::<CS, _, _>(STR_HASH_TO_SCALAR, mode);
// This can't fail, the size of the `input` is known.
let c_scalar = CS::Group::hash_to_scalar::<CS::Hash>(&h2_input, &dst).unwrap();
let c_scalar = CS::Group::hash_to_scalar::<CS::Hash>(&h2_input, &dst.as_dst()).unwrap();
let s_scalar = r - &(c_scalar * &k);

Ok(Proof { c_scalar, s_scalar })
@@ -254,9 +255,9 @@ where
&STR_CHALLENGE,
];

let dst = GenericArray::from(STR_HASH_TO_SCALAR).concat(create_context_string::<CS>(mode));
let dst = Dst::new::<CS, _, _>(STR_HASH_TO_SCALAR, mode);
// This can't fail, the size of the `input` is known.
let c = CS::Group::hash_to_scalar::<CS::Hash>(&h2_input, &dst).unwrap();
let c = CS::Group::hash_to_scalar::<CS::Hash>(&h2_input, &dst.as_dst()).unwrap();

match c.ct_eq(&proof.c_scalar).into() {
true => Ok(()),
@@ -296,16 +297,16 @@ where
let len = u16::try_from(c_slice.len()).map_err(|_| Error::Batch)?;

// seedDST = "Seed-" || contextString
let seed_dst = GenericArray::from(STR_SEED).concat(create_context_string::<CS>(mode));
let seed_dst = Dst::new::<CS, _, _>(STR_SEED, mode);

// h1Input = I2OSP(len(Bm), 2) || Bm ||
// I2OSP(len(seedDST), 2) || seedDST
// seed = Hash(h1Input)
let seed = CS::Hash::new()
.chain_update(elem_len)
.chain_update(CS::Group::serialize_elem(b))
.chain_update(i2osp_2_array(&seed_dst))
.chain_update(seed_dst)
.chain_update(seed_dst.i2osp_2())
.chain_update_multi(&seed_dst.as_dst())
.finalize();
let seed_len = i2osp_2_array(&seed);
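
The two-byte length prefixes above are I2OSP(len, 2), i.e. the length encoded as a big-endian u16; `Dst::i2osp_2` (introduced further down) computes it from the DST's two part lengths without joining them. A trivial standalone version, for reference only:

```rust
// I2OSP(len, 2): a length encoded as a two-byte big-endian integer.
// Lengths above u16::MAX are rejected, which is also what the assert in
// `Dst::new` below guards against for the DST itself.
fn i2osp_2(len: usize) -> [u8; 2] {
    u16::try_from(len)
        .expect("length must fit in two bytes")
        .to_be_bytes()
}

#[test]
fn i2osp_2_example() {
    assert_eq!(i2osp_2(35), [0x00, 0x23]); // e.g. a 35-byte DST
}
```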

@@ -332,9 +333,9 @@ where
&STR_COMPOSITE,
];

let dst = GenericArray::from(STR_HASH_TO_SCALAR).concat(create_context_string::<CS>(mode));
let dst = Dst::new::<CS, _, _>(STR_HASH_TO_SCALAR, mode);
// This can't fail, the size of the `input` is known.
let di = CS::Group::hash_to_scalar::<CS::Hash>(&h2_input, &dst).unwrap();
let di = CS::Group::hash_to_scalar::<CS::Hash>(&h2_input, &dst.as_dst()).unwrap();
m = c * &di + &m;
z = match k_option {
Some(_) => z,
@@ -365,8 +366,7 @@ where
<CS::Hash as OutputSizeUser>::OutputSize:
IsLess<U256> + IsLessOrEqual<<CS::Hash as BlockSizeUser>::BlockSize>,
{
let context_string = create_context_string::<CS>(mode);
let dst = GenericArray::from(STR_DERIVE_KEYPAIR).concat(context_string);
let dst = Dst::new::<CS, _, _>(STR_DERIVE_KEYPAIR, mode);

let info_len = i2osp_2(info.len()).map_err(|_| Error::DeriveKeyPair)?;

@@ -376,7 +376,7 @@
// || contextString)
let sk_s = CS::Group::hash_to_scalar::<CS::Hash>(
&[seed, &info_len, info, &counter.to_be_bytes()],
&dst,
&dst.as_dst(),
)
.map_err(|_| Error::DeriveKeyPair)?;

@@ -455,8 +455,8 @@ where
<CS::Hash as OutputSizeUser>::OutputSize:
IsLess<U256> + IsLessOrEqual<<CS::Hash as BlockSizeUser>::BlockSize>,
{
let dst = GenericArray::from(STR_HASH_TO_GROUP).concat(create_context_string::<CS>(mode));
CS::Group::hash_to_curve::<CS::Hash>(&[input], &dst).map_err(|_| Error::Input)
let dst = Dst::new::<CS, _, _>(STR_HASH_TO_GROUP, mode);
CS::Group::hash_to_curve::<CS::Hash>(&[input], &dst.as_dst()).map_err(|_| Error::Input)
}

/// Internal function that finalizes the hash input for OPRF, VOPRF & POPRF.
@@ -497,16 +497,64 @@ where
.finalize())
}

/// Generates the contextString parameter as defined in
/// <https://datatracker.ietf.org/doc/draft-irtf-cfrg-voprf/>
pub(crate) fn create_context_string<CS: CipherSuite>(mode: Mode) -> GenericArray<u8, U11>
pub(crate) struct Dst<L: ArrayLength<u8>> {
dst_1: GenericArray<u8, L>,
dst_2: &'static str,
}
Comment on lines +500 to +503 (Contributor): Nice! Thanks for this.


impl<L: ArrayLength<u8>> Dst<L> {
pub(crate) fn new<CS: CipherSuite, T, TL: ArrayLength<u8>>(par_1: T, mode: Mode) -> Self
where
T: Into<GenericArray<u8, TL>>,
TL: Add<U9, Output = L>,
<CS::Hash as OutputSizeUser>::OutputSize:
IsLess<U256> + IsLessOrEqual<<CS::Hash as BlockSizeUser>::BlockSize>,
{
let par_1 = par_1.into();
// Generates the contextString parameter as defined in
// <https://datatracker.ietf.org/doc/draft-irtf-cfrg-voprf/>
let par_2 = GenericArray::from(STR_OPRF)
.concat([mode.to_u8()].into())
.concat([b'-'].into());

let dst_1 = par_1.concat(par_2);
let dst_2 = CS::ID;

assert!(
L::USIZE + dst_2.len() <= u16::MAX.into(),
"constructed DST longer then {}",
u16::MAX
);

Self { dst_1, dst_2 }
}

pub(crate) fn as_dst(&self) -> [&[u8]; 2] {
[&self.dst_1, self.dst_2.as_bytes()]
}

pub(crate) fn i2osp_2(&self) -> [u8; 2] {
u16::try_from(L::USIZE + self.dst_2.len())
.unwrap()
.to_be_bytes()
}
}

trait DigestExt {
fn chain_update_multi(self, data: &[&[u8]]) -> Self;
}

impl<T> DigestExt for T
where
<CS::Hash as OutputSizeUser>::OutputSize:
IsLess<U256> + IsLessOrEqual<<CS::Hash as BlockSizeUser>::BlockSize>,
T: Digest,
{
GenericArray::from(STR_VOPRF)
.concat([mode.to_u8()].into())
.concat(CS::ID.to_be_bytes().into())
fn chain_update_multi(mut self, datas: &[&[u8]]) -> Self {
for data in datas {
self.update(data)
}

self
}
}

///////////////////////
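
The refactor leans on the fact that an incremental hash absorbs input piecewise, so updating with the DST's two parts one after another gives the same digest as hashing their concatenation; that is what makes `chain_update_multi` and the two-part `Dst` a drop-in replacement for the old concatenated array, and the split DST passed to `hash_to_curve`/`hash_to_scalar` relies on the same property. A runnable sanity check of that equivalence, using only the `sha2` crate (the example DST values are illustrative):

```rust
use sha2::{Digest, Sha256};

fn main() {
    // dst_1: "HashToScalar-" || "OPRFV1-" || mode byte || "-"
    let part_1: &[u8] = b"HashToScalar-OPRFV1-\x00-";
    // dst_2: CS::ID, e.g. "ristretto255-SHA512"
    let part_2: &[u8] = b"ristretto255-SHA512";

    // Feeding the two parts separately...
    let split = Sha256::new()
        .chain_update(part_1)
        .chain_update(part_2)
        .finalize();

    // ...matches hashing their concatenation.
    let joined = Sha256::new()
        .chain_update([part_1, part_2].concat())
        .finalize();

    assert_eq!(split, joined);
}
```
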
22 changes: 11 additions & 11 deletions src/group/elliptic_curve.rs
@@ -6,12 +6,12 @@
// of this source tree.

use digest::core_api::BlockSizeUser;
use digest::Digest;
use digest::{FixedOutput, HashMarker};
use elliptic_curve::group::cofactor::CofactorGroup;
use elliptic_curve::hash2curve::{ExpandMsgXmd, FromOkm, GroupDigest};
use elliptic_curve::sec1::{FromEncodedPoint, ModulusSize, ToEncodedPoint};
use elliptic_curve::{
AffinePoint, Field, FieldSize, Group as _, ProjectivePoint, PublicKey, Scalar, SecretKey,
AffinePoint, Field, FieldBytesSize, Group as _, ProjectivePoint, PublicKey, Scalar, SecretKey,
};
use generic_array::typenum::{IsLess, IsLessOrEqual, U256};
use generic_array::GenericArray;
@@ -24,32 +24,32 @@ impl<C> Group for C
where
C: GroupDigest,
ProjectivePoint<Self>: CofactorGroup + ToEncodedPoint<Self>,
FieldSize<Self>: ModulusSize,
FieldBytesSize<Self>: ModulusSize,
AffinePoint<Self>: FromEncodedPoint<Self> + ToEncodedPoint<Self>,
Scalar<Self>: FromOkm,
{
type Elem = ProjectivePoint<Self>;

type ElemLen = <FieldSize<Self> as ModulusSize>::CompressedPointSize;
type ElemLen = <FieldBytesSize<Self> as ModulusSize>::CompressedPointSize;

type Scalar = Scalar<Self>;

type ScalarLen = FieldSize<Self>;
type ScalarLen = FieldBytesSize<Self>;

// Implements the `hash_to_curve()` function from
// https://datatracker.ietf.org/doc/html/draft-irtf-cfrg-hash-to-curve-11#section-3
fn hash_to_curve<H>(input: &[&[u8]], dst: &[u8]) -> Result<Self::Elem, InternalError>
fn hash_to_curve<H>(input: &[&[u8]], dst: &[&[u8]]) -> Result<Self::Elem, InternalError>
where
H: Digest + BlockSizeUser,
H: BlockSizeUser + Default + FixedOutput + HashMarker,
H::OutputSize: IsLess<U256> + IsLessOrEqual<H::BlockSize>,
{
Self::hash_from_bytes::<ExpandMsgXmd<H>>(input, dst).map_err(|_| InternalError::Input)
}

// Implements the `HashToScalar()` function
fn hash_to_scalar<H>(input: &[&[u8]], dst: &[u8]) -> Result<Self::Scalar, InternalError>
fn hash_to_scalar<H>(input: &[&[u8]], dst: &[&[u8]]) -> Result<Self::Scalar, InternalError>
where
H: Digest + BlockSizeUser,
H: BlockSizeUser + Default + FixedOutput + HashMarker,
H::OutputSize: IsLess<U256> + IsLessOrEqual<H::BlockSize>,
{
<Self as GroupDigest>::hash_to_scalar::<ExpandMsgXmd<H>>(input, dst)
@@ -92,15 +92,15 @@ where

#[cfg(test)]
fn zero_scalar() -> Self::Scalar {
Scalar::<Self>::zero()
Scalar::<Self>::ZERO
}

fn serialize_scalar(scalar: Self::Scalar) -> GenericArray<u8, Self::ScalarLen> {
scalar.into()
}

fn deserialize_scalar(scalar_bits: &[u8]) -> Result<Self::Scalar> {
SecretKey::<Self>::from_be_bytes(scalar_bits)
SecretKey::<Self>::from_slice(scalar_bits)
.map(|secret_key| *secret_key.to_nonzero_scalar())
.map_err(|_| Error::Deserialization)
}
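
With `dst` now a `&[&[u8]]`, callers hand the DST over in parts and the pieces are treated as one concatenated tag. A hedged usage sketch, assuming the `Group` trait is re-exported at the crate root and the `p256`/`sha2` features pinned in Cargo.toml above; the DST bytes are written out by hand for the base OPRF mode and are illustrative only:

```rust
use p256::NistP256;
use sha2::Sha256;
use voprf::Group;

fn demo() {
    let input: &[u8] = b"some input";

    // "HashToGroup-" || "OPRFV1-" || 0x00 (OPRF mode) || "-", then the suite ID.
    let dst_1: &[u8] = b"HashToGroup-OPRFV1-\x00-";
    let dst_2: &[u8] = b"P256-SHA256";

    // The slices passed as `dst` together form the full domain-separation tag.
    let _point = <NistP256 as Group>::hash_to_curve::<Sha256>(&[input], &[dst_1, dst_2])
        .expect("non-empty input and DST within length limits");
}
```
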
10 changes: 5 additions & 5 deletions src/group/mod.rs
@@ -14,7 +14,7 @@ mod ristretto
use core::ops::{Add, Mul, Sub};

use digest::core_api::BlockSizeUser;
use digest::Digest;
use digest::{FixedOutput, HashMarker};
use generic_array::typenum::{IsLess, IsLessOrEqual, U256};
use generic_array::{ArrayLength, GenericArray};
use rand_core::{CryptoRng, RngCore};
@@ -54,19 +54,19 @@ pub trait Group {
/// # Errors
/// [`Error::Input`](crate::Error::Input) if the `input` is empty or longer
than [`u16::MAX`].
fn hash_to_curve<H>(input: &[&[u8]], dst: &[u8]) -> Result<Self::Elem, InternalError>
fn hash_to_curve<H>(input: &[&[u8]], dst: &[&[u8]]) -> Result<Self::Elem, InternalError>
where
H: Digest + BlockSizeUser,
H: BlockSizeUser + Default + FixedOutput + HashMarker,
H::OutputSize: IsLess<U256> + IsLessOrEqual<H::BlockSize>;

/// Hashes a slice of pseudo-random bytes to a scalar
///
/// # Errors
/// [`Error::Input`](crate::Error::Input) if the `input` is empty or longer
than [`u16::MAX`].
fn hash_to_scalar<H>(input: &[&[u8]], dst: &[u8]) -> Result<Self::Scalar, InternalError>
fn hash_to_scalar<H>(input: &[&[u8]], dst: &[&[u8]]) -> Result<Self::Scalar, InternalError>
where
H: Digest + BlockSizeUser,
H: BlockSizeUser + Default + FixedOutput + HashMarker,
H::OutputSize: IsLess<U256> + IsLessOrEqual<H::BlockSize>;

/// Get the base point for the group