From eb6d65a12d03dd11480c8b4d96ea8a85b7136ed9 Mon Sep 17 00:00:00 2001 From: nikkolasg Date: Fri, 3 Feb 2023 22:37:33 +0100 Subject: [PATCH 01/12] intermediate commit --- Cargo.toml | 33 +- src/data_structures.rs | 26 +- src/ipa_pc/data_structures.rs | 35 +- src/ipa_pc/mod.rs | 24 +- src/kzg10/data_structures.rs | 387 +++++++-------- src/kzg10/mod.rs | 113 +++-- src/lib.rs | 5 +- src/marlin/marlin_pc/data_structures.rs | 49 +- src/marlin/marlin_pc/mod.rs | 76 +-- src/marlin/marlin_pst13_pc/data_structures.rs | 450 +++++++++--------- src/marlin/marlin_pst13_pc/mod.rs | 119 ++--- src/marlin/mod.rs | 72 +-- src/multilinear_pc/data_structures.rs | 18 +- src/multilinear_pc/mod.rs | 62 +-- src/sonic_pc/data_structures.rs | 190 +++----- src/sonic_pc/mod.rs | 134 +++--- src/streaming_kzg/mod.rs | 71 ++- src/streaming_kzg/space.rs | 55 ++- src/streaming_kzg/time.rs | 57 +-- 19 files changed, 924 insertions(+), 1052 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 98daed2a..4f184eba 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "ark-poly-commit" -version = "0.3.0" +version = "0.4.0" authors = [ "Alessandro Chiesa ", "Mary Maller ", @@ -21,15 +21,15 @@ license = "MIT/Apache-2.0" edition = "2018" [dependencies] -ark-serialize = { version = "^0.3.0", default-features = false, features = [ "derive" ] } -ark-ff = { version = "^0.3.0", default-features = false } -ark-ec = { version = "^0.3.0", default-features = false } -ark-poly = {version = "^0.3.0", default-features = false } +ark-serialize = { version = "^0.4.0", default-features = false, features = [ "derive" ] } +ark-ff = { version = "^0.4.0", default-features = false } +ark-ec = { version = "^0.4.0", default-features = false } +ark-poly = {version = "^0.4.0", default-features = false } ark-sponge = {version = "^0.3.0", default-features = false} -ark-std = { version = "^0.3.0", default-features = false } -ark-relations = { version = "^0.3.0", default-features = false, optional = true } 
-ark-r1cs-std = { version = "^0.3.0", default-features = false, optional = true } +ark-std = { version = "^0.4.0", default-features = false } +ark-relations = { version = "^0.4.0", default-features = false, optional = true } +ark-r1cs-std = { version = "^0.4.0", default-features = false, optional = true } hashbrown = { version = "0.9", optional = true } digest = "0.9" @@ -37,9 +37,9 @@ rayon = { version = "1", optional = true } derivative = { version = "2", features = [ "use_core" ] } [dev-dependencies] -ark-ed-on-bls12-381 = { version = "^0.3.0", default-features = false } -ark-bls12-381 = { version = "^0.3.0", default-features = false, features = [ "curve" ] } -ark-bls12-377 = { version = "^0.3.0", default-features = false, features = [ "curve" ] } +ark-ed-on-bls12-381 = { version = "^0.4.0", default-features = false } +ark-bls12-381 = { version = "^0.4.0", default-features = false, features = [ "curve" ] } +ark-bls12-377 = { version = "^0.4.0", default-features = false, features = [ "curve" ] } blake2 = { version = "0.9", default-features = false } rand_chacha = { version = "0.3.0", default-features = false } @@ -56,17 +56,6 @@ incremental = true debug = true # To be removed in the new release. 
-[patch.crates-io] -ark-std = { git = "https://github.com/arkworks-rs/std" } -ark-ec = { git = "https://github.com/arkworks-rs/algebra" } -ark-ff = { git = "https://github.com/arkworks-rs/algebra" } -ark-poly = { git = "https://github.com/arkworks-rs/algebra" } -ark-serialize = { git = "https://github.com/arkworks-rs/algebra" } -ark-bls12-381 = { git = "https://github.com/arkworks-rs/curves" } -ark-bls12-377 = { git = "https://github.com/arkworks-rs/curves" } -ark-ed-on-bls12-381 = { git = "https://github.com/arkworks-rs/curves" } -ark-r1cs-std = { git = "https://github.com/arkworks-rs/r1cs-std" } -ark-sponge = { git = "https://github.com/arkworks-rs/sponge" } [features] default = [ "std", "parallel" ] diff --git a/src/data_structures.rs b/src/data_structures.rs index 2259451e..b173d3c1 100644 --- a/src/data_structures.rs +++ b/src/data_structures.rs @@ -1,10 +1,10 @@ -use crate::{Polynomial, Rc, String, Vec}; +use crate::{Polynomial, String, Vec}; use ark_ff::{Field, PrimeField, ToConstraintField}; -use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, SerializationError}; +use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; use ark_std::rand::RngCore; +use ark_std::sync::Arc; use ark_std::{ borrow::Borrow, - io::{Read, Write}, marker::PhantomData, ops::{AddAssign, MulAssign, SubAssign}, }; @@ -62,12 +62,6 @@ pub trait PCCommitment: Clone + CanonicalSerialize + CanonicalDeserialize { /// Does this commitment have a degree bound? 
fn has_degree_bound(&self) -> bool; - - /// Size in bytes - #[deprecated(since = "0.4.0", note = "Please use `.serialized_size()` instead.")] - fn size_in_bytes(&self) -> usize { - self.serialized_size() - } } /// Defines the minimal interface of prepared commitments for any polynomial @@ -96,16 +90,6 @@ pub trait PCRandomness: Clone + CanonicalSerialize + CanonicalDeserialize { ) -> Self; } -/// Defines the minimal interface of evaluation proofs for any polynomial -/// commitment scheme. -pub trait PCProof: Clone + CanonicalSerialize + CanonicalDeserialize { - /// Size in bytes - #[deprecated(since = "0.4.0", note = "Please use `.serialized_size()` instead.")] - fn size_in_bytes(&self) -> usize { - self.serialized_size() - } -} - /// A proof of satisfaction of linear combinations. #[derive(Clone, CanonicalSerialize, CanonicalDeserialize)] pub struct BatchLCProof { @@ -121,7 +105,7 @@ pub struct BatchLCProof> { label: PolynomialLabel, - polynomial: Rc

, + polynomial: P, degree_bound: Option, hiding_bound: Option, _field: PhantomData, @@ -145,7 +129,7 @@ impl<'a, F: Field, P: Polynomial> LabeledPolynomial { ) -> Self { Self { label, - polynomial: Rc::new(polynomial), + polynomial: polynomial, degree_bound, hiding_bound, _field: PhantomData, diff --git a/src/ipa_pc/data_structures.rs b/src/ipa_pc/data_structures.rs index 8369becf..7ba56c95 100644 --- a/src/ipa_pc/data_structures.rs +++ b/src/ipa_pc/data_structures.rs @@ -1,18 +1,15 @@ use crate::*; use crate::{PCCommitterKey, PCVerifierKey, Vec}; -use ark_ec::AffineCurve; +use ark_ec::AffineRepr; use ark_ff::{Field, UniformRand, Zero}; -use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, SerializationError}; +use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; use ark_std::rand::RngCore; -use ark_std::{ - io::{Read, Write}, - vec, -}; +use ark_std::vec; /// `UniversalParams` are the universal parameters for the inner product arg scheme. #[derive(Derivative, CanonicalSerialize, CanonicalDeserialize)] #[derivative(Default(bound = ""), Clone(bound = ""), Debug(bound = ""))] -pub struct UniversalParams { +pub struct UniversalParams { /// The key used to commit to polynomials. pub comm_key: Vec, @@ -23,7 +20,7 @@ pub struct UniversalParams { pub s: G, } -impl PCUniversalParams for UniversalParams { +impl PCUniversalParams for UniversalParams { fn max_degree(&self) -> usize { self.comm_key.len() - 1 } @@ -38,7 +35,7 @@ impl PCUniversalParams for UniversalParams { Clone(bound = ""), Debug(bound = "") )] -pub struct CommitterKey { +pub struct CommitterKey { /// The key used to commit to polynomials. pub comm_key: Vec, @@ -54,7 +51,7 @@ pub struct CommitterKey { pub max_degree: usize, } -impl PCCommitterKey for CommitterKey { +impl PCCommitterKey for CommitterKey { fn max_degree(&self) -> usize { self.max_degree } @@ -66,7 +63,7 @@ impl PCCommitterKey for CommitterKey { /// `VerifierKey` is used to check evaluation proofs for a given commitment. 
pub type VerifierKey = CommitterKey; -impl PCVerifierKey for VerifierKey { +impl PCVerifierKey for VerifierKey { fn max_degree(&self) -> usize { self.max_degree } @@ -79,7 +76,7 @@ impl PCVerifierKey for VerifierKey { /// Nothing to do to prepare this verifier key (for now). pub type PreparedVerifierKey = VerifierKey; -impl PCPreparedVerifierKey> for PreparedVerifierKey { +impl PCPreparedVerifierKey> for PreparedVerifierKey { /// prepare `PreparedVerifierKey` from `VerifierKey` fn prepare(vk: &VerifierKey) -> Self { vk.clone() @@ -97,7 +94,7 @@ impl PCPreparedVerifierKey> for PreparedVerifierK PartialEq(bound = ""), Eq(bound = "") )] -pub struct Commitment { +pub struct Commitment { /// A Pedersen commitment to the polynomial. pub comm: G, @@ -107,7 +104,7 @@ pub struct Commitment { pub shifted_comm: Option, } -impl PCCommitment for Commitment { +impl PCCommitment for Commitment { #[inline] fn empty() -> Self { Commitment { @@ -124,7 +121,7 @@ impl PCCommitment for Commitment { /// Nothing to do to prepare this commitment (for now). pub type PreparedCommitment = Commitment; -impl PCPreparedCommitment> for PreparedCommitment { +impl PCPreparedCommitment> for PreparedCommitment { /// prepare `PreparedCommitment` from `Commitment` fn prepare(vk: &Commitment) -> Self { vk.clone() @@ -141,7 +138,7 @@ impl PCPreparedCommitment> for PreparedCommitment< PartialEq(bound = ""), Eq(bound = "") )] -pub struct Randomness { +pub struct Randomness { /// Randomness is some scalar field element. 
pub rand: G::ScalarField, @@ -149,7 +146,7 @@ pub struct Randomness { pub shifted_rand: Option, } -impl PCRandomness for Randomness { +impl PCRandomness for Randomness { fn empty() -> Self { Self { rand: G::ScalarField::zero(), @@ -177,7 +174,7 @@ impl PCRandomness for Randomness { Clone(bound = ""), Debug(bound = "") )] -pub struct Proof { +pub struct Proof { /// Vector of left elements for each of the log_d iterations in `open` pub l_vec: Vec, @@ -199,8 +196,6 @@ pub struct Proof { pub rand: Option, } -impl PCProof for Proof {} - /// `SuccinctCheckPolynomial` is a succinctly-representated polynomial /// generated from the `log_d` random oracle challenges generated in `open`. /// It has the special property that can be evaluated in `O(log_d)` time. diff --git a/src/ipa_pc/mod.rs b/src/ipa_pc/mod.rs index 4751f8c0..503bddf9 100644 --- a/src/ipa_pc/mod.rs +++ b/src/ipa_pc/mod.rs @@ -3,7 +3,7 @@ use crate::{BatchLCProof, DenseUVPolynomial, Error, Evaluations, QuerySet}; use crate::{LabeledCommitment, LabeledPolynomial, LinearCombination}; use crate::{PCCommitterKey, PCRandomness, PCUniversalParams, PolynomialCommitment}; -use ark_ec::{msm::VariableBaseMSM, AffineCurve, ProjectiveCurve}; +use ark_ec::{AffineRepr, CurveGroup, VariableBaseMSM}; use ark_ff::{Field, One, PrimeField, UniformRand, Zero}; use ark_serialize::CanonicalSerialize; use ark_std::rand::RngCore; @@ -33,7 +33,7 @@ use digest::Digest; /// [pcdas]: https://eprint.iacr.org/2020/499 /// [marlin]: https://eprint.iacr.org/2019/1047 pub struct InnerProductArgPC< - G: AffineCurve, + G: AffineRepr, D: Digest, P: DenseUVPolynomial, S: CryptographicSponge, @@ -46,8 +46,8 @@ pub struct InnerProductArgPC< impl InnerProductArgPC where - G: AffineCurve, - G::Projective: VariableBaseMSM, + G: AffineRepr, + G::Group: VariableBaseMSM, D: Digest, P: DenseUVPolynomial, S: CryptographicSponge, @@ -62,7 +62,7 @@ where scalars: &[G::ScalarField], hiding_generator: Option, randomizer: Option, - ) -> G::Projective { + ) -> 
G::Group { let scalars_bigint = ark_std::cfg_iter!(scalars) .map(|s| s.into_bigint()) .collect::>(); @@ -262,10 +262,10 @@ where } fn combine_shifted_comm( - combined_comm: Option, + combined_comm: Option, new_comm: Option, coeff: G::ScalarField, - ) -> Option { + ) -> Option { if let Some(new_comm) = new_comm { let coeff_new_comm = new_comm.mul(coeff); return Some(combined_comm.map_or(coeff_new_comm, |c| c + &coeff_new_comm)); @@ -276,7 +276,7 @@ where fn construct_labeled_commitments( lc_info: &[(String, Option)], - elements: &[G::Projective], + elements: &[G::Group], ) -> Vec>> { let comms = G::Projective::batch_normalization_into_affine(elements); let mut commitments = Vec::new(); @@ -337,8 +337,8 @@ where impl PolynomialCommitment for InnerProductArgPC where - G: AffineCurve, - G::Projective: VariableBaseMSM, + G: AffineRepr, + G::Group: VariableBaseMSM, D: Digest, P: DenseUVPolynomial, S: CryptographicSponge, @@ -1075,7 +1075,7 @@ mod tests { #![allow(non_camel_case_types)] use super::InnerProductArgPC; - use ark_ec::AffineCurve; + use ark_ec::AffineRepr; use ark_ed_on_bls12_381::{EdwardsAffine, Fr}; use ark_ff::PrimeField; use ark_poly::{univariate::DensePolynomial as DensePoly, DenseUVPolynomial}; @@ -1084,7 +1084,7 @@ mod tests { use rand_chacha::ChaCha20Rng; type UniPoly = DensePoly; - type Sponge = PoseidonSponge<::ScalarField>; + type Sponge = PoseidonSponge<::ScalarField>; type PC = InnerProductArgPC; type PC_JJB2S = PC; diff --git a/src/kzg10/data_structures.rs b/src/kzg10/data_structures.rs index 161e86f8..bf61b638 100644 --- a/src/kzg10/data_structures.rs +++ b/src/kzg10/data_structures.rs @@ -1,7 +1,9 @@ use crate::*; -use ark_ec::{PairingEngine, ProjectiveCurve}; -use ark_ff::{PrimeField, ToConstraintField, Zero}; -use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, SerializationError}; +use ark_ec::pairing::Pairing; +use ark_ff::{PrimeField, ToConstraintField}; +use ark_serialize::{ + CanonicalDeserialize, CanonicalSerialize, Compress, 
SerializationError, Valid, +}; use ark_std::{ borrow::Cow, io::{Read, Write}, @@ -17,7 +19,7 @@ use ark_std::{ PartialEq(bound = ""), Eq(bound = "") )] -pub struct UniversalParams { +pub struct UniversalParams { /// Group elements of the form `{ \beta^i G }`, where `i` ranges from 0 to `degree`. pub powers_of_g: Vec, /// Group elements of the form `{ \beta^i \gamma G }`, where `i` ranges from 0 to `degree`. @@ -36,43 +38,48 @@ pub struct UniversalParams { pub prepared_beta_h: E::G2Prepared, } -impl PCUniversalParams for UniversalParams { +impl Valid for UniversalParams { + fn check(&self) -> bool { + self.powers_of_g.len() == self.powers_of_gamma_g.len() + && self.powers_of_g.len() == self.neg_powers_of_h.len() + } +} +impl PCUniversalParams for UniversalParams { fn max_degree(&self) -> usize { self.powers_of_g.len() - 1 } } -impl CanonicalSerialize for UniversalParams { - fn serialize(&self, mut writer: W) -> Result<(), SerializationError> { - self.powers_of_g.serialize(&mut writer)?; - self.powers_of_gamma_g.serialize(&mut writer)?; - self.h.serialize(&mut writer)?; - self.beta_h.serialize(&mut writer)?; - self.neg_powers_of_h.serialize(&mut writer) +impl CanonicalSerialize for UniversalParams { + fn serialize_with_mode( + &self, + writer: W, + compress: Compress, + ) -> Result<(), SerializationError> { + self.powers_of_g + .serialize_with_mode(&mut writer, compress)?; + self.powers_of_gamma_g + .serialize_with_mode(&mut writer, compress)?; + self.h.serialize_with_mode(&mut writer, compress)?; + self.beta_h.serialize_with_mode(&mut writer, compress)?; + self.neg_powers_of_h + .serialize_with_mode(&mut writer, compress) + } + + fn serialized_size(&self, compress: Compress) -> usize { + self.powers_of_g.serialized_size(compress) + + self.powers_of_gamma_g.serialized_size(compress) + + self.h.serialized_size(compress) + + self.beta_h.serialized_size(compress) + + self.neg_powers_of_h.serialized_size(compress) } - fn serialized_size(&self) -> usize { - 
self.powers_of_g.serialized_size() - + self.powers_of_gamma_g.serialized_size() - + self.h.serialized_size() - + self.beta_h.serialized_size() - + self.neg_powers_of_h.serialized_size() - } - - fn serialize_unchecked(&self, mut writer: W) -> Result<(), SerializationError> { - self.powers_of_g.serialize_unchecked(&mut writer)?; - self.powers_of_gamma_g.serialize_unchecked(&mut writer)?; - self.h.serialize_unchecked(&mut writer)?; - self.beta_h.serialize_unchecked(&mut writer)?; - self.neg_powers_of_h.serialize_unchecked(&mut writer) + fn serialize_uncompressed(&self, mut writer: W) -> Result<(), SerializationError> { + Self::serialize_with_mode(&self, writer, Compress::No) } - fn serialize_uncompressed(&self, mut writer: W) -> Result<(), SerializationError> { - self.powers_of_g.serialize_uncompressed(&mut writer)?; - self.powers_of_gamma_g.serialize_uncompressed(&mut writer)?; - self.h.serialize_uncompressed(&mut writer)?; - self.beta_h.serialize_uncompressed(&mut writer)?; - self.neg_powers_of_h.serialize_uncompressed(&mut writer) + fn serialize_compressed(&self, mut writer: W) -> Result<(), SerializationError> { + Self::serialize_with_mode(&self, writer, Compress::Yes) } fn uncompressed_size(&self) -> usize { @@ -84,36 +91,20 @@ impl CanonicalSerialize for UniversalParams { } } -impl CanonicalDeserialize for UniversalParams { - fn deserialize(mut reader: R) -> Result { - let powers_of_g = Vec::::deserialize(&mut reader)?; - let powers_of_gamma_g = BTreeMap::::deserialize(&mut reader)?; - let h = E::G2Affine::deserialize(&mut reader)?; - let beta_h = E::G2Affine::deserialize(&mut reader)?; - let neg_powers_of_h = BTreeMap::::deserialize(&mut reader)?; - - let prepared_h = E::G2Prepared::from(h.clone()); - let prepared_beta_h = E::G2Prepared::from(beta_h.clone()); - - Ok(Self { - powers_of_g, - powers_of_gamma_g, - h, - beta_h, - neg_powers_of_h, - prepared_h, - prepared_beta_h, - }) - } - - fn deserialize_uncompressed(mut reader: R) -> Result { - let powers_of_g = 
Vec::::deserialize_uncompressed(&mut reader)?; +impl CanonicalDeserialize for UniversalParams { + fn deserialize_with_mode( + reader: R, + compress: Compress, + validate: ark_serialize::Validate, + ) -> Result { + let powers_of_g = + Vec::::deserialize_with_mode(&mut reader, compress, validate)?; let powers_of_gamma_g = - BTreeMap::::deserialize_uncompressed(&mut reader)?; - let h = E::G2Affine::deserialize_uncompressed(&mut reader)?; - let beta_h = E::G2Affine::deserialize_uncompressed(&mut reader)?; + BTreeMap::::deserialize_with_mode(&mut reader, compress, validate)?; + let h = E::G2Affine::deserialize_with_mode(&mut reader, compress, validate)?; + let beta_h = E::G2Affine::deserialize_with_mode(&mut reader, compress, validate)?; let neg_powers_of_h = - BTreeMap::::deserialize_uncompressed(&mut reader)?; + BTreeMap::::deserialize_with_mode(&mut reader, compress, validate)?; let prepared_h = E::G2Prepared::from(h.clone()); let prepared_beta_h = E::G2Prepared::from(beta_h.clone()); @@ -129,25 +120,17 @@ impl CanonicalDeserialize for UniversalParams { }) } - fn deserialize_unchecked(mut reader: R) -> Result { - let powers_of_g = Vec::::deserialize_unchecked(&mut reader)?; - let powers_of_gamma_g = BTreeMap::::deserialize_unchecked(&mut reader)?; - let h = E::G2Affine::deserialize_unchecked(&mut reader)?; - let beta_h = E::G2Affine::deserialize_unchecked(&mut reader)?; - let neg_powers_of_h = BTreeMap::::deserialize_unchecked(&mut reader)?; - - let prepared_h = E::G2Prepared::from(h.clone()); - let prepared_beta_h = E::G2Prepared::from(beta_h.clone()); - - Ok(Self { - powers_of_g, - powers_of_gamma_g, - h, - beta_h, - neg_powers_of_h, - prepared_h, - prepared_beta_h, - }) + fn deserialize_uncompressed(mut reader: R) -> Result { + Self::deserialize_with_mode(&mut reader, Compress::No, ark_serialize::Validate::Yes) + } + fn deserialize_compressed(reader: R) -> Result { + Self::deserialize_with_mode(&mut reader, Compress::Yes, ark_serialize::Validate::Yes) + } + fn 
deserialize_compressed_unchecked(reader: R) -> Result { + Self::deserialize_with_mode(&mut reader, Compress::Yes, ark_serialize::Validate::No) + } + fn deserialize_uncompressed_unchecked(reader: R) -> Result { + Self::deserialize_with_mode(&mut reader, Compress::No, ark_serialize::Validate::No) } } @@ -161,67 +144,80 @@ impl CanonicalDeserialize for UniversalParams { Debug(bound = ""), PartialEq )] -pub struct Powers<'a, E: PairingEngine> { +pub struct Powers<'a, E: Pairing> { /// Group elements of the form `β^i G`, for different values of `i`. pub powers_of_g: Cow<'a, [E::G1Affine]>, /// Group elements of the form `β^i γG`, for different values of `i`. pub powers_of_gamma_g: Cow<'a, [E::G1Affine]>, } -impl Powers<'_, E> { +impl Powers<'_, E> { /// The number of powers in `self`. pub fn size(&self) -> usize { self.powers_of_g.len() } } - -impl<'a, E: PairingEngine> CanonicalSerialize for Powers<'a, E> { - fn serialize(&self, mut writer: W) -> Result<(), SerializationError> { - self.powers_of_g.serialize(&mut writer)?; - self.powers_of_gamma_g.serialize(&mut writer) +impl<'a, E: Pairing> Valid for Powers<'a, E> { + fn check(&self) -> bool { + self.powers_of_g.len() == self.powers_of_gamma_g.len() } - - fn serialized_size(&self) -> usize { - self.powers_of_g.serialized_size() + self.powers_of_gamma_g.serialized_size() +} +impl<'a, E: Pairing> CanonicalSerialize for Powers<'a, E> { + fn serialize_with_mode( + &self, + writer: W, + compress: Compress, + ) -> Result<(), SerializationError> { + self.powers_of_g.serialize_with_mode(&mut writer)?; + self.powers_of_gamma_g.serialize_with_mode(&mut writer) } - fn serialize_unchecked(&self, mut writer: W) -> Result<(), SerializationError> { - self.powers_of_g.serialize_unchecked(&mut writer)?; - self.powers_of_gamma_g.serialize_unchecked(&mut writer) + fn serialized_size(&self, compress: Compress) -> usize { + self.powers_of_g.serialized_size(compress) + + self.powers_of_gamma_g.serialized_size(compress) } - fn 
serialize_uncompressed(&self, mut writer: W) -> Result<(), SerializationError> { - self.powers_of_g.serialize_uncompressed(&mut writer)?; - self.powers_of_gamma_g.serialize_uncompressed(&mut writer) + fn serialize_compressed(&self, writer: W) -> Result<(), SerializationError> { + self.serialize_with_mode(writer, Compress::Yes) + } + fn compressed_size(&self) -> usize { + self.serialized_size(Compress::Yes) + } + fn serialize_uncompressed(&self, writer: W) -> Result<(), SerializationError> { + self.serialize_with_mode(writer, Compress::No) + } + fn uncompressed_size(&self) -> usize { + self.serialized_size(Compress::No) } } -impl<'a, E: PairingEngine> CanonicalDeserialize for Powers<'a, E> { - fn deserialize(mut reader: R) -> Result { - let powers_of_g = Vec::::deserialize(&mut reader)?; - let powers_of_gamma_g = Vec::::deserialize(&mut reader)?; +impl<'a, E: Pairing> CanonicalDeserialize for Powers<'a, E> { + fn deserialize_with_mode( + reader: R, + compress: Compress, + validate: ark_serialize::Validate, + ) -> Result { + let powers_of_g = + Vec::::deserialize_with_mode(&mut reader, compress, validate)?; + let powers_of_gamma_g = + Vec::::deserialize_with_mode(&mut reader, compress, validate)?; Ok(Self { powers_of_g: Cow::Owned(powers_of_g), powers_of_gamma_g: Cow::Owned(powers_of_gamma_g), }) } - fn deserialize_unchecked(mut reader: R) -> Result { - let powers_of_g = Vec::::deserialize_unchecked(&mut reader)?; - let powers_of_gamma_g = Vec::::deserialize_unchecked(&mut reader)?; - Ok(Self { - powers_of_g: Cow::Owned(powers_of_g), - powers_of_gamma_g: Cow::Owned(powers_of_gamma_g), - }) + fn deserialize_compressed(reader: R) -> Result { + Self::deserialize_with_mode(&mut reader, Compress::Yes, ark_serialize::Validate::Yes) } - - fn deserialize_uncompressed(mut reader: R) -> Result { - let powers_of_g = Vec::::deserialize_uncompressed(&mut reader)?; - let powers_of_gamma_g = Vec::::deserialize_uncompressed(&mut reader)?; - Ok(Self { - powers_of_g: 
Cow::Owned(powers_of_g), - powers_of_gamma_g: Cow::Owned(powers_of_gamma_g), - }) + fn deserialize_compressed_unchecked(reader: R) -> Result { + Self::deserialize_with_mode(&mut reader, Compress::Yes, ark_serialize::Validate::No) + } + fn deserialize_uncompressed(reader: R) -> Result { + Self::deserialize_with_mode(&mut reader, Compress::No, ark_serialize::Validate::Yes) + } + fn deserialize_uncompressed_unchecked(reader: R) -> Result { + Self::deserialize_with_mode(&mut reader, Compress::No, ark_serialize::Validate::No) } } /// `VerifierKey` is used to check evaluation proofs for a given commitment. @@ -233,7 +229,7 @@ impl<'a, E: PairingEngine> CanonicalDeserialize for Powers<'a, E> { PartialEq(bound = ""), Eq(bound = "") )] -pub struct VerifierKey { +pub struct VerifierKey { /// The generator of G1. pub g: E::G1Affine, /// The generator of G1 that is used for making a commitment hiding. @@ -250,49 +246,54 @@ pub struct VerifierKey { pub prepared_beta_h: E::G2Prepared, } -impl CanonicalSerialize for VerifierKey { - fn serialize(&self, mut writer: W) -> Result<(), SerializationError> { - self.g.serialize(&mut writer)?; - self.gamma_g.serialize(&mut writer)?; - self.h.serialize(&mut writer)?; - self.beta_h.serialize(&mut writer) +impl Valid for VerifierKey { + fn check(&self) -> Result<(), SerializationError> { + Ok(()) } +} - fn serialized_size(&self) -> usize { - self.g.serialized_size() - + self.gamma_g.serialized_size() - + self.h.serialized_size() - + self.beta_h.serialized_size() +impl CanonicalSerialize for VerifierKey { + fn serialize_with_mode( + &self, + writer: W, + compress: Compress, + ) -> Result<(), SerializationError> { + self.g.serialize_with_mode(&mut writer, compress)?; + self.gamma_g.serialize_with_mode(&mut writer, compress)?; + self.h.serialize_with_mode(&mut writer, compress)?; + self.beta_h.serialize_with_mode(&mut writer, compress) } - fn serialize_uncompressed(&self, mut writer: W) -> Result<(), SerializationError> { - 
self.g.serialize_uncompressed(&mut writer)?; - self.gamma_g.serialize_uncompressed(&mut writer)?; - self.h.serialize_uncompressed(&mut writer)?; - self.beta_h.serialize_uncompressed(&mut writer) + fn serialized_size(&self, compress: Compress) -> usize { + self.g.serialized_size(compress) + + self.gamma_g.serialized_size(compress) + + self.h.serialized_size(compress) + + self.beta_h.serialized_size(compress) } - - fn serialize_unchecked(&self, mut writer: W) -> Result<(), SerializationError> { - self.g.serialize_unchecked(&mut writer)?; - self.gamma_g.serialize_unchecked(&mut writer)?; - self.h.serialize_unchecked(&mut writer)?; - self.beta_h.serialize_unchecked(&mut writer) + fn serialize_compressed(&self, writer: W) -> Result<(), SerializationError> { + self.serialize_with_mode(writer, Compress::Yes) + } + fn compressed_size(&self) -> usize { + self.serialized_size(Compress::Yes) + } + fn serialize_uncompressed(&self, writer: W) -> Result<(), SerializationError> { + self.serialize_with_mode(writer, Compress::No) } - fn uncompressed_size(&self) -> usize { - self.g.uncompressed_size() - + self.gamma_g.uncompressed_size() - + self.h.uncompressed_size() - + self.beta_h.uncompressed_size() + self.serialized_size(Compress::No) } } -impl CanonicalDeserialize for VerifierKey { - fn deserialize(mut reader: R) -> Result { - let g = E::G1Affine::deserialize(&mut reader)?; - let gamma_g = E::G1Affine::deserialize(&mut reader)?; - let h = E::G2Affine::deserialize(&mut reader)?; - let beta_h = E::G2Affine::deserialize(&mut reader)?; +impl CanonicalDeserialize for VerifierKey { + fn deserialize_with_mode( + reader: R, + compress: Compress, + validate: ark_serialize::Validate, + ) -> Result { + let g = E::G1Affine::deserialize_with_mode(&mut reader, compress, validate)?; + let gamma_g = E::G1Affine::deserialize(&mut reader, compress, validate)?; + let h = E::G2Affine::deserialize(&mut reader, compress, validate)?; + let beta_h = E::G2Affine::deserialize(&mut reader, compress, 
validate)?; let prepared_h = E::G2Prepared::from(h.clone()); let prepared_beta_h = E::G2Prepared::from(beta_h.clone()); @@ -306,52 +307,26 @@ impl CanonicalDeserialize for VerifierKey { prepared_beta_h, }) } - - fn deserialize_uncompressed(mut reader: R) -> Result { - let g = E::G1Affine::deserialize_uncompressed(&mut reader)?; - let gamma_g = E::G1Affine::deserialize_uncompressed(&mut reader)?; - let h = E::G2Affine::deserialize_uncompressed(&mut reader)?; - let beta_h = E::G2Affine::deserialize_uncompressed(&mut reader)?; - - let prepared_h = E::G2Prepared::from(h.clone()); - let prepared_beta_h = E::G2Prepared::from(beta_h.clone()); - - Ok(Self { - g, - gamma_g, - h, - beta_h, - prepared_h, - prepared_beta_h, - }) + fn deserialize_compressed(reader: R) -> Result { + Self::deserialize_with_mode(reader, Compress::Yes, ark_serialize::Validate::Yes) } - - fn deserialize_unchecked(mut reader: R) -> Result { - let g = E::G1Affine::deserialize_unchecked(&mut reader)?; - let gamma_g = E::G1Affine::deserialize_unchecked(&mut reader)?; - let h = E::G2Affine::deserialize_unchecked(&mut reader)?; - let beta_h = E::G2Affine::deserialize_unchecked(&mut reader)?; - - let prepared_h = E::G2Prepared::from(h.clone()); - let prepared_beta_h = E::G2Prepared::from(beta_h.clone()); - - Ok(Self { - g, - gamma_g, - h, - beta_h, - prepared_h, - prepared_beta_h, - }) + fn deserialize_compressed_unchecked(reader: R) -> Result { + Self::deserialize_with_mode(reader, Compress::Yes, ark_serialize::Validate::No) + } + fn deserialize_uncompressed(reader: R) -> Result { + Self::deserialize_with_mode(reader, Compress::No, ark_serialize::Validate::Yes) + } + fn deserialize_uncompressed_unchecked(reader: R) -> Result { + Self::deserialize_with_mode(reader, Compress::No, ark_serialize::Validate::No) } } -impl ToConstraintField<::BasePrimeField> for VerifierKey +impl ToConstraintField<::BasePrimeField> for VerifierKey where - E::G1Affine: ToConstraintField<::BasePrimeField>, - E::G2Affine: 
ToConstraintField<::BasePrimeField>, + E::G1Affine: ToConstraintField<::BasePrimeField>, + E::G2Affine: ToConstraintField<::BasePrimeField>, { - fn to_field_elements(&self) -> Option::BasePrimeField>> { + fn to_field_elements(&self) -> Option::BasePrimeField>> { let mut res = Vec::new(); res.extend_from_slice(&self.g.to_field_elements().unwrap()); @@ -367,7 +342,7 @@ where /// We omit gamma here for simplicity. #[derive(Derivative)] #[derivative(Default(bound = ""), Clone(bound = ""), Debug(bound = ""))] -pub struct PreparedVerifierKey { +pub struct PreparedVerifierKey { /// The generator of G1, prepared for power series. pub prepared_g: Vec, /// The generator of G2, prepared for use in pairings. @@ -376,13 +351,13 @@ pub struct PreparedVerifierKey { pub prepared_beta_h: E::G2Prepared, } -impl PreparedVerifierKey { +impl PreparedVerifierKey { /// prepare `PreparedVerifierKey` from `VerifierKey` pub fn prepare(vk: &VerifierKey) -> Self { - let supported_bits = E::Fr::MODULUS_BIT_SIZE as usize; + let supported_bits = E::ScalarField::MODULUS_BIT_SIZE as usize; let mut prepared_g = Vec::::new(); - let mut g = E::G1Projective::from(vk.g.clone()); + let mut g = E::G1::ScalarFieldom(vk.g.clone()); for _ in 0..supported_bits { prepared_g.push(g.clone().into()); g.double_in_place(); @@ -407,12 +382,12 @@ impl PreparedVerifierKey { PartialEq(bound = ""), Eq(bound = "") )] -pub struct Commitment( +pub struct Commitment( /// The commitment is a group element. 
pub E::G1Affine, ); -impl PCCommitment for Commitment { +impl PCCommitment for Commitment { #[inline] fn empty() -> Self { Commitment(E::G1Affine::zero()) @@ -423,18 +398,18 @@ impl PCCommitment for Commitment { } } -impl ToConstraintField<::BasePrimeField> for Commitment +impl ToConstraintField<::BasePrimeField> for Commitment where - E::G1Affine: ToConstraintField<::BasePrimeField>, + E::G1Affine: ToConstraintField<::BasePrimeField>, { - fn to_field_elements(&self) -> Option::BasePrimeField>> { + fn to_field_elements(&self) -> Option::BasePrimeField>> { self.0.to_field_elements() } } -impl<'a, E: PairingEngine> AddAssign<(E::Fr, &'a Commitment)> for Commitment { +impl<'a, E: Pairing> AddAssign<(E::ScalarField, &'a Commitment)> for Commitment { #[inline] - fn add_assign(&mut self, (f, other): (E::Fr, &'a Commitment)) { + fn add_assign(&mut self, (f, other): (E::ScalarField, &'a Commitment)) { let mut other = other.0 * f; other.add_assign_mixed(&self.0); self.0 = other.into(); @@ -451,18 +426,18 @@ impl<'a, E: PairingEngine> AddAssign<(E::Fr, &'a Commitment)> for Commitment< PartialEq(bound = ""), Eq(bound = "") )] -pub struct PreparedCommitment( +pub struct PreparedCommitment( /// The commitment is a group element. pub Vec, ); -impl PreparedCommitment { +impl PreparedCommitment { /// prepare `PreparedCommitment` from `Commitment` pub fn prepare(comm: &Commitment) -> Self { let mut prepared_comm = Vec::::new(); - let mut cur = E::G1Projective::from(comm.0.clone()); + let mut cur = E::G1::ScalarFieldom(comm.0.clone()); - let supported_bits = E::Fr::MODULUS_BIT_SIZE as usize; + let supported_bits = E::ScalarField::MODULUS_BIT_SIZE as usize; for _ in 0..supported_bits { prepared_comm.push(cur.clone().into()); @@ -570,12 +545,10 @@ impl<'a, F: PrimeField, P: DenseUVPolynomial> AddAssign<(F, &'a Randomness { +pub struct Proof { /// This is a commitment to the witness polynomial; see [KZG10] for more details. 
pub w: E::G1Affine, /// This is the evaluation of the random polynomial at the point for which /// the evaluation proof was produced. - pub random_v: Option, + pub random_v: Option, } - -impl PCProof for Proof {} diff --git a/src/kzg10/mod.rs b/src/kzg10/mod.rs index 33fcdb18..0aea289f 100644 --- a/src/kzg10/mod.rs +++ b/src/kzg10/mod.rs @@ -6,8 +6,8 @@ //! This construction achieves extractability in the algebraic group model (AGM). use crate::{BTreeMap, Error, LabeledPolynomial, PCRandomness, ToString, Vec}; -use ark_ec::msm::{FixedBase, VariableBaseMSM}; -use ark_ec::{AffineCurve, PairingEngine, ProjectiveCurve}; +use ark_ec::{pairing::Pairing, CurveGroup}; +use ark_ec::{scalar_mul::fixed_base::FixedBase, VariableBaseMSM}; use ark_ff::{One, PrimeField, UniformRand, Zero}; use ark_poly::DenseUVPolynomial; use ark_std::{format, marker::PhantomData, ops::Div, ops::Mul, vec}; @@ -23,15 +23,15 @@ pub use data_structures::*; /// [Kate, Zaverucha and Goldbgerg][kzg10] /// /// [kzg10]: http://cacr.uwaterloo.ca/techreports/2010/cacr2010-10.pdf -pub struct KZG10> { +pub struct KZG10> { _engine: PhantomData, _poly: PhantomData

, } impl KZG10 where - E: PairingEngine, - P: DenseUVPolynomial, + E: Pairing, + P: DenseUVPolynomial, for<'a, 'b> &'a P: Div<&'b P, Output = P>, { /// Constructs public parameters when given as input the maximum degree `degree` @@ -45,12 +45,12 @@ where return Err(Error::DegreeIsZero); } let setup_time = start_timer!(|| format!("KZG10::Setup with degree {}", max_degree)); - let beta = E::Fr::rand(rng); - let g = E::G1Projective::rand(rng); - let gamma_g = E::G1Projective::rand(rng); + let beta = E::ScalarField::rand(rng); + let g = E::G1::rand(rng); + let gamma_g = E::G1::rand(rng); let h = E::G2Projective::rand(rng); - let mut powers_of_beta = vec![E::Fr::one()]; + let mut powers_of_beta = vec![E::ScalarField::one()]; let mut cur = beta; for _ in 0..max_degree { @@ -60,36 +60,31 @@ where let window_size = FixedBase::get_mul_window_size(max_degree + 1); - let scalar_bits = E::Fr::MODULUS_BIT_SIZE as usize; + let scalar_bits = E::ScalarField::MODULUS_BIT_SIZE as usize; let g_time = start_timer!(|| "Generating powers of G"); let g_table = FixedBase::get_window_table(scalar_bits, window_size, g); let powers_of_g = - FixedBase::msm::(scalar_bits, window_size, &g_table, &powers_of_beta); + FixedBase::msm::(scalar_bits, window_size, &g_table, &powers_of_beta); end_timer!(g_time); let gamma_g_time = start_timer!(|| "Generating powers of gamma * G"); let gamma_g_table = FixedBase::get_window_table(scalar_bits, window_size, gamma_g); - let mut powers_of_gamma_g = FixedBase::msm::( - scalar_bits, - window_size, - &gamma_g_table, - &powers_of_beta, - ); + let mut powers_of_gamma_g = + FixedBase::msm::(scalar_bits, window_size, &gamma_g_table, &powers_of_beta); // Add an additional power of gamma_g, because we want to be able to support // up to D queries. 
powers_of_gamma_g.push(powers_of_gamma_g.last().unwrap().mul(&beta)); end_timer!(gamma_g_time); - let powers_of_g = E::G1Projective::batch_normalization_into_affine(&powers_of_g); - let powers_of_gamma_g = - E::G1Projective::batch_normalization_into_affine(&powers_of_gamma_g) - .into_iter() - .enumerate() - .collect(); + let powers_of_g = E::G1::batch_normalization_into_affine(&powers_of_g); + let powers_of_gamma_g = E::G1::batch_normalization_into_affine(&powers_of_gamma_g) + .into_iter() + .enumerate() + .collect(); let neg_powers_of_h_time = start_timer!(|| "Generating negative powers of h in G2"); let neg_powers_of_h = if produce_g2_powers { - let mut neg_powers_of_beta = vec![E::Fr::one()]; - let mut cur = E::Fr::one() / β + let mut neg_powers_of_beta = vec![E::ScalarField::one()]; + let mut cur = E::ScalarField::one() / β for _ in 0..max_degree { neg_powers_of_beta.push(cur); cur /= β @@ -139,7 +134,7 @@ where polynomial: &P, hiding_bound: Option, rng: Option<&mut dyn RngCore>, - ) -> Result<(Commitment, Randomness), Error> { + ) -> Result<(Commitment, Randomness), Error> { Self::check_degree_is_too_large(polynomial.degree(), powers.size())?; let commit_time = start_timer!(|| format!( @@ -152,13 +147,13 @@ where skip_leading_zeros_and_convert_to_bigints(polynomial); let msm_time = start_timer!(|| "MSM to compute commitment to plaintext poly"); - let mut commitment = ::msm_bigint( + let mut commitment = ::msm_bigint( &powers.powers_of_g[num_leading_zeros..], &plain_coeffs, ); end_timer!(msm_time); - let mut randomness = Randomness::::empty(); + let mut randomness = Randomness::::empty(); if let Some(hiding_degree) = hiding_bound { let mut rng = rng.ok_or(Error::MissingRng)?; let sample_random_poly_time = start_timer!(|| format!( @@ -176,7 +171,7 @@ where let random_ints = convert_to_bigints(&randomness.blinding_polynomial.coeffs()); let msm_time = start_timer!(|| "MSM to compute commitment to random poly"); - let random_commitment = ::msm_bigint( + let 
random_commitment = ::msm_bigint( &powers.powers_of_gamma_g, random_ints.as_slice(), ) @@ -197,9 +192,9 @@ where pub fn compute_witness_polynomial( p: &P, point: P::Point, - randomness: &Randomness, + randomness: &Randomness, ) -> Result<(P, Option

), Error> { - let divisor = P::from_coefficients_vec(vec![-point, E::Fr::one()]); + let divisor = P::from_coefficients_vec(vec![-point, E::ScalarField::one()]); let witness_time = start_timer!(|| "Computing witness polynomial"); let witness_polynomial = p / &divisor; @@ -222,7 +217,7 @@ where pub(crate) fn open_with_witness_polynomial<'a>( powers: &Powers, point: P::Point, - randomness: &Randomness, + randomness: &Randomness, witness_polynomial: &P, hiding_witness_polynomial: Option<&P>, ) -> Result, Error> { @@ -231,7 +226,7 @@ where skip_leading_zeros_and_convert_to_bigints(witness_polynomial); let witness_comm_time = start_timer!(|| "Computing commitment to witness polynomial"); - let mut w = ::msm_bigint( + let mut w = ::msm_bigint( &powers.powers_of_g[num_leading_zeros..], &witness_coeffs, ); @@ -246,7 +241,7 @@ where let random_witness_coeffs = convert_to_bigints(&hiding_witness_polynomial.coeffs()); let witness_comm_time = start_timer!(|| "Computing commitment to random witness polynomial"); - w += &::msm_bigint( + w += &::msm_bigint( &powers.powers_of_gamma_g, &random_witness_coeffs, ); @@ -267,7 +262,7 @@ where powers: &Powers, p: &P, point: P::Point, - rand: &Randomness, + rand: &Randomness, ) -> Result, Error> { Self::check_degree_is_too_large(p.degree(), powers.size())?; let open_time = start_timer!(|| format!("Opening polynomial of degree {}", p.degree())); @@ -293,8 +288,8 @@ where pub fn check( vk: &VerifierKey, comm: &Commitment, - point: E::Fr, - value: E::Fr, + point: E::ScalarField, + value: E::ScalarField, proof: &Proof, ) -> Result { let check_time = start_timer!(|| "Checking evaluation"); @@ -316,23 +311,23 @@ where pub fn batch_check( vk: &VerifierKey, commitments: &[Commitment], - points: &[E::Fr], - values: &[E::Fr], + points: &[E::ScalarField], + values: &[E::ScalarField], proofs: &[Proof], rng: &mut R, ) -> Result { let check_time = start_timer!(|| format!("Checking {} evaluation proofs", commitments.len())); - let mut total_c = ::zero(); 
- let mut total_w = ::zero(); + let mut total_c = ::zero(); + let mut total_w = ::zero(); let combination_time = start_timer!(|| "Combining commitments and proofs"); - let mut randomizer = E::Fr::one(); + let mut randomizer = E::ScalarField::one(); // Instead of multiplying g and gamma_g in each turn, we simply accumulate // their coefficients and perform a final multiplication at the end. - let mut g_multiplier = E::Fr::zero(); - let mut gamma_g_multiplier = E::Fr::zero(); + let mut g_multiplier = E::ScalarField::zero(); + let mut gamma_g_multiplier = E::ScalarField::zero(); for (((c, z), v), proof) in commitments.iter().zip(points).zip(values).zip(proofs) { let w = proof.w; let mut temp = w.mul(*z); @@ -353,7 +348,7 @@ where end_timer!(combination_time); let to_affine_time = start_timer!(|| "Converting results to affine for pairing"); - let affine_points = E::G1Projective::batch_normalization_into_affine(&[-total_w, total_c]); + let affine_points = E::G1::batch_normalization_into_affine(&[-total_w, total_c]); let (total_w, total_c) = (affine_points[0], affine_points[1]); end_timer!(to_affine_time); @@ -403,7 +398,7 @@ where supported_degree: usize, max_degree: usize, enforced_degree_bounds: Option<&[usize]>, - p: &'a LabeledPolynomial, + p: &'a LabeledPolynomial, ) -> Result<(), Error> { if let Some(bound) = p.degree_bound() { let enforced_degree_bounds = @@ -456,15 +451,15 @@ mod tests { use ark_bls12_377::Bls12_377; use ark_bls12_381::Bls12_381; use ark_bls12_381::Fr; - use ark_ec::PairingEngine; + use ark_ec::pairing::Pairing; use ark_poly::univariate::DensePolynomial as DensePoly; use ark_std::test_rng; - type UniPoly_381 = DensePoly<::Fr>; - type UniPoly_377 = DensePoly<::Fr>; + type UniPoly_381 = DensePoly<::Fr>; + type UniPoly_377 = DensePoly<::Fr>; type KZG_Bls12_381 = KZG10; - impl> KZG10 { + impl> KZG10 { /// Specializes the public parameters for a given maximum degree `d` for polynomials /// `d` should be less that `pp.max_degree()`. 
pub(crate) fn trim( @@ -524,8 +519,8 @@ mod tests { fn end_to_end_test_template() -> Result<(), Error> where - E: PairingEngine, - P: DenseUVPolynomial, + E: Pairing, + P: DenseUVPolynomial, for<'a, 'b> &'a P: Div<&'b P, Output = P>, { let rng = &mut test_rng(); @@ -539,7 +534,7 @@ mod tests { let p = P::rand(degree, rng); let hiding_bound = Some(1); let (comm, rand) = KZG10::::commit(&ck, &p, hiding_bound, Some(rng))?; - let point = E::Fr::rand(rng); + let point = E::ScalarField::rand(rng); let value = p.evaluate(&point); let proof = KZG10::::open(&ck, &p, point, &rand)?; assert!( @@ -555,8 +550,8 @@ mod tests { fn linear_polynomial_test_template() -> Result<(), Error> where - E: PairingEngine, - P: DenseUVPolynomial, + E: Pairing, + P: DenseUVPolynomial, for<'a, 'b> &'a P: Div<&'b P, Output = P>, { let rng = &mut test_rng(); @@ -567,7 +562,7 @@ mod tests { let p = P::rand(1, rng); let hiding_bound = Some(1); let (comm, rand) = KZG10::::commit(&ck, &p, hiding_bound, Some(rng))?; - let point = E::Fr::rand(rng); + let point = E::ScalarField::rand(rng); let value = p.evaluate(&point); let proof = KZG10::::open(&ck, &p, point, &rand)?; assert!( @@ -583,8 +578,8 @@ mod tests { fn batch_check_test_template() -> Result<(), Error> where - E: PairingEngine, - P: DenseUVPolynomial, + E: Pairing, + P: DenseUVPolynomial, for<'a, 'b> &'a P: Div<&'b P, Output = P>, { let rng = &mut test_rng(); @@ -603,7 +598,7 @@ mod tests { let p = P::rand(degree, rng); let hiding_bound = Some(1); let (comm, rand) = KZG10::::commit(&ck, &p, hiding_bound, Some(rng))?; - let point = E::Fr::rand(rng); + let point = E::ScalarField::rand(rng); let value = p.evaluate(&point); let proof = KZG10::::open(&ck, &p, point, &rand)?; diff --git a/src/lib.rs b/src/lib.rs index 068906bf..bc0a7fb1 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -7,7 +7,7 @@ #![deny(missing_docs)] #![deny(unused_imports)] #![deny(renamed_and_removed_lints, stable_features, unused_allocation)] -#![deny(unused_comparisons, 
bare_trait_objects, unused_must_use, const_err)] +#![deny(unused_comparisons, bare_trait_objects, unused_must_use)] #![forbid(unsafe_code)] #[allow(unused)] @@ -25,7 +25,6 @@ use ark_std::{ fmt::Debug, hash::Hash, iter::FromIterator, - rc::Rc, string::{String, ToString}, vec::Vec, }; @@ -163,7 +162,7 @@ pub trait PolynomialCommitment, S: Cryptographic /// The commitment randomness. type Randomness: PCRandomness; /// The evaluation proof for a single point. - type Proof: PCProof + Clone; + type Proof: Clone; /// The evaluation proof for a query set. type BatchProof: Clone + From> diff --git a/src/marlin/marlin_pc/data_structures.rs b/src/marlin/marlin_pc/data_structures.rs index 955312d5..98fce1ac 100644 --- a/src/marlin/marlin_pc/data_structures.rs +++ b/src/marlin/marlin_pc/data_structures.rs @@ -2,10 +2,9 @@ use crate::{ DenseUVPolynomial, PCCommitment, PCCommitterKey, PCPreparedCommitment, PCPreparedVerifierKey, PCRandomness, PCVerifierKey, Vec, }; -use ark_ec::{PairingEngine, ProjectiveCurve}; +use ark_ec::pairing::Pairing; use ark_ff::{Field, PrimeField, ToConstraintField}; -use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, SerializationError}; -use ark_std::io::{Read, Write}; +use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; use ark_std::ops::{Add, AddAssign}; use ark_std::rand::RngCore; @@ -22,7 +21,7 @@ pub type UniversalParams = kzg10::UniversalParams; Clone(bound = ""), Debug(bound = "") )] -pub struct CommitterKey { +pub struct CommitterKey { /// The key used to commit to polynomials. 
pub powers: Vec, @@ -42,7 +41,7 @@ pub struct CommitterKey { pub max_degree: usize, } -impl CommitterKey { +impl CommitterKey { /// Obtain powers for the underlying KZG10 construction pub fn powers<'a>(&'a self) -> kzg10::Powers<'a, E> { kzg10::Powers { @@ -82,7 +81,7 @@ impl CommitterKey { } } -impl PCCommitterKey for CommitterKey { +impl PCCommitterKey for CommitterKey { fn max_degree(&self) -> usize { self.max_degree } @@ -95,7 +94,7 @@ impl PCCommitterKey for CommitterKey { /// `VerifierKey` is used to check evaluation proofs for a given commitment. #[derive(Derivative, CanonicalSerialize, CanonicalDeserialize)] #[derivative(Default(bound = ""), Clone(bound = ""), Debug(bound = ""))] -pub struct VerifierKey { +pub struct VerifierKey { /// The verification key for the underlying KZG10 scheme. pub vk: kzg10::VerifierKey, /// Information required to enforce degree bounds. Each pair @@ -111,7 +110,7 @@ pub struct VerifierKey { pub supported_degree: usize, } -impl VerifierKey { +impl VerifierKey { /// Find the appropriate shift for the degree bound. 
pub fn get_shift_power(&self, bound: usize) -> Option { self.degree_bounds_and_shift_powers.as_ref().and_then(|v| { @@ -122,7 +121,7 @@ impl VerifierKey { } } -impl PCVerifierKey for VerifierKey { +impl PCVerifierKey for VerifierKey { fn max_degree(&self) -> usize { self.max_degree } @@ -132,18 +131,18 @@ impl PCVerifierKey for VerifierKey { } } -impl ToConstraintField<::BasePrimeField> for VerifierKey +impl ToConstraintField<::BasePrimeField> for VerifierKey where - E::G1Affine: ToConstraintField<::BasePrimeField>, - E::G2Affine: ToConstraintField<::BasePrimeField>, + E::G1Affine: ToConstraintField<::BasePrimeField>, + E::G2Affine: ToConstraintField<::BasePrimeField>, { - fn to_field_elements(&self) -> Option::BasePrimeField>> { + fn to_field_elements(&self) -> Option::BasePrimeField>> { let mut res = Vec::new(); res.extend_from_slice(&self.vk.to_field_elements().unwrap()); if let Some(degree_bounds_and_shift_powers) = &self.degree_bounds_and_shift_powers { for (d, shift_power) in degree_bounds_and_shift_powers.iter() { - let d_elem: ::BasePrimeField = (*d as u64).into(); + let d_elem: ::BasePrimeField = (*d as u64).into(); res.push(d_elem); res.extend_from_slice(&shift_power.to_field_elements().unwrap()); @@ -157,7 +156,7 @@ where /// `PreparedVerifierKey` is used to check evaluation proofs for a given commitment. #[derive(Derivative)] #[derivative(Clone(bound = ""), Debug(bound = ""))] -pub struct PreparedVerifierKey { +pub struct PreparedVerifierKey { /// The verification key for the underlying KZG10 scheme. pub prepared_vk: kzg10::PreparedVerifierKey, /// Information required to enforce degree bounds. 
Each pair @@ -172,12 +171,12 @@ pub struct PreparedVerifierKey { pub supported_degree: usize, } -impl PCPreparedVerifierKey> for PreparedVerifierKey { +impl PCPreparedVerifierKey> for PreparedVerifierKey { /// prepare `PreparedVerifierKey` from `VerifierKey` fn prepare(vk: &VerifierKey) -> Self { let prepared_vk = kzg10::PreparedVerifierKey::::prepare(&vk.vk); - let supported_bits = E::Fr::MODULUS_BIT_SIZE as usize; + let supported_bits = E::ScalarField::MODULUS_BIT_SIZE as usize; let prepared_degree_bounds_and_shift_powers: Option)>> = if vk.degree_bounds_and_shift_powers.is_some() { @@ -189,7 +188,7 @@ impl PCPreparedVerifierKey> for PreparedVerifie for (d, shift_power) in degree_bounds_and_shift_powers { let mut prepared_shift_power = Vec::::new(); - let mut cur = E::G1Projective::from(shift_power.clone()); + let mut cur = E::G1::ScalarFieldom(shift_power.clone()); for _ in 0..supported_bits { prepared_shift_power.push(cur.clone().into()); cur.double_in_place(); @@ -223,7 +222,7 @@ impl PCPreparedVerifierKey> for PreparedVerifie PartialEq(bound = ""), Eq(bound = "") )] -pub struct Commitment { +pub struct Commitment { /// A KZG10 commitment to the polynomial. 
pub comm: kzg10::Commitment, @@ -233,11 +232,11 @@ pub struct Commitment { pub shifted_comm: Option>, } -impl ToConstraintField<::BasePrimeField> for Commitment +impl ToConstraintField<::BasePrimeField> for Commitment where - E::G1Affine: ToConstraintField<::BasePrimeField>, + E::G1Affine: ToConstraintField<::BasePrimeField>, { - fn to_field_elements(&self) -> Option::BasePrimeField>> { + fn to_field_elements(&self) -> Option::BasePrimeField>> { let mut res = Vec::new(); res.extend_from_slice(&self.comm.to_field_elements().unwrap()); @@ -249,7 +248,7 @@ where } } -impl PCCommitment for Commitment { +impl PCCommitment for Commitment { #[inline] fn empty() -> Self { Self { @@ -272,12 +271,12 @@ impl PCCommitment for Commitment { PartialEq(bound = ""), Eq(bound = "") )] -pub struct PreparedCommitment { +pub struct PreparedCommitment { pub(crate) prepared_comm: kzg10::PreparedCommitment, pub(crate) shifted_comm: Option>, } -impl PCPreparedCommitment> for PreparedCommitment { +impl PCPreparedCommitment> for PreparedCommitment { /// Prepare commitment to a polynomial that optionally enforces a degree bound. 
fn prepare(comm: &Commitment) -> Self { let prepared_comm = kzg10::PreparedCommitment::::prepare(&comm.comm); diff --git a/src/marlin/marlin_pc/mod.rs b/src/marlin/marlin_pc/mod.rs index c52f2790..61ee78fa 100644 --- a/src/marlin/marlin_pc/mod.rs +++ b/src/marlin/marlin_pc/mod.rs @@ -3,7 +3,7 @@ use crate::{BTreeMap, BTreeSet, ToString, Vec}; use crate::{BatchLCProof, Error, Evaluations, QuerySet}; use crate::{LabeledCommitment, LabeledPolynomial, LinearCombination}; use crate::{PCRandomness, PCUniversalParams, PolynomialCommitment}; -use ark_ec::{AffineCurve, PairingEngine, ProjectiveCurve}; +use ark_ec::pairing::Pairing; use ark_ff::Zero; use ark_poly::DenseUVPolynomial; use ark_std::rand::RngCore; @@ -26,13 +26,13 @@ pub use data_structures::*; /// /// [kzg]: http://cacr.uwaterloo.ca/techreports/2010/cacr2010-10.pdf /// [marlin]: https://eprint.iacr.org/2019/104 -pub struct MarlinKZG10, S: CryptographicSponge> { +pub struct MarlinKZG10, S: CryptographicSponge> { _engine: PhantomData, _poly: PhantomData

, _sponge: PhantomData, } -pub(crate) fn shift_polynomial>( +pub(crate) fn shift_polynomial>( ck: &CommitterKey, p: &P, degree_bound: usize, @@ -47,16 +47,16 @@ pub(crate) fn shift_polynomial>( let largest_enforced_degree_bound = enforced_degree_bounds.last().unwrap(); let mut shifted_polynomial_coeffs = - vec![E::Fr::zero(); largest_enforced_degree_bound - degree_bound]; + vec![E::ScalarField::zero(); largest_enforced_degree_bound - degree_bound]; shifted_polynomial_coeffs.extend_from_slice(&p.coeffs()); P::from_coefficients_vec(shifted_polynomial_coeffs) } } -impl PolynomialCommitment for MarlinKZG10 +impl PolynomialCommitment for MarlinKZG10 where - E: PairingEngine, - P: DenseUVPolynomial, + E: Pairing, + P: DenseUVPolynomial, S: CryptographicSponge, for<'a, 'b> &'a P: Div<&'b P, Output = P>, { @@ -66,7 +66,7 @@ where type PreparedVerifierKey = PreparedVerifierKey; type Commitment = Commitment; type PreparedCommitment = PreparedCommitment; - type Randomness = Randomness; + type Randomness = Randomness; type Proof = kzg10::Proof; type BatchProof = Vec; type Error = Error; @@ -175,7 +175,7 @@ where /// Outputs a commitment to `polynomial`. fn commit<'a>( ck: &Self::CommitterKey, - polynomials: impl IntoIterator>, + polynomials: impl IntoIterator>, rng: Option<&mut dyn RngCore>, ) -> Result< ( @@ -248,10 +248,10 @@ where /// On input a polynomial `p` and a point `point`, outputs a proof for the same. 
fn open<'a>( ck: &Self::CommitterKey, - labeled_polynomials: impl IntoIterator>, + labeled_polynomials: impl IntoIterator>, _commitments: impl IntoIterator>, point: &'a P::Point, - opening_challenges: &mut ChallengeGenerator, + opening_challenges: &mut ChallengeGenerator, rands: impl IntoIterator, _rng: Option<&mut dyn RngCore>, ) -> Result @@ -345,9 +345,9 @@ where vk: &Self::VerifierKey, commitments: impl IntoIterator>, point: &'a P::Point, - values: impl IntoIterator, + values: impl IntoIterator, proof: &Self::Proof, - opening_challenges: &mut ChallengeGenerator, + opening_challenges: &mut ChallengeGenerator, _rng: Option<&mut dyn RngCore>, ) -> Result where @@ -371,9 +371,9 @@ where vk: &Self::VerifierKey, commitments: impl IntoIterator>, query_set: &QuerySet, - values: &Evaluations, + values: &Evaluations, proof: &Self::BatchProof, - opening_challenges: &mut ChallengeGenerator, + opening_challenges: &mut ChallengeGenerator, rng: &mut R, ) -> Result where @@ -403,14 +403,14 @@ where fn open_combinations<'a>( ck: &Self::CommitterKey, - lc_s: impl IntoIterator>, - polynomials: impl IntoIterator>, + lc_s: impl IntoIterator>, + polynomials: impl IntoIterator>, commitments: impl IntoIterator>, query_set: &QuerySet, - opening_challenges: &mut ChallengeGenerator, + opening_challenges: &mut ChallengeGenerator, rands: impl IntoIterator, rng: Option<&mut dyn RngCore>, - ) -> Result, Self::Error> + ) -> Result, Self::Error> where P: 'a, Self::Randomness: 'a, @@ -432,12 +432,12 @@ where /// committed in `labeled_commitments`. 
fn check_combinations<'a, R: RngCore>( vk: &Self::VerifierKey, - lc_s: impl IntoIterator>, + lc_s: impl IntoIterator>, commitments: impl IntoIterator>, query_set: &QuerySet, - evaluations: &Evaluations, - proof: &BatchLCProof, - opening_challenges: &mut ChallengeGenerator, + evaluations: &Evaluations, + proof: &BatchLCProof, + opening_challenges: &mut ChallengeGenerator, rng: &mut R, ) -> Result where @@ -459,10 +459,10 @@ where /// of the polynomials at the points in the query set. fn batch_open<'a>( ck: &CommitterKey, - labeled_polynomials: impl IntoIterator>, + labeled_polynomials: impl IntoIterator>, commitments: impl IntoIterator>>, query_set: &QuerySet, - opening_challenges: &mut ChallengeGenerator, + opening_challenges: &mut ChallengeGenerator, rands: impl IntoIterator, rng: Option<&mut dyn RngCore>, ) -> Result>, Error> @@ -538,41 +538,41 @@ mod tests { use super::MarlinKZG10; use ark_bls12_377::Bls12_377; use ark_bls12_381::Bls12_381; - use ark_ec::PairingEngine; + use ark_ec::pairing::Pairing; use ark_ff::UniformRand; use ark_poly::{univariate::DensePolynomial as DensePoly, DenseUVPolynomial}; use ark_sponge::poseidon::PoseidonSponge; use rand_chacha::ChaCha20Rng; - type UniPoly_381 = DensePoly<::Fr>; - type UniPoly_377 = DensePoly<::Fr>; + type UniPoly_381 = DensePoly<::Fr>; + type UniPoly_377 = DensePoly<::Fr>; type PC = MarlinKZG10; - type Sponge_Bls12_381 = PoseidonSponge<::Fr>; - type Sponge_Bls12_377 = PoseidonSponge<::Fr>; + type Sponge_Bls12_381 = PoseidonSponge<::Fr>; + type Sponge_Bls12_377 = PoseidonSponge<::Fr>; type PC_Bls12_381 = PC; type PC_Bls12_377 = PC; - fn rand_poly( + fn rand_poly( degree: usize, _: Option, rng: &mut ChaCha20Rng, - ) -> DensePoly { - DensePoly::::rand(degree, rng) + ) -> DensePoly { + DensePoly::::rand(degree, rng) } - fn constant_poly( + fn constant_poly( _: usize, _: Option, rng: &mut ChaCha20Rng, - ) -> DensePoly { - DensePoly::::from_coefficients_slice(&[E::Fr::rand(rng)]) + ) -> DensePoly { + 
DensePoly::::from_coefficients_slice(&[E::ScalarField::rand(rng)]) } - fn rand_point(_: Option, rng: &mut ChaCha20Rng) -> E::Fr { - E::Fr::rand(rng) + fn rand_point(_: Option, rng: &mut ChaCha20Rng) -> E::ScalarField { + E::ScalarField::rand(rng) } #[test] diff --git a/src/marlin/marlin_pst13_pc/data_structures.rs b/src/marlin/marlin_pst13_pc/data_structures.rs index 62c9d31f..a5923c56 100644 --- a/src/marlin/marlin_pst13_pc/data_structures.rs +++ b/src/marlin/marlin_pst13_pc/data_structures.rs @@ -1,9 +1,8 @@ use crate::{BTreeMap, Vec}; use crate::{ - PCCommitterKey, PCPreparedVerifierKey, PCProof, PCRandomness, PCUniversalParams, PCVerifierKey, + PCCommitterKey, PCPreparedVerifierKey, PCRandomness, PCUniversalParams, PCVerifierKey, }; -use ark_ec::PairingEngine; -use ark_ff::Zero; +use ark_ec::pairing::Pairing; use ark_poly::DenseMVPolynomial; use ark_std::{ io::{Read, Write}, @@ -11,7 +10,9 @@ use ark_std::{ ops::{Add, AddAssign, Index}, }; -use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, SerializationError}; +use ark_serialize::{ + CanonicalDeserialize, Valid, CanonicalSerialize, Compress, SerializationError, Validate, +}; use ark_std::rand::RngCore; /// `UniversalParams` are the universal parameters for the MarlinPST13 scheme. 
@@ -19,9 +20,9 @@ use ark_std::rand::RngCore; #[derivative(Default(bound = ""), Clone(bound = ""), Debug(bound = ""))] pub struct UniversalParams where - E: PairingEngine, - P: DenseMVPolynomial, - P::Point: Index, + E: Pairing, + P: DenseMVPolynomial, + P::Point: Index, { /// Contains group elements corresponding to all possible monomials with /// `num_vars` and maximum degree `max_degree` evaluated at `\beta` @@ -48,30 +49,57 @@ where pub max_degree: usize, } +impl Valid for UniversalParams +where + E: Pairing, + P: DenseMVPolynomial, + P::Point: Index, +{ + fn check(&self) -> Result<(), SerializationError> { + if self.powers_of_g.len() != (self.max_degree + 1) * self.num_vars { + return false; + } + + if self.beta_h.len() != self.num_vars { + return false; + } + + if self.prepared_beta_h.len() != self.num_vars { + return false; + } + Ok(()) + } +} + impl CanonicalSerialize for UniversalParams where - E: PairingEngine, - P: DenseMVPolynomial, - P::Point: Index, + E: Pairing, + P: DenseMVPolynomial, + P::Point: Index, { - fn serialize(&self, mut writer: W) -> Result<(), SerializationError> { - self.powers_of_g.serialize(&mut writer)?; - self.gamma_g.serialize(&mut writer)?; - self.powers_of_gamma_g.serialize(&mut writer)?; - self.h.serialize(&mut writer)?; - self.beta_h.serialize(&mut writer)?; - self.num_vars.serialize(&mut writer)?; - self.max_degree.serialize(&mut writer) - } - - fn serialized_size(&self) -> usize { - self.powers_of_g.serialized_size() - + self.gamma_g.serialized_size() - + self.powers_of_gamma_g.serialized_size() - + self.h.serialized_size() - + self.beta_h.serialized_size() - + self.num_vars.serialized_size() - + self.max_degree.serialized_size() + fn serialize_with_mode( + &self, + writer: W, + compress: Compress, + ) -> Result<(), SerializationError> { + self.powers_of_g + .serialize_with_mode(&mut writer, compress)?; + self.gamma_g.serialize(&mut writer, compress)?; + self.powers_of_gamma_g.serialize(&mut writer, compress)?; + 
self.h.serialize(&mut writer, compress)?; + self.beta_h.serialize(&mut writer, compress)?; + self.num_vars.serialize(&mut writer, compress)?; + self.max_degree.serialize(&mut writer, compress) + } + + fn serialized_size(&self, compress: Compress) -> usize { + self.powers_of_g.serialized_size(compress) + + self.gamma_g.serialized_size(compress) + + self.powers_of_gamma_g.serialized_size(compress) + + self.h.serialized_size(compress) + + self.beta_h.serialized_size(compress) + + self.num_vars.serialized_size(compress) + + self.max_degree.serialized_size(compress) } fn serialize_uncompressed(&self, mut writer: W) -> Result<(), SerializationError> { @@ -84,16 +112,6 @@ where self.max_degree.serialize_uncompressed(&mut writer) } - fn serialize_unchecked(&self, mut writer: W) -> Result<(), SerializationError> { - self.powers_of_g.serialize_unchecked(&mut writer)?; - self.gamma_g.serialize_unchecked(&mut writer)?; - self.powers_of_gamma_g.serialize_unchecked(&mut writer)?; - self.h.serialize_unchecked(&mut writer)?; - self.beta_h.serialize_unchecked(&mut writer)?; - self.num_vars.serialize_unchecked(&mut writer)?; - self.max_degree.serialize_unchecked(&mut writer) - } - fn uncompressed_size(&self) -> usize { self.powers_of_g.uncompressed_size() + self.gamma_g.uncompressed_size() @@ -103,22 +121,40 @@ where + self.num_vars.uncompressed_size() + self.max_degree.uncompressed_size() } + fn compressed_size(&self) -> usize { + self.powers_of_g.compressed_size() + + self.gamma_g.compressed_size() + + self.powers_of_gamma_g.compressed_size() + + self.h.compressed_size() + + self.beta_h.compressed_size() + + self.num_vars.compressed_size() + + self.max_degree.compressed_size() + } } impl CanonicalDeserialize for UniversalParams where - E: PairingEngine, - P: DenseMVPolynomial, - P::Point: Index, + E: Pairing, + P: DenseMVPolynomial, + P::Point: Index, { - fn deserialize(mut reader: R) -> Result { - let powers_of_g = BTreeMap::::deserialize(&mut reader)?; - let gamma_g = 
E::G1Affine::deserialize(&mut reader)?; - let powers_of_gamma_g = Vec::>::deserialize(&mut reader)?; - let h = E::G2Affine::deserialize(&mut reader)?; - let beta_h = Vec::::deserialize(&mut reader)?; - let num_vars = usize::deserialize(&mut reader)?; - let max_degree = usize::deserialize(&mut reader)?; + fn deserialize_with_mode( + reader: R, + compress: Compress, + validate: Validate, + ) -> Result { + let powers_of_g = BTreeMap::::deserialize_with_mode( + &mut reader, + compress, + validate, + )?; + let gamma_g = E::G1Affine::deserialize_with_mode(&mut reader, compress, validate)?; + let powers_of_gamma_g = + Vec::>::deserialize_with_mode(&mut reader, compress, validate)?; + let h = E::G2Affine::deserialize_with_mode(&mut reader, compress, validate)?; + let beta_h = Vec::::deserialize_with_mode(&mut reader, compress, validate)?; + let num_vars = usize::deserialize_with_mode(&mut reader, compress, validate)?; + let max_degree = usize::deserialize_with_mode(&mut reader, compress, validate)?; let prepared_beta_h = beta_h.iter().map(|x| x.clone().into()).collect(); Ok(Self { @@ -135,57 +171,30 @@ where } fn deserialize_uncompressed(mut reader: R) -> Result { - let powers_of_g = BTreeMap::::deserialize_uncompressed(&mut reader)?; - let gamma_g = E::G1Affine::deserialize_uncompressed(&mut reader)?; - let powers_of_gamma_g = Vec::>::deserialize_uncompressed(&mut reader)?; - let h = E::G2Affine::deserialize_uncompressed(&mut reader)?; - let beta_h = Vec::::deserialize_uncompressed(&mut reader)?; - let num_vars = usize::deserialize_uncompressed(&mut reader)?; - let max_degree = usize::deserialize_uncompressed(&mut reader)?; - - let prepared_beta_h = beta_h.iter().map(|x| x.clone().into()).collect(); - Ok(Self { - powers_of_g, - gamma_g, - powers_of_gamma_g, - h, - beta_h, - prepared_h: h.into(), - prepared_beta_h, - num_vars, - max_degree, - }) + Self::deserialize_with_mode(reader, Compress::No, Validate::Yes) } - - fn deserialize_unchecked(mut reader: R) -> Result { - 
let powers_of_g = BTreeMap::::deserialize_unchecked(&mut reader)?; - let gamma_g = E::G1Affine::deserialize_unchecked(&mut reader)?; - let powers_of_gamma_g = Vec::>::deserialize_unchecked(&mut reader)?; - let h = E::G2Affine::deserialize_unchecked(&mut reader)?; - let beta_h = Vec::::deserialize_unchecked(&mut reader)?; - let num_vars = usize::deserialize_unchecked(&mut reader)?; - let max_degree = usize::deserialize_unchecked(&mut reader)?; - - let prepared_beta_h = beta_h.iter().map(|x| x.clone().into()).collect(); - Ok(Self { - powers_of_g, - gamma_g, - powers_of_gamma_g, - h, - beta_h, - prepared_h: h.into(), - prepared_beta_h, - num_vars, - max_degree, - }) + fn deserialize_uncompressed_unchecked(reader: R) -> Result { + Self::deserialize_with_mode(reader, Compress::No, Validate::No) + } + fn deserialize_compressed_unchecked( + mut reader: R, + ) -> Result { + Self::deserialize_with_mode(reader, Compress::Yes, Validate::No) + } + fn deserialize_compressed(mut reader: R) -> Result { + Self::deserialize_with_mode( + reader, + ark_serialize::Compress::Yes, + ark_serialize::Validate::Yes, + ) } } impl PCUniversalParams for UniversalParams where - E: PairingEngine, - P: DenseMVPolynomial, - P::Point: Index, + E: Pairing, + P: DenseMVPolynomial, + P::Point: Index, { fn max_degree(&self) -> usize { self.max_degree @@ -198,9 +207,9 @@ where #[derivative(Hash(bound = ""), Clone(bound = ""), Debug(bound = ""))] pub struct CommitterKey where - E: PairingEngine, - P: DenseMVPolynomial, - P::Point: Index, + E: Pairing, + P: DenseMVPolynomial, + P::Point: Index, { /// Contains group elements corresponding to all possible monomials with /// `num_vars` and maximum degree `supported_degree` evaluated at `\beta` @@ -222,9 +231,9 @@ where impl PCCommitterKey for CommitterKey where - E: PairingEngine, - P: DenseMVPolynomial, - P::Point: Index, + E: Pairing, + P: DenseMVPolynomial, + P::Point: Index, { fn max_degree(&self) -> usize { self.max_degree @@ -238,7 +247,7 @@ where /// 
`VerifierKey` is used to check evaluation proofs for a given commitment. #[derive(Derivative)] #[derivative(Default(bound = ""), Clone(bound = ""), Debug(bound = ""))] -pub struct VerifierKey { +pub struct VerifierKey { /// The generator of G1. pub g: E::G1Affine, /// The generator of G1 that is used for making a commitment hiding. @@ -263,68 +272,80 @@ pub struct VerifierKey { /// from. pub max_degree: usize, } - -impl CanonicalSerialize for VerifierKey { - fn serialize(&self, mut writer: W) -> Result<(), SerializationError> { - self.g.serialize(&mut writer)?; - self.gamma_g.serialize(&mut writer)?; - self.h.serialize(&mut writer)?; - self.beta_h.serialize(&mut writer)?; - self.num_vars.serialize(&mut writer)?; - self.supported_degree.serialize(&mut writer)?; - self.max_degree.serialize(&mut writer) +impl Valid for VerifierKey { + fn check(&self) -> Result<(), SerializationError> { + if self.num_vars == 0 { + return Err(SerializationError::InvalidData); + } + if self.supported_degree == 0 { + return Err(SerializationError::InvalidData); + } + if self.max_degree == 0 { + return Err(SerializationError::InvalidData); + } + if self.beta_h.len() != self.num_vars { + return Err(SerializationError::InvalidData); + } + if self.prepared_beta_h.len() != self.num_vars { + return Err(SerializationError::InvalidData); + } + Ok(()) + } +} +impl CanonicalSerialize for VerifierKey { + fn serialize_with_mode( + &self, + writer: W, + compress: Compress, + ) -> Result<(), SerializationError> { + self.g.serialize_with_mode(&mut writer, compress)?; + self.gamma_g.serialize_with_mode(&mut writer, compress)?; + self.h.serialize_with_mode(&mut writer, compress)?; + self.beta_h.serialize_with_mode(&mut writer, compress)?; + self.num_vars.serialize_with_mode(&mut writer, compress)?; + self.supported_degree + .serialize_with_mode(&mut writer, compress)?; + self.max_degree.serialize_with_mode(&mut writer, compress) } - fn serialized_size(&self) -> usize { - self.g.serialized_size() - + 
self.gamma_g.serialized_size() - + self.h.serialized_size() - + self.beta_h.serialized_size() - + self.num_vars.serialized_size() - + self.supported_degree.serialized_size() - + self.max_degree.serialized_size() + fn serialized_size(&self, compress: Compress) -> usize { + self.g.serialized_size(compress) + + self.gamma_g.serialized_size(compress) + + self.h.serialized_size(compress) + + self.beta_h.serialized_size(compress) + + self.num_vars.serialized_size(compress) + + self.supported_degree.serialized_size(compress) + + self.max_degree.serialized_size(compress) } fn serialize_uncompressed(&self, mut writer: W) -> Result<(), SerializationError> { - self.g.serialize_uncompressed(&mut writer)?; - self.gamma_g.serialize_uncompressed(&mut writer)?; - self.h.serialize_uncompressed(&mut writer)?; - self.beta_h.serialize_uncompressed(&mut writer)?; - self.num_vars.serialize_uncompressed(&mut writer)?; - self.supported_degree.serialize_uncompressed(&mut writer)?; - self.max_degree.serialize_uncompressed(&mut writer) + Self::serialize_with_mode(&self, writer, Compress::No) } - - fn serialize_unchecked(&self, mut writer: W) -> Result<(), SerializationError> { - self.g.serialize_unchecked(&mut writer)?; - self.gamma_g.serialize_unchecked(&mut writer)?; - self.h.serialize_unchecked(&mut writer)?; - self.beta_h.serialize_unchecked(&mut writer)?; - self.num_vars.serialize_unchecked(&mut writer)?; - self.supported_degree.serialize_unchecked(&mut writer)?; - self.max_degree.serialize_unchecked(&mut writer) + fn serialize_compressed(&self, mut writer: W) -> Result<(), SerializationError> { + Self::serialize_with_mode(&self, writer, Compress::Yes) } fn uncompressed_size(&self) -> usize { - self.g.uncompressed_size() - + self.gamma_g.uncompressed_size() - + self.h.uncompressed_size() - + self.beta_h.uncompressed_size() - + self.num_vars.uncompressed_size() - + self.supported_degree.uncompressed_size() - + self.max_degree.uncompressed_size() + Self::serialized_size(&self, 
Compress::No) + } + fn compressed_size(&self) -> usize { + Self::serialized_size(&self, Compress::Yes) } } -impl CanonicalDeserialize for VerifierKey { - fn deserialize(mut reader: R) -> Result { - let g = E::G1Affine::deserialize(&mut reader)?; - let gamma_g = E::G1Affine::deserialize(&mut reader)?; - let h = E::G2Affine::deserialize(&mut reader)?; - let beta_h = Vec::::deserialize(&mut reader)?; - let num_vars = usize::deserialize(&mut reader)?; - let supported_degree = usize::deserialize(&mut reader)?; - let max_degree = usize::deserialize(&mut reader)?; +impl CanonicalDeserialize for VerifierKey { + fn deserialize_with_mode( + mut reader: R, + compress: Compress, + validate: Validate, + ) -> Result { + let g = E::G1Affine::deserialize_with_mode(&mut reader, compress, validate)?; + let gamma_g = E::G1Affine::deserialize_with_mode(&mut reader, compress, validate)?; + let h = E::G2Affine::deserialize_with_mode(&mut reader, compress, validate)?; + let beta_h = Vec::::deserialize_with_mode(&mut reader, compress, validate)?; + let num_vars = usize::deserialize_with_mode(&mut reader, compress, validate)?; + let supported_degree = usize::deserialize_with_mode(&mut reader, compress, validate)?; + let max_degree = usize::deserialize_with_mode(&mut reader, compress, validate)?; let prepared_beta_h = beta_h.iter().map(|x| x.clone().into()).collect(); Ok(Self { @@ -339,55 +360,21 @@ impl CanonicalDeserialize for VerifierKey { max_degree, }) } - - fn deserialize_uncompressed(mut reader: R) -> Result { - let g = E::G1Affine::deserialize_uncompressed(&mut reader)?; - let gamma_g = E::G1Affine::deserialize_uncompressed(&mut reader)?; - let h = E::G2Affine::deserialize_uncompressed(&mut reader)?; - let beta_h = Vec::::deserialize_uncompressed(&mut reader)?; - let num_vars = usize::deserialize_uncompressed(&mut reader)?; - let supported_degree = usize::deserialize_uncompressed(&mut reader)?; - let max_degree = usize::deserialize_uncompressed(&mut reader)?; - - let prepared_beta_h 
= beta_h.iter().map(|x| x.clone().into()).collect(); - Ok(Self { - g, - gamma_g, - h, - beta_h, - prepared_h: h.into(), - prepared_beta_h, - num_vars, - supported_degree, - max_degree, - }) + fn deserialize_compressed(reader: R) -> Result { + Self::deserialize_with_mode(reader, Compress::Yes, Validate::Yes) } - - fn deserialize_unchecked(mut reader: R) -> Result { - let g = E::G1Affine::deserialize_unchecked(&mut reader)?; - let gamma_g = E::G1Affine::deserialize_unchecked(&mut reader)?; - let h = E::G2Affine::deserialize_unchecked(&mut reader)?; - let beta_h = Vec::::deserialize_unchecked(&mut reader)?; - let num_vars = usize::deserialize_unchecked(&mut reader)?; - let supported_degree = usize::deserialize_unchecked(&mut reader)?; - let max_degree = usize::deserialize_unchecked(&mut reader)?; - - let prepared_beta_h = beta_h.iter().map(|x| x.clone().into()).collect(); - Ok(Self { - g, - gamma_g, - h, - beta_h, - prepared_h: h.into(), - prepared_beta_h, - num_vars, - supported_degree, - max_degree, - }) + fn deserialize_compressed_unchecked(reader: R) -> Result { + Self::deserialize_with_mode(reader, Compress::Yes, Validate::No) + } + fn deserialize_uncompressed(reader: R) -> Result { + Self::deserialize_with_mode(reader, Compress::No, Validate::Yes) + } + fn deserialize_uncompressed_unchecked(reader: R) -> Result { + Self::deserialize_with_mode(reader, Compress::No, Validate::No) } } -impl PCVerifierKey for VerifierKey { +impl PCVerifierKey for VerifierKey { fn max_degree(&self) -> usize { self.max_degree } @@ -400,7 +387,7 @@ impl PCVerifierKey for VerifierKey { /// Nothing to do to prepare this verifier key (for now). 
pub type PreparedVerifierKey = VerifierKey; -impl PCPreparedVerifierKey> for PreparedVerifierKey { +impl PCPreparedVerifierKey> for PreparedVerifierKey { /// prepare `PreparedVerifierKey` from `VerifierKey` fn prepare(vk: &VerifierKey) -> Self { vk.clone() @@ -418,9 +405,9 @@ impl PCPreparedVerifierKey> for PreparedVerifie )] pub struct Randomness where - E: PairingEngine, - P: DenseMVPolynomial, - P::Point: Index, + E: Pairing, + P: DenseMVPolynomial, + P::Point: Index, { /// A multivariate polynomial where each monomial is univariate with random coefficient pub blinding_polynomial: P, @@ -429,9 +416,9 @@ where impl Randomness where - E: PairingEngine, - P: DenseMVPolynomial, - P::Point: Index, + E: Pairing, + P: DenseMVPolynomial, + P::Point: Index, { /// Does `self` provide any hiding properties to the corresponding commitment? /// `self.is_hiding() == true` only if the underlying polynomial is non-zero. @@ -449,9 +436,9 @@ where impl PCRandomness for Randomness where - E: PairingEngine, - P: DenseMVPolynomial, - P::Point: Index, + E: Pairing, + P: DenseMVPolynomial, + P::Point: Index, { fn empty() -> Self { Self { @@ -474,12 +461,12 @@ where } } -impl<'a, E: PairingEngine, P: DenseMVPolynomial> Add<&'a Randomness> +impl<'a, E: Pairing, P: DenseMVPolynomial> Add<&'a Randomness> for Randomness where - E: PairingEngine, - P: DenseMVPolynomial, - P::Point: Index, + E: Pairing, + P: DenseMVPolynomial, + P::Point: Index, { type Output = Self; @@ -490,16 +477,16 @@ where } } -impl<'a, E, P> Add<(E::Fr, &'a Randomness)> for Randomness +impl<'a, E, P> Add<(E::ScalarField, &'a Randomness)> for Randomness where - E: PairingEngine, - P: DenseMVPolynomial, - P::Point: Index, + E: Pairing, + P: DenseMVPolynomial, + P::Point: Index, { type Output = Self; #[inline] - fn add(mut self, other: (E::Fr, &'a Randomness)) -> Self { + fn add(mut self, other: (E::ScalarField, &'a Randomness)) -> Self { self += other; self } @@ -507,9 +494,9 @@ where impl<'a, E, P> AddAssign<&'a 
Randomness> for Randomness where - E: PairingEngine, - P: DenseMVPolynomial, - P::Point: Index, + E: Pairing, + P: DenseMVPolynomial, + P::Point: Index, { #[inline] fn add_assign(&mut self, other: &'a Self) { @@ -517,14 +504,14 @@ where } } -impl<'a, E, P> AddAssign<(E::Fr, &'a Randomness)> for Randomness +impl<'a, E, P> AddAssign<(E::ScalarField, &'a Randomness)> for Randomness where - E: PairingEngine, - P: DenseMVPolynomial, - P::Point: Index, + E: Pairing, + P: DenseMVPolynomial, + P::Point: Index, { #[inline] - fn add_assign(&mut self, (f, other): (E::Fr, &'a Randomness)) { + fn add_assign(&mut self, (f, other): (E::ScalarField, &'a Randomness)) { self.blinding_polynomial += (f, &other.blinding_polynomial); } } @@ -539,21 +526,10 @@ where PartialEq(bound = ""), Eq(bound = "") )] -pub struct Proof { +pub struct Proof { /// Commitments to the witness polynomials pub w: Vec, /// Evaluation of the random polynomial at the point for which /// the evaluation proof was produced. - pub random_v: Option, -} - -impl PCProof for Proof { - fn size_in_bytes(&self) -> usize { - let hiding_size = if self.random_v.is_some() { - E::Fr::zero().serialized_size() - } else { - 0 - }; - (self.w.len() * E::G1Affine::zero().serialized_size()) / 2 + hiding_size - } -} + pub random_v: Option, +} \ No newline at end of file diff --git a/src/marlin/marlin_pst13_pc/mod.rs b/src/marlin/marlin_pst13_pc/mod.rs index 20f7096b..f9cb63b9 100644 --- a/src/marlin/marlin_pst13_pc/mod.rs +++ b/src/marlin/marlin_pst13_pc/mod.rs @@ -7,10 +7,7 @@ use crate::{BatchLCProof, Error, Evaluations, QuerySet}; use crate::{LabeledCommitment, LabeledPolynomial, LinearCombination}; use crate::{PCRandomness, PCUniversalParams, PolynomialCommitment}; use crate::{ToString, Vec}; -use ark_ec::{ - msm::{FixedBase, VariableBaseMSM}, - AffineCurve, PairingEngine, ProjectiveCurve, -}; +use ark_ec::{pairing::Pairing, scalar_mul::fixed_base::FixedBase, CurveGroup, VariableBaseMSM}; use ark_ff::{One, PrimeField, 
UniformRand, Zero}; use ark_poly::{multivariate::Term, DenseMVPolynomial}; use ark_std::rand::RngCore; @@ -33,13 +30,15 @@ use rayon::prelude::*; /// /// [pst]: https://eprint.iacr.org/2011/587 /// [marlin]: https://eprint.iacr.org/2019/104 -pub struct MarlinPST13, S: CryptographicSponge> { +pub struct MarlinPST13, S: CryptographicSponge> { _engine: PhantomData, _poly: PhantomData

, _sponge: PhantomData, } -impl, S: CryptographicSponge> MarlinPST13 { +impl, S: CryptographicSponge> + MarlinPST13 +{ /// Given some point `z`, compute the quotients `w_i(X)` s.t /// /// `p(X) - p(z) = (X_1-z_1)*w_1(X) + (X_2-z_2)*w_2(X) + ... + (X_l-z_l)*w_l(X)` @@ -47,7 +46,7 @@ impl, S: CryptographicSponge> Marl /// These quotients can always be found with no remainder. fn divide_at_point(p: &P, point: &P::Point) -> Vec

where - P::Point: Index, + P::Point: Index, { let num_vars = p.num_vars(); if p.is_zero() { @@ -115,7 +114,7 @@ impl, S: CryptographicSponge> Marl /// Check that a given polynomial is supported by parameters fn check_degrees_and_bounds<'a>( supported_degree: usize, - p: &'a LabeledPolynomial, + p: &'a LabeledPolynomial, ) -> Result<(), Error> where P: 'a, @@ -132,7 +131,7 @@ impl, S: CryptographicSponge> Marl } /// Convert polynomial coefficients to `BigInt` - fn convert_to_bigints(p: &P) -> Vec<::BigInt> { + fn convert_to_bigints(p: &P) -> Vec<::BigInt> { let plain_coeffs = ark_std::cfg_into_iter!(p.terms()) .map(|(coeff, _)| coeff.into_bigint()) .collect(); @@ -140,12 +139,12 @@ impl, S: CryptographicSponge> Marl } } -impl PolynomialCommitment for MarlinPST13 +impl PolynomialCommitment for MarlinPST13 where - E: PairingEngine, - P: DenseMVPolynomial + Sync, + E: Pairing, + P: DenseMVPolynomial + Sync, S: CryptographicSponge, - P::Point: Index, + P::Point: Index, { type UniversalParams = UniversalParams; type CommitterKey = CommitterKey; @@ -179,11 +178,11 @@ where // Trapdoor evaluation points let mut betas = Vec::with_capacity(num_vars); for _ in 0..num_vars { - betas.push(E::Fr::rand(rng)); + betas.push(E::ScalarField::rand(rng)); } // Generators - let g = E::G1Projective::rand(rng); - let gamma_g = E::G1Projective::rand(rng); + let g = E::G1::rand(rng); + let gamma_g = E::G1::rand(rng); let h = E::G2Projective::rand(rng); // A list of all variable numbers of multiplicity `max_degree` @@ -203,7 +202,7 @@ where // trapdoor and generate a `P::Term` object to index it ark_std::cfg_into_iter!(terms) .map(|term| { - let value: E::Fr = term.iter().map(|e| betas[*e]).product(); + let value: E::ScalarField = term.iter().map(|e| betas[*e]).product(); let term = (0..num_vars) .map(|var| (var, term.iter().filter(|e| **e == var).count())) .collect(); @@ -213,12 +212,12 @@ where }) .unzip(); - let scalar_bits = E::Fr::MODULUS_BIT_SIZE as usize; + let scalar_bits = 
E::ScalarField::MODULUS_BIT_SIZE as usize; let g_time = start_timer!(|| "Generating powers of G"); let window_size = FixedBase::get_mul_window_size(max_degree + 1); let g_table = FixedBase::get_window_table(scalar_bits, window_size, g); let mut powers_of_g = - FixedBase::msm::(scalar_bits, window_size, &g_table, &powers_of_beta); + FixedBase::msm::(scalar_bits, window_size, &g_table, &powers_of_beta); powers_of_g.push(g); powers_of_beta_terms.push(P::Term::new(vec![])); end_timer!(g_time); @@ -234,12 +233,12 @@ where .enumerate() .for_each(|(i, v)| { let mut powers_of_beta = Vec::with_capacity(max_degree); - let mut cur = E::Fr::one(); + let mut cur = E::ScalarField::one(); for _ in 0..=max_degree { cur *= &betas[i]; powers_of_beta.push(cur); } - *v = FixedBase::msm::( + *v = FixedBase::msm::( scalar_bits, window_size, &gamma_g_table, @@ -248,11 +247,11 @@ where }); end_timer!(gamma_g_time); - let powers_of_g = E::G1Projective::batch_normalization_into_affine(&powers_of_g); + let powers_of_g = E::G1::batch_normalization_into_affine(&powers_of_g); let gamma_g = gamma_g.into_affine(); let powers_of_gamma_g = powers_of_gamma_g .into_iter() - .map(|v| E::G1Projective::batch_normalization_into_affine(&v)) + .map(|v| E::G1::batch_normalization_into_affine(&v)) .collect(); let beta_h: Vec<_> = betas.iter().map(|b| h.mul(b).into_affine()).collect(); let h = h.into_affine(); @@ -340,7 +339,7 @@ where /// Outputs a commitments to `polynomials`. 
fn commit<'a>( ck: &Self::CommitterKey, - polynomials: impl IntoIterator>, + polynomials: impl IntoIterator>, rng: Option<&mut dyn RngCore>, ) -> Result< ( @@ -380,8 +379,7 @@ where end_timer!(to_bigint_time); let msm_time = start_timer!(|| "MSM to compute commitment to plaintext poly"); - let mut commitment = - ::msm_bigint(&powers_of_g, &plain_ints); + let mut commitment = ::msm_bigint(&powers_of_g, &plain_ints); end_timer!(msm_time); // Sample random polynomial @@ -417,7 +415,7 @@ where let msm_time = start_timer!(|| "MSM to compute commitment to random poly"); let random_commitment = - ::msm_bigint(&powers_of_gamma_g, &random_ints) + ::msm_bigint(&powers_of_gamma_g, &random_ints) .into_affine(); end_timer!(msm_time); @@ -440,10 +438,10 @@ where /// On input a polynomial `p` and a point `point`, outputs a proof for the same. fn open<'a>( ck: &Self::CommitterKey, - labeled_polynomials: impl IntoIterator>, + labeled_polynomials: impl IntoIterator>, _commitments: impl IntoIterator>, point: &P::Point, - opening_challenges: &mut ChallengeGenerator, + opening_challenges: &mut ChallengeGenerator, rands: impl IntoIterator, _rng: Option<&mut dyn RngCore>, ) -> Result @@ -486,7 +484,7 @@ where // Convert coefficients to BigInt let witness_ints = Self::convert_to_bigints(&w); // Compute MSM - ::msm_bigint(&powers_of_g, &witness_ints) + ::msm_bigint(&powers_of_g, &witness_ints) }) .collect::>(); end_timer!(witness_comm_time); @@ -516,7 +514,7 @@ where // Convert coefficients to BigInt let hiding_witness_ints = Self::convert_to_bigints(hiding_witness); // Compute MSM and add result to witness - *witness += &::msm_bigint( + *witness += &::msm_bigint( &powers_of_gamma_g, &hiding_witness_ints, ); @@ -539,9 +537,9 @@ where vk: &Self::VerifierKey, commitments: impl IntoIterator>, point: &'a P::Point, - values: impl IntoIterator, + values: impl IntoIterator, proof: &Self::Proof, - opening_challenges: &mut ChallengeGenerator, + opening_challenges: &mut ChallengeGenerator, _rng: 
Option<&mut dyn RngCore>, ) -> Result where @@ -582,9 +580,9 @@ where vk: &Self::VerifierKey, commitments: impl IntoIterator>, query_set: &QuerySet, - values: &Evaluations, + values: &Evaluations, proof: &Self::BatchProof, - opening_challenges: &mut ChallengeGenerator, + opening_challenges: &mut ChallengeGenerator, rng: &mut R, ) -> Result where @@ -602,14 +600,14 @@ where start_timer!(|| format!("Checking {} evaluation proofs", combined_comms.len())); let g = vk.g.into_projective(); let gamma_g = vk.gamma_g.into_projective(); - let mut total_c = ::zero(); - let mut total_w = vec![::zero(); vk.num_vars]; + let mut total_c = ::zero(); + let mut total_w = vec![::zero(); vk.num_vars]; let combination_time = start_timer!(|| "Combining commitments and proofs"); - let mut randomizer = E::Fr::one(); + let mut randomizer = E::ScalarField::one(); // Instead of multiplying g and gamma_g in each turn, we simply accumulate // their coefficients and perform a final multiplication at the end. - let mut g_multiplier = E::Fr::zero(); - let mut gamma_g_multiplier = E::Fr::zero(); + let mut g_multiplier = E::ScalarField::zero(); + let mut gamma_g_multiplier = E::ScalarField::zero(); for (((c, z), v), proof) in combined_comms .iter() .zip(combined_queries) @@ -617,7 +615,7 @@ where .zip(proof) { let w = &proof.w; - let mut temp: E::G1Projective = ark_std::cfg_iter!(w) + let mut temp: E::G1 = ark_std::cfg_iter!(w) .enumerate() .map(|(j, w_j)| w_j.mul(z[j])) .sum(); @@ -656,14 +654,14 @@ where fn open_combinations<'a>( ck: &Self::CommitterKey, - linear_combinations: impl IntoIterator>, - polynomials: impl IntoIterator>, + linear_combinations: impl IntoIterator>, + polynomials: impl IntoIterator>, commitments: impl IntoIterator>, query_set: &QuerySet, - opening_challenges: &mut ChallengeGenerator, + opening_challenges: &mut ChallengeGenerator, rands: impl IntoIterator, rng: Option<&mut dyn RngCore>, - ) -> Result, Self::Error> + ) -> Result, Self::Error> where P: 'a, Self::Randomness: 
'a, @@ -685,12 +683,12 @@ where /// committed in `labeled_commitments`. fn check_combinations<'a, R: RngCore>( vk: &Self::VerifierKey, - linear_combinations: impl IntoIterator>, + linear_combinations: impl IntoIterator>, commitments: impl IntoIterator>, eqn_query_set: &QuerySet, - eqn_evaluations: &Evaluations, - proof: &BatchLCProof, - opening_challenges: &mut ChallengeGenerator, + eqn_evaluations: &Evaluations, + proof: &BatchLCProof, + opening_challenges: &mut ChallengeGenerator, rng: &mut R, ) -> Result where @@ -715,7 +713,7 @@ mod tests { use super::MarlinPST13; use ark_bls12_377::Bls12_377; use ark_bls12_381::Bls12_381; - use ark_ec::PairingEngine; + use ark_ec::pairing::Pairing; use ark_ff::UniformRand; use ark_poly::{ multivariate::{SparsePolynomial as SparsePoly, SparseTerm}, @@ -724,30 +722,33 @@ mod tests { use ark_sponge::poseidon::PoseidonSponge; use rand_chacha::ChaCha20Rng; - type MVPoly_381 = SparsePoly<::Fr, SparseTerm>; - type MVPoly_377 = SparsePoly<::Fr, SparseTerm>; + type MVPoly_381 = SparsePoly<::Fr, SparseTerm>; + type MVPoly_377 = SparsePoly<::Fr, SparseTerm>; type PC = MarlinPST13; - type Sponge_bls12_381 = PoseidonSponge<::Fr>; - type Sponge_Bls12_377 = PoseidonSponge<::Fr>; + type Sponge_bls12_381 = PoseidonSponge<::Fr>; + type Sponge_Bls12_377 = PoseidonSponge<::Fr>; type PC_Bls12_381 = PC; type PC_Bls12_377 = PC; - fn rand_poly( + fn rand_poly( degree: usize, num_vars: Option, rng: &mut ChaCha20Rng, - ) -> SparsePoly { - SparsePoly::::rand(degree, num_vars.unwrap(), rng) + ) -> SparsePoly { + SparsePoly::::rand(degree, num_vars.unwrap(), rng) } - fn rand_point(num_vars: Option, rng: &mut ChaCha20Rng) -> Vec { + fn rand_point( + num_vars: Option, + rng: &mut ChaCha20Rng, + ) -> Vec { let num_vars = num_vars.unwrap(); let mut point = Vec::with_capacity(num_vars); for _ in 0..num_vars { - point.push(E::Fr::rand(rng)); + point.push(E::ScalarField::rand(rng)); } point } diff --git a/src/marlin/mod.rs b/src/marlin/mod.rs index 
eff8c4ce..d5e0cf04 100644 --- a/src/marlin/mod.rs +++ b/src/marlin/mod.rs @@ -4,7 +4,7 @@ use crate::{BTreeMap, BTreeSet, Debug, RngCore, String, ToString, Vec}; use crate::{BatchLCProof, LabeledPolynomial, LinearCombination}; use crate::{Evaluations, LabeledCommitment, QuerySet}; use crate::{PCRandomness, Polynomial, PolynomialCommitment}; -use ark_ec::{AffineCurve, PairingEngine, ProjectiveCurve}; +use ark_ec::pairing::Pairing; use ark_ff::{One, Zero}; use ark_sponge::CryptographicSponge; use ark_std::{convert::TryInto, hash::Hash, ops::AddAssign, ops::Mul}; @@ -28,10 +28,10 @@ pub mod marlin_pst13_pc; /// Common functionalities between `marlin_pc` and `marlin_pst13_pc` struct Marlin where - E: PairingEngine, + E: Pairing, S: CryptographicSponge, - P: Polynomial, - PC: PolynomialCommitment, + P: Polynomial, + PC: PolynomialCommitment, { _engine: core::marker::PhantomData, _sponge: core::marker::PhantomData, @@ -41,16 +41,16 @@ where impl Marlin where - E: PairingEngine, + E: Pairing, S: CryptographicSponge, - P: Polynomial, - PC: PolynomialCommitment, + P: Polynomial, + PC: PolynomialCommitment, { /// MSM for `commitments` and `coeffs` fn combine_commitments<'a>( - coeffs_and_comms: impl IntoIterator)>, - ) -> (E::G1Projective, Option) { - let mut combined_comm = E::G1Projective::zero(); + coeffs_and_comms: impl IntoIterator)>, + ) -> (E::G1, Option) { + let mut combined_comm = E::G1::zero(); let mut combined_shifted_comm = None; for (coeff, comm) in coeffs_and_comms { if coeff.is_one() { @@ -69,7 +69,7 @@ where /// Normalize a list of commitments fn normalize_commitments<'a>( - commitments: Vec<(E::G1Projective, Option)>, + commitments: Vec<(E::G1, Option)>, ) -> Vec> { let mut comms = Vec::with_capacity(commitments.len()); let mut s_comms = Vec::with_capacity(commitments.len()); @@ -80,12 +80,12 @@ where s_comms.push(c); s_flags.push(true); } else { - s_comms.push(E::G1Projective::zero()); + s_comms.push(E::G1::zero()); s_flags.push(false); } } - let comms = 
E::G1Projective::batch_normalization_into_affine(&comms); - let s_comms = E::G1Projective::batch_normalization_into_affine(&mut s_comms); + let comms = E::G1::batch_normalization_into_affine(&comms); + let s_comms = E::G1::batch_normalization_into_affine(&mut s_comms); comms .into_iter() .zip(s_comms) @@ -107,13 +107,13 @@ where /// Accumulate `commitments` and `values` according to the challenges produces by `challenge_gen`. fn accumulate_commitments_and_values<'a>( commitments: impl IntoIterator>>, - values: impl IntoIterator, - challenge_gen: &mut ChallengeGenerator, + values: impl IntoIterator, + challenge_gen: &mut ChallengeGenerator, vk: Option<&marlin_pc::VerifierKey>, - ) -> Result<(E::G1Projective, E::Fr), Error> { + ) -> Result<(E::G1, E::ScalarField), Error> { let acc_time = start_timer!(|| "Accumulating commitments and values"); - let mut combined_comm = E::G1Projective::zero(); - let mut combined_value = E::Fr::zero(); + let mut combined_comm = E::G1::zero(); + let mut combined_value = E::ScalarField::zero(); for (labeled_commitment, value) in commitments.into_iter().zip(values) { let degree_bound = labeled_commitment.degree_bound(); let commitment = labeled_commitment.commitment(); @@ -154,10 +154,10 @@ where fn combine_and_normalize<'a, D: Clone + Ord + Sync>( commitments: impl IntoIterator>>, query_set: &QuerySet, - evaluations: &Evaluations, - opening_challenges: &mut ChallengeGenerator, + evaluations: &Evaluations, + opening_challenges: &mut ChallengeGenerator, vk: Option<&marlin_pc::VerifierKey>, - ) -> Result<(Vec>, Vec, Vec), Error> + ) -> Result<(Vec>, Vec, Vec), Error> where marlin_pc::Commitment: 'a, { @@ -212,7 +212,7 @@ where combined_evals.push(v); } let norm_time = start_timer!(|| "Normalizing combined commitments"); - E::G1Projective::batch_normalization(&mut combined_comms); + E::G1::batch_normalization(&mut combined_comms); let combined_comms = combined_comms .into_iter() .map(|c| kzg10::Commitment(c.into())) @@ -226,26 +226,26 @@ 
where /// the combinations at the points in the query set. fn open_combinations<'a, D>( ck: &PC::CommitterKey, - lc_s: impl IntoIterator>, - polynomials: impl IntoIterator>, + lc_s: impl IntoIterator>, + polynomials: impl IntoIterator>, commitments: impl IntoIterator>, query_set: &QuerySet, - opening_challenges: &mut ChallengeGenerator, + opening_challenges: &mut ChallengeGenerator, rands: impl IntoIterator, rng: Option<&mut dyn RngCore>, - ) -> Result, Error> + ) -> Result, Error> where - P: 'a + Polynomial, + P: 'a + Polynomial, D: Debug + Clone + Hash + Ord + Sync, PC: PolynomialCommitment< - E::Fr, + E::ScalarField, P, S, Commitment = marlin_pc::Commitment, PreparedCommitment = marlin_pc::PreparedCommitment, Error = Error, >, - PC::Randomness: 'a + AddAssign<(E::Fr, &'a PC::Randomness)>, + PC::Randomness: 'a + AddAssign<(E::ScalarField, &'a PC::Randomness)>, PC::Commitment: 'a, { let label_map = polynomials @@ -322,20 +322,20 @@ where fn check_combinations<'a, R, D>( vk: &PC::VerifierKey, - lc_s: impl IntoIterator>, + lc_s: impl IntoIterator>, commitments: impl IntoIterator>, query_set: &QuerySet, - evaluations: &Evaluations, - proof: &BatchLCProof, - opening_challenges: &mut ChallengeGenerator, + evaluations: &Evaluations, + proof: &BatchLCProof, + opening_challenges: &mut ChallengeGenerator, rng: &mut R, ) -> Result where R: RngCore, - P: Polynomial, + P: Polynomial, D: Debug + Clone + Hash + Ord + Sync, PC: PolynomialCommitment< - E::Fr, + E::ScalarField, P, S, Commitment = marlin_pc::Commitment, diff --git a/src/multilinear_pc/data_structures.rs b/src/multilinear_pc/data_structures.rs index e52ef66b..6920ece7 100644 --- a/src/multilinear_pc/data_structures.rs +++ b/src/multilinear_pc/data_structures.rs @@ -1,16 +1,16 @@ -use ark_ec::PairingEngine; -use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, Read, SerializationError, Write}; +use ark_ec::pairing::Pairing; +use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; use 
ark_std::vec::Vec; #[allow(type_alias_bounds)] /// Evaluations over {0,1}^n for G1 -pub type EvaluationHyperCubeOnG1 = Vec; +pub type EvaluationHyperCubeOnG1 = Vec; #[allow(type_alias_bounds)] /// Evaluations over {0,1}^n for G2 -pub type EvaluationHyperCubeOnG2 = Vec; +pub type EvaluationHyperCubeOnG2 = Vec; /// Public Parameter used by prover #[derive(CanonicalSerialize, CanonicalDeserialize, Clone, Debug)] -pub struct UniversalParams { +pub struct UniversalParams { /// number of variables pub num_vars: usize, /// `pp_{num_vars}`, `pp_{num_vars - 1}`, `pp_{num_vars - 2}`, ..., defined by XZZPD19 @@ -27,7 +27,7 @@ pub struct UniversalParams { /// Public Parameter used by prover #[derive(CanonicalSerialize, CanonicalDeserialize, Clone, Debug)] -pub struct CommitterKey { +pub struct CommitterKey { /// number of variables pub nv: usize, /// pp_k defined by libra @@ -42,7 +42,7 @@ pub struct CommitterKey { /// Public Parameter used by prover #[derive(CanonicalSerialize, CanonicalDeserialize, Clone, Debug)] -pub struct VerifierKey { +pub struct VerifierKey { /// number of variables pub nv: usize, /// generator of G1 @@ -55,7 +55,7 @@ pub struct VerifierKey { #[derive(CanonicalSerialize, CanonicalDeserialize, Clone, Debug)] /// commitment -pub struct Commitment { +pub struct Commitment { /// number of variables pub nv: usize, /// product of g as described by the vRAM paper @@ -64,7 +64,7 @@ pub struct Commitment { #[derive(CanonicalSerialize, CanonicalDeserialize, Clone, Debug)] /// proof of opening -pub struct Proof { +pub struct Proof { /// Evaluation of quotients pub proofs: Vec, } diff --git a/src/multilinear_pc/mod.rs b/src/multilinear_pc/mod.rs index 6d001639..fe07626c 100644 --- a/src/multilinear_pc/mod.rs +++ b/src/multilinear_pc/mod.rs @@ -1,8 +1,8 @@ use crate::multilinear_pc::data_structures::{ Commitment, CommitterKey, Proof, UniversalParams, VerifierKey, }; -use ark_ec::msm::{FixedBase, VariableBaseMSM}; -use ark_ec::{AffineCurve, PairingEngine, 
ProjectiveCurve}; +use ark_ec::{scalar_mul::fixed_base::FixedBase, VariableBaseMSM}; +use ark_ec::{pairing::Pairing, CurveGroup}; use ark_ff::{Field, PrimeField}; use ark_ff::{One, Zero}; use ark_poly::{DenseMultilinearExtension, MultilinearExtension}; @@ -18,24 +18,24 @@ use ark_std::UniformRand; pub mod data_structures; /// Polynomial Commitment Scheme on multilinear extensions. -pub struct MultilinearPC { +pub struct MultilinearPC { _engine: PhantomData, } -impl MultilinearPC { +impl MultilinearPC { /// setup pub fn setup(num_vars: usize, rng: &mut R) -> UniversalParams { assert!(num_vars > 0, "constant polynomial not supported"); - let g: E::G1Projective = E::G1Projective::rand(rng); + let g: E::G1 = E::G1::rand(rng); let h: E::G2Projective = E::G2Projective::rand(rng); let g = g.into_affine(); let h = h.into_affine(); let mut powers_of_g = Vec::new(); let mut powers_of_h = Vec::new(); - let t: Vec<_> = (0..num_vars).map(|_| E::Fr::rand(rng)).collect(); - let scalar_bits = E::Fr::MODULUS_BIT_SIZE as usize; + let t: Vec<_> = (0..num_vars).map(|_| E::ScalarField::rand(rng)).collect(); + let scalar_bits = E::ScalarField::MODULUS_BIT_SIZE as usize; - let mut eq: LinkedList> = + let mut eq: LinkedList> = LinkedList::from_iter(eq_extension(&t).into_iter()); let mut eq_arr = LinkedList::new(); let mut base = eq.pop_back().unwrap().evaluations; @@ -64,7 +64,7 @@ impl MultilinearPC { let g_table = FixedBase::get_window_table(scalar_bits, window_size, g.into_projective()); let h_table = FixedBase::get_window_table(scalar_bits, window_size, h.into_projective()); - let pp_g = E::G1Projective::batch_normalization_into_affine(&FixedBase::msm( + let pp_g = E::G1::batch_normalization_into_affine(&FixedBase::msm( scalar_bits, window_size, &g_table, @@ -92,7 +92,7 @@ impl MultilinearPC { let window_size = FixedBase::get_mul_window_size(num_vars); let g_table = FixedBase::get_window_table(scalar_bits, window_size, g.into_projective()); - 
E::G1Projective::batch_normalization_into_affine(&FixedBase::msm( + E::G1::batch_normalization_into_affine(&FixedBase::msm( scalar_bits, window_size, &g_table, @@ -139,7 +139,7 @@ impl MultilinearPC { /// commit pub fn commit( ck: &CommitterKey, - polynomial: &impl MultilinearExtension, + polynomial: &impl MultilinearExtension, ) -> Commitment { let nv = polynomial.num_vars(); let scalars: Vec<_> = polynomial @@ -147,24 +147,22 @@ impl MultilinearPC { .into_iter() .map(|x| x.into_bigint()) .collect(); - let g_product = ::msm_bigint( - &ck.powers_of_g[0], - scalars.as_slice(), - ) - .into_affine(); + let g_product = + ::msm_bigint(&ck.powers_of_g[0], scalars.as_slice()) + .into_affine(); Commitment { nv, g_product } } /// On input a polynomial `p` and a point `point`, outputs a proof for the same. pub fn open( ck: &CommitterKey, - polynomial: &impl MultilinearExtension, - point: &[E::Fr], + polynomial: &impl MultilinearExtension, + point: &[E::ScalarField], ) -> Proof { assert_eq!(polynomial.num_vars(), ck.nv, "Invalid size of polynomial"); let nv = polynomial.num_vars(); - let mut r: Vec> = (0..nv + 1).map(|_| Vec::new()).collect(); - let mut q: Vec> = (0..nv + 1).map(|_| Vec::new()).collect(); + let mut r: Vec> = (0..nv + 1).map(|_| Vec::new()).collect(); + let mut q: Vec> = (0..nv + 1).map(|_| Vec::new()).collect(); r[nv] = polynomial.to_evaluations(); @@ -172,11 +170,15 @@ impl MultilinearPC { for i in 0..nv { let k = nv - i; let point_at_k = point[i]; - q[k] = (0..(1 << (k - 1))).map(|_| E::Fr::zero()).collect(); - r[k - 1] = (0..(1 << (k - 1))).map(|_| E::Fr::zero()).collect(); + q[k] = (0..(1 << (k - 1))) + .map(|_| E::ScalarField::zero()) + .collect(); + r[k - 1] = (0..(1 << (k - 1))) + .map(|_| E::ScalarField::zero()) + .collect(); for b in 0..(1 << (k - 1)) { q[k][b] = r[k][(b << 1) + 1] - &r[k][b << 1]; - r[k - 1][b] = r[k][b << 1] * &(E::Fr::one() - &point_at_k) + r[k - 1][b] = r[k][b << 1] * &(E::ScalarField::one() - &point_at_k) + &(r[k][(b << 1) + 1] 
* &point_at_k); } let scalars: Vec<_> = (0..(1 << k)) @@ -197,8 +199,8 @@ impl MultilinearPC { pub fn check<'a>( vk: &VerifierKey, commitment: &Commitment, - point: &[E::Fr], - value: E::Fr, + point: &[E::ScalarField], + value: E::ScalarField, proof: &Proof, ) -> bool { let left = E::pairing( @@ -206,17 +208,17 @@ impl MultilinearPC { vk.h, ); - let scalar_size = E::Fr::MODULUS_BIT_SIZE as usize; + let scalar_size = E::ScalarField::MODULUS_BIT_SIZE as usize; let window_size = FixedBase::get_mul_window_size(vk.nv); let g_table = FixedBase::get_window_table(scalar_size, window_size, vk.g.into_projective()); - let g_mul: Vec = FixedBase::msm(scalar_size, window_size, &g_table, point); + let g_mul: Vec = FixedBase::msm(scalar_size, window_size, &g_table, point); let pairing_lefts: Vec<_> = (0..vk.nv) .map(|i| vk.g_mask_random[i].into_projective() - &g_mul[i]) .collect(); let pairing_lefts: Vec = - E::G1Projective::batch_normalization_into_affine(&pairing_lefts); + E::G1::batch_normalization_into_affine(&pairing_lefts); let pairing_lefts: Vec = pairing_lefts .into_iter() .map(|x| E::G1Prepared::from(x)) @@ -275,13 +277,13 @@ mod tests { use crate::multilinear_pc::data_structures::UniversalParams; use crate::multilinear_pc::MultilinearPC; use ark_bls12_381::Bls12_381; - use ark_ec::PairingEngine; + use ark_ec::pairing::Pairing; use ark_poly::{DenseMultilinearExtension, MultilinearExtension, SparseMultilinearExtension}; use ark_std::rand::RngCore; use ark_std::vec::Vec; use ark_std::{test_rng, UniformRand}; type E = Bls12_381; - type Fr = ::Fr; + type Fr = ::Fr; fn test_polynomial( uni_params: &UniversalParams, diff --git a/src/sonic_pc/data_structures.rs b/src/sonic_pc/data_structures.rs index 07a9b777..a6cc60a6 100644 --- a/src/sonic_pc/data_structures.rs +++ b/src/sonic_pc/data_structures.rs @@ -2,8 +2,10 @@ use crate::kzg10; use crate::{ BTreeMap, PCCommitterKey, PCPreparedCommitment, PCPreparedVerifierKey, PCVerifierKey, Vec, }; -use ark_ec::{PairingEngine, 
ProjectiveCurve}; -use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, SerializationError}; +use ark_ec::pairing::Pairing; +use ark_serialize::{ + CanonicalDeserialize, Valid, CanonicalSerialize, Compress, SerializationError, Validate, +}; use ark_std::io::{Read, Write}; /// `UniversalParams` are the universal parameters for the KZG10 scheme. @@ -18,11 +20,11 @@ pub type Commitment = kzg10::Commitment; /// `PreparedCommitment` is the prepared commitment for the KZG10 scheme. pub type PreparedCommitment = kzg10::PreparedCommitment; -impl PCPreparedCommitment> for PreparedCommitment { +impl PCPreparedCommitment> for PreparedCommitment { /// prepare `PreparedCommitment` from `Commitment` fn prepare(comm: &Commitment) -> Self { let mut prepared_comm = Vec::::new(); - let mut cur = E::G1Projective::from(comm.0.clone()); + let mut cur = E::G1::from(comm.0.clone()); for _ in 0..128 { prepared_comm.push(cur.clone().into()); cur.double_in_place(); @@ -41,7 +43,7 @@ impl PCPreparedCommitment> for PreparedCommitmen Clone(bound = ""), Debug(bound = "") )] -pub struct CommitterKey { +pub struct CommitterKey { /// The key used to commit to polynomials. pub powers_of_g: Vec, @@ -65,7 +67,7 @@ pub struct CommitterKey { pub max_degree: usize, } -impl CommitterKey { +impl CommitterKey { /// Obtain powers for the underlying KZG10 construction pub fn powers(&self) -> kzg10::Powers { kzg10::Powers { @@ -111,7 +113,7 @@ impl CommitterKey { } } -impl PCCommitterKey for CommitterKey { +impl PCCommitterKey for CommitterKey { fn max_degree(&self) -> usize { self.max_degree } @@ -124,7 +126,7 @@ impl PCCommitterKey for CommitterKey { /// `VerifierKey` is used to check evaluation proofs for a given commitment. #[derive(Derivative)] #[derivative(Default(bound = ""), Clone(bound = ""), Debug(bound = ""))] -pub struct VerifierKey { +pub struct VerifierKey { /// The generator of G1.
pub g: E::G1Affine, @@ -156,7 +158,7 @@ pub struct VerifierKey { pub max_degree: usize, } -impl VerifierKey { +impl VerifierKey { /// Find the appropriate shift for the degree bound. pub fn get_shift_power(&self, degree_bound: usize) -> Option { self.degree_bounds_and_neg_powers_of_h @@ -169,71 +171,70 @@ impl VerifierKey { } } -impl CanonicalSerialize for VerifierKey { - fn serialize(&self, mut writer: W) -> Result<(), SerializationError> { - self.g.serialize(&mut writer)?; - self.gamma_g.serialize(&mut writer)?; - self.h.serialize(&mut writer)?; - self.beta_h.serialize(&mut writer)?; - self.degree_bounds_and_neg_powers_of_h - .serialize(&mut writer)?; - self.supported_degree.serialize(&mut writer)?; - self.max_degree.serialize(&mut writer) - } - - fn serialized_size(&self) -> usize { - self.g.serialized_size() - + self.gamma_g.serialized_size() - + self.h.serialized_size() - + self.beta_h.serialized_size() - + self.degree_bounds_and_neg_powers_of_h.serialized_size() - + self.supported_degree.serialized_size() - + self.max_degree.serialized_size() +impl Valid for VerifierKey { + fn check(&self) -> Result<(), SerializationError> { + // TODO probably need to do more checks here + if self.supported_degree <= self.max_degree { + Ok(()) + } else { + Err(SerializationError::InvalidData) + } } +} - fn serialize_uncompressed(&self, mut writer: W) -> Result<(), SerializationError> { - self.g.serialize_uncompressed(&mut writer)?; - self.gamma_g.serialize_uncompressed(&mut writer)?; - self.h.serialize_uncompressed(&mut writer)?; - self.beta_h.serialize_uncompressed(&mut writer)?; +impl CanonicalSerialize for VerifierKey { + fn serialize_with_mode( + &self, + mut writer: W, + compress: Compress, + ) -> Result<(), SerializationError> { + self.g.serialize_with_mode(&mut writer, compress)?; + self.gamma_g.serialize_with_mode(&mut writer, compress)?; + self.h.serialize_with_mode(&mut writer, compress)?; + self.beta_h.serialize_with_mode(&mut writer, compress)?; self.degree_bounds_and_neg_powers_of_h - .serialize_uncompressed(&mut writer)?;
self.supported_degree.serialize_uncompressed(&mut writer)?; - self.max_degree.serialize_uncompressed(&mut writer) + .serialize_with_mode(&mut writer, compress)?; + self.supported_degree + .serialize_with_mode(&mut writer, compress)?; + self.max_degree.serialize_with_mode(&mut writer, compress) } - fn serialize_unchecked(&self, mut writer: W) -> Result<(), SerializationError> { - self.g.serialize_unchecked(&mut writer)?; - self.gamma_g.serialize_unchecked(&mut writer)?; - self.h.serialize_unchecked(&mut writer)?; - self.beta_h.serialize_unchecked(&mut writer)?; - self.degree_bounds_and_neg_powers_of_h - .serialize_unchecked(&mut writer)?; - self.supported_degree.serialize_unchecked(&mut writer)?; - self.max_degree.serialize_unchecked(&mut writer) + fn serialized_size(&self, compress: Compress) -> usize { + self.g.serialized_size(compress) + + self.gamma_g.serialized_size(compress) + + self.h.serialized_size(compress) + + self.beta_h.serialized_size(compress) + + self + .degree_bounds_and_neg_powers_of_h + .serialized_size(compress) + + self.supported_degree.serialized_size(compress) + + self.max_degree.serialized_size(compress) } + fn serialize_compressed(&self, writer: W) -> Result<(), SerializationError> { + self.serialize_with_mode(writer, Compress::Yes) + } + fn compressed_size(&self) -> usize { + self.serialized_size(Compress::Yes) + } + fn serialize_uncompressed(&self, writer: W) -> Result<(), SerializationError> { + self.serialize_with_mode(writer, Compress::No) + } fn uncompressed_size(&self) -> usize { - self.g.uncompressed_size() - + self.gamma_g.uncompressed_size() - + self.h.uncompressed_size() - + self.beta_h.uncompressed_size() - + self.degree_bounds_and_neg_powers_of_h.uncompressed_size() - + self.supported_degree.uncompressed_size() - + self.max_degree.uncompressed_size() + self.serialized_size(Compress::No) } } -impl CanonicalDeserialize for VerifierKey { - fn deserialize(mut reader: R) -> Result { - let g = E::G1Affine::deserialize(&mut reader)?; - 
let gamma_g = E::G1Affine::deserialize(&mut reader)?; - let h = E::G2Affine::deserialize(&mut reader)?; - let beta_h = E::G2Affine::deserialize(&mut reader)?; +impl CanonicalDeserialize for VerifierKey { + fn deserialize_with_mode( + mut reader: R, + compress: Compress, + validate: Validate, + ) -> Result { + let g = E::G1Affine::deserialize_with_mode(&mut reader, compress, validate)?; + let gamma_g = E::G1Affine::deserialize_with_mode(&mut reader, compress, validate)?; + let h = E::G2Affine::deserialize_with_mode(&mut reader, compress, validate)?; + let beta_h = E::G2Affine::deserialize_with_mode(&mut reader, compress, validate)?; let degree_bounds_and_neg_powers_of_h = - Option::>::deserialize(&mut reader)?; - let supported_degree = usize::deserialize(&mut reader)?; - let max_degree = usize::deserialize(&mut reader)?; + Option::>::deserialize_with_mode(&mut reader, compress, validate)?; + let supported_degree = usize::deserialize_with_mode(&mut reader, compress, validate)?; + let max_degree = usize::deserialize_with_mode(&mut reader, compress, validate)?; let prepared_h = E::G2Prepared::from(h.clone()); let prepared_beta_h = E::G2Prepared::from(beta_h.clone()); @@ -251,60 +252,21 @@ impl CanonicalDeserialize for VerifierKey { }) } - fn deserialize_uncompressed(mut reader: R) -> Result { - let g = E::G1Affine::deserialize_uncompressed(&mut reader)?; - let gamma_g = E::G1Affine::deserialize_uncompressed(&mut reader)?; - let h = E::G2Affine::deserialize_uncompressed(&mut reader)?; - let beta_h = E::G2Affine::deserialize_uncompressed(&mut reader)?; - let degree_bounds_and_neg_powers_of_h = - Option::>::deserialize_uncompressed(&mut reader)?; - let supported_degree = usize::deserialize_uncompressed(&mut reader)?; - let max_degree = usize::deserialize_uncompressed(&mut reader)?; - - let prepared_h = E::G2Prepared::from(h.clone()); - let prepared_beta_h = E::G2Prepared::from(beta_h.clone()); - - Ok(Self { - g, - gamma_g, - h, - beta_h, - prepared_h, - prepared_beta_h, - degree_bounds_and_neg_powers_of_h, - supported_degree, - max_degree, - })
+ fn deserialize_compressed(reader: R) -> Result { + Self::deserialize_with_mode(reader, Compress::Yes, Validate::Yes) } - - fn deserialize_unchecked(mut reader: R) -> Result { - let g = E::G1Affine::deserialize_unchecked(&mut reader)?; - let gamma_g = E::G1Affine::deserialize_unchecked(&mut reader)?; - let h = E::G2Affine::deserialize_unchecked(&mut reader)?; - let beta_h = E::G2Affine::deserialize_unchecked(&mut reader)?; - let degree_bounds_and_neg_powers_of_h = - Option::>::deserialize_unchecked(&mut reader)?; - let supported_degree = usize::deserialize_unchecked(&mut reader)?; - let max_degree = usize::deserialize_unchecked(&mut reader)?; - - let prepared_h = E::G2Prepared::from(h.clone()); - let prepared_beta_h = E::G2Prepared::from(beta_h.clone()); - - Ok(Self { - g, - gamma_g, - h, - beta_h, - prepared_h, - prepared_beta_h, - degree_bounds_and_neg_powers_of_h, - supported_degree, - max_degree, - }) + fn deserialize_compressed_unchecked(reader: R) -> Result { + Self::deserialize_with_mode(reader, Compress::Yes, Validate::No) + } + fn deserialize_uncompressed(reader: R) -> Result { + Self::deserialize_with_mode(reader, Compress::No, Validate::Yes) + } + fn deserialize_uncompressed_unchecked(reader: R) -> Result { + Self::deserialize_with_mode(reader, Compress::No, Validate::No) } } -impl PCVerifierKey for VerifierKey { +impl PCVerifierKey for VerifierKey { fn max_degree(&self) -> usize { self.max_degree } @@ -317,7 +279,7 @@ impl PCVerifierKey for VerifierKey { /// Nothing to do to prepare this verifier key (for now). 
pub type PreparedVerifierKey = VerifierKey; -impl PCPreparedVerifierKey> for PreparedVerifierKey { +impl PCPreparedVerifierKey> for PreparedVerifierKey { /// prepare `PreparedVerifierKey` from `VerifierKey` fn prepare(vk: &VerifierKey) -> Self { vk.clone() @@ -334,4 +296,4 @@ impl PCPreparedVerifierKey> for PreparedVerifie PartialEq(bound = ""), Eq(bound = "") )] -pub struct BatchProof(pub(crate) Vec>); +pub struct BatchProof(pub(crate) Vec>); diff --git a/src/sonic_pc/mod.rs b/src/sonic_pc/mod.rs index 7c48de7f..aac81c70 100644 --- a/src/sonic_pc/mod.rs +++ b/src/sonic_pc/mod.rs @@ -4,7 +4,7 @@ use crate::{BatchLCProof, DenseUVPolynomial, Error, Evaluations, QuerySet}; use crate::{LabeledCommitment, LabeledPolynomial, LinearCombination}; use crate::{PCRandomness, PCUniversalParams, PolynomialCommitment}; -use ark_ec::{AffineCurve, PairingEngine, ProjectiveCurve}; +use ark_ec::{pairing::Pairing, }; use ark_ff::{One, UniformRand, Zero}; use ark_std::rand::RngCore; use ark_std::{convert::TryInto, marker::PhantomData, ops::Div, ops::Mul, vec}; @@ -24,7 +24,7 @@ pub use data_structures::*; /// [sonic]: https://eprint.iacr.org/2019/099 /// [al]: https://eprint.iacr.org/2019/601 /// [marlin]: https://eprint.iacr.org/2019/1047 -pub struct SonicKZG10, S: CryptographicSponge> { +pub struct SonicKZG10, S: CryptographicSponge> { _engine: PhantomData, _poly: PhantomData

, _sponge: PhantomData, @@ -32,28 +32,28 @@ pub struct SonicKZG10, S: Cryptogr impl SonicKZG10 where - E: PairingEngine, - P: DenseUVPolynomial, + E: Pairing, + P: DenseUVPolynomial, S: CryptographicSponge, { fn accumulate_elems<'a>( - combined_comms: &mut BTreeMap, E::G1Projective>, - combined_witness: &mut E::G1Projective, - combined_adjusted_witness: &mut E::G1Projective, + combined_comms: &mut BTreeMap, E::G1>, + combined_witness: &mut E::G1, + combined_adjusted_witness: &mut E::G1, vk: &VerifierKey, commitments: impl IntoIterator>>, point: P::Point, - values: impl IntoIterator, + values: impl IntoIterator, proof: &kzg10::Proof, - opening_challenges: &mut ChallengeGenerator, - randomizer: Option, + opening_challenges: &mut ChallengeGenerator, + randomizer: Option, ) { let acc_time = start_timer!(|| "Accumulating elements"); let mut curr_challenge = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); // Keeps track of running combination of values - let mut combined_values = E::Fr::zero(); + let mut combined_values = E::ScalarField::zero(); // Iterates through all of the commitments and accumulates common degree_bound elements in a BTreeMap for (labeled_comm, value) in commitments.into_iter().zip(values) { @@ -63,21 +63,19 @@ where let degree_bound = labeled_comm.degree_bound(); // Applying opening challenge and randomness (used in batch_checking) - let mut comm_with_challenge: E::G1Projective = comm.0.mul(curr_challenge); + let mut comm_with_challenge: E::G1 = comm.0.mul(curr_challenge); if let Some(randomizer) = randomizer { comm_with_challenge = comm_with_challenge.mul(&randomizer); } // Accumulate values in the BTreeMap - *combined_comms - .entry(degree_bound) - .or_insert(E::G1Projective::zero()) += &comm_with_challenge; + *combined_comms.entry(degree_bound).or_insert(E::G1::zero()) += &comm_with_challenge; curr_challenge = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); } // Push expected results into list of elems. 
Power will be the negative of the expected power - let mut witness: E::G1Projective = proof.w.into_projective(); + let mut witness: E::G1 = proof.w.into_projective(); let mut adjusted_witness = vk.g.mul(combined_values) - &proof.w.mul(point); if let Some(random_v) = proof.random_v { adjusted_witness += &vk.gamma_g.mul(random_v); @@ -94,13 +92,13 @@ where } fn check_elems( - combined_comms: BTreeMap, E::G1Projective>, - combined_witness: E::G1Projective, - combined_adjusted_witness: E::G1Projective, + combined_comms: BTreeMap, E::G1>, + combined_witness: E::G1, + combined_adjusted_witness: E::G1, vk: &VerifierKey, ) -> Result { let check_time = start_timer!(|| "Checking elems"); - let mut g1_projective_elems: Vec = Vec::new(); + let mut g1_projective_elems: Vec = Vec::new(); let mut g2_prepared_elems: Vec = Vec::new(); for (degree_bound, comm) in combined_comms.into_iter() { @@ -122,7 +120,7 @@ where g2_prepared_elems.push(vk.prepared_beta_h.clone()); let g1_prepared_elems_iter = - E::G1Projective::batch_normalization_into_affine(g1_projective_elems.as_slice()) + E::G1::batch_normalization_into_affine(g1_projective_elems.as_slice()) .into_iter() .map(|a| a.into()); @@ -134,10 +132,10 @@ where } } -impl PolynomialCommitment for SonicKZG10 +impl PolynomialCommitment for SonicKZG10 where - E: PairingEngine, - P: DenseUVPolynomial, + E: Pairing, + P: DenseUVPolynomial, S: CryptographicSponge, for<'a, 'b> &'a P: Div<&'b P, Output = P>, { @@ -147,7 +145,7 @@ where type PreparedVerifierKey = PreparedVerifierKey; type Commitment = Commitment; type PreparedCommitment = PreparedCommitment; - type Randomness = Randomness; + type Randomness = Randomness; type Proof = kzg10::Proof; type BatchProof = Vec; type Error = Error; @@ -277,7 +275,7 @@ where /// Outputs a commitment to `polynomial`. 
fn commit<'a>( ck: &Self::CommitterKey, - polynomials: impl IntoIterator>, + polynomials: impl IntoIterator>, rng: Option<&mut dyn RngCore>, ) -> Result< ( @@ -343,10 +341,10 @@ where fn open<'a>( ck: &Self::CommitterKey, - labeled_polynomials: impl IntoIterator>, + labeled_polynomials: impl IntoIterator>, _commitments: impl IntoIterator>, point: &'a P::Point, - opening_challenges: &mut ChallengeGenerator, + opening_challenges: &mut ChallengeGenerator, rands: impl IntoIterator, _rng: Option<&mut dyn RngCore>, ) -> Result @@ -389,18 +387,18 @@ where vk: &Self::VerifierKey, commitments: impl IntoIterator>, point: &'a P::Point, - values: impl IntoIterator, + values: impl IntoIterator, proof: &Self::Proof, - opening_challenges: &mut ChallengeGenerator, + opening_challenges: &mut ChallengeGenerator, _rng: Option<&mut dyn RngCore>, ) -> Result where Self::Commitment: 'a, { let check_time = start_timer!(|| "Checking evaluations"); - let mut combined_comms: BTreeMap, E::G1Projective> = BTreeMap::new(); - let mut combined_witness: E::G1Projective = E::G1Projective::zero(); - let mut combined_adjusted_witness: E::G1Projective = E::G1Projective::zero(); + let mut combined_comms: BTreeMap, E::G1> = BTreeMap::new(); + let mut combined_witness: E::G1 = E::G1::zero(); + let mut combined_adjusted_witness: E::G1 = E::G1::zero(); Self::accumulate_elems( &mut combined_comms, @@ -429,9 +427,9 @@ where vk: &Self::VerifierKey, commitments: impl IntoIterator>, query_set: &QuerySet, - values: &Evaluations, + values: &Evaluations, proof: &Self::BatchProof, - opening_challenges: &mut ChallengeGenerator, + opening_challenges: &mut ChallengeGenerator, rng: &mut R, ) -> Result where @@ -449,11 +447,11 @@ where assert_eq!(proof.len(), query_to_labels_map.len()); - let mut randomizer = E::Fr::one(); + let mut randomizer = E::ScalarField::one(); - let mut combined_comms: BTreeMap, E::G1Projective> = BTreeMap::new(); - let mut combined_witness: E::G1Projective = E::G1Projective::zero(); - let mut 
combined_adjusted_witness: E::G1Projective = E::G1Projective::zero(); + let mut combined_comms: BTreeMap, E::G1> = BTreeMap::new(); + let mut combined_witness: E::G1 = E::G1::zero(); + let mut combined_adjusted_witness: E::G1 = E::G1::zero(); for ((_point_label, (point, labels)), p) in query_to_labels_map.into_iter().zip(proof) { let mut comms_to_combine: Vec<&'_ LabeledCommitment<_>> = Vec::new(); @@ -499,14 +497,14 @@ where fn open_combinations<'a>( ck: &Self::CommitterKey, - linear_combinations: impl IntoIterator>, - polynomials: impl IntoIterator>, + linear_combinations: impl IntoIterator>, + polynomials: impl IntoIterator>, commitments: impl IntoIterator>, query_set: &QuerySet, - opening_challenges: &mut ChallengeGenerator, + opening_challenges: &mut ChallengeGenerator, rands: impl IntoIterator, rng: Option<&mut dyn RngCore>, - ) -> Result, Self::Error> + ) -> Result, Self::Error> where Self::Randomness: 'a, Self::Commitment: 'a, @@ -530,7 +528,7 @@ where let mut degree_bound = None; let mut hiding_bound = None; let mut randomness = Self::Randomness::empty(); - let mut comm = E::G1Projective::zero(); + let mut comm = E::G1::zero(); let num_polys = lc.len(); for (coeff, label) in lc.iter().filter(|(_, l)| !l.is_one()) { @@ -566,11 +564,10 @@ where lc_info.push((lc_label, degree_bound)); } - let comms: Vec = - E::G1Projective::batch_normalization_into_affine(&lc_commitments) - .into_iter() - .map(|c| kzg10::Commitment::(c)) - .collect(); + let comms: Vec = E::G1::batch_normalization_into_affine(&lc_commitments) + .into_iter() + .map(|c| kzg10::Commitment::(c)) + .collect(); let lc_commitments = lc_info .into_iter() @@ -594,12 +591,12 @@ where /// committed in `labeled_commitments`. 
fn check_combinations<'a, R: RngCore>( vk: &Self::VerifierKey, - linear_combinations: impl IntoIterator>, + linear_combinations: impl IntoIterator>, commitments: impl IntoIterator>, eqn_query_set: &QuerySet, - eqn_evaluations: &Evaluations, - proof: &BatchLCProof, - opening_challenges: &mut ChallengeGenerator, + eqn_evaluations: &Evaluations, + proof: &BatchLCProof, + opening_challenges: &mut ChallengeGenerator, rng: &mut R, ) -> Result where @@ -619,7 +616,7 @@ where let num_polys = lc.len(); let mut degree_bound = None; - let mut combined_comm = E::G1Projective::zero(); + let mut combined_comm = E::G1::zero(); for (coeff, label) in lc.iter() { if label.is_one() { @@ -651,11 +648,10 @@ where lc_info.push((lc_label, degree_bound)); } - let comms: Vec = - E::G1Projective::batch_normalization_into_affine(&lc_commitments) - .into_iter() - .map(|c| kzg10::Commitment(c)) - .collect(); + let comms: Vec = E::G1::batch_normalization_into_affine(&lc_commitments) + .into_iter() + .map(|c| kzg10::Commitment(c)) + .collect(); let lc_commitments = lc_info .into_iter() @@ -681,31 +677,31 @@ mod tests { use super::SonicKZG10; use ark_bls12_377::Bls12_377; use ark_bls12_381::Bls12_381; - use ark_ec::PairingEngine; + use ark_ec::pairing::Pairing; use ark_ff::UniformRand; use ark_poly::{univariate::DensePolynomial as DensePoly, DenseUVPolynomial}; use ark_sponge::poseidon::PoseidonSponge; use rand_chacha::ChaCha20Rng; - type UniPoly_381 = DensePoly<::Fr>; - type UniPoly_377 = DensePoly<::Fr>; + type UniPoly_381 = DensePoly<::Fr>; + type UniPoly_377 = DensePoly<::Fr>; type PC = SonicKZG10; - type Sponge_Bls12_377 = PoseidonSponge<::Fr>; - type Sponge_Bls12_381 = PoseidonSponge<::Fr>; + type Sponge_Bls12_377 = PoseidonSponge<::Fr>; + type Sponge_Bls12_381 = PoseidonSponge<::Fr>; type PC_Bls12_377 = PC; type PC_Bls12_381 = PC; - fn rand_poly( + fn rand_poly( degree: usize, _: Option, rng: &mut ChaCha20Rng, - ) -> DensePoly { - DensePoly::::rand(degree, rng) + ) -> DensePoly { + 
DensePoly::::rand(degree, rng) } - fn rand_point(_: Option, rng: &mut ChaCha20Rng) -> E::Fr { - E::Fr::rand(rng) + fn rand_point(_: Option, rng: &mut ChaCha20Rng) -> E::ScalarField { + E::ScalarField::rand(rng) } #[test] diff --git a/src/streaming_kzg/mod.rs b/src/streaming_kzg/mod.rs index 4c5e8385..a1718a11 100644 --- a/src/streaming_kzg/mod.rs +++ b/src/streaming_kzg/mod.rs @@ -86,7 +86,7 @@ mod data_structures; mod space; mod time; -use ark_ec::ProjectiveCurve; +use ark_ec::CurveGroup; use ark_serialize::CanonicalSerialize; use ark_std::vec::Vec; pub use data_structures::*; @@ -103,40 +103,39 @@ use ark_std::ops::{Add, Mul}; use ark_std::borrow::Borrow; use ark_std::fmt; -use ark_ec::{msm::VariableBaseMSM, AffineCurve, PairingEngine}; +use ark_ec::{pairing::Pairing, AffineRepr, VariableBaseMSM}; /// A Kate polynomial commitment over a bilinear group, represented as a single \\(\GG_1\\) element. #[derive(Debug, Copy, Clone, PartialEq, Eq)] -pub struct Commitment(pub(crate) E::G1Affine); +pub struct Commitment(pub(crate) E::G1Affine); -impl Commitment { +impl Commitment { /// Return the size of Commitment in bytes. pub fn size_in_bytes(&self) -> usize { - // ark_ff::to_bytes![E::G1Affine::zero()].unwrap().len() / 2 - E::G1Affine::zero().serialized_size() / 2 + E::G1Affine::zero().serialized_size(Compress::Yes) } } #[inline] -fn msm(bases: &[E::G1Affine], scalars: &[E::Fr]) -> E::G1Affine { +fn msm(bases: &[E::G1Affine], scalars: &[E::ScalarField]) -> E::G1Affine { let scalars = scalars.iter().map(|x| x.into_bigint()).collect::>(); - let sp = ::msm_bigint(bases, &scalars); + let sp = ::msm_bigint(bases, &scalars); sp.into_affine() } /// Polynomial evaluation proof, represented as a single \\(\GG_1\\) element. 
#[derive(Clone, Debug, PartialEq, Eq)] -pub struct EvaluationProof(pub E::G1Affine); +pub struct EvaluationProof(pub E::G1Affine); -impl Add for EvaluationProof { +impl Add for EvaluationProof { type Output = Self; fn add(self, rhs: Self) -> Self::Output { - EvaluationProof(self.0 + rhs.0) + EvaluationProof((self.0 + rhs.0).into_affine()) } } -impl core::iter::Sum for EvaluationProof { +impl core::iter::Sum for EvaluationProof { fn sum>(iter: I) -> Self { let zero = EvaluationProof(E::G1Affine::zero()); iter.fold(zero, |x, y| x + y) @@ -159,27 +158,27 @@ pub(crate) type VerificationResult = Result<(), VerificationError>; /// The verification key for the polynomial commitment scheme. /// It also implements verification functions for the evaluation proof. #[derive(Debug, PartialEq, Eq)] -pub struct VerifierKey { +pub struct VerifierKey { /// The generator of \\(\GG_1\\) powers_of_g: Vec, /// The generator og \\(\GG_2\\), together with its multiplication by the trapdoor. powers_of_g2: Vec, } -impl VerifierKey { +impl VerifierKey { /// The verification procedure for the EvaluationProof with a single polynomial evaluated at a single evaluation point. /// The polynomial are evaluated at the point ``alpha`` and is committed as ``commitment``. /// The evaluation proof can be obtained either in a space-efficient or a time-efficient flavour. 
pub fn verify( &self, commitment: &Commitment, - &alpha: &E::Fr, - evaluation: &E::Fr, + &alpha: &E::ScalarField, + evaluation: &E::ScalarField, proof: &EvaluationProof, ) -> VerificationResult { - let scalars = [(-alpha).into_bigint(), E::Fr::one().into_bigint()]; - let ep = ::msm_bigint(&self.powers_of_g2, &scalars); - let lhs = commitment.0.into_projective() - self.powers_of_g[0].mul(evaluation); + let scalars = [(-alpha).into_bigint(), E::ScalarField::one().into_bigint()]; + let ep = ::msm_bigint(&self.powers_of_g2, &scalars); + let lhs = commitment.0.into_group() - self.powers_of_g[0].mul(evaluation); let g2 = self.powers_of_g2[0]; if E::pairing(lhs, g2) == E::pairing(proof.0, ep) { @@ -198,21 +197,20 @@ impl VerifierKey { pub fn verify_multi_points( &self, commitments: &[Commitment], - eval_points: &[E::Fr], - evaluations: &[Vec], + eval_points: &[E::ScalarField], + evaluations: &[Vec], proof: &EvaluationProof, - open_chal: &E::Fr, + open_chal: &E::ScalarField, ) -> VerificationResult { // Computing the vanishing polynomial over eval_points let zeros = vanishing_polynomial(eval_points); let zeros_repr = zeros.iter().map(|x| x.into_bigint()).collect::>(); - let zeros = - ::msm_bigint(&self.powers_of_g2, &zeros_repr); + let zeros = ::msm_bigint(&self.powers_of_g2, &zeros_repr); // Computing the inverse for the interpolation let mut sca_inverse = Vec::new(); for (j, x_j) in eval_points.iter().enumerate() { - let mut sca = E::Fr::one(); + let mut sca = E::ScalarField::one(); for (k, x_k) in eval_points.iter().enumerate() { if j == k { continue; @@ -226,12 +224,13 @@ impl VerifierKey { // Computing the lagrange polynomial for the interpolation let mut lang = Vec::new(); for (j, _x_j) in eval_points.iter().enumerate() { - let mut l_poly = DensePolynomial::from_coefficients_vec(vec![E::Fr::one()]); + let mut l_poly = DensePolynomial::from_coefficients_vec(vec![E::ScalarField::one()]); for (k, x_k) in eval_points.iter().enumerate() { if j == k { continue; } - let 
tmp_poly = DensePolynomial::from_coefficients_vec(vec![-(*x_k), E::Fr::one()]); + let tmp_poly = + DensePolynomial::from_coefficients_vec(vec![-(*x_k), E::ScalarField::one()]); l_poly = l_poly.mul(&tmp_poly); } lang.push(l_poly); @@ -250,11 +249,11 @@ impl VerifierKey { // Gathering commitments let comm_vec = commitments.iter().map(|x| x.0).collect::>(); let etas_repr = etas.iter().map(|e| e.into_bigint()).collect::>(); - let f_comm = ::msm_bigint(&comm_vec, &etas_repr); + let f_comm = ::msm_bigint(&comm_vec, &etas_repr); let g2 = self.powers_of_g2[0]; - if E::pairing(f_comm - i_comm.into_projective(), g2) == E::pairing(proof.0, zeros) { + if E::pairing(f_comm - i_comm.into_group(), g2) == E::pairing(proof.0, zeros) { Ok(()) } else { Err(VerificationError) @@ -262,13 +261,13 @@ impl VerifierKey { } } -fn interpolate_poly( - eval_points: &[E::Fr], - evals: &[E::Fr], - sca_inverse: &[E::Fr], - lang: &[DensePolynomial], -) -> DensePolynomial { - let mut res = DensePolynomial::from_coefficients_vec(vec![E::Fr::zero()]); +fn interpolate_poly( + eval_points: &[E::ScalarField], + evals: &[E::ScalarField], + sca_inverse: &[E::ScalarField], + lang: &[DensePolynomial], +) -> DensePolynomial { + let mut res = DensePolynomial::from_coefficients_vec(vec![E::ScalarField::zero()]); for (j, (_x_j, y_j)) in eval_points.iter().zip(evals.iter()).enumerate() { let l_poly = lang[j].mul(sca_inverse[j] * y_j); res = (&res).add(&l_poly); diff --git a/src/streaming_kzg/space.rs b/src/streaming_kzg/space.rs index 4fd4d13a..94a59bcc 100644 --- a/src/streaming_kzg/space.rs +++ b/src/streaming_kzg/space.rs @@ -1,5 +1,5 @@ //! Space-efficient implementation of the polynomial commitment of Kate et al. 
-use ark_ec::{PairingEngine, ProjectiveCurve}; +use ark_ec::{pairing::Pairing, CurveGroup}; use ark_ff::{PrimeField, Zero}; use ark_poly::Polynomial; use ark_std::borrow::Borrow; @@ -7,7 +7,7 @@ use ark_std::collections::VecDeque; use ark_std::vec::Vec; use crate::streaming_kzg::{ceil_div, vanishing_polynomial, FoldedPolynomialTree}; -use ark_ec::msm::{ChunkedPippenger, HashMapPippenger, VariableBaseMSM}; +use ark_ec::scalar_mul::variable_base::{ChunkedPippenger, HashMapPippenger, VariableBaseMSM}; use ark_std::iterable::{Iterable, Reverse}; use super::{time::CommitterKey, VerifierKey}; @@ -20,7 +20,7 @@ const LENGTH_MISMATCH_MSG: &str = "Expecting at least one element in the committ #[derive(Clone)] pub struct CommitterKeyStream where - E: PairingEngine, + E: Pairing, SG: Iterable, SG::Item: Borrow, { @@ -32,7 +32,7 @@ where impl CommitterKeyStream where - E: PairingEngine, + E: Pairing, SG: Iterable, SG::Item: Borrow, { @@ -57,12 +57,12 @@ where pub fn open( &self, polynomial: &SF, - alpha: &E::Fr, + alpha: &E::ScalarField, max_msm_buffer: usize, - ) -> (E::Fr, EvaluationProof) + ) -> (E::ScalarField, EvaluationProof) where SF: Iterable, - SF::Item: Borrow, + SF::Item: Borrow, { let mut quotient = ChunkedPippenger::new(max_msm_buffer); @@ -74,7 +74,7 @@ where // See let bases = bases_init.skip(self.powers_of_g.len() - polynomial.len()); - let mut previous = E::Fr::zero(); + let mut previous = E::ScalarField::zero(); for (scalar, base) in scalars.zip(bases) { quotient.add(base, previous.into_bigint()); let coefficient = previous * alpha + scalar.borrow(); @@ -90,12 +90,12 @@ where pub fn open_multi_points( &self, polynomial: &SF, - points: &[E::Fr], + points: &[E::ScalarField], max_msm_buffer: usize, - ) -> (Vec, EvaluationProof) + ) -> (Vec, EvaluationProof) where SF: Iterable, - SF::Item: Borrow, + SF::Item: Borrow, { let zeros = vanishing_polynomial(points); let mut quotient = ChunkedPippenger::new(max_msm_buffer); @@ -104,7 +104,7 @@ where // See let mut bases 
= bases_init.skip(self.powers_of_g.len() - polynomial.len() + zeros.degree()); - let mut state = VecDeque::::with_capacity(points.len()); + let mut state = VecDeque::::with_capacity(points.len()); let mut polynomial_iterator = polynomial.iter(); @@ -131,13 +131,12 @@ where pub fn commit(&self, polynomial: &SF) -> Commitment where SF: Iterable, - SF::Item: Borrow, + SF::Item: Borrow, { assert!(self.powers_of_g.len() >= polynomial.len()); Commitment( - ::msm_chunks(&self.powers_of_g, polynomial) - .into_affine(), + ::msm_chunks(&self.powers_of_g, polynomial).into_affine(), ) } @@ -147,7 +146,7 @@ where polynomials: &[&'a dyn Iterable>], ) -> Vec> where - F: Borrow, + F: Borrow, { polynomials.iter().map(|&p| self.commit(p)).collect() } @@ -157,12 +156,12 @@ where /// The function takes as input a committer key and the tree structure of all the folding polynomials, and produces the desired commitment for each polynomial. pub fn commit_folding( &self, - polynomials: &FoldedPolynomialTree, + polynomials: &FoldedPolynomialTree, max_msm_buffer: usize, ) -> Vec> where SF: Iterable, - SF::Item: Borrow, + SF::Item: Borrow, { let n = polynomials.depth(); let mut pippengers: Vec> = Vec::new(); @@ -196,20 +195,20 @@ where /// `eta` is the random challenge for batching folding polynomials. 
pub fn open_folding<'a, SF>( &self, - polynomials: FoldedPolynomialTree<'a, E::Fr, SF>, - points: &[E::Fr], - etas: &[E::Fr], + polynomials: FoldedPolynomialTree<'a, E::ScalarField, SF>, + points: &[E::ScalarField], + etas: &[E::ScalarField], max_msm_buffer: usize, - ) -> (Vec>, EvaluationProof) + ) -> (Vec>, EvaluationProof) where SG: Iterable, SF: Iterable, - E: PairingEngine, + E: Pairing, SG::Item: Borrow, - SF::Item: Borrow + Copy, + SF::Item: Borrow + Copy, { let n = polynomials.depth(); - let mut pippenger = HashMapPippenger::::new(max_msm_buffer); + let mut pippenger = HashMapPippenger::::new(max_msm_buffer); let mut folded_bases = Vec::new(); let zeros = vanishing_polynomial(points); let mut remainders = vec![VecDeque::new(); n]; @@ -222,7 +221,7 @@ where let bases = bases_init.skip(delta); (0..points.len()).for_each(|_| { - remainders[i - 1].push_back(E::Fr::zero()); + remainders[i - 1].push_back(E::ScalarField::zero()); }); folded_bases.push(bases); @@ -255,7 +254,7 @@ where } } -impl<'a, E: PairingEngine> From<&'a CommitterKey> +impl<'a, E: Pairing> From<&'a CommitterKey> for CommitterKeyStream> { fn from(ck: &'a CommitterKey) -> Self { @@ -268,7 +267,7 @@ impl<'a, E: PairingEngine> From<&'a CommitterKey> impl From<&CommitterKeyStream> for VerifierKey where - E: PairingEngine, + E: Pairing, SG: Iterable, SG::Item: Borrow, { diff --git a/src/streaming_kzg/time.rs b/src/streaming_kzg/time.rs index 251a0e9e..5885495f 100644 --- a/src/streaming_kzg/time.rs +++ b/src/streaming_kzg/time.rs @@ -1,15 +1,16 @@ //! An impementation of a time-efficient version of Kate et al's polynomial commitment, //! with optimization from [\[BDFG20\]](https://eprint.iacr.org/2020/081.pdf). 
-use ark_ec::msm::FixedBase; -use ark_ec::PairingEngine; -use ark_ec::ProjectiveCurve; +use ark_ec::pairing::Pairing; +use ark_ec::scalar_mul::fixed_base::FixedBase; +use ark_ec::CurveGroup; use ark_ff::{PrimeField, Zero}; use ark_poly::{univariate::DensePolynomial, DenseUVPolynomial}; use ark_std::borrow::Borrow; -use ark_std::ops::{Div, Mul}; +use ark_std::ops::Div; use ark_std::rand::RngCore; use ark_std::vec::Vec; use ark_std::UniformRand; +use std::ops::Mul; use crate::streaming_kzg::{ linear_combination, msm, powers, Commitment, EvaluationProof, VerifierKey, @@ -23,12 +24,12 @@ use super::vanishing_polynomial; /// plus the `max_eval_degree` powers over \\(\GG_2\\), /// where `max_degree` is the max polynomial degree to commit to, /// and `max_eval_degree` is the max number of different points to open simultaneously. -pub struct CommitterKey { +pub struct CommitterKey { pub(crate) powers_of_g: Vec, pub(crate) powers_of_g2: Vec, } -impl From<&CommitterKey> for VerifierKey { +impl From<&CommitterKey> for VerifierKey { fn from(ck: &CommitterKey) -> VerifierKey { let max_eval_points = ck.max_eval_points(); let powers_of_g2 = ck.powers_of_g2[..max_eval_points + 1].to_vec(); @@ -41,7 +42,7 @@ impl From<&CommitterKey> for VerifierKey { } } -impl CommitterKey { +impl CommitterKey { /// The setup algorithm for the commitment scheme. /// /// Given a degree bound `max_degree`, @@ -50,17 +51,17 @@ impl CommitterKey { /// construct the committer key. pub fn new(max_degree: usize, max_eval_points: usize, rng: &mut impl RngCore) -> Self { // Compute the consecutive powers of an element. 
- let tau = E::Fr::rand(rng); + let tau = E::ScalarField::rand(rng); let powers_of_tau = powers(tau, max_degree + 1); - let g = E::G1Projective::rand(rng); + let g = E::G1::rand(rng); let window_size = FixedBase::get_mul_window_size(max_degree + 1); - let scalar_bits = E::Fr::MODULUS_BIT_SIZE as usize; + let scalar_bits = E::ScalarField::MODULUS_BIT_SIZE as usize; let g_table = FixedBase::get_window_table(scalar_bits, window_size, g); let powers_of_g_proj = FixedBase::msm(scalar_bits, window_size, &g_table, &powers_of_tau); - let powers_of_g = E::G1Projective::batch_normalization_into_affine(&powers_of_g_proj); + let powers_of_g = E::G1::normalize_batch(&powers_of_g_proj); - let g2 = E::G2Projective::rand(rng).into_affine(); + let g2 = E::G2::rand(rng).into_affine(); let powers_of_g2 = powers_of_tau .iter() .take(max_eval_points + 1) @@ -80,20 +81,20 @@ impl CommitterKey { } /// Given a polynomial `polynomial` of degree less than `max_degree`, return a commitment to `polynomial`. - pub fn commit(&self, polynomial: &[E::Fr]) -> Commitment { + pub fn commit(&self, polynomial: &[E::ScalarField]) -> Commitment { Commitment(msm::(&self.powers_of_g, polynomial)) } /// Obtain a new preprocessed committer key defined by the indices `indices`. 
pub fn index_by(&self, indices: &[usize]) -> Self { - let mut indexed_powers_of_g = vec![E::G1Affine::zero(); self.powers_of_g.len()]; + let mut indexed_powers_of_g = vec![E::G1::zero(); self.powers_of_g.len()]; indices .iter() .zip(self.powers_of_g.iter()) .for_each(|(&i, &g)| indexed_powers_of_g[i] = indexed_powers_of_g[i] + g); Self { powers_of_g2: self.powers_of_g2.clone(), - powers_of_g: indexed_powers_of_g, + powers_of_g: E::G1::normalize_batch(indexed_powers_of_g.as_slice()), } } @@ -101,7 +102,7 @@ impl CommitterKey { pub fn batch_commit(&self, polynomials: J) -> Vec> where J: IntoIterator, - J::Item: Borrow>, + J::Item: Borrow>, { polynomials .into_iter() @@ -114,19 +115,21 @@ impl CommitterKey { /// together with an evaluation proof. pub fn open( &self, - polynomial: &[E::Fr], - evalualtion_point: &E::Fr, - ) -> (E::Fr, EvaluationProof) { + polynomial: &[E::ScalarField], + evalualtion_point: &E::ScalarField, + ) -> (E::ScalarField, EvaluationProof) { let mut quotient = Vec::new(); - let mut previous = E::Fr::zero(); + let mut previous = E::ScalarField::zero(); for &c in polynomial.iter().rev() { let coefficient = c + previous * evalualtion_point; quotient.insert(0, coefficient); previous = coefficient; } - let (&evaluation, quotient) = quotient.split_first().unwrap_or((&E::Fr::zero(), &[])); + let (&evaluation, quotient) = quotient + .split_first() + .unwrap_or((&E::ScalarField::zero(), &[])); let evaluation_proof = msm::(&self.powers_of_g, quotient); (evaluation, EvaluationProof(evaluation_proof)) } @@ -134,8 +137,8 @@ impl CommitterKey { /// Evaluate a single polynomial at a set of points `eval_points`, and provide a single evaluation proof. 
pub fn open_multi_points( &self, - polynomial: &[E::Fr], - eval_points: &[E::Fr], + polynomial: &[E::ScalarField], + eval_points: &[E::ScalarField], ) -> EvaluationProof { // Computing the vanishing polynomial over eval_points let z_poly = vanishing_polynomial(eval_points); @@ -149,14 +152,14 @@ impl CommitterKey { /// `eval_chal` is the random challenge for batching evaluation proofs across different polynomials. pub fn batch_open_multi_points( &self, - polynomials: &[&Vec], - eval_points: &[E::Fr], - eval_chal: &E::Fr, + polynomials: &[&Vec], + eval_points: &[E::ScalarField], + eval_chal: &E::ScalarField, ) -> EvaluationProof { assert!(eval_points.len() < self.powers_of_g2.len()); let etas = powers(*eval_chal, polynomials.len()); let batched_polynomial = - linear_combination(polynomials, &etas).unwrap_or_else(|| vec![E::Fr::zero()]); + linear_combination(polynomials, &etas).unwrap_or_else(|| vec![E::ScalarField::zero()]); self.open_multi_points(&batched_polynomial, eval_points) } } From efa8a53ab3867353f69a16a95c6cd98aacb6ce09 Mon Sep 17 00:00:00 2001 From: nikkolasg Date: Sat, 4 Feb 2023 00:00:54 +0100 Subject: [PATCH 02/12] building passing --- Cargo.toml | 6 +- src/challenge.rs | 2 +- src/constraints.rs | 2 +- src/data_structures.rs | 1 - src/ipa_pc/mod.rs | 40 +++++----- src/kzg10/data_structures.rs | 75 ++++++++++++------- src/kzg10/mod.rs | 31 ++++---- src/lib.rs | 4 +- src/marlin/marlin_pc/data_structures.rs | 3 +- src/marlin/marlin_pc/mod.rs | 10 ++- src/marlin/marlin_pst13_pc/data_structures.rs | 65 ++++++---------- src/marlin/marlin_pst13_pc/mod.rs | 55 +++++++------- src/marlin/mod.rs | 21 +++--- src/multilinear_pc/mod.rs | 44 ++++------- src/sonic_pc/data_structures.rs | 34 +++++---- src/sonic_pc/mod.rs | 27 ++++--- src/streaming_kzg/mod.rs | 2 +- src/streaming_kzg/space.rs | 9 ++- 18 files changed, 217 insertions(+), 214 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 4f184eba..d803afb6 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -25,7 +25,7 
@@ ark-serialize = { version = "^0.4.0", default-features = false, features = [ "de ark-ff = { version = "^0.4.0", default-features = false } ark-ec = { version = "^0.4.0", default-features = false } ark-poly = {version = "^0.4.0", default-features = false } -ark-sponge = {version = "^0.3.0", default-features = false} +ark-crypto-primitives = {version = "^0.4.0", features = ["sponge"] } ark-std = { version = "^0.4.0", default-features = false } ark-relations = { version = "^0.4.0", default-features = false, optional = true } @@ -59,7 +59,7 @@ debug = true [features] default = [ "std", "parallel" ] -std = [ "ark-ff/std", "ark-ec/std", "ark-poly/std", "ark-std/std", "ark-relations/std", "ark-serialize/std", "ark-sponge/std"] -r1cs = [ "ark-relations", "ark-r1cs-std", "hashbrown", "ark-sponge/r1cs"] +std = [ "ark-ff/std", "ark-ec/std", "ark-poly/std", "ark-std/std", "ark-relations/std", "ark-serialize/std", "ark-crypto-primitives/std"] +r1cs = [ "ark-relations", "ark-r1cs-std", "hashbrown", "ark-crypto-primitives/r1cs"] print-trace = [ "ark-std/print-trace" ] parallel = [ "std", "ark-ff/parallel", "ark-ec/parallel", "ark-poly/parallel", "ark-std/parallel", "rayon" ] diff --git a/src/challenge.rs b/src/challenge.rs index aa78d902..e88bdd2e 100644 --- a/src/challenge.rs +++ b/src/challenge.rs @@ -1,5 +1,5 @@ use ark_ff::PrimeField; -use ark_sponge::{CryptographicSponge, FieldElementSize}; +use ark_crypto_primitives::sponge::{CryptographicSponge, FieldElementSize}; /// `ChallengeGenerator` generates opening challenges using multivariate or univariate strategy. /// For multivariate strategy, each challenge is freshly squeezed from a sponge. 
diff --git a/src/constraints.rs b/src/constraints.rs index 729b833c..8abb14ee 100644 --- a/src/constraints.rs +++ b/src/constraints.rs @@ -2,12 +2,12 @@ use crate::{ data_structures::LabeledCommitment, BatchLCProof, LCTerm, LinearCombination, PolynomialCommitment, String, Vec, }; +use ark_crypto_primitives::sponge::CryptographicSponge; use ark_ff::PrimeField; use ark_poly::Polynomial; use ark_r1cs_std::fields::nonnative::NonNativeFieldVar; use ark_r1cs_std::{fields::fp::FpVar, prelude::*}; use ark_relations::r1cs::{ConstraintSystemRef, Namespace, Result as R1CSResult, SynthesisError}; -use ark_sponge::CryptographicSponge; use ark_std::{borrow::Borrow, cmp::Eq, cmp::PartialEq, hash::Hash, marker::Sized}; use hashbrown::{HashMap, HashSet}; diff --git a/src/data_structures.rs b/src/data_structures.rs index b173d3c1..4802ec7b 100644 --- a/src/data_structures.rs +++ b/src/data_structures.rs @@ -2,7 +2,6 @@ use crate::{Polynomial, String, Vec}; use ark_ff::{Field, PrimeField, ToConstraintField}; use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; use ark_std::rand::RngCore; -use ark_std::sync::Arc; use ark_std::{ borrow::Borrow, marker::PhantomData, diff --git a/src/ipa_pc/mod.rs b/src/ipa_pc/mod.rs index 503bddf9..eff7143b 100644 --- a/src/ipa_pc/mod.rs +++ b/src/ipa_pc/mod.rs @@ -16,7 +16,7 @@ pub use data_structures::*; use rayon::prelude::*; use crate::challenge::ChallengeGenerator; -use ark_sponge::CryptographicSponge; +use ark_crypto_primitives::sponge::CryptographicSponge; use digest::Digest; /// A polynomial commitment scheme based on the hardness of the @@ -67,7 +67,7 @@ where .map(|s| s.into_bigint()) .collect::>(); - let mut comm = ::msm_bigint(comm_key, &scalars_bigint); + let mut comm = ::msm_bigint(comm_key, &scalars_bigint); if randomizer.is_some() { assert!(hiding_generator.is_some()); @@ -114,7 +114,7 @@ where // `log_d` is ceil(log2 (d + 1)), which is the number of steps to compute all of the challenges let log_d = ark_std::log2(d + 1) as 
usize; - let mut combined_commitment_proj = G::Projective::zero(); + let mut combined_commitment_proj = G::Group::zero(); let mut combined_v = G::ScalarField::zero(); let mut cur_challenge = opening_challenges.try_next_challenge_of_size(CHALLENGE_SIZE); @@ -196,7 +196,7 @@ where let v_prime = check_poly.evaluate(point) * &proof.c; let h_prime = h_prime.into_affine(); - let check_commitment_elem: G::Projective = Self::cm_commit( + let check_commitment_elem: G::Group = Self::cm_commit( &[proof.final_comm_key.clone(), h_prime], &[proof.c.clone(), v_prime], None, @@ -278,7 +278,7 @@ where lc_info: &[(String, Option)], elements: &[G::Group], ) -> Vec>> { - let comms = G::Projective::batch_normalization_into_affine(elements); + let comms = G::Group::normalize_batch(elements); let mut commitments = Vec::new(); let mut i = 0; @@ -327,11 +327,11 @@ where j += 1; } let generator = g.unwrap(); - generator.mul_by_cofactor_to_projective() + generator.mul_by_cofactor_to_group() }) .collect(); - G::Projective::batch_normalization_into_affine(&generators) + G::Group::normalize_batch(&generators) } } @@ -501,7 +501,7 @@ where { let mut combined_polynomial = P::zero(); let mut combined_rand = G::ScalarField::zero(); - let mut combined_commitment_proj = G::Projective::zero(); + let mut combined_commitment_proj = G::Group::zero(); let mut has_hiding = false; @@ -595,10 +595,8 @@ where Some(hiding_rand), ); - let mut batch = G::Projective::batch_normalization_into_affine(&[ - combined_commitment_proj, - hiding_commitment_proj, - ]); + let mut batch = + G::Group::normalize_batch(&[combined_commitment_proj, hiding_commitment_proj]); hiding_commitment = Some(batch.pop().unwrap()); combined_commitment = batch.pop().unwrap(); @@ -664,7 +662,7 @@ where let mut z = z.as_mut_slice(); // This will be used for transforming the key in each step - let mut key_proj: Vec = ck.comm_key.iter().map(|x| (*x).into()).collect(); + let mut key_proj: Vec = ck.comm_key.iter().map(|x| (*x).into()).collect(); 
let mut key_proj = key_proj.as_mut_slice(); let mut temp; @@ -689,7 +687,7 @@ where let r = Self::cm_commit(key_r, coeffs_l, None, None) + &h_prime.mul(Self::inner_product(coeffs_l, z_r)); - let lr = G::Projective::batch_normalization_into_affine(&[l, r]); + let lr = G::Group::normalize_batch(&[l, r]); l_vec.push(lr[0]); r_vec.push(lr[1]); @@ -719,7 +717,7 @@ where z = z_l; key_proj = key_proj_l; - temp = G::Projective::batch_normalization_into_affine(key_proj); + temp = G::Group::normalize_batch(key_proj); comm_key = &temp; n /= 2; @@ -815,7 +813,7 @@ where let mut randomizer = G::ScalarField::one(); let mut combined_check_poly = P::zero(); - let mut combined_final_key = G::Projective::zero(); + let mut combined_final_key = G::Group::zero(); for ((_point_label, (point, labels)), p) in query_to_labels_map.into_iter().zip(proof) { let lc_time = @@ -907,8 +905,8 @@ where let mut degree_bound = None; let mut hiding_bound = None; - let mut combined_comm = G::Projective::zero(); - let mut combined_shifted_comm: Option = None; + let mut combined_comm = G::Group::zero(); + let mut combined_shifted_comm: Option = None; let mut combined_rand = G::ScalarField::zero(); let mut combined_shifted_rand: Option = None; @@ -1011,8 +1009,8 @@ where let num_polys = lc.len(); let mut degree_bound = None; - let mut combined_comm = G::Projective::zero(); - let mut combined_shifted_comm: Option = None; + let mut combined_comm = G::Group::zero(); + let mut combined_shifted_comm: Option = None; for (coeff, label) in lc.iter() { if label.is_one() { @@ -1075,11 +1073,11 @@ mod tests { #![allow(non_camel_case_types)] use super::InnerProductArgPC; + use ark_crypto_primitives::sponge::poseidon::PoseidonSponge; use ark_ec::AffineRepr; use ark_ed_on_bls12_381::{EdwardsAffine, Fr}; use ark_ff::PrimeField; use ark_poly::{univariate::DensePolynomial as DensePoly, DenseUVPolynomial}; - use ark_sponge::poseidon::PoseidonSponge; use blake2::Blake2s; use rand_chacha::ChaCha20Rng; diff --git 
a/src/kzg10/data_structures.rs b/src/kzg10/data_structures.rs index bf61b638..9b8e2fc8 100644 --- a/src/kzg10/data_structures.rs +++ b/src/kzg10/data_structures.rs @@ -1,5 +1,7 @@ use crate::*; use ark_ec::pairing::Pairing; +use ark_ec::AffineRepr; +use ark_ec::Group; use ark_ff::{PrimeField, ToConstraintField}; use ark_serialize::{ CanonicalDeserialize, CanonicalSerialize, Compress, SerializationError, Valid, @@ -39,9 +41,13 @@ pub struct UniversalParams { } impl Valid for UniversalParams { - fn check(&self) -> bool { - self.powers_of_g.len() == self.powers_of_gamma_g.len() - && self.powers_of_g.len() == self.neg_powers_of_h.len() + fn check(&self) -> Result<(), SerializationError> { + if !(self.powers_of_g.len() == self.powers_of_gamma_g.len() + && self.powers_of_g.len() == self.neg_powers_of_h.len()) + { + return Err(SerializationError::InvalidData); + } + Ok(()) } } impl PCUniversalParams for UniversalParams { @@ -53,7 +59,7 @@ impl PCUniversalParams for UniversalParams { impl CanonicalSerialize for UniversalParams { fn serialize_with_mode( &self, - writer: W, + mut writer: W, compress: Compress, ) -> Result<(), SerializationError> { self.powers_of_g @@ -74,11 +80,11 @@ impl CanonicalSerialize for UniversalParams { + self.neg_powers_of_h.serialized_size(compress) } - fn serialize_uncompressed(&self, mut writer: W) -> Result<(), SerializationError> { + fn serialize_uncompressed(&self, writer: W) -> Result<(), SerializationError> { Self::serialize_with_mode(&self, writer, Compress::No) } - fn serialize_compressed(&self, mut writer: W) -> Result<(), SerializationError> { + fn serialize_compressed(&self, writer: W) -> Result<(), SerializationError> { Self::serialize_with_mode(&self, writer, Compress::Yes) } @@ -93,7 +99,7 @@ impl CanonicalSerialize for UniversalParams { impl CanonicalDeserialize for UniversalParams { fn deserialize_with_mode( - reader: R, + mut reader: R, compress: Compress, validate: ark_serialize::Validate, ) -> Result { @@ -123,13 +129,17 @@ 
impl CanonicalDeserialize for UniversalParams { fn deserialize_uncompressed(mut reader: R) -> Result { Self::deserialize_with_mode(&mut reader, Compress::No, ark_serialize::Validate::Yes) } - fn deserialize_compressed(reader: R) -> Result { + fn deserialize_compressed(mut reader: R) -> Result { Self::deserialize_with_mode(&mut reader, Compress::Yes, ark_serialize::Validate::Yes) } - fn deserialize_compressed_unchecked(reader: R) -> Result { + fn deserialize_compressed_unchecked( + mut reader: R, + ) -> Result { Self::deserialize_with_mode(&mut reader, Compress::Yes, ark_serialize::Validate::No) } - fn deserialize_uncompressed_unchecked(reader: R) -> Result { + fn deserialize_uncompressed_unchecked( + mut reader: R, + ) -> Result { Self::deserialize_with_mode(&mut reader, Compress::No, ark_serialize::Validate::No) } } @@ -158,18 +168,23 @@ impl Powers<'_, E> { } } impl<'a, E: Pairing> Valid for Powers<'a, E> { - fn check(&self) -> bool { - self.powers_of_g.len() == self.powers_of_gamma_g.len() + fn check(&self) -> Result<(), SerializationError> { + if self.powers_of_g.len() != self.powers_of_gamma_g.len() { + return Err(SerializationError::InvalidData); + } + Ok(()) } } impl<'a, E: Pairing> CanonicalSerialize for Powers<'a, E> { fn serialize_with_mode( &self, - writer: W, + mut writer: W, compress: Compress, ) -> Result<(), SerializationError> { - self.powers_of_g.serialize_with_mode(&mut writer)?; - self.powers_of_gamma_g.serialize_with_mode(&mut writer) + self.powers_of_g + .serialize_with_mode(&mut writer, compress)?; + self.powers_of_gamma_g + .serialize_with_mode(&mut writer, compress) } fn serialized_size(&self, compress: Compress) -> usize { @@ -193,7 +208,7 @@ impl<'a, E: Pairing> CanonicalSerialize for Powers<'a, E> { impl<'a, E: Pairing> CanonicalDeserialize for Powers<'a, E> { fn deserialize_with_mode( - reader: R, + mut reader: R, compress: Compress, validate: ark_serialize::Validate, ) -> Result { @@ -207,16 +222,20 @@ impl<'a, E: Pairing> 
CanonicalDeserialize for Powers<'a, E> { }) } - fn deserialize_compressed(reader: R) -> Result { + fn deserialize_compressed(mut reader: R) -> Result { Self::deserialize_with_mode(&mut reader, Compress::Yes, ark_serialize::Validate::Yes) } - fn deserialize_compressed_unchecked(reader: R) -> Result { + fn deserialize_compressed_unchecked( + mut reader: R, + ) -> Result { Self::deserialize_with_mode(&mut reader, Compress::Yes, ark_serialize::Validate::No) } - fn deserialize_uncompressed(reader: R) -> Result { + fn deserialize_uncompressed(mut reader: R) -> Result { Self::deserialize_with_mode(&mut reader, Compress::No, ark_serialize::Validate::Yes) } - fn deserialize_uncompressed_unchecked(reader: R) -> Result { + fn deserialize_uncompressed_unchecked( + mut reader: R, + ) -> Result { Self::deserialize_with_mode(&mut reader, Compress::No, ark_serialize::Validate::No) } } @@ -255,7 +274,7 @@ impl Valid for VerifierKey { impl CanonicalSerialize for VerifierKey { fn serialize_with_mode( &self, - writer: W, + mut writer: W, compress: Compress, ) -> Result<(), SerializationError> { self.g.serialize_with_mode(&mut writer, compress)?; @@ -286,14 +305,14 @@ impl CanonicalSerialize for VerifierKey { impl CanonicalDeserialize for VerifierKey { fn deserialize_with_mode( - reader: R, + mut reader: R, compress: Compress, validate: ark_serialize::Validate, ) -> Result { let g = E::G1Affine::deserialize_with_mode(&mut reader, compress, validate)?; - let gamma_g = E::G1Affine::deserialize(&mut reader, compress, validate)?; - let h = E::G2Affine::deserialize(&mut reader, compress, validate)?; - let beta_h = E::G2Affine::deserialize(&mut reader, compress, validate)?; + let gamma_g = E::G1Affine::deserialize_with_mode(&mut reader, compress, validate)?; + let h = E::G2Affine::deserialize_with_mode(&mut reader, compress, validate)?; + let beta_h = E::G2Affine::deserialize_with_mode(&mut reader, compress, validate)?; let prepared_h = E::G2Prepared::from(h.clone()); let prepared_beta_h = 
E::G2Prepared::from(beta_h.clone()); @@ -357,7 +376,7 @@ impl PreparedVerifierKey { let supported_bits = E::ScalarField::MODULUS_BIT_SIZE as usize; let mut prepared_g = Vec::::new(); - let mut g = E::G1::ScalarFieldom(vk.g.clone()); + let mut g = E::G1::from(vk.g.clone()); for _ in 0..supported_bits { prepared_g.push(g.clone().into()); g.double_in_place(); @@ -411,7 +430,7 @@ impl<'a, E: Pairing> AddAssign<(E::ScalarField, &'a Commitment)> for Commitme #[inline] fn add_assign(&mut self, (f, other): (E::ScalarField, &'a Commitment)) { let mut other = other.0 * f; - other.add_assign_mixed(&self.0); + other.add_assign(&self.0); self.0 = other.into(); } } @@ -435,7 +454,7 @@ impl PreparedCommitment { /// prepare `PreparedCommitment` from `Commitment` pub fn prepare(comm: &Commitment) -> Self { let mut prepared_comm = Vec::::new(); - let mut cur = E::G1::ScalarFieldom(comm.0.clone()); + let mut cur = E::G1::from(comm.0.clone()); let supported_bits = E::ScalarField::MODULUS_BIT_SIZE as usize; diff --git a/src/kzg10/mod.rs b/src/kzg10/mod.rs index 0aea289f..89fd384f 100644 --- a/src/kzg10/mod.rs +++ b/src/kzg10/mod.rs @@ -6,11 +6,13 @@ //! This construction achieves extractability in the algebraic group model (AGM). 
use crate::{BTreeMap, Error, LabeledPolynomial, PCRandomness, ToString, Vec}; +use ark_ec::AffineRepr; use ark_ec::{pairing::Pairing, CurveGroup}; use ark_ec::{scalar_mul::fixed_base::FixedBase, VariableBaseMSM}; use ark_ff::{One, PrimeField, UniformRand, Zero}; use ark_poly::DenseUVPolynomial; use ark_std::{format, marker::PhantomData, ops::Div, ops::Mul, vec}; +use std::ops::AddAssign; use ark_std::rand::RngCore; #[cfg(feature = "parallel")] @@ -48,7 +50,7 @@ where let beta = E::ScalarField::rand(rng); let g = E::G1::rand(rng); let gamma_g = E::G1::rand(rng); - let h = E::G2Projective::rand(rng); + let h = E::G2::rand(rng); let mut powers_of_beta = vec![E::ScalarField::one()]; @@ -75,8 +77,8 @@ where powers_of_gamma_g.push(powers_of_gamma_g.last().unwrap().mul(&beta)); end_timer!(gamma_g_time); - let powers_of_g = E::G1::batch_normalization_into_affine(&powers_of_g); - let powers_of_gamma_g = E::G1::batch_normalization_into_affine(&powers_of_gamma_g) + let powers_of_g = E::G1::normalize_batch(&powers_of_g); + let powers_of_gamma_g = E::G1::normalize_batch(&powers_of_gamma_g) .into_iter() .enumerate() .collect(); @@ -91,14 +93,14 @@ where } let neg_h_table = FixedBase::get_window_table(scalar_bits, window_size, h); - let neg_powers_of_h = FixedBase::msm::( + let neg_powers_of_h = FixedBase::msm::( scalar_bits, window_size, &neg_h_table, &neg_powers_of_beta, ); - let affines = E::G2Projective::batch_normalization_into_affine(&neg_powers_of_h); + let affines = E::G2::normalize_batch(&neg_powers_of_h); let mut affines_map = BTreeMap::new(); affines.into_iter().enumerate().for_each(|(i, a)| { affines_map.insert(i, a); @@ -178,7 +180,7 @@ where .into_affine(); end_timer!(msm_time); - commitment.add_assign_mixed(&random_commitment); + commitment.add_assign(&random_commitment); end_timer!(commit_time); Ok((Commitment(commitment.into()), randomness)) @@ -293,13 +295,13 @@ where proof: &Proof, ) -> Result { let check_time = start_timer!(|| "Checking evaluation"); - let mut 
inner = comm.0.into_projective() - &vk.g.mul(value); + let mut inner = comm.0.into_group() - &vk.g.mul(value); if let Some(random_v) = proof.random_v { inner -= &vk.gamma_g.mul(random_v); } let lhs = E::pairing(inner, vk.h); - let inner = vk.beta_h.into_projective() - &vk.h.mul(point); + let inner = vk.beta_h.into_group() - &vk.h.mul(point); let rhs = E::pairing(proof.w, inner); end_timer!(check_time, || format!("Result: {}", lhs == rhs)); @@ -331,7 +333,7 @@ where for (((c, z), v), proof) in commitments.iter().zip(points).zip(values).zip(proofs) { let w = proof.w; let mut temp = w.mul(*z); - temp.add_assign_mixed(&c.0); + temp.add_assign(&c.0); let c = temp; g_multiplier += &(randomizer * v); if let Some(random_v) = proof.random_v { @@ -348,15 +350,16 @@ where end_timer!(combination_time); let to_affine_time = start_timer!(|| "Converting results to affine for pairing"); - let affine_points = E::G1::batch_normalization_into_affine(&[-total_w, total_c]); + let affine_points = E::G1::normalize_batch(&[-total_w, total_c]); let (total_w, total_c) = (affine_points[0], affine_points[1]); end_timer!(to_affine_time); let pairing_time = start_timer!(|| "Performing product of pairings"); - let result = E::product_of_pairings(&[ - (total_w.into(), vk.prepared_beta_h.clone()), - (total_c.into(), vk.prepared_h.clone()), - ]) + let result = E::multi_pairing( + [total_w, total_c], + [vk.prepared_beta_h.clone(), vk.prepared_h.clone()], + ) + .0 .is_one(); end_timer!(pairing_time); end_timer!(check_time, || format!("Result: {}", result)); diff --git a/src/lib.rs b/src/lib.rs index bc0a7fb1..2138d492 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -108,7 +108,7 @@ pub mod challenge; pub mod multilinear_pc; use crate::challenge::ChallengeGenerator; -use ark_sponge::{CryptographicSponge, FieldElementSize}; +use ark_crypto_primitives::sponge::{CryptographicSponge, FieldElementSize}; /// Multivariate polynomial commitment based on the construction in /// [[PST13]][pst] with batching and 
(optional) hiding property inspired /// by the univariate scheme in [[CHMMVW20, "Marlin"]][marlin] @@ -533,8 +533,8 @@ fn lc_query_set_to_poly_query_set<'a, F: Field, T: Clone + Ord>( #[cfg(test)] pub mod tests { use crate::*; + use ark_crypto_primitives::sponge::poseidon::{PoseidonConfig, PoseidonSponge}; use ark_poly::Polynomial; - use ark_sponge::poseidon::{PoseidonConfig, PoseidonSponge}; use ark_std::rand::{ distributions::{Distribution, Uniform}, Rng, SeedableRng, diff --git a/src/marlin/marlin_pc/data_structures.rs b/src/marlin/marlin_pc/data_structures.rs index 98fce1ac..af75bdc9 100644 --- a/src/marlin/marlin_pc/data_structures.rs +++ b/src/marlin/marlin_pc/data_structures.rs @@ -3,6 +3,7 @@ use crate::{ PCRandomness, PCVerifierKey, Vec, }; use ark_ec::pairing::Pairing; +use ark_ec::Group; use ark_ff::{Field, PrimeField, ToConstraintField}; use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; use ark_std::ops::{Add, AddAssign}; @@ -188,7 +189,7 @@ impl PCPreparedVerifierKey> for PreparedVerifierKey::new(); - let mut cur = E::G1::ScalarFieldom(shift_power.clone()); + let mut cur = E::G1::from(shift_power.clone()); for _ in 0..supported_bits { prepared_shift_power.push(cur.clone().into()); cur.double_in_place(); diff --git a/src/marlin/marlin_pc/mod.rs b/src/marlin/marlin_pc/mod.rs index 61ee78fa..3342e206 100644 --- a/src/marlin/marlin_pc/mod.rs +++ b/src/marlin/marlin_pc/mod.rs @@ -4,6 +4,8 @@ use crate::{BatchLCProof, Error, Evaluations, QuerySet}; use crate::{LabeledCommitment, LabeledPolynomial, LinearCombination}; use crate::{PCRandomness, PCUniversalParams, PolynomialCommitment}; use ark_ec::pairing::Pairing; +use ark_ec::AffineRepr; +use ark_ec::CurveGroup; use ark_ff::Zero; use ark_poly::DenseUVPolynomial; use ark_std::rand::RngCore; @@ -11,7 +13,7 @@ use ark_std::{marker::PhantomData, ops::Div, vec}; mod data_structures; use crate::challenge::ChallengeGenerator; -use ark_sponge::CryptographicSponge; +use 
ark_crypto_primitives::sponge::CryptographicSponge; pub use data_structures::*; /// Polynomial commitment based on [[KZG10]][kzg], with degree enforcement, batching, @@ -312,7 +314,7 @@ where } let proof_time = start_timer!(|| "Creating proof for unshifted polynomials"); let proof = kzg10::KZG10::open(&ck.powers(), &p, *point, &r)?; - let mut w = proof.w.into_projective(); + let mut w = proof.w.into_group(); let mut random_v = proof.random_v; end_timer!(proof_time); @@ -327,7 +329,7 @@ where )?; end_timer!(proof_time); - w += &shifted_proof.w.into_projective(); + w += &shifted_proof.w.into_group(); if let Some(shifted_random_v) = shifted_proof.random_v { random_v = random_v.map(|v| v + &shifted_random_v); } @@ -538,10 +540,10 @@ mod tests { use super::MarlinKZG10; use ark_bls12_377::Bls12_377; use ark_bls12_381::Bls12_381; + use ark_crypto_primitives::sponge::poseidon::PoseidonSponge; use ark_ec::pairing::Pairing; use ark_ff::UniformRand; use ark_poly::{univariate::DensePolynomial as DensePoly, DenseUVPolynomial}; - use ark_sponge::poseidon::PoseidonSponge; use rand_chacha::ChaCha20Rng; type UniPoly_381 = DensePoly<::Fr>; diff --git a/src/marlin/marlin_pst13_pc/data_structures.rs b/src/marlin/marlin_pst13_pc/data_structures.rs index a5923c56..a3e7f598 100644 --- a/src/marlin/marlin_pst13_pc/data_structures.rs +++ b/src/marlin/marlin_pst13_pc/data_structures.rs @@ -11,7 +11,7 @@ use ark_std::{ }; use ark_serialize::{ - CanonicalDeserialize, Valid, CanonicalSerialize, Compress, SerializationError, Validate, + CanonicalDeserialize, CanonicalSerialize, Compress, SerializationError, Valid, Validate, }; use ark_std::rand::RngCore; @@ -57,15 +57,15 @@ where { fn check(&self) -> Result<(), SerializationError> { if self.powers_of_g.len() != (self.max_degree + 1) * self.num_vars { - return false; + return Err(SerializationError::InvalidData); } if self.beta_h.len() != self.num_vars { - return false; + return Err(SerializationError::InvalidData); } if 
self.prepared_beta_h.len() != self.num_vars { - return false; + return Err(SerializationError::InvalidData); } Ok(()) } @@ -79,17 +79,18 @@ where { fn serialize_with_mode( &self, - writer: W, + mut writer: W, compress: Compress, ) -> Result<(), SerializationError> { self.powers_of_g .serialize_with_mode(&mut writer, compress)?; - self.gamma_g.serialize(&mut writer, compress)?; - self.powers_of_gamma_g.serialize(&mut writer, compress)?; - self.h.serialize(&mut writer, compress)?; - self.beta_h.serialize(&mut writer, compress)?; - self.num_vars.serialize(&mut writer, compress)?; - self.max_degree.serialize(&mut writer, compress) + self.gamma_g.serialize_with_mode(&mut writer, compress)?; + self.powers_of_gamma_g + .serialize_with_mode(&mut writer, compress)?; + self.h.serialize_with_mode(&mut writer, compress)?; + self.beta_h.serialize_with_mode(&mut writer, compress)?; + self.num_vars.serialize_with_mode(&mut writer, compress)?; + self.max_degree.serialize_with_mode(&mut writer, compress) } fn serialized_size(&self, compress: Compress) -> usize { @@ -103,32 +104,14 @@ where } fn serialize_uncompressed(&self, mut writer: W) -> Result<(), SerializationError> { - self.powers_of_g.serialize_uncompressed(&mut writer)?; - self.gamma_g.serialize_uncompressed(&mut writer)?; - self.powers_of_gamma_g.serialize_uncompressed(&mut writer)?; - self.h.serialize_uncompressed(&mut writer)?; - self.beta_h.serialize_uncompressed(&mut writer)?; - self.num_vars.serialize_uncompressed(&mut writer)?; - self.max_degree.serialize_uncompressed(&mut writer) + Self::serialize_with_mode(self, &mut writer, Compress::No) } fn uncompressed_size(&self) -> usize { - self.powers_of_g.uncompressed_size() - + self.gamma_g.uncompressed_size() - + self.powers_of_gamma_g.uncompressed_size() - + self.h.uncompressed_size() - + self.beta_h.uncompressed_size() - + self.num_vars.uncompressed_size() - + self.max_degree.uncompressed_size() + Self::serialized_size(self, Compress::No) } fn compressed_size(&self) 
-> usize { - self.powers_of_g.compressed_size() - + self.gamma_g.compressed_size() - + self.powers_of_gamma_g.compressed_size() - + self.h.compressed_size() - + self.beta_h.compressed_size() - + self.num_vars.compressed_size() - + self.max_degree.compressed_size() + Self::serialized_size(self, Compress::Yes) } } @@ -139,7 +122,7 @@ where P::Point: Index, { fn deserialize_with_mode( - reader: R, + mut reader: R, compress: Compress, validate: Validate, ) -> Result { @@ -170,18 +153,16 @@ where }) } - fn deserialize_uncompressed(mut reader: R) -> Result { + fn deserialize_uncompressed(reader: R) -> Result { Self::deserialize_with_mode(reader, Compress::No, Validate::Yes) } fn deserialize_uncompressed_unchecked(reader: R) -> Result { Self::deserialize_with_mode(reader, Compress::No, Validate::No) } - fn deserialize_compressed_unchecked( - mut reader: R, - ) -> Result { + fn deserialize_compressed_unchecked(reader: R) -> Result { Self::deserialize_with_mode(reader, Compress::Yes, Validate::No) } - fn deserialize_compressed(mut reader: R) -> Result { + fn deserialize_compressed(reader: R) -> Result { Self::deserialize_with_mode( reader, ark_serialize::Compress::Yes, @@ -295,7 +276,7 @@ impl Valid for VerifierKey { impl CanonicalSerialize for VerifierKey { fn serialize_with_mode( &self, - writer: W, + mut writer: W, compress: Compress, ) -> Result<(), SerializationError> { self.g.serialize_with_mode(&mut writer, compress)?; @@ -318,10 +299,10 @@ impl CanonicalSerialize for VerifierKey { + self.max_degree.serialized_size(compress) } - fn serialize_uncompressed(&self, mut writer: W) -> Result<(), SerializationError> { + fn serialize_uncompressed(&self, writer: W) -> Result<(), SerializationError> { Self::serialize_with_mode(&self, writer, Compress::No) } - fn serialize_compressed(&self, mut writer: W) -> Result<(), SerializationError> { + fn serialize_compressed(&self, writer: W) -> Result<(), SerializationError> { Self::serialize_with_mode(&self, writer, Compress::Yes) } 
@@ -532,4 +513,4 @@ pub struct Proof { /// Evaluation of the random polynomial at the point for which /// the evaluation proof was produced. pub random_v: Option, -} \ No newline at end of file +} diff --git a/src/marlin/marlin_pst13_pc/mod.rs b/src/marlin/marlin_pst13_pc/mod.rs index f9cb63b9..ce37e0c8 100644 --- a/src/marlin/marlin_pst13_pc/mod.rs +++ b/src/marlin/marlin_pst13_pc/mod.rs @@ -7,11 +7,13 @@ use crate::{BatchLCProof, Error, Evaluations, QuerySet}; use crate::{LabeledCommitment, LabeledPolynomial, LinearCombination}; use crate::{PCRandomness, PCUniversalParams, PolynomialCommitment}; use crate::{ToString, Vec}; +use ark_ec::AffineRepr; use ark_ec::{pairing::Pairing, scalar_mul::fixed_base::FixedBase, CurveGroup, VariableBaseMSM}; use ark_ff::{One, PrimeField, UniformRand, Zero}; use ark_poly::{multivariate::Term, DenseMVPolynomial}; use ark_std::rand::RngCore; use ark_std::{marker::PhantomData, ops::Index, ops::Mul, vec}; +use std::ops::AddAssign; mod data_structures; pub use data_structures::*; @@ -20,7 +22,7 @@ mod combinations; use combinations::*; use crate::challenge::ChallengeGenerator; -use ark_sponge::CryptographicSponge; +use ark_crypto_primitives::sponge::CryptographicSponge; #[cfg(feature = "parallel")] use rayon::prelude::*; @@ -183,7 +185,7 @@ where // Generators let g = E::G1::rand(rng); let gamma_g = E::G1::rand(rng); - let h = E::G2Projective::rand(rng); + let h = E::G2::rand(rng); // A list of all variable numbers of multiplicity `max_degree` let variable_set: Vec<_> = (0..num_vars) @@ -247,11 +249,11 @@ where }); end_timer!(gamma_g_time); - let powers_of_g = E::G1::batch_normalization_into_affine(&powers_of_g); + let powers_of_g = E::G1::normalize_batch(&powers_of_g); let gamma_g = gamma_g.into_affine(); let powers_of_gamma_g = powers_of_gamma_g .into_iter() - .map(|v| E::G1::batch_normalization_into_affine(&v)) + .map(|v| E::G1::normalize_batch(&v)) .collect(); let beta_h: Vec<_> = betas.iter().map(|b| 
h.mul(b).into_affine()).collect(); let h = h.into_affine(); @@ -420,7 +422,7 @@ where end_timer!(msm_time); // Mask commitment with random poly - commitment.add_assign_mixed(&random_commitment); + commitment.add_assign(&random_commitment); let comm = Self::Commitment { comm: kzg10::Commitment(commitment.into()), @@ -555,22 +557,23 @@ where None, )?; // Compute both sides of the pairing equation - let mut inner = combined_comm.into().into_projective() - &vk.g.mul(combined_value); + let mut inner = combined_comm.into().into_group() - &vk.g.mul(combined_value); if let Some(random_v) = proof.random_v { inner -= &vk.gamma_g.mul(random_v); } let lhs = E::pairing(inner, vk.h); // Create a list of elements corresponding to each pairing in the product on the rhs - let rhs_product: Vec<(E::G1Prepared, E::G2Prepared)> = ark_std::cfg_iter!(proof.w) - .enumerate() - .map(|(j, w_j)| { - let beta_minus_z: E::G2Affine = - (vk.beta_h[j].into_projective() - &vk.h.mul(point[j])).into(); - ((*w_j).into(), beta_minus_z.into()) - }) - .collect(); - let rhs = E::product_of_pairings(&rhs_product); + let (rhs_product_g1, rhs_product_g2): (Vec, Vec) = + ark_std::cfg_iter!(proof.w) + .enumerate() + .map(|(j, w_j)| { + let beta_minus_z: E::G2Affine = + (vk.beta_h[j].into_group() - &vk.h.mul(point[j])).into(); + ((*w_j).into(), beta_minus_z.into()) + }) + .unzip(); + let rhs = E::multi_pairing(rhs_product_g1, rhs_product_g2); end_timer!(check_time); Ok(lhs == rhs) @@ -598,8 +601,8 @@ where )?; let check_time = start_timer!(|| format!("Checking {} evaluation proofs", combined_comms.len())); - let g = vk.g.into_projective(); - let gamma_g = vk.gamma_g.into_projective(); + let g = vk.g.into_group(); + let gamma_g = vk.gamma_g.into_group(); let mut total_c = ::zero(); let mut total_w = vec![::zero(); vk.num_vars]; let combination_time = start_timer!(|| "Combining commitments and proofs"); @@ -619,7 +622,7 @@ where .enumerate() .map(|(j, w_j)| w_j.mul(z[j])) .sum(); - temp.add_assign_mixed(&c.0); + 
temp.add_assign(&c.0); let c = temp; g_multiplier += &(randomizer * &v); if let Some(random_v) = proof.random_v { @@ -638,15 +641,17 @@ where end_timer!(combination_time); let to_affine_time = start_timer!(|| "Converting results to affine for pairing"); - let mut pairings = Vec::new(); - total_w.into_iter().enumerate().for_each(|(j, w_j)| { - pairings.push(((-w_j).into_affine().into(), vk.prepared_beta_h[j].clone())) - }); - pairings.push((total_c.into_affine().into(), vk.prepared_h.clone())); + let (mut p1, mut p2): (Vec, Vec) = total_w + .into_iter() + .enumerate() + .map(|(j, w_j)| ((-w_j).into_affine().into(), vk.prepared_beta_h[j].clone())) + .unzip(); + p1.push(total_c.into_affine().into()); + p2.push(vk.prepared_h.clone()); end_timer!(to_affine_time); let pairing_time = start_timer!(|| "Performing product of pairings"); - let result = E::product_of_pairings(&pairings).is_one(); + let result = E::multi_pairing(p1, p2).0.is_one(); end_timer!(pairing_time); end_timer!(check_time); Ok(result) @@ -713,13 +718,13 @@ mod tests { use super::MarlinPST13; use ark_bls12_377::Bls12_377; use ark_bls12_381::Bls12_381; + use ark_crypto_primitives::sponge::poseidon::PoseidonSponge; use ark_ec::pairing::Pairing; use ark_ff::UniformRand; use ark_poly::{ multivariate::{SparsePolynomial as SparsePoly, SparseTerm}, DenseMVPolynomial, }; - use ark_sponge::poseidon::PoseidonSponge; use rand_chacha::ChaCha20Rng; type MVPoly_381 = SparsePoly<::Fr, SparseTerm>; diff --git a/src/marlin/mod.rs b/src/marlin/mod.rs index d5e0cf04..57446c03 100644 --- a/src/marlin/mod.rs +++ b/src/marlin/mod.rs @@ -4,9 +4,11 @@ use crate::{BTreeMap, BTreeSet, Debug, RngCore, String, ToString, Vec}; use crate::{BatchLCProof, LabeledPolynomial, LinearCombination}; use crate::{Evaluations, LabeledCommitment, QuerySet}; use crate::{PCRandomness, Polynomial, PolynomialCommitment}; +use ark_crypto_primitives::sponge::CryptographicSponge; use ark_ec::pairing::Pairing; +use ark_ec::AffineRepr; +use 
ark_ec::CurveGroup; use ark_ff::{One, Zero}; -use ark_sponge::CryptographicSponge; use ark_std::{convert::TryInto, hash::Hash, ops::AddAssign, ops::Mul}; /// Polynomial commitment scheme from [[KZG10]][kzg] that enforces @@ -54,7 +56,7 @@ where let mut combined_shifted_comm = None; for (coeff, comm) in coeffs_and_comms { if coeff.is_one() { - combined_comm.add_assign_mixed(&comm.comm.0); + combined_comm.add_assign(&comm.comm.0); } else { combined_comm += &comm.comm.0.mul(coeff); } @@ -84,8 +86,8 @@ where s_flags.push(false); } } - let comms = E::G1::batch_normalization_into_affine(&comms); - let s_comms = E::G1::batch_normalization_into_affine(&mut s_comms); + let comms = E::G1::normalize_batch(&comms); + let s_comms = E::G1::normalize_batch(&mut s_comms); comms .into_iter() .zip(s_comms) @@ -127,12 +129,7 @@ where if let Some(degree_bound) = degree_bound { let challenge_i_1 = challenge_gen.try_next_challenge_of_size(CHALLENGE_SIZE); - let shifted_comm = commitment - .shifted_comm - .as_ref() - .unwrap() - .0 - .into_projective(); + let shifted_comm = commitment.shifted_comm.as_ref().unwrap().0.into_group(); let shift_power = vk .unwrap() @@ -212,8 +209,8 @@ where combined_evals.push(v); } let norm_time = start_timer!(|| "Normalizing combined commitments"); - E::G1::batch_normalization(&mut combined_comms); - let combined_comms = combined_comms + let combined_comms_affine = E::G1::normalize_batch(&combined_comms); + let combined_comms = combined_comms_affine .into_iter() .map(|c| kzg10::Commitment(c.into())) .collect::>(); diff --git a/src/multilinear_pc/mod.rs b/src/multilinear_pc/mod.rs index fe07626c..1e92035f 100644 --- a/src/multilinear_pc/mod.rs +++ b/src/multilinear_pc/mod.rs @@ -1,8 +1,9 @@ use crate::multilinear_pc::data_structures::{ Commitment, CommitterKey, Proof, UniversalParams, VerifierKey, }; -use ark_ec::{scalar_mul::fixed_base::FixedBase, VariableBaseMSM}; +use ark_ec::AffineRepr; use ark_ec::{pairing::Pairing, CurveGroup}; +use 
ark_ec::{scalar_mul::fixed_base::FixedBase, VariableBaseMSM}; use ark_ff::{Field, PrimeField}; use ark_ff::{One, Zero}; use ark_poly::{DenseMultilinearExtension, MultilinearExtension}; @@ -27,7 +28,7 @@ impl MultilinearPC { pub fn setup(num_vars: usize, rng: &mut R) -> UniversalParams { assert!(num_vars > 0, "constant polynomial not supported"); let g: E::G1 = E::G1::rand(rng); - let h: E::G2Projective = E::G2Projective::rand(rng); + let h: E::G2 = E::G2::rand(rng); let g = g.into_affine(); let h = h.into_affine(); let mut powers_of_g = Vec::new(); @@ -61,16 +62,16 @@ impl MultilinearPC { total_scalars += 1 << (num_vars - i); } let window_size = FixedBase::get_mul_window_size(total_scalars); - let g_table = FixedBase::get_window_table(scalar_bits, window_size, g.into_projective()); - let h_table = FixedBase::get_window_table(scalar_bits, window_size, h.into_projective()); + let g_table = FixedBase::get_window_table(scalar_bits, window_size, g.into_group()); + let h_table = FixedBase::get_window_table(scalar_bits, window_size, h.into_group()); - let pp_g = E::G1::batch_normalization_into_affine(&FixedBase::msm( + let pp_g = E::G1::normalize_batch(&FixedBase::msm( scalar_bits, window_size, &g_table, &pp_powers, )); - let pp_h = E::G2Projective::batch_normalization_into_affine(&FixedBase::msm( + let pp_h = E::G2::normalize_batch(&FixedBase::msm( scalar_bits, window_size, &h_table, @@ -90,14 +91,8 @@ impl MultilinearPC { // let vp_generation_timer = start_timer!(|| "VP generation"); let g_mask = { let window_size = FixedBase::get_mul_window_size(num_vars); - let g_table = - FixedBase::get_window_table(scalar_bits, window_size, g.into_projective()); - E::G1::batch_normalization_into_affine(&FixedBase::msm( - scalar_bits, - window_size, - &g_table, - &t, - )) + let g_table = FixedBase::get_window_table(scalar_bits, window_size, g.into_group()); + E::G1::normalize_batch(&FixedBase::msm(scalar_bits, window_size, &g_table, &t)) }; // end_timer!(vp_generation_timer); @@ 
-186,8 +181,7 @@ impl MultilinearPC { .collect(); let pi_h = - ::msm_bigint(&ck.powers_of_h[i], &scalars) - .into_affine(); // no need to move outside and partition + ::msm_bigint(&ck.powers_of_h[i], &scalars).into_affine(); // no need to move outside and partition proofs.push(pi_h); } @@ -203,22 +197,18 @@ impl MultilinearPC { value: E::ScalarField, proof: &Proof, ) -> bool { - let left = E::pairing( - commitment.g_product.into_projective() - &vk.g.mul(value), - vk.h, - ); + let left = E::pairing(commitment.g_product.into_group() - &vk.g.mul(value), vk.h); let scalar_size = E::ScalarField::MODULUS_BIT_SIZE as usize; let window_size = FixedBase::get_mul_window_size(vk.nv); - let g_table = FixedBase::get_window_table(scalar_size, window_size, vk.g.into_projective()); + let g_table = FixedBase::get_window_table(scalar_size, window_size, vk.g.into_group()); let g_mul: Vec = FixedBase::msm(scalar_size, window_size, &g_table, point); let pairing_lefts: Vec<_> = (0..vk.nv) - .map(|i| vk.g_mask_random[i].into_projective() - &g_mul[i]) + .map(|i| vk.g_mask_random[i].into_group() - &g_mul[i]) .collect(); - let pairing_lefts: Vec = - E::G1::batch_normalization_into_affine(&pairing_lefts); + let pairing_lefts: Vec = E::G1::normalize_batch(&pairing_lefts); let pairing_lefts: Vec = pairing_lefts .into_iter() .map(|x| E::G1Prepared::from(x)) @@ -230,11 +220,7 @@ impl MultilinearPC { .map(|x| E::G2Prepared::from(*x)) .collect(); - let pairings: Vec<_> = pairing_lefts - .into_iter() - .zip(pairing_rights.into_iter()) - .collect(); - let right = E::product_of_pairings(pairings.iter()); + let right = E::multi_pairing(pairing_lefts, pairing_rights); left == right } } diff --git a/src/sonic_pc/data_structures.rs b/src/sonic_pc/data_structures.rs index a6cc60a6..430073d6 100644 --- a/src/sonic_pc/data_structures.rs +++ b/src/sonic_pc/data_structures.rs @@ -3,8 +3,9 @@ use crate::{ BTreeMap, PCCommitterKey, PCPreparedCommitment, PCPreparedVerifierKey, PCVerifierKey, Vec, }; use 
ark_ec::pairing::Pairing; +use ark_ec::Group; use ark_serialize::{ - CanonicalDeserialize, Valid,CanonicalSerialize, Compress, SerializationError, Validate, + CanonicalDeserialize, CanonicalSerialize, Compress, SerializationError, Valid, Validate, }; use ark_std::io::{Read, Write}; @@ -24,7 +25,7 @@ impl PCPreparedCommitment> for PreparedCommitment { /// prepare `PreparedCommitment` from `Commitment` fn prepare(comm: &Commitment) -> Self { let mut prepared_comm = Vec::::new(); - let mut cur = E::G1::ScalarFieldom(comm.0.clone()); + let mut cur = E::G1::from(comm.0.clone()); for _ in 0..128 { prepared_comm.push(cur.clone().into()); cur.double_in_place(); @@ -172,16 +173,19 @@ impl VerifierKey { } impl Valid for VerifierKey { - fn check(&self) -> bool { + fn check(&self) -> Result<(), SerializationError> { // TODO probably need to do more checks here - self.supported_degree <= self.max_degree + if self.supported_degree > self.max_degree { + return Err(SerializationError::InvalidData); + } + Ok(()) } } impl CanonicalSerialize for VerifierKey { fn serialize_with_mode( &self, - writer: W, + mut writer: W, compress: Compress, ) -> Result<(), SerializationError> { self.g.serialize_with_mode(&mut writer, compress)?; @@ -223,18 +227,22 @@ impl CanonicalSerialize for VerifierKey { impl CanonicalDeserialize for VerifierKey { fn deserialize_with_mode( - reader: R, + mut reader: R, compress: Compress, validate: Validate, ) -> Result { - let g = E::G1Affine::deserialize(&mut reader, compress, validate)?; - let gamma_g = E::G1Affine::deserialize(&mut reader, compress, validate)?; - let h = E::G2Affine::deserialize(&mut reader, compress, validate)?; - let beta_h = E::G2Affine::deserialize(&mut reader, compress, validate)?; + let g = E::G1Affine::deserialize_with_mode(&mut reader, compress, validate)?; + let gamma_g = E::G1Affine::deserialize_with_mode(&mut reader, compress, validate)?; + let h = E::G2Affine::deserialize_with_mode(&mut reader, compress, validate)?; + let beta_h = 
E::G2Affine::deserialize_with_mode(&mut reader, compress, validate)?; let degree_bounds_and_neg_powers_of_h = - Option::>::deserialize(&mut reader, compress, validate)?; - let supported_degree = usize::deserialize(&mut reader, compress, validate)?; - let max_degree = usize::deserialize(&mut reader, compress, validate)?; + Option::>::deserialize_with_mode( + &mut reader, + compress, + validate, + )?; + let supported_degree = usize::deserialize_with_mode(&mut reader, compress, validate)?; + let max_degree = usize::deserialize_with_mode(&mut reader, compress, validate)?; let prepared_h = E::G2Prepared::from(h.clone()); let prepared_beta_h = E::G2Prepared::from(beta_h.clone()); diff --git a/src/sonic_pc/mod.rs b/src/sonic_pc/mod.rs index aac81c70..a4358461 100644 --- a/src/sonic_pc/mod.rs +++ b/src/sonic_pc/mod.rs @@ -3,15 +3,17 @@ use crate::{BTreeMap, BTreeSet, String, ToString, Vec}; use crate::{BatchLCProof, DenseUVPolynomial, Error, Evaluations, QuerySet}; use crate::{LabeledCommitment, LabeledPolynomial, LinearCombination}; use crate::{PCRandomness, PCUniversalParams, PolynomialCommitment}; +use ark_ec::AffineRepr; +use ark_ec::CurveGroup; -use ark_ec::{pairing::Pairing, }; +use ark_ec::pairing::Pairing; use ark_ff::{One, UniformRand, Zero}; use ark_std::rand::RngCore; use ark_std::{convert::TryInto, marker::PhantomData, ops::Div, ops::Mul, vec}; mod data_structures; use crate::challenge::ChallengeGenerator; -use ark_sponge::CryptographicSponge; +use ark_crypto_primitives::sponge::CryptographicSponge; pub use data_structures::*; /// Polynomial commitment based on [[KZG10]][kzg], with degree enforcement and @@ -75,7 +77,7 @@ where } // Push expected results into list of elems. 
Power will be the negative of the expected power - let mut witness: E::G1 = proof.w.into_projective(); + let mut witness: E::G1 = proof.w.into_group(); let mut adjusted_witness = vk.g.mul(combined_values) - &proof.w.mul(point); if let Some(random_v) = proof.random_v { adjusted_witness += &vk.gamma_g.mul(random_v); @@ -119,14 +121,15 @@ where g1_projective_elems.push(-combined_witness); g2_prepared_elems.push(vk.prepared_beta_h.clone()); - let g1_prepared_elems_iter = - E::G1::batch_normalization_into_affine(g1_projective_elems.as_slice()) + let g1_prepared_elems_iter: Vec = + E::G1::normalize_batch(g1_projective_elems.as_slice()) .into_iter() - .map(|a| a.into()); + .map(|a| a.into()) + .collect::>(); - let g1_g2_prepared: Vec<(E::G1Prepared, E::G2Prepared)> = - g1_prepared_elems_iter.zip(g2_prepared_elems).collect(); - let is_one: bool = E::product_of_pairings(g1_g2_prepared.iter()).is_one(); + let is_one: bool = E::multi_pairing(g1_prepared_elems_iter, g2_prepared_elems) + .0 + .is_one(); end_timer!(check_time); Ok(is_one) } @@ -564,7 +567,7 @@ where lc_info.push((lc_label, degree_bound)); } - let comms: Vec = E::G1::batch_normalization_into_affine(&lc_commitments) + let comms: Vec = E::G1::normalize_batch(&lc_commitments) .into_iter() .map(|c| kzg10::Commitment::(c)) .collect(); @@ -648,7 +651,7 @@ where lc_info.push((lc_label, degree_bound)); } - let comms: Vec = E::G1::batch_normalization_into_affine(&lc_commitments) + let comms: Vec = E::G1::normalize_batch(&lc_commitments) .into_iter() .map(|c| kzg10::Commitment(c)) .collect(); @@ -677,10 +680,10 @@ mod tests { use super::SonicKZG10; use ark_bls12_377::Bls12_377; use ark_bls12_381::Bls12_381; + use ark_crypto_primitives::sponge::poseidon::PoseidonSponge; use ark_ec::pairing::Pairing; use ark_ff::UniformRand; use ark_poly::{univariate::DensePolynomial as DensePoly, DenseUVPolynomial}; - use ark_sponge::poseidon::PoseidonSponge; use rand_chacha::ChaCha20Rng; type UniPoly_381 = DensePoly<::Fr>; diff --git 
a/src/streaming_kzg/mod.rs b/src/streaming_kzg/mod.rs index a1718a11..e3bdb2af 100644 --- a/src/streaming_kzg/mod.rs +++ b/src/streaming_kzg/mod.rs @@ -87,7 +87,7 @@ mod space; mod time; use ark_ec::CurveGroup; -use ark_serialize::CanonicalSerialize; +use ark_serialize::{CanonicalSerialize, Compress}; use ark_std::vec::Vec; pub use data_structures::*; pub use space::CommitterKeyStream; diff --git a/src/streaming_kzg/space.rs b/src/streaming_kzg/space.rs index 94a59bcc..e8ab2d4f 100644 --- a/src/streaming_kzg/space.rs +++ b/src/streaming_kzg/space.rs @@ -64,7 +64,7 @@ where SF: Iterable, SF::Item: Borrow, { - let mut quotient = ChunkedPippenger::new(max_msm_buffer); + let mut quotient: ChunkedPippenger = ChunkedPippenger::new(max_msm_buffer); let bases_init = self.powers_of_g.iter(); let scalars = polynomial.iter(); @@ -98,7 +98,7 @@ where SF::Item: Borrow, { let zeros = vanishing_polynomial(points); - let mut quotient = ChunkedPippenger::new(max_msm_buffer); + let mut quotient: ChunkedPippenger = ChunkedPippenger::new(max_msm_buffer); let bases_init = self.powers_of_g.iter(); // TODO: change `skip` to `advance_by` once rust-lang/rust#7774 is fixed. 
// See @@ -164,10 +164,11 @@ where SF::Item: Borrow, { let n = polynomials.depth(); - let mut pippengers: Vec> = Vec::new(); + let mut pippengers: Vec> = Vec::new(); let mut folded_bases = Vec::new(); for i in 1..n + 1 { - let pippenger = ChunkedPippenger::with_size(max_msm_buffer / n); + let pippenger: ChunkedPippenger<::G1> = + ChunkedPippenger::with_size(max_msm_buffer / n); let bases_init = self.powers_of_g.iter(); let delta = self.powers_of_g.len() - ceil_div(polynomials.len(), 1 << i); From 30a29492c94464a2dabd5f007a1f30bf86632305 Mon Sep 17 00:00:00 2001 From: nikkolasg Date: Sat, 4 Feb 2023 00:07:07 +0100 Subject: [PATCH 03/12] tests passing --- src/kzg10/mod.rs | 4 ++-- src/marlin/marlin_pc/mod.rs | 8 ++++---- src/marlin/marlin_pst13_pc/mod.rs | 8 ++++---- src/multilinear_pc/mod.rs | 5 +++-- src/sonic_pc/mod.rs | 8 ++++---- 5 files changed, 17 insertions(+), 16 deletions(-) diff --git a/src/kzg10/mod.rs b/src/kzg10/mod.rs index 89fd384f..e5dd0879 100644 --- a/src/kzg10/mod.rs +++ b/src/kzg10/mod.rs @@ -458,8 +458,8 @@ mod tests { use ark_poly::univariate::DensePolynomial as DensePoly; use ark_std::test_rng; - type UniPoly_381 = DensePoly<::Fr>; - type UniPoly_377 = DensePoly<::Fr>; + type UniPoly_381 = DensePoly<::ScalarField>; + type UniPoly_377 = DensePoly<::ScalarField>; type KZG_Bls12_381 = KZG10; impl> KZG10 { diff --git a/src/marlin/marlin_pc/mod.rs b/src/marlin/marlin_pc/mod.rs index 3342e206..93856b05 100644 --- a/src/marlin/marlin_pc/mod.rs +++ b/src/marlin/marlin_pc/mod.rs @@ -546,13 +546,13 @@ mod tests { use ark_poly::{univariate::DensePolynomial as DensePoly, DenseUVPolynomial}; use rand_chacha::ChaCha20Rng; - type UniPoly_381 = DensePoly<::Fr>; - type UniPoly_377 = DensePoly<::Fr>; + type UniPoly_381 = DensePoly<::ScalarField>; + type UniPoly_377 = DensePoly<::ScalarField>; type PC = MarlinKZG10; - type Sponge_Bls12_381 = PoseidonSponge<::Fr>; - type Sponge_Bls12_377 = PoseidonSponge<::Fr>; + type Sponge_Bls12_381 = 
PoseidonSponge<::ScalarField>; + type Sponge_Bls12_377 = PoseidonSponge<::ScalarField>; type PC_Bls12_381 = PC; type PC_Bls12_377 = PC; diff --git a/src/marlin/marlin_pst13_pc/mod.rs b/src/marlin/marlin_pst13_pc/mod.rs index ce37e0c8..db29d7f6 100644 --- a/src/marlin/marlin_pst13_pc/mod.rs +++ b/src/marlin/marlin_pst13_pc/mod.rs @@ -727,13 +727,13 @@ mod tests { }; use rand_chacha::ChaCha20Rng; - type MVPoly_381 = SparsePoly<::Fr, SparseTerm>; - type MVPoly_377 = SparsePoly<::Fr, SparseTerm>; + type MVPoly_381 = SparsePoly<::ScalarField, SparseTerm>; + type MVPoly_377 = SparsePoly<::ScalarField, SparseTerm>; type PC = MarlinPST13; - type Sponge_bls12_381 = PoseidonSponge<::Fr>; - type Sponge_Bls12_377 = PoseidonSponge<::Fr>; + type Sponge_bls12_381 = PoseidonSponge<::ScalarField>; + type Sponge_Bls12_377 = PoseidonSponge<::ScalarField>; type PC_Bls12_381 = PC; type PC_Bls12_377 = PC; diff --git a/src/multilinear_pc/mod.rs b/src/multilinear_pc/mod.rs index 1e92035f..bd5d3e53 100644 --- a/src/multilinear_pc/mod.rs +++ b/src/multilinear_pc/mod.rs @@ -260,16 +260,17 @@ fn eq_extension(t: &[F]) -> Vec> { #[cfg(test)] mod tests { + use crate::ark_std::UniformRand; use crate::multilinear_pc::data_structures::UniversalParams; use crate::multilinear_pc::MultilinearPC; use ark_bls12_381::Bls12_381; use ark_ec::pairing::Pairing; use ark_poly::{DenseMultilinearExtension, MultilinearExtension, SparseMultilinearExtension}; use ark_std::rand::RngCore; + use ark_std::test_rng; use ark_std::vec::Vec; - use ark_std::{test_rng, UniformRand}; type E = Bls12_381; - type Fr = ::Fr; + type Fr = ::ScalarField; fn test_polynomial( uni_params: &UniversalParams, diff --git a/src/sonic_pc/mod.rs b/src/sonic_pc/mod.rs index a4358461..2e65daeb 100644 --- a/src/sonic_pc/mod.rs +++ b/src/sonic_pc/mod.rs @@ -686,12 +686,12 @@ mod tests { use ark_poly::{univariate::DensePolynomial as DensePoly, DenseUVPolynomial}; use rand_chacha::ChaCha20Rng; - type UniPoly_381 = DensePoly<::Fr>; - type 
UniPoly_377 = DensePoly<::Fr>; + type UniPoly_381 = DensePoly<::ScalarField>; + type UniPoly_377 = DensePoly<::ScalarField>; type PC = SonicKZG10; - type Sponge_Bls12_377 = PoseidonSponge<::Fr>; - type Sponge_Bls12_381 = PoseidonSponge<::Fr>; + type Sponge_Bls12_377 = PoseidonSponge<::ScalarField>; + type Sponge_Bls12_381 = PoseidonSponge<::ScalarField>; type PC_Bls12_377 = PC; type PC_Bls12_381 = PC; From a8e6b8ad31ef6b8db4905ab048356406a6a6d69e Mon Sep 17 00:00:00 2001 From: nikkolasg Date: Sat, 4 Feb 2023 00:09:23 +0100 Subject: [PATCH 04/12] cargo fmt --- src/challenge.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/challenge.rs b/src/challenge.rs index e88bdd2e..23b3c9d1 100644 --- a/src/challenge.rs +++ b/src/challenge.rs @@ -1,5 +1,5 @@ -use ark_ff::PrimeField; use ark_crypto_primitives::sponge::{CryptographicSponge, FieldElementSize}; +use ark_ff::PrimeField; /// `ChallengeGenerator` generates opening challenges using multivariate or univariate strategy. /// For multivariate strategy, each challenge is freshly squeezed from a sponge. 
From e1e27dee97c79834adc2b2913336f566a026cfe8 Mon Sep 17 00:00:00 2001 From: Pratyush Mishra Date: Tue, 14 Feb 2023 16:50:13 -0800 Subject: [PATCH 05/12] Address comments --- Cargo.toml | 9 +- src/kzg10/data_structures.rs | 87 ------------------- src/marlin/marlin_pst13_pc/data_structures.rs | 54 ------------ src/sonic_pc/data_structures.rs | 26 ------ 4 files changed, 1 insertion(+), 175 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index d803afb6..0c71527f 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -2,14 +2,7 @@ name = "ark-poly-commit" version = "0.4.0" authors = [ - "Alessandro Chiesa ", - "Mary Maller ", - "Yuncong Hu ", - "William Lin", - "Pratyush Mishra ", - "Noah Vesely ", - "Nicholas Ward ", - "arkworks contributors" + "arkworks contributors" ] description = "A library for constructing polynomial commitment schemes for use in zkSNARKs" repository = "https://github.com/arkworks-rs/poly-commit" diff --git a/src/kzg10/data_structures.rs b/src/kzg10/data_structures.rs index 9b8e2fc8..314e590b 100644 --- a/src/kzg10/data_structures.rs +++ b/src/kzg10/data_structures.rs @@ -79,22 +79,6 @@ impl CanonicalSerialize for UniversalParams { + self.beta_h.serialized_size(compress) + self.neg_powers_of_h.serialized_size(compress) } - - fn serialize_uncompressed(&self, writer: W) -> Result<(), SerializationError> { - Self::serialize_with_mode(&self, writer, Compress::No) - } - - fn serialize_compressed(&self, writer: W) -> Result<(), SerializationError> { - Self::serialize_with_mode(&self, writer, Compress::Yes) - } - - fn uncompressed_size(&self) -> usize { - self.powers_of_g.uncompressed_size() - + self.powers_of_gamma_g.uncompressed_size() - + self.h.uncompressed_size() - + self.beta_h.uncompressed_size() - + self.neg_powers_of_h.uncompressed_size() - } } impl CanonicalDeserialize for UniversalParams { @@ -125,23 +109,6 @@ impl CanonicalDeserialize for UniversalParams { prepared_beta_h, }) } - - fn deserialize_uncompressed(mut reader: R) -> Result { - 
Self::deserialize_with_mode(&mut reader, Compress::No, ark_serialize::Validate::Yes) - } - fn deserialize_compressed(mut reader: R) -> Result { - Self::deserialize_with_mode(&mut reader, Compress::Yes, ark_serialize::Validate::Yes) - } - fn deserialize_compressed_unchecked( - mut reader: R, - ) -> Result { - Self::deserialize_with_mode(&mut reader, Compress::Yes, ark_serialize::Validate::No) - } - fn deserialize_uncompressed_unchecked( - mut reader: R, - ) -> Result { - Self::deserialize_with_mode(&mut reader, Compress::No, ark_serialize::Validate::No) - } } /// `Powers` is used to commit to and create evaluation proofs for a given @@ -191,19 +158,6 @@ impl<'a, E: Pairing> CanonicalSerialize for Powers<'a, E> { self.powers_of_g.serialized_size(compress) + self.powers_of_gamma_g.serialized_size(compress) } - - fn serialize_compressed(&self, writer: W) -> Result<(), SerializationError> { - self.serialize_with_mode(writer, Compress::Yes) - } - fn compressed_size(&self) -> usize { - self.serialized_size(Compress::Yes) - } - fn serialize_uncompressed(&self, writer: W) -> Result<(), SerializationError> { - self.serialize_with_mode(writer, Compress::No) - } - fn uncompressed_size(&self) -> usize { - self.serialized_size(Compress::No) - } } impl<'a, E: Pairing> CanonicalDeserialize for Powers<'a, E> { @@ -221,23 +175,6 @@ impl<'a, E: Pairing> CanonicalDeserialize for Powers<'a, E> { powers_of_gamma_g: Cow::Owned(powers_of_gamma_g), }) } - - fn deserialize_compressed(mut reader: R) -> Result { - Self::deserialize_with_mode(&mut reader, Compress::Yes, ark_serialize::Validate::Yes) - } - fn deserialize_compressed_unchecked( - mut reader: R, - ) -> Result { - Self::deserialize_with_mode(&mut reader, Compress::Yes, ark_serialize::Validate::No) - } - fn deserialize_uncompressed(mut reader: R) -> Result { - Self::deserialize_with_mode(&mut reader, Compress::No, ark_serialize::Validate::Yes) - } - fn deserialize_uncompressed_unchecked( - mut reader: R, - ) -> Result { - 
Self::deserialize_with_mode(&mut reader, Compress::No, ark_serialize::Validate::No) - } } /// `VerifierKey` is used to check evaluation proofs for a given commitment. #[derive(Derivative)] @@ -289,18 +226,6 @@ impl CanonicalSerialize for VerifierKey { + self.h.serialized_size(compress) + self.beta_h.serialized_size(compress) } - fn serialize_compressed(&self, writer: W) -> Result<(), SerializationError> { - self.serialize_with_mode(writer, Compress::Yes) - } - fn compressed_size(&self) -> usize { - self.serialized_size(Compress::Yes) - } - fn serialize_uncompressed(&self, writer: W) -> Result<(), SerializationError> { - self.serialize_with_mode(writer, Compress::No) - } - fn uncompressed_size(&self) -> usize { - self.serialized_size(Compress::No) - } } impl CanonicalDeserialize for VerifierKey { @@ -326,18 +251,6 @@ impl CanonicalDeserialize for VerifierKey { prepared_beta_h, }) } - fn deserialize_compressed(reader: R) -> Result { - Self::deserialize_with_mode(reader, Compress::Yes, ark_serialize::Validate::Yes) - } - fn deserialize_compressed_unchecked(reader: R) -> Result { - Self::deserialize_with_mode(reader, Compress::Yes, ark_serialize::Validate::No) - } - fn deserialize_uncompressed(reader: R) -> Result { - Self::deserialize_with_mode(reader, Compress::No, ark_serialize::Validate::Yes) - } - fn deserialize_uncompressed_unchecked(reader: R) -> Result { - Self::deserialize_with_mode(reader, Compress::No, ark_serialize::Validate::No) - } } impl ToConstraintField<::BasePrimeField> for VerifierKey diff --git a/src/marlin/marlin_pst13_pc/data_structures.rs b/src/marlin/marlin_pst13_pc/data_structures.rs index a3e7f598..eff5a983 100644 --- a/src/marlin/marlin_pst13_pc/data_structures.rs +++ b/src/marlin/marlin_pst13_pc/data_structures.rs @@ -102,17 +102,6 @@ where + self.num_vars.serialized_size(compress) + self.max_degree.serialized_size(compress) } - - fn serialize_uncompressed(&self, mut writer: W) -> Result<(), SerializationError> { - 
Self::serialize_with_mode(self, &mut writer, Compress::No) - } - - fn uncompressed_size(&self) -> usize { - Self::serialized_size(self, Compress::No) - } - fn compressed_size(&self) -> usize { - Self::serialized_size(self, Compress::Yes) - } } impl CanonicalDeserialize for UniversalParams @@ -152,23 +141,6 @@ where max_degree, }) } - - fn deserialize_uncompressed(reader: R) -> Result { - Self::deserialize_with_mode(reader, Compress::No, Validate::Yes) - } - fn deserialize_uncompressed_unchecked(reader: R) -> Result { - Self::deserialize_with_mode(reader, Compress::No, Validate::No) - } - fn deserialize_compressed_unchecked(reader: R) -> Result { - Self::deserialize_with_mode(reader, Compress::Yes, Validate::No) - } - fn deserialize_compressed(reader: R) -> Result { - Self::deserialize_with_mode( - reader, - ark_serialize::Compress::Yes, - ark_serialize::Validate::Yes, - ) - } } impl PCUniversalParams for UniversalParams @@ -298,20 +270,6 @@ impl CanonicalSerialize for VerifierKey { + self.supported_degree.serialized_size(compress) + self.max_degree.serialized_size(compress) } - - fn serialize_uncompressed(&self, writer: W) -> Result<(), SerializationError> { - Self::serialize_with_mode(&self, writer, Compress::No) - } - fn serialize_compressed(&self, writer: W) -> Result<(), SerializationError> { - Self::serialize_with_mode(&self, writer, Compress::Yes) - } - - fn uncompressed_size(&self) -> usize { - Self::serialized_size(&self, Compress::No) - } - fn compressed_size(&self) -> usize { - Self::serialized_size(&self, Compress::Yes) - } } impl CanonicalDeserialize for VerifierKey { @@ -341,18 +299,6 @@ impl CanonicalDeserialize for VerifierKey { max_degree, }) } - fn deserialize_compressed(reader: R) -> Result { - Self::deserialize_with_mode(reader, Compress::Yes, Validate::Yes) - } - fn deserialize_compressed_unchecked(reader: R) -> Result { - Self::deserialize_with_mode(reader, Compress::Yes, Validate::No) - } - fn deserialize_uncompressed(reader: R) -> Result { - 
Self::deserialize_with_mode(reader, Compress::No, Validate::Yes) - } - fn deserialize_uncompressed_unchecked(reader: R) -> Result { - Self::deserialize_with_mode(reader, Compress::No, Validate::No) - } } impl PCVerifierKey for VerifierKey { diff --git a/src/sonic_pc/data_structures.rs b/src/sonic_pc/data_structures.rs index 430073d6..b9c75030 100644 --- a/src/sonic_pc/data_structures.rs +++ b/src/sonic_pc/data_structures.rs @@ -210,19 +210,6 @@ impl CanonicalSerialize for VerifierKey { + self.supported_degree.serialized_size(compress) + self.max_degree.serialized_size(compress) } - - fn serialize_compressed(&self, writer: W) -> Result<(), SerializationError> { - self.serialize_with_mode(writer, Compress::Yes) - } - fn compressed_size(&self) -> usize { - self.serialized_size(Compress::Yes) - } - fn serialize_uncompressed(&self, writer: W) -> Result<(), SerializationError> { - self.serialize_with_mode(writer, Compress::No) - } - fn uncompressed_size(&self) -> usize { - self.serialized_size(Compress::No) - } } impl CanonicalDeserialize for VerifierKey { @@ -259,19 +246,6 @@ impl CanonicalDeserialize for VerifierKey { max_degree, }) } - - fn deserialize_compressed(reader: R) -> Result { - Self::deserialize_with_mode(reader, Compress::Yes, Validate::Yes) - } - fn deserialize_compressed_unchecked(reader: R) -> Result { - Self::deserialize_with_mode(reader, Compress::Yes, Validate::No) - } - fn deserialize_uncompressed(reader: R) -> Result { - Self::deserialize_with_mode(reader, Compress::No, Validate::Yes) - } - fn deserialize_uncompressed_unchecked(reader: R) -> Result { - Self::deserialize_with_mode(reader, Compress::No, Validate::No) - } } impl PCVerifierKey for VerifierKey { From 268965319fd427e3a8b5270b99a523e93fd18827 Mon Sep 17 00:00:00 2001 From: Pratyush Mishra Date: Tue, 14 Feb 2023 17:45:57 -0800 Subject: [PATCH 06/12] Improve checks --- src/kzg10/data_structures.rs | 78 +++++++++++-------- src/marlin/marlin_pst13_pc/data_structures.rs | 78 ++++++++++--------- 
src/sonic_pc/data_structures.rs | 30 ++++--- 3 files changed, 106 insertions(+), 80 deletions(-) diff --git a/src/kzg10/data_structures.rs b/src/kzg10/data_structures.rs index 314e590b..3b6412c0 100644 --- a/src/kzg10/data_structures.rs +++ b/src/kzg10/data_structures.rs @@ -4,7 +4,7 @@ use ark_ec::AffineRepr; use ark_ec::Group; use ark_ff::{PrimeField, ToConstraintField}; use ark_serialize::{ - CanonicalDeserialize, CanonicalSerialize, Compress, SerializationError, Valid, + CanonicalDeserialize, CanonicalSerialize, Compress, SerializationError, Valid, Validate, }; use ark_std::{ borrow::Cow, @@ -42,11 +42,11 @@ pub struct UniversalParams { impl Valid for UniversalParams { fn check(&self) -> Result<(), SerializationError> { - if !(self.powers_of_g.len() == self.powers_of_gamma_g.len() - && self.powers_of_g.len() == self.neg_powers_of_h.len()) - { - return Err(SerializationError::InvalidData); - } + self.powers_of_g.check()?; + self.powers_of_gamma_g.check()?; + self.h.check()?; + self.beta_h.check()?; + self.neg_powers_of_h.check()?; Ok(()) } } @@ -85,21 +85,18 @@ impl CanonicalDeserialize for UniversalParams { fn deserialize_with_mode( mut reader: R, compress: Compress, - validate: ark_serialize::Validate, + validate: Validate, ) -> Result { - let powers_of_g = - Vec::::deserialize_with_mode(&mut reader, compress, validate)?; + let powers_of_g = Vec::deserialize_with_mode(&mut reader, compress, Validate::No)?; let powers_of_gamma_g = - BTreeMap::::deserialize_with_mode(&mut reader, compress, validate)?; - let h = E::G2Affine::deserialize_with_mode(&mut reader, compress, validate)?; - let beta_h = E::G2Affine::deserialize_with_mode(&mut reader, compress, validate)?; - let neg_powers_of_h = - BTreeMap::::deserialize_with_mode(&mut reader, compress, validate)?; + BTreeMap::deserialize_with_mode(&mut reader, compress, Validate::No)?; + let h = E::G2Affine::deserialize_with_mode(&mut reader, compress, Validate::No)?; + let beta_h = 
E::G2Affine::deserialize_with_mode(&mut reader, compress, Validate::No)?; + let neg_powers_of_h = BTreeMap::deserialize_with_mode(&mut reader, compress, Validate::No)?; let prepared_h = E::G2Prepared::from(h.clone()); let prepared_beta_h = E::G2Prepared::from(beta_h.clone()); - - Ok(Self { + let result = Self { powers_of_g, powers_of_gamma_g, h, @@ -107,7 +104,12 @@ impl CanonicalDeserialize for UniversalParams { neg_powers_of_h, prepared_h, prepared_beta_h, - }) + }; + if let Validate::Yes = validate { + result.check()?; + } + + Ok(result) } } @@ -136,9 +138,6 @@ impl Powers<'_, E> { } impl<'a, E: Pairing> Valid for Powers<'a, E> { fn check(&self) -> Result<(), SerializationError> { - if self.powers_of_g.len() != self.powers_of_gamma_g.len() { - return Err(SerializationError::InvalidData); - } Ok(()) } } @@ -164,16 +163,20 @@ impl<'a, E: Pairing> CanonicalDeserialize for Powers<'a, E> { fn deserialize_with_mode( mut reader: R, compress: Compress, - validate: ark_serialize::Validate, + validate: Validate, ) -> Result { let powers_of_g = - Vec::::deserialize_with_mode(&mut reader, compress, validate)?; + Vec::deserialize_with_mode(&mut reader, compress, validate)?; let powers_of_gamma_g = - Vec::::deserialize_with_mode(&mut reader, compress, validate)?; - Ok(Self { + Vec::deserialize_with_mode(&mut reader, compress, validate)?; + let result = Self { powers_of_g: Cow::Owned(powers_of_g), powers_of_gamma_g: Cow::Owned(powers_of_gamma_g), - }) + }; + if let Validate::Yes = validate { + result.check()?; + } + Ok(result) } } /// `VerifierKey` is used to check evaluation proofs for a given commitment. 
@@ -204,6 +207,11 @@ pub struct VerifierKey { impl Valid for VerifierKey { fn check(&self) -> Result<(), SerializationError> { + self.g.check()?; + self.gamma_g.check()?; + self.h.check()?; + self.beta_h.check()?; + Ok(()) } } @@ -232,24 +240,28 @@ impl CanonicalDeserialize for VerifierKey { fn deserialize_with_mode( mut reader: R, compress: Compress, - validate: ark_serialize::Validate, + validate: Validate, ) -> Result { - let g = E::G1Affine::deserialize_with_mode(&mut reader, compress, validate)?; - let gamma_g = E::G1Affine::deserialize_with_mode(&mut reader, compress, validate)?; - let h = E::G2Affine::deserialize_with_mode(&mut reader, compress, validate)?; - let beta_h = E::G2Affine::deserialize_with_mode(&mut reader, compress, validate)?; + let g = E::G1Affine::deserialize_with_mode(&mut reader, compress, Validate::No)?; + let gamma_g = E::G1Affine::deserialize_with_mode(&mut reader, compress, Validate::No)?; + let h = E::G2Affine::deserialize_with_mode(&mut reader, compress, Validate::No)?; + let beta_h = E::G2Affine::deserialize_with_mode(&mut reader, compress, Validate::No)?; let prepared_h = E::G2Prepared::from(h.clone()); let prepared_beta_h = E::G2Prepared::from(beta_h.clone()); - - Ok(Self { + let result = Self { g, gamma_g, h, beta_h, prepared_h, prepared_beta_h, - }) + }; + if let Validate::Yes = validate { + result.check()?; + } + + Ok(result) } } diff --git a/src/marlin/marlin_pst13_pc/data_structures.rs b/src/marlin/marlin_pst13_pc/data_structures.rs index eff5a983..776aa3bc 100644 --- a/src/marlin/marlin_pst13_pc/data_structures.rs +++ b/src/marlin/marlin_pst13_pc/data_structures.rs @@ -56,17 +56,13 @@ where P::Point: Index, { fn check(&self) -> Result<(), SerializationError> { - if self.powers_of_g.len() != (self.max_degree + 1) * self.num_vars { - return Err(SerializationError::InvalidData); - } - - if self.beta_h.len() != self.num_vars { - return Err(SerializationError::InvalidData); - } - - if self.prepared_beta_h.len() != self.num_vars { 
- return Err(SerializationError::InvalidData); - } + self.powers_of_g.check()?; + self.gamma_g.check()?; + self.powers_of_gamma_g.check()?; + self.h.check()?; + self.beta_h.check()?; + self.num_vars.check()?; + self.max_degree.check()?; Ok(()) } } @@ -115,21 +111,21 @@ where compress: Compress, validate: Validate, ) -> Result { - let powers_of_g = BTreeMap::::deserialize_with_mode( + let powers_of_g = BTreeMap::deserialize_with_mode( &mut reader, compress, - validate, + Validate::No, )?; - let gamma_g = E::G1Affine::deserialize_with_mode(&mut reader, compress, validate)?; + let gamma_g = E::G1Affine::deserialize_with_mode(&mut reader, compress, Validate::No)?; let powers_of_gamma_g = - Vec::>::deserialize_with_mode(&mut reader, compress, validate)?; - let h = E::G2Affine::deserialize_with_mode(&mut reader, compress, validate)?; - let beta_h = Vec::::deserialize_with_mode(&mut reader, compress, validate)?; - let num_vars = usize::deserialize_with_mode(&mut reader, compress, validate)?; - let max_degree = usize::deserialize_with_mode(&mut reader, compress, validate)?; + Vec::deserialize_with_mode(&mut reader, compress, Validate::No)?; + let h = E::G2Affine::deserialize_with_mode(&mut reader, compress, Validate::No)?; + let beta_h = Vec::::deserialize_with_mode(&mut reader, compress, Validate::No)?; + let num_vars = usize::deserialize_with_mode(&mut reader, compress, Validate::No)?; + let max_degree = usize::deserialize_with_mode(&mut reader, compress, Validate::No)?; let prepared_beta_h = beta_h.iter().map(|x| x.clone().into()).collect(); - Ok(Self { + let result = Self { powers_of_g, gamma_g, powers_of_gamma_g, @@ -139,7 +135,11 @@ where prepared_beta_h, num_vars, max_degree, - }) + }; + if let Validate::Yes = validate { + result.check()?; + } + Ok(result) } } @@ -227,21 +227,21 @@ pub struct VerifierKey { } impl Valid for VerifierKey { fn check(&self) -> Result<(), SerializationError> { + self.g.check()?; + self.gamma_g.check()?; + self.h.check()?; + 
self.beta_h.check()?; + if self.num_vars == 0 { return Err(SerializationError::InvalidData); } if self.supported_degree == 0 { return Err(SerializationError::InvalidData); } - if self.max_degree == 0 { - return Err(SerializationError::InvalidData); - } - if self.beta_h.len() != self.num_vars { - return Err(SerializationError::InvalidData); - } - if self.prepared_beta_h.len() != self.num_vars { + if self.max_degree == 0 || self.max_degree < self.supported_degree { return Err(SerializationError::InvalidData); } + Ok(()) } } @@ -278,16 +278,16 @@ impl CanonicalDeserialize for VerifierKey { compress: Compress, validate: Validate, ) -> Result { - let g = E::G1Affine::deserialize_with_mode(&mut reader, compress, validate)?; - let gamma_g = E::G1Affine::deserialize_with_mode(&mut reader, compress, validate)?; - let h = E::G2Affine::deserialize_with_mode(&mut reader, compress, validate)?; - let beta_h = Vec::::deserialize_with_mode(&mut reader, compress, validate)?; - let num_vars = usize::deserialize_with_mode(&mut reader, compress, validate)?; - let supported_degree = usize::deserialize_with_mode(&mut reader, compress, validate)?; - let max_degree = usize::deserialize_with_mode(&mut reader, compress, validate)?; + let g = E::G1Affine::deserialize_with_mode(&mut reader, compress, Validate::No)?; + let gamma_g = E::G1Affine::deserialize_with_mode(&mut reader, compress, Validate::No)?; + let h = E::G2Affine::deserialize_with_mode(&mut reader, compress, Validate::No)?; + let beta_h = Vec::::deserialize_with_mode(&mut reader, compress, Validate::No)?; + let num_vars = usize::deserialize_with_mode(&mut reader, compress, Validate::No)?; + let supported_degree = usize::deserialize_with_mode(&mut reader, compress, Validate::No)?; + let max_degree = usize::deserialize_with_mode(&mut reader, compress, Validate::No)?; let prepared_beta_h = beta_h.iter().map(|x| x.clone().into()).collect(); - Ok(Self { + let result = Self { g, gamma_g, h, @@ -297,7 +297,11 @@ impl 
CanonicalDeserialize for VerifierKey { num_vars, supported_degree, max_degree, - }) + }; + if let Validate::Yes = validate { + result.check()?; + } + Ok(result) } } diff --git a/src/sonic_pc/data_structures.rs b/src/sonic_pc/data_structures.rs index b9c75030..708b5589 100644 --- a/src/sonic_pc/data_structures.rs +++ b/src/sonic_pc/data_structures.rs @@ -174,7 +174,11 @@ impl VerifierKey { impl Valid for VerifierKey { fn check(&self) -> Result<(), SerializationError> { - // TODO probably need to do more checks here + self.g.check()?; + self.gamma_g.check()?; + self.h.check()?; + self.beta_h.check()?; + self.degree_bounds_and_neg_powers_of_h.check()?; if self.supported_degree > self.max_degree { return Err(SerializationError::InvalidData); } @@ -218,23 +222,23 @@ impl CanonicalDeserialize for VerifierKey { compress: Compress, validate: Validate, ) -> Result { - let g = E::G1Affine::deserialize_with_mode(&mut reader, compress, validate)?; - let gamma_g = E::G1Affine::deserialize_with_mode(&mut reader, compress, validate)?; - let h = E::G2Affine::deserialize_with_mode(&mut reader, compress, validate)?; - let beta_h = E::G2Affine::deserialize_with_mode(&mut reader, compress, validate)?; + let g = E::G1Affine::deserialize_with_mode(&mut reader, compress, Validate::No)?; + let gamma_g = E::G1Affine::deserialize_with_mode(&mut reader, compress, Validate::No)?; + let h = E::G2Affine::deserialize_with_mode(&mut reader, compress, Validate::No)?; + let beta_h = E::G2Affine::deserialize_with_mode(&mut reader, compress, Validate::No)?; let degree_bounds_and_neg_powers_of_h = Option::>::deserialize_with_mode( &mut reader, compress, - validate, + Validate::No, )?; - let supported_degree = usize::deserialize_with_mode(&mut reader, compress, validate)?; - let max_degree = usize::deserialize_with_mode(&mut reader, compress, validate)?; + let supported_degree = usize::deserialize_with_mode(&mut reader, compress, Validate::No)?; + let max_degree = usize::deserialize_with_mode(&mut 
reader, compress, Validate::No)?; let prepared_h = E::G2Prepared::from(h.clone()); let prepared_beta_h = E::G2Prepared::from(beta_h.clone()); - Ok(Self { + let result = Self { g, gamma_g, h, @@ -244,7 +248,13 @@ impl CanonicalDeserialize for VerifierKey { degree_bounds_and_neg_powers_of_h, supported_degree, max_degree, - }) + }; + + if let Validate::Yes = validate { + result.check()?; + } + + Ok(result) } } From 988107272e311d8fb1f359ac3eee00fb90876d85 Mon Sep 17 00:00:00 2001 From: Pratyush Mishra Date: Tue, 14 Feb 2023 17:47:03 -0800 Subject: [PATCH 07/12] Format --- src/kzg10/data_structures.rs | 6 ++---- src/marlin/marlin_pst13_pc/data_structures.rs | 15 ++++++--------- 2 files changed, 8 insertions(+), 13 deletions(-) diff --git a/src/kzg10/data_structures.rs b/src/kzg10/data_structures.rs index 3b6412c0..fb5390aa 100644 --- a/src/kzg10/data_structures.rs +++ b/src/kzg10/data_structures.rs @@ -165,10 +165,8 @@ impl<'a, E: Pairing> CanonicalDeserialize for Powers<'a, E> { compress: Compress, validate: Validate, ) -> Result { - let powers_of_g = - Vec::deserialize_with_mode(&mut reader, compress, validate)?; - let powers_of_gamma_g = - Vec::deserialize_with_mode(&mut reader, compress, validate)?; + let powers_of_g = Vec::deserialize_with_mode(&mut reader, compress, validate)?; + let powers_of_gamma_g = Vec::deserialize_with_mode(&mut reader, compress, validate)?; let result = Self { powers_of_g: Cow::Owned(powers_of_g), powers_of_gamma_g: Cow::Owned(powers_of_gamma_g), diff --git a/src/marlin/marlin_pst13_pc/data_structures.rs b/src/marlin/marlin_pst13_pc/data_structures.rs index 776aa3bc..8ccf300b 100644 --- a/src/marlin/marlin_pst13_pc/data_structures.rs +++ b/src/marlin/marlin_pst13_pc/data_structures.rs @@ -111,16 +111,12 @@ where compress: Compress, validate: Validate, ) -> Result { - let powers_of_g = BTreeMap::deserialize_with_mode( - &mut reader, - compress, - Validate::No, - )?; + let powers_of_g = BTreeMap::deserialize_with_mode(&mut reader, compress, 
Validate::No)?; let gamma_g = E::G1Affine::deserialize_with_mode(&mut reader, compress, Validate::No)?; - let powers_of_gamma_g = - Vec::deserialize_with_mode(&mut reader, compress, Validate::No)?; + let powers_of_gamma_g = Vec::deserialize_with_mode(&mut reader, compress, Validate::No)?; let h = E::G2Affine::deserialize_with_mode(&mut reader, compress, Validate::No)?; - let beta_h = Vec::::deserialize_with_mode(&mut reader, compress, Validate::No)?; + let beta_h = + Vec::::deserialize_with_mode(&mut reader, compress, Validate::No)?; let num_vars = usize::deserialize_with_mode(&mut reader, compress, Validate::No)?; let max_degree = usize::deserialize_with_mode(&mut reader, compress, Validate::No)?; @@ -281,7 +277,8 @@ impl CanonicalDeserialize for VerifierKey { let g = E::G1Affine::deserialize_with_mode(&mut reader, compress, Validate::No)?; let gamma_g = E::G1Affine::deserialize_with_mode(&mut reader, compress, Validate::No)?; let h = E::G2Affine::deserialize_with_mode(&mut reader, compress, Validate::No)?; - let beta_h = Vec::::deserialize_with_mode(&mut reader, compress, Validate::No)?; + let beta_h = + Vec::::deserialize_with_mode(&mut reader, compress, Validate::No)?; let num_vars = usize::deserialize_with_mode(&mut reader, compress, Validate::No)?; let supported_degree = usize::deserialize_with_mode(&mut reader, compress, Validate::No)?; let max_degree = usize::deserialize_with_mode(&mut reader, compress, Validate::No)?; From 4d7cbb8ebf1df0eb07a3382acd26650e9a2597bc Mon Sep 17 00:00:00 2001 From: Pratyush Mishra Date: Tue, 14 Feb 2023 17:49:32 -0800 Subject: [PATCH 08/12] Fix CI for no-std --- .github/workflows/ci.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 0f4be64a..06c397d5 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -87,7 +87,7 @@ jobs: uses: actions-rs/toolchain@v1 with: toolchain: stable - target: aarch64-unknown-none + target: 
thumbv6m-none-eabi override: true - name: Build @@ -95,11 +95,11 @@ jobs: with: use-cross: true command: build - args: --no-default-features --target aarch64-unknown-none + args: --no-default-features --target thumbv6m-none-eabi - name: Check uses: actions-rs/cargo@v1 with: use-cross: true command: check - args: --examples --no-default-features --target aarch64-unknown-none + args: --examples --no-default-features --target thumbv6m-none-eabi From 3391417d7f0f03186cfd05b6cdb9d5b830793d93 Mon Sep 17 00:00:00 2001 From: Pratyush Mishra Date: Tue, 14 Feb 2023 17:53:04 -0800 Subject: [PATCH 09/12] Update CHANGELOG --- CHANGELOG.md | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9203dcd4..1a7ac4d5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,12 +4,13 @@ ### Breaking changes -- [\#82](https://github.com/arkworks-rs/poly-commit/pull/82) Function parameter `opening_challenge: F` for `open`, - `check`, has been changed from `F` to `opening_challenges: &mut ChallengeGenerator`. +- [\#112](https://github.com/arkworks-rs/poly-commit/pull/112) Upgrade all dependencies to `0.4`. +- [\#82](https://github.com/arkworks-rs/poly-commit/pull/82) Argument `opening_challenge: F` for `open`, + `check`, has been changed from `F` to `opening_challenges: &mut ChallengeGenerator`. ### Features -- [\#82](https://github.com/arkworks-rs/poly-commit/pull/82) Add multivariate opening challenge strategy. Integrate with sponge API. +- [\#82](https://github.com/arkworks-rs/poly-commit/pull/82) Add multivariate opening challenge strategy. Integrate with sponge API. ### Improvements @@ -19,7 +20,7 @@ ### Breaking changes -- [\#78](https://github.com/arkworks-rs/poly-commit/pull/78) Fix MarlinPC's CommitterKey to return the correct `supported_degree`. 
+- [\#78](https://github.com/arkworks-rs/poly-commit/pull/78) Fix `MarlinPC`'s `CommitterKey` to return the correct `supported_degree`. ### Features @@ -27,6 +28,6 @@ ### Bug fixes -## v0.2.0 +## v0.2.0 -- initial release of `ark-poly-commit`. \ No newline at end of file +- Initial release of `ark-poly-commit`. From 5fb2d44a5a74dfa848a795ee49fbefe2faefca37 Mon Sep 17 00:00:00 2001 From: Pratyush Mishra Date: Tue, 14 Feb 2023 18:10:21 -0800 Subject: [PATCH 10/12] Fix no-std Co-authored-by: Marcin Gorny --- Cargo.toml | 17 ++++++----------- src/ipa_pc/mod.rs | 4 ++-- src/kzg10/mod.rs | 5 ++--- src/lib.rs | 2 +- src/marlin/marlin_pst13_pc/mod.rs | 6 +++--- src/streaming_kzg/time.rs | 7 +------ 6 files changed, 15 insertions(+), 26 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 0c71527f..aec77249 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,9 +1,6 @@ [package] name = "ark-poly-commit" version = "0.4.0" -authors = [ - "arkworks contributors" -] description = "A library for constructing polynomial commitment schemes for use in zkSNARKs" repository = "https://github.com/arkworks-rs/poly-commit" documentation = "https://docs.rs/ark-poly-commit/" @@ -18,22 +15,22 @@ ark-serialize = { version = "^0.4.0", default-features = false, features = [ "de ark-ff = { version = "^0.4.0", default-features = false } ark-ec = { version = "^0.4.0", default-features = false } ark-poly = {version = "^0.4.0", default-features = false } -ark-crypto-primitives = {version = "^0.4.0", features = ["sponge"] } - +ark-crypto-primitives = {version = "^0.4.0", default-features = false, features = ["sponge"] } ark-std = { version = "^0.4.0", default-features = false } + ark-relations = { version = "^0.4.0", default-features = false, optional = true } ark-r1cs-std = { version = "^0.4.0", default-features = false, optional = true } -hashbrown = { version = "0.9", optional = true } +hashbrown = { version = "0.13", default-features = false, optional = true } -digest = "0.9" 
-rayon = { version = "1", optional = true } +digest = "0.10" derivative = { version = "2", features = [ "use_core" ] } +rayon = { version = "1", optional = true } [dev-dependencies] ark-ed-on-bls12-381 = { version = "^0.4.0", default-features = false } ark-bls12-381 = { version = "^0.4.0", default-features = false, features = [ "curve" ] } ark-bls12-377 = { version = "^0.4.0", default-features = false, features = [ "curve" ] } -blake2 = { version = "0.9", default-features = false } +blake2 = { version = "0.10", default-features = false } rand_chacha = { version = "0.3.0", default-features = false } [profile.release] @@ -48,8 +45,6 @@ debug-assertions = true incremental = true debug = true -# To be removed in the new release. - [features] default = [ "std", "parallel" ] std = [ "ark-ff/std", "ark-ec/std", "ark-poly/std", "ark-std/std", "ark-relations/std", "ark-serialize/std", "ark-crypto-primitives/std"] diff --git a/src/ipa_pc/mod.rs b/src/ipa_pc/mod.rs index eff7143b..53326e5d 100644 --- a/src/ipa_pc/mod.rs +++ b/src/ipa_pc/mod.rs @@ -1078,13 +1078,13 @@ mod tests { use ark_ed_on_bls12_381::{EdwardsAffine, Fr}; use ark_ff::PrimeField; use ark_poly::{univariate::DensePolynomial as DensePoly, DenseUVPolynomial}; - use blake2::Blake2s; + use blake2::Blake2s256; use rand_chacha::ChaCha20Rng; type UniPoly = DensePoly; type Sponge = PoseidonSponge<::ScalarField>; type PC = InnerProductArgPC; - type PC_JJB2S = PC; + type PC_JJB2S = PC; fn rand_poly( degree: usize, diff --git a/src/kzg10/mod.rs b/src/kzg10/mod.rs index e5dd0879..4c52390a 100644 --- a/src/kzg10/mod.rs +++ b/src/kzg10/mod.rs @@ -12,7 +12,6 @@ use ark_ec::{scalar_mul::fixed_base::FixedBase, VariableBaseMSM}; use ark_ff::{One, PrimeField, UniformRand, Zero}; use ark_poly::DenseUVPolynomial; use ark_std::{format, marker::PhantomData, ops::Div, ops::Mul, vec}; -use std::ops::AddAssign; use ark_std::rand::RngCore; #[cfg(feature = "parallel")] @@ -180,7 +179,7 @@ where .into_affine(); end_timer!(msm_time); - 
commitment.add_assign(&random_commitment); + commitment += &random_commitment; end_timer!(commit_time); Ok((Commitment(commitment.into()), randomness)) @@ -333,7 +332,7 @@ where for (((c, z), v), proof) in commitments.iter().zip(points).zip(values).zip(proofs) { let w = proof.w; let mut temp = w.mul(*z); - temp.add_assign(&c.0); + temp += &c.0; let c = temp; g_multiplier += &(randomizer * v); if let Some(random_v) = proof.random_v { diff --git a/src/lib.rs b/src/lib.rs index 2138d492..fdbd8f05 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -60,7 +60,7 @@ macro_rules! eprintln { () => {}; ($($arg: tt)*) => {}; } -#[cfg(not(feature = "std"))] +#[cfg(all(test, not(feature = "std")))] macro_rules! println { () => {}; ($($arg: tt)*) => {}; diff --git a/src/marlin/marlin_pst13_pc/mod.rs b/src/marlin/marlin_pst13_pc/mod.rs index db29d7f6..eaa5eeea 100644 --- a/src/marlin/marlin_pst13_pc/mod.rs +++ b/src/marlin/marlin_pst13_pc/mod.rs @@ -13,7 +13,6 @@ use ark_ff::{One, PrimeField, UniformRand, Zero}; use ark_poly::{multivariate::Term, DenseMVPolynomial}; use ark_std::rand::RngCore; use ark_std::{marker::PhantomData, ops::Index, ops::Mul, vec}; -use std::ops::AddAssign; mod data_structures; pub use data_structures::*; @@ -422,7 +421,7 @@ where end_timer!(msm_time); // Mask commitment with random poly - commitment.add_assign(&random_commitment); + commitment += &random_commitment; let comm = Self::Commitment { comm: kzg10::Commitment(commitment.into()), @@ -622,7 +621,7 @@ where .enumerate() .map(|(j, w_j)| w_j.mul(z[j])) .sum(); - temp.add_assign(&c.0); + temp += &c.0; let c = temp; g_multiplier += &(randomizer * &v); if let Some(random_v) = proof.random_v { @@ -725,6 +724,7 @@ mod tests { multivariate::{SparsePolynomial as SparsePoly, SparseTerm}, DenseMVPolynomial, }; + use ark_std::vec::Vec; use rand_chacha::ChaCha20Rng; type MVPoly_381 = SparsePoly<::ScalarField, SparseTerm>; diff --git a/src/streaming_kzg/time.rs b/src/streaming_kzg/time.rs index 5885495f..8c7fa2f8 100644 
--- a/src/streaming_kzg/time.rs +++ b/src/streaming_kzg/time.rs @@ -5,12 +5,7 @@ use ark_ec::scalar_mul::fixed_base::FixedBase; use ark_ec::CurveGroup; use ark_ff::{PrimeField, Zero}; use ark_poly::{univariate::DensePolynomial, DenseUVPolynomial}; -use ark_std::borrow::Borrow; -use ark_std::ops::Div; -use ark_std::rand::RngCore; -use ark_std::vec::Vec; -use ark_std::UniformRand; -use std::ops::Mul; +use ark_std::{borrow::Borrow, ops::Div, ops::Mul, rand::RngCore, vec::Vec, UniformRand}; use crate::streaming_kzg::{ linear_combination, msm, powers, Commitment, EvaluationProof, VerifierKey, From 258729289ad99d4691dccd899383666baa679898 Mon Sep 17 00:00:00 2001 From: Pratyush Mishra Date: Tue, 14 Feb 2023 18:32:51 -0800 Subject: [PATCH 11/12] Revert CI change --- .github/workflows/ci.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 06c397d5..0f4be64a 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -87,7 +87,7 @@ jobs: uses: actions-rs/toolchain@v1 with: toolchain: stable - target: thumbv6m-none-eabi + target: aarch64-unknown-none override: true - name: Build @@ -95,11 +95,11 @@ jobs: with: use-cross: true command: build - args: --no-default-features --target thumbv6m-none-eabi + args: --no-default-features --target aarch64-unknown-none - name: Check uses: actions-rs/cargo@v1 with: use-cross: true command: check - args: --examples --no-default-features --target thumbv6m-none-eabi + args: --examples --no-default-features --target aarch64-unknown-none From 6859c41776e404f9bad84f19b8b6d3af857c54cb Mon Sep 17 00:00:00 2001 From: Pratyush Mishra Date: Wed, 15 Feb 2023 08:21:23 -0800 Subject: [PATCH 12/12] Fix CI for merge groups --- .github/workflows/ci.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 0f4be64a..f1d55e8b 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1,5 +1,6 @@ 
name: CI on: + merge_group: pull_request: push: branches: