Change pedersen_commit and add cfg_into_iter!
autquis committed Jan 22, 2024
1 parent 5677c5b commit c2e6412
Showing 3 changed files with 37 additions and 67 deletions.
80 changes: 31 additions & 49 deletions poly-commit/src/hyrax/mod.rs
@@ -1,5 +1,4 @@
use crate::hyrax::utils::tensor_prime;
use crate::to_bytes;
use crate::utils::{inner_product, scalar_by_vector, vector_sum, Matrix};
use crate::{
hyrax::utils::flat_to_matrix_column_major, Error, LabeledCommitment, LabeledPolynomial,
@@ -9,6 +8,7 @@ use ark_crypto_primitives::sponge::{Absorb, CryptographicSponge};
use ark_ec::{AffineRepr, CurveGroup, VariableBaseMSM};
use ark_ff::PrimeField;
use ark_poly::MultilinearExtension;
use ark_serialize::serialize_to_vec;
use ark_std::{marker::PhantomData, rand::RngCore, string::ToString, vec::Vec, UniformRand};
use blake2::Blake2s256;
use digest::Digest;
@@ -80,42 +80,19 @@ where
{
/// Pedersen commitment to a vector of scalars as described in appendix A.1
/// of the reference article.
/// The caller must either directly pass hiding exponent `r` inside Some,
/// or provide an rng so that `r` can be sampled.
/// If there are `n` scalars, the first `n` elements of the key will be
/// multiplied by them in the same order, and its `n + 1`th element will be
/// multiplied by `r`.
/// The function does not add the hiding term `h * r`.
/// It is only a wrapper around MSM.
///
/// # Panics
///
/// Panics if both `r` and `rng` are None.
fn pedersen_commit(
key: &HyraxCommitterKey<G>,
scalars: &[G::ScalarField],
r: Option<G::ScalarField>,
rng: Option<&mut dyn RngCore>,
) -> (G, G::ScalarField) {
// Cannot use unwrap_or, since its argument is always evaluated
let r = match r {
Some(v) => v,
None => G::ScalarField::rand(rng.expect("Either r or rng must be provided")),
};

let mut scalars_ext = Vec::from(scalars);
scalars_ext.push(r);

// Trimming the key to the length of the coefficient vector
let mut points_ext = key.com_key[0..scalars.len()].to_vec();
points_ext.push(key.h);

/// Panics if `key` and `scalars` do not have the same length.
fn pedersen_commit(key: &[G], scalars: &[G::ScalarField]) -> G::Group {
assert_eq!(key.len(), scalars.len());
let scalars_bigint = ark_std::cfg_iter!(scalars)
.map(|s| s.into_bigint())
.collect::<Vec<_>>();

// Multi-exponentiation in the group of points of the EC
let com = <G::Group as VariableBaseMSM>::msm_bigint(&points_ext, &scalars_bigint);

(com.into(), r)
<G::Group as VariableBaseMSM>::msm_bigint(&key, &scalars_bigint)
}
}
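With this change `pedersen_commit` is a bare MSM over an explicit key slice, and every caller samples the blinding factor and adds the hiding term itself, as the hunks below show. A minimal sketch of that caller-side pattern as a standalone helper; the name `commit_hiding` and its argument list (`key`, `h`, `rng`) are illustrative assumptions, not code from this commit:

use ark_ec::{AffineRepr, CurveGroup, VariableBaseMSM};
use ark_ff::PrimeField;
use ark_std::{rand::RngCore, vec::Vec, UniformRand};

// Hedged sketch: commit to `scalars` under `key` and add the hiding term `h * r`
// on the caller side, mirroring how the commit and opening code below use the
// new `pedersen_commit`.
fn commit_hiding<G: AffineRepr>(
    key: &[G],
    h: G,
    scalars: &[G::ScalarField],
    rng: &mut impl RngCore,
) -> (G, G::ScalarField) {
    assert_eq!(key.len(), scalars.len());
    // Blinding factor, now sampled by the caller rather than inside `pedersen_commit`.
    let r = G::ScalarField::rand(rng);
    let bigints: Vec<_> = scalars.iter().map(|s| s.into_bigint()).collect();
    // Plain MSM (what the new `pedersen_commit` does) plus the hiding term.
    let com = <G::Group as VariableBaseMSM>::msm_bigint(key, &bigints) + h * r;
    (com.into_affine(), r)
}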

@@ -260,10 +237,10 @@ where
let (row_coms, com_rands): (Vec<_>, Vec<_>) = cfg_iter!(m)
.map(|row| {
#[cfg(not(feature = "parallel"))]
let (c, r) = Self::pedersen_commit(ck, row, None, Some(rng_inner));
let r = G::ScalarField::rand(rng_inner);
#[cfg(feature = "parallel")]
let (c, r) =
Self::pedersen_commit(ck, row, None, Some(&mut rand::thread_rng()));
let r = G::ScalarField::rand(&mut rand::thread_rng());
let c = (Self::pedersen_commit(&ck.com_key, row) + ck.h * r).into();
(c, r)
})
.unzip();
@@ -360,10 +337,10 @@ where
}

// Absorbing public parameters
sponge.absorb(&to_bytes!(ck).map_err(|_| Error::TranscriptError)?);
sponge.absorb(&serialize_to_vec!(*ck).map_err(|_| Error::TranscriptError)?);

// Absorbing the commitment to the polynomial
sponge.absorb(&to_bytes!(&com.row_coms).map_err(|_| Error::TranscriptError)?);
sponge.absorb(&serialize_to_vec!(com.row_coms).map_err(|_| Error::TranscriptError)?);

// Absorbing the point
sponge.absorb(point);
@@ -383,7 +360,10 @@ where
let eval = inner_product(&lt, &r);

// Singleton commit
let (com_eval, r_eval) = Self::pedersen_commit(ck, &[eval], None, Some(rng_inner));
let (com_eval, r_eval) = {
let r = G::ScalarField::rand(rng_inner);
((ck.com_key[0] * eval + ck.h * r).into(), r)
};

// ******** Dot product argument ********
// Appendix A.2 in the reference article
@@ -394,17 +374,19 @@ where
let b = inner_product(&r, &d);

// Multi-commit
let (com_d, r_d) = Self::pedersen_commit(ck, &d, None, Some(rng_inner));
let r_d = G::ScalarField::rand(rng_inner);
let com_d = (Self::pedersen_commit(&ck.com_key, &d) + ck.h * r_d).into();

// Singleton commit
let (com_b, r_b) = Self::pedersen_commit(ck, &[b], None, Some(rng_inner));
let r_b = G::ScalarField::rand(rng_inner);
let com_b = (ck.com_key[0] * b + ck.h * r_b).into();

// Absorbing the commitment to the evaluation
sponge.absorb(&to_bytes!(&com_eval).map_err(|_| Error::TranscriptError)?);
sponge.absorb(&serialize_to_vec!(com_eval).map_err(|_| Error::TranscriptError)?);

// Absorbing the two auxiliary commitments
sponge.absorb(&to_bytes!(&com_d).map_err(|_| Error::TranscriptError)?);
sponge.absorb(&to_bytes!(&com_b).map_err(|_| Error::TranscriptError)?);
sponge.absorb(&serialize_to_vec!(com_d).map_err(|_| Error::TranscriptError)?);
sponge.absorb(&serialize_to_vec!(com_b).map_err(|_| Error::TranscriptError)?);

// Receive the random challenge c from the verifier, i.e. squeeze
// it from the transcript.
@@ -493,36 +475,36 @@ where
let l_bigint = cfg_iter!(l)
.map(|chi| chi.into_bigint())
.collect::<Vec<_>>();
let t_prime: G = <G::Group as VariableBaseMSM>::msm_bigint(row_coms, &l_bigint).into();
let t_prime: G = <G::Group as VariableBaseMSM>::msm_bigint(&row_coms, &l_bigint).into();

// Absorbing public parameters
sponge.absorb(&to_bytes!(vk).map_err(|_| Error::TranscriptError)?);
sponge.absorb(&serialize_to_vec!(*vk).map_err(|_| Error::TranscriptError)?);

// Absorbing the commitment to the polynomial
sponge.absorb(&to_bytes!(row_coms).map_err(|_| Error::TranscriptError)?);
sponge.absorb(&serialize_to_vec!(*row_coms).map_err(|_| Error::TranscriptError)?);

// Absorbing the point
sponge.absorb(point);

// Absorbing the commitment to the evaluation
sponge.absorb(&to_bytes!(com_eval).map_err(|_| Error::TranscriptError)?);
sponge.absorb(&serialize_to_vec!(*com_eval).map_err(|_| Error::TranscriptError)?);

// Absorbing the two auxiliary commitments
sponge.absorb(&to_bytes!(com_d).map_err(|_| Error::TranscriptError)?);
sponge.absorb(&to_bytes!(com_b).map_err(|_| Error::TranscriptError)?);
sponge.absorb(&serialize_to_vec!(*com_d).map_err(|_| Error::TranscriptError)?);
sponge.absorb(&serialize_to_vec!(*com_b).map_err(|_| Error::TranscriptError)?);

// Receive the random challenge c from the verifier, i.e. squeeze
// it from the transcript.
let c: G::ScalarField = sponge.squeeze_field_elements(1)[0];

// First check
let com_z_zd = Self::pedersen_commit(vk, z, Some(*z_d), None).0;
let com_z_zd = (Self::pedersen_commit(&vk.com_key, z) + vk.h * z_d).into();
if com_z_zd != (t_prime.mul(c) + com_d).into() {
return Ok(false);
}

// Second check
let com_dp = Self::pedersen_commit(vk, &[inner_product(&r, z)], Some(*z_b), None).0;
let com_dp = (vk.com_key[0] * inner_product(&r, z) + vk.h * z_b).into();
if com_dp != (com_eval.mul(c) + com_b).into() {
return Ok(false);
}
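For orientation: assuming the (elided) prover responses are z = d + c*lt, z_d = r_d + c*<l, com_rands> and z_b = r_b + c*r_eval, as in appendix A.2 of the reference article (here lt denotes the matrix-vector product l^T * M and com_rands the row-commitment blinders, neither shown in this hunk), both checks reduce to the additive homomorphism of Pedersen commitments:

    Com(z; z_d)      = Com(d; r_d) + c*Com(lt; <l, com_rands>) = com_d + c*t_prime
    Com(<r, z>; z_b) = Com(b; r_b) + c*Com(eval; r_eval)       = com_b + c*com_eval

which is exactly what the two comparisons above check, up to conversion to affine form.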
6 changes: 3 additions & 3 deletions poly-commit/src/hyrax/utils.rs
@@ -4,10 +4,10 @@ use ark_std::vec::Vec;
#[cfg(feature = "parallel")]
use rayon::prelude::*;

/// Transforms a flat vector into a matrix in column-major order. The latter is
/// given as a list of rows.
/// Transforms a flat vector into an n*m matrix in column-major order. The
/// latter is given as a list of rows.
///
/// For example, if flat = [1, 2, 3, 4, 5, 6] and n = 2, m = 3, then
/// For example, if flat = [1, 2, 3, 4, 5, 6] and n = 3, m = 2, then
/// the output is [[1, 3, 5], [2, 4, 6]].
pub(crate) fn flat_to_matrix_column_major<T: Copy>(flat: &[T], n: usize, m: usize) -> Vec<Vec<T>> {
assert_eq!(flat.len(), n * m, "n * m should coincide with flat.len()");
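The body of `flat_to_matrix_column_major` is collapsed in this diff; the following is only a sketch of one implementation consistent with the docstring example above, under the assumption that `flat` is read column by column and the result is returned as rows. The name `flat_to_matrix_column_major_sketch` is illustrative:

use ark_std::vec::Vec;

// Hedged sketch only; matches the example flat = [1, 2, 3, 4, 5, 6], n = 3, m = 2
// -> [[1, 3, 5], [2, 4, 6]].
fn flat_to_matrix_column_major_sketch<T: Copy>(flat: &[T], n: usize, m: usize) -> Vec<Vec<T>> {
    assert_eq!(flat.len(), n * m, "n * m should coincide with flat.len()");
    (0..m)
        .map(|row| (0..n).map(|col| flat[col * m + row]).collect())
        .collect()
}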
18 changes: 3 additions & 15 deletions poly-commit/src/utils.rs
@@ -4,22 +4,10 @@ use ark_std::vec::Vec;

#[cfg(feature = "parallel")]
use rayon::{
iter::{IntoParallelRefIterator, ParallelIterator},
iter::{IntoParallelIterator, IntoParallelRefIterator, ParallelIterator},
prelude::IndexedParallelIterator,
};

/// Takes as input a struct and converts it to a series of bytes. Any type
/// that implements `CanonicalSerialize` can be automatically converted to bytes
/// in this manner.
/// From the jellyfish lib.
#[macro_export]
macro_rules! to_bytes {
($x:expr) => {{
let mut buf = ark_std::vec![];
ark_serialize::CanonicalSerialize::serialize_compressed($x, &mut buf).map(|_| buf)
}};
}
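The removed `to_bytes!` macro duplicated what `ark_serialize` already offers; the Hyrax code above now calls `serialize_to_vec!` directly. For reference, a sketch of the same behavior written as a plain function; the name `to_bytes_sketch` is illustrative:

use ark_serialize::{CanonicalSerialize, SerializationError};
use ark_std::vec::Vec;

// Hedged sketch of the removed macro's behavior: compressed serialization of any
// `CanonicalSerialize` value into a byte vector.
fn to_bytes_sketch<T: CanonicalSerialize>(x: &T) -> Result<Vec<u8>, SerializationError> {
    let mut buf = Vec::new();
    x.serialize_compressed(&mut buf)?;
    Ok(buf)
}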

/// Return ceil(x / y).
pub(crate) fn ceil_div(x: usize, y: usize) -> usize {
// XXX. warning: this expression can overflow.
@@ -70,11 +58,11 @@ impl<F: Field> Matrix<F> {
self.n
);

(0..self.m)
cfg_into_iter!(0..self.m)
.map(|col| {
inner_product(
v,
&(0..self.n)
&cfg_into_iter!(0..self.n)
.map(|row| self.entries[row][col])
.collect::<Vec<F>>(),
)
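`cfg_into_iter!` comes from `ark_std`: with the `parallel` feature enabled it yields a rayon parallel iterator, otherwise a plain sequential one, so the same map/collect body serves both builds. A minimal usage sketch; the function name `squares` is illustrative:

use ark_std::{cfg_into_iter, vec::Vec};
#[cfg(feature = "parallel")]
use rayon::prelude::*;

// Hedged sketch: runs on a rayon parallel iterator when the `parallel` feature is
// on, and on a plain sequential iterator otherwise.
fn squares(n: usize) -> Vec<usize> {
    cfg_into_iter!(0..n).map(|i| i * i).collect()
}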
