Skip to content

Commit

Permalink
Improve documentation and code quality (#23)
Browse files Browse the repository at this point in the history
  • Loading branch information
davxy authored Jul 10, 2024
1 parent d466265 commit c30e810
Show file tree
Hide file tree
Showing 9 changed files with 137 additions and 126 deletions.
5 changes: 4 additions & 1 deletion Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,10 @@ std = [
"fflonk/std",
"ring-proof/std",
]
getrandom = [ "rand_core" ]
getrandom = [
"rand_core",
"ark-std/getrandom"
]
curves = [
"secp256r1",
"ed25519",
Expand Down
24 changes: 24 additions & 0 deletions src/codec.rs
Original file line number Diff line number Diff line change
Expand Up @@ -110,6 +110,30 @@ where
}
}

/// Serializes the point `pt` into a freshly allocated byte buffer via the
/// suite's configured codec (`Suite::Codec`).
pub fn point_encode<S: Suite>(pt: &AffinePoint<S>) -> Vec<u8> {
    let mut out = Vec::new();
    S::Codec::point_encode(pt, &mut out);
    out
}

/// Point decoder wrapper using `Suite::Codec`.
pub fn point_decode<S: Suite>(buf: &[u8]) -> AffinePoint<S> {
S::Codec::point_decode(buf)
}

/// Serializes the scalar `sc` into a freshly allocated byte buffer via the
/// suite's configured codec (`Suite::Codec`).
pub fn scalar_encode<S: Suite>(sc: &ScalarField<S>) -> Vec<u8> {
    let mut out = Vec::new();
    S::Codec::scalar_encode(sc, &mut out);
    out
}

/// Scalar decoder wrapper using `Suite::Codec`.
pub fn scalar_decode<S: Suite>(buf: &[u8]) -> ScalarField<S> {
S::Codec::scalar_decode(buf)
}

#[cfg(test)]
mod tests {
use crate::testing::{
Expand Down
16 changes: 8 additions & 8 deletions src/ietf.rs
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ impl<S: IetfSuite> CanonicalSerialize for Proof<S> {
mut writer: W,
_compress_always: ark_serialize::Compress,
) -> Result<(), ark_serialize::SerializationError> {
let c_buf = utils::scalar_encode::<S>(&self.c);
let c_buf = codec::scalar_encode::<S>(&self.c);
if c_buf.len() < S::CHALLENGE_LEN {
// Encoded scalar length must be at least S::CHALLENGE_LEN
return Err(ark_serialize::SerializationError::NotEnoughSpace);
Expand Down Expand Up @@ -56,7 +56,7 @@ impl<S: IetfSuite> CanonicalDeserialize for Proof<S> {
if reader.read_exact(&mut c_buf[..]).is_err() {
return Err(ark_serialize::SerializationError::InvalidData);
}
let c = utils::scalar_decode::<S>(&c_buf);
let c = codec::scalar_decode::<S>(&c_buf);
let s = <ScalarField<S> as CanonicalDeserialize>::deserialize_with_mode(
&mut reader,
ark_serialize::Compress::No,
Expand Down Expand Up @@ -144,8 +144,8 @@ pub mod testing {

impl<S: IetfSuite> core::fmt::Debug for TestVector<S> {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
let c = hex::encode(utils::scalar_encode::<S>(&self.c));
let s = hex::encode(utils::scalar_encode::<S>(&self.s));
let c = hex::encode(codec::scalar_encode::<S>(&self.c));
let s = hex::encode(codec::scalar_encode::<S>(&self.s));
f.debug_struct("TestVector")
.field("base", &self.base)
.field("proof_c", &c)
Expand Down Expand Up @@ -179,13 +179,13 @@ pub mod testing {

fn from_map(map: &common::TestVectorMap) -> Self {
let base = common::TestVector::from_map(map);
let c = utils::scalar_decode::<S>(&map.item_bytes("proof_c"));
let s = utils::scalar_decode::<S>(&map.item_bytes("proof_s"));
let c = codec::scalar_decode::<S>(&map.item_bytes("proof_c"));
let s = codec::scalar_decode::<S>(&map.item_bytes("proof_s"));
Self { base, c, s }
}

fn to_map(&self) -> common::TestVectorMap {
let buf = utils::scalar_encode::<S>(&self.c);
let buf = codec::scalar_encode::<S>(&self.c);
let proof_c = if S::Codec::BIG_ENDIAN {
let len = buf.len();
&buf[len - S::CHALLENGE_LEN..]
Expand All @@ -194,7 +194,7 @@ pub mod testing {
};
let items = [
("proof_c", hex::encode(proof_c)),
("proof_s", hex::encode(utils::scalar_encode::<S>(&self.s))),
("proof_s", hex::encode(codec::scalar_encode::<S>(&self.s))),
];
let mut map = self.base.to_map();
items.into_iter().for_each(|(name, value)| {
Expand Down
12 changes: 7 additions & 5 deletions src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -105,7 +105,7 @@ pub trait Suite: Copy + Clone {
///
/// # Panics
///
/// This function panics if `Hasher` output is less than 32 bytes.
/// This function panics if `Hasher` output is less than 64 bytes.
fn nonce(sk: &ScalarField<Self>, pt: Input<Self>) -> ScalarField<Self> {
utils::nonce_rfc_8032::<Self>(sk, &pt.0)
}
Expand All @@ -124,7 +124,7 @@ pub trait Suite: Copy + Clone {
///
/// By default uses "try and increment" method described by RFC 9381.
fn data_to_point(data: &[u8]) -> Option<AffinePoint<Self>> {
utils::hash_to_curve_tai_rfc_9381::<Self>(data, false)
utils::hash_to_curve_tai_rfc_9381::<Self>(data)
}

/// Map the point to a hash value using `Self::Hasher`.
Expand Down Expand Up @@ -265,12 +265,14 @@ mod tests {
};

#[test]
fn proof_to_hash_works() {
fn vrf_output_check() {
use ark_std::rand::SeedableRng;
let mut rng = rand_chacha::ChaCha20Rng::from_seed([42; 32]);
let secret = Secret::from_seed(TEST_SEED);
let input = Input::from(random_val(None));
let input = Input::from(random_val(Some(&mut rng)));
let output = secret.output(input);

let expected = "2eaa1a349197bb2b6c455bc5554b331162f0e9b13aea0aab28283cc30e7c6482";
let expected = "0245a793d85347ca3c056f8c8f42f1049a310fabff6933b9eae592541a545cb8";
assert_eq!(expected, hex::encode(output.hash()));
}
}
24 changes: 12 additions & 12 deletions src/pedersen.rs
Original file line number Diff line number Diff line change
Expand Up @@ -198,12 +198,12 @@ pub mod testing {

fn from_map(map: &common::TestVectorMap) -> Self {
let base = common::TestVector::from_map(map);
let blind = utils::scalar_decode::<S>(&map.item_bytes("blinding"));
let pk_blind = utils::decode_point::<S>(&map.item_bytes("proof_pkb"));
let r = utils::decode_point::<S>(&map.item_bytes("proof_r"));
let ok = utils::decode_point::<S>(&map.item_bytes("proof_ok"));
let s = utils::scalar_decode::<S>(&map.item_bytes("proof_s"));
let sb = utils::scalar_decode::<S>(&map.item_bytes("proof_sb"));
let blind = codec::scalar_decode::<S>(&map.item_bytes("blinding"));
let pk_blind = codec::point_decode::<S>(&map.item_bytes("proof_pkb"));
let r = codec::point_decode::<S>(&map.item_bytes("proof_r"));
let ok = codec::point_decode::<S>(&map.item_bytes("proof_ok"));
let s = codec::scalar_decode::<S>(&map.item_bytes("proof_s"));
let sb = codec::scalar_decode::<S>(&map.item_bytes("proof_sb"));
let proof = Proof {
pk_blind,
r,
Expand All @@ -218,27 +218,27 @@ pub mod testing {
let items = [
(
"blinding",
hex::encode(utils::scalar_encode::<S>(&self.blind)),
hex::encode(codec::scalar_encode::<S>(&self.blind)),
),
(
"proof_pkb",
hex::encode(utils::encode_point::<S>(&self.proof.pk_blind)),
hex::encode(codec::point_encode::<S>(&self.proof.pk_blind)),
),
(
"proof_r",
hex::encode(utils::encode_point::<S>(&self.proof.r)),
hex::encode(codec::point_encode::<S>(&self.proof.r)),
),
(
"proof_ok",
hex::encode(utils::encode_point::<S>(&self.proof.ok)),
hex::encode(codec::point_encode::<S>(&self.proof.ok)),
),
(
"proof_s",
hex::encode(utils::scalar_encode::<S>(&self.proof.s)),
hex::encode(codec::scalar_encode::<S>(&self.proof.s)),
),
(
"proof_sb",
hex::encode(utils::scalar_encode::<S>(&self.proof.sb)),
hex::encode(codec::scalar_encode::<S>(&self.proof.sb)),
),
];
let mut map = self.base.to_map();
Expand Down
73 changes: 35 additions & 38 deletions src/suites/bandersnatch.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,56 +2,53 @@
//!
//! Configuration:
//!
//! * `suite_string` = b"Bandersnatch_SHA-512_ELL2" for Twisted Edwards form.
//! * `suite_string` = b"Bandersnatch_SW_SHA-512_TAI" for Short Weierstrass form.
//! * `suite_string` = b"Bandersnatch_SHA-512_ELL2" for Twisted Edwards form.
//! * `suite_string` = b"Bandersnatch_SW_SHA-512_TAI" for Short Weierstrass form.
//!
//! * The EC group G is the Bandersnatch elliptic curve, in Short Weierstrass or
//! Twisted Edwards form, with the finite field and curve parameters as specified
//! [here](https://neuromancer.sk/std/bls/Bandersnatch)
//! For this group, `fLen` = `qLen` = 32 and `cofactor` = 4.
//! - The EC group <G> is the prime subgroup of the Bandersnatch elliptic curve,
//! in Twisted Edwards form, with finite field and curve parameters as specified in
//! [MSZ21](https://eprint.iacr.org/2021/1152).
//! For this group, `fLen` = `qLen` = $32$ and `cofactor` = $4$.
//!
//! * `cLen` = 32.
//! - The prime subgroup generator G in <G> is defined as follows:
//! - G.x = 0x29c132cc2c0b34c5743711777bbe42f32b79c022ad998465e1e71866a252ae18
//! - G.y = 0x2a6c669eda123e0f157d8b50badcd586358cad81eee464605e3167b6cc974166
//!
//! * The key pair generation primitive is `PK = SK * g`, with SK the secret
//! key scalar and `g` the group generator. In this ciphersuite, the secret
//! scalar x is equal to the secret key SK.
//! * `cLen` = 32.
//!
//! * encode_to_curve_salt = PK_string.
//! * The key pair generation primitive is `PK = sk * G`, with sk the secret
//! key scalar and `G` the group generator. In this ciphersuite, the secret
//! scalar x is equal to the secret key scalar sk.
//!
//! * The ECVRF_nonce_generation function is as specified in
//! Section 5.4.2.1.
//! * encode_to_curve_salt = PS_string (point_to_string(PK)).
//!
//! * The int_to_string function encodes into the 32 bytes little endian
//! representation.
//! * The ECVRF_nonce_generation function is as specified in Section 5.4.2.2
//! of RFC-9381.
//!
//! * The string_to_int function decodes from the 32 bytes little endian
//! representation.
//! * The int_to_string function encodes into the 32 bytes little endian
//! representation.
//!
//! * The point_to_string function converts a point on E to an octet
//! string using compressed form. The Y coordinate is encoded using
//! int_to_string function and the most significant bit of the last
//! octet is used to keep track of the X's sign. This implies that
//! the point is encoded on 32 bytes.
//! * The string_to_int function decodes from the 32 bytes little endian
//! representation.
//!
//! * The string_to_point function tries to decompress the point encoded
//! according to `point_to_string` procedure. This function MUST output
//! "INVALID" if the octet string does not decode to a point on the curve E.
//! * The point_to_string function converts a point in <G> to an octet
//! string using compressed form. The y coordinate is encoded using
//! int_to_string function and the most significant bit of the last
//! octet is used to keep track of the x's sign. This implies that
//! the point is encoded on 32 bytes.
//!
//! * The hash function Hash is SHA-512 as specified in
//! [RFC6234](https://www.rfc-editor.org/rfc/rfc6234), with hLen = 64.
//! * The string_to_point function tries to decompress the point encoded
//! according to `point_to_string` procedure. This function MUST output
//! "INVALID" if the octet string does not decode to a point on G.
//!
//! * The ECVRF_encode_to_curve function is as specified in
//! Section 5.4.1.2, with `h2c_suite_ID_string` = `"Bandersnatch_XMD:SHA-512_ELL2_RO_"`.
//! The suite is defined in Section 8.5 of [RFC9380](https://datatracker.ietf.org/doc/rfc9380/).
//! * The hash function Hash is SHA-512 as specified in
//! [RFC6234](https://www.rfc-editor.org/rfc/rfc6234), with hLen = 64.
//!
//! * The prime subgroup generator is generated following Zcash's fashion:
// "The generators of G1 and G2 are computed by finding the lexicographically
// smallest valid x-coordinate, and its lexicographically smallest
// y-coordinate and scaling it by the cofactor such that the result is not
// the point at infinity."
//
// GENERATOR_X = 18886178867200960497001835917649091219057080094937609519140440539760939937304
// GENERATOR_Y = 19188667384257783945677642223292697773471335439753913231509108946878080696678
//! * The `ECVRF_encode_to_curve` function uses *Elligator2* method described in
//! section 6.8.2 of [RFC-9380](https://datatracker.ietf.org/doc/rfc9380) and is
//! described in section 5.4.1.2 of [RFC-9381](https://datatracker.ietf.org/doc/rfc9381),
//! with `h2c_suite_ID_string` = `"Bandersnatch_XMD:SHA-512_ELL2_RO_"`
//! and domain separation tag `DST = "ECVRF_" || h2c_suite_ID_string || suite_string`.
use crate::{pedersen::PedersenSuite, utils::ark_next::*, *};
use ark_ff::MontFp;
Expand Down
2 changes: 1 addition & 1 deletion src/suites/secp256.rs
Original file line number Diff line number Diff line change
Expand Up @@ -72,7 +72,7 @@ impl Suite for P256Sha256Tai {
}

fn data_to_point(data: &[u8]) -> Option<AffinePoint> {
utils::hash_to_curve_tai_rfc_9381::<Self>(data, true)
utils::hash_to_curve_tai_rfc_9381::<Self>(data)
}
}

Expand Down
32 changes: 18 additions & 14 deletions src/testing.rs
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,10 @@ pub(crate) mod suite {
type Affine = ark_ed25519::EdwardsAffine;
type Hasher = sha2::Sha256;
type Codec = codec::ArkworksCodec;

// Test-suite override of the default nonce derivation: both the secret key
// and the input point are ignored.
// NOTE(review): delegates to `random_val(None)` — presumably a randomized
// value for test purposes; confirm against `random_val`'s definition.
fn nonce(_sk: &ScalarField, _pt: Input) -> ScalarField {
random_val(None)
}
}

suite_types!(TestSuite);
Expand Down Expand Up @@ -152,12 +156,12 @@ macro_rules! ring_suite_tests {

impl<S: Suite> core::fmt::Debug for TestVector<S> {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
let sk = hex::encode(utils::scalar_encode::<S>(&self.sk));
let pk = hex::encode(utils::encode_point::<S>(&self.pk));
let sk = hex::encode(codec::scalar_encode::<S>(&self.sk));
let pk = hex::encode(codec::point_encode::<S>(&self.pk));
let alpha = hex::encode(&self.alpha);
let ad = hex::encode(&self.ad);
let h = hex::encode(utils::encode_point::<S>(&self.h));
let gamma = hex::encode(utils::encode_point::<S>(&self.gamma));
let h = hex::encode(codec::point_encode::<S>(&self.h));
let gamma = hex::encode(codec::point_encode::<S>(&self.gamma));
let beta = hex::encode(&self.beta);
f.debug_struct("TestVector")
.field("comment", &self.comment)
Expand Down Expand Up @@ -227,7 +231,7 @@ impl<S: Suite + std::fmt::Debug> TestVectorTrait for TestVector<S> {

let salt = salt
.map(|v| v.to_vec())
.unwrap_or_else(|| utils::encode_point::<S>(&pk));
.unwrap_or_else(|| codec::point_encode::<S>(&pk));

let h2c_data = [&salt[..], alpha].concat();
let h = <S as Suite>::data_to_point(&h2c_data).unwrap();
Expand Down Expand Up @@ -255,12 +259,12 @@ impl<S: Suite + std::fmt::Debug> TestVectorTrait for TestVector<S> {
let item_bytes = |field| hex::decode(map.0.get(field).unwrap()).unwrap();
let comment = map.0.get("comment").unwrap().to_string();
let flags = item_bytes("flags")[0];
let sk = utils::scalar_decode::<S>(&item_bytes("sk"));
let pk = utils::decode_point::<S>(&item_bytes("pk"));
let sk = codec::scalar_decode::<S>(&item_bytes("sk"));
let pk = codec::point_decode::<S>(&item_bytes("pk"));
let alpha = item_bytes("alpha");
let ad = item_bytes("ad");
let h = utils::decode_point::<S>(&item_bytes("h"));
let gamma = utils::decode_point::<S>(&item_bytes("gamma"));
let h = codec::point_decode::<S>(&item_bytes("h"));
let gamma = codec::point_decode::<S>(&item_bytes("gamma"));
let beta = item_bytes("beta");
Self {
comment,
Expand All @@ -279,12 +283,12 @@ impl<S: Suite + std::fmt::Debug> TestVectorTrait for TestVector<S> {
let items = [
("comment", self.comment.clone()),
("flags", hex::encode([self.flags])),
("sk", hex::encode(utils::scalar_encode::<S>(&self.sk))),
("pk", hex::encode(utils::encode_point::<S>(&self.pk))),
("sk", hex::encode(codec::scalar_encode::<S>(&self.sk))),
("pk", hex::encode(codec::point_encode::<S>(&self.pk))),
("alpha", hex::encode(&self.alpha)),
("ad", hex::encode(&self.ad)),
("h", hex::encode(utils::encode_point::<S>(&self.h))),
("gamma", hex::encode(utils::encode_point::<S>(&self.gamma))),
("h", hex::encode(codec::point_encode::<S>(&self.h))),
("gamma", hex::encode(codec::point_encode::<S>(&self.gamma))),
("beta", hex::encode(&self.beta)),
// ("proof_c", hex::encode(utils::encode_scalar::<S>(&v.c))),
// ("proof_s", hex::encode(utils::encode_scalar::<S>(&v.s))),
Expand All @@ -304,7 +308,7 @@ impl<S: Suite + std::fmt::Debug> TestVectorTrait for TestVector<S> {

// Prepare hash_to_curve data = salt || alpha
// Salt is defined to be pk (adjust it to make the encoding to match)
let pk_bytes = utils::encode_point::<S>(&pk.0);
let pk_bytes = codec::point_encode::<S>(&pk.0);
let h2c_data = [&pk_bytes[..], &self.alpha[..]].concat();

let h = S::data_to_point(&h2c_data).unwrap();
Expand Down
Loading

0 comments on commit c30e810

Please sign in to comment.