From c30e8102c838541a1f7fe5125a81aaaf6cc6b72c Mon Sep 17 00:00:00 2001
From: Davide Galassi
Date: Wed, 10 Jul 2024 10:30:16 +0200
Subject: [PATCH] Improve documentation and code quality (#23)

---
 Cargo.toml                 |  5 ++-
 src/codec.rs               | 24 ++++++++++++
 src/ietf.rs                | 16 ++++----
 src/lib.rs                 | 12 +++---
 src/pedersen.rs            | 24 ++++++------
 src/suites/bandersnatch.rs | 73 ++++++++++++++++++-------------------
 src/suites/secp256.rs      |  2 +-
 src/testing.rs             | 32 +++++++++-------
 src/utils.rs               | 75 ++++++++++++++------------------------
 9 files changed, 137 insertions(+), 126 deletions(-)

diff --git a/Cargo.toml b/Cargo.toml
index 695bb66..0fb90ad 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -46,7 +46,10 @@ std = [
     "fflonk/std",
     "ring-proof/std",
 ]
-getrandom = [ "rand_core" ]
+getrandom = [
+    "rand_core",
+    "ark-std/getrandom"
+]
 curves = [
     "secp256r1",
     "ed25519",
diff --git a/src/codec.rs b/src/codec.rs
index 6455683..fdd6a10 100644
--- a/src/codec.rs
+++ b/src/codec.rs
@@ -110,6 +110,30 @@ where
     }
 }
 
+/// Point encoder wrapper using `Suite::Codec`.
+pub fn point_encode<S: Suite>(pt: &AffinePoint<S>) -> Vec<u8> {
+    let mut buf = Vec::new();
+    S::Codec::point_encode(pt, &mut buf);
+    buf
+}
+
+/// Point decoder wrapper using `Suite::Codec`.
+pub fn point_decode<S: Suite>(buf: &[u8]) -> AffinePoint<S> {
+    S::Codec::point_decode(buf)
+}
+
+/// Scalar encoder wrapper using `Suite::Codec`.
+pub fn scalar_encode<S: Suite>(sc: &ScalarField<S>) -> Vec<u8> {
+    let mut buf = Vec::new();
+    S::Codec::scalar_encode(sc, &mut buf);
+    buf
+}
+
+/// Scalar decoder wrapper using `Suite::Codec`.
+pub fn scalar_decode<S: Suite>(buf: &[u8]) -> ScalarField<S> {
+    S::Codec::scalar_decode(buf)
+}
+
 #[cfg(test)]
 mod tests {
     use crate::testing::{
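The four wrappers added above are a straight move of the `utils::{encode_point, decode_point, scalar_encode, scalar_decode}` helpers into `codec` (their removal appears in the `src/utils.rs` hunks at the end of this patch). A minimal round-trip sketch of the new call sites; the crate name `ark_ec_vrfs` used in the import is an assumption, adjust to the actual package name:

    use ark_ec_vrfs::{codec, AffinePoint, ScalarField, Suite};

    // Encode then decode a point and a scalar through the suite's codec;
    // both round-trips are expected to be lossless.
    fn codec_roundtrip<S: Suite>(pt: &AffinePoint<S>, sc: &ScalarField<S>) {
        let pt_bytes = codec::point_encode::<S>(pt);
        assert_eq!(pt, &codec::point_decode::<S>(&pt_bytes));
        let sc_bytes = codec::scalar_encode::<S>(sc);
        assert_eq!(sc, &codec::scalar_decode::<S>(&sc_bytes));
    }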
diff --git a/src/ietf.rs b/src/ietf.rs
index 1c7a6d7..7b4d5aa 100644
--- a/src/ietf.rs
+++ b/src/ietf.rs
@@ -26,7 +26,7 @@ impl<S: IetfSuite> CanonicalSerialize for Proof<S> {
         mut writer: W,
         _compress_always: ark_serialize::Compress,
     ) -> Result<(), ark_serialize::SerializationError> {
-        let c_buf = utils::scalar_encode::<S>(&self.c);
+        let c_buf = codec::scalar_encode::<S>(&self.c);
         if c_buf.len() < S::CHALLENGE_LEN {
             // Encoded scalar length must be at least S::CHALLENGE_LEN
             return Err(ark_serialize::SerializationError::NotEnoughSpace);
@@ -56,7 +56,7 @@ impl<S: IetfSuite> CanonicalDeserialize for Proof<S> {
         if reader.read_exact(&mut c_buf[..]).is_err() {
             return Err(ark_serialize::SerializationError::InvalidData);
         }
-        let c = utils::scalar_decode::<S>(&c_buf);
+        let c = codec::scalar_decode::<S>(&c_buf);
         let s = <ScalarField<S> as CanonicalDeserialize>::deserialize_with_mode(
             &mut reader,
             ark_serialize::Compress::No,
@@ -144,8 +144,8 @@ pub mod testing {
 
     impl<S: IetfSuite> core::fmt::Debug for TestVector<S> {
         fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
-            let c = hex::encode(utils::scalar_encode::<S>(&self.c));
-            let s = hex::encode(utils::scalar_encode::<S>(&self.s));
+            let c = hex::encode(codec::scalar_encode::<S>(&self.c));
+            let s = hex::encode(codec::scalar_encode::<S>(&self.s));
             f.debug_struct("TestVector")
                 .field("base", &self.base)
                 .field("proof_c", &c)
@@ -179,13 +179,13 @@ pub mod testing {
 
         fn from_map(map: &common::TestVectorMap) -> Self {
             let base = common::TestVector::from_map(map);
-            let c = utils::scalar_decode::<S>(&map.item_bytes("proof_c"));
-            let s = utils::scalar_decode::<S>(&map.item_bytes("proof_s"));
+            let c = codec::scalar_decode::<S>(&map.item_bytes("proof_c"));
+            let s = codec::scalar_decode::<S>(&map.item_bytes("proof_s"));
             Self { base, c, s }
         }
 
         fn to_map(&self) -> common::TestVectorMap {
-            let buf = utils::scalar_encode::<S>(&self.c);
+            let buf = codec::scalar_encode::<S>(&self.c);
             let proof_c = if S::Codec::BIG_ENDIAN {
                 let len = buf.len();
                 &buf[len - S::CHALLENGE_LEN..]
@@ -194,7 +194,7 @@ pub mod testing {
             };
             let items = [
                 ("proof_c", hex::encode(proof_c)),
-                ("proof_s", hex::encode(utils::scalar_encode::<S>(&self.s))),
+                ("proof_s", hex::encode(codec::scalar_encode::<S>(&self.s))),
             ];
             let mut map = self.base.to_map();
             items.into_iter().for_each(|(name, value)| {
diff --git a/src/lib.rs b/src/lib.rs
index 6eba1b3..e8990d5 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -105,7 +105,7 @@ pub trait Suite: Copy + Clone {
     ///
     /// # Panics
     ///
-    /// This function panics if `Hasher` output is less than 32 bytes.
+    /// This function panics if `Hasher` output is less than 64 bytes.
     fn nonce(sk: &ScalarField<Self>, pt: Input<Self>) -> ScalarField<Self> {
         utils::nonce_rfc_8032::<Self>(sk, &pt.0)
     }
@@ -124,7 +124,7 @@ pub trait Suite: Copy + Clone {
     ///
     /// By default uses "try and increment" method described by RFC 9381.
     fn data_to_point(data: &[u8]) -> Option<AffinePoint<Self>> {
-        utils::hash_to_curve_tai_rfc_9381::<Self>(data, false)
+        utils::hash_to_curve_tai_rfc_9381::<Self>(data)
    }
 
     /// Map the point to a hash value using `Self::Hasher`.
@@ -265,12 +265,14 @@ mod tests {
     };
 
     #[test]
-    fn proof_to_hash_works() {
+    fn vrf_output_check() {
+        use ark_std::rand::SeedableRng;
+        let mut rng = rand_chacha::ChaCha20Rng::from_seed([42; 32]);
         let secret = Secret::from_seed(TEST_SEED);
-        let input = Input::from(random_val(None));
+        let input = Input::from(random_val(Some(&mut rng)));
         let output = secret.output(input);
-        let expected = "2eaa1a349197bb2b6c455bc5554b331162f0e9b13aea0aab28283cc30e7c6482";
+        let expected = "0245a793d85347ca3c056f8c8f42f1049a310fabff6933b9eae592541a545cb8";
         assert_eq!(expected, hex::encode(output.hash()));
     }
 }
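The new `# Panics` bound reflects the RFC-8032 construction behind the default `Suite::nonce`: it hashes the encoded secret key and keeps bytes 32 onward, so anything shorter than 64 hash bytes cannot work. A minimal sketch of the constraint, using the `digest`/`sha2` crates this repo already depends on:

    use digest::Digest;

    // The default RFC-8032-style nonce derivation needs >= 64 hash bytes.
    fn default_nonce_is_usable<H: Digest>() -> bool {
        H::output_size() >= 64
    }

    fn main() {
        assert!(default_nonce_is_usable::<sha2::Sha512>()); // ok
        assert!(!default_nonce_is_usable::<sha2::Sha256>()); // would panic
    }

This is also why the SHA-256-based `TestSuite` in `src/testing.rs` further down overrides `nonce` with a random value.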
diff --git a/src/pedersen.rs b/src/pedersen.rs
index 19c2ee6..b8a33a1 100644
--- a/src/pedersen.rs
+++ b/src/pedersen.rs
@@ -198,12 +198,12 @@ pub mod testing {
 
         fn from_map(map: &common::TestVectorMap) -> Self {
             let base = common::TestVector::from_map(map);
-            let blind = utils::scalar_decode::<S>(&map.item_bytes("blinding"));
-            let pk_blind = utils::decode_point::<S>(&map.item_bytes("proof_pkb"));
-            let r = utils::decode_point::<S>(&map.item_bytes("proof_r"));
-            let ok = utils::decode_point::<S>(&map.item_bytes("proof_ok"));
-            let s = utils::scalar_decode::<S>(&map.item_bytes("proof_s"));
-            let sb = utils::scalar_decode::<S>(&map.item_bytes("proof_sb"));
+            let blind = codec::scalar_decode::<S>(&map.item_bytes("blinding"));
+            let pk_blind = codec::point_decode::<S>(&map.item_bytes("proof_pkb"));
+            let r = codec::point_decode::<S>(&map.item_bytes("proof_r"));
+            let ok = codec::point_decode::<S>(&map.item_bytes("proof_ok"));
+            let s = codec::scalar_decode::<S>(&map.item_bytes("proof_s"));
+            let sb = codec::scalar_decode::<S>(&map.item_bytes("proof_sb"));
             let proof = Proof {
                 pk_blind,
                 r,
                 ok,
@@ -218,27 +218,27 @@ pub mod testing {
             let items = [
                 (
                     "blinding",
-                    hex::encode(utils::scalar_encode::<S>(&self.blind)),
+                    hex::encode(codec::scalar_encode::<S>(&self.blind)),
                 ),
                 (
                     "proof_pkb",
-                    hex::encode(utils::encode_point::<S>(&self.proof.pk_blind)),
+                    hex::encode(codec::point_encode::<S>(&self.proof.pk_blind)),
                 ),
                 (
                     "proof_r",
-                    hex::encode(utils::encode_point::<S>(&self.proof.r)),
+                    hex::encode(codec::point_encode::<S>(&self.proof.r)),
                 ),
                 (
                     "proof_ok",
-                    hex::encode(utils::encode_point::<S>(&self.proof.ok)),
+                    hex::encode(codec::point_encode::<S>(&self.proof.ok)),
                 ),
                 (
                     "proof_s",
-                    hex::encode(utils::scalar_encode::<S>(&self.proof.s)),
+                    hex::encode(codec::scalar_encode::<S>(&self.proof.s)),
                 ),
                 (
                     "proof_sb",
-                    hex::encode(utils::scalar_encode::<S>(&self.proof.sb)),
+                    hex::encode(codec::scalar_encode::<S>(&self.proof.sb)),
                 ),
             ];
             let mut map = self.base.to_map();
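The `blinding` scalar serialized above is the opening of the blinded public key stored in `proof_pkb`. A sketch of the relation one would expect to hold, assuming the crate is named `ark_ec_vrfs` and that `PedersenSuite` exposes a `BLINDING_BASE` point (both assumptions here):

    use ark_ec::CurveGroup;
    use ark_ec_vrfs::{pedersen::PedersenSuite, AffinePoint, ScalarField};

    // Assumed invariant: pk_blind = pk + blinding * BLINDING_BASE,
    // i.e. a Pedersen-style commitment to the secret key.
    fn blinding_opens<S: PedersenSuite>(
        pk: AffinePoint<S>,
        pk_blind: AffinePoint<S>,
        blinding: ScalarField<S>,
    ) -> bool {
        (S::BLINDING_BASE * blinding + pk).into_affine() == pk_blind
    }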
diff --git a/src/suites/bandersnatch.rs b/src/suites/bandersnatch.rs
index 975a89f..d20e777 100644
--- a/src/suites/bandersnatch.rs
+++ b/src/suites/bandersnatch.rs
@@ -2,56 +2,53 @@
 //!
 //! Configuration:
 //!
-//! * `suite_string` = b"Bandersnatch_SHA-512_ELL2" for Twisted Edwards form.
-//! * `suite_string` = b"Bandersnatch_SW_SHA-512_TAI" for Short Weierstrass form.
+//! * `suite_string` = b"Bandersnatch_SHA-512_ELL2" for Twisted Edwards form.
+//! * `suite_string` = b"Bandersnatch_SW_SHA-512_TAI" for Short Weierstrass form.
 //!
-//! * The EC group G is the Bandersnatch elliptic curve, in Short Weierstrass or
-//!   Twisted Edwards form, with the finite field and curve parameters as specified
-//!   [here](https://neuromancer.sk/std/bls/Bandersnatch)
-//!   For this group, `fLen` = `qLen` = 32 and `cofactor` = 4.
+//! - The EC group is the prime subgroup of the Bandersnatch elliptic curve,
+//!   in Twisted Edwards form, with finite field and curve parameters as specified in
+//!   [MSZ21](https://eprint.iacr.org/2021/1152).
+//!   For this group, `fLen` = `qLen` = $32$ and `cofactor` = $4$.
 //!
-//! * `cLen` = 32.
+//! - The prime subgroup generator G is defined as follows:
+//!   - G.x = 0x29c132cc2c0b34c5743711777bbe42f32b79c022ad998465e1e71866a252ae18
+//!   - G.y = 0x2a6c669eda123e0f157d8b50badcd586358cad81eee464605e3167b6cc974166
 //!
-//! * The key pair generation primitive is `PK = SK * g`, with SK the secret
-//!   key scalar and `g` the group generator. In this ciphersuite, the secret
-//!   scalar x is equal to the secret key SK.
+//! * `cLen` = 32.
 //!
-//! * encode_to_curve_salt = PK_string.
+//! * The key pair generation primitive is `PK = sk * G`, with sk the secret
+//!   key scalar and `G` the group generator. In this ciphersuite, the secret
+//!   scalar x is equal to the secret key scalar sk.
 //!
-//! * The ECVRF_nonce_generation function is as specified in
-//!   Section 5.4.2.1.
+//! * encode_to_curve_salt = PK_string (point_to_string(PK)).
 //!
-//! * The int_to_string function encodes into the 32 bytes little endian
-//!   representation.
+//! * The ECVRF_nonce_generation function is as specified in Section 5.4.2.2
+//!   of RFC-9381.
 //!
-//! * The string_to_int function decodes from the 32 bytes little endian
-//!   representation.
+//! * The int_to_string function encodes into the 32 bytes little endian
+//!   representation.
 //!
-//! * The point_to_string function converts a point on E to an octet
-//!   string using compressed form. The Y coordinate is encoded using
-//!   int_to_string function and the most significant bit of the last
-//!   octet is used to keep track of the X's sign. This implies that
-//!   the point is encoded on 32 bytes.
+//! * The string_to_int function decodes from the 32 bytes little endian
+//!   representation.
 //!
-//! * The string_to_point function tries to decompress the point encoded
-//!   according to `point_to_string` procedure. This function MUST outputs
-//!   "INVALID" if the octet string does not decode to a point on the curve E.
+//! * The point_to_string function converts a point in G to an octet
+//!   string using compressed form. The y coordinate is encoded using
+//!   int_to_string function and the most significant bit of the last
+//!   octet is used to keep track of the x's sign. This implies that
+//!   the point is encoded on 32 bytes.
 //!
-//! * The hash function Hash is SHA-512 as specified in
-//!   [RFC6234](https://www.rfc-editor.org/rfc/rfc6234), with hLen = 64.
+//! * The string_to_point function tries to decompress the point encoded
+//!   according to `point_to_string` procedure. This function MUST output
+//!   "INVALID" if the octet string does not decode to a point on G.
 //!
-//! * The ECVRF_encode_to_curve function is as specified in
-//!   Section 5.4.1.2, with `h2c_suite_ID_string` = `"Bandersnatch_XMD:SHA-512_ELL2_RO_"`.
-//!   The suite is defined in Section 8.5 of [RFC9380](https://datatracker.ietf.org/doc/rfc9380/).
+//! * The hash function Hash is SHA-512 as specified in
+//!   [RFC6234](https://www.rfc-editor.org/rfc/rfc6234), with hLen = 64.
 //!
-//! * The prime subgroup generator is generated following Zcash's fashion:
-// "The generators of G1 and G2 are computed by finding the lexicographically
-// smallest valid x-coordinate, and its lexicographically smallest
-// y-coordinate and scaling it by the cofactor such that the result is not
-// the point at infinity."
-//
-// GENERATOR_X = 18886178867200960497001835917649091219057080094937609519140440539760939937304
-// GENERATOR_Y = 19188667384257783945677642223292697773471335439753913231509108946878080696678
+//! * The `ECVRF_encode_to_curve` function uses the *Elligator2* method described in
+//!   section 6.8.2 of [RFC-9380](https://datatracker.ietf.org/doc/rfc9380) and is
+//!   described in section 5.4.1.2 of [RFC-9381](https://datatracker.ietf.org/doc/rfc9381),
+//!   with `h2c_suite_ID_string` = `"Bandersnatch_XMD:SHA-512_ELL2_RO_"`
+//!   and domain separation tag `DST = "ECVRF_" || h2c_suite_ID_string || suite_string`.
 
 use crate::{pedersen::PedersenSuite, utils::ark_next::*, *};
 use ark_ff::MontFp;
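The `point_to_string` rule above (32 bytes, little-endian y, with x's sign tracked in the most significant bit of the final octet) coincides with Arkworks' compressed Twisted Edwards encoding. A minimal sketch, assuming the `ark-ed-on-bls12-381-bandersnatch` and `ark-serialize` crates; this only illustrates the byte layout and is not the suite's codec itself:

    use ark_ed_on_bls12_381_bandersnatch::EdwardsAffine;
    use ark_serialize::CanonicalSerialize;

    // Compressed encoding: little-endian y with the sign of x packed
    // into the top bit of the last byte, 32 bytes total.
    fn encode_compressed(pt: &EdwardsAffine) -> [u8; 32] {
        let mut buf = [0u8; 32];
        pt.serialize_compressed(&mut buf[..])
            .expect("bandersnatch points compress to 32 bytes");
        buf
    }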
diff --git a/src/suites/secp256.rs b/src/suites/secp256.rs
index eefe321..8ce1963 100644
--- a/src/suites/secp256.rs
+++ b/src/suites/secp256.rs
@@ -72,7 +72,7 @@ impl Suite for P256Sha256Tai {
     }
 
     fn data_to_point(data: &[u8]) -> Option<AffinePoint<Self>> {
-        utils::hash_to_curve_tai_rfc_9381::<Self>(data, true)
+        utils::hash_to_curve_tai_rfc_9381::<Self>(data)
     }
 }
diff --git a/src/testing.rs b/src/testing.rs
index f197197..91d921a 100644
--- a/src/testing.rs
+++ b/src/testing.rs
@@ -20,6 +20,10 @@ pub(crate) mod suite {
         type Affine = ark_ed25519::EdwardsAffine;
         type Hasher = sha2::Sha256;
         type Codec = codec::ArkworksCodec;
+
+        fn nonce(_sk: &ScalarField<Self>, _pt: Input<Self>) -> ScalarField<Self> {
+            random_val(None)
+        }
     }
 
     suite_types!(TestSuite);
@@ -152,12 +156,12 @@ macro_rules! ring_suite_tests {
 
 impl<S: Suite> core::fmt::Debug for TestVector<S> {
     fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
-        let sk = hex::encode(utils::scalar_encode::<S>(&self.sk));
-        let pk = hex::encode(utils::encode_point::<S>(&self.pk));
+        let sk = hex::encode(codec::scalar_encode::<S>(&self.sk));
+        let pk = hex::encode(codec::point_encode::<S>(&self.pk));
         let alpha = hex::encode(&self.alpha);
         let ad = hex::encode(&self.ad);
-        let h = hex::encode(utils::encode_point::<S>(&self.h));
-        let gamma = hex::encode(utils::encode_point::<S>(&self.gamma));
+        let h = hex::encode(codec::point_encode::<S>(&self.h));
+        let gamma = hex::encode(codec::point_encode::<S>(&self.gamma));
         let beta = hex::encode(&self.beta);
         f.debug_struct("TestVector")
             .field("comment", &self.comment)
@@ -227,7 +231,7 @@ impl<S: Suite> TestVectorTrait for TestVector<S> {
 
         let salt = salt
             .map(|v| v.to_vec())
-            .unwrap_or_else(|| utils::encode_point::<S>(&pk));
+            .unwrap_or_else(|| codec::point_encode::<S>(&pk));
 
         let h2c_data = [&salt[..], alpha].concat();
         let h = <S as Suite>::data_to_point(&h2c_data).unwrap();
@@ -255,12 +259,12 @@ impl<S: Suite> TestVectorTrait for TestVector<S> {
         let item_bytes = |field| hex::decode(map.0.get(field).unwrap()).unwrap();
         let comment = map.0.get("comment").unwrap().to_string();
         let flags = item_bytes("flags")[0];
-        let sk = utils::scalar_decode::<S>(&item_bytes("sk"));
-        let pk = utils::decode_point::<S>(&item_bytes("pk"));
+        let sk = codec::scalar_decode::<S>(&item_bytes("sk"));
+        let pk = codec::point_decode::<S>(&item_bytes("pk"));
         let alpha = item_bytes("alpha");
         let ad = item_bytes("ad");
-        let h = utils::decode_point::<S>(&item_bytes("h"));
-        let gamma = utils::decode_point::<S>(&item_bytes("gamma"));
+        let h = codec::point_decode::<S>(&item_bytes("h"));
+        let gamma = codec::point_decode::<S>(&item_bytes("gamma"));
         let beta = item_bytes("beta");
         Self {
             comment,
@@ -279,12 +283,12 @@ impl<S: Suite> TestVectorTrait for TestVector<S> {
         let items = [
             ("comment", self.comment.clone()),
            ("flags", hex::encode([self.flags])),
-            ("sk", hex::encode(utils::scalar_encode::<S>(&self.sk))),
-            ("pk", hex::encode(utils::encode_point::<S>(&self.pk))),
+            ("sk", hex::encode(codec::scalar_encode::<S>(&self.sk))),
+            ("pk", hex::encode(codec::point_encode::<S>(&self.pk))),
             ("alpha", hex::encode(&self.alpha)),
             ("ad", hex::encode(&self.ad)),
-            ("h", hex::encode(utils::encode_point::<S>(&self.h))),
-            ("gamma", hex::encode(utils::encode_point::<S>(&self.gamma))),
+            ("h", hex::encode(codec::point_encode::<S>(&self.h))),
+            ("gamma", hex::encode(codec::point_encode::<S>(&self.gamma))),
             ("beta", hex::encode(&self.beta)),
             // ("proof_c", hex::encode(utils::encode_scalar::<S>(&v.c))),
             // ("proof_s", hex::encode(utils::encode_scalar::<S>(&v.s))),
@@ -304,7 +308,7 @@ impl<S: Suite> TestVectorTrait for TestVector<S> {
 
         // Prepare hash_to_curve data = salt || alpha
        // Salt is defined to be pk (adjust it to make the encoding to match)
-        let pk_bytes = utils::encode_point::<S>(&pk.0);
+        let pk_bytes = codec::point_encode::<S>(&pk.0);
         let h2c_data = [&pk_bytes[..], &self.alpha[..]].concat();
 
         let h = S::data_to_point(&h2c_data).unwrap();
diff --git a/src/utils.rs b/src/utils.rs
index 658e3ac..ae67c03 100644
--- a/src/utils.rs
+++ b/src/utils.rs
@@ -1,4 +1,4 @@
-use crate::{AffinePoint, Codec, HashOutput, ScalarField, Suite};
+use crate::*;
 use ark_ec::AffineRepr;
 use ark_ff::PrimeField;
 
@@ -63,10 +63,12 @@ pub(crate) fn hmac<H: Digest + BlockSizeUser>(sk: &[u8], data: &[u8]) -> Vec<u8> {
 /// find a valid curve point after approximately two attempts on average.
 ///
 /// The input `data` is defined to be `salt || alpha` according to RFC 9381.
-pub fn hash_to_curve_tai_rfc_9381<S: Suite>(
-    data: &[u8],
-    point_be_encoding: bool,
-) -> Option<AffinePoint<S>> {
+///
+/// # Panics
+///
+/// This function panics if `Suite::Hasher` output is less than the AffinePoint base field
+/// modulus size (in bytes).
+pub fn hash_to_curve_tai_rfc_9381<S: Suite>(data: &[u8]) -> Option<AffinePoint<S>> {
     use ark_ec::AffineRepr;
     use ark_ff::Field;
     use ark_serialize::CanonicalDeserialize;
@@ -74,23 +76,24 @@ pub fn hash_to_curve_tai_rfc_9381<S: Suite>(
     const DOM_SEP_FRONT: u8 = 0x01;
     const DOM_SEP_BACK: u8 = 0x00;
 
-    let mod_size = <<<AffinePoint<S> as AffineRepr>::BaseField as Field>::BasePrimeField as PrimeField>::MODULUS_BIT_SIZE
-        as usize
-        / 8;
-    if S::Hasher::output_size() < mod_size {
-        return None;
-    }
+    let mod_size =
+        <<<AffinePoint<S> as AffineRepr>::BaseField as Field>::BasePrimeField as PrimeField>::MODULUS_BIT_SIZE as usize / 8;
+
+    assert!(
+        S::Hasher::output_size() >= mod_size,
+        "Suite::Hasher output is required to be >= base field modulus size"
+    );
 
     let mut buf = [S::SUITE_ID, &[DOM_SEP_FRONT], data, &[0x00, DOM_SEP_BACK]].concat();
     let ctr_pos = buf.len() - 2;
 
     for ctr in 0..=255 {
-        // Modify the `ctr` value
         buf[ctr_pos] = ctr;
         let mut hash = hash::<S::Hasher>(&buf).to_vec();
-        if point_be_encoding {
+        if S::Codec::BIG_ENDIAN {
             hash.reverse();
         }
+        // TODO: flags? Must be pushed before reversing!
         hash.push(0x00);
 
         if let Ok(pt) = AffinePoint::<S>::deserialize_compressed_unchecked(&hash[..]) {
@@ -172,19 +175,21 @@ pub fn point_to_hash_rfc_9381<S: Suite>(pt: &AffinePoint<S>) -> HashOutput<S> {
 /// This procedure is based on section 5.1.6 of RFC 8032: "Edwards-Curve Digital
 /// Signature Algorithm (EdDSA)".
 ///
-/// The algorithm generate the nonce value in a deterministic
-/// pseudorandom fashion.
-///
-/// `Suite::Hash` is recommended to be be at least 64 bytes.
+/// The algorithm generates the nonce value in a deterministic pseudorandom fashion.
 ///
 /// # Panics
 ///
-/// This function panics if `Hash` is less than 32 bytes.
+/// This function panics if `Suite::Hasher` output is less than 64 bytes.
 pub fn nonce_rfc_8032<S: Suite>(sk: &ScalarField<S>, input: &AffinePoint<S>) -> ScalarField<S> {
-    let raw = scalar_encode::<S>(sk);
+    assert!(
+        S::Hasher::output_size() >= 64,
+        "Suite::Hasher output is required to be >= 64 bytes"
+    );
+
+    let raw = codec::scalar_encode::<S>(sk);
     let sk_hash = &hash::<S::Hasher>(&raw)[32..];
 
-    let raw = encode_point::<S>(input);
+    let raw = codec::point_encode::<S>(input);
     let v = [sk_hash, &raw[..]].concat();
     let h = &hash::<S::Hasher>(&v)[..];
@@ -203,14 +208,14 @@ pub fn nonce_rfc_6979<S: Suite>(sk: &ScalarField<S>, input: &AffinePoint<S>) -> ScalarField<S>
 where
     S::Hasher: digest::core_api::BlockSizeUser,
 {
-    let raw = encode_point::<S>(input);
+    let raw = codec::point_encode::<S>(input);
     let h1 = hash::<S::Hasher>(&raw);
 
     let v = [1; 32];
     let k = [0; 32];
 
     // K = HMAC_K(V || 0x00 || int2octets(x) || bits2octets(h1))
-    let x = scalar_encode::<S>(sk);
+    let x = codec::scalar_encode::<S>(sk);
     let raw = [&v[..], &[0x00], &x[..], &h1[..]].concat();
     let k = hmac::<S::Hasher>(&k, &raw);
@@ -230,30 +235,6 @@ where
     S::Codec::scalar_decode(&v)
 }
 
-/// Point encoder wrapper using `Suite::Codec`.
-pub fn encode_point<S: Suite>(pt: &AffinePoint<S>) -> Vec<u8> {
-    let mut buf = Vec::new();
-    S::Codec::point_encode(pt, &mut buf);
-    buf
-}
-
-/// Point decoder wrapper using `Suite::Codec`.
-pub fn decode_point<S: Suite>(buf: &[u8]) -> AffinePoint<S> {
-    S::Codec::point_decode(buf)
-}
-
-/// Scalar encoder wrapper using `Suite::Codec`.
-pub fn scalar_encode<S: Suite>(sc: &ScalarField<S>) -> Vec<u8> {
-    let mut buf = Vec::new();
-    S::Codec::scalar_encode(sc, &mut buf);
-    buf
-}
-
-/// Scalar decoder wrapper using `Suite::Codec`.
-pub fn scalar_decode<S: Suite>(buf: &[u8]) -> ScalarField<S> {
-    S::Codec::scalar_decode(buf)
-}
-
 // Upcoming Arkworks features.
 pub(crate) mod ark_next {
     use ark_ec::{
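For readers cross-checking against RFC 9381 §5.4.1.1, the try-and-increment loop above can be sketched stand-alone as below. SHA-512 stands in for `Suite::Hasher`, and `try_decompress` is a hypothetical placeholder for the codec-specific point decoding (the real function also reverses the digest for big-endian codecs and appends a sign byte):

    use sha2::{Digest, Sha512};

    // Hash suite_id || 0x01 || data || ctr || 0x00 for ctr in 0..=255 and
    // return the first candidate accepted by `try_decompress`.
    fn tai<F: Fn(&[u8]) -> bool>(suite_id: &[u8], data: &[u8], try_decompress: F) -> Option<Vec<u8>> {
        for ctr in 0u8..=255 {
            let digest = Sha512::new()
                .chain_update(suite_id)
                .chain_update([0x01]) // front domain separator
                .chain_update(data)
                .chain_update([ctr, 0x00]) // counter, back domain separator
                .finalize();
            if try_decompress(&digest[..32]) {
                return Some(digest[..32].to_vec());
            }
        }
        None
    }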
@@ -349,7 +330,7 @@ mod tests {
 
     #[test]
     fn hash_to_curve_tai_works() {
-        let pt = hash_to_curve_tai_rfc_9381::<TestSuite>(b"hello world", false).unwrap();
+        let pt = hash_to_curve_tai_rfc_9381::<TestSuite>(b"hello world").unwrap();
         // Check that `pt` is in the prime subgroup
         assert!(pt.is_on_curve());
         assert!(pt.is_in_correct_subgroup_assuming_on_curve())