Verify user inclusion in a test (#290)
* Verify user inclusion in a test

* Perform batch KZG opening/verification

* Remove magic number

* Separate negative tests for invalid grand total and invalid range check
alxkzmn authored May 13, 2024
1 parent 6c77984 commit 62aaf80
Showing 7 changed files with 260 additions and 71 deletions.
2 changes: 1 addition & 1 deletion backend/Cargo.lock

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion prover/Cargo.lock

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion prover/Cargo.toml
@@ -13,7 +13,7 @@ parallel = ["dep:rayon"]
frontend-halo2 = ["dep:halo2_proofs"]

[dependencies]
plonkish_backend = { git = "https://github.com/han0110/plonkish", package = "plonkish_backend", features= ["frontend-halo2", "benchmark"] }
plonkish_backend = { git = "https://github.com/summa-dev/plonkish", branch="summa-changes", package = "plonkish_backend", features= ["frontend-halo2", "benchmark"] }
plotters = { version = "0.3.4", optional = true }
rand = "0.8"
csv = "1.1"
6 changes: 3 additions & 3 deletions prover/src/chips/range/utils.rs
@@ -11,7 +11,7 @@ use crate::utils::{big_uint_to_fp, fp_to_big_uint};
/// Example:
/// decompose_fp_to_bytes(0x1f2f3f, 2) -> [0x3f, 0x2f]
pub fn decompose_fp_to_bytes(value: Fp, n: usize) -> Vec<u8> {
let value_biguint = fp_to_big_uint(value);
let value_biguint = fp_to_big_uint(&value);

let mut bytes = value_biguint.to_bytes_le();

@@ -33,7 +33,7 @@ pub fn decompose_fp_to_bytes(value: Fp, n: usize) -> Vec<u8> {
/// If value is decomposed in #byte pairs which are less than n, then the returned byte pairs are padded with 0s at the most significant byte pairs.
/// If value is decomposed in #byte pairs which are greater than n, then the most significant byte pairs are truncated. A warning is printed.
pub fn decompose_fp_to_byte_pairs(value: Fp, n: usize) -> Vec<u16> {
let value_biguint = fp_to_big_uint(value);
let value_biguint = fp_to_big_uint(&value);
let mut bytes = value_biguint.to_bytes_le();

// Ensure the bytes vector has an even length for pairs of bytes.
@@ -80,7 +80,7 @@ mod testing {
#[test]
fn test_fp_to_big_uint() {
let f = Fp::from(5);
let big_uint = fp_to_big_uint(f);
let big_uint = fp_to_big_uint(&f);
assert_eq!(big_uint, BigUint::from(5u8));
}

9 changes: 5 additions & 4 deletions prover/src/circuits/summa_circuit.rs
@@ -150,11 +150,11 @@ pub mod summa_hyperplonk {
running_sum_values.push(vec![]);

region.assign_advice(
|| "username",
|| format!("username {}", i),
config.username,
i,
|| {
Value::known(big_uint_to_fp(
Value::known(big_uint_to_fp::<Fp>(
self.entries[i].username_as_big_uint(),
))
},
@@ -163,7 +163,8 @@ pub mod summa_hyperplonk {
let mut assigned_balances_row = vec![];

for (j, balance) in self.entries[i].balances().iter().enumerate() {
let balance_value = Value::known(big_uint_to_fp(balance));
let balance_value: Value<Fp> =
Value::known(big_uint_to_fp(balance));

let assigned_balance = region.assign_advice(
|| format!("balance {}", j),
@@ -266,7 +267,7 @@ pub mod summa_hyperplonk {
fn instances(&self) -> Vec<Vec<Fp>> {
// The last decomposition of each range check chip should be zero
let mut instances = vec![Fp::ZERO];
instances.extend(self.grand_total.iter().map(big_uint_to_fp));
instances.extend(self.grand_total.iter().map(big_uint_to_fp::<Fp>));
vec![instances]
}
}
256 changes: 200 additions & 56 deletions prover/src/circuits/tests.rs
@@ -1,100 +1,244 @@
use halo2_proofs::arithmetic::Field;
use plonkish_backend::{
backend::{hyperplonk::HyperPlonk, PlonkishBackend, PlonkishCircuit, PlonkishCircuitInfo},
backend::{hyperplonk::HyperPlonk, PlonkishBackend, PlonkishCircuit},
frontend::halo2::Halo2Circuit,
halo2_curves::bn256::{Bn256, Fr as Fp},
pcs::{multilinear::MultilinearKzg, PolynomialCommitmentScheme},
pcs::{multilinear::MultilinearKzg, Evaluation, PolynomialCommitmentScheme},
util::{
arithmetic::PrimeField,
transcript::{InMemoryTranscript, Keccak256Transcript, TranscriptRead, TranscriptWrite},
DeserializeOwned, Serialize,
transcript::{
FieldTranscriptRead, FieldTranscriptWrite, InMemoryTranscript, Keccak256Transcript,
},
Itertools,
},
Error::InvalidSumcheck,
};
use std::hash::Hash;

use rand::{
rngs::{OsRng, StdRng},
CryptoRng, RngCore, SeedableRng,
CryptoRng, Rng, RngCore, SeedableRng,
};

use crate::{
circuits::summa_circuit::summa_hyperplonk::SummaHyperplonk, utils::generate_dummy_entries,
circuits::summa_circuit::summa_hyperplonk::SummaHyperplonk,
utils::{
big_uint_to_fp, fp_to_big_uint, generate_dummy_entries, uni_to_multivar_binary_index,
MultilinearAsUnivariate,
},
};
const K: u32 = 17;
const N_CURRENCIES: usize = 2;
const N_USERS: usize = 1 << 16;

pub fn run_plonkish_backend<F, Pb, T, C>(
num_vars: usize,
circuit_fn: impl Fn(usize) -> (PlonkishCircuitInfo<F>, C),
) where
F: PrimeField + Hash + Serialize + DeserializeOwned,
Pb: PlonkishBackend<F>,
T: TranscriptRead<<Pb::Pcs as PolynomialCommitmentScheme<F>>::CommitmentChunk, F>
+ TranscriptWrite<<Pb::Pcs as PolynomialCommitmentScheme<F>>::CommitmentChunk, F>
+ InMemoryTranscript<Param = ()>,
C: PlonkishCircuit<F>,
{
let (circuit_info, circuit) = circuit_fn(num_vars);
pub fn seeded_std_rng() -> impl RngCore + CryptoRng {
StdRng::seed_from_u64(OsRng.next_u64())
}

#[test]
fn test_summa_hyperplonk() {
type ProvingBackend = HyperPlonk<MultilinearKzg<Bn256>>;
let entries = generate_dummy_entries::<N_USERS, N_CURRENCIES>().unwrap();
let circuit = SummaHyperplonk::<N_USERS, N_CURRENCIES>::init(entries.to_vec());
let num_vars = K;

let circuit_fn = |num_vars| {
let circuit = Halo2Circuit::<Fp, SummaHyperplonk<N_USERS, N_CURRENCIES>>::new::<
ProvingBackend,
>(num_vars, circuit.clone());
(circuit.circuit_info().unwrap(), circuit)
};

let (circuit_info, circuit) = circuit_fn(num_vars as usize);
let instances = circuit.instances();

let param = Pb::setup(&circuit_info, seeded_std_rng()).unwrap();
let param = ProvingBackend::setup(&circuit_info, seeded_std_rng()).unwrap();

let (pp, vp) = Pb::preprocess(&param, &circuit_info).unwrap();
let (prover_parameters, verifier_parameters) =
ProvingBackend::preprocess(&param, &circuit_info).unwrap();

let proof = {
let mut transcript = T::new(());
Pb::prove(&pp, &circuit, &mut transcript, seeded_std_rng()).unwrap();
transcript.into_proof()
let (witness_polys, proof_transcript) = {
let mut proof_transcript = Keccak256Transcript::new(());

let witness_polys = ProvingBackend::prove(
&prover_parameters,
&circuit,
&mut proof_transcript,
seeded_std_rng(),
)
.unwrap();
(witness_polys, proof_transcript)
};

let result = {
let mut transcript = T::from_proof((), proof.as_slice());
Pb::verify(&vp, instances, &mut transcript, seeded_std_rng())
let num_points = N_CURRENCIES + 1;

let proof = proof_transcript.into_proof();

let mut transcript;
let result: Result<(), plonkish_backend::Error> = {
transcript = Keccak256Transcript::from_proof((), proof.as_slice());
ProvingBackend::verify(
&verifier_parameters,
instances,
&mut transcript,
seeded_std_rng(),
)
};
assert_eq!(result, Ok(()));

let wrong_instances = instances[0]
let invalid_grand_total_instances = instances[0]
.iter()
.map(|instance| *instance + F::ONE)
.enumerate()
.map(|(i, element)| {
if i == 0 {
*element
} else {
Fp::random(seeded_std_rng())
}
})
.collect::<Vec<_>>();
let wrong_result = {
let mut transcript = T::from_proof((), proof.as_slice());
Pb::verify(
&vp,
&vec![wrong_instances],

let invalid_result = {
let mut transcript = Keccak256Transcript::from_proof((), proof.as_slice());
ProvingBackend::verify(
&verifier_parameters,
&[invalid_grand_total_instances],
&mut transcript,
seeded_std_rng(),
)
};
assert_eq!(
wrong_result,
invalid_result,
Err(InvalidSumcheck(
"Consistency failure at round 1".to_string()
))
);
}

pub fn seeded_std_rng() -> impl RngCore + CryptoRng {
StdRng::seed_from_u64(OsRng.next_u64())
}
let invalid_range_check_instances = instances[0]
.iter()
.enumerate()
.map(|(i, element)| {
if i == 0 {
Fp::random(seeded_std_rng())
} else {
*element
}
})
.collect::<Vec<_>>();

#[test]
fn test_summa_hyperplonk() {
type Pb = HyperPlonk<MultilinearKzg<Bn256>>;
let entries = generate_dummy_entries::<N_USERS, N_CURRENCIES>().unwrap();
let circuit = SummaHyperplonk::<N_USERS, N_CURRENCIES>::init(entries.to_vec());
let num_vars = K;
run_plonkish_backend::<Fp, Pb, Keccak256Transcript<_>, _>(
num_vars.try_into().unwrap(),
|num_vars| {
let circuit = Halo2Circuit::<Fp, SummaHyperplonk<N_USERS, N_CURRENCIES>>::new::<Pb>(
num_vars,
circuit.clone(),
);
(circuit.circuit_info().unwrap(), circuit)
},
let invalid_result = {
let mut transcript = Keccak256Transcript::from_proof((), proof.as_slice());
ProvingBackend::verify(
&verifier_parameters,
&[invalid_range_check_instances],
&mut transcript,
seeded_std_rng(),
)
};
assert_eq!(
invalid_result,
Err(InvalidSumcheck(
"Consistency failure at round 1".to_string()
))
);

//Create an evaluation challenge at a random "user index"
let fraction: f64 = rand::thread_rng().gen();
let random_user_index = (fraction * (entries.len() as f64)) as usize;

assert_eq!(
fp_to_big_uint(&witness_polys[0].evaluate_as_univariate(&random_user_index)),
*entries[random_user_index].username_as_big_uint()
);
assert_eq!(
fp_to_big_uint(&witness_polys[1].evaluate_as_univariate(&random_user_index)),
entries[random_user_index].balances()[0]
);

// Convert challenge into a multivariate form
let multivariate_challenge: Vec<Fp> =
uni_to_multivar_binary_index(&random_user_index, num_vars as usize);

let mut kzg_transcript = Keccak256Transcript::new(());

let mut transcript = Keccak256Transcript::from_proof((), proof.as_slice());

let user_entry_commitments = MultilinearKzg::<Bn256>::read_commitments(
&verifier_parameters.pcs,
num_points,
&mut transcript,
)
.unwrap();
let user_entry_polynomials = witness_polys.iter().take(num_points).collect::<Vec<_>>();

//Store the user index multi-variable in the transcript for the verifier
for binary_var in multivariate_challenge.iter() {
kzg_transcript.write_field_element(binary_var).unwrap();
}

let evals = user_entry_polynomials
.iter()
.enumerate()
.map(|(poly_idx, poly)| {
Evaluation::new(poly_idx, 0, poly.evaluate(&multivariate_challenge))
})
.collect_vec();

MultilinearKzg::<Bn256>::batch_open(
&prover_parameters.pcs,
user_entry_polynomials,
&user_entry_commitments,
&[multivariate_challenge],
&evals,
&mut kzg_transcript,
)
.unwrap();

let kzg_proof = kzg_transcript.into_proof();

// Verifier side
let mut kzg_transcript = Keccak256Transcript::from_proof((), kzg_proof.as_slice());

// The verifier knows the ZK-SNARK proof, can extract the polynomial commitments
let mut transcript = Keccak256Transcript::from_proof((), proof.as_slice());
let user_entry_commitments = MultilinearKzg::<Bn256>::read_commitments(
&verifier_parameters.pcs,
num_points,
&mut transcript,
)
.unwrap();

//The verifier doesn't know the mapping of their "user index" to the multi-variable index, reads it from the transcript
let mut multivariate_challenge: Vec<Fp> = Vec::new();
for _ in 0..num_vars {
multivariate_challenge.push(kzg_transcript.read_field_element().unwrap());
}

//The user knows their evaluation at the challenge point
let evals: Vec<Evaluation<Fp>> = (0..N_CURRENCIES + 1)
.map(|i| {
if i == 0 {
Evaluation::new(
i,
0,
big_uint_to_fp::<Fp>(entries[random_user_index].username_as_big_uint()),
)
} else {
Evaluation::new(
i,
0,
big_uint_to_fp::<Fp>(&entries[random_user_index].balances()[i - 1]),
)
}
})
.collect();

MultilinearKzg::<Bn256>::batch_verify(
&verifier_parameters.pcs,
&user_entry_commitments,
&[multivariate_challenge],
&evals,
&mut kzg_transcript,
)
.unwrap();
}

#[cfg(feature = "dev-graph")]