From a7e8bd14b9b676995eb9fabe0c96ce18650adbff Mon Sep 17 00:00:00 2001 From: Shahar Papini <43779613+spapinistarkware@users.noreply.github.com> Date: Tue, 26 Mar 2024 11:00:05 +0200 Subject: [PATCH] Commitment Scheme evaluation per size (#483) --- src/core/backend/cpu/quotients.rs | 7 +- src/core/commitment_scheme/prover.rs | 64 ++++----- src/core/commitment_scheme/quotients.rs | 172 +++++++++++++++++++++++- src/core/commitment_scheme/utils.rs | 7 - src/core/commitment_scheme/verifier.rs | 108 ++++++--------- src/core/fri.rs | 4 +- src/core/oods.rs | 48 ------- src/core/prover/mod.rs | 4 +- src/fibonacci/mod.rs | 2 +- 9 files changed, 248 insertions(+), 168 deletions(-) diff --git a/src/core/backend/cpu/quotients.rs b/src/core/backend/cpu/quotients.rs index a88294b84..e46dd3234 100644 --- a/src/core/backend/cpu/quotients.rs +++ b/src/core/backend/cpu/quotients.rs @@ -41,7 +41,7 @@ pub fn accumulate_row_quotients( let mut row_accumlator = SecureField::zero(); for sample in samples { let mut numerator = SecureField::zero(); - for (column_index, sampled_value) in &sample.column_indices_and_values { + for (column_index, sampled_value) in &sample.columns_and_values { let column = &columns[*column_index]; let value = column[row]; let linear_term = complex_conjugate_line(sample.point, *sampled_value, domain_point); @@ -54,8 +54,7 @@ pub fn accumulate_row_quotients( domain_point.into_ef(), ); - row_accumlator = row_accumlator - * random_coeff.pow(sample.column_indices_and_values.len() as u128) + row_accumlator = row_accumlator * random_coeff.pow(sample.columns_and_values.len() as u128) + numerator / denominator; } row_accumlator @@ -85,7 +84,7 @@ mod tests { coeff, &[ColumnSampleBatch { point, - column_indices_and_values: vec![(0, value)], + columns_and_values: vec![(0, value)], }], ); let quot_poly_base_field = diff --git a/src/core/commitment_scheme/prover.rs b/src/core/commitment_scheme/prover.rs index c323c4cf6..f2172b06d 100644 --- a/src/core/commitment_scheme/prover.rs +++ b/src/core/commitment_scheme/prover.rs @@ -10,7 +10,6 @@ use super::super::circle::CirclePoint; use super::super::fields::m31::BaseField; use super::super::fields::qm31::SecureField; use super::super::fri::{FriConfig, FriProof, FriProver}; -use super::super::oods::get_pair_oods_quotient; use super::super::poly::circle::CanonicCoset; use super::super::poly::BitReversedOrder; use super::super::proof_of_work::{ProofOfWork, ProofOfWorkProof}; @@ -18,12 +17,14 @@ use super::super::prover::{ LOG_BLOWUP_FACTOR, LOG_LAST_LAYER_DEGREE_BOUND, N_QUERIES, PROOF_OF_WORK_BITS, }; use super::super::ColumnVec; +use super::quotients::{compute_fri_quotients, PointSample}; use super::utils::TreeVec; use crate::commitment_scheme::blake2_hash::{Blake2sHash, Blake2sHasher}; use crate::commitment_scheme::merkle_input::{MerkleTreeColumnLayout, MerkleTreeInput}; use crate::commitment_scheme::mixed_degree_decommitment::MixedDecommitment; use crate::commitment_scheme::mixed_degree_merkle_tree::MixedDegreeMerkleTree; use crate::core::channel::Channel; +use crate::core::poly::circle::SecureEvaluation; type MerkleHasher = Blake2sHasher; type ProofChannel = Blake2sChannel; @@ -65,43 +66,42 @@ impl CommitmentSchemeProver { pub fn prove_values( &self, - prove_points: TreeVec>>>, + sampled_points: TreeVec>>>, channel: &mut ProofChannel, ) -> CommitmentSchemeProof { - // Evaluate polynomials on open points. 
- let proved_values = - self.polynomials() - .zip_cols(&prove_points) - .map_cols(|(poly, points)| { - points - .iter() - .map(|point| poly.eval_at_point(*point)) - .collect_vec() - }); - channel.mix_felts(&proved_values.clone().flatten_cols()); - - // Compute oods quotients for boundary constraints on prove_points. - let quotients = self - .evaluations() - .zip_cols(&proved_values) - .zip_cols(&prove_points) - .map_cols(|((evaluation, values), points)| { - zip(points, values) - .map(|(&point, &value)| { - get_pair_oods_quotient(point, value, evaluation).bit_reverse() + // Evaluate polynomials on samples points. + let samples = self + .polynomials() + .zip_cols(&sampled_points) + .map_cols(|(poly, points)| { + points + .iter() + .map(|&point| PointSample { + point, + value: poly.eval_at_point(point), }) .collect_vec() }); + let sampled_values = samples + .as_cols_ref() + .map_cols(|x| x.iter().map(|o| o.value).collect()); + channel.mix_felts(&sampled_values.clone().flatten_cols()); + + // Compute oods quotients for boundary constraints on the sampled points. + let columns = self.evaluations().flatten(); + let quotients = compute_fri_quotients(&columns, &samples.flatten(), channel.draw_felt()); + + // TODO(spapini): Conversion to CircleEvaluation can be removed when FRI supports + // SecureColumn. + let quotients = quotients + .into_iter() + .map(SecureEvaluation::to_cpu) + .collect_vec(); // Run FRI commitment phase on the oods quotients. let fri_config = FriConfig::new(LOG_LAST_LAYER_DEGREE_BOUND, LOG_BLOWUP_FACTOR, N_QUERIES); - // TODO(spapini): Remove rev() when we start accumulating by size. - // This is only done because fri demands descending sizes. - let fri_prover = FriProver::::commit( - channel, - fri_config, - "ients.flatten_cols_rev(), - ); + let fri_prover = + FriProver::::commit(channel, fri_config, "ients); // Proof of work. 
let proof_of_work = ProofOfWork::new(PROOF_OF_WORK_BITS).prove(channel); @@ -125,7 +125,7 @@ impl CommitmentSchemeProver { let decommitments = decommitment_results.map(|(_, d)| d); CommitmentSchemeProof { - proved_values, + sampled_values, decommitments, queried_values, proof_of_work, @@ -136,7 +136,7 @@ impl CommitmentSchemeProver { #[derive(Debug)] pub struct CommitmentSchemeProof { - pub proved_values: TreeVec>>, + pub sampled_values: TreeVec>>, pub decommitments: TreeVec>, pub queried_values: TreeVec>>, pub proof_of_work: ProofOfWorkProof, diff --git a/src/core/commitment_scheme/quotients.rs b/src/core/commitment_scheme/quotients.rs index a0792de6c..cb9719a50 100644 --- a/src/core/commitment_scheme/quotients.rs +++ b/src/core/commitment_scheme/quotients.rs @@ -1,10 +1,21 @@ +use std::cmp::Reverse; +use std::collections::BTreeMap; +use std::iter::zip; + +use itertools::{izip, multiunzip, Itertools}; + +use crate::core::backend::cpu::quotients::accumulate_row_quotients; use crate::core::backend::Backend; use crate::core::circle::CirclePoint; use crate::core::fields::m31::BaseField; use crate::core::fields::qm31::SecureField; use crate::core::fields::secure_column::SecureColumn; -use crate::core::poly::circle::{CircleDomain, CircleEvaluation}; +use crate::core::fri::SparseCircleEvaluation; +use crate::core::poly::circle::{CanonicCoset, CircleDomain, CircleEvaluation, SecureEvaluation}; use crate::core::poly::BitReversedOrder; +use crate::core::prover::VerificationError; +use crate::core::queries::SparseSubCircleDomain; +use crate::core::utils::bit_reverse_index; pub trait QuotientOps: Backend { /// Accumulates the quotients of the columns at the given domain. @@ -26,5 +37,162 @@ pub struct ColumnSampleBatch { /// The point at which the columns are sampled. pub point: CirclePoint, /// The sampled column indices and their values at the point. - pub column_indices_and_values: Vec<(usize, SecureField)>, + pub columns_and_values: Vec<(usize, SecureField)>, +} +impl ColumnSampleBatch { + /// Groups column samples by sampled point. + /// # Arguments + /// samples: For each column, a vector of samples. + pub fn new(samples: &[&Vec]) -> Vec { + // Group samples by point, and create a ColumnSampleBatch for each point. + // This should keep a stable ordering. + let mut grouped_samples = BTreeMap::new(); + for (column_index, samples) in samples.iter().enumerate() { + for sample in samples.iter() { + grouped_samples + .entry(sample.point) + .or_insert_with(Vec::new) + .push((column_index, sample.value)); + } + } + grouped_samples + .into_iter() + .map(|(point, columns_and_values)| ColumnSampleBatch { + point, + columns_and_values, + }) + .collect() + } +} + +pub struct PointSample { + pub point: CirclePoint, + pub value: SecureField, +} + +pub fn compute_fri_quotients( + columns: &[&CircleEvaluation], + samples: &[Vec], + random_coeff: SecureField, +) -> Vec> { + zip(columns, samples) + .sorted_by_key(|(c, _)| Reverse(c.domain.log_size())) + .group_by(|(c, _)| c.domain.log_size()) + .into_iter() + .map(|(log_size, tuples)| { + let (columns, samples): (Vec<_>, Vec<_>) = tuples.unzip(); + let domain = CanonicCoset::new(log_size).circle_domain(); + // TODO: slice. 
+ let batched_samples = ColumnSampleBatch::new(&samples); + let values = B::accumulate_quotients(domain, &columns, random_coeff, &batched_samples); + SecureEvaluation { domain, values } + }) + .collect() +} + +pub fn fri_answers( + column_log_sizes: Vec, + samples: &[Vec], + random_coeff: SecureField, + query_domain_per_log_size: BTreeMap, + queried_values_per_column: &[Vec], +) -> Result>, VerificationError> { + izip!(column_log_sizes, samples, queried_values_per_column) + .sorted_by_key(|(log_size, ..)| Reverse(*log_size)) + .group_by(|(log_size, ..)| *log_size) + .into_iter() + .map(|(log_size, tuples)| { + let (_, samples, queried_valued_per_column): (Vec<_>, Vec<_>, Vec<_>) = + multiunzip(tuples); + fri_answers_for_log_size( + log_size, + &samples, + random_coeff, + &query_domain_per_log_size[&log_size], + &queried_valued_per_column, + ) + }) + .collect() +} + +pub fn fri_answers_for_log_size( + log_size: u32, + samples: &[&Vec], + random_coeff: SecureField, + query_domain: &SparseSubCircleDomain, + queried_values_per_column: &[&Vec], +) -> Result, VerificationError> { + let commitment_domain = CanonicCoset::new(log_size).circle_domain(); + let batched_samples = ColumnSampleBatch::new(samples); + for x in queried_values_per_column { + if x.len() != query_domain.flatten().len() { + return Err(VerificationError::InvalidStructure); + } + } + let mut queried_values_per_column = queried_values_per_column + .iter() + .map(|q| q.iter()) + .collect_vec(); + + let mut evals = Vec::new(); + for subdomain in query_domain.iter() { + let domain = subdomain.to_circle_domain(&commitment_domain); + let mut column_evals = Vec::new(); + for queried_values in queried_values_per_column.iter_mut() { + let eval = CircleEvaluation::new( + domain, + queried_values.take(domain.size()).copied().collect_vec(), + ); + column_evals.push(eval); + } + // TODO(spapini): bit reverse iterator. 
+ let mut values = Vec::new(); + for row in 0..domain.size() { + let domain_point = domain.at(bit_reverse_index(row, log_size)); + let value = accumulate_row_quotients( + &batched_samples, + &column_evals.iter().collect_vec(), + row, + random_coeff, + domain_point, + ); + values.push(value); + } + let eval = CircleEvaluation::new(domain, values); + evals.push(eval); + } + + let res = SparseCircleEvaluation::new(evals); + if !queried_values_per_column.iter().all(|x| x.is_empty()) { + return Err(VerificationError::InvalidStructure); + } + Ok(res) +} + +#[cfg(test)] +mod tests { + use crate::core::backend::cpu::{CPUCircleEvaluation, CPUCirclePoly}; + use crate::core::circle::SECURE_FIELD_CIRCLE_GEN; + use crate::core::commitment_scheme::quotients::{compute_fri_quotients, PointSample}; + use crate::core::poly::circle::CanonicCoset; + use crate::{m31, qm31}; + + #[test] + fn test_quotients_are_low_degree() { + const LOG_SIZE: u32 = 7; + let polynomial = CPUCirclePoly::new((0..1 << LOG_SIZE).map(|i| m31!(i)).collect()); + let eval_domain = CanonicCoset::new(LOG_SIZE + 1).circle_domain(); + let eval = polynomial.evaluate(eval_domain); + let point = SECURE_FIELD_CIRCLE_GEN; + let value = polynomial.eval_at_point(point); + let coeff = qm31!(1, 2, 3, 4); + let quot_eval = + compute_fri_quotients(&[&eval], &[vec![PointSample { point, value }]], coeff) + .pop() + .unwrap(); + let quot_poly_base_field = + CPUCircleEvaluation::new(eval_domain, quot_eval.values.columns[0].clone()) + .interpolate(); + assert!(quot_poly_base_field.is_in_fft_space(LOG_SIZE)); + } } diff --git a/src/core/commitment_scheme/utils.rs b/src/core/commitment_scheme/utils.rs index 202def62a..975d7bbba 100644 --- a/src/core/commitment_scheme/utils.rs +++ b/src/core/commitment_scheme/utils.rs @@ -95,11 +95,4 @@ impl TreeVec>> { pub fn flatten_cols(self) -> Vec { self.0.into_iter().flatten().flatten().collect() } - - // TODO(spapini): Remove after accumulating oods quotients by size. - /// Flattens a [`TreeVec>`] of [Vec]s into a single [Vec] with all the elements - /// combined, in reverse order. 
- pub fn flatten_cols_rev(self) -> Vec { - self.0.into_iter().flatten().flatten().rev().collect() - } } diff --git a/src/core/commitment_scheme/verifier.rs b/src/core/commitment_scheme/verifier.rs index a0fd34810..da270eab4 100644 --- a/src/core/commitment_scheme/verifier.rs +++ b/src/core/commitment_scheme/verifier.rs @@ -6,14 +6,12 @@ use super::super::channel::Blake2sChannel; use super::super::circle::CirclePoint; use super::super::fields::m31::BaseField; use super::super::fields::qm31::SecureField; -use super::super::fri::{CirclePolyDegreeBound, FriConfig, FriVerifier, SparseCircleEvaluation}; -use super::super::oods::get_pair_oods_quotient; -use super::super::poly::circle::{CanonicCoset, CircleDomain, CircleEvaluation}; +use super::super::fri::{CirclePolyDegreeBound, FriConfig, FriVerifier}; use super::super::proof_of_work::ProofOfWork; use super::super::prover::{ LOG_BLOWUP_FACTOR, LOG_LAST_LAYER_DEGREE_BOUND, N_QUERIES, PROOF_OF_WORK_BITS, }; -use super::super::queries::SparseSubCircleDomain; +use super::quotients::{fri_answers, PointSample}; use super::utils::TreeVec; use super::CommitmentSchemeProof; use crate::commitment_scheme::blake2_hash::{Blake2sHash, Blake2sHasher}; @@ -53,20 +51,25 @@ impl CommitmentSchemeVerifier { pub fn verify_values( &self, - prove_points: TreeVec>>>, + sampled_points: TreeVec>>>, proof: CommitmentSchemeProof, channel: &mut ProofChannel, ) -> Result<(), VerificationError> { - channel.mix_felts(&proof.proved_values.clone().flatten_cols()); + channel.mix_felts(&proof.sampled_values.clone().flatten_cols()); + let random_coeff = channel.draw_felt(); - // Compute degree bounds for OODS quotients without looking at the proof. let bounds = self .column_log_sizes() - .zip_cols(&prove_points) - .map_cols(|(log_size, prove_points)| { - vec![CirclePolyDegreeBound::new(log_size); prove_points.len()] + .zip_cols(&sampled_points) + .map_cols(|(log_size, sampled_points)| { + vec![CirclePolyDegreeBound::new(log_size); sampled_points.len()] }) - .flatten_cols_rev(); + .flatten_cols() + .into_iter() + .sorted() + .rev() + .dedup() + .collect_vec(); // FRI commitment phase on OODS quotients. let fri_config = FriConfig::new(LOG_LAST_LAYER_DEGREE_BOUND, LOG_BLOWUP_FACTOR, N_QUERIES); @@ -77,7 +80,7 @@ impl CommitmentSchemeVerifier { ProofOfWork::new(PROOF_OF_WORK_BITS).verify(channel, &proof.proof_of_work)?; // Get FRI query domains. - let fri_query_domains = fri_verifier.column_opening_positions(channel); + let fri_query_domains = fri_verifier.column_query_positions(channel); // Verify merkle decommitments. let merkle_verification_result = self @@ -85,7 +88,7 @@ impl CommitmentSchemeVerifier { .as_ref() .zip(&proof.decommitments) .map(|(tree, decommitment)| { - // TODO(spapini): Also verify proved_values here. + // TODO(spapini): Also verify queried_values here. let queries = tree .log_sizes .iter() @@ -100,69 +103,34 @@ impl CommitmentSchemeVerifier { } // Answer FRI queries. - let mut fri_answers = self - .column_log_sizes() - .zip_cols(proof.proved_values) - .zip_cols(prove_points) - .zip_cols(proof.queried_values) - .map_cols( - // For each column. - |(((log_size, proved_values), opened_points), queried_values)| { - zip(opened_points, proved_values) - .map(|(point, value)| { - // For each opening point of that column. 
- eval_quotients_on_sparse_domain( - queried_values.clone(), - &fri_query_domains[&(log_size + LOG_BLOWUP_FACTOR)], - CanonicCoset::new(log_size + LOG_BLOWUP_FACTOR).circle_domain(), - point, - value, - ) - }) - .collect_vec() - }, - ) - .flatten_cols() - .into_iter() - .collect::, _>>()?; + let samples = sampled_points + .zip_cols(proof.sampled_values) + .map_cols(|(sampled_points, sampled_values)| { + zip(sampled_points, sampled_values) + .map(|(point, value)| PointSample { point, value }) + .collect_vec() + }) + .flatten(); + + // TODO(spapini): Properly defined column log size and dinstinguish between poly and + // commitment. + let fri_answers = fri_answers( + self.column_log_sizes() + .flatten() + .into_iter() + .map(|x| x + LOG_BLOWUP_FACTOR) + .collect(), + &samples, + random_coeff, + fri_query_domains, + &proof.queried_values.flatten(), + )?; - // TODO(spapini): Remove reverse. - fri_answers.reverse(); fri_verifier.decommit(fri_answers)?; Ok(()) } } -/// Evaluates the oods quotients on the sparse domain. -fn eval_quotients_on_sparse_domain( - queried_values: Vec, - query_domains: &SparseSubCircleDomain, - commitment_domain: CircleDomain, - point: CirclePoint, - value: SecureField, -) -> Result, VerificationError> { - let queried_values = &mut queried_values.into_iter(); - let res = SparseCircleEvaluation::new( - query_domains - .iter() - .map(|subdomain| { - let values = queried_values.take(1 << subdomain.log_size).collect_vec(); - if values.len() != 1 << subdomain.log_size { - return Err(VerificationError::InvalidStructure); - } - let subeval = - CircleEvaluation::new(subdomain.to_circle_domain(&commitment_domain), values); - Ok(get_pair_oods_quotient(point, value, &subeval).bit_reverse()) - }) - .collect::>()?, - ); - assert!( - queried_values.is_empty(), - "Not all queried values were used" - ); - Ok(res) -} - /// Verifier data for a single commitment tree in a commitment scheme. pub struct CommitmentTreeVerifier { pub commitment: Blake2sHash, diff --git a/src/core/fri.rs b/src/core/fri.rs index 67fa072b5..608c5523b 100644 --- a/src/core/fri.rs +++ b/src/core/fri.rs @@ -506,7 +506,7 @@ impl> FriVerifier { /// Samples queries and returns the opening positions for each unique column size. /// /// The order of the opening positions corresponds to the order of the column commitment. - pub fn column_opening_positions( + pub fn column_query_positions( &mut self, channel: &mut impl Channel, ) -> BTreeMap { @@ -1080,7 +1080,7 @@ mod tests { let bounds = LOG_DEGREES.map(CirclePolyDegreeBound::new).to_vec(); let mut verifier = FriVerifier::commit(&mut test_channel(), config, proof, bounds).unwrap(); - let verifier_opening_positions = verifier.column_opening_positions(&mut test_channel()); + let verifier_opening_positions = verifier.column_query_positions(&mut test_channel()); assert_eq!(prover_opening_positions, verifier_opening_positions); verifier.decommit(decommitment_values) diff --git a/src/core/oods.rs b/src/core/oods.rs index 61b6ad8ed..129dface6 100644 --- a/src/core/oods.rs +++ b/src/core/oods.rs @@ -54,51 +54,3 @@ pub fn get_oods_quotient( } CircleEvaluation::new(eval.domain, values) } - -/// Returns the pair OODS quotient (i.e quotienting out both the oods point and its complex -/// conjugate) polynomial evaluation over the whole domain. Used in case we don't want the highest -/// monomial of the resulting quotient polynomial to increase which might take it out of the fft -/// space. 
-pub fn get_pair_oods_quotient( - oods_point: CirclePoint, - oods_value: SecureField, - eval: &CPUCircleEvaluation, -) -> CPUCircleEvaluation { - let mut values = Vec::with_capacity(eval.domain.size()); - for (i, point) in enumerate(eval.domain.iter()) { - let index = bit_reverse_index(i, eval.domain.log_size()); - values.push(eval_pair_oods_quotient_at_point( - point, - eval.values[index], - oods_point, - oods_value, - )); - } - CircleEvaluation::new(eval.domain, values) -} - -#[cfg(test)] -mod tests { - use crate::core::backend::cpu::{CPUCircleEvaluation, CPUCirclePoly}; - use crate::core::circle::SECURE_FIELD_CIRCLE_GEN; - use crate::core::oods::get_pair_oods_quotient; - use crate::core::poly::circle::CanonicCoset; - use crate::m31; - - #[test] - fn test_oods_quotients_are_low_degree() { - const LOG_SIZE: u32 = 7; - let polynomial = CPUCirclePoly::new((0..1 << LOG_SIZE).map(|i| m31!(i)).collect()); - let eval_domain = CanonicCoset::new(LOG_SIZE + 1).circle_domain(); - let eval = polynomial.evaluate(eval_domain); - let oods_point = SECURE_FIELD_CIRCLE_GEN; - let oods_value = polynomial.eval_at_point(oods_point); - let quot_eval = get_pair_oods_quotient(oods_point, oods_value, &eval).bit_reverse(); - let quot_eval_base_field = CPUCircleEvaluation::new( - eval_domain, - quot_eval.values.iter().map(|v| v.0 .0).collect(), - ); - let quot_poly_base_field = quot_eval_base_field.interpolate(); - assert!(quot_poly_base_field.is_in_fft_space(LOG_SIZE)); - } -} diff --git a/src/core/prover/mod.rs b/src/core/prover/mod.rs index d8ace6156..bbc4304da 100644 --- a/src/core/prover/mod.rs +++ b/src/core/prover/mod.rs @@ -97,7 +97,7 @@ pub fn prove( // values. This is a sanity check. // TODO(spapini): Save clone. let (trace_oods_values, composition_oods_value) = - opened_values_to_mask(air, commitment_scheme_proof.proved_values.clone()).unwrap(); + opened_values_to_mask(air, commitment_scheme_proof.sampled_values.clone()).unwrap(); if composition_oods_value != air.eval_composition_polynomial_at_point(oods_point, &trace_oods_values, random_coeff) @@ -142,7 +142,7 @@ pub fn verify( // TODO(spapini): Save clone. let (trace_oods_values, composition_oods_value) = - opened_values_to_mask(air, proof.commitment_scheme_proof.proved_values.clone()) + opened_values_to_mask(air, proof.commitment_scheme_proof.sampled_values.clone()) .map_err(|_| VerificationError::InvalidStructure)?; if composition_oods_value diff --git a/src/fibonacci/mod.rs b/src/fibonacci/mod.rs index e92beae37..ff430c622 100644 --- a/src/fibonacci/mod.rs +++ b/src/fibonacci/mod.rs @@ -222,7 +222,7 @@ mod tests { let mut invalid_proof = fib.prove().unwrap(); invalid_proof .commitment_scheme_proof - .proved_values + .sampled_values .swap(0, 1); fib.verify(invalid_proof).unwrap_err();
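
Note (not part of the patch): the core change above is that quotient accumulation now happens per column size instead of per opened point — samples are first grouped by sample point into `ColumnSampleBatch`es, then columns are grouped by descending log size before calling `accumulate_quotients` (FRI expects descending sizes), while the per-row rule `acc = acc * random_coeff^k + numerator / denominator` from `accumulate_row_quotients` is unchanged in spirit. The following is a minimal, self-contained sketch of those two grouping steps only; it uses hypothetical stand-in types (`u64` "points", `f64` "values", plain `Vec` columns) rather than the crate's `CirclePoint` / `SecureField` / `CircleEvaluation`, and a placeholder sum instead of the real quotient math.

// Illustrative sketch only -- NOT part of the patch. Mirrors the grouping logic of
// ColumnSampleBatch::new and compute_fri_quotients with simplified stand-in types.
use std::cmp::Reverse;
use std::collections::BTreeMap;

/// Stand-in for PointSample: one sampled (point, value) pair for a column.
struct Sample {
    point: u64, // stand-in for CirclePoint<SecureField>
    value: f64, // stand-in for SecureField
}

/// Stand-in for ColumnSampleBatch: all (column index, value) pairs sampled at one point.
struct SampleBatch {
    point: u64,
    columns_and_values: Vec<(usize, f64)>,
}

/// Mirrors ColumnSampleBatch::new: group per-column samples by their sample point,
/// keeping a stable ordering over points via a BTreeMap.
fn group_samples_by_point(samples_per_column: &[Vec<Sample>]) -> Vec<SampleBatch> {
    let mut grouped: BTreeMap<u64, Vec<(usize, f64)>> = BTreeMap::new();
    for (column_index, samples) in samples_per_column.iter().enumerate() {
        for sample in samples {
            grouped
                .entry(sample.point)
                .or_insert_with(Vec::new)
                .push((column_index, sample.value));
        }
    }
    grouped
        .into_iter()
        .map(|(point, columns_and_values)| SampleBatch { point, columns_and_values })
        .collect()
}

/// Mirrors the per-size grouping in compute_fri_quotients: sort columns by descending
/// log size and process each size group as one batch. The "quotient" here is just a
/// placeholder sum so the sketch runs; the real code calls B::accumulate_quotients.
fn group_columns_by_log_size(columns: &[(u32, Vec<f64>)]) -> Vec<(u32, f64)> {
    let mut indexed: Vec<&(u32, Vec<f64>)> = columns.iter().collect();
    indexed.sort_by_key(|(log_size, _)| Reverse(*log_size));
    let mut result = Vec::new();
    let mut i = 0;
    while i < indexed.len() {
        let log_size = indexed[i].0;
        let mut acc = 0.0;
        while i < indexed.len() && indexed[i].0 == log_size {
            acc += indexed[i].1.iter().sum::<f64>(); // placeholder accumulation
            i += 1;
        }
        result.push((log_size, acc));
    }
    result
}

fn main() {
    // Two columns, each sampled at (possibly shared) points; samples at the same
    // point end up in one batch, exactly as the verifier and prover expect.
    let samples_per_column = vec![
        vec![Sample { point: 7, value: 1.0 }, Sample { point: 3, value: 2.0 }],
        vec![Sample { point: 7, value: 5.0 }],
    ];
    for b in group_samples_by_point(&samples_per_column) {
        println!("point {}: {:?}", b.point, b.columns_and_values);
    }

    // Columns of different log sizes get processed largest-first, one batch per size.
    let columns = vec![(3, vec![1.0; 8]), (5, vec![1.0; 32]), (3, vec![2.0; 8])];
    println!("{:?}", group_columns_by_log_size(&columns));
}

This ordering is also why the patch can delete flatten_cols_rev and the rev() workaround: once quotients are produced per size in descending order, the FRI prover and verifier receive columns in the order they already require.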