From e275e211dd6ec907fbee477b0206489b296cc8d5 Mon Sep 17 00:00:00 2001 From: Kunming Jiang Date: Wed, 12 Mar 2025 20:53:38 -0400 Subject: [PATCH 1/7] WIP batch_diff_size interface --- mpcs/src/lib.rs | 266 +++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 265 insertions(+), 1 deletion(-) diff --git a/mpcs/src/lib.rs b/mpcs/src/lib.rs index aa601fd12..8c84aa371 100644 --- a/mpcs/src/lib.rs +++ b/mpcs/src/lib.rs @@ -1,7 +1,7 @@ #![deny(clippy::cargo)] use ff_ext::ExtensionField; use itertools::Itertools; -use multilinear_extensions::mle::DenseMultilinearExtension; +use multilinear_extensions::mle::{DenseMultilinearExtension, FieldType, MultilinearExtension}; use serde::{Serialize, de::DeserializeOwned}; use std::fmt::Debug; use transcript::{BasicTranscript, Transcript}; @@ -54,6 +54,208 @@ pub fn pcs_batch_commit>( Pcs::batch_commit(pp, polys) } +// Express Value as binary in big-endian +fn compute_binary_with_length(length: usize, mut value: usize) -> Vec { + assert!(2 >> length >= value); + let mut bin = Vec::new(); + for _ in 0..length { + bin.insert(0, value % 2 == 1); + value <<= 1; + } + bin +} + +// Pack polynomials of different sizes into the same, returns +// 0: A list of packed polys +// 1: The final packed poly, if of different size +// 2: For each component poly of each packed poly, record its position in tree as binary +// 3: Same as 2 but for the final packed poly only +fn pack_poly_prover( + polys: &[DenseMultilinearExtension], +) -> ( + Vec>, + Option>, + Vec>>, + Option>>, +) { + // Assert that polys are sorted by size in decreasing order + assert!(polys.len() > 0); + for i in 0..polys.len() - 1 { + assert!(polys[i].num_vars >= polys[i + 1].num_vars); + } + // Use depth and index to track the position of the last poly + let mut depth = 0; + let mut index = 0; + // Packed polynomials of various sizes into packed polynomials of the same size + let max_poly_num_vars = polys[0].num_vars; + let mut packed_polys = Vec::new(); + let mut 
packed_comps = Vec::new(); + let mut next_packed_poly = polys[0].clone(); + let mut next_packed_comp = vec![compute_binary_with_length(depth, index)]; + for i in 1..polys.len() { + let p = &polys[i]; + // Update comp and packed_poly + if next_packed_poly.num_vars == max_poly_num_vars && next_packed_poly.evaluations.len() == 2 << next_packed_poly.num_vars { + // If full and reached max poly size, initialize a new packed poly + packed_comps.push(next_packed_comp); + depth = 0; + index = 0; + next_packed_comp = vec![compute_binary_with_length(depth, index)]; + packed_polys.push(next_packed_poly); + next_packed_poly = p.clone(); + } else { + // Find the next empty slot + if next_packed_poly.num_vars == max_poly_num_vars { + // If full and not reached max poly size, add a new right subtree + for c in &mut next_packed_comp { + c.insert(0, false); + } + depth = 1; + index = 1; + } else { + while index % 2 == 1 { + assert!(depth > 1); // If depth == 1 and index == 1, then the tree is full and should be handled in the case above + index /= 2; + depth -= 1; + } + index += 1; + } + // If next poly is smaller than the slot, keep branching + while p.num_vars < next_packed_poly.num_vars - depth { + depth += 1; + index *= 2; + } + next_packed_comp.push(compute_binary_with_length(depth, index)); + next_packed_poly.merge(p.clone()); + } + } + // Final packed poly + if next_packed_poly.num_vars == max_poly_num_vars { + packed_polys.push(next_packed_poly); + packed_comps.push(next_packed_comp); + (packed_polys, None, packed_comps, None) + } else { + (packed_polys, Some(next_packed_poly), packed_comps, Some(next_packed_comp)) + } +} + +// Given only the number of variables of each polynomial, returns num_vars of the packed poly +// and deduce the structure of the packed binary tree +fn pack_poly_verifier( + poly_num_vars: &[usize] +) -> ( + usize, + Option, + Vec>>, + Option>>, +) { + // Use depth and index to track the position of the last poly + let mut depth = 0; + let mut index = 
0; + // Packed polynomials of various sizes into packed polynomials of the same size + let max_poly_num_vars = poly_num_vars[0]; + let mut packed_comps = Vec::new(); + let mut next_packed_comp = vec![compute_binary_with_length(depth, index)]; + let mut next_pack_num_vars = poly_num_vars[0]; + let mut next_pack_eval_size = 2 << next_pack_num_vars; + for i in 1..poly_num_vars.len() { + let next_num_vars = poly_num_vars[i]; + // Update comp and packed_poly + if next_pack_num_vars == max_poly_num_vars && next_pack_eval_size == 2 << next_pack_num_vars { + // If full and reached max poly size, initialize a new packed poly + packed_comps.push(next_packed_comp); + depth = 0; + index = 0; + next_packed_comp = vec![compute_binary_with_length(depth, index)]; + next_pack_num_vars = next_num_vars; + next_pack_eval_size = 2 << next_num_vars; + } else { + // Find the next empty slot + if next_pack_num_vars == max_poly_num_vars { + // If full and not reached max poly size, add a new right subtree + for c in &mut next_packed_comp { + c.insert(0, false); + } + depth = 1; + index = 1; + } else { + while index % 2 == 1 { + assert!(depth > 1); // If depth == 1 and index == 1, then the tree is full and should be handled in the case above + index /= 2; + depth -= 1; + } + index += 1; + } + // If next poly is smaller than the slot, keep branching + while next_num_vars < next_pack_num_vars - depth { + depth += 1; + index *= 2; + } + next_packed_comp.push(compute_binary_with_length(depth, index)); + next_pack_eval_size += 2 << next_num_vars; + } + } + // Final packed poly + if next_pack_num_vars == max_poly_num_vars { + packed_comps.push(next_packed_comp); + (max_poly_num_vars, None, packed_comps, None) + } else { + (max_poly_num_vars, Some(next_pack_num_vars), packed_comps, Some(next_packed_comp)) + } +} + +// Compute evaluation on packed poly from individual evals and the pack binary tree +fn compute_packed_eval( + packed_point: &[E], + final_point: &[E], + evals: &[Evaluation], + 
packed_comps: &[Vec>], + final_comp: &Option>>, +) -> (Vec>, Option) { + // Use comps to compute evals for packed polys from regular evals + let mut packed_evals = Vec::new(); + let mut next_orig_poly = 0; + for (i, next_packed_comp) in packed_comps.iter().enumerate() { + let mut packed_eval = E::ZERO; + for next_index in next_packed_comp { + let mut next_eval = evals[next_orig_poly].value; + for (j, b) in next_index.iter().enumerate() { + if *b { next_eval *= packed_point[j] } + } + packed_eval *= next_eval; + next_orig_poly += 1; + } + packed_evals.push(Evaluation::new(i, 0, packed_eval)); + } + if let Some(final_comp) = final_comp { + let mut final_eval = E::ZERO; + for next_index in final_comp { + let mut next_eval = evals[next_orig_poly].value; + for (j, b) in next_index.iter().enumerate() { + if *b { next_eval *= final_point[j] } + } + final_eval *= next_eval; + next_orig_poly += 1; + } + (packed_evals, Some(final_eval)) + } else { + (packed_evals, None) + } +} + +pub fn pcs_batch_commit_diff_size>( + pp: &Pcs::ProverParam, + polys: &[DenseMultilinearExtension], +) -> Result<(Pcs::CommitmentWithWitness, Option), Error> { + let (packed_polys, final_poly, _, _) = pack_poly_prover(polys); + // Final packed poly + if let Some(final_poly) = final_poly { + Ok((Pcs::batch_commit(pp, &packed_polys)?, Some(Pcs::batch_commit(pp, &[final_poly])?))) + } else { + Ok((Pcs::batch_commit(pp, &packed_polys)?, None)) + } +} + pub fn pcs_batch_commit_and_write>( pp: &Pcs::ProverParam, polys: &[DenseMultilinearExtension], @@ -84,6 +286,37 @@ pub fn pcs_batch_open>( Pcs::batch_open(pp, polys, comms, points, evals, transcript) } +pub fn pcs_batch_open_diff_size>( + pp: &Pcs::ProverParam, + polys: &[DenseMultilinearExtension], + packed_comms: &[Pcs::CommitmentWithWitness], + final_comm: Option<&Pcs::CommitmentWithWitness>, + points: &[Vec], + evals: &[Evaluation], + transcript: &mut impl Transcript, +) -> Result<(Pcs::Proof, Option), Error> { + // TODO: Sort the polys by decreasing 
size + // TODO: The prover should be able to avoid packing the polys again + let (packed_polys, final_poly, packed_comps, final_comp) = pack_poly_prover(polys); + // TODO: Add unifying sumcheck if the points do not match + // For now, assume that all polys are evaluated on the same points + let packed_point = points[0].clone(); + let final_point = if let Some(final_poly) = &final_poly { packed_point[packed_point.len() - final_poly.num_vars..packed_point.len()].to_vec() } else { Vec::new() }; + // Use comps to compute evals for packed polys from regular evals + let (packed_evals, final_eval) = compute_packed_eval(&packed_point, &final_point, evals, &packed_comps, &final_comp); + if let Some(final_eval) = final_eval { + Ok(( + Pcs::batch_open(pp, &packed_polys, packed_comms, &[packed_point], &packed_evals, transcript)?, + Some(Pcs::open(pp, &final_poly.unwrap(), final_comm.unwrap(), &final_point, &final_eval, transcript)?) + )) + } else { + Ok(( + Pcs::batch_open(pp, &packed_polys, packed_comms, &[packed_point], &packed_evals, transcript)?, + None + )) + } +} + pub fn pcs_verify>( vp: &Pcs::VerifierParam, comm: &Pcs::Commitment, @@ -109,6 +342,37 @@ where Pcs::batch_verify(vp, comms, points, evals, proof, transcript) } +pub fn pcs_batch_verify_diff_size<'a, E: ExtensionField, Pcs: PolynomialCommitmentScheme>( + vp: &Pcs::VerifierParam, + poly_num_vars: &[usize], // Size of the original polynomials, for reproducing results + packed_comms: &[Pcs::Commitment], + final_comm: Option<&Pcs::Commitment>, + points: &[Vec], + evals: &[Evaluation], + packed_proof: &Pcs::Proof, + final_proof: Option<&Pcs::Proof>, + transcript: &mut impl Transcript, +) -> Result<(), Error> +where + Pcs::Commitment: 'a, +{ + // Replicate packing + let (_, final_poly_num_vars, packed_comps, final_comp) = pack_poly_verifier(poly_num_vars); + // TODO: Add unifying sumcheck if the points do not match + // For now, assume that all polys are evaluated on the same points + let packed_point = 
points[0].clone(); + let final_point = if let Some(final_poly_num_vars) = &final_poly_num_vars { packed_point[packed_point.len() - final_poly_num_vars..packed_point.len()].to_vec() } else { Vec::new() }; + // Use comps to compute evals for packed polys from regular evals + let (packed_evals, final_eval) = compute_packed_eval(&packed_point, &final_point, evals, &packed_comps, &final_comp); + if let Some(final_eval) = final_eval { + Pcs::batch_verify(vp, packed_comms, &[packed_point], &packed_evals, packed_proof, transcript)?; + Pcs::verify(vp, final_comm.unwrap(), &final_point, &final_eval, final_proof.unwrap(), transcript) + } else { + Pcs::batch_verify(vp, packed_comms, &[packed_point], &packed_evals, packed_proof, transcript) + } +} + + pub trait PolynomialCommitmentScheme: Clone + Debug { type Param: Clone + Debug + Serialize + DeserializeOwned; type ProverParam: Clone + Debug + Serialize + DeserializeOwned; From 21e964b86fc544b2ffb0171f906ffc6fabf0fed4 Mon Sep 17 00:00:00 2001 From: Kunming Jiang Date: Fri, 14 Mar 2025 17:46:28 -0400 Subject: [PATCH 2/7] WIP batch_diff_size testing --- mpcs/src/basefold.rs | 14 +++- mpcs/src/lib.rs | 168 +++++++++++++++++++++++++++++++++---------- 2 files changed, 142 insertions(+), 40 deletions(-) diff --git a/mpcs/src/basefold.rs b/mpcs/src/basefold.rs index 015f10184..526e9ef0a 100644 --- a/mpcs/src/basefold.rs +++ b/mpcs/src/basefold.rs @@ -1114,8 +1114,7 @@ mod test { use crate::{ basefold::Basefold, test_util::{ - gen_rand_poly_base, gen_rand_poly_ext, run_batch_commit_open_verify, - run_commit_open_verify, run_simple_batch_commit_open_verify, + gen_rand_poly_base, gen_rand_poly_ext, run_batch_commit_open_verify, run_commit_open_verify, run_diff_size_batch_commit_open_verify, run_simple_batch_commit_open_verify }, }; @@ -1215,4 +1214,15 @@ mod test { ); } } + + #[test] + fn batch_commit_diff_size_open_verify() { + let gen_rand_poly = gen_rand_poly_base; + run_diff_size_batch_commit_open_verify::( + gen_rand_poly, + 17, + 
3, + 5, + ); + } } diff --git a/mpcs/src/lib.rs b/mpcs/src/lib.rs index 8c84aa371..60d8b5081 100644 --- a/mpcs/src/lib.rs +++ b/mpcs/src/lib.rs @@ -6,6 +6,7 @@ use serde::{Serialize, de::DeserializeOwned}; use std::fmt::Debug; use transcript::{BasicTranscript, Transcript}; use util::hash::Digest; +use p3_field::PrimeCharacteristicRing; pub mod sum_check; pub mod util; @@ -56,7 +57,7 @@ pub fn pcs_batch_commit>( // Express Value as binary in big-endian fn compute_binary_with_length(length: usize, mut value: usize) -> Vec { - assert!(2 >> length >= value); + assert!(value < 1 << length); let mut bin = Vec::new(); for _ in 0..length { bin.insert(0, value % 2 == 1); @@ -95,7 +96,7 @@ fn pack_poly_prover( for i in 1..polys.len() { let p = &polys[i]; // Update comp and packed_poly - if next_packed_poly.num_vars == max_poly_num_vars && next_packed_poly.evaluations.len() == 2 << next_packed_poly.num_vars { + if next_packed_poly.num_vars == max_poly_num_vars && next_packed_poly.evaluations.len() == 1 << next_packed_poly.num_vars { // If full and reached max poly size, initialize a new packed poly packed_comps.push(next_packed_comp); depth = 0; @@ -104,8 +105,11 @@ fn pack_poly_prover( packed_polys.push(next_packed_poly); next_packed_poly = p.clone(); } else { + let mut next_packed_poly_num_vars = next_packed_poly.num_vars; // Find the next empty slot - if next_packed_poly.num_vars == max_poly_num_vars { + if next_packed_poly.evaluations.len() == 1 << next_packed_poly.num_vars { + // Conceptually next_packed_poly now has one more variable, but has yet to be reflected in its coefficients + next_packed_poly_num_vars += 1; // If full and not reached max poly size, add a new right subtree for c in &mut next_packed_comp { c.insert(0, false); @@ -121,7 +125,7 @@ fn pack_poly_prover( index += 1; } // If next poly is smaller than the slot, keep branching - while p.num_vars < next_packed_poly.num_vars - depth { + while p.num_vars < next_packed_poly_num_vars - depth { depth += 1; 
index *= 2; } @@ -129,6 +133,19 @@ fn pack_poly_prover( next_packed_poly.merge(p.clone()); } } + // Pad the evaluations of final poly with 0 until a power of 2 + let pad_num_evals = (1 << next_packed_poly.num_vars) - next_packed_poly.evaluations.len(); + if pad_num_evals > 0 { + match &mut next_packed_poly.evaluations { + FieldType::Base(e) => { + e.extend(vec![E::BaseField::ZERO; pad_num_evals]) + } + FieldType::Ext(e) => { + e.extend(vec![E::ZERO; pad_num_evals]) + } + _ => () + } + } // Final packed poly if next_packed_poly.num_vars == max_poly_num_vars { packed_polys.push(next_packed_poly); @@ -157,22 +174,23 @@ fn pack_poly_verifier( let mut packed_comps = Vec::new(); let mut next_packed_comp = vec![compute_binary_with_length(depth, index)]; let mut next_pack_num_vars = poly_num_vars[0]; - let mut next_pack_eval_size = 2 << next_pack_num_vars; + let mut next_pack_eval_size = 1 << next_pack_num_vars; for i in 1..poly_num_vars.len() { let next_num_vars = poly_num_vars[i]; // Update comp and packed_poly - if next_pack_num_vars == max_poly_num_vars && next_pack_eval_size == 2 << next_pack_num_vars { + if next_pack_num_vars == max_poly_num_vars && next_pack_eval_size == 1 << next_pack_num_vars { // If full and reached max poly size, initialize a new packed poly packed_comps.push(next_packed_comp); depth = 0; index = 0; next_packed_comp = vec![compute_binary_with_length(depth, index)]; next_pack_num_vars = next_num_vars; - next_pack_eval_size = 2 << next_num_vars; + next_pack_eval_size = 1 << next_num_vars; } else { // Find the next empty slot - if next_pack_num_vars == max_poly_num_vars { + if next_pack_eval_size == 1 << next_pack_num_vars { // If full and not reached max poly size, add a new right subtree + next_pack_num_vars += 1; for c in &mut next_packed_comp { c.insert(0, false); } @@ -192,7 +210,7 @@ fn pack_poly_verifier( index *= 2; } next_packed_comp.push(compute_binary_with_length(depth, index)); - next_pack_eval_size += 2 << next_num_vars; + 
next_pack_eval_size += 1 << next_num_vars; } } // Final packed poly @@ -208,29 +226,29 @@ fn pack_poly_verifier( fn compute_packed_eval( packed_point: &[E], final_point: &[E], - evals: &[Evaluation], + evals: &[E], packed_comps: &[Vec>], final_comp: &Option>>, -) -> (Vec>, Option) { +) -> (Vec, Option) { // Use comps to compute evals for packed polys from regular evals let mut packed_evals = Vec::new(); let mut next_orig_poly = 0; - for (i, next_packed_comp) in packed_comps.iter().enumerate() { + for next_packed_comp in packed_comps { let mut packed_eval = E::ZERO; for next_index in next_packed_comp { - let mut next_eval = evals[next_orig_poly].value; + let mut next_eval = evals[next_orig_poly]; for (j, b) in next_index.iter().enumerate() { if *b { next_eval *= packed_point[j] } } packed_eval *= next_eval; next_orig_poly += 1; } - packed_evals.push(Evaluation::new(i, 0, packed_eval)); + packed_evals.push(packed_eval); } if let Some(final_comp) = final_comp { let mut final_eval = E::ZERO; for next_index in final_comp { - let mut next_eval = evals[next_orig_poly].value; + let mut next_eval = evals[next_orig_poly]; for (j, b) in next_index.iter().enumerate() { if *b { next_eval *= final_point[j] } } @@ -289,32 +307,29 @@ pub fn pcs_batch_open>( pub fn pcs_batch_open_diff_size>( pp: &Pcs::ProverParam, polys: &[DenseMultilinearExtension], - packed_comms: &[Pcs::CommitmentWithWitness], - final_comm: Option<&Pcs::CommitmentWithWitness>, + packed_comm: &Pcs::CommitmentWithWitness, + final_comm: &Option, points: &[Vec], - evals: &[Evaluation], + evals: &[E], transcript: &mut impl Transcript, ) -> Result<(Pcs::Proof, Option), Error> { // TODO: Sort the polys by decreasing size // TODO: The prover should be able to avoid packing the polys again let (packed_polys, final_poly, packed_comps, final_comp) = pack_poly_prover(polys); + let packed_polys: Vec> = packed_polys.into_iter().map(|p| ArcMultilinearExtension::from(p)).collect(); // TODO: Add unifying sumcheck if the points 
do not match // For now, assume that all polys are evaluated on the same points let packed_point = points[0].clone(); let final_point = if let Some(final_poly) = &final_poly { packed_point[packed_point.len() - final_poly.num_vars..packed_point.len()].to_vec() } else { Vec::new() }; // Use comps to compute evals for packed polys from regular evals let (packed_evals, final_eval) = compute_packed_eval(&packed_point, &final_point, evals, &packed_comps, &final_comp); - if let Some(final_eval) = final_eval { - Ok(( - Pcs::batch_open(pp, &packed_polys, packed_comms, &[packed_point], &packed_evals, transcript)?, - Some(Pcs::open(pp, &final_poly.unwrap(), final_comm.unwrap(), &final_point, &final_eval, transcript)?) - )) - } else { - Ok(( - Pcs::batch_open(pp, &packed_polys, packed_comms, &[packed_point], &packed_evals, transcript)?, - None - )) - } + + let pack_proof = Pcs::simple_batch_open(pp, &packed_polys, packed_comm, &packed_point, &packed_evals, transcript)?; + let final_proof = match (&final_poly, &final_comm, &final_eval) { + (Some(final_poly), Some(final_comm), Some(final_eval)) => Some(Pcs::open(pp, final_poly, final_comm, &final_point, final_eval, transcript)?), + _ => None, + }; + Ok((pack_proof, final_proof)) } pub fn pcs_verify>( @@ -345,12 +360,12 @@ where pub fn pcs_batch_verify_diff_size<'a, E: ExtensionField, Pcs: PolynomialCommitmentScheme>( vp: &Pcs::VerifierParam, poly_num_vars: &[usize], // Size of the original polynomials, for reproducing results - packed_comms: &[Pcs::Commitment], - final_comm: Option<&Pcs::Commitment>, + packed_comm: &Pcs::Commitment, + final_comm: &Option, points: &[Vec], - evals: &[Evaluation], + evals: &[E], packed_proof: &Pcs::Proof, - final_proof: Option<&Pcs::Proof>, + final_proof: &Option, transcript: &mut impl Transcript, ) -> Result<(), Error> where @@ -364,11 +379,10 @@ where let final_point = if let Some(final_poly_num_vars) = &final_poly_num_vars { packed_point[packed_point.len() - 
final_poly_num_vars..packed_point.len()].to_vec() } else { Vec::new() }; // Use comps to compute evals for packed polys from regular evals let (packed_evals, final_eval) = compute_packed_eval(&packed_point, &final_point, evals, &packed_comps, &final_comp); - if let Some(final_eval) = final_eval { - Pcs::batch_verify(vp, packed_comms, &[packed_point], &packed_evals, packed_proof, transcript)?; - Pcs::verify(vp, final_comm.unwrap(), &final_point, &final_eval, final_proof.unwrap(), transcript) - } else { - Pcs::batch_verify(vp, packed_comms, &[packed_point], &packed_evals, packed_proof, transcript) + Pcs::simple_batch_verify(vp, packed_comm, &packed_point, &packed_evals, packed_proof, transcript)?; + match (&final_comm, &final_eval, &final_proof) { + (Some(final_comm), Some(final_eval), Some(final_proof)) => Pcs::verify(vp, final_comm, &final_point, &final_eval, final_proof, transcript), + _ => Ok(()), } } @@ -901,4 +915,82 @@ pub mod test_util { } } } + + #[cfg(test)] + pub(super) fn run_diff_size_batch_commit_open_verify( + gen_rand_poly: fn(usize) -> DenseMultilinearExtension, + max_num_vars: usize, + max_vars_gap: usize, + batch_size: usize, + ) where + E: ExtensionField, + Pcs: PolynomialCommitmentScheme, + { + use crate::{pcs_batch_commit_diff_size, pcs_batch_open_diff_size, pcs_batch_verify_diff_size}; + + for vars_gap in 1..=max_vars_gap { + assert!(max_num_vars > vars_gap * batch_size); + let (pp, vp) = setup_pcs::(max_num_vars); + + let (poly_num_vars, packed_comm, final_comm, evals, packed_proof, final_proof, challenge) = { + let mut transcript = BasicTranscript::new(b"BaseFold"); + let polys: Vec> = (0..batch_size).map(|i| gen_rand_polys(|_| max_num_vars - i * vars_gap, 1, gen_rand_poly)).flatten().collect(); + let (packed_comm, final_comm) = pcs_batch_commit_diff_size::(&pp, &polys).unwrap(); + let point = get_point_from_challenge(max_num_vars, &mut transcript); + let points: Vec> = polys.iter().map(|p| point[max_num_vars - 
p.num_vars..].to_vec()).collect(); + let evals = polys.iter().zip(&points).map(|(poly, point)| poly.evaluate(point)).collect_vec(); + transcript.append_field_element_exts(&evals); + + let (packed_proof, final_proof) = pcs_batch_open_diff_size::(&pp, &polys, &packed_comm, &final_comm, &points, &evals, &mut transcript).unwrap(); + ( + polys.iter().map(|p| p.num_vars).collect::>(), + Pcs::get_pure_commitment(&packed_comm), + if let Some(final_comm) = final_comm { Some(Pcs::get_pure_commitment(&final_comm)) } else { None }, + evals, + packed_proof, + final_proof, + transcript.read_challenge(), + ) + }; + // Batch verify + { + let mut transcript = BasicTranscript::new(b"BaseFold"); + Pcs::write_commitment(&packed_comm, &mut transcript).unwrap(); + if let Some(final_comm) = &final_comm { + Pcs::write_commitment(final_comm, &mut transcript).unwrap(); + } + + let point = get_point_from_challenge(max_num_vars, &mut transcript); + let points: Vec> = poly_num_vars.iter().map(|n| point[max_num_vars - n..].to_vec()).collect(); + transcript.append_field_element_exts(&evals); + + pcs_batch_verify_diff_size::(&vp, &poly_num_vars, &packed_comm, &final_comm, &points, &evals, &packed_proof, &final_proof, &mut transcript).unwrap(); + + let v_challenge = transcript.read_challenge(); + assert_eq!(challenge, v_challenge); + + println!( + "Proof size for simple batch: {} bytes", + bincode::serialized_size(&packed_proof).unwrap() + bincode::serialized_size(&final_proof).unwrap() + ); + } + } + } +} + +#[cfg(test)] +mod test { + + #[test] + fn test_packing() { + use crate::pack_poly_verifier; + + let poly_num_vars = [27, 26, 25, 24, 23]; + let (pack_size, final_size, pack_comp, final_comp) = pack_poly_verifier(&poly_num_vars); + println!("PACK_SIZE: {:?}", pack_size); + println!("FINAL_SIZE: {:?}", final_size); + println!("PACK_COMP: {:?}", pack_comp); + println!("FINAL_COMP: {:?}", final_comp); + } + } From 8631b153246f0b1ca7444a238131ae426fe267f4 Mon Sep 17 00:00:00 2001 From: Kunming 
Jiang Date: Mon, 17 Mar 2025 14:45:50 -0400 Subject: [PATCH 3/7] Finished diff size but same point --- mpcs/src/basefold.rs | 2 +- mpcs/src/lib.rs | 126 +++++++++++++++++++++++++++++++++---------- mpcs/src/whir.rs | 13 ++++- 3 files changed, 111 insertions(+), 30 deletions(-) diff --git a/mpcs/src/basefold.rs b/mpcs/src/basefold.rs index 526e9ef0a..1cc6129ad 100644 --- a/mpcs/src/basefold.rs +++ b/mpcs/src/basefold.rs @@ -1220,7 +1220,7 @@ mod test { let gen_rand_poly = gen_rand_poly_base; run_diff_size_batch_commit_open_verify::( gen_rand_poly, - 17, + 20, 3, 5, ); diff --git a/mpcs/src/lib.rs b/mpcs/src/lib.rs index 60d8b5081..e9a3c31fc 100644 --- a/mpcs/src/lib.rs +++ b/mpcs/src/lib.rs @@ -55,13 +55,21 @@ pub fn pcs_batch_commit>( Pcs::batch_commit(pp, polys) } +pub fn pcs_batch_commit_and_write>( + pp: &Pcs::ProverParam, + polys: &[DenseMultilinearExtension], + transcript: &mut impl Transcript, +) -> Result { + Pcs::batch_commit_and_write(pp, polys, transcript) +} + // Express Value as binary in big-endian fn compute_binary_with_length(length: usize, mut value: usize) -> Vec { - assert!(value < 1 << length); + assert!(value < (1 << length)); let mut bin = Vec::new(); for _ in 0..length { bin.insert(0, value % 2 == 1); - value <<= 1; + value >>= 1; } bin } @@ -233,26 +241,32 @@ fn compute_packed_eval( // Use comps to compute evals for packed polys from regular evals let mut packed_evals = Vec::new(); let mut next_orig_poly = 0; + let pack_num_vars = packed_point.len(); for next_packed_comp in packed_comps { let mut packed_eval = E::ZERO; for next_index in next_packed_comp { let mut next_eval = evals[next_orig_poly]; + // Note: the points are stored in reverse for (j, b) in next_index.iter().enumerate() { - if *b { next_eval *= packed_point[j] } + let next_point = packed_point[pack_num_vars - 1 - j]; + if *b { next_eval *= next_point } else { next_eval *= E::ONE - next_point } } - packed_eval *= next_eval; + packed_eval += next_eval; next_orig_poly += 1; } 
packed_evals.push(packed_eval); } if let Some(final_comp) = final_comp { let mut final_eval = E::ZERO; + let final_num_vars = final_point.len(); for next_index in final_comp { let mut next_eval = evals[next_orig_poly]; + // Note: the points are stored in reverse for (j, b) in next_index.iter().enumerate() { - if *b { next_eval *= final_point[j] } + let next_point = final_point[final_num_vars - 1 - j]; + if *b { next_eval *= next_point } else { next_eval *= E::ONE - next_point } } - final_eval *= next_eval; + final_eval += next_eval; next_orig_poly += 1; } (packed_evals, Some(final_eval)) @@ -262,25 +276,35 @@ fn compute_packed_eval( } pub fn pcs_batch_commit_diff_size>( - pp: &Pcs::ProverParam, + pack_pp: &Pcs::ProverParam, + final_pp: &Option, polys: &[DenseMultilinearExtension], ) -> Result<(Pcs::CommitmentWithWitness, Option), Error> { let (packed_polys, final_poly, _, _) = pack_poly_prover(polys); // Final packed poly - if let Some(final_poly) = final_poly { - Ok((Pcs::batch_commit(pp, &packed_polys)?, Some(Pcs::batch_commit(pp, &[final_poly])?))) - } else { - Ok((Pcs::batch_commit(pp, &packed_polys)?, None)) + match (final_pp, final_poly) { + (Some(final_pp), Some(final_poly)) => Ok((Pcs::batch_commit(pack_pp, &packed_polys)?, Some(Pcs::batch_commit(final_pp, &[final_poly])?))), + (None, None) => Ok((Pcs::batch_commit(pack_pp, &packed_polys)?, None)), + _ => unreachable!() } } -pub fn pcs_batch_commit_and_write>( +pub fn pcs_batch_commit_diff_size_and_write>( pp: &Pcs::ProverParam, polys: &[DenseMultilinearExtension], transcript: &mut impl Transcript, -) -> Result { - Pcs::batch_commit_and_write(pp, polys, transcript) -} +) -> Result<(Pcs::CommitmentWithWitness, Option), Error> { + let (packed_polys, final_poly, _, _) = pack_poly_prover(polys); + // Final packed poly + if let Some(final_poly) = final_poly { + Ok(( + Pcs::batch_commit_and_write(pp, &packed_polys, transcript)?, + Some(Pcs::commit_and_write(pp, &final_poly, transcript)?) 
+ )) + } else { + Ok((Pcs::batch_commit_and_write(pp, &packed_polys, transcript)?, None)) + } +} pub fn pcs_open>( pp: &Pcs::ProverParam, @@ -316,18 +340,22 @@ pub fn pcs_batch_open_diff_size> = packed_polys.into_iter().map(|p| ArcMultilinearExtension::from(p)).collect(); // TODO: Add unifying sumcheck if the points do not match // For now, assume that all polys are evaluated on the same points let packed_point = points[0].clone(); - let final_point = if let Some(final_poly) = &final_poly { packed_point[packed_point.len() - final_poly.num_vars..packed_point.len()].to_vec() } else { Vec::new() }; + // Note: the points are stored in reverse + let final_point = if let Some(final_poly) = &final_poly { packed_point[..final_poly.num_vars].to_vec() } else { Vec::new() }; // Use comps to compute evals for packed polys from regular evals let (packed_evals, final_eval) = compute_packed_eval(&packed_point, &final_point, evals, &packed_comps, &final_comp); + let packed_polys: Vec> = packed_polys.into_iter().map(|p| ArcMultilinearExtension::from(p)).collect(); let pack_proof = Pcs::simple_batch_open(pp, &packed_polys, packed_comm, &packed_point, &packed_evals, transcript)?; let final_proof = match (&final_poly, &final_comm, &final_eval) { - (Some(final_poly), Some(final_comm), Some(final_eval)) => Some(Pcs::open(pp, final_poly, final_comm, &final_point, final_eval, transcript)?), - _ => None, + (Some(final_poly), Some(final_comm), Some(final_eval)) => { + Some(Pcs::open(pp, final_poly, final_comm, &final_point, final_eval, transcript)?) 
+ } + (None, None, None) => None, + _ => unreachable!(), }; Ok((pack_proof, final_proof)) } @@ -376,13 +404,17 @@ where // TODO: Add unifying sumcheck if the points do not match // For now, assume that all polys are evaluated on the same points let packed_point = points[0].clone(); - let final_point = if let Some(final_poly_num_vars) = &final_poly_num_vars { packed_point[packed_point.len() - final_poly_num_vars..packed_point.len()].to_vec() } else { Vec::new() }; + let final_point = if let Some(final_poly_num_vars) = &final_poly_num_vars { packed_point[..*final_poly_num_vars].to_vec() } else { Vec::new() }; // Use comps to compute evals for packed polys from regular evals let (packed_evals, final_eval) = compute_packed_eval(&packed_point, &final_point, evals, &packed_comps, &final_comp); + Pcs::simple_batch_verify(vp, packed_comm, &packed_point, &packed_evals, packed_proof, transcript)?; match (&final_comm, &final_eval, &final_proof) { - (Some(final_comm), Some(final_eval), Some(final_proof)) => Pcs::verify(vp, final_comm, &final_point, &final_eval, final_proof, transcript), - _ => Ok(()), + (Some(final_comm), Some(final_eval), Some(final_proof)) => { + Pcs::verify(vp, final_comm, &final_point, &final_eval, final_proof, transcript) + } + (None, None, None) => Ok(()), + _ => unreachable!(), } } @@ -926,7 +958,7 @@ pub mod test_util { E: ExtensionField, Pcs: PolynomialCommitmentScheme, { - use crate::{pcs_batch_commit_diff_size, pcs_batch_open_diff_size, pcs_batch_verify_diff_size}; + use crate::{pcs_batch_commit_diff_size_and_write, pcs_batch_open_diff_size, pcs_batch_verify_diff_size}; for vars_gap in 1..=max_vars_gap { assert!(max_num_vars > vars_gap * batch_size); @@ -934,16 +966,18 @@ pub mod test_util { let (poly_num_vars, packed_comm, final_comm, evals, packed_proof, final_proof, challenge) = { let mut transcript = BasicTranscript::new(b"BaseFold"); - let polys: Vec> = (0..batch_size).map(|i| gen_rand_polys(|_| max_num_vars - i * vars_gap, 1, 
gen_rand_poly)).flatten().collect(); - let (packed_comm, final_comm) = pcs_batch_commit_diff_size::(&pp, &polys).unwrap(); + let polys: Vec> = (0..batch_size).map(|i| + gen_rand_polys(|_| max_num_vars - i * vars_gap, 1, gen_rand_poly) + ).flatten().collect(); + let (packed_comm, final_comm) = pcs_batch_commit_diff_size_and_write::(&pp, &polys, &mut transcript).unwrap(); let point = get_point_from_challenge(max_num_vars, &mut transcript); - let points: Vec> = polys.iter().map(|p| point[max_num_vars - p.num_vars..].to_vec()).collect(); + let points: Vec> = polys.iter().map(|p| point[..p.num_vars].to_vec()).collect(); let evals = polys.iter().zip(&points).map(|(poly, point)| poly.evaluate(point)).collect_vec(); transcript.append_field_element_exts(&evals); let (packed_proof, final_proof) = pcs_batch_open_diff_size::(&pp, &polys, &packed_comm, &final_comm, &points, &evals, &mut transcript).unwrap(); ( - polys.iter().map(|p| p.num_vars).collect::>(), + polys.iter().map(|p| p.num_vars()).collect::>(), Pcs::get_pure_commitment(&packed_comm), if let Some(final_comm) = final_comm { Some(Pcs::get_pure_commitment(&final_comm)) } else { None }, evals, @@ -961,7 +995,7 @@ pub mod test_util { } let point = get_point_from_challenge(max_num_vars, &mut transcript); - let points: Vec> = poly_num_vars.iter().map(|n| point[max_num_vars - n..].to_vec()).collect(); + let points: Vec> = poly_num_vars.iter().map(|n| point[..*n].to_vec()).collect(); transcript.append_field_element_exts(&evals); pcs_batch_verify_diff_size::(&vp, &poly_num_vars, &packed_comm, &final_comm, &points, &evals, &packed_proof, &final_proof, &mut transcript).unwrap(); @@ -980,6 +1014,12 @@ pub mod test_util { #[cfg(test)] mod test { + use ark_std::test_rng; + use ff_ext::GoldilocksExt2; + use multilinear_extensions::mle::{DenseMultilinearExtension, FieldType, MultilinearExtension}; + use p3_field::PrimeCharacteristicRing; + use p3_goldilocks::Goldilocks; + type E = GoldilocksExt2; #[test] fn test_packing() { @@ 
-993,4 +1033,34 @@ mod test { println!("FINAL_COMP: {:?}", final_comp); } + #[test] + fn test_packing_eval() { + let mut rng = test_rng(); + let poly0 = DenseMultilinearExtension::::random(4, &mut rng); + let poly1 = DenseMultilinearExtension::::random(3, &mut rng); + let poly2 = DenseMultilinearExtension::::random(2, &mut rng); + let point = [E::from_i32(5), E::from_i32(7), E::from_i32(9), E::from_i32(11), E::from_i32(13)]; + let eval0 = poly0.evaluate(&point[..4]); + let eval1 = poly1.evaluate(&point[..3]); + let eval2 = poly2.evaluate(&point[..2]); + let claim = + (E::ONE - point[4]) * eval0 + + point[4] * (E::ONE - point[3]) * eval1 + + point[4] * point[3] * (E::ONE - point[2]) * eval2; + + let mut poly = poly0.clone(); + poly.merge(poly1.clone()); + poly.merge(poly2.clone()); + match &mut poly.evaluations { + FieldType::Base(e) => { + e.extend(vec![Goldilocks::ZERO; 4]) + } + FieldType::Ext(e) => { + e.extend(vec![E::ZERO; 4]) + } + _ => () + } + let eval = poly.evaluate(&point); + println!("CLAIM: {:?}, EXPECTED: {:?}", claim, eval); + } } diff --git a/mpcs/src/whir.rs b/mpcs/src/whir.rs index 637d8f35b..cb04c3649 100644 --- a/mpcs/src/whir.rs +++ b/mpcs/src/whir.rs @@ -179,7 +179,7 @@ where mod tests { use super::*; use crate::test_util::{ - gen_rand_poly_base, run_commit_open_verify, run_simple_batch_commit_open_verify, + gen_rand_poly_base, run_commit_open_verify, run_diff_size_batch_commit_open_verify, run_simple_batch_commit_open_verify }; use ff_ext::GoldilocksExt2; use spec::WhirDefaultSpec; @@ -258,4 +258,15 @@ mod tests { ); } } + + #[test] + fn batch_commit_diff_size_open_verify() { + let gen_rand_poly = gen_rand_poly_base; + run_diff_size_batch_commit_open_verify::( + gen_rand_poly, + 20, + 3, + 5, + ); + } } From f8e6640e2daddc8720e3877ceab885d5f31edd0d Mon Sep 17 00:00:00 2001 From: Kunming Jiang Date: Mon, 17 Mar 2025 23:50:32 -0400 Subject: [PATCH 4/7] Buggy sumcheck? 
--- Cargo.lock | 1 + mpcs/Cargo.toml | 1 + mpcs/src/lib.rs | 82 +++++++++++++++++++++++++++++++++--------- mpcs/src/whir.rs | 2 +- sumcheck/src/prover.rs | 3 ++ 5 files changed, 72 insertions(+), 17 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 0c796d7eb..59d9e2204 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1538,6 +1538,7 @@ dependencies = [ "rand_chacha", "rayon", "serde", + "sumcheck", "transcript", "whir", "zeroize", diff --git a/mpcs/Cargo.toml b/mpcs/Cargo.toml index 54305ecc7..d3240d6aa 100644 --- a/mpcs/Cargo.toml +++ b/mpcs/Cargo.toml @@ -34,6 +34,7 @@ rand.workspace = true rand_chacha.workspace = true rayon = { workspace = true, optional = true } serde.workspace = true +sumcheck = { path = "../sumcheck" } transcript = { path = "../transcript" } whir = { path = "../whir", features = ["ceno"] } zeroize = "1.8" diff --git a/mpcs/src/lib.rs b/mpcs/src/lib.rs index e9a3c31fc..6f72269a6 100644 --- a/mpcs/src/lib.rs +++ b/mpcs/src/lib.rs @@ -1,12 +1,14 @@ #![deny(clippy::cargo)] use ff_ext::ExtensionField; -use itertools::Itertools; -use multilinear_extensions::mle::{DenseMultilinearExtension, FieldType, MultilinearExtension}; +use itertools::{Either, Itertools}; +use multilinear_extensions::{mle::{DenseMultilinearExtension, FieldType, MultilinearExtension}, virtual_poly::{build_eq_x_r, eq_eval, VPAuxInfo}}; use serde::{Serialize, de::DeserializeOwned}; use std::fmt::Debug; use transcript::{BasicTranscript, Transcript}; use util::hash::Digest; use p3_field::PrimeCharacteristicRing; +use multilinear_extensions::virtual_poly::VirtualPolynomial; +use sumcheck::structs::{IOPProof, IOPProverState, IOPVerifierState}; pub mod sum_check; pub mod util; @@ -334,21 +336,50 @@ pub fn pcs_batch_open_diff_size, points: &[Vec], - evals: &[E], + _evals: &[E], transcript: &mut impl Transcript, -) -> Result<(Pcs::Proof, Option), Error> { +) -> Result<(IOPProof, Vec, Pcs::Proof, Option), Error> { + assert_eq!(polys.len(), points.len()); // TODO: Sort the polys by 
decreasing size + let arc_polys: Vec> = polys.into_iter().map(|p| ArcMultilinearExtension::from(p.clone())).collect(); + // UNIFY SUMCHECK + // Sample random coefficients for each poly + let unify_coeffs = transcript.sample_vec(polys.len()); + // First convert each point into EQ + let eq_points = points.iter().map(|p| build_eq_x_r(p)).collect::>(); + + let mut sumcheck_poly = VirtualPolynomial::::new(polys[0].num_vars()); + for ((eq, poly), coeff) in eq_points.into_iter().zip(arc_polys).zip(unify_coeffs) { + let claim = match (&poly.evaluations(), &eq.evaluations) { + (FieldType::Base(p), FieldType::Ext(e)) => { + p.iter().zip(e).map(|(p, e)| E::from_bases(&[*p, E::BaseField::ZERO]) * *e).fold(E::ZERO, |s, i| s + i) + } + _ => unreachable!() + }; + println!("C: {:?}", claim); + sumcheck_poly.add_mle_list(vec![eq, poly], coeff); + } + let (unify_proof, unify_prover_state) = IOPProverState::prove_batch_polys(1, vec![sumcheck_poly], transcript); + let packed_point = unify_proof.point.clone(); + // sumcheck_poly is consisted of [eq, poly, eq, poly, ...], we only need the evaluations to `poly` here + let sumcheck_evals = unify_prover_state.get_mle_final_evaluations(); + let (_, evals): (Vec<_>, Vec<_>) = sumcheck_evals.into_iter().enumerate().partition_map(|(i, e)| { + if i % 2 == 0 { + Either::Left(e) + } else { + Either::Right(e) + } + }); + + // GEN & EVAL PACK POLYS // TODO: The prover should be able to avoid packing the polys again let (packed_polys, final_poly, packed_comps, final_comp) = pack_poly_prover(polys); - // TODO: Add unifying sumcheck if the points do not match - // For now, assume that all polys are evaluated on the same points - let packed_point = points[0].clone(); + let packed_polys: Vec> = packed_polys.into_iter().map(|p| ArcMultilinearExtension::from(p)).collect(); // Note: the points are stored in reverse let final_point = if let Some(final_poly) = &final_poly { packed_point[..final_poly.num_vars].to_vec() } else { Vec::new() }; // Use comps to 
compute evals for packed polys from regular evals - let (packed_evals, final_eval) = compute_packed_eval(&packed_point, &final_point, evals, &packed_comps, &final_comp); + let (packed_evals, final_eval) = compute_packed_eval(&packed_point, &final_point, &evals, &packed_comps, &final_comp); - let packed_polys: Vec> = packed_polys.into_iter().map(|p| ArcMultilinearExtension::from(p)).collect(); let pack_proof = Pcs::simple_batch_open(pp, &packed_polys, packed_comm, &packed_point, &packed_evals, transcript)?; let final_proof = match (&final_poly, &final_comm, &final_eval) { (Some(final_poly), Some(final_comm), Some(final_eval)) => { @@ -357,7 +388,7 @@ pub fn pcs_batch_open_diff_size None, _ => unreachable!(), }; - Ok((pack_proof, final_proof)) + Ok((unify_proof, evals, pack_proof, final_proof)) } pub fn pcs_verify>( @@ -391,7 +422,9 @@ pub fn pcs_batch_verify_diff_size<'a, E: ExtensionField, Pcs: PolynomialCommitme packed_comm: &Pcs::Commitment, final_comm: &Option, points: &[Vec], - evals: &[E], + poly_evals: &[E], // Evaluation of polys on original points + unify_proof: &IOPProof, + unify_evals: &[E], // Evaluation of polys on unified points packed_proof: &Pcs::Proof, final_proof: &Option, transcript: &mut impl Transcript, @@ -399,6 +432,21 @@ pub fn pcs_batch_verify_diff_size<'a, E: ExtensionField, Pcs: PolynomialCommitme where Pcs::Commitment: 'a, { + assert_eq!(poly_num_vars.len(), points.len()); + assert_eq!(poly_evals.len(), points.len()); + // UNIFY SUMCHECK + // Sample random coefficients for each poly + let unify_coeffs = transcript.sample_vec(poly_num_vars.len()); + let claim = poly_evals.iter().zip(&unify_coeffs).map(|(e, c)| *e * *c).sum(); + let sumcheck_subclaim = IOPVerifierState::verify(claim, unify_proof, &VPAuxInfo { max_degree: 2, max_num_variables: poly_num_vars[0], phantom: Default::default() }, transcript); + let packed_point = sumcheck_subclaim.point.iter().map(|c| c.elements).collect::>(); + let claimed_eval = 
sumcheck_subclaim.expected_evaluation; + // Compute the evaluation of every EQ + let eq_evals = points.iter().map(|p| eq_eval(p, &packed_point[..p.len()])); + let expected_eval = eq_evals.zip(unify_evals).zip(unify_coeffs).map(|((eq, poly), coeff)| eq * *poly * coeff).sum(); + assert_eq!(claimed_eval, expected_eval); + + // VERIFY PACK POLYS // Replicate packing let (_, final_poly_num_vars, packed_comps, final_comp) = pack_poly_verifier(poly_num_vars); // TODO: Add unifying sumcheck if the points do not match @@ -406,7 +454,7 @@ where let packed_point = points[0].clone(); let final_point = if let Some(final_poly_num_vars) = &final_poly_num_vars { packed_point[..*final_poly_num_vars].to_vec() } else { Vec::new() }; // Use comps to compute evals for packed polys from regular evals - let (packed_evals, final_eval) = compute_packed_eval(&packed_point, &final_point, evals, &packed_comps, &final_comp); + let (packed_evals, final_eval) = compute_packed_eval(&packed_point, &final_point, unify_evals, &packed_comps, &final_comp); Pcs::simple_batch_verify(vp, packed_comm, &packed_point, &packed_evals, packed_proof, transcript)?; match (&final_comm, &final_eval, &final_proof) { @@ -964,7 +1012,7 @@ pub mod test_util { assert!(max_num_vars > vars_gap * batch_size); let (pp, vp) = setup_pcs::(max_num_vars); - let (poly_num_vars, packed_comm, final_comm, evals, packed_proof, final_proof, challenge) = { + let (poly_num_vars, packed_comm, final_comm, poly_evals, unify_evals, unify_proof, packed_proof, final_proof, challenge) = { let mut transcript = BasicTranscript::new(b"BaseFold"); let polys: Vec> = (0..batch_size).map(|i| gen_rand_polys(|_| max_num_vars - i * vars_gap, 1, gen_rand_poly) @@ -975,12 +1023,14 @@ pub mod test_util { let evals = polys.iter().zip(&points).map(|(poly, point)| poly.evaluate(point)).collect_vec(); transcript.append_field_element_exts(&evals); - let (packed_proof, final_proof) = pcs_batch_open_diff_size::(&pp, &polys, &packed_comm, &final_comm, &points, 
&evals, &mut transcript).unwrap(); + let (unify_proof, unify_evals, packed_proof, final_proof) = pcs_batch_open_diff_size::(&pp, &polys, &packed_comm, &final_comm, &points, &evals, &mut transcript).unwrap(); ( polys.iter().map(|p| p.num_vars()).collect::>(), Pcs::get_pure_commitment(&packed_comm), if let Some(final_comm) = final_comm { Some(Pcs::get_pure_commitment(&final_comm)) } else { None }, evals, + unify_evals, + unify_proof, packed_proof, final_proof, transcript.read_challenge(), @@ -996,9 +1046,9 @@ pub mod test_util { let point = get_point_from_challenge(max_num_vars, &mut transcript); let points: Vec> = poly_num_vars.iter().map(|n| point[..*n].to_vec()).collect(); - transcript.append_field_element_exts(&evals); + transcript.append_field_element_exts(&poly_evals); - pcs_batch_verify_diff_size::(&vp, &poly_num_vars, &packed_comm, &final_comm, &points, &evals, &packed_proof, &final_proof, &mut transcript).unwrap(); + pcs_batch_verify_diff_size::(&vp, &poly_num_vars, &packed_comm, &final_comm, &points, &poly_evals, &unify_proof, &unify_evals, &packed_proof, &final_proof, &mut transcript).unwrap(); let v_challenge = transcript.read_challenge(); assert_eq!(challenge, v_challenge); diff --git a/mpcs/src/whir.rs b/mpcs/src/whir.rs index cb04c3649..beb161ae7 100644 --- a/mpcs/src/whir.rs +++ b/mpcs/src/whir.rs @@ -266,7 +266,7 @@ mod tests { gen_rand_poly, 20, 3, - 5, + 3, ); } } diff --git a/sumcheck/src/prover.rs b/sumcheck/src/prover.rs index d554b7858..5a81cb735 100644 --- a/sumcheck/src/prover.rs +++ b/sumcheck/src/prover.rs @@ -439,6 +439,9 @@ impl<'a, E: ExtensionField> IOPProverState<'a, E> { _ => unimplemented!("do not support degree {} > 5", products.len()), }; exit_span!(span); + if self.round == 1 { + println!("SUM: {:?}", (sum[0] + sum[1])); + } sum.iter_mut().for_each(|sum| *sum *= *coefficient); let span = entered_span!("extrapolation"); From ce7058145b5c2c5228b0d61b581c75cca6bb10d6 Mon Sep 17 00:00:00 2001 From: Kunming Jiang Date: Tue, 18 Mar 2025 
11:09:17 -0400 Subject: [PATCH 5/7] Rudimentary batch_commit_diff_size --- mpcs/src/lib.rs | 47 ++++++++++++++++++++++-------------------- sumcheck/src/prover.rs | 3 --- 2 files changed, 25 insertions(+), 25 deletions(-) diff --git a/mpcs/src/lib.rs b/mpcs/src/lib.rs index 6f72269a6..9777616d6 100644 --- a/mpcs/src/lib.rs +++ b/mpcs/src/lib.rs @@ -277,6 +277,8 @@ fn compute_packed_eval( } } +// Batch the polynomials into pack_poly and final_poly +// Returns the commitment to both (if exist) pub fn pcs_batch_commit_diff_size>( pack_pp: &Pcs::ProverParam, final_pp: &Option, @@ -336,34 +338,31 @@ pub fn pcs_batch_open_diff_size, points: &[Vec], - _evals: &[E], + _poly_evals: &[E], transcript: &mut impl Transcript, ) -> Result<(IOPProof, Vec, Pcs::Proof, Option), Error> { assert_eq!(polys.len(), points.len()); - // TODO: Sort the polys by decreasing size - let arc_polys: Vec> = polys.into_iter().map(|p| ArcMultilinearExtension::from(p.clone())).collect(); + // Assert that the poly are sorted in decreasing size + for i in 0..polys.len() - 1 { + assert!(polys[i].num_vars >= polys[i + 1].num_vars); + } // UNIFY SUMCHECK // Sample random coefficients for each poly let unify_coeffs = transcript.sample_vec(polys.len()); - // First convert each point into EQ + // Convert each point into EQ let eq_points = points.iter().map(|p| build_eq_x_r(p)).collect::>(); - + // Perform sumcheck + let arc_polys: Vec> = polys.into_iter().map(|p| ArcMultilinearExtension::from(p.clone())).collect(); let mut sumcheck_poly = VirtualPolynomial::::new(polys[0].num_vars()); for ((eq, poly), coeff) in eq_points.into_iter().zip(arc_polys).zip(unify_coeffs) { - let claim = match (&poly.evaluations(), &eq.evaluations) { - (FieldType::Base(p), FieldType::Ext(e)) => { - p.iter().zip(e).map(|(p, e)| E::from_bases(&[*p, E::BaseField::ZERO]) * *e).fold(E::ZERO, |s, i| s + i) - } - _ => unreachable!() - }; - println!("C: {:?}", claim); sumcheck_poly.add_mle_list(vec![eq, poly], coeff); } let (unify_proof, 
unify_prover_state) = IOPProverState::prove_batch_polys(1, vec![sumcheck_poly], transcript); + // Obtain new point and evals let packed_point = unify_proof.point.clone(); - // sumcheck_poly is consisted of [eq, poly, eq, poly, ...], we only need the evaluations to `poly` here + // sumcheck_poly is consisted of [eq, poly, eq, poly, ...], we only need the evaluations to the `poly`s here let sumcheck_evals = unify_prover_state.get_mle_final_evaluations(); - let (_, evals): (Vec<_>, Vec<_>) = sumcheck_evals.into_iter().enumerate().partition_map(|(i, e)| { + let (_, unify_evals): (Vec<_>, Vec<_>) = sumcheck_evals.into_iter().enumerate().partition_map(|(i, e)| { if i % 2 == 0 { Either::Left(e) } else { @@ -377,8 +376,8 @@ pub fn pcs_batch_open_diff_size> = packed_polys.into_iter().map(|p| ArcMultilinearExtension::from(p)).collect(); // Note: the points are stored in reverse let final_point = if let Some(final_poly) = &final_poly { packed_point[..final_poly.num_vars].to_vec() } else { Vec::new() }; - // Use comps to compute evals for packed polys from regular evals - let (packed_evals, final_eval) = compute_packed_eval(&packed_point, &final_point, &evals, &packed_comps, &final_comp); + // Use comps to compute evals for packed polys from unify evals + let (packed_evals, final_eval) = compute_packed_eval(&packed_point, &final_point, &unify_evals, &packed_comps, &final_comp); let pack_proof = Pcs::simple_batch_open(pp, &packed_polys, packed_comm, &packed_point, &packed_evals, transcript)?; let final_proof = match (&final_poly, &final_comm, &final_eval) { @@ -388,7 +387,7 @@ pub fn pcs_batch_open_diff_size None, _ => unreachable!(), }; - Ok((unify_proof, evals, pack_proof, final_proof)) + Ok((unify_proof, unify_evals, pack_proof, final_proof)) } pub fn pcs_verify>( @@ -434,11 +433,18 @@ where { assert_eq!(poly_num_vars.len(), points.len()); assert_eq!(poly_evals.len(), points.len()); + // Assert that the poly are sorted in decreasing size + for i in 0..poly_num_vars.len() - 1 
{ + assert!(poly_num_vars[i] >= poly_num_vars[i + 1]); + } // UNIFY SUMCHECK + let max_num_vars = poly_num_vars[0]; // Sample random coefficients for each poly let unify_coeffs = transcript.sample_vec(poly_num_vars.len()); - let claim = poly_evals.iter().zip(&unify_coeffs).map(|(e, c)| *e * *c).sum(); - let sumcheck_subclaim = IOPVerifierState::verify(claim, unify_proof, &VPAuxInfo { max_degree: 2, max_num_variables: poly_num_vars[0], phantom: Default::default() }, transcript); + // Claim is obtained as eval * coeff * (1 << (max_num_vars - num_vars)) due to scaling factor: see prove_round_and_update_state in sumcheck/src/prover.rs + let claim = poly_evals.iter().zip(&unify_coeffs).zip(poly_num_vars).map(|((e, c), n)| *e * *c * E::from_u64(1 << max_num_vars - n)).sum(); + let sumcheck_subclaim = IOPVerifierState::verify(claim, unify_proof, &VPAuxInfo { max_degree: 2, max_num_variables: max_num_vars, phantom: Default::default() }, transcript); + // Obtain new point and evals let packed_point = sumcheck_subclaim.point.iter().map(|c| c.elements).collect::>(); let claimed_eval = sumcheck_subclaim.expected_evaluation; // Compute the evaluation of every EQ @@ -449,9 +455,6 @@ where // VERIFY PACK POLYS // Replicate packing let (_, final_poly_num_vars, packed_comps, final_comp) = pack_poly_verifier(poly_num_vars); - // TODO: Add unifying sumcheck if the points do not match - // For now, assume that all polys are evaluated on the same points - let packed_point = points[0].clone(); let final_point = if let Some(final_poly_num_vars) = &final_poly_num_vars { packed_point[..*final_poly_num_vars].to_vec() } else { Vec::new() }; // Use comps to compute evals for packed polys from regular evals let (packed_evals, final_eval) = compute_packed_eval(&packed_point, &final_point, unify_evals, &packed_comps, &final_comp); diff --git a/sumcheck/src/prover.rs b/sumcheck/src/prover.rs index 5a81cb735..d554b7858 100644 --- a/sumcheck/src/prover.rs +++ b/sumcheck/src/prover.rs @@ -439,9 
+439,6 @@ impl<'a, E: ExtensionField> IOPProverState<'a, E> { _ => unimplemented!("do not support degree {} > 5", products.len()), }; exit_span!(span); - if self.round == 1 { - println!("SUM: {:?}", (sum[0] + sum[1])); - } sum.iter_mut().for_each(|sum| *sum *= *coefficient); let span = entered_span!("extrapolation"); From 1c504a522cd3d922152ee263f16dd1f563b98066 Mon Sep 17 00:00:00 2001 From: Kunming Jiang Date: Sun, 30 Mar 2025 13:54:03 -0400 Subject: [PATCH 6/7] Much simpler interleaving method --- mpcs/src/lib.rs | 87 +++++++++++++++++++++++++++++++++++++++++-------- 1 file changed, 73 insertions(+), 14 deletions(-) diff --git a/mpcs/src/lib.rs b/mpcs/src/lib.rs index f80ac2dbe..2a6bc6cbd 100644 --- a/mpcs/src/lib.rs +++ b/mpcs/src/lib.rs @@ -77,6 +77,8 @@ fn compute_binary_with_length(length: usize, mut value: usize) -> Vec { bin } +/* Old Interleaving Approach for bookkeeping + // Given the sizes of a list of polys sorted in decreasing order, // Compute which list each entry of their interleaved form belong to // e.g.: [4, 2, 1, 1] => [0, 1, 0, 2, 0, 1, 0, 3] @@ -165,6 +167,61 @@ fn interleave_polys( } DenseMultilinearExtension { num_vars: interleaved_num_vars, evaluations: interleaved_evaluations } } +*/ + +// Interleave the polys give their position on the binary tree +// Assume the polys are sorted by decreasing size +// Denote: N - size of the interleaved poly; M - num of polys +// This function performs interleave in O(M) + O(N) time and is *potentially* parallelizable (maybe? idk) +fn interleave_polys( + polys: Vec<&DenseMultilinearExtension>, + comps: &Vec>, +) -> DenseMultilinearExtension { + assert!(polys.len() > 0); + let sizes: Vec = polys.iter().map(|p| p.evaluations.len()).collect(); + let interleaved_size = sizes.iter().sum::().next_power_of_two(); + let interleaved_num_vars = interleaved_size.ilog2() as usize; + // Initialize the interleaved poly + // Is there a better way to deal with field types? 
+ let mut interleaved_evaluations = match polys[0].evaluations { + FieldType::Base(_) => FieldType::Base(vec![E::BaseField::ZERO; interleaved_size]), + FieldType::Ext(_) => FieldType::Ext(vec![E::ZERO; interleaved_size]), + _ => unreachable!() + }; + // For every poly, determine its: + // * Start: where's its first entry in the interleaved poly? + // * Gap: how many entires are between its consecutive entries in the interleaved poly? + // Then fill in the corresponding entries in the interleaved poly + for (poly, comp) in polys.iter().zip(comps) { + // Start is the decimal representation of the inverse of comp + let mut start = 0; + let mut pow_2 = 1; + for b in comp { + start += if *b { pow_2 } else { 0 }; + pow_2 *= 2; + } + // Gap is 2 ** (interleaved_num_vars - poly_num_vars) + let gap = 1 << (interleaved_num_vars - poly.num_vars); + // Fill in the blank + match (&mut interleaved_evaluations, &poly.evaluations) { + (FieldType::Base(ie), FieldType::Base(pe)) => { + for (i, e) in pe.iter().enumerate() { + ie[start + gap * i] = *e; + } + } + (FieldType::Ext(ie), FieldType::Ext(pe)) => { + for (i, e) in pe.iter().enumerate() { + ie[start + gap * i] = *e; + } + } + (a, b) => panic!( + "do not support merge different field type DME a: {:?} b: {:?}", + a, b + ), + } + } + DenseMultilinearExtension { num_vars: interleaved_num_vars, evaluations: interleaved_evaluations } +} // Pack polynomials of different sizes into the same, returns // 0: A list of packed polys @@ -239,8 +296,10 @@ fn pack_poly_prover( } } // Interleave every poly - let mut packed_polys: Vec<_> = packed_polys.into_iter().map(|ps| interleave_polys(ps)).collect(); - let next_packed_poly = interleave_polys(next_packed_poly); + let mut packed_polys: Vec<_> = packed_polys.into_iter().zip(&packed_comps).map(|(ps, pc)| + interleave_polys(ps, pc) + ).collect(); + let next_packed_poly = interleave_polys(next_packed_poly, &next_packed_comp); // Final packed poly if next_packed_poly.num_vars == max_poly_num_vars 
{ @@ -1119,7 +1178,7 @@ pub mod test_util { { use crate::{pcs_batch_commit_diff_size_and_write, pcs_batch_open_diff_size, pcs_batch_verify_diff_size}; - for vars_gap in 1..=max_vars_gap { + for vars_gap in 0..=max_vars_gap { println!("GAP: {vars_gap}"); assert!(max_num_vars > vars_gap * batch_size); let (pp, vp) = setup_pcs::(max_num_vars); @@ -1182,19 +1241,19 @@ mod test { use p3_field::PrimeCharacteristicRing; use p3_goldilocks::Goldilocks; - use crate::interleave_pattern; + // use crate::interleave_pattern; type E = GoldilocksExt2; - #[test] - fn test_interleave() { - let poly_num_vars = [vec![27, 26, 25, 25], vec![4, 4, 4, 4, 4], vec![8], vec![23, 23, 19, 13]]; - for num_vars in poly_num_vars { - println!("NUM_VARS: {:?}", num_vars); - let sizes = num_vars.iter().map(|n| 2_i32.pow(*n) as usize).collect(); - let interleaved_indices = interleave_pattern(sizes); - println!("INDICES: {:?}", interleaved_indices); - } - } + // #[test] + // fn test_interleave() { + // let poly_num_vars = [vec![27, 26, 25, 25], vec![4, 4, 4, 4, 4], vec![8], vec![23, 23, 19, 13]]; + // for num_vars in poly_num_vars { + // println!("NUM_VARS: {:?}", num_vars); + // let sizes = num_vars.iter().map(|n| 2_i32.pow(*n) as usize).collect(); + // let interleaved_indices = interleave_pattern(sizes); + // println!("INDICES: {:?}", interleaved_indices); + // } + // } #[test] fn test_packing() { From 3d8b8b93c65e11221e15b7cbfd0cd91c7f029f80 Mon Sep 17 00:00:00 2001 From: Kunming Jiang Date: Mon, 31 Mar 2025 15:21:41 -0400 Subject: [PATCH 7/7] Added parallel version for interleave --- mpcs/src/lib.rs | 85 +++++++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 83 insertions(+), 2 deletions(-) diff --git a/mpcs/src/lib.rs b/mpcs/src/lib.rs index 2a6bc6cbd..db10bc1f7 100644 --- a/mpcs/src/lib.rs +++ b/mpcs/src/lib.rs @@ -1,6 +1,6 @@ #![deny(clippy::cargo)] use ff_ext::ExtensionField; -use itertools::{interleave, Either, Itertools}; +use itertools::{Either, Itertools}; use 
multilinear_extensions::{mle::{DenseMultilinearExtension, FieldType, MultilinearExtension}, virtual_poly::{build_eq_x_r, eq_eval, VPAuxInfo}}; use serde::{Serialize, de::DeserializeOwned}; use std::fmt::Debug; @@ -10,6 +10,8 @@ use p3_field::PrimeCharacteristicRing; use multilinear_extensions::virtual_poly::VirtualPolynomial; use sumcheck::structs::{IOPProof, IOPProverState, IOPVerifierState}; use witness::RowMajorMatrix; +#[cfg(feature = "parallel")] +use rayon::prelude::*; pub mod sum_check; pub mod util; @@ -172,7 +174,8 @@ fn interleave_polys( // Interleave the polys give their position on the binary tree // Assume the polys are sorted by decreasing size // Denote: N - size of the interleaved poly; M - num of polys -// This function performs interleave in O(M) + O(N) time and is *potentially* parallelizable (maybe? idk) +// This function performs interleave in O(M) + O(N) time +#[cfg(not(feature = "parallel"))] fn interleave_polys( polys: Vec<&DenseMultilinearExtension>, comps: &Vec>, @@ -223,6 +226,84 @@ fn interleave_polys( DenseMultilinearExtension { num_vars: interleaved_num_vars, evaluations: interleaved_evaluations } } +// Parallel version: divide interleaved_evaluation into chunks +#[cfg(feature = "parallel")] +fn interleave_polys( + polys: Vec<&DenseMultilinearExtension>, + comps: &Vec>, +) -> DenseMultilinearExtension { + use std::cmp::min; + + assert!(polys.len() > 0); + let sizes: Vec = polys.iter().map(|p| p.evaluations.len()).collect(); + let interleaved_size = sizes.iter().sum::().next_power_of_two(); + let interleaved_num_vars = interleaved_size.ilog2() as usize; + + // Compute Start and Gap for each poly + // * Start: where's its first entry in the interleaved poly? + // * Gap: how many entires are between its consecutive entries in the interleaved poly? 
+ let start_list: Vec = comps.iter().map(|comp| { + let mut start = 0; + let mut pow_2 = 1; + for b in comp { + start += if *b { pow_2 } else { 0 }; + pow_2 *= 2; + } + start + }).collect(); + let gap_list: Vec = polys.iter().map(|poly| + 1 << (interleaved_num_vars - poly.num_vars) + ).collect(); + // Minimally each chunk needs one entry from the smallest poly + let num_chunks = min(rayon::current_num_threads().next_power_of_two(), sizes[sizes.len() - 1]); + let interleaved_chunk_size = interleaved_size / num_chunks; + // Length of the poly each thread processes + let poly_chunk_size: Vec = sizes.iter().map(|s| s / num_chunks).collect(); + + // Initialize the interleaved poly + // Is there a better way to deal with field types? + let interleaved_evaluations = match polys[0].evaluations { + FieldType::Base(_) => { + let mut interleaved_eval = vec![E::BaseField::ZERO; interleaved_size]; + interleaved_eval.par_chunks_exact_mut(interleaved_chunk_size).enumerate().for_each(|(i, chunk)| { + for (p, poly) in polys.iter().enumerate() { + match &poly.evaluations { + FieldType::Base(pe) => { + // Each thread processes a chunk of pe + for (j, e) in pe[i * poly_chunk_size[p]..(i+1) * poly_chunk_size[p]].iter().enumerate() { + chunk[start_list[p] + gap_list[p] * j] = *e; + } + } + b => panic!("do not support merge BASE field type with b: {:?}", b) + } + } + }); + FieldType::Base(interleaved_eval) + } + FieldType::Ext(_) => { + let mut interleaved_eval = vec![E::ZERO; interleaved_size]; + interleaved_eval.par_chunks_exact_mut(interleaved_chunk_size).enumerate().for_each(|(i, chunk)| { + for (p, poly) in polys.iter().enumerate() { + match &poly.evaluations { + FieldType::Ext(pe) => { + // Each thread processes a chunk of pe + for (j, e) in pe[i * poly_chunk_size[p]..(i+1) * poly_chunk_size[p]].iter().enumerate() { + chunk[start_list[p] + gap_list[p] * j] = *e; + } + } + b => panic!("do not support merge EXT field type with b: {:?}", b) + } + } + }); + FieldType::Ext(interleaved_eval) + } + _ 
=> unreachable!() + }; + + DenseMultilinearExtension { num_vars: interleaved_num_vars, evaluations: interleaved_evaluations } +} + + // Pack polynomials of different sizes into the same, returns // 0: A list of packed polys // 1: The final packed poly, if of different size