Skip to content

Commit

Permalink
refactor!: remove use of Serialize from QueryProof
Browse files Browse the repository at this point in the history
  • Loading branch information
JayWhite2357 committed Dec 6, 2024
1 parent c6c12b0 commit 9ac21cc
Show file tree
Hide file tree
Showing 7 changed files with 297 additions and 92 deletions.
8 changes: 8 additions & 0 deletions crates/proof-of-sql/src/base/proof/keccak256_transcript.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
use super::{transcript_core::TranscriptCore, Transcript};
use core::mem::replace;
use serde::Serialize;
use tiny_keccak::{Hasher, Keccak};

#[allow(dead_code)]
Expand Down Expand Up @@ -36,6 +37,13 @@ impl TranscriptCore for Keccak256Transcript {
result
}
}
impl<S: Serialize + ?Sized> From<&S> for Keccak256Transcript {
    /// Builds a fresh transcript seeded with the `postcard` encoding of `value`,
    /// absorbed as little-endian raw bytes.
    ///
    /// # Panics
    /// Panics if `postcard::to_allocvec(value)` fails to serialize the value.
    fn from(value: &S) -> Self {
        // Serialize first so the byte slice outlives the extend call.
        let encoded = postcard::to_allocvec(value).unwrap();
        let mut fresh: Self = Transcript::new();
        fresh.extend_as_le_from_refs([encoded.as_slice()]);
        fresh
    }
}

#[cfg(test)]
mod tests {
Expand Down
29 changes: 7 additions & 22 deletions crates/proof-of-sql/src/base/proof/transcript.rs
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,13 @@ pub trait Transcript {
fn new() -> Self;
/// Appends the provided messages by appending the reversed raw bytes (i.e. assuming the message is bigendian)
fn extend_as_be<M: FromBytes + AsBytes>(&mut self, messages: impl IntoIterator<Item = M>);
/// Appends the provided messages by appending the reversed raw bytes (i.e. assuming the message is bigendian)
///
/// Convenience wrapper over [`Self::extend_as_be`] for iterators yielding
/// *references*; each `&M` is dereferenced (cheap, since `M: Copy`) before
/// being forwarded, so callers can pass e.g. a slice iterator directly.
fn extend_as_be_from_refs<'a, M: FromBytes + AsBytes + 'a + Copy>(
    &mut self,
    messages: impl IntoIterator<Item = &'a M>,
) {
    let owned = messages.into_iter().map(|&message| message);
    self.extend_as_be(owned);
}
/// Appends the provided messages by appending the raw bytes (i.e. assuming the message is littleendian)
fn extend_as_le<M: AsBytes>(&mut self, messages: impl IntoIterator<Item = M>);
/// Appends the provided messages by appending the raw bytes (i.e. assuming the message is littleendian)
Expand All @@ -30,13 +37,6 @@ pub trait Transcript {
/// Request a challenge. Returns the raw, unreversed, bytes. (i.e. littleendian form)
fn challenge_as_le(&mut self) -> [u8; 32];

/// Appends a type that implements [`serde::Serialize`] by appending the raw bytes (i.e. assuming the message is littleendian)
///
/// # Panics
/// - Panics if `postcard::to_allocvec(message)` fails to serialize the message.
fn extend_serialize_as_le(&mut self, message: &(impl serde::Serialize + ?Sized)) {
self.extend_as_le_from_refs([postcard::to_allocvec(message).unwrap().as_slice()]);
}
/// Appends a type that implements [`ark_serialize::CanonicalSerialize`] by appending the raw bytes (i.e. assuming the message is littleendian)
///
/// # Panics
Expand Down Expand Up @@ -66,21 +66,6 @@ mod tests {
use crate::base::proof::Keccak256Transcript;
use alloc::{string::ToString, vec};

#[test]
fn we_can_extend_transcript_with_serialize() {
let mut transcript1: Keccak256Transcript = Transcript::new();
let mut transcript2: Keccak256Transcript = Transcript::new();

transcript1.extend_serialize_as_le(&(123, vec!["hi", "there"]));
transcript2.extend_serialize_as_le(&(123, vec!["hi", "there"]));

assert_eq!(transcript1.challenge_as_le(), transcript2.challenge_as_le());

transcript2.extend_serialize_as_le(&234.567);

assert_ne!(transcript1.challenge_as_le(), transcript2.challenge_as_le());
}

#[test]
fn we_can_extend_transcript_with_canonical_serialize() {
let mut transcript1: Keccak256Transcript = Transcript::new();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,7 @@ fn test_create_verify_proof() {

// we return a different evaluation point if we start with a different transcript
let mut transcript = Transcript::new(b"sumchecktest");
transcript.extend_serialize_as_le(&123u64);
transcript.extend_as_le([123u64]);
let subclaim = proof
.verify_without_evaluation(
&mut transcript,
Expand Down Expand Up @@ -197,7 +197,7 @@ fn we_can_verify_many_random_test_cases() {
);

let mut transcript = Transcript::new(b"sumchecktest");
transcript.extend_serialize_as_le(&123u64);
transcript.extend_as_le([123u64]);
let verify_result = proof.verify_without_evaluation(
&mut transcript,
CompositePolynomialInfo {
Expand Down
97 changes: 63 additions & 34 deletions crates/proof-of-sql/src/sql/proof/query_proof.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,17 +7,19 @@ use crate::{
bit::BitDistribution,
commitment::{Commitment, CommitmentEvaluationProof},
database::{
ColumnRef, CommitmentAccessor, DataAccessor, MetadataAccessor, Table, TableRef,
ColumnRef, CommitmentAccessor, DataAccessor, MetadataAccessor, OwnedColumn, OwnedTable,
Table, TableRef,
},
map::{IndexMap, IndexSet},
math::log2_up,
polynomial::{compute_evaluation_vector, CompositePolynomialInfo},
proof::{Keccak256Transcript, ProofError, Transcript},
proof::{ProofError, Transcript},
scalar::Scalar,
},
proof_primitive::sumcheck::SumcheckProof,
sql::proof::{FirstRoundBuilder, QueryData},
};
use alloc::{vec, vec::Vec};
use alloc::{string::String, vec, vec::Vec};
use bumpalo::Bump;
use core::cmp;
use num_traits::Zero;
Expand Down Expand Up @@ -70,7 +72,8 @@ impl<CP: CommitmentEvaluationProof> QueryProof<CP> {
/// Create a new `QueryProof`.
#[tracing::instrument(name = "QueryProof::new", level = "debug", skip_all)]
pub fn new(
expr: &(impl ProofPlan + Serialize),
expr: &impl ProofPlan,
transcript: &mut impl Transcript,
accessor: &impl DataAccessor<CP::Scalar>,
setup: &CP::ProverPublicSetup<'_>,
) -> (Self, ProvableQueryResult) {
Expand All @@ -95,6 +98,7 @@ impl<CP: CommitmentEvaluationProof> QueryProof<CP> {
// Prover First Round: Evaluate the query && get the right number of post result challenges
let mut first_round_builder = FirstRoundBuilder::new();
let query_result = expr.first_round_evaluate(&mut first_round_builder, &alloc, &table_map);
let owned_table_result = OwnedTable::from(&query_result);
let provable_result = query_result.into();
let one_evaluation_lengths = first_round_builder.one_evaluation_lengths();

Expand All @@ -109,9 +113,9 @@ impl<CP: CommitmentEvaluationProof> QueryProof<CP> {
assert!(num_sumcheck_variables > 0);

// construct a transcript for the proof
let mut transcript: Keccak256Transcript = make_transcript(
expr,
&provable_result,
first_round_extend_transcript(
transcript,
&owned_table_result,
range_length,
min_row_num,
one_evaluation_lengths,
Expand Down Expand Up @@ -141,7 +145,7 @@ impl<CP: CommitmentEvaluationProof> QueryProof<CP> {
let commitments = builder.commit_intermediate_mles(min_row_num, setup);

// add the commitments, bit distributions and one evaluation lengths to the proof
extend_transcript(&mut transcript, &commitments, builder.bit_distributions());
final_round_extend_transcript(transcript, &commitments, builder.bit_distributions());

// construct the sumcheck polynomial
let num_random_scalars = num_sumcheck_variables + builder.num_sumcheck_subpolynomials();
Expand All @@ -157,7 +161,7 @@ impl<CP: CommitmentEvaluationProof> QueryProof<CP> {

// create the sumcheck proof -- this is the main part of proving a query
let mut evaluation_point = vec![Zero::zero(); poly.num_variables];
let sumcheck_proof = SumcheckProof::create(&mut transcript, &mut evaluation_point, &poly);
let sumcheck_proof = SumcheckProof::create(transcript, &mut evaluation_point, &poly);

// evaluate the MLEs used in sumcheck except for the result columns
let mut evaluation_vec = vec![Zero::zero(); range_length];
Expand All @@ -182,7 +186,7 @@ impl<CP: CommitmentEvaluationProof> QueryProof<CP> {

// finally, form the inner product proof of the MLEs' evaluations
let evaluation_proof = CP::new(
&mut transcript,
transcript,
&folded_mle,
&evaluation_point,
min_row_num as u64,
Expand All @@ -205,7 +209,8 @@ impl<CP: CommitmentEvaluationProof> QueryProof<CP> {
/// Verify a `QueryProof`. Note: This does NOT transform the result!
pub fn verify(
self,
expr: &(impl ProofPlan + Serialize),
expr: &impl ProofPlan,
transcript: &mut impl Transcript,
accessor: &impl CommitmentAccessor<CP::Commitment>,
result: ProvableQueryResult,
setup: &CP::VerifierPublicSetup<'_>,
Expand Down Expand Up @@ -241,9 +246,9 @@ impl<CP: CommitmentEvaluationProof> QueryProof<CP> {
}

// construct a transcript for the proof
let mut transcript: Keccak256Transcript = make_transcript(
expr,
&result,
first_round_extend_transcript(
transcript,
&owned_table_result,
self.range_length,
min_row_num,
&self.one_evaluation_lengths,
Expand All @@ -260,7 +265,7 @@ impl<CP: CommitmentEvaluationProof> QueryProof<CP> {
.collect();

// add the commitments and bit distributions to the proof
extend_transcript(&mut transcript, &self.commitments, &self.bit_distributions);
final_round_extend_transcript(transcript, &self.commitments, &self.bit_distributions);

// draw the random scalars for sumcheck
let num_random_scalars = num_sumcheck_variables + counts.sumcheck_subpolynomials;
Expand All @@ -278,11 +283,9 @@ impl<CP: CommitmentEvaluationProof> QueryProof<CP> {
max_multiplicands: core::cmp::max(counts.sumcheck_max_multiplicands, 2),
num_variables: num_sumcheck_variables,
};
let subclaim = self.sumcheck_proof.verify_without_evaluation(
&mut transcript,
poly_info,
&Zero::zero(),
)?;
let subclaim =
self.sumcheck_proof
.verify_without_evaluation(transcript, poly_info, &Zero::zero())?;

// commit to mle evaluations
transcript.extend_canonical_serialize_as_le(&self.pcs_proof_evaluations);
Expand Down Expand Up @@ -363,7 +366,7 @@ impl<CP: CommitmentEvaluationProof> QueryProof<CP> {
let product = builder.folded_pcs_proof_evaluation();
self.evaluation_proof
.verify_batched_proof(
&mut transcript,
transcript,
&pcs_proof_commitments,
builder.inner_product_multipliers(),
&product,
Expand Down Expand Up @@ -414,29 +417,55 @@ impl<CP: CommitmentEvaluationProof> QueryProof<CP> {
/// This function extends the provided transcript. The transcript is a record
/// of all the operations and data involved in creating a proof.
/// ```
fn make_transcript<T: Transcript>(
expr: &(impl ProofPlan + Serialize),
result: &ProvableQueryResult,
fn first_round_extend_transcript<S: Scalar>(
transcript: &mut impl Transcript,
result: &OwnedTable<S>,
range_length: usize,
min_row_num: usize,
one_evaluation_lengths: &[usize],
) -> T {
let mut transcript = T::new();
transcript.extend_serialize_as_le(result);
transcript.extend_serialize_as_le(expr);
transcript.extend_serialize_as_le(&range_length);
transcript.extend_serialize_as_le(&min_row_num);
transcript.extend_serialize_as_le(one_evaluation_lengths);
transcript
) {
transcript.extend_as_le([range_length, min_row_num]);
transcript.extend_as_le_from_refs(one_evaluation_lengths);
for (name, column) in result.inner_table() {
transcript.extend_as_le_from_refs([name.as_str()]);
match column {
OwnedColumn::Boolean(col) => transcript.extend_as_be(col.iter().map(|&b| u8::from(b))),
OwnedColumn::TinyInt(col) => transcript.extend_as_be_from_refs(col),
OwnedColumn::SmallInt(col) => transcript.extend_as_be_from_refs(col),
OwnedColumn::Int(col) => transcript.extend_as_be_from_refs(col),
OwnedColumn::BigInt(col) => transcript.extend_as_be_from_refs(col),
OwnedColumn::VarChar(col) => {
transcript.extend_as_le_from_refs(col.iter().map(String::as_str));
}
OwnedColumn::Int128(col) => transcript.extend_as_be_from_refs(col),
OwnedColumn::Decimal75(precision, scale, col) => {
transcript.extend_as_be([precision.value()]);
transcript.extend_as_be([*scale]);
transcript.extend_as_be(col.iter().map(|&s| Into::<[u64; 4]>::into(s)));
}
OwnedColumn::Scalar(col) => {
transcript.extend_as_be(col.iter().map(|&s| Into::<[u64; 4]>::into(s)));
}
OwnedColumn::TimestampTZ(po_sqltime_unit, po_sqltime_zone, col) => {
transcript.extend_as_be([u64::from(*po_sqltime_unit)]);
transcript.extend_as_be([po_sqltime_zone.offset()]);
transcript.extend_as_be_from_refs(col);
}
}
}
}

fn extend_transcript<C: Commitment>(
fn final_round_extend_transcript<C: Commitment>(
transcript: &mut impl Transcript,
commitments: &[C],
bit_distributions: &[BitDistribution],
) {
for commitment in commitments {
commitment.append_to_transcript(transcript);
}
transcript.extend_serialize_as_le(bit_distributions);
transcript.extend_as_le_from_refs(
bit_distributions
.iter()
.flat_map(|bd| [&bd.vary_mask, &bd.or_all]),
);
}
Loading

0 comments on commit 9ac21cc

Please sign in to comment.