From 431181440de10934348ed5c4e17c2404b9213e39 Mon Sep 17 00:00:00 2001 From: Ray Date: Sat, 8 Feb 2025 21:56:41 +0100 Subject: [PATCH 01/10] feat: add dory commitment scheme Signed-off-by: Ray --- Cargo.lock | 116 ++---- jolt-core/Cargo.toml | 2 +- jolt-core/src/poly/commitment/dory.rs | 142 ++++++++ jolt-core/src/poly/commitment/dory/error.rs | 30 ++ jolt-core/src/poly/commitment/dory/params.rs | 163 +++++++++ jolt-core/src/poly/commitment/dory/reduce.rs | 332 ++++++++++++++++++ jolt-core/src/poly/commitment/dory/scalar.rs | 101 ++++++ jolt-core/src/poly/commitment/dory/tests.rs | 52 +++ .../poly/commitment/dory/vec_operations.rs | 255 ++++++++++++++ jolt-core/src/poly/commitment/mod.rs | 1 + jolt-core/src/utils/errors.rs | 2 + 11 files changed, 1108 insertions(+), 88 deletions(-) create mode 100644 jolt-core/src/poly/commitment/dory.rs create mode 100644 jolt-core/src/poly/commitment/dory/error.rs create mode 100644 jolt-core/src/poly/commitment/dory/params.rs create mode 100644 jolt-core/src/poly/commitment/dory/reduce.rs create mode 100644 jolt-core/src/poly/commitment/dory/scalar.rs create mode 100644 jolt-core/src/poly/commitment/dory/tests.rs create mode 100644 jolt-core/src/poly/commitment/dory/vec_operations.rs diff --git a/Cargo.lock b/Cargo.lock index aefc84ab4..bb67d95c3 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -69,7 +69,7 @@ dependencies = [ "k256", "keccak-asm", "proptest", - "rand 0.8.5", + "rand", "ruint", "serde", "tiny-keccak", @@ -358,7 +358,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1df2c09229cbc5a028b1d70e00fdb2acee28b1055dfb5ca73eea49c5a25c4e7c" dependencies = [ "num-traits", - "rand 0.8.5", + "rand", ] [[package]] @@ -368,7 +368,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "94893f1e0c6eeab764ade8dc4c0db24caf4fe7cbbaafc0eba0a9030f447b5185" dependencies = [ "num-traits", - "rand 0.8.5", + "rand", "rayon", ] @@ -477,7 +477,7 @@ dependencies = [ "cfg-if", "derive_more", "p3-util", - "rand 0.8.5", + "rand", "rayon", "seq-macro", "subtle", @@ -890,7 +890,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0dc92fb57ca44df6db8059111ab3af99a63d5d0f8375d9972e319a379c6bab76" dependencies = [ "generic-array", - "rand_core 0.6.4", + "rand_core", "subtle", "zeroize", ] @@ -1031,7 +1031,7 @@ dependencies = [ "generic-array", "group", "pkcs8", - "rand_core 0.6.4", + "rand_core", "sec1", "subtle", "zeroize", @@ -1125,7 +1125,7 @@ version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ded41244b729663b1e574f1b4fb731469f69f79c17667b5d776b16cda0479449" dependencies = [ - "rand_core 0.6.4", + "rand_core", "subtle", ] @@ -1151,7 +1151,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "835c052cb0c08c1acf6ffd71c022172e18723949c8282f2b9f27efbc51e64534" dependencies = [ "byteorder", - "rand 0.8.5", + "rand", "rustc-hex", "static_assertions", ] @@ -1269,17 +1269,6 @@ dependencies = [ "zeroize", ] -[[package]] -name = "getrandom" -version = "0.1.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce" -dependencies = [ - "cfg-if", - "libc", - "wasi 0.9.0+wasi-snapshot-preview1", -] - [[package]] name = "getrandom" version = "0.2.15" @@ -1289,7 +1278,7 @@ dependencies = [ "cfg-if", "js-sys", "libc", - "wasi 0.11.0+wasi-snapshot-preview1", + "wasi", "wasm-bindgen", ] @@ -1312,7 +1301,7 @@ source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "f0f9ef7462f7c099f518d754361858f86d8a07af53ba9af0fe635bbccb151a63" dependencies = [ "ff", - "rand_core 0.6.4", + "rand_core", "subtle", ] @@ -1574,7 +1563,7 @@ dependencies = [ "hex", "icicle-runtime", "once_cell", - "rand 0.8.5", + "rand", "rayon", ] @@ -1586,7 +1575,7 @@ dependencies = [ "cmake", "icicle-core", "icicle-runtime", - "rand 0.8.5", + "rand", ] [[package]] @@ -1851,7 +1840,7 @@ dependencies = [ "eyre", "jolt-core", "jolt-sdk", - "rand 0.8.5", + "rand", "rmp-serde", "serde", "syn 1.0.109", @@ -1880,7 +1869,7 @@ dependencies = [ "enum_dispatch", "eyre", "fixedbitset", - "getrandom 0.2.15", + "getrandom", "iai-callgrind", "icicle-bn254", "icicle-core", @@ -1891,9 +1880,9 @@ dependencies = [ "num-integer", "once_cell", "postcard", - "rand 0.7.3", - "rand_chacha 0.3.1", - "rand_core 0.6.4", + "rand", + "rand_chacha", + "rand_core", "rayon", "reqwest", "serde", @@ -2119,7 +2108,7 @@ checksum = "80e04d1dcff3aae0704555fe5fee3bcfaf3d1fdf8a7e521d5b9d2b42acb52cec" dependencies = [ "hermit-abi 0.3.9", "libc", - "wasi 0.11.0+wasi-snapshot-preview1", + "wasi", "windows-sys 0.52.0", ] @@ -2575,8 +2564,8 @@ dependencies = [ "bitflags", "lazy_static", "num-traits", - "rand 0.8.5", - "rand_chacha 0.3.1", + "rand", + "rand_chacha", "rand_xorshift", "regex-syntax 0.8.5", "rusty-fork", @@ -2605,19 +2594,6 @@ version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09" -[[package]] -name = "rand" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03" -dependencies = [ - "getrandom 0.1.16", - "libc", - "rand_chacha 0.2.2", - "rand_core 0.5.1", - "rand_hc", -] - [[package]] name = "rand" version = "0.8.5" @@ -2625,18 +2601,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" dependencies = [ "libc", - "rand_chacha 0.3.1", - "rand_core 0.6.4", -] - -[[package]] -name = "rand_chacha" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402" -dependencies = [ - "ppv-lite86", - "rand_core 0.5.1", + "rand_chacha", + "rand_core", ] [[package]] @@ -2646,16 +2612,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" dependencies = [ "ppv-lite86", - "rand_core 0.6.4", -] - -[[package]] -name = "rand_core" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" -dependencies = [ - "getrandom 0.1.16", + "rand_core", ] [[package]] @@ -2664,16 +2621,7 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ - "getrandom 0.2.15", -] - -[[package]] -name = "rand_hc" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c" -dependencies = [ - "rand_core 0.5.1", + "getrandom", ] [[package]] @@ -2682,7 +2630,7 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"d25bf25ec5ae4a3f1b92f929810509a2f53d7dca2f50b794ff57e3face536c8f" dependencies = [ - "rand_core 0.6.4", + "rand_core", ] [[package]] @@ -2720,7 +2668,7 @@ version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ba009ff324d1fc1b900bd1fdb31564febe58a8ccc8a6fdbb93b543d33b13ca43" dependencies = [ - "getrandom 0.2.15", + "getrandom", "libredox", "thiserror", ] @@ -2831,7 +2779,7 @@ checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d" dependencies = [ "cc", "cfg-if", - "getrandom 0.2.15", + "getrandom", "libc", "spin", "untrusted", @@ -2886,7 +2834,7 @@ dependencies = [ "parity-scale-codec", "primitive-types", "proptest", - "rand 0.8.5", + "rand", "rlp", "ruint-macro", "serde", @@ -3279,7 +3227,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" dependencies = [ "digest 0.10.7", - "rand_core 0.6.4", + "rand_core", ] [[package]] @@ -3914,12 +3862,6 @@ dependencies = [ "try-lock", ] -[[package]] -name = "wasi" -version = "0.9.0+wasi-snapshot-preview1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" - [[package]] name = "wasi" version = "0.11.0+wasi-snapshot-preview1" diff --git a/jolt-core/Cargo.toml b/jolt-core/Cargo.toml index 7df78f525..d78f02a21 100644 --- a/jolt-core/Cargo.toml +++ b/jolt-core/Cargo.toml @@ -53,7 +53,7 @@ num-integer = "0.1.45" postcard = { version = "1.0.8", default-features = false, features = [ "use-std", ] } -rand = "0.7.3" +rand = "0.8.0" rand_chacha = { version = "0.3.0", default-features = false } rand_core = { version = "0.6.4", default-features = false } rayon = { version = "^1.8.0", optional = true } diff --git a/jolt-core/src/poly/commitment/dory.rs b/jolt-core/src/poly/commitment/dory.rs new file mode 100644 index 000000000..d92bc0f71 --- /dev/null +++ b/jolt-core/src/poly/commitment/dory.rs @@ -0,0 +1,142 @@ +use std::{marker::PhantomData, ops::Mul}; + +use ark_ec::pairing::{Pairing, PairingOutput}; +use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; +use error::Error; +use params::PublicParams; +use rayon::iter::IntoParallelIterator; +use rayon::iter::ParallelIterator; +use scalar::ScalarProof; +use scalar::{commit, Commitment, Witness}; +use vec_operations::{G1Vec, G2Vec}; + +use crate::utils::errors::ProofVerifyError; +use crate::{ + field::JoltField, + poly::multilinear_polynomial::MultilinearPolynomial, + utils::{ + math::Math, + transcript::{AppendToTranscript, Transcript}, + }, +}; + +use super::commitment_scheme::{CommitShape, CommitmentScheme}; + +mod error; +mod params; +mod reduce; +mod scalar; +mod vec_operations; + +#[cfg(test)] +mod tests; + +/// G1 +pub type G1 = ::G1; + +/// G2 +pub type G2 = ::G2; + +/// Cyclic group of integers modulo prime number r +/// +/// This is the Domain Set Z +pub type Zr = ::ScalarField; + +pub type Gt = PairingOutput; + +#[derive(Clone, Default)] +pub struct DoryScheme { + _data: PhantomData<(P, ProofTranscript)>, +} + +impl AppendToTranscript for Commitment
<P>
{ + fn append_to_transcript(&self, _transcript: &mut ProofTranscript) { + todo!() + } +} + +#[derive(CanonicalDeserialize, CanonicalSerialize)] +pub struct DoryBatchedProof; + +impl CommitmentScheme for DoryScheme +where + P: Pairing + Default, + ProofTranscript: Transcript, + G1
<P>
: Mul<Zr<P>, Output = G1<P>
>, + G2<P>
: Mul<Zr<P>, Output = G2<P>
>, + P::ScalarField: JoltField + Default, +{ + type Field = Zr<P>
; + type Setup = PublicParams<P>
; + type Commitment = Commitment<P>
; + type Proof = ScalarProof<P>
; + + type BatchedProof = DoryBatchedProof; + + fn setup(shapes: &[CommitShape]) -> Self::Setup { + // Dory's setup procedure initializes + let mut max_len: usize = 0; + for shape in shapes { + let len = shape.input_length.log_2(); + if len > max_len { + max_len = len; + } + } + let mut rng = ark_std::rand::thread_rng(); + PublicParams::new(&mut rng, max_len).expect("Length must be greater than 0") + } + + fn commit(poly: &MultilinearPolynomial, setup: &Self::Setup) -> Self::Commitment { + let MultilinearPolynomial::LargeScalars(poly) = poly else { + panic!("Expected LargeScalars polynomial"); + }; + let witness = Witness::new(setup, poly.evals_ref()); + commit(witness, setup).unwrap() + } + + fn batch_commit( + polys: &[&MultilinearPolynomial], + setup: &Self::Setup, + _batch_type: super::commitment_scheme::BatchType, + ) -> Vec { + polys + .into_par_iter() + .map(|poly| Self::commit(poly, setup)) + .collect() + } + + fn prove( + setup: &Self::Setup, + poly: &MultilinearPolynomial, + _opening_point: &[Self::Field], // point at which the polynomial is evaluated + _transcript: &mut ProofTranscript, + ) -> Self::Proof { + let MultilinearPolynomial::LargeScalars(poly) = poly else { + panic!("Expected LargeScalars polynomial"); + }; + let witness = Witness::new(setup, poly.evals_ref()); + ScalarProof::new(witness) + } + + fn verify( + proof: &Self::Proof, + setup: &Self::Setup, + _transcript: &mut ProofTranscript, + _opening_point: &[Self::Field], // point at which the polynomial is evaluated + _opening: &Self::Field, // evaluation \widetilde{Z}(r) + commitment: &Self::Commitment, + ) -> Result<(), ProofVerifyError> { + if proof.verify(setup, commitment).unwrap() { + Ok(()) + } else { + Err(ProofVerifyError::VerificationFailed) + } + } + + fn protocol_name() -> &'static [u8] { + b"dory" + } +} diff --git a/jolt-core/src/poly/commitment/dory/error.rs b/jolt-core/src/poly/commitment/dory/error.rs new file mode 100644 index 000000000..f9c300354 --- /dev/null +++ b/jolt-core/src/poly/commitment/dory/error.rs @@ -0,0 +1,30 @@ +#[derive(thiserror::Error, Debug)] +pub enum Error { + #[error("length mismatch")] + LengthMismatch, + #[error("empty vectors: {0:?}")] + EmptyVector(GType), + #[error("could not invert d")] + CouldntInvertD, + + #[error("serialization error {0}")] + Serialization(#[from] ark_serialize::SerializationError), + + #[error( + "recursive public parameters should be twice as the public parameters it is derived from" + )] + LengthNotTwice, + #[error("reduce params not initialized")] + ReduceParamsNotInitialized, + #[error("public params is empty")] + EmptyPublicParams, + + #[error("zr zero")] + ZrZero, +} + +#[derive(Debug)] +pub enum GType { + G1, + G2, +} diff --git a/jolt-core/src/poly/commitment/dory/params.rs b/jolt-core/src/poly/commitment/dory/params.rs new file mode 100644 index 000000000..b8f7123ff --- /dev/null +++ b/jolt-core/src/poly/commitment/dory/params.rs @@ -0,0 +1,163 @@ +use ark_ec::pairing::Pairing; +use ark_ff::UniformRand; +use ark_serialize::CanonicalSerialize; +use ark_std::rand::Rng; +use sha3::{Digest, Sha3_256}; + +use super::{Error, G1Vec, G2Vec, Gt, G1, G2}; + +#[derive(Clone)] +pub struct PublicParams { + pub g1v: G1Vec
<Curve>
, + pub g2v: G2Vec<Curve>
, + + pub x: Gt<Curve>
, + + pub reduce_pp: Option>, +} + +#[derive(Clone)] +pub struct ReducePublicParams { + pub gamma_1_prime: G1Vec
<Curve>
, + pub gamma_2_prime: G2Vec<Curve>
, + + pub delta_1r: Gt<Curve>
, + pub delta_1l: Gt<Curve>
, + pub delta_2r: Gt<Curve>
, + pub delta_2l: Gt<Curve>
, +} + +impl PublicParams { + pub fn new(rng: &mut impl Rng, n: usize) -> Result + where + G1: UniformRand, + G2: UniformRand, + { + let g1v = G1Vec::random(rng, n); + let g2v = G2Vec::random(rng, n); + let x = g1v.inner_prod(&g2v)?; + let reduce_pp = ReducePublicParams::new(rng, &g1v, &g2v)?; + let value = Self { + g1v, + g2v, + reduce_pp, + x, + }; + Ok(value) + } + + pub fn new_derived(&self, rng: &mut impl Rng, n: usize) -> Result + where + G1: UniformRand, + G2: UniformRand, + { + if self.g1v.len() != 2 * n || self.g2v.len() != 2 * n { + return Err(Error::LengthNotTwice); + } + let Some(reduce_pp) = &self.reduce_pp else { + return Err(Error::ReduceParamsNotInitialized); + }; + let g1v = reduce_pp.gamma_1_prime.clone(); + let g2v = reduce_pp.gamma_2_prime.clone(); + + let reduce_pp = ReducePublicParams::new(rng, &g1v, &g2v)?; + let x = g1v.inner_prod(&g2v)?; + + let value = Self { + g1v, + g2v, + reduce_pp, + x, + }; + Ok(value) + } + + pub fn digest(&self, prev: Option<&[u8]>) -> Result, Error> { + let mut hasher = Sha3_256::new(); + if let Some(prev) = prev { + hasher.update(prev); + } + + if let Some(reduce_pp) = &self.reduce_pp { + hasher.update(reduce_pp.digest()?); + } + self.x + .serialize_uncompressed(&mut hasher) + .expect("Serialization failed"); + + self.g1v.serialize_uncompressed(&mut hasher)?; + self.g2v.serialize_uncompressed(&mut hasher)?; + + Ok(hasher.finalize().to_vec()) + } + + pub fn generate_public_params(rng: &mut impl Rng, mut n: usize) -> Result, Error> + where + G1: UniformRand, + G2: UniformRand, + { + let mut res = Vec::new(); + let mut params = Self::new(rng, n)?; + while n > 0 { + res.push(params); + if n / 2 == 0 { + break; + } + n /= 2; + params = res.last().expect("just pushed").new_derived(rng, n)?; + } + Ok(res) + } +} + +impl ReducePublicParams { + pub fn new( + rng: &mut impl Rng, + g1v: &[G1], + g2v: &[G2], + ) -> Result, Error> + where + G1: UniformRand, + G2: UniformRand, + { + assert_eq!(g1v.len(), g2v.len()); + if g1v.len() == 1 { + return Ok(None); + } + let m = g1v.len() / 2; + let gamma_1l: G1Vec = (&g1v[..m]).into(); + let gamma_1r: G1Vec = (&g1v[m..]).into(); + + let gamma_2l = (&g2v[..m]).into(); + let gamma_2r = (&g2v[m..]).into(); + + let gamma_1_prime = G1Vec::random(rng, m); + let gamma_2_prime = G2Vec::random(rng, m); + + let delta_1l = gamma_1l.inner_prod(&gamma_2_prime)?; + let delta_1r = gamma_1r.inner_prod(&gamma_2_prime)?; + let delta_2l = gamma_1_prime.inner_prod(&gamma_2l)?; + let delta_2r = gamma_1_prime.inner_prod(&gamma_2r)?; + Ok(Some(Self { + gamma_1_prime, + gamma_2_prime, + delta_1r, + delta_1l, + delta_2r, + delta_2l, + })) + } + + pub fn digest(&self) -> Result, Error> { + let mut hasher = Sha3_256::new(); + + self.gamma_1_prime.serialize_uncompressed(&mut hasher)?; + self.gamma_2_prime.serialize_uncompressed(&mut hasher)?; + self.delta_1r.serialize_uncompressed(&mut hasher)?; + self.delta_1l.serialize_uncompressed(&mut hasher)?; + self.delta_2r.serialize_uncompressed(&mut hasher)?; + self.delta_2l.serialize_uncompressed(&mut hasher)?; + + Ok(hasher.finalize().to_vec()) + } +} diff --git a/jolt-core/src/poly/commitment/dory/reduce.rs b/jolt-core/src/poly/commitment/dory/reduce.rs new file mode 100644 index 000000000..57234e38f --- /dev/null +++ b/jolt-core/src/poly/commitment/dory/reduce.rs @@ -0,0 +1,332 @@ +#![allow(dead_code)] + +use std::ops::{Add, Mul}; + +use ark_ec::pairing::Pairing; +use ark_ff::{Field, PrimeField}; +use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; +use sha3::{Digest, Sha3_256}; + 
+use super::{ + params::ReducePublicParams, vec_operations::mul_gt, Commitment, Error, G1Vec, G2Vec, Gt, + PublicParams, ScalarProof, Witness, Zr, G1, G2, +}; + +/// Proof +#[derive(Clone, CanonicalDeserialize, CanonicalSerialize)] +pub struct DoryProof { + pub from_prover_1: Vec>, + pub from_prover_2: Vec>, + pub final_proof: ScalarProof, +} + +impl DoryProof { + fn verify_recursive( + public_params: &[PublicParams], + commitment: Commitment, + from_prover_1: &[ReduceProverStep1Elements], + from_prover_2: &[ReduceProverStep2Elements], + final_proof: &ScalarProof, + ) -> Result { + match public_params { + [] => Err(Error::EmptyPublicParams), + [params] => final_proof.verify(params, &commitment), + [param1, public_params_rest @ ..] => { + let digest = param1.digest(None)?.to_vec(); + + let PublicParams { x, reduce_pp, .. } = param1; + let ReducePublicParams { + delta_1r, + delta_1l, + delta_2r, + delta_2l, + .. + } = reduce_pp.as_ref().expect("gv1 is greater than 1"); + + match (from_prover_1, from_prover_2) { + ( + [ReduceProverStep1Elements { + d1l, d1r, d2l, d2r, .. + }, from_prover_1_rest @ ..], + [ReduceProverStep2Elements { + c_plus, c_minus, .. + }, from_prover_2_rest @ ..], + ) => { + let Commitment { c, d1, d2 } = commitment; + + let step_1_element = ReduceProverStep1Elements { + pp_digest: digest, + d1l: *d1l, + d1r: *d1r, + d2l: *d2l, + d2r: *d2r, + c, + d1, + d2, + }; + + let (betha, step_1_digest) = step_1_element.ro()?; + + let step_2_element = ReduceProverStep2Elements { + step_1_digest, + c_plus: *c_plus, + c_minus: *c_minus, + }; + + let alpha = step_2_element.ro()?; + let inverse_alpha = alpha.inverse().ok_or(Error::ZrZero)?; + let inverse_betha = betha.inverse().ok_or(Error::ZrZero)?; + + let c_prime = mul_gt(&[ + c, + *x, + d2 * betha, + d1 * inverse_betha, + *c_plus * alpha, + *c_minus * inverse_alpha, + ]) + .expect("slice is not empty"); + + let d1_prime = mul_gt(&[ + *d1l * alpha, + *d1r, + *delta_1l * alpha * betha, + *delta_1r * betha, + ]) + .expect("slice is not empty"); + + let d2_prime = mul_gt(&[ + *d2l * inverse_alpha, + *d2r, + *delta_2l * inverse_alpha * inverse_betha, + *delta_2r * inverse_betha, + ]) + .expect("slice is not empty"); + + let next_commitment = Commitment { + c: c_prime, + d1: d1_prime, + d2: d2_prime, + }; + + Self::verify_recursive( + public_params_rest, + next_commitment, + from_prover_1_rest, + from_prover_2_rest, + final_proof, + ) + } + _ => todo!(), + } + } + } + } + + pub fn verify( + &self, + public_params: &[PublicParams], + commitment: Commitment, + ) -> Result + where + Gt: Mul, Output = Gt>, + G1: Mul, Output = G1>, + G2: Mul, Output = G2>, + { + Self::verify_recursive( + public_params, + commitment, + &self.from_prover_1, + &self.from_prover_2, + &self.final_proof, + ) + } +} + +#[derive(Clone, CanonicalDeserialize, CanonicalSerialize)] +pub struct ReduceProverStep1Elements { + pp_digest: Vec, + d1l: Gt, + d1r: Gt, + d2l: Gt, + d2r: Gt, + c: Gt, + d1: Gt, + d2: Gt, +} + +impl ReduceProverStep1Elements { + pub fn ro(&self) -> Result<(Zr, Vec), Error> { + let mut hasher = Sha3_256::new(); + self.serialize_uncompressed(&mut hasher)?; + let digest = hasher.finalize(); + Ok(( + Zr::::from_be_bytes_mod_order(&digest), + digest.to_vec(), + )) + } +} + +#[derive(Clone, CanonicalDeserialize, CanonicalSerialize)] +pub struct ReduceProverStep2Elements { + step_1_digest: Vec, + c_plus: Gt, + c_minus: Gt, +} + +impl ReduceProverStep2Elements { + pub fn ro(&self) -> Result, Error> { + let mut hasher = Sha3_256::new(); + 
self.serialize_uncompressed(&mut hasher)?; + let digest = hasher.finalize(); + Ok(Zr::::from_be_bytes_mod_order(&digest)) + } +} + +pub fn reduce( + params: &[PublicParams], + witness: Witness, + Commitment { c, d1, d2 }: Commitment, +) -> Result, Error> +where + G1Vec: Add, Output = G1Vec>, + G2Vec: Add, Output = G2Vec>, +{ + match params { + [] => unimplemented!(), + [param1, rest_param @ ..] => { + let digest = param1.digest(None)?; + + let PublicParams { + g1v, + g2v, + x, + reduce_pp, + .. + } = param1; + + let ReducePublicParams { + delta_1r, + delta_1l, + delta_2r, + delta_2l, + gamma_1_prime, + gamma_2_prime, + } = reduce_pp.as_ref().unwrap(); + + let m = g1v.len() / 2; + + // P: + let v1l: G1Vec = (&witness.v1[..m]).into(); + let v1r: G1Vec = (&witness.v1[m..]).into(); + let v2l = (&witness.v2[..m]).into(); + let v2r = (&witness.v2[m..]).into(); + + // P --> V: + let d1l = v1l.inner_prod(gamma_2_prime)?; + let d1r = v1r.inner_prod(gamma_2_prime)?; + let d2l = gamma_1_prime.inner_prod(&v2l)?; + let d2r = gamma_1_prime.inner_prod(&v2r)?; + + let step_1_element = ReduceProverStep1Elements { + pp_digest: digest, + d1l, + d1r, + d2l, + d2r, + c, + d1, + d2, + }; + + let (betha, step_1_digest) = step_1_element.ro()?; + let inverse_betha = betha.inverse().unwrap(); + + // P: + let v1 = witness.v1 + (g1v * betha); + let v2 = witness.v2 + (g2v * inverse_betha); + + let v1l: G1Vec = v1[..m].to_vec().into(); + let v1r: G1Vec = v1[m..].to_vec().into(); + let v2l = v2[..m].to_vec().into(); + let v2r = v2[m..].to_vec().into(); + + // P --> V: + let c_plus = v1l.inner_prod(&v2r)?; + let c_minus = v1r.inner_prod(&v2l)?; + + let step_2_element = ReduceProverStep2Elements { + step_1_digest, + c_plus, + c_minus, + }; + let alpha = step_2_element.ro()?; + let inverse_alpha = alpha.inverse().unwrap(); + + let v1_prime = v1l * alpha + v1r; + let v2_prime = v2l * inverse_alpha + v2r; + + let next_witness = Witness { + v1: v1_prime, + v2: v2_prime, + }; + + if m == 1 { + return Ok(DoryProof { + from_prover_1: vec![step_1_element], + from_prover_2: vec![step_2_element], + final_proof: ScalarProof::new(next_witness), + }); + } + + let c_prime = mul_gt(&[ + c, + *x, + d2 * betha, + d1 * inverse_betha, + c_plus * alpha, + c_minus * inverse_alpha, + ]) + .unwrap(); + + let d1_prime = mul_gt(&[ + d1l * alpha, + d1r, + *delta_1l * alpha * betha, + *delta_1r * betha, + ]) + .unwrap(); + + let d2_prime = mul_gt(&[ + d2l * inverse_alpha, + d2r, + *delta_2l * inverse_alpha * inverse_betha, + *delta_2r * inverse_betha, + ]) + .unwrap(); + + let next_commitment = Commitment { + c: c_prime, + d1: d1_prime, + d2: d2_prime, + }; + + let DoryProof { + from_prover_1: step_1_elements, + from_prover_2: step_2_elements, + final_proof: scalar_product_proof, + } = reduce(rest_param, next_witness, next_commitment)?; + + let mut from_prover_1 = vec![step_1_element]; + from_prover_1.extend(step_1_elements); + let mut from_prover_2 = vec![step_2_element]; + from_prover_2.extend(step_2_elements); + + Ok(DoryProof { + from_prover_1, + from_prover_2, + final_proof: scalar_product_proof, + }) + } + } +} diff --git a/jolt-core/src/poly/commitment/dory/scalar.rs b/jolt-core/src/poly/commitment/dory/scalar.rs new file mode 100644 index 000000000..2687fdaa2 --- /dev/null +++ b/jolt-core/src/poly/commitment/dory/scalar.rs @@ -0,0 +1,101 @@ +use std::ops::Mul; + +use ark_ec::pairing::Pairing; +use ark_ff::{Field, UniformRand}; +use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; +use rand::thread_rng; + +use super::{ + 
vec_operations::{e, mul_gt}, + Error, G1Vec, G2Vec, Gt, PublicParams, Zr, G1, G2, +}; + +/// Witness over set Zr +#[derive(Clone)] +pub struct Witness { + pub v1: G1Vec, + pub v2: G2Vec, +} + +impl Witness
<P>
{ + pub fn new(params: &PublicParams<P>
, poly: &[Zr<P>
]) -> Self { + let v1 = params + .g1v + .iter() + .zip(poly.iter()) + .map(|(a, b)| *a * *b) + .collect::>>(); + + let v2 = params + .g2v + .iter() + .zip(poly.iter()) + .map(|(a, b)| *a * *b) + .collect::>>(); + let v1 = v1.into(); + let v2 = v2.into(); + + Self { v1, v2 } + } +} + +#[derive(Clone, Copy, CanonicalSerialize, CanonicalDeserialize, Debug, Default, PartialEq, Eq)] +pub struct Commitment { + pub c: Gt, + pub d1: Gt, + pub d2: Gt, +} + +pub fn commit( + Witness { v1, v2 }: Witness, + public_params: &PublicParams, +) -> Result, Error> { + let d1 = v1.inner_prod(&public_params.g2v)?; + let d2 = public_params.g1v.inner_prod(&v2)?; + let c = v1.inner_prod(&v2)?; + + let commitment = Commitment { d1, d2, c }; + Ok(commitment) +} + +#[derive(Clone, CanonicalDeserialize, CanonicalSerialize)] +pub struct ScalarProof { + //pp: &'a PublicParams, + e1: G1Vec, + e2: G2Vec, +} + +impl ScalarProof { + pub fn new(witness: Witness) -> Self { + Self { + //pp: public_params, + e1: witness.v1, + e2: witness.v2, + } + } + + pub fn verify( + &self, + pp: &PublicParams, + Commitment { c, d1, d2 }: &Commitment, + ) -> Result + where + for<'c> &'c G1Vec: Mul, Output = G1Vec>, + G1: Mul, Output = G1>, + G2: Mul, Output = G2>, + Gt: Mul, Output = Gt>, + { + let mut rng = thread_rng(); + let d: Zr = Zr::::rand(&mut rng); + let d_inv = d.inverse().ok_or(Error::CouldntInvertD)?; + + let g1 = G1Vec::::from(&[self.e1[0], pp.g1v[0] * d]).sum(); + + let g2 = G2Vec::::from(&[self.e2[0], pp.g2v[0] * d_inv]).sum(); + let left_eq = e(g1, g2); + + let right_eq = mul_gt(&[pp.x, *c, *d2 * d, *d1 * d_inv]).expect("has more than one item"); + + Ok(left_eq == right_eq) + } +} diff --git a/jolt-core/src/poly/commitment/dory/tests.rs b/jolt-core/src/poly/commitment/dory/tests.rs new file mode 100644 index 000000000..48d37267b --- /dev/null +++ b/jolt-core/src/poly/commitment/dory/tests.rs @@ -0,0 +1,52 @@ +use ark_bn254::Bn254; +use ark_std::UniformRand; + +use crate::poly::commitment::dory::scalar::Witness; + +use super::{commit, reduce, G1Vec, G2Vec, PublicParams, ScalarProof, G1, G2}; + +#[test] +fn test_scalar_product_proof() { + let mut rng = ark_std::test_rng(); + let public_params = PublicParams::::new(&mut rng, 1).unwrap(); + + let g1v = vec![G1::::rand(&mut rng)]; + let g2v = vec![G2::::rand(&mut rng)]; + let witness = Witness { + v1: g1v.into(), + v2: g2v.into(), + }; + let commitment = commit(witness.clone(), &public_params).unwrap(); + + let proof = ScalarProof::new(witness); + assert!(proof.verify(&public_params, &commitment).unwrap()); +} + +#[test] +fn test_dory_reduce() { + let mut rng = ark_std::test_rng(); + let n = 8; + let g1v = G1Vec::::random(&mut rng, n); + let g2v = G2Vec::random(&mut rng, n); + + let params = PublicParams::generate_public_params(&mut rng, n).unwrap(); + + let witness = Witness { v1: g1v, v2: g2v }; + let commitment = commit(witness.clone(), ¶ms[0]).unwrap(); + + let proof = reduce::reduce(¶ms, witness, commitment).unwrap(); + + assert_eq!(proof.from_prover_1.len(), 3); + assert_eq!(proof.from_prover_2.len(), 3); + + assert_eq!(params[0].g1v.len(), 8); + assert_eq!(params[1].g1v.len(), 4); + assert_eq!(params[2].g1v.len(), 2); + assert_eq!(params[3].g1v.len(), 1); + + assert_eq!(params[0].reduce_pp.as_ref().unwrap().gamma_1_prime.len(), 4); + assert_eq!(params[1].reduce_pp.as_ref().unwrap().gamma_1_prime.len(), 2); + assert_eq!(params[2].reduce_pp.as_ref().unwrap().gamma_1_prime.len(), 1); + assert!(params[3].reduce_pp.is_none()); + assert!(proof.verify(¶ms, commitment).unwrap()); 
+} diff --git a/jolt-core/src/poly/commitment/dory/vec_operations.rs b/jolt-core/src/poly/commitment/dory/vec_operations.rs new file mode 100644 index 000000000..fc8966064 --- /dev/null +++ b/jolt-core/src/poly/commitment/dory/vec_operations.rs @@ -0,0 +1,255 @@ +use std::ops::{Add, Deref, Mul}; + +use ark_ec::{pairing::Pairing, Group}; +use ark_ff::UniformRand; +use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; +use ark_std::rand::Rng; + +use super::{ + error::{Error, GType}, + Gt, Zr, G1, G2, +}; + +pub fn e(g1: G1, g2: G2) -> Gt { + Curve::pairing(g1, g2) +} + +pub fn mul_gt(gts: &[Gt]) -> Option> { + gts.iter().fold(None, |prev, curr| match prev { + Some(prev) => Some(curr + prev), + None => Some(*curr), + }) +} + +// G1 +#[derive(Clone, CanonicalDeserialize, CanonicalSerialize)] +pub struct G1Vec(Vec>); + +impl G1Vec { + pub fn inner_prod(&self, g2v: &G2Vec) -> Result, Error> + where + G1: From>, + G2: From>, + { + match (self.as_ref(), g2v.as_ref()) { + ([], _) => Err(Error::EmptyVector(GType::G1)), + (_, []) => Err(Error::EmptyVector(GType::G2)), + (a, b) if a.len() != b.len() => Err(Error::LengthMismatch), + ([g1], [g2]) => Ok(e(*g1, *g2)), + (a, b) => Ok(Curve::multi_pairing(a, b)), + } + } + + pub fn sum(&self) -> G1 { + self.iter().sum() + } + + pub fn random(rng: &mut impl Rng, n: usize) -> Self + where + G1: UniformRand, + { + Self( + (0..n) + .map(|_| { + let random_scalar = Zr::::rand(rng); + G1::::generator() * random_scalar + }) + .collect(), + ) + } +} + +impl Deref for G1Vec { + type Target = [G1]; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl Add for G1Vec +where + Curve: Pairing, + for<'b> &'b G1: Add<&'b G1, Output = G1>, +{ + type Output = G1Vec; + + fn add(self, rhs: Self) -> Self::Output { + Self( + self.0 + .iter() + .zip(rhs.0.iter()) + .map(|(val1, val2)| val1 + val2) + .collect(), + ) + } +} + +impl Mul> for G1Vec +where + G1: Copy, + G1: Mul, Output = G1>, +{ + type Output = G1Vec; + + fn mul(self, rhs: Zr) -> Self::Output { + G1Vec(self.0.iter().map(|val| *val * rhs).collect()) + } +} + +impl Mul> for &G1Vec +where + G1: Copy, + G1: Mul, Output = G1>, +{ + type Output = G1Vec; + + fn mul(self, rhs: Zr) -> Self::Output { + G1Vec(self.iter().map(|val| *val * rhs).collect()) + } +} + +impl From<&[G1]> for G1Vec { + fn from(value: &[G1]) -> Self { + Self(value.into()) + } +} + +impl From<&[G1; N]> for G1Vec { + fn from(value: &[G1; N]) -> Self { + Self((*value).into()) + } +} + +impl From>> for G1Vec { + fn from(value: Vec>) -> Self { + Self(value) + } +} + +// G2 + +#[derive(Clone, CanonicalDeserialize, CanonicalSerialize)] +pub struct G2Vec(Vec>); + +impl G2Vec { + pub fn sum(&self) -> G2 { + self.iter().sum() + } + + pub fn random(rng: &mut impl Rng, n: usize) -> Self + where + G2: UniformRand, + { + Self( + (0..n) + .map(|_| { + let random_scalar = Zr::::rand(rng); + G2::::generator() * random_scalar + }) + .collect(), + ) + } +} + +impl Deref for G2Vec { + type Target = [G2]; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl From<&[G2]> for G2Vec { + fn from(value: &[G2]) -> Self { + Self(value.into()) + } +} + +impl From<&[G2; N]> for G2Vec { + fn from(value: &[G2; N]) -> Self { + Self((*value).into()) + } +} + +impl From>> for G2Vec { + fn from(value: Vec>) -> Self { + Self(value) + } +} + +impl Add for G2Vec +where + Curve: Pairing, +{ + type Output = G2Vec; + + fn add(self, rhs: Self) -> Self::Output { + G2Vec( + self.0 + .iter() + .zip(rhs.0.iter()) + .map(|(val1, val2)| *val1 + *val2) + .collect(), + ) + } 
+} + +impl Mul> for G2Vec +where + Curve: Pairing, + G2: Copy + Mul, Output = G2>, +{ + type Output = G2Vec; + + fn mul(self, rhs: Zr) -> Self::Output { + G2Vec(self.0.iter().map(|val| *val * rhs).collect()) + } +} + +impl Mul> for &G2Vec +where + Curve: Pairing, + G2: Copy, + G2: Mul, Output = G2>, +{ + type Output = G2Vec; + + fn mul(self, rhs: Zr) -> Self::Output { + G2Vec(self.0.iter().map(|val| *val * rhs).collect()) + } +} + +#[cfg(test)] +mod tests { + use ark_bn254::Bn254; + use ark_ff::UniformRand; + + use super::{ + super::{G1Vec, G1, G2}, + e, mul_gt, + }; + + #[test] + fn test_inner_prod() { + let mut rng = ark_std::test_rng(); + let g1a = G1::::rand(&mut rng); + let g1b = G1::::rand(&mut rng); + let g1c = G1::::rand(&mut rng); + + let g2a = G2::::rand(&mut rng); + let g2b = G2::::rand(&mut rng); + let g2c = G2::::rand(&mut rng); + + let expected = mul_gt(&[e(g1a, g2a), e(g1b, g2b), e(g1c, g2c)]).unwrap(); + + let g1v = &[g1a, g1b, g1c]; + let g1v: G1Vec = g1v.into(); + let g2v = &[g2a, g2b, g2c]; + let g2v = g2v.into(); + + let actual = g1v.inner_prod(&g2v).unwrap(); + + assert_eq!(expected, actual); + } +} diff --git a/jolt-core/src/poly/commitment/mod.rs b/jolt-core/src/poly/commitment/mod.rs index 370368ab2..aa4c37cdb 100644 --- a/jolt-core/src/poly/commitment/mod.rs +++ b/jolt-core/src/poly/commitment/mod.rs @@ -1,5 +1,6 @@ pub mod binius; pub mod commitment_scheme; +pub mod dory; pub mod hyperkzg; pub mod kzg; pub mod zeromorph; diff --git a/jolt-core/src/utils/errors.rs b/jolt-core/src/utils/errors.rs index 0521dcd7b..713bb48cf 100644 --- a/jolt-core/src/utils/errors.rs +++ b/jolt-core/src/utils/errors.rs @@ -18,4 +18,6 @@ pub enum ProofVerifyError { KeyLengthError(usize, usize), #[error("Invalid key length: {0}, expected power of 2")] InvalidKeyLength(usize), + #[error("Verification was incorrect")] + VerificationFailed, } From cffd65cc19ea3b16bdbefc2e1bb5d46983ba65e0 Mon Sep 17 00:00:00 2001 From: Ray Date: Fri, 14 Feb 2025 21:03:53 +0100 Subject: [PATCH 02/10] refactor: use trait for inner prod --- jolt-core/src/poly/commitment/dory/params.rs | 2 +- jolt-core/src/poly/commitment/dory/reduce.rs | 5 ++-- jolt-core/src/poly/commitment/dory/scalar.rs | 2 +- .../poly/commitment/dory/vec_operations.rs | 28 +++++++++++++------ 4 files changed, 24 insertions(+), 13 deletions(-) diff --git a/jolt-core/src/poly/commitment/dory/params.rs b/jolt-core/src/poly/commitment/dory/params.rs index b8f7123ff..9d29d960e 100644 --- a/jolt-core/src/poly/commitment/dory/params.rs +++ b/jolt-core/src/poly/commitment/dory/params.rs @@ -4,7 +4,7 @@ use ark_serialize::CanonicalSerialize; use ark_std::rand::Rng; use sha3::{Digest, Sha3_256}; -use super::{Error, G1Vec, G2Vec, Gt, G1, G2}; +use super::{vec_operations::InnerProd, Error, G1Vec, G2Vec, Gt, G1, G2}; #[derive(Clone)] pub struct PublicParams { diff --git a/jolt-core/src/poly/commitment/dory/reduce.rs b/jolt-core/src/poly/commitment/dory/reduce.rs index 57234e38f..a998a1d78 100644 --- a/jolt-core/src/poly/commitment/dory/reduce.rs +++ b/jolt-core/src/poly/commitment/dory/reduce.rs @@ -8,8 +8,9 @@ use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; use sha3::{Digest, Sha3_256}; use super::{ - params::ReducePublicParams, vec_operations::mul_gt, Commitment, Error, G1Vec, G2Vec, Gt, - PublicParams, ScalarProof, Witness, Zr, G1, G2, + params::ReducePublicParams, + vec_operations::{mul_gt, InnerProd}, + Commitment, Error, G1Vec, G2Vec, Gt, PublicParams, ScalarProof, Witness, Zr, G1, G2, }; /// Proof diff --git 
a/jolt-core/src/poly/commitment/dory/scalar.rs b/jolt-core/src/poly/commitment/dory/scalar.rs index 2687fdaa2..5c5c0e3f4 100644 --- a/jolt-core/src/poly/commitment/dory/scalar.rs +++ b/jolt-core/src/poly/commitment/dory/scalar.rs @@ -6,7 +6,7 @@ use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; use rand::thread_rng; use super::{ - vec_operations::{e, mul_gt}, + vec_operations::{e, mul_gt, InnerProd}, Error, G1Vec, G2Vec, Gt, PublicParams, Zr, G1, G2, }; diff --git a/jolt-core/src/poly/commitment/dory/vec_operations.rs b/jolt-core/src/poly/commitment/dory/vec_operations.rs index fc8966064..e7706a72b 100644 --- a/jolt-core/src/poly/commitment/dory/vec_operations.rs +++ b/jolt-core/src/poly/commitment/dory/vec_operations.rs @@ -21,16 +21,18 @@ pub fn mul_gt(gts: &[Gt]) -> Option> { }) } -// G1 -#[derive(Clone, CanonicalDeserialize, CanonicalSerialize)] -pub struct G1Vec(Vec>); +pub trait InnerProd { + type G2; + type Gt; + fn inner_prod(&self, g2v: &Self::G2) -> Result; +} -impl G1Vec { - pub fn inner_prod(&self, g2v: &G2Vec) -> Result, Error> - where - G1: From>, - G2: From>, - { +impl InnerProd for G1Vec { + type G2 = G2Vec; + + type Gt = Gt; + + fn inner_prod(&self, g2v: &Self::G2) -> Result { match (self.as_ref(), g2v.as_ref()) { ([], _) => Err(Error::EmptyVector(GType::G1)), (_, []) => Err(Error::EmptyVector(GType::G2)), @@ -39,7 +41,13 @@ impl G1Vec { (a, b) => Ok(Curve::multi_pairing(a, b)), } } +} + +// G1 +#[derive(Clone, CanonicalDeserialize, CanonicalSerialize)] +pub struct G1Vec(Vec>); +impl G1Vec { pub fn sum(&self) -> G1 { self.iter().sum() } @@ -225,6 +233,8 @@ mod tests { use ark_bn254::Bn254; use ark_ff::UniformRand; + use super::InnerProd; + use super::{ super::{G1Vec, G1, G2}, e, mul_gt, From 7546f5816855a00e1bfdac50b4dc764a0cd631af Mon Sep 17 00:00:00 2001 From: Ray Date: Sun, 16 Feb 2025 13:02:12 +0100 Subject: [PATCH 03/10] refactor: update public params to enum --- jolt-core/src/poly/commitment/dory.rs | 14 +- jolt-core/src/poly/commitment/dory/params.rs | 223 ++++++++++--------- jolt-core/src/poly/commitment/dory/reduce.rs | 26 +-- jolt-core/src/poly/commitment/dory/scalar.rs | 31 ++- 4 files changed, 157 insertions(+), 137 deletions(-) diff --git a/jolt-core/src/poly/commitment/dory.rs b/jolt-core/src/poly/commitment/dory.rs index d92bc0f71..6e80eabf3 100644 --- a/jolt-core/src/poly/commitment/dory.rs +++ b/jolt-core/src/poly/commitment/dory.rs @@ -10,6 +10,7 @@ use scalar::ScalarProof; use scalar::{commit, Commitment, Witness}; use vec_operations::{G1Vec, G2Vec}; +use crate::msm::VariableBaseMSM; use crate::utils::errors::ProofVerifyError; use crate::{ field::JoltField, @@ -60,6 +61,7 @@ pub struct DoryBatchedProof; impl CommitmentScheme for DoryScheme where + P::G1: VariableBaseMSM, P: Pairing + Default, ProofTranscript: Transcript, G1
<P>
: Mul<Zr<P>, Output = G1<P>
>, @@ -90,10 +92,7 @@ where } fn commit(poly: &MultilinearPolynomial, setup: &Self::Setup) -> Self::Commitment { - let MultilinearPolynomial::LargeScalars(poly) = poly else { - panic!("Expected LargeScalars polynomial"); - }; - let witness = Witness::new(setup, poly.evals_ref()); + let witness = Witness::new(setup, poly); commit(witness, setup).unwrap() } @@ -114,10 +113,9 @@ where _opening_point: &[Self::Field], // point at which the polynomial is evaluated _transcript: &mut ProofTranscript, ) -> Self::Proof { - let MultilinearPolynomial::LargeScalars(poly) = poly else { - panic!("Expected LargeScalars polynomial"); - }; - let witness = Witness::new(setup, poly.evals_ref()); + //let evalutation_y = poly.evaluate(opening_point); + + let witness = Witness::new(setup, poly); ScalarProof::new(witness) } diff --git a/jolt-core/src/poly/commitment/dory/params.rs b/jolt-core/src/poly/commitment/dory/params.rs index 9d29d960e..a33ff4f24 100644 --- a/jolt-core/src/poly/commitment/dory/params.rs +++ b/jolt-core/src/poly/commitment/dory/params.rs @@ -7,88 +7,45 @@ use sha3::{Digest, Sha3_256}; use super::{vec_operations::InnerProd, Error, G1Vec, G2Vec, Gt, G1, G2}; #[derive(Clone)] -pub struct PublicParams { - pub g1v: G1Vec
<Curve>
, - pub g2v: G2Vec<Curve>
, - - pub x: Gt<Curve>
, - - pub reduce_pp: Option>, -} - -#[derive(Clone)] -pub struct ReducePublicParams { - pub gamma_1_prime: G1Vec
<Curve>
, - pub gamma_2_prime: G2Vec<Curve>
, - - pub delta_1r: Gt<Curve>
, - pub delta_1l: Gt<Curve>
, - pub delta_2r: Gt<Curve>
, - pub delta_2l: Gt<Curve>
, +pub enum PublicParams { + Single { + g1v: G1Vec<Curve>
, + g2v: G2Vec<Curve>
, + x: Gt<Curve>
, + }, + + Multi { + g1v: G1Vec<Curve>
, + g2v: G2Vec<Curve>
, + x: Gt<Curve>
, + + gamma_1_prime: G1Vec<Curve>
, + gamma_2_prime: G2Vec<Curve>
, + + delta_1r: Gt<Curve>
, + delta_1l: Gt<Curve>
, + delta_2r: Gt<Curve>
, + delta_2l: Gt<Curve>
, + }, } impl PublicParams { - pub fn new(rng: &mut impl Rng, n: usize) -> Result - where - G1: UniformRand, - G2: UniformRand, - { - let g1v = G1Vec::random(rng, n); - let g2v = G2Vec::random(rng, n); - let x = g1v.inner_prod(&g2v)?; - let reduce_pp = ReducePublicParams::new(rng, &g1v, &g2v)?; - let value = Self { - g1v, - g2v, - reduce_pp, - x, - }; - Ok(value) - } - - pub fn new_derived(&self, rng: &mut impl Rng, n: usize) -> Result - where - G1: UniformRand, - G2: UniformRand, - { - if self.g1v.len() != 2 * n || self.g2v.len() != 2 * n { - return Err(Error::LengthNotTwice); + pub fn g1v(&self) -> &G1Vec { + match self { + PublicParams::Single { g1v, .. } | PublicParams::Multi { g1v, .. } => g1v, } - let Some(reduce_pp) = &self.reduce_pp else { - return Err(Error::ReduceParamsNotInitialized); - }; - let g1v = reduce_pp.gamma_1_prime.clone(); - let g2v = reduce_pp.gamma_2_prime.clone(); - - let reduce_pp = ReducePublicParams::new(rng, &g1v, &g2v)?; - let x = g1v.inner_prod(&g2v)?; - - let value = Self { - g1v, - g2v, - reduce_pp, - x, - }; - Ok(value) } - pub fn digest(&self, prev: Option<&[u8]>) -> Result, Error> { - let mut hasher = Sha3_256::new(); - if let Some(prev) = prev { - hasher.update(prev); + pub fn g2v(&self) -> &G2Vec { + match self { + PublicParams::Single { g2v, .. } | PublicParams::Multi { g2v, .. } => g2v, } + } - if let Some(reduce_pp) = &self.reduce_pp { - hasher.update(reduce_pp.digest()?); + pub fn x(&self) -> &Gt { + match self { + PublicParams::Single { x, .. } | PublicParams::Multi { x, .. } => x, } - self.x - .serialize_uncompressed(&mut hasher) - .expect("Serialization failed"); - - self.g1v.serialize_uncompressed(&mut hasher)?; - self.g2v.serialize_uncompressed(&mut hasher)?; - - Ok(hasher.finalize().to_vec()) } pub fn generate_public_params(rng: &mut impl Rng, mut n: usize) -> Result, Error> @@ -104,59 +61,111 @@ impl PublicParams { break; } n /= 2; - params = res.last().expect("just pushed").new_derived(rng, n)?; + params = res.last().expect("just pushed").new_derived(rng)?; } Ok(res) } -} -impl ReducePublicParams { - pub fn new( - rng: &mut impl Rng, - g1v: &[G1], - g2v: &[G2], - ) -> Result, Error> + pub fn new(rng: &mut impl Rng, n: usize) -> Result where G1: UniformRand, G2: UniformRand, { - assert_eq!(g1v.len(), g2v.len()); - if g1v.len() == 1 { - return Ok(None); + let g1v = G1Vec::random(rng, n); + let g2v = G2Vec::random(rng, n); + Self::params_with_provided_g(rng, g1v, g2v) + } + + fn params_with_provided_g( + rng: &mut impl Rng, + g1v: G1Vec, + g2v: G2Vec, + ) -> Result { + let n = g1v.len(); + + let x = g1v.inner_prod(&g2v)?; + if n == 1 { + Ok(Self::Single { g1v, g2v, x }) + } else { + let m = g1v.len() / 2; + let gamma_1l: G1Vec = (&g1v[..m]).into(); + let gamma_1r: G1Vec = (&g1v[m..]).into(); + + let gamma_2l = (&g2v[..m]).into(); + let gamma_2r = (&g2v[m..]).into(); + + let gamma_1_prime = G1Vec::random(rng, m); + let gamma_2_prime = G2Vec::random(rng, m); + + let delta_1l = gamma_1l.inner_prod(&gamma_2_prime)?; + let delta_1r = gamma_1r.inner_prod(&gamma_2_prime)?; + let delta_2l = gamma_1_prime.inner_prod(&gamma_2l)?; + let delta_2r = gamma_1_prime.inner_prod(&gamma_2r)?; + + Ok(Self::Multi { + g1v, + g2v, + x, + gamma_1_prime, + gamma_2_prime, + delta_1r, + delta_1l, + delta_2r, + delta_2l, + }) } - let m = g1v.len() / 2; - let gamma_1l: G1Vec = (&g1v[..m]).into(); - let gamma_1r: G1Vec = (&g1v[m..]).into(); + } + + fn new_derived(&self, rng: &mut impl Rng) -> Result + where + G1: UniformRand, + G2: UniformRand, + { + let Self::Multi { + 
gamma_1_prime, + gamma_2_prime, + .. + } = self + else { + panic!() + }; - let gamma_2l = (&g2v[..m]).into(); - let gamma_2r = (&g2v[m..]).into(); + let g1v = gamma_1_prime.clone(); + let g2v = gamma_2_prime.clone(); + Self::params_with_provided_g(rng, g1v, g2v) + } - let gamma_1_prime = G1Vec::random(rng, m); - let gamma_2_prime = G2Vec::random(rng, m); + pub fn digest(&self, prev: Option<&[u8]>) -> Result, Error> { + let mut hasher = Sha3_256::new(); + if let Some(prev) = prev { + hasher.update(prev); + } - let delta_1l = gamma_1l.inner_prod(&gamma_2_prime)?; - let delta_1r = gamma_1r.inner_prod(&gamma_2_prime)?; - let delta_2l = gamma_1_prime.inner_prod(&gamma_2l)?; - let delta_2r = gamma_1_prime.inner_prod(&gamma_2r)?; - Ok(Some(Self { + if let Self::Multi { gamma_1_prime, gamma_2_prime, delta_1r, delta_1l, delta_2r, delta_2l, - })) - } - - pub fn digest(&self) -> Result, Error> { - let mut hasher = Sha3_256::new(); + .. + } = &self + { + gamma_1_prime.serialize_uncompressed(&mut hasher)?; + gamma_2_prime.serialize_uncompressed(&mut hasher)?; + delta_1r.serialize_uncompressed(&mut hasher)?; + delta_1l.serialize_uncompressed(&mut hasher)?; + delta_2r.serialize_uncompressed(&mut hasher)?; + delta_2l.serialize_uncompressed(&mut hasher)?; + } - self.gamma_1_prime.serialize_uncompressed(&mut hasher)?; - self.gamma_2_prime.serialize_uncompressed(&mut hasher)?; - self.delta_1r.serialize_uncompressed(&mut hasher)?; - self.delta_1l.serialize_uncompressed(&mut hasher)?; - self.delta_2r.serialize_uncompressed(&mut hasher)?; - self.delta_2l.serialize_uncompressed(&mut hasher)?; + match self { + PublicParams::Single { g1v, g2v, x } | PublicParams::Multi { g1v, g2v, x, .. } => { + x.serialize_uncompressed(&mut hasher)?; + g1v.serialize_uncompressed(&mut hasher)?; + g2v.serialize_uncompressed(&mut hasher)?; + } + } Ok(hasher.finalize().to_vec()) } diff --git a/jolt-core/src/poly/commitment/dory/reduce.rs b/jolt-core/src/poly/commitment/dory/reduce.rs index a998a1d78..1c1c8d4d9 100644 --- a/jolt-core/src/poly/commitment/dory/reduce.rs +++ b/jolt-core/src/poly/commitment/dory/reduce.rs @@ -8,7 +8,6 @@ use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; use sha3::{Digest, Sha3_256}; use super::{ - params::ReducePublicParams, vec_operations::{mul_gt, InnerProd}, Commitment, Error, G1Vec, G2Vec, Gt, PublicParams, ScalarProof, Witness, Zr, G1, G2, }; @@ -35,14 +34,17 @@ impl DoryProof { [param1, public_params_rest @ ..] => { let digest = param1.digest(None)?.to_vec(); - let PublicParams { x, reduce_pp, .. } = param1; - let ReducePublicParams { + let PublicParams::Multi { delta_1r, delta_1l, delta_2r, delta_2l, + x, .. - } = reduce_pp.as_ref().expect("gv1 is greater than 1"); + } = param1 + else { + panic!() + }; match (from_prover_1, from_prover_2) { ( @@ -198,22 +200,20 @@ where [param1, rest_param @ ..] => { let digest = param1.digest(None)?; - let PublicParams { + let PublicParams::Multi { g1v, g2v, x, - reduce_pp, - .. 
- } = param1; - - let ReducePublicParams { + gamma_1_prime, + gamma_2_prime, delta_1r, delta_1l, delta_2r, delta_2l, - gamma_1_prime, - gamma_2_prime, - } = reduce_pp.as_ref().unwrap(); + } = param1 + else { + panic!() + }; let m = g1v.len() / 2; diff --git a/jolt-core/src/poly/commitment/dory/scalar.rs b/jolt-core/src/poly/commitment/dory/scalar.rs index 5c5c0e3f4..b7edbb5d8 100644 --- a/jolt-core/src/poly/commitment/dory/scalar.rs +++ b/jolt-core/src/poly/commitment/dory/scalar.rs @@ -5,6 +5,10 @@ use ark_ff::{Field, UniformRand}; use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; use rand::thread_rng; +use crate::{ + field::JoltField, msm::VariableBaseMSM, poly::multilinear_polynomial::MultilinearPolynomial, +}; + use super::{ vec_operations::{e, mul_gt, InnerProd}, Error, G1Vec, G2Vec, Gt, PublicParams, Zr, G1, G2, @@ -17,17 +21,25 @@ pub struct Witness { pub v2: G2Vec, } -impl Witness
<P>
{ - pub fn new(params: &PublicParams<P>
, poly: &[Zr<P>
]) -> Self { +impl Witness<P>
+where + P::G1: VariableBaseMSM, + P::ScalarField: JoltField, +{ + pub fn new(params: &PublicParams<P>
, poly: &MultilinearPolynomial) -> Self { + let MultilinearPolynomial::LargeScalars(poly) = poly else { + panic!() + }; + let poly = poly.evals_ref(); let v1 = params - .g1v + .g1v() .iter() .zip(poly.iter()) .map(|(a, b)| *a * *b) .collect::>>(); let v2 = params - .g2v + .g2v() .iter() .zip(poly.iter()) .map(|(a, b)| *a * *b) @@ -50,8 +62,8 @@ pub fn commit( Witness { v1, v2 }: Witness, public_params: &PublicParams, ) -> Result, Error> { - let d1 = v1.inner_prod(&public_params.g2v)?; - let d2 = public_params.g1v.inner_prod(&v2)?; + let d1 = v1.inner_prod(public_params.g2v())?; + let d2 = public_params.g1v().inner_prod(&v2)?; let c = v1.inner_prod(&v2)?; let commitment = Commitment { d1, d2, c }; @@ -89,12 +101,13 @@ impl ScalarProof { let d: Zr = Zr::::rand(&mut rng); let d_inv = d.inverse().ok_or(Error::CouldntInvertD)?; - let g1 = G1Vec::::from(&[self.e1[0], pp.g1v[0] * d]).sum(); + let g1 = G1Vec::::from(&[self.e1[0], pp.g1v()[0] * d]).sum(); - let g2 = G2Vec::::from(&[self.e2[0], pp.g2v[0] * d_inv]).sum(); + let g2 = G2Vec::::from(&[self.e2[0], pp.g2v()[0] * d_inv]).sum(); let left_eq = e(g1, g2); - let right_eq = mul_gt(&[pp.x, *c, *d2 * d, *d1 * d_inv]).expect("has more than one item"); + let right_eq = + mul_gt(&[*pp.x(), *c, *d2 * d, *d1 * d_inv]).expect("has more than one item"); Ok(left_eq == right_eq) } From 9163895e50437bb297c97af8933fd3e37bf035e5 Mon Sep 17 00:00:00 2001 From: Ray Date: Sun, 16 Feb 2025 14:57:22 +0100 Subject: [PATCH 04/10] refactor: use transcript, single and multi proof --- jolt-core/src/poly/commitment/dory.rs | 31 +++-- jolt-core/src/poly/commitment/dory/params.rs | 78 ++++-------- jolt-core/src/poly/commitment/dory/reduce.rs | 117 ++++++++++-------- jolt-core/src/poly/commitment/dory/scalar.rs | 30 +++-- jolt-core/src/poly/commitment/dory/tests.rs | 39 ++++-- .../poly/commitment/dory/vec_operations.rs | 32 +++-- 6 files changed, 176 insertions(+), 151 deletions(-) diff --git a/jolt-core/src/poly/commitment/dory.rs b/jolt-core/src/poly/commitment/dory.rs index 6e80eabf3..665ad5c33 100644 --- a/jolt-core/src/poly/commitment/dory.rs +++ b/jolt-core/src/poly/commitment/dory.rs @@ -6,6 +6,8 @@ use error::Error; use params::PublicParams; use rayon::iter::IntoParallelIterator; use rayon::iter::ParallelIterator; +use reduce::reduce; +use reduce::DoryProof; use scalar::ScalarProof; use scalar::{commit, Commitment, Witness}; use vec_operations::{G1Vec, G2Vec}; @@ -70,11 +72,11 @@ where { type Field = Zr
<P>
; - type Setup = PublicParams<P>
; + type Setup = Vec<PublicParams<P>>; type Commitment = Commitment<P>
; - type Proof = ScalarProof<P>
; + type Proof = DoryProof<P>
; type BatchedProof = DoryBatchedProof; @@ -88,12 +90,14 @@ where } } let mut rng = ark_std::rand::thread_rng(); - PublicParams::new(&mut rng, max_len).expect("Length must be greater than 0") + PublicParams::generate_public_params(&mut rng, max_len) + .expect("Length must be greater than 0") } fn commit(poly: &MultilinearPolynomial, setup: &Self::Setup) -> Self::Commitment { - let witness = Witness::new(setup, poly); - commit(witness, setup).unwrap() + let public_param = setup.first().unwrap(); + let witness = Witness::new(public_param, poly); + commit(witness, public_param).unwrap() } fn batch_commit( @@ -111,26 +115,29 @@ where setup: &Self::Setup, poly: &MultilinearPolynomial, _opening_point: &[Self::Field], // point at which the polynomial is evaluated - _transcript: &mut ProofTranscript, + transcript: &mut ProofTranscript, ) -> Self::Proof { - //let evalutation_y = poly.evaluate(opening_point); + let public_param = setup.first().unwrap(); - let witness = Witness::new(setup, poly); - ScalarProof::new(witness) + let witness = Witness::new(public_param, poly); + + let commitment = commit(witness.clone(), public_param).unwrap(); + + reduce(transcript, setup.as_slice(), witness, commitment).unwrap() } fn verify( proof: &Self::Proof, setup: &Self::Setup, - _transcript: &mut ProofTranscript, + transcript: &mut ProofTranscript, _opening_point: &[Self::Field], // point at which the polynomial is evaluated _opening: &Self::Field, // evaluation \widetilde{Z}(r) commitment: &Self::Commitment, ) -> Result<(), ProofVerifyError> { - if proof.verify(setup, commitment).unwrap() { + if proof.verify(transcript, setup, *commitment).unwrap() { Ok(()) } else { - Err(ProofVerifyError::VerificationFailed) + todo!() } } diff --git a/jolt-core/src/poly/commitment/dory/params.rs b/jolt-core/src/poly/commitment/dory/params.rs index a33ff4f24..c19db4bae 100644 --- a/jolt-core/src/poly/commitment/dory/params.rs +++ b/jolt-core/src/poly/commitment/dory/params.rs @@ -1,18 +1,19 @@ use ark_ec::pairing::Pairing; use ark_ff::UniformRand; -use ark_serialize::CanonicalSerialize; use ark_std::rand::Rng; -use sha3::{Digest, Sha3_256}; use super::{vec_operations::InnerProd, Error, G1Vec, G2Vec, Gt, G1, G2}; +#[derive(Clone)] +pub struct SingleParam { + pub g1: G1
<Curve>
, + pub g2: G2<Curve>
, + pub x: Gt<Curve>
, +} + #[derive(Clone)] pub enum PublicParams { - Single { - g1v: G1Vec
<Curve>
, - g2v: G2Vec<Curve>
, - x: Gt<Curve>
, - }, + Single(SingleParam<Curve>
), Multi { g1v: G1Vec<Curve>
, @@ -30,21 +31,23 @@ pub enum PublicParams { } impl PublicParams { - pub fn g1v(&self) -> &G1Vec { + pub fn g1v(&self) -> Vec> { match self { - PublicParams::Single { g1v, .. } | PublicParams::Multi { g1v, .. } => g1v, + PublicParams::Single(SingleParam { g1, .. }) => vec![*g1], + PublicParams::Multi { g1v, .. } => g1v.to_vec(), } } - pub fn g2v(&self) -> &G2Vec { + pub fn g2v(&self) -> Vec> { match self { - PublicParams::Single { g2v, .. } | PublicParams::Multi { g2v, .. } => g2v, + PublicParams::Single(SingleParam { g2, .. }) => vec![*g2], + PublicParams::Multi { g2v, .. } => g2v.to_vec(), } } pub fn x(&self) -> &Gt { match self { - PublicParams::Single { x, .. } | PublicParams::Multi { x, .. } => x, + PublicParams::Single(SingleParam { x, .. }) | PublicParams::Multi { x, .. } => x, } } @@ -81,18 +84,22 @@ impl PublicParams { g1v: G1Vec, g2v: G2Vec, ) -> Result { - let n = g1v.len(); - let x = g1v.inner_prod(&g2v)?; - if n == 1 { - Ok(Self::Single { g1v, g2v, x }) + // if there's a single element, return a single param + if let ([g1], [g2]) = (&*g1v, &*g2v) { + Ok(Self::Single(SingleParam { + g1: *g1, + g2: *g2, + x, + })) + // else, prepare gamma and delta public params } else { let m = g1v.len() / 2; let gamma_1l: G1Vec = (&g1v[..m]).into(); let gamma_1r: G1Vec = (&g1v[m..]).into(); - let gamma_2l = (&g2v[..m]).into(); - let gamma_2r = (&g2v[m..]).into(); + let gamma_2l: G2Vec = (&g2v[..m]).into(); + let gamma_2r: G2Vec = (&g2v[m..]).into(); let gamma_1_prime = G1Vec::random(rng, m); let gamma_2_prime = G2Vec::random(rng, m); @@ -134,39 +141,4 @@ impl PublicParams { let g2v = gamma_2_prime.clone(); Self::params_with_provided_g(rng, g1v, g2v) } - - pub fn digest(&self, prev: Option<&[u8]>) -> Result, Error> { - let mut hasher = Sha3_256::new(); - if let Some(prev) = prev { - hasher.update(prev); - } - - if let Self::Multi { - gamma_1_prime, - gamma_2_prime, - delta_1r, - delta_1l, - delta_2r, - delta_2l, - .. - } = &self - { - gamma_1_prime.serialize_uncompressed(&mut hasher)?; - gamma_2_prime.serialize_uncompressed(&mut hasher)?; - delta_1r.serialize_uncompressed(&mut hasher)?; - delta_1l.serialize_uncompressed(&mut hasher)?; - delta_2r.serialize_uncompressed(&mut hasher)?; - delta_2l.serialize_uncompressed(&mut hasher)?; - } - - match self { - PublicParams::Single { g1v, g2v, x } | PublicParams::Multi { g1v, g2v, x, .. 
} => { - x.serialize_uncompressed(&mut hasher)?; - g1v.serialize_uncompressed(&mut hasher)?; - g2v.serialize_uncompressed(&mut hasher)?; - } - } - - Ok(hasher.finalize().to_vec()) - } } diff --git a/jolt-core/src/poly/commitment/dory/reduce.rs b/jolt-core/src/poly/commitment/dory/reduce.rs index 1c1c8d4d9..598bc84fd 100644 --- a/jolt-core/src/poly/commitment/dory/reduce.rs +++ b/jolt-core/src/poly/commitment/dory/reduce.rs @@ -3,9 +3,12 @@ use std::ops::{Add, Mul}; use ark_ec::pairing::Pairing; -use ark_ff::{Field, PrimeField}; use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; -use sha3::{Digest, Sha3_256}; + +use crate::{ + field::JoltField, + utils::transcript::{AppendToTranscript, Transcript}, +}; use super::{ vec_operations::{mul_gt, InnerProd}, @@ -20,8 +23,12 @@ pub struct DoryProof { pub final_proof: ScalarProof, } -impl DoryProof { - fn verify_recursive( +impl DoryProof +where + Curve::ScalarField: JoltField, +{ + fn verify_recursive( + transcript: &mut ProofTranscript, public_params: &[PublicParams], commitment: Commitment, from_prover_1: &[ReduceProverStep1Elements], @@ -30,10 +37,8 @@ impl DoryProof { ) -> Result { match public_params { [] => Err(Error::EmptyPublicParams), - [params] => final_proof.verify(params, &commitment), + [PublicParams::Single(param)] => final_proof.verify(param, &commitment), [param1, public_params_rest @ ..] => { - let digest = param1.digest(None)?.to_vec(); - let PublicParams::Multi { delta_1r, delta_1l, @@ -58,7 +63,6 @@ impl DoryProof { let Commitment { c, d1, d2 } = commitment; let step_1_element = ReduceProverStep1Elements { - pp_digest: digest, d1l: *d1l, d1r: *d1r, d2l: *d2l, @@ -68,17 +72,22 @@ impl DoryProof { d2, }; - let (betha, step_1_digest) = step_1_element.ro()?; + // update transcript with step_1_elements + step_1_element.append_to_transcript(transcript); + // Get from Transcript + let betha: Zr = transcript.challenge_scalar(); let step_2_element = ReduceProverStep2Elements { - step_1_digest, c_plus: *c_plus, c_minus: *c_minus, }; - let alpha = step_2_element.ro()?; - let inverse_alpha = alpha.inverse().ok_or(Error::ZrZero)?; - let inverse_betha = betha.inverse().ok_or(Error::ZrZero)?; + // update transcript with step_2_elements + step_2_element.append_to_transcript(transcript); + // Get from Transcript + let alpha: Zr = transcript.challenge_scalar(); + let inverse_alpha = JoltField::inverse(&alpha).ok_or(Error::ZrZero)?; + let inverse_betha = JoltField::inverse(&betha).ok_or(Error::ZrZero)?; let c_prime = mul_gt(&[ c, @@ -113,6 +122,7 @@ impl DoryProof { }; Self::verify_recursive( + transcript, public_params_rest, next_commitment, from_prover_1_rest, @@ -126,8 +136,9 @@ impl DoryProof { } } - pub fn verify( + pub fn verify( &self, + transcript: &mut ProofTranscript, public_params: &[PublicParams], commitment: Commitment, ) -> Result @@ -137,6 +148,7 @@ impl DoryProof { G2: Mul, Output = G2>, { Self::verify_recursive( + transcript, public_params, commitment, &self.from_prover_1, @@ -148,7 +160,6 @@ impl DoryProof { #[derive(Clone, CanonicalDeserialize, CanonicalSerialize)] pub struct ReduceProverStep1Elements { - pp_digest: Vec, d1l: Gt, d1r: Gt, d2l: Gt, @@ -158,48 +169,55 @@ pub struct ReduceProverStep1Elements { d2: Gt, } -impl ReduceProverStep1Elements { - pub fn ro(&self) -> Result<(Zr, Vec), Error> { - let mut hasher = Sha3_256::new(); - self.serialize_uncompressed(&mut hasher)?; - let digest = hasher.finalize(); - Ok(( - Zr::::from_be_bytes_mod_order(&digest), - digest.to_vec(), - )) +impl AppendToTranscript for 
ReduceProverStep1Elements

{ + fn append_to_transcript(&self, transcript: &mut ProofTranscript) { + append_gt(transcript, self.d1l); + append_gt(transcript, self.d1r); + append_gt(transcript, self.d2l); + append_gt(transcript, self.d2r); + append_gt(transcript, self.c); + append_gt(transcript, self.d1); + append_gt(transcript, self.d2); } } +fn append_gt(transcript: &mut ProofTranscript, gt: Gt

) { + let mut buf = vec![]; + gt.serialize_uncompressed(&mut buf).unwrap(); + // Serialize uncompressed gives the scalar in LE byte order which is not + // a natural representation in the EVM for scalar math so we reverse + // to get an EVM compatible version. + buf = buf.into_iter().rev().collect(); + transcript.append_bytes(&buf); +} + #[derive(Clone, CanonicalDeserialize, CanonicalSerialize)] pub struct ReduceProverStep2Elements { - step_1_digest: Vec, c_plus: Gt, c_minus: Gt, } -impl ReduceProverStep2Elements { - pub fn ro(&self) -> Result, Error> { - let mut hasher = Sha3_256::new(); - self.serialize_uncompressed(&mut hasher)?; - let digest = hasher.finalize(); - Ok(Zr::::from_be_bytes_mod_order(&digest)) +impl AppendToTranscript for ReduceProverStep2Elements

{ + fn append_to_transcript(&self, transcript: &mut ProofTranscript) { + append_gt(transcript, self.c_plus); + append_gt(transcript, self.c_minus); } } -pub fn reduce( +pub fn reduce( + transcript: &mut ProofTranscript, params: &[PublicParams], witness: Witness, Commitment { c, d1, d2 }: Commitment, ) -> Result, Error> where + Curve::ScalarField: JoltField, G1Vec: Add, Output = G1Vec>, G2Vec: Add, Output = G2Vec>, { match params { [] => unimplemented!(), [param1, rest_param @ ..] => { - let digest = param1.digest(None)?; - let PublicParams::Multi { g1v, g2v, @@ -220,8 +238,8 @@ where // P: let v1l: G1Vec = (&witness.v1[..m]).into(); let v1r: G1Vec = (&witness.v1[m..]).into(); - let v2l = (&witness.v2[..m]).into(); - let v2r = (&witness.v2[m..]).into(); + let v2l: G2Vec = (&witness.v2[..m]).into(); + let v2r: G2Vec = (&witness.v2[m..]).into(); // P --> V: let d1l = v1l.inner_prod(gamma_2_prime)?; @@ -230,7 +248,6 @@ where let d2r = gamma_1_prime.inner_prod(&v2r)?; let step_1_element = ReduceProverStep1Elements { - pp_digest: digest, d1l, d1r, d2l, @@ -239,9 +256,12 @@ where d1, d2, }; + // update transcript with step 1 element + step_1_element.append_to_transcript(transcript); - let (betha, step_1_digest) = step_1_element.ro()?; - let inverse_betha = betha.inverse().unwrap(); + // Get from Transcript + let betha: Zr = transcript.challenge_scalar(); + let inverse_betha = JoltField::inverse(&betha).unwrap(); // P: let v1 = witness.v1 + (g1v * betha); @@ -249,20 +269,19 @@ where let v1l: G1Vec = v1[..m].to_vec().into(); let v1r: G1Vec = v1[m..].to_vec().into(); - let v2l = v2[..m].to_vec().into(); - let v2r = v2[m..].to_vec().into(); + let v2l: G2Vec = v2[..m].to_vec().into(); + let v2r: G2Vec = v2[m..].to_vec().into(); // P --> V: let c_plus = v1l.inner_prod(&v2r)?; let c_minus = v1r.inner_prod(&v2l)?; - let step_2_element = ReduceProverStep2Elements { - step_1_digest, - c_plus, - c_minus, - }; - let alpha = step_2_element.ro()?; - let inverse_alpha = alpha.inverse().unwrap(); + let step_2_element = ReduceProverStep2Elements { c_plus, c_minus }; + // update transcript with step 2 elements + step_2_element.append_to_transcript(transcript); + // Get from Transcript + let alpha: Zr = transcript.challenge_scalar(); + let inverse_alpha = JoltField::inverse(&alpha).unwrap(); let v1_prime = v1l * alpha + v1r; let v2_prime = v2l * inverse_alpha + v2r; @@ -316,7 +335,7 @@ where from_prover_1: step_1_elements, from_prover_2: step_2_elements, final_proof: scalar_product_proof, - } = reduce(rest_param, next_witness, next_commitment)?; + } = reduce(transcript, rest_param, next_witness, next_commitment)?; let mut from_prover_1 = vec![step_1_element]; from_prover_1.extend(step_1_elements); diff --git a/jolt-core/src/poly/commitment/dory/scalar.rs b/jolt-core/src/poly/commitment/dory/scalar.rs index b7edbb5d8..c66bd2b57 100644 --- a/jolt-core/src/poly/commitment/dory/scalar.rs +++ b/jolt-core/src/poly/commitment/dory/scalar.rs @@ -10,6 +10,7 @@ use crate::{ }; use super::{ + params::SingleParam, vec_operations::{e, mul_gt, InnerProd}, Error, G1Vec, G2Vec, Gt, PublicParams, Zr, G1, G2, }; @@ -33,16 +34,16 @@ where let poly = poly.evals_ref(); let v1 = params .g1v() - .iter() + .into_iter() .zip(poly.iter()) - .map(|(a, b)| *a * *b) + .map(|(a, b)| a * b) .collect::>>(); let v2 = params .g2v() - .iter() + .into_iter() .zip(poly.iter()) - .map(|(a, b)| *a * *b) + .map(|(a, b)| a * b) .collect::>>(); let v1 = v1.into(); let v2 = v2.into(); @@ -62,7 +63,7 @@ pub fn commit( Witness { v1, v2 }: Witness, 
public_params: &PublicParams, ) -> Result, Error> { - let d1 = v1.inner_prod(public_params.g2v())?; + let d1 = v1.inner_prod(&public_params.g2v())?; let d2 = public_params.g1v().inner_prod(&v2)?; let c = v1.inner_prod(&v2)?; @@ -72,23 +73,21 @@ pub fn commit( #[derive(Clone, CanonicalDeserialize, CanonicalSerialize)] pub struct ScalarProof { - //pp: &'a PublicParams, - e1: G1Vec, - e2: G2Vec, + e1: G1, + e2: G2, } impl ScalarProof { pub fn new(witness: Witness) -> Self { Self { - //pp: public_params, - e1: witness.v1, - e2: witness.v2, + e1: witness.v1[0], + e2: witness.v2[0], } } pub fn verify( &self, - pp: &PublicParams, + pp: &SingleParam, Commitment { c, d1, d2 }: &Commitment, ) -> Result where @@ -101,13 +100,12 @@ impl ScalarProof { let d: Zr = Zr::::rand(&mut rng); let d_inv = d.inverse().ok_or(Error::CouldntInvertD)?; - let g1 = G1Vec::::from(&[self.e1[0], pp.g1v()[0] * d]).sum(); + let g1 = G1Vec::::from(&[self.e1, pp.g1 * d]).sum(); - let g2 = G2Vec::::from(&[self.e2[0], pp.g2v()[0] * d_inv]).sum(); + let g2 = G2Vec::::from(&[self.e2, pp.g2 * d_inv]).sum(); let left_eq = e(g1, g2); - let right_eq = - mul_gt(&[*pp.x(), *c, *d2 * d, *d1 * d_inv]).expect("has more than one item"); + let right_eq = mul_gt(&[pp.x, *c, *d2 * d, *d1 * d_inv]).expect("has more than one item"); Ok(left_eq == right_eq) } diff --git a/jolt-core/src/poly/commitment/dory/tests.rs b/jolt-core/src/poly/commitment/dory/tests.rs index 48d37267b..19df2ca0a 100644 --- a/jolt-core/src/poly/commitment/dory/tests.rs +++ b/jolt-core/src/poly/commitment/dory/tests.rs @@ -1,7 +1,10 @@ use ark_bn254::Bn254; use ark_std::UniformRand; -use crate::poly::commitment::dory::scalar::Witness; +use crate::{ + poly::commitment::dory::scalar::Witness, + utils::transcript::{KeccakTranscript, Transcript}, +}; use super::{commit, reduce, G1Vec, G2Vec, PublicParams, ScalarProof, G1, G2}; @@ -9,6 +12,9 @@ use super::{commit, reduce, G1Vec, G2Vec, PublicParams, ScalarProof, G1, G2}; fn test_scalar_product_proof() { let mut rng = ark_std::test_rng(); let public_params = PublicParams::::new(&mut rng, 1).unwrap(); + let PublicParams::Single(single_param) = &public_params else { + panic!() + }; let g1v = vec![G1::::rand(&mut rng)]; let g2v = vec![G2::::rand(&mut rng)]; @@ -19,7 +25,8 @@ fn test_scalar_product_proof() { let commitment = commit(witness.clone(), &public_params).unwrap(); let proof = ScalarProof::new(witness); - assert!(proof.verify(&public_params, &commitment).unwrap()); + + assert!(proof.verify(single_param, &commitment).unwrap()); } #[test] @@ -33,20 +40,28 @@ fn test_dory_reduce() { let witness = Witness { v1: g1v, v2: g2v }; let commitment = commit(witness.clone(), ¶ms[0]).unwrap(); + let mut transcript = KeccakTranscript::new(&[]); - let proof = reduce::reduce(¶ms, witness, commitment).unwrap(); + let proof = reduce::reduce(&mut transcript, ¶ms, witness, commitment).unwrap(); assert_eq!(proof.from_prover_1.len(), 3); assert_eq!(proof.from_prover_2.len(), 3); - assert_eq!(params[0].g1v.len(), 8); - assert_eq!(params[1].g1v.len(), 4); - assert_eq!(params[2].g1v.len(), 2); - assert_eq!(params[3].g1v.len(), 1); + assert_eq!(params[0].g1v().len(), 8); + assert_eq!(params[1].g1v().len(), 4); + assert_eq!(params[2].g1v().len(), 2); + assert_eq!(params[3].g1v().len(), 1); + + let mut prev = n; + for param in ¶ms[..params.len() - 1] { + let PublicParams::Multi { gamma_1_prime, .. 
} = param else { + panic!() + }; + prev /= 2; + assert_eq!(gamma_1_prime.len(), prev); + } + assert!(matches!(params[3], PublicParams::Single(_))); - assert_eq!(params[0].reduce_pp.as_ref().unwrap().gamma_1_prime.len(), 4); - assert_eq!(params[1].reduce_pp.as_ref().unwrap().gamma_1_prime.len(), 2); - assert_eq!(params[2].reduce_pp.as_ref().unwrap().gamma_1_prime.len(), 1); - assert!(params[3].reduce_pp.is_none()); - assert!(proof.verify(¶ms, commitment).unwrap()); + let mut transcript = KeccakTranscript::new(&[]); + assert!(proof.verify(&mut transcript, ¶ms, commitment).unwrap()); } diff --git a/jolt-core/src/poly/commitment/dory/vec_operations.rs b/jolt-core/src/poly/commitment/dory/vec_operations.rs index e7706a72b..4d0350937 100644 --- a/jolt-core/src/poly/commitment/dory/vec_operations.rs +++ b/jolt-core/src/poly/commitment/dory/vec_operations.rs @@ -21,18 +21,31 @@ pub fn mul_gt(gts: &[Gt]) -> Option> { }) } -pub trait InnerProd { - type G2; +pub trait InnerProd { type Gt; - fn inner_prod(&self, g2v: &Self::G2) -> Result; + fn inner_prod(&self, g2v: &G2) -> Result; } -impl InnerProd for G1Vec { - type G2 = G2Vec; +impl InnerProd> for Vec> { + type Gt = Gt; + + fn inner_prod(&self, g2v: &G2Vec) -> Result { + Ok(Curve::multi_pairing(self, g2v.as_ref())) + } +} +impl InnerProd>> for G1Vec { type Gt = Gt; - fn inner_prod(&self, g2v: &Self::G2) -> Result { + fn inner_prod(&self, g2v: &Vec>) -> Result { + Ok(Curve::multi_pairing(self.as_ref(), g2v)) + } +} + +impl InnerProd> for G1Vec { + type Gt = Gt; + + fn inner_prod(&self, g2v: &G2Vec) -> Result { match (self.as_ref(), g2v.as_ref()) { ([], _) => Err(Error::EmptyVector(GType::G1)), (_, []) => Err(Error::EmptyVector(GType::G2)), @@ -78,7 +91,6 @@ impl Deref for G1Vec { impl Add for G1Vec where Curve: Pairing, - for<'b> &'b G1: Add<&'b G1, Output = G1>, { type Output = G1Vec; @@ -87,7 +99,7 @@ where self.0 .iter() .zip(rhs.0.iter()) - .map(|(val1, val2)| val1 + val2) + .map(|(val1, val2)| *val1 + *val2) .collect(), ) } @@ -233,6 +245,8 @@ mod tests { use ark_bn254::Bn254; use ark_ff::UniformRand; + use crate::poly::commitment::dory::vec_operations::G2Vec; + use super::InnerProd; use super::{ @@ -256,7 +270,7 @@ mod tests { let g1v = &[g1a, g1b, g1c]; let g1v: G1Vec = g1v.into(); let g2v = &[g2a, g2b, g2c]; - let g2v = g2v.into(); + let g2v: G2Vec = g2v.into(); let actual = g1v.inner_prod(&g2v).unwrap(); From 96e6ab24aec8523e9f951e7a899cdcba88d1e52e Mon Sep 17 00:00:00 2001 From: Ray Date: Sun, 16 Feb 2025 15:52:54 +0100 Subject: [PATCH 05/10] refactor: use pattern matching and update errors --- jolt-core/src/poly/commitment/dory.rs | 19 +- jolt-core/src/poly/commitment/dory/error.rs | 12 +- jolt-core/src/poly/commitment/dory/params.rs | 84 ++++--- jolt-core/src/poly/commitment/dory/reduce.rs | 226 ++++++++---------- jolt-core/src/poly/commitment/dory/scalar.rs | 16 +- .../poly/commitment/dory/vec_operations.rs | 17 +- 6 files changed, 186 insertions(+), 188 deletions(-) diff --git a/jolt-core/src/poly/commitment/dory.rs b/jolt-core/src/poly/commitment/dory.rs index 665ad5c33..9841cdbe3 100644 --- a/jolt-core/src/poly/commitment/dory.rs +++ b/jolt-core/src/poly/commitment/dory.rs @@ -17,10 +17,7 @@ use crate::utils::errors::ProofVerifyError; use crate::{ field::JoltField, poly::multilinear_polynomial::MultilinearPolynomial, - utils::{ - math::Math, - transcript::{AppendToTranscript, Transcript}, - }, + utils::{math::Math, transcript::Transcript}, }; use super::commitment_scheme::{CommitShape, CommitmentScheme}; @@ -52,10 +49,14 @@ pub 
struct DoryScheme { _data: PhantomData<(P, ProofTranscript)>, } -impl AppendToTranscript for Commitment

{ - fn append_to_transcript(&self, _transcript: &mut ProofTranscript) { - todo!() - } +fn append_gt(transcript: &mut ProofTranscript, gt: Gt

) { + let mut buf = vec![]; + gt.serialize_uncompressed(&mut buf).unwrap(); + // Serialize uncompressed gives the scalar in LE byte order which is not + // a natural representation in the EVM for scalar math so we reverse + // to get an EVM compatible version. + buf = buf.into_iter().rev().collect(); + transcript.append_bytes(&buf); } #[derive(CanonicalDeserialize, CanonicalSerialize)] @@ -137,7 +138,7 @@ where if proof.verify(transcript, setup, *commitment).unwrap() { Ok(()) } else { - todo!() + Err(ProofVerifyError::VerificationFailed) } } diff --git a/jolt-core/src/poly/commitment/dory/error.rs b/jolt-core/src/poly/commitment/dory/error.rs index f9c300354..06090eda4 100644 --- a/jolt-core/src/poly/commitment/dory/error.rs +++ b/jolt-core/src/poly/commitment/dory/error.rs @@ -14,10 +14,18 @@ pub enum Error { "recursive public parameters should be twice as the public parameters it is derived from" )] LengthNotTwice, - #[error("reduce params not initialized")] - ReduceParamsNotInitialized, + + #[error("tried to create a params derived from params single")] + DerivedFromSingle, + #[error("public params is empty")] EmptyPublicParams, + #[error("found a public param with single G1/G2 element")] + ReduceSinglePublicParam, + #[error("single param found with non empty step1 or step2")] + SingleWithNonEmptySteps, + #[error("found multi param with empty step1 or step2")] + MultiParamsWithEmptySteps, #[error("zr zero")] ZrZero, diff --git a/jolt-core/src/poly/commitment/dory/params.rs b/jolt-core/src/poly/commitment/dory/params.rs index c19db4bae..313eac26f 100644 --- a/jolt-core/src/poly/commitment/dory/params.rs +++ b/jolt-core/src/poly/commitment/dory/params.rs @@ -2,7 +2,7 @@ use ark_ec::pairing::Pairing; use ark_ff::UniformRand; use ark_std::rand::Rng; -use super::{vec_operations::InnerProd, Error, G1Vec, G2Vec, Gt, G1, G2}; +use super::{error::GType, vec_operations::InnerProd, Error, G1Vec, G2Vec, Gt, G1, G2}; #[derive(Clone)] pub struct SingleParam { @@ -51,11 +51,7 @@ impl PublicParams { } } - pub fn generate_public_params(rng: &mut impl Rng, mut n: usize) -> Result, Error> - where - G1: UniformRand, - G2: UniformRand, - { + pub fn generate_public_params(rng: &mut impl Rng, mut n: usize) -> Result, Error> { let mut res = Vec::new(); let mut params = Self::new(rng, n)?; while n > 0 { @@ -85,56 +81,56 @@ impl PublicParams { g2v: G2Vec, ) -> Result { let x = g1v.inner_prod(&g2v)?; - // if there's a single element, return a single param - if let ([g1], [g2]) = (&*g1v, &*g2v) { - Ok(Self::Single(SingleParam { + match (&*g1v, &*g2v) { + // if there's a single element, return a single param + ([g1], [g2]) => Ok(Self::Single(SingleParam { g1: *g1, g2: *g2, x, - })) - // else, prepare gamma and delta public params - } else { - let m = g1v.len() / 2; - let gamma_1l: G1Vec = (&g1v[..m]).into(); - let gamma_1r: G1Vec = (&g1v[m..]).into(); - - let gamma_2l: G2Vec = (&g2v[..m]).into(); - let gamma_2r: G2Vec = (&g2v[m..]).into(); - - let gamma_1_prime = G1Vec::random(rng, m); - let gamma_2_prime = G2Vec::random(rng, m); - - let delta_1l = gamma_1l.inner_prod(&gamma_2_prime)?; - let delta_1r = gamma_1r.inner_prod(&gamma_2_prime)?; - let delta_2l = gamma_1_prime.inner_prod(&gamma_2l)?; - let delta_2r = gamma_1_prime.inner_prod(&gamma_2r)?; - - Ok(Self::Multi { - g1v, - g2v, - x, - gamma_1_prime, - gamma_2_prime, - delta_1r, - delta_1l, - delta_2r, - delta_2l, - }) + })), + // else, prepare gamma and delta public params + (a, b) if !a.is_empty() & !b.is_empty() && a.len() == b.len() => { + let m = g1v.len() 
/ 2; + let gamma_1l: G1Vec = (&g1v[..m]).into(); + let gamma_1r: G1Vec = (&g1v[m..]).into(); + + let gamma_2l: G2Vec = (&g2v[..m]).into(); + let gamma_2r: G2Vec = (&g2v[m..]).into(); + + let gamma_1_prime = G1Vec::random(rng, m); + let gamma_2_prime = G2Vec::random(rng, m); + + let delta_1l = gamma_1l.inner_prod(&gamma_2_prime)?; + let delta_1r = gamma_1r.inner_prod(&gamma_2_prime)?; + let delta_2l = gamma_1_prime.inner_prod(&gamma_2l)?; + let delta_2r = gamma_1_prime.inner_prod(&gamma_2r)?; + + Ok(Self::Multi { + g1v, + g2v, + x, + gamma_1_prime, + gamma_2_prime, + delta_1r, + delta_1l, + delta_2r, + delta_2l, + }) + } + ([], _) => Err(Error::EmptyVector(GType::G1)), + (_, []) => Err(Error::EmptyVector(GType::G2)), + (_, _) => Err(Error::LengthMismatch), } } - fn new_derived(&self, rng: &mut impl Rng) -> Result - where - G1: UniformRand, - G2: UniformRand, - { + fn new_derived(&self, rng: &mut impl Rng) -> Result { let Self::Multi { gamma_1_prime, gamma_2_prime, .. } = self else { - panic!() + return Err(Error::DerivedFromSingle); }; let g1v = gamma_1_prime.clone(); diff --git a/jolt-core/src/poly/commitment/dory/reduce.rs b/jolt-core/src/poly/commitment/dory/reduce.rs index 598bc84fd..994fee35d 100644 --- a/jolt-core/src/poly/commitment/dory/reduce.rs +++ b/jolt-core/src/poly/commitment/dory/reduce.rs @@ -11,6 +11,7 @@ use crate::{ }; use super::{ + append_gt, vec_operations::{mul_gt, InnerProd}, Commitment, Error, G1Vec, G2Vec, Gt, PublicParams, ScalarProof, Witness, Zr, G1, G2, }; @@ -35,103 +36,96 @@ where from_prover_2: &[ReduceProverStep2Elements], final_proof: &ScalarProof, ) -> Result { - match public_params { - [] => Err(Error::EmptyPublicParams), - [PublicParams::Single(param)] => final_proof.verify(param, &commitment), - [param1, public_params_rest @ ..] => { - let PublicParams::Multi { + match (public_params, from_prover_1, from_prover_2) { + ([], _, _) => Err(Error::EmptyPublicParams), + ([PublicParams::Single(param)], [], []) => final_proof.verify(param, &commitment), + ([PublicParams::Single(_), ..], _, _) => Err(Error::SingleWithNonEmptySteps), + ([PublicParams::Multi { .. }, ..], [], _) + | ([PublicParams::Multi { .. }, ..], _, []) => Err(Error::MultiParamsWithEmptySteps), + // take the first element of public_params, prover_step_1, prover_step_2 + ( + [PublicParams::Multi { delta_1r, delta_1l, delta_2r, delta_2l, x, .. - } = param1 - else { - panic!() + }, public_params_rest @ ..], + [ReduceProverStep1Elements { + d1l, d1r, d2l, d2r, .. + }, from_prover_1_rest @ ..], + [ReduceProverStep2Elements { + c_plus, c_minus, .. + }, from_prover_2_rest @ ..], + ) => { + let Commitment { c, d1, d2 } = commitment; + + let step_1_element = ReduceProverStep1Elements { + d1l: *d1l, + d1r: *d1r, + d2l: *d2l, + d2r: *d2r, + c, + d1, + d2, }; - match (from_prover_1, from_prover_2) { - ( - [ReduceProverStep1Elements { - d1l, d1r, d2l, d2r, .. - }, from_prover_1_rest @ ..], - [ReduceProverStep2Elements { - c_plus, c_minus, .. 
- }, from_prover_2_rest @ ..], - ) => { - let Commitment { c, d1, d2 } = commitment; - - let step_1_element = ReduceProverStep1Elements { - d1l: *d1l, - d1r: *d1r, - d2l: *d2l, - d2r: *d2r, - c, - d1, - d2, - }; - - // update transcript with step_1_elements - step_1_element.append_to_transcript(transcript); - // Get from Transcript - let betha: Zr = transcript.challenge_scalar(); - - let step_2_element = ReduceProverStep2Elements { - c_plus: *c_plus, - c_minus: *c_minus, - }; - - // update transcript with step_2_elements - step_2_element.append_to_transcript(transcript); - // Get from Transcript - let alpha: Zr = transcript.challenge_scalar(); - let inverse_alpha = JoltField::inverse(&alpha).ok_or(Error::ZrZero)?; - let inverse_betha = JoltField::inverse(&betha).ok_or(Error::ZrZero)?; - - let c_prime = mul_gt(&[ - c, - *x, - d2 * betha, - d1 * inverse_betha, - *c_plus * alpha, - *c_minus * inverse_alpha, - ]) - .expect("slice is not empty"); - - let d1_prime = mul_gt(&[ - *d1l * alpha, - *d1r, - *delta_1l * alpha * betha, - *delta_1r * betha, - ]) - .expect("slice is not empty"); - - let d2_prime = mul_gt(&[ - *d2l * inverse_alpha, - *d2r, - *delta_2l * inverse_alpha * inverse_betha, - *delta_2r * inverse_betha, - ]) - .expect("slice is not empty"); - - let next_commitment = Commitment { - c: c_prime, - d1: d1_prime, - d2: d2_prime, - }; - - Self::verify_recursive( - transcript, - public_params_rest, - next_commitment, - from_prover_1_rest, - from_prover_2_rest, - final_proof, - ) - } - _ => todo!(), - } + // update transcript with step_1_elements + step_1_element.append_to_transcript(transcript); + // Get from Transcript + let betha: Zr = transcript.challenge_scalar(); + + let step_2_element = ReduceProverStep2Elements { + c_plus: *c_plus, + c_minus: *c_minus, + }; + + // update transcript with step_2_elements + step_2_element.append_to_transcript(transcript); + // Get from Transcript + let alpha: Zr = transcript.challenge_scalar(); + + let inverse_alpha = JoltField::inverse(&alpha).ok_or(Error::ZrZero)?; + let inverse_betha = JoltField::inverse(&betha).ok_or(Error::ZrZero)?; + + let c_prime = mul_gt(&[ + c, + *x, + d2 * betha, + d1 * inverse_betha, + *c_plus * alpha, + *c_minus * inverse_alpha, + ]); + + let d1_prime = mul_gt(&[ + *d1l * alpha, + *d1r, + *delta_1l * alpha * betha, + *delta_1r * betha, + ]); + + let d2_prime = mul_gt(&[ + *d2l * inverse_alpha, + *d2r, + *delta_2l * inverse_alpha * inverse_betha, + *delta_2r * inverse_betha, + ]); + + let next_commitment = Commitment { + c: c_prime, + d1: d1_prime, + d2: d2_prime, + }; + + Self::verify_recursive( + transcript, + public_params_rest, + next_commitment, + from_prover_1_rest, + from_prover_2_rest, + final_proof, + ) } } } @@ -181,16 +175,6 @@ impl AppendToTranscript for ReduceProverStep1Elements

{ } } -fn append_gt(transcript: &mut ProofTranscript, gt: Gt

) { - let mut buf = vec![]; - gt.serialize_uncompressed(&mut buf).unwrap(); - // Serialize uncompressed gives the scalar in LE byte order which is not - // a natural representation in the EVM for scalar math so we reverse - // to get an EVM compatible version. - buf = buf.into_iter().rev().collect(); - transcript.append_bytes(&buf); -} - #[derive(Clone, CanonicalDeserialize, CanonicalSerialize)] pub struct ReduceProverStep2Elements { c_plus: Gt, @@ -216,23 +200,17 @@ where G2Vec: Add, Output = G2Vec>, { match params { - [] => unimplemented!(), - [param1, rest_param @ ..] => { - let PublicParams::Multi { - g1v, - g2v, - x, - gamma_1_prime, - gamma_2_prime, - delta_1r, - delta_1l, - delta_2r, - delta_2l, - } = param1 - else { - panic!() - }; - + [PublicParams::Multi { + g1v, + g2v, + x, + gamma_1_prime, + gamma_2_prime, + delta_1r, + delta_1l, + delta_2r, + delta_2l, + }, rest_param @ ..] => { let m = g1v.len() / 2; // P: @@ -261,7 +239,7 @@ where // Get from Transcript let betha: Zr = transcript.challenge_scalar(); - let inverse_betha = JoltField::inverse(&betha).unwrap(); + let inverse_betha = JoltField::inverse(&betha).ok_or(Error::ZrZero)?; // P: let v1 = witness.v1 + (g1v * betha); @@ -281,7 +259,7 @@ where step_2_element.append_to_transcript(transcript); // Get from Transcript let alpha: Zr = transcript.challenge_scalar(); - let inverse_alpha = JoltField::inverse(&alpha).unwrap(); + let inverse_alpha = JoltField::inverse(&alpha).ok_or(Error::ZrZero)?; let v1_prime = v1l * alpha + v1r; let v2_prime = v2l * inverse_alpha + v2r; @@ -291,6 +269,7 @@ where v2: v2_prime, }; + // we return earlier if == 1 since we don't need to calculate the next_commitment if m == 1 { return Ok(DoryProof { from_prover_1: vec![step_1_element], @@ -306,24 +285,21 @@ where d1 * inverse_betha, c_plus * alpha, c_minus * inverse_alpha, - ]) - .unwrap(); + ]); let d1_prime = mul_gt(&[ d1l * alpha, d1r, *delta_1l * alpha * betha, *delta_1r * betha, - ]) - .unwrap(); + ]); let d2_prime = mul_gt(&[ d2l * inverse_alpha, d2r, *delta_2l * inverse_alpha * inverse_betha, *delta_2r * inverse_betha, - ]) - .unwrap(); + ]); let next_commitment = Commitment { c: c_prime, @@ -348,5 +324,7 @@ where final_proof: scalar_product_proof, }) } + [PublicParams::Single(_), ..] => Err(Error::ReduceSinglePublicParam), + [] => Err(Error::EmptyPublicParams), } } diff --git a/jolt-core/src/poly/commitment/dory/scalar.rs b/jolt-core/src/poly/commitment/dory/scalar.rs index c66bd2b57..d70c66d84 100644 --- a/jolt-core/src/poly/commitment/dory/scalar.rs +++ b/jolt-core/src/poly/commitment/dory/scalar.rs @@ -6,10 +6,14 @@ use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; use rand::thread_rng; use crate::{ - field::JoltField, msm::VariableBaseMSM, poly::multilinear_polynomial::MultilinearPolynomial, + field::JoltField, + msm::VariableBaseMSM, + poly::multilinear_polynomial::MultilinearPolynomial, + utils::transcript::{AppendToTranscript, Transcript}, }; use super::{ + append_gt, params::SingleParam, vec_operations::{e, mul_gt, InnerProd}, Error, G1Vec, G2Vec, Gt, PublicParams, Zr, G1, G2, @@ -59,6 +63,14 @@ pub struct Commitment { pub d2: Gt, } +impl AppendToTranscript for Commitment

{ + fn append_to_transcript(&self, transcript: &mut ProofTranscript) { + append_gt(transcript, self.c); + append_gt(transcript, self.d1); + append_gt(transcript, self.d2); + } +} + pub fn commit( Witness { v1, v2 }: Witness, public_params: &PublicParams, @@ -105,7 +117,7 @@ impl ScalarProof { let g2 = G2Vec::::from(&[self.e2, pp.g2 * d_inv]).sum(); let left_eq = e(g1, g2); - let right_eq = mul_gt(&[pp.x, *c, *d2 * d, *d1 * d_inv]).expect("has more than one item"); + let right_eq = mul_gt(&[pp.x, *c, *d2 * d, *d1 * d_inv]); Ok(left_eq == right_eq) } diff --git a/jolt-core/src/poly/commitment/dory/vec_operations.rs b/jolt-core/src/poly/commitment/dory/vec_operations.rs index 4d0350937..8786cc6c5 100644 --- a/jolt-core/src/poly/commitment/dory/vec_operations.rs +++ b/jolt-core/src/poly/commitment/dory/vec_operations.rs @@ -1,4 +1,7 @@ -use std::ops::{Add, Deref, Mul}; +use std::{ + iter::Sum, + ops::{Add, Deref, Mul}, +}; use ark_ec::{pairing::Pairing, Group}; use ark_ff::UniformRand; @@ -14,11 +17,11 @@ pub fn e(g1: G1, g2: G2) -> Gt { Curve::pairing(g1, g2) } -pub fn mul_gt(gts: &[Gt]) -> Option> { - gts.iter().fold(None, |prev, curr| match prev { - Some(prev) => Some(curr + prev), - None => Some(*curr), - }) +pub fn mul_gt(gts: &[Gt]) -> Gt +where + Gt: Sum, +{ + gts.iter().sum() } pub trait InnerProd { @@ -265,7 +268,7 @@ mod tests { let g2b = G2::::rand(&mut rng); let g2c = G2::::rand(&mut rng); - let expected = mul_gt(&[e(g1a, g2a), e(g1b, g2b), e(g1c, g2c)]).unwrap(); + let expected = mul_gt(&[e(g1a, g2a), e(g1b, g2b), e(g1c, g2c)]); let g1v = &[g1a, g1b, g1c]; let g1v: G1Vec = g1v.into(); From aec6462115f21fdfcb43e34d693c4362ef26b37f Mon Sep 17 00:00:00 2001 From: Ray Date: Sun, 16 Feb 2025 15:55:53 +0100 Subject: [PATCH 06/10] refactor: use iterator for getting max len --- jolt-core/src/poly/commitment/dory.rs | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/jolt-core/src/poly/commitment/dory.rs b/jolt-core/src/poly/commitment/dory.rs index 9841cdbe3..941b8b4a7 100644 --- a/jolt-core/src/poly/commitment/dory.rs +++ b/jolt-core/src/poly/commitment/dory.rs @@ -83,13 +83,11 @@ where fn setup(shapes: &[CommitShape]) -> Self::Setup { // Dory's setup procedure initializes - let mut max_len: usize = 0; - for shape in shapes { - let len = shape.input_length.log_2(); - if len > max_len { - max_len = len; - } - } + let max_len = shapes + .iter() + .map(|shape| shape.input_length.log_2()) + .max() + .unwrap(); let mut rng = ark_std::rand::thread_rng(); PublicParams::generate_public_params(&mut rng, max_len) .expect("Length must be greater than 0") From 703bf81b6fbfd5a90b3c1123264965bff1e7edd4 Mon Sep 17 00:00:00 2001 From: Ray Date: Sun, 16 Feb 2025 16:04:45 +0100 Subject: [PATCH 07/10] refactor: use binding to get prover step 2 elements --- jolt-core/src/poly/commitment/dory/reduce.rs | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/jolt-core/src/poly/commitment/dory/reduce.rs b/jolt-core/src/poly/commitment/dory/reduce.rs index 994fee35d..a35bcf4f9 100644 --- a/jolt-core/src/poly/commitment/dory/reduce.rs +++ b/jolt-core/src/poly/commitment/dory/reduce.rs @@ -55,7 +55,7 @@ where [ReduceProverStep1Elements { d1l, d1r, d2l, d2r, .. }, from_prover_1_rest @ ..], - [ReduceProverStep2Elements { + [step_2_element @ ReduceProverStep2Elements { c_plus, c_minus, .. 
}, from_prover_2_rest @ ..], ) => { @@ -76,11 +76,6 @@ where // Get from Transcript let betha: Zr = transcript.challenge_scalar(); - let step_2_element = ReduceProverStep2Elements { - c_plus: *c_plus, - c_minus: *c_minus, - }; - // update transcript with step_2_elements step_2_element.append_to_transcript(transcript); // Get from Transcript From 054aa33afafbd1a6ebf6abe01d48bc48b86a3707 Mon Sep 17 00:00:00 2001 From: Ray Date: Sun, 16 Feb 2025 16:10:27 +0100 Subject: [PATCH 08/10] refactor: relax constraints --- jolt-core/src/poly/commitment/dory/params.rs | 22 ++++---- jolt-core/src/poly/commitment/dory/reduce.rs | 52 +++++++++++++------ jolt-core/src/poly/commitment/dory/scalar.rs | 48 +++++++++++------ .../poly/commitment/dory/vec_operations.rs | 10 +--- 4 files changed, 83 insertions(+), 49 deletions(-) diff --git a/jolt-core/src/poly/commitment/dory/params.rs b/jolt-core/src/poly/commitment/dory/params.rs index 313eac26f..ca1640024 100644 --- a/jolt-core/src/poly/commitment/dory/params.rs +++ b/jolt-core/src/poly/commitment/dory/params.rs @@ -1,18 +1,23 @@ use ark_ec::pairing::Pairing; -use ark_ff::UniformRand; use ark_std::rand::Rng; use super::{error::GType, vec_operations::InnerProd, Error, G1Vec, G2Vec, Gt, G1, G2}; #[derive(Clone)] -pub struct SingleParam { +pub struct SingleParam

+where
+    P: Pairing,
+{
     pub g1: G1<P>,
     pub g2: G2<P>,
     pub x: Gt<P>,
 }
 
 #[derive(Clone)]
-pub enum PublicParams {
+pub enum PublicParams<P>
+where
+    P: Pairing,
+{
     Single(SingleParam<P>
), Multi { @@ -30,7 +35,10 @@ pub enum PublicParams { }, } -impl PublicParams { +impl PublicParams +where + Curve: Pairing, +{ pub fn g1v(&self) -> Vec> { match self { PublicParams::Single(SingleParam { g1, .. }) => vec![*g1], @@ -65,11 +73,7 @@ impl PublicParams { Ok(res) } - pub fn new(rng: &mut impl Rng, n: usize) -> Result - where - G1: UniformRand, - G2: UniformRand, - { + pub fn new(rng: &mut impl Rng, n: usize) -> Result { let g1v = G1Vec::random(rng, n); let g2v = G2Vec::random(rng, n); Self::params_with_provided_g(rng, g1v, g2v) diff --git a/jolt-core/src/poly/commitment/dory/reduce.rs b/jolt-core/src/poly/commitment/dory/reduce.rs index a35bcf4f9..358e15afa 100644 --- a/jolt-core/src/poly/commitment/dory/reduce.rs +++ b/jolt-core/src/poly/commitment/dory/reduce.rs @@ -18,24 +18,31 @@ use super::{ /// Proof #[derive(Clone, CanonicalDeserialize, CanonicalSerialize)] -pub struct DoryProof { +pub struct DoryProof +where + Curve: Pairing, +{ pub from_prover_1: Vec>, pub from_prover_2: Vec>, pub final_proof: ScalarProof, } -impl DoryProof +impl DoryProof where + Curve: Pairing, Curve::ScalarField: JoltField, { - fn verify_recursive( + fn verify_recursive( transcript: &mut ProofTranscript, public_params: &[PublicParams], commitment: Commitment, from_prover_1: &[ReduceProverStep1Elements], from_prover_2: &[ReduceProverStep2Elements], final_proof: &ScalarProof, - ) -> Result { + ) -> Result + where + ProofTranscript: Transcript, + { match (public_params, from_prover_1, from_prover_2) { ([], _, _) => Err(Error::EmptyPublicParams), ([PublicParams::Single(param)], [], []) => final_proof.verify(param, &commitment), @@ -125,16 +132,14 @@ where } } - pub fn verify( + pub fn verify( &self, transcript: &mut ProofTranscript, public_params: &[PublicParams], commitment: Commitment, ) -> Result where - Gt: Mul, Output = Gt>, - G1: Mul, Output = G1>, - G2: Mul, Output = G2>, + ProofTranscript: Transcript, { Self::verify_recursive( transcript, @@ -148,7 +153,10 @@ where } #[derive(Clone, CanonicalDeserialize, CanonicalSerialize)] -pub struct ReduceProverStep1Elements { +pub struct ReduceProverStep1Elements +where + Curve: Pairing, +{ d1l: Gt, d1r: Gt, d2l: Gt, @@ -158,8 +166,14 @@ pub struct ReduceProverStep1Elements { d2: Gt, } -impl AppendToTranscript for ReduceProverStep1Elements

{ - fn append_to_transcript(&self, transcript: &mut ProofTranscript) { +impl

AppendToTranscript for ReduceProverStep1Elements

+where + P: Pairing, +{ + fn append_to_transcript(&self, transcript: &mut ProofTranscript) + where + ProofTranscript: Transcript, + { append_gt(transcript, self.d1l); append_gt(transcript, self.d1r); append_gt(transcript, self.d2l); @@ -176,23 +190,29 @@ pub struct ReduceProverStep2Elements { c_minus: Gt, } -impl AppendToTranscript for ReduceProverStep2Elements

{ - fn append_to_transcript(&self, transcript: &mut ProofTranscript) { +impl

AppendToTranscript for ReduceProverStep2Elements

+where + P: Pairing, +{ + fn append_to_transcript(&self, transcript: &mut ProofTranscript) + where + ProofTranscript: Transcript, + { append_gt(transcript, self.c_plus); append_gt(transcript, self.c_minus); } } -pub fn reduce( +pub fn reduce( transcript: &mut ProofTranscript, params: &[PublicParams], witness: Witness, Commitment { c, d1, d2 }: Commitment, ) -> Result, Error> where + Curve: Pairing, Curve::ScalarField: JoltField, - G1Vec: Add, Output = G1Vec>, - G2Vec: Add, Output = G2Vec>, + ProofTranscript: Transcript, { match params { [PublicParams::Multi { diff --git a/jolt-core/src/poly/commitment/dory/scalar.rs b/jolt-core/src/poly/commitment/dory/scalar.rs index d70c66d84..0be3b0e70 100644 --- a/jolt-core/src/poly/commitment/dory/scalar.rs +++ b/jolt-core/src/poly/commitment/dory/scalar.rs @@ -21,13 +21,17 @@ use super::{ /// Witness over set Zr #[derive(Clone)] -pub struct Witness { +pub struct Witness +where + Curve: Pairing, +{ pub v1: G1Vec, pub v2: G2Vec, } -impl Witness

+impl

Witness

where + P: Pairing, P::G1: VariableBaseMSM, P::ScalarField: JoltField, { @@ -57,24 +61,36 @@ where } #[derive(Clone, Copy, CanonicalSerialize, CanonicalDeserialize, Debug, Default, PartialEq, Eq)] -pub struct Commitment { +pub struct Commitment +where + Curve: Pairing, +{ pub c: Gt, pub d1: Gt, pub d2: Gt, } -impl AppendToTranscript for Commitment

{ - fn append_to_transcript(&self, transcript: &mut ProofTranscript) { +impl

AppendToTranscript for Commitment

+where + P: Pairing, +{ + fn append_to_transcript(&self, transcript: &mut ProofTranscript) + where + ProofTranscript: Transcript, + { append_gt(transcript, self.c); append_gt(transcript, self.d1); append_gt(transcript, self.d2); } } -pub fn commit( +pub fn commit( Witness { v1, v2 }: Witness, public_params: &PublicParams, -) -> Result, Error> { +) -> Result, Error> +where + Curve: Pairing, +{ let d1 = v1.inner_prod(&public_params.g2v())?; let d2 = public_params.g1v().inner_prod(&v2)?; let c = v1.inner_prod(&v2)?; @@ -84,12 +100,18 @@ pub fn commit( } #[derive(Clone, CanonicalDeserialize, CanonicalSerialize)] -pub struct ScalarProof { +pub struct ScalarProof +where + Curve: Pairing, +{ e1: G1, e2: G2, } -impl ScalarProof { +impl ScalarProof +where + Curve: Pairing, +{ pub fn new(witness: Witness) -> Self { Self { e1: witness.v1[0], @@ -101,13 +123,7 @@ impl ScalarProof { &self, pp: &SingleParam, Commitment { c, d1, d2 }: &Commitment, - ) -> Result - where - for<'c> &'c G1Vec: Mul, Output = G1Vec>, - G1: Mul, Output = G1>, - G2: Mul, Output = G2>, - Gt: Mul, Output = Gt>, - { + ) -> Result { let mut rng = thread_rng(); let d: Zr = Zr::::rand(&mut rng); let d_inv = d.inverse().ok_or(Error::CouldntInvertD)?; diff --git a/jolt-core/src/poly/commitment/dory/vec_operations.rs b/jolt-core/src/poly/commitment/dory/vec_operations.rs index 8786cc6c5..542127993 100644 --- a/jolt-core/src/poly/commitment/dory/vec_operations.rs +++ b/jolt-core/src/poly/commitment/dory/vec_operations.rs @@ -68,10 +68,7 @@ impl G1Vec { self.iter().sum() } - pub fn random(rng: &mut impl Rng, n: usize) -> Self - where - G1: UniformRand, - { + pub fn random(rng: &mut impl Rng, n: usize) -> Self { Self( (0..n) .map(|_| { @@ -160,10 +157,7 @@ impl G2Vec { self.iter().sum() } - pub fn random(rng: &mut impl Rng, n: usize) -> Self - where - G2: UniformRand, - { + pub fn random(rng: &mut impl Rng, n: usize) -> Self { Self( (0..n) .map(|_| { From f948b2dff0504a123530e26766ef214fa74a3926 Mon Sep 17 00:00:00 2001 From: Ray Date: Sun, 16 Feb 2025 17:04:04 +0100 Subject: [PATCH 09/10] refactor: rename to look closer to book --- jolt-core/src/poly/commitment/dory.rs | 27 +- jolt-core/src/poly/commitment/dory/params.rs | 95 ++++-- jolt-core/src/poly/commitment/dory/reduce.rs | 271 +++++++++--------- jolt-core/src/poly/commitment/dory/scalar.rs | 28 +- jolt-core/src/poly/commitment/dory/tests.rs | 13 +- .../poly/commitment/dory/vec_operations.rs | 21 +- 6 files changed, 246 insertions(+), 209 deletions(-) diff --git a/jolt-core/src/poly/commitment/dory.rs b/jolt-core/src/poly/commitment/dory.rs index 941b8b4a7..86d7cd853 100644 --- a/jolt-core/src/poly/commitment/dory.rs +++ b/jolt-core/src/poly/commitment/dory.rs @@ -2,6 +2,7 @@ use std::{marker::PhantomData, ops::Mul}; use ark_ec::pairing::{Pairing, PairingOutput}; use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; +use ark_std::Zero; use error::Error; use params::PublicParams; use rayon::iter::IntoParallelIterator; @@ -13,6 +14,8 @@ use scalar::{commit, Commitment, Witness}; use vec_operations::{G1Vec, G2Vec}; use crate::msm::VariableBaseMSM; +use crate::poly::dense_mlpoly::DensePolynomial; +use crate::poly::multilinear_polynomial::PolynomialEvaluation; use crate::utils::errors::ProofVerifyError; use crate::{ field::JoltField, @@ -113,9 +116,13 @@ where fn prove( setup: &Self::Setup, poly: &MultilinearPolynomial, - _opening_point: &[Self::Field], // point at which the polynomial is evaluated + opening_point: &[Self::Field], // point at which the polynomial is evaluated 
transcript: &mut ProofTranscript, ) -> Self::Proof { + //let f: &DensePolynomial = poly.try_into().unwrap(); + //let h = compute_witness_polynomial::

(&f.evals(), opening_point); + // + //let a = poly.evaluate(_opening_point); let public_param = setup.first().unwrap(); let witness = Witness::new(public_param, poly); @@ -144,3 +151,21 @@ where b"dory" } } + +fn compute_witness_polynomial( + f: &[P::ScalarField], + u: P::ScalarField, +) -> Vec +where +

::ScalarField: JoltField, +{ + let d = f.len(); + + // Compute h(x) = f(x)/(x - u) + let mut h = vec![P::ScalarField::zero(); d]; + for i in (1..d).rev() { + h[i - 1] = f[i] + h[i] * u; + } + + h +} diff --git a/jolt-core/src/poly/commitment/dory/params.rs b/jolt-core/src/poly/commitment/dory/params.rs index ca1640024..e393ccb14 100644 --- a/jolt-core/src/poly/commitment/dory/params.rs +++ b/jolt-core/src/poly/commitment/dory/params.rs @@ -8,9 +8,18 @@ pub struct SingleParam

 where
     P: Pairing,
 {
+    /// random g1 generator
+    ///
+    /// only known by the **prover**
     pub g1: G1<P>,
+    /// random g2 generator
+    ///
+    /// only known by the **prover**
     pub g2: G2<P>,
-    pub x: Gt<P>,
+    /// commitment of <g1, g2> (inner product)
+    ///
+    /// known by the **verifier**
+    pub c: Gt<P>,
 }
 
 #[derive(Clone)]
@@ -21,17 +30,48 @@ where
     Single(SingleParam<P>),
     Multi {
+        /// random vec of generators of g1
+        ///
+        /// only known by the **prover**
         g1v: G1Vec<P>,
+        /// random vec of generators of g2
+        ///
+        /// only known by the **prover**
         g2v: G2Vec<P>,
-        x: Gt<P>,
-        gamma_1_prime: G1Vec<P>,
-        gamma_2_prime: G2Vec<P>,
-
-        delta_1r: Gt<P>,
+        /// random vec of generators of g1 that contains half of len(g1v) and is used to
+        /// calculate deltas
+        ///
+        /// only known by the **prover**
+        gamma_1: G1Vec<P>,
+        /// random vec of generators of g2 that contains half of len(g2v) and is used to
+        /// calculate deltas
+        ///
+        /// only known by the **prover**
+        gamma_2: G2Vec<P>,
+
+        /// commitment of <g1v, g2v> (inner product)
+        ///
+        /// known by the **verifier**
+        c: Gt<P>,
+
+        /// commitment of <g1l, gamma_2> (inner product)
+        ///
+        /// known by the **verifier**
        delta_1l: Gt<P>,
-        delta_2r: Gt<P>,
+        /// commitment of <g1r, gamma_2> (inner product)
+        ///
+        /// known by the **verifier**
+        delta_1r: Gt<P>,
+
+        /// commitment of <gamma_1, g2l> (inner product)
+        ///
+        /// known by the **verifier**
        delta_2l: Gt<P>,
+        /// commitment of <gamma_1, g2r> (inner product)
+        ///
+        /// known by the **verifier**
+        delta_2r: Gt<P>
, }, } @@ -55,7 +95,7 @@ where pub fn x(&self) -> &Gt { match self { - PublicParams::Single(SingleParam { x, .. }) | PublicParams::Multi { x, .. } => x, + PublicParams::Single(SingleParam { c: x, .. }) | PublicParams::Multi { c: x, .. } => x, } } @@ -84,37 +124,38 @@ where g1v: G1Vec, g2v: G2Vec, ) -> Result { - let x = g1v.inner_prod(&g2v)?; + let c = g1v.inner_prod(&g2v)?; match (&*g1v, &*g2v) { // if there's a single element, return a single param ([g1], [g2]) => Ok(Self::Single(SingleParam { g1: *g1, g2: *g2, - x, + c, })), // else, prepare gamma and delta public params (a, b) if !a.is_empty() & !b.is_empty() && a.len() == b.len() => { let m = g1v.len() / 2; - let gamma_1l: G1Vec = (&g1v[..m]).into(); - let gamma_1r: G1Vec = (&g1v[m..]).into(); - let gamma_2l: G2Vec = (&g2v[..m]).into(); - let gamma_2r: G2Vec = (&g2v[m..]).into(); + let g1l: G1Vec = (&g1v[..m]).into(); + let g1r: G1Vec = (&g1v[m..]).into(); + + let g2l: G2Vec = (&g2v[..m]).into(); + let g2r: G2Vec = (&g2v[m..]).into(); - let gamma_1_prime = G1Vec::random(rng, m); - let gamma_2_prime = G2Vec::random(rng, m); + let gamma_1 = G1Vec::random(rng, m); + let gamma_2 = G2Vec::random(rng, m); - let delta_1l = gamma_1l.inner_prod(&gamma_2_prime)?; - let delta_1r = gamma_1r.inner_prod(&gamma_2_prime)?; - let delta_2l = gamma_1_prime.inner_prod(&gamma_2l)?; - let delta_2r = gamma_1_prime.inner_prod(&gamma_2r)?; + let delta_1l = g1l.inner_prod(&gamma_2)?; + let delta_1r = g1r.inner_prod(&gamma_2)?; + let delta_2l = gamma_1.inner_prod(&g2l)?; + let delta_2r = gamma_1.inner_prod(&g2r)?; Ok(Self::Multi { g1v, g2v, - x, - gamma_1_prime, - gamma_2_prime, + c, + gamma_1, + gamma_2, delta_1r, delta_1l, delta_2r, @@ -129,16 +170,14 @@ where fn new_derived(&self, rng: &mut impl Rng) -> Result { let Self::Multi { - gamma_1_prime, - gamma_2_prime, - .. + gamma_1, gamma_2, .. 
} = self else { return Err(Error::DerivedFromSingle); }; - let g1v = gamma_1_prime.clone(); - let g2v = gamma_2_prime.clone(); + let g1v = gamma_1.clone(); + let g2v = gamma_2.clone(); Self::params_with_provided_g(rng, g1v, g2v) } } diff --git a/jolt-core/src/poly/commitment/dory/reduce.rs b/jolt-core/src/poly/commitment/dory/reduce.rs index 358e15afa..92a14536c 100644 --- a/jolt-core/src/poly/commitment/dory/reduce.rs +++ b/jolt-core/src/poly/commitment/dory/reduce.rs @@ -1,7 +1,5 @@ #![allow(dead_code)] -use std::ops::{Add, Mul}; - use ark_ec::pairing::Pairing; use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; @@ -11,9 +9,7 @@ use crate::{ }; use super::{ - append_gt, - vec_operations::{mul_gt, InnerProd}, - Commitment, Error, G1Vec, G2Vec, Gt, PublicParams, ScalarProof, Witness, Zr, G1, G2, + append_gt, Commitment, Error, G1Vec, G2Vec, Gt, PublicParams, ScalarProof, Witness, Zr, }; /// Proof @@ -22,8 +18,10 @@ pub struct DoryProof where Curve: Pairing, { - pub from_prover_1: Vec>, - pub from_prover_2: Vec>, + pub from_prover: Vec<( + ReduceProverRound1Elements, + ReduceProverRound2Elements, + )>, pub final_proof: ScalarProof, } @@ -35,20 +33,21 @@ where fn verify_recursive( transcript: &mut ProofTranscript, public_params: &[PublicParams], - commitment: Commitment, - from_prover_1: &[ReduceProverStep1Elements], - from_prover_2: &[ReduceProverStep2Elements], + commitment @ Commitment { c, d1, d2 }: Commitment, + from_prover: &[( + ReduceProverRound1Elements, + ReduceProverRound2Elements, + )], final_proof: &ScalarProof, ) -> Result where ProofTranscript: Transcript, { - match (public_params, from_prover_1, from_prover_2) { - ([], _, _) => Err(Error::EmptyPublicParams), - ([PublicParams::Single(param)], [], []) => final_proof.verify(param, &commitment), - ([PublicParams::Single(_), ..], _, _) => Err(Error::SingleWithNonEmptySteps), - ([PublicParams::Multi { .. }, ..], [], _) - | ([PublicParams::Multi { .. }, ..], _, []) => Err(Error::MultiParamsWithEmptySteps), + match (public_params, from_prover) { + ([], _) => Err(Error::EmptyPublicParams), + ([PublicParams::Single(param)], []) => final_proof.verify(param, &commitment), + ([PublicParams::Single(_), ..], _) => Err(Error::SingleWithNonEmptySteps), + ([PublicParams::Multi { .. }, ..], []) => Err(Error::MultiParamsWithEmptySteps), // take the first element of public_params, prover_step_1, prover_step_2 ( [PublicParams::Multi { @@ -56,63 +55,64 @@ where delta_1l, delta_2r, delta_2l, - x, + c: c_g, .. }, public_params_rest @ ..], - [ReduceProverStep1Elements { - d1l, d1r, d2l, d2r, .. - }, from_prover_1_rest @ ..], - [step_2_element @ ReduceProverStep2Elements { - c_plus, c_minus, .. 
- }, from_prover_2_rest @ ..], + [( + step_1_element @ ReduceProverRound1Elements { + d1l: v1l, + d1r: v1r, + d2l: v2l, + d2r: v2r, + }, + step_2_element @ ReduceProverRound2Elements { + vl: c_plus, + vr: c_minus, + }, + ), from_prover_rest @ ..], ) => { - let Commitment { c, d1, d2 } = commitment; - - let step_1_element = ReduceProverStep1Elements { - d1l: *d1l, - d1r: *d1r, - d2l: *d2l, - d2r: *d2r, - c, - d1, - d2, - }; - // update transcript with step_1_elements + commitment.append_to_transcript(transcript); step_1_element.append_to_transcript(transcript); // Get from Transcript - let betha: Zr = transcript.challenge_scalar(); + let alpha_1: Zr = transcript.challenge_scalar(); // update transcript with step_2_elements step_2_element.append_to_transcript(transcript); // Get from Transcript - let alpha: Zr = transcript.challenge_scalar(); + let alpha_2: Zr = transcript.challenge_scalar(); - let inverse_alpha = JoltField::inverse(&alpha).ok_or(Error::ZrZero)?; - let inverse_betha = JoltField::inverse(&betha).ok_or(Error::ZrZero)?; + let inverse_alpha_1 = JoltField::inverse(&alpha_1).ok_or(Error::ZrZero)?; + let inverse_alpha_2 = JoltField::inverse(&alpha_2).ok_or(Error::ZrZero)?; - let c_prime = mul_gt(&[ + let c_prime = [ c, - *x, - d2 * betha, - d1 * inverse_betha, - *c_plus * alpha, - *c_minus * inverse_alpha, - ]); - - let d1_prime = mul_gt(&[ - *d1l * alpha, - *d1r, - *delta_1l * alpha * betha, - *delta_1r * betha, - ]); - - let d2_prime = mul_gt(&[ - *d2l * inverse_alpha, - *d2r, - *delta_2l * inverse_alpha * inverse_betha, - *delta_2r * inverse_betha, - ]); + *c_g, + d2 * alpha_1, + d1 * inverse_alpha_1, + *c_plus * alpha_2, + *c_minus * inverse_alpha_2, + ] + .iter() + .sum(); + + let d1_prime = [ + *v1l * alpha_2, + *v1r, + *delta_1l * alpha_2 * alpha_1, + *delta_1r * alpha_1, + ] + .iter() + .sum(); + + let d2_prime = [ + *v2l * inverse_alpha_2, + *v2r, + *delta_2l * inverse_alpha_2 * inverse_alpha_1, + *delta_2r * inverse_alpha_1, + ] + .iter() + .sum(); let next_commitment = Commitment { c: c_prime, @@ -124,8 +124,7 @@ where transcript, public_params_rest, next_commitment, - from_prover_1_rest, - from_prover_2_rest, + from_prover_rest, final_proof, ) } @@ -145,15 +144,14 @@ where transcript, public_params, commitment, - &self.from_prover_1, - &self.from_prover_2, + &self.from_prover, &self.final_proof, ) } } #[derive(Clone, CanonicalDeserialize, CanonicalSerialize)] -pub struct ReduceProverStep1Elements +pub struct ReduceProverRound1Elements where Curve: Pairing, { @@ -161,12 +159,9 @@ where d1r: Gt, d2l: Gt, d2r: Gt, - c: Gt, - d1: Gt, - d2: Gt, } -impl

AppendToTranscript for ReduceProverStep1Elements

+impl

AppendToTranscript for ReduceProverRound1Elements

where P: Pairing, { @@ -178,19 +173,16 @@ where append_gt(transcript, self.d1r); append_gt(transcript, self.d2l); append_gt(transcript, self.d2r); - append_gt(transcript, self.c); - append_gt(transcript, self.d1); - append_gt(transcript, self.d2); } } #[derive(Clone, CanonicalDeserialize, CanonicalSerialize)] -pub struct ReduceProverStep2Elements { - c_plus: Gt, - c_minus: Gt, +pub struct ReduceProverRound2Elements { + vl: Gt, + vr: Gt, } -impl

AppendToTranscript for ReduceProverStep2Elements

+impl

AppendToTranscript for ReduceProverRound2Elements

where P: Pairing, { @@ -198,29 +190,29 @@ where where ProofTranscript: Transcript, { - append_gt(transcript, self.c_plus); - append_gt(transcript, self.c_minus); + append_gt(transcript, self.vl); + append_gt(transcript, self.vr); } } -pub fn reduce( +pub fn reduce( transcript: &mut ProofTranscript, - params: &[PublicParams], - witness: Witness, - Commitment { c, d1, d2 }: Commitment, -) -> Result, Error> + params: &[PublicParams

], + witness: Witness

, + commitment @ Commitment { c, d1, d2 }: Commitment

, +) -> Result, Error> where - Curve: Pairing, - Curve::ScalarField: JoltField, + P: Pairing, + P::ScalarField: JoltField, ProofTranscript: Transcript, { match params { [PublicParams::Multi { g1v, g2v, - x, - gamma_1_prime, - gamma_2_prime, + c: c_g, + gamma_1, + gamma_2, delta_1r, delta_1l, delta_2r, @@ -229,92 +221,92 @@ where let m = g1v.len() / 2; // P: - let v1l: G1Vec = (&witness.v1[..m]).into(); - let v1r: G1Vec = (&witness.v1[m..]).into(); - let v2l: G2Vec = (&witness.v2[..m]).into(); - let v2r: G2Vec = (&witness.v2[m..]).into(); + let u1l = &witness.u1[..m]; + let u1r = &witness.u1[m..]; + + let u2l = &witness.u2[..m]; + let u2r = &witness.u2[m..]; // P --> V: - let d1l = v1l.inner_prod(gamma_2_prime)?; - let d1r = v1r.inner_prod(gamma_2_prime)?; - let d2l = gamma_1_prime.inner_prod(&v2l)?; - let d2r = gamma_1_prime.inner_prod(&v2r)?; + let d1l = P::multi_pairing(u1l, gamma_2.as_ref()); + let d1r = P::multi_pairing(u1r, gamma_2.as_ref()); - let step_1_element = ReduceProverStep1Elements { - d1l, - d1r, - d2l, - d2r, - c, - d1, - d2, - }; + let d2l = P::multi_pairing(gamma_1.as_ref(), u2l); + let d2r = P::multi_pairing(gamma_1.as_ref(), u2r); + + let step_1_element = ReduceProverRound1Elements { d1l, d1r, d2l, d2r }; // update transcript with step 1 element + commitment.append_to_transcript(transcript); step_1_element.append_to_transcript(transcript); // Get from Transcript - let betha: Zr = transcript.challenge_scalar(); + let betha: Zr

= transcript.challenge_scalar(); + let inverse_betha = JoltField::inverse(&betha).ok_or(Error::ZrZero)?; // P: - let v1 = witness.v1 + (g1v * betha); - let v2 = witness.v2 + (g2v * inverse_betha); + let w1: G1Vec

= witness.u1 + (g1v * betha); + let w2: G2Vec

= witness.u2 + (g2v * inverse_betha); - let v1l: G1Vec = v1[..m].to_vec().into(); - let v1r: G1Vec = v1[m..].to_vec().into(); - let v2l: G2Vec = v2[..m].to_vec().into(); - let v2r: G2Vec = v2[m..].to_vec().into(); + let w1l = &w1[..m]; + let w1r = &w1[m..]; + let w2l = &w2[..m]; + let w2r = &w2[m..]; // P --> V: - let c_plus = v1l.inner_prod(&v2r)?; - let c_minus = v1r.inner_prod(&v2l)?; - - let step_2_element = ReduceProverStep2Elements { c_plus, c_minus }; + let vl = P::multi_pairing(w1l, w2r); + let vr = P::multi_pairing(w1r, w2l); + let step_2_element = ReduceProverRound2Elements { vl, vr }; // update transcript with step 2 elements step_2_element.append_to_transcript(transcript); + // Get from Transcript - let alpha: Zr = transcript.challenge_scalar(); + let alpha: Zr

= transcript.challenge_scalar(); let inverse_alpha = JoltField::inverse(&alpha).ok_or(Error::ZrZero)?; - let v1_prime = v1l * alpha + v1r; - let v2_prime = v2l * inverse_alpha + v2r; + let u1_prime = G1Vec::from(w1l) * alpha + G1Vec::from(w1r); + let u2_prime = G2Vec::from(w2l) * inverse_alpha + G2Vec::from(w2r); let next_witness = Witness { - v1: v1_prime, - v2: v2_prime, + u1: u1_prime, + u2: u2_prime, }; - // we return earlier if == 1 since we don't need to calculate the next_commitment if m == 1 { return Ok(DoryProof { - from_prover_1: vec![step_1_element], - from_prover_2: vec![step_2_element], + from_prover: vec![(step_1_element, step_2_element)], final_proof: ScalarProof::new(next_witness), }); } - let c_prime = mul_gt(&[ + let c_prime = [ c, - *x, + *c_g, d2 * betha, d1 * inverse_betha, - c_plus * alpha, - c_minus * inverse_alpha, - ]); + vl * alpha, + vr * inverse_alpha, + ] + .iter() + .sum(); - let d1_prime = mul_gt(&[ + let d1_prime = [ d1l * alpha, d1r, *delta_1l * alpha * betha, *delta_1r * betha, - ]); + ] + .iter() + .sum(); - let d2_prime = mul_gt(&[ + let d2_prime = [ d2l * inverse_alpha, d2r, *delta_2l * inverse_alpha * inverse_betha, *delta_2r * inverse_betha, - ]); + ] + .iter() + .sum(); let next_commitment = Commitment { c: c_prime, @@ -323,22 +315,19 @@ where }; let DoryProof { - from_prover_1: step_1_elements, - from_prover_2: step_2_elements, + from_prover: step_elements, final_proof: scalar_product_proof, } = reduce(transcript, rest_param, next_witness, next_commitment)?; - let mut from_prover_1 = vec![step_1_element]; - from_prover_1.extend(step_1_elements); - let mut from_prover_2 = vec![step_2_element]; - from_prover_2.extend(step_2_elements); + let mut from_prover = vec![(step_1_element, step_2_element)]; + from_prover.extend(step_elements); Ok(DoryProof { - from_prover_1, - from_prover_2, + from_prover, final_proof: scalar_product_proof, }) } + // Send u, g and gamma [PublicParams::Single(_), ..] => Err(Error::ReduceSinglePublicParam), [] => Err(Error::EmptyPublicParams), } diff --git a/jolt-core/src/poly/commitment/dory/scalar.rs b/jolt-core/src/poly/commitment/dory/scalar.rs index 0be3b0e70..d3197ea2b 100644 --- a/jolt-core/src/poly/commitment/dory/scalar.rs +++ b/jolt-core/src/poly/commitment/dory/scalar.rs @@ -1,5 +1,3 @@ -use std::ops::Mul; - use ark_ec::pairing::Pairing; use ark_ff::{Field, UniformRand}; use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; @@ -15,7 +13,7 @@ use crate::{ use super::{ append_gt, params::SingleParam, - vec_operations::{e, mul_gt, InnerProd}, + vec_operations::{e, InnerProd}, Error, G1Vec, G2Vec, Gt, PublicParams, Zr, G1, G2, }; @@ -25,8 +23,8 @@ pub struct Witness where Curve: Pairing, { - pub v1: G1Vec, - pub v2: G2Vec, + pub u1: G1Vec, + pub u2: G2Vec, } impl

Witness

@@ -56,7 +54,7 @@ where let v1 = v1.into(); let v2 = v2.into(); - Self { v1, v2 } + Self { u1: v1, u2: v2 } } } @@ -85,15 +83,15 @@ where } pub fn commit( - Witness { v1, v2 }: Witness, + Witness { u1, u2 }: Witness, public_params: &PublicParams, ) -> Result, Error> where Curve: Pairing, { - let d1 = v1.inner_prod(&public_params.g2v())?; - let d2 = public_params.g1v().inner_prod(&v2)?; - let c = v1.inner_prod(&v2)?; + let d1 = u1.inner_prod(&public_params.g2v())?; + let d2 = public_params.g1v().inner_prod(&u2)?; + let c = u1.inner_prod(&u2)?; let commitment = Commitment { d1, d2, c }; Ok(commitment) @@ -114,8 +112,8 @@ where { pub fn new(witness: Witness) -> Self { Self { - e1: witness.v1[0], - e2: witness.v2[0], + e1: witness.u1[0], + e2: witness.u2[0], } } @@ -128,12 +126,12 @@ where let d: Zr = Zr::::rand(&mut rng); let d_inv = d.inverse().ok_or(Error::CouldntInvertD)?; - let g1 = G1Vec::::from(&[self.e1, pp.g1 * d]).sum(); + let g1 = [self.e1, pp.g1 * d].iter().sum(); - let g2 = G2Vec::::from(&[self.e2, pp.g2 * d_inv]).sum(); + let g2 = [self.e2, pp.g2 * d_inv].iter().sum(); let left_eq = e(g1, g2); - let right_eq = mul_gt(&[pp.x, *c, *d2 * d, *d1 * d_inv]); + let right_eq = [pp.c, *c, *d2 * d, *d1 * d_inv].iter().sum(); Ok(left_eq == right_eq) } diff --git a/jolt-core/src/poly/commitment/dory/tests.rs b/jolt-core/src/poly/commitment/dory/tests.rs index 19df2ca0a..6a83bae11 100644 --- a/jolt-core/src/poly/commitment/dory/tests.rs +++ b/jolt-core/src/poly/commitment/dory/tests.rs @@ -19,8 +19,8 @@ fn test_scalar_product_proof() { let g1v = vec![G1::::rand(&mut rng)]; let g2v = vec![G2::::rand(&mut rng)]; let witness = Witness { - v1: g1v.into(), - v2: g2v.into(), + u1: g1v.into(), + u2: g2v.into(), }; let commitment = commit(witness.clone(), &public_params).unwrap(); @@ -38,14 +38,13 @@ fn test_dory_reduce() { let params = PublicParams::generate_public_params(&mut rng, n).unwrap(); - let witness = Witness { v1: g1v, v2: g2v }; + let witness = Witness { u1: g1v, u2: g2v }; let commitment = commit(witness.clone(), ¶ms[0]).unwrap(); let mut transcript = KeccakTranscript::new(&[]); let proof = reduce::reduce(&mut transcript, ¶ms, witness, commitment).unwrap(); - assert_eq!(proof.from_prover_1.len(), 3); - assert_eq!(proof.from_prover_2.len(), 3); + assert_eq!(proof.from_prover.len(), 3); assert_eq!(params[0].g1v().len(), 8); assert_eq!(params[1].g1v().len(), 4); @@ -54,11 +53,11 @@ fn test_dory_reduce() { let mut prev = n; for param in ¶ms[..params.len() - 1] { - let PublicParams::Multi { gamma_1_prime, .. } = param else { + let PublicParams::Multi { gamma_1, .. 
} = param else { panic!() }; prev /= 2; - assert_eq!(gamma_1_prime.len(), prev); + assert_eq!(gamma_1.len(), prev); } assert!(matches!(params[3], PublicParams::Single(_))); diff --git a/jolt-core/src/poly/commitment/dory/vec_operations.rs b/jolt-core/src/poly/commitment/dory/vec_operations.rs index 542127993..48bd28c1b 100644 --- a/jolt-core/src/poly/commitment/dory/vec_operations.rs +++ b/jolt-core/src/poly/commitment/dory/vec_operations.rs @@ -1,7 +1,4 @@ -use std::{ - iter::Sum, - ops::{Add, Deref, Mul}, -}; +use std::ops::{Add, Deref, Mul}; use ark_ec::{pairing::Pairing, Group}; use ark_ff::UniformRand; @@ -17,13 +14,6 @@ pub fn e(g1: G1, g2: G2) -> Gt { Curve::pairing(g1, g2) } -pub fn mul_gt(gts: &[Gt]) -> Gt -where - Gt: Sum, -{ - gts.iter().sum() -} - pub trait InnerProd { type Gt; fn inner_prod(&self, g2v: &G2) -> Result; @@ -64,10 +54,6 @@ impl InnerProd> for G1Vec { pub struct G1Vec(Vec>); impl G1Vec { - pub fn sum(&self) -> G1 { - self.iter().sum() - } - pub fn random(rng: &mut impl Rng, n: usize) -> Self { Self( (0..n) @@ -243,12 +229,13 @@ mod tests { use ark_ff::UniformRand; use crate::poly::commitment::dory::vec_operations::G2Vec; + use crate::poly::commitment::dory::Gt; use super::InnerProd; use super::{ super::{G1Vec, G1, G2}, - e, mul_gt, + e, }; #[test] @@ -262,7 +249,7 @@ mod tests { let g2b = G2::::rand(&mut rng); let g2c = G2::::rand(&mut rng); - let expected = mul_gt(&[e(g1a, g2a), e(g1b, g2b), e(g1c, g2c)]); + let expected: Gt = [e(g1a, g2a), e(g1b, g2b), e(g1c, g2c)].iter().sum(); let g1v = &[g1a, g1b, g1c]; let g1v: G1Vec = g1v.into(); From 03539c3ed5ba67bb6a89229de0398a91d2b6680f Mon Sep 17 00:00:00 2001 From: Ray Date: Sun, 16 Feb 2025 19:35:51 +0100 Subject: [PATCH 10/10] fix: use the same c1, c2, c3 as the book --- jolt-core/src/poly/commitment/dory/params.rs | 16 +-- jolt-core/src/poly/commitment/dory/reduce.rs | 109 +++++++++---------- jolt-core/src/poly/commitment/dory/scalar.rs | 28 +++-- 3 files changed, 73 insertions(+), 80 deletions(-) diff --git a/jolt-core/src/poly/commitment/dory/params.rs b/jolt-core/src/poly/commitment/dory/params.rs index e393ccb14..00d967f6d 100644 --- a/jolt-core/src/poly/commitment/dory/params.rs +++ b/jolt-core/src/poly/commitment/dory/params.rs @@ -19,7 +19,7 @@ where /// commitment of (inner product) /// /// known by the **verifier** - pub c: Gt

---
 jolt-core/src/poly/commitment/dory/params.rs |  16 +--
 jolt-core/src/poly/commitment/dory/reduce.rs | 109 +++++++++----------
 jolt-core/src/poly/commitment/dory/scalar.rs |  28 +++--
 3 files changed, 73 insertions(+), 80 deletions(-)

diff --git a/jolt-core/src/poly/commitment/dory/params.rs b/jolt-core/src/poly/commitment/dory/params.rs
index e393ccb14..00d967f6d 100644
--- a/jolt-core/src/poly/commitment/dory/params.rs
+++ b/jolt-core/src/poly/commitment/dory/params.rs
@@ -19,7 +19,7 @@ where
     /// commitment of (inner product)
     ///
     /// known by the **verifier**
-    pub c: Gt<Curve>,
+    pub c_g: Gt<Curve>,
 }
 
 #[derive(Clone)]
@@ -53,7 +53,7 @@ where
     /// commitment of (inner product)
     ///
     /// known by the **verifier**
-    c: Gt<Curve>,
+    c_g: Gt<Curve>,
 
     /// commitment of (inner product)
     ///
@@ -93,12 +93,6 @@ where
         }
     }
 
-    pub fn x(&self) -> &Gt<Curve> {
-        match self {
-            PublicParams::Single(SingleParam { c: x, .. }) | PublicParams::Multi { c: x, .. } => x,
-        }
-    }
-
     pub fn generate_public_params(rng: &mut impl Rng, mut n: usize) -> Result<Vec<Self>, Error> {
         let mut res = Vec::new();
         let mut params = Self::new(rng, n)?;
@@ -124,13 +118,13 @@ where
         g1v: G1Vec<Curve>,
         g2v: G2Vec<Curve>,
     ) -> Result<Self, Error> {
-        let c = g1v.inner_prod(&g2v)?;
+        let c_g = g1v.inner_prod(&g2v)?;
         match (&*g1v, &*g2v) {
             // if there's a single element, return a single param
             ([g1], [g2]) => Ok(Self::Single(SingleParam {
                 g1: *g1,
                 g2: *g2,
-                c,
+                c_g,
             })),
             // else, prepare gamma and delta public params
             (a, b) if !a.is_empty() & !b.is_empty() && a.len() == b.len() => {
@@ -153,7 +147,7 @@ where
                 Ok(Self::Multi {
                     g1v,
                     g2v,
-                    c,
+                    c_g,
                     gamma_1,
                     gamma_2,
                     delta_1r,
diff --git a/jolt-core/src/poly/commitment/dory/reduce.rs b/jolt-core/src/poly/commitment/dory/reduce.rs
index 92a14536c..a8a3220db 100644
--- a/jolt-core/src/poly/commitment/dory/reduce.rs
+++ b/jolt-core/src/poly/commitment/dory/reduce.rs
@@ -33,7 +33,7 @@ where
     fn verify_recursive(
         transcript: &mut ProofTranscript,
         public_params: &[PublicParams<P>],
-        commitment @ Commitment { c, d1, d2 }: Commitment<P>,
+        commitment @ Commitment { c1, c2, c3 }: Commitment<P>,
         from_prover: &[(
             ReduceProverRound1Elements<P>,
             ReduceProverRound2Elements<P>,
@@ -55,69 +55,61 @@ where
                 delta_1l,
                 delta_2r,
                 delta_2l,
-                c: c_g,
+                c_g,
                 ..
             }, public_params_rest @ ..],
             [(
-                step_1_element @ ReduceProverRound1Elements {
-                    d1l: v1l,
-                    d1r: v1r,
-                    d2l: v2l,
-                    d2r: v2r,
-                },
-                step_2_element @ ReduceProverRound2Elements {
-                    vl: c_plus,
-                    vr: c_minus,
-                },
+                step_1_element @ ReduceProverRound1Elements { d1l, d1r, d2l, d2r },
+                step_2_element @ ReduceProverRound2Elements { vl, vr },
             ), from_prover_rest @ ..],
         ) => {
            // update transcript with step_1_elements
            commitment.append_to_transcript(transcript);
            step_1_element.append_to_transcript(transcript);
 
            // Get from Transcript
-            let alpha_1: Zr<P> = transcript.challenge_scalar();
+            let betha: Zr<P> = transcript.challenge_scalar();
 
            // update transcript with step_2_elements
            step_2_element.append_to_transcript(transcript);
 
            // Get from Transcript
-            let alpha_2: Zr<P> = transcript.challenge_scalar();
+            let alpha: Zr<P> = transcript.challenge_scalar();
 
-            let inverse_alpha_1 = JoltField::inverse(&alpha_1).ok_or(Error::ZrZero)?;
-            let inverse_alpha_2 = JoltField::inverse(&alpha_2).ok_or(Error::ZrZero)?;
+            let inverse_betha = JoltField::inverse(&betha).ok_or(Error::ZrZero)?;
+            let inverse_alpha = JoltField::inverse(&alpha).ok_or(Error::ZrZero)?;
 
-            let c_prime = [
-                c,
+            let c1_prime = [
+                c1,
+                c2 * inverse_betha,
+                c3 * betha,
                 *c_g,
-                d2 * alpha_1,
-                d1 * inverse_alpha_1,
-                *c_plus * alpha_2,
-                *c_minus * inverse_alpha_2,
+                *vl * alpha * alpha,
+                *vr * inverse_alpha * inverse_alpha,
             ]
            .iter()
            .sum();
-            let d1_prime = [
-                *v1l * alpha_2,
-                *v1r,
-                *delta_1l * alpha_2 * alpha_1,
-                *delta_1r * alpha_1,
+            let c2_prime = [
+                *d1l * alpha,
+                *d1r * inverse_alpha,
+                *delta_1l * alpha * betha,
+                *delta_1r * inverse_alpha * betha,
             ]
            .iter()
            .sum();
-            let d2_prime = [
-                *v2l * inverse_alpha_2,
-                *v2r,
-                *delta_2l * inverse_alpha_2 * inverse_alpha_1,
-                *delta_2r * inverse_alpha_1,
+            let c3_prime = [
+                *d2l * inverse_alpha,
+                *d2r * alpha,
+                *delta_2l * inverse_alpha * inverse_betha,
+                *delta_2r * alpha * inverse_betha,
             ]
            .iter()
            .sum();
 
            let next_commitment = Commitment {
-                c: c_prime,
-                d1: d1_prime,
-                d2: d2_prime,
+                c1: c1_prime,
+                c2: c2_prime,
+                c3: c3_prime,
            };
 
            Self::verify_recursive(
@@ -199,7 +191,7 @@ pub fn reduce(
     transcript: &mut ProofTranscript,
     params: &[PublicParams<P>],
     witness: Witness<P>,
-    commitment @ Commitment { c, d1, d2 }: Commitment<P>,
+    commitment @ Commitment { c1, c2, c3 }: Commitment<P>,
 ) -> Result<DoryProof<P>, Error>
 where
     P: Pairing,
@@ -210,7 +202,7 @@ where
        [PublicParams::Multi {
            g1v,
            g2v,
-            c: c_g,
+            c_g,
            gamma_1,
            gamma_2,
            delta_1r,
@@ -264,8 +256,8 @@ where
            let alpha: Zr<P> = transcript.challenge_scalar();
            let inverse_alpha = JoltField::inverse(&alpha).ok_or(Error::ZrZero)?;
 
-            let u1_prime = G1Vec::from(w1l) * alpha + G1Vec::from(w1r);
-            let u2_prime = G2Vec::from(w2l) * inverse_alpha + G2Vec::from(w2r);
+            let u1_prime = G1Vec::from(w1l) * alpha + G1Vec::from(w1r) * inverse_alpha;
+            let u2_prime = G2Vec::from(w2l) * inverse_alpha + G2Vec::from(w2r) * alpha;
 
            let next_witness = Witness {
                u1: u1_prime,
@@ -279,44 +271,44 @@ where
            });
        }
 
        let c_prime = [
-            c,
+        let c1_prime = [
+            c1,
+            c2 * inverse_betha,
+            c3 * betha,
            *c_g,
-            d2 * betha,
-            d1 * inverse_betha,
-            vl * alpha,
-            vr * inverse_alpha,
+            vl * alpha * alpha,
+            vr * inverse_alpha * inverse_alpha,
        ]
        .iter()
        .sum();
 
-        let d1_prime = [
+        let c2_prime = [
            d1l * alpha,
-            d1r,
+            d1r * inverse_alpha,
            *delta_1l * alpha * betha,
-            *delta_1r * betha,
+            *delta_1r * inverse_alpha * betha,
        ]
        .iter()
        .sum();
 
-        let d2_prime = [
+        let c3_prime = [
            d2l * inverse_alpha,
-            d2r,
+            d2r * alpha,
            *delta_2l * inverse_alpha * inverse_betha,
-            *delta_2r * inverse_betha,
+            *delta_2r * alpha * inverse_betha,
        ]
        .iter()
        .sum();
 
        let next_commitment = Commitment {
-            c: c_prime,
-            d1: d1_prime,
-            d2: d2_prime,
+            c1: c1_prime,
+            c2: c2_prime,
+            c3: c3_prime,
        };
 
        let DoryProof {
            from_prover: step_elements,
-            final_proof: scalar_product_proof,
+            final_proof,
        } = reduce(transcript, rest_param, next_witness, next_commitment)?;
 
        let mut from_prover = vec![(step_1_element, step_2_element)];
@@ -324,11 +316,14 @@ where
 
        Ok(DoryProof {
            from_prover,
-            final_proof: scalar_product_proof,
+            final_proof,
        })
        }
        // Send u, g and gamma
-        [PublicParams::Single(_), ..] => Err(Error::ReduceSinglePublicParam),
+        [PublicParams::Single(_), ..] => Ok(DoryProof {
+            from_prover: vec![],
+            final_proof: ScalarProof::new(witness),
+        }),
        [] => Err(Error::EmptyPublicParams),
    }
 }
diff --git a/jolt-core/src/poly/commitment/dory/scalar.rs b/jolt-core/src/poly/commitment/dory/scalar.rs
index d3197ea2b..cbbf93919 100644
--- a/jolt-core/src/poly/commitment/dory/scalar.rs
+++ b/jolt-core/src/poly/commitment/dory/scalar.rs
@@ -63,9 +63,9 @@ pub struct Commitment<Curve>
 where
     Curve: Pairing,
 {
-    pub c: Gt<Curve>,
-    pub d1: Gt<Curve>,
-    pub d2: Gt<Curve>,
+    pub c1: Gt<Curve>,
+    pub c2: Gt<Curve>,
+    pub c3: Gt<Curve>,
 }
 
 impl<Curve> AppendToTranscript for Commitment<Curve>
@@ -76,9 +76,9 @@ where
    where
        ProofTranscript: Transcript,
    {
-        append_gt(transcript, self.c);
-        append_gt(transcript, self.d1);
-        append_gt(transcript, self.d2);
+        append_gt(transcript, self.c1);
+        append_gt(transcript, self.c2);
+        append_gt(transcript, self.c3);
    }
 }
@@ -89,11 +89,11 @@ pub fn commit(
 where
     Curve: Pairing,
 {
-    let d1 = u1.inner_prod(&public_params.g2v())?;
-    let d2 = public_params.g1v().inner_prod(&u2)?;
-    let c = u1.inner_prod(&u2)?;
+    let c1 = u1.inner_prod(&u2)?;
+    let c2 = u1.inner_prod(&public_params.g2v())?;
+    let c3 = public_params.g1v().inner_prod(&u2)?;
 
-    let commitment = Commitment { d1, d2, c };
+    let commitment = Commitment { c2, c3, c1 };
     Ok(commitment)
 }
@@ -120,7 +120,11 @@ where
     pub fn verify(
        &self,
        pp: &SingleParam<Curve>,
-        Commitment { c, d1, d2 }: &Commitment<Curve>,
+        Commitment {
+            c1: c,
+            c2: d1,
+            c3: d2,
+        }: &Commitment<Curve>,
     ) -> Result<bool, Error> {
        let mut rng = thread_rng();
        let d: Zr<Curve> = Zr::<Curve>::rand(&mut rng);
@@ -131,7 +135,7 @@ where
        let g2 = [self.e2, pp.g2 * d_inv].iter().sum();
 
        let left_eq = e(g1, g2);
-        let right_eq = [pp.c, *c, *d2 * d, *d1 * d_inv].iter().sum();
+        let right_eq = [pp.c_g, *c, *d2 * d, *d1 * d_inv].iter().sum();
 
        Ok(left_eq == right_eq)
    }