feat(raiko): proof aggregation #347

Open
wants to merge 11 commits into base: main
1 change: 1 addition & 0 deletions Cargo.lock

Some generated files are not rendered by default.

57 changes: 56 additions & 1 deletion core/src/interfaces.rs
@@ -3,7 +3,9 @@ use alloy_primitives::{Address, B256};
use clap::{Args, ValueEnum};
use raiko_lib::{
consts::VerifierType,
input::{BlobProofType, GuestInput, GuestOutput},
input::{
AggregationGuestInput, AggregationGuestOutput, BlobProofType, GuestInput, GuestOutput,
},
prover::{IdStore, IdWrite, Proof, ProofKey, Prover, ProverError},
};
use serde::{Deserialize, Serialize};
@@ -203,6 +205,47 @@ impl ProofType {
}
}

/// Run proof aggregation with the prover driver selected by the proof type.
pub async fn aggregate_proofs(
&self,
input: AggregationGuestInput,
output: &AggregationGuestOutput,
config: &Value,
store: Option<&mut dyn IdWrite>,
) -> RaikoResult<Proof> {
let proof = match self {
ProofType::Native => NativeProver::aggregate(input.clone(), output, config, store)
.await
.map_err(<ProverError as Into<RaikoError>>::into),
ProofType::Sp1 => {
#[cfg(feature = "sp1")]
return sp1_driver::Sp1Prover::aggregate(input.clone(), output, config, store)
.await
.map_err(|e| e.into());
#[cfg(not(feature = "sp1"))]
Err(RaikoError::FeatureNotSupportedError(*self))
}
ProofType::Risc0 => {
#[cfg(feature = "risc0")]
return risc0_driver::Risc0Prover::aggregate(input.clone(), output, config, store)
.await
.map_err(|e| e.into());
#[cfg(not(feature = "risc0"))]
Err(RaikoError::FeatureNotSupportedError(*self))
}
ProofType::Sgx => {
#[cfg(feature = "sgx")]
return sgx_prover::SgxProver::aggregate(input.clone(), output, config, store)
.await
.map_err(|e| e.into());
#[cfg(not(feature = "sgx"))]
Err(RaikoError::FeatureNotSupportedError(*self))
}
}?;

Ok(proof)
}

pub async fn cancel_proof(
&self,
proof_key: ProofKey,
@@ -398,3 +441,15 @@ impl TryFrom<ProofRequestOpt> for ProofRequest {
})
}
}

#[serde_as]
#[derive(Clone, Debug, Serialize, Deserialize)]
/// A request to aggregate multiple proofs.
pub struct AggregationRequest {
/// All the proofs to verify
pub proofs: Vec<Proof>,
/// The proof type.
pub proof_type: ProofType,
/// Additional prover params.
pub prover_args: HashMap<String, Value>,
}
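
For orientation, a hedged sketch of how a client inside the workspace might build such a request, reusing the sp1 parameters that the aggregation test below passes; the build_aggregation_request helper is hypothetical and not part of this change:

use std::collections::HashMap;
use serde_json::{json, Value};
use raiko_lib::prover::Proof;

// Hypothetical helper: package proofs already returned by /proof requests
// into an AggregationRequest. Field names mirror the struct above.
fn build_aggregation_request(proofs: Vec<Proof>) -> AggregationRequest {
    let mut prover_args: HashMap<String, Value> = HashMap::new();
    prover_args.insert(
        "sp1".to_string(),
        json!({ "recursion": "compressed", "prover": "mock", "verify": true }),
    );
    AggregationRequest {
        proofs,
        proof_type: ProofType::Sp1,
        prover_args,
    }
}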
71 changes: 59 additions & 12 deletions core/src/lib.rs
@@ -226,8 +226,9 @@ mod tests {
use clap::ValueEnum;
use raiko_lib::{
consts::{Network, SupportedChainSpecs},
input::BlobProofType,
input::{AggregationGuestInput, AggregationGuestOutput, BlobProofType},
primitives::B256,
prover::Proof,
};
use serde_json::{json, Value};
use std::{collections::HashMap, env};
@@ -242,7 +243,7 @@
ci == "1"
}

fn test_proof_params() -> HashMap<String, Value> {
fn test_proof_params(enable_aggregation: bool) -> HashMap<String, Value> {
let mut prover_args = HashMap::new();
prover_args.insert(
"native".to_string(),
@@ -256,7 +257,7 @@
"sp1".to_string(),
json! {
{
"recursion": "core",
"recursion": if enable_aggregation { "compressed" } else { "plonk" },
"prover": "mock",
"verify": true
}
@@ -278,8 +279,8 @@
json! {
{
"instance_id": 121,
"setup": true,
"bootstrap": true,
"setup": enable_aggregation,
"bootstrap": enable_aggregation,
"prove": true,
}
},
@@ -291,7 +292,7 @@
l1_chain_spec: ChainSpec,
taiko_chain_spec: ChainSpec,
proof_request: ProofRequest,
) {
) -> Proof {
let provider =
RpcBlockDataProvider::new(&taiko_chain_spec.rpc, proof_request.block_number - 1)
.expect("Could not create RpcBlockDataProvider");
@@ -301,10 +302,10 @@
.await
.expect("input generation failed");
let output = raiko.get_output(&input).expect("output generation failed");
let _proof = raiko
raiko
.prove(input, &output, None)
.await
.expect("proof generation failed");
.expect("proof generation failed")
}

#[ignore]
@@ -332,7 +333,7 @@
l1_network,
proof_type,
blob_proof_type: BlobProofType::ProofOfEquivalence,
prover_args: test_proof_params(),
prover_args: test_proof_params(false),
};
prove_block(l1_chain_spec, taiko_chain_spec, proof_request).await;
}
@@ -361,7 +362,7 @@
l1_network,
proof_type,
blob_proof_type: BlobProofType::ProofOfEquivalence,
prover_args: test_proof_params(),
prover_args: test_proof_params(false),
};
prove_block(l1_chain_spec, taiko_chain_spec, proof_request).await;
}
@@ -399,7 +400,7 @@
l1_network,
proof_type,
blob_proof_type: BlobProofType::ProofOfEquivalence,
prover_args: test_proof_params(),
prover_args: test_proof_params(false),
};
prove_block(l1_chain_spec, taiko_chain_spec, proof_request).await;
}
@@ -432,9 +433,55 @@
l1_network,
proof_type,
blob_proof_type: BlobProofType::ProofOfEquivalence,
prover_args: test_proof_params(),
prover_args: test_proof_params(false),
};
prove_block(l1_chain_spec, taiko_chain_spec, proof_request).await;
}
}

#[tokio::test(flavor = "multi_thread")]
async fn test_prove_block_taiko_a7_aggregated() {
let proof_type = get_proof_type_from_env();
let l1_network = Network::Holesky.to_string();
let network = Network::TaikoA7.to_string();
// Give the CI a simpler block to test because it doesn't have enough memory.
// Unfortunately, that also means that KZG is not getting fully verified by CI.
let block_number = if is_ci() { 105987 } else { 101368 };
let taiko_chain_spec = SupportedChainSpecs::default()
.get_chain_spec(&network)
.unwrap();
let l1_chain_spec = SupportedChainSpecs::default()
.get_chain_spec(&l1_network)
.unwrap();

let proof_request = ProofRequest {
l1_inclusion_block_number: 0,
block_number,
network,
graffiti: B256::ZERO,
prover: Address::ZERO,
l1_network,
proof_type,
blob_proof_type: BlobProofType::ProofOfEquivalence,
prover_args: test_proof_params(true),
};
let proof = prove_block(l1_chain_spec, taiko_chain_spec, proof_request).await;

let input = AggregationGuestInput {
proofs: vec![proof.clone(), proof],
};

let output = AggregationGuestOutput { hash: B256::ZERO };

let aggregated_proof = proof_type
.aggregate_proofs(
input,
&output,
&serde_json::to_value(&test_proof_params(false)).unwrap(),
None,
)
.await
.expect("proof aggregation failed");
println!("aggregated proof: {:?}", aggregated_proof);
}
}
14 changes: 14 additions & 0 deletions core/src/prover.rs
@@ -58,14 +58,28 @@ impl Prover for NativeProver {
}

Ok(Proof {
input: None,
proof: None,
quote: None,
uuid: None,
kzg_proof: None,
})
}

async fn cancel(_proof_key: ProofKey, _read: Box<&mut dyn IdStore>) -> ProverResult<()> {
Ok(())
}

async fn aggregate(
_input: raiko_lib::input::AggregationGuestInput,
_output: &raiko_lib::input::AggregationGuestOutput,
_config: &ProverConfig,
_store: Option<&mut dyn IdWrite>,
) -> ProverResult<Proof> {
Ok(Proof {
..Default::default()
})
}
}

#[ignore = "Only used to test serialized data"]
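
As a usage sketch for the stub above (test-style; assumes NativeProver, the Prover trait, and the raiko_lib types are in scope, plus a tokio test runner), the native aggregator currently ignores its inputs and returns an empty Proof:

#[tokio::test]
async fn native_aggregate_returns_empty_proof() {
    use raiko_lib::input::{AggregationGuestInput, AggregationGuestOutput};
    use raiko_lib::primitives::B256;

    let input = AggregationGuestInput { proofs: vec![] };
    let output = AggregationGuestOutput { hash: B256::ZERO };
    let proof = NativeProver::aggregate(input, &output, &serde_json::json!({}), None)
        .await
        .expect("native aggregation should not fail");
    // Every field of the returned Proof is None (the Default impl).
    assert!(proof.proof.is_none() && proof.input.is_none());
}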
2 changes: 2 additions & 0 deletions host/src/server/api/v2/mod.rs
@@ -151,6 +151,8 @@ pub fn create_router() -> Router<ProverState> {
// Only add the concurrency limit to the proof route. We want to still be able to call
// healthchecks and metrics to have insight into the system.
.nest("/proof", proof::create_router())
// TODO: Separate task or try to get it into /proof somehow? Probably separate
.nest("/aggregate", proof::create_router())
.nest("/health", v1::health::create_router())
.nest("/metrics", v1::metrics::create_router())
.merge(SwaggerUi::new("/swagger-ui").url("/api-docs/openapi.json", docs.clone()))
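
For context on the nesting above (illustrative only; the /submit path is a dummy, the real routes come from proof::create_router()): nesting the same axum sub-router under two prefixes exposes identical handlers at both paths, which is why /aggregate currently mirrors /proof:

use axum::{routing::post, Router};

// Dummy handler, for illustration only.
async fn submit() -> &'static str {
    "ok"
}

fn demo_router() -> Router {
    let sub = Router::new().route("/submit", post(submit));
    // The same handlers become reachable under both prefixes,
    // e.g. POST /proof/submit and POST /aggregate/submit.
    Router::new()
        .nest("/proof", sub.clone())
        .nest("/aggregate", sub)
}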
40 changes: 39 additions & 1 deletion lib/src/input.rs
@@ -12,7 +12,9 @@ use serde_with::serde_as;

#[cfg(not(feature = "std"))]
use crate::no_std::*;
use crate::{consts::ChainSpec, primitives::mpt::MptNode, utils::zlib_compress_data};
use crate::{
consts::ChainSpec, primitives::mpt::MptNode, prover::Proof, utils::zlib_compress_data,
};

/// Represents the state of an account's storage.
/// The storage trie together with the used storage slots allow us to reconstruct all the
@@ -41,6 +43,42 @@ pub struct GuestInput {
pub taiko: TaikoGuestInput,
}

/// External aggregation input.
#[derive(Debug, Clone, Default, Deserialize, Serialize)]
pub struct AggregationGuestInput {
/// All block proofs to aggregate
pub proofs: Vec<Proof>,
}

/// The raw proof data necessary to verify a proof
#[derive(Debug, Clone, Default, Deserialize, Serialize)]
pub struct RawProof {
/// The actual proof
pub proof: Vec<u8>,
/// The resulting hash
pub input: B256,
}

/// External aggregation input.
#[derive(Debug, Clone, Default, Deserialize, Serialize)]
pub struct RawAggregationGuestInput {
/// All block proofs to aggregate
pub proofs: Vec<RawProof>,
}

/// External aggregation output.
#[derive(Debug, Clone, Default, Deserialize, Serialize)]
pub struct AggregationGuestOutput {
/// The resulting hash
pub hash: B256,
}

#[derive(Clone, Serialize, Deserialize)]
pub struct ZkAggregationGuestInput {
pub image_id: [u32; 8],
pub block_inputs: Vec<B256>,
}

impl From<(Block, Header, ChainSpec, TaikoGuestInput)> for GuestInput {
fn from(
(block, parent_header, chain_spec, taiko): (Block, Header, ChainSpec, TaikoGuestInput),
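
To make the relationship between the typed and raw forms concrete, here is a small hypothetical conversion sketch; nothing in this diff prescribes how Proof's string-encoded data is decoded to bytes, so the helper simply assumes the bytes and 32-byte input hashes are already available:

use reth_primitives::B256;

// Hypothetical helper: assemble the raw guest input from already-decoded
// proof bytes and their 32-byte public-input hashes.
fn to_raw_aggregation_input(decoded: Vec<(Vec<u8>, B256)>) -> RawAggregationGuestInput {
    RawAggregationGuestInput {
        proofs: decoded
            .into_iter()
            .map(|(proof, input)| RawProof { proof, input })
            .collect(),
    }
}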
21 changes: 21 additions & 0 deletions lib/src/protocol_instance.rs
@@ -315,6 +315,27 @@ fn bytes_to_bytes32(input: &[u8]) -> [u8; 32] {
bytes
}

pub fn words_to_bytes_le(words: &[u32; 8]) -> [u8; 32] {
let mut bytes = [0u8; 32];
for i in 0..8 {
let word_bytes = words[i].to_le_bytes();
bytes[i * 4..(i + 1) * 4].copy_from_slice(&word_bytes);
}
bytes
}

pub fn aggregation_output_combine(public_inputs: Vec<B256>) -> Vec<u8> {
let mut output = Vec::with_capacity(public_inputs.len() * 32);
for public_input in public_inputs.iter() {
output.extend_from_slice(&public_input.0);
}
output
}

pub fn aggregation_output(program: B256, public_inputs: Vec<B256>) -> Vec<u8> {
aggregation_output_combine([vec![program], public_inputs].concat())
}

#[cfg(test)]
mod tests {
use alloy_primitives::{address, b256};
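
A quick sanity sketch of the byte layout these helpers produce (test-style; assumes B256 from the crate's primitives is in scope): aggregation_output prepends the 32-byte program id to the 32-byte public inputs, so the output length is 32 * (1 + n):

#[test]
fn aggregation_output_layout() {
    let program = B256::repeat_byte(0xaa);
    let inputs = vec![B256::repeat_byte(0x01), B256::repeat_byte(0x02)];
    let out = aggregation_output(program, inputs.clone());
    // 32 bytes of program id followed by 32 bytes per public input.
    assert_eq!(out.len(), 32 * (1 + inputs.len()));
    assert_eq!(&out[..32], program.as_slice());
    assert_eq!(&out[32..64], inputs[0].as_slice());
    assert_eq!(&out[64..], inputs[1].as_slice());
}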
17 changes: 15 additions & 2 deletions lib/src/prover.rs
@@ -2,7 +2,7 @@ use reth_primitives::{ChainId, B256};
use serde::{Deserialize, Serialize};
use utoipa::ToSchema;

use crate::input::{GuestInput, GuestOutput};
use crate::input::{AggregationGuestInput, AggregationGuestOutput, GuestInput, GuestOutput};

#[derive(thiserror::Error, Debug)]
pub enum ProverError {
@@ -26,13 +26,19 @@ pub type ProverResult<T, E = ProverError> = core::result::Result<T, E>;
pub type ProverConfig = serde_json::Value;
pub type ProofKey = (ChainId, B256, u8);

#[derive(Debug, Serialize, ToSchema, Deserialize, Default)]
#[derive(Clone, Debug, Serialize, ToSchema, Deserialize, Default)]
/// The response body of a proof request.
pub struct Proof {
/// The proof, either TEE or ZK.
pub proof: Option<String>,
/// The public input.
pub input: Option<B256>,
/// The TEE quote.
pub quote: Option<String>,
/// The assumption UUID.
pub uuid: Option<String>,
/// The KZG proof.
pub kzg_proof: Option<String>,
}

#[async_trait::async_trait]
@@ -56,5 +62,12 @@ pub trait Prover {
store: Option<&mut dyn IdWrite>,
) -> ProverResult<Proof>;

async fn aggregate(
input: AggregationGuestInput,
output: &AggregationGuestOutput,
config: &ProverConfig,
store: Option<&mut dyn IdWrite>,
) -> ProverResult<Proof>;

async fn cancel(proof_key: ProofKey, read: Box<&mut dyn IdStore>) -> ProverResult<()>;
}
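
Because aggregate is a new required trait method, every Prover implementation (native, sp1, risc0, sgx) has to provide it. A minimal sketch of driving it through the trait from generic code; the aggregate_with helper is hypothetical:

// Hypothetical generic helper: forwards to any Prover implementation's
// aggregate method, without an IdWrite store.
async fn aggregate_with<P: Prover>(
    input: AggregationGuestInput,
    output: &AggregationGuestOutput,
    config: &ProverConfig,
) -> ProverResult<Proof> {
    P::aggregate(input, output, config, None).await
}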