2 changes: 1 addition & 1 deletion crates/circuits/batch-circuit/batch_exe_commit.rs
@@ -1,4 +1,4 @@
#![cfg_attr(rustfmt, rustfmt_skip)]
//! Generated by crates/build-guest. DO NOT EDIT!

pub const COMMIT: [u32; 8] = [1950246364, 1636590102, 198747183, 721429575, 920539053, 838315603, 215973995, 453669913];
pub const COMMIT: [u32; 8] = [378983583, 1588043245, 997558768, 232971842, 98921912, 1094114240, 1668965390, 439165940];
2 changes: 1 addition & 1 deletion crates/circuits/bundle-circuit/bundle_exe_commit.rs
@@ -1,4 +1,4 @@
#![cfg_attr(rustfmt, rustfmt_skip)]
//! Generated by crates/build-guest. DO NOT EDIT!

pub const COMMIT: [u32; 8] = [1948048916, 1022190518, 1051765913, 997565840, 1008935769, 1678268764, 1464235949, 1741788930];
pub const COMMIT: [u32; 8] = [1319970937, 1591263281, 1491142464, 295379798, 1634034263, 1525537339, 1450517202, 914878096];
2 changes: 1 addition & 1 deletion crates/circuits/chunk-circuit/chunk_exe_commit.rs
@@ -1,4 +1,4 @@
#![cfg_attr(rustfmt, rustfmt_skip)]
//! Generated by crates/build-guest. DO NOT EDIT!

pub const COMMIT: [u32; 8] = [404732286, 1668992133, 680101068, 1459241968, 1634825407, 869878951, 1443138776, 876807865];
pub const COMMIT: [u32; 8] = [686772141, 1415860009, 661100161, 25568772, 174445417, 1760732590, 769291679, 1509851629];
65 changes: 2 additions & 63 deletions crates/integration/src/utils/mod.rs
@@ -352,7 +352,7 @@ pub fn build_batch_witnesses_validium(
.collect::<eyre::Result<Vec<_>>>()?;

// collect tx bytes from chunk tasks
let (meta_chunk_sizes, chunk_digests, chunk_tx_bytes) = chunks.iter().fold(
let (_, chunk_digests, _) = chunks.iter().fold(
(Vec::new(), Vec::new(), Vec::new()),
|(mut meta_chunk_sizes, mut chunk_digests, mut payload_bytes), chunk_wit| {
let tx_bytes = blks_tx_bytes(chunk_wit.blocks.iter());
@@ -368,69 +368,8 @@
assert_eq!(digest, &chunk_info.tx_data_digest);
}

const LEGACY_MAX_CHUNKS: usize = 45;

let meta_chunk_bytes = {
let valid_chunk_size = chunks.len() as u16;
meta_chunk_sizes
.into_iter()
.chain(std::iter::repeat(0))
.take(LEGACY_MAX_CHUNKS)
.fold(
Vec::from(valid_chunk_size.to_be_bytes()),
|mut bytes, len| {
bytes.extend_from_slice(&(len as u32).to_be_bytes());
bytes
},
)
};

// collect all data together for payload
let version = testing_version_validium();
let mut payload = if version.fork >= ForkName::EuclidV2 {
Vec::new()
} else {
meta_chunk_bytes.clone()
};

if version.fork >= ForkName::EuclidV2 {
let num_blocks = chunks.iter().map(|w| w.blocks.len()).sum::<usize>() as u16;
let prev_msg_queue_hash = chunks[0].prev_msg_queue_hash;
let initial_block_number = chunks[0].blocks[0].header.number;
let post_msg_queue_hash = chunk_infos
.last()
.expect("at least one chunk")
.post_msg_queue_hash;

payload.extend_from_slice(prev_msg_queue_hash.as_slice());
payload.extend_from_slice(post_msg_queue_hash.as_slice());
payload.extend(initial_block_number.to_be_bytes());
payload.extend(num_blocks.to_be_bytes());
assert_eq!(payload.len(), 74);
for chunk_info in &chunk_infos {
for ctx in &chunk_info.block_ctxs {
payload.extend(ctx.to_bytes());
}
}
assert_eq!(payload.len(), 74 + 52 * num_blocks as usize);
}
payload.extend(chunk_tx_bytes);
// compress ...
let compressed_payload = zstd_encode(&payload);

let heading = compressed_payload.len() as u32 + ((version.as_version_byte() as u32) << 24);

let blob_bytes = if version.fork >= ForkName::EuclidV2 {
let mut blob_bytes = Vec::from(heading.to_be_bytes());
blob_bytes.push(1u8); // compressed flag
blob_bytes.extend(compressed_payload);
blob_bytes.resize(4096 * 31, 0);
blob_bytes
} else {
let mut blob_bytes = vec![1];
blob_bytes.extend(compressed_payload);
blob_bytes
};

let last_chunk = chunk_infos.last().expect("at least 1 chunk in batch");
let reference_header =
@@ -464,7 +403,7 @@ pub fn build_batch_witnesses_validium(
chunk_proofs,
chunk_infos,
reference_header,
blob_bytes,
blob_bytes: Vec::default(),
point_eval_witness: None,
fork_name: version.fork,
})
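The deleted EuclidV2 branch above asserted two payload-size invariants: a fixed 74-byte header followed by 52 bytes per block context. As a worked check of that arithmetic (field widths inferred from the removed asserts, not from a spec), a minimal sketch:

```rust
// Sketch of the payload-length bookkeeping from the removed EuclidV2 branch.
// Field widths are inferred from the deleted `assert_eq!` calls.
const PREV_MSG_QUEUE_HASH_BYTES: usize = 32; // B256
const POST_MSG_QUEUE_HASH_BYTES: usize = 32; // B256
const INITIAL_BLOCK_NUMBER_BYTES: usize = 8; // u64, big-endian
const NUM_BLOCKS_BYTES: usize = 2; // u16, big-endian
const BLOCK_CTX_BYTES: usize = 52; // per the removed `74 + 52 * num_blocks` assert

fn expected_payload_len(num_blocks: usize) -> usize {
    let header_len = PREV_MSG_QUEUE_HASH_BYTES
        + POST_MSG_QUEUE_HASH_BYTES
        + INITIAL_BLOCK_NUMBER_BYTES
        + NUM_BLOCKS_BYTES; // 32 + 32 + 8 + 2 = 74
    header_len + BLOCK_CTX_BYTES * num_blocks
}

fn main() {
    assert_eq!(expected_payload_len(0), 74);
    assert_eq!(expected_payload_len(10), 74 + 52 * 10);
}
```

Since validium batches no longer carry their data in a blob, the witness is now built with `blob_bytes: Vec::default()` as shown above, and the deleted payload/blob construction is unnecessary.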
20 changes: 17 additions & 3 deletions crates/types/base/src/public_inputs/batch.rs
@@ -83,7 +83,7 @@ impl BatchInfo
/// withdraw root ||
/// prev msg queue hash ||
/// post msg queue hash
/// )
/// )
fn pi_hash_euclidv2(&self) -> B256 {
keccak256(
std::iter::empty()
@@ -108,9 +108,23 @@
}

/// Public input hash for an L3 validium @ v1.
fn pi_hash_validium_v1(&self) -> B256 {
///
/// keccak(
/// version ||
/// parent state root ||
/// parent batch hash ||
/// state root ||
/// batch hash ||
/// chain id ||
/// withdraw root ||
/// prev msg queue hash ||
/// post msg queue hash ||
/// encryption key
/// )
fn pi_hash_validium(&self, version: Version) -> B256 {
keccak256(
std::iter::empty()
.chain(&[version.as_version_byte()])
.chain(self.parent_state_root.as_slice())
.chain(self.parent_batch_hash.as_slice())
.chain(self.state_root.as_slice())
@@ -142,7 +156,7 @@ impl MultiVersionPublicInputs for BatchInfo {
(Domain::Scroll, STFVersion::V6) => self.pi_hash_by_fork(ForkName::EuclidV1),
(Domain::Scroll, STFVersion::V7) => self.pi_hash_by_fork(ForkName::EuclidV2),
(Domain::Scroll, STFVersion::V8) => self.pi_hash_by_fork(ForkName::Feynman),
(Domain::Validium, STFVersion::V1) => self.pi_hash_validium_v1(),
(Domain::Validium, STFVersion::V1) => self.pi_hash_validium(version),
(domain, stf_version) => {
unreachable!("unsupported version=({domain:?}, {stf_version:?})")
}
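A minimal sketch of the preimage layout spelled out in the `pi_hash_validium` doc comment above. The struct and field types here are stand-ins (the full method body is collapsed in this diff), so treat it as an illustration of the byte order, not the actual implementation:

```rust
use alloy_primitives::{keccak256, B256};

// Stand-in for the real BatchInfo; field names/types are assumed for illustration.
struct BatchInfoSketch {
    parent_state_root: B256,
    parent_batch_hash: B256,
    state_root: B256,
    batch_hash: B256,
    chain_id: u64,
    withdraw_root: B256,
    prev_msg_queue_hash: B256,
    post_msg_queue_hash: B256,
    encryption_key: Vec<u8>, // type assumed
}

impl BatchInfoSketch {
    // Mirrors the field order in the doc comment: version byte first, then the
    // roots/hashes, chain id, withdraw root, msg queue hashes, encryption key.
    fn pi_hash_validium_sketch(&self, version_byte: u8) -> B256 {
        let mut preimage = vec![version_byte];
        preimage.extend_from_slice(self.parent_state_root.as_slice());
        preimage.extend_from_slice(self.parent_batch_hash.as_slice());
        preimage.extend_from_slice(self.state_root.as_slice());
        preimage.extend_from_slice(self.batch_hash.as_slice());
        preimage.extend_from_slice(&self.chain_id.to_be_bytes());
        preimage.extend_from_slice(self.withdraw_root.as_slice());
        preimage.extend_from_slice(self.prev_msg_queue_hash.as_slice());
        preimage.extend_from_slice(self.post_msg_queue_hash.as_slice());
        preimage.extend_from_slice(&self.encryption_key);
        keccak256(preimage)
    }
}
```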
8 changes: 6 additions & 2 deletions crates/types/base/src/public_inputs/chunk.rs
@@ -181,6 +181,8 @@ impl std::fmt::Display for ChunkInfo {
.field("post_msg_queue_hash", &self.0.post_msg_queue_hash)
.field("tx_data_length", &self.0.tx_data_length)
.field("initial_block_number", &self.0.initial_block_number)
.field("prev_blockhash", &self.0.prev_blockhash)
.field("post_blockhash", &self.0.post_blockhash)
.field("block_ctxs", &"<omitted>")
.finish()
}
@@ -273,6 +275,7 @@ impl ChunkInfo {
/// Public input hash for a given chunk for L3 validium @ v1:
///
/// keccak(
/// version ||
/// chain id ||
/// prev state root ||
/// post state root ||
@@ -286,9 +289,10 @@
/// post blockhash ||
/// encryption key
/// )
pub fn pi_hash_validium_v1(&self) -> B256 {
pub fn pi_hash_validium(&self, version: Version) -> B256 {
keccak256(
std::iter::empty()
.chain(&[version.as_version_byte()])
.chain(&self.chain_id.to_be_bytes())
.chain(self.prev_state_root.as_slice())
.chain(self.post_state_root.as_slice())
@@ -337,7 +341,7 @@ impl MultiVersionPublicInputs for ChunkInfo {
(Domain::Scroll, STFVersion::V6) => self.pi_hash_by_fork(ForkName::EuclidV1),
(Domain::Scroll, STFVersion::V7) => self.pi_hash_by_fork(ForkName::EuclidV2),
(Domain::Scroll, STFVersion::V8) => self.pi_hash_by_fork(ForkName::Feynman),
(Domain::Validium, STFVersion::V1) => self.pi_hash_validium_v1(),
(Domain::Validium, STFVersion::V1) => self.pi_hash_validium(version),
(domain, stf_version) => {
unreachable!("unsupported version=({domain:?}, {stf_version:?})")
}
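The functional change in both the batch and chunk hash functions is the version byte now leading the keccak preimage. A tiny self-contained illustration (inputs are made up) of the effect: identical public inputs hashed under different version bytes give different digests, i.e. the version byte domain-separates proofs across versions:

```rust
use alloy_primitives::keccak256;

fn main() {
    // `body` stands in for the rest of a chunk/batch public-input preimage.
    let body = [0xabu8; 64];
    let pi_v1 = keccak256([&[0x01u8][..], &body[..]].concat());
    let pi_v2 = keccak256([&[0x02u8][..], &body[..]].concat());
    // Different leading version bytes => different public-input hashes.
    assert_ne!(pi_v1, pi_v2);
    println!("v1: {pi_v1:?}\nv2: {pi_v2:?}");
}
```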
49 changes: 32 additions & 17 deletions crates/types/batch/src/builder/validium.rs
@@ -1,26 +1,22 @@
use types_base::public_inputs::{batch::BatchInfo, chunk::ChunkInfo};

use crate::{
header::{BatchHeader, ValidiumBatchHeader, validium::BatchHeaderValidium},
payload::validium::{ValidiumEnvelopeV1, ValidiumPayloadV1},
use types_base::{
public_inputs::{batch::BatchInfo, chunk::ChunkInfo},
version::Version,
};

use crate::header::{BatchHeader, ValidiumBatchHeader, validium::BatchHeaderValidium};

pub struct ValidiumBuilderArgs {
pub version: u8,
pub header: BatchHeaderValidium,
pub chunk_infos: Vec<ChunkInfo>,
pub batch_bytes: Vec<u8>,
}

impl ValidiumBuilderArgs {
pub fn new(
header: BatchHeaderValidium,
chunk_infos: Vec<ChunkInfo>,
batch_bytes: Vec<u8>,
) -> Self {
pub fn new(version: u8, header: BatchHeaderValidium, chunk_infos: Vec<ChunkInfo>) -> Self {
Self {
version,
header,
chunk_infos,
batch_bytes,
}
}
}
@@ -29,15 +25,34 @@ pub struct ValidiumBatchInfoBuilder;

impl ValidiumBatchInfoBuilder {
pub fn build(args: ValidiumBuilderArgs) -> BatchInfo {
let envelope = ValidiumEnvelopeV1::from_bytes(args.batch_bytes.as_slice());
let payload = ValidiumPayloadV1::from_envelope(&envelope);
// Check that the batch's STF-version is correct.
let version = Version::from(args.version);
assert_eq!(version.stf_version as u8, args.header.version());

match &args.header {
BatchHeaderValidium::V1(_) => {
// Nothing to do for the v1 header since blob data is not included in validium.
}
}

// Validate payload (batch data).
let (first_chunk, last_chunk) = payload.validate(&args.header, args.chunk_infos.as_slice());
let (first_chunk, last_chunk) = (
args.chunk_infos
.first()
.expect("at least one chunk in batch"),
args.chunk_infos
.last()
.expect("at least one chunk in batch"),
);

// Additionally check that the batch's commitment field is set correctly.
// Check that the batch's commitment field is set correctly.
assert_eq!(last_chunk.post_blockhash.to_vec(), args.header.commitment());

// Check that the batch's state root is correct.
assert_eq!(last_chunk.post_state_root, args.header.post_state_root());

// Check that the batch's withdraw root is correct.
assert_eq!(last_chunk.withdraw_root, args.header.withdraw_root());

BatchInfo {
parent_state_root: first_chunk.prev_state_root,
parent_batch_hash: args.header.parent_batch_hash(),
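For reference, a hypothetical call site under the reworked API (module paths assumed; illustration only). Callers now pass the version byte and the chunk infos, with no `batch_bytes`, since the builder checks the header fields directly against the chunks instead of decoding an envelope/payload:

```rust
use types_base::public_inputs::{batch::BatchInfo, chunk::ChunkInfo};

use crate::{
    builder::validium::{ValidiumBatchInfoBuilder, ValidiumBuilderArgs},
    header::validium::BatchHeaderValidium,
};

fn build_validium_batch_info(
    version: u8,
    header: BatchHeaderValidium,
    chunk_infos: Vec<ChunkInfo>,
) -> BatchInfo {
    // `build` asserts that the version's STF byte matches the header version and
    // that the header's commitment, post state root and withdraw root agree with
    // the last chunk, before assembling the BatchInfo.
    let args = ValidiumBuilderArgs::new(version, header, chunk_infos);
    ValidiumBatchInfoBuilder::build(args)
}
```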
6 changes: 6 additions & 0 deletions crates/types/batch/src/header/mod.rs
@@ -28,6 +28,12 @@ pub trait BatchHeader {
pub trait ValidiumBatchHeader: BatchHeader {
/// The commitment attached to the batch header.
fn commitment(&self) -> Vec<u8>;

/// The state root after applying the batch.
fn post_state_root(&self) -> B256;

/// The withdraw root from the last block in the batch.
fn withdraw_root(&self) -> B256;
}

/// Reference header indicates the version of batch header based on which batch hash
16 changes: 16 additions & 0 deletions crates/types/batch/src/header/validium.rs
@@ -63,12 +63,28 @@ impl ValidiumBatchHeader for BatchHeaderValidium {
Self::V1(header) => header.commitment(),
}
}
fn post_state_root(&self) -> B256 {
match self {
Self::V1(header) => header.post_state_root(),
}
}
fn withdraw_root(&self) -> B256 {
match self {
Self::V1(header) => header.withdraw_root(),
}
}
}

impl ValidiumBatchHeader for BatchHeaderValidiumV1 {
fn commitment(&self) -> Vec<u8> {
self.commitment.to_vec()
}
fn post_state_root(&self) -> B256 {
self.post_state_root
}
fn withdraw_root(&self) -> B256 {
self.withdraw_root
}
}

impl BatchHeader for BatchHeaderValidium {
2 changes: 0 additions & 2 deletions crates/types/batch/src/payload/mod.rs
@@ -9,8 +9,6 @@ pub mod v7;

pub mod v8;

pub mod validium;

/// The number of data bytes we pack into each BLS12-381 scalar. The most-significant byte is 0.
pub const N_DATA_BYTES_PER_COEFFICIENT: usize = 31;
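A quick arithmetic note on this constant, which also explains the `4096 * 31` blob sizing in the code removed from `crates/integration` above: an EIP-4844 blob carries 4096 field elements, and with 31 usable data bytes per BLS12-381 scalar that gives 4096 × 31 = 126,976 payload bytes per blob. A one-line check:

```rust
// 4096 blob coefficients × 31 usable bytes each = 126,976 data bytes per blob.
const N_BLOB_COEFFICIENTS: usize = 4096;
const N_DATA_BYTES_PER_COEFFICIENT: usize = 31;

fn main() {
    assert_eq!(N_BLOB_COEFFICIENTS * N_DATA_BYTES_PER_COEFFICIENT, 126_976);
}
```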
