gasOracle & Sp1 prover #559

Merged · 14 commits · Sep 30, 2024
3 changes: 2 additions & 1 deletion .gitignore
@@ -13,7 +13,8 @@ contracts/yarn.lock
*/build

build/bin

cache
out
ops/l2-genesis/.devnet
ops/l2-genesis/.qanet
ops/l2-genesis/.testnet
87 changes: 20 additions & 67 deletions gas-oracle/app/src/abi/Rollup.json
@@ -17,12 +17,12 @@
},
{
"inputs": [],
"name": "ErrorIncorrectChunkLength",
"name": "ErrorIncorrectBatchLength",
"type": "error"
},
{
"inputs": [],
"name": "ErrorNoBlockInChunk",
"name": "ErrorNoBlockInBatch",
"type": "error"
},
{
@@ -240,25 +240,6 @@
"name": "UpdateFinalizationPeriodSeconds",
"type": "event"
},
{
"anonymous": false,
"inputs": [
{
"indexed": false,
"internalType": "uint256",
"name": "oldMaxNumTxInChunk",
"type": "uint256"
},
{
"indexed": false,
"internalType": "uint256",
"name": "newMaxNumTxInChunk",
"type": "uint256"
}
],
"name": "UpdateMaxNumTxInChunk",
"type": "event"
},
{
"anonymous": false,
"inputs": [
@@ -310,6 +291,19 @@
"stateMutability": "view",
"type": "function"
},
{
"inputs": [],
"name": "__maxNumTxInChunk",
"outputs": [
{
"internalType": "uint256",
"name": "",
"type": "uint256"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [
{
@@ -550,9 +544,9 @@
"type": "bytes"
},
{
"internalType": "bytes[]",
"name": "chunks",
"type": "bytes[]"
"internalType": "bytes",
"name": "blockContexts",
"type": "bytes"
},
{
"internalType": "bytes",
@@ -673,11 +667,6 @@
},
{
"inputs": [
{
"internalType": "uint256",
"name": "_batchIndex",
"type": "uint256"
},
{
"internalType": "bytes",
"name": "_batchHeader",
@@ -719,11 +708,6 @@
"name": "_verifier",
"type": "address"
},
{
"internalType": "uint256",
"name": "_maxNumTxInChunk",
"type": "uint256"
},
{
"internalType": "uint256",
"name": "_finalizationPeriodSeconds",
@@ -817,19 +801,6 @@
"stateMutability": "view",
"type": "function"
},
{
"inputs": [],
"name": "maxNumTxInChunk",
"outputs": [
{
"internalType": "uint256",
"name": "",
"type": "uint256"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [],
"name": "messageQueue",
@@ -891,12 +862,7 @@
},
{
"internalType": "bytes",
"name": "_aggrProof",
"type": "bytes"
},
{
"internalType": "bytes",
"name": "_kzgDataProof",
"name": "_batchProof",
"type": "bytes"
}
],
@@ -995,19 +961,6 @@
"stateMutability": "nonpayable",
"type": "function"
},
{
"inputs": [
{
"internalType": "uint256",
"name": "_maxNumTxInChunk",
"type": "uint256"
}
],
"name": "updateMaxNumTxInChunk",
"outputs": [],
"stateMutability": "nonpayable",
"type": "function"
},
{
"inputs": [
{
@@ -1070,4 +1023,4 @@
"stateMutability": "payable",
"type": "receive"
}
]
]
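The `chunks` parameter of type `bytes[]` becomes a single `blockContexts` value of type `bytes`, so a submitter now packs every block context into one flat byte string per batch instead of an array of chunk blobs. Below is a minimal encode-side sketch, assuming the BatchCodecV0 framing referenced further down in `calculate.rs` (2-byte big-endian block count followed by one 60-byte context per block); the helper names and the assumption that the block number occupies the first 8 bytes of a context are illustrative, not taken from the contracts.

```rust
/// Hypothetical encoder for the flat `blockContexts` payload: 2-byte
/// big-endian block count followed by one 60-byte context per block.
/// Field offsets inside a context are illustrative: block number in the
/// first 8 bytes, total tx count at 56..58, L1 tx count at 58..60.
fn build_block_context(block_number: u64, txs: u16, l1_txs: u16) -> [u8; 60] {
    let mut ctx = [0u8; 60];
    ctx[0..8].copy_from_slice(&block_number.to_be_bytes());
    ctx[56..58].copy_from_slice(&txs.to_be_bytes());
    ctx[58..60].copy_from_slice(&l1_txs.to_be_bytes());
    ctx
}

fn encode_block_contexts(contexts: &[[u8; 60]]) -> Vec<u8> {
    let mut out = Vec::with_capacity(2 + 60 * contexts.len());
    out.extend_from_slice(&(contexts.len() as u16).to_be_bytes()); // num blocks
    for ctx in contexts {
        out.extend_from_slice(ctx); // each 60-byte block context, back to back
    }
    out
}
```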
58 changes: 2 additions & 56 deletions gas-oracle/app/src/da_scalar/blob.rs
@@ -9,8 +9,6 @@ use ethers::{
use super::zstd_util::init_zstd_decoder;

const MAX_BLOB_TX_PAYLOAD_SIZE: usize = 131072; // 131072 = 4096 * 32 = 1024 * 4 * 32 = 128kb
const MAX_AGG_SNARKS: usize = 45;
const METADATA_LENGTH: usize = 2 + 4 * MAX_AGG_SNARKS;

#[derive(Debug, Clone)]
pub struct Blob(pub [u8; MAX_BLOB_TX_PAYLOAD_SIZE]);
@@ -20,7 +18,7 @@ impl Blob {
let compressed_data = self.get_compressed_batch()?;
decompress_batch(&compressed_data)
}

pub fn get_compressed_batch(&self) -> Result<Vec<u8>, BlobError> {
// Decode blob, recovering BLS12-381 scalars.
let mut data = vec![0u8; MAX_BLOB_TX_PAYLOAD_SIZE];
@@ -36,9 +34,7 @@
}

// detect_zstd_compressed
let compressed_batch = Self::detect_zstd_compressed(data)?;

Ok(compressed_batch)
Ok(Self::detect_zstd_compressed(data)?)
}

fn detect_zstd_compressed(decoded_blob: Vec<u8>) -> Result<Vec<u8>, BlobError> {
@@ -112,40 +108,8 @@ impl Blob {
compressed_data.len() as f32 / MAX_BLOB_TX_PAYLOAD_SIZE as f32,
orgin_content_size as f32 / compressed_data.len() as f32
);

Self::decode_raw_tx_payload(origin_batch)?;
Ok(())
}

// The format of batch is as follows:
// origin_batch = be_bytes(num_valid_chunks as u16) || be_bytes(chunks[0].chunk_size as u32) ||
// ...be_bytes(chunks[MAX_AGG_SNARKS-1].chunk_size as u32)||all_l2_tx_signed_rlp_in_batch
pub fn decode_raw_tx_payload(origin_batch: Vec<u8>) -> Result<Vec<u8>, BlobError> {
if origin_batch.len() < METADATA_LENGTH {
log::warn!("batch.len < METADATA_LENGTH ");
return Ok(Vec::new());
}
let num_valid_chunks = u16::from_be_bytes(origin_batch[0..2].try_into().unwrap()); // size of num_valid_chunks is 2bytes.
if num_valid_chunks as usize > MAX_AGG_SNARKS {
return Err(BlobError::InvalidData(anyhow!(format!(
"Invalid blob data: num_valid_chunks bigger than MAX_AGG_SNARKS. parsed num_valid_chunks: {}",
num_valid_chunks
))));
}

let data_size: u64 = origin_batch[2..2 + 4 * num_valid_chunks as usize]
.chunks_exact(4)
.map(|chunk| u32::from_be_bytes(chunk.try_into().unwrap()) as u64)
.sum();

let tx_payload_end = METADATA_LENGTH + data_size as usize;
if origin_batch.len() < tx_payload_end {
return Err(BlobError::InvalidData(anyhow!(
"The batch does not contain the complete tx_payload"
)));
}
Ok(origin_batch[METADATA_LENGTH..tx_payload_end].to_vec())
}
}

pub fn decompress_batch(compressed_batch: &Vec<u8>) -> Result<Vec<u8>, BlobError> {
@@ -212,8 +176,6 @@ pub enum BlobError {
Error(eyre::Error),
#[error("{0}")]
InvalidBlob(eyre::Error),
#[error("{0}")]
InvalidData(eyre::Error),
}

pub fn kzg_to_versioned_hash(commitment: &[u8]) -> H256 {
@@ -235,10 +197,6 @@ mod tests {

#[test]
fn test_decode_blob_with_zstd_batch() {
use crate::da_scalar::{
calculate::decode_transactions_from_blob, typed_tx::TypedTransaction,
};

let blob_bytes = load_zstd_blob();
let blob = Blob(blob_bytes);

@@ -250,18 +208,6 @@

let origin_batch = super::decompress_batch(&compressed_batch).unwrap();
assert_eq!(origin_batch.len(), 125091);

let chunks_len = u16::from_be_bytes(origin_batch[0..2].try_into().expect("chunks_len"));
// size of num_valid_chunks is 2bytes.
assert_eq!(chunks_len, 11);

let tx_payload =
super::Blob::decode_raw_tx_payload(origin_batch).expect("decode_raw_tx_payload");
assert!(tx_payload.len() == 124909, "tx_payload.len()");

let txs_decoded: Vec<TypedTransaction> =
decode_transactions_from_blob(tx_payload.as_slice());
assert!(txs_decoded.len() == 200, "txs_decoded.len()");
}

#[test]
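With `decode_raw_tx_payload` and the chunk-metadata constants removed, `get_compressed_batch` now only recovers the scalar field bytes and hands them to `detect_zstd_compressed`, whose body is not part of this hunk. As a generic illustration only (not this repository's implementation), a zstd-compressed payload can be located inside the decoded blob bytes by its frame magic number `0xFD2FB528`:

```rust
/// Generic sketch only: find a zstd frame inside the decoded blob bytes by
/// scanning for the zstd magic number and trimming the trailing zero padding
/// left over from the fixed 128 KB blob. Production code would track the
/// payload length explicitly instead of relying on padding.
const ZSTD_MAGIC: &[u8] = &[0x28, 0xB5, 0x2F, 0xFD]; // frame magic 0xFD2FB528

fn find_zstd_payload(decoded_blob: &[u8]) -> Option<Vec<u8>> {
    // Start of the zstd frame.
    let start = decoded_blob.windows(4).position(|w| w == ZSTD_MAGIC)?;
    // Last non-zero byte marks the end of meaningful data.
    let end = decoded_blob.iter().rposition(|&b| b != 0)? + 1;
    (end > start).then(|| decoded_blob[start..end].to_vec())
}
```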
45 changes: 23 additions & 22 deletions gas-oracle/app/src/da_scalar/calculate.rs
@@ -81,37 +81,38 @@ pub(super) fn extract_tx_payload(
Ok(tx_payload)
}

pub(super) fn extract_txn_num(chunks: Vec<Bytes>) -> Option<u64> {
if chunks.is_empty() {
pub fn extract_txn_num(block_contexts: Bytes) -> Option<u64> {
if block_contexts.is_empty() || block_contexts.len() < 2 {
return None;
}

let mut txn_in_batch = 0;
let mut l1_txn_in_batch = 0;
for chunk in chunks.iter() {
let mut chunk_bn: Vec<u64> = vec![];
let bs: &[u8] = chunk;
// decode blockcontext from chunk
// | 1 byte | 60 bytes | ... | 60 bytes |
// | num blocks | block 1 | ... | block n |
let num_blocks = U256::from_big_endian(bs.get(..1)?);
for i in 0..num_blocks.as_usize() {
let block_num = U256::from_big_endian(bs.get((60.mul(i) + 1)..(60.mul(i) + 1 + 8))?);
let txs_num =
U256::from_big_endian(bs.get((60.mul(i) + 1 + 56)..(60.mul(i) + 1 + 58))?);
let l1_txs_num =
U256::from_big_endian(bs.get((60.mul(i) + 1 + 58)..(60.mul(i) + 1 + 60))?);
txn_in_batch += txs_num.as_u32();
l1_txn_in_batch += l1_txs_num.as_u32();
chunk_bn.push(block_num.as_u64());
}
let mut txn_in_batch = 0u64;
let mut l1_txn_in_batch = 0u64;
let bs: &[u8] = &block_contexts;

// decode blocks from batch
// | 2 byte | 60 bytes | ... | 60 bytes |
// | num blocks | block 1 | ... | block n |
// https://github.com/morph-l2/morph/blob/main/contracts/contracts/libraries/codec/BatchCodecV0.sol
let num_blocks: u16 = ((bs[0] as u16) << 8) | (bs[1] as u16);

for i in 0..num_blocks as usize {
let txs_num = u16::from_be_bytes(
bs.get((60.mul(i) + 2 + 56)..(60.mul(i) + 2 + 58))?.try_into().ok()?,
);
let l1_txs_num = u16::from_be_bytes(
bs.get((60.mul(i) + 2 + 58)..(60.mul(i) + 2 + 60))?.try_into().ok()?,
);
txn_in_batch += txs_num as u64;
l1_txn_in_batch += l1_txs_num as u64;
}

log::debug!("total_txn_in_batch: {:#?}, l1_txn_in_batch: {:#?}", txn_in_batch, l1_txn_in_batch);
if txn_in_batch < l1_txn_in_batch {
log::error!("total_txn_in_batch < l1_txn_in_batch");
return None;
}
Some((txn_in_batch - l1_txn_in_batch) as u64)
Some(txn_in_batch - l1_txn_in_batch)
}

#[derive(Debug, Serialize, Deserialize)]
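Because `extract_txn_num` now takes the flat `blockContexts` byte string directly, it can be exercised with a hand-built buffer. The sketch below shows such a check as it might sit alongside the function in this module, assuming only the layout documented in the comment above (2-byte big-endian block count, 60-byte contexts, total tx count at context offsets 56..58, L1 tx count at 58..60); the helper and test names are illustrative.

```rust
use ethers::types::Bytes;

/// Illustrative helper: a 60-byte block context with only the two tx-count
/// fields populated (total txs at offsets 56..58, L1 txs at 58..60).
fn dummy_context(txs: u16, l1_txs: u16) -> [u8; 60] {
    let mut ctx = [0u8; 60];
    ctx[56..58].copy_from_slice(&txs.to_be_bytes());
    ctx[58..60].copy_from_slice(&l1_txs.to_be_bytes());
    ctx
}

#[test]
fn extract_txn_num_counts_only_l2_txs() {
    // Two blocks: 10 txs (2 from L1) and 5 txs (1 from L1).
    let contexts = [dummy_context(10, 2), dummy_context(5, 1)];

    // 2-byte big-endian block count, then the 60-byte contexts back to back.
    let mut raw = Vec::with_capacity(2 + 60 * contexts.len());
    raw.extend_from_slice(&(contexts.len() as u16).to_be_bytes());
    for ctx in &contexts {
        raw.extend_from_slice(ctx);
    }

    // (10 + 5) total minus (2 + 1) L1-originated = 12 L2 transactions.
    assert_eq!(extract_txn_num(Bytes::from(raw)), Some(12));
}
```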
4 changes: 2 additions & 2 deletions gas-oracle/app/src/da_scalar/l1_scalar.rs
@@ -259,8 +259,8 @@ impl ScalarUpdater {
)))
})?;

let chunks: Vec<Bytes> = param.batch_data_input.chunks;
let l2_txn = extract_txn_num(chunks).unwrap_or(0);
let block_contexts: Bytes = param.batch_data_input.block_contexts;
let l2_txn = extract_txn_num(block_contexts).unwrap_or(0);

//Step3. Calculate l2 data gas
let l2_data_len = self