
Commit

clippy
kariy committed Sep 24, 2024
1 parent a095d9d commit 526ebc5
Showing 6 changed files with 128 additions and 34 deletions.
139 changes: 112 additions & 27 deletions crates/katana/primitives/src/da/encoding.rs
@@ -1,3 +1,36 @@
//! Data availability encoding and decoding.
//!
//! The encoding format is based on the format in which Starknet publishes its state diffs onto
//! the Ethereum blockchain; refer to the Starknet [docs](https://docs.starknet.io/architecture-and-concepts/network-architecture/data-availability/) for more information.
//!
//! Example of a Starknet encoded state diff that might be published onchain:
//!
//! ```
//! ┌───────┬─────────────────────────────────────────────────────────────────────┐
//! │ Index │ Field Element │
//! ├───────┼─────────────────────────────────────────────────────────────────────┤
//! │ [0] │ 1 │
//! │ [1] │ 2019172390095051323869047481075102003731246132997057518965927979... │
//! │ [2] │ 18446744073709551617 │
//! │ [3] │ 100 │
//! │ [4] │ 200 │
//! │ [5] │ 1 │
//! │ [6] │ 1351148242645005540004162531550805076995747746087542030095186557... │
//! │ [7] │ 5584042735604047785084552540304580210136563524662166906885950118... │
//! └───────┴─────────────────────────────────────────────────────────────────────┘
//!
//! Explanation:
//!
//! [0] The number of contracts whose state was updated.
//! [1] The address of the first, and only, contract whose state changed.
//! [2] Metadata about the update; see [Metadata] for more details.
//! [3] Key of the storage update.
//! [4] Value of the storage update (the value at key 100 is set to 200).
//! [5] New declarations section: 1 declare v2 transaction in this state update.
//! [6] Encoding of the class hash of the declared class.
//! [7] Encoding of the compiled class hash of the declared class.
//! ```
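As a rough, test-style sketch of the layout above: the snippet below feeds an input with the same shape through the `decode_state_updates` API introduced in this commit. The address and class-hash values are placeholders (the real on-chain felts are truncated in the table), and direct access to the `StateUpdates` fields is assumed to be public, as in the in-crate tests further down.

```rust
use katana_primitives::da::encoding::decode_state_updates;
use num_bigint::BigUint;

#[test]
fn decode_layout_from_module_docs() {
    // Same shape as the documented example, with small placeholder felts for
    // the address and class hashes.
    let input: Vec<BigUint> = [
        1u128,                     // [0] one contract updated
        0xC0FFEEu128,              // [1] placeholder contract address
        18446744073709551617u128,  // [2] metadata: nonce = 1, one storage update
        100u128,                   // [3] storage key
        200u128,                   // [4] storage value
        1u128,                     // [5] one class declaration
        0xABCDu128,                // [6] placeholder class hash
        0xDCBAu128,                // [7] placeholder compiled class hash
    ]
    .into_iter()
    .map(BigUint::from)
    .collect();

    let state_updates = decode_state_updates(&input).expect("well-formed input");
    assert_eq!(state_updates.storage_updates.len(), 1);
    assert_eq!(state_updates.declared_classes.len(), 1);
}
```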

use std::collections::BTreeMap;

use num_bigint::BigUint;
@@ -8,8 +41,35 @@ use crate::contract::{ContractAddress, StorageKey, StorageValue};
use crate::state::StateUpdates;
use crate::Felt;

use super::eip4844::BLOB_LEN;
#[derive(Debug, thiserror::Error)]
pub enum EncodingError {
#[error("Missing contract updates entry count")]
MissingUpdatesCount,
#[error("Missing class declarations entry count")]
MissingDeclarationsCount,
#[error("Missing contract address")]
MissingAddress,
#[error("Missing contract update metadata")]
MissingMetadata,
#[error("Missing updated storage key")]
MissingStorageKey,
#[error("Missing updated storage value")]
MissingStorageValue,
#[error("Missing new updated class hash")]
MissingNewClassHash,
#[error("Missing class hash")]
MissingClassHash,
#[error("Missing compiled class hash")]
MissingCompiledClassHash,
#[error("invalid value")]
InvalidValue,
}

/// This function doesn't enforce that the resulting [Vec] is of a certain length.
///
/// In a scenario where the state diffs of a block correspond to a single data availability
/// blob object (e.g., an EIP-4844 blob), it is the sequencer's responsibility to ensure that
/// the state diffs fit inside that single blob object.
pub fn encode_state_updates(value: StateUpdates) -> Vec<BigUint> {
let mut contract_updates = BTreeMap::<ContractAddress, ContractUpdate>::new();

@@ -53,30 +113,49 @@ pub fn encode_state_updates(value: StateUpdates) -> Vec<BigUint> {
buffer
}
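Since the encoder does not bound its output, a caller targeting a single EIP-4844 blob has to enforce the size limit itself. A minimal sketch of such a guard is shown below; the 4096-field-element capacity is assumed here rather than imported, since the visibility of the crate's own `da::eip4844::BLOB_LEN` constant outside this module isn't shown in the diff.

```rust
use katana_primitives::da::encoding::encode_state_updates;
use katana_primitives::state::StateUpdates;

// Assumed capacity of one EIP-4844 blob in field elements; the crate keeps its
// own constant as `da::eip4844::BLOB_LEN`.
const BLOB_LEN: usize = 4096;

#[test]
fn state_diff_fits_in_a_single_blob() {
    let felts = encode_state_updates(StateUpdates::default());
    assert!(felts.len() <= BLOB_LEN, "state diff does not fit in a single blob");
}
```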

pub fn decode_state_updates<'a>(value: impl IntoIterator<Item = &'a BigUint>) -> StateUpdates {
/// Similar to [encode_state_updates], this function doesn't enforce that the input list of
/// [BigUint] values is of a certain length either.
///
/// # Errors
///
/// Returns an error if the input is exhausted before all of the expected fields have been
/// decoded.
pub fn decode_state_updates(value: &[BigUint]) -> Result<StateUpdates, EncodingError> {
let mut state_updates = StateUpdates::default();
let mut iter = value.into_iter();

let total_contract_updates = iter.next().and_then(|v| v.to_usize()).expect("valid usize");
if value.is_empty() {
return Ok(state_updates);
}

let mut iter = value.iter();

let total_updates = iter.next().ok_or(EncodingError::MissingUpdatesCount)?;
let total_updates = total_updates.to_usize().ok_or(EncodingError::InvalidValue)?;

for _ in 0..total_updates {
let address = iter.next().ok_or(EncodingError::MissingAddress)?;
let address: ContractAddress = Felt::from(address).into();

for _ in 0..total_contract_updates {
let address: ContractAddress = iter.next().map(Felt::from).expect("valid address").into();
let metadata = iter.next().map(Metadata::decode).expect("valid metadata");
let metadata = iter.next().ok_or(EncodingError::MissingMetadata)?;
let metadata = Metadata::decode(metadata);

let class_hash = if metadata.class_information_flag {
iter.next().map(Felt::from).map(Some).expect("valid class hash")
let hash = iter.next().ok_or(EncodingError::MissingNewClassHash)?;
Some(Felt::from(hash))
} else {
None
};

let mut storages = BTreeMap::new();

for _ in 0..metadata.total_storage_updates {
let key = iter.next().map(StorageKey::from).expect("valid storage key");
if let Some(value) = iter.next().map(StorageValue::from) {
storages.insert(key, value);
} else {
return state_updates;
}
let key = iter.next().ok_or(EncodingError::MissingStorageKey)?;
let key = StorageKey::from(key);

let value = iter.next().ok_or(EncodingError::MissingStorageValue)?;
let value = StorageValue::from(value);

storages.insert(key, value);
}

if !storages.is_empty() {
@@ -92,24 +171,30 @@ pub fn decode_state_updates<'a>(value: impl IntoIterator<Item = &'a BigUint>) ->
}
}

let total_declared_classes = iter.next().and_then(|v| v.to_usize()).expect("valid usize");
let total_declarations = iter.next().ok_or(EncodingError::MissingDeclarationsCount)?;
let total_declarations = total_declarations.to_usize().ok_or(EncodingError::InvalidValue)?;

for _ in 0..total_declarations {
let class_hash = iter.next().ok_or(EncodingError::MissingClassHash)?;
let class_hash = ClassHash::from(class_hash);

let compiled_class_hash = iter.next().ok_or(EncodingError::MissingCompiledClassHash)?;
let compiled_class_hash = CompiledClassHash::from(compiled_class_hash);

for _ in 0..total_declared_classes {
let class_hash = iter.next().map(ClassHash::from).expect("valid class hash");
let compiled_class_hash =
iter.next().map(CompiledClassHash::from).expect("valid compiled class hash");
state_updates.declared_classes.insert(class_hash, compiled_class_hash);
}

state_updates
Ok(state_updates)
}
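A quick sketch of what the switch from `expect` to `Result` buys the caller: a truncated input now surfaces as a typed `EncodingError` instead of a panic. Only the public API shown above is used; the test name is illustrative.

```rust
use katana_primitives::da::encoding::{decode_state_updates, EncodingError};
use num_bigint::BigUint;

#[test]
fn truncated_input_is_an_error_not_a_panic() {
    // A lone update count with no contract entry behind it.
    let truncated = vec![BigUint::from(1u32)];
    assert!(matches!(
        decode_state_updates(&truncated),
        Err(EncodingError::MissingAddress)
    ));
}
```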

/// Metadata information about the contract update.
///
/// Encoding format:
///
/// |---padding---|---class flag---|---new nonce---|---no. storage updates---|
/// 127 bits 1 bit 64 bits 64 bits
// Encoding format:
//
// ┌───────────────┬───────────────┬───────────────┬───────────────────────────┐
// │ padding │ class flag │ new nonce │ no. storage updates │
// ├───────────────┼───────────────┼───────────────┼───────────────────────────┤
// │ 127 bits │ 1 bit │ 64 bits │ 64 bits │
// └───────────────┴───────────────┴───────────────┴───────────────────────────┘
#[derive(Debug, Default)]
struct Metadata {
/// Class information flag, whose value in the encoded format is one of the following:
@@ -135,7 +220,7 @@ impl Metadata {

let flag = bits.get(127..(127 + 1)).unwrap();
let flag = u8::from_str_radix(flag, 2).unwrap();
let class_information_flag = if flag == 1 { true } else { false };
let class_information_flag = flag == 1;

let nonce = bits.get(128..(128 + 64)).unwrap();
let nonce = u64::from_str_radix(nonce, 2).unwrap();
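For readers who prefer shifts and masks over the bit-string slicing above, here is an equivalent standalone reading of the documented metadata layout. This is a sketch, not the crate's private `Metadata` type; it assumes `num-traits` is available (the file already uses `to_usize` from it).

```rust
use num_bigint::BigUint;
use num_traits::{One, ToPrimitive};

/// Low 64 bits = storage-update count, next 64 bits = new nonce,
/// bit 128 = class information flag, upper 127 bits = padding.
fn split_metadata(word: &BigUint) -> (bool, u64, u64) {
    let mask = BigUint::from(u64::MAX);
    let storage_updates = (word & &mask).to_u64().unwrap();
    let nonce = ((word >> 64u32) & &mask).to_u64().unwrap();
    let class_flag = ((word >> 128u32) & BigUint::one()) == BigUint::one();
    (class_flag, nonce, storage_updates)
}

#[test]
fn metadata_word_from_module_docs() {
    // 2^64 + 1, the metadata felt in the module-level example: nonce = 1,
    // one storage update, class flag unset.
    let word = BigUint::from(u64::MAX) + 2u32;
    assert_eq!(split_metadata(&word), (false, 1, 1));
}
```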
@@ -254,7 +339,7 @@ mod tests {
biguint!("558404273560404778508455254030458021013656352466216690688595011803280448032"),
];

let state_updates = super::decode_state_updates(&input);
let state_updates = super::decode_state_updates(&input).unwrap();

assert_eq!(state_updates.nonce_updates.len(), 1);
assert_eq!(state_updates.storage_updates.len(), 1);
19 changes: 13 additions & 6 deletions crates/katana/primitives/tests/blobs.rs
@@ -1,17 +1,24 @@
use katana_primitives::da::{blob, encoding, serde::parse_str_to_blob_data};
use anyhow::Result;
use katana_primitives::da::encoding::encode_state_updates;
use katana_primitives::da::serde::parse_str_to_blob_data;
use katana_primitives::da::{blob, encoding};
use num_bigint::BigUint;
use rstest::rstest;

fn read(path: &str) -> Vec<BigUint> {
let content = std::fs::read_to_string(path).unwrap();
let content = content.trim();
parse_str_to_blob_data(content.strip_prefix("0x").unwrap_or(&content))
parse_str_to_blob_data(content.strip_prefix("0x").unwrap_or(content))
}

/// Pre-SNAR Tree blobs
#[rstest]
#[case("./tests/test-data/blobs/blob1.txt")]
fn parse_blobs(#[case] blob: &str) {
#[case("./tests/test-data/blobs/block_636262.txt")]
#[case("./tests/test-data/blobs/block_636263.txt")]
#[case("./tests/test-data/blobs/block_636264.txt")]
fn parse_blobs_rt(#[case] blob: &str) -> Result<()> {
let encoded = blob::recover(read(blob));
let state_update = encoding::decode_state_updates(&encoded);
println!("{}", serde_json::to_string_pretty(&state_update).unwrap());
let state_update = encoding::decode_state_updates(&encoded)?;
let _ = encode_state_updates(state_update);
Ok(())
}
1 change: 0 additions & 1 deletion crates/katana/primitives/tests/test-data/blobs/blob1.txt

This file was deleted.

Large diffs are not rendered by default (presumably the new test-data blob fixtures block_636262.txt, block_636263.txt, and block_636264.txt referenced by the updated test).
