
Commit e8faa96

fix: remove tiny-keccak (#168)
1 parent 4f32af3 commit e8faa96
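
The diff below removes the tiny-keccak dependency from the chunk crate and reworks crates/types/chunk/src/types.rs to hash through alloy_primitives::keccak256 instead: data that was previously fed into a streaming Keccak::v256() hasher is now collected into a pre-sized byte buffer and hashed in one call. A minimal standalone comparison of the two patterns (not the project's code; it needs both crates and uses placeholder payloads), showing they produce the same digest:

```rust
use alloy_primitives::{keccak256, B256};
use tiny_keccak::{Hasher, Keccak};

fn main() {
    // Placeholder payloads standing in for encoded headers / transactions.
    let chunks: Vec<Vec<u8>> = vec![vec![1, 2, 3], vec![4, 5, 6]];

    // Before: stream each piece into a tiny-keccak hasher, then finalize.
    let mut hasher = Keccak::v256();
    for chunk in &chunks {
        hasher.update(chunk);
    }
    let mut streamed = B256::ZERO;
    hasher.finalize(&mut streamed.0);

    // After: gather everything into one pre-sized buffer and hash it once.
    let mut buffer = Vec::with_capacity(chunks.iter().map(Vec::len).sum());
    for chunk in &chunks {
        buffer.extend_from_slice(chunk);
    }
    let one_shot = keccak256(&buffer);

    // Both paths yield the same Keccak-256 digest.
    assert_eq!(streamed, one_shot);
}
```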

File tree

3 files changed: +50 -43 lines changed


Cargo.lock

Lines changed: 0 additions & 1 deletion
Some generated files are not rendered by default.

crates/types/chunk/Cargo.toml

Lines changed: 0 additions & 1 deletion
@@ -19,7 +19,6 @@ sbv-primitives = { workspace = true, features = ["hardforks", "scroll-hardforks"
 sbv-kv = { workspace = true }
 serde.workspace = true
 itertools.workspace = true
-tiny-keccak.workspace = true
 
 openvm = { workspace = true, features = ["std"] }
 openvm-rv32im-guest = { workspace = true }

crates/types/chunk/src/types.rs

Lines changed: 50 additions & 41 deletions
@@ -1,5 +1,7 @@
 use std::ops::Deref;
 
+use alloy_primitives::keccak256;
+use sbv_helpers::manually_drop_on_zkvm;
 use sbv_primitives::{
     B256, U256,
     types::{
@@ -8,8 +10,11 @@ use sbv_primitives::{
     },
 };
 
-// FIXME as alloy-primitive
-use tiny_keccak::{Hasher, Keccak};
+const LEGACY_DA_HEADER_LEN: usize = size_of::<u64>() // block number
+    + size_of::<u64>() // timestamp
+    + U256::BYTES // base fee per gas
+    + size_of::<u64>() // gas limit
+    + size_of::<u16>(); // l1 tx count
 
 pub trait ChunkExt {
     /// Hash the transaction bytes.
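
For reference, LEGACY_DA_HEADER_LEN evaluates to 58 bytes per block: 8 (block number) + 8 (timestamp) + 32 (base fee per gas as a U256) + 8 (gas limit) + 2 (L1 tx count). A quick standalone check of that arithmetic, using plain std sizes with 32 written out in place of U256::BYTES:

```rust
use std::mem::size_of;

// Field widths mirror the constant added in the hunk above; 32 stands in for U256::BYTES.
const LEGACY_DA_HEADER_LEN: usize = size_of::<u64>() // block number
    + size_of::<u64>() // timestamp
    + 32 // base fee per gas
    + size_of::<u64>() // gas limit
    + size_of::<u16>(); // l1 tx count

fn main() {
    assert_eq!(LEGACY_DA_HEADER_LEN, 58);
}
```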
@@ -29,23 +34,35 @@ impl<T: Deref<Target = [RecoveredBlock<Block>]>> ChunkExt for T {
         blocks
             .iter()
             .flat_map(|b| b.body().transactions.iter())
-            .tx_bytes_hash_in(rlp_buffer.as_mut())
+            .tx_bytes_hash_in(rlp_buffer)
     }
 
     #[inline]
     fn legacy_data_hash(&self) -> B256 {
         let blocks = self.as_ref();
 
-        let mut data_hasher = Keccak::v256();
+        let num_l1_txs: usize = blocks
+            .iter()
+            .map(|b| {
+                b.body()
+                    .transactions
+                    .iter()
+                    .filter(|tx| tx.is_l1_message())
+                    .count()
+            })
+            .sum();
+
+        let mut buffer = manually_drop_on_zkvm!(Vec::with_capacity(
+            blocks.len() * LEGACY_DA_HEADER_LEN + num_l1_txs * size_of::<B256>(),
+        ));
+
         for block in blocks.iter() {
-            block.legacy_hash_da_header(&mut data_hasher);
+            block.encode_legacy_da_header(&mut buffer);
         }
         for block in blocks.iter() {
-            block.legacy_hash_l1_msg(&mut data_hasher);
+            block.encode_legacy_l1_msg(&mut buffer);
         }
-        let mut data_hash = B256::ZERO;
-        data_hasher.finalize(&mut data_hash.0);
-        data_hash
+        keccak256(&*buffer)
     }
 
     #[inline]
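
With that constant, legacy_data_hash can size its buffer exactly: one 58-byte header per block plus 32 bytes per L1 message hash, filled in two passes and hashed once at the end. A simplified sketch of the same sizing-then-hashing flow, using a plain Vec in place of the manually_drop_on_zkvm! wrapper and hypothetical pre-encoded inputs:

```rust
use std::mem::size_of;

use alloy_primitives::{keccak256, B256};

const LEGACY_DA_HEADER_LEN: usize = 58; // see the arithmetic above

// Hypothetical inputs: already-encoded 58-byte DA headers and the L1 message hashes.
fn legacy_data_hash(headers: &[[u8; LEGACY_DA_HEADER_LEN]], l1_msg_hashes: &[B256]) -> B256 {
    // Reserve exactly what will be written: headers first, then L1 message hashes.
    let mut buffer = Vec::with_capacity(
        headers.len() * LEGACY_DA_HEADER_LEN + l1_msg_hashes.len() * size_of::<B256>(),
    );
    for header in headers {
        buffer.extend_from_slice(header);
    }
    for hash in l1_msg_hashes {
        buffer.extend_from_slice(hash.as_slice());
    }
    // Single keccak over the whole concatenation.
    keccak256(&buffer)
}

fn main() {
    let headers = [[0u8; LEGACY_DA_HEADER_LEN]; 2];
    let l1_msg_hashes = [B256::ZERO];
    println!("{}", legacy_data_hash(&headers, &l1_msg_hashes));
}
```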
@@ -66,82 +83,74 @@ trait TxBytesHashExt {
     fn tx_bytes_hash_in(self, rlp_buffer: &mut Vec<u8>) -> (usize, B256);
 }
 
-impl<'a, I: IntoIterator<Item = &'a TransactionSigned>> TxBytesHashExt for I
-where
-    I: IntoIterator<Item = &'a TransactionSigned>,
+impl<'a, I: Iterator<Item = &'a TransactionSigned>> TxBytesHashExt for I
 {
     #[inline]
     fn tx_bytes_hash_in(self, rlp_buffer: &mut Vec<u8>) -> (usize, B256) {
-        use tiny_keccak::{Hasher, Keccak};
-
-        let mut tx_bytes_hasher = Keccak::v256();
-        let mut len = 0;
-
+        rlp_buffer.clear();
         // Ignore L1 msg txs.
-        for tx in self.into_iter().filter(|&tx| !tx.is_l1_message()) {
+        for tx in self.filter(|&tx| !tx.is_l1_message()) {
             tx.encode_2718(rlp_buffer);
-            len += rlp_buffer.len();
-            tx_bytes_hasher.update(rlp_buffer);
-            rlp_buffer.clear();
         }
-
-        let mut tx_bytes_hash = B256::ZERO;
-        tx_bytes_hasher.finalize(&mut tx_bytes_hash.0);
-        (len, tx_bytes_hash)
+        let hash = keccak256(&rlp_buffer);
+        rlp_buffer.clear();
+        (rlp_buffer.len(), hash)
     }
 }
 
 /// Chunk related extension methods for Block
 trait BlockChunkExt {
     /// Hash the header of the block
-    fn legacy_hash_da_header(&self, hasher: &mut impl tiny_keccak::Hasher);
+    fn encode_legacy_da_header(&self, buffer: &mut Vec<u8>);
     /// Hash the l1 messages of the block
-    fn legacy_hash_l1_msg(&self, hasher: &mut impl Hasher);
+    fn encode_legacy_l1_msg(&self, buffer: &mut Vec<u8>);
     /// Hash the l1 messages of the block
     fn hash_msg_queue(&self, initial_queue_hash: &B256) -> B256;
 }
 
 impl BlockChunkExt for RecoveredBlock<Block> {
     #[inline]
-    fn legacy_hash_da_header(&self, hasher: &mut impl Hasher) {
-        hasher.update(&self.number.to_be_bytes());
-        hasher.update(&self.timestamp.to_be_bytes());
-        hasher.update(
+    fn encode_legacy_da_header(&self, buffer: &mut Vec<u8>) {
+        buffer.extend_from_slice(&self.number.to_be_bytes());
+        buffer.extend_from_slice(&self.timestamp.to_be_bytes());
+        buffer.extend_from_slice(
             &U256::from_limbs([self.base_fee_per_gas.unwrap_or_default(), 0, 0, 0])
                 .to_be_bytes::<{ U256::BYTES }>(),
         );
-        hasher.update(&self.gas_limit.to_be_bytes());
+        buffer.extend_from_slice(&self.gas_limit.to_be_bytes());
         // FIXME: l1 tx could be skipped, the actual tx count needs to be calculated
-        hasher.update(&(self.body().transactions.len() as u16).to_be_bytes());
+        buffer.extend_from_slice(&(self.body().transactions.len() as u16).to_be_bytes());
     }
 
     #[inline]
-    fn legacy_hash_l1_msg(&self, hasher: &mut impl Hasher) {
+    fn encode_legacy_l1_msg(&self, buffer: &mut Vec<u8>) {
         for tx in self
             .body()
             .transactions
             .iter()
-            .filter(|tx| tx.is_l1_message())
+            .filter_map(|tx| tx.as_l1_message())
         {
-            hasher.update(tx.tx_hash().as_slice())
+            buffer.extend_from_slice(tx.hash_ref().as_ref());
         }
     }
 
     #[inline]
     fn hash_msg_queue(&self, initial_queue_hash: &B256) -> B256 {
         let mut rolling_hash = *initial_queue_hash;
+
+        let mut buffer = [0u8; { size_of::<B256>() * 2 }];
+        buffer[..32].copy_from_slice(rolling_hash.as_ref());
+
         for tx in self
             .body()
             .transactions
             .iter()
             .filter(|tx| tx.is_l1_message())
         {
-            let mut hasher = Keccak::v256();
-            hasher.update(rolling_hash.as_slice());
-            hasher.update(tx.tx_hash().as_slice());
-
-            hasher.finalize(rolling_hash.as_mut_slice());
+            buffer[..size_of::<B256>()].copy_from_slice(rolling_hash.as_ref());
+            buffer[size_of::<B256>()..].copy_from_slice(tx.tx_hash().as_ref());
 
+            rolling_hash = keccak256(&buffer);
             // clear last 32 bits, i.e. 4 bytes.
             // https://github.com/scroll-tech/da-codec/blob/26dc8d575244560611548fada6a3a2745c60fe83/encoding/da.go#L817-L825
             // see also https://github.com/scroll-tech/da-codec/pull/42
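
hash_msg_queue keeps its rolling-hash scheme but now reuses a single 64-byte scratch buffer: each step hashes the previous rolling hash concatenated with the next L1 message hash, and (per the "clear last 32 bits" comment and the da-codec links above) the low 4 bytes of the result are zeroed before the next iteration. A self-contained sketch of that loop with placeholder message hashes; the truncation step is an assumption based on that comment, since the rest of the function lies outside this hunk:

```rust
use alloy_primitives::{keccak256, B256};

/// Rolling hash over L1 message hashes:
/// next = keccak256(rolling_hash || msg_hash), then the last 4 bytes are cleared.
fn hash_msg_queue(initial_queue_hash: &B256, msg_hashes: &[B256]) -> B256 {
    let mut rolling_hash = *initial_queue_hash;

    // One reusable 64-byte scratch buffer laid out as [rolling_hash | msg_hash].
    let mut buffer = [0u8; 64];

    for msg_hash in msg_hashes {
        buffer[..32].copy_from_slice(rolling_hash.as_slice());
        buffer[32..].copy_from_slice(msg_hash.as_slice());

        rolling_hash = keccak256(&buffer);

        // Clear the last 32 bits (4 bytes), per the comment in the diff above.
        rolling_hash.0[28..].fill(0);
    }

    rolling_hash
}

fn main() {
    // Placeholder hashes; in the real code these are L1 message transaction hashes.
    let msgs = [B256::repeat_byte(0x11), B256::repeat_byte(0x22)];
    println!("{}", hash_msg_queue(&B256::ZERO, &msgs));
}
```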
