Note possible checked_sub cases #57

Merged: 25 commits, Jun 10, 2023
Changes from 6 commits
Commits
fb0616b
Add TODOs for checked_sub locations
doubledup May 10, 2023
404f368
Remove extra split_at_mut
doubledup May 17, 2023
4c0b8f0
Replace slicing with split_at
doubledup May 17, 2023
c7c467e
Replace magic number with BYTES_PER_CHUNK
doubledup May 17, 2023
479b76b
Measure the length of an incomplete last chunk
doubledup May 17, 2023
6e90e37
Handle checked_sub cases
doubledup May 15, 2023
79a7c71
Reformat checked subtraction comments
doubledup May 29, 2023
39d20fa
Clarify comment
doubledup Jun 6, 2023
4d1999a
Add test case for empty last byte in Bitlist
doubledup Jun 7, 2023
536f46a
Add SAFETY prefix to comments
doubledup Jun 7, 2023
392cd26
Replace assert! with OffsetNotIncreasing error
doubledup Jun 7, 2023
b231461
Add notes about slice lengths after splitting
doubledup Jun 7, 2023
8b9f952
Merge branch 'main' into checked-sub
doubledup Jun 7, 2023
c48e8ff
Merge branch 'main' into checked-sub
doubledup Jun 8, 2023
9bf8905
Swap InvalidByte for InstanceError::Bounded
doubledup Jun 8, 2023
2ad9a4b
rustfmt
doubledup Jun 8, 2023
f541e76
Only push increasing offsets
doubledup Jun 9, 2023
8e8add5
Merge branch 'main' into checked-sub
doubledup Jun 9, 2023
e303981
Update ssz-rs-derive/src/lib.rs
ralexstokes Jun 9, 2023
3efa277
Update ssz-rs/src/merkleization/mod.rs
ralexstokes Jun 9, 2023
220b6c4
Update ssz-rs/src/merkleization/mod.rs
ralexstokes Jun 9, 2023
946b442
simplify naive merkle hashing code
ralexstokes Jun 9, 2023
6accced
formatting: code org
ralexstokes Jun 10, 2023
9400cc4
add some clarifying docs
ralexstokes Jun 10, 2023
8bde6b5
factor out hashing from node juggling
ralexstokes Jun 10, 2023
6 changes: 4 additions & 2 deletions ssz-rs-derive/src/lib.rs
@@ -192,8 +192,9 @@ fn derive_deserialize_impl(data: &Data) -> TokenStream {
Some(field_name) => quote_spanned! { f.span() =>
let bytes_read = if <#field_type>::is_variable_size() {
let end = start + #BYTES_PER_LENGTH_OFFSET;
- let next_offset = u32::deserialize(&encoding[start..end])?;
- offsets.push((#i, next_offset as usize));
+ let next_offset = u32::deserialize(&encoding[start..end])? as usize;
+ offsets.last().map(|(_, previous_offset)| assert!(next_offset >= *previous_offset));
+ offsets.push((#i, next_offset));

#BYTES_PER_LENGTH_OFFSET
} else {
@@ -228,6 +229,7 @@ fn derive_deserialize_impl(data: &Data) -> TokenStream {
let (_, end) = span[1];

container.__ssz_rs_set_by_index(index, &encoding[start..end])?;
// checked_sub is unnecessary, as offsets are increasing
total_bytes_read += end - start;
}

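To illustrate the invariant these changes rely on, here is a minimal standalone sketch (hypothetical helper, not the derive-generated code): once only non-decreasing offsets are accepted, the later total_bytes_read += end - start cannot underflow. A later commit in this PR replaces the assert! above with an OffsetNotIncreasing error.

fn read_spans(offsets: &[(usize, usize)]) -> Result<usize, String> {
    let mut total_bytes_read = 0;
    for pair in offsets.windows(2) {
        let (_, start) = pair[0];
        let (_, end) = pair[1];
        // Reject decreasing offsets up front, mirroring the validation added in this PR.
        if end < start {
            return Err(format!("offset {end} is less than previous offset {start}"));
        }
        // Safe: end >= start was just checked, so the subtraction cannot underflow.
        total_bytes_read += end - start;
    }
    Ok(total_bytes_read)
}

fn main() {
    assert_eq!(read_spans(&[(0, 4), (1, 12), (2, 20)]), Ok(16));
    assert!(read_spans(&[(0, 4), (1, 2)]).is_err());
}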
10 changes: 10 additions & 0 deletions ssz-rs/src/bitlist.rs
@@ -51,6 +51,7 @@ impl<const N: usize> fmt::Debug for Bitlist<N> {
let value = i32::from(*bit);
write!(f, "{value}")?;
bits_written += 1;
// checked_sub is unnecessary, as len >= 1 when this for loop runs
if bits_written % 4 == 0 && index != len - 1 {
write!(f, "_")?;
}
@@ -110,6 +111,7 @@ impl<const N: usize> Bitlist<N> {
*last |= 1u8 << marker_index;
}
}
// checked_sub is unnecessary, as buffer.len() > start_len
Ok(buffer.len() - start_len)
}
}
@@ -159,14 +161,22 @@ impl<const N: usize> Deserialize for Bitlist<N> {
}

let (last_byte, prefix) = encoding.split_last().unwrap();
if *last_byte == 0u8 {
return Err(DeserializeError::InvalidByte(*last_byte));
}

let mut result = BitlistInner::from_slice(prefix);
let last = BitlistInner::from_element(*last_byte);
// checked_sub is unnecessary, as last_byte != 0, so last.trailing_zeros <= 7
// high_bit_index >= 1
let high_bit_index = 8 - last.trailing_zeros();

// checked_sub is unnecessary, as high_bit_index >= 1
if !last[high_bit_index - 1] {
return Err(DeserializeError::InvalidByte(*last_byte))
}

// checked_sub is unnecessary, as high_bit_index >= 1
for bit in last.iter().take(high_bit_index - 1) {
result.push(*bit);
}
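The comments above boil down to one arithmetic fact: because a zero last byte is rejected, the last byte has at most 7 trailing zero bits, so 8 - trailing_zeros() is at least 1 and the two high_bit_index - 1 expressions cannot underflow. A standalone check of that bound on plain u8 (the actual code works through the bitvec-backed BitlistInner, so this is only an illustration of the arithmetic):

fn main() {
    for byte in 1u8..=u8::MAX {
        // A nonzero byte has at most 7 trailing zero bits...
        assert!(byte.trailing_zeros() <= 7);
        // ...so the derived index is always in 1..=8 and subtracting 1 is safe.
        let high_bit_index = 8 - byte.trailing_zeros();
        assert!((1..=8).contains(&high_bit_index));
        let _ = high_bit_index - 1;
    }
}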
1 change: 1 addition & 0 deletions ssz-rs/src/bitvector.rs
@@ -60,6 +60,7 @@ impl<const N: usize> fmt::Debug for Bitvector<N> {
let value = i32::from(*bit);
write!(f, "{value}")?;
bits_written += 1;
// checked_sub is unnecessary, as len >= 1 for bitvectors
if bits_written % 4 == 0 && index != len - 1 {
write!(f, "_")?;
}
1 change: 1 addition & 0 deletions ssz-rs/src/de.rs
@@ -71,6 +71,7 @@
if remainder != 0 {
return Err(DeserializeError::AdditionalInput {
provided: encoding.len(),
// checked_sub is unnecessary, as encoding.len() > remainder
expected: encoding.len() - remainder,
})
}
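The comment's claim follows from how remainder is computed: for unsigned integers, x % n <= x, so subtracting the remainder from the length cannot underflow. A small standalone check (item_size is a stand-in for whatever fixed element size produced the remainder):

fn main() {
    for len in 0usize..256 {
        for item_size in 1usize..17 {
            let remainder = len % item_size;
            // remainder never exceeds len, so len - remainder is safe.
            assert!(remainder <= len);
            let _expected = len - remainder;
        }
    }
}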
41 changes: 33 additions & 8 deletions ssz-rs/src/merkleization/mod.rs
@@ -46,9 +46,10 @@ impl Display for MerkleizationError {
impl std::error::Error for MerkleizationError {}

pub fn pack_bytes(buffer: &mut Vec<u8>) {
- let data_len = buffer.len();
- if data_len % BYTES_PER_CHUNK != 0 {
- let bytes_to_pad = BYTES_PER_CHUNK - data_len % BYTES_PER_CHUNK;
+ let incomplete_chunk_len = buffer.len() % BYTES_PER_CHUNK;
+ if incomplete_chunk_len != 0 {
+ // checked_sub is unnecessary, as BYTES_PER_CHUNK > incomplete_chunk_len
+ let bytes_to_pad = BYTES_PER_CHUNK - incomplete_chunk_len;
let pad = vec![0u8; bytes_to_pad];
buffer.extend_from_slice(&pad);
}
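As a standalone sketch of the same padding arithmetic (assuming the usual SSZ chunk size of 32 bytes for BYTES_PER_CHUNK, and using resize in place of the pad vector): whenever the buffer length is not a multiple of the chunk size, incomplete_chunk_len falls in 1..=31, so the subtraction cannot underflow.

const BYTES_PER_CHUNK: usize = 32;

fn pad_to_chunk(buffer: &mut Vec<u8>) {
    let incomplete_chunk_len = buffer.len() % BYTES_PER_CHUNK;
    if incomplete_chunk_len != 0 {
        // incomplete_chunk_len is in 1..=BYTES_PER_CHUNK - 1 here.
        let bytes_to_pad = BYTES_PER_CHUNK - incomplete_chunk_len;
        buffer.resize(buffer.len() + bytes_to_pad, 0u8);
    }
}

fn main() {
    let mut buffer = vec![0xffu8; 33];
    pad_to_chunk(&mut buffer);
    assert_eq!(buffer.len(), 64); // 33 bytes padded up to two full 32-byte chunks
}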
@@ -116,11 +117,13 @@ fn merkleize_chunks_with_virtual_padding(
let height = leaf_count.trailing_zeros() + 1;

if chunk_count == 0 {
// checked_sub is unnecessary, as height >= 1
let depth = height - 1;
return Ok(CONTEXT[depth as usize].try_into().expect("can produce a single root chunk"))
}

let mut layer = chunks.to_vec();
// checked_sub is unnecessary, as we return early when chunk_count == 0
let mut last_index = chunk_count - 1;
for k in (1..height).rev() {
for i in (0..2usize.pow(k)).step_by(2) {
@@ -129,10 +132,17 @@
Ordering::Less => {
let focus =
&mut layer[parent_index * BYTES_PER_CHUNK..(i + 2) * BYTES_PER_CHUNK];
// checked_sub is unnecessary:
// i >= parent_index
// and
// focus.len() = (i + 2 - parent_index) * BYTES_PER_CHUNK
// so focus.len() >= 2 * BYTES_PER_CHUNK
let children_index = focus.len() - 2 * BYTES_PER_CHUNK;
let (parent, children) = focus.split_at_mut(children_index);
let (left, right) = children.split_at_mut(BYTES_PER_CHUNK);
if parent.is_empty() {
// TODO: Why not i == 0? Is there another case where the left node is used
// to store the parent?
// NOTE: have to specially handle the situation where the children nodes and
// parent node share memory
hasher.update(&left);
@@ -145,9 +155,15 @@
Ordering::Equal => {
let focus =
&mut layer[parent_index * BYTES_PER_CHUNK..(i + 1) * BYTES_PER_CHUNK];
// checked_sub is unnecessary:
// i >= parent_index
// and
// focus.len() = (i + 1 - parent_index) * BYTES_PER_CHUNK
// so focus.len() >= BYTES_PER_CHUNK
let children_index = focus.len() - BYTES_PER_CHUNK;
- let (parent, children) = focus.split_at_mut(children_index);
- let (left, _) = children.split_at_mut(BYTES_PER_CHUNK);
+ let (parent, left) = focus.split_at_mut(children_index);
// checked_sub is unnecessary, as k <= height - 1
// so depth >= height - (height - 1) - 1 = 0
let depth = height - k - 1;
let right = &CONTEXT[depth as usize];
if parent.is_empty() {
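The slice-length claims in the two branches above can also be checked numerically. Assuming parent_index = i / 2, which is consistent with the stated i >= parent_index, the focus slice spans at least two chunks in the Ordering::Less branch and at least one chunk in the Ordering::Equal branch, so both children_index computations are safe:

const BYTES_PER_CHUNK: usize = 32;

fn main() {
    for i in (0..1024usize).step_by(2) {
        let parent_index = i / 2;
        // Ordering::Less: focus covers parent_index * 32 .. (i + 2) * 32
        let focus_len_less = (i + 2 - parent_index) * BYTES_PER_CHUNK;
        assert!(focus_len_less >= 2 * BYTES_PER_CHUNK);
        // Ordering::Equal: focus covers parent_index * 32 .. (i + 1) * 32
        let focus_len_equal = (i + 1 - parent_index) * BYTES_PER_CHUNK;
        assert!(focus_len_equal >= BYTES_PER_CHUNK);
    }
}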
@@ -265,27 +281,36 @@ mod tests {
debug_assert!(chunks.len() % BYTES_PER_CHUNK == 0);
debug_assert!(leaf_count.next_power_of_two() == leaf_count);

// checked_sub is unnecessary, as leaf_count != 0 (0.next_power_of_two() == 1)
let node_count = 2 * leaf_count - 1;
// checked_sub is unnecessary, as node_count >= leaf_count
let interior_count = node_count - leaf_count;
let leaf_start = interior_count * BYTES_PER_CHUNK;

let mut hasher = Sha256::new();
let mut buffer = vec![0u8; node_count * BYTES_PER_CHUNK];
buffer[leaf_start..leaf_start + chunks.len()].copy_from_slice(chunks);
- let zero_chunk = [0u8; 32];
+ let zero_chunk = [0u8; BYTES_PER_CHUNK];
for i in chunks.len()..leaf_count {
let start = leaf_start + (i * BYTES_PER_CHUNK);
let end = leaf_start + (i + 1) * BYTES_PER_CHUNK;
buffer[start..end].copy_from_slice(&zero_chunk);
}
ralexstokes (Owner) commented:

I don't think the bounds on this iterator (chunks.len()..leaf_count) are correct; it was fine only because we are just writing zeros to memory that was already zero (initialized in buffer). I deleted it anyway because it didn't seem to be doing anything.


for i in (1..node_count).rev().step_by(2) {
// checked_sub is unnecessary, as i >= 1
let parent_index = (i - 1) / 2;
let focus = &mut buffer[parent_index * BYTES_PER_CHUNK..(i + 1) * BYTES_PER_CHUNK];
// TODO: checked_sub
// focus.len() = (i + 1 - parent_index) * BYTES_PER_CHUNK
// = ((2*i + 2 - i + 1) / 2) * BYTES_PER_CHUNK
// = ((i + 3) / 2) * BYTES_PER_CHUNK
// and
// i >= 1
// so focus.len() >= 2 * BYTES_PER_CHUNK
let children_index = focus.len() - 2 * BYTES_PER_CHUNK;
let (parent, children) = focus.split_at_mut(children_index);
- let left = &children[0..BYTES_PER_CHUNK];
- let right = &children[BYTES_PER_CHUNK..2 * BYTES_PER_CHUNK];
+ let (left, right) = children.split_at(BYTES_PER_CHUNK);
hash_nodes(&mut hasher, left, right, &mut parent[..BYTES_PER_CHUNK]);
}
Ok(buffer[0..BYTES_PER_CHUNK].try_into().expect("can produce a single root chunk"))
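The index arithmetic in this naive reference implementation can be checked the same way: leaf_count is a nonzero power of two, so node_count = 2 * leaf_count - 1 and interior_count = node_count - leaf_count never underflow, and for every node index i >= 1 the focus slice has room for two child chunks. A standalone check:

const BYTES_PER_CHUNK: usize = 32;

fn main() {
    for exponent in 0u32..10 {
        let leaf_count = 2usize.pow(exponent);
        let node_count = 2 * leaf_count - 1; // safe: leaf_count >= 1
        let interior_count = node_count - leaf_count; // safe: node_count >= leaf_count
        assert_eq!(interior_count, leaf_count - 1);
        for i in 1..node_count {
            let parent_index = (i - 1) / 2; // safe: i >= 1
            // focus covers parent_index * BYTES_PER_CHUNK .. (i + 1) * BYTES_PER_CHUNK
            let focus_chunks = i + 1 - parent_index;
            assert!(focus_chunks * BYTES_PER_CHUNK >= 2 * BYTES_PER_CHUNK);
        }
    }
}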