This repository has been archived by the owner on Jan 22, 2025. It is now read-only.

Commit

use trait to simplify and consolidate cumulative code
jeffwashington committed Jun 9, 2021
1 parent d7a0fd8 commit af2e970
Showing 1 changed file with 30 additions and 18 deletions.
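Note: the diff below replaces the separate get_slice and get_slice_2d methods with one generic get_slice that dispatches through a new ExtractSliceFromRawData trait implemented for Vec<Vec<T>> and Vec<Vec<Vec<T>>>. As a rough, self-contained sketch of that pattern (not the crate's actual API: `Offset` here is a simplified stand-in for `CumulativeOffset`, and the free function `get_slice` plays the role of the consolidated method):

// Sketch only: `Offset` is a simplified stand-in for `CumulativeOffset`
// (the real struct also carries a start offset); the trait mirrors the one
// introduced in this commit.
pub struct Offset {
    pub index: Vec<usize>,
}

pub trait ExtractSliceFromRawData<'b, T: 'b> {
    fn extract<'a>(&'b self, offset: &'a Offset, start: usize) -> &'b [T];
}

impl<'b, T: 'b> ExtractSliceFromRawData<'b, T> for Vec<Vec<T>> {
    fn extract<'a>(&'b self, offset: &'a Offset, start: usize) -> &'b [T] {
        &self[offset.index[0]][start..]
    }
}

impl<'b, T: 'b> ExtractSliceFromRawData<'b, T> for Vec<Vec<Vec<T>>> {
    fn extract<'a>(&'b self, offset: &'a Offset, start: usize) -> &'b [T] {
        &self[offset.index[0]][offset.index[1]][start..]
    }
}

// One generic accessor covers both nesting depths, where the old code needed
// separate 1d-of-Vec and 2d-of-Vec variants.
fn get_slice<'b, T, U>(raw: &'b U, offset: &Offset, start: usize) -> &'b [T]
where
    T: 'b,
    U: ExtractSliceFromRawData<'b, T>,
{
    raw.extract(offset, start)
}

fn main() {
    let two_d: Vec<Vec<u32>> = vec![vec![1, 2, 3], vec![4, 5]];
    let three_d: Vec<Vec<Vec<u32>>> = vec![vec![vec![6, 7], vec![8]]];

    // Slice of two_d[0] starting at element 1 -> [2, 3]
    assert_eq!(get_slice(&two_d, &Offset { index: vec![0] }, 1), &[2, 3]);
    // Slice of three_d[0][1] starting at element 0 -> [8]
    assert_eq!(get_slice(&three_d, &Offset { index: vec![0, 1] }, 0), &[8]);
}

Because the dimensionality is encoded in the trait impl rather than in separate methods, callers such as the hash-merging loops further down no longer need to pick between variants.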
48 changes: 30 additions & 18 deletions runtime/src/accounts_hash.rs
@@ -108,6 +108,22 @@ impl CumulativeOffset {
     }
 }
 
+pub trait ExtractSliceFromRawData<'b, T: 'b> {
+    fn extract<'a>(&'b self, offset: &'a CumulativeOffset, start: usize) -> &'b [T];
+}
+
+impl<'b, T: 'b> ExtractSliceFromRawData<'b, T> for Vec<Vec<T>> {
+    fn extract<'a>(&'b self, offset: &'a CumulativeOffset, start: usize) -> &'b [T] {
+        &self[offset.index[0]][start..]
+    }
+}
+
+impl<'b, T: 'b> ExtractSliceFromRawData<'b, T> for Vec<Vec<Vec<T>>> {
+    fn extract<'a>(&'b self, offset: &'a CumulativeOffset, start: usize) -> &'b [T] {
+        &self[offset.index[0]][offset.index[1]][start..]
+    }
+}
+
 // Allow retrieving &[start..end] from a logical src: Vec<T>, where src is really Vec<Vec<T>> (or later Vec<Vec<Vec<T>>>)
 // This model prevents callers from having to flatten which saves both working memory and time.
 #[derive(Default, Debug)]
@@ -163,32 +179,28 @@ impl CumulativeOffsets {
         }
     }
 
-    fn find(&self, start: usize) -> (usize, &CumulativeOffset) {
+    fn find_index(&self, start: usize) -> usize {
         assert!(!self.cumulative_offsets.is_empty());
-        let index = match self.cumulative_offsets[..]
-            .binary_search_by(|index| index.start_offset.cmp(&start))
-        {
+        match self.cumulative_offsets[..].binary_search_by(|index| index.start_offset.cmp(&start)) {
             Ok(index) => index,
             Err(index) => index - 1, // we would insert at index so we are before the item at index
-        };
+        }
+    }
+
+    fn find(&self, start: usize) -> (usize, &CumulativeOffset) {
+        let index = self.find_index(start);
         let index = &self.cumulative_offsets[index];
         let start = start - index.start_offset;
         (start, index)
     }
 
     // return the biggest slice possible that starts at 'start'
-    pub fn get_slice<'a, T>(&self, raw: &'a [Vec<T>], start: usize) -> &'a [T] {
-        let (start, index) = self.find(start);
-        const DIMENSION: usize = 0;
-        &raw[index.index[DIMENSION]][start..]
-    }
-
-    // return the biggest slice possible that starts at 'start'
-    pub fn get_slice_2d<'a, T>(&self, raw: &'a [Vec<Vec<T>>], start: usize) -> &'a [T] {
+    pub fn get_slice<'a, 'b, T, U>(&'a self, raw: &'b U, start: usize) -> &'b [T]
+    where
+        U: ExtractSliceFromRawData<'b, T> + 'b,
+    {
         let (start, index) = self.find(start);
-        const DIMENSION_0: usize = 0;
-        const DIMENSION_1: usize = 1;
-        &raw[index.index[DIMENSION_0]][index.index[DIMENSION_1]][start..]
+        raw.extract(index, start)
     }
 }
 
@@ -751,7 +763,7 @@ impl AccountsHash {
         // move tail hashes that don't evenly hash into a 1d vector for next time
         let mut i = hash_total - left_over_hashes;
         while i < hash_total {
-            let data = cumulative.get_slice_2d(&hashes, i);
+            let data = cumulative.get_slice(&hashes, i);
             next_pass.remaining_unhashed.extend(data);
             i += data.len();
         }
@@ -769,7 +781,7 @@ impl AccountsHash {
             hash_total, // note this does not include the ones that didn't divide evenly, unless we're in the last iteration
             MERKLE_FANOUT,
             Some(TARGET_FANOUT_LEVEL),
-            |start| cumulative.get_slice_2d(&hashes, start),
+            |start| cumulative.get_slice(&hashes, start),
             Some(TARGET_FANOUT_LEVEL),
         )
         .1;

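A side note on the find_index helper factored out above: the `Err(index) => index - 1` arm works because a failed binary search returns the insertion point, so subtracting one lands on the entry whose range contains `start` (hence the assertion that the offsets are non-empty, with queries expected to be at or above the first start offset). A small illustration with bare start offsets rather than the crate's `CumulativeOffset` entries:

// Illustration only: plain start offsets instead of CumulativeOffset entries,
// to show why the Err arm subtracts one.
fn find_index(start_offsets: &[usize], start: usize) -> usize {
    assert!(!start_offsets.is_empty());
    match start_offsets.binary_search(&start) {
        Ok(index) => index,
        Err(index) => index - 1, // would insert at `index`, so `start` belongs to the previous entry
    }
}

fn main() {
    let starts = [0, 3, 5]; // three ranges beginning at logical offsets 0, 3, and 5
    assert_eq!(find_index(&starts, 0), 0); // exact hit on the first range
    assert_eq!(find_index(&starts, 4), 1); // falls inside the range starting at 3
    assert_eq!(find_index(&starts, 9), 2); // beyond the last start, so the last range
}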