
bump reed-solomon-novelpoly version #3065

Merged: 6 commits, Jan 26, 2024
Changes from 4 commits
36 changes: 5 additions & 31 deletions Cargo.lock

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion polkadot/erasure-coding/Cargo.toml
@@ -12,7 +12,7 @@ workspace = true
 [dependencies]
 polkadot-primitives = { path = "../primitives" }
 polkadot-node-primitives = { package = "polkadot-node-primitives", path = "../node/primitives" }
-novelpoly = { package = "reed-solomon-novelpoly", version = "1.0.0" }
+novelpoly = { package = "reed-solomon-novelpoly", version = "2.0.0" }
 parity-scale-codec = { version = "3.6.1", default-features = false, features = ["derive", "std"] }
 sp-core = { path = "../../substrate/primitives/core" }
 sp-trie = { path = "../../substrate/primitives/trie" }
97 changes: 18 additions & 79 deletions polkadot/erasure-coding/src/lib.rs
@@ -83,6 +83,20 @@ pub enum Error {
 	UnknownCodeParam,
 }
 
+impl From<novelpoly::Error> for Error {
+	fn from(error: novelpoly::Error) -> Self {
+		match error {
+			novelpoly::Error::NeedMoreShards { .. } => Self::NotEnoughChunks,
+			novelpoly::Error::ParamterMustBePowerOf2 { .. } => Self::UnevenLength,
+			novelpoly::Error::WantedShardCountTooHigh(_) => Self::TooManyValidators,
+			novelpoly::Error::WantedShardCountTooLow(_) => Self::NotEnoughValidators,
+			novelpoly::Error::PayloadSizeIsZero { .. } => Self::BadPayload,
+			novelpoly::Error::InconsistentShardLengths { .. } => Self::NonUniformChunks,
+			_ => Self::UnknownReconstruction,
+		}
+	}
+}
+
 /// Obtain a threshold of chunks that should be enough to recover the data.
 pub const fn recovery_threshold(n_validators: usize) -> Result<usize, Error> {
 	if n_validators > MAX_VALIDATORS {
@@ -166,42 +180,17 @@ where
 {
 	let params = code_params(n_validators)?;
 	let mut received_shards: Vec<Option<WrappedShard>> = vec![None; n_validators];
-	let mut shard_len = None;
 	for (chunk_data, chunk_idx) in chunks.into_iter().take(n_validators) {
 		if chunk_idx >= n_validators {
 			return Err(Error::ChunkIndexOutOfBounds { chunk_index: chunk_idx, n_validators })
 		}
 
-		let shard_len = shard_len.get_or_insert_with(|| chunk_data.len());
-
-		if *shard_len % 2 != 0 {
+		if chunk_data.len() % 2 != 0 {
 			return Err(Error::UnevenLength)
 		}
 
-		if *shard_len != chunk_data.len() || *shard_len == 0 {
-			return Err(Error::NonUniformChunks)
-		}
-
 		received_shards[chunk_idx] = Some(WrappedShard::new(chunk_data.to_vec()));
 	}
 
-	let res = params.make_encoder().reconstruct(received_shards);
-
-	let payload_bytes = match res {
-		Err(e) => match e {
-			novelpoly::Error::NeedMoreShards { .. } => return Err(Error::NotEnoughChunks),
-			novelpoly::Error::ParamterMustBePowerOf2 { .. } => return Err(Error::UnevenLength),
-			novelpoly::Error::WantedShardCountTooHigh(_) => return Err(Error::TooManyValidators),
-			novelpoly::Error::WantedShardCountTooLow(_) => return Err(Error::NotEnoughValidators),
-			novelpoly::Error::PayloadSizeIsZero { .. } => return Err(Error::BadPayload),
-			novelpoly::Error::InconsistentShardLengths { .. } =>
-				return Err(Error::NonUniformChunks),
-			_ => return Err(Error::UnknownReconstruction),
-		},
-		Ok(payload_bytes) => payload_bytes,
-	};
-
-	Decode::decode(&mut &payload_bytes[..]).or_else(|_e| Err(Error::BadPayload))
+	let payload_bytes = params.make_encoder().reconstruct(received_shards)?;
+
+	Decode::decode(&mut &payload_bytes[..]).map_err(|_| Error::BadPayload)
 }
 
 /// An iterator that yields merkle branches and chunk data for all chunks to
@@ -294,56 +283,6 @@ pub fn branch_hash(root: &H256, branch_nodes: &Proof, index: usize) -> Result<H2
 	}
 }
 
-// input for `codec` which draws data from the data shards
-struct ShardInput<'a, I> {
-	remaining_len: usize,
-	shards: I,
-	cur_shard: Option<(&'a [u8], usize)>,
-}
-
-impl<'a, I: Iterator<Item = &'a [u8]>> parity_scale_codec::Input for ShardInput<'a, I> {
-	fn remaining_len(&mut self) -> Result<Option<usize>, parity_scale_codec::Error> {
-		Ok(Some(self.remaining_len))
-	}
-
-	fn read(&mut self, into: &mut [u8]) -> Result<(), parity_scale_codec::Error> {
-		let mut read_bytes = 0;
-
-		loop {
-			if read_bytes == into.len() {
-				break
-			}
-
-			let cur_shard = self.cur_shard.take().or_else(|| self.shards.next().map(|s| (s, 0)));
-			let (active_shard, mut in_shard) = match cur_shard {
-				Some((s, i)) => (s, i),
-				None => break,
-			};
-
-			if in_shard >= active_shard.len() {
-				continue
-			}
-
-			let remaining_len_out = into.len() - read_bytes;
-			let remaining_len_shard = active_shard.len() - in_shard;
-
-			let write_len = std::cmp::min(remaining_len_out, remaining_len_shard);
-			into[read_bytes..][..write_len].copy_from_slice(&active_shard[in_shard..][..write_len]);
-
-			in_shard += write_len;
-			read_bytes += write_len;
-			self.cur_shard = Some((active_shard, in_shard))
-		}
-
-		self.remaining_len -= read_bytes;
-		if read_bytes == into.len() {
-			Ok(())
-		} else {
-			Err("slice provided too big for input".into())
-		}
-	}
-}
-
 #[cfg(test)]
 mod tests {
 	use super::*;
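The core of the lib.rs change is that the error mapping now lives in a single `impl From<novelpoly::Error> for Error`, so `reconstruct` can propagate reconstruction failures with the `?` operator instead of the removed inline `match`. The following is a minimal, self-contained sketch of that pattern only; `LowLevelError`, `HighLevelError`, `low_level` and `high_level` are hypothetical names for illustration and are not part of polkadot-erasure-coding or reed-solomon-novelpoly.

// Sketch: centralising error conversion in a `From` impl so `?` can do the mapping.
#[derive(Debug, PartialEq)]
enum LowLevelError {
	NeedMoreShards,
	InconsistentShardLengths,
}

#[derive(Debug, PartialEq)]
enum HighLevelError {
	NotEnoughChunks,
	NonUniformChunks,
}

// With this impl in place, `?` converts a `LowLevelError` into a
// `HighLevelError` automatically, removing the explicit match at call sites.
impl From<LowLevelError> for HighLevelError {
	fn from(e: LowLevelError) -> Self {
		match e {
			LowLevelError::NeedMoreShards => Self::NotEnoughChunks,
			LowLevelError::InconsistentShardLengths => Self::NonUniformChunks,
		}
	}
}

fn low_level(n_shards: usize) -> Result<usize, LowLevelError> {
	// Pretend reconstruction needs at least 3 shards.
	if n_shards < 3 {
		Err(LowLevelError::NeedMoreShards)
	} else {
		Ok(n_shards)
	}
}

fn high_level(n_shards: usize) -> Result<usize, HighLevelError> {
	// `?` invokes `From<LowLevelError> for HighLevelError` on the error path,
	// analogous to how `reconstruct` now forwards novelpoly errors.
	let shards = low_level(n_shards)?;
	Ok(shards * 2)
}

fn main() {
	assert_eq!(high_level(4), Ok(8));
	assert_eq!(high_level(1), Err(HighLevelError::NotEnoughChunks));
}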
14 changes: 14 additions & 0 deletions prdoc/pr_3065.prdoc
@@ -0,0 +1,14 @@
+# Schema: Polkadot SDK PRDoc Schema (prdoc) v1.0.0
+# See doc at https://raw.githubusercontent.com/paritytech/polkadot-sdk/master/prdoc/schema_user.json
+
+title: Bump reed-solomon-novelpoly to version 2.0.0
+
+author: alindima
+
+doc:
+  - audience: Node Dev
+    description: |
+      Use the new reed-solomon-novelpoly release. Brings some performance improvements.
+
+crates:
+  - name: "erasure-coding"