From 26990dbf0bc4ff3bbdfab6c1f9f19859d3c37ec5 Mon Sep 17 00:00:00 2001 From: Nazar Mokrynskyi Date: Mon, 29 Nov 2021 08:47:33 +0200 Subject: [PATCH] Introduce `FlatPieces` for more efficient work with memory, refactor archiver to use it, refactor plotter to use batch plotting --- crates/sc-consensus-subspace/src/tests.rs | 11 +-- crates/subspace-archiving/src/archiver.rs | 79 +++++++++---------- crates/subspace-archiving/src/utils.rs | 8 +- .../tests/integration/archiver.rs | 18 +++-- .../tests/integration/reconstructor.rs | 68 +++++++++++----- crates/subspace-core-primitives/src/lib.rs | 65 +++++++++++++++ crates/subspace-farmer/src/commitments.rs | 6 +- .../subspace-farmer/src/commitments/tests.rs | 23 ++---- crates/subspace-farmer/src/farming/tests.rs | 6 +- crates/subspace-farmer/src/plot.rs | 16 ++-- crates/subspace-farmer/src/plot/tests.rs | 6 +- crates/subspace-farmer/src/plotting.rs | 16 ++-- crates/subspace-solving/src/codec.rs | 3 + 13 files changed, 205 insertions(+), 120 deletions(-) diff --git a/crates/sc-consensus-subspace/src/tests.rs b/crates/sc-consensus-subspace/src/tests.rs index 38dac18170a07..81d2551f894e4 100644 --- a/crates/sc-consensus-subspace/src/tests.rs +++ b/crates/sc-consensus-subspace/src/tests.rs @@ -73,7 +73,7 @@ use std::sync::Arc; use std::{cell::RefCell, task::Poll, time::Duration}; use subspace_archiving::archiver::Archiver; use subspace_core_primitives::objects::BlockObjectMapping; -use subspace_core_primitives::{LocalChallenge, Piece, Signature, Tag}; +use subspace_core_primitives::{FlatPieces, LocalChallenge, Piece, Signature, Tag, PIECE_SIZE}; use subspace_solving::{SubspaceCodec, SOLUTION_SIGNING_CONTEXT}; use substrate_test_runtime::{Block as TestBlock, Hash}; @@ -465,7 +465,7 @@ fn rejects_empty_block() { }) } -fn get_archived_pieces(client: &TestClient) -> Vec { +fn get_archived_pieces(client: &TestClient) -> Vec { let genesis_block_id = BlockId::Number(Zero::zero()); let runtime_api = client.runtime_api(); @@ -481,7 
+481,7 @@ fn get_archived_pieces(client: &TestClient) -> Vec { archiver .add_block(genesis_block.encode(), BlockObjectMapping::default()) .into_iter() - .flat_map(|archived_segment| archived_segment.pieces.into_iter()) + .map(|archived_segment| archived_segment.pieces) .collect() } @@ -599,10 +599,11 @@ fn run_one_test(mutator: impl Fn(&mut TestHeader, Stage) + Send + Sync + 'static let (piece_index, mut encoding) = archived_pieces_receiver .await .unwrap() - .into_iter() + .iter() + .flat_map(|flat_pieces| flat_pieces.chunks_exact(PIECE_SIZE)) .enumerate() .choose(&mut rand::thread_rng()) - .map(|(piece_index, piece)| (piece_index as u64, piece)) + .map(|(piece_index, piece)| (piece_index as u64, Piece::try_from(piece).unwrap())) .unwrap(); subspace_solving.encode(&mut encoding, piece_index).unwrap(); diff --git a/crates/subspace-archiving/src/archiver.rs b/crates/subspace-archiving/src/archiver.rs index 87f751f3b134f..2d81e75f1c947 100644 --- a/crates/subspace-archiving/src/archiver.rs +++ b/crates/subspace-archiving/src/archiver.rs @@ -15,6 +15,7 @@ use crate::merkle_tree::{MerkleTree, Witness}; use crate::utils; +use crate::utils::{Gf16Element, GF_16_ELEMENT_BYTES}; use parity_scale_codec::{Compact, CompactLen, Decode, Encode}; use reed_solomon_erasure::galois_16::ReedSolomon; use serde::{Deserialize, Serialize}; @@ -27,8 +28,8 @@ use subspace_core_primitives::objects::{ BlockObject, BlockObjectMapping, PieceObject, PieceObjectMapping, }; use subspace_core_primitives::{ - crypto, ArchivedBlockProgress, LastArchivedBlock, Piece, RootBlock, Sha256Hash, PIECE_SIZE, - SHA256_HASH_SIZE, + crypto, ArchivedBlockProgress, FlatPieces, LastArchivedBlock, RootBlock, Sha256Hash, + PIECE_SIZE, SHA256_HASH_SIZE, }; use thiserror::Error; @@ -110,7 +111,7 @@ pub struct ArchivedSegment { /// Root block of the segment pub root_block: RootBlock, /// Pieces that correspond to this segment - pub pieces: Vec, + pub pieces: FlatPieces, /// Mappings for objects stored in corresponding 
pieces. /// /// NOTE: Only first half (data pieces) will have corresponding mapping item in this `Vec`. @@ -599,15 +600,15 @@ impl Archiver { segment }; - let data_shards: Vec> = segment + let data_shards: Vec> = segment .chunks_exact(self.record_size) .map(utils::slice_to_arrays) .collect(); drop(segment); - let mut parity_shards: Vec> = - iter::repeat(vec![[0u8; 2]; self.record_size / 2]) + let mut parity_shards: Vec> = + iter::repeat(vec![Gf16Element::default(); self.record_size / 2]) .take(data_shards.len()) .collect(); @@ -616,49 +617,45 @@ impl Archiver { .encode_sep(&data_shards, &mut parity_shards) .expect("Encoding is running with fixed parameters and should never fail"); - // Combine data and parity records back into vectors for further processing - let records: Vec> = data_shards - .into_iter() - .chain(parity_shards) - .map(|shard| { - let mut record = Vec::with_capacity(self.record_size); - - for chunk in shard { - record.extend_from_slice(&chunk); - } - - record + let mut pieces = vec![0u8; (data_shards.len() + parity_shards.len()) * PIECE_SIZE]; + // Combine data and parity records back into flat vector of pieces (witness part of piece is + // still zeroes after this and is filled later) + pieces + .chunks_exact_mut(PIECE_SIZE) + .zip(data_shards.into_iter().chain(parity_shards)) + .flat_map(|(piece, shard)| { + piece + .chunks_exact_mut(GF_16_ELEMENT_BYTES) + .zip(shard.into_iter()) }) - .collect(); - - // Build a Merkle tree over data and parity records - let merkle_tree = MerkleTree::from_data(records.iter()); - - // Take records, combine them with witnesses (Merkle proofs) to produce data and parity - // pieces - let pieces: Vec = records - .into_iter() + .for_each(|(piece_chunk, shard_chunk)| { + piece_chunk + .as_mut() + .write_all(&shard_chunk) + .expect("Both source and target are exactly the same size; qed"); + }); + + // Build a Merkle tree over all records + let merkle_tree = MerkleTree::from_data( + pieces + .chunks_exact(PIECE_SIZE) + 
.map(|piece| &piece[..self.record_size]), + ); + + // Fill witnesses (Merkle proofs) in pieces created earlier + pieces + .chunks_exact_mut(PIECE_SIZE) .enumerate() - .map(|(position, record)| { + .for_each(|(position, piece)| { let witness = merkle_tree .get_witness(position) .expect("We use the same indexes as during Merkle tree creation; qed"); - let mut piece = Piece::default(); - - piece.as_mut().write_all(&record).expect( - "With correct archiver parameters record is always smaller than \ - piece size; qed", - ); - drop(record); (&mut piece[self.record_size..]).write_all(&witness).expect( "Parameters are verified in the archiver constructor to make sure this \ never happens; qed", ); - - piece - }) - .collect(); + }); // Now produce root block let root_block = RootBlock::V0 { @@ -678,7 +675,9 @@ impl Archiver { ArchivedSegment { root_block, - pieces, + pieces: pieces + .try_into() + .expect("Pieces length is correct as created above; qed"), object_mapping, } } diff --git a/crates/subspace-archiving/src/utils.rs b/crates/subspace-archiving/src/utils.rs index 57d9a3ac42bfc..74bc367df307e 100644 --- a/crates/subspace-archiving/src/utils.rs +++ b/crates/subspace-archiving/src/utils.rs @@ -2,14 +2,14 @@ use reed_solomon_erasure::galois_16::Field as Galois16Field; use reed_solomon_erasure::Field; use std::mem; -type Elem = ::Elem; -const ELEM_BYTES: usize = mem::size_of::(); +pub(crate) type Gf16Element = ::Elem; +pub(crate) const GF_16_ELEMENT_BYTES: usize = mem::size_of::(); /// Convert slice to a vector of arrays for `reed_solomon_erasure` library. 
-pub(crate) fn slice_to_arrays + ?Sized>(slice: &S) -> Vec { +pub(crate) fn slice_to_arrays + ?Sized>(slice: &S) -> Vec { slice .as_ref() - .chunks_exact(ELEM_BYTES) + .chunks_exact(GF_16_ELEMENT_BYTES) .map(|s| s.try_into().unwrap()) .collect() } diff --git a/crates/subspace-archiving/tests/integration/archiver.rs b/crates/subspace-archiving/tests/integration/archiver.rs index aa1867d311fd9..3a36cacbe0b45 100644 --- a/crates/subspace-archiving/tests/integration/archiver.rs +++ b/crates/subspace-archiving/tests/integration/archiver.rs @@ -106,7 +106,7 @@ fn archiver() { assert_eq!(archived_segments.len(), 1); let first_archived_segment = archived_segments.into_iter().next().unwrap(); - assert_eq!(first_archived_segment.pieces.len(), MERKLE_NUM_LEAVES); + assert_eq!(first_archived_segment.pieces.count(), MERKLE_NUM_LEAVES); assert_eq!(first_archived_segment.root_block.segment_index(), 0); assert_eq!( first_archived_segment.root_block.prev_root_block_hash(), @@ -130,7 +130,7 @@ fn archiver() { .chain(iter::repeat(block_1.as_ref()).zip(block_1_object_mapping.objects.iter())); let piece_objects = first_archived_segment .pieces - .iter() + .chunks_exact(PIECE_SIZE) .zip(&first_archived_segment.object_mapping) .flat_map(|(piece, object_mapping)| { iter::repeat(piece.as_ref()).zip(&object_mapping.objects) @@ -140,7 +140,11 @@ fn archiver() { } // Check that all pieces are valid - for (position, piece) in first_archived_segment.pieces.iter().enumerate() { + for (position, piece) in first_archived_segment + .pieces + .chunks_exact(PIECE_SIZE) + .enumerate() + { assert!(archiver::is_piece_valid( piece, first_archived_segment.root_block.records_root(), @@ -189,7 +193,7 @@ fn archiver() { iter::repeat(block_1.as_ref()).zip(block_1_object_mapping.objects.iter().skip(2)); let piece_objects = archived_segments[0] .pieces - .iter() + .chunks_exact(PIECE_SIZE) .zip(&archived_segments[0].object_mapping) .flat_map(|(piece, object_mapping)| { 
iter::repeat(piece.as_ref()).zip(&object_mapping.objects) @@ -217,7 +221,7 @@ fn archiver() { let mut previous_root_block_hash = first_archived_segment.root_block.hash(); let last_root_block = archived_segments.iter().last().unwrap().root_block; for archived_segment in archived_segments { - assert_eq!(archived_segment.pieces.len(), MERKLE_NUM_LEAVES); + assert_eq!(archived_segment.pieces.count(), MERKLE_NUM_LEAVES); assert_eq!( archived_segment.root_block.segment_index(), expected_segment_index @@ -227,7 +231,7 @@ fn archiver() { previous_root_block_hash ); - for (position, piece) in archived_segment.pieces.iter().enumerate() { + for (position, piece) in archived_segment.pieces.chunks_exact(PIECE_SIZE).enumerate() { assert!(archiver::is_piece_valid( piece, archived_segment.root_block.records_root(), @@ -270,7 +274,7 @@ fn archiver() { assert_eq!(last_archived_block.number, 3); assert_eq!(last_archived_block.partial_archived(), None); - for (position, piece) in archived_segment.pieces.iter().enumerate() { + for (position, piece) in archived_segment.pieces.chunks_exact(PIECE_SIZE).enumerate() { assert!(archiver::is_piece_valid( piece, archived_segment.root_block.records_root(), diff --git a/crates/subspace-archiving/tests/integration/reconstructor.rs b/crates/subspace-archiving/tests/integration/reconstructor.rs index ca98dd504a3bf..11eabefb2d7e6 100644 --- a/crates/subspace-archiving/tests/integration/reconstructor.rs +++ b/crates/subspace-archiving/tests/integration/reconstructor.rs @@ -6,7 +6,7 @@ use subspace_archiving::reconstructor::{ }; use subspace_core_primitives::objects::BlockObjectMapping; use subspace_core_primitives::{ - ArchivedBlockProgress, LastArchivedBlock, Piece, PIECE_SIZE, SHA256_HASH_SIZE, + ArchivedBlockProgress, FlatPieces, LastArchivedBlock, Piece, PIECE_SIZE, SHA256_HASH_SIZE, }; const MERKLE_NUM_LEAVES: usize = 8_usize; @@ -14,6 +14,13 @@ const WITNESS_SIZE: usize = SHA256_HASH_SIZE * MERKLE_NUM_LEAVES.log2() as usize const RECORD_SIZE: 
usize = PIECE_SIZE - WITNESS_SIZE; const SEGMENT_SIZE: usize = RECORD_SIZE * MERKLE_NUM_LEAVES / 2; +fn flat_pieces_to_regular(pieces: &FlatPieces) -> Vec { + pieces + .chunks_exact(PIECE_SIZE) + .map(|piece| piece.try_into().unwrap()) + .collect() +} + fn pieces_to_option_of_pieces(pieces: &[Piece]) -> Vec> { pieces.iter().copied().map(Some).collect() } @@ -46,7 +53,9 @@ fn basic() { { let contents = reconstructor - .add_segment(&pieces_to_option_of_pieces(&archived_segments[0].pieces)) + .add_segment(&pieces_to_option_of_pieces(&flat_pieces_to_regular( + &archived_segments[0].pieces, + ))) .unwrap(); // Only first block fits @@ -56,7 +65,9 @@ fn basic() { { let contents = reconstructor - .add_segment(&pieces_to_option_of_pieces(&archived_segments[1].pieces)) + .add_segment(&pieces_to_option_of_pieces(&flat_pieces_to_regular( + &archived_segments[1].pieces, + ))) .unwrap(); // Second block is finished, but also third is included @@ -73,7 +84,9 @@ fn basic() { let mut partial_reconstructor = Reconstructor::new(RECORD_SIZE, SEGMENT_SIZE).unwrap(); let contents = partial_reconstructor - .add_segment(&pieces_to_option_of_pieces(&archived_segments[1].pieces)) + .add_segment(&pieces_to_option_of_pieces(&flat_pieces_to_regular( + &archived_segments[1].pieces, + ))) .unwrap(); // Only third block is fully contained @@ -91,7 +104,9 @@ fn basic() { { let contents = reconstructor - .add_segment(&pieces_to_option_of_pieces(&archived_segments[2].pieces)) + .add_segment(&pieces_to_option_of_pieces(&flat_pieces_to_regular( + &archived_segments[2].pieces, + ))) .unwrap(); // Nothing is fully contained here @@ -108,7 +123,9 @@ fn basic() { let mut partial_reconstructor = Reconstructor::new(RECORD_SIZE, SEGMENT_SIZE).unwrap(); let contents = partial_reconstructor - .add_segment(&pieces_to_option_of_pieces(&archived_segments[2].pieces)) + .add_segment(&pieces_to_option_of_pieces(&flat_pieces_to_regular( + &archived_segments[2].pieces, + ))) .unwrap(); // Nothing is fully contained 
here @@ -126,7 +143,9 @@ fn basic() { { let contents = reconstructor - .add_segment(&pieces_to_option_of_pieces(&archived_segments[3].pieces)) + .add_segment(&pieces_to_option_of_pieces(&flat_pieces_to_regular( + &archived_segments[3].pieces, + ))) .unwrap(); // Nothing is fully contained here @@ -145,7 +164,9 @@ fn basic() { { let mut partial_reconstructor = Reconstructor::new(RECORD_SIZE, SEGMENT_SIZE).unwrap(); let contents = partial_reconstructor - .add_segment(&pieces_to_option_of_pieces(&archived_segments[3].pieces)) + .add_segment(&pieces_to_option_of_pieces(&flat_pieces_to_regular( + &archived_segments[3].pieces, + ))) .unwrap(); // Nothing is fully contained here @@ -163,7 +184,9 @@ fn basic() { { let contents = reconstructor - .add_segment(&pieces_to_option_of_pieces(&archived_segments[4].pieces)) + .add_segment(&pieces_to_option_of_pieces(&flat_pieces_to_regular( + &archived_segments[4].pieces, + ))) .unwrap(); // Enough data to reconstruct fourth block @@ -182,7 +205,9 @@ fn basic() { { let mut partial_reconstructor = Reconstructor::new(RECORD_SIZE, SEGMENT_SIZE).unwrap(); let contents = partial_reconstructor - .add_segment(&pieces_to_option_of_pieces(&archived_segments[4].pieces)) + .add_segment(&pieces_to_option_of_pieces(&flat_pieces_to_regular( + &archived_segments[4].pieces, + ))) .unwrap(); // Nothing is fully contained here @@ -215,7 +240,7 @@ fn partial_data() { assert_eq!(archived_segments.len(), 1); - let pieces = archived_segments.into_iter().next().unwrap().pieces; + let pieces = flat_pieces_to_regular(&archived_segments.into_iter().next().unwrap().pieces); { // Take just data shards @@ -302,8 +327,7 @@ fn invalid_usage() { let result = Reconstructor::new(RECORD_SIZE, SEGMENT_SIZE) .unwrap() .add_segment( - &archived_segments[0] - .pieces + &flat_pieces_to_regular(&archived_segments[0].pieces) .iter() .take(MERKLE_NUM_LEAVES / 2 - 1) .copied() @@ -332,11 +356,14 @@ fn invalid_usage() { let mut reconstructor = Reconstructor::new(RECORD_SIZE, 
SEGMENT_SIZE).unwrap(); reconstructor - .add_segment(&pieces_to_option_of_pieces(&archived_segments[0].pieces)) + .add_segment(&pieces_to_option_of_pieces(&flat_pieces_to_regular( + &archived_segments[0].pieces, + ))) .unwrap(); - let result = - reconstructor.add_segment(&pieces_to_option_of_pieces(&archived_segments[2].pieces)); + let result = reconstructor.add_segment(&pieces_to_option_of_pieces( + &flat_pieces_to_regular(&archived_segments[2].pieces), + )); assert_eq!( result, @@ -347,11 +374,14 @@ ); reconstructor - .add_segment(&pieces_to_option_of_pieces(&archived_segments[1].pieces)) + .add_segment(&pieces_to_option_of_pieces(&flat_pieces_to_regular( + &archived_segments[1].pieces, + ))) .unwrap(); - let result = - reconstructor.add_segment(&pieces_to_option_of_pieces(&archived_segments[3].pieces)); + let result = reconstructor.add_segment(&pieces_to_option_of_pieces( + &flat_pieces_to_regular(&archived_segments[3].pieces), + )); assert_eq!( result, diff --git a/crates/subspace-core-primitives/src/lib.rs b/crates/subspace-core-primitives/src/lib.rs index 4303cfa0487fd..3445b58ff3a09 100644 --- a/crates/subspace-core-primitives/src/lib.rs +++ b/crates/subspace-core-primitives/src/lib.rs @@ -23,6 +23,9 @@ pub mod crypto; pub mod objects; +extern crate alloc; + +use alloc::vec::Vec; use core::convert::AsRef; use core::ops::{Deref, DerefMut}; use parity_scale_codec::{Decode, Encode}; @@ -245,6 +248,68 @@ impl AsMut<[u8]> for Piece { } } +/// Flat representation of multiple pieces concatenated together for more efficient processing. +#[derive(Clone, PartialEq, Eq, Ord, PartialOrd, Hash, Encode, Decode, TypeInfo)] +#[cfg_attr(feature = "std", derive(Debug))] +#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "std", serde(rename_all = "camelCase"))] +pub struct FlatPieces(Vec); + +impl FlatPieces { + /// Create a new instance, returns an error if `pieces` is not a multiple of full pieces.
+ pub fn new(pieces: Vec) -> Result> { + if pieces.len() % PIECE_SIZE != 0 { + return Err(pieces); + } + + Ok(Self(pieces)) + } + + /// Number of pieces contained. + pub fn count(&self) -> usize { + self.0.len() / PIECE_SIZE + } + + /// Extract internal flat representation of bytes. + pub fn into_inner(self) -> Vec { + self.0 + } +} + +impl TryFrom> for FlatPieces { + type Error = Vec; + + fn try_from(value: Vec) -> Result { + Self::new(value) + } +} + +impl Deref for FlatPieces { + type Target = [u8]; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl DerefMut for FlatPieces { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} + +impl AsRef<[u8]> for FlatPieces { + fn as_ref(&self) -> &[u8] { + &self.0 + } +} + +impl AsMut<[u8]> for FlatPieces { + fn as_mut(&mut self) -> &mut [u8] { + &mut self.0 + } +} + /// Progress of an archived block. #[derive(Copy, Clone, PartialEq, Eq, Ord, PartialOrd, Hash, Encode, Decode, TypeInfo)] #[cfg_attr(feature = "std", derive(Debug))] diff --git a/crates/subspace-farmer/src/commitments.rs b/crates/subspace-farmer/src/commitments.rs index eafcaa83ba1cb..e9f892963bc68 100644 --- a/crates/subspace-farmer/src/commitments.rs +++ b/crates/subspace-farmer/src/commitments.rs @@ -12,7 +12,7 @@ use rocksdb::{Options, DB}; use serde::{Deserialize, Serialize}; use std::collections::HashMap; use std::sync::Arc; -use subspace_core_primitives::{Piece, Salt, Tag, PIECE_SIZE}; +use subspace_core_primitives::{FlatPieces, Salt, Tag, PIECE_SIZE}; use thiserror::Error; const BATCH_SIZE: u64 = (16 * 1024 * 1024 / PIECE_SIZE) as u64; @@ -281,7 +281,7 @@ impl Commitments { /// Create commitments for all salts for specified pieces pub(crate) async fn create_for_pieces( &self, - pieces: &Arc>, + pieces: &Arc, start_offset: u64, ) -> Result<(), CommitmentError> { let salts = self @@ -323,7 +323,7 @@ impl Commitments { move || { let tags: Vec = pieces - .par_iter() + .par_chunks_exact(PIECE_SIZE) .map(|piece| 
subspace_solving::create_tag(piece, salt)) .collect(); diff --git a/crates/subspace-farmer/src/commitments/tests.rs b/crates/subspace-farmer/src/commitments/tests.rs index 5022515e3f715..f7e9fa1a6a53a 100644 --- a/crates/subspace-farmer/src/commitments/tests.rs +++ b/crates/subspace-farmer/src/commitments/tests.rs @@ -3,7 +3,7 @@ use crate::plot::Plot; use rand::prelude::*; use rand::rngs::StdRng; use std::sync::Arc; -use subspace_core_primitives::{Piece, Salt, Tag}; +use subspace_core_primitives::{FlatPieces, Salt, Tag, PIECE_SIZE}; use tempfile::TempDir; fn init() { @@ -15,7 +15,7 @@ async fn create() { init(); let base_directory = TempDir::new().unwrap(); - let piece: Piece = [9u8; 4096].into(); + let pieces: FlatPieces = vec![9u8; 4096].try_into().unwrap(); let salt: Salt = [1u8; 8]; let correct_tag: Tag = [23, 245, 162, 52, 107, 135, 192, 210]; let solution_range = u64::from_be_bytes([0xff_u8, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff]); @@ -25,7 +25,7 @@ async fn create() { let commitments = Commitments::new(base_directory.path().join("commitments").into()) .await .unwrap(); - plot.write_many(Arc::new(vec![piece]), index).await.unwrap(); + plot.write_many(Arc::new(pieces), index).await.unwrap(); commitments.create(salt, plot).await.unwrap(); let (tag, _) = commitments @@ -50,20 +50,9 @@ async fn find_by_tag() { // Generate deterministic pieces, such that we don't have random errors in CI let mut rng = StdRng::seed_from_u64(0); - plot.write_many( - Arc::new( - (0..1024_usize) - .map(|_| { - let mut piece = Piece::default(); - rng.fill(&mut piece[..]); - piece - }) - .collect(), - ), - 0, - ) - .await - .unwrap(); + let mut pieces: FlatPieces = vec![0u8; 1024 * PIECE_SIZE].try_into().unwrap(); + rng.fill(pieces.as_mut()); + plot.write_many(Arc::new(pieces), 0).await.unwrap(); commitments.create(salt, plot).await.unwrap(); diff --git a/crates/subspace-farmer/src/farming/tests.rs b/crates/subspace-farmer/src/farming/tests.rs index 34c356d44bf57..3b069a596a5a0 
100644 --- a/crates/subspace-farmer/src/farming/tests.rs +++ b/crates/subspace-farmer/src/farming/tests.rs @@ -4,7 +4,7 @@ use crate::identity::Identity; use crate::mock_rpc::MockRpc; use crate::plot::Plot; use std::sync::Arc; -use subspace_core_primitives::{Piece, Salt, Tag, TAG_SIZE}; +use subspace_core_primitives::{FlatPieces, Salt, Tag, TAG_SIZE}; use subspace_rpc_primitives::SlotInfo; use tempfile::TempDir; use tokio::sync::mpsc; @@ -19,7 +19,7 @@ async fn farming_simulator(slots: Vec, tags: Vec) { let base_directory = TempDir::new().unwrap(); - let piece: Piece = [9u8; 4096].into(); + let pieces: FlatPieces = vec![9u8; 4096].try_into().unwrap(); let salt: Salt = slots[0].salt; // the first slots salt should be used for the initial commitments let index = 0; @@ -29,7 +29,7 @@ async fn farming_simulator(slots: Vec, tags: Vec) { .await .unwrap(); - plot.write_many(Arc::new(vec![piece]), index).await.unwrap(); + plot.write_many(Arc::new(pieces), index).await.unwrap(); commitments.create(salt, plot.clone()).await.unwrap(); let identity = diff --git a/crates/subspace-farmer/src/plot.rs b/crates/subspace-farmer/src/plot.rs index 3c294ba3cd2d1..7349dd39da7f5 100644 --- a/crates/subspace-farmer/src/plot.rs +++ b/crates/subspace-farmer/src/plot.rs @@ -12,7 +12,7 @@ use std::io::SeekFrom; use std::path::Path; use std::sync::atomic::{AtomicU64, Ordering}; use std::sync::{Arc, Weak}; -use subspace_core_primitives::{Piece, RootBlock, PIECE_SIZE}; +use subspace_core_primitives::{FlatPieces, Piece, RootBlock, PIECE_SIZE}; use thiserror::Error; const LAST_ROOT_BLOCK_KEY: &[u8] = b"last_root_block"; @@ -42,7 +42,7 @@ enum ReadRequests { #[derive(Debug)] enum WriteRequests { WriteEncodings { - encodings: Arc>, + encodings: Arc, first_index: u64, result_sender: oneshot::Sender>, }, @@ -179,15 +179,9 @@ impl Plot { .seek(SeekFrom::Start(first_index * PIECE_SIZE as u64)) .await?; { - let mut whole_encoding = Vec::with_capacity( - encodings[0].len() * encodings.len(), - ); - for 
encoding in encodings.iter() { - whole_encoding.extend_from_slice(encoding); - } - plot_file.write_all(&whole_encoding).await?; + plot_file.write_all(&encodings).await?; piece_count.fetch_max( - first_index + encodings.len() as u64, + first_index + encodings.count() as u64, Ordering::AcqRel, ); } @@ -266,7 +260,7 @@ impl Plot { /// Writes a piece/s to the plot by index, will overwrite if piece exists (updates) pub(crate) async fn write_many( &self, - encodings: Arc>, + encodings: Arc, first_index: u64, ) -> io::Result<()> { if encodings.is_empty() { diff --git a/crates/subspace-farmer/src/plot/tests.rs b/crates/subspace-farmer/src/plot/tests.rs index 21b991f35c6dc..2d82acd85d44d 100644 --- a/crates/subspace-farmer/src/plot/tests.rs +++ b/crates/subspace-farmer/src/plot/tests.rs @@ -19,16 +19,16 @@ async fn read_write() { init(); let base_directory = TempDir::new().unwrap(); - let piece = generate_random_piece(); + let pieces = Arc::new(generate_random_piece().to_vec().try_into().unwrap()); let index = 0; let plot = Plot::open_or_create(&base_directory).await.unwrap(); assert_eq!(true, plot.is_empty()); - plot.write_many(Arc::new(vec![piece]), index).await.unwrap(); + plot.write_many(Arc::clone(&pieces), index).await.unwrap(); assert_eq!(false, plot.is_empty()); let extracted_piece = plot.read(index).await.unwrap(); - assert_eq!(piece[..], extracted_piece[..]); + assert_eq!(pieces[..], extracted_piece[..]); drop(plot); diff --git a/crates/subspace-farmer/src/plotting.rs b/crates/subspace-farmer/src/plotting.rs index 22bcb6ade73b2..829858a8011e7 100644 --- a/crates/subspace-farmer/src/plotting.rs +++ b/crates/subspace-farmer/src/plotting.rs @@ -226,14 +226,14 @@ async fn background_plotting( let object_mapping = create_global_object_mapping(piece_index_offset, object_mapping); - // TODO: Batch encoding - for (position, piece) in pieces.iter_mut().enumerate() { - if let Err(error) = - subspace_codec.encode(piece, piece_index_offset + position as u64) - { - 
error!("Failed to encode a piece: error: {}", error); - continue; - } + // TODO: Batch encoding with more than 1 archived segment worth of data + let piece_indexes = (piece_index_offset..) + .take(pieces.count()) + .collect::>(); + if let Err(error) = subspace_codec.batch_encode(&mut pieces, &piece_indexes) + { + error!("Failed to encode a piece: error: {}", error); + continue; } if let Some(plot) = weak_plot.upgrade() { diff --git a/crates/subspace-solving/src/codec.rs b/crates/subspace-solving/src/codec.rs index 1b96f8658b2f5..e4f2600d6aaad 100644 --- a/crates/subspace-solving/src/codec.rs +++ b/crates/subspace-solving/src/codec.rs @@ -118,6 +118,9 @@ impl SubspaceCodec { /// size, input should ideally contain at least that many worth of pieces to achieve highest /// efficiency, it is recommended that the input is a multiple of that, but, strictly speaking, /// doesn't have to be. + /// + /// NOTE: When error is returned, some pieces might have been modified and should be considered + /// in inconsistent state! #[allow(unused_mut)] pub fn batch_encode( &self,