[feature] #0000: On-chain predictable iteration order
Signed-off-by: Marin Veršić <marin.versic101@gmail.com>
mversic committed Dec 8, 2023
1 parent 2c3a4ce commit 780434d
Showing 15 changed files with 72 additions and 60 deletions.
27 changes: 14 additions & 13 deletions Cargo.lock

Some generated files are not rendered by default.

4 changes: 3 additions & 1 deletion config/base/derive/src/utils.rs
@@ -357,7 +357,9 @@ pub fn extract_box_generic(box_seg: &mut syn::PathSegment) -> &mut syn::Type {
generics.args.len() == 1,
"`Box` should have exactly one generic argument"
);
-let syn::GenericArgument::Type(generic_type) = generics.args.first_mut().expect("Can't be empty") else {
+let syn::GenericArgument::Type(generic_type) =
+    generics.args.first_mut().expect("Can't be empty")
+else {
panic!("`Box` should have type as a generic argument")
};

2 changes: 2 additions & 0 deletions core/Cargo.toml
@@ -68,7 +68,9 @@ displaydoc = { workspace = true }
wasmtime = { workspace = true }
parking_lot = { workspace = true, features = ["deadlock_detection"] }
derive_more = { workspace = true }

+uuid = { version = "1.4.1", features = ["v4"] }
+indexmap = "2.1.0"

[dev-dependencies]
criterion = { workspace = true }
1 change: 1 addition & 0 deletions core/clippy.toml
@@ -0,0 +1 @@
+disallowed-types = ["std::collections::HashMap", "std::collections::HashSet"]
1 change: 1 addition & 0 deletions core/src/block_sync.rs
@@ -84,6 +84,7 @@ impl BlockSynchronizer {
}

/// Get a random online peer.
+#[allow(clippy::disallowed_types)]
pub fn random_peer(peers: &std::collections::HashSet<PeerId>) -> Option<Peer> {
use rand::{seq::IteratorRandom, SeedableRng};

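
The new core/clippy.toml plus this `#[allow]` make the convention lint-enforced: `cargo clippy` rejects any use of the disallowed std collections inside `core`, and the rare call sites where hash ordering genuinely cannot leak out (such as `random_peer` above, which immediately randomizes its choice anyway) must opt out explicitly and visibly. A minimal standalone sketch of how the escape hatch reads at a use site; the function and values are illustrative, not the commit's code:

#[allow(clippy::disallowed_types)] // order never escapes: we reduce to one element
fn smallest(set: &std::collections::HashSet<u64>) -> Option<u64> {
    set.iter().min().copied()
}

fn main() {
    let set: std::collections::HashSet<u64> = [3, 1, 2].into_iter().collect();
    assert_eq!(smallest(&set), Some(1));
}
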
11 changes: 6 additions & 5 deletions core/src/lib.rs
@@ -15,9 +15,10 @@ pub mod tx;
pub mod wsv;

use core::time::Duration;
-use std::collections::{BTreeSet, HashMap, HashSet};
+use std::collections::BTreeSet;

use gossiper::TransactionGossip;
+use indexmap::{IndexMap, IndexSet};
use iroha_data_model::{permission::Permissions, prelude::*};
use iroha_primitives::unique_vec::UniqueVec;
use parity_scale_codec::{Decode, Encode};
@@ -38,16 +39,16 @@ pub type IrohaNetwork = iroha_p2p::NetworkHandle<NetworkMessage>;
pub type PeersIds = UniqueVec<PeerId>;

/// Parameters set.
-pub type Parameters = HashSet<Parameter>;
+pub type Parameters = IndexSet<Parameter>;

/// API to work with collections of [`DomainId`]: [`Domain`] mappings.
-pub type DomainsMap = HashMap<DomainId, Domain>;
+pub type DomainsMap = IndexMap<DomainId, Domain>;

/// API to work with a collection of [`RoleId`]: [`Role`] mappings.
-pub type RolesMap = HashMap<RoleId, Role>;
+pub type RolesMap = IndexMap<RoleId, Role>;

/// API to work with a collection of [`AccountId`] to [`Permissions`] mappings.
-pub type PermissionTokensMap = HashMap<AccountId, Permissions>;
+pub type PermissionTokensMap = IndexMap<AccountId, Permissions>;

/// API to work with a collection of [`AccountId`] to [`RoleId`] mappings.
pub type AccountRolesSet = BTreeSet<role::RoleIdWithOwner>;
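
These alias changes are the heart of the commit: `IndexMap` and `IndexSet` hash like their std counterparts but iterate in insertion order, so peers that apply the same state changes in the same order also agree on iteration order. A standalone sketch of the guarantee (illustrative values, not the commit's code; assumes the `indexmap` dependency added to Cargo.toml above):

use indexmap::IndexMap;

fn main() {
    let mut domains: IndexMap<&str, u32> = IndexMap::new();
    domains.insert("wonderland", 1);
    domains.insert("chess", 2);
    domains.insert("garden", 3);

    // Iteration replays insertion order on every run and on every peer.
    let order: Vec<&str> = domains.keys().copied().collect();
    assert_eq!(order, ["wonderland", "chess", "garden"]);

    // The equivalent loop over std's HashMap may yield any permutation,
    // and the permutation varies across runs and machines, which is fatal
    // when the iteration result feeds consensus or a block hash.
}
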
18 changes: 12 additions & 6 deletions core/src/query/store.rs
@@ -2,11 +2,11 @@

use std::{
cmp::Ordering,
-collections::HashMap,
num::NonZeroU64,
time::{Duration, Instant},
};

+use indexmap::IndexMap;
use iroha_config::live_query_store::Configuration;
use iroha_data_model::{
asset::AssetValue,
@@ -67,15 +67,15 @@ type LiveQuery = Batched<Vec<Value>>;
/// Clients can handle their queries using [`LiveQueryStoreHandle`]
#[derive(Debug)]
pub struct LiveQueryStore {
-queries: HashMap<QueryId, (LiveQuery, Instant)>,
+queries: IndexMap<QueryId, (LiveQuery, Instant)>,
query_idle_time: Duration,
}

impl LiveQueryStore {
/// Construct [`LiveQueryStore`] from configuration.
pub fn from_configuration(cfg: Configuration) -> Self {
Self {
-queries: HashMap::default(),
+queries: IndexMap::new(),
query_idle_time: Duration::from_millis(cfg.query_idle_time_ms.into()),
}
}
@@ -326,13 +326,19 @@ mod tests {
.handle_query_output(query_output, &sorting, pagination, fetch_size)
.unwrap()
.into();
-let Value::Vec(v) = batch else { panic!("not expected result") };
+let Value::Vec(v) = batch else {
+    panic!("not expected result")
+};
counter += v.len();

while cursor.cursor.is_some() {
-let Ok(batched) = query_store_handle.handle_query_cursor(cursor) else { break };
+let Ok(batched) = query_store_handle.handle_query_cursor(cursor) else {
+    break;
+};
let (batch, new_cursor) = batched.into();
-let Value::Vec(v) = batch else { panic!("not expected result") };
+let Value::Vec(v) = batch else {
+    panic!("not expected result")
+};
counter += v.len();

cursor = new_cursor;
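
A subtlety worth noting when an `IndexMap` backs a store with removals, as `queries` does here: `IndexMap::swap_remove` is O(1) but fills the hole with the last entry, perturbing order, while `shift_remove` is O(n) and keeps the remaining entries in insertion order. Which one a caller picks decides whether predictability survives deletions. A small illustrative sketch (not the commit's code, assuming indexmap 2.x):

use indexmap::IndexMap;

fn main() {
    let queries: IndexMap<&str, u32> =
        [("q1", 10), ("q2", 20), ("q3", 30)].into_iter().collect();

    // swap_remove: constant time, but "q3" is moved into the vacated slot.
    let mut a = queries.clone();
    a.swap_remove("q1");
    assert_eq!(a.keys().copied().collect::<Vec<_>>(), ["q3", "q2"]);

    // shift_remove: linear time, but relative order is preserved.
    let mut b = queries.clone();
    b.shift_remove("q1");
    assert_eq!(b.keys().copied().collect::<Vec<_>>(), ["q2", "q3"]);
}
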
4 changes: 2 additions & 2 deletions core/src/queue.rs
@@ -1,10 +1,10 @@
//! Module with queue actor
use core::time::Duration;
-use std::collections::HashSet;

use crossbeam_queue::ArrayQueue;
use dashmap::{mapref::entry::Entry, DashMap};
use eyre::{Report, Result};
+use indexmap::IndexSet;
use iroha_config::queue::Configuration;
use iroha_crypto::HashOf;
use iroha_data_model::{account::AccountId, transaction::prelude::*};
@@ -326,7 +326,7 @@ impl Queue {
self.pop_from_queue(&mut seen_queue, wsv, &mut expired_transactions_queue)
});

-let transactions_hashes: HashSet<HashOf<TransactionPayload>> =
+let transactions_hashes: IndexSet<HashOf<TransactionPayload>> =
transactions.iter().map(|tx| tx.payload().hash()).collect();
let txs = txs_from_queue
.filter(|tx| !transactions_hashes.contains(&tx.payload().hash()))
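
Here the collected set is only probed with `contains`, so output order does not depend on it; the switch to `IndexSet` is mainly about honoring the new crate-wide lint with a type whose behavior is deterministic by construction. A standalone sketch of the collect-then-filter pattern used above (illustrative stand-in hashes, not the commit's code):

use indexmap::IndexSet;

fn main() {
    // Stand-ins for the hashes of transactions already selected.
    let selected: IndexSet<u64> = [11, 22, 33].into_iter().collect();

    // Drain the queue, skipping anything already selected; the result
    // preserves queue order, so it is reproducible.
    let queued = vec![22, 44, 11, 55];
    let fresh: Vec<u64> = queued
        .into_iter()
        .filter(|tx| !selected.contains(tx))
        .collect();
    assert_eq!(fresh, [44, 55]);
}
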
33 changes: 17 additions & 16 deletions core/src/smartcontracts/isi/triggers/set.rs
@@ -9,8 +9,9 @@
//! trigger hooks.

use core::cmp::min;
-use std::{collections::HashMap, fmt};
+use std::fmt;

+use indexmap::IndexMap;
use iroha_crypto::HashOf;
use iroha_data_model::{
events::Filter as EventFilter,
@@ -138,17 +139,17 @@ impl<F: Filter + Into<TriggeringFilterBox> + Clone> LoadedActionTrait for Loaded
#[derive(Debug, Default)]
pub struct Set {
/// Triggers using [`DataEventFilter`]
-data_triggers: HashMap<TriggerId, LoadedAction<DataEventFilter>>,
+data_triggers: IndexMap<TriggerId, LoadedAction<DataEventFilter>>,
/// Triggers using [`PipelineEventFilter`]
-pipeline_triggers: HashMap<TriggerId, LoadedAction<PipelineEventFilter>>,
+pipeline_triggers: IndexMap<TriggerId, LoadedAction<PipelineEventFilter>>,
/// Triggers using [`TimeEventFilter`]
-time_triggers: HashMap<TriggerId, LoadedAction<TimeEventFilter>>,
+time_triggers: IndexMap<TriggerId, LoadedAction<TimeEventFilter>>,
/// Triggers using [`ExecuteTriggerEventFilter`]
-by_call_triggers: HashMap<TriggerId, LoadedAction<ExecuteTriggerEventFilter>>,
+by_call_triggers: IndexMap<TriggerId, LoadedAction<ExecuteTriggerEventFilter>>,
/// Trigger ids with type of events they process
-ids: HashMap<TriggerId, TriggeringEventType>,
+ids: IndexMap<TriggerId, TriggeringEventType>,
/// Original [`WasmSmartContract`]s by [`TriggerId`] for querying purposes.
-original_contracts: HashMap<HashOf<WasmSmartContract>, WasmSmartContract>,
+original_contracts: IndexMap<HashOf<WasmSmartContract>, WasmSmartContract>,
/// List of actions that should be triggered by events provided by `handle_*` methods.
/// Vector is used to save the exact triggers order.
matched_ids: Vec<(Event, TriggerId)>,
@@ -157,14 +158,14 @@ pub struct Set {
/// Helper struct for serializing triggers.
struct TriggersWithContext<'s, F> {
/// Triggers being serialized
-triggers: &'s HashMap<TriggerId, LoadedAction<F>>,
+triggers: &'s IndexMap<TriggerId, LoadedAction<F>>,
/// Containing Set, used for looking up original [`WasmSmartContract`]s
/// during serialization.
set: &'s Set,
}

impl<'s, F> TriggersWithContext<'s, F> {
-fn new(triggers: &'s HashMap<TriggerId, LoadedAction<F>>, set: &'s Set) -> Self {
+fn new(triggers: &'s IndexMap<TriggerId, LoadedAction<F>>, set: &'s Set) -> Self {
Self { triggers, set }
}
}
@@ -236,15 +237,15 @@ impl<'de> DeserializeSeed<'de> for WasmSeed<'_, Set> {
while let Some(key) = map.next_key::<String>()? {
match key.as_str() {
"data_triggers" => {
-let triggers: HashMap<TriggerId, Action<DataEventFilter>> =
+let triggers: IndexMap<TriggerId, Action<DataEventFilter>> =
map.next_value()?;
for (id, action) in triggers {
set.add_data_trigger(self.loader.engine, Trigger::new(id, action))
.unwrap();
}
}
"pipeline_triggers" => {
-let triggers: HashMap<TriggerId, Action<PipelineEventFilter>> =
+let triggers: IndexMap<TriggerId, Action<PipelineEventFilter>> =
map.next_value()?;
for (id, action) in triggers {
set.add_pipeline_trigger(
@@ -255,15 +256,15 @@
}
}
"time_triggers" => {
-let triggers: HashMap<TriggerId, Action<TimeEventFilter>> =
+let triggers: IndexMap<TriggerId, Action<TimeEventFilter>> =
map.next_value()?;
for (id, action) in triggers {
set.add_time_trigger(self.loader.engine, Trigger::new(id, action))
.unwrap();
}
}
"by_call_triggers" => {
-let triggers: HashMap<TriggerId, Action<ExecuteTriggerEventFilter>> =
+let triggers: IndexMap<TriggerId, Action<ExecuteTriggerEventFilter>> =
map.next_value()?;
for (id, action) in triggers {
set.add_by_call_trigger(
@@ -387,7 +388,7 @@ impl Set {
engine: &wasmtime::Engine,
trigger: Trigger<F>,
event_type: TriggeringEventType,
-map: impl FnOnce(&mut Self) -> &mut HashMap<TriggerId, LoadedAction<F>>,
+map: impl FnOnce(&mut Self) -> &mut IndexMap<TriggerId, LoadedAction<F>>,
) -> Result<bool> {
if self.contains(trigger.id()) {
return Ok(false);
@@ -816,8 +817,8 @@

/// Remove actions with zero execution count from `triggers`
fn remove_zeros<F: Filter>(
-ids: &mut HashMap<TriggerId, TriggeringEventType>,
-triggers: &mut HashMap<TriggerId, LoadedAction<F>>,
+ids: &mut IndexMap<TriggerId, TriggeringEventType>,
+triggers: &mut IndexMap<TriggerId, LoadedAction<F>>,
) {
let to_remove: Vec<TriggerId> = triggers
.iter()
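
One reason the deserializer above can read each trigger category into an `IndexMap` and re-add the actions one by one is that `IndexMap`'s serde support (the crate's `serde` feature) preserves the order in which entries appear in the serialized input, so a snapshot round-trips without reshuffling triggers. A standalone sketch using serde_json, which illustrates the property but is not the commit's actual (de)serialization format; assumes indexmap is built with its `serde` feature:

use indexmap::IndexMap;

fn main() {
    let json = r#"{ "trigger_c": 3, "trigger_a": 1, "trigger_b": 2 }"#;

    // Keys come back in document order, not in hash order.
    let triggers: IndexMap<String, u32> = serde_json::from_str(json).unwrap();
    let ids: Vec<&str> = triggers.keys().map(String::as_str).collect();
    assert_eq!(ids, ["trigger_c", "trigger_a", "trigger_b"]);
}
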
