Skip to content
This repository has been archived by the owner on Nov 15, 2023. It is now read-only.

Fix Clippy #2522

Merged
merged 17 commits into from
May 6, 2023
32 changes: 32 additions & 0 deletions .cargo/config.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
#
# An auto-defined `clippy` feature was introduced,
# but it was found to clash with user defined features,
# so was renamed to `cargo-clippy`.
#
# If you want standard clippy run:
# RUSTFLAGS= cargo clippy
[target.'cfg(feature = "cargo-clippy")']
rustflags = [
"-Aclippy::all",
"-Dclippy::correctness",
"-Aclippy::if-same-then-else",
"-Aclippy::clone-double-ref",
"-Dclippy::complexity",
"-Aclippy::zero-prefixed-literal", # 00_1000_000
"-Aclippy::type_complexity", # raison d'etre
"-Aclippy::nonminimal-bool", # maybe
"-Aclippy::borrowed-box", # Reasonable to fix this one
"-Aclippy::too-many-arguments", # (Turning this on would lead to)
"-Aclippy::unnecessary_cast", # Types may change
"-Aclippy::identity-op", # One case where we do 0 +
"-Aclippy::useless_conversion", # Types may change
"-Aclippy::unit_arg", # stylistic.
"-Aclippy::option-map-unit-fn", # stylistic
"-Aclippy::bind_instead_of_map", # stylistic
"-Aclippy::erasing_op", # E.g. 0 * DOLLARS
"-Aclippy::eq_op", # In tests we test equality.
"-Aclippy::while_immutable_condition", # false positives
"-Aclippy::needless_option_as_deref", # false positives
"-Aclippy::derivable_impls", # false positives
"-Aclippy::stable_sort_primitive", # prefer stable sort
]
ggwpez marked this conversation as resolved.
Show resolved Hide resolved
12 changes: 2 additions & 10 deletions bridges/bin/runtime-common/src/messages_benchmarking.rs
Original file line number Diff line number Diff line change
Expand Up @@ -182,11 +182,7 @@ where
// update runtime storage
let (_, bridged_header_hash) = insert_header_to_grandpa_pallet::<R, FI>(state_root);

FromBridgedChainMessagesDeliveryProof {
bridged_header_hash: bridged_header_hash.into(),
storage_proof,
lane,
}
FromBridgedChainMessagesDeliveryProof { bridged_header_hash, storage_proof, lane }
}

/// Prepare proof of messages delivery for the `receive_messages_delivery_proof` call.
Expand All @@ -211,11 +207,7 @@ where
let (_, bridged_header_hash) =
insert_header_to_parachains_pallet::<R, PI, UnderlyingChainOf<BridgedChain<B>>>(state_root);

FromBridgedChainMessagesDeliveryProof {
bridged_header_hash: bridged_header_hash.into(),
storage_proof,
lane,
}
FromBridgedChainMessagesDeliveryProof { bridged_header_hash, storage_proof, lane }
}

/// Prepare in-memory message delivery proof, without inserting anything to the runtime storage.
Expand Down
2 changes: 1 addition & 1 deletion bridges/bin/runtime-common/src/parachains_benchmarking.rs
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,7 @@ where
TrieDBMutBuilderV1::<RelayBlockHasher>::new(&mut mdb, &mut state_root).build();

// insert parachain heads
for (i, parachain) in parachains.into_iter().enumerate() {
for (i, parachain) in parachains.iter().enumerate() {
let storage_key =
parachain_head_storage_key_at_source(R::ParasPalletName::get(), *parachain);
let leaf_data = if i == 0 {
Expand Down
12 changes: 6 additions & 6 deletions bridges/modules/messages/src/benchmarking.rs
Original file line number Diff line number Diff line change
Expand Up @@ -139,7 +139,7 @@ benchmarks_instance_pallet! {
}: receive_messages_proof(RawOrigin::Signed(relayer_id_on_target), relayer_id_on_source, proof, 1, dispatch_weight)
verify {
assert_eq!(
crate::InboundLanes::<T, I>::get(&T::bench_lane_id()).last_delivered_nonce(),
crate::InboundLanes::<T, I>::get(T::bench_lane_id()).last_delivered_nonce(),
21,
);
}
Expand Down Expand Up @@ -172,7 +172,7 @@ benchmarks_instance_pallet! {
}: receive_messages_proof(RawOrigin::Signed(relayer_id_on_target), relayer_id_on_source, proof, 2, dispatch_weight)
verify {
assert_eq!(
crate::InboundLanes::<T, I>::get(&T::bench_lane_id()).last_delivered_nonce(),
crate::InboundLanes::<T, I>::get(T::bench_lane_id()).last_delivered_nonce(),
22,
);
}
Expand Down Expand Up @@ -208,7 +208,7 @@ benchmarks_instance_pallet! {
});
}: receive_messages_proof(RawOrigin::Signed(relayer_id_on_target), relayer_id_on_source, proof, 1, dispatch_weight)
verify {
let lane_state = crate::InboundLanes::<T, I>::get(&T::bench_lane_id());
let lane_state = crate::InboundLanes::<T, I>::get(T::bench_lane_id());
assert_eq!(lane_state.last_delivered_nonce(), 21);
assert_eq!(lane_state.last_confirmed_nonce, 20);
}
Expand Down Expand Up @@ -240,7 +240,7 @@ benchmarks_instance_pallet! {
}: receive_messages_proof(RawOrigin::Signed(relayer_id_on_target), relayer_id_on_source, proof, 1, dispatch_weight)
verify {
assert_eq!(
crate::InboundLanes::<T, I>::get(&T::bench_lane_id()).last_delivered_nonce(),
crate::InboundLanes::<T, I>::get(T::bench_lane_id()).last_delivered_nonce(),
21,
);
}
Expand Down Expand Up @@ -274,7 +274,7 @@ benchmarks_instance_pallet! {
}: receive_messages_proof(RawOrigin::Signed(relayer_id_on_target), relayer_id_on_source, proof, 1, dispatch_weight)
verify {
assert_eq!(
crate::InboundLanes::<T, I>::get(&T::bench_lane_id()).last_delivered_nonce(),
crate::InboundLanes::<T, I>::get(T::bench_lane_id()).last_delivered_nonce(),
21,
);
}
Expand Down Expand Up @@ -432,7 +432,7 @@ benchmarks_instance_pallet! {
}: receive_messages_proof(RawOrigin::Signed(relayer_id_on_target), relayer_id_on_source, proof, 1, dispatch_weight)
verify {
assert_eq!(
crate::InboundLanes::<T, I>::get(&T::bench_lane_id()).last_delivered_nonce(),
crate::InboundLanes::<T, I>::get(T::bench_lane_id()).last_delivered_nonce(),
21,
);
assert!(T::is_message_successfully_dispatched(21));
Expand Down
2 changes: 1 addition & 1 deletion bridges/modules/messages/src/mock.rs
Original file line number Diff line number Diff line change
Expand Up @@ -185,7 +185,7 @@ impl crate::benchmarking::Config<()> for TestRuntime {
// in mock run we only care about benchmarks correctness, not the benchmark results
// => ignore size related arguments
let (messages, total_dispatch_weight) =
params.message_nonces.into_iter().map(|n| message(n, REGULAR_PAYLOAD)).fold(
params.message_nonces.map(|n| message(n, REGULAR_PAYLOAD)).fold(
(Vec::new(), Weight::zero()),
|(mut messages, total_dispatch_weight), message| {
let weight = REGULAR_PAYLOAD.declared_weight;
Expand Down
6 changes: 3 additions & 3 deletions bridges/modules/relayers/src/benchmarking.rs
Original file line number Diff line number Diff line change
Expand Up @@ -104,7 +104,7 @@ benchmarks! {
// create slash destination account
let lane = LaneId([0, 0, 0, 0]);
let slash_destination = RewardsAccountParams::new(lane, *b"test", RewardsAccountOwner::ThisChain);
T::prepare_rewards_account(slash_destination.clone(), Zero::zero());
T::prepare_rewards_account(slash_destination, Zero::zero());
}: {
crate::Pallet::<T>::slash_and_deregister(&relayer, slash_destination)
}
Expand All @@ -121,10 +121,10 @@ benchmarks! {
let account_params =
RewardsAccountParams::new(lane, *b"test", RewardsAccountOwner::ThisChain);
}: {
crate::Pallet::<T>::register_relayer_reward(account_params.clone(), &relayer, One::one());
crate::Pallet::<T>::register_relayer_reward(account_params, &relayer, One::one());
}
verify {
assert_eq!(RelayerRewards::<T>::get(relayer, &account_params), Some(One::one()));
assert_eq!(RelayerRewards::<T>::get(relayer, account_params), Some(One::one()));
}

impl_benchmark_test_suite!(Pallet, crate::mock::new_test_ext(), crate::mock::TestRuntime)
Expand Down
4 changes: 2 additions & 2 deletions client/cli/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -93,7 +93,7 @@ impl PurgeChainCmd {
io::stdin().read_line(&mut input)?;
let input = input.trim();

match input.chars().nth(0) {
match input.chars().next() {
Some('y') | Some('Y') => {},
_ => {
println!("Aborted");
Expand All @@ -103,7 +103,7 @@ impl PurgeChainCmd {
}

for db_path in &db_paths {
match fs::remove_dir_all(&db_path) {
match fs::remove_dir_all(db_path) {
Ok(_) => {
println!("{:?} removed.", &db_path);
},
Expand Down
30 changes: 13 additions & 17 deletions client/collator/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -270,16 +270,14 @@ where

let (header, extrinsics) = candidate.block.deconstruct();

let compact_proof = match candidate
.proof
.into_compact_proof::<HashFor<Block>>(last_head.state_root().clone())
{
Ok(proof) => proof,
Err(e) => {
tracing::error!(target: "cumulus-collator", "Failed to compact proof: {:?}", e);
return None
},
};
let compact_proof =
match candidate.proof.into_compact_proof::<HashFor<Block>>(*last_head.state_root()) {
Ok(proof) => proof,
Err(e) => {
tracing::error!(target: "cumulus-collator", "Failed to compact proof: {:?}", e);
return None
},
};

// Create the parachain block data for the validators.
let b = ParachainBlockData::<Block>::new(header, extrinsics, compact_proof);
Expand Down Expand Up @@ -451,7 +449,7 @@ mod tests {
.build()
.expect("Builds overseer");

spawner.spawn("overseer", None, overseer.run().then(|_| async { () }).boxed());
spawner.spawn("overseer", None, overseer.run().then(|_| async {}).boxed());

let collator_start = start_collator(StartCollatorParams {
runtime_api: client.clone(),
Expand All @@ -461,20 +459,18 @@ mod tests {
spawner,
para_id,
key: CollatorPair::generate().0,
parachain_consensus: Box::new(DummyParachainConsensus { client: client.clone() }),
parachain_consensus: Box::new(DummyParachainConsensus { client }),
});
block_on(collator_start);

let msg = block_on(sub_rx.into_future())
.0
.expect("message should be send by `start_collator` above.");

let config = match msg {
CollationGenerationMessage::Initialize(config) => config,
};
let CollationGenerationMessage::Initialize(config) = msg;

let mut validation_data = PersistedValidationData::default();
validation_data.parent_head = header.encode().into();
let validation_data =
PersistedValidationData { parent_head: header.encode().into(), ..Default::default() };
let relay_parent = Default::default();

let collation = block_on((config.collator)(relay_parent, &validation_data))
Expand Down
4 changes: 2 additions & 2 deletions client/consensus/aura/src/import_queue.rs
Original file line number Diff line number Diff line change
Expand Up @@ -51,15 +51,15 @@ pub struct ImportQueueParams<'a, I, C, CIDP, S> {
}

/// Start an import queue for the Aura consensus algorithm.
pub fn import_queue<'a, P, Block, I, C, S, CIDP>(
pub fn import_queue<P, Block, I, C, S, CIDP>(
ImportQueueParams {
block_import,
client,
create_inherent_data_providers,
spawner,
registry,
telemetry,
}: ImportQueueParams<'a, I, C, CIDP, S>,
}: ImportQueueParams<'_, I, C, CIDP, S>,
) -> Result<DefaultImportQueue<Block, C>, sp_consensus::Error>
where
Block: BlockT,
Expand Down
2 changes: 1 addition & 1 deletion client/consensus/common/src/level_monitor.rs
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ use std::{
sync::Arc,
};

const LOG_TARGET: &'static str = "level-monitor";
const LOG_TARGET: &str = "level-monitor";

/// Value good enough to be used with parachains using the current backend implementation
/// that ships with Substrate. This value may change in the future.
Expand Down
1 change: 0 additions & 1 deletion client/consensus/common/src/parachain_consensus.rs
Original file line number Diff line number Diff line change
Expand Up @@ -325,7 +325,6 @@ async fn handle_new_block_imported<Block, P>(

match parachain.block_status(unset_hash) {
Ok(BlockStatus::InChainWithState) => {
drop(unset_best_header);
let unset_best_header = unset_best_header_opt
.take()
.expect("We checked above that the value is set; qed");
Expand Down
10 changes: 2 additions & 8 deletions client/consensus/common/src/tests.rs
Original file line number Diff line number Diff line change
Expand Up @@ -337,7 +337,7 @@ fn follow_new_best_with_dummy_recovery_works() {
Some(recovery_chan_tx),
);

let block = build_block(&*client.clone(), None, None);
let block = build_block(&*client, None, None);
let block_clone = block.clone();
let client_clone = client.clone();

Expand Down Expand Up @@ -547,7 +547,6 @@ fn do_not_set_best_block_to_older_block() {
let client = Arc::new(TestClientBuilder::with_backend(backend).build());

let blocks = (0..NUM_BLOCKS)
.into_iter()
.map(|_| build_and_import_block(client.clone(), true))
.collect::<Vec<_>>();

Expand All @@ -559,7 +558,6 @@ fn do_not_set_best_block_to_older_block() {
let consensus =
run_parachain_consensus(100.into(), client.clone(), relay_chain, Arc::new(|_, _| {}), None);

let client2 = client.clone();
let work = async move {
new_best_heads_sender
.unbounded_send(blocks[NUM_BLOCKS - 2].header().clone())
Expand All @@ -579,7 +577,7 @@ fn do_not_set_best_block_to_older_block() {
});

// Build and import a new best block.
build_and_import_block(client2.clone(), true);
build_and_import_block(client, true);
}

#[test]
Expand Down Expand Up @@ -607,7 +605,6 @@ fn prune_blocks_on_level_overflow() {
let id0 = block0.header.hash();

let blocks1 = (0..LEVEL_LIMIT)
.into_iter()
.map(|i| {
build_and_import_block_ext(
&*client,
Expand All @@ -622,7 +619,6 @@ fn prune_blocks_on_level_overflow() {
let id10 = blocks1[0].header.hash();

let blocks2 = (0..2)
.into_iter()
.map(|i| {
build_and_import_block_ext(
&*client,
Expand Down Expand Up @@ -720,7 +716,6 @@ fn restore_limit_monitor() {
let id00 = block00.header.hash();

let blocks1 = (0..LEVEL_LIMIT + 1)
.into_iter()
.map(|i| {
build_and_import_block_ext(
&*client,
Expand All @@ -735,7 +730,6 @@ fn restore_limit_monitor() {
let id10 = blocks1[0].header.hash();

let _ = (0..LEVEL_LIMIT)
.into_iter()
.map(|i| {
build_and_import_block_ext(
&*client,
Expand Down
4 changes: 2 additions & 2 deletions client/consensus/relay-chain/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -161,7 +161,7 @@ where
relay_parent: PHash,
validation_data: &PersistedValidationData,
) -> Option<ParachainCandidate<B>> {
let proposer_future = self.proposer_factory.lock().init(&parent);
let proposer_future = self.proposer_factory.lock().init(parent);

let proposer = proposer_future
.await
Expand All @@ -171,7 +171,7 @@ where
.ok()?;

let inherent_data =
self.inherent_data(parent.hash(), &validation_data, relay_parent).await?;
self.inherent_data(parent.hash(), validation_data, relay_parent).await?;

let Proposal { block, storage_changes, proof } = proposer
.propose(
Expand Down
6 changes: 3 additions & 3 deletions client/network/src/tests.rs
Original file line number Diff line number Diff line change
Expand Up @@ -129,7 +129,7 @@ impl RelayChainInterface for DummyRelayChainInterface {
para_id: 0u32.into(),
relay_parent: PHash::random(),
collator: CollatorPair::generate().0.public(),
persisted_validation_data_hash: PHash::random().into(),
persisted_validation_data_hash: PHash::random(),
pov_hash: PHash::random(),
erasure_root: PHash::random(),
signature: sp_core::sr25519::Signature([0u8; 64]).into(),
Expand Down Expand Up @@ -293,7 +293,7 @@ async fn make_gossip_message_and_header(
para_id: 0u32.into(),
relay_parent,
collator: CollatorPair::generate().0.public(),
persisted_validation_data_hash: PHash::random().into(),
persisted_validation_data_hash: PHash::random(),
pov_hash: PHash::random(),
erasure_root: PHash::random(),
signature: sp_core::sr25519::Signature([0u8; 64]).into(),
Expand Down Expand Up @@ -484,7 +484,7 @@ async fn check_statement_seconded() {
para_id: 0u32.into(),
relay_parent: PHash::random(),
collator: CollatorPair::generate().0.public(),
persisted_validation_data_hash: PHash::random().into(),
persisted_validation_data_hash: PHash::random(),
pov_hash: PHash::random(),
erasure_root: PHash::random(),
signature: sp_core::sr25519::Signature([0u8; 64]).into(),
Expand Down
Loading