preliminaries for bumping nightly to 2023-08-25 #33047

Merged · 16 commits · Aug 29, 2023
4 changes: 2 additions & 2 deletions accounts-db/src/accounts.rs
@@ -1933,7 +1933,7 @@ mod tests {
assert_eq!(loaded_transaction.program_indices.len(), 1);
assert_eq!(loaded_transaction.program_indices[0].len(), 0);
}
(Err(e), _nonce) => Err(e).unwrap(),
(Err(e), _nonce) => panic!("{e}"),
}
}

@@ -2282,7 +2282,7 @@ mod tests {
}
}
}
(Err(e), _nonce) => Err(e).unwrap(),
(Err(e), _nonce) => panic!("{e}"),
}
}

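Both hunks above make the same mechanical change: the catch-all error arm of the test stops rebuilding a `Result` only to `unwrap()` it, and panics with the error's `Display` output instead. A minimal sketch of the before/after, using a hypothetical error type rather than the crate's own (newer clippy nightlies flag the old form, presumably via `unnecessary_literal_unwrap`):

```rust
fn check(result: Result<u64, String>) -> u64 {
    match result {
        Ok(value) => value,
        // Old form: `Err(e).unwrap()` panics with the Debug rendering of the whole
        // Result and is flagged by newer clippy:
        //     Err(e) => Err(e).unwrap(),
        // New form: panic directly with the error's Display output.
        Err(e) => panic!("{e}"),
    }
}

fn main() {
    assert_eq!(check(Ok(42)), 42);
}
```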
27 changes: 11 additions & 16 deletions accounts-db/src/accounts_db.rs
@@ -2427,7 +2427,7 @@ pub struct PubkeyHashAccount {
}

impl AccountsDb {
pub const ACCOUNTS_HASH_CACHE_DIR: &str = "accounts_hash_cache";
pub const ACCOUNTS_HASH_CACHE_DIR: &'static str = "accounts_hash_cache";

pub fn default_for_tests() -> Self {
Self::default_with_accounts_index(AccountInfoAccountsIndex::default_for_tests(), None)
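This hunk's edit spells out `'static` on the associated constant; recent nightlies warn when a reference's lifetime is elided in an associated const, which is presumably what prompted it. A small stand-in sketch (the type name is illustrative, not the real `AccountsDb`):

```rust
struct Db;

impl Db {
    // Previously `pub const ACCOUNTS_HASH_CACHE_DIR: &str = ...`; making the
    // lifetime explicit avoids the warning about elided lifetimes in
    // associated constants on newer toolchains.
    pub const ACCOUNTS_HASH_CACHE_DIR: &'static str = "accounts_hash_cache";
}

fn main() {
    println!("{}", Db::ACCOUNTS_HASH_CACHE_DIR);
}
```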
@@ -4741,10 +4741,7 @@ impl AccountsDb {
It is a performance optimization to not send the ENTIRE old/pre-shrunk append vec to clean in the normal case.
*/

let mut uncleaned_pubkeys = self
.uncleaned_pubkeys
.entry(slot)
.or_insert_with(Vec::default);
let mut uncleaned_pubkeys = self.uncleaned_pubkeys.entry(slot).or_default();
uncleaned_pubkeys.extend(pubkeys);
}
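This hunk, and two later ones in the same file, collapse `.entry(key).or_insert_with(T::default)` into `.entry(key).or_default()`, which clippy suggests whenever the fallback is just the type's `Default`. A self-contained sketch of the equivalence, using a plain `HashMap` rather than the real `uncleaned_pubkeys` field:

```rust
use std::collections::HashMap;

fn main() {
    let mut uncleaned: HashMap<u64, Vec<u32>> = HashMap::new();

    // Old form: spell out the default constructor explicitly.
    uncleaned.entry(0).or_insert_with(Vec::default).push(1);

    // New form: `or_default()` inserts `Vec::default()` on a miss, same behavior.
    uncleaned.entry(1).or_default().push(2);

    assert_eq!(uncleaned[&0], vec![1]);
    assert_eq!(uncleaned[&1], vec![2]);
}
```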

@@ -8522,7 +8519,7 @@ impl AccountsDb {
.lock()
.unwrap()
.entry(accounts.target_slot())
.or_insert_with(BankHashStats::default)
.or_default()
.accumulate(&stats);
}

@@ -9490,9 +9487,7 @@ impl AccountsDb {
let mut storage_size_accounts_map_flatten_time =
Measure::start("storage_size_accounts_map_flatten_time");
if !accounts_map.is_empty() {
let mut info = storage_info
.entry(store_id)
.or_insert_with(StorageSizeAndCount::default);
let mut info = storage_info.entry(store_id).or_default();
info.stored_size += storage_info_local.stored_size;
info.count += storage_info_local.count;
}
@@ -10395,7 +10390,7 @@ pub mod tests {
CalculateHashIntermediate::new(Hash::default(), 256, pubkey255),
];

let expected_hashes = vec![
let expected_hashes = [
Hash::from_str("5K3NW73xFHwgTWVe4LyCg4QfQda8f88uZj2ypDx2kmmH").unwrap(),
Hash::from_str("84ozw83MZ8oeSF4hRAg7SeW1Tqs9LMXagX1BrDRjtZEx").unwrap(),
Hash::from_str("5XqtnEJ41CG2JWNp7MAg9nxkRUAnyjLxfsKsdrLxQUbC").unwrap(),
@@ -10740,7 +10735,7 @@ pub mod tests {
let slot = MAX_ITEMS_PER_CHUNK as Slot;
let (storages, raw_expected) =
sample_storages_and_account_in_slot(slot, &accounts_db, INCLUDE_SLOT_IN_HASH_TESTS);
let storage_data = vec![(&storages[0], slot)];
let storage_data = [(&storages[0], slot)];

let sorted_storages =
SortedStorages::new_debug(&storage_data[..], 0, MAX_ITEMS_PER_CHUNK as usize + 1);
@@ -10837,7 +10832,7 @@ pub mod tests {
}

let bins = 256;
let bin_locations = vec![0, 127, 128, 255];
let bin_locations = [0, 127, 128, 255];
let range = 1;
for bin in 0..bins {
let accounts_db = AccountsDb::new_single_for_tests();
@@ -10879,7 +10874,7 @@ pub mod tests {
let slot = MAX_ITEMS_PER_CHUNK as Slot;
let (storages, raw_expected) =
sample_storages_and_account_in_slot(slot, &accounts_db, INCLUDE_SLOT_IN_HASH_TESTS);
let storage_data = vec![(&storages[0], slot)];
let storage_data = [(&storages[0], slot)];

let sorted_storages =
SortedStorages::new_debug(&storage_data[..], 0, MAX_ITEMS_PER_CHUNK as usize + 1);
@@ -14552,7 +14547,7 @@ pub mod tests {
})
.unwrap();
assert_eq!(account_info.0, slot);
let reclaims = vec![account_info];
let reclaims = [account_info];
accounts_db.remove_dead_accounts(reclaims.iter(), None, None, true);
let after_size = storage0.alive_bytes.load(Ordering::Acquire);
assert_eq!(before_size, after_size + account.stored_size());
@@ -16270,7 +16265,7 @@ pub mod tests {
&mut purged_stored_account_slots,
&pubkeys_removed_from_accounts_index,
);
for (pk, slots) in vec![(pk1, vec![slot1, slot2]), (pk2, vec![slot1])] {
for (pk, slots) in [(pk1, vec![slot1, slot2]), (pk2, vec![slot1])] {
let result = purged_stored_account_slots.remove(&pk).unwrap();
assert_eq!(result, slots.into_iter().collect::<HashSet<_>>());
}
@@ -17752,7 +17747,7 @@ pub mod tests {
let slot0 = 0;
let dropped_roots = vec![slot0];
db.accounts_index.add_root(slot0);
db.accounts_index.add_uncleaned_roots([slot0].into_iter());
db.accounts_index.add_uncleaned_roots([slot0]);
assert!(db.accounts_index.is_uncleaned_root(slot0));
assert!(db.accounts_index.is_alive_root(slot0));
db.handle_dropped_roots_for_ancient(dropped_roots.into_iter());
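Many of the test edits in this file, and in several files below, replace `vec![...]` with a plain array wherever the value is only iterated, indexed, or borrowed as a slice; clippy's `useless_vec` lint flags the unnecessary heap allocation. A minimal sketch of the pattern in isolation:

```rust
fn main() {
    // Old form: allocates a Vec even though the collection is never resized.
    let expected_vec = vec![1u64, 2, 3];

    // New form: a stack array works anywhere only iteration or a slice is needed.
    let expected_arr = [1u64, 2, 3];

    assert_eq!(expected_vec.iter().sum::<u64>(), expected_arr.iter().sum::<u64>());
    assert_eq!(&expected_vec[..], &expected_arr[..]);
}
```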
4 changes: 2 additions & 2 deletions accounts-db/src/accounts_index.rs
@@ -2959,7 +2959,7 @@ pub mod tests {
assert_eq!(0, index.roots_tracker.read().unwrap().uncleaned_roots.len());
index.add_root(0);
index.add_root(1);
index.add_uncleaned_roots([0, 1].into_iter());
index.add_uncleaned_roots([0, 1]);
assert_eq!(2, index.roots_tracker.read().unwrap().uncleaned_roots.len());

assert_eq!(
@@ -2986,7 +2986,7 @@

index.add_root(2);
index.add_root(3);
index.add_uncleaned_roots([2, 3].into_iter());
index.add_uncleaned_roots([2, 3]);
assert_eq!(4, index.roots_tracker.read().unwrap().alive_roots.len());
assert_eq!(2, index.roots_tracker.read().unwrap().uncleaned_roots.len());
assert_eq!(
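Here, as in the `add_uncleaned_roots` call in accounts_db.rs above and the `UnprocessedPacketBatches::from_iter` and `.chain(...)` call sites further down, the trailing `.into_iter()` is dropped because the callee already accepts `impl IntoIterator`; clippy's `useless_conversion` lint reports the redundant conversion. A sketch with a stand-in signature (the real method's signature is assumed, not quoted):

```rust
use std::collections::HashSet;

// Stand-in for a method that accepts anything iterable over slot numbers.
fn add_uncleaned_roots(set: &mut HashSet<u64>, roots: impl IntoIterator<Item = u64>) {
    set.extend(roots);
}

fn main() {
    let mut uncleaned = HashSet::new();

    // Old form: add_uncleaned_roots(&mut uncleaned, [0, 1].into_iter());
    // New form: the array already implements IntoIterator, so pass it directly.
    add_uncleaned_roots(&mut uncleaned, [0, 1]);

    assert_eq!(uncleaned.len(), 2);
}
```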
6 changes: 3 additions & 3 deletions accounts-db/src/ancient_append_vecs.rs
@@ -2071,7 +2071,7 @@ pub mod tests {
let can_randomly_shrink = false;
for method in TestCollectInfo::iter() {
for slot1_is_alive in [false, true] {
let alives = vec![false /*dummy*/, slot1_is_alive, !slot1_is_alive];
let alives = [false /*dummy*/, slot1_is_alive, !slot1_is_alive];
let slots = 2;
// 1_040_000 is big enough relative to page size to cause shrink ratio to be triggered
for data_size in [None, Some(1_040_000)] {
@@ -2098,7 +2098,7 @@
});
let alive_storages = storages
.iter()
.filter_map(|storage| alives[storage.slot() as usize].then_some(storage))
.filter(|storage| alives[storage.slot() as usize])
.collect::<Vec<_>>();
let alive_bytes_expected = alive_storages
.iter()
@@ -2362,7 +2362,7 @@ pub mod tests {
let can_randomly_shrink = false;
for method in TestCollectInfo::iter() {
for slot1_shrink in [false, true] {
let shrinks = vec![false /*dummy*/, slot1_shrink, !slot1_shrink];
let shrinks = [false /*dummy*/, slot1_shrink, !slot1_shrink];
let slots = 2;
// 1_040_000 is big enough relative to page size to cause shrink ratio to be triggered
let data_sizes = shrinks
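One change in this file is not a `vec!` swap: a `filter_map(|storage| cond.then_some(storage))` becomes a plain `filter(|storage| cond)`, since nothing is actually mapped and the yielded element type is the same either way. A hypothetical sketch of the equivalence:

```rust
fn main() {
    let alives = [false, true, true];
    let storages: Vec<usize> = vec![0, 1, 2]; // stand-ins for storages, indexed by slot

    // Old form: filter_map + bool::then_some, even though no mapping happens.
    let old: Vec<&usize> = storages
        .iter()
        .filter_map(|slot| alives[*slot].then_some(slot))
        .collect();

    // New form: a plain filter expresses the same thing with less machinery.
    let new: Vec<&usize> = storages.iter().filter(|slot| alives[**slot]).collect();

    assert_eq!(old, new);
}
```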
10 changes: 5 additions & 5 deletions accounts-db/src/append_vec.rs
@@ -787,7 +787,7 @@ pub mod tests {
// for (Slot, &'a [(&'a Pubkey, &'a T)], IncludeSlotInHash)
let account = AccountSharedData::default();
let slot = 0 as Slot;
let pubkeys = vec![Pubkey::default()];
let pubkeys = [Pubkey::default()];
let hashes = Vec::<Hash>::default();
let write_versions = Vec::default();
let mut accounts = vec![(&pubkeys[0], &account)];
@@ -808,10 +808,10 @@
// for (Slot, &'a [(&'a Pubkey, &'a T)], IncludeSlotInHash)
let account = AccountSharedData::default();
let slot = 0 as Slot;
let pubkeys = vec![Pubkey::from([5; 32]), Pubkey::from([6; 32])];
let pubkeys = [Pubkey::from([5; 32]), Pubkey::from([6; 32])];
let hashes = vec![Hash::new(&[3; 32]), Hash::new(&[4; 32])];
let write_versions = vec![42, 43];
let accounts = vec![(&pubkeys[0], &account), (&pubkeys[1], &account)];
let accounts = [(&pubkeys[0], &account), (&pubkeys[1], &account)];
let accounts2 = (slot, &accounts[..], INCLUDE_SLOT_IN_HASH_TESTS);
let storable =
StorableAccountsWithHashesAndWriteVersions::new_with_hashes_and_write_versions(
@@ -842,7 +842,7 @@ pub mod tests {
let pubkey = Pubkey::default();
let hashes = vec![Hash::default()];
let write_versions = vec![0];
let accounts = vec![(&pubkey, &account)];
let accounts = [(&pubkey, &account)];
let accounts2 = (slot, &accounts[..], INCLUDE_SLOT_IN_HASH_TESTS);
let storable =
StorableAccountsWithHashesAndWriteVersions::new_with_hashes_and_write_versions(
@@ -861,7 +861,7 @@
}
.to_account_shared_data();
// for (Slot, &'a [(&'a Pubkey, &'a T)], IncludeSlotInHash)
let accounts = vec![(&pubkey, &account)];
let accounts = [(&pubkey, &account)];
let accounts2 = (slot, &accounts[..], INCLUDE_SLOT_IN_HASH_TESTS);
let storable =
StorableAccountsWithHashesAndWriteVersions::new_with_hashes_and_write_versions(
2 changes: 1 addition & 1 deletion accounts-db/src/storable_accounts.rs
@@ -571,7 +571,7 @@ pub mod tests {
let remaining2 = entries.saturating_sub(entries0 + entries1);
for entries2 in 0..=remaining2 {
let remaining3 = entries.saturating_sub(entries0 + entries1 + entries2);
let entries_by_level = vec![entries0, entries1, entries2, remaining3];
let entries_by_level = [entries0, entries1, entries2, remaining3];
let mut overall_index = 0;
let mut expected_slots = Vec::default();
let slots_and_accounts = entries_by_level
8 changes: 4 additions & 4 deletions cli-output/src/display.rs
@@ -837,7 +837,7 @@ mod test {

assert_eq!(
output,
r#"Block Time: 2021-08-10T22:16:31Z
r"Block Time: 2021-08-10T22:16:31Z
Version: legacy
Recent Blockhash: 11111111111111111111111111111111
Signature 0: 5pkjrE4VBa3Bu9CMKXgh1U345cT1gGo8QBVRTzHAo6gHeiPae5BTbShP15g6NgqRMNqu8Qrhph1ATmrfC1Ley3rx (pass)
@@ -860,7 +860,7 @@ Return Data from Program 8qbHbw2BbbTHBW1sbeqakYXVKRQM8Ne7pLK7m6CVfeR:
Rewards:
Address Type Amount New Balance \0
4vJ9JU1bJJE96FWSJKvHsmmFADCg4gpZQff4P3bkLKi rent -◎0.000000100 ◎0.000009900 \0
"#.replace("\\0", "") // replace marker used to subvert trailing whitespace linter on CI
".replace("\\0", "") // replace marker used to subvert trailing whitespace linter on CI
);
}

@@ -916,7 +916,7 @@ Rewards:

assert_eq!(
output,
r#"Block Time: 2021-08-10T22:16:31Z
r"Block Time: 2021-08-10T22:16:31Z
Version: 0
Recent Blockhash: 11111111111111111111111111111111
Signature 0: 5iEy3TT3ZhTA1NkuCY8GrQGNVY8d5m1bpjdh5FT3Ca4Py81fMipAZjafDuKJKrkw5q5UAAd8oPcgZ4nyXpHt4Fp7 (pass)
@@ -948,7 +948,7 @@ Return Data from Program 8qbHbw2BbbTHBW1sbeqakYXVKRQM8Ne7pLK7m6CVfeR:
Rewards:
Address Type Amount New Balance \0
CktRuQ2mttgRGkXJtyksdKHjUdc2C4TgDzyB98oEzy8 rent -◎0.000000100 ◎0.000014900 \0
"#.replace("\\0", "") // replace marker used to subvert trailing whitespace linter on CI
".replace("\\0", "") // replace marker used to subvert trailing whitespace linter on CI
);
}

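The only change to these long expected-output literals is dropping the `#` guards from the raw strings: the text contains no embedded double quotes, so a bare `r"..."` raw string suffices, which is what the newly added `needless_raw_string_hashes` lint asks for. A tiny sketch:

```rust
fn main() {
    // Old form: the `#` guards are only required when the literal contains a `"`.
    let with_hashes = r#"Block Time: 2021-08-10T22:16:31Z"#;

    // New form: no embedded quotes here, so the plain raw string is equivalent.
    let without_hashes = r"Block Time: 2021-08-10T22:16:31Z";

    assert_eq!(with_hashes, without_hashes);
}
```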
6 changes: 3 additions & 3 deletions cli/src/cli.rs
@@ -1919,8 +1919,8 @@ mod tests {
assert!(parse_command(&test_bad_signature, &default_signer, &mut None).is_err());

// Test CreateAddressWithSeed
let from_pubkey = Some(solana_sdk::pubkey::new_rand());
let from_str = from_pubkey.unwrap().to_string();
let from_pubkey = solana_sdk::pubkey::new_rand();
let from_str = from_pubkey.to_string();
for (name, program_id) in &[
("STAKE", stake::program::id()),
("VOTE", solana_vote_program::id()),
@@ -1938,7 +1938,7 @@
parse_command(&test_create_address_with_seed, &default_signer, &mut None).unwrap(),
CliCommandInfo {
command: CliCommand::CreateAddressWithSeed {
from_pubkey,
from_pubkey: Some(from_pubkey),
seed: "seed".to_string(),
program_id: *program_id
},
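The CreateAddressWithSeed test no longer stores the freshly generated pubkey inside an `Option` just to `unwrap()` it on the next line; it keeps the plain value and wraps it in `Some(...)` only where the command struct expects an `Option`. A hedged sketch with `String` standing in for `Pubkey`:

```rust
fn main() {
    // Old form: wrap first, unwrap immediately afterwards.
    //     let from_pubkey = Some(solana_sdk::pubkey::new_rand());
    //     let from_str = from_pubkey.unwrap().to_string();
    // New form: keep the plain value and wrap it only where an Option is required.
    let from_pubkey = String::from("4vJ9JU1bJJE96FWSJKvHsmmFADCg4gpZQff4P3bkLKi");
    let from_str = from_pubkey.to_string();

    // Stand-in for the command field typed as Option<Pubkey>.
    let command_field: Option<String> = Some(from_pubkey);

    assert_eq!(command_field.as_deref(), Some(from_str.as_str()));
}
```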
2 changes: 1 addition & 1 deletion core/benches/banking_stage.rs
@@ -102,7 +102,7 @@ fn bench_consume_buffered(bencher: &mut Bencher) {
.collect::<Vec<_>>();
let batches_len = batches.len();
let mut transaction_buffer = UnprocessedTransactionStorage::new_transaction_storage(
UnprocessedPacketBatches::from_iter(batches.into_iter(), 2 * batches_len),
UnprocessedPacketBatches::from_iter(batches, 2 * batches_len),
ThreadType::Transactions,
);
let (s, _r) = unbounded();
6 changes: 3 additions & 3 deletions core/src/banking_stage/consumer.rs
@@ -1834,7 +1834,7 @@ mod tests {
let mut buffered_packet_batches =
UnprocessedTransactionStorage::new_transaction_storage(
UnprocessedPacketBatches::from_iter(
deserialized_packets.into_iter(),
deserialized_packets,
num_conflicting_transactions,
),
ThreadType::Transactions,
@@ -1912,7 +1912,7 @@ mod tests {
let mut buffered_packet_batches =
UnprocessedTransactionStorage::new_transaction_storage(
UnprocessedPacketBatches::from_iter(
deserialized_packets.into_iter(),
deserialized_packets,
num_conflicting_transactions,
),
ThreadType::Transactions,
@@ -1964,7 +1964,7 @@ mod tests {
let mut buffered_packet_batches =
UnprocessedTransactionStorage::new_transaction_storage(
UnprocessedPacketBatches::from_iter(
deserialized_packets.into_iter(),
deserialized_packets,
num_conflicting_transactions,
),
ThreadType::Transactions,
5 changes: 1 addition & 4 deletions core/src/banking_stage/forwarder.rs
@@ -441,10 +441,7 @@ mod tests {
};

let mut unprocessed_packet_batches = UnprocessedTransactionStorage::new_transaction_storage(
UnprocessedPacketBatches::from_iter(
vec![forwarded_packet, normal_packet].into_iter(),
2,
),
UnprocessedPacketBatches::from_iter(vec![forwarded_packet, normal_packet], 2),
ThreadType::Transactions,
);
let connection_cache = ConnectionCache::new("connection_cache_test");
2 changes: 1 addition & 1 deletion core/src/banking_stage/immutable_deserialized_packet.rs
@@ -162,6 +162,6 @@ mod tests {
let packet = Packet::from_data(None, tx).unwrap();
let deserialized_packet = ImmutableDeserializedPacket::new(packet);

assert!(matches!(deserialized_packet, Ok(_)));
assert!(deserialized_packet.is_ok());
}
}
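The assertion swaps `matches!(deserialized_packet, Ok(_))` for the direct `is_ok()` call, which reads better and is what clippy's `redundant_pattern_matching` lint (most likely the trigger here) suggests for bare `Ok(_)` / `Err(_)` patterns. A minimal sketch:

```rust
fn parse(input: &str) -> Result<u32, std::num::ParseIntError> {
    input.parse()
}

fn main() {
    let parsed = parse("42");

    // Old form: assert!(matches!(parsed, Ok(_)));
    // New form: the dedicated helper carries the same meaning.
    assert!(parsed.is_ok());
}
```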
12 changes: 6 additions & 6 deletions core/src/banking_stage/unprocessed_transaction_storage.rs
@@ -1046,7 +1046,7 @@ mod tests {
// all packets are forwarded
{
let buffered_packet_batches: UnprocessedPacketBatches =
UnprocessedPacketBatches::from_iter(packets.clone().into_iter(), packets.len());
UnprocessedPacketBatches::from_iter(packets.clone(), packets.len());
let mut transaction_storage = UnprocessedTransactionStorage::new_transaction_storage(
buffered_packet_batches,
ThreadType::Transactions,
@@ -1085,7 +1085,7 @@ mod tests {
packet.forwarded = true;
}
let buffered_packet_batches: UnprocessedPacketBatches =
UnprocessedPacketBatches::from_iter(packets.clone().into_iter(), packets.len());
UnprocessedPacketBatches::from_iter(packets.clone(), packets.len());
let mut transaction_storage = UnprocessedTransactionStorage::new_transaction_storage(
buffered_packet_batches,
ThreadType::Transactions,
@@ -1119,7 +1119,7 @@ mod tests {
assert_eq!(current_bank.process_transaction(tx), Ok(()));
}
let buffered_packet_batches: UnprocessedPacketBatches =
UnprocessedPacketBatches::from_iter(packets.clone().into_iter(), packets.len());
UnprocessedPacketBatches::from_iter(packets.clone(), packets.len());
let mut transaction_storage = UnprocessedTransactionStorage::new_transaction_storage(
buffered_packet_batches,
ThreadType::Transactions,
@@ -1285,7 +1285,7 @@ mod tests {
// all tracer packets are forwardable
{
let buffered_packet_batches: UnprocessedPacketBatches =
UnprocessedPacketBatches::from_iter(packets.clone().into_iter(), packets.len());
UnprocessedPacketBatches::from_iter(packets.clone(), packets.len());
let (
total_tracer_packets_in_buffer,
total_packets_to_forward,
@@ -1303,7 +1303,7 @@
packet.forwarded = true;
}
let buffered_packet_batches: UnprocessedPacketBatches =
UnprocessedPacketBatches::from_iter(packets.clone().into_iter(), packets.len());
UnprocessedPacketBatches::from_iter(packets.clone(), packets.len());
let (
total_tracer_packets_in_buffer,
total_packets_to_forward,
@@ -1320,7 +1320,7 @@
packet.forwarded = true;
}
let buffered_packet_batches: UnprocessedPacketBatches =
UnprocessedPacketBatches::from_iter(packets.clone().into_iter(), packets.len());
UnprocessedPacketBatches::from_iter(packets.clone(), packets.len());
let (
total_tracer_packets_in_buffer,
total_packets_to_forward,
2 changes: 1 addition & 1 deletion core/src/cluster_info_vote_listener.rs
@@ -1197,7 +1197,7 @@ mod tests {
let all_expected_slots: BTreeSet<_> = gossip_vote_slots
.clone()
.into_iter()
.chain(replay_vote_slots.clone().into_iter())
.chain(replay_vote_slots.clone())
.collect();
let mut pubkey_to_votes: HashMap<Pubkey, BTreeSet<Slot>> = HashMap::new();
for (received_pubkey, new_votes) in verified_vote_receiver.try_iter() {