Skip to content

Commit

Permalink
Add ability to output components that go into Bank hash (solana-labs#…
Browse files Browse the repository at this point in the history
…32632)

When a consensus divergence occurs, the current workflow involves a
handful of manual steps to home in on the offending slot and
transaction. This process isn't overly difficult to execute; however, it
is tedious and currently involves creating and parsing logs.

This change introduces functionality to output a debug file that
contains the components that go into the bank hash. The file can be
generated in two ways:
- Via solana-validator when the node realizes it has diverged
- Via solana-ledger-tool verify by passing a flag

When a divergence occurs now, the steps to debug would be:
- Grab the file from the node that diverged
- Generate a file for the same slot with ledger-tool with a known good
  version
- Diff the files, they are pretty-printed json
  • Loading branch information
steviez committed Nov 16, 2023
1 parent a2847d0 commit cc01ef8
Show file tree
Hide file tree
Showing 10 changed files with 387 additions and 13 deletions.
3 changes: 3 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

3 changes: 2 additions & 1 deletion core/src/replay_stage.rs
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,7 @@ use {
solana_rpc_client_api::response::SlotUpdate,
solana_runtime::{
accounts_background_service::AbsRequestSender,
bank::{Bank, NewBankOptions},
bank::{bank_hash_details, Bank, NewBankOptions},
bank_forks::{BankForks, MAX_ROOT_DISTANCE_FOR_VOTE_ONLY},
commitment::BlockCommitmentCache,
prioritization_fee_cache::PrioritizationFeeCache,
Expand Down Expand Up @@ -1502,6 +1502,7 @@ impl ReplayStage {
let bank = w_bank_forks
.remove(*slot)
.expect("BankForks should not have been purged yet");
let _ = bank_hash_details::write_bank_hash_details_file(&bank);
((*slot, bank.bank_id()), bank)
})
.unzip()
Expand Down
1 change: 1 addition & 0 deletions ledger-tool/src/args.rs
Original file line number Diff line number Diff line change
Expand Up @@ -60,6 +60,7 @@ pub fn get_accounts_db_config(
accounts_hash_cache_path: Some(
ledger_path.join(AccountsDb::DEFAULT_ACCOUNTS_HASH_CACHE_DIR),
),
base_working_path: Some(ledger_path.to_path_buf()),
filler_accounts_config,
ancient_append_vec_offset: value_t!(arg_matches, "accounts_db_ancient_append_vecs", i64)
.ok(),
Expand Down
15 changes: 14 additions & 1 deletion ledger-tool/src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,7 @@ use {
accounts::Accounts,
accounts_db::CalcAccountsHashDataSource,
accounts_index::ScanConfig,
bank::{Bank, RewardCalculationEvent, TotalAccountsStats},
bank::{bank_hash_details, Bank, RewardCalculationEvent, TotalAccountsStats},
bank_forks::BankForks,
cost_model::CostModel,
cost_tracker::CostTracker,
Expand Down Expand Up @@ -1627,6 +1627,14 @@ fn main() {
.takes_value(false)
.help("After verifying the ledger, print some information about the account stores"),
)
.arg(
Arg::with_name("write_bank_file")
.long("write-bank-file")
.takes_value(false)
.help("After verifying the ledger, write a file that contains the information \
that went into computing the completed bank's bank hash. The file will be \
written within <LEDGER_DIR>/bank_hash_details/"),
)
).subcommand(
SubCommand::with_name("graph")
.about("Create a Graphviz rendering of the ledger")
Expand Down Expand Up @@ -2590,6 +2598,7 @@ fn main() {
..ProcessOptions::default()
};
let print_accounts_stats = arg_matches.is_present("print_accounts_stats");
let write_bank_file = arg_matches.is_present("write_bank_file");
let genesis_config = open_genesis_config_by(&ledger_path, arg_matches);
info!("genesis hash: {}", genesis_config.hash());

Expand All @@ -2615,6 +2624,10 @@ fn main() {
let working_bank = bank_forks.read().unwrap().working_bank();
working_bank.print_accounts_stats();
}
if write_bank_file {
let working_bank = bank_forks.read().unwrap().working_bank();
let _ = bank_hash_details::write_bank_hash_details_file(&working_bank);
}
exit_signal.store(true, Ordering::Relaxed);
system_monitor_service.join().unwrap();
}
Expand Down
3 changes: 3 additions & 0 deletions programs/sbf/Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

3 changes: 3 additions & 0 deletions runtime/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ edition = { workspace = true }

[dependencies]
arrayref = { workspace = true }
base64 = { workspace = true }
bincode = { workspace = true }
blake3 = { workspace = true }
bv = { workspace = true, features = ["serde"] }
Expand Down Expand Up @@ -43,6 +44,7 @@ rayon = { workspace = true }
regex = { workspace = true }
serde = { workspace = true, features = ["rc"] }
serde_derive = { workspace = true }
serde_json = { workspace = true }
solana-address-lookup-table-program = { workspace = true }
solana-bpf-loader-program = { workspace = true }
solana-bucket-map = { workspace = true }
Expand All @@ -59,6 +61,7 @@ solana-rayon-threadlimit = { workspace = true }
solana-sdk = { workspace = true }
solana-stake-program = { workspace = true }
solana-system-program = { workspace = true }
solana-version = { workspace = true }
solana-vote-program = { workspace = true }
solana-zk-token-proof-program = { workspace = true }
solana-zk-token-sdk = { workspace = true }
Expand Down
93 changes: 82 additions & 11 deletions runtime/src/accounts_db.rs
Original file line number Diff line number Diff line change
Expand Up @@ -469,6 +469,7 @@ pub(crate) struct ShrinkCollect<'a, T: ShrinkCollectRefs<'a>> {

pub const ACCOUNTS_DB_CONFIG_FOR_TESTING: AccountsDbConfig = AccountsDbConfig {
index: Some(ACCOUNTS_INDEX_CONFIG_FOR_TESTING),
base_working_path: None,
accounts_hash_cache_path: None,
filler_accounts_config: FillerAccountsConfig::const_default(),
write_cache_limit_bytes: None,
Expand All @@ -480,6 +481,7 @@ pub const ACCOUNTS_DB_CONFIG_FOR_TESTING: AccountsDbConfig = AccountsDbConfig {
};
pub const ACCOUNTS_DB_CONFIG_FOR_BENCHMARKS: AccountsDbConfig = AccountsDbConfig {
index: Some(ACCOUNTS_INDEX_CONFIG_FOR_BENCHMARKS),
base_working_path: None,
accounts_hash_cache_path: None,
filler_accounts_config: FillerAccountsConfig::const_default(),
write_cache_limit_bytes: None,
Expand Down Expand Up @@ -539,6 +541,8 @@ const ANCIENT_APPEND_VEC_DEFAULT_OFFSET: Option<i64> = Some(-10_000);
#[derive(Debug, Default, Clone)]
pub struct AccountsDbConfig {
pub index: Option<AccountsIndexConfig>,
/// Base directory for various necessary files
pub base_working_path: Option<PathBuf>,
pub accounts_hash_cache_path: Option<PathBuf>,
pub filler_accounts_config: FillerAccountsConfig,
pub write_cache_limit_bytes: Option<u64>,
Expand Down Expand Up @@ -1396,6 +1400,8 @@ pub struct AccountsDb {
pub(crate) paths: Vec<PathBuf>,

accounts_hash_cache_path: PathBuf,
/// Base directory for various necessary files
base_working_path: PathBuf,

// used by tests
// holds this until we are dropped
Expand Down Expand Up @@ -2347,6 +2353,13 @@ impl<'a> AppendVecScan for ScanState<'a> {
}
}

/// An account, together with its address and loaded hash, as captured from a
/// single slot's scan (see `get_pubkey_hash_account_for_slot`).
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct PubkeyHashAccount {
    /// Address of the account
    pub pubkey: Pubkey,
    /// Hash of the account as loaded from the cache/storage
    pub hash: Hash,
    /// The account's data and metadata
    pub account: AccountSharedData,
}

impl AccountsDb {
pub const DEFAULT_ACCOUNTS_HASH_CACHE_DIR: &str = "accounts_hash_cache";

Expand All @@ -2356,18 +2369,33 @@ impl AccountsDb {

fn default_with_accounts_index(
accounts_index: AccountInfoAccountsIndex,
accounts_hash_cache_path: Option<PathBuf>,
base_working_path: Option<PathBuf>,
) -> Self {
let num_threads = get_thread_count();
const MAX_READ_ONLY_CACHE_DATA_SIZE: usize = 400_000_000; // 400M bytes

let (accounts_hash_cache_path, temp_accounts_hash_cache_path) =
if let Some(accounts_hash_cache_path) = accounts_hash_cache_path {
(accounts_hash_cache_path, None)
} else {
let temp_dir = TempDir::new().expect("new tempdir");
let cache_path = temp_dir.path().to_path_buf();
(cache_path, Some(temp_dir))
let (base_working_path, accounts_hash_cache_path, temp_accounts_hash_cache_path) =
match base_working_path {
Some(base_working_path) => {
let accounts_hash_cache_path =
base_working_path.join(Self::DEFAULT_ACCOUNTS_HASH_CACHE_DIR);
(base_working_path, accounts_hash_cache_path, None)
}
None => {
let temp_accounts_hash_cache_path = Some(TempDir::new().unwrap());
let base_working_path = temp_accounts_hash_cache_path
.as_ref()
.unwrap()
.path()
.to_path_buf();
let accounts_hash_cache_path =
base_working_path.join(Self::DEFAULT_ACCOUNTS_HASH_CACHE_DIR);
(
base_working_path,
accounts_hash_cache_path,
temp_accounts_hash_cache_path,
)
}
};

let mut bank_hash_stats = HashMap::new();
Expand Down Expand Up @@ -2399,6 +2427,7 @@ impl AccountsDb {
write_version: AtomicU64::new(0),
paths: vec![],
accounts_hash_cache_path,
base_working_path,
temp_accounts_hash_cache_path,
shrink_paths: RwLock::new(None),
temp_paths: None,
Expand Down Expand Up @@ -2477,9 +2506,10 @@ impl AccountsDb {
accounts_db_config.as_mut().and_then(|x| x.index.take()),
exit,
);
let accounts_hash_cache_path = accounts_db_config
let base_working_path = accounts_db_config
.as_ref()
.and_then(|config| config.accounts_hash_cache_path.clone());
.and_then(|x| x.base_working_path.clone());

let filler_accounts_config = accounts_db_config
.as_ref()
.map(|config| config.filler_accounts_config)
Expand Down Expand Up @@ -2534,7 +2564,7 @@ impl AccountsDb {
.and_then(|x| x.write_cache_limit_bytes),
partitioned_epoch_rewards_config,
exhaustively_verify_refcounts,
..Self::default_with_accounts_index(accounts_index, accounts_hash_cache_path)
..Self::default_with_accounts_index(accounts_index, base_working_path)
};
if paths_is_empty {
// Create a temporary set of accounts directories, used primarily
Expand Down Expand Up @@ -2581,6 +2611,11 @@ impl AccountsDb {
self.file_size
}

/// Returns the base working directory, under which various necessary
/// files (e.g. the accounts hash cache) are kept.
pub fn get_base_working_path(&self) -> PathBuf {
    self.base_working_path.to_path_buf()
}

pub fn new_single_for_tests() -> Self {
AccountsDb::new_for_tests(Vec::new(), &ClusterType::Development)
}
Expand Down Expand Up @@ -7775,6 +7810,42 @@ impl AccountsDb {
(hashes, scan.as_us(), accumulate)
}

/// Return all of the accounts for a given slot
pub fn get_pubkey_hash_account_for_slot(&self, slot: Slot) -> Vec<PubkeyHashAccount> {
    type ScanResult =
        ScanStorageResult<PubkeyHashAccount, DashMap<Pubkey, (Hash, AccountSharedData)>>;
    let result: ScanResult = self.scan_account_storage(
        slot,
        |cached_account: LoadedAccount| {
            // Cache only has one version per key, don't need to worry about versioning
            Some(PubkeyHashAccount {
                pubkey: *cached_account.pubkey(),
                hash: cached_account.loaded_hash(),
                account: cached_account.take_account(),
            })
        },
        |map: &DashMap<Pubkey, (Hash, AccountSharedData)>, stored_account: LoadedAccount| {
            // Storage may have duplicates so only keep the latest version for each key
            map.insert(
                *stored_account.pubkey(),
                (
                    stored_account.loaded_hash(),
                    stored_account.take_account(),
                ),
            );
        },
    );

    match result {
        // Cached results are already in the final form; return them directly
        ScanStorageResult::Cached(accounts) => accounts,
        // Stored results were deduped into a map; flatten into the output type
        ScanStorageResult::Stored(map) => map
            .into_iter()
            .map(|(pubkey, (hash, account))| PubkeyHashAccount {
                pubkey,
                hash,
                account,
            })
            .collect(),
    }
}

/// Calculate accounts delta hash for `slot`
///
/// As part of calculating the accounts delta hash, get a list of accounts modified this slot
Expand Down
1 change: 1 addition & 0 deletions runtime/src/bank.rs
Original file line number Diff line number Diff line change
Expand Up @@ -200,6 +200,7 @@ struct VerifyAccountsHashConfig {
}

mod address_lookup_table;
pub mod bank_hash_details;
mod builtin_programs;
mod metrics;
mod sysvar_cache;
Expand Down
Loading

0 comments on commit cc01ef8

Please sign in to comment.