feat(storage): rename tables (#6787)
shekhirin authored Feb 27, 2024
1 parent e0128c7 commit 46bb03f
Showing 66 changed files with 729 additions and 655 deletions.
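
The rename pluralizes and clarifies the reth_db table names, so every call site that names a table as a type parameter changes in lockstep. Below is a summary of the old-to-new mapping visible in the hunks shown here, followed by a hypothetical migrated call site; the helper function, its name, and the eyre error handling are illustrative assumptions, while the table names and the DbTx/DbCursorRO calls mirror the diff.

use reth_db::{cursor::DbCursorRO, tables, transaction::DbTx};

// Table renames applied by this commit (old name -> new name), as seen in the hunks below:
//   AccountChangeSet   -> AccountChangeSets
//   AccountHistory     -> AccountsHistory
//   HashedAccount      -> HashedAccounts
//   HashedStorage      -> HashedStorages
//   HeaderTD           -> HeaderTerminalDifficulties
//   StorageChangeSet   -> StorageChangeSets
//   StorageHistory     -> StoragesHistory
//   SyncStage          -> StageCheckpoints
//   SyncStageProgress  -> StageCheckpointProgresses
//   TransactionBlock   -> TransactionBlocks
//   TxHashNumber       -> TransactionHashNumbers
//   TxSenders          -> TransactionSenders

// Hypothetical helper showing a migrated call site: only the table type
// parameter changes, the surrounding cursor API stays the same.
fn count_hashed_accounts<Tx: DbTx>(tx: &Tx) -> eyre::Result<usize> {
    // Before this commit: tx.cursor_read::<tables::HashedAccount>()?
    let mut cursor = tx.cursor_read::<tables::HashedAccounts>()?;
    let mut count = 0;
    let mut entry = cursor.first()?;
    while entry.is_some() {
        count += 1;
        entry = cursor.next()?;
    }
    Ok(count)
}
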
57 changes: 32 additions & 25 deletions bin/reth/src/commands/db/diff.rs
@@ -6,11 +6,12 @@ use crate::{
use clap::Parser;
use reth_db::{
cursor::DbCursorRO, database::Database, mdbx::DatabaseArguments, open_db_read_only,
table::Table, transaction::DbTx, AccountChangeSet, AccountHistory, AccountsTrie,
table::Table, transaction::DbTx, AccountChangeSets, AccountsHistory, AccountsTrie,
BlockBodyIndices, BlockOmmers, BlockWithdrawals, Bytecodes, CanonicalHeaders, DatabaseEnv,
HashedAccount, HashedStorage, HeaderNumbers, HeaderTD, Headers, PlainAccountState,
PlainStorageState, PruneCheckpoints, Receipts, StorageChangeSet, StorageHistory, StoragesTrie,
SyncStage, SyncStageProgress, Tables, TransactionBlock, Transactions, TxHashNumber, TxSenders,
HashedAccounts, HashedStorages, HeaderNumbers, HeaderTerminalDifficulties, Headers,
PlainAccountState, PlainStorageState, PruneCheckpoints, Receipts, StageCheckpointProgresses,
StageCheckpoints, StorageChangeSets, StoragesHistory, StoragesTrie, Tables, TransactionBlocks,
TransactionHashNumbers, TransactionSenders, Transactions,
};
use std::{
collections::HashMap,
@@ -78,7 +79,9 @@ impl Command {
Tables::CanonicalHeaders => {
find_diffs::<CanonicalHeaders>(primary_tx, secondary_tx, output_dir)?
}
Tables::HeaderTD => find_diffs::<HeaderTD>(primary_tx, secondary_tx, output_dir)?,
Tables::HeaderTerminalDifficulties => {
find_diffs::<HeaderTerminalDifficulties>(primary_tx, secondary_tx, output_dir)?
}
Tables::HeaderNumbers => {
find_diffs::<HeaderNumbers>(primary_tx, secondary_tx, output_dir)?
}
@@ -92,14 +95,14 @@
Tables::BlockWithdrawals => {
find_diffs::<BlockWithdrawals>(primary_tx, secondary_tx, output_dir)?
}
Tables::TransactionBlock => {
find_diffs::<TransactionBlock>(primary_tx, secondary_tx, output_dir)?
Tables::TransactionBlocks => {
find_diffs::<TransactionBlocks>(primary_tx, secondary_tx, output_dir)?
}
Tables::Transactions => {
find_diffs::<Transactions>(primary_tx, secondary_tx, output_dir)?
}
Tables::TxHashNumber => {
find_diffs::<TxHashNumber>(primary_tx, secondary_tx, output_dir)?
Tables::TransactionHashNumbers => {
find_diffs::<TransactionHashNumbers>(primary_tx, secondary_tx, output_dir)?
}
Tables::Receipts => find_diffs::<Receipts>(primary_tx, secondary_tx, output_dir)?,
Tables::PlainAccountState => {
@@ -109,34 +112,38 @@
find_diffs::<PlainStorageState>(primary_tx, secondary_tx, output_dir)?
}
Tables::Bytecodes => find_diffs::<Bytecodes>(primary_tx, secondary_tx, output_dir)?,
Tables::AccountHistory => {
find_diffs::<AccountHistory>(primary_tx, secondary_tx, output_dir)?
Tables::AccountsHistory => {
find_diffs::<AccountsHistory>(primary_tx, secondary_tx, output_dir)?
}
Tables::StorageHistory => {
find_diffs::<StorageHistory>(primary_tx, secondary_tx, output_dir)?
Tables::StoragesHistory => {
find_diffs::<StoragesHistory>(primary_tx, secondary_tx, output_dir)?
}
Tables::AccountChangeSet => {
find_diffs::<AccountChangeSet>(primary_tx, secondary_tx, output_dir)?
Tables::AccountChangeSets => {
find_diffs::<AccountChangeSets>(primary_tx, secondary_tx, output_dir)?
}
Tables::StorageChangeSet => {
find_diffs::<StorageChangeSet>(primary_tx, secondary_tx, output_dir)?
Tables::StorageChangeSets => {
find_diffs::<StorageChangeSets>(primary_tx, secondary_tx, output_dir)?
}
Tables::HashedAccount => {
find_diffs::<HashedAccount>(primary_tx, secondary_tx, output_dir)?
Tables::HashedAccounts => {
find_diffs::<HashedAccounts>(primary_tx, secondary_tx, output_dir)?
}
Tables::HashedStorage => {
find_diffs::<HashedStorage>(primary_tx, secondary_tx, output_dir)?
Tables::HashedStorages => {
find_diffs::<HashedStorages>(primary_tx, secondary_tx, output_dir)?
}
Tables::AccountsTrie => {
find_diffs::<AccountsTrie>(primary_tx, secondary_tx, output_dir)?
}
Tables::StoragesTrie => {
find_diffs::<StoragesTrie>(primary_tx, secondary_tx, output_dir)?
}
Tables::TxSenders => find_diffs::<TxSenders>(primary_tx, secondary_tx, output_dir)?,
Tables::SyncStage => find_diffs::<SyncStage>(primary_tx, secondary_tx, output_dir)?,
Tables::SyncStageProgress => {
find_diffs::<SyncStageProgress>(primary_tx, secondary_tx, output_dir)?
Tables::TransactionSenders => {
find_diffs::<TransactionSenders>(primary_tx, secondary_tx, output_dir)?
}
Tables::StageCheckpoints => {
find_diffs::<StageCheckpoints>(primary_tx, secondary_tx, output_dir)?
}
Tables::StageCheckpointProgresses => {
find_diffs::<StageCheckpointProgresses>(primary_tx, secondary_tx, output_dir)?
}
Tables::PruneCheckpoints => {
find_diffs::<PruneCheckpoints>(primary_tx, secondary_tx, output_dir)?
19 changes: 10 additions & 9 deletions bin/reth/src/commands/db/get.rs
@@ -113,7 +113,7 @@ mod tests {
use clap::{Args, Parser};
use reth_db::{
models::{storage_sharded_key::StorageShardedKey, ShardedKey},
AccountHistory, HashedAccount, Headers, StorageHistory, SyncStage,
AccountsHistory, HashedAccounts, Headers, StageCheckpoints, StoragesHistory,
};
use reth_primitives::{Address, B256};
use std::str::FromStr;
@@ -132,12 +132,12 @@

let args = CommandParser::<Command>::parse_from([
"reth",
"HashedAccount",
"HashedAccounts",
"0x0ac361fe774b78f8fc4e86c1916930d150865c3fc2e21dca2e58833557608bac",
])
.args;
assert_eq!(
args.table_key::<HashedAccount>().unwrap(),
args.table_key::<HashedAccounts>().unwrap(),
B256::from_str("0x0ac361fe774b78f8fc4e86c1916930d150865c3fc2e21dca2e58833557608bac")
.unwrap()
);
@@ -146,15 +146,16 @@
#[test]
fn parse_string_key_args() {
let args =
CommandParser::<Command>::parse_from(["reth", "SyncStage", "MerkleExecution"]).args;
assert_eq!(args.table_key::<SyncStage>().unwrap(), "MerkleExecution");
CommandParser::<Command>::parse_from(["reth", "StageCheckpoints", "MerkleExecution"])
.args;
assert_eq!(args.table_key::<StageCheckpoints>().unwrap(), "MerkleExecution");
}

#[test]
fn parse_json_key_args() {
let args = CommandParser::<Command>::parse_from(["reth", "StorageHistory", r#"{ "address": "0x01957911244e546ce519fbac6f798958fafadb41", "sharded_key": { "key": "0x0000000000000000000000000000000000000000000000000000000000000003", "highest_block_number": 18446744073709551615 } }"#]).args;
let args = CommandParser::<Command>::parse_from(["reth", "StoragesHistory", r#"{ "address": "0x01957911244e546ce519fbac6f798958fafadb41", "sharded_key": { "key": "0x0000000000000000000000000000000000000000000000000000000000000003", "highest_block_number": 18446744073709551615 } }"#]).args;
assert_eq!(
args.table_key::<StorageHistory>().unwrap(),
args.table_key::<StoragesHistory>().unwrap(),
StorageShardedKey::new(
Address::from_str("0x01957911244e546ce519fbac6f798958fafadb41").unwrap(),
B256::from_str(
@@ -168,9 +169,9 @@

#[test]
fn parse_json_key_for_account_history() {
let args = CommandParser::<Command>::parse_from(["reth", "AccountHistory", r#"{ "key": "0x4448e1273fd5a8bfdb9ed111e96889c960eee145", "highest_block_number": 18446744073709551615 }"#]).args;
let args = CommandParser::<Command>::parse_from(["reth", "AccountsHistory", r#"{ "key": "0x4448e1273fd5a8bfdb9ed111e96889c960eee145", "highest_block_number": 18446744073709551615 }"#]).args;
assert_eq!(
args.table_key::<AccountHistory>().unwrap(),
args.table_key::<AccountsHistory>().unwrap(),
ShardedKey::new(
Address::from_str("0x4448e1273fd5a8bfdb9ed111e96889c960eee145").unwrap(),
18446744073709551615
2 changes: 1 addition & 1 deletion bin/reth/src/commands/recover/storage_tries.rs
@@ -62,7 +62,7 @@ impl Command {

let mut deleted_tries = 0;
let tx_mut = provider.tx_mut();
let mut hashed_account_cursor = tx_mut.cursor_read::<tables::HashedAccount>()?;
let mut hashed_account_cursor = tx_mut.cursor_read::<tables::HashedAccounts>()?;
let mut storage_trie_cursor = tx_mut.cursor_dup_read::<tables::StoragesTrie>()?;
let mut entry = storage_trie_cursor.first()?;

57 changes: 30 additions & 27 deletions bin/reth/src/commands/stage/drop.rs
@@ -66,100 +66,103 @@ impl Command {
StageEnum::Bodies => {
tx.clear::<tables::BlockBodyIndices>()?;
tx.clear::<tables::Transactions>()?;
tx.clear::<tables::TransactionBlock>()?;
tx.clear::<tables::TransactionBlocks>()?;
tx.clear::<tables::BlockOmmers>()?;
tx.clear::<tables::BlockWithdrawals>()?;
tx.put::<tables::SyncStage>(StageId::Bodies.to_string(), Default::default())?;
tx.put::<tables::StageCheckpoints>(
StageId::Bodies.to_string(),
Default::default(),
)?;
insert_genesis_header::<DatabaseEnv>(tx, self.chain)?;
}
StageEnum::Senders => {
tx.clear::<tables::TxSenders>()?;
tx.put::<tables::SyncStage>(
tx.clear::<tables::TransactionSenders>()?;
tx.put::<tables::StageCheckpoints>(
StageId::SenderRecovery.to_string(),
Default::default(),
)?;
}
StageEnum::Execution => {
tx.clear::<tables::PlainAccountState>()?;
tx.clear::<tables::PlainStorageState>()?;
tx.clear::<tables::AccountChangeSet>()?;
tx.clear::<tables::StorageChangeSet>()?;
tx.clear::<tables::AccountChangeSets>()?;
tx.clear::<tables::StorageChangeSets>()?;
tx.clear::<tables::Bytecodes>()?;
tx.clear::<tables::Receipts>()?;
tx.put::<tables::SyncStage>(
tx.put::<tables::StageCheckpoints>(
StageId::Execution.to_string(),
Default::default(),
)?;
insert_genesis_state::<DatabaseEnv>(tx, self.chain.genesis())?;
}
StageEnum::AccountHashing => {
tx.clear::<tables::HashedAccount>()?;
tx.put::<tables::SyncStage>(
tx.clear::<tables::HashedAccounts>()?;
tx.put::<tables::StageCheckpoints>(
StageId::AccountHashing.to_string(),
Default::default(),
)?;
}
StageEnum::StorageHashing => {
tx.clear::<tables::HashedStorage>()?;
tx.put::<tables::SyncStage>(
tx.clear::<tables::HashedStorages>()?;
tx.put::<tables::StageCheckpoints>(
StageId::StorageHashing.to_string(),
Default::default(),
)?;
}
StageEnum::Hashing => {
// Clear hashed accounts
tx.clear::<tables::HashedAccount>()?;
tx.put::<tables::SyncStage>(
tx.clear::<tables::HashedAccounts>()?;
tx.put::<tables::StageCheckpoints>(
StageId::AccountHashing.to_string(),
Default::default(),
)?;

// Clear hashed storages
tx.clear::<tables::HashedStorage>()?;
tx.put::<tables::SyncStage>(
tx.clear::<tables::HashedStorages>()?;
tx.put::<tables::StageCheckpoints>(
StageId::StorageHashing.to_string(),
Default::default(),
)?;
}
StageEnum::Merkle => {
tx.clear::<tables::AccountsTrie>()?;
tx.clear::<tables::StoragesTrie>()?;
tx.put::<tables::SyncStage>(
tx.put::<tables::StageCheckpoints>(
StageId::MerkleExecute.to_string(),
Default::default(),
)?;
tx.put::<tables::SyncStage>(
tx.put::<tables::StageCheckpoints>(
StageId::MerkleUnwind.to_string(),
Default::default(),
)?;
tx.delete::<tables::SyncStageProgress>(
tx.delete::<tables::StageCheckpointProgresses>(
StageId::MerkleExecute.to_string(),
None,
)?;
}
StageEnum::AccountHistory | StageEnum::StorageHistory => {
tx.clear::<tables::AccountHistory>()?;
tx.clear::<tables::StorageHistory>()?;
tx.put::<tables::SyncStage>(
tx.clear::<tables::AccountsHistory>()?;
tx.clear::<tables::StoragesHistory>()?;
tx.put::<tables::StageCheckpoints>(
StageId::IndexAccountHistory.to_string(),
Default::default(),
)?;
tx.put::<tables::SyncStage>(
tx.put::<tables::StageCheckpoints>(
StageId::IndexStorageHistory.to_string(),
Default::default(),
)?;
}
StageEnum::TotalDifficulty => {
tx.clear::<tables::HeaderTD>()?;
tx.put::<tables::SyncStage>(
tx.clear::<tables::HeaderTerminalDifficulties>()?;
tx.put::<tables::StageCheckpoints>(
StageId::TotalDifficulty.to_string(),
Default::default(),
)?;
insert_genesis_header::<DatabaseEnv>(tx, self.chain)?;
}
StageEnum::TxLookup => {
tx.clear::<tables::TxHashNumber>()?;
tx.put::<tables::SyncStage>(
tx.clear::<tables::TransactionHashNumbers>()?;
tx.put::<tables::StageCheckpoints>(
StageId::TransactionLookup.to_string(),
Default::default(),
)?;
@@ -171,7 +174,7 @@ impl Command {
}
}

tx.put::<tables::SyncStage>(StageId::Finish.to_string(), Default::default())?;
tx.put::<tables::StageCheckpoints>(StageId::Finish.to_string(), Default::default())?;

Ok::<_, eyre::Error>(())
})??;
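
The drop command above follows one pattern per stage: clear the stage's tables, then reset its row in tables::StageCheckpoints (formerly SyncStage). A minimal sketch of that pattern for the transaction-lookup stage, assuming the DbTxMut API used in this diff and that StageId is importable from reth_primitives::stage; the helper name is hypothetical, not part of the commit.

use reth_db::{tables, transaction::DbTxMut};
use reth_primitives::stage::StageId; // import path assumed for this sketch

// Hypothetical helper mirroring the drop-stage pattern above: wipe the
// stage's table, then reset its checkpoint so the stage is treated as not yet run.
fn reset_tx_lookup<Tx: DbTxMut>(tx: &Tx) -> eyre::Result<()> {
    tx.clear::<tables::TransactionHashNumbers>()?;
    tx.put::<tables::StageCheckpoints>(
        StageId::TransactionLookup.to_string(),
        Default::default(),
    )?;
    Ok(())
}
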
12 changes: 10 additions & 2 deletions bin/reth/src/commands/stage/dump/execution.rs
@@ -46,7 +46,11 @@ fn import_tables_with_range<DB: Database>(
tx.import_table_with_range::<tables::CanonicalHeaders, _>(&db_tool.db.tx()?, Some(from), to)
})??;
output_db.update(|tx| {
tx.import_table_with_range::<tables::HeaderTD, _>(&db_tool.db.tx()?, Some(from), to)
tx.import_table_with_range::<tables::HeaderTerminalDifficulties, _>(
&db_tool.db.tx()?,
Some(from),
to,
)
})??;
output_db.update(|tx| {
tx.import_table_with_range::<tables::Headers, _>(&db_tool.db.tx()?, Some(from), to)
@@ -81,7 +85,11 @@
})??;

output_db.update(|tx| {
tx.import_table_with_range::<tables::TxSenders, _>(&db_tool.db.tx()?, Some(from_tx), to_tx)
tx.import_table_with_range::<tables::TransactionSenders, _>(
&db_tool.db.tx()?,
Some(from_tx),
to_tx,
)
})??;

Ok(())
6 changes: 5 additions & 1 deletion bin/reth/src/commands/stage/dump/hashing_account.rs
@@ -19,7 +19,11 @@ pub(crate) async fn dump_hashing_account_stage<DB: Database>(

// Import relevant AccountChangeSets
output_db.update(|tx| {
tx.import_table_with_range::<tables::AccountChangeSet, _>(&db_tool.db.tx()?, Some(from), to)
tx.import_table_with_range::<tables::AccountChangeSets, _>(
&db_tool.db.tx()?,
Some(from),
to,
)
})??;

unwind_and_copy(db_tool, from, tip_block_number, &output_db)?;
3 changes: 2 additions & 1 deletion bin/reth/src/commands/stage/dump/hashing_storage.rs
@@ -51,7 +51,8 @@ fn unwind_and_copy<DB: Database>(
// TODO optimize we can actually just get the entries we need for both these tables
output_db
.update(|tx| tx.import_dupsort::<tables::PlainStorageState, _>(&unwind_inner_tx))??;
output_db.update(|tx| tx.import_dupsort::<tables::StorageChangeSet, _>(&unwind_inner_tx))??;
output_db
.update(|tx| tx.import_dupsort::<tables::StorageChangeSets, _>(&unwind_inner_tx))??;

Ok(())
}
13 changes: 9 additions & 4 deletions bin/reth/src/commands/stage/dump/merkle.rs
@@ -29,7 +29,11 @@ pub(crate) async fn dump_merkle_stage<DB: Database>(
})??;

output_db.update(|tx| {
tx.import_table_with_range::<tables::AccountChangeSet, _>(&db_tool.db.tx()?, Some(from), to)
tx.import_table_with_range::<tables::AccountChangeSets, _>(
&db_tool.db.tx()?,
Some(from),
to,
)
})??;

unwind_and_copy(db_tool, (from, to), tip_block_number, &output_db).await?;
@@ -100,10 +104,11 @@
let unwind_inner_tx = provider.into_tx();

// TODO optimize we can actually just get the entries we need
output_db.update(|tx| tx.import_dupsort::<tables::StorageChangeSet, _>(&unwind_inner_tx))??;
output_db
.update(|tx| tx.import_dupsort::<tables::StorageChangeSets, _>(&unwind_inner_tx))??;

output_db.update(|tx| tx.import_table::<tables::HashedAccount, _>(&unwind_inner_tx))??;
output_db.update(|tx| tx.import_dupsort::<tables::HashedStorage, _>(&unwind_inner_tx))??;
output_db.update(|tx| tx.import_table::<tables::HashedAccounts, _>(&unwind_inner_tx))??;
output_db.update(|tx| tx.import_dupsort::<tables::HashedStorages, _>(&unwind_inner_tx))??;
output_db.update(|tx| tx.import_table::<tables::AccountsTrie, _>(&unwind_inner_tx))??;
output_db.update(|tx| tx.import_dupsort::<tables::StoragesTrie, _>(&unwind_inner_tx))??;
